aboutsummaryrefslogtreecommitdiffstats
path: root/contrib/python
diff options
context:
space:
mode:
authorrekby <rekby@ydb.tech>2023-12-14 16:56:50 +0300
committerrekby <rekby@ydb.tech>2023-12-14 18:09:44 +0300
commitb2b2bb5997507072ca64548efe64447dd6395426 (patch)
treebbfbf77d11f1972c93ae4101fe561fd440d6ad6a /contrib/python
parent8b8678a6a4f57c62e348cdad8afd3849011a5f11 (diff)
downloadydb-b2b2bb5997507072ca64548efe64447dd6395426.tar.gz
KIKIMR-19900 switch arcadia to python ydb sdk from contrib
Этот PR создан скриптом - для переключения зависимостей на python ydb sdk с версии внутри ydb на код, приезжающий через контриб. Код в обеих версиях одинаковый, так что поломок/изменения функционала не ожидается. На всякий случай посмотрите свои проекты и если будут возражения пишите сюда в issues или в тикет KIKIMR-19900. Если всё ок - шипните, для определённости. При отсутствии блокеров PR будет перегенерирован и влит с force-мёрджем в четверг, 14 декабря.
Diffstat (limited to 'contrib/python')
-rw-r--r--contrib/python/aiohttp/.dist-info/METADATA255
-rw-r--r--contrib/python/aiohttp/.dist-info/top_level.txt1
-rw-r--r--contrib/python/aiohttp/LICENSE.txt13
-rw-r--r--contrib/python/aiohttp/README.rst204
-rw-r--r--contrib/python/aiohttp/aiohttp/__init__.py216
-rw-r--r--contrib/python/aiohttp/aiohttp/_cparser.pxd190
-rw-r--r--contrib/python/aiohttp/aiohttp/_find_header.c9870
-rw-r--r--contrib/python/aiohttp/aiohttp/_find_header.h14
-rw-r--r--contrib/python/aiohttp/aiohttp/_find_header.pxd2
-rw-r--r--contrib/python/aiohttp/aiohttp/_headers.pxi83
-rw-r--r--contrib/python/aiohttp/aiohttp/_helpers.pyx35
-rw-r--r--contrib/python/aiohttp/aiohttp/_http_parser.pyx818
-rw-r--r--contrib/python/aiohttp/aiohttp/_http_writer.pyx163
-rw-r--r--contrib/python/aiohttp/aiohttp/_websocket.pyx56
-rw-r--r--contrib/python/aiohttp/aiohttp/abc.py207
-rw-r--r--contrib/python/aiohttp/aiohttp/base_protocol.py87
-rw-r--r--contrib/python/aiohttp/aiohttp/client.py1304
-rw-r--r--contrib/python/aiohttp/aiohttp/client_exceptions.py342
-rw-r--r--contrib/python/aiohttp/aiohttp/client_proto.py251
-rw-r--r--contrib/python/aiohttp/aiohttp/client_reqrep.py1133
-rw-r--r--contrib/python/aiohttp/aiohttp/client_ws.py300
-rw-r--r--contrib/python/aiohttp/aiohttp/connector.py1451
-rw-r--r--contrib/python/aiohttp/aiohttp/cookiejar.py413
-rw-r--r--contrib/python/aiohttp/aiohttp/formdata.py172
-rw-r--r--contrib/python/aiohttp/aiohttp/hdrs.py114
-rw-r--r--contrib/python/aiohttp/aiohttp/helpers.py878
-rw-r--r--contrib/python/aiohttp/aiohttp/http.py72
-rw-r--r--contrib/python/aiohttp/aiohttp/http_exceptions.py105
-rw-r--r--contrib/python/aiohttp/aiohttp/http_parser.py956
-rw-r--r--contrib/python/aiohttp/aiohttp/http_websocket.py701
-rw-r--r--contrib/python/aiohttp/aiohttp/http_writer.py200
-rw-r--r--contrib/python/aiohttp/aiohttp/locks.py41
-rw-r--r--contrib/python/aiohttp/aiohttp/log.py8
-rw-r--r--contrib/python/aiohttp/aiohttp/multipart.py963
-rw-r--r--contrib/python/aiohttp/aiohttp/payload.py465
-rw-r--r--contrib/python/aiohttp/aiohttp/payload_streamer.py75
-rw-r--r--contrib/python/aiohttp/aiohttp/py.typed1
-rw-r--r--contrib/python/aiohttp/aiohttp/pytest_plugin.py391
-rw-r--r--contrib/python/aiohttp/aiohttp/resolver.py160
-rw-r--r--contrib/python/aiohttp/aiohttp/streams.py660
-rw-r--r--contrib/python/aiohttp/aiohttp/tcp_helpers.py38
-rw-r--r--contrib/python/aiohttp/aiohttp/test_utils.py698
-rw-r--r--contrib/python/aiohttp/aiohttp/tracing.py472
-rw-r--r--contrib/python/aiohttp/aiohttp/typedefs.py64
-rw-r--r--contrib/python/aiohttp/aiohttp/web.py586
-rw-r--r--contrib/python/aiohttp/aiohttp/web_app.py557
-rw-r--r--contrib/python/aiohttp/aiohttp/web_exceptions.py441
-rw-r--r--contrib/python/aiohttp/aiohttp/web_fileresponse.py288
-rw-r--r--contrib/python/aiohttp/aiohttp/web_log.py208
-rw-r--r--contrib/python/aiohttp/aiohttp/web_middlewares.py119
-rw-r--r--contrib/python/aiohttp/aiohttp/web_protocol.py681
-rw-r--r--contrib/python/aiohttp/aiohttp/web_request.py874
-rw-r--r--contrib/python/aiohttp/aiohttp/web_response.py825
-rw-r--r--contrib/python/aiohttp/aiohttp/web_routedef.py213
-rw-r--r--contrib/python/aiohttp/aiohttp/web_runner.py381
-rw-r--r--contrib/python/aiohttp/aiohttp/web_server.py62
-rw-r--r--contrib/python/aiohttp/aiohttp/web_urldispatcher.py1220
-rw-r--r--contrib/python/aiohttp/aiohttp/web_ws.py487
-rw-r--r--contrib/python/aiohttp/aiohttp/worker.py269
-rw-r--r--contrib/python/aiohttp/ya.make101
-rw-r--r--contrib/python/aiosignal/.dist-info/METADATA128
-rw-r--r--contrib/python/aiosignal/.dist-info/top_level.txt1
-rw-r--r--contrib/python/aiosignal/LICENSE201
-rw-r--r--contrib/python/aiosignal/README.rst94
-rw-r--r--contrib/python/aiosignal/aiosignal/__init__.py36
-rw-r--r--contrib/python/aiosignal/aiosignal/py.typed0
-rw-r--r--contrib/python/aiosignal/tests/conftest.py0
-rw-r--r--contrib/python/aiosignal/tests/test_signals.py160
-rw-r--r--contrib/python/aiosignal/tests/ya.make14
-rw-r--r--contrib/python/aiosignal/ya.make32
-rw-r--r--contrib/python/async-timeout/.dist-info/METADATA131
-rw-r--r--contrib/python/async-timeout/.dist-info/top_level.txt1
-rw-r--r--contrib/python/async-timeout/LICENSE13
-rw-r--r--contrib/python/async-timeout/README.rst100
-rw-r--r--contrib/python/async-timeout/async_timeout/__init__.py239
-rw-r--r--contrib/python/async-timeout/async_timeout/py.typed1
-rw-r--r--contrib/python/async-timeout/ya.make23
-rw-r--r--contrib/python/frozenlist/.dist-info/METADATA150
-rw-r--r--contrib/python/frozenlist/.dist-info/top_level.txt1
-rw-r--r--contrib/python/frozenlist/LICENSE201
-rw-r--r--contrib/python/frozenlist/README.rst117
-rw-r--r--contrib/python/frozenlist/frozenlist/__init__.py95
-rw-r--r--contrib/python/frozenlist/frozenlist/_frozenlist.pyx123
-rw-r--r--contrib/python/frozenlist/frozenlist/py.typed1
-rw-r--r--contrib/python/frozenlist/tests/conftest.py0
-rw-r--r--contrib/python/frozenlist/tests/test_frozenlist.py246
-rw-r--r--contrib/python/frozenlist/tests/ya.make14
-rw-r--r--contrib/python/frozenlist/ya.make32
-rw-r--r--contrib/python/multidict/.dist-info/METADATA130
-rw-r--r--contrib/python/multidict/.dist-info/top_level.txt1
-rw-r--r--contrib/python/multidict/LICENSE13
-rw-r--r--contrib/python/multidict/README.rst103
-rw-r--r--contrib/python/multidict/multidict/__init__.py48
-rw-r--r--contrib/python/multidict/multidict/_abc.py48
-rw-r--r--contrib/python/multidict/multidict/_compat.py14
-rw-r--r--contrib/python/multidict/multidict/_multidict.c1824
-rw-r--r--contrib/python/multidict/multidict/_multidict_base.py144
-rw-r--r--contrib/python/multidict/multidict/_multidict_py.py526
-rw-r--r--contrib/python/multidict/multidict/_multilib/defs.h22
-rw-r--r--contrib/python/multidict/multidict/_multilib/dict.h24
-rw-r--r--contrib/python/multidict/multidict/_multilib/istr.h85
-rw-r--r--contrib/python/multidict/multidict/_multilib/iter.h238
-rw-r--r--contrib/python/multidict/multidict/_multilib/pair_list.h1244
-rw-r--r--contrib/python/multidict/multidict/_multilib/views.h464
-rw-r--r--contrib/python/multidict/multidict/py.typed1
-rw-r--r--contrib/python/multidict/tests/__init__.py0
-rw-r--r--contrib/python/multidict/tests/cimultidict.pickle.014
-rw-r--r--contrib/python/multidict/tests/cimultidict.pickle.1bin0 -> 71 bytes
-rw-r--r--contrib/python/multidict/tests/cimultidict.pickle.2bin0 -> 70 bytes
-rw-r--r--contrib/python/multidict/tests/cimultidict.pickle.3bin0 -> 70 bytes
-rw-r--r--contrib/python/multidict/tests/cimultidict.pickle.4bin0 -> 73 bytes
-rw-r--r--contrib/python/multidict/tests/cimultidict.pickle.5bin0 -> 73 bytes
-rw-r--r--contrib/python/multidict/tests/conftest.py29
-rw-r--r--contrib/python/multidict/tests/gen_pickles.py32
-rw-r--r--contrib/python/multidict/tests/multidict.pickle.014
-rw-r--r--contrib/python/multidict/tests/multidict.pickle.1bin0 -> 69 bytes
-rw-r--r--contrib/python/multidict/tests/multidict.pickle.2bin0 -> 68 bytes
-rw-r--r--contrib/python/multidict/tests/multidict.pickle.3bin0 -> 68 bytes
-rw-r--r--contrib/python/multidict/tests/multidict.pickle.4bin0 -> 71 bytes
-rw-r--r--contrib/python/multidict/tests/multidict.pickle.5bin0 -> 71 bytes
-rw-r--r--contrib/python/multidict/tests/pycimultidict.pickle.014
-rw-r--r--contrib/python/multidict/tests/pycimultidict.pickle.1bin0 -> 74 bytes
-rw-r--r--contrib/python/multidict/tests/pycimultidict.pickle.2bin0 -> 73 bytes
-rw-r--r--contrib/python/multidict/tests/pycimultidict.pickle.3bin0 -> 73 bytes
-rw-r--r--contrib/python/multidict/tests/pycimultidict.pickle.4bin0 -> 76 bytes
-rw-r--r--contrib/python/multidict/tests/pycimultidict.pickle.5bin0 -> 76 bytes
-rw-r--r--contrib/python/multidict/tests/pymultidict.pickle.014
-rw-r--r--contrib/python/multidict/tests/pymultidict.pickle.1bin0 -> 72 bytes
-rw-r--r--contrib/python/multidict/tests/pymultidict.pickle.2bin0 -> 71 bytes
-rw-r--r--contrib/python/multidict/tests/pymultidict.pickle.3bin0 -> 71 bytes
-rw-r--r--contrib/python/multidict/tests/pymultidict.pickle.4bin0 -> 74 bytes
-rw-r--r--contrib/python/multidict/tests/pymultidict.pickle.5bin0 -> 74 bytes
-rw-r--r--contrib/python/multidict/tests/test_abc.py132
-rw-r--r--contrib/python/multidict/tests/test_copy.py79
-rw-r--r--contrib/python/multidict/tests/test_guard.py39
-rw-r--r--contrib/python/multidict/tests/test_istr.py83
-rw-r--r--contrib/python/multidict/tests/test_multidict.py602
-rw-r--r--contrib/python/multidict/tests/test_mutable_multidict.py510
-rw-r--r--contrib/python/multidict/tests/test_mypy.py278
-rw-r--r--contrib/python/multidict/tests/test_pickle.py82
-rw-r--r--contrib/python/multidict/tests/test_types.py109
-rw-r--r--contrib/python/multidict/tests/test_update.py147
-rw-r--r--contrib/python/multidict/tests/test_version.py199
-rw-r--r--contrib/python/multidict/tests/ya.make29
-rw-r--r--contrib/python/multidict/ya.make46
-rw-r--r--contrib/python/yarl/.dist-info/METADATA1010
-rw-r--r--contrib/python/yarl/.dist-info/top_level.txt1
-rw-r--r--contrib/python/yarl/LICENSE202
-rw-r--r--contrib/python/yarl/NOTICE13
-rw-r--r--contrib/python/yarl/README.rst209
-rw-r--r--contrib/python/yarl/tests/test_cache.py28
-rw-r--r--contrib/python/yarl/tests/test_cached_property.py45
-rw-r--r--contrib/python/yarl/tests/test_normalize_path.py34
-rw-r--r--contrib/python/yarl/tests/test_pickle.py23
-rw-r--r--contrib/python/yarl/tests/test_quoting.py450
-rw-r--r--contrib/python/yarl/tests/test_update_query.py366
-rw-r--r--contrib/python/yarl/tests/test_url.py1732
-rw-r--r--contrib/python/yarl/tests/test_url_build.py259
-rw-r--r--contrib/python/yarl/tests/test_url_cmp_and_hash.py88
-rw-r--r--contrib/python/yarl/tests/test_url_parsing.py582
-rw-r--r--contrib/python/yarl/tests/test_url_query.py173
-rw-r--r--contrib/python/yarl/tests/test_url_update_netloc.py228
-rw-r--r--contrib/python/yarl/tests/ya.make24
-rw-r--r--contrib/python/yarl/ya.make41
-rw-r--r--contrib/python/yarl/yarl/__init__.py5
-rw-r--r--contrib/python/yarl/yarl/_quoting.py18
-rw-r--r--contrib/python/yarl/yarl/_quoting_c.pyx371
-rw-r--r--contrib/python/yarl/yarl/_quoting_py.py197
-rw-r--r--contrib/python/yarl/yarl/_url.py1198
-rw-r--r--contrib/python/yarl/yarl/py.typed1
-rw-r--r--contrib/python/ydb/py2/.dist-info/METADATA63
-rw-r--r--contrib/python/ydb/py2/.dist-info/top_level.txt2
-rw-r--r--contrib/python/ydb/py2/AUTHORS4
-rw-r--r--contrib/python/ydb/py2/LICENSE202
-rw-r--r--contrib/python/ydb/py2/README.md37
-rw-r--r--contrib/python/ydb/py2/ya.make71
-rw-r--r--contrib/python/ydb/py2/ydb/__init__.py20
-rw-r--r--contrib/python/ydb/py2/ydb/_apis.py96
-rw-r--r--contrib/python/ydb/py2/ydb/_errors.py60
-rw-r--r--contrib/python/ydb/py2/ydb/_session_impl.py498
-rw-r--r--contrib/python/ydb/py2/ydb/_sp_impl.py399
-rw-r--r--contrib/python/ydb/py2/ydb/_tx_ctx_impl.py179
-rw-r--r--contrib/python/ydb/py2/ydb/_utilities.py161
-rw-r--r--contrib/python/ydb/py2/ydb/auth_helpers.py57
-rw-r--r--contrib/python/ydb/py2/ydb/connection.py550
-rw-r--r--contrib/python/ydb/py2/ydb/convert.py514
-rw-r--r--contrib/python/ydb/py2/ydb/credentials.py234
-rw-r--r--contrib/python/ydb/py2/ydb/dbapi/__init__.py47
-rw-r--r--contrib/python/ydb/py2/ydb/dbapi/connection.py92
-rw-r--r--contrib/python/ydb/py2/ydb/dbapi/cursor.py184
-rw-r--r--contrib/python/ydb/py2/ydb/dbapi/errors.py103
-rw-r--r--contrib/python/ydb/py2/ydb/default_pem.py4691
-rw-r--r--contrib/python/ydb/py2/ydb/driver.py242
-rw-r--r--contrib/python/ydb/py2/ydb/export.py280
-rw-r--r--contrib/python/ydb/py2/ydb/global_settings.py16
-rw-r--r--contrib/python/ydb/py2/ydb/iam/__init__.py3
-rw-r--r--contrib/python/ydb/py2/ydb/iam/auth.py197
-rw-r--r--contrib/python/ydb/py2/ydb/import_client.py162
-rw-r--r--contrib/python/ydb/py2/ydb/interceptor.py68
-rw-r--r--contrib/python/ydb/py2/ydb/issues.py206
-rw-r--r--contrib/python/ydb/py2/ydb/operation.py101
-rw-r--r--contrib/python/ydb/py2/ydb/pool.py528
-rw-r--r--contrib/python/ydb/py2/ydb/resolver.py209
-rw-r--r--contrib/python/ydb/py2/ydb/scheme.py493
-rw-r--r--contrib/python/ydb/py2/ydb/scripting.py109
-rw-r--r--contrib/python/ydb/py2/ydb/settings.py119
-rw-r--r--contrib/python/ydb/py2/ydb/sqlalchemy/__init__.py298
-rw-r--r--contrib/python/ydb/py2/ydb/sqlalchemy/types.py32
-rw-r--r--contrib/python/ydb/py2/ydb/table.py2682
-rw-r--r--contrib/python/ydb/py2/ydb/tracing.py188
-rw-r--r--contrib/python/ydb/py2/ydb/types.py445
-rw-r--r--contrib/python/ydb/py2/ydb/ydb_version.py1
-rw-r--r--contrib/python/ydb/py3/.dist-info/METADATA60
-rw-r--r--contrib/python/ydb/py3/.dist-info/top_level.txt2
-rw-r--r--contrib/python/ydb/py3/AUTHORS5
-rw-r--r--contrib/python/ydb/py3/LICENSE202
-rw-r--r--contrib/python/ydb/py3/README.md37
-rw-r--r--contrib/python/ydb/py3/ya.make98
-rw-r--r--contrib/python/ydb/py3/ydb/__init__.py82
-rw-r--r--contrib/python/ydb/py3/ydb/_apis.py111
-rw-r--r--contrib/python/ydb/py3/ydb/_errors.py53
-rw-r--r--contrib/python/ydb/py3/ydb/_grpc/__init__.py0
-rw-r--r--contrib/python/ydb/py3/ydb/_grpc/common/__init__.py13
-rw-r--r--contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/__init__.py0
-rw-r--r--contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/common_utils.py316
-rw-r--r--contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/ydb_scheme.py36
-rw-r--r--contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/ydb_topic.py1159
-rw-r--r--contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/ydb_topic_public_types.py198
-rw-r--r--contrib/python/ydb/py3/ydb/_session_impl.py456
-rw-r--r--contrib/python/ydb/py3/ydb/_sp_impl.py397
-rw-r--r--contrib/python/ydb/py3/ydb/_topic_common/__init__.py0
-rw-r--r--contrib/python/ydb/py3/ydb/_topic_common/common.py145
-rw-r--r--contrib/python/ydb/py3/ydb/_topic_reader/__init__.py0
-rw-r--r--contrib/python/ydb/py3/ydb/_topic_reader/datatypes.py173
-rw-r--r--contrib/python/ydb/py3/ydb/_topic_reader/topic_reader.py134
-rw-r--r--contrib/python/ydb/py3/ydb/_topic_reader/topic_reader_asyncio.py659
-rw-r--r--contrib/python/ydb/py3/ydb/_topic_reader/topic_reader_sync.py155
-rw-r--r--contrib/python/ydb/py3/ydb/_topic_writer/__init__.py0
-rw-r--r--contrib/python/ydb/py3/ydb/_topic_writer/topic_writer.py287
-rw-r--r--contrib/python/ydb/py3/ydb/_topic_writer/topic_writer_asyncio.py677
-rw-r--r--contrib/python/ydb/py3/ydb/_topic_writer/topic_writer_sync.py124
-rw-r--r--contrib/python/ydb/py3/ydb/_tx_ctx_impl.py171
-rw-r--r--contrib/python/ydb/py3/ydb/_utilities.py184
-rw-r--r--contrib/python/ydb/py3/ydb/aio/__init__.py2
-rw-r--r--contrib/python/ydb/py3/ydb/aio/_utilities.py20
-rw-r--r--contrib/python/ydb/py3/ydb/aio/connection.py254
-rw-r--r--contrib/python/ydb/py3/ydb/aio/credentials.py110
-rw-r--r--contrib/python/ydb/py3/ydb/aio/driver.py61
-rw-r--r--contrib/python/ydb/py3/ydb/aio/iam.py132
-rw-r--r--contrib/python/ydb/py3/ydb/aio/pool.py262
-rw-r--r--contrib/python/ydb/py3/ydb/aio/resolver.py68
-rw-r--r--contrib/python/ydb/py3/ydb/aio/scheme.py21
-rw-r--r--contrib/python/ydb/py3/ydb/aio/table.py499
-rw-r--r--contrib/python/ydb/py3/ydb/auth_helpers.py14
-rw-r--r--contrib/python/ydb/py3/ydb/connection.py517
-rw-r--r--contrib/python/ydb/py3/ydb/convert.py470
-rw-r--r--contrib/python/ydb/py3/ydb/credentials.py228
-rw-r--r--contrib/python/ydb/py3/ydb/dbapi/__init__.py47
-rw-r--r--contrib/python/ydb/py3/ydb/dbapi/connection.py87
-rw-r--r--contrib/python/ydb/py3/ydb/dbapi/cursor.py178
-rw-r--r--contrib/python/ydb/py3/ydb/dbapi/errors.py102
-rw-r--r--contrib/python/ydb/py3/ydb/default_pem.py4686
-rw-r--r--contrib/python/ydb/py3/ydb/driver.py254
-rw-r--r--contrib/python/ydb/py3/ydb/export.py286
-rw-r--r--contrib/python/ydb/py3/ydb/global_settings.py24
-rw-r--r--contrib/python/ydb/py3/ydb/iam/__init__.py3
-rw-r--r--contrib/python/ydb/py3/ydb/iam/auth.py178
-rw-r--r--contrib/python/ydb/py3/ydb/import_client.py164
-rw-r--r--contrib/python/ydb/py3/ydb/interceptor.py68
-rw-r--r--contrib/python/ydb/py3/ydb/issues.py226
-rw-r--r--contrib/python/ydb/py3/ydb/operation.py101
-rw-r--r--contrib/python/ydb/py3/ydb/pool.py504
-rw-r--r--contrib/python/ydb/py3/ydb/resolver.py216
-rw-r--r--contrib/python/ydb/py3/ydb/scheme.py510
-rw-r--r--contrib/python/ydb/py3/ydb/scripting.py104
-rw-r--r--contrib/python/ydb/py3/ydb/settings.py119
-rw-r--r--contrib/python/ydb/py3/ydb/sqlalchemy/__init__.py293
-rw-r--r--contrib/python/ydb/py3/ydb/sqlalchemy/types.py32
-rw-r--r--contrib/python/ydb/py3/ydb/table.py2594
-rw-r--r--contrib/python/ydb/py3/ydb/topic.py398
-rw-r--r--contrib/python/ydb/py3/ydb/tracing.py184
-rw-r--r--contrib/python/ydb/py3/ydb/types.py438
-rw-r--r--contrib/python/ydb/py3/ydb/ydb_version.py1
283 files changed, 83819 insertions, 0 deletions
diff --git a/contrib/python/aiohttp/.dist-info/METADATA b/contrib/python/aiohttp/.dist-info/METADATA
new file mode 100644
index 0000000000..c2f6befe9e
--- /dev/null
+++ b/contrib/python/aiohttp/.dist-info/METADATA
@@ -0,0 +1,255 @@
+Metadata-Version: 2.1
+Name: aiohttp
+Version: 3.8.1
+Summary: Async http client/server framework (asyncio)
+Home-page: https://github.com/aio-libs/aiohttp
+Maintainer: aiohttp team <team@aiohttp.org>
+Maintainer-email: team@aiohttp.org
+License: Apache 2
+Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby
+Project-URL: CI: GitHub Actions, https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI
+Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp
+Project-URL: Docs: Changelog, https://docs.aiohttp.org/en/stable/changes.html
+Project-URL: Docs: RTD, https://docs.aiohttp.org
+Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues
+Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Framework :: AsyncIO
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Topic :: Internet :: WWW/HTTP
+Requires-Python: >=3.6
+Description-Content-Type: text/x-rst
+License-File: LICENSE.txt
+Requires-Dist: attrs (>=17.3.0)
+Requires-Dist: charset-normalizer (<3.0,>=2.0)
+Requires-Dist: multidict (<7.0,>=4.5)
+Requires-Dist: async-timeout (<5.0,>=4.0.0a3)
+Requires-Dist: yarl (<2.0,>=1.0)
+Requires-Dist: frozenlist (>=1.1.1)
+Requires-Dist: aiosignal (>=1.1.2)
+Requires-Dist: idna-ssl (>=1.0) ; python_version < "3.7"
+Requires-Dist: asynctest (==0.13.0) ; python_version < "3.8"
+Requires-Dist: typing-extensions (>=3.7.4) ; python_version < "3.8"
+Provides-Extra: speedups
+Requires-Dist: aiodns ; extra == 'speedups'
+Requires-Dist: Brotli ; extra == 'speedups'
+Requires-Dist: cchardet ; extra == 'speedups'
+
+==================================
+Async http client/server framework
+==================================
+
+.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/aiohttp-plain.svg
+ :height: 64px
+ :width: 64px
+ :alt: aiohttp logo
+
+|
+
+.. image:: https://github.com/aio-libs/aiohttp/workflows/CI/badge.svg
+ :target: https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI
+ :alt: GitHub Actions status for master branch
+
+.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/aio-libs/aiohttp
+ :alt: codecov.io status for master branch
+
+.. image:: https://badge.fury.io/py/aiohttp.svg
+ :target: https://pypi.org/project/aiohttp
+ :alt: Latest PyPI package version
+
+.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest
+ :target: https://docs.aiohttp.org/
+ :alt: Latest Read The Docs
+
+.. image:: https://img.shields.io/discourse/status?server=https%3A%2F%2Faio-libs.discourse.group
+ :target: https://aio-libs.discourse.group
+ :alt: Discourse status
+
+.. image:: https://badges.gitter.im/Join%20Chat.svg
+ :target: https://gitter.im/aio-libs/Lobby
+ :alt: Chat on Gitter
+
+
+Key Features
+============
+
+- Supports both client and server side of HTTP protocol.
+- Supports both client and server Web-Sockets out-of-the-box and avoids
+ Callback Hell.
+- Provides Web-server with middlewares and plugable routing.
+
+
+Getting started
+===============
+
+Client
+------
+
+To get something from the web:
+
+.. code-block:: python
+
+ import aiohttp
+ import asyncio
+
+ async def main():
+
+ async with aiohttp.ClientSession() as session:
+ async with session.get('http://python.org') as response:
+
+ print("Status:", response.status)
+ print("Content-type:", response.headers['content-type'])
+
+ html = await response.text()
+ print("Body:", html[:15], "...")
+
+ loop = asyncio.get_event_loop()
+ loop.run_until_complete(main())
+
+This prints:
+
+.. code-block::
+
+ Status: 200
+ Content-type: text/html; charset=utf-8
+ Body: <!doctype html> ...
+
+Coming from `requests <https://requests.readthedocs.io/>`_ ? Read `why we need so many lines <https://aiohttp.readthedocs.io/en/latest/http_request_lifecycle.html>`_.
+
+Server
+------
+
+An example using a simple server:
+
+.. code-block:: python
+
+ # examples/server_simple.py
+ from aiohttp import web
+
+ async def handle(request):
+ name = request.match_info.get('name', "Anonymous")
+ text = "Hello, " + name
+ return web.Response(text=text)
+
+ async def wshandle(request):
+ ws = web.WebSocketResponse()
+ await ws.prepare(request)
+
+ async for msg in ws:
+ if msg.type == web.WSMsgType.text:
+ await ws.send_str("Hello, {}".format(msg.data))
+ elif msg.type == web.WSMsgType.binary:
+ await ws.send_bytes(msg.data)
+ elif msg.type == web.WSMsgType.close:
+ break
+
+ return ws
+
+
+ app = web.Application()
+ app.add_routes([web.get('/', handle),
+ web.get('/echo', wshandle),
+ web.get('/{name}', handle)])
+
+ if __name__ == '__main__':
+ web.run_app(app)
+
+
+Documentation
+=============
+
+https://aiohttp.readthedocs.io/
+
+
+Demos
+=====
+
+https://github.com/aio-libs/aiohttp-demos
+
+
+External links
+==============
+
+* `Third party libraries
+ <http://aiohttp.readthedocs.io/en/latest/third_party.html>`_
+* `Built with aiohttp
+ <http://aiohttp.readthedocs.io/en/latest/built_with.html>`_
+* `Powered by aiohttp
+ <http://aiohttp.readthedocs.io/en/latest/powered_by.html>`_
+
+Feel free to make a Pull Request for adding your link to these pages!
+
+
+Communication channels
+======================
+
+*aio-libs discourse group*: https://aio-libs.discourse.group
+
+*gitter chat* https://gitter.im/aio-libs/Lobby
+
+We support `Stack Overflow
+<https://stackoverflow.com/questions/tagged/aiohttp>`_.
+Please add *aiohttp* tag to your question there.
+
+Requirements
+============
+
+- Python >= 3.6
+- async-timeout_
+- attrs_
+- charset-normalizer_
+- multidict_
+- yarl_
+
+Optionally you may install the cChardet_ and aiodns_ libraries (highly
+recommended for sake of speed).
+
+.. _charset-normalizer: https://pypi.org/project/charset-normalizer
+.. _aiodns: https://pypi.python.org/pypi/aiodns
+.. _attrs: https://github.com/python-attrs/attrs
+.. _multidict: https://pypi.python.org/pypi/multidict
+.. _yarl: https://pypi.python.org/pypi/yarl
+.. _async-timeout: https://pypi.python.org/pypi/async_timeout
+.. _cChardet: https://pypi.python.org/pypi/cchardet
+
+License
+=======
+
+``aiohttp`` is offered under the Apache 2 license.
+
+
+Keepsafe
+========
+
+The aiohttp community would like to thank Keepsafe
+(https://www.getkeepsafe.com) for its support in the early days of
+the project.
+
+
+Source code
+===========
+
+The latest developer version is available in a GitHub repository:
+https://github.com/aio-libs/aiohttp
+
+Benchmarks
+==========
+
+If you are interested in efficiency, the AsyncIO community maintains a
+list of benchmarks on the official wiki:
+https://github.com/python/asyncio/wiki/Benchmarks
+
+
diff --git a/contrib/python/aiohttp/.dist-info/top_level.txt b/contrib/python/aiohttp/.dist-info/top_level.txt
new file mode 100644
index 0000000000..ee4ba4f3d7
--- /dev/null
+++ b/contrib/python/aiohttp/.dist-info/top_level.txt
@@ -0,0 +1 @@
+aiohttp
diff --git a/contrib/python/aiohttp/LICENSE.txt b/contrib/python/aiohttp/LICENSE.txt
new file mode 100644
index 0000000000..054102f2db
--- /dev/null
+++ b/contrib/python/aiohttp/LICENSE.txt
@@ -0,0 +1,13 @@
+ Copyright 2013-2020 aio-libs collaboration.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/contrib/python/aiohttp/README.rst b/contrib/python/aiohttp/README.rst
new file mode 100644
index 0000000000..d057acbe2f
--- /dev/null
+++ b/contrib/python/aiohttp/README.rst
@@ -0,0 +1,204 @@
+==================================
+Async http client/server framework
+==================================
+
+.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/aiohttp-plain.svg
+ :height: 64px
+ :width: 64px
+ :alt: aiohttp logo
+
+|
+
+.. image:: https://github.com/aio-libs/aiohttp/workflows/CI/badge.svg
+ :target: https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI
+ :alt: GitHub Actions status for master branch
+
+.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/aio-libs/aiohttp
+ :alt: codecov.io status for master branch
+
+.. image:: https://badge.fury.io/py/aiohttp.svg
+ :target: https://pypi.org/project/aiohttp
+ :alt: Latest PyPI package version
+
+.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest
+ :target: https://docs.aiohttp.org/
+ :alt: Latest Read The Docs
+
+.. image:: https://img.shields.io/discourse/status?server=https%3A%2F%2Faio-libs.discourse.group
+ :target: https://aio-libs.discourse.group
+ :alt: Discourse status
+
+.. image:: https://badges.gitter.im/Join%20Chat.svg
+ :target: https://gitter.im/aio-libs/Lobby
+ :alt: Chat on Gitter
+
+
+Key Features
+============
+
+- Supports both client and server side of HTTP protocol.
+- Supports both client and server Web-Sockets out-of-the-box and avoids
+ Callback Hell.
+- Provides Web-server with middlewares and plugable routing.
+
+
+Getting started
+===============
+
+Client
+------
+
+To get something from the web:
+
+.. code-block:: python
+
+ import aiohttp
+ import asyncio
+
+ async def main():
+
+ async with aiohttp.ClientSession() as session:
+ async with session.get('http://python.org') as response:
+
+ print("Status:", response.status)
+ print("Content-type:", response.headers['content-type'])
+
+ html = await response.text()
+ print("Body:", html[:15], "...")
+
+ loop = asyncio.get_event_loop()
+ loop.run_until_complete(main())
+
+This prints:
+
+.. code-block::
+
+ Status: 200
+ Content-type: text/html; charset=utf-8
+ Body: <!doctype html> ...
+
+Coming from `requests <https://requests.readthedocs.io/>`_ ? Read `why we need so many lines <https://aiohttp.readthedocs.io/en/latest/http_request_lifecycle.html>`_.
+
+Server
+------
+
+An example using a simple server:
+
+.. code-block:: python
+
+ # examples/server_simple.py
+ from aiohttp import web
+
+ async def handle(request):
+ name = request.match_info.get('name', "Anonymous")
+ text = "Hello, " + name
+ return web.Response(text=text)
+
+ async def wshandle(request):
+ ws = web.WebSocketResponse()
+ await ws.prepare(request)
+
+ async for msg in ws:
+ if msg.type == web.WSMsgType.text:
+ await ws.send_str("Hello, {}".format(msg.data))
+ elif msg.type == web.WSMsgType.binary:
+ await ws.send_bytes(msg.data)
+ elif msg.type == web.WSMsgType.close:
+ break
+
+ return ws
+
+
+ app = web.Application()
+ app.add_routes([web.get('/', handle),
+ web.get('/echo', wshandle),
+ web.get('/{name}', handle)])
+
+ if __name__ == '__main__':
+ web.run_app(app)
+
+
+Documentation
+=============
+
+https://aiohttp.readthedocs.io/
+
+
+Demos
+=====
+
+https://github.com/aio-libs/aiohttp-demos
+
+
+External links
+==============
+
+* `Third party libraries
+ <http://aiohttp.readthedocs.io/en/latest/third_party.html>`_
+* `Built with aiohttp
+ <http://aiohttp.readthedocs.io/en/latest/built_with.html>`_
+* `Powered by aiohttp
+ <http://aiohttp.readthedocs.io/en/latest/powered_by.html>`_
+
+Feel free to make a Pull Request for adding your link to these pages!
+
+
+Communication channels
+======================
+
+*aio-libs discourse group*: https://aio-libs.discourse.group
+
+*gitter chat* https://gitter.im/aio-libs/Lobby
+
+We support `Stack Overflow
+<https://stackoverflow.com/questions/tagged/aiohttp>`_.
+Please add *aiohttp* tag to your question there.
+
+Requirements
+============
+
+- Python >= 3.6
+- async-timeout_
+- attrs_
+- charset-normalizer_
+- multidict_
+- yarl_
+
+Optionally you may install the cChardet_ and aiodns_ libraries (highly
+recommended for sake of speed).
+
+.. _charset-normalizer: https://pypi.org/project/charset-normalizer
+.. _aiodns: https://pypi.python.org/pypi/aiodns
+.. _attrs: https://github.com/python-attrs/attrs
+.. _multidict: https://pypi.python.org/pypi/multidict
+.. _yarl: https://pypi.python.org/pypi/yarl
+.. _async-timeout: https://pypi.python.org/pypi/async_timeout
+.. _cChardet: https://pypi.python.org/pypi/cchardet
+
+License
+=======
+
+``aiohttp`` is offered under the Apache 2 license.
+
+
+Keepsafe
+========
+
+The aiohttp community would like to thank Keepsafe
+(https://www.getkeepsafe.com) for its support in the early days of
+the project.
+
+
+Source code
+===========
+
+The latest developer version is available in a GitHub repository:
+https://github.com/aio-libs/aiohttp
+
+Benchmarks
+==========
+
+If you are interested in efficiency, the AsyncIO community maintains a
+list of benchmarks on the official wiki:
+https://github.com/python/asyncio/wiki/Benchmarks
diff --git a/contrib/python/aiohttp/aiohttp/__init__.py b/contrib/python/aiohttp/aiohttp/__init__.py
new file mode 100644
index 0000000000..4bbcef2935
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/__init__.py
@@ -0,0 +1,216 @@
+__version__ = "3.8.1"
+
+from typing import Tuple
+
+from . import hdrs as hdrs
+from .client import (
+ BaseConnector as BaseConnector,
+ ClientConnectionError as ClientConnectionError,
+ ClientConnectorCertificateError as ClientConnectorCertificateError,
+ ClientConnectorError as ClientConnectorError,
+ ClientConnectorSSLError as ClientConnectorSSLError,
+ ClientError as ClientError,
+ ClientHttpProxyError as ClientHttpProxyError,
+ ClientOSError as ClientOSError,
+ ClientPayloadError as ClientPayloadError,
+ ClientProxyConnectionError as ClientProxyConnectionError,
+ ClientRequest as ClientRequest,
+ ClientResponse as ClientResponse,
+ ClientResponseError as ClientResponseError,
+ ClientSession as ClientSession,
+ ClientSSLError as ClientSSLError,
+ ClientTimeout as ClientTimeout,
+ ClientWebSocketResponse as ClientWebSocketResponse,
+ ContentTypeError as ContentTypeError,
+ Fingerprint as Fingerprint,
+ InvalidURL as InvalidURL,
+ NamedPipeConnector as NamedPipeConnector,
+ RequestInfo as RequestInfo,
+ ServerConnectionError as ServerConnectionError,
+ ServerDisconnectedError as ServerDisconnectedError,
+ ServerFingerprintMismatch as ServerFingerprintMismatch,
+ ServerTimeoutError as ServerTimeoutError,
+ TCPConnector as TCPConnector,
+ TooManyRedirects as TooManyRedirects,
+ UnixConnector as UnixConnector,
+ WSServerHandshakeError as WSServerHandshakeError,
+ request as request,
+)
+from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar
+from .formdata import FormData as FormData
+from .helpers import BasicAuth, ChainMapProxy, ETag
+from .http import (
+ HttpVersion as HttpVersion,
+ HttpVersion10 as HttpVersion10,
+ HttpVersion11 as HttpVersion11,
+ WebSocketError as WebSocketError,
+ WSCloseCode as WSCloseCode,
+ WSMessage as WSMessage,
+ WSMsgType as WSMsgType,
+)
+from .multipart import (
+ BadContentDispositionHeader as BadContentDispositionHeader,
+ BadContentDispositionParam as BadContentDispositionParam,
+ BodyPartReader as BodyPartReader,
+ MultipartReader as MultipartReader,
+ MultipartWriter as MultipartWriter,
+ content_disposition_filename as content_disposition_filename,
+ parse_content_disposition as parse_content_disposition,
+)
+from .payload import (
+ PAYLOAD_REGISTRY as PAYLOAD_REGISTRY,
+ AsyncIterablePayload as AsyncIterablePayload,
+ BufferedReaderPayload as BufferedReaderPayload,
+ BytesIOPayload as BytesIOPayload,
+ BytesPayload as BytesPayload,
+ IOBasePayload as IOBasePayload,
+ JsonPayload as JsonPayload,
+ Payload as Payload,
+ StringIOPayload as StringIOPayload,
+ StringPayload as StringPayload,
+ TextIOPayload as TextIOPayload,
+ get_payload as get_payload,
+ payload_type as payload_type,
+)
+from .payload_streamer import streamer as streamer
+from .resolver import (
+ AsyncResolver as AsyncResolver,
+ DefaultResolver as DefaultResolver,
+ ThreadedResolver as ThreadedResolver,
+)
+from .streams import (
+ EMPTY_PAYLOAD as EMPTY_PAYLOAD,
+ DataQueue as DataQueue,
+ EofStream as EofStream,
+ FlowControlDataQueue as FlowControlDataQueue,
+ StreamReader as StreamReader,
+)
+from .tracing import (
+ TraceConfig as TraceConfig,
+ TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
+ TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
+ TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
+ TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
+ TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
+ TraceDnsCacheHitParams as TraceDnsCacheHitParams,
+ TraceDnsCacheMissParams as TraceDnsCacheMissParams,
+ TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
+ TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
+ TraceRequestChunkSentParams as TraceRequestChunkSentParams,
+ TraceRequestEndParams as TraceRequestEndParams,
+ TraceRequestExceptionParams as TraceRequestExceptionParams,
+ TraceRequestRedirectParams as TraceRequestRedirectParams,
+ TraceRequestStartParams as TraceRequestStartParams,
+ TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
+)
+
+__all__: Tuple[str, ...] = (
+ "hdrs",
+ # client
+ "BaseConnector",
+ "ClientConnectionError",
+ "ClientConnectorCertificateError",
+ "ClientConnectorError",
+ "ClientConnectorSSLError",
+ "ClientError",
+ "ClientHttpProxyError",
+ "ClientOSError",
+ "ClientPayloadError",
+ "ClientProxyConnectionError",
+ "ClientResponse",
+ "ClientRequest",
+ "ClientResponseError",
+ "ClientSSLError",
+ "ClientSession",
+ "ClientTimeout",
+ "ClientWebSocketResponse",
+ "ContentTypeError",
+ "Fingerprint",
+ "InvalidURL",
+ "RequestInfo",
+ "ServerConnectionError",
+ "ServerDisconnectedError",
+ "ServerFingerprintMismatch",
+ "ServerTimeoutError",
+ "TCPConnector",
+ "TooManyRedirects",
+ "UnixConnector",
+ "NamedPipeConnector",
+ "WSServerHandshakeError",
+ "request",
+ # cookiejar
+ "CookieJar",
+ "DummyCookieJar",
+ # formdata
+ "FormData",
+ # helpers
+ "BasicAuth",
+ "ChainMapProxy",
+ "ETag",
+ # http
+ "HttpVersion",
+ "HttpVersion10",
+ "HttpVersion11",
+ "WSMsgType",
+ "WSCloseCode",
+ "WSMessage",
+ "WebSocketError",
+ # multipart
+ "BadContentDispositionHeader",
+ "BadContentDispositionParam",
+ "BodyPartReader",
+ "MultipartReader",
+ "MultipartWriter",
+ "content_disposition_filename",
+ "parse_content_disposition",
+ # payload
+ "AsyncIterablePayload",
+ "BufferedReaderPayload",
+ "BytesIOPayload",
+ "BytesPayload",
+ "IOBasePayload",
+ "JsonPayload",
+ "PAYLOAD_REGISTRY",
+ "Payload",
+ "StringIOPayload",
+ "StringPayload",
+ "TextIOPayload",
+ "get_payload",
+ "payload_type",
+ # payload_streamer
+ "streamer",
+ # resolver
+ "AsyncResolver",
+ "DefaultResolver",
+ "ThreadedResolver",
+ # streams
+ "DataQueue",
+ "EMPTY_PAYLOAD",
+ "EofStream",
+ "FlowControlDataQueue",
+ "StreamReader",
+ # tracing
+ "TraceConfig",
+ "TraceConnectionCreateEndParams",
+ "TraceConnectionCreateStartParams",
+ "TraceConnectionQueuedEndParams",
+ "TraceConnectionQueuedStartParams",
+ "TraceConnectionReuseconnParams",
+ "TraceDnsCacheHitParams",
+ "TraceDnsCacheMissParams",
+ "TraceDnsResolveHostEndParams",
+ "TraceDnsResolveHostStartParams",
+ "TraceRequestChunkSentParams",
+ "TraceRequestEndParams",
+ "TraceRequestExceptionParams",
+ "TraceRequestRedirectParams",
+ "TraceRequestStartParams",
+ "TraceResponseChunkReceivedParams",
+)
+
+try:
+ from .worker import GunicornUVLoopWebWorker, GunicornWebWorker
+
+ __all__ += ("GunicornWebWorker", "GunicornUVLoopWebWorker")
+except ImportError: # pragma: no cover
+ pass
diff --git a/contrib/python/aiohttp/aiohttp/_cparser.pxd b/contrib/python/aiohttp/aiohttp/_cparser.pxd
new file mode 100644
index 0000000000..49055d6a56
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/_cparser.pxd
@@ -0,0 +1,190 @@
+from libc.stdint cimport (
+ int8_t,
+ int16_t,
+ int32_t,
+ int64_t,
+ uint8_t,
+ uint16_t,
+ uint32_t,
+ uint64_t,
+)
+
+
+cdef extern from "llhttp.h":
+
+ struct llhttp__internal_s:
+ int32_t _index
+ void* _span_pos0
+ void* _span_cb0
+ int32_t error
+ const char* reason
+ const char* error_pos
+ void* data
+ void* _current
+ uint64_t content_length
+ uint8_t type
+ uint8_t method
+ uint8_t http_major
+ uint8_t http_minor
+ uint8_t header_state
+ uint8_t lenient_flags
+ uint8_t upgrade
+ uint8_t finish
+ uint16_t flags
+ uint16_t status_code
+ void* settings
+
+ ctypedef llhttp__internal_s llhttp__internal_t
+ ctypedef llhttp__internal_t llhttp_t
+
+ ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1
+ ctypedef int (*llhttp_cb)(llhttp_t*) except -1
+
+ struct llhttp_settings_s:
+ llhttp_cb on_message_begin
+ llhttp_data_cb on_url
+ llhttp_data_cb on_status
+ llhttp_data_cb on_header_field
+ llhttp_data_cb on_header_value
+ llhttp_cb on_headers_complete
+ llhttp_data_cb on_body
+ llhttp_cb on_message_complete
+ llhttp_cb on_chunk_header
+ llhttp_cb on_chunk_complete
+
+ llhttp_cb on_url_complete
+ llhttp_cb on_status_complete
+ llhttp_cb on_header_field_complete
+ llhttp_cb on_header_value_complete
+
+ ctypedef llhttp_settings_s llhttp_settings_t
+
+ enum llhttp_errno:
+ HPE_OK,
+ HPE_INTERNAL,
+ HPE_STRICT,
+ HPE_LF_EXPECTED,
+ HPE_UNEXPECTED_CONTENT_LENGTH,
+ HPE_CLOSED_CONNECTION,
+ HPE_INVALID_METHOD,
+ HPE_INVALID_URL,
+ HPE_INVALID_CONSTANT,
+ HPE_INVALID_VERSION,
+ HPE_INVALID_HEADER_TOKEN,
+ HPE_INVALID_CONTENT_LENGTH,
+ HPE_INVALID_CHUNK_SIZE,
+ HPE_INVALID_STATUS,
+ HPE_INVALID_EOF_STATE,
+ HPE_INVALID_TRANSFER_ENCODING,
+ HPE_CB_MESSAGE_BEGIN,
+ HPE_CB_HEADERS_COMPLETE,
+ HPE_CB_MESSAGE_COMPLETE,
+ HPE_CB_CHUNK_HEADER,
+ HPE_CB_CHUNK_COMPLETE,
+ HPE_PAUSED,
+ HPE_PAUSED_UPGRADE,
+ HPE_USER
+
+ ctypedef llhttp_errno llhttp_errno_t
+
+ enum llhttp_flags:
+ F_CONNECTION_KEEP_ALIVE,
+ F_CONNECTION_CLOSE,
+ F_CONNECTION_UPGRADE,
+ F_CHUNKED,
+ F_UPGRADE,
+ F_CONTENT_LENGTH,
+ F_SKIPBODY,
+ F_TRAILING,
+ F_TRANSFER_ENCODING
+
+ enum llhttp_lenient_flags:
+ LENIENT_HEADERS,
+ LENIENT_CHUNKED_LENGTH
+
+ enum llhttp_type:
+ HTTP_REQUEST,
+ HTTP_RESPONSE,
+ HTTP_BOTH
+
+ enum llhttp_finish_t:
+ HTTP_FINISH_SAFE,
+ HTTP_FINISH_SAFE_WITH_CB,
+ HTTP_FINISH_UNSAFE
+
+ enum llhttp_method:
+ HTTP_DELETE,
+ HTTP_GET,
+ HTTP_HEAD,
+ HTTP_POST,
+ HTTP_PUT,
+ HTTP_CONNECT,
+ HTTP_OPTIONS,
+ HTTP_TRACE,
+ HTTP_COPY,
+ HTTP_LOCK,
+ HTTP_MKCOL,
+ HTTP_MOVE,
+ HTTP_PROPFIND,
+ HTTP_PROPPATCH,
+ HTTP_SEARCH,
+ HTTP_UNLOCK,
+ HTTP_BIND,
+ HTTP_REBIND,
+ HTTP_UNBIND,
+ HTTP_ACL,
+ HTTP_REPORT,
+ HTTP_MKACTIVITY,
+ HTTP_CHECKOUT,
+ HTTP_MERGE,
+ HTTP_MSEARCH,
+ HTTP_NOTIFY,
+ HTTP_SUBSCRIBE,
+ HTTP_UNSUBSCRIBE,
+ HTTP_PATCH,
+ HTTP_PURGE,
+ HTTP_MKCALENDAR,
+ HTTP_LINK,
+ HTTP_UNLINK,
+ HTTP_SOURCE,
+ HTTP_PRI,
+ HTTP_DESCRIBE,
+ HTTP_ANNOUNCE,
+ HTTP_SETUP,
+ HTTP_PLAY,
+ HTTP_PAUSE,
+ HTTP_TEARDOWN,
+ HTTP_GET_PARAMETER,
+ HTTP_SET_PARAMETER,
+ HTTP_REDIRECT,
+ HTTP_RECORD,
+ HTTP_FLUSH
+
+ ctypedef llhttp_method llhttp_method_t;
+
+ void llhttp_settings_init(llhttp_settings_t* settings)
+ void llhttp_init(llhttp_t* parser, llhttp_type type,
+ const llhttp_settings_t* settings)
+
+ llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len)
+ llhttp_errno_t llhttp_finish(llhttp_t* parser)
+
+ int llhttp_message_needs_eof(const llhttp_t* parser)
+
+ int llhttp_should_keep_alive(const llhttp_t* parser)
+
+ void llhttp_pause(llhttp_t* parser)
+ void llhttp_resume(llhttp_t* parser)
+
+ void llhttp_resume_after_upgrade(llhttp_t* parser)
+
+ llhttp_errno_t llhttp_get_errno(const llhttp_t* parser)
+ const char* llhttp_get_error_reason(const llhttp_t* parser)
+ void llhttp_set_error_reason(llhttp_t* parser, const char* reason)
+ const char* llhttp_get_error_pos(const llhttp_t* parser)
+ const char* llhttp_errno_name(llhttp_errno_t err)
+
+ const char* llhttp_method_name(llhttp_method_t method)
+
+ void llhttp_set_lenient_headers(llhttp_t* parser, int enabled)
+ void llhttp_set_lenient_chunked_length(llhttp_t* parser, int enabled)
diff --git a/contrib/python/aiohttp/aiohttp/_find_header.c b/contrib/python/aiohttp/aiohttp/_find_header.c
new file mode 100644
index 0000000000..012cba33ac
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/_find_header.c
@@ -0,0 +1,9870 @@
+/* The file is autogenerated from aiohttp/hdrs.py
+Run ./tools/gen.py to update it after the origin changing. */
+
+#include "_find_header.h"
+
+#define NEXT_CHAR() \
+{ \
+ count++; \
+ if (count == size) { \
+ /* end of search */ \
+ return -1; \
+ } \
+ pchar++; \
+ ch = *pchar; \
+ last = (count == size -1); \
+} while(0);
+
+int
+find_header(const char *str, int size)
+{
+ char *pchar = str;
+ int last;
+ char ch;
+ int count = -1;
+ pchar--;
+
+
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto A;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto A;
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto C;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto C;
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto D;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto D;
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto E;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto E;
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto F;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto F;
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto H;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto H;
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto I;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto I;
+ case 'K':
+ if (last) {
+ return -1;
+ }
+ goto K;
+ case 'k':
+ if (last) {
+ return -1;
+ }
+ goto K;
+ case 'L':
+ if (last) {
+ return -1;
+ }
+ goto L;
+ case 'l':
+ if (last) {
+ return -1;
+ }
+ goto L;
+ case 'M':
+ if (last) {
+ return -1;
+ }
+ goto M;
+ case 'm':
+ if (last) {
+ return -1;
+ }
+ goto M;
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto O;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto O;
+ case 'P':
+ if (last) {
+ return -1;
+ }
+ goto P;
+ case 'p':
+ if (last) {
+ return -1;
+ }
+ goto P;
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto R;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto R;
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto S;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto S;
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto T;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto T;
+ case 'U':
+ if (last) {
+ return -1;
+ }
+ goto U;
+ case 'u':
+ if (last) {
+ return -1;
+ }
+ goto U;
+ case 'V':
+ if (last) {
+ return -1;
+ }
+ goto V;
+ case 'v':
+ if (last) {
+ return -1;
+ }
+ goto V;
+ case 'W':
+ if (last) {
+ return -1;
+ }
+ goto W;
+ case 'w':
+ if (last) {
+ return -1;
+ }
+ goto W;
+ case 'X':
+ if (last) {
+ return -1;
+ }
+ goto X;
+ case 'x':
+ if (last) {
+ return -1;
+ }
+ goto X;
+ default:
+ return -1;
+ }
+
+A:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto AC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto AC;
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto AG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto AG;
+ case 'L':
+ if (last) {
+ return -1;
+ }
+ goto AL;
+ case 'l':
+ if (last) {
+ return -1;
+ }
+ goto AL;
+ case 'U':
+ if (last) {
+ return -1;
+ }
+ goto AU;
+ case 'u':
+ if (last) {
+ return -1;
+ }
+ goto AU;
+ default:
+ return -1;
+ }
+
+AC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto ACC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto ACC;
+ default:
+ return -1;
+ }
+
+ACC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCE;
+ default:
+ return -1;
+ }
+
+ACCE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'P':
+ if (last) {
+ return -1;
+ }
+ goto ACCEP;
+ case 'p':
+ if (last) {
+ return -1;
+ }
+ goto ACCEP;
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto ACCES;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto ACCES;
+ default:
+ return -1;
+ }
+
+ACCEP:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return 0;
+ }
+ goto ACCEPT;
+ case 't':
+ if (last) {
+ return 0;
+ }
+ goto ACCEPT;
+ default:
+ return -1;
+ }
+
+ACCEPT:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_;
+ default:
+ return -1;
+ }
+
+ACCEPT_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_C;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_C;
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_E;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_E;
+ case 'L':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_L;
+ case 'l':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_L;
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_R;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_R;
+ default:
+ return -1;
+ }
+
+ACCEPT_C:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_CH;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_CH;
+ default:
+ return -1;
+ }
+
+ACCEPT_CH:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_CHA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_CHA;
+ default:
+ return -1;
+ }
+
+ACCEPT_CHA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_CHAR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_CHAR;
+ default:
+ return -1;
+ }
+
+ACCEPT_CHAR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_CHARS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_CHARS;
+ default:
+ return -1;
+ }
+
+ACCEPT_CHARS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_CHARSE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_CHARSE;
+ default:
+ return -1;
+ }
+
+ACCEPT_CHARSE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return 1;
+ }
+ goto ACCEPT_CHARSET;
+ case 't':
+ if (last) {
+ return 1;
+ }
+ goto ACCEPT_CHARSET;
+ default:
+ return -1;
+ }
+
+ACCEPT_E:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_EN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_EN;
+ default:
+ return -1;
+ }
+
+ACCEPT_EN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_ENC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_ENC;
+ default:
+ return -1;
+ }
+
+ACCEPT_ENC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_ENCO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_ENCO;
+ default:
+ return -1;
+ }
+
+ACCEPT_ENCO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_ENCOD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_ENCOD;
+ default:
+ return -1;
+ }
+
+ACCEPT_ENCOD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_ENCODI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_ENCODI;
+ default:
+ return -1;
+ }
+
+ACCEPT_ENCODI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_ENCODIN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_ENCODIN;
+ default:
+ return -1;
+ }
+
+ACCEPT_ENCODIN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return 2;
+ }
+ goto ACCEPT_ENCODING;
+ case 'g':
+ if (last) {
+ return 2;
+ }
+ goto ACCEPT_ENCODING;
+ default:
+ return -1;
+ }
+
+ACCEPT_L:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LA;
+ default:
+ return -1;
+ }
+
+ACCEPT_LA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LAN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LAN;
+ default:
+ return -1;
+ }
+
+ACCEPT_LAN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LANG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LANG;
+ default:
+ return -1;
+ }
+
+ACCEPT_LANG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'U':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LANGU;
+ case 'u':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LANGU;
+ default:
+ return -1;
+ }
+
+ACCEPT_LANGU:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LANGUA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LANGUA;
+ default:
+ return -1;
+ }
+
+ACCEPT_LANGUA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LANGUAG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LANGUAG;
+ default:
+ return -1;
+ }
+
+ACCEPT_LANGUAG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 3;
+ }
+ goto ACCEPT_LANGUAGE;
+ case 'e':
+ if (last) {
+ return 3;
+ }
+ goto ACCEPT_LANGUAGE;
+ default:
+ return -1;
+ }
+
+ACCEPT_R:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_RA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_RA;
+ default:
+ return -1;
+ }
+
+ACCEPT_RA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_RAN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_RAN;
+ default:
+ return -1;
+ }
+
+ACCEPT_RAN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_RANG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_RANG;
+ default:
+ return -1;
+ }
+
+ACCEPT_RANG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_RANGE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_RANGE;
+ default:
+ return -1;
+ }
+
+ACCEPT_RANGE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return 4;
+ }
+ goto ACCEPT_RANGES;
+ case 's':
+ if (last) {
+ return 4;
+ }
+ goto ACCEPT_RANGES;
+ default:
+ return -1;
+ }
+
+ACCES:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS;
+ default:
+ return -1;
+ }
+
+ACCESS:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_;
+ default:
+ return -1;
+ }
+
+ACCESS_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_C;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_C;
+ default:
+ return -1;
+ }
+
+ACCESS_C:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CO;
+ default:
+ return -1;
+ }
+
+ACCESS_CO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CON;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CON;
+ default:
+ return -1;
+ }
+
+ACCESS_CON:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONT;
+ default:
+ return -1;
+ }
+
+ACCESS_CONT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTR;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTRO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTRO;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTRO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'L':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL;
+ case 'l':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_A;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_A;
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_E;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_E;
+ case 'M':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_M;
+ case 'm':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_M;
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_R;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_R;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_A:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'L':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_AL;
+ case 'l':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_AL;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_AL:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'L':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALL;
+ case 'l':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALL;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALL:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLO;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'W':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW;
+ case 'w':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_C;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_C;
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_H;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_H;
+ case 'M':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_M;
+ case 'm':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_M;
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_O;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_O;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_C:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CR;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_CR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CRE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CRE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_CRE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CRED;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CRED;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_CRED:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_CREDE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDEN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDEN;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_CREDEN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDENT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDENT;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_CREDENT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDENTI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDENTI;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_CREDENTI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDENTIA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDENTIA;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_CREDENTIA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'L':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDENTIAL;
+ case 'l':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDENTIAL;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_CREDENTIAL:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return 5;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDENTIALS;
+ case 's':
+ if (last) {
+ return 5;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDENTIALS;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_H:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_HE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_HE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_HE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_HEA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_HEA;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_HEA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_HEAD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_HEAD;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_HEAD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_HEADE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_HEADE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_HEADE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_HEADER;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_HEADER;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_HEADER:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return 6;
+ }
+ goto ACCESS_CONTROL_ALLOW_HEADERS;
+ case 's':
+ if (last) {
+ return 6;
+ }
+ goto ACCESS_CONTROL_ALLOW_HEADERS;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_M:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_ME;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_ME;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_ME:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_MET;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_MET;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_MET:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_METH;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_METH;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_METH:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_METHO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_METHO;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_METHO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_METHOD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_METHOD;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_METHOD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return 7;
+ }
+ goto ACCESS_CONTROL_ALLOW_METHODS;
+ case 's':
+ if (last) {
+ return 7;
+ }
+ goto ACCESS_CONTROL_ALLOW_METHODS;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_O:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_OR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_OR;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_OR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_ORI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_ORI;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_ORI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_ORIG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_ORIG;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_ORIG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_ORIGI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_ORIGI;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_ORIGI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return 8;
+ }
+ goto ACCESS_CONTROL_ALLOW_ORIGIN;
+ case 'n':
+ if (last) {
+ return 8;
+ }
+ goto ACCESS_CONTROL_ALLOW_ORIGIN;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_E:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'X':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EX;
+ case 'x':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EX;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EX:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'P':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXP;
+ case 'p':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXP;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EXP:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPO;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EXPO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOS;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EXPOS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EXPOSE:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EXPOSE_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_H;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_H;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EXPOSE_H:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EXPOSE_HE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HEA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HEA;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EXPOSE_HEA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HEAD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HEAD;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EXPOSE_HEAD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HEADE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HEADE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EXPOSE_HEADE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HEADER;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HEADER;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EXPOSE_HEADER:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return 9;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HEADERS;
+ case 's':
+ if (last) {
+ return 9;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HEADERS;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_M:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_MA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_MA;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_MA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'X':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_MAX;
+ case 'x':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_MAX;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_MAX:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_MAX_;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_MAX_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_MAX_A;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_MAX_A;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_MAX_A:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_MAX_AG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_MAX_AG;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_MAX_AG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 10;
+ }
+ goto ACCESS_CONTROL_MAX_AGE;
+ case 'e':
+ if (last) {
+ return 10;
+ }
+ goto ACCESS_CONTROL_MAX_AGE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_R:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_RE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_RE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_RE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'Q':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQ;
+ case 'q':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQ;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQ:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'U':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQU;
+ case 'u':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQU;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQU:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUES;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUES;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUES:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_H;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_H;
+ case 'M':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_M;
+ case 'm':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_M;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_H:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_HE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_HE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_HE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_HEA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_HEA;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_HEA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_HEAD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_HEAD;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_HEAD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_HEADE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_HEADE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_HEADE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_HEADER;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_HEADER;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_HEADER:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return 11;
+ }
+ goto ACCESS_CONTROL_REQUEST_HEADERS;
+ case 's':
+ if (last) {
+ return 11;
+ }
+ goto ACCESS_CONTROL_REQUEST_HEADERS;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_M:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_ME;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_ME;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_ME:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_MET;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_MET;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_MET:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_METH;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_METH;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_METH:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_METHO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_METHO;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_METHO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return 12;
+ }
+ goto ACCESS_CONTROL_REQUEST_METHOD;
+ case 'd':
+ if (last) {
+ return 12;
+ }
+ goto ACCESS_CONTROL_REQUEST_METHOD;
+ default:
+ return -1;
+ }
+
+AG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 13;
+ }
+ goto AGE;
+ case 'e':
+ if (last) {
+ return 13;
+ }
+ goto AGE;
+ default:
+ return -1;
+ }
+
+AL:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'L':
+ if (last) {
+ return -1;
+ }
+ goto ALL;
+ case 'l':
+ if (last) {
+ return -1;
+ }
+ goto ALL;
+ default:
+ return -1;
+ }
+
+ALL:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto ALLO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto ALLO;
+ default:
+ return -1;
+ }
+
+ALLO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'W':
+ if (last) {
+ return 14;
+ }
+ goto ALLOW;
+ case 'w':
+ if (last) {
+ return 14;
+ }
+ goto ALLOW;
+ default:
+ return -1;
+ }
+
+AU:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto AUT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto AUT;
+ default:
+ return -1;
+ }
+
+AUT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto AUTH;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto AUTH;
+ default:
+ return -1;
+ }
+
+AUTH:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto AUTHO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto AUTHO;
+ default:
+ return -1;
+ }
+
+AUTHO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto AUTHOR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto AUTHOR;
+ default:
+ return -1;
+ }
+
+AUTHOR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORI;
+ default:
+ return -1;
+ }
+
+AUTHORI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'Z':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORIZ;
+ case 'z':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORIZ;
+ default:
+ return -1;
+ }
+
+AUTHORIZ:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORIZA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORIZA;
+ default:
+ return -1;
+ }
+
+AUTHORIZA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORIZAT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORIZAT;
+ default:
+ return -1;
+ }
+
+AUTHORIZAT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORIZATI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORIZATI;
+ default:
+ return -1;
+ }
+
+AUTHORIZATI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORIZATIO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORIZATIO;
+ default:
+ return -1;
+ }
+
+AUTHORIZATIO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return 15;
+ }
+ goto AUTHORIZATION;
+ case 'n':
+ if (last) {
+ return 15;
+ }
+ goto AUTHORIZATION;
+ default:
+ return -1;
+ }
+
+C:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto CA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto CA;
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto CO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto CO;
+ default:
+ return -1;
+ }
+
+CA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto CAC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto CAC;
+ default:
+ return -1;
+ }
+
+CAC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto CACH;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto CACH;
+ default:
+ return -1;
+ }
+
+CACH:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto CACHE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto CACHE;
+ default:
+ return -1;
+ }
+
+CACHE:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_;
+ default:
+ return -1;
+ }
+
+CACHE_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_C;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_C;
+ default:
+ return -1;
+ }
+
+CACHE_C:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_CO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_CO;
+ default:
+ return -1;
+ }
+
+CACHE_CO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_CON;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_CON;
+ default:
+ return -1;
+ }
+
+CACHE_CON:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_CONT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_CONT;
+ default:
+ return -1;
+ }
+
+CACHE_CONT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_CONTR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_CONTR;
+ default:
+ return -1;
+ }
+
+CACHE_CONTR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_CONTRO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_CONTRO;
+ default:
+ return -1;
+ }
+
+CACHE_CONTRO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'L':
+ if (last) {
+ return 16;
+ }
+ goto CACHE_CONTROL;
+ case 'l':
+ if (last) {
+ return 16;
+ }
+ goto CACHE_CONTROL;
+ default:
+ return -1;
+ }
+
+CO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CON;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CON;
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto COO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto COO;
+ default:
+ return -1;
+ }
+
+CON:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CONN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CONN;
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto CONT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto CONT;
+ default:
+ return -1;
+ }
+
+CONN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto CONNE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto CONNE;
+ default:
+ return -1;
+ }
+
+CONNE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto CONNEC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto CONNEC;
+ default:
+ return -1;
+ }
+
+CONNEC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto CONNECT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto CONNECT;
+ default:
+ return -1;
+ }
+
+CONNECT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto CONNECTI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto CONNECTI;
+ default:
+ return -1;
+ }
+
+CONNECTI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto CONNECTIO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto CONNECTIO;
+ default:
+ return -1;
+ }
+
+CONNECTIO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return 17;
+ }
+ goto CONNECTION;
+ case 'n':
+ if (last) {
+ return 17;
+ }
+ goto CONNECTION;
+ default:
+ return -1;
+ }
+
+CONT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto CONTE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto CONTE;
+ default:
+ return -1;
+ }
+
+CONTE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CONTEN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CONTEN;
+ default:
+ return -1;
+ }
+
+CONTEN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT;
+ default:
+ return -1;
+ }
+
+CONTENT:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_;
+ default:
+ return -1;
+ }
+
+CONTENT_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_D;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_D;
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_E;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_E;
+ case 'L':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_L;
+ case 'l':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_L;
+ case 'M':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_M;
+ case 'm':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_M;
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_R;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_R;
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_T;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_T;
+ default:
+ return -1;
+ }
+
+CONTENT_D:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DI;
+ default:
+ return -1;
+ }
+
+CONTENT_DI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DIS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DIS;
+ default:
+ return -1;
+ }
+
+CONTENT_DIS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'P':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISP;
+ case 'p':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISP;
+ default:
+ return -1;
+ }
+
+CONTENT_DISP:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPO;
+ default:
+ return -1;
+ }
+
+CONTENT_DISPO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPOS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPOS;
+ default:
+ return -1;
+ }
+
+CONTENT_DISPOS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPOSI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPOSI;
+ default:
+ return -1;
+ }
+
+CONTENT_DISPOSI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPOSIT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPOSIT;
+ default:
+ return -1;
+ }
+
+CONTENT_DISPOSIT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPOSITI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPOSITI;
+ default:
+ return -1;
+ }
+
+CONTENT_DISPOSITI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPOSITIO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPOSITIO;
+ default:
+ return -1;
+ }
+
+CONTENT_DISPOSITIO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return 18;
+ }
+ goto CONTENT_DISPOSITION;
+ case 'n':
+ if (last) {
+ return 18;
+ }
+ goto CONTENT_DISPOSITION;
+ default:
+ return -1;
+ }
+
+CONTENT_E:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_EN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_EN;
+ default:
+ return -1;
+ }
+
+CONTENT_EN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_ENC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_ENC;
+ default:
+ return -1;
+ }
+
+CONTENT_ENC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_ENCO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_ENCO;
+ default:
+ return -1;
+ }
+
+CONTENT_ENCO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_ENCOD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_ENCOD;
+ default:
+ return -1;
+ }
+
+CONTENT_ENCOD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_ENCODI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_ENCODI;
+ default:
+ return -1;
+ }
+
+CONTENT_ENCODI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_ENCODIN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_ENCODIN;
+ default:
+ return -1;
+ }
+
+CONTENT_ENCODIN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return 19;
+ }
+ goto CONTENT_ENCODING;
+ case 'g':
+ if (last) {
+ return 19;
+ }
+ goto CONTENT_ENCODING;
+ default:
+ return -1;
+ }
+
+CONTENT_L:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LA;
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LE;
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LO;
+ default:
+ return -1;
+ }
+
+CONTENT_LA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LAN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LAN;
+ default:
+ return -1;
+ }
+
+CONTENT_LAN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LANG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LANG;
+ default:
+ return -1;
+ }
+
+CONTENT_LANG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'U':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LANGU;
+ case 'u':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LANGU;
+ default:
+ return -1;
+ }
+
+CONTENT_LANGU:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LANGUA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LANGUA;
+ default:
+ return -1;
+ }
+
+CONTENT_LANGUA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LANGUAG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LANGUAG;
+ default:
+ return -1;
+ }
+
+CONTENT_LANGUAG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 20;
+ }
+ goto CONTENT_LANGUAGE;
+ case 'e':
+ if (last) {
+ return 20;
+ }
+ goto CONTENT_LANGUAGE;
+ default:
+ return -1;
+ }
+
+CONTENT_LE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LEN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LEN;
+ default:
+ return -1;
+ }
+
+CONTENT_LEN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LENG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LENG;
+ default:
+ return -1;
+ }
+
+CONTENT_LENG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LENGT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LENGT;
+ default:
+ return -1;
+ }
+
+CONTENT_LENGT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return 21;
+ }
+ goto CONTENT_LENGTH;
+ case 'h':
+ if (last) {
+ return 21;
+ }
+ goto CONTENT_LENGTH;
+ default:
+ return -1;
+ }
+
+CONTENT_LO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LOC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LOC;
+ default:
+ return -1;
+ }
+
+CONTENT_LOC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LOCA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LOCA;
+ default:
+ return -1;
+ }
+
+CONTENT_LOCA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LOCAT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LOCAT;
+ default:
+ return -1;
+ }
+
+CONTENT_LOCAT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LOCATI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LOCATI;
+ default:
+ return -1;
+ }
+
+CONTENT_LOCATI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LOCATIO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LOCATIO;
+ default:
+ return -1;
+ }
+
+CONTENT_LOCATIO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return 22;
+ }
+ goto CONTENT_LOCATION;
+ case 'n':
+ if (last) {
+ return 22;
+ }
+ goto CONTENT_LOCATION;
+ default:
+ return -1;
+ }
+
+CONTENT_M:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_MD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_MD;
+ default:
+ return -1;
+ }
+
+CONTENT_MD:
+ NEXT_CHAR();
+ switch (ch) {
+ case '5':
+ if (last) {
+ return 23;
+ }
+ goto CONTENT_MD5;
+ default:
+ return -1;
+ }
+
+CONTENT_R:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_RA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_RA;
+ default:
+ return -1;
+ }
+
+CONTENT_RA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_RAN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_RAN;
+ default:
+ return -1;
+ }
+
+CONTENT_RAN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_RANG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_RANG;
+ default:
+ return -1;
+ }
+
+CONTENT_RANG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 24;
+ }
+ goto CONTENT_RANGE;
+ case 'e':
+ if (last) {
+ return 24;
+ }
+ goto CONTENT_RANGE;
+ default:
+ return -1;
+ }
+
+CONTENT_T:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TR;
+ case 'Y':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TY;
+ case 'y':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TY;
+ default:
+ return -1;
+ }
+
+CONTENT_TR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRA;
+ default:
+ return -1;
+ }
+
+CONTENT_TRA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRAN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRAN;
+ default:
+ return -1;
+ }
+
+CONTENT_TRAN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANS;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSF;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSF;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANSF:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFE;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANSFE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANSFER:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANSFER_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_E;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_E;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANSFER_E:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_EN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_EN;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANSFER_EN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_ENC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_ENC;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANSFER_ENC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_ENCO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_ENCO;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANSFER_ENCO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_ENCOD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_ENCOD;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANSFER_ENCOD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_ENCODI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_ENCODI;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANSFER_ENCODI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_ENCODIN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_ENCODIN;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANSFER_ENCODIN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return 25;
+ }
+ goto CONTENT_TRANSFER_ENCODING;
+ case 'g':
+ if (last) {
+ return 25;
+ }
+ goto CONTENT_TRANSFER_ENCODING;
+ default:
+ return -1;
+ }
+
+CONTENT_TY:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'P':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TYP;
+ case 'p':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TYP;
+ default:
+ return -1;
+ }
+
+CONTENT_TYP:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 26;
+ }
+ goto CONTENT_TYPE;
+ case 'e':
+ if (last) {
+ return 26;
+ }
+ goto CONTENT_TYPE;
+ default:
+ return -1;
+ }
+
+COO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'K':
+ if (last) {
+ return -1;
+ }
+ goto COOK;
+ case 'k':
+ if (last) {
+ return -1;
+ }
+ goto COOK;
+ default:
+ return -1;
+ }
+
+COOK:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto COOKI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto COOKI;
+ default:
+ return -1;
+ }
+
+COOKI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 27;
+ }
+ goto COOKIE;
+ case 'e':
+ if (last) {
+ return 27;
+ }
+ goto COOKIE;
+ default:
+ return -1;
+ }
+
+D:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto DA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto DA;
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto DE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto DE;
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto DI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto DI;
+ default:
+ return -1;
+ }
+
+DA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto DAT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto DAT;
+ default:
+ return -1;
+ }
+
+DAT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 28;
+ }
+ goto DATE;
+ case 'e':
+ if (last) {
+ return 28;
+ }
+ goto DATE;
+ default:
+ return -1;
+ }
+
+DE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto DES;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto DES;
+ default:
+ return -1;
+ }
+
+DES:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto DEST;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto DEST;
+ default:
+ return -1;
+ }
+
+DEST:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto DESTI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto DESTI;
+ default:
+ return -1;
+ }
+
+DESTI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto DESTIN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto DESTIN;
+ default:
+ return -1;
+ }
+
+DESTIN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto DESTINA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto DESTINA;
+ default:
+ return -1;
+ }
+
+DESTINA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto DESTINAT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto DESTINAT;
+ default:
+ return -1;
+ }
+
+DESTINAT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto DESTINATI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto DESTINATI;
+ default:
+ return -1;
+ }
+
+DESTINATI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto DESTINATIO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto DESTINATIO;
+ default:
+ return -1;
+ }
+
+DESTINATIO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return 29;
+ }
+ goto DESTINATION;
+ case 'n':
+ if (last) {
+ return 29;
+ }
+ goto DESTINATION;
+ default:
+ return -1;
+ }
+
+DI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto DIG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto DIG;
+ default:
+ return -1;
+ }
+
+DIG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto DIGE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto DIGE;
+ default:
+ return -1;
+ }
+
+DIGE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto DIGES;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto DIGES;
+ default:
+ return -1;
+ }
+
+DIGES:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return 30;
+ }
+ goto DIGEST;
+ case 't':
+ if (last) {
+ return 30;
+ }
+ goto DIGEST;
+ default:
+ return -1;
+ }
+
+E:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto ET;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto ET;
+ case 'X':
+ if (last) {
+ return -1;
+ }
+ goto EX;
+ case 'x':
+ if (last) {
+ return -1;
+ }
+ goto EX;
+ default:
+ return -1;
+ }
+
+ET:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ETA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ETA;
+ default:
+ return -1;
+ }
+
+ETA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return 31;
+ }
+ goto ETAG;
+ case 'g':
+ if (last) {
+ return 31;
+ }
+ goto ETAG;
+ default:
+ return -1;
+ }
+
+EX:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'P':
+ if (last) {
+ return -1;
+ }
+ goto EXP;
+ case 'p':
+ if (last) {
+ return -1;
+ }
+ goto EXP;
+ default:
+ return -1;
+ }
+
+EXP:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto EXPE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto EXPE;
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto EXPI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto EXPI;
+ default:
+ return -1;
+ }
+
+EXPE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto EXPEC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto EXPEC;
+ default:
+ return -1;
+ }
+
+EXPEC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return 32;
+ }
+ goto EXPECT;
+ case 't':
+ if (last) {
+ return 32;
+ }
+ goto EXPECT;
+ default:
+ return -1;
+ }
+
+EXPI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto EXPIR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto EXPIR;
+ default:
+ return -1;
+ }
+
+EXPIR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto EXPIRE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto EXPIRE;
+ default:
+ return -1;
+ }
+
+EXPIRE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return 33;
+ }
+ goto EXPIRES;
+ case 's':
+ if (last) {
+ return 33;
+ }
+ goto EXPIRES;
+ default:
+ return -1;
+ }
+
+F:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto FO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto FO;
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto FR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto FR;
+ default:
+ return -1;
+ }
+
+FO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto FOR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto FOR;
+ default:
+ return -1;
+ }
+
+FOR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'W':
+ if (last) {
+ return -1;
+ }
+ goto FORW;
+ case 'w':
+ if (last) {
+ return -1;
+ }
+ goto FORW;
+ default:
+ return -1;
+ }
+
+FORW:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto FORWA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto FORWA;
+ default:
+ return -1;
+ }
+
+FORWA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto FORWAR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto FORWAR;
+ default:
+ return -1;
+ }
+
+FORWAR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto FORWARD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto FORWARD;
+ default:
+ return -1;
+ }
+
+FORWARD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto FORWARDE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto FORWARDE;
+ default:
+ return -1;
+ }
+
+FORWARDE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return 34;
+ }
+ goto FORWARDED;
+ case 'd':
+ if (last) {
+ return 34;
+ }
+ goto FORWARDED;
+ default:
+ return -1;
+ }
+
+FR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto FRO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto FRO;
+ default:
+ return -1;
+ }
+
+FRO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'M':
+ if (last) {
+ return 35;
+ }
+ goto FROM;
+ case 'm':
+ if (last) {
+ return 35;
+ }
+ goto FROM;
+ default:
+ return -1;
+ }
+
+H:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto HO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto HO;
+ default:
+ return -1;
+ }
+
+HO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto HOS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto HOS;
+ default:
+ return -1;
+ }
+
+HOS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return 36;
+ }
+ goto HOST;
+ case 't':
+ if (last) {
+ return 36;
+ }
+ goto HOST;
+ default:
+ return -1;
+ }
+
+I:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto IF;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto IF;
+ default:
+ return -1;
+ }
+
+IF:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto IF_;
+ default:
+ return -1;
+ }
+
+IF_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'M':
+ if (last) {
+ return -1;
+ }
+ goto IF_M;
+ case 'm':
+ if (last) {
+ return -1;
+ }
+ goto IF_M;
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto IF_N;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto IF_N;
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto IF_R;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto IF_R;
+ case 'U':
+ if (last) {
+ return -1;
+ }
+ goto IF_U;
+ case 'u':
+ if (last) {
+ return -1;
+ }
+ goto IF_U;
+ default:
+ return -1;
+ }
+
+IF_M:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto IF_MA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto IF_MA;
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto IF_MO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto IF_MO;
+ default:
+ return -1;
+ }
+
+IF_MA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto IF_MAT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto IF_MAT;
+ default:
+ return -1;
+ }
+
+IF_MAT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto IF_MATC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto IF_MATC;
+ default:
+ return -1;
+ }
+
+IF_MATC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return 37;
+ }
+ goto IF_MATCH;
+ case 'h':
+ if (last) {
+ return 37;
+ }
+ goto IF_MATCH;
+ default:
+ return -1;
+ }
+
+IF_MO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto IF_MOD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto IF_MOD;
+ default:
+ return -1;
+ }
+
+IF_MOD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODI;
+ default:
+ return -1;
+ }
+
+IF_MODI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIF;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIF;
+ default:
+ return -1;
+ }
+
+IF_MODIF:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFI;
+ default:
+ return -1;
+ }
+
+IF_MODIFI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIE;
+ default:
+ return -1;
+ }
+
+IF_MODIFIE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIED;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIED;
+ default:
+ return -1;
+ }
+
+IF_MODIFIED:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIED_;
+ default:
+ return -1;
+ }
+
+IF_MODIFIED_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIED_S;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIED_S;
+ default:
+ return -1;
+ }
+
+IF_MODIFIED_S:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIED_SI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIED_SI;
+ default:
+ return -1;
+ }
+
+IF_MODIFIED_SI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIED_SIN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIED_SIN;
+ default:
+ return -1;
+ }
+
+IF_MODIFIED_SIN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIED_SINC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIED_SINC;
+ default:
+ return -1;
+ }
+
+IF_MODIFIED_SINC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 38;
+ }
+ goto IF_MODIFIED_SINCE;
+ case 'e':
+ if (last) {
+ return 38;
+ }
+ goto IF_MODIFIED_SINCE;
+ default:
+ return -1;
+ }
+
+IF_N:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto IF_NO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto IF_NO;
+ default:
+ return -1;
+ }
+
+IF_NO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto IF_NON;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto IF_NON;
+ default:
+ return -1;
+ }
+
+IF_NON:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto IF_NONE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto IF_NONE;
+ default:
+ return -1;
+ }
+
+IF_NONE:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto IF_NONE_;
+ default:
+ return -1;
+ }
+
+IF_NONE_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'M':
+ if (last) {
+ return -1;
+ }
+ goto IF_NONE_M;
+ case 'm':
+ if (last) {
+ return -1;
+ }
+ goto IF_NONE_M;
+ default:
+ return -1;
+ }
+
+IF_NONE_M:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto IF_NONE_MA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto IF_NONE_MA;
+ default:
+ return -1;
+ }
+
+IF_NONE_MA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto IF_NONE_MAT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto IF_NONE_MAT;
+ default:
+ return -1;
+ }
+
+IF_NONE_MAT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto IF_NONE_MATC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto IF_NONE_MATC;
+ default:
+ return -1;
+ }
+
+IF_NONE_MATC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return 39;
+ }
+ goto IF_NONE_MATCH;
+ case 'h':
+ if (last) {
+ return 39;
+ }
+ goto IF_NONE_MATCH;
+ default:
+ return -1;
+ }
+
+IF_R:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto IF_RA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto IF_RA;
+ default:
+ return -1;
+ }
+
+IF_RA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto IF_RAN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto IF_RAN;
+ default:
+ return -1;
+ }
+
+IF_RAN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto IF_RANG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto IF_RANG;
+ default:
+ return -1;
+ }
+
+IF_RANG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 40;
+ }
+ goto IF_RANGE;
+ case 'e':
+ if (last) {
+ return 40;
+ }
+ goto IF_RANGE;
+ default:
+ return -1;
+ }
+
+IF_U:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto IF_UN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto IF_UN;
+ default:
+ return -1;
+ }
+
+IF_UN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'M':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNM;
+ case 'm':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNM;
+ default:
+ return -1;
+ }
+
+IF_UNM:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMO;
+ default:
+ return -1;
+ }
+
+IF_UNMO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMOD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMOD;
+ default:
+ return -1;
+ }
+
+IF_UNMOD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODI;
+ default:
+ return -1;
+ }
+
+IF_UNMODI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIF;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIF;
+ default:
+ return -1;
+ }
+
+IF_UNMODIF:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFI;
+ default:
+ return -1;
+ }
+
+IF_UNMODIFI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIE;
+ default:
+ return -1;
+ }
+
+IF_UNMODIFIE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIED;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIED;
+ default:
+ return -1;
+ }
+
+IF_UNMODIFIED:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIED_;
+ default:
+ return -1;
+ }
+
+IF_UNMODIFIED_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIED_S;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIED_S;
+ default:
+ return -1;
+ }
+
+IF_UNMODIFIED_S:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIED_SI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIED_SI;
+ default:
+ return -1;
+ }
+
+IF_UNMODIFIED_SI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIED_SIN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIED_SIN;
+ default:
+ return -1;
+ }
+
+IF_UNMODIFIED_SIN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIED_SINC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIED_SINC;
+ default:
+ return -1;
+ }
+
+IF_UNMODIFIED_SINC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 41;
+ }
+ goto IF_UNMODIFIED_SINCE;
+ case 'e':
+ if (last) {
+ return 41;
+ }
+ goto IF_UNMODIFIED_SINCE;
+ default:
+ return -1;
+ }
+
+K:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto KE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto KE;
+ default:
+ return -1;
+ }
+
+KE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto KEE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto KEE;
+ default:
+ return -1;
+ }
+
+KEE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'P':
+ if (last) {
+ return -1;
+ }
+ goto KEEP;
+ case 'p':
+ if (last) {
+ return -1;
+ }
+ goto KEEP;
+ default:
+ return -1;
+ }
+
+KEEP:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto KEEP_;
+ default:
+ return -1;
+ }
+
+KEEP_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto KEEP_A;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto KEEP_A;
+ default:
+ return -1;
+ }
+
+KEEP_A:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'L':
+ if (last) {
+ return -1;
+ }
+ goto KEEP_AL;
+ case 'l':
+ if (last) {
+ return -1;
+ }
+ goto KEEP_AL;
+ default:
+ return -1;
+ }
+
+KEEP_AL:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto KEEP_ALI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto KEEP_ALI;
+ default:
+ return -1;
+ }
+
+KEEP_ALI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'V':
+ if (last) {
+ return -1;
+ }
+ goto KEEP_ALIV;
+ case 'v':
+ if (last) {
+ return -1;
+ }
+ goto KEEP_ALIV;
+ default:
+ return -1;
+ }
+
+KEEP_ALIV:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 42;
+ }
+ goto KEEP_ALIVE;
+ case 'e':
+ if (last) {
+ return 42;
+ }
+ goto KEEP_ALIVE;
+ default:
+ return -1;
+ }
+
+L:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto LA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto LA;
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto LI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto LI;
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto LO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto LO;
+ default:
+ return -1;
+ }
+
+LA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto LAS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto LAS;
+ default:
+ return -1;
+ }
+
+LAS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto LAST;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto LAST;
+ default:
+ return -1;
+ }
+
+LAST:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto LAST_;
+ default:
+ return -1;
+ }
+
+LAST_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto LAST_E;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto LAST_E;
+ case 'M':
+ if (last) {
+ return -1;
+ }
+ goto LAST_M;
+ case 'm':
+ if (last) {
+ return -1;
+ }
+ goto LAST_M;
+ default:
+ return -1;
+ }
+
+LAST_E:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'V':
+ if (last) {
+ return -1;
+ }
+ goto LAST_EV;
+ case 'v':
+ if (last) {
+ return -1;
+ }
+ goto LAST_EV;
+ default:
+ return -1;
+ }
+
+LAST_EV:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto LAST_EVE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto LAST_EVE;
+ default:
+ return -1;
+ }
+
+LAST_EVE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto LAST_EVEN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto LAST_EVEN;
+ default:
+ return -1;
+ }
+
+LAST_EVEN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto LAST_EVENT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto LAST_EVENT;
+ default:
+ return -1;
+ }
+
+LAST_EVENT:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto LAST_EVENT_;
+ default:
+ return -1;
+ }
+
+LAST_EVENT_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto LAST_EVENT_I;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto LAST_EVENT_I;
+ default:
+ return -1;
+ }
+
+LAST_EVENT_I:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return 43;
+ }
+ goto LAST_EVENT_ID;
+ case 'd':
+ if (last) {
+ return 43;
+ }
+ goto LAST_EVENT_ID;
+ default:
+ return -1;
+ }
+
+LAST_M:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MO;
+ default:
+ return -1;
+ }
+
+LAST_MO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MOD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MOD;
+ default:
+ return -1;
+ }
+
+LAST_MOD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MODI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MODI;
+ default:
+ return -1;
+ }
+
+LAST_MODI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MODIF;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MODIF;
+ default:
+ return -1;
+ }
+
+LAST_MODIF:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MODIFI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MODIFI;
+ default:
+ return -1;
+ }
+
+LAST_MODIFI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MODIFIE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MODIFIE;
+ default:
+ return -1;
+ }
+
+LAST_MODIFIE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return 44;
+ }
+ goto LAST_MODIFIED;
+ case 'd':
+ if (last) {
+ return 44;
+ }
+ goto LAST_MODIFIED;
+ default:
+ return -1;
+ }
+
+LI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto LIN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto LIN;
+ default:
+ return -1;
+ }
+
+LIN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'K':
+ if (last) {
+ return 45;
+ }
+ goto LINK;
+ case 'k':
+ if (last) {
+ return 45;
+ }
+ goto LINK;
+ default:
+ return -1;
+ }
+
+LO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto LOC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto LOC;
+ default:
+ return -1;
+ }
+
+LOC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto LOCA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto LOCA;
+ default:
+ return -1;
+ }
+
+LOCA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto LOCAT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto LOCAT;
+ default:
+ return -1;
+ }
+
+LOCAT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto LOCATI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto LOCATI;
+ default:
+ return -1;
+ }
+
+LOCATI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto LOCATIO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto LOCATIO;
+ default:
+ return -1;
+ }
+
+LOCATIO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return 46;
+ }
+ goto LOCATION;
+ case 'n':
+ if (last) {
+ return 46;
+ }
+ goto LOCATION;
+ default:
+ return -1;
+ }
+
+M:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto MA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto MA;
+ default:
+ return -1;
+ }
+
+MA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'X':
+ if (last) {
+ return -1;
+ }
+ goto MAX;
+ case 'x':
+ if (last) {
+ return -1;
+ }
+ goto MAX;
+ default:
+ return -1;
+ }
+
+MAX:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto MAX_;
+ default:
+ return -1;
+ }
+
+MAX_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto MAX_F;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto MAX_F;
+ default:
+ return -1;
+ }
+
+MAX_F:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FO;
+ default:
+ return -1;
+ }
+
+MAX_FO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FOR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FOR;
+ default:
+ return -1;
+ }
+
+MAX_FOR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'W':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FORW;
+ case 'w':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FORW;
+ default:
+ return -1;
+ }
+
+MAX_FORW:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FORWA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FORWA;
+ default:
+ return -1;
+ }
+
+MAX_FORWA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FORWAR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FORWAR;
+ default:
+ return -1;
+ }
+
+MAX_FORWAR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FORWARD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FORWARD;
+ default:
+ return -1;
+ }
+
+MAX_FORWARD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return 47;
+ }
+ goto MAX_FORWARDS;
+ case 's':
+ if (last) {
+ return 47;
+ }
+ goto MAX_FORWARDS;
+ default:
+ return -1;
+ }
+
+O:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto OR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto OR;
+ default:
+ return -1;
+ }
+
+OR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto ORI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto ORI;
+ default:
+ return -1;
+ }
+
+ORI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto ORIG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto ORIG;
+ default:
+ return -1;
+ }
+
+ORIG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto ORIGI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto ORIGI;
+ default:
+ return -1;
+ }
+
+ORIGI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return 48;
+ }
+ goto ORIGIN;
+ case 'n':
+ if (last) {
+ return 48;
+ }
+ goto ORIGIN;
+ default:
+ return -1;
+ }
+
+P:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto PR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto PR;
+ default:
+ return -1;
+ }
+
+PR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto PRA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto PRA;
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto PRO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto PRO;
+ default:
+ return -1;
+ }
+
+PRA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto PRAG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto PRAG;
+ default:
+ return -1;
+ }
+
+PRAG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'M':
+ if (last) {
+ return -1;
+ }
+ goto PRAGM;
+ case 'm':
+ if (last) {
+ return -1;
+ }
+ goto PRAGM;
+ default:
+ return -1;
+ }
+
+PRAGM:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return 49;
+ }
+ goto PRAGMA;
+ case 'a':
+ if (last) {
+ return 49;
+ }
+ goto PRAGMA;
+ default:
+ return -1;
+ }
+
+PRO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'X':
+ if (last) {
+ return -1;
+ }
+ goto PROX;
+ case 'x':
+ if (last) {
+ return -1;
+ }
+ goto PROX;
+ default:
+ return -1;
+ }
+
+PROX:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'Y':
+ if (last) {
+ return -1;
+ }
+ goto PROXY;
+ case 'y':
+ if (last) {
+ return -1;
+ }
+ goto PROXY;
+ default:
+ return -1;
+ }
+
+PROXY:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_;
+ default:
+ return -1;
+ }
+
+PROXY_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_A;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_A;
+ default:
+ return -1;
+ }
+
+PROXY_A:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'U':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AU;
+ case 'u':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AU;
+ default:
+ return -1;
+ }
+
+PROXY_AU:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUT;
+ default:
+ return -1;
+ }
+
+PROXY_AUT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTH;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTH;
+ default:
+ return -1;
+ }
+
+PROXY_AUTH:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHE;
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHO;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHEN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHEN;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHEN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHENT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHENT;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHENT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHENTI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHENTI;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHENTI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHENTIC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHENTIC;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHENTIC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHENTICA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHENTICA;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHENTICA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHENTICAT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHENTICAT;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHENTICAT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 50;
+ }
+ goto PROXY_AUTHENTICATE;
+ case 'e':
+ if (last) {
+ return 50;
+ }
+ goto PROXY_AUTHENTICATE;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHOR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHOR;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHOR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORI;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHORI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'Z':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORIZ;
+ case 'z':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORIZ;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHORIZ:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORIZA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORIZA;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHORIZA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORIZAT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORIZAT;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHORIZAT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORIZATI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORIZATI;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHORIZATI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORIZATIO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORIZATIO;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHORIZATIO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return 51;
+ }
+ goto PROXY_AUTHORIZATION;
+ case 'n':
+ if (last) {
+ return 51;
+ }
+ goto PROXY_AUTHORIZATION;
+ default:
+ return -1;
+ }
+
+R:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto RA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto RA;
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto RE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto RE;
+ default:
+ return -1;
+ }
+
+RA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto RAN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto RAN;
+ default:
+ return -1;
+ }
+
+RAN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto RANG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto RANG;
+ default:
+ return -1;
+ }
+
+RANG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 52;
+ }
+ goto RANGE;
+ case 'e':
+ if (last) {
+ return 52;
+ }
+ goto RANGE;
+ default:
+ return -1;
+ }
+
+RE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto REF;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto REF;
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto RET;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto RET;
+ default:
+ return -1;
+ }
+
+REF:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto REFE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto REFE;
+ default:
+ return -1;
+ }
+
+REFE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto REFER;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto REFER;
+ default:
+ return -1;
+ }
+
+REFER:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto REFERE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto REFERE;
+ default:
+ return -1;
+ }
+
+REFERE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return 53;
+ }
+ goto REFERER;
+ case 'r':
+ if (last) {
+ return 53;
+ }
+ goto REFERER;
+ default:
+ return -1;
+ }
+
+RET:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto RETR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto RETR;
+ default:
+ return -1;
+ }
+
+RETR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'Y':
+ if (last) {
+ return -1;
+ }
+ goto RETRY;
+ case 'y':
+ if (last) {
+ return -1;
+ }
+ goto RETRY;
+ default:
+ return -1;
+ }
+
+RETRY:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto RETRY_;
+ default:
+ return -1;
+ }
+
+RETRY_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto RETRY_A;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto RETRY_A;
+ default:
+ return -1;
+ }
+
+RETRY_A:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto RETRY_AF;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto RETRY_AF;
+ default:
+ return -1;
+ }
+
+RETRY_AF:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto RETRY_AFT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto RETRY_AFT;
+ default:
+ return -1;
+ }
+
+RETRY_AFT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto RETRY_AFTE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto RETRY_AFTE;
+ default:
+ return -1;
+ }
+
+RETRY_AFTE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return 54;
+ }
+ goto RETRY_AFTER;
+ case 'r':
+ if (last) {
+ return 54;
+ }
+ goto RETRY_AFTER;
+ default:
+ return -1;
+ }
+
+S:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto SE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto SE;
+ default:
+ return -1;
+ }
+
+SE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto SEC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto SEC;
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto SER;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto SER;
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto SET;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto SET;
+ default:
+ return -1;
+ }
+
+SEC:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto SEC_;
+ default:
+ return -1;
+ }
+
+SEC_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'W':
+ if (last) {
+ return -1;
+ }
+ goto SEC_W;
+ case 'w':
+ if (last) {
+ return -1;
+ }
+ goto SEC_W;
+ default:
+ return -1;
+ }
+
+SEC_W:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WE;
+ default:
+ return -1;
+ }
+
+SEC_WE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'B':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEB;
+ case 'b':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEB;
+ default:
+ return -1;
+ }
+
+SEC_WEB:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBS;
+ default:
+ return -1;
+ }
+
+SEC_WEBS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSO;
+ default:
+ return -1;
+ }
+
+SEC_WEBSO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOC;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'K':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCK;
+ case 'k':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCK;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCK:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKE;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_A;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_A;
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_E;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_E;
+ case 'K':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_K;
+ case 'k':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_K;
+ case 'P':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_P;
+ case 'p':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_P;
+ case 'V':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_V;
+ case 'v':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_V;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_A:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_AC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_AC;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_AC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_ACC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_ACC;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_ACC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_ACCE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_ACCE;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_ACCE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'P':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_ACCEP;
+ case 'p':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_ACCEP;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_ACCEP:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return 55;
+ }
+ goto SEC_WEBSOCKET_ACCEPT;
+ case 't':
+ if (last) {
+ return 55;
+ }
+ goto SEC_WEBSOCKET_ACCEPT;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_E:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'X':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EX;
+ case 'x':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EX;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_EX:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXT;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_EXT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTE;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_EXTE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTEN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTEN;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_EXTEN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTENS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTENS;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_EXTENS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTENSI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTENSI;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_EXTENSI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTENSIO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTENSIO;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_EXTENSIO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTENSION;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTENSION;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_EXTENSION:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return 56;
+ }
+ goto SEC_WEBSOCKET_EXTENSIONS;
+ case 's':
+ if (last) {
+ return 56;
+ }
+ goto SEC_WEBSOCKET_EXTENSIONS;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_K:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_KE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_KE;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_KE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'Y':
+ if (last) {
+ return 57;
+ }
+ goto SEC_WEBSOCKET_KEY;
+ case 'y':
+ if (last) {
+ return 57;
+ }
+ goto SEC_WEBSOCKET_KEY;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_KEY:
+ NEXT_CHAR();
+ switch (ch) {
+ case '1':
+ if (last) {
+ return 58;
+ }
+ goto SEC_WEBSOCKET_KEY1;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_P:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PR;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_PR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PRO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PRO;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_PRO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PROT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PROT;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_PROT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PROTO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PROTO;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_PROTO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PROTOC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PROTOC;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_PROTOC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PROTOCO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PROTOCO;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_PROTOCO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'L':
+ if (last) {
+ return 59;
+ }
+ goto SEC_WEBSOCKET_PROTOCOL;
+ case 'l':
+ if (last) {
+ return 59;
+ }
+ goto SEC_WEBSOCKET_PROTOCOL;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_V:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_VE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_VE;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_VE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_VER;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_VER;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_VER:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_VERS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_VERS;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_VERS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_VERSI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_VERSI;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_VERSI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_VERSIO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_VERSIO;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_VERSIO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return 60;
+ }
+ goto SEC_WEBSOCKET_VERSION;
+ case 'n':
+ if (last) {
+ return 60;
+ }
+ goto SEC_WEBSOCKET_VERSION;
+ default:
+ return -1;
+ }
+
+SER:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'V':
+ if (last) {
+ return -1;
+ }
+ goto SERV;
+ case 'v':
+ if (last) {
+ return -1;
+ }
+ goto SERV;
+ default:
+ return -1;
+ }
+
+SERV:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto SERVE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto SERVE;
+ default:
+ return -1;
+ }
+
+SERVE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return 61;
+ }
+ goto SERVER;
+ case 'r':
+ if (last) {
+ return 61;
+ }
+ goto SERVER;
+ default:
+ return -1;
+ }
+
+SET:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto SET_;
+ default:
+ return -1;
+ }
+
+SET_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto SET_C;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto SET_C;
+ default:
+ return -1;
+ }
+
+SET_C:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto SET_CO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto SET_CO;
+ default:
+ return -1;
+ }
+
+SET_CO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto SET_COO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto SET_COO;
+ default:
+ return -1;
+ }
+
+SET_COO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'K':
+ if (last) {
+ return -1;
+ }
+ goto SET_COOK;
+ case 'k':
+ if (last) {
+ return -1;
+ }
+ goto SET_COOK;
+ default:
+ return -1;
+ }
+
+SET_COOK:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto SET_COOKI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto SET_COOKI;
+ default:
+ return -1;
+ }
+
+SET_COOKI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 62;
+ }
+ goto SET_COOKIE;
+ case 'e':
+ if (last) {
+ return 62;
+ }
+ goto SET_COOKIE;
+ default:
+ return -1;
+ }
+
+T:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 63;
+ }
+ goto TE;
+ case 'e':
+ if (last) {
+ return 63;
+ }
+ goto TE;
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto TR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto TR;
+ default:
+ return -1;
+ }
+
+TR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto TRA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto TRA;
+ default:
+ return -1;
+ }
+
+TRA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto TRAI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto TRAI;
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto TRAN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto TRAN;
+ default:
+ return -1;
+ }
+
+TRAI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'L':
+ if (last) {
+ return -1;
+ }
+ goto TRAIL;
+ case 'l':
+ if (last) {
+ return -1;
+ }
+ goto TRAIL;
+ default:
+ return -1;
+ }
+
+TRAIL:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto TRAILE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto TRAILE;
+ default:
+ return -1;
+ }
+
+TRAILE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return 64;
+ }
+ goto TRAILER;
+ case 'r':
+ if (last) {
+ return 64;
+ }
+ goto TRAILER;
+ default:
+ return -1;
+ }
+
+TRAN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto TRANS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto TRANS;
+ default:
+ return -1;
+ }
+
+TRANS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto TRANSF;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto TRANSF;
+ default:
+ return -1;
+ }
+
+TRANSF:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFE;
+ default:
+ return -1;
+ }
+
+TRANSFE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER;
+ default:
+ return -1;
+ }
+
+TRANSFER:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_;
+ default:
+ return -1;
+ }
+
+TRANSFER_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_E;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_E;
+ default:
+ return -1;
+ }
+
+TRANSFER_E:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_EN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_EN;
+ default:
+ return -1;
+ }
+
+TRANSFER_EN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_ENC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_ENC;
+ default:
+ return -1;
+ }
+
+TRANSFER_ENC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_ENCO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_ENCO;
+ default:
+ return -1;
+ }
+
+TRANSFER_ENCO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_ENCOD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_ENCOD;
+ default:
+ return -1;
+ }
+
+TRANSFER_ENCOD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_ENCODI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_ENCODI;
+ default:
+ return -1;
+ }
+
+TRANSFER_ENCODI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_ENCODIN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_ENCODIN;
+ default:
+ return -1;
+ }
+
+TRANSFER_ENCODIN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return 65;
+ }
+ goto TRANSFER_ENCODING;
+ case 'g':
+ if (last) {
+ return 65;
+ }
+ goto TRANSFER_ENCODING;
+ default:
+ return -1;
+ }
+
+U:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto UR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto UR;
+ case 'P':
+ if (last) {
+ return -1;
+ }
+ goto UP;
+ case 'p':
+ if (last) {
+ return -1;
+ }
+ goto UP;
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto US;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto US;
+ default:
+ return -1;
+ }
+
+UR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return 66;
+ }
+ goto URI;
+ case 'i':
+ if (last) {
+ return 66;
+ }
+ goto URI;
+ default:
+ return -1;
+ }
+
+UP:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto UPG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto UPG;
+ default:
+ return -1;
+ }
+
+UPG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto UPGR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto UPGR;
+ default:
+ return -1;
+ }
+
+UPGR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto UPGRA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto UPGRA;
+ default:
+ return -1;
+ }
+
+UPGRA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto UPGRAD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto UPGRAD;
+ default:
+ return -1;
+ }
+
+UPGRAD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 67;
+ }
+ goto UPGRADE;
+ case 'e':
+ if (last) {
+ return 67;
+ }
+ goto UPGRADE;
+ default:
+ return -1;
+ }
+
+US:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto USE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto USE;
+ default:
+ return -1;
+ }
+
+USE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto USER;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto USER;
+ default:
+ return -1;
+ }
+
+USER:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto USER_;
+ default:
+ return -1;
+ }
+
+USER_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto USER_A;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto USER_A;
+ default:
+ return -1;
+ }
+
+USER_A:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto USER_AG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto USER_AG;
+ default:
+ return -1;
+ }
+
+USER_AG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto USER_AGE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto USER_AGE;
+ default:
+ return -1;
+ }
+
+USER_AGE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto USER_AGEN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto USER_AGEN;
+ default:
+ return -1;
+ }
+
+USER_AGEN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return 68;
+ }
+ goto USER_AGENT;
+ case 't':
+ if (last) {
+ return 68;
+ }
+ goto USER_AGENT;
+ default:
+ return -1;
+ }
+
+V:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto VA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto VA;
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto VI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto VI;
+ default:
+ return -1;
+ }
+
+VA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto VAR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto VAR;
+ default:
+ return -1;
+ }
+
+VAR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'Y':
+ if (last) {
+ return 69;
+ }
+ goto VARY;
+ case 'y':
+ if (last) {
+ return 69;
+ }
+ goto VARY;
+ default:
+ return -1;
+ }
+
+VI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return 70;
+ }
+ goto VIA;
+ case 'a':
+ if (last) {
+ return 70;
+ }
+ goto VIA;
+ default:
+ return -1;
+ }
+
+W:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'W':
+ if (last) {
+ return -1;
+ }
+ goto WW;
+ case 'w':
+ if (last) {
+ return -1;
+ }
+ goto WW;
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto WA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto WA;
+ default:
+ return -1;
+ }
+
+WW:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'W':
+ if (last) {
+ return -1;
+ }
+ goto WWW;
+ case 'w':
+ if (last) {
+ return -1;
+ }
+ goto WWW;
+ default:
+ return -1;
+ }
+
+WWW:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto WWW_;
+ default:
+ return -1;
+ }
+
+WWW_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto WWW_A;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto WWW_A;
+ default:
+ return -1;
+ }
+
+WWW_A:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'U':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AU;
+ case 'u':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AU;
+ default:
+ return -1;
+ }
+
+WWW_AU:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUT;
+ default:
+ return -1;
+ }
+
+WWW_AUT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTH;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTH;
+ default:
+ return -1;
+ }
+
+WWW_AUTH:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHE;
+ default:
+ return -1;
+ }
+
+WWW_AUTHE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHEN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHEN;
+ default:
+ return -1;
+ }
+
+WWW_AUTHEN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHENT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHENT;
+ default:
+ return -1;
+ }
+
+WWW_AUTHENT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHENTI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHENTI;
+ default:
+ return -1;
+ }
+
+WWW_AUTHENTI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHENTIC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHENTIC;
+ default:
+ return -1;
+ }
+
+WWW_AUTHENTIC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHENTICA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHENTICA;
+ default:
+ return -1;
+ }
+
+WWW_AUTHENTICA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHENTICAT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHENTICAT;
+ default:
+ return -1;
+ }
+
+WWW_AUTHENTICAT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 71;
+ }
+ goto WWW_AUTHENTICATE;
+ case 'e':
+ if (last) {
+ return 71;
+ }
+ goto WWW_AUTHENTICATE;
+ default:
+ return -1;
+ }
+
+WA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto WAN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto WAN;
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto WAR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto WAR;
+ default:
+ return -1;
+ }
+
+WAN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto WANT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto WANT;
+ default:
+ return -1;
+ }
+
+WANT:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto WANT_;
+ default:
+ return -1;
+ }
+
+WANT_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto WANT_D;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto WANT_D;
+ default:
+ return -1;
+ }
+
+WANT_D:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto WANT_DI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto WANT_DI;
+ default:
+ return -1;
+ }
+
+WANT_DI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto WANT_DIG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto WANT_DIG;
+ default:
+ return -1;
+ }
+
+WANT_DIG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto WANT_DIGE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto WANT_DIGE;
+ default:
+ return -1;
+ }
+
+WANT_DIGE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto WANT_DIGES;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto WANT_DIGES;
+ default:
+ return -1;
+ }
+
+WANT_DIGES:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return 72;
+ }
+ goto WANT_DIGEST;
+ case 't':
+ if (last) {
+ return 72;
+ }
+ goto WANT_DIGEST;
+ default:
+ return -1;
+ }
+
+WAR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto WARN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto WARN;
+ default:
+ return -1;
+ }
+
+WARN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto WARNI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto WARNI;
+ default:
+ return -1;
+ }
+
+WARNI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto WARNIN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto WARNIN;
+ default:
+ return -1;
+ }
+
+WARNIN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return 73;
+ }
+ goto WARNING;
+ case 'g':
+ if (last) {
+ return 73;
+ }
+ goto WARNING;
+ default:
+ return -1;
+ }
+
+X:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto X_;
+ default:
+ return -1;
+ }
+
+X_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto X_F;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto X_F;
+ default:
+ return -1;
+ }
+
+X_F:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto X_FO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto X_FO;
+ default:
+ return -1;
+ }
+
+X_FO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto X_FOR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto X_FOR;
+ default:
+ return -1;
+ }
+
+X_FOR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'W':
+ if (last) {
+ return -1;
+ }
+ goto X_FORW;
+ case 'w':
+ if (last) {
+ return -1;
+ }
+ goto X_FORW;
+ default:
+ return -1;
+ }
+
+X_FORW:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWA;
+ default:
+ return -1;
+ }
+
+X_FORWA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWAR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWAR;
+ default:
+ return -1;
+ }
+
+X_FORWAR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARD;
+ default:
+ return -1;
+ }
+
+X_FORWARD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDE;
+ default:
+ return -1;
+ }
+
+X_FORWARDE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED;
+ default:
+ return -1;
+ }
+
+X_FORWARDED:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_;
+ default:
+ return -1;
+ }
+
+X_FORWARDED_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_F;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_F;
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_H;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_H;
+ case 'P':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_P;
+ case 'p':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_P;
+ default:
+ return -1;
+ }
+
+X_FORWARDED_F:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_FO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_FO;
+ default:
+ return -1;
+ }
+
+X_FORWARDED_FO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return 74;
+ }
+ goto X_FORWARDED_FOR;
+ case 'r':
+ if (last) {
+ return 74;
+ }
+ goto X_FORWARDED_FOR;
+ default:
+ return -1;
+ }
+
+X_FORWARDED_H:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_HO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_HO;
+ default:
+ return -1;
+ }
+
+X_FORWARDED_HO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_HOS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_HOS;
+ default:
+ return -1;
+ }
+
+X_FORWARDED_HOS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return 75;
+ }
+ goto X_FORWARDED_HOST;
+ case 't':
+ if (last) {
+ return 75;
+ }
+ goto X_FORWARDED_HOST;
+ default:
+ return -1;
+ }
+
+X_FORWARDED_P:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_PR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_PR;
+ default:
+ return -1;
+ }
+
+X_FORWARDED_PR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_PRO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_PRO;
+ default:
+ return -1;
+ }
+
+X_FORWARDED_PRO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_PROT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_PROT;
+ default:
+ return -1;
+ }
+
+X_FORWARDED_PROT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return 76;
+ }
+ goto X_FORWARDED_PROTO;
+ case 'o':
+ if (last) {
+ return 76;
+ }
+ goto X_FORWARDED_PROTO;
+ default:
+ return -1;
+ }
+
+ACCEPT_CHARSET:
+ACCEPT_ENCODING:
+ACCEPT_LANGUAGE:
+ACCEPT_RANGES:
+ACCESS_CONTROL_ALLOW_CREDENTIALS:
+ACCESS_CONTROL_ALLOW_HEADERS:
+ACCESS_CONTROL_ALLOW_METHODS:
+ACCESS_CONTROL_ALLOW_ORIGIN:
+ACCESS_CONTROL_EXPOSE_HEADERS:
+ACCESS_CONTROL_MAX_AGE:
+ACCESS_CONTROL_REQUEST_HEADERS:
+ACCESS_CONTROL_REQUEST_METHOD:
+AGE:
+ALLOW:
+AUTHORIZATION:
+CACHE_CONTROL:
+CONNECTION:
+CONTENT_DISPOSITION:
+CONTENT_ENCODING:
+CONTENT_LANGUAGE:
+CONTENT_LENGTH:
+CONTENT_LOCATION:
+CONTENT_MD5:
+CONTENT_RANGE:
+CONTENT_TRANSFER_ENCODING:
+CONTENT_TYPE:
+COOKIE:
+DATE:
+DESTINATION:
+DIGEST:
+ETAG:
+EXPECT:
+EXPIRES:
+FORWARDED:
+FROM:
+HOST:
+IF_MATCH:
+IF_MODIFIED_SINCE:
+IF_NONE_MATCH:
+IF_RANGE:
+IF_UNMODIFIED_SINCE:
+KEEP_ALIVE:
+LAST_EVENT_ID:
+LAST_MODIFIED:
+LINK:
+LOCATION:
+MAX_FORWARDS:
+ORIGIN:
+PRAGMA:
+PROXY_AUTHENTICATE:
+PROXY_AUTHORIZATION:
+RANGE:
+REFERER:
+RETRY_AFTER:
+SEC_WEBSOCKET_ACCEPT:
+SEC_WEBSOCKET_EXTENSIONS:
+SEC_WEBSOCKET_KEY1:
+SEC_WEBSOCKET_PROTOCOL:
+SEC_WEBSOCKET_VERSION:
+SERVER:
+SET_COOKIE:
+TE:
+TRAILER:
+TRANSFER_ENCODING:
+UPGRADE:
+URI:
+USER_AGENT:
+VARY:
+VIA:
+WANT_DIGEST:
+WARNING:
+WWW_AUTHENTICATE:
+X_FORWARDED_FOR:
+X_FORWARDED_HOST:
+X_FORWARDED_PROTO:
+missing:
+ /* nothing found */
+ return -1;
+}
diff --git a/contrib/python/aiohttp/aiohttp/_find_header.h b/contrib/python/aiohttp/aiohttp/_find_header.h
new file mode 100644
index 0000000000..99b7b4f828
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/_find_header.h
@@ -0,0 +1,14 @@
+#ifndef _FIND_HEADERS_H
+#define _FIND_HEADERS_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+int find_header(const char *str, int size);
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/contrib/python/aiohttp/aiohttp/_find_header.pxd b/contrib/python/aiohttp/aiohttp/_find_header.pxd
new file mode 100644
index 0000000000..37a6c37268
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/_find_header.pxd
@@ -0,0 +1,2 @@
+cdef extern from "_find_header.h":
+ int find_header(char *, int)
diff --git a/contrib/python/aiohttp/aiohttp/_headers.pxi b/contrib/python/aiohttp/aiohttp/_headers.pxi
new file mode 100644
index 0000000000..3744721d47
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/_headers.pxi
@@ -0,0 +1,83 @@
+# The file is autogenerated from aiohttp/hdrs.py
+# Run ./tools/gen.py to update it after the origin changing.
+
+from . import hdrs
+cdef tuple headers = (
+ hdrs.ACCEPT,
+ hdrs.ACCEPT_CHARSET,
+ hdrs.ACCEPT_ENCODING,
+ hdrs.ACCEPT_LANGUAGE,
+ hdrs.ACCEPT_RANGES,
+ hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
+ hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
+ hdrs.ACCESS_CONTROL_ALLOW_METHODS,
+ hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
+ hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
+ hdrs.ACCESS_CONTROL_MAX_AGE,
+ hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
+ hdrs.ACCESS_CONTROL_REQUEST_METHOD,
+ hdrs.AGE,
+ hdrs.ALLOW,
+ hdrs.AUTHORIZATION,
+ hdrs.CACHE_CONTROL,
+ hdrs.CONNECTION,
+ hdrs.CONTENT_DISPOSITION,
+ hdrs.CONTENT_ENCODING,
+ hdrs.CONTENT_LANGUAGE,
+ hdrs.CONTENT_LENGTH,
+ hdrs.CONTENT_LOCATION,
+ hdrs.CONTENT_MD5,
+ hdrs.CONTENT_RANGE,
+ hdrs.CONTENT_TRANSFER_ENCODING,
+ hdrs.CONTENT_TYPE,
+ hdrs.COOKIE,
+ hdrs.DATE,
+ hdrs.DESTINATION,
+ hdrs.DIGEST,
+ hdrs.ETAG,
+ hdrs.EXPECT,
+ hdrs.EXPIRES,
+ hdrs.FORWARDED,
+ hdrs.FROM,
+ hdrs.HOST,
+ hdrs.IF_MATCH,
+ hdrs.IF_MODIFIED_SINCE,
+ hdrs.IF_NONE_MATCH,
+ hdrs.IF_RANGE,
+ hdrs.IF_UNMODIFIED_SINCE,
+ hdrs.KEEP_ALIVE,
+ hdrs.LAST_EVENT_ID,
+ hdrs.LAST_MODIFIED,
+ hdrs.LINK,
+ hdrs.LOCATION,
+ hdrs.MAX_FORWARDS,
+ hdrs.ORIGIN,
+ hdrs.PRAGMA,
+ hdrs.PROXY_AUTHENTICATE,
+ hdrs.PROXY_AUTHORIZATION,
+ hdrs.RANGE,
+ hdrs.REFERER,
+ hdrs.RETRY_AFTER,
+ hdrs.SEC_WEBSOCKET_ACCEPT,
+ hdrs.SEC_WEBSOCKET_EXTENSIONS,
+ hdrs.SEC_WEBSOCKET_KEY,
+ hdrs.SEC_WEBSOCKET_KEY1,
+ hdrs.SEC_WEBSOCKET_PROTOCOL,
+ hdrs.SEC_WEBSOCKET_VERSION,
+ hdrs.SERVER,
+ hdrs.SET_COOKIE,
+ hdrs.TE,
+ hdrs.TRAILER,
+ hdrs.TRANSFER_ENCODING,
+ hdrs.URI,
+ hdrs.UPGRADE,
+ hdrs.USER_AGENT,
+ hdrs.VARY,
+ hdrs.VIA,
+ hdrs.WWW_AUTHENTICATE,
+ hdrs.WANT_DIGEST,
+ hdrs.WARNING,
+ hdrs.X_FORWARDED_FOR,
+ hdrs.X_FORWARDED_HOST,
+ hdrs.X_FORWARDED_PROTO,
+)
diff --git a/contrib/python/aiohttp/aiohttp/_helpers.pyx b/contrib/python/aiohttp/aiohttp/_helpers.pyx
new file mode 100644
index 0000000000..665f367c5d
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/_helpers.pyx
@@ -0,0 +1,35 @@
+cdef class reify:
+ """Use as a class method decorator. It operates almost exactly like
+ the Python `@property` decorator, but it puts the result of the
+ method it decorates into the instance dict after the first call,
+ effectively replacing the function it decorates with an instance
+ variable. It is, in Python parlance, a data descriptor.
+
+ """
+
+ cdef object wrapped
+ cdef object name
+
+ def __init__(self, wrapped):
+ self.wrapped = wrapped
+ self.name = wrapped.__name__
+
+ @property
+ def __doc__(self):
+ return self.wrapped.__doc__
+
+ def __get__(self, inst, owner):
+ try:
+ try:
+ return inst._cache[self.name]
+ except KeyError:
+ val = self.wrapped(inst)
+ inst._cache[self.name] = val
+ return val
+ except AttributeError:
+ if inst is None:
+ return self
+ raise
+
+ def __set__(self, inst, value):
+ raise AttributeError("reified property is read-only")
diff --git a/contrib/python/aiohttp/aiohttp/_http_parser.pyx b/contrib/python/aiohttp/aiohttp/_http_parser.pyx
new file mode 100644
index 0000000000..77bf0aa598
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/_http_parser.pyx
@@ -0,0 +1,818 @@
+#cython: language_level=3
+#
+# Based on https://github.com/MagicStack/httptools
+#
+from __future__ import absolute_import, print_function
+
+from cpython cimport (
+ Py_buffer,
+ PyBUF_SIMPLE,
+ PyBuffer_Release,
+ PyBytes_AsString,
+ PyBytes_AsStringAndSize,
+ PyObject_GetBuffer,
+)
+from cpython.mem cimport PyMem_Free, PyMem_Malloc
+from libc.limits cimport ULLONG_MAX
+from libc.string cimport memcpy
+
+from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy
+from yarl import URL as _URL
+
+from aiohttp import hdrs
+
+from .http_exceptions import (
+ BadHttpMessage,
+ BadStatusLine,
+ ContentLengthError,
+ InvalidHeader,
+ InvalidURLError,
+ LineTooLong,
+ PayloadEncodingError,
+ TransferEncodingError,
+)
+from .http_parser import DeflateBuffer as _DeflateBuffer
+from .http_writer import (
+ HttpVersion as _HttpVersion,
+ HttpVersion10 as _HttpVersion10,
+ HttpVersion11 as _HttpVersion11,
+)
+from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader
+
+cimport cython
+
+from aiohttp cimport _cparser as cparser
+
+include "_headers.pxi"
+
+from aiohttp cimport _find_header
+
+DEF DEFAULT_FREELIST_SIZE = 250
+
+cdef extern from "Python.h":
+ int PyByteArray_Resize(object, Py_ssize_t) except -1
+ Py_ssize_t PyByteArray_Size(object) except -1
+ char* PyByteArray_AsString(object)
+
+__all__ = ('HttpRequestParser', 'HttpResponseParser',
+ 'RawRequestMessage', 'RawResponseMessage')
+
+cdef object URL = _URL
+cdef object URL_build = URL.build
+cdef object CIMultiDict = _CIMultiDict
+cdef object CIMultiDictProxy = _CIMultiDictProxy
+cdef object HttpVersion = _HttpVersion
+cdef object HttpVersion10 = _HttpVersion10
+cdef object HttpVersion11 = _HttpVersion11
+cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1
+cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING
+cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD
+cdef object StreamReader = _StreamReader
+cdef object DeflateBuffer = _DeflateBuffer
+
+
+cdef inline object extend(object buf, const char* at, size_t length):
+ cdef Py_ssize_t s
+ cdef char* ptr
+ s = PyByteArray_Size(buf)
+ PyByteArray_Resize(buf, s + length)
+ ptr = PyByteArray_AsString(buf)
+ memcpy(ptr + s, at, length)
+
+
+DEF METHODS_COUNT = 46;
+
+cdef list _http_method = []
+
+for i in range(METHODS_COUNT):
+ _http_method.append(
+ cparser.llhttp_method_name(<cparser.llhttp_method_t> i).decode('ascii'))
+
+
+cdef inline str http_method_str(int i):
+ if i < METHODS_COUNT:
+ return <str>_http_method[i]
+ else:
+ return "<unknown>"
+
+cdef inline object find_header(bytes raw_header):
+ cdef Py_ssize_t size
+ cdef char *buf
+ cdef int idx
+ PyBytes_AsStringAndSize(raw_header, &buf, &size)
+ idx = _find_header.find_header(buf, size)
+ if idx == -1:
+ return raw_header.decode('utf-8', 'surrogateescape')
+ return headers[idx]
+
+
+@cython.freelist(DEFAULT_FREELIST_SIZE)
+cdef class RawRequestMessage:
+ cdef readonly str method
+ cdef readonly str path
+ cdef readonly object version # HttpVersion
+ cdef readonly object headers # CIMultiDict
+ cdef readonly object raw_headers # tuple
+ cdef readonly object should_close
+ cdef readonly object compression
+ cdef readonly object upgrade
+ cdef readonly object chunked
+ cdef readonly object url # yarl.URL
+
+ def __init__(self, method, path, version, headers, raw_headers,
+ should_close, compression, upgrade, chunked, url):
+ self.method = method
+ self.path = path
+ self.version = version
+ self.headers = headers
+ self.raw_headers = raw_headers
+ self.should_close = should_close
+ self.compression = compression
+ self.upgrade = upgrade
+ self.chunked = chunked
+ self.url = url
+
+ def __repr__(self):
+ info = []
+ info.append(("method", self.method))
+ info.append(("path", self.path))
+ info.append(("version", self.version))
+ info.append(("headers", self.headers))
+ info.append(("raw_headers", self.raw_headers))
+ info.append(("should_close", self.should_close))
+ info.append(("compression", self.compression))
+ info.append(("upgrade", self.upgrade))
+ info.append(("chunked", self.chunked))
+ info.append(("url", self.url))
+ sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
+ return '<RawRequestMessage(' + sinfo + ')>'
+
+ def _replace(self, **dct):
+ cdef RawRequestMessage ret
+ ret = _new_request_message(self.method,
+ self.path,
+ self.version,
+ self.headers,
+ self.raw_headers,
+ self.should_close,
+ self.compression,
+ self.upgrade,
+ self.chunked,
+ self.url)
+ if "method" in dct:
+ ret.method = dct["method"]
+ if "path" in dct:
+ ret.path = dct["path"]
+ if "version" in dct:
+ ret.version = dct["version"]
+ if "headers" in dct:
+ ret.headers = dct["headers"]
+ if "raw_headers" in dct:
+ ret.raw_headers = dct["raw_headers"]
+ if "should_close" in dct:
+ ret.should_close = dct["should_close"]
+ if "compression" in dct:
+ ret.compression = dct["compression"]
+ if "upgrade" in dct:
+ ret.upgrade = dct["upgrade"]
+ if "chunked" in dct:
+ ret.chunked = dct["chunked"]
+ if "url" in dct:
+ ret.url = dct["url"]
+ return ret
+
+cdef _new_request_message(str method,
+ str path,
+ object version,
+ object headers,
+ object raw_headers,
+ bint should_close,
+ object compression,
+ bint upgrade,
+ bint chunked,
+ object url):
+ cdef RawRequestMessage ret
+ ret = RawRequestMessage.__new__(RawRequestMessage)
+ ret.method = method
+ ret.path = path
+ ret.version = version
+ ret.headers = headers
+ ret.raw_headers = raw_headers
+ ret.should_close = should_close
+ ret.compression = compression
+ ret.upgrade = upgrade
+ ret.chunked = chunked
+ ret.url = url
+ return ret
+
+
+@cython.freelist(DEFAULT_FREELIST_SIZE)
+cdef class RawResponseMessage:
+ cdef readonly object version # HttpVersion
+ cdef readonly int code
+ cdef readonly str reason
+ cdef readonly object headers # CIMultiDict
+ cdef readonly object raw_headers # tuple
+ cdef readonly object should_close
+ cdef readonly object compression
+ cdef readonly object upgrade
+ cdef readonly object chunked
+
+ def __init__(self, version, code, reason, headers, raw_headers,
+ should_close, compression, upgrade, chunked):
+ self.version = version
+ self.code = code
+ self.reason = reason
+ self.headers = headers
+ self.raw_headers = raw_headers
+ self.should_close = should_close
+ self.compression = compression
+ self.upgrade = upgrade
+ self.chunked = chunked
+
+ def __repr__(self):
+ info = []
+ info.append(("version", self.version))
+ info.append(("code", self.code))
+ info.append(("reason", self.reason))
+ info.append(("headers", self.headers))
+ info.append(("raw_headers", self.raw_headers))
+ info.append(("should_close", self.should_close))
+ info.append(("compression", self.compression))
+ info.append(("upgrade", self.upgrade))
+ info.append(("chunked", self.chunked))
+ sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
+ return '<RawResponseMessage(' + sinfo + ')>'
+
+
+cdef _new_response_message(object version,
+ int code,
+ str reason,
+ object headers,
+ object raw_headers,
+ bint should_close,
+ object compression,
+ bint upgrade,
+ bint chunked):
+ cdef RawResponseMessage ret
+ ret = RawResponseMessage.__new__(RawResponseMessage)
+ ret.version = version
+ ret.code = code
+ ret.reason = reason
+ ret.headers = headers
+ ret.raw_headers = raw_headers
+ ret.should_close = should_close
+ ret.compression = compression
+ ret.upgrade = upgrade
+ ret.chunked = chunked
+ return ret
+
+
+@cython.internal
+cdef class HttpParser:
+
+ cdef:
+ cparser.llhttp_t* _cparser
+ cparser.llhttp_settings_t* _csettings
+
+ bytearray _raw_name
+ bytearray _raw_value
+ bint _has_value
+
+ object _protocol
+ object _loop
+ object _timer
+
+ size_t _max_line_size
+ size_t _max_field_size
+ size_t _max_headers
+ bint _response_with_body
+ bint _read_until_eof
+
+ bint _started
+ object _url
+ bytearray _buf
+ str _path
+ str _reason
+ object _headers
+ list _raw_headers
+ bint _upgraded
+ list _messages
+ object _payload
+ bint _payload_error
+ object _payload_exception
+ object _last_error
+ bint _auto_decompress
+ int _limit
+
+ str _content_encoding
+
+ Py_buffer py_buf
+
+ def __cinit__(self):
+ self._cparser = <cparser.llhttp_t*> \
+ PyMem_Malloc(sizeof(cparser.llhttp_t))
+ if self._cparser is NULL:
+ raise MemoryError()
+
+ self._csettings = <cparser.llhttp_settings_t*> \
+ PyMem_Malloc(sizeof(cparser.llhttp_settings_t))
+ if self._csettings is NULL:
+ raise MemoryError()
+
+ def __dealloc__(self):
+ PyMem_Free(self._cparser)
+ PyMem_Free(self._csettings)
+
+ cdef _init(
+ self, cparser.llhttp_type mode,
+ object protocol, object loop, int limit,
+ object timer=None,
+ size_t max_line_size=8190, size_t max_headers=32768,
+ size_t max_field_size=8190, payload_exception=None,
+ bint response_with_body=True, bint read_until_eof=False,
+ bint auto_decompress=True,
+ ):
+ cparser.llhttp_settings_init(self._csettings)
+ cparser.llhttp_init(self._cparser, mode, self._csettings)
+ self._cparser.data = <void*>self
+ self._cparser.content_length = 0
+
+ self._protocol = protocol
+ self._loop = loop
+ self._timer = timer
+
+ self._buf = bytearray()
+ self._payload = None
+ self._payload_error = 0
+ self._payload_exception = payload_exception
+ self._messages = []
+
+ self._raw_name = bytearray()
+ self._raw_value = bytearray()
+ self._has_value = False
+
+ self._max_line_size = max_line_size
+ self._max_headers = max_headers
+ self._max_field_size = max_field_size
+ self._response_with_body = response_with_body
+ self._read_until_eof = read_until_eof
+ self._upgraded = False
+ self._auto_decompress = auto_decompress
+ self._content_encoding = None
+
+ self._csettings.on_url = cb_on_url
+ self._csettings.on_status = cb_on_status
+ self._csettings.on_header_field = cb_on_header_field
+ self._csettings.on_header_value = cb_on_header_value
+ self._csettings.on_headers_complete = cb_on_headers_complete
+ self._csettings.on_body = cb_on_body
+ self._csettings.on_message_begin = cb_on_message_begin
+ self._csettings.on_message_complete = cb_on_message_complete
+ self._csettings.on_chunk_header = cb_on_chunk_header
+ self._csettings.on_chunk_complete = cb_on_chunk_complete
+
+ self._last_error = None
+ self._limit = limit
+
+ cdef _process_header(self):
+ if self._raw_name:
+ raw_name = bytes(self._raw_name)
+ raw_value = bytes(self._raw_value)
+
+ name = find_header(raw_name)
+ value = raw_value.decode('utf-8', 'surrogateescape')
+
+ self._headers.add(name, value)
+
+ if name is CONTENT_ENCODING:
+ self._content_encoding = value
+
+ PyByteArray_Resize(self._raw_name, 0)
+ PyByteArray_Resize(self._raw_value, 0)
+ self._has_value = False
+ self._raw_headers.append((raw_name, raw_value))
+
+ cdef _on_header_field(self, const char* at, size_t length):
+ cdef Py_ssize_t size
+ cdef char *buf
+ if self._has_value:
+ self._process_header()
+
+ size = PyByteArray_Size(self._raw_name)
+ PyByteArray_Resize(self._raw_name, size + length)
+ buf = PyByteArray_AsString(self._raw_name)
+ memcpy(buf + size, at, length)
+
+ cdef _on_header_value(self, const char* at, size_t length):
+ cdef Py_ssize_t size
+ cdef char *buf
+
+ size = PyByteArray_Size(self._raw_value)
+ PyByteArray_Resize(self._raw_value, size + length)
+ buf = PyByteArray_AsString(self._raw_value)
+ memcpy(buf + size, at, length)
+ self._has_value = True
+
+ cdef _on_headers_complete(self):
+ self._process_header()
+
+ method = http_method_str(self._cparser.method)
+ should_close = not cparser.llhttp_should_keep_alive(self._cparser)
+ upgrade = self._cparser.upgrade
+ chunked = self._cparser.flags & cparser.F_CHUNKED
+
+ raw_headers = tuple(self._raw_headers)
+ headers = CIMultiDictProxy(self._headers)
+
+ if upgrade or self._cparser.method == 5: # cparser.CONNECT:
+ self._upgraded = True
+
+ # do not support old websocket spec
+ if SEC_WEBSOCKET_KEY1 in headers:
+ raise InvalidHeader(SEC_WEBSOCKET_KEY1)
+
+ encoding = None
+ enc = self._content_encoding
+ if enc is not None:
+ self._content_encoding = None
+ enc = enc.lower()
+ if enc in ('gzip', 'deflate', 'br'):
+ encoding = enc
+
+ if self._cparser.type == cparser.HTTP_REQUEST:
+ msg = _new_request_message(
+ method, self._path,
+ self.http_version(), headers, raw_headers,
+ should_close, encoding, upgrade, chunked, self._url)
+ else:
+ msg = _new_response_message(
+ self.http_version(), self._cparser.status_code, self._reason,
+ headers, raw_headers, should_close, encoding,
+ upgrade, chunked)
+
+ if (
+ ULLONG_MAX > self._cparser.content_length > 0 or chunked or
+ self._cparser.method == 5 or # CONNECT: 5
+ (self._cparser.status_code >= 199 and
+ self._cparser.content_length == 0 and
+ self._read_until_eof)
+ ):
+ payload = StreamReader(
+ self._protocol, timer=self._timer, loop=self._loop,
+ limit=self._limit)
+ else:
+ payload = EMPTY_PAYLOAD
+
+ self._payload = payload
+ if encoding is not None and self._auto_decompress:
+ self._payload = DeflateBuffer(payload, encoding)
+
+ if not self._response_with_body:
+ payload = EMPTY_PAYLOAD
+
+ self._messages.append((msg, payload))
+
+ cdef _on_message_complete(self):
+ self._payload.feed_eof()
+ self._payload = None
+
+ cdef _on_chunk_header(self):
+ self._payload.begin_http_chunk_receiving()
+
+ cdef _on_chunk_complete(self):
+ self._payload.end_http_chunk_receiving()
+
+ cdef object _on_status_complete(self):
+ pass
+
+ cdef inline http_version(self):
+ cdef cparser.llhttp_t* parser = self._cparser
+
+ if parser.http_major == 1:
+ if parser.http_minor == 0:
+ return HttpVersion10
+ elif parser.http_minor == 1:
+ return HttpVersion11
+
+ return HttpVersion(parser.http_major, parser.http_minor)
+
+ ### Public API ###
+
+ def feed_eof(self):
+ cdef bytes desc
+
+ if self._payload is not None:
+ if self._cparser.flags & cparser.F_CHUNKED:
+ raise TransferEncodingError(
+ "Not enough data for satisfy transfer length header.")
+ elif self._cparser.flags & cparser.F_CONTENT_LENGTH:
+ raise ContentLengthError(
+ "Not enough data for satisfy content length header.")
+ elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK:
+ desc = cparser.llhttp_get_error_reason(self._cparser)
+ raise PayloadEncodingError(desc.decode('latin-1'))
+ else:
+ self._payload.feed_eof()
+ elif self._started:
+ self._on_headers_complete()
+ if self._messages:
+ return self._messages[-1][0]
+
+ def feed_data(self, data):
+ cdef:
+ size_t data_len
+ size_t nb
+ cdef cparser.llhttp_errno_t errno
+
+ PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
+ data_len = <size_t>self.py_buf.len
+
+ errno = cparser.llhttp_execute(
+ self._cparser,
+ <char*>self.py_buf.buf,
+ data_len)
+
+ if errno is cparser.HPE_PAUSED_UPGRADE:
+ cparser.llhttp_resume_after_upgrade(self._cparser)
+
+ nb = cparser.llhttp_get_error_pos(self._cparser) - <char*>self.py_buf.buf
+
+ PyBuffer_Release(&self.py_buf)
+
+ if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE):
+ if self._payload_error == 0:
+ if self._last_error is not None:
+ ex = self._last_error
+ self._last_error = None
+ else:
+ ex = parser_error_from_errno(self._cparser)
+ self._payload = None
+ raise ex
+
+ if self._messages:
+ messages = self._messages
+ self._messages = []
+ else:
+ messages = ()
+
+ if self._upgraded:
+ return messages, True, data[nb:]
+ else:
+ return messages, False, b''
+
+ def set_upgraded(self, val):
+ self._upgraded = val
+
+
+cdef class HttpRequestParser(HttpParser):
+
+ def __init__(
+ self, protocol, loop, int limit, timer=None,
+ size_t max_line_size=8190, size_t max_headers=32768,
+ size_t max_field_size=8190, payload_exception=None,
+ bint response_with_body=True, bint read_until_eof=False,
+ bint auto_decompress=True,
+ ):
+ self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer,
+ max_line_size, max_headers, max_field_size,
+ payload_exception, response_with_body, read_until_eof,
+ auto_decompress)
+
+ cdef object _on_status_complete(self):
+ cdef int idx1, idx2
+ if not self._buf:
+ return
+ self._path = self._buf.decode('utf-8', 'surrogateescape')
+ try:
+ idx3 = len(self._path)
+ idx1 = self._path.find("?")
+ if idx1 == -1:
+ query = ""
+ idx2 = self._path.find("#")
+ if idx2 == -1:
+ path = self._path
+ fragment = ""
+ else:
+ path = self._path[0: idx2]
+ fragment = self._path[idx2+1:]
+
+ else:
+ path = self._path[0:idx1]
+ idx1 += 1
+ idx2 = self._path.find("#", idx1+1)
+ if idx2 == -1:
+ query = self._path[idx1:]
+ fragment = ""
+ else:
+ query = self._path[idx1: idx2]
+ fragment = self._path[idx2+1:]
+
+ self._url = URL.build(
+ path=path,
+ query_string=query,
+ fragment=fragment,
+ encoded=True,
+ )
+ finally:
+ PyByteArray_Resize(self._buf, 0)
+
+
+cdef class HttpResponseParser(HttpParser):
+
+ def __init__(
+ self, protocol, loop, int limit, timer=None,
+ size_t max_line_size=8190, size_t max_headers=32768,
+ size_t max_field_size=8190, payload_exception=None,
+ bint response_with_body=True, bint read_until_eof=False,
+ bint auto_decompress=True
+ ):
+ self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer,
+ max_line_size, max_headers, max_field_size,
+ payload_exception, response_with_body, read_until_eof,
+ auto_decompress)
+
+ cdef object _on_status_complete(self):
+ if self._buf:
+ self._reason = self._buf.decode('utf-8', 'surrogateescape')
+ PyByteArray_Resize(self._buf, 0)
+ else:
+ self._reason = self._reason or ''
+
+cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+
+ pyparser._started = True
+ pyparser._headers = CIMultiDict()
+ pyparser._raw_headers = []
+ PyByteArray_Resize(pyparser._buf, 0)
+ pyparser._path = None
+ pyparser._reason = None
+ return 0
+
+
+cdef int cb_on_url(cparser.llhttp_t* parser,
+ const char *at, size_t length) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ try:
+ if length > pyparser._max_line_size:
+ raise LineTooLong(
+ 'Status line is too long', pyparser._max_line_size, length)
+ extend(pyparser._buf, at, length)
+ except BaseException as ex:
+ pyparser._last_error = ex
+ return -1
+ else:
+ return 0
+
+
+cdef int cb_on_status(cparser.llhttp_t* parser,
+ const char *at, size_t length) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ cdef str reason
+ try:
+ if length > pyparser._max_line_size:
+ raise LineTooLong(
+ 'Status line is too long', pyparser._max_line_size, length)
+ extend(pyparser._buf, at, length)
+ except BaseException as ex:
+ pyparser._last_error = ex
+ return -1
+ else:
+ return 0
+
+
+cdef int cb_on_header_field(cparser.llhttp_t* parser,
+ const char *at, size_t length) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ cdef Py_ssize_t size
+ try:
+ pyparser._on_status_complete()
+ size = len(pyparser._raw_name) + length
+ if size > pyparser._max_field_size:
+ raise LineTooLong(
+ 'Header name is too long', pyparser._max_field_size, size)
+ pyparser._on_header_field(at, length)
+ except BaseException as ex:
+ pyparser._last_error = ex
+ return -1
+ else:
+ return 0
+
+
+cdef int cb_on_header_value(cparser.llhttp_t* parser,
+ const char *at, size_t length) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ cdef Py_ssize_t size
+ try:
+ size = len(pyparser._raw_value) + length
+ if size > pyparser._max_field_size:
+ raise LineTooLong(
+ 'Header value is too long', pyparser._max_field_size, size)
+ pyparser._on_header_value(at, length)
+ except BaseException as ex:
+ pyparser._last_error = ex
+ return -1
+ else:
+ return 0
+
+
+cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ try:
+ pyparser._on_status_complete()
+ pyparser._on_headers_complete()
+ except BaseException as exc:
+ pyparser._last_error = exc
+ return -1
+ else:
+ if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT
+ return 2
+ else:
+ return 0
+
+
+cdef int cb_on_body(cparser.llhttp_t* parser,
+ const char *at, size_t length) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ cdef bytes body = at[:length]
+ try:
+ pyparser._payload.feed_data(body, length)
+ except BaseException as exc:
+ if pyparser._payload_exception is not None:
+ pyparser._payload.set_exception(pyparser._payload_exception(str(exc)))
+ else:
+ pyparser._payload.set_exception(exc)
+ pyparser._payload_error = 1
+ return -1
+ else:
+ return 0
+
+
+cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ try:
+ pyparser._started = False
+ pyparser._on_message_complete()
+ except BaseException as exc:
+ pyparser._last_error = exc
+ return -1
+ else:
+ return 0
+
+
+cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ try:
+ pyparser._on_chunk_header()
+ except BaseException as exc:
+ pyparser._last_error = exc
+ return -1
+ else:
+ return 0
+
+
+cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ try:
+ pyparser._on_chunk_complete()
+ except BaseException as exc:
+ pyparser._last_error = exc
+ return -1
+ else:
+ return 0
+
+
+cdef parser_error_from_errno(cparser.llhttp_t* parser):
+ cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser)
+ cdef bytes desc = cparser.llhttp_get_error_reason(parser)
+
+ if errno in (cparser.HPE_CB_MESSAGE_BEGIN,
+ cparser.HPE_CB_HEADERS_COMPLETE,
+ cparser.HPE_CB_MESSAGE_COMPLETE,
+ cparser.HPE_CB_CHUNK_HEADER,
+ cparser.HPE_CB_CHUNK_COMPLETE,
+ cparser.HPE_INVALID_CONSTANT,
+ cparser.HPE_INVALID_HEADER_TOKEN,
+ cparser.HPE_INVALID_CONTENT_LENGTH,
+ cparser.HPE_INVALID_CHUNK_SIZE,
+ cparser.HPE_INVALID_EOF_STATE,
+ cparser.HPE_INVALID_TRANSFER_ENCODING):
+ cls = BadHttpMessage
+
+ elif errno == cparser.HPE_INVALID_STATUS:
+ cls = BadStatusLine
+
+ elif errno == cparser.HPE_INVALID_METHOD:
+ cls = BadStatusLine
+
+ elif errno == cparser.HPE_INVALID_VERSION:
+ cls = BadStatusLine
+
+ elif errno == cparser.HPE_INVALID_URL:
+ cls = InvalidURLError
+
+ else:
+ cls = BadHttpMessage
+
+ return cls(desc.decode('latin-1'))
diff --git a/contrib/python/aiohttp/aiohttp/_http_writer.pyx b/contrib/python/aiohttp/aiohttp/_http_writer.pyx
new file mode 100644
index 0000000000..eff8521958
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/_http_writer.pyx
@@ -0,0 +1,163 @@
+from cpython.bytes cimport PyBytes_FromStringAndSize
+from cpython.exc cimport PyErr_NoMemory
+from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc
+from cpython.object cimport PyObject_Str
+from libc.stdint cimport uint8_t, uint64_t
+from libc.string cimport memcpy
+
+from multidict import istr
+
+DEF BUF_SIZE = 16 * 1024 # 16KiB
+cdef char BUFFER[BUF_SIZE]
+
+cdef object _istr = istr
+
+
+# ----------------- writer ---------------------------
+
+cdef struct Writer:
+ char *buf
+ Py_ssize_t size
+ Py_ssize_t pos
+
+
+cdef inline void _init_writer(Writer* writer):
+ writer.buf = &BUFFER[0]
+ writer.size = BUF_SIZE
+ writer.pos = 0
+
+
+cdef inline void _release_writer(Writer* writer):
+ if writer.buf != BUFFER:
+ PyMem_Free(writer.buf)
+
+
+cdef inline int _write_byte(Writer* writer, uint8_t ch):
+ cdef char * buf
+ cdef Py_ssize_t size
+
+ if writer.pos == writer.size:
+ # reallocate
+ size = writer.size + BUF_SIZE
+ if writer.buf == BUFFER:
+ buf = <char*>PyMem_Malloc(size)
+ if buf == NULL:
+ PyErr_NoMemory()
+ return -1
+ memcpy(buf, writer.buf, writer.size)
+ else:
+ buf = <char*>PyMem_Realloc(writer.buf, size)
+ if buf == NULL:
+ PyErr_NoMemory()
+ return -1
+ writer.buf = buf
+ writer.size = size
+ writer.buf[writer.pos] = <char>ch
+ writer.pos += 1
+ return 0
+
+
+cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
+ cdef uint64_t utf = <uint64_t> symbol
+
+ if utf < 0x80:
+ return _write_byte(writer, <uint8_t>utf)
+ elif utf < 0x800:
+ if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0:
+ return -1
+ return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
+ elif 0xD800 <= utf <= 0xDFFF:
+ # surogate pair, ignored
+ return 0
+ elif utf < 0x10000:
+ if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0:
+ return -1
+ if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
+ return -1
+ return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
+ elif utf > 0x10FFFF:
+ # symbol is too large
+ return 0
+ else:
+ if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0:
+ return -1
+ if _write_byte(writer,
+ <uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0:
+ return -1
+ if _write_byte(writer,
+ <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
+ return -1
+ return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
+
+
+cdef inline int _write_str(Writer* writer, str s):
+ cdef Py_UCS4 ch
+ for ch in s:
+ if _write_utf8(writer, ch) < 0:
+ return -1
+
+
+# --------------- _serialize_headers ----------------------
+
+cdef str to_str(object s):
+ typ = type(s)
+ if typ is str:
+ return <str>s
+ elif typ is _istr:
+ return PyObject_Str(s)
+ elif not isinstance(s, str):
+ raise TypeError("Cannot serialize non-str key {!r}".format(s))
+ else:
+ return str(s)
+
+
+cdef void _safe_header(str string) except *:
+ if "\r" in string or "\n" in string:
+ raise ValueError(
+ "Newline or carriage return character detected in HTTP status message or "
+ "header. This is a potential security issue."
+ )
+
+
+def _serialize_headers(str status_line, headers):
+ cdef Writer writer
+ cdef object key
+ cdef object val
+ cdef bytes ret
+
+ _init_writer(&writer)
+
+ for key, val in headers.items():
+ _safe_header(to_str(key))
+ _safe_header(to_str(val))
+
+ try:
+ if _write_str(&writer, status_line) < 0:
+ raise
+ if _write_byte(&writer, b'\r') < 0:
+ raise
+ if _write_byte(&writer, b'\n') < 0:
+ raise
+
+ for key, val in headers.items():
+ if _write_str(&writer, to_str(key)) < 0:
+ raise
+ if _write_byte(&writer, b':') < 0:
+ raise
+ if _write_byte(&writer, b' ') < 0:
+ raise
+ if _write_str(&writer, to_str(val)) < 0:
+ raise
+ if _write_byte(&writer, b'\r') < 0:
+ raise
+ if _write_byte(&writer, b'\n') < 0:
+ raise
+
+ if _write_byte(&writer, b'\r') < 0:
+ raise
+ if _write_byte(&writer, b'\n') < 0:
+ raise
+
+ return PyBytes_FromStringAndSize(writer.buf, writer.pos)
+ finally:
+ _release_writer(&writer)
diff --git a/contrib/python/aiohttp/aiohttp/_websocket.pyx b/contrib/python/aiohttp/aiohttp/_websocket.pyx
new file mode 100644
index 0000000000..94318d2b1b
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/_websocket.pyx
@@ -0,0 +1,56 @@
+from cpython cimport PyBytes_AsString
+
+
+#from cpython cimport PyByteArray_AsString # cython still not exports that
+cdef extern from "Python.h":
+ char* PyByteArray_AsString(bytearray ba) except NULL
+
+from libc.stdint cimport uint32_t, uint64_t, uintmax_t
+
+
+def _websocket_mask_cython(object mask, object data):
+ """Note, this function mutates its `data` argument
+ """
+ cdef:
+ Py_ssize_t data_len, i
+ # bit operations on signed integers are implementation-specific
+ unsigned char * in_buf
+ const unsigned char * mask_buf
+ uint32_t uint32_msk
+ uint64_t uint64_msk
+
+ assert len(mask) == 4
+
+ if not isinstance(mask, bytes):
+ mask = bytes(mask)
+
+ if isinstance(data, bytearray):
+ data = <bytearray>data
+ else:
+ data = bytearray(data)
+
+ data_len = len(data)
+ in_buf = <unsigned char*>PyByteArray_AsString(data)
+ mask_buf = <const unsigned char*>PyBytes_AsString(mask)
+ uint32_msk = (<uint32_t*>mask_buf)[0]
+
+ # TODO: align in_data ptr to achieve even faster speeds
+ # does it need in python ?! malloc() always aligns to sizeof(long) bytes
+
+ if sizeof(size_t) >= 8:
+ uint64_msk = uint32_msk
+ uint64_msk = (uint64_msk << 32) | uint32_msk
+
+ while data_len >= 8:
+ (<uint64_t*>in_buf)[0] ^= uint64_msk
+ in_buf += 8
+ data_len -= 8
+
+
+ while data_len >= 4:
+ (<uint32_t*>in_buf)[0] ^= uint32_msk
+ in_buf += 4
+ data_len -= 4
+
+ for i in range(0, data_len):
+ in_buf[i] ^= mask_buf[i]
diff --git a/contrib/python/aiohttp/aiohttp/abc.py b/contrib/python/aiohttp/aiohttp/abc.py
new file mode 100644
index 0000000000..06fc831638
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/abc.py
@@ -0,0 +1,207 @@
+import asyncio
+import logging
+from abc import ABC, abstractmethod
+from collections.abc import Sized
+from http.cookies import BaseCookie, Morsel
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ Dict,
+ Generator,
+ Iterable,
+ List,
+ Optional,
+ Tuple,
+)
+
+from multidict import CIMultiDict
+from yarl import URL
+
+from .helpers import get_running_loop
+from .typedefs import LooseCookies
+
+if TYPE_CHECKING: # pragma: no cover
+ from .web_app import Application
+ from .web_exceptions import HTTPException
+ from .web_request import BaseRequest, Request
+ from .web_response import StreamResponse
+else:
+ BaseRequest = Request = Application = StreamResponse = None
+ HTTPException = None
+
+
+class AbstractRouter(ABC):
+    """Abstract router: resolves a request to a match info object."""
+
+    def __init__(self) -> None:
+        # A router stays mutable until freeze() is called.
+        self._frozen = False
+
+    def post_init(self, app: Application) -> None:
+        """Post init stage.
+
+        Not an abstract method for sake of backward compatibility,
+        but if the router wants to be aware of the application
+        it can override this.
+        """
+
+    @property
+    def frozen(self) -> bool:
+        # True once freeze() has been called.
+        return self._frozen
+
+    def freeze(self) -> None:
+        """Freeze router."""
+        self._frozen = True
+
+    @abstractmethod
+    async def resolve(self, request: Request) -> "AbstractMatchInfo":
+        """Return MATCH_INFO for given request"""
+
+
+class AbstractMatchInfo(ABC):
+    """Outcome of router resolution: handler plus routing metadata."""
+
+    @property # pragma: no branch
+    @abstractmethod
+    def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
+        """Execute matched request handler"""
+
+    @property
+    @abstractmethod
+    def expect_handler(self) -> Callable[[Request], Awaitable[None]]:
+        """Expect handler for 100-continue processing"""
+
+    @property # pragma: no branch
+    @abstractmethod
+    def http_exception(self) -> Optional[HTTPException]:
+        """HTTPException instance raised on router's resolving, or None"""
+
+    @abstractmethod # pragma: no branch
+    def get_info(self) -> Dict[str, Any]:
+        """Return a dict with additional info useful for introspection"""
+
+    @property # pragma: no branch
+    @abstractmethod
+    def apps(self) -> Tuple[Application, ...]:
+        """Stack of nested applications.
+
+        Top level application is left-most element.
+
+        """
+
+    @abstractmethod
+    def add_app(self, app: Application) -> None:
+        """Add application to the nested apps stack."""
+
+    @abstractmethod
+    def freeze(self) -> None:
+        """Freeze the match info.
+
+        The method is called after route resolution.
+
+        After the call .add_app() is forbidden.
+
+        """
+
+
+class AbstractView(ABC):
+    """Abstract class based view."""
+
+    def __init__(self, request: Request) -> None:
+        # The request the view instance was constructed for.
+        self._request = request
+
+    @property
+    def request(self) -> Request:
+        """Request instance."""
+        return self._request
+
+    @abstractmethod
+    def __await__(self) -> Generator[Any, None, StreamResponse]:
+        """Execute the view handler."""
+
+
+class AbstractResolver(ABC):
+    """Abstract DNS resolver."""
+
+    @abstractmethod
+    async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]:
+        """Return IP address for given hostname"""
+        # NOTE(review): result dicts presumably carry host/port/family
+        # entries consumed by the connector -- confirm against concrete
+        # resolver implementations.
+
+    @abstractmethod
+    async def close(self) -> None:
+        """Release resolver"""
+
+
+if TYPE_CHECKING: # pragma: no cover
+    IterableBase = Iterable[Morsel[str]]
+else:
+    # Morsel is not subscriptable at runtime, so fall back to the
+    # unparameterized Iterable as the base class.
+    IterableBase = Iterable
+
+
+# Predicate used by AbstractCookieJar.clear() to select cookies to drop.
+ClearCookiePredicate = Callable[["Morsel[str]"], bool]
+
+
+class AbstractCookieJar(Sized, IterableBase):
+    """Abstract Cookie Jar."""
+
+    def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
+        # Event loop the jar is bound to (resolved via helper when omitted).
+        self._loop = get_running_loop(loop)
+
+    @abstractmethod
+    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
+        """Clear all cookies if no predicate is passed."""
+
+    @abstractmethod
+    def clear_domain(self, domain: str) -> None:
+        """Clear all cookies for domain and all subdomains."""
+
+    @abstractmethod
+    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
+        """Update cookies."""
+
+    @abstractmethod
+    def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
+        """Return the jar's cookies filtered by their attributes."""
+
+
+class AbstractStreamWriter(ABC):
+    """Abstract stream writer."""
+
+    # NOTE(review): presumably bytes buffered / total bytes written /
+    # expected payload length (None when unknown) -- confirm against
+    # concrete writer implementations.
+    buffer_size = 0
+    output_size = 0
+    length = 0 # type: Optional[int]
+
+    @abstractmethod
+    async def write(self, chunk: bytes) -> None:
+        """Write chunk into stream."""
+
+    @abstractmethod
+    async def write_eof(self, chunk: bytes = b"") -> None:
+        """Write last chunk."""
+
+    @abstractmethod
+    async def drain(self) -> None:
+        """Flush the write buffer."""
+
+    @abstractmethod
+    def enable_compression(self, encoding: str = "deflate") -> None:
+        """Enable HTTP body compression"""
+
+    @abstractmethod
+    def enable_chunking(self) -> None:
+        """Enable HTTP chunked mode"""
+
+    @abstractmethod
+    async def write_headers(
+        self, status_line: str, headers: "CIMultiDict[str]"
+    ) -> None:
+        """Write HTTP headers"""
+
+
+class AbstractAccessLogger(ABC):
+    """Abstract writer to access log."""
+
+    def __init__(self, logger: logging.Logger, log_format: str) -> None:
+        # Destination logger and the format string used by log().
+        self.logger = logger
+        self.log_format = log_format
+
+    @abstractmethod
+    def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
+        """Emit log to logger."""
diff --git a/contrib/python/aiohttp/aiohttp/base_protocol.py b/contrib/python/aiohttp/aiohttp/base_protocol.py
new file mode 100644
index 0000000000..fff4610a1e
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/base_protocol.py
@@ -0,0 +1,87 @@
+import asyncio
+from typing import Optional, cast
+
+from .tcp_helpers import tcp_nodelay
+
+
+class BaseProtocol(asyncio.Protocol):
+    """Base asyncio.Protocol with write/read flow-control bookkeeping.
+
+    Tracks write-side pause/resume (exposing a drain waiter through
+    _drain_helper) and read-side pause/resume on the transport.
+    """
+
+    __slots__ = (
+        "_loop",
+        "_paused",
+        "_drain_waiter",
+        "_connection_lost",
+        "_reading_paused",
+        "transport",
+    )
+
+    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
+        self._loop = loop # type: asyncio.AbstractEventLoop
+        self._paused = False
+        self._drain_waiter = None # type: Optional[asyncio.Future[None]]
+        self._connection_lost = False
+        self._reading_paused = False
+
+        self.transport = None # type: Optional[asyncio.Transport]
+
+    def pause_writing(self) -> None:
+        # Flow-control callback from the transport (write buffer above the
+        # high-water mark); writers block in _drain_helper() until resumed.
+        assert not self._paused
+        self._paused = True
+
+    def resume_writing(self) -> None:
+        assert self._paused
+        self._paused = False
+
+        # Wake a writer blocked in _drain_helper(), if any.
+        waiter = self._drain_waiter
+        if waiter is not None:
+            self._drain_waiter = None
+            if not waiter.done():
+                waiter.set_result(None)
+
+    def pause_reading(self) -> None:
+        if not self._reading_paused and self.transport is not None:
+            try:
+                self.transport.pause_reading()
+            except (AttributeError, NotImplementedError, RuntimeError):
+                # Some transports cannot pause; record the state anyway.
+                pass
+            self._reading_paused = True
+
+    def resume_reading(self) -> None:
+        if self._reading_paused and self.transport is not None:
+            try:
+                self.transport.resume_reading()
+            except (AttributeError, NotImplementedError, RuntimeError):
+                pass
+            self._reading_paused = False
+
+    def connection_made(self, transport: asyncio.BaseTransport) -> None:
+        tr = cast(asyncio.Transport, transport)
+        # Disable Nagle's algorithm on the new connection.
+        tcp_nodelay(tr, True)
+        self.transport = tr
+
+    def connection_lost(self, exc: Optional[BaseException]) -> None:
+        self._connection_lost = True
+        # Wake up the writer if currently paused.
+        self.transport = None
+        if not self._paused:
+            return
+        waiter = self._drain_waiter
+        if waiter is None:
+            return
+        self._drain_waiter = None
+        if waiter.done():
+            return
+        if exc is None:
+            waiter.set_result(None)
+        else:
+            waiter.set_exception(exc)
+
+    async def _drain_helper(self) -> None:
+        # Wait until resume_writing()/connection_lost() when paused; the
+        # shield keeps cancellation of the caller from killing the shared
+        # waiter other writers may also be awaiting.
+        if self._connection_lost:
+            raise ConnectionResetError("Connection lost")
+        if not self._paused:
+            return
+        waiter = self._drain_waiter
+        if waiter is None:
+            waiter = self._loop.create_future()
+            self._drain_waiter = waiter
+        await asyncio.shield(waiter)
diff --git a/contrib/python/aiohttp/aiohttp/client.py b/contrib/python/aiohttp/aiohttp/client.py
new file mode 100644
index 0000000000..6ae9549db9
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/client.py
@@ -0,0 +1,1304 @@
+"""HTTP Client for asyncio."""
+
+import asyncio
+import base64
+import hashlib
+import json
+import os
+import sys
+import traceback
+import warnings
+from contextlib import suppress
+from types import SimpleNamespace, TracebackType
+from typing import (
+ Any,
+ Awaitable,
+ Callable,
+ Coroutine,
+ FrozenSet,
+ Generator,
+ Generic,
+ Iterable,
+ List,
+ Mapping,
+ Optional,
+ Set,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+)
+
+import attr
+from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr
+from yarl import URL
+
+from . import hdrs, http, payload
+from .abc import AbstractCookieJar
+from .client_exceptions import (
+ ClientConnectionError as ClientConnectionError,
+ ClientConnectorCertificateError as ClientConnectorCertificateError,
+ ClientConnectorError as ClientConnectorError,
+ ClientConnectorSSLError as ClientConnectorSSLError,
+ ClientError as ClientError,
+ ClientHttpProxyError as ClientHttpProxyError,
+ ClientOSError as ClientOSError,
+ ClientPayloadError as ClientPayloadError,
+ ClientProxyConnectionError as ClientProxyConnectionError,
+ ClientResponseError as ClientResponseError,
+ ClientSSLError as ClientSSLError,
+ ContentTypeError as ContentTypeError,
+ InvalidURL as InvalidURL,
+ ServerConnectionError as ServerConnectionError,
+ ServerDisconnectedError as ServerDisconnectedError,
+ ServerFingerprintMismatch as ServerFingerprintMismatch,
+ ServerTimeoutError as ServerTimeoutError,
+ TooManyRedirects as TooManyRedirects,
+ WSServerHandshakeError as WSServerHandshakeError,
+)
+from .client_reqrep import (
+ ClientRequest as ClientRequest,
+ ClientResponse as ClientResponse,
+ Fingerprint as Fingerprint,
+ RequestInfo as RequestInfo,
+ _merge_ssl_params,
+)
+from .client_ws import ClientWebSocketResponse as ClientWebSocketResponse
+from .connector import (
+ BaseConnector as BaseConnector,
+ NamedPipeConnector as NamedPipeConnector,
+ TCPConnector as TCPConnector,
+ UnixConnector as UnixConnector,
+)
+from .cookiejar import CookieJar
+from .helpers import (
+ DEBUG,
+ PY_36,
+ BasicAuth,
+ TimeoutHandle,
+ ceil_timeout,
+ get_env_proxy_for_url,
+ get_running_loop,
+ sentinel,
+ strip_auth_from_url,
+)
+from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter
+from .http_websocket import WSHandshakeError, WSMessage, ws_ext_gen, ws_ext_parse
+from .streams import FlowControlDataQueue
+from .tracing import Trace, TraceConfig
+from .typedefs import Final, JSONEncoder, LooseCookies, LooseHeaders, StrOrURL
+
+__all__ = (
+ # client_exceptions
+ "ClientConnectionError",
+ "ClientConnectorCertificateError",
+ "ClientConnectorError",
+ "ClientConnectorSSLError",
+ "ClientError",
+ "ClientHttpProxyError",
+ "ClientOSError",
+ "ClientPayloadError",
+ "ClientProxyConnectionError",
+ "ClientResponseError",
+ "ClientSSLError",
+ "ContentTypeError",
+ "InvalidURL",
+ "ServerConnectionError",
+ "ServerDisconnectedError",
+ "ServerFingerprintMismatch",
+ "ServerTimeoutError",
+ "TooManyRedirects",
+ "WSServerHandshakeError",
+ # client_reqrep
+ "ClientRequest",
+ "ClientResponse",
+ "Fingerprint",
+ "RequestInfo",
+ # connector
+ "BaseConnector",
+ "TCPConnector",
+ "UnixConnector",
+ "NamedPipeConnector",
+ # client_ws
+ "ClientWebSocketResponse",
+ # client
+ "ClientSession",
+ "ClientTimeout",
+ "request",
+)
+
+
+try:
+    from ssl import SSLContext
+except ImportError: # pragma: no cover
+    # ssl module may be absent in minimal builds; keep annotations valid.
+    SSLContext = object # type: ignore[misc,assignment]
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class ClientTimeout:
+    """Timeout settings (in seconds) for a client request; None disables."""
+
+    total: Optional[float] = None
+    connect: Optional[float] = None
+    sock_read: Optional[float] = None
+    sock_connect: Optional[float] = None
+
+    # pool_queue_timeout: Optional[float] = None
+    # dns_resolution_timeout: Optional[float] = None
+    # socket_connect_timeout: Optional[float] = None
+    # connection_acquiring_timeout: Optional[float] = None
+    # new_connection_timeout: Optional[float] = None
+    # http_header_timeout: Optional[float] = None
+    # response_body_timeout: Optional[float] = None
+
+    # to create a timeout specific for a single request, either
+    # - create a completely new one to overwrite the default
+    # - or use http://www.attrs.org/en/stable/api.html#attr.evolve
+    # to overwrite the defaults
+
+
+# 5 Minute default read timeout
+DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60)
+
+# Generic return type used by the request context-manager helpers.
+_RetType = TypeVar("_RetType")
+
+
+class ClientSession:
+    """First-class interface for making HTTP requests."""
+
+    # Attribute names a session may legitimately set on itself; used by
+    # the DEBUG-mode __setattr__ to warn about foreign attributes.
+    ATTRS = frozenset(
+        [
+            "_base_url",
+            "_source_traceback",
+            "_connector",
+            "requote_redirect_url",
+            "_loop",
+            "_cookie_jar",
+            "_connector_owner",
+            "_default_auth",
+            "_version",
+            "_json_serialize",
+            "_requote_redirect_url",
+            "_timeout",
+            "_raise_for_status",
+            "_auto_decompress",
+            "_trust_env",
+            "_default_headers",
+            "_skip_auto_headers",
+            "_request_class",
+            "_response_class",
+            "_ws_response_class",
+            "_trace_configs",
+            "_read_bufsize",
+        ]
+    )
+
+    # Creation traceback, filled only when the loop runs in debug mode.
+    _source_traceback = None
+
+    def __init__(
+        self,
+        base_url: Optional[StrOrURL] = None,
+        *,
+        connector: Optional[BaseConnector] = None,
+        loop: Optional[asyncio.AbstractEventLoop] = None,
+        cookies: Optional[LooseCookies] = None,
+        headers: Optional[LooseHeaders] = None,
+        skip_auto_headers: Optional[Iterable[str]] = None,
+        auth: Optional[BasicAuth] = None,
+        json_serialize: JSONEncoder = json.dumps,
+        request_class: Type[ClientRequest] = ClientRequest,
+        response_class: Type[ClientResponse] = ClientResponse,
+        ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse,
+        version: HttpVersion = http.HttpVersion11,
+        cookie_jar: Optional[AbstractCookieJar] = None,
+        connector_owner: bool = True,
+        raise_for_status: bool = False,
+        read_timeout: Union[float, object] = sentinel,
+        conn_timeout: Optional[float] = None,
+        timeout: Union[object, ClientTimeout] = sentinel,
+        auto_decompress: bool = True,
+        trust_env: bool = False,
+        requote_redirect_url: bool = True,
+        trace_configs: Optional[List[TraceConfig]] = None,
+        read_bufsize: int = 2 ** 16,
+    ) -> None:
+        """Initialize the session: loop, connector, cookie jar and defaults."""
+        if loop is None:
+            if connector is not None:
+                # Inherit the loop from an explicitly supplied connector.
+                loop = connector._loop
+
+        loop = get_running_loop(loop)
+
+        if base_url is None or isinstance(base_url, URL):
+            self._base_url: Optional[URL] = base_url
+        else:
+            self._base_url = URL(base_url)
+            assert (
+                self._base_url.origin() == self._base_url
+            ), "Only absolute URLs without path part are supported"
+
+        if connector is None:
+            connector = TCPConnector(loop=loop)
+
+        if connector._loop is not loop:
+            raise RuntimeError("Session and connector has to use same event loop")
+
+        self._loop = loop
+
+        if loop.get_debug():
+            # Remember where the session was created, for the
+            # unclosed-session diagnostics emitted by __del__.
+            self._source_traceback = traceback.extract_stack(sys._getframe(1))
+
+        if cookie_jar is None:
+            cookie_jar = CookieJar(loop=loop)
+        self._cookie_jar = cookie_jar
+
+        if cookies is not None:
+            self._cookie_jar.update_cookies(cookies)
+
+        self._connector = connector # type: Optional[BaseConnector]
+        self._connector_owner = connector_owner
+        self._default_auth = auth
+        self._version = version
+        self._json_serialize = json_serialize
+        # Legacy read_timeout/conn_timeout are folded into a ClientTimeout;
+        # combining them with the new `timeout` argument is an error.
+        if timeout is sentinel:
+            self._timeout = DEFAULT_TIMEOUT
+            if read_timeout is not sentinel:
+                warnings.warn(
+                    "read_timeout is deprecated, " "use timeout argument instead",
+                    DeprecationWarning,
+                    stacklevel=2,
+                )
+                self._timeout = attr.evolve(self._timeout, total=read_timeout)
+            if conn_timeout is not None:
+                self._timeout = attr.evolve(self._timeout, connect=conn_timeout)
+                warnings.warn(
+                    "conn_timeout is deprecated, " "use timeout argument instead",
+                    DeprecationWarning,
+                    stacklevel=2,
+                )
+        else:
+            self._timeout = timeout # type: ignore[assignment]
+            if read_timeout is not sentinel:
+                raise ValueError(
+                    "read_timeout and timeout parameters "
+                    "conflict, please setup "
+                    "timeout.read"
+                )
+            if conn_timeout is not None:
+                raise ValueError(
+                    "conn_timeout and timeout parameters "
+                    "conflict, please setup "
+                    "timeout.connect"
+                )
+        self._raise_for_status = raise_for_status
+        self._auto_decompress = auto_decompress
+        self._trust_env = trust_env
+        self._requote_redirect_url = requote_redirect_url
+        self._read_bufsize = read_bufsize
+
+        # Convert to list of tuples
+        if headers:
+            real_headers = CIMultiDict(headers) # type: CIMultiDict[str]
+        else:
+            real_headers = CIMultiDict()
+        self._default_headers = real_headers # type: CIMultiDict[str]
+        if skip_auto_headers is not None:
+            self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers)
+        else:
+            self._skip_auto_headers = frozenset()
+
+        self._request_class = request_class
+        self._response_class = response_class
+        self._ws_response_class = ws_response_class
+
+        self._trace_configs = trace_configs or []
+        for trace_config in self._trace_configs:
+            # Freeze configs so they cannot change after session creation.
+            trace_config.freeze()
+
+    def __init_subclass__(cls: Type["ClientSession"]) -> None:
+        # Subclassing ClientSession is deprecated; warn at class creation.
+        warnings.warn(
+            "Inheritance class {} from ClientSession "
+            "is discouraged".format(cls.__name__),
+            DeprecationWarning,
+            stacklevel=2,
+        )
+
+    if DEBUG:
+
+        def __setattr__(self, name: str, val: Any) -> None:
+            # Only active in aiohttp debug mode: warn when setting an
+            # attribute outside the whitelisted ATTRS set.
+            if name not in self.ATTRS:
+                warnings.warn(
+                    "Setting custom ClientSession.{} attribute "
+                    "is discouraged".format(name),
+                    DeprecationWarning,
+                    stacklevel=2,
+                )
+            super().__setattr__(name, val)
+
+    def __del__(self, _warnings: Any = warnings) -> None:
+        # `_warnings` is bound as a default so the module stays reachable
+        # even during interpreter shutdown.
+        if not self.closed:
+            if PY_36:
+                # ResourceWarning's `source` parameter needs Python 3.6+.
+                kwargs = {"source": self}
+            else:
+                kwargs = {}
+            _warnings.warn(
+                f"Unclosed client session {self!r}", ResourceWarning, **kwargs
+            )
+            context = {"client_session": self, "message": "Unclosed client session"}
+            if self._source_traceback is not None:
+                context["source_traceback"] = self._source_traceback
+            self._loop.call_exception_handler(context)
+
+    def request(
+        self, method: str, url: StrOrURL, **kwargs: Any
+    ) -> "_RequestContextManager":
+        """Perform HTTP request."""
+        # Wrap the _request() coroutine in a context manager so callers
+        # can either await it or use `async with`.
+        return _RequestContextManager(self._request(method, url, **kwargs))
+
+    def _build_url(self, str_or_url: StrOrURL) -> URL:
+        """Join *str_or_url* onto the session's base_url, if configured."""
+        url = URL(str_or_url)
+        if self._base_url is None:
+            return url
+        else:
+            # With a base_url, only relative, absolute-path URLs are valid.
+            assert not url.is_absolute() and url.path.startswith("/")
+            return self._base_url.join(url)
+
+    async def _request(
+        self,
+        method: str,
+        str_or_url: StrOrURL,
+        *,
+        params: Optional[Mapping[str, str]] = None,
+        data: Any = None,
+        json: Any = None,
+        cookies: Optional[LooseCookies] = None,
+        headers: Optional[LooseHeaders] = None,
+        skip_auto_headers: Optional[Iterable[str]] = None,
+        auth: Optional[BasicAuth] = None,
+        allow_redirects: bool = True,
+        max_redirects: int = 10,
+        compress: Optional[str] = None,
+        chunked: Optional[bool] = None,
+        expect100: bool = False,
+        raise_for_status: Optional[bool] = None,
+        read_until_eof: bool = True,
+        proxy: Optional[StrOrURL] = None,
+        proxy_auth: Optional[BasicAuth] = None,
+        timeout: Union[ClientTimeout, object] = sentinel,
+        verify_ssl: Optional[bool] = None,
+        fingerprint: Optional[bytes] = None,
+        ssl_context: Optional[SSLContext] = None,
+        ssl: Optional[Union[SSLContext, bool, Fingerprint]] = None,
+        proxy_headers: Optional[LooseHeaders] = None,
+        trace_request_ctx: Optional[SimpleNamespace] = None,
+        read_bufsize: Optional[int] = None,
+    ) -> ClientResponse:
+        """Perform the request, following redirects; return the response.
+
+        This is the coroutine behind request()/get()/post()/... helpers.
+        """
+
+        # NOTE: timeout clamps existing connect and read timeouts. We cannot
+        # set the default to None because we need to detect if the user wants
+        # to use the existing timeouts by setting timeout to None.
+
+        if self.closed:
+            raise RuntimeError("Session is closed")
+
+        ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
+
+        if data is not None and json is not None:
+            raise ValueError(
+                "data and json parameters can not be used at the same time"
+            )
+        elif json is not None:
+            data = payload.JsonPayload(json, dumps=self._json_serialize)
+
+        if not isinstance(chunked, bool) and chunked is not None:
+            warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)
+
+        redirects = 0
+        history = []
+        version = self._version
+
+        # Merge with default headers and transform to CIMultiDict
+        headers = self._prepare_headers(headers)
+        proxy_headers = self._prepare_headers(proxy_headers)
+
+        try:
+            url = self._build_url(str_or_url)
+        except ValueError as e:
+            raise InvalidURL(str_or_url) from e
+
+        skip_headers = set(self._skip_auto_headers)
+        if skip_auto_headers is not None:
+            for i in skip_auto_headers:
+                skip_headers.add(istr(i))
+
+        if proxy is not None:
+            try:
+                proxy = URL(proxy)
+            except ValueError as e:
+                raise InvalidURL(proxy) from e
+
+        if timeout is sentinel:
+            real_timeout = self._timeout # type: ClientTimeout
+        else:
+            if not isinstance(timeout, ClientTimeout):
+                real_timeout = ClientTimeout(total=timeout) # type: ignore[arg-type]
+            else:
+                real_timeout = timeout
+        # timeout is cumulative for all request operations
+        # (request, redirects, responses, data consuming)
+        tm = TimeoutHandle(self._loop, real_timeout.total)
+        handle = tm.start()
+
+        if read_bufsize is None:
+            read_bufsize = self._read_bufsize
+
+        traces = [
+            Trace(
+                self,
+                trace_config,
+                trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx),
+            )
+            for trace_config in self._trace_configs
+        ]
+
+        for trace in traces:
+            await trace.send_request_start(method, url.update_query(params), headers)
+
+        timer = tm.timer()
+        try:
+            with timer:
+                # Redirect-following loop: each iteration performs one
+                # request/response round trip.
+                while True:
+                    url, auth_from_url = strip_auth_from_url(url)
+                    if auth and auth_from_url:
+                        raise ValueError(
+                            "Cannot combine AUTH argument with "
+                            "credentials encoded in URL"
+                        )
+
+                    if auth is None:
+                        auth = auth_from_url
+                    if auth is None:
+                        auth = self._default_auth
+                    # It would be confusing if we support explicit
+                    # Authorization header with auth argument
+                    if (
+                        headers is not None
+                        and auth is not None
+                        and hdrs.AUTHORIZATION in headers
+                    ):
+                        raise ValueError(
+                            "Cannot combine AUTHORIZATION header "
+                            "with AUTH argument or credentials "
+                            "encoded in URL"
+                        )
+
+                    all_cookies = self._cookie_jar.filter_cookies(url)
+
+                    if cookies is not None:
+                        # Per-request cookies go through a throwaway jar so
+                        # they are filtered the same way as session cookies.
+                        tmp_cookie_jar = CookieJar()
+                        tmp_cookie_jar.update_cookies(cookies)
+                        req_cookies = tmp_cookie_jar.filter_cookies(url)
+                        if req_cookies:
+                            all_cookies.load(req_cookies)
+
+                    if proxy is not None:
+                        proxy = URL(proxy)
+                    elif self._trust_env:
+                        with suppress(LookupError):
+                            proxy, proxy_auth = get_env_proxy_for_url(url)
+
+                    req = self._request_class(
+                        method,
+                        url,
+                        params=params,
+                        headers=headers,
+                        skip_auto_headers=skip_headers,
+                        data=data,
+                        cookies=all_cookies,
+                        auth=auth,
+                        version=version,
+                        compress=compress,
+                        chunked=chunked,
+                        expect100=expect100,
+                        loop=self._loop,
+                        response_class=self._response_class,
+                        proxy=proxy,
+                        proxy_auth=proxy_auth,
+                        timer=timer,
+                        session=self,
+                        ssl=ssl,
+                        proxy_headers=proxy_headers,
+                        traces=traces,
+                    )
+
+                    # connection timeout
+                    try:
+                        async with ceil_timeout(real_timeout.connect):
+                            assert self._connector is not None
+                            conn = await self._connector.connect(
+                                req, traces=traces, timeout=real_timeout
+                            )
+                    except asyncio.TimeoutError as exc:
+                        raise ServerTimeoutError(
+                            "Connection timeout " "to host {}".format(url)
+                        ) from exc
+
+                    assert conn.transport is not None
+
+                    assert conn.protocol is not None
+                    conn.protocol.set_response_params(
+                        timer=timer,
+                        skip_payload=method.upper() == "HEAD",
+                        read_until_eof=read_until_eof,
+                        auto_decompress=self._auto_decompress,
+                        read_timeout=real_timeout.sock_read,
+                        read_bufsize=read_bufsize,
+                    )
+
+                    try:
+                        try:
+                            resp = await req.send(conn)
+                            try:
+                                await resp.start(conn)
+                            except BaseException:
+                                resp.close()
+                                raise
+                        except BaseException:
+                            conn.close()
+                            raise
+                    except ClientError:
+                        raise
+                    except OSError as exc:
+                        if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
+                            raise
+                        raise ClientOSError(*exc.args) from exc
+
+                    self._cookie_jar.update_cookies(resp.cookies, resp.url)
+
+                    # redirects
+                    if resp.status in (301, 302, 303, 307, 308) and allow_redirects:
+
+                        for trace in traces:
+                            await trace.send_request_redirect(
+                                method, url.update_query(params), headers, resp
+                            )
+
+                        redirects += 1
+                        history.append(resp)
+                        if max_redirects and redirects >= max_redirects:
+                            resp.close()
+                            raise TooManyRedirects(
+                                history[0].request_info, tuple(history)
+                            )
+
+                        # For 301 and 302, mimic IE, now changed in RFC
+                        # https://github.com/kennethreitz/requests/pull/269
+                        if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or (
+                            resp.status in (301, 302) and resp.method == hdrs.METH_POST
+                        ):
+                            method = hdrs.METH_GET
+                            data = None
+                            if headers.get(hdrs.CONTENT_LENGTH):
+                                headers.pop(hdrs.CONTENT_LENGTH)
+
+                        r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get(
+                            hdrs.URI
+                        )
+                        if r_url is None:
+                            # see github.com/aio-libs/aiohttp/issues/2022
+                            break
+                        else:
+                            # reading from correct redirection
+                            # response is forbidden
+                            resp.release()
+
+                        try:
+                            parsed_url = URL(
+                                r_url, encoded=not self._requote_redirect_url
+                            )
+
+                        except ValueError as e:
+                            raise InvalidURL(r_url) from e
+
+                        scheme = parsed_url.scheme
+                        if scheme not in ("http", "https", ""):
+                            resp.close()
+                            raise ValueError("Can redirect only to http or https")
+                        elif not scheme:
+                            parsed_url = url.join(parsed_url)
+
+                        if url.origin() != parsed_url.origin():
+                            # Do not leak credentials to a different origin.
+                            auth = None
+                            headers.pop(hdrs.AUTHORIZATION, None)
+
+                        url = parsed_url
+                        params = None
+                        resp.release()
+                        continue
+
+                    break
+
+            # check response status
+            if raise_for_status is None:
+                raise_for_status = self._raise_for_status
+            if raise_for_status:
+                resp.raise_for_status()
+
+            # register connection
+            if handle is not None:
+                if resp.connection is not None:
+                    resp.connection.add_callback(handle.cancel)
+                else:
+                    handle.cancel()
+
+            resp._history = tuple(history)
+
+            for trace in traces:
+                await trace.send_request_end(
+                    method, url.update_query(params), headers, resp
+                )
+            return resp
+
+        except BaseException as e:
+            # cleanup timer
+            tm.close()
+            if handle:
+                handle.cancel()
+                handle = None
+
+            for trace in traces:
+                await trace.send_request_exception(
+                    method, url.update_query(params), headers, e
+                )
+            raise
+
+    def ws_connect(
+        self,
+        url: StrOrURL,
+        *,
+        method: str = hdrs.METH_GET,
+        protocols: Iterable[str] = (),
+        timeout: float = 10.0,
+        receive_timeout: Optional[float] = None,
+        autoclose: bool = True,
+        autoping: bool = True,
+        heartbeat: Optional[float] = None,
+        auth: Optional[BasicAuth] = None,
+        origin: Optional[str] = None,
+        params: Optional[Mapping[str, str]] = None,
+        headers: Optional[LooseHeaders] = None,
+        proxy: Optional[StrOrURL] = None,
+        proxy_auth: Optional[BasicAuth] = None,
+        ssl: Union[SSLContext, bool, None, Fingerprint] = None,
+        verify_ssl: Optional[bool] = None,
+        fingerprint: Optional[bytes] = None,
+        ssl_context: Optional[SSLContext] = None,
+        proxy_headers: Optional[LooseHeaders] = None,
+        compress: int = 0,
+        max_msg_size: int = 4 * 1024 * 1024,
+    ) -> "_WSRequestContextManager":
+        """Initiate websocket connection."""
+        # Thin wrapper: the handshake happens in _ws_connect(); wrapping
+        # in a context manager allows `async with session.ws_connect(...)`.
+        return _WSRequestContextManager(
+            self._ws_connect(
+                url,
+                method=method,
+                protocols=protocols,
+                timeout=timeout,
+                receive_timeout=receive_timeout,
+                autoclose=autoclose,
+                autoping=autoping,
+                heartbeat=heartbeat,
+                auth=auth,
+                origin=origin,
+                params=params,
+                headers=headers,
+                proxy=proxy,
+                proxy_auth=proxy_auth,
+                ssl=ssl,
+                verify_ssl=verify_ssl,
+                fingerprint=fingerprint,
+                ssl_context=ssl_context,
+                proxy_headers=proxy_headers,
+                compress=compress,
+                max_msg_size=max_msg_size,
+            )
+        )
+
+    async def _ws_connect(
+        self,
+        url: StrOrURL,
+        *,
+        method: str = hdrs.METH_GET,
+        protocols: Iterable[str] = (),
+        timeout: float = 10.0,
+        receive_timeout: Optional[float] = None,
+        autoclose: bool = True,
+        autoping: bool = True,
+        heartbeat: Optional[float] = None,
+        auth: Optional[BasicAuth] = None,
+        origin: Optional[str] = None,
+        params: Optional[Mapping[str, str]] = None,
+        headers: Optional[LooseHeaders] = None,
+        proxy: Optional[StrOrURL] = None,
+        proxy_auth: Optional[BasicAuth] = None,
+        ssl: Union[SSLContext, bool, None, Fingerprint] = None,
+        verify_ssl: Optional[bool] = None,
+        fingerprint: Optional[bytes] = None,
+        ssl_context: Optional[SSLContext] = None,
+        proxy_headers: Optional[LooseHeaders] = None,
+        compress: int = 0,
+        max_msg_size: int = 4 * 1024 * 1024,
+    ) -> ClientWebSocketResponse:
+        """Perform the WebSocket opening handshake; build the WS response."""
+
+        if headers is None:
+            real_headers = CIMultiDict() # type: CIMultiDict[str]
+        else:
+            real_headers = CIMultiDict(headers)
+
+        default_headers = {
+            hdrs.UPGRADE: "websocket",
+            hdrs.CONNECTION: "upgrade",
+            hdrs.SEC_WEBSOCKET_VERSION: "13",
+        }
+
+        for key, value in default_headers.items():
+            real_headers.setdefault(key, value)
+
+        # Random handshake nonce; the server must echo its SHA-1/base64
+        # digest back in Sec-WebSocket-Accept (RFC 6455).
+        sec_key = base64.b64encode(os.urandom(16))
+        real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode()
+
+        if protocols:
+            real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols)
+        if origin is not None:
+            real_headers[hdrs.ORIGIN] = origin
+        if compress:
+            extstr = ws_ext_gen(compress=compress)
+            real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr
+
+        ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
+
+        # send request
+        resp = await self.request(
+            method,
+            url,
+            params=params,
+            headers=real_headers,
+            read_until_eof=False,
+            auth=auth,
+            proxy=proxy,
+            proxy_auth=proxy_auth,
+            ssl=ssl,
+            proxy_headers=proxy_headers,
+        )
+
+        try:
+            # check handshake
+            if resp.status != 101:
+                raise WSServerHandshakeError(
+                    resp.request_info,
+                    resp.history,
+                    message="Invalid response status",
+                    status=resp.status,
+                    headers=resp.headers,
+                )
+
+            if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket":
+                raise WSServerHandshakeError(
+                    resp.request_info,
+                    resp.history,
+                    message="Invalid upgrade header",
+                    status=resp.status,
+                    headers=resp.headers,
+                )
+
+            if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade":
+                raise WSServerHandshakeError(
+                    resp.request_info,
+                    resp.history,
+                    message="Invalid connection header",
+                    status=resp.status,
+                    headers=resp.headers,
+                )
+
+            # key calculation
+            r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "")
+            match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode()
+            if r_key != match:
+                raise WSServerHandshakeError(
+                    resp.request_info,
+                    resp.history,
+                    message="Invalid challenge response",
+                    status=resp.status,
+                    headers=resp.headers,
+                )
+
+            # websocket protocol
+            protocol = None
+            if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers:
+                resp_protocols = [
+                    proto.strip()
+                    for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
+                ]
+
+                # Pick the first server-offered subprotocol we asked for.
+                for proto in resp_protocols:
+                    if proto in protocols:
+                        protocol = proto
+                        break
+
+            # websocket compress
+            notakeover = False
+            if compress:
+                compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)
+                if compress_hdrs:
+                    try:
+                        compress, notakeover = ws_ext_parse(compress_hdrs)
+                    except WSHandshakeError as exc:
+                        raise WSServerHandshakeError(
+                            resp.request_info,
+                            resp.history,
+                            message=exc.args[0],
+                            status=resp.status,
+                            headers=resp.headers,
+                        ) from exc
+                else:
+                    # Server declined the extension: disable compression.
+                    compress = 0
+                    notakeover = False
+
+            # Hand the established connection over to the WS reader/writer.
+            conn = resp.connection
+            assert conn is not None
+            conn_proto = conn.protocol
+            assert conn_proto is not None
+            transport = conn.transport
+            assert transport is not None
+            reader = FlowControlDataQueue(
+                conn_proto, 2 ** 16, loop=self._loop
+            ) # type: FlowControlDataQueue[WSMessage]
+            conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader)
+            writer = WebSocketWriter(
+                conn_proto,
+                transport,
+                use_mask=True,
+                compress=compress,
+                notakeover=notakeover,
+            )
+        except BaseException:
+            resp.close()
+            raise
+        else:
+            return self._ws_response_class(
+                reader,
+                writer,
+                protocol,
+                resp,
+                timeout,
+                autoclose,
+                autoping,
+                self._loop,
+                receive_timeout=receive_timeout,
+                heartbeat=heartbeat,
+                compress=compress,
+                client_notakeover=notakeover,
+            )
+
+    def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]":
+        """Add default headers and transform it to CIMultiDict"""
+        # Convert headers to MultiDict
+        result = CIMultiDict(self._default_headers)
+        if headers:
+            if not isinstance(headers, (MultiDictProxy, MultiDict)):
+                headers = CIMultiDict(headers)
+            added_names = set() # type: Set[str]
+            for key, value in headers.items():
+                if key in added_names:
+                    # Repeated key in the incoming headers: keep duplicates.
+                    result.add(key, value)
+                else:
+                    # First occurrence replaces any session default.
+                    result[key] = value
+                    added_names.add(key)
+        return result
+
+    def get(
+        self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
+    ) -> "_RequestContextManager":
+        """Perform HTTP GET request."""
+        # All verb helpers below are thin wrappers over _request().
+        return _RequestContextManager(
+            self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs)
+        )
+
+    def options(
+        self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
+    ) -> "_RequestContextManager":
+        """Perform HTTP OPTIONS request."""
+        return _RequestContextManager(
+            self._request(
+                hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs
+            )
+        )
+
+    def head(
+        self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any
+    ) -> "_RequestContextManager":
+        """Perform HTTP HEAD request."""
+        # Note: redirects are off by default for HEAD.
+        return _RequestContextManager(
+            self._request(
+                hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs
+            )
+        )
+
+    def post(
+        self, url: StrOrURL, *, data: Any = None, **kwargs: Any
+    ) -> "_RequestContextManager":
+        """Perform HTTP POST request."""
+        return _RequestContextManager(
+            self._request(hdrs.METH_POST, url, data=data, **kwargs)
+        )
+
+    def put(
+        self, url: StrOrURL, *, data: Any = None, **kwargs: Any
+    ) -> "_RequestContextManager":
+        """Perform HTTP PUT request."""
+        return _RequestContextManager(
+            self._request(hdrs.METH_PUT, url, data=data, **kwargs)
+        )
+
+    def patch(
+        self, url: StrOrURL, *, data: Any = None, **kwargs: Any
+    ) -> "_RequestContextManager":
+        """Perform HTTP PATCH request."""
+        return _RequestContextManager(
+            self._request(hdrs.METH_PATCH, url, data=data, **kwargs)
+        )
+
+    def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager":
+        """Perform HTTP DELETE request."""
+        return _RequestContextManager(self._request(hdrs.METH_DELETE, url, **kwargs))
+
+ async def close(self) -> None:
+ """Close underlying connector.
+
+ Release all acquired resources.
+ """
+ if not self.closed:
+ if self._connector is not None and self._connector_owner:
+ await self._connector.close()
+ self._connector = None
+
    @property
    def closed(self) -> bool:
        """Is client session closed.

        A readonly property.
        """
        # Detached sessions (see ``detach()``) also report closed.
        return self._connector is None or self._connector.closed

    @property
    def connector(self) -> Optional[BaseConnector]:
        """Connector instance used for the session."""
        return self._connector

    @property
    def cookie_jar(self) -> AbstractCookieJar:
        """The session cookies."""
        return self._cookie_jar

    @property
    def version(self) -> Tuple[int, int]:
        """The session HTTP protocol version."""
        return self._version

    @property
    def requote_redirect_url(self) -> bool:
        """Do URL requoting on redirection handling."""
        return self._requote_redirect_url

    @requote_redirect_url.setter
    def requote_redirect_url(self, val: bool) -> None:
        """Do URL requoting on redirection handling."""
        # Mutation still works but is deprecated (aiohttp issue #2778).
        warnings.warn(
            "session.requote_redirect_url modification " "is deprecated #2778",
            DeprecationWarning,
            stacklevel=2,
        )
        self._requote_redirect_url = val

    @property
    def loop(self) -> asyncio.AbstractEventLoop:
        """Session's loop."""
        warnings.warn(
            "client.loop property is deprecated", DeprecationWarning, stacklevel=2
        )
        return self._loop

    @property
    def timeout(self) -> Union[object, ClientTimeout]:
        """Timeout for the session."""
        return self._timeout

    @property
    def headers(self) -> "CIMultiDict[str]":
        """The default headers of the client session."""
        return self._default_headers

    @property
    def skip_auto_headers(self) -> FrozenSet[istr]:
        """Headers for which autogeneration should be skipped"""
        return self._skip_auto_headers

    @property
    def auth(self) -> Optional[BasicAuth]:
        """An object that represents HTTP Basic Authorization"""
        return self._default_auth

    @property
    def json_serialize(self) -> JSONEncoder:
        """Json serializer callable"""
        return self._json_serialize

    @property
    def connector_owner(self) -> bool:
        """Should connector be closed on session closing"""
        return self._connector_owner

    @property
    def raise_for_status(
        self,
    ) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]:
        """Should `ClientResponse.raise_for_status()` be called for each response."""
        return self._raise_for_status

    @property
    def auto_decompress(self) -> bool:
        """Should the body response be automatically decompressed."""
        return self._auto_decompress

    @property
    def trust_env(self) -> bool:
        """
        Should proxies information from environment or netrc be trusted.

        Information is from HTTP_PROXY / HTTPS_PROXY environment variables
        or ~/.netrc file if present.
        """
        return self._trust_env

    @property
    def trace_configs(self) -> List[TraceConfig]:
        """A list of TraceConfig instances used for client tracing"""
        return self._trace_configs
+
    def detach(self) -> None:
        """Detach connector from session without closing the former.

        Session is switched to closed state anyway.
        """
        self._connector = None

    def __enter__(self) -> None:
        # Plain ``with`` is rejected on purpose: closing needs ``await``.
        raise TypeError("Use async with instead")

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # __exit__ should exist in pair with __enter__ but never executed
        pass  # pragma: no cover

    async def __aenter__(self) -> "ClientSession":
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # Closing the session releases the connector if this session owns it.
        await self.close()
+
+
class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]):
    """Awaitable / async-context-manager wrapper around a request coroutine.

    Implements the full ``Coroutine`` protocol by delegating to the wrapped
    coroutine, so instances may be awaited directly or used as ``async with``
    targets (subclasses supply ``__aexit__`` to release the result).
    """

    __slots__ = ("_coro", "_resp")

    def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None:
        self._coro = coro

    def send(self, arg: None) -> "asyncio.Future[Any]":
        return self._coro.send(arg)

    def throw(self, arg: BaseException) -> "asyncio.Future[Any]":  # type: ignore[arg-type,override]
        # Bug fix: the Coroutine protocol requires throw() to return the
        # value yielded next (or let the exception propagate).  The previous
        # implementation discarded the return value, which breaks drivers
        # that step the coroutine manually via send()/throw().
        return self._coro.throw(arg)

    def close(self) -> None:
        return self._coro.close()

    def __await__(self) -> Generator[Any, None, _RetType]:
        ret = self._coro.__await__()
        return ret

    def __iter__(self) -> Generator[Any, None, _RetType]:
        return self.__await__()

    async def __aenter__(self) -> _RetType:
        # The awaited result is kept so __aexit__ (in subclasses) can
        # release/close it.
        self._resp = await self._coro
        return self._resp
+
+
class _RequestContextManager(_BaseRequestContextManager[ClientResponse]):
    """``async with session.get(...)`` support: releases the response on exit."""

    __slots__ = ()

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        # We're basing behavior on the exception as it can be caused by
        # user code unrelated to the status of the connection.  If you
        # would like to close a connection you must do that
        # explicitly.  Otherwise connection error handling should kick in
        # and close/recycle the connection as required.
        self._resp.release()
+
+
class _WSRequestContextManager(_BaseRequestContextManager[ClientWebSocketResponse]):
    """``async with session.ws_connect(...)`` support: closes the websocket on exit."""

    __slots__ = ()

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        # Unlike plain responses, websockets need an async close handshake.
        await self._resp.close()
+
+
class _SessionRequestContextManager:
    """Context manager returned by the module-level ``request()`` helper.

    Owns a one-shot ClientSession and guarantees it is closed when the
    block exits, or immediately if sending the request fails.
    """

    __slots__ = ("_coro", "_resp", "_session")

    def __init__(
        self,
        coro: Coroutine["asyncio.Future[Any]", None, ClientResponse],
        session: ClientSession,
    ) -> None:
        self._coro = coro
        self._resp = None  # type: Optional[ClientResponse]
        self._session = session

    async def __aenter__(self) -> ClientResponse:
        try:
            self._resp = await self._coro
        except BaseException:
            # No response was produced; drop the throw-away session too.
            await self._session.close()
            raise
        else:
            return self._resp

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        assert self._resp is not None
        # Close both the response and its single-use session.
        self._resp.close()
        await self._session.close()
+
+
def request(
    method: str,
    url: StrOrURL,
    *,
    params: Optional[Mapping[str, str]] = None,
    data: Any = None,
    json: Any = None,
    headers: Optional[LooseHeaders] = None,
    skip_auto_headers: Optional[Iterable[str]] = None,
    auth: Optional[BasicAuth] = None,
    allow_redirects: bool = True,
    max_redirects: int = 10,
    compress: Optional[str] = None,
    chunked: Optional[bool] = None,
    expect100: bool = False,
    raise_for_status: Optional[bool] = None,
    read_until_eof: bool = True,
    proxy: Optional[StrOrURL] = None,
    proxy_auth: Optional[BasicAuth] = None,
    timeout: Union[ClientTimeout, object] = sentinel,
    cookies: Optional[LooseCookies] = None,
    version: HttpVersion = http.HttpVersion11,
    connector: Optional[BaseConnector] = None,
    read_bufsize: Optional[int] = None,
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> _SessionRequestContextManager:
    """Constructs and sends a request.

    Returns response object.
    method - HTTP method
    url - request url
    params - (optional) Dictionary or bytes to be sent in the query
      string of the new request
    data - (optional) Dictionary, bytes, or file-like object to
      send in the body of the request
    json - (optional) Any json compatible python object
    headers - (optional) Dictionary of HTTP Headers to send with
      the request
    cookies - (optional) Dict object to send with the request
    auth - (optional) BasicAuth named tuple represent HTTP Basic Auth
    auth - aiohttp.helpers.BasicAuth
    allow_redirects - (optional) If set to False, do not follow
      redirects
    version - Request HTTP version.
    compress - Set to True if request has to be compressed
      with deflate encoding.
    chunked - Set to chunk size for chunked transfer encoding.
    expect100 - Expect 100-continue response from server.
    connector - BaseConnector sub-class instance to support
      connection pooling.
    read_until_eof - Read response until eof if response
      does not have Content-Length header.
    loop - Optional event loop.
    timeout - Optional ClientTimeout settings structure, 5min
      total timeout by default.
    Usage::
      >>> import aiohttp
      >>> resp = await aiohttp.request('GET', 'http://python.org/')
      >>> resp
      <ClientResponse(python.org/) [200]>
      >>> data = await resp.read()
    """
    # A fresh session is created per call; _SessionRequestContextManager
    # closes it together with the response, so nothing outlives the
    # ``async with`` block.  The connector is owned (and closed with the
    # session) only when we created it ourselves here.
    connector_owner = False
    if connector is None:
        connector_owner = True
        # NOTE(review): force_close presumably because this one-shot session
        # never reuses connections — confirm before changing.
        connector = TCPConnector(loop=loop, force_close=True)

    session = ClientSession(
        loop=loop,
        cookies=cookies,
        version=version,
        timeout=timeout,
        connector=connector,
        connector_owner=connector_owner,
    )

    return _SessionRequestContextManager(
        session._request(
            method,
            url,
            params=params,
            data=data,
            json=json,
            headers=headers,
            skip_auto_headers=skip_auto_headers,
            auth=auth,
            allow_redirects=allow_redirects,
            max_redirects=max_redirects,
            compress=compress,
            chunked=chunked,
            expect100=expect100,
            raise_for_status=raise_for_status,
            read_until_eof=read_until_eof,
            proxy=proxy,
            proxy_auth=proxy_auth,
            read_bufsize=read_bufsize,
        ),
        session,
    )
diff --git a/contrib/python/aiohttp/aiohttp/client_exceptions.py b/contrib/python/aiohttp/aiohttp/client_exceptions.py
new file mode 100644
index 0000000000..dd55321054
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/client_exceptions.py
@@ -0,0 +1,342 @@
+"""HTTP related errors."""
+
+import asyncio
+import warnings
+from typing import TYPE_CHECKING, Any, Optional, Tuple, Union
+
+from .http_parser import RawResponseMessage
+from .typedefs import LooseHeaders
+
+try:
+ import ssl
+
+ SSLContext = ssl.SSLContext
+except ImportError: # pragma: no cover
+ ssl = SSLContext = None # type: ignore[assignment]
+
+
+if TYPE_CHECKING: # pragma: no cover
+ from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo
+else:
+ RequestInfo = ClientResponse = ConnectionKey = None
+
+__all__ = (
+ "ClientError",
+ "ClientConnectionError",
+ "ClientOSError",
+ "ClientConnectorError",
+ "ClientProxyConnectionError",
+ "ClientSSLError",
+ "ClientConnectorSSLError",
+ "ClientConnectorCertificateError",
+ "ServerConnectionError",
+ "ServerTimeoutError",
+ "ServerDisconnectedError",
+ "ServerFingerprintMismatch",
+ "ClientResponseError",
+ "ClientHttpProxyError",
+ "WSServerHandshakeError",
+ "ContentTypeError",
+ "ClientPayloadError",
+ "InvalidURL",
+)
+
+
class ClientError(Exception):
    """Base class for client connection errors."""

    # Root of the hierarchy: every exception defined in this module derives
    # (directly or indirectly) from ClientError.
+
+
class ClientResponseError(ClientError):
    """Connection error during reading response.

    request_info: instance of RequestInfo
    """

    def __init__(
        self,
        request_info: RequestInfo,
        history: Tuple[ClientResponse, ...],
        *,
        code: Optional[int] = None,
        status: Optional[int] = None,
        message: str = "",
        headers: Optional[LooseHeaders] = None,
    ) -> None:
        self.request_info = request_info
        # ``code`` is the deprecated spelling of ``status``; passing both is
        # ambiguous and rejected outright.
        if code is not None:
            if status is not None:
                raise ValueError(
                    "Both code and status arguments are provided; "
                    "code is deprecated, use status instead"
                )
            warnings.warn(
                "code argument is deprecated, use status instead",
                DeprecationWarning,
                stacklevel=2,
            )
        if status is not None:
            self.status = status
        elif code is not None:
            self.status = code
        else:
            # 0 means "no HTTP status available".
            self.status = 0
        self.message = message
        self.headers = headers
        self.history = history
        self.args = (request_info, history)

    def __str__(self) -> str:
        return "{}, message={!r}, url={!r}".format(
            self.status,
            self.message,
            self.request_info.real_url,
        )

    def __repr__(self) -> str:
        # Only non-default fields are included to keep the repr compact.
        args = f"{self.request_info!r}, {self.history!r}"
        if self.status != 0:
            args += f", status={self.status!r}"
        if self.message != "":
            args += f", message={self.message!r}"
        if self.headers is not None:
            args += f", headers={self.headers!r}"
        return f"{type(self).__name__}({args})"

    @property
    def code(self) -> int:
        # Deprecated alias for ``status``.
        warnings.warn(
            "code property is deprecated, use status instead",
            DeprecationWarning,
            stacklevel=2,
        )
        return self.status

    @code.setter
    def code(self, value: int) -> None:
        warnings.warn(
            "code property is deprecated, use status instead",
            DeprecationWarning,
            stacklevel=2,
        )
        self.status = value
+
+
# Specializations of ClientResponseError: same constructor signature
# (request_info, history, *, status, message, headers).
class ContentTypeError(ClientResponseError):
    """ContentType found is not valid."""


class WSServerHandshakeError(ClientResponseError):
    """websocket server handshake error."""


class ClientHttpProxyError(ClientResponseError):
    """HTTP proxy error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    proxy responds with status other than ``200 OK``
    on ``CONNECT`` request.
    """


class TooManyRedirects(ClientResponseError):
    """Client was redirected too many times."""
+
+
class ClientConnectionError(ClientError):
    """Base class for client socket errors."""


class ClientOSError(ClientConnectionError, OSError):
    """OSError error."""
+
+
class ClientConnectorError(ClientOSError):
    """Client connector error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    connection to proxy can not be established.
    """

    def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None:
        self._conn_key = connection_key
        self._os_error = os_error
        # Populate errno/strerror so it behaves like a regular OSError.
        super().__init__(os_error.errno, os_error.strerror)
        self.args = (connection_key, os_error)

    @property
    def os_error(self) -> OSError:
        # The original low-level error that triggered this exception.
        return self._os_error

    @property
    def host(self) -> str:
        return self._conn_key.host

    @property
    def port(self) -> Optional[int]:
        return self._conn_key.port

    @property
    def ssl(self) -> Union[SSLContext, None, bool, "Fingerprint"]:
        return self._conn_key.ssl

    def __str__(self) -> str:
        return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format(
            self, self.ssl if self.ssl is not None else "default", self.strerror
        )

    # OSError.__reduce__ does too much black magic; use the generic
    # BaseException pickling so (connection_key, os_error) round-trips.
    __reduce__ = BaseException.__reduce__
+
+
class ClientProxyConnectionError(ClientConnectorError):
    """Proxy connection error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    connection to proxy can not be established.
    """


class UnixClientConnectorError(ClientConnectorError):
    """Unix connector error.

    Raised in :py:class:`aiohttp.connector.UnixConnector`
    if connection to unix socket can not be established.
    """

    def __init__(
        self, path: str, connection_key: ConnectionKey, os_error: OSError
    ) -> None:
        # Remember the socket path for error reporting in __str__.
        self._path = path
        super().__init__(connection_key, os_error)

    @property
    def path(self) -> str:
        return self._path

    def __str__(self) -> str:
        return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format(
            self, self.ssl if self.ssl is not None else "default", self.strerror
        )
+
+
class ServerConnectionError(ClientConnectionError):
    """Server connection errors."""


class ServerDisconnectedError(ServerConnectionError):
    """Server disconnected."""

    def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None:
        if message is None:
            message = "Server disconnected"

        self.args = (message,)
        # ``message`` may also be the partially parsed RawResponseMessage
        # that was in flight when the peer dropped the connection (see
        # ResponseHandler.connection_lost).
        self.message = message


class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
    """Server timeout error."""
+
+
class ServerFingerprintMismatch(ServerConnectionError):
    """SSL certificate does not match expected fingerprint."""

    # Raised by Fingerprint.check() in client_reqrep.
    def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None:
        self.expected = expected
        self.got = got
        self.host = host
        self.port = port
        self.args = (expected, got, host, port)

    def __repr__(self) -> str:
        return "<{} expected={!r} got={!r} host={!r} port={!r}>".format(
            self.__class__.__name__, self.expected, self.got, self.host, self.port
        )
+
+
class ClientPayloadError(ClientError):
    """Response payload error."""


class InvalidURL(ClientError, ValueError):
    """Invalid URL.

    URL used for fetching is malformed, e.g. it doesn't contain the
    host part.
    """

    # Derive from ValueError for backward compatibility

    def __init__(self, url: Any) -> None:
        # The type of url is not yarl.URL because the exception can be raised
        # on URL(url) call
        super().__init__(url)

    @property
    def url(self) -> Any:
        # The offending URL is stored as the sole exception argument.
        return self.args[0]

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} {self.url}>"
+
+
class ClientSSLError(ClientConnectorError):
    """Base error for ssl.*Errors."""


# Build base-class tuples so the concrete SSL exceptions below also inherit
# the stdlib ssl exception types when ssl support is available; without the
# ssl module they fall back to ClientSSLError (plus ValueError for certs).
if ssl is not None:
    cert_errors = (ssl.CertificateError,)
    cert_errors_bases = (
        ClientSSLError,
        ssl.CertificateError,
    )

    ssl_errors = (ssl.SSLError,)
    ssl_error_bases = (ClientSSLError, ssl.SSLError)
else:  # pragma: no cover
    cert_errors = tuple()
    cert_errors_bases = (
        ClientSSLError,
        ValueError,
    )

    ssl_errors = tuple()
    ssl_error_bases = (ClientSSLError,)
+
+
class ClientConnectorSSLError(*ssl_error_bases):  # type: ignore[misc]
    """Response ssl error."""


class ClientConnectorCertificateError(*cert_errors_bases):  # type: ignore[misc]
    """Response certificate error."""

    def __init__(
        self, connection_key: ConnectionKey, certificate_error: Exception
    ) -> None:
        self._conn_key = connection_key
        self._certificate_error = certificate_error
        self.args = (connection_key, certificate_error)

    @property
    def certificate_error(self) -> Exception:
        # The underlying ssl certificate validation error.
        return self._certificate_error

    @property
    def host(self) -> str:
        return self._conn_key.host

    @property
    def port(self) -> Optional[int]:
        return self._conn_key.port

    @property
    def ssl(self) -> bool:
        return self._conn_key.is_ssl

    def __str__(self) -> str:
        return (
            "Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} "
            "[{0.certificate_error.__class__.__name__}: "
            "{0.certificate_error.args}]".format(self)
        )
diff --git a/contrib/python/aiohttp/aiohttp/client_proto.py b/contrib/python/aiohttp/aiohttp/client_proto.py
new file mode 100644
index 0000000000..f36863b836
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/client_proto.py
@@ -0,0 +1,251 @@
+import asyncio
+from contextlib import suppress
+from typing import Any, Optional, Tuple
+
+from .base_protocol import BaseProtocol
+from .client_exceptions import (
+ ClientOSError,
+ ClientPayloadError,
+ ServerDisconnectedError,
+ ServerTimeoutError,
+)
+from .helpers import BaseTimerContext
+from .http import HttpResponseParser, RawResponseMessage
+from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader
+
+
class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]):
    """Helper class to adapt between Protocol and StreamReader.

    Acts as the asyncio protocol for a client connection: bytes from the
    transport are fed to an HttpResponseParser (or a custom payload parser
    after an upgrade) and the parsed (message, payload) pairs are exposed
    through the DataQueue interface.
    """

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        BaseProtocol.__init__(self, loop=loop)
        DataQueue.__init__(self, loop)

        # Set once the connection must not be returned to the pool.
        self._should_close = False

        self._payload: Optional[StreamReader] = None
        self._skip_payload = False
        self._payload_parser = None

        self._timer = None

        # Bytes received while no parser was attached (e.g. during upgrade).
        self._tail = b""
        self._upgraded = False
        self._parser = None  # type: Optional[HttpResponseParser]

        self._read_timeout = None  # type: Optional[float]
        self._read_timeout_handle = None  # type: Optional[asyncio.TimerHandle]

    @property
    def upgraded(self) -> bool:
        return self._upgraded

    @property
    def should_close(self) -> bool:
        """True if the connection may not be safely reused."""
        # Precedence: (payload present and not fully read) or upgraded.
        if self._payload is not None and not self._payload.is_eof() or self._upgraded:
            return True

        return (
            self._should_close
            or self._upgraded
            or self.exception() is not None
            or self._payload_parser is not None
            or len(self) > 0
            or bool(self._tail)
        )

    def force_close(self) -> None:
        """Mark the connection so it is closed instead of pooled."""
        self._should_close = True

    def close(self) -> None:
        """Close the transport and drop per-connection state."""
        transport = self.transport
        if transport is not None:
            transport.close()
            self.transport = None
            self._payload = None
            self._drop_timeout()

    def is_connected(self) -> bool:
        return self.transport is not None and not self.transport.is_closing()

    def connection_lost(self, exc: Optional[BaseException]) -> None:
        """Flush parsers, surface a disconnect error, and reset state."""
        self._drop_timeout()

        if self._payload_parser is not None:
            # Best-effort EOF delivery; errors here must not mask ``exc``.
            with suppress(Exception):
                self._payload_parser.feed_eof()

        uncompleted = None
        if self._parser is not None:
            try:
                uncompleted = self._parser.feed_eof()
            except Exception:
                # Parser rejected the truncated data; report it through the
                # payload stream instead of raising from connection_lost.
                if self._payload is not None:
                    self._payload.set_exception(
                        ClientPayloadError("Response payload is not completed")
                    )

        if not self.is_eof():
            if isinstance(exc, OSError):
                exc = ClientOSError(*exc.args)
            if exc is None:
                # ``uncompleted`` carries the partially parsed message, if any.
                exc = ServerDisconnectedError(uncompleted)
            # assigns self._should_close to True as side effect,
            # we do it anyway below
            self.set_exception(exc)

        self._should_close = True
        self._parser = None
        self._payload = None
        self._payload_parser = None
        self._reading_paused = False

        super().connection_lost(exc)

    def eof_received(self) -> None:
        # should call parser.feed_eof() most likely
        self._drop_timeout()

    def pause_reading(self) -> None:
        # While paused no data can arrive, so the read timeout is suspended.
        super().pause_reading()
        self._drop_timeout()

    def resume_reading(self) -> None:
        super().resume_reading()
        self._reschedule_timeout()

    def set_exception(self, exc: BaseException) -> None:
        self._should_close = True
        self._drop_timeout()
        super().set_exception(exc)

    def set_parser(self, parser: Any, payload: Any) -> None:
        """Install a custom payload parser (used after protocol upgrade)."""
        # TODO: actual types are:
        # parser: WebSocketReader
        # payload: FlowControlDataQueue
        # but they are not generic enough
        # Need an ABC for both types
        self._payload = payload
        self._payload_parser = parser

        self._drop_timeout()

        # Replay bytes that arrived before the parser was attached.
        if self._tail:
            data, self._tail = self._tail, b""
            self.data_received(data)

    def set_response_params(
        self,
        *,
        timer: Optional[BaseTimerContext] = None,
        skip_payload: bool = False,
        read_until_eof: bool = False,
        auto_decompress: bool = True,
        read_timeout: Optional[float] = None,
        read_bufsize: int = 2 ** 16,
    ) -> None:
        """Create the HTTP response parser for the next request/response."""
        self._skip_payload = skip_payload

        self._read_timeout = read_timeout
        self._reschedule_timeout()

        self._parser = HttpResponseParser(
            self,
            self._loop,
            read_bufsize,
            timer=timer,
            payload_exception=ClientPayloadError,
            response_with_body=not skip_payload,
            read_until_eof=read_until_eof,
            auto_decompress=auto_decompress,
        )

        # Replay any bytes buffered before the parser existed.
        if self._tail:
            data, self._tail = self._tail, b""
            self.data_received(data)

    def _drop_timeout(self) -> None:
        # Cancel the pending read-timeout callback, if any.
        if self._read_timeout_handle is not None:
            self._read_timeout_handle.cancel()
            self._read_timeout_handle = None

    def _reschedule_timeout(self) -> None:
        # Restart the read timeout from "now"; called on every received chunk.
        timeout = self._read_timeout
        if self._read_timeout_handle is not None:
            self._read_timeout_handle.cancel()

        if timeout:
            self._read_timeout_handle = self._loop.call_later(
                timeout, self._on_read_timeout
            )
        else:
            self._read_timeout_handle = None

    def _on_read_timeout(self) -> None:
        # Fail both the queue and the current payload stream.
        exc = ServerTimeoutError("Timeout on reading data from socket")
        self.set_exception(exc)
        if self._payload is not None:
            self._payload.set_exception(exc)

    def data_received(self, data: bytes) -> None:
        """Feed incoming bytes into whichever parser is currently active."""
        self._reschedule_timeout()

        if not data:
            return

        # custom payload parser
        if self._payload_parser is not None:
            eof, tail = self._payload_parser.feed_data(data)
            if eof:
                self._payload = None
                self._payload_parser = None

                if tail:
                    # Recurse: leftover bytes belong to the next phase.
                    self.data_received(tail)
            return
        else:
            if self._upgraded or self._parser is None:
                # i.e. websocket connection, websocket parser is not set yet
                self._tail += data
            else:
                # parse http messages
                try:
                    messages, upgraded, tail = self._parser.feed_data(data)
                except BaseException as exc:
                    if self.transport is not None:
                        # connection.release() could be called BEFORE
                        # data_received(), the transport is already
                        # closed in this case
                        self.transport.close()
                    # should_close is True after the call
                    self.set_exception(exc)
                    return

                self._upgraded = upgraded

                payload: Optional[StreamReader] = None
                for message, payload in messages:
                    if message.should_close:
                        self._should_close = True

                    self._payload = payload

                    # 204/304 (and HEAD-style requests) carry no body.
                    if self._skip_payload or message.code in (204, 304):
                        self.feed_data((message, EMPTY_PAYLOAD), 0)
                    else:
                        self.feed_data((message, payload), 0)
                if payload is not None:
                    # new message(s) was processed
                    # register timeout handler unsubscribing
                    # either on end-of-stream or immediately for
                    # EMPTY_PAYLOAD
                    if payload is not EMPTY_PAYLOAD:
                        payload.on_eof(self._drop_timeout)
                    else:
                        self._drop_timeout()

                if tail:
                    if upgraded:
                        self.data_received(tail)
                    else:
                        self._tail = tail
diff --git a/contrib/python/aiohttp/aiohttp/client_reqrep.py b/contrib/python/aiohttp/aiohttp/client_reqrep.py
new file mode 100644
index 0000000000..343002517b
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/client_reqrep.py
@@ -0,0 +1,1133 @@
+import asyncio
+import codecs
+import functools
+import io
+import re
+import sys
+import traceback
+import warnings
+from hashlib import md5, sha1, sha256
+from http.cookies import CookieError, Morsel, SimpleCookie
+from types import MappingProxyType, TracebackType
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Dict,
+ Iterable,
+ List,
+ Mapping,
+ Optional,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+
+import attr
+from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
+from yarl import URL
+
+from . import hdrs, helpers, http, multipart, payload
+from .abc import AbstractStreamWriter
+from .client_exceptions import (
+ ClientConnectionError,
+ ClientOSError,
+ ClientResponseError,
+ ContentTypeError,
+ InvalidURL,
+ ServerFingerprintMismatch,
+)
+from .formdata import FormData
+from .helpers import (
+ PY_36,
+ BaseTimerContext,
+ BasicAuth,
+ HeadersMixin,
+ TimerNoop,
+ noop,
+ reify,
+ set_result,
+)
+from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11, StreamWriter
+from .log import client_logger
+from .streams import StreamReader
+from .typedefs import (
+ DEFAULT_JSON_DECODER,
+ JSONDecoder,
+ LooseCookies,
+ LooseHeaders,
+ RawHeaders,
+)
+
+try:
+ import ssl
+ from ssl import SSLContext
+except ImportError: # pragma: no cover
+ ssl = None # type: ignore[assignment]
+ SSLContext = object # type: ignore[misc,assignment]
+
+try:
+ import cchardet as chardet
+except ImportError: # pragma: no cover
+ import charset_normalizer as chardet # type: ignore[no-redef]
+
+
# Explicit public API of this module.
__all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint")
+
+
+if TYPE_CHECKING: # pragma: no cover
+ from .client import ClientSession
+ from .connector import Connection
+ from .tracing import Trace
+
+
# Matches JSON media types including structured-syntax suffixes,
# e.g. "application/json" and "application/hal+json".
json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json")
+
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ContentDisposition:
    """Value object for a parsed ``Content-Disposition`` header."""

    type: Optional[str]  # disposition type, e.g. "attachment"
    parameters: "MappingProxyType[str, str]"  # read-only header parameters
    filename: Optional[str]
+
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class RequestInfo:
    """Immutable description of an issued request (carried on responses/errors)."""

    url: URL
    method: str
    headers: "CIMultiDictProxy[str]"
    # ``real_url`` defaults to ``url`` unless explicitly provided.
    real_url: URL = attr.ib()

    @real_url.default
    def real_url_default(self) -> URL:
        return self.url
+
+
class Fingerprint:
    """Expected server-certificate digest; only sha256 is accepted."""

    HASHFUNC_BY_DIGESTLEN = {
        16: md5,
        20: sha1,
        32: sha256,
    }

    def __init__(self, fingerprint: bytes) -> None:
        # The digest length identifies the hash algorithm.
        hashfunc = self.HASHFUNC_BY_DIGESTLEN.get(len(fingerprint))
        if not hashfunc:
            raise ValueError("fingerprint has invalid length")
        if hashfunc is md5 or hashfunc is sha1:
            raise ValueError(
                "md5 and sha1 are insecure and not supported. Use sha256."
            )
        self._hashfunc = hashfunc
        self._fingerprint = fingerprint

    @property
    def fingerprint(self) -> bytes:
        return self._fingerprint

    def check(self, transport: asyncio.Transport) -> None:
        """Compare the peer certificate digest; raise on mismatch."""
        if not transport.get_extra_info("sslcontext"):
            # Plain (non-TLS) transport: nothing to verify.
            return
        sslobj = transport.get_extra_info("ssl_object")
        cert = sslobj.getpeercert(binary_form=True)
        got = self._hashfunc(cert).digest()
        if got != self._fingerprint:
            host, port, *_ = transport.get_extra_info("peername")
            raise ServerFingerprintMismatch(self._fingerprint, got, host, port)
+
+
# Types accepted for the ``ssl`` parameter; when the ssl module is
# unavailable only ``None`` is allowed.
if ssl is not None:
    SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None))
else:  # pragma: no cover
    SSL_ALLOWED_TYPES = type(None)
+
+
def _merge_ssl_params(
    ssl: Union["SSLContext", bool, Fingerprint, None],
    verify_ssl: Optional[bool],
    ssl_context: Optional["SSLContext"],
    fingerprint: Optional[bytes],
) -> Union["SSLContext", bool, Fingerprint, None]:
    """Collapse the deprecated ssl-related parameters into the single ``ssl`` one.

    Each legacy parameter emits a DeprecationWarning and is rejected if
    ``ssl`` was also given.
    """
    mutually_exclusive = (
        "verify_ssl, ssl_context, fingerprint and ssl "
        "parameters are mutually exclusive"
    )
    if verify_ssl is not None and not verify_ssl:
        warnings.warn(
            "verify_ssl is deprecated, use ssl=False instead",
            DeprecationWarning,
            stacklevel=3,
        )
        if ssl is not None:
            raise ValueError(mutually_exclusive)
        ssl = False
    if ssl_context is not None:
        warnings.warn(
            "ssl_context is deprecated, use ssl=context instead",
            DeprecationWarning,
            stacklevel=3,
        )
        if ssl is not None:
            raise ValueError(mutually_exclusive)
        ssl = ssl_context
    if fingerprint is not None:
        warnings.warn(
            "fingerprint is deprecated, use ssl=Fingerprint(fingerprint) instead",
            DeprecationWarning,
            stacklevel=3,
        )
        if ssl is not None:
            raise ValueError(mutually_exclusive)
        ssl = Fingerprint(fingerprint)
    if not isinstance(ssl, SSL_ALLOWED_TYPES):
        raise TypeError(
            "ssl should be SSLContext, bool, Fingerprint or None, "
            "got {!r} instead.".format(ssl)
        )
    return ssl
+
+
@attr.s(auto_attribs=True, slots=True, frozen=True)
class ConnectionKey:
    # The key contains information about the proxy / TLS configuration used,
    # to prevent reusing wrong connections from the pool.
    host: str
    port: Optional[int]
    is_ssl: bool
    ssl: Union[SSLContext, None, bool, Fingerprint]
    proxy: Optional[URL]
    proxy_auth: Optional[BasicAuth]
    proxy_headers_hash: Optional[int]  # hash(CIMultiDict)
+
+
+def _is_expected_content_type(
+ response_content_type: str, expected_content_type: str
+) -> bool:
+ if expected_content_type == "application/json":
+ return json_re.match(response_content_type) is not None
+ return expected_content_type in response_content_type
+
+
class ClientRequest:
    """A single outgoing HTTP request.

    The constructor normalizes URL, headers, cookies and body; :meth:`send`
    writes the request to a connection and returns a ``ClientResponse``
    object (not yet started).
    """

    GET_METHODS = {
        hdrs.METH_GET,
        hdrs.METH_HEAD,
        hdrs.METH_OPTIONS,
        hdrs.METH_TRACE,
    }
    POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT}
    ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE})

    DEFAULT_HEADERS = {
        hdrs.ACCEPT: "*/*",
        hdrs.ACCEPT_ENCODING: "gzip, deflate",
    }

    # Class-level defaults, overridden per instance during __init__/send().
    body = b""
    auth = None
    response = None

    _writer = None  # async task for streaming data
    _continue = None  # waiter future for '100 Continue' response

    # N.B.
    # Adding __del__ method with self._writer closing doesn't make sense
    # because _writer is instance method, thus it keeps a reference to self.
    # Until writer has finished finalizer will not be called.

    def __init__(
        self,
        method: str,
        url: URL,
        *,
        params: Optional[Mapping[str, str]] = None,
        headers: Optional[LooseHeaders] = None,
        skip_auto_headers: Iterable[str] = frozenset(),
        data: Any = None,
        cookies: Optional[LooseCookies] = None,
        auth: Optional[BasicAuth] = None,
        version: http.HttpVersion = http.HttpVersion11,
        compress: Optional[str] = None,
        chunked: Optional[bool] = None,
        expect100: bool = False,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        response_class: Optional[Type["ClientResponse"]] = None,
        proxy: Optional[URL] = None,
        proxy_auth: Optional[BasicAuth] = None,
        timer: Optional[BaseTimerContext] = None,
        session: Optional["ClientSession"] = None,
        ssl: Union[SSLContext, bool, Fingerprint, None] = None,
        proxy_headers: Optional[LooseHeaders] = None,
        traces: Optional[List["Trace"]] = None,
    ):

        if loop is None:
            loop = asyncio.get_event_loop()

        assert isinstance(url, URL), url
        assert isinstance(proxy, (URL, type(None))), proxy
        # FIXME: session is None in tests only, need to fix tests
        # assert session is not None
        self._session = cast("ClientSession", session)
        if params:
            # Merge extra query parameters with those already on the URL.
            q = MultiDict(url.query)
            url2 = url.with_query(params)
            q.extend(url2.query)
            url = url.with_query(q)
        self.original_url = url
        # The fragment is never sent on the wire.
        self.url = url.with_fragment(None)
        self.method = method.upper()
        self.chunked = chunked
        self.compress = compress
        self.loop = loop
        self.length = None
        if response_class is None:
            real_response_class = ClientResponse
        else:
            real_response_class = response_class
        self.response_class = real_response_class  # type: Type[ClientResponse]
        self._timer = timer if timer is not None else TimerNoop()
        self._ssl = ssl

        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        # The setup order below matters: e.g. content encoding must be
        # decided before transfer encoding, and the body must be wrapped
        # before Content-Length / chunking is finalized.
        self.update_version(version)
        self.update_host(url)
        self.update_headers(headers)
        self.update_auto_headers(skip_auto_headers)
        self.update_cookies(cookies)
        self.update_content_encoding(data)
        self.update_auth(auth)
        self.update_proxy(proxy, proxy_auth, proxy_headers)

        self.update_body_from_data(data)
        if data is not None or self.method not in self.GET_METHODS:
            self.update_transfer_encoding()
        self.update_expect_continue(expect100)
        if traces is None:
            traces = []
        self._traces = traces

    def is_ssl(self) -> bool:
        """Return True if the target URL uses a TLS scheme (https/wss)."""
        return self.url.scheme in ("https", "wss")

    @property
    def ssl(self) -> Union["SSLContext", None, bool, Fingerprint]:
        """TLS configuration supplied at construction time."""
        return self._ssl

    @property
    def connection_key(self) -> ConnectionKey:
        """Key used by the connector to pool and reuse connections safely."""
        proxy_headers = self.proxy_headers
        if proxy_headers:
            h = hash(
                tuple((k, v) for k, v in proxy_headers.items())
            )  # type: Optional[int]
        else:
            h = None
        return ConnectionKey(
            self.host,
            self.port,
            self.is_ssl(),
            self.ssl,
            self.proxy,
            self.proxy_auth,
            h,
        )

    @property
    def host(self) -> str:
        """Raw host of the request URL (guaranteed non-None by update_host)."""
        ret = self.url.raw_host
        assert ret is not None
        return ret

    @property
    def port(self) -> Optional[int]:
        """Port of the request URL, if any."""
        return self.url.port

    @property
    def request_info(self) -> RequestInfo:
        """Immutable snapshot of url, method and headers for this request."""
        headers = CIMultiDictProxy(self.headers)  # type: CIMultiDictProxy[str]
        return RequestInfo(self.url, self.method, headers, self.original_url)

    def update_host(self, url: URL) -> None:
        """Validate the URL host and pick up basic-auth credentials from it."""
        # get host/port
        if not url.raw_host:
            raise InvalidURL(url)

        # basic auth info
        username, password = url.user, url.password
        if username:
            self.auth = helpers.BasicAuth(username, password or "")

    def update_version(self, version: Union[http.HttpVersion, str]) -> None:
        """Convert request version to two elements tuple.

        parser HTTP version '1.1' => (1, 1)
        """
        if isinstance(version, str):
            v = [part.strip() for part in version.split(".", 1)]
            try:
                version = http.HttpVersion(int(v[0]), int(v[1]))
            except ValueError:
                raise ValueError(
                    f"Can not parse http version number: {version}"
                ) from None
        self.version = version

    def update_headers(self, headers: Optional[LooseHeaders]) -> None:
        """Update request headers; always (re)builds the Host header."""
        self.headers = CIMultiDict()  # type: CIMultiDict[str]

        # add host
        netloc = cast(str, self.url.raw_host)
        if helpers.is_ipv6_address(netloc):
            netloc = f"[{netloc}]"
        if self.url.port is not None and not self.url.is_default_port():
            netloc += ":" + str(self.url.port)
        self.headers[hdrs.HOST] = netloc

        if headers:
            if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
                headers = headers.items()  # type: ignore[assignment]

            for key, value in headers:  # type: ignore[misc]
                # A special case for Host header: replace instead of add,
                # so callers can override the computed value.
                if key.lower() == "host":
                    self.headers[key] = value
                else:
                    self.headers.add(key, value)

    def update_auto_headers(self, skip_auto_headers: Iterable[str]) -> None:
        """Add DEFAULT_HEADERS and User-Agent unless present or skipped."""
        self.skip_auto_headers = CIMultiDict(
            (hdr, None) for hdr in sorted(skip_auto_headers)
        )
        used_headers = self.headers.copy()
        used_headers.extend(self.skip_auto_headers)  # type: ignore[arg-type]

        for hdr, val in self.DEFAULT_HEADERS.items():
            if hdr not in used_headers:
                self.headers.add(hdr, val)

        if hdrs.USER_AGENT not in used_headers:
            self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE

    def update_cookies(self, cookies: Optional[LooseCookies]) -> None:
        """Update request cookies header."""
        if not cookies:
            return

        c = SimpleCookie()  # type: SimpleCookie[str]
        if hdrs.COOKIE in self.headers:
            # Merge with any cookies already present in the headers.
            c.load(self.headers.get(hdrs.COOKIE, ""))
            del self.headers[hdrs.COOKIE]

        if isinstance(cookies, Mapping):
            iter_cookies = cookies.items()
        else:
            iter_cookies = cookies  # type: ignore[assignment]
        for name, value in iter_cookies:
            if isinstance(value, Morsel):
                # Preserve coded_value
                mrsl_val = value.get(value.key, Morsel())
                mrsl_val.set(value.key, value.value, value.coded_value)
                c[name] = mrsl_val
            else:
                c[name] = value  # type: ignore[assignment]

        self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip()

    def update_content_encoding(self, data: Any) -> None:
        """Set request content encoding.

        Raises ValueError if both ``compress`` and an explicit
        Content-Encoding header are given.
        """
        if data is None:
            return

        enc = self.headers.get(hdrs.CONTENT_ENCODING, "").lower()
        if enc:
            if self.compress:
                raise ValueError(
                    "compress can not be set " "if Content-Encoding header is set"
                )
        elif self.compress:
            if not isinstance(self.compress, str):
                self.compress = "deflate"
            self.headers[hdrs.CONTENT_ENCODING] = self.compress
            self.chunked = True  # enable chunked, no need to deal with length

    def update_transfer_encoding(self) -> None:
        """Analyze transfer-encoding header.

        Rejects contradictory combinations of ``chunked`` with explicit
        Transfer-Encoding / Content-Length headers, otherwise fills in
        either a chunked marker or a Content-Length.
        """
        te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower()

        if "chunked" in te:
            if self.chunked:
                raise ValueError(
                    "chunked can not be set "
                    'if "Transfer-Encoding: chunked" header is set'
                )

        elif self.chunked:
            if hdrs.CONTENT_LENGTH in self.headers:
                raise ValueError(
                    "chunked can not be set " "if Content-Length header is set"
                )

            self.headers[hdrs.TRANSFER_ENCODING] = "chunked"
        else:
            if hdrs.CONTENT_LENGTH not in self.headers:
                self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))

    def update_auth(self, auth: Optional[BasicAuth]) -> None:
        """Set basic auth (explicit argument wins over URL credentials)."""
        if auth is None:
            auth = self.auth
        if auth is None:
            return

        if not isinstance(auth, helpers.BasicAuth):
            raise TypeError("BasicAuth() tuple is required instead")

        self.headers[hdrs.AUTHORIZATION] = auth.encode()

    def update_body_from_data(self, body: Any) -> None:
        """Wrap *body* into a payload object and derive length headers."""
        if body is None:
            return

        # FormData
        if isinstance(body, FormData):
            body = body()

        try:
            body = payload.PAYLOAD_REGISTRY.get(body, disposition=None)
        except payload.LookupError:
            body = FormData(body)()

        self.body = body

        # enable chunked encoding if needed
        if not self.chunked:
            if hdrs.CONTENT_LENGTH not in self.headers:
                size = body.size
                if size is None:
                    self.chunked = True
                else:
                    # NOTE(review): this inner Content-Length check repeats
                    # the outer one and is always true at this point.
                    if hdrs.CONTENT_LENGTH not in self.headers:
                        self.headers[hdrs.CONTENT_LENGTH] = str(size)

        # copy payload headers
        assert body.headers
        for (key, value) in body.headers.items():
            if key in self.headers:
                continue
            if key in self.skip_auto_headers:
                continue
            self.headers[key] = value

    def update_expect_continue(self, expect: bool = False) -> None:
        """Create the waiter future when a 100-continue handshake is used."""
        if expect:
            self.headers[hdrs.EXPECT] = "100-continue"
        elif self.headers.get(hdrs.EXPECT, "").lower() == "100-continue":
            expect = True

        if expect:
            self._continue = self.loop.create_future()

    def update_proxy(
        self,
        proxy: Optional[URL],
        proxy_auth: Optional[BasicAuth],
        proxy_headers: Optional[LooseHeaders],
    ) -> None:
        """Store proxy settings after validating the auth object type."""
        if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
            raise ValueError("proxy_auth must be None or BasicAuth() tuple")
        self.proxy = proxy
        self.proxy_auth = proxy_auth
        self.proxy_headers = proxy_headers

    def keep_alive(self) -> bool:
        """Decide whether the connection may be kept alive after this request."""
        if self.version < HttpVersion10:
            # keep alive not supported at all
            return False
        if self.version == HttpVersion10:
            if self.headers.get(hdrs.CONNECTION) == "keep-alive":
                return True
            else:  # no headers means we close for Http 1.0
                return False
        elif self.headers.get(hdrs.CONNECTION) == "close":
            return False

        return True

    async def write_bytes(
        self, writer: AbstractStreamWriter, conn: "Connection"
    ) -> None:
        """Support coroutines that yields bytes objects."""
        # 100 response
        if self._continue is not None:
            await writer.drain()
            await self._continue

        protocol = conn.protocol
        assert protocol is not None
        try:
            if isinstance(self.body, payload.Payload):
                await self.body.write(writer)
            else:
                if isinstance(self.body, (bytes, bytearray)):
                    self.body = (self.body,)  # type: ignore[assignment]

                for chunk in self.body:
                    await writer.write(chunk)  # type: ignore[arg-type]

            await writer.write_eof()
        except OSError as exc:
            # Route write errors through the protocol so the response
            # reader observes them.
            new_exc = ClientOSError(
                exc.errno, "Can not write request body for %s" % self.url
            )
            new_exc.__context__ = exc
            new_exc.__cause__ = exc
            protocol.set_exception(new_exc)
        except asyncio.CancelledError as exc:
            if not conn.closed:
                protocol.set_exception(exc)
        except Exception as exc:
            protocol.set_exception(exc)
        finally:
            self._writer = None

    async def send(self, conn: "Connection") -> "ClientResponse":
        """Write the request to *conn*; return a not-yet-started response."""
        # Specify request target:
        # - CONNECT request must send authority form URI
        # - not CONNECT proxy must send absolute form URI
        # - most common is origin form URI
        if self.method == hdrs.METH_CONNECT:
            connect_host = self.url.raw_host
            assert connect_host is not None
            if helpers.is_ipv6_address(connect_host):
                connect_host = f"[{connect_host}]"
            path = f"{connect_host}:{self.url.port}"
        elif self.proxy and not self.is_ssl():
            path = str(self.url)
        else:
            path = self.url.raw_path
            if self.url.raw_query_string:
                path += "?" + self.url.raw_query_string

        protocol = conn.protocol
        assert protocol is not None
        writer = StreamWriter(
            protocol,
            self.loop,
            on_chunk_sent=functools.partial(
                self._on_chunk_request_sent, self.method, self.url
            ),
            on_headers_sent=functools.partial(
                self._on_headers_request_sent, self.method, self.url
            ),
        )

        if self.compress:
            writer.enable_compression(self.compress)

        if self.chunked is not None:
            writer.enable_chunking()

        # set default content-type
        if (
            self.method in self.POST_METHODS
            and hdrs.CONTENT_TYPE not in self.skip_auto_headers
            and hdrs.CONTENT_TYPE not in self.headers
        ):
            self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream"

        # set the connection header
        connection = self.headers.get(hdrs.CONNECTION)
        if not connection:
            if self.keep_alive():
                if self.version == HttpVersion10:
                    connection = "keep-alive"
            else:
                if self.version == HttpVersion11:
                    connection = "close"

        if connection is not None:
            self.headers[hdrs.CONNECTION] = connection

        # status + headers
        status_line = "{0} {1} HTTP/{2[0]}.{2[1]}".format(
            self.method, path, self.version
        )
        await writer.write_headers(status_line, self.headers)

        # The body is streamed in a background task; the response can
        # arrive while the body is still being written.
        self._writer = self.loop.create_task(self.write_bytes(writer, conn))

        response_class = self.response_class
        assert response_class is not None
        self.response = response_class(
            self.method,
            self.original_url,
            writer=self._writer,
            continue100=self._continue,
            timer=self._timer,
            request_info=self.request_info,
            traces=self._traces,
            loop=self.loop,
            session=self._session,
        )
        return self.response

    async def close(self) -> None:
        """Wait for the background body-writer task to finish."""
        if self._writer is not None:
            try:
                await self._writer
            finally:
                self._writer = None

    def terminate(self) -> None:
        """Cancel the background body-writer task without waiting."""
        if self._writer is not None:
            if not self.loop.is_closed():
                self._writer.cancel()
            self._writer = None

    async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None:
        # Tracing hook: fired for every body chunk written.
        for trace in self._traces:
            await trace.send_request_chunk_sent(method, url, chunk)

    async def _on_headers_request_sent(
        self, method: str, url: URL, headers: "CIMultiDict[str]"
    ) -> None:
        # Tracing hook: fired once the request headers are written.
        for trace in self._traces:
            await trace.send_request_headers(method, url, headers)
+
+
class ClientResponse(HeadersMixin):
    """HTTP response returned to the client.

    Instances are created by ``ClientRequest.send()``; :meth:`start` then
    reads the status line, headers and payload from the connection.
    """

    # from the Status-Line of the response
    version = None  # HTTP-Version
    status = None  # type: int  # Status-Code
    reason = None  # Reason-Phrase

    content = None  # type: StreamReader  # Payload stream
    _headers = None  # type: CIMultiDictProxy[str]  # Response headers
    _raw_headers = None  # type: RawHeaders  # Response raw headers

    _connection = None  # current connection
    _source_traceback = None
    # setted up by ClientRequest after ClientResponse object creation
    # post-init stage allows to not change ctor signature
    _closed = True  # to allow __del__ for non-initialized properly response
    _released = False

    def __init__(
        self,
        method: str,
        url: URL,
        *,
        writer: "asyncio.Task[None]",
        continue100: Optional["asyncio.Future[bool]"],
        timer: BaseTimerContext,
        request_info: RequestInfo,
        traces: List["Trace"],
        loop: asyncio.AbstractEventLoop,
        session: "ClientSession",
    ) -> None:
        assert isinstance(url, URL)

        self.method = method
        self.cookies = SimpleCookie()  # type: SimpleCookie[str]

        self._real_url = url
        self._url = url.with_fragment(None)
        self._body = None  # type: Any
        self._writer = writer  # type: Optional[asyncio.Task[None]]
        self._continue = continue100  # None by default
        self._closed = True
        self._history = ()  # type: Tuple[ClientResponse, ...]
        self._request_info = request_info
        self._timer = timer if timer is not None else TimerNoop()
        self._cache = {}  # type: Dict[str, Any]
        self._traces = traces
        self._loop = loop
        # store a reference to session #1985
        self._session = session  # type: Optional[ClientSession]
        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

    @reify
    def url(self) -> URL:
        """Response URL with the fragment stripped."""
        return self._url

    @reify
    def url_obj(self) -> URL:
        warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2)
        return self._url

    @reify
    def real_url(self) -> URL:
        """Original URL including the fragment, as passed by the caller."""
        return self._real_url

    @reify
    def host(self) -> str:
        assert self._url.host is not None
        return self._url.host

    @reify
    def headers(self) -> "CIMultiDictProxy[str]":
        return self._headers

    @reify
    def raw_headers(self) -> RawHeaders:
        return self._raw_headers

    @reify
    def request_info(self) -> RequestInfo:
        return self._request_info

    @reify
    def content_disposition(self) -> Optional[ContentDisposition]:
        """Parsed Content-Disposition header, or None if absent."""
        raw = self._headers.get(hdrs.CONTENT_DISPOSITION)
        if raw is None:
            return None
        disposition_type, params_dct = multipart.parse_content_disposition(raw)
        params = MappingProxyType(params_dct)
        filename = multipart.content_disposition_filename(params)
        return ContentDisposition(disposition_type, params, filename)

    def __del__(self, _warnings: Any = warnings) -> None:
        # Finalizer: release the connection and warn (in debug mode) about
        # responses that were never closed explicitly.
        if self._closed:
            return

        if self._connection is not None:
            self._connection.release()
            self._cleanup_writer()

            if self._loop.get_debug():
                if PY_36:
                    kwargs = {"source": self}
                else:
                    kwargs = {}
                _warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs)
                context = {"client_response": self, "message": "Unclosed response"}
                if self._source_traceback:
                    context["source_traceback"] = self._source_traceback
                self._loop.call_exception_handler(context)

    def __repr__(self) -> str:
        out = io.StringIO()
        ascii_encodable_url = str(self.url)
        if self.reason:
            ascii_encodable_reason = self.reason.encode(
                "ascii", "backslashreplace"
            ).decode("ascii")
        else:
            ascii_encodable_reason = self.reason
        print(
            "<ClientResponse({}) [{} {}]>".format(
                ascii_encodable_url, self.status, ascii_encodable_reason
            ),
            file=out,
        )
        print(self.headers, file=out)
        return out.getvalue()

    @property
    def connection(self) -> Optional["Connection"]:
        return self._connection

    @reify
    def history(self) -> Tuple["ClientResponse", ...]:
        """A sequence of responses, if redirects occurred."""
        return self._history

    @reify
    def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]":
        """Link header parsed into a multidict keyed by 'rel' (or URL)."""
        links_str = ", ".join(self.headers.getall("link", []))

        if not links_str:
            return MultiDictProxy(MultiDict())

        links = MultiDict()  # type: MultiDict[MultiDictProxy[Union[str, URL]]]

        for val in re.split(r",(?=\s*<)", links_str):
            match = re.match(r"\s*<(.*)>(.*)", val)
            if match is None:  # pragma: no cover
                # the check exists to suppress mypy error
                continue
            url, params_str = match.groups()
            params = params_str.split(";")[1:]

            link = MultiDict()  # type: MultiDict[Union[str, URL]]

            for param in params:
                match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
                if match is None:  # pragma: no cover
                    # the check exists to suppress mypy error
                    continue
                key, _, value, _ = match.groups()

                link.add(key, value)

            key = link.get("rel", url)  # type: ignore[assignment]

            link.add("url", self.url.join(URL(url)))

            links.add(key, MultiDictProxy(link))

        return MultiDictProxy(links)

    async def start(self, connection: "Connection") -> "ClientResponse":
        """Start response processing."""
        self._closed = False
        self._protocol = connection.protocol
        self._connection = connection

        with self._timer:
            while True:
                # read response
                try:
                    protocol = self._protocol
                    message, payload = await protocol.read()  # type: ignore[union-attr]
                except http.HttpProcessingError as exc:
                    raise ClientResponseError(
                        self.request_info,
                        self.history,
                        status=exc.code,
                        message=exc.message,
                        headers=exc.headers,
                    ) from exc

                # Skip interim 1xx responses, except 101 Switching Protocols.
                if message.code < 100 or message.code > 199 or message.code == 101:
                    break

                if self._continue is not None:
                    set_result(self._continue, True)
                    self._continue = None

        # payload eof handler
        payload.on_eof(self._response_eof)

        # response status
        self.version = message.version
        self.status = message.code
        self.reason = message.reason

        # headers
        self._headers = message.headers  # type is CIMultiDictProxy
        self._raw_headers = message.raw_headers  # type is Tuple[bytes, bytes]

        # payload
        self.content = payload

        # cookies
        for hdr in self.headers.getall(hdrs.SET_COOKIE, ()):
            try:
                self.cookies.load(hdr)
            except CookieError as exc:
                client_logger.warning("Can not load response cookies: %s", exc)
        return self

    def _response_eof(self) -> None:
        # Payload-EOF callback: return the connection to the pool once the
        # body is fully read (unless the protocol was upgraded, e.g. to a
        # websocket).
        if self._closed:
            return

        if self._connection is not None:
            # websocket, protocol could be None because
            # connection could be detached
            if (
                self._connection.protocol is not None
                and self._connection.protocol.upgraded
            ):
                return

            self._connection.release()
            self._connection = None

        self._closed = True
        self._cleanup_writer()

    @property
    def closed(self) -> bool:
        return self._closed

    def close(self) -> None:
        """Hard-close: drop the connection instead of returning it to the pool."""
        if not self._released:
            self._notify_content()
        if self._closed:
            return

        self._closed = True
        if self._loop is None or self._loop.is_closed():
            return

        if self._connection is not None:
            self._connection.close()
            self._connection = None
        self._cleanup_writer()

    def release(self) -> Any:
        """Soft-close: return the connection to the pool for reuse."""
        if not self._released:
            self._notify_content()
        if self._closed:
            return noop()

        self._closed = True
        if self._connection is not None:
            self._connection.release()
            self._connection = None

        self._cleanup_writer()
        return noop()

    @property
    def ok(self) -> bool:
        """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not.

        This is **not** a check for ``200 OK`` but a check that the response
        status is under 400.
        """
        return 400 > self.status

    def raise_for_status(self) -> None:
        """Release the response and raise ClientResponseError for status >= 400."""
        if not self.ok:
            # reason should always be not None for a started response
            assert self.reason is not None
            self.release()
            raise ClientResponseError(
                self.request_info,
                self.history,
                status=self.status,
                message=self.reason,
                headers=self.headers,
            )

    def _cleanup_writer(self) -> None:
        # Cancel the request body writer task and drop the session reference.
        if self._writer is not None:
            self._writer.cancel()
            self._writer = None
        self._session = None

    def _notify_content(self) -> None:
        # Mark the payload stream as broken so pending readers fail fast.
        content = self.content
        if content and content.exception() is None:
            content.set_exception(ClientConnectionError("Connection closed"))
        self._released = True

    async def wait_for_close(self) -> None:
        """Wait for the body writer to finish, then release the connection."""
        if self._writer is not None:
            try:
                await self._writer
            finally:
                self._writer = None
        self.release()

    async def read(self) -> bytes:
        """Read response payload."""
        if self._body is None:
            try:
                self._body = await self.content.read()
                for trace in self._traces:
                    await trace.send_response_chunk_received(
                        self.method, self.url, self._body
                    )
            except BaseException:
                self.close()
                raise
        elif self._released:
            raise ClientConnectionError("Connection closed")

        return self._body  # type: ignore[no-any-return]

    def get_encoding(self) -> str:
        """Determine the body encoding from headers, falling back to chardet.

        Raises RuntimeError if detection is needed but the body is unread.
        """
        ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
        mimetype = helpers.parse_mimetype(ctype)

        encoding = mimetype.parameters.get("charset")
        if encoding:
            try:
                codecs.lookup(encoding)
            except LookupError:
                encoding = None
        if not encoding:
            if mimetype.type == "application" and (
                mimetype.subtype == "json" or mimetype.subtype == "rdap"
            ):
                # RFC 7159 states that the default encoding is UTF-8.
                # RFC 7483 defines application/rdap+json
                encoding = "utf-8"
            elif self._body is None:
                raise RuntimeError(
                    "Cannot guess the encoding of " "a not yet read body"
                )
            else:
                encoding = chardet.detect(self._body)["encoding"]
        if not encoding:
            encoding = "utf-8"

        return encoding

    async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str:
        """Read response payload and decode."""
        if self._body is None:
            await self.read()

        if encoding is None:
            encoding = self.get_encoding()

        return self._body.decode(  # type: ignore[no-any-return,union-attr]
            encoding, errors=errors
        )

    async def json(
        self,
        *,
        encoding: Optional[str] = None,
        loads: JSONDecoder = DEFAULT_JSON_DECODER,
        content_type: Optional[str] = "application/json",
    ) -> Any:
        """Read and decodes JSON response."""
        if self._body is None:
            await self.read()

        if content_type:
            ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
            if not _is_expected_content_type(ctype, content_type):
                raise ContentTypeError(
                    self.request_info,
                    self.history,
                    message=(
                        "Attempt to decode JSON with " "unexpected mimetype: %s" % ctype
                    ),
                    headers=self.headers,
                )

        stripped = self._body.strip()  # type: ignore[union-attr]
        if not stripped:
            return None

        if encoding is None:
            encoding = self.get_encoding()

        return loads(stripped.decode(encoding))

    async def __aenter__(self) -> "ClientResponse":
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # similar to _RequestContextManager, we do not need to check
        # for exceptions, response object can close connection
        # if state is broken
        self.release()
diff --git a/contrib/python/aiohttp/aiohttp/client_ws.py b/contrib/python/aiohttp/aiohttp/client_ws.py
new file mode 100644
index 0000000000..7c8121f659
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/client_ws.py
@@ -0,0 +1,300 @@
+"""WebSocket client for asyncio."""
+
+import asyncio
+from typing import Any, Optional, cast
+
+import async_timeout
+
+from .client_exceptions import ClientError
+from .client_reqrep import ClientResponse
+from .helpers import call_later, set_result
+from .http import (
+ WS_CLOSED_MESSAGE,
+ WS_CLOSING_MESSAGE,
+ WebSocketError,
+ WSCloseCode,
+ WSMessage,
+ WSMsgType,
+)
+from .http_websocket import WebSocketWriter # WSMessage
+from .streams import EofStream, FlowControlDataQueue
+from .typedefs import (
+ DEFAULT_JSON_DECODER,
+ DEFAULT_JSON_ENCODER,
+ JSONDecoder,
+ JSONEncoder,
+)
+
+
class ClientWebSocketResponse:
    """Client side of an established WebSocket connection.

    Wraps the reader/writer pair produced during the HTTP upgrade and
    implements message send/receive, optional auto-ping/auto-close
    handling and an optional heartbeat keep-alive.
    """

    def __init__(
        self,
        reader: "FlowControlDataQueue[WSMessage]",
        writer: WebSocketWriter,
        protocol: Optional[str],
        response: ClientResponse,
        timeout: float,
        autoclose: bool,
        autoping: bool,
        loop: asyncio.AbstractEventLoop,
        *,
        receive_timeout: Optional[float] = None,
        heartbeat: Optional[float] = None,
        compress: int = 0,
        client_notakeover: bool = False,
    ) -> None:
        self._response = response
        self._conn = response.connection

        self._writer = writer
        self._reader = reader
        self._protocol = protocol
        self._closed = False
        self._closing = False
        self._close_code = None  # type: Optional[int]
        self._timeout = timeout
        self._receive_timeout = receive_timeout
        self._autoclose = autoclose
        self._autoping = autoping
        self._heartbeat = heartbeat
        self._heartbeat_cb: Optional[asyncio.TimerHandle] = None
        if heartbeat is not None:
            # A pong must arrive within half the heartbeat interval.
            self._pong_heartbeat = heartbeat / 2.0
        self._pong_response_cb: Optional[asyncio.TimerHandle] = None
        self._loop = loop
        self._waiting = None  # type: Optional[asyncio.Future[bool]]
        self._exception = None  # type: Optional[BaseException]
        self._compress = compress
        self._client_notakeover = client_notakeover

        self._reset_heartbeat()

    def _cancel_heartbeat(self) -> None:
        # Stop both the ping timer and the pong-timeout timer.
        if self._pong_response_cb is not None:
            self._pong_response_cb.cancel()
            self._pong_response_cb = None

        if self._heartbeat_cb is not None:
            self._heartbeat_cb.cancel()
            self._heartbeat_cb = None

    def _reset_heartbeat(self) -> None:
        # (Re)arm the ping timer; called after every received message.
        self._cancel_heartbeat()

        if self._heartbeat is not None:
            self._heartbeat_cb = call_later(
                self._send_heartbeat, self._heartbeat, self._loop
            )

    def _send_heartbeat(self) -> None:
        if self._heartbeat is not None and not self._closed:
            # fire-and-forget a task is not perfect but maybe ok for
            # sending ping. Otherwise we need a long-living heartbeat
            # task in the class.
            self._loop.create_task(self._writer.ping())

            if self._pong_response_cb is not None:
                self._pong_response_cb.cancel()
            self._pong_response_cb = call_later(
                self._pong_not_received, self._pong_heartbeat, self._loop
            )

    def _pong_not_received(self) -> None:
        # Heartbeat pong timed out: treat the connection as dead.
        if not self._closed:
            self._closed = True
            self._close_code = WSCloseCode.ABNORMAL_CLOSURE
            self._exception = asyncio.TimeoutError()
            self._response.close()

    @property
    def closed(self) -> bool:
        return self._closed

    @property
    def close_code(self) -> Optional[int]:
        return self._close_code

    @property
    def protocol(self) -> Optional[str]:
        """Negotiated WebSocket subprotocol, if any."""
        return self._protocol

    @property
    def compress(self) -> int:
        return self._compress

    @property
    def client_notakeover(self) -> bool:
        return self._client_notakeover

    def get_extra_info(self, name: str, default: Any = None) -> Any:
        """extra info from connection transport"""
        conn = self._response.connection
        if conn is None:
            return default
        transport = conn.transport
        if transport is None:
            return default
        return transport.get_extra_info(name, default)

    def exception(self) -> Optional[BaseException]:
        return self._exception

    async def ping(self, message: bytes = b"") -> None:
        await self._writer.ping(message)

    async def pong(self, message: bytes = b"") -> None:
        await self._writer.pong(message)

    async def send_str(self, data: str, compress: Optional[int] = None) -> None:
        """Send a TEXT frame; *data* must be str."""
        if not isinstance(data, str):
            raise TypeError("data argument must be str (%r)" % type(data))
        await self._writer.send(data, binary=False, compress=compress)

    async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None:
        """Send a BINARY frame; *data* must be bytes-like."""
        if not isinstance(data, (bytes, bytearray, memoryview)):
            raise TypeError("data argument must be byte-ish (%r)" % type(data))
        await self._writer.send(data, binary=True, compress=compress)

    async def send_json(
        self,
        data: Any,
        compress: Optional[int] = None,
        *,
        dumps: JSONEncoder = DEFAULT_JSON_ENCODER,
    ) -> None:
        """Serialize *data* with *dumps* and send it as a TEXT frame."""
        await self.send_str(dumps(data), compress=compress)

    async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
        """Perform the closing handshake; return True if we initiated it."""
        # we need to break `receive()` cycle first,
        # `close()` may be called from different task
        if self._waiting is not None and not self._closed:
            self._reader.feed_data(WS_CLOSING_MESSAGE, 0)
            await self._waiting

        if not self._closed:
            self._cancel_heartbeat()
            self._closed = True
            try:
                await self._writer.close(code, message)
            except asyncio.CancelledError:
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                self._response.close()
                raise
            except Exception as exc:
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                self._exception = exc
                self._response.close()
                return True

            if self._closing:
                self._response.close()
                return True

            # Drain frames until the peer's CLOSE arrives (bounded by
            # self._timeout per read).
            while True:
                try:
                    async with async_timeout.timeout(self._timeout):
                        msg = await self._reader.read()
                except asyncio.CancelledError:
                    self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                    self._response.close()
                    raise
                except Exception as exc:
                    self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                    self._exception = exc
                    self._response.close()
                    return True

                if msg.type == WSMsgType.CLOSE:
                    self._close_code = msg.data
                    self._response.close()
                    return True
        else:
            return False

    async def receive(self, timeout: Optional[float] = None) -> WSMessage:
        """Receive the next message, transparently handling ping/pong/close."""
        while True:
            if self._waiting is not None:
                raise RuntimeError("Concurrent call to receive() is not allowed")

            if self._closed:
                return WS_CLOSED_MESSAGE
            elif self._closing:
                await self.close()
                return WS_CLOSED_MESSAGE

            try:
                self._waiting = self._loop.create_future()
                try:
                    async with async_timeout.timeout(timeout or self._receive_timeout):
                        msg = await self._reader.read()
                        self._reset_heartbeat()
                finally:
                    waiter = self._waiting
                    self._waiting = None
                    set_result(waiter, True)
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                raise
            except EofStream:
                self._close_code = WSCloseCode.OK
                await self.close()
                return WSMessage(WSMsgType.CLOSED, None, None)
            except ClientError:
                self._closed = True
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                return WS_CLOSED_MESSAGE
            except WebSocketError as exc:
                self._close_code = exc.code
                await self.close(code=exc.code)
                return WSMessage(WSMsgType.ERROR, exc, None)
            except Exception as exc:
                self._exception = exc
                self._closing = True
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                await self.close()
                return WSMessage(WSMsgType.ERROR, exc, None)

            if msg.type == WSMsgType.CLOSE:
                self._closing = True
                self._close_code = msg.data
                if not self._closed and self._autoclose:
                    await self.close()
            elif msg.type == WSMsgType.CLOSING:
                self._closing = True
            elif msg.type == WSMsgType.PING and self._autoping:
                await self.pong(msg.data)
                continue
            elif msg.type == WSMsgType.PONG and self._autoping:
                continue

            return msg

    async def receive_str(self, *, timeout: Optional[float] = None) -> str:
        """Receive a message and require it to be TEXT."""
        msg = await self.receive(timeout)
        if msg.type != WSMsgType.TEXT:
            raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str")
        return cast(str, msg.data)

    async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
        """Receive a message and require it to be BINARY."""
        msg = await self.receive(timeout)
        if msg.type != WSMsgType.BINARY:
            raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
        return cast(bytes, msg.data)

    async def receive_json(
        self,
        *,
        loads: JSONDecoder = DEFAULT_JSON_DECODER,
        timeout: Optional[float] = None,
    ) -> Any:
        """Receive a TEXT message and decode it with *loads*."""
        data = await self.receive_str(timeout=timeout)
        return loads(data)

    def __aiter__(self) -> "ClientWebSocketResponse":
        return self

    async def __anext__(self) -> WSMessage:
        # Async iteration stops on any close-related message.
        msg = await self.receive()
        if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
            raise StopAsyncIteration
        return msg
diff --git a/contrib/python/aiohttp/aiohttp/connector.py b/contrib/python/aiohttp/aiohttp/connector.py
new file mode 100644
index 0000000000..4c9a951d6e
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/connector.py
@@ -0,0 +1,1451 @@
+import asyncio
+import functools
+import random
+import sys
+import traceback
+import warnings
+from collections import defaultdict, deque
+from contextlib import suppress
+from http.cookies import SimpleCookie
+from itertools import cycle, islice
+from time import monotonic
+from types import TracebackType
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ DefaultDict,
+ Dict,
+ Iterator,
+ List,
+ Optional,
+ Set,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+
+import attr
+
+from . import hdrs, helpers
+from .abc import AbstractResolver
+from .client_exceptions import (
+ ClientConnectionError,
+ ClientConnectorCertificateError,
+ ClientConnectorError,
+ ClientConnectorSSLError,
+ ClientHttpProxyError,
+ ClientProxyConnectionError,
+ ServerFingerprintMismatch,
+ UnixClientConnectorError,
+ cert_errors,
+ ssl_errors,
+)
+from .client_proto import ResponseHandler
+from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params
+from .helpers import (
+ PY_36,
+ ceil_timeout,
+ get_running_loop,
+ is_ip_address,
+ noop,
+ sentinel,
+)
+from .http import RESPONSES
+from .locks import EventResultOrError
+from .resolver import DefaultResolver
+
+try:
+ import ssl
+
+ SSLContext = ssl.SSLContext
+except ImportError: # pragma: no cover
+ ssl = None # type: ignore[assignment]
+ SSLContext = object # type: ignore[misc,assignment]
+
+
+__all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector")
+
+
+if TYPE_CHECKING: # pragma: no cover
+ from .client import ClientTimeout
+ from .client_reqrep import ConnectionKey
+ from .tracing import Trace
+
+
+class _DeprecationWaiter:
+ __slots__ = ("_awaitable", "_awaited")
+
+ def __init__(self, awaitable: Awaitable[Any]) -> None:
+ self._awaitable = awaitable
+ self._awaited = False
+
+ def __await__(self) -> Any:
+ self._awaited = True
+ return self._awaitable.__await__()
+
+ def __del__(self) -> None:
+ if not self._awaited:
+ warnings.warn(
+ "Connector.close() is a coroutine, "
+ "please use await connector.close()",
+ DeprecationWarning,
+ )
+
+
class Connection:
    """Handle to one acquired (key, protocol) pair from a BaseConnector.

    Closing or releasing hands the protocol back to the connector; an
    instance garbage-collected while still holding a protocol emits a
    ResourceWarning and force-closes the connection.
    """

    # Only populated in __init__ when the loop runs in debug mode.
    _source_traceback = None
    _transport = None

    def __init__(
        self,
        connector: "BaseConnector",
        key: "ConnectionKey",
        protocol: ResponseHandler,
        loop: asyncio.AbstractEventLoop,
    ) -> None:
        self._key = key
        self._connector = connector
        self._loop = loop
        self._protocol = protocol  # type: Optional[ResponseHandler]
        self._callbacks = []  # type: List[Callable[[], None]]

        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

    def __repr__(self) -> str:
        return f"Connection<{self._key}>"

    def __del__(self, _warnings: Any = warnings) -> None:
        # An unclosed connection at GC time is a caller bug: warn, then
        # force-close the protocol so the socket is not leaked.
        if self._protocol is not None:
            if PY_36:
                kwargs = {"source": self}
            else:
                kwargs = {}
            _warnings.warn(f"Unclosed connection {self!r}", ResourceWarning, **kwargs)
            if self._loop.is_closed():
                return

            self._connector._release(self._key, self._protocol, should_close=True)

            context = {"client_connection": self, "message": "Unclosed connection"}
            if self._source_traceback is not None:
                context["source_traceback"] = self._source_traceback
            self._loop.call_exception_handler(context)

    @property
    def loop(self) -> asyncio.AbstractEventLoop:
        """Deprecated access to the event loop this connection belongs to."""
        warnings.warn(
            "connector.loop property is deprecated", DeprecationWarning, stacklevel=2
        )
        return self._loop

    @property
    def transport(self) -> Optional[asyncio.Transport]:
        """Underlying transport, or None once released/closed."""
        if self._protocol is None:
            return None
        return self._protocol.transport

    @property
    def protocol(self) -> Optional[ResponseHandler]:
        """Attached response handler, or None once released/closed."""
        return self._protocol

    def add_callback(self, callback: Callable[[], None]) -> None:
        """Register *callback* to run when the connection is released/closed."""
        if callback is not None:
            self._callbacks.append(callback)

    def _notify_release(self) -> None:
        # Run-and-clear the registered callbacks; individual callback
        # failures are deliberately swallowed.
        callbacks, self._callbacks = self._callbacks[:], []

        for cb in callbacks:
            with suppress(Exception):
                cb()

    def close(self) -> None:
        """Return the protocol to the connector, forcing the socket closed."""
        self._notify_release()

        if self._protocol is not None:
            self._connector._release(self._key, self._protocol, should_close=True)
            self._protocol = None

    def release(self) -> None:
        """Return the protocol to the connector for possible keep-alive reuse."""
        self._notify_release()

        if self._protocol is not None:
            self._connector._release(
                self._key, self._protocol, should_close=self._protocol.should_close
            )
            self._protocol = None

    @property
    def closed(self) -> bool:
        """True when no live, connected protocol is attached."""
        return self._protocol is None or not self._protocol.is_connected()
+
+
+class _TransportPlaceholder:
+ """placeholder for BaseConnector.connect function"""
+
+ def close(self) -> None:
+ pass
+
+
class BaseConnector:
    """Base connector class.

    keepalive_timeout - (optional) Keep-alive timeout.
    force_close - Set to True to force close and do reconnect
        after each request (and between redirects).
    limit - The total number of simultaneous connections.
    limit_per_host - Number of simultaneous connections to one host.
    enable_cleanup_closed - Enables clean-up closed ssl transports.
                            Disabled by default.
    loop - Optional event loop.
    """

    _closed = True  # prevent AttributeError in __del__ if ctor was failed
    _source_traceback = None

    # abort transport after 2 seconds (cleanup broken connections)
    _cleanup_closed_period = 2.0

    def __init__(
        self,
        *,
        keepalive_timeout: Union[object, None, float] = sentinel,
        force_close: bool = False,
        limit: int = 100,
        limit_per_host: int = 0,
        enable_cleanup_closed: bool = False,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:

        if force_close:
            if keepalive_timeout is not None and keepalive_timeout is not sentinel:
                raise ValueError(
                    "keepalive_timeout cannot " "be set if force_close is True"
                )
        else:
            if keepalive_timeout is sentinel:
                keepalive_timeout = 15.0

        loop = get_running_loop(loop)

        self._closed = False
        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        # Pool of idle keep-alive connections: key -> [(protocol, last_used)]
        self._conns = (
            {}
        )  # type: Dict[ConnectionKey, List[Tuple[ResponseHandler, float]]]
        self._limit = limit
        self._limit_per_host = limit_per_host
        self._acquired = set()  # type: Set[ResponseHandler]
        self._acquired_per_host = defaultdict(
            set
        )  # type: DefaultDict[ConnectionKey, Set[ResponseHandler]]
        self._keepalive_timeout = cast(float, keepalive_timeout)
        self._force_close = force_close

        # {host_key: FIFO list of waiters}
        self._waiters = defaultdict(deque)  # type: ignore[var-annotated]

        self._loop = loop
        self._factory = functools.partial(ResponseHandler, loop=loop)

        self.cookies = SimpleCookie()  # type: SimpleCookie[str]

        # start keep-alive connection cleanup task
        self._cleanup_handle: Optional[asyncio.TimerHandle] = None

        # start cleanup closed transports task
        self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None
        self._cleanup_closed_disabled = not enable_cleanup_closed
        self._cleanup_closed_transports = []  # type: List[Optional[asyncio.Transport]]
        self._cleanup_closed()

    def __del__(self, _warnings: Any = warnings) -> None:
        if self._closed:
            return
        if not self._conns:
            return

        conns = [repr(c) for c in self._conns.values()]

        self._close()

        if PY_36:
            kwargs = {"source": self}
        else:
            kwargs = {}
        _warnings.warn(f"Unclosed connector {self!r}", ResourceWarning, **kwargs)
        context = {
            "connector": self,
            "connections": conns,
            "message": "Unclosed connector",
        }
        if self._source_traceback is not None:
            context["source_traceback"] = self._source_traceback
        self._loop.call_exception_handler(context)

    def __enter__(self) -> "BaseConnector":
        # Fixed typo in the user-facing message: "witn" -> "with".
        warnings.warn(
            '"with Connector():" is deprecated, '
            'use "async with Connector():" instead',
            DeprecationWarning,
        )
        return self

    def __exit__(self, *exc: Any) -> None:
        self.close()

    async def __aenter__(self) -> "BaseConnector":
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]] = None,
        exc_value: Optional[BaseException] = None,
        exc_traceback: Optional[TracebackType] = None,
    ) -> None:
        await self.close()

    @property
    def force_close(self) -> bool:
        """Ultimately close connection on releasing if True."""
        return self._force_close

    @property
    def limit(self) -> int:
        """The total number for simultaneous connections.

        If limit is 0 the connector has no limit.
        The default limit size is 100.
        """
        return self._limit

    @property
    def limit_per_host(self) -> int:
        """The limit for simultaneous connections to the same endpoint.

        Endpoints are the same if they are have equal
        (host, port, is_ssl) triple.
        """
        return self._limit_per_host

    def _cleanup(self) -> None:
        """Cleanup unused transports."""
        if self._cleanup_handle:
            self._cleanup_handle.cancel()
            # _cleanup_handle should be unset, otherwise _release() will not
            # recreate it ever!
            self._cleanup_handle = None

        now = self._loop.time()
        timeout = self._keepalive_timeout

        if self._conns:
            connections = {}
            deadline = now - timeout
            for key, conns in self._conns.items():
                alive = []
                for proto, use_time in conns:
                    if proto.is_connected():
                        if use_time - deadline < 0:
                            # idle for longer than the keep-alive timeout
                            transport = proto.transport
                            proto.close()
                            if key.is_ssl and not self._cleanup_closed_disabled:
                                self._cleanup_closed_transports.append(transport)
                        else:
                            alive.append((proto, use_time))
                    else:
                        # peer already dropped the connection
                        transport = proto.transport
                        proto.close()
                        if key.is_ssl and not self._cleanup_closed_disabled:
                            self._cleanup_closed_transports.append(transport)

                if alive:
                    connections[key] = alive

            self._conns = connections

        if self._conns:
            self._cleanup_handle = helpers.weakref_handle(
                self, "_cleanup", timeout, self._loop
            )

    def _drop_acquired_per_host(
        self, key: "ConnectionKey", val: ResponseHandler
    ) -> None:
        """Remove *val* from the per-host acquired set, pruning empty keys."""
        acquired_per_host = self._acquired_per_host
        if key not in acquired_per_host:
            return
        conns = acquired_per_host[key]
        conns.remove(val)
        if not conns:
            del self._acquired_per_host[key]

    def _cleanup_closed(self) -> None:
        """Double confirmation for transport close.

        Some broken ssl servers may leave socket open without proper close.
        """
        if self._cleanup_closed_handle:
            self._cleanup_closed_handle.cancel()

        for transport in self._cleanup_closed_transports:
            if transport is not None:
                transport.abort()

        self._cleanup_closed_transports = []

        if not self._cleanup_closed_disabled:
            self._cleanup_closed_handle = helpers.weakref_handle(
                self, "_cleanup_closed", self._cleanup_closed_period, self._loop
            )

    def close(self) -> Awaitable[None]:
        """Close all opened transports."""
        self._close()
        return _DeprecationWaiter(noop())

    def _close(self) -> None:
        if self._closed:
            return

        self._closed = True

        try:
            if self._loop.is_closed():
                return

            # cancel cleanup task
            if self._cleanup_handle:
                self._cleanup_handle.cancel()

            # cancel cleanup close task
            if self._cleanup_closed_handle:
                self._cleanup_closed_handle.cancel()

            for data in self._conns.values():
                for proto, t0 in data:
                    proto.close()

            for proto in self._acquired:
                proto.close()

            for transport in self._cleanup_closed_transports:
                if transport is not None:
                    transport.abort()

        finally:
            self._conns.clear()
            self._acquired.clear()
            self._waiters.clear()
            self._cleanup_handle = None
            self._cleanup_closed_transports.clear()
            self._cleanup_closed_handle = None

    @property
    def closed(self) -> bool:
        """Is connector closed.

        A readonly property.
        """
        return self._closed

    def _available_connections(self, key: "ConnectionKey") -> int:
        """
        Return number of available connections.

        The limit, limit_per_host and the connection key are taken into account.

        If it returns less than 1 means that there are no connections
        available.
        """
        if self._limit:
            # total calc available connections
            available = self._limit - len(self._acquired)

            # check limit per host
            if (
                self._limit_per_host
                and available > 0
                and key in self._acquired_per_host
            ):
                acquired = self._acquired_per_host.get(key)
                assert acquired is not None
                available = self._limit_per_host - len(acquired)

        elif self._limit_per_host and key in self._acquired_per_host:
            # check limit per host
            acquired = self._acquired_per_host.get(key)
            assert acquired is not None
            available = self._limit_per_host - len(acquired)
        else:
            available = 1

        return available

    async def connect(
        self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
    ) -> Connection:
        """Get from pool or create new connection."""
        key = req.connection_key
        available = self._available_connections(key)

        # Wait if there are no available connections or if there are/were
        # waiters (i.e. don't steal connection from a waiter about to wake up)
        if available <= 0 or key in self._waiters:
            fut = self._loop.create_future()

            # This connection will now count towards the limit.
            self._waiters[key].append(fut)

            if traces:
                for trace in traces:
                    await trace.send_connection_queued_start()

            try:
                await fut
            except BaseException as e:
                if key in self._waiters:
                    # remove a waiter even if it was cancelled, normally it's
                    # removed when it's notified
                    try:
                        self._waiters[key].remove(fut)
                    except ValueError:  # fut may no longer be in list
                        pass

                raise e
            finally:
                if key in self._waiters and not self._waiters[key]:
                    del self._waiters[key]

            if traces:
                for trace in traces:
                    await trace.send_connection_queued_end()

        proto = self._get(key)
        if proto is None:
            placeholder = cast(ResponseHandler, _TransportPlaceholder())
            self._acquired.add(placeholder)
            self._acquired_per_host[key].add(placeholder)

            if traces:
                for trace in traces:
                    await trace.send_connection_create_start()

            try:
                proto = await self._create_connection(req, traces, timeout)
                if self._closed:
                    proto.close()
                    raise ClientConnectionError("Connector is closed.")
            except BaseException:
                if not self._closed:
                    self._acquired.remove(placeholder)
                    self._drop_acquired_per_host(key, placeholder)
                    self._release_waiter()
                raise
            else:
                if not self._closed:
                    self._acquired.remove(placeholder)
                    self._drop_acquired_per_host(key, placeholder)

            if traces:
                for trace in traces:
                    await trace.send_connection_create_end()
        else:
            if traces:
                # Acquire the connection to prevent race conditions with limits
                placeholder = cast(ResponseHandler, _TransportPlaceholder())
                self._acquired.add(placeholder)
                self._acquired_per_host[key].add(placeholder)
                for trace in traces:
                    await trace.send_connection_reuseconn()
                self._acquired.remove(placeholder)
                self._drop_acquired_per_host(key, placeholder)

        self._acquired.add(proto)
        self._acquired_per_host[key].add(proto)
        return Connection(self, key, proto, self._loop)

    def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]:
        """Pop a live, non-expired idle connection for *key*, else None."""
        try:
            conns = self._conns[key]
        except KeyError:
            return None

        t1 = self._loop.time()
        while conns:
            proto, t0 = conns.pop()
            if proto.is_connected():
                if t1 - t0 > self._keepalive_timeout:
                    transport = proto.transport
                    proto.close()
                    # only for SSL transports
                    if key.is_ssl and not self._cleanup_closed_disabled:
                        self._cleanup_closed_transports.append(transport)
                else:
                    if not conns:
                        # The very last connection was reclaimed: drop the key
                        del self._conns[key]
                    return proto
            else:
                transport = proto.transport
                proto.close()
                if key.is_ssl and not self._cleanup_closed_disabled:
                    self._cleanup_closed_transports.append(transport)

        # No more connections: drop the key
        del self._conns[key]
        return None

    def _release_waiter(self) -> None:
        """
        Iterates over all waiters until one to be released is found.

        The one to be released is not finished and
        belongs to a host that has available connections.
        """
        if not self._waiters:
            return

        # Having the dict keys ordered this avoids to iterate
        # at the same order at each call.
        queues = list(self._waiters.keys())
        random.shuffle(queues)

        for key in queues:
            if self._available_connections(key) < 1:
                continue

            waiters = self._waiters[key]
            while waiters:
                waiter = waiters.popleft()
                if not waiter.done():
                    waiter.set_result(None)
                    return

    def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> None:
        """Drop *proto* from the acquired sets and wake one pending waiter."""
        if self._closed:
            # acquired connection is already released on connector closing
            return

        try:
            self._acquired.remove(proto)
            self._drop_acquired_per_host(key, proto)
        except KeyError:  # pragma: no cover
            # this may be result of undetermenistic order of objects
            # finalization due garbage collection.
            pass
        else:
            self._release_waiter()

    def _release(
        self,
        key: "ConnectionKey",
        protocol: ResponseHandler,
        *,
        should_close: bool = False,
    ) -> None:
        """Return *protocol* to the pool, or close it when keep-alive is off."""
        if self._closed:
            # acquired connection is already released on connector closing
            return

        self._release_acquired(key, protocol)

        if self._force_close:
            should_close = True

        if should_close or protocol.should_close:
            transport = protocol.transport
            protocol.close()

            if key.is_ssl and not self._cleanup_closed_disabled:
                self._cleanup_closed_transports.append(transport)
        else:
            conns = self._conns.get(key)
            if conns is None:
                conns = self._conns[key] = []
            conns.append((protocol, self._loop.time()))

            if self._cleanup_handle is None:
                self._cleanup_handle = helpers.weakref_handle(
                    self, "_cleanup", self._keepalive_timeout, self._loop
                )

    async def _create_connection(
        self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
    ) -> ResponseHandler:
        """Establish a new connection; implemented by subclasses."""
        raise NotImplementedError()
+
+
+class _DNSCacheTable:
+ def __init__(self, ttl: Optional[float] = None) -> None:
+ self._addrs_rr = (
+ {}
+ ) # type: Dict[Tuple[str, int], Tuple[Iterator[Dict[str, Any]], int]]
+ self._timestamps = {} # type: Dict[Tuple[str, int], float]
+ self._ttl = ttl
+
+ def __contains__(self, host: object) -> bool:
+ return host in self._addrs_rr
+
+ def add(self, key: Tuple[str, int], addrs: List[Dict[str, Any]]) -> None:
+ self._addrs_rr[key] = (cycle(addrs), len(addrs))
+
+ if self._ttl:
+ self._timestamps[key] = monotonic()
+
+ def remove(self, key: Tuple[str, int]) -> None:
+ self._addrs_rr.pop(key, None)
+
+ if self._ttl:
+ self._timestamps.pop(key, None)
+
+ def clear(self) -> None:
+ self._addrs_rr.clear()
+ self._timestamps.clear()
+
+ def next_addrs(self, key: Tuple[str, int]) -> List[Dict[str, Any]]:
+ loop, length = self._addrs_rr[key]
+ addrs = list(islice(loop, length))
+ # Consume one more element to shift internal state of `cycle`
+ next(loop)
+ return addrs
+
+ def expired(self, key: Tuple[str, int]) -> bool:
+ if self._ttl is None:
+ return False
+
+ return self._timestamps[key] + self._ttl < monotonic()
+
+
+class TCPConnector(BaseConnector):
+ """TCP connector.
+
+ verify_ssl - Set to True to check ssl certifications.
+ fingerprint - Pass the binary sha256
+ digest of the expected certificate in DER format to verify
+ that the certificate the server presents matches. See also
+ https://en.wikipedia.org/wiki/Transport_Layer_Security#Certificate_pinning
+ resolver - Enable DNS lookups and use this
+ resolver
+ use_dns_cache - Use memory cache for DNS lookups.
+ ttl_dns_cache - Max seconds having cached a DNS entry, None forever.
+ family - socket address family
+ local_addr - local tuple of (host, port) to bind socket to
+
+ keepalive_timeout - (optional) Keep-alive timeout.
+ force_close - Set to True to force close and do reconnect
+ after each request (and between redirects).
+ limit - The total number of simultaneous connections.
+ limit_per_host - Number of simultaneous connections to one host.
+ enable_cleanup_closed - Enables clean-up closed ssl transports.
+ Disabled by default.
+ loop - Optional event loop.
+ """
+
    def __init__(
        self,
        *,
        verify_ssl: bool = True,
        fingerprint: Optional[bytes] = None,
        use_dns_cache: bool = True,
        ttl_dns_cache: Optional[int] = 10,
        family: int = 0,
        ssl_context: Optional[SSLContext] = None,
        ssl: Union[None, bool, Fingerprint, SSLContext] = None,
        local_addr: Optional[Tuple[str, int]] = None,
        resolver: Optional[AbstractResolver] = None,
        keepalive_timeout: Union[None, float, object] = sentinel,
        force_close: bool = False,
        limit: int = 100,
        limit_per_host: int = 0,
        enable_cleanup_closed: bool = False,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ):
        super().__init__(
            keepalive_timeout=keepalive_timeout,
            force_close=force_close,
            limit=limit,
            limit_per_host=limit_per_host,
            enable_cleanup_closed=enable_cleanup_closed,
            loop=loop,
        )

        # Collapse legacy verify_ssl/ssl_context/fingerprint arguments and
        # the newer ``ssl`` argument into a single setting.
        self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
        if resolver is None:
            resolver = DefaultResolver(loop=self._loop)
        self._resolver = resolver

        self._use_dns_cache = use_dns_cache
        self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache)
        # In-flight DNS lookups keyed by (host, port) so concurrent requests
        # share one resolution instead of dog-piling the resolver.
        self._throttle_dns_events = (
            {}
        )  # type: Dict[Tuple[str, int], EventResultOrError]
        self._family = family
        self._local_addr = local_addr
+
+ def close(self) -> Awaitable[None]:
+ """Close all ongoing DNS calls."""
+ for ev in self._throttle_dns_events.values():
+ ev.cancel()
+
+ return super().close()
+
+ @property
+ def family(self) -> int:
+ """Socket family like AF_INET."""
+ return self._family
+
+ @property
+ def use_dns_cache(self) -> bool:
+ """True if local DNS caching is enabled."""
+ return self._use_dns_cache
+
+ def clear_dns_cache(
+ self, host: Optional[str] = None, port: Optional[int] = None
+ ) -> None:
+ """Remove specified host/port or clear all dns local cache."""
+ if host is not None and port is not None:
+ self._cached_hosts.remove((host, port))
+ elif host is not None or port is not None:
+ raise ValueError("either both host and port " "or none of them are allowed")
+ else:
+ self._cached_hosts.clear()
+
    async def _resolve_host(
        self, host: str, port: int, traces: Optional[List["Trace"]] = None
    ) -> List[Dict[str, Any]]:
        """Resolve *host*/*port* into a list of addr-info dicts.

        Served from the DNS cache when enabled and not expired; concurrent
        lookups for the same (host, port) are coalesced through an
        EventResultOrError so only one resolver call is in flight.
        """
        # Literal IP addresses need no resolution.
        if is_ip_address(host):
            return [
                {
                    "hostname": host,
                    "host": host,
                    "port": port,
                    "family": self._family,
                    "proto": 0,
                    "flags": 0,
                }
            ]

        if not self._use_dns_cache:

            if traces:
                for trace in traces:
                    await trace.send_dns_resolvehost_start(host)

            res = await self._resolver.resolve(host, port, family=self._family)

            if traces:
                for trace in traces:
                    await trace.send_dns_resolvehost_end(host)

            return res

        key = (host, port)

        if (key in self._cached_hosts) and (not self._cached_hosts.expired(key)):
            # get result early, before any await (#4014)
            result = self._cached_hosts.next_addrs(key)

            if traces:
                for trace in traces:
                    await trace.send_dns_cache_hit(host)
            return result

        if key in self._throttle_dns_events:
            # get event early, before any await (#4014)
            event = self._throttle_dns_events[key]
            if traces:
                for trace in traces:
                    await trace.send_dns_cache_hit(host)
            await event.wait()
        else:
            # update dict early, before any await (#4014)
            self._throttle_dns_events[key] = EventResultOrError(self._loop)
            if traces:
                for trace in traces:
                    await trace.send_dns_cache_miss(host)
            try:

                if traces:
                    for trace in traces:
                        await trace.send_dns_resolvehost_start(host)

                addrs = await self._resolver.resolve(host, port, family=self._family)
                if traces:
                    for trace in traces:
                        await trace.send_dns_resolvehost_end(host)

                self._cached_hosts.add(key, addrs)
                self._throttle_dns_events[key].set()
            except BaseException as e:
                # any DNS exception, independently of the implementation
                # is set for the waiters to raise the same exception.
                self._throttle_dns_events[key].set(exc=e)
                raise
            finally:
                self._throttle_dns_events.pop(key)

        return self._cached_hosts.next_addrs(key)
+
+ async def _create_connection(
+ self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
+ ) -> ResponseHandler:
+ """Create connection.
+
+ Has same keyword arguments as BaseEventLoop.create_connection.
+ """
+ if req.proxy:
+ _, proto = await self._create_proxy_connection(req, traces, timeout)
+ else:
+ _, proto = await self._create_direct_connection(req, traces, timeout)
+
+ return proto
+
+ @staticmethod
+ @functools.lru_cache(None)
+ def _make_ssl_context(verified: bool) -> SSLContext:
+ if verified:
+ return ssl.create_default_context()
+ else:
+ sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+ sslcontext.options |= ssl.OP_NO_SSLv2
+ sslcontext.options |= ssl.OP_NO_SSLv3
+ sslcontext.check_hostname = False
+ sslcontext.verify_mode = ssl.CERT_NONE
+ try:
+ sslcontext.options |= ssl.OP_NO_COMPRESSION
+ except AttributeError as attr_err:
+ warnings.warn(
+ "{!s}: The Python interpreter is compiled "
+ "against OpenSSL < 1.0.0. Ref: "
+ "https://docs.python.org/3/library/ssl.html"
+ "#ssl.OP_NO_COMPRESSION".format(attr_err),
+ )
+ sslcontext.set_default_verify_paths()
+ return sslcontext
+
+ def _get_ssl_context(self, req: "ClientRequest") -> Optional[SSLContext]:
+ """Logic to get the correct SSL context
+
+ 0. if req.ssl is false, return None
+
+ 1. if ssl_context is specified in req, use it
+ 2. if _ssl_context is specified in self, use it
+ 3. otherwise:
+ 1. if verify_ssl is not specified in req, use self.ssl_context
+ (will generate a default context according to self.verify_ssl)
+ 2. if verify_ssl is True in req, generate a default SSL context
+ 3. if verify_ssl is False in req, generate a SSL context that
+ won't verify
+ """
+ if req.is_ssl():
+ if ssl is None: # pragma: no cover
+ raise RuntimeError("SSL is not supported.")
+ sslcontext = req.ssl
+ if isinstance(sslcontext, ssl.SSLContext):
+ return sslcontext
+ if sslcontext is not None:
+ # not verified or fingerprinted
+ return self._make_ssl_context(False)
+ sslcontext = self._ssl
+ if isinstance(sslcontext, ssl.SSLContext):
+ return sslcontext
+ if sslcontext is not None:
+ # not verified or fingerprinted
+ return self._make_ssl_context(False)
+ return self._make_ssl_context(True)
+ else:
+ return None
+
+ def _get_fingerprint(self, req: "ClientRequest") -> Optional["Fingerprint"]:
+ ret = req.ssl
+ if isinstance(ret, Fingerprint):
+ return ret
+ ret = self._ssl
+ if isinstance(ret, Fingerprint):
+ return ret
+ return None
+
    async def _wrap_create_connection(
        self,
        *args: Any,
        req: "ClientRequest",
        timeout: "ClientTimeout",
        client_error: Type[Exception] = ClientConnectorError,
    ) -> Tuple[asyncio.Transport, ResponseHandler]:
        """Call ``loop.create_connection`` translating low-level failures.

        Certificate problems become ClientConnectorCertificateError, other
        SSL failures ClientConnectorSSLError, and any remaining OSError is
        re-raised as *client_error* (ClientConnectorError by default).
        """
        try:
            async with ceil_timeout(timeout.sock_connect):
                return await self._loop.create_connection(*args, **kwargs)  # type: ignore[return-value] # noqa
        except cert_errors as exc:
            raise ClientConnectorCertificateError(req.connection_key, exc) from exc
        except ssl_errors as exc:
            raise ClientConnectorSSLError(req.connection_key, exc) from exc
        except OSError as exc:
            raise client_error(req.connection_key, exc) from exc
+
+ def _fail_on_no_start_tls(self, req: "ClientRequest") -> None:
+ """Raise a :py:exc:`RuntimeError` on missing ``start_tls()``.
+
+ One case is that :py:meth:`asyncio.loop.start_tls` is not yet
+ implemented under Python 3.6. It is necessary for TLS-in-TLS so
+ that it is possible to send HTTPS queries through HTTPS proxies.
+
+ This doesn't affect regular HTTP requests, though.
+ """
+ if not req.is_ssl():
+ return
+
+ proxy_url = req.proxy
+ assert proxy_url is not None
+ if proxy_url.scheme != "https":
+ return
+
+ self._check_loop_for_start_tls()
+
+ def _check_loop_for_start_tls(self) -> None:
+ try:
+ self._loop.start_tls
+ except AttributeError as attr_exc:
+ raise RuntimeError(
+ "An HTTPS request is being sent through an HTTPS proxy. "
+ "This needs support for TLS in TLS but it is not implemented "
+ "in your runtime for the stdlib asyncio.\n\n"
+ "Please upgrade to Python 3.7 or higher. For more details, "
+ "please see:\n"
+ "* https://bugs.python.org/issue37179\n"
+ "* https://github.com/python/cpython/pull/28073\n"
+ "* https://docs.aiohttp.org/en/stable/"
+ "client_advanced.html#proxy-support\n"
+ "* https://github.com/aio-libs/aiohttp/discussions/6044\n",
+ ) from attr_exc
+
+ def _loop_supports_start_tls(self) -> bool:
+ try:
+ self._check_loop_for_start_tls()
+ except RuntimeError:
+ return False
+ else:
+ return True
+
+ def _warn_about_tls_in_tls(
+ self,
+ underlying_transport: asyncio.Transport,
+ req: "ClientRequest",
+ ) -> None:
+ """Issue a warning if the requested URL has HTTPS scheme."""
+ if req.request_info.url.scheme != "https":
+ return
+
+ asyncio_supports_tls_in_tls = getattr(
+ underlying_transport,
+ "_start_tls_compatible",
+ False,
+ )
+
+ if asyncio_supports_tls_in_tls:
+ return
+
+ warnings.warn(
+ "An HTTPS request is being sent through an HTTPS proxy. "
+ "This support for TLS in TLS is known to be disabled "
+ "in the stdlib asyncio. This is why you'll probably see "
+ "an error in the log below.\n\n"
+ "It is possible to enable it via monkeypatching under "
+ "Python 3.7 or higher. For more details, see:\n"
+ "* https://bugs.python.org/issue37179\n"
+ "* https://github.com/python/cpython/pull/28073\n\n"
+ "You can temporarily patch this as follows:\n"
+ "* https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support\n"
+ "* https://github.com/aio-libs/aiohttp/discussions/6044\n",
+ RuntimeWarning,
+ source=self,
+ # Why `4`? At least 3 of the calls in the stack originate
+ # from the methods in this class.
+ stacklevel=3,
+ )
+
    async def _start_tls_connection(
        self,
        underlying_transport: asyncio.Transport,
        req: "ClientRequest",
        timeout: "ClientTimeout",
        client_error: Type[Exception] = ClientConnectorError,
    ) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
        """Wrap the raw TCP transport with TLS."""
        tls_proto = self._factory()  # Create a brand new proto for TLS

        # Safety of the `cast()` call here is based on the fact that
        # internally `_get_ssl_context()` only returns `None` when
        # `req.is_ssl()` evaluates to `False` which is never gonna happen
        # in this code path. Of course, it's rather fragile
        # maintainability-wise but this is to be solved separately.
        sslcontext = cast(ssl.SSLContext, self._get_ssl_context(req))

        try:
            async with ceil_timeout(timeout.sock_connect):
                try:
                    tls_transport = await self._loop.start_tls(
                        underlying_transport,
                        tls_proto,
                        sslcontext,
                        server_hostname=req.host,
                        ssl_handshake_timeout=timeout.total,
                    )
                except BaseException:
                    # We need to close the underlying transport since
                    # `start_tls()` probably failed before it had a
                    # chance to do this:
                    underlying_transport.close()
                    raise
        except cert_errors as exc:
            raise ClientConnectorCertificateError(req.connection_key, exc) from exc
        except ssl_errors as exc:
            raise ClientConnectorSSLError(req.connection_key, exc) from exc
        except OSError as exc:
            raise client_error(req.connection_key, exc) from exc
        except TypeError as type_err:
            # Example cause looks like this:
            # TypeError: transport <asyncio.sslproto._SSLProtocolTransport
            #  object at 0x7f760615e460> is not supported by start_tls()

            raise ClientConnectionError(
                "Cannot initialize a TLS-in-TLS connection to host "
                f"{req.host!s}:{req.port:d} through an underlying connection "
                f"to an HTTPS proxy {req.proxy!s} ssl:{req.ssl or 'default'} "
                f"[{type_err!s}]"
            ) from type_err
        else:
            # Success: attach the new transport to the fresh TLS protocol.
            tls_proto.connection_made(
                tls_transport
            )  # Kick the state machine of the new TLS protocol

        return tls_transport, tls_proto
+
    async def _create_direct_connection(
        self,
        req: "ClientRequest",
        traces: List["Trace"],
        timeout: "ClientTimeout",
        *,
        client_error: Type[Exception] = ClientConnectorError,
    ) -> Tuple[asyncio.Transport, ResponseHandler]:
        """Resolve the target host and try each address until one connects.

        Raises the last ClientConnectorError (or fingerprint mismatch) when
        every resolved address fails.
        """
        sslcontext = self._get_ssl_context(req)
        fingerprint = self._get_fingerprint(req)

        host = req.url.raw_host
        assert host is not None
        port = req.port
        assert port is not None
        host_resolved = asyncio.ensure_future(
            self._resolve_host(host, port, traces=traces), loop=self._loop
        )
        try:
            # Cancelling this lookup should not cancel the underlying lookup
            #  or else the cancel event will get broadcast to all the waiters
            #  across all connections.
            hosts = await asyncio.shield(host_resolved)
        except asyncio.CancelledError:

            def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None:
                with suppress(Exception, asyncio.CancelledError):
                    fut.result()

            host_resolved.add_done_callback(drop_exception)
            raise
        except OSError as exc:
            # in case of proxy it is not ClientProxyConnectionError
            # it is problem of resolving proxy ip itself
            raise ClientConnectorError(req.connection_key, exc) from exc

        last_exc = None  # type: Optional[Exception]

        for hinfo in hosts:
            host = hinfo["host"]
            port = hinfo["port"]

            try:
                transp, proto = await self._wrap_create_connection(
                    self._factory,
                    host,
                    port,
                    timeout=timeout,
                    ssl=sslcontext,
                    family=hinfo["family"],
                    proto=hinfo["proto"],
                    flags=hinfo["flags"],
                    server_hostname=hinfo["hostname"] if sslcontext else None,
                    local_addr=self._local_addr,
                    req=req,
                    client_error=client_error,
                )
            except ClientConnectorError as exc:
                # remember the failure and try the next resolved address
                last_exc = exc
                continue

            if req.is_ssl() and fingerprint:
                try:
                    fingerprint.check(transp)
                except ServerFingerprintMismatch as exc:
                    transp.close()
                    if not self._cleanup_closed_disabled:
                        self._cleanup_closed_transports.append(transp)
                    last_exc = exc
                    continue

            return transp, proto
        else:
            assert last_exc is not None
            raise last_exc
+
    async def _create_proxy_connection(
        self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
    ) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
        """Open a connection through the configured HTTP(S) proxy.

        Plain HTTP requests are sent to the proxy directly.  HTTPS
        requests first issue a CONNECT tunnel and then upgrade the
        tunneled connection to TLS — via ``loop.start_tls()`` when the
        running loop supports it, otherwise by duplicating the raw
        socket and reconnecting over it.
        """
        self._fail_on_no_start_tls(req)
        runtime_has_start_tls = self._loop_supports_start_tls()
        if req.proxy.scheme != "https":
            # start_tls is only needed for TLS-in-TLS, i.e. HTTPS proxies.
            runtime_has_start_tls = False

        headers = {}  # type: Dict[str, str]
        if req.proxy_headers is not None:
            headers = req.proxy_headers  # type: ignore[assignment]
        headers[hdrs.HOST] = req.headers[hdrs.HOST]

        url = req.proxy
        assert url is not None
        proxy_req = ClientRequest(
            hdrs.METH_GET,
            url,
            headers=headers,
            auth=req.proxy_auth,
            loop=self._loop,
            ssl=req.ssl,
        )

        # create connection to proxy server
        transport, proto = await self._create_direct_connection(
            proxy_req, [], timeout, client_error=ClientProxyConnectionError
        )

        # Many HTTP proxies has buggy keepalive support.  Let's not
        # reuse connection but close it after processing every
        # response.
        proto.force_close()

        # Move proxy auth into the header appropriate for the request:
        # the proxy sees it for plain HTTP; the CONNECT request carries
        # it for tunneled HTTPS.
        auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None)
        if auth is not None:
            if not req.is_ssl():
                req.headers[hdrs.PROXY_AUTHORIZATION] = auth
            else:
                proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth

        if req.is_ssl():
            if runtime_has_start_tls:
                self._warn_about_tls_in_tls(transport, req)

            # For HTTPS requests over HTTP proxy
            # we must notify proxy to tunnel connection
            # so we send CONNECT command:
            # CONNECT www.python.org:443 HTTP/1.1
            # Host: www.python.org
            #
            # next we must do TLS handshake and so on
            # to do this we must wrap raw socket into secure one
            # asyncio handles this perfectly
            proxy_req.method = hdrs.METH_CONNECT
            proxy_req.url = req.url
            key = attr.evolve(
                req.connection_key, proxy=None, proxy_auth=None, proxy_headers_hash=None
            )
            conn = Connection(self, key, proto, self._loop)
            proxy_resp = await proxy_req.send(conn)
            try:
                protocol = conn._protocol
                assert protocol is not None

                # read_until_eof=True will ensure the connection isn't closed
                # once the response is received and processed allowing
                # START_TLS to work on the connection below.
                protocol.set_response_params(read_until_eof=runtime_has_start_tls)
                resp = await proxy_resp.start(conn)
            except BaseException:
                proxy_resp.close()
                conn.close()
                raise
            else:
                # Detach protocol/transport from the Connection wrapper so
                # closing it later does not close the tunneled transport.
                conn._protocol = None
                conn._transport = None
                try:
                    if resp.status != 200:
                        message = resp.reason
                        if message is None:
                            message = RESPONSES[resp.status][0]
                        raise ClientHttpProxyError(
                            proxy_resp.request_info,
                            resp.history,
                            status=resp.status,
                            message=message,
                            headers=resp.headers,
                        )
                    if not runtime_has_start_tls:
                        rawsock = transport.get_extra_info("socket", default=None)
                        if rawsock is None:
                            raise RuntimeError(
                                "Transport does not expose socket instance"
                            )
                        # Duplicate the socket, so now we can close proxy transport
                        rawsock = rawsock.dup()
                except BaseException:
                    # It shouldn't be closed in `finally` because it's fed to
                    # `loop.start_tls()` and the docs say not to touch it after
                    # passing there.
                    transport.close()
                    raise
                finally:
                    if not runtime_has_start_tls:
                        # The duplicated socket carries the tunnel from here
                        # on; the original proxy transport is done.
                        transport.close()

                if not runtime_has_start_tls:
                    # HTTP proxy with support for upgrade to HTTPS
                    sslcontext = self._get_ssl_context(req)
                    return await self._wrap_create_connection(
                        self._factory,
                        timeout=timeout,
                        ssl=sslcontext,
                        sock=rawsock,
                        server_hostname=req.host,
                        req=req,
                    )

                return await self._start_tls_connection(
                    # Access the old transport for the last time before it's
                    # closed and forgotten forever:
                    transport,
                    req=req,
                    timeout=timeout,
                )
            finally:
                proxy_resp.close()

        # Plain HTTP through the proxy: hand back the proxy connection.
        return transport, proto
+
+
class UnixConnector(BaseConnector):
    """Connector that talks to the server over a Unix domain socket.

    path - Unix socket path.
    keepalive_timeout - (optional) Keep-alive timeout.
    force_close - Set to True to force close and do reconnect
        after each request (and between redirects).
    limit - The total number of simultaneous connections.
    limit_per_host - Number of simultaneous connections to one host.
    loop - Optional event loop.
    """

    def __init__(
        self,
        path: str,
        force_close: bool = False,
        keepalive_timeout: Union[object, float, None] = sentinel,
        limit: int = 100,
        limit_per_host: int = 0,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        super().__init__(
            force_close=force_close,
            keepalive_timeout=keepalive_timeout,
            limit=limit,
            limit_per_host=limit_per_host,
            loop=loop,
        )
        self._path = path

    @property
    def path(self) -> str:
        """Path to unix socket."""
        return self._path

    async def _create_connection(
        self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
    ) -> ResponseHandler:
        # Open the socket under the configured connect timeout; OS-level
        # failures are mapped onto the connector's exception type.
        try:
            async with ceil_timeout(timeout.sock_connect):
                _, protocol = await self._loop.create_unix_connection(
                    self._factory, self._path
                )
        except OSError as os_err:
            raise UnixClientConnectorError(
                self.path, req.connection_key, os_err
            ) from os_err

        return cast(ResponseHandler, protocol)
+
+
class NamedPipeConnector(BaseConnector):
    """Connector for Windows named pipes.

    Only supported by the proactor event loop.
    See also: https://docs.python.org/3.7/library/asyncio-eventloop.html

    path - Windows named pipe path.
    keepalive_timeout - (optional) Keep-alive timeout.
    force_close - Set to True to force close and do reconnect
        after each request (and between redirects).
    limit - The total number of simultaneous connections.
    limit_per_host - Number of simultaneous connections to one host.
    loop - Optional event loop.
    """

    def __init__(
        self,
        path: str,
        force_close: bool = False,
        keepalive_timeout: Union[object, float, None] = sentinel,
        limit: int = 100,
        limit_per_host: int = 0,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        super().__init__(
            force_close=force_close,
            keepalive_timeout=keepalive_timeout,
            limit=limit,
            limit_per_host=limit_per_host,
            loop=loop,
        )
        # Named pipes require the (Windows-only) proactor event loop.
        proactor_cls = asyncio.ProactorEventLoop  # type: ignore[attr-defined]
        if not isinstance(self._loop, proactor_cls):
            raise RuntimeError(
                "Named Pipes only available in proactor loop under windows"
            )
        self._path = path

    @property
    def path(self) -> str:
        """Path to the named pipe."""
        return self._path

    async def _create_connection(
        self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
    ) -> ResponseHandler:
        try:
            async with ceil_timeout(timeout.sock_connect):
                _, protocol = await self._loop.create_pipe_connection(  # type: ignore[attr-defined] # noqa: E501
                    self._factory, self._path
                )
                # Yield to the event loop once so connection_made() runs and
                # the protocol's transport attribute is populated before
                # client.py's `assert conn.transport is not None` executes.
                # (The alternative would be assigning proto.transport here.)
                await asyncio.sleep(0)
        except OSError as os_err:
            raise ClientConnectorError(req.connection_key, os_err) from os_err

        return cast(ResponseHandler, protocol)
diff --git a/contrib/python/aiohttp/aiohttp/cookiejar.py b/contrib/python/aiohttp/aiohttp/cookiejar.py
new file mode 100644
index 0000000000..0a2656634d
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/cookiejar.py
@@ -0,0 +1,413 @@
+import asyncio
+import contextlib
+import datetime
+import os # noqa
+import pathlib
+import pickle
+import re
+from collections import defaultdict
+from http.cookies import BaseCookie, Morsel, SimpleCookie
+from typing import ( # noqa
+ DefaultDict,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Mapping,
+ Optional,
+ Set,
+ Tuple,
+ Union,
+ cast,
+)
+
+from yarl import URL
+
+from .abc import AbstractCookieJar, ClearCookiePredicate
+from .helpers import is_ip_address, next_whole_second
+from .typedefs import LooseCookies, PathLike, StrOrURL
+
+__all__ = ("CookieJar", "DummyCookieJar")
+
+
+CookieItem = Union[str, "Morsel[str]"]
+
+
class CookieJar(AbstractCookieJar):
    """Implements cookie storage adhering to RFC 6265."""

    # Matches one token of a cookie-date string (RFC 6265 section 5.1.1):
    # optional delimiter characters followed by a run of non-delimiters.
    DATE_TOKENS_RE = re.compile(
        r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*"
        r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)"
    )

    # hh:mm:ss component of a cookie date.
    DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})")

    DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})")

    # Month name; the index of the alternative that matched gives the
    # month number (consumed via match.lastindex in _parse_date).
    DATE_MONTH_RE = re.compile(
        "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" "(aug)|(sep)|(oct)|(nov)|(dec)",
        re.I,
    )

    DATE_YEAR_RE = re.compile(r"(\d{2,4})")

    # Sentinel "never expires" upper bound for expiration bookkeeping.
    MAX_TIME = datetime.datetime.max.replace(tzinfo=datetime.timezone.utc)

    # NOTE(review): this is a naive (tz-less) datetime, unlike the aware
    # MAX_TIME above; it is only used where MAX_TIME.timestamp() overflows
    # (32-bit platforms, see #4515) -- confirm aware/naive values cannot
    # end up compared against each other in _expire_cookie/clear.
    MAX_32BIT_TIME = datetime.datetime.utcfromtimestamp(2 ** 31 - 1)

    def __init__(
        self,
        *,
        unsafe: bool = False,
        quote_cookie: bool = True,
        treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        """Initialize the jar.

        unsafe - also accept cookies from IP-address hosts.
        quote_cookie - emit quoted values (SimpleCookie) instead of raw
            ones (BaseCookie) from filter_cookies().
        treat_as_secure_origin - origin(s) to treat as secure even when
            the request scheme is not https/wss.
        loop - optional event loop, forwarded to AbstractCookieJar.
        """
        super().__init__(loop=loop)
        # Storage is keyed by domain: domain -> SimpleCookie of morsels.
        self._cookies = defaultdict(
            SimpleCookie
        )  # type: DefaultDict[str, SimpleCookie[str]]
        # (domain, name) pairs that must only be sent to that exact host.
        self._host_only_cookies = set()  # type: Set[Tuple[str, str]]
        self._unsafe = unsafe
        self._quote_cookie = quote_cookie
        # Normalize treat_as_secure_origin to a list of URL origins.
        if treat_as_secure_origin is None:
            treat_as_secure_origin = []
        elif isinstance(treat_as_secure_origin, URL):
            treat_as_secure_origin = [treat_as_secure_origin.origin()]
        elif isinstance(treat_as_secure_origin, str):
            treat_as_secure_origin = [URL(treat_as_secure_origin).origin()]
        else:
            treat_as_secure_origin = [
                URL(url).origin() if isinstance(url, str) else url.origin()
                for url in treat_as_secure_origin
            ]
        self._treat_as_secure_origin = treat_as_secure_origin
        self._next_expiration = next_whole_second()
        self._expirations = {}  # type: Dict[Tuple[str, str], datetime.datetime]
        # #4515: datetime.max may not be representable on 32-bit platforms
        self._max_time = self.MAX_TIME
        try:
            self._max_time.timestamp()
        except OverflowError:
            self._max_time = self.MAX_32BIT_TIME

    def save(self, file_path: PathLike) -> None:
        """Pickle the whole cookie store to *file_path*."""
        file_path = pathlib.Path(file_path)
        with file_path.open(mode="wb") as f:
            pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL)

    def load(self, file_path: PathLike) -> None:
        """Replace the cookie store with one unpickled from *file_path*.

        NOTE(review): pickle.load on an untrusted file can execute
        arbitrary code; only load files written by save().
        """
        file_path = pathlib.Path(file_path)
        with file_path.open(mode="rb") as f:
            self._cookies = pickle.load(f)

    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
        """Drop expired cookies plus any for which *predicate* is true.

        With no predicate, everything is removed and bookkeeping reset.
        """
        if predicate is None:
            self._next_expiration = next_whole_second()
            self._cookies.clear()
            self._host_only_cookies.clear()
            self._expirations.clear()
            return

        to_del = []
        now = datetime.datetime.now(datetime.timezone.utc)
        for domain, cookie in self._cookies.items():
            for name, morsel in cookie.items():
                key = (domain, name)
                # Expiration is OR-ed with the predicate, so any clear()
                # call also sweeps out already-expired entries.
                if (
                    key in self._expirations and self._expirations[key] <= now
                ) or predicate(morsel):
                    to_del.append(key)

        for domain, name in to_del:
            key = (domain, name)
            self._host_only_cookies.discard(key)
            if key in self._expirations:
                del self._expirations[(domain, name)]
            self._cookies[domain].pop(name, None)

        # Next sweep happens one whole second after the earliest remaining
        # expiration (rounded down to a whole second).
        next_expiration = min(self._expirations.values(), default=self._max_time)
        try:
            self._next_expiration = next_expiration.replace(
                microsecond=0
            ) + datetime.timedelta(seconds=1)
        except OverflowError:
            self._next_expiration = self._max_time

    def clear_domain(self, domain: str) -> None:
        """Remove all cookies whose domain matches *domain*."""
        self.clear(lambda x: self._is_domain_match(domain, x["domain"]))

    def __iter__(self) -> "Iterator[Morsel[str]]":
        """Yield every non-expired morsel in the jar."""
        self._do_expiration()
        for val in self._cookies.values():
            yield from val.values()

    def __len__(self) -> int:
        """Number of non-expired cookies (counts via __iter__)."""
        return sum(1 for i in self)

    def _do_expiration(self) -> None:
        # An always-false predicate still purges expired entries, because
        # clear() OR-s the expiration check with the predicate.
        self.clear(lambda x: False)

    def _expire_cookie(self, when: datetime.datetime, domain: str, name: str) -> None:
        # Record when (domain, name) expires and pull the sweep forward.
        self._next_expiration = min(self._next_expiration, when)
        self._expirations[(domain, name)] = when

    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        """Update cookies."""
        hostname = response_url.raw_host

        if not self._unsafe and is_ip_address(hostname):
            # Don't accept cookies from IPs
            return

        if isinstance(cookies, Mapping):
            cookies = cookies.items()

        for name, cookie in cookies:
            if not isinstance(cookie, Morsel):
                # Coerce a raw value into a Morsel via a scratch SimpleCookie.
                tmp = SimpleCookie()  # type: SimpleCookie[str]
                tmp[name] = cookie  # type: ignore[assignment]
                cookie = tmp[name]

            domain = cookie["domain"]

            # ignore domains with trailing dots
            if domain.endswith("."):
                domain = ""
                del cookie["domain"]

            if not domain and hostname is not None:
                # Set the cookie's domain to the response hostname
                # and set its host-only-flag
                self._host_only_cookies.add((hostname, name))
                domain = cookie["domain"] = hostname

            if domain.startswith("."):
                # Remove leading dot
                domain = domain[1:]
                cookie["domain"] = domain

            if hostname and not self._is_domain_match(domain, hostname):
                # Setting cookies for different domains is not allowed
                continue

            path = cookie["path"]
            if not path or not path.startswith("/"):
                # Set the cookie's path to the response path
                path = response_url.path
                if not path.startswith("/"):
                    path = "/"
                else:
                    # Cut everything from the last slash to the end
                    path = "/" + path[1 : path.rfind("/")]
                cookie["path"] = path

            # Max-Age takes precedence over Expires when both are present.
            max_age = cookie["max-age"]
            if max_age:
                try:
                    delta_seconds = int(max_age)
                    try:
                        max_age_expiration = datetime.datetime.now(
                            datetime.timezone.utc
                        ) + datetime.timedelta(seconds=delta_seconds)
                    except OverflowError:
                        max_age_expiration = self._max_time
                    self._expire_cookie(max_age_expiration, domain, name)
                except ValueError:
                    # Non-integer Max-Age: drop the attribute.
                    cookie["max-age"] = ""

            else:
                expires = cookie["expires"]
                if expires:
                    expire_time = self._parse_date(expires)
                    if expire_time:
                        self._expire_cookie(expire_time, domain, name)
                    else:
                        # Unparseable Expires date: drop the attribute.
                        cookie["expires"] = ""

            self._cookies[domain][name] = cookie

        self._do_expiration()

    def filter_cookies(
        self, request_url: URL = URL()
    ) -> Union["BaseCookie[str]", "SimpleCookie[str]"]:
        """Returns this jar's cookies filtered by their attributes."""
        self._do_expiration()
        request_url = URL(request_url)
        # SimpleCookie quotes values on output; BaseCookie leaves them raw.
        filtered: Union["SimpleCookie[str]", "BaseCookie[str]"] = (
            SimpleCookie() if self._quote_cookie else BaseCookie()
        )
        hostname = request_url.raw_host or ""
        request_origin = URL()
        with contextlib.suppress(ValueError):
            request_origin = request_url.origin()

        # The request counts as secure if its scheme is https/wss or its
        # origin was whitelisted via treat_as_secure_origin.
        is_not_secure = (
            request_url.scheme not in ("https", "wss")
            and request_origin not in self._treat_as_secure_origin
        )

        for cookie in self:
            name = cookie.key
            domain = cookie["domain"]

            # Send shared cookies
            if not domain:
                filtered[name] = cookie.value
                continue

            if not self._unsafe and is_ip_address(hostname):
                continue

            if (domain, name) in self._host_only_cookies:
                # Host-only cookies require an exact hostname match.
                if domain != hostname:
                    continue
            elif not self._is_domain_match(domain, hostname):
                continue

            if not self._is_path_match(request_url.path, cookie["path"]):
                continue

            # Secure-flagged cookies are withheld on insecure requests.
            if is_not_secure and cookie["secure"]:
                continue

            # It's critical we use the Morsel so the coded_value
            # (based on cookie version) is preserved
            mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel()))
            mrsl_val.set(cookie.key, cookie.value, cookie.coded_value)
            filtered[name] = mrsl_val

        return filtered

    @staticmethod
    def _is_domain_match(domain: str, hostname: str) -> bool:
        """Implements domain matching adhering to RFC 6265."""
        if hostname == domain:
            return True

        if not hostname.endswith(domain):
            return False

        non_matching = hostname[: -len(domain)]

        # A suffix only matches on a label boundary: "x.example.com"
        # matches "example.com" but "xexample.com" must not.
        if not non_matching.endswith("."):
            return False

        return not is_ip_address(hostname)

    @staticmethod
    def _is_path_match(req_path: str, cookie_path: str) -> bool:
        """Implements path matching adhering to RFC 6265."""
        if not req_path.startswith("/"):
            req_path = "/"

        if req_path == cookie_path:
            return True

        if not req_path.startswith(cookie_path):
            return False

        if cookie_path.endswith("/"):
            return True

        non_matching = req_path[len(cookie_path) :]

        # Otherwise the cookie path must end at a path-segment boundary.
        return non_matching.startswith("/")

    @classmethod
    def _parse_date(cls, date_str: str) -> Optional[datetime.datetime]:
        """Implements date string parsing adhering to RFC 6265."""
        if not date_str:
            return None

        found_time = False
        found_day = False
        found_month = False
        found_year = False

        hour = minute = second = 0
        day = 0
        month = 0
        year = 0

        # Each date component is taken from the first token matching it,
        # in whatever order the tokens appear (RFC 6265 section 5.1.1).
        for token_match in cls.DATE_TOKENS_RE.finditer(date_str):

            token = token_match.group("token")

            if not found_time:
                time_match = cls.DATE_HMS_TIME_RE.match(token)
                if time_match:
                    found_time = True
                    hour, minute, second = (int(s) for s in time_match.groups())
                    continue

            if not found_day:
                day_match = cls.DATE_DAY_OF_MONTH_RE.match(token)
                if day_match:
                    found_day = True
                    day = int(day_match.group())
                    continue

            if not found_month:
                month_match = cls.DATE_MONTH_RE.match(token)
                if month_match:
                    found_month = True
                    assert month_match.lastindex is not None
                    # Month number == index of the matched alternative.
                    month = month_match.lastindex
                    continue

            if not found_year:
                year_match = cls.DATE_YEAR_RE.match(token)
                if year_match:
                    found_year = True
                    year = int(year_match.group())

        # Two-digit years per RFC 6265: 70-99 -> 19xx, 00-69 -> 20xx.
        if 70 <= year <= 99:
            year += 1900
        elif 0 <= year <= 69:
            year += 2000

        if False in (found_day, found_month, found_year, found_time):
            return None

        if not 1 <= day <= 31:
            return None

        if year < 1601 or hour > 23 or minute > 59 or second > 59:
            return None

        return datetime.datetime(
            year, month, day, hour, minute, second, tzinfo=datetime.timezone.utc
        )
+
+
class DummyCookieJar(AbstractCookieJar):
    """Implements a dummy cookie storage.

    It can be used with the ClientSession when no cookie processing is needed.

    """

    def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        super().__init__(loop=loop)

    def __iter__(self) -> "Iterator[Morsel[str]]":
        # Empty generator: the bare `return` before `yield` keeps this a
        # generator function while producing no items.
        return
        yield  # pragma: no cover

    def __len__(self) -> int:
        # The jar is always empty.
        return 0

    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
        """No-op: there is nothing to clear."""

    def clear_domain(self, domain: str) -> None:
        """No-op: there is nothing to clear."""

    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        """No-op: incoming cookies are discarded."""

    def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
        """Always returns an empty cookie collection."""
        return SimpleCookie()
diff --git a/contrib/python/aiohttp/aiohttp/formdata.py b/contrib/python/aiohttp/aiohttp/formdata.py
new file mode 100644
index 0000000000..4857c89856
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/formdata.py
@@ -0,0 +1,172 @@
+import io
+from typing import Any, Iterable, List, Optional
+from urllib.parse import urlencode
+
+from multidict import MultiDict, MultiDictProxy
+
+from . import hdrs, multipart, payload
+from .helpers import guess_filename
+from .payload import Payload
+
+__all__ = ("FormData",)
+
+
class FormData:
    """Helper class for form body generation.

    Supports multipart/form-data and application/x-www-form-urlencoded.
    """

    def __init__(
        self,
        fields: Iterable[Any] = (),
        quote_fields: bool = True,
        charset: Optional[str] = None,
    ) -> None:
        """Collect *fields* for later serialization via __call__()."""
        self._writer = multipart.MultipartWriter("form-data")
        self._fields = []  # type: List[Any]
        # Flips to True as soon as any field forces multipart encoding
        # (file objects, filenames, explicit content types/encodings).
        self._is_multipart = False
        self._is_processed = False
        self._quote_fields = quote_fields
        self._charset = charset

        # Accept a dict, a list/tuple of pairs, or a single field.
        if isinstance(fields, dict):
            fields = list(fields.items())
        elif not isinstance(fields, (list, tuple)):
            fields = (fields,)
        self.add_fields(*fields)

    @property
    def is_multipart(self) -> bool:
        # True when the collected fields require multipart/form-data.
        return self._is_multipart

    def add_field(
        self,
        name: str,
        value: Any,
        *,
        content_type: Optional[str] = None,
        filename: Optional[str] = None,
        content_transfer_encoding: Optional[str] = None,
    ) -> None:
        """Add one form field.

        File-like values, a filename, a content_type, or a
        content_transfer_encoding each switch the form into multipart
        mode.  Raises TypeError when a keyword option is not a str.
        """

        if isinstance(value, io.IOBase):
            self._is_multipart = True
        elif isinstance(value, (bytes, bytearray, memoryview)):
            # Raw binary payloads default the filename to the field name.
            if filename is None and content_transfer_encoding is None:
                filename = name

        type_options = MultiDict({"name": name})  # type: MultiDict[str]
        if filename is not None and not isinstance(filename, str):
            raise TypeError(
                "filename must be an instance of str. " "Got: %s" % filename
            )
        if filename is None and isinstance(value, io.IOBase):
            filename = guess_filename(value, name)
        if filename is not None:
            type_options["filename"] = filename
            self._is_multipart = True

        headers = {}
        if content_type is not None:
            if not isinstance(content_type, str):
                raise TypeError(
                    "content_type must be an instance of str. " "Got: %s" % content_type
                )
            headers[hdrs.CONTENT_TYPE] = content_type
            self._is_multipart = True
        if content_transfer_encoding is not None:
            if not isinstance(content_transfer_encoding, str):
                raise TypeError(
                    "content_transfer_encoding must be an instance"
                    " of str. Got: %s" % content_transfer_encoding
                )
            headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding
            self._is_multipart = True

        self._fields.append((type_options, headers, value))

    def add_fields(self, *fields: Any) -> None:
        """Add many fields at once.

        Accepts file objects, multidicts (flattened into their items),
        and (name, value) pairs; anything else raises TypeError.
        """
        to_add = list(fields)

        while to_add:
            rec = to_add.pop(0)

            if isinstance(rec, io.IOBase):
                k = guess_filename(rec, "unknown")
                self.add_field(k, rec)  # type: ignore[arg-type]

            elif isinstance(rec, (MultiDictProxy, MultiDict)):
                # Flatten the multidict and process its pairs in turn.
                to_add.extend(rec.items())

            elif isinstance(rec, (list, tuple)) and len(rec) == 2:
                k, fp = rec
                self.add_field(k, fp)  # type: ignore[arg-type]

            else:
                raise TypeError(
                    "Only io.IOBase, multidict and (name, file) "
                    "pairs allowed, use .add_field() for passing "
                    "more complex parameters, got {!r}".format(rec)
                )

    def _gen_form_urlencoded(self) -> payload.BytesPayload:
        # form data (x-www-form-urlencoded)
        data = []
        for type_options, _, value in self._fields:
            data.append((type_options["name"], value))

        charset = self._charset if self._charset is not None else "utf-8"

        # Only advertise the charset when it differs from the utf-8 default.
        if charset == "utf-8":
            content_type = "application/x-www-form-urlencoded"
        else:
            content_type = "application/x-www-form-urlencoded; " "charset=%s" % charset

        return payload.BytesPayload(
            urlencode(data, doseq=True, encoding=charset).encode(),
            content_type=content_type,
        )

    def _gen_form_data(self) -> multipart.MultipartWriter:
        """Encode a list of fields using the multipart/form-data MIME format"""
        if self._is_processed:
            # The writer is stateful; encoding twice would duplicate parts.
            raise RuntimeError("Form data has been processed already")
        for dispparams, headers, value in self._fields:
            try:
                if hdrs.CONTENT_TYPE in headers:
                    part = payload.get_payload(
                        value,
                        content_type=headers[hdrs.CONTENT_TYPE],
                        headers=headers,
                        encoding=self._charset,
                    )
                else:
                    part = payload.get_payload(
                        value, headers=headers, encoding=self._charset
                    )
            except Exception as exc:
                raise TypeError(
                    "Can not serialize value type: %r\n "
                    "headers: %r\n value: %r" % (type(value), headers, value)
                ) from exc

            if dispparams:
                part.set_content_disposition(
                    "form-data", quote_fields=self._quote_fields, **dispparams
                )
                # FIXME cgi.FieldStorage doesn't likes body parts with
                # Content-Length which were sent via chunked transfer encoding
                assert part.headers is not None
                part.headers.popall(hdrs.CONTENT_LENGTH, None)

            self._writer.append_payload(part)

        self._is_processed = True
        return self._writer

    def __call__(self) -> Payload:
        """Serialize the collected fields into a request payload."""
        if self._is_multipart:
            return self._gen_form_data()
        else:
            return self._gen_form_urlencoded()
diff --git a/contrib/python/aiohttp/aiohttp/hdrs.py b/contrib/python/aiohttp/aiohttp/hdrs.py
new file mode 100644
index 0000000000..a619f2543e
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/hdrs.py
@@ -0,0 +1,114 @@
+"""HTTP Headers constants."""
+
+# After changing the file content call ./tools/gen.py
+# to regenerate the headers parser
+import sys
+from typing import Set
+
+from multidict import istr
+
+if sys.version_info >= (3, 8):
+ from typing import Final
+else:
+ from typing_extensions import Final
+
# HTTP request methods.
METH_ANY: Final[str] = "*"
METH_CONNECT: Final[str] = "CONNECT"
METH_HEAD: Final[str] = "HEAD"
METH_GET: Final[str] = "GET"
METH_DELETE: Final[str] = "DELETE"
METH_OPTIONS: Final[str] = "OPTIONS"
METH_PATCH: Final[str] = "PATCH"
METH_POST: Final[str] = "POST"
METH_PUT: Final[str] = "PUT"
METH_TRACE: Final[str] = "TRACE"

# All concrete methods (excludes the METH_ANY wildcard).
METH_ALL: Final[Set[str]] = {
    METH_CONNECT,
    METH_HEAD,
    METH_GET,
    METH_DELETE,
    METH_OPTIONS,
    METH_PATCH,
    METH_POST,
    METH_PUT,
    METH_TRACE,
}

# Well-known HTTP header names as case-insensitive istr constants.
ACCEPT: Final[istr] = istr("Accept")
ACCEPT_CHARSET: Final[istr] = istr("Accept-Charset")
ACCEPT_ENCODING: Final[istr] = istr("Accept-Encoding")
ACCEPT_LANGUAGE: Final[istr] = istr("Accept-Language")
ACCEPT_RANGES: Final[istr] = istr("Accept-Ranges")
ACCESS_CONTROL_MAX_AGE: Final[istr] = istr("Access-Control-Max-Age")
ACCESS_CONTROL_ALLOW_CREDENTIALS: Final[istr] = istr("Access-Control-Allow-Credentials")
ACCESS_CONTROL_ALLOW_HEADERS: Final[istr] = istr("Access-Control-Allow-Headers")
ACCESS_CONTROL_ALLOW_METHODS: Final[istr] = istr("Access-Control-Allow-Methods")
ACCESS_CONTROL_ALLOW_ORIGIN: Final[istr] = istr("Access-Control-Allow-Origin")
ACCESS_CONTROL_EXPOSE_HEADERS: Final[istr] = istr("Access-Control-Expose-Headers")
ACCESS_CONTROL_REQUEST_HEADERS: Final[istr] = istr("Access-Control-Request-Headers")
ACCESS_CONTROL_REQUEST_METHOD: Final[istr] = istr("Access-Control-Request-Method")
AGE: Final[istr] = istr("Age")
ALLOW: Final[istr] = istr("Allow")
AUTHORIZATION: Final[istr] = istr("Authorization")
CACHE_CONTROL: Final[istr] = istr("Cache-Control")
CONNECTION: Final[istr] = istr("Connection")
CONTENT_DISPOSITION: Final[istr] = istr("Content-Disposition")
CONTENT_ENCODING: Final[istr] = istr("Content-Encoding")
CONTENT_LANGUAGE: Final[istr] = istr("Content-Language")
CONTENT_LENGTH: Final[istr] = istr("Content-Length")
CONTENT_LOCATION: Final[istr] = istr("Content-Location")
CONTENT_MD5: Final[istr] = istr("Content-MD5")
CONTENT_RANGE: Final[istr] = istr("Content-Range")
CONTENT_TRANSFER_ENCODING: Final[istr] = istr("Content-Transfer-Encoding")
CONTENT_TYPE: Final[istr] = istr("Content-Type")
COOKIE: Final[istr] = istr("Cookie")
DATE: Final[istr] = istr("Date")
DESTINATION: Final[istr] = istr("Destination")
DIGEST: Final[istr] = istr("Digest")
ETAG: Final[istr] = istr("Etag")
EXPECT: Final[istr] = istr("Expect")
EXPIRES: Final[istr] = istr("Expires")
FORWARDED: Final[istr] = istr("Forwarded")
FROM: Final[istr] = istr("From")
HOST: Final[istr] = istr("Host")
IF_MATCH: Final[istr] = istr("If-Match")
IF_MODIFIED_SINCE: Final[istr] = istr("If-Modified-Since")
IF_NONE_MATCH: Final[istr] = istr("If-None-Match")
IF_RANGE: Final[istr] = istr("If-Range")
IF_UNMODIFIED_SINCE: Final[istr] = istr("If-Unmodified-Since")
KEEP_ALIVE: Final[istr] = istr("Keep-Alive")
LAST_EVENT_ID: Final[istr] = istr("Last-Event-ID")
LAST_MODIFIED: Final[istr] = istr("Last-Modified")
LINK: Final[istr] = istr("Link")
LOCATION: Final[istr] = istr("Location")
MAX_FORWARDS: Final[istr] = istr("Max-Forwards")
ORIGIN: Final[istr] = istr("Origin")
PRAGMA: Final[istr] = istr("Pragma")
PROXY_AUTHENTICATE: Final[istr] = istr("Proxy-Authenticate")
PROXY_AUTHORIZATION: Final[istr] = istr("Proxy-Authorization")
RANGE: Final[istr] = istr("Range")
REFERER: Final[istr] = istr("Referer")
RETRY_AFTER: Final[istr] = istr("Retry-After")
SEC_WEBSOCKET_ACCEPT: Final[istr] = istr("Sec-WebSocket-Accept")
SEC_WEBSOCKET_VERSION: Final[istr] = istr("Sec-WebSocket-Version")
SEC_WEBSOCKET_PROTOCOL: Final[istr] = istr("Sec-WebSocket-Protocol")
SEC_WEBSOCKET_EXTENSIONS: Final[istr] = istr("Sec-WebSocket-Extensions")
SEC_WEBSOCKET_KEY: Final[istr] = istr("Sec-WebSocket-Key")
SEC_WEBSOCKET_KEY1: Final[istr] = istr("Sec-WebSocket-Key1")
SERVER: Final[istr] = istr("Server")
SET_COOKIE: Final[istr] = istr("Set-Cookie")
TE: Final[istr] = istr("TE")
TRAILER: Final[istr] = istr("Trailer")
TRANSFER_ENCODING: Final[istr] = istr("Transfer-Encoding")
UPGRADE: Final[istr] = istr("Upgrade")
URI: Final[istr] = istr("URI")
USER_AGENT: Final[istr] = istr("User-Agent")
VARY: Final[istr] = istr("Vary")
VIA: Final[istr] = istr("Via")
WANT_DIGEST: Final[istr] = istr("Want-Digest")
WARNING: Final[istr] = istr("Warning")
WWW_AUTHENTICATE: Final[istr] = istr("WWW-Authenticate")
X_FORWARDED_FOR: Final[istr] = istr("X-Forwarded-For")
X_FORWARDED_HOST: Final[istr] = istr("X-Forwarded-Host")
X_FORWARDED_PROTO: Final[istr] = istr("X-Forwarded-Proto")
diff --git a/contrib/python/aiohttp/aiohttp/helpers.py b/contrib/python/aiohttp/aiohttp/helpers.py
new file mode 100644
index 0000000000..f30f76ba41
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/helpers.py
@@ -0,0 +1,878 @@
+"""Various helper functions"""
+
+import asyncio
+import base64
+import binascii
+import datetime
+import functools
+import inspect
+import netrc
+import os
+import platform
+import re
+import sys
+import time
+import warnings
+import weakref
+from collections import namedtuple
+from contextlib import suppress
+from email.parser import HeaderParser
+from email.utils import parsedate
+from math import ceil
+from pathlib import Path
+from types import TracebackType
+from typing import (
+ Any,
+ Callable,
+ ContextManager,
+ Dict,
+ Generator,
+ Generic,
+ Iterable,
+ Iterator,
+ List,
+ Mapping,
+ Optional,
+ Pattern,
+ Set,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ cast,
+)
+from urllib.parse import quote
+from urllib.request import getproxies, proxy_bypass
+
+import async_timeout
+import attr
+from multidict import MultiDict, MultiDictProxy
+from yarl import URL
+
+from . import hdrs
+from .log import client_logger, internal_logger
+from .typedefs import PathLike, Protocol # noqa
+
__all__ = ("BasicAuth", "ChainMapProxy", "ETag")

# Platform flags; IS_WINDOWS is used e.g. for the "_netrc" file name.
IS_MACOS = platform.system() == "Darwin"
IS_WINDOWS = platform.system() == "Windows"

# Interpreter feature flags for version-dependent code paths.
PY_36 = sys.version_info >= (3, 6)
PY_37 = sys.version_info >= (3, 7)
PY_38 = sys.version_info >= (3, 8)
PY_310 = sys.version_info >= (3, 10)

if sys.version_info < (3, 7):
    # Python 3.6: patch ssl hostname matching for IDNA domains and
    # emulate asyncio.all_tasks() (added in 3.7) via Task.all_tasks().
    import idna_ssl

    idna_ssl.patch_match_hostname()

    def all_tasks(
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> Set["asyncio.Task[Any]"]:
        # Task.all_tasks() also returns finished tasks; filter them out
        # to match the 3.7+ asyncio.all_tasks() semantics.
        tasks = list(asyncio.Task.all_tasks(loop))
        return {t for t in tasks if not t.done()}


else:
    all_tasks = asyncio.all_tasks
+
+
_T = TypeVar("_T")
_S = TypeVar("_S")


# Marker for "argument not supplied", distinct from an explicit None.
sentinel = object()  # type: Any
# Setting AIOHTTP_NO_EXTENSIONS in the environment disables the C speedups.
NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS"))  # type: bool

# N.B. sys.flags.dev_mode is available on Python 3.7+, use getattr
# for compatibility with older versions
DEBUG = getattr(sys.flags, "dev_mode", False) or (
    not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
)  # type: bool


# RFC 2616 character classes used when validating tokens (see
# content_disposition_header below): TOKEN = CHAR minus CTL minus separators.
CHAR = {chr(i) for i in range(0, 128)}
CTL = {chr(i) for i in range(0, 32)} | {
    chr(127),
}
SEPARATORS = {
    "(",
    ")",
    "<",
    ">",
    "@",
    ",",
    ";",
    ":",
    "\\",
    '"',
    "/",
    "[",
    "]",
    "?",
    "=",
    "{",
    "}",
    " ",
    chr(9),
}
# NOTE(review): chr(9) (TAB) appears in both CTL and SEPARATORS, so the
# double symmetric-difference toggles it back *into* TOKEN.  This matches
# upstream aiohttp; confirm before changing.
TOKEN = CHAR ^ CTL ^ SEPARATORS
+
+
class noop:
    """Awaitable that completes immediately; ``await noop()`` yields None."""

    def __await__(self) -> Generator[None, None, None]:
        yield
+
+
class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])):
    """Http basic authentication helper."""

    def __new__(
        cls, login: str, password: str = "", encoding: str = "latin1"
    ) -> "BasicAuth":
        # Reject None explicitly: the namedtuple base would happily store it.
        if login is None:
            raise ValueError("None is not allowed as login value")
        if password is None:
            raise ValueError("None is not allowed as password value")
        if ":" in login:
            raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)')
        return super().__new__(cls, login, password, encoding)

    @classmethod
    def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth":
        """Create a BasicAuth object from an Authorization HTTP header."""
        pieces = auth_header.split(" ", 1)
        if len(pieces) != 2:
            raise ValueError("Could not parse authorization header.")
        auth_type, encoded_credentials = pieces

        if auth_type.lower() != "basic":
            raise ValueError("Unknown authorization method %s" % auth_type)

        try:
            decoded = base64.b64decode(
                encoded_credentials.encode("ascii"), validate=True
            ).decode(encoding)
        except binascii.Error:
            raise ValueError("Invalid base64 encoding.")

        # RFC 2617 HTTP Authentication (https://www.ietf.org/rfc/rfc2617.txt):
        # the colon must be present, but the username and password may be
        # otherwise blank.
        username, sep, password = decoded.partition(":")
        if not sep:
            raise ValueError("Invalid credentials.")

        return cls(username, password, encoding=encoding)

    @classmethod
    def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]:
        """Create BasicAuth from url."""
        if not isinstance(url, URL):
            raise TypeError("url should be yarl.URL instance")
        if url.user is None:
            return None
        return cls(url.user, url.password or "", encoding=encoding)

    def encode(self) -> str:
        """Encode credentials."""
        raw = f"{self.login}:{self.password}".encode(self.encoding)
        return "Basic %s" % base64.b64encode(raw).decode(self.encoding)
+
+
def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    """Split *url* into a credential-free URL and its BasicAuth, if any."""
    auth = BasicAuth.from_url(url)
    return (url, None) if auth is None else (url.with_user(None), auth)
+
+
def netrc_from_env() -> Optional[netrc.netrc]:
    """Load netrc from file.

    Attempt to load it from the path specified by the env-var
    NETRC or in the default location in the user's home directory.

    Returns None if it couldn't be found or fails to parse.
    """
    env_path = os.environ.get("NETRC")

    if env_path is None:
        try:
            home_dir = Path.home()
        except RuntimeError as e:  # pragma: no cover
            # pathlib raises RuntimeError when the home dir can't be resolved
            client_logger.debug(
                "Could not resolve home directory when "
                "trying to look for .netrc file: %s",
                e,
            )
            return None
        netrc_path = home_dir / ("_netrc" if IS_WINDOWS else ".netrc")
    else:
        netrc_path = Path(env_path)

    try:
        return netrc.netrc(str(netrc_path))
    except netrc.NetrcParseError as e:
        client_logger.warning("Could not parse .netrc file: %s", e)
    except OSError as e:
        # we couldn't read the file (doesn't exist, permissions, etc.)
        if env_path or netrc_path.is_file():
            # only warn if the environment wanted us to load it,
            # or it appears like the default file does actually exist
            client_logger.warning("Could not read .netrc file: %s", e)

    return None
+
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ProxyInfo:
    """Proxy endpoint plus optional credentials, as read from the env."""

    proxy: URL
    proxy_auth: Optional[BasicAuth]
+
+
def proxies_from_env() -> Dict[str, ProxyInfo]:
    """Collect proxy settings from the environment via ``getproxies()``.

    Returns a mapping of scheme ("http", "ws", ...) to ProxyInfo.
    HTTPS/WSS proxies are skipped with a warning; credentials come from
    the proxy URL itself or, failing that, from netrc.
    """
    proxy_urls = {
        k: URL(v)
        for k, v in getproxies().items()
        if k in ("http", "https", "ws", "wss")
    }
    netrc_obj = netrc_from_env()
    # Separate embedded user:pass credentials from each proxy URL.
    stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
    ret = {}
    for proto, val in stripped.items():
        proxy, auth = val
        if proxy.scheme in ("https", "wss"):
            client_logger.warning(
                "%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy
            )
            continue
        if netrc_obj and auth is None:
            auth_from_netrc = None
            if proxy.host is not None:
                auth_from_netrc = netrc_obj.authenticators(proxy.host)
            if auth_from_netrc is not None:
                # auth_from_netrc is a (`user`, `account`, `password`) tuple,
                # `user` and `account` both can be username,
                # if `user` is None, use `account`
                *logins, password = auth_from_netrc
                login = logins[0] if logins[0] else logins[-1]
                auth = BasicAuth(cast(str, login), cast(str, password))
        ret[proto] = ProxyInfo(proxy, auth)
    return ret
+
+
def current_task(
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> "Optional[asyncio.Task[Any]]":
    """Return the task currently running in *loop* (Python 3.6 compatible)."""
    if sys.version_info < (3, 7):
        # Task.current_task() was the pre-3.7 spelling.
        return asyncio.Task.current_task(loop=loop)
    return asyncio.current_task(loop=loop)
+
+
def get_running_loop(
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> asyncio.AbstractEventLoop:
    """Return *loop*, defaulting to the current event loop.

    Warns (DeprecationWarning, plus a stack-traced log entry in debug
    mode) when the chosen loop is not running, since loop-bound objects
    should be created from within async code.
    """
    selected = asyncio.get_event_loop() if loop is None else loop
    if not selected.is_running():
        warnings.warn(
            "The object should be created within an async function",
            DeprecationWarning,
            stacklevel=3,
        )
        if selected.get_debug():
            internal_logger.warning(
                "The object should be created within an async function", stack_info=True
            )
    return selected
+
+
def isasyncgenfunction(obj: Any) -> bool:
    """True if *obj* is an async generator function.

    Falls back to False on interpreters lacking
    ``inspect.isasyncgenfunction`` (added in Python 3.6).
    """
    checker = getattr(inspect, "isasyncgenfunction", None)
    if checker is None:
        return False
    return checker(obj)  # type: ignore[no-any-return]
+
+
def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    """Get a permitted proxy for the given URL from the env."""
    host = url.host
    if host is not None and proxy_bypass(host):
        raise LookupError(f"Proxying is disallowed for `{host!r}`")

    env_proxies = proxies_from_env()
    try:
        proxy_info = env_proxies[url.scheme]
    except KeyError:
        raise LookupError(f"No proxies found for `{url!s}` in the env")
    return proxy_info.proxy, proxy_info.proxy_auth
+
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class MimeType:
    """Decomposed MIME type: type/subtype(+suffix) plus parameters."""

    type: str
    subtype: str
    suffix: str
    parameters: "MultiDictProxy[str]"
+
+
@functools.lru_cache(maxsize=56)
def parse_mimetype(mimetype: str) -> MimeType:
    """Parses a MIME type into its components.

    mimetype is a MIME type string.

    Returns a MimeType object.

    Example:

    >>> parse_mimetype('text/html; charset=utf-8')
    MimeType(type='text', subtype='html', suffix='',
             parameters={'charset': 'utf-8'})

    """
    if not mimetype:
        return MimeType(
            type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict())
        )

    fulltype, *raw_params = mimetype.split(";")

    params = MultiDict()  # type: MultiDict[str]
    for segment in raw_params:
        if not segment:
            continue
        # "key=value" or a bare "key" (empty value).
        key, _, value = segment.partition("=")
        params.add(key.lower().strip(), value.strip(' "'))

    fulltype = fulltype.strip().lower()
    if fulltype == "*":
        fulltype = "*/*"

    # "type/subtype+suffix"; missing pieces default to "".
    mtype, _, stype = fulltype.partition("/")
    stype, _, suffix = stype.partition("+")

    return MimeType(
        type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
    )
+
+
def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]:
    """Best-effort file name from *obj*'s ``.name`` attribute, else *default*."""
    name = getattr(obj, "name", None)
    if not name or not isinstance(name, str):
        return default
    # File-like objects such as stdio streams use pseudo names like "<stdin>".
    if name.startswith("<") or name.endswith(">"):
        return default
    return Path(name).name
+
+
+not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]")
+QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"}
+
+
+def quoted_string(content: str) -> str:
+ """Return 7-bit content as quoted-string.
+
+ Format content into a quoted-string as defined in RFC5322 for
+ Internet Message Format. Notice that this is not the 8-bit HTTP
+ format, but the 7-bit email format. Content must be in usascii or
+ a ValueError is raised.
+ """
+ if not (QCONTENT > set(content)):
+ raise ValueError(f"bad content for quoted-string {content!r}")
+ return not_qtext_re.sub(lambda x: "\\" + x.group(0), content)
+
+
def content_disposition_header(
    disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str
) -> str:
    """Sets ``Content-Disposition`` header for MIME.

    This is the MIME payload Content-Disposition header from RFC 2183
    and RFC 7579 section 4.2, not the HTTP Content-Disposition from
    RFC 6266.

    disptype is a disposition type: inline, attachment, form-data.
    Should be valid extension token (see RFC 2183)

    quote_fields performs value quoting to 7-bit MIME headers
    according to RFC 7578. Set to quote_fields to False if recipient
    can take 8-bit file names and field values.

    _charset specifies the charset to use when quote_fields is True.

    params is a dict with disposition params.
    """
    # Disposition type must be a non-empty RFC 2616 token.
    if not disptype or not (TOKEN > set(disptype)):
        raise ValueError("bad content disposition type {!r}" "".format(disptype))

    value = disptype
    if params:
        lparams = []
        for key, val in params.items():
            # Parameter names must also be tokens.
            if not key or not (TOKEN > set(key)):
                raise ValueError(
                    "bad content disposition parameter" " {!r}={!r}".format(key, val)
                )
            if quote_fields:
                if key.lower() == "filename":
                    # filename values are percent-encoded as-is.
                    qval = quote(val, "", encoding=_charset)
                    lparams.append((key, '"%s"' % qval))
                else:
                    try:
                        # Prefer a plain 7-bit quoted-string.
                        qval = quoted_string(val)
                    except ValueError:
                        # Non-usascii value: fall back to the extended
                        # "key*=charset''percent-encoded" syntax.
                        qval = "".join(
                            (_charset, "''", quote(val, "", encoding=_charset))
                        )
                        lparams.append((key + "*", qval))
                    else:
                        lparams.append((key, '"%s"' % qval))
            else:
                # Recipient accepts 8-bit: only escape backslash and quote.
                qval = val.replace("\\", "\\\\").replace('"', '\\"')
                lparams.append((key, '"%s"' % qval))
        sparams = "; ".join("=".join(pair) for pair in lparams)
        value = "; ".join((value, sparams))
    return value
+
+
class _TSelf(Protocol, Generic[_T]):
    """Structural type for instances exposing a ``_cache`` dict (see reify)."""

    _cache: Dict[str, _T]
+
+
class reify(Generic[_T]):
    """Use as a class method decorator.

    It operates almost exactly like the Python ``@property`` decorator,
    but stores the computed value in the instance's ``_cache`` dict
    after the first call, so the wrapped method runs at most once per
    instance.  It is, in Python parlance, a data descriptor.
    """

    def __init__(self, wrapped: Callable[..., _T]) -> None:
        self.wrapped = wrapped
        self.__doc__ = wrapped.__doc__
        self.name = wrapped.__name__

    def __get__(self, inst: _TSelf[_T], owner: Optional[Type[Any]] = None) -> _T:
        # Class-level access returns the descriptor itself.
        if inst is None:
            return self
        try:
            return inst._cache[self.name]
        except KeyError:
            # First access: compute and memoize.
            value = self.wrapped(inst)
            inst._cache[self.name] = value
            return value

    def __set__(self, inst: _TSelf[_T], value: _T) -> None:
        raise AttributeError("reified property is read-only")
+
+
# Keep a reference to the pure-Python implementation before (possibly)
# swapping in the Cython-accelerated one below.
reify_py = reify

try:
    from ._helpers import reify as reify_c

    if not NO_EXTENSIONS:
        # Prefer the C extension unless extensions are explicitly disabled.
        reify = reify_c  # type: ignore[misc,assignment]
except ImportError:
    pass
+
# Anchored patterns matching a complete IPv4 / IPv6 literal.  Compiled in
# both str and bytes flavours so hosts can be tested without decoding.
_ipv4_pattern = (
    r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}"
    r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
)
_ipv6_pattern = (
    r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}"
    r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)"
    r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})"
    r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}"
    r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}"
    r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)"
    r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}"
    r":|:(:[A-F0-9]{1,4}){7})$"
)
_ipv4_regex = re.compile(_ipv4_pattern)
_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE)
_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii"))
_ipv6_regexb = re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE)
+
+
+def _is_ip_address(
+ regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]]
+) -> bool:
+ if host is None:
+ return False
+ if isinstance(host, str):
+ return bool(regex.match(host))
+ elif isinstance(host, (bytes, bytearray, memoryview)):
+ return bool(regexb.match(host))
+ else:
+ raise TypeError(f"{host} [{type(host)}] is not a str or bytes")
+
+
# Specialisations of _is_ip_address bound to the v4/v6 patterns above.
is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb)
is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb)


def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool:
    """True if *host* is a literal IPv4 or IPv6 address (str or bytes)."""
    return is_ipv4_address(host) or is_ipv6_address(host)
+
+
def next_whole_second() -> datetime.datetime:
    """Return current time rounded up to the next whole second."""
    # NOTE(review): despite the docstring, this *truncates* to the current
    # whole second — the added timedelta is zero.  Matches upstream aiohttp;
    # confirm intent before "fixing".
    return datetime.datetime.now(datetime.timezone.utc).replace(
        microsecond=0
    ) + datetime.timedelta(seconds=0)
+
+
# One-entry cache: the HTTP date string is identical for every response
# sent within the same wall-clock second.
_cached_current_datetime = None  # type: Optional[int]
_cached_formatted_datetime = ""


def rfc822_formatted_time() -> str:
    """Return the current UTC time formatted as an RFC 822 HTTP-date."""
    global _cached_current_datetime
    global _cached_formatted_datetime

    now = int(time.time())
    if now == _cached_current_datetime:
        return _cached_formatted_datetime

    # Weekday and month names for HTTP date/time formatting; always
    # English (locale-dependent strftime is deliberately avoided).
    # Tuples are constants stored in the code object.
    weekdays = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
    months = (
        "",  # dummy so 1-based month numbers index directly
        "Jan",
        "Feb",
        "Mar",
        "Apr",
        "May",
        "Jun",
        "Jul",
        "Aug",
        "Sep",
        "Oct",
        "Nov",
        "Dec",
    )

    year, month, day, hh, mm, ss, wd, *_rest = time.gmtime(now)
    _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
        weekdays[wd],
        day,
        months[month],
        year,
        hh,
        mm,
        ss,
    )
    _cached_current_datetime = now
    return _cached_formatted_datetime
+
+
+def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None:
+ ref, name = info
+ ob = ref()
+ if ob is not None:
+ with suppress(Exception):
+ getattr(ob, name)()
+
+
+def weakref_handle(
+ ob: object, name: str, timeout: float, loop: asyncio.AbstractEventLoop
+) -> Optional[asyncio.TimerHandle]:
+ if timeout is not None and timeout > 0:
+ when = loop.time() + timeout
+ if timeout >= 5:
+ when = ceil(when)
+
+ return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))
+ return None
+
+
def call_later(
    cb: Callable[[], Any], timeout: float, loop: asyncio.AbstractEventLoop
) -> Optional[asyncio.TimerHandle]:
    """Schedule *cb* on *loop* after *timeout* seconds.

    Returns the TimerHandle, or None when *timeout* is None or <= 0.
    """
    if timeout is None or timeout <= 0:
        return None
    deadline = loop.time() + timeout
    if timeout > 5:
        # Coarse deadlines are rounded up so nearby timers fire together.
        deadline = ceil(deadline)
    return loop.call_at(deadline, cb)
+
+
class TimeoutHandle:
    """Timeout handle: fan-out of callbacks fired when a timeout elapses."""

    def __init__(
        self, loop: asyncio.AbstractEventLoop, timeout: Optional[float]
    ) -> None:
        self._timeout = timeout
        self._loop = loop
        self._callbacks = (
            []
        )  # type: List[Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]]

    def register(
        self, callback: Callable[..., None], *args: Any, **kwargs: Any
    ) -> None:
        """Add a callback (with args) to run when the timeout fires."""
        self._callbacks.append((callback, args, kwargs))

    def close(self) -> None:
        """Drop all registered callbacks without firing them."""
        self._callbacks.clear()

    def start(self) -> Optional[asyncio.Handle]:
        """Schedule firing on the loop; None when there is no positive timeout."""
        timeout = self._timeout
        if timeout is None or timeout <= 0:
            return None
        when = self._loop.time() + timeout
        if timeout >= 5:
            # Coarse deadlines are rounded up to batch timer wakeups.
            when = ceil(when)
        return self._loop.call_at(when, self.__call__)

    def timer(self) -> "BaseTimerContext":
        """Return a timer context wired to this handle (noop without timeout)."""
        if self._timeout is None or self._timeout <= 0:
            return TimerNoop()
        timer = TimerContext(self._loop)
        self.register(timer.timeout)
        return timer

    def __call__(self) -> None:
        # Fire every callback once, swallowing their errors, then reset.
        for callback, args, kwargs in self._callbacks:
            with suppress(Exception):
                callback(*args, **kwargs)
        self._callbacks.clear()
+
+
class BaseTimerContext(ContextManager["BaseTimerContext"]):
    """Common base type for timeout timer contexts."""


class TimerNoop(BaseTimerContext):
    """Timer context that never times out."""

    def __enter__(self) -> BaseTimerContext:
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        return


class TimerContext(BaseTimerContext):
    """Low resolution timeout context manager"""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._tasks = []  # type: List[asyncio.Task[Any]]
        self._cancelled = False

    def __enter__(self) -> BaseTimerContext:
        task = current_task(loop=self._loop)
        if task is None:
            raise RuntimeError(
                "Timeout context manager should be used " "inside a task"
            )
        # Entering an already timed-out context fails immediately.
        if self._cancelled:
            raise asyncio.TimeoutError from None
        self._tasks.append(task)
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> Optional[bool]:
        if self._tasks:
            self._tasks.pop()
        # Translate our own cancellation into TimeoutError.
        if exc_type is asyncio.CancelledError and self._cancelled:
            raise asyncio.TimeoutError from None
        return None

    def timeout(self) -> None:
        """Cancel every tracked task and mark the context as timed out."""
        if self._cancelled:
            return
        for task in set(self._tasks):
            task.cancel()
        self._cancelled = True
+
+
def ceil_timeout(delay: Optional[float]) -> async_timeout.Timeout:
    """Create an async_timeout context; deadlines over 5s are ceiled.

    A None or non-positive delay yields a timeout that never fires.
    """
    if delay is None or delay <= 0:
        return async_timeout.timeout(None)

    loop = get_running_loop()
    deadline = loop.time() + delay
    if delay > 5:
        deadline = ceil(deadline)
    return async_timeout.timeout_at(deadline)
+
+
class HeadersMixin:
    """Mixin adding parsed Content-Type / Content-Length accessors.

    The host class must expose a ``_headers`` mapping; parsed values are
    memoized until the raw Content-Type header changes.
    """

    ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"])

    _content_type = None  # type: Optional[str]
    _content_dict = None  # type: Optional[Dict[str, str]]
    _stored_content_type = sentinel

    def _parse_content_type(self, raw: str) -> None:
        self._stored_content_type = raw
        if raw is None:
            # default value according to RFC 2616
            self._content_type = "application/octet-stream"
            self._content_dict = {}
            return
        msg = HeaderParser().parsestr("Content-Type: " + raw)
        self._content_type = msg.get_content_type()
        params = msg.get_params()
        self._content_dict = dict(params[1:])  # First element is content type again

    @property
    def content_type(self) -> str:
        """The value of content part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)  # type: ignore[attr-defined]
        if raw != self._stored_content_type:
            self._parse_content_type(raw)
        return self._content_type  # type: ignore[return-value]

    @property
    def charset(self) -> Optional[str]:
        """The value of charset part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)  # type: ignore[attr-defined]
        if raw != self._stored_content_type:
            self._parse_content_type(raw)
        return self._content_dict.get("charset")  # type: ignore[union-attr]

    @property
    def content_length(self) -> Optional[int]:
        """The value of Content-Length HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_LENGTH)  # type: ignore[attr-defined]
        return None if raw is None else int(raw)
+
+
def set_result(fut: "asyncio.Future[_T]", result: _T) -> None:
    """Set *result* on *fut* unless the future is already done."""
    if fut.done():
        return
    fut.set_result(result)
+
+
def set_exception(fut: "asyncio.Future[_T]", exc: BaseException) -> None:
    """Set *exc* on *fut* unless the future is already done."""
    if fut.done():
        return
    fut.set_exception(exc)
+
+
class ChainMapProxy(Mapping[str, Any]):
    """Read-only view over an ordered sequence of mappings.

    Lookups try each mapping in order and the first hit wins.  The class
    is final: subclassing raises TypeError.
    """

    __slots__ = ("_maps",)

    def __init__(self, maps: Iterable[Mapping[str, Any]]) -> None:
        self._maps = tuple(maps)

    def __init_subclass__(cls) -> None:
        raise TypeError(
            "Inheritance class {} from ChainMapProxy "
            "is forbidden".format(cls.__name__)
        )

    def __getitem__(self, key: str) -> Any:
        for layer in self._maps:
            try:
                return layer[key]
            except KeyError:
                continue
        raise KeyError(key)

    def get(self, key: str, default: Any = None) -> Any:
        if key in self:
            return self[key]
        return default

    def __len__(self) -> int:
        # reuses stored hash values if possible
        return len(set().union(*self._maps))  # type: ignore[arg-type]

    def __iter__(self) -> Iterator[str]:
        merged = {}  # type: Dict[str, Any]
        # Later layers are shadowed, so update from last to first;
        # this also reuses stored hash values if possible.
        for layer in reversed(self._maps):
            merged.update(layer)
        return iter(merged)

    def __contains__(self, key: object) -> bool:
        return any(key in layer for layer in self._maps)

    def __bool__(self) -> bool:
        return any(self._maps)

    def __repr__(self) -> str:
        body = ", ".join(map(repr, self._maps))
        return f"ChainMapProxy({body})"
+
+
# https://tools.ietf.org/html/rfc7232#section-2.3
# etagc: the characters allowed inside an entity-tag.
_ETAGC = r"[!#-}\x80-\xff]+"
_ETAGC_RE = re.compile(_ETAGC)
# Optional weak indicator ("W/") followed by the quoted tag value.
_QUOTED_ETAG = fr'(W/)?"({_ETAGC})"'
QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG)
# Comma-separated etag list; the trailing "(.)" alternative captures any
# character that breaks the list syntax.
LIST_QUOTED_ETAG_RE = re.compile(fr"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)")

# Wildcard etag matching any representation.
ETAG_ANY = "*"
+
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ETag:
    """Parsed entity-tag: opaque value plus the W/ weak-validator flag."""

    value: str
    is_weak: bool = False
+
+
def validate_etag_value(value: str) -> None:
    """Raise ValueError unless *value* is "*" or a well-formed etag body."""
    if value == ETAG_ANY:
        return
    if _ETAGC_RE.fullmatch(value) is None:
        raise ValueError(
            f"Value {value!r} is not a valid etag. Maybe it contains '\"'?"
        )
+
+
def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]:
    """Parse an RFC 822/1123 date string into an aware UTC datetime, or None."""
    if date_str is None:
        return None
    timetuple = parsedate(date_str)
    if timetuple is None:
        return None
    # Out-of-range fields (e.g. day 99) raise ValueError -> treat as unparsable.
    with suppress(ValueError):
        return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc)
    return None
diff --git a/contrib/python/aiohttp/aiohttp/http.py b/contrib/python/aiohttp/aiohttp/http.py
new file mode 100644
index 0000000000..415ffbf563
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/http.py
@@ -0,0 +1,72 @@
+import http.server
+import sys
+from typing import Mapping, Tuple
+
+from . import __version__
+from .http_exceptions import HttpProcessingError as HttpProcessingError
+from .http_parser import (
+ HeadersParser as HeadersParser,
+ HttpParser as HttpParser,
+ HttpRequestParser as HttpRequestParser,
+ HttpResponseParser as HttpResponseParser,
+ RawRequestMessage as RawRequestMessage,
+ RawResponseMessage as RawResponseMessage,
+)
+from .http_websocket import (
+ WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE,
+ WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE,
+ WS_KEY as WS_KEY,
+ WebSocketError as WebSocketError,
+ WebSocketReader as WebSocketReader,
+ WebSocketWriter as WebSocketWriter,
+ WSCloseCode as WSCloseCode,
+ WSMessage as WSMessage,
+ WSMsgType as WSMsgType,
+ ws_ext_gen as ws_ext_gen,
+ ws_ext_parse as ws_ext_parse,
+)
+from .http_writer import (
+ HttpVersion as HttpVersion,
+ HttpVersion10 as HttpVersion10,
+ HttpVersion11 as HttpVersion11,
+ StreamWriter as StreamWriter,
+)
+
__all__ = (
    "HttpProcessingError",
    "RESPONSES",
    "SERVER_SOFTWARE",
    # .http_writer
    "StreamWriter",
    "HttpVersion",
    "HttpVersion10",
    "HttpVersion11",
    # .http_parser
    "HeadersParser",
    "HttpParser",
    "HttpRequestParser",
    "HttpResponseParser",
    "RawRequestMessage",
    "RawResponseMessage",
    # .http_websocket
    "WS_CLOSED_MESSAGE",
    "WS_CLOSING_MESSAGE",
    "WS_KEY",
    "WebSocketReader",
    "WebSocketWriter",
    "ws_ext_gen",
    "ws_ext_parse",
    "WSMessage",
    "WebSocketError",
    "WSMsgType",
    "WSCloseCode",
)


# e.g. "Python/3.8 aiohttp/3.8.1" — built from the interpreter version
# and the aiohttp package version.
SERVER_SOFTWARE = "Python/{0[0]}.{0[1]} aiohttp/{1}".format(
    sys.version_info, __version__
)  # type: str

# Status-code -> (reason phrase, description), borrowed from http.server.
RESPONSES = (
    http.server.BaseHTTPRequestHandler.responses
)  # type: Mapping[int, Tuple[str, str]]
diff --git a/contrib/python/aiohttp/aiohttp/http_exceptions.py b/contrib/python/aiohttp/aiohttp/http_exceptions.py
new file mode 100644
index 0000000000..c885f80f32
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/http_exceptions.py
@@ -0,0 +1,105 @@
+"""Low-level http related exceptions."""
+
+
+from typing import Optional, Union
+
+from .typedefs import _CIMultiDict
+
+__all__ = ("HttpProcessingError",)
+
+
class HttpProcessingError(Exception):
    """HTTP error.

    Shortcut for raising HTTP errors with custom code, message and headers.

    code: HTTP Error code.
    message: (optional) Error message.
    headers: (optional) Headers to be sent in response, a list of pairs
    """

    # Class-level defaults; subclasses override these, and __init__ may
    # replace them per instance.
    code = 0
    message = ""
    headers = None

    def __init__(
        self,
        *,
        code: Optional[int] = None,
        message: str = "",
        headers: Optional[_CIMultiDict] = None,
    ) -> None:
        # Keep the subclass's class-level code when none is given.
        if code is not None:
            self.code = code
        self.headers = headers
        self.message = message

    def __str__(self) -> str:
        return f"{self.code}, message={self.message!r}"

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}: {self}>"
+
+
class BadHttpMessage(HttpProcessingError):
    """400-level protocol violation detected while processing a message."""

    code = 400
    message = "Bad Request"

    def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None:
        super().__init__(message=message, headers=headers)
        # Mirror the message into args so repr()/pickling show it.
        self.args = (message,)


class HttpBadRequest(BadHttpMessage):
    """400 Bad Request error (distinct type, same behavior as the base)."""

    code = 400
    message = "Bad Request"
+
+
class PayloadEncodingError(BadHttpMessage):
    """Base class for payload errors"""


class ContentEncodingError(PayloadEncodingError):
    """Content encoding error."""


class TransferEncodingError(PayloadEncodingError):
    """transfer encoding error."""


class ContentLengthError(PayloadEncodingError):
    """Not enough data for satisfy content length header."""


class LineTooLong(BadHttpMessage):
    """A start line or header line exceeded the configured size limit."""

    def __init__(
        self, line: str, limit: str = "Unknown", actual_size: str = "Unknown"
    ) -> None:
        super().__init__(
            f"Got more than {limit} bytes ({actual_size}) when reading {line}."
        )
        self.args = (line, limit, actual_size)
+
+
class InvalidHeader(BadHttpMessage):
    """A malformed HTTP header was encountered."""

    def __init__(self, hdr: Union[bytes, str]) -> None:
        if isinstance(hdr, bytes):
            # Decode leniently so the offending bytes survive into the message.
            hdr = hdr.decode("utf-8", "surrogateescape")
        super().__init__(f"Invalid HTTP Header: {hdr}")
        self.hdr = hdr
        self.args = (hdr,)


class BadStatusLine(BadHttpMessage):
    """The HTTP start line could not be parsed."""

    def __init__(self, line: str = "") -> None:
        if not isinstance(line, str):
            # Defensive: non-str input is shown via repr().
            line = repr(line)
        super().__init__(f"Bad status line {line!r}")
        self.args = (line,)
        self.line = line


class InvalidURLError(BadHttpMessage):
    """Malformed URL / request-target error."""

    pass
diff --git a/contrib/python/aiohttp/aiohttp/http_parser.py b/contrib/python/aiohttp/aiohttp/http_parser.py
new file mode 100644
index 0000000000..2dc9482f4f
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/http_parser.py
@@ -0,0 +1,956 @@
+import abc
+import asyncio
+import collections
+import re
+import string
+import zlib
+from contextlib import suppress
+from enum import IntEnum
+from typing import (
+ Any,
+ Generic,
+ List,
+ NamedTuple,
+ Optional,
+ Pattern,
+ Set,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ cast,
+)
+
+from multidict import CIMultiDict, CIMultiDictProxy, istr
+from yarl import URL
+
+from . import hdrs
+from .base_protocol import BaseProtocol
+from .helpers import NO_EXTENSIONS, BaseTimerContext
+from .http_exceptions import (
+ BadHttpMessage,
+ BadStatusLine,
+ ContentEncodingError,
+ ContentLengthError,
+ InvalidHeader,
+ LineTooLong,
+ TransferEncodingError,
+)
+from .http_writer import HttpVersion, HttpVersion10
+from .log import internal_logger
+from .streams import EMPTY_PAYLOAD, StreamReader
+from .typedefs import Final, RawHeaders
+
+try:
+ import brotli
+
+ HAS_BROTLI = True
+except ImportError: # pragma: no cover
+ HAS_BROTLI = False
+
+
+__all__ = (
+ "HeadersParser",
+ "HttpParser",
+ "HttpRequestParser",
+ "HttpResponseParser",
+ "RawRequestMessage",
+ "RawResponseMessage",
+)
+
+ASCIISET: Final[Set[str]] = set(string.printable)
+
+# See https://tools.ietf.org/html/rfc7230#section-3.1.1
+# and https://tools.ietf.org/html/rfc7230#appendix-B
+#
+# method = token
+# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
+# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
+# token = 1*tchar
+METHRE: Final[Pattern[str]] = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+")
+VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d+).(\d+)")
+HDRRE: Final[Pattern[bytes]] = re.compile(rb"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]")
+
+
+class RawRequestMessage(NamedTuple):
+ method: str
+ path: str
+ version: HttpVersion
+ headers: "CIMultiDictProxy[str]"
+ raw_headers: RawHeaders
+ should_close: bool
+ compression: Optional[str]
+ upgrade: bool
+ chunked: bool
+ url: URL
+
+
+RawResponseMessage = collections.namedtuple(
+ "RawResponseMessage",
+ [
+ "version",
+ "code",
+ "reason",
+ "headers",
+ "raw_headers",
+ "should_close",
+ "compression",
+ "upgrade",
+ "chunked",
+ ],
+)
+
+
+_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage)
+
+
+class ParseState(IntEnum):
+
+ PARSE_NONE = 0
+ PARSE_LENGTH = 1
+ PARSE_CHUNKED = 2
+ PARSE_UNTIL_EOF = 3
+
+
+class ChunkState(IntEnum):
+ PARSE_CHUNKED_SIZE = 0
+ PARSE_CHUNKED_CHUNK = 1
+ PARSE_CHUNKED_CHUNK_EOF = 2
+ PARSE_MAYBE_TRAILERS = 3
+ PARSE_TRAILERS = 4
+
+
+class HeadersParser:
+ def __init__(
+ self,
+ max_line_size: int = 8190,
+ max_headers: int = 32768,
+ max_field_size: int = 8190,
+ ) -> None:
+ self.max_line_size = max_line_size
+ self.max_headers = max_headers
+ self.max_field_size = max_field_size
+
+ def parse_headers(
+ self, lines: List[bytes]
+ ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]:
+ headers = CIMultiDict() # type: CIMultiDict[str]
+ raw_headers = []
+
+ lines_idx = 1
+ line = lines[1]
+ line_count = len(lines)
+
+ while line:
+ # Parse initial header name : value pair.
+ try:
+ bname, bvalue = line.split(b":", 1)
+ except ValueError:
+ raise InvalidHeader(line) from None
+
+ bname = bname.strip(b" \t")
+ bvalue = bvalue.lstrip()
+ if HDRRE.search(bname):
+ raise InvalidHeader(bname)
+ if len(bname) > self.max_field_size:
+ raise LineTooLong(
+ "request header name {}".format(
+ bname.decode("utf8", "xmlcharrefreplace")
+ ),
+ str(self.max_field_size),
+ str(len(bname)),
+ )
+
+ header_length = len(bvalue)
+
+ # next line
+ lines_idx += 1
+ line = lines[lines_idx]
+
+ # consume continuation lines
+ continuation = line and line[0] in (32, 9) # (' ', '\t')
+
+ if continuation:
+ bvalue_lst = [bvalue]
+ while continuation:
+ header_length += len(line)
+ if header_length > self.max_field_size:
+ raise LineTooLong(
+ "request header field {}".format(
+ bname.decode("utf8", "xmlcharrefreplace")
+ ),
+ str(self.max_field_size),
+ str(header_length),
+ )
+ bvalue_lst.append(line)
+
+ # next line
+ lines_idx += 1
+ if lines_idx < line_count:
+ line = lines[lines_idx]
+ if line:
+ continuation = line[0] in (32, 9) # (' ', '\t')
+ else:
+ line = b""
+ break
+ bvalue = b"".join(bvalue_lst)
+ else:
+ if header_length > self.max_field_size:
+ raise LineTooLong(
+ "request header field {}".format(
+ bname.decode("utf8", "xmlcharrefreplace")
+ ),
+ str(self.max_field_size),
+ str(header_length),
+ )
+
+ bvalue = bvalue.strip()
+ name = bname.decode("utf-8", "surrogateescape")
+ value = bvalue.decode("utf-8", "surrogateescape")
+
+ headers.add(name, value)
+ raw_headers.append((bname, bvalue))
+
+ return (CIMultiDictProxy(headers), tuple(raw_headers))
+
+
+class HttpParser(abc.ABC, Generic[_MsgT]):
+ def __init__(
+ self,
+ protocol: Optional[BaseProtocol] = None,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ limit: int = 2 ** 16,
+ max_line_size: int = 8190,
+ max_headers: int = 32768,
+ max_field_size: int = 8190,
+ timer: Optional[BaseTimerContext] = None,
+ code: Optional[int] = None,
+ method: Optional[str] = None,
+ readall: bool = False,
+ payload_exception: Optional[Type[BaseException]] = None,
+ response_with_body: bool = True,
+ read_until_eof: bool = False,
+ auto_decompress: bool = True,
+ ) -> None:
+ self.protocol = protocol
+ self.loop = loop
+ self.max_line_size = max_line_size
+ self.max_headers = max_headers
+ self.max_field_size = max_field_size
+ self.timer = timer
+ self.code = code
+ self.method = method
+ self.readall = readall
+ self.payload_exception = payload_exception
+ self.response_with_body = response_with_body
+ self.read_until_eof = read_until_eof
+
+ self._lines = [] # type: List[bytes]
+ self._tail = b""
+ self._upgraded = False
+ self._payload = None
+ self._payload_parser = None # type: Optional[HttpPayloadParser]
+ self._auto_decompress = auto_decompress
+ self._limit = limit
+ self._headers_parser = HeadersParser(max_line_size, max_headers, max_field_size)
+
+ @abc.abstractmethod
+ def parse_message(self, lines: List[bytes]) -> _MsgT:
+ pass
+
+ def feed_eof(self) -> Optional[_MsgT]:
+ if self._payload_parser is not None:
+ self._payload_parser.feed_eof()
+ self._payload_parser = None
+ else:
+ # try to extract partial message
+ if self._tail:
+ self._lines.append(self._tail)
+
+ if self._lines:
+ if self._lines[-1] != "\r\n":
+ self._lines.append(b"")
+ with suppress(Exception):
+ return self.parse_message(self._lines)
+ return None
+
+ def feed_data(
+ self,
+ data: bytes,
+ SEP: bytes = b"\r\n",
+ EMPTY: bytes = b"",
+ CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
+ METH_CONNECT: str = hdrs.METH_CONNECT,
+ SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1,
+ ) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]:
+
+ messages = []
+
+ if self._tail:
+ data, self._tail = self._tail + data, b""
+
+ data_len = len(data)
+ start_pos = 0
+ loop = self.loop
+
+ while start_pos < data_len:
+
+ # read HTTP message (request/response line + headers), \r\n\r\n
+ # and split by lines
+ if self._payload_parser is None and not self._upgraded:
+ pos = data.find(SEP, start_pos)
+ # consume \r\n
+ if pos == start_pos and not self._lines:
+ start_pos = pos + 2
+ continue
+
+ if pos >= start_pos:
+ # line found
+ self._lines.append(data[start_pos:pos])
+ start_pos = pos + 2
+
+ # \r\n\r\n found
+ if self._lines[-1] == EMPTY:
+ try:
+ msg: _MsgT = self.parse_message(self._lines)
+ finally:
+ self._lines.clear()
+
+ def get_content_length() -> Optional[int]:
+ # payload length
+ length_hdr = msg.headers.get(CONTENT_LENGTH)
+ if length_hdr is None:
+ return None
+
+ try:
+ length = int(length_hdr)
+ except ValueError:
+ raise InvalidHeader(CONTENT_LENGTH)
+
+ if length < 0:
+ raise InvalidHeader(CONTENT_LENGTH)
+
+ return length
+
+ length = get_content_length()
+ # do not support old websocket spec
+ if SEC_WEBSOCKET_KEY1 in msg.headers:
+ raise InvalidHeader(SEC_WEBSOCKET_KEY1)
+
+ self._upgraded = msg.upgrade
+
+ method = getattr(msg, "method", self.method)
+
+ assert self.protocol is not None
+ # calculate payload
+ if (
+ (length is not None and length > 0)
+ or msg.chunked
+ and not msg.upgrade
+ ):
+ payload = StreamReader(
+ self.protocol,
+ timer=self.timer,
+ loop=loop,
+ limit=self._limit,
+ )
+ payload_parser = HttpPayloadParser(
+ payload,
+ length=length,
+ chunked=msg.chunked,
+ method=method,
+ compression=msg.compression,
+ code=self.code,
+ readall=self.readall,
+ response_with_body=self.response_with_body,
+ auto_decompress=self._auto_decompress,
+ )
+ if not payload_parser.done:
+ self._payload_parser = payload_parser
+ elif method == METH_CONNECT:
+ assert isinstance(msg, RawRequestMessage)
+ payload = StreamReader(
+ self.protocol,
+ timer=self.timer,
+ loop=loop,
+ limit=self._limit,
+ )
+ self._upgraded = True
+ self._payload_parser = HttpPayloadParser(
+ payload,
+ method=msg.method,
+ compression=msg.compression,
+ readall=True,
+ auto_decompress=self._auto_decompress,
+ )
+ else:
+ if (
+ getattr(msg, "code", 100) >= 199
+ and length is None
+ and self.read_until_eof
+ ):
+ payload = StreamReader(
+ self.protocol,
+ timer=self.timer,
+ loop=loop,
+ limit=self._limit,
+ )
+ payload_parser = HttpPayloadParser(
+ payload,
+ length=length,
+ chunked=msg.chunked,
+ method=method,
+ compression=msg.compression,
+ code=self.code,
+ readall=True,
+ response_with_body=self.response_with_body,
+ auto_decompress=self._auto_decompress,
+ )
+ if not payload_parser.done:
+ self._payload_parser = payload_parser
+ else:
+ payload = EMPTY_PAYLOAD
+
+ messages.append((msg, payload))
+ else:
+ self._tail = data[start_pos:]
+ data = EMPTY
+ break
+
+ # no parser, just store
+ elif self._payload_parser is None and self._upgraded:
+ assert not self._lines
+ break
+
+ # feed payload
+ elif data and start_pos < data_len:
+ assert not self._lines
+ assert self._payload_parser is not None
+ try:
+ eof, data = self._payload_parser.feed_data(data[start_pos:])
+ except BaseException as exc:
+ if self.payload_exception is not None:
+ self._payload_parser.payload.set_exception(
+ self.payload_exception(str(exc))
+ )
+ else:
+ self._payload_parser.payload.set_exception(exc)
+
+ eof = True
+ data = b""
+
+ if eof:
+ start_pos = 0
+ data_len = len(data)
+ self._payload_parser = None
+ continue
+ else:
+ break
+
+ if data and start_pos < data_len:
+ data = data[start_pos:]
+ else:
+ data = EMPTY
+
+ return messages, self._upgraded, data
+
+ def parse_headers(
+ self, lines: List[bytes]
+ ) -> Tuple[
+ "CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool
+ ]:
+ """Parses RFC 5322 headers from a stream.
+
+ Line continuations are supported. Returns list of header name
+ and value pairs. Header name is in upper case.
+ """
+ headers, raw_headers = self._headers_parser.parse_headers(lines)
+ close_conn = None
+ encoding = None
+ upgrade = False
+ chunked = False
+
+ # keep-alive
+ conn = headers.get(hdrs.CONNECTION)
+ if conn:
+ v = conn.lower()
+ if v == "close":
+ close_conn = True
+ elif v == "keep-alive":
+ close_conn = False
+ elif v == "upgrade":
+ upgrade = True
+
+ # encoding
+ enc = headers.get(hdrs.CONTENT_ENCODING)
+ if enc:
+ enc = enc.lower()
+ if enc in ("gzip", "deflate", "br"):
+ encoding = enc
+
+ # chunking
+ te = headers.get(hdrs.TRANSFER_ENCODING)
+ if te is not None:
+ if "chunked" == te.lower():
+ chunked = True
+ else:
+ raise BadHttpMessage("Request has invalid `Transfer-Encoding`")
+
+ if hdrs.CONTENT_LENGTH in headers:
+ raise BadHttpMessage(
+ "Content-Length can't be present with Transfer-Encoding",
+ )
+
+ return (headers, raw_headers, close_conn, encoding, upgrade, chunked)
+
+ def set_upgraded(self, val: bool) -> None:
+ """Set connection upgraded (to websocket) mode.
+
+ :param bool val: new state.
+ """
+ self._upgraded = val
+
+
+class HttpRequestParser(HttpParser[RawRequestMessage]):
+ """Read request status line.
+
+ Exception .http_exceptions.BadStatusLine
+ could be raised in case of any errors in status line.
+ Returns RawRequestMessage.
+ """
+
+ def parse_message(self, lines: List[bytes]) -> RawRequestMessage:
+ # request line
+ line = lines[0].decode("utf-8", "surrogateescape")
+ try:
+ method, path, version = line.split(None, 2)
+ except ValueError:
+ raise BadStatusLine(line) from None
+
+ if len(path) > self.max_line_size:
+ raise LineTooLong(
+ "Status line is too long", str(self.max_line_size), str(len(path))
+ )
+
+ path_part, _hash_separator, url_fragment = path.partition("#")
+ path_part, _question_mark_separator, qs_part = path_part.partition("?")
+
+ # method
+ if not METHRE.match(method):
+ raise BadStatusLine(method)
+
+ # version
+ try:
+ if version.startswith("HTTP/"):
+ n1, n2 = version[5:].split(".", 1)
+ version_o = HttpVersion(int(n1), int(n2))
+ else:
+ raise BadStatusLine(version)
+ except Exception:
+ raise BadStatusLine(version)
+
+ # read headers
+ (
+ headers,
+ raw_headers,
+ close,
+ compression,
+ upgrade,
+ chunked,
+ ) = self.parse_headers(lines)
+
+ if close is None: # then the headers weren't set in the request
+ if version_o <= HttpVersion10: # HTTP 1.0 must ask to not close
+ close = True
+ else: # HTTP 1.1 must ask to close.
+ close = False
+
+ return RawRequestMessage(
+ method,
+ path,
+ version_o,
+ headers,
+ raw_headers,
+ close,
+ compression,
+ upgrade,
+ chunked,
+ # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
+ # NOTE: parser does, otherwise it results into the same
+ # NOTE: HTTP Request-Line input producing different
+ # NOTE: `yarl.URL()` objects
+ URL.build(
+ path=path_part,
+ query_string=qs_part,
+ fragment=url_fragment,
+ encoded=True,
+ ),
+ )
+
+
+class HttpResponseParser(HttpParser[RawResponseMessage]):
+ """Read response status line and headers.
+
+ BadStatusLine could be raised in case of any errors in status line.
+ Returns RawResponseMessage.
+ """
+
+ def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
+ line = lines[0].decode("utf-8", "surrogateescape")
+ try:
+ version, status = line.split(None, 1)
+ except ValueError:
+ raise BadStatusLine(line) from None
+
+ try:
+ status, reason = status.split(None, 1)
+ except ValueError:
+ reason = ""
+
+ if len(reason) > self.max_line_size:
+ raise LineTooLong(
+ "Status line is too long", str(self.max_line_size), str(len(reason))
+ )
+
+ # version
+ match = VERSRE.match(version)
+ if match is None:
+ raise BadStatusLine(line)
+ version_o = HttpVersion(int(match.group(1)), int(match.group(2)))
+
+ # The status code is a three-digit number
+ try:
+ status_i = int(status)
+ except ValueError:
+ raise BadStatusLine(line) from None
+
+ if status_i > 999:
+ raise BadStatusLine(line)
+
+ # read headers
+ (
+ headers,
+ raw_headers,
+ close,
+ compression,
+ upgrade,
+ chunked,
+ ) = self.parse_headers(lines)
+
+ if close is None:
+ close = version_o <= HttpVersion10
+
+ return RawResponseMessage(
+ version_o,
+ status_i,
+ reason.strip(),
+ headers,
+ raw_headers,
+ close,
+ compression,
+ upgrade,
+ chunked,
+ )
+
+
+class HttpPayloadParser:
+ def __init__(
+ self,
+ payload: StreamReader,
+ length: Optional[int] = None,
+ chunked: bool = False,
+ compression: Optional[str] = None,
+ code: Optional[int] = None,
+ method: Optional[str] = None,
+ readall: bool = False,
+ response_with_body: bool = True,
+ auto_decompress: bool = True,
+ ) -> None:
+ self._length = 0
+ self._type = ParseState.PARSE_NONE
+ self._chunk = ChunkState.PARSE_CHUNKED_SIZE
+ self._chunk_size = 0
+ self._chunk_tail = b""
+ self._auto_decompress = auto_decompress
+ self.done = False
+
+ # payload decompression wrapper
+ if response_with_body and compression and self._auto_decompress:
+ real_payload = DeflateBuffer(
+ payload, compression
+ ) # type: Union[StreamReader, DeflateBuffer]
+ else:
+ real_payload = payload
+
+ # payload parser
+ if not response_with_body:
+ # don't parse payload if it's not expected to be received
+ self._type = ParseState.PARSE_NONE
+ real_payload.feed_eof()
+ self.done = True
+
+ elif chunked:
+ self._type = ParseState.PARSE_CHUNKED
+ elif length is not None:
+ self._type = ParseState.PARSE_LENGTH
+ self._length = length
+ if self._length == 0:
+ real_payload.feed_eof()
+ self.done = True
+ else:
+ if readall and code != 204:
+ self._type = ParseState.PARSE_UNTIL_EOF
+ elif method in ("PUT", "POST"):
+ internal_logger.warning( # pragma: no cover
+ "Content-Length or Transfer-Encoding header is required"
+ )
+ self._type = ParseState.PARSE_NONE
+ real_payload.feed_eof()
+ self.done = True
+
+ self.payload = real_payload
+
+ def feed_eof(self) -> None:
+ if self._type == ParseState.PARSE_UNTIL_EOF:
+ self.payload.feed_eof()
+ elif self._type == ParseState.PARSE_LENGTH:
+ raise ContentLengthError(
+ "Not enough data for satisfy content length header."
+ )
+ elif self._type == ParseState.PARSE_CHUNKED:
+ raise TransferEncodingError(
+ "Not enough data for satisfy transfer length header."
+ )
+
+ def feed_data(
+ self, chunk: bytes, SEP: bytes = b"\r\n", CHUNK_EXT: bytes = b";"
+ ) -> Tuple[bool, bytes]:
+ # Read specified amount of bytes
+ if self._type == ParseState.PARSE_LENGTH:
+ required = self._length
+ chunk_len = len(chunk)
+
+ if required >= chunk_len:
+ self._length = required - chunk_len
+ self.payload.feed_data(chunk, chunk_len)
+ if self._length == 0:
+ self.payload.feed_eof()
+ return True, b""
+ else:
+ self._length = 0
+ self.payload.feed_data(chunk[:required], required)
+ self.payload.feed_eof()
+ return True, chunk[required:]
+
+ # Chunked transfer encoding parser
+ elif self._type == ParseState.PARSE_CHUNKED:
+ if self._chunk_tail:
+ chunk = self._chunk_tail + chunk
+ self._chunk_tail = b""
+
+ while chunk:
+
+ # read next chunk size
+ if self._chunk == ChunkState.PARSE_CHUNKED_SIZE:
+ pos = chunk.find(SEP)
+ if pos >= 0:
+ i = chunk.find(CHUNK_EXT, 0, pos)
+ if i >= 0:
+ size_b = chunk[:i] # strip chunk-extensions
+ else:
+ size_b = chunk[:pos]
+
+ try:
+ size = int(bytes(size_b), 16)
+ except ValueError:
+ exc = TransferEncodingError(
+ chunk[:pos].decode("ascii", "surrogateescape")
+ )
+ self.payload.set_exception(exc)
+ raise exc from None
+
+ chunk = chunk[pos + 2 :]
+ if size == 0: # eof marker
+ self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
+ else:
+ self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
+ self._chunk_size = size
+ self.payload.begin_http_chunk_receiving()
+ else:
+ self._chunk_tail = chunk
+ return False, b""
+
+ # read chunk and feed buffer
+ if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK:
+ required = self._chunk_size
+ chunk_len = len(chunk)
+
+ if required > chunk_len:
+ self._chunk_size = required - chunk_len
+ self.payload.feed_data(chunk, chunk_len)
+ return False, b""
+ else:
+ self._chunk_size = 0
+ self.payload.feed_data(chunk[:required], required)
+ chunk = chunk[required:]
+ self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF
+ self.payload.end_http_chunk_receiving()
+
+ # toss the CRLF at the end of the chunk
+ if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF:
+ if chunk[:2] == SEP:
+ chunk = chunk[2:]
+ self._chunk = ChunkState.PARSE_CHUNKED_SIZE
+ else:
+ self._chunk_tail = chunk
+ return False, b""
+
+ # if the stream does not contain a trailer, after 0\r\n
+ # we should get another \r\n, otherwise
+ # trailers need to be skipped until \r\n\r\n
+ if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
+ head = chunk[:2]
+ if head == SEP:
+ # end of stream
+ self.payload.feed_eof()
+ return True, chunk[2:]
+ # Both CR and LF, or only LF may not be received yet. It is
+ # expected that CRLF or LF will be shown at the very first
+ # byte next time, otherwise trailers should come. The last
+ # CRLF which marks the end of response might not be
+ # contained in the same TCP segment which delivered the
+ # size indicator.
+ if not head:
+ return False, b""
+ if head == SEP[:1]:
+ self._chunk_tail = head
+ return False, b""
+ self._chunk = ChunkState.PARSE_TRAILERS
+
+ # read and discard trailer up to the CRLF terminator
+ if self._chunk == ChunkState.PARSE_TRAILERS:
+ pos = chunk.find(SEP)
+ if pos >= 0:
+ chunk = chunk[pos + 2 :]
+ self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
+ else:
+ self._chunk_tail = chunk
+ return False, b""
+
+ # Read all bytes until eof
+ elif self._type == ParseState.PARSE_UNTIL_EOF:
+ self.payload.feed_data(chunk, len(chunk))
+
+ return False, b""
+
+
+class DeflateBuffer:
+ """DeflateStream decompress stream and feed data into specified stream."""
+
+ decompressor: Any
+
+ def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
+ self.out = out
+ self.size = 0
+ self.encoding = encoding
+ self._started_decoding = False
+
+ if encoding == "br":
+ if not HAS_BROTLI: # pragma: no cover
+ raise ContentEncodingError(
+ "Can not decode content-encoding: brotli (br). "
+ "Please install `Brotli`"
+ )
+
+ class BrotliDecoder:
+ # Supports both 'brotlipy' and 'Brotli' packages
+ # since they share an import name. The top branches
+ # are for 'brotlipy' and bottom branches for 'Brotli'
+ def __init__(self) -> None:
+ self._obj = brotli.Decompressor()
+
+ def decompress(self, data: bytes) -> bytes:
+ if hasattr(self._obj, "decompress"):
+ return cast(bytes, self._obj.decompress(data))
+ return cast(bytes, self._obj.process(data))
+
+ def flush(self) -> bytes:
+ if hasattr(self._obj, "flush"):
+ return cast(bytes, self._obj.flush())
+ return b""
+
+ self.decompressor = BrotliDecoder()
+ else:
+ zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
+ self.decompressor = zlib.decompressobj(wbits=zlib_mode)
+
+ def set_exception(self, exc: BaseException) -> None:
+ self.out.set_exception(exc)
+
+ def feed_data(self, chunk: bytes, size: int) -> None:
+ if not size:
+ return
+
+ self.size += size
+
+ # RFC1950
+ # bits 0..3 = CM = 0b1000 = 8 = "deflate"
+ # bits 4..7 = CINFO = 1..7 = window size.
+ if (
+ not self._started_decoding
+ and self.encoding == "deflate"
+ and chunk[0] & 0xF != 8
+ ):
+ # Change the decoder to decompress incorrectly compressed data
+ # Actually we should issue a warning about non-RFC-compliant data.
+ self.decompressor = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
+
+ try:
+ chunk = self.decompressor.decompress(chunk)
+ except Exception:
+ raise ContentEncodingError(
+ "Can not decode content-encoding: %s" % self.encoding
+ )
+
+ self._started_decoding = True
+
+ if chunk:
+ self.out.feed_data(chunk, len(chunk))
+
+ def feed_eof(self) -> None:
+ chunk = self.decompressor.flush()
+
+ if chunk or self.size > 0:
+ self.out.feed_data(chunk, len(chunk))
+ if self.encoding == "deflate" and not self.decompressor.eof:
+ raise ContentEncodingError("deflate")
+
+ self.out.feed_eof()
+
+ def begin_http_chunk_receiving(self) -> None:
+ self.out.begin_http_chunk_receiving()
+
+ def end_http_chunk_receiving(self) -> None:
+ self.out.end_http_chunk_receiving()
+
+
+HttpRequestParserPy = HttpRequestParser
+HttpResponseParserPy = HttpResponseParser
+RawRequestMessagePy = RawRequestMessage
+RawResponseMessagePy = RawResponseMessage
+
+try:
+ if not NO_EXTENSIONS:
+ from ._http_parser import ( # type: ignore[import,no-redef]
+ HttpRequestParser,
+ HttpResponseParser,
+ RawRequestMessage,
+ RawResponseMessage,
+ )
+
+ HttpRequestParserC = HttpRequestParser
+ HttpResponseParserC = HttpResponseParser
+ RawRequestMessageC = RawRequestMessage
+ RawResponseMessageC = RawResponseMessage
+except ImportError: # pragma: no cover
+ pass
diff --git a/contrib/python/aiohttp/aiohttp/http_websocket.py b/contrib/python/aiohttp/aiohttp/http_websocket.py
new file mode 100644
index 0000000000..991a149d09
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/http_websocket.py
@@ -0,0 +1,701 @@
+"""WebSocket protocol versions 13 and 8."""
+
+import asyncio
+import collections
+import json
+import random
+import re
+import sys
+import zlib
+from enum import IntEnum
+from struct import Struct
+from typing import Any, Callable, List, Optional, Pattern, Set, Tuple, Union, cast
+
+from .base_protocol import BaseProtocol
+from .helpers import NO_EXTENSIONS
+from .streams import DataQueue
+from .typedefs import Final
+
+__all__ = (
+ "WS_CLOSED_MESSAGE",
+ "WS_CLOSING_MESSAGE",
+ "WS_KEY",
+ "WebSocketReader",
+ "WebSocketWriter",
+ "WSMessage",
+ "WebSocketError",
+ "WSMsgType",
+ "WSCloseCode",
+)
+
+
+class WSCloseCode(IntEnum):
+ OK = 1000
+ GOING_AWAY = 1001
+ PROTOCOL_ERROR = 1002
+ UNSUPPORTED_DATA = 1003
+ ABNORMAL_CLOSURE = 1006
+ INVALID_TEXT = 1007
+ POLICY_VIOLATION = 1008
+ MESSAGE_TOO_BIG = 1009
+ MANDATORY_EXTENSION = 1010
+ INTERNAL_ERROR = 1011
+ SERVICE_RESTART = 1012
+ TRY_AGAIN_LATER = 1013
+ BAD_GATEWAY = 1014
+
+
+ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode}
+
+
+class WSMsgType(IntEnum):
+ # websocket spec types
+ CONTINUATION = 0x0
+ TEXT = 0x1
+ BINARY = 0x2
+ PING = 0x9
+ PONG = 0xA
+ CLOSE = 0x8
+
+ # aiohttp specific types
+ CLOSING = 0x100
+ CLOSED = 0x101
+ ERROR = 0x102
+
+ text = TEXT
+ binary = BINARY
+ ping = PING
+ pong = PONG
+ close = CLOSE
+ closing = CLOSING
+ closed = CLOSED
+ error = ERROR
+
+
+WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
+
+
+UNPACK_LEN2 = Struct("!H").unpack_from
+UNPACK_LEN3 = Struct("!Q").unpack_from
+UNPACK_CLOSE_CODE = Struct("!H").unpack
+PACK_LEN1 = Struct("!BB").pack
+PACK_LEN2 = Struct("!BBH").pack
+PACK_LEN3 = Struct("!BBQ").pack
+PACK_CLOSE_CODE = Struct("!H").pack
+MSG_SIZE: Final[int] = 2 ** 14
+DEFAULT_LIMIT: Final[int] = 2 ** 16
+
+
+_WSMessageBase = collections.namedtuple("_WSMessageBase", ["type", "data", "extra"])
+
+
+class WSMessage(_WSMessageBase):
+ def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any:
+ """Return parsed JSON data.
+
+ .. versionadded:: 0.22
+ """
+ return loads(self.data)
+
+
+WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None)
+WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None)
+
+
+class WebSocketError(Exception):
+ """WebSocket protocol parser error."""
+
+ def __init__(self, code: int, message: str) -> None:
+ self.code = code
+ super().__init__(code, message)
+
+ def __str__(self) -> str:
+ return cast(str, self.args[1])
+
+
+class WSHandshakeError(Exception):
+ """WebSocket protocol handshake error."""
+
+
+native_byteorder: Final[str] = sys.byteorder
+
+
+# Used by _websocket_mask_python
+_XOR_TABLE: Final[List[bytes]] = [bytes(a ^ b for a in range(256)) for b in range(256)]
+
+
+def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
+ """Websocket masking function.
+
+ `mask` is a `bytes` object of length 4; `data` is a `bytearray`
+ object of any length. The contents of `data` are masked with `mask`,
+ as specified in section 5.3 of RFC 6455.
+
+ Note that this function mutates the `data` argument.
+
+ This pure-python implementation may be replaced by an optimized
+ version when available.
+
+ """
+ assert isinstance(data, bytearray), data
+ assert len(mask) == 4, mask
+
+ if data:
+ a, b, c, d = (_XOR_TABLE[n] for n in mask)
+ data[::4] = data[::4].translate(a)
+ data[1::4] = data[1::4].translate(b)
+ data[2::4] = data[2::4].translate(c)
+ data[3::4] = data[3::4].translate(d)
+
+
+if NO_EXTENSIONS: # pragma: no cover
+ _websocket_mask = _websocket_mask_python
+else:
+ try:
+ from ._websocket import _websocket_mask_cython # type: ignore[import]
+
+ _websocket_mask = _websocket_mask_cython
+ except ImportError: # pragma: no cover
+ _websocket_mask = _websocket_mask_python
+
+_WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF])
+
+
+_WS_EXT_RE: Final[Pattern[str]] = re.compile(
+ r"^(?:;\s*(?:"
+ r"(server_no_context_takeover)|"
+ r"(client_no_context_takeover)|"
+ r"(server_max_window_bits(?:=(\d+))?)|"
+ r"(client_max_window_bits(?:=(\d+))?)))*$"
+)
+
+_WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?")
+
+
+def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]:
+ if not extstr:
+ return 0, False
+
+ compress = 0
+ notakeover = False
+ for ext in _WS_EXT_RE_SPLIT.finditer(extstr):
+ defext = ext.group(1)
+ # Return compress = 15 when we get `permessage-deflate`
+ if not defext:
+ compress = 15
+ break
+ match = _WS_EXT_RE.match(defext)
+ if match:
+ compress = 15
+ if isserver:
+ # Server never fails to detect the compress handshake.
+ # Server does not need to send max wbits to the client
+ if match.group(4):
+ compress = int(match.group(4))
+ # Group3 must match if group4 matches
+ # Compress wbits 8 is not supported in zlib
+ # If the compress level is not supported,
+ # CONTINUE to next extension
+ if compress > 15 or compress < 9:
+ compress = 0
+ continue
+ if match.group(1):
+ notakeover = True
+ # Ignore regex group 5 & 6 for client_max_window_bits
+ break
+ else:
+ if match.group(6):
+ compress = int(match.group(6))
+ # Group5 must match if group6 matches
+ # Compress wbits 8 is not supported in zlib
+ # If the compress level is not supported,
+ # FAIL the parse progress
+ if compress > 15 or compress < 9:
+ raise WSHandshakeError("Invalid window size")
+ if match.group(2):
+ notakeover = True
+ # Ignore regex group 5 & 6 for client_max_window_bits
+ break
+ # Return Fail if client side and not match
+ elif not isserver:
+ raise WSHandshakeError("Extension for deflate not supported" + ext.group(1))
+
+ return compress, notakeover
+
+
+def ws_ext_gen(
+ compress: int = 15, isserver: bool = False, server_notakeover: bool = False
+) -> str:
+ # client_notakeover=False not used for server
+ # compress wbits 8 is not supported in zlib
+ if compress < 9 or compress > 15:
+ raise ValueError(
+ "Compress wbits must between 9 and 15, " "zlib does not support wbits=8"
+ )
+ enabledext = ["permessage-deflate"]
+ if not isserver:
+ enabledext.append("client_max_window_bits")
+
+ if compress < 15:
+ enabledext.append("server_max_window_bits=" + str(compress))
+ if server_notakeover:
+ enabledext.append("server_no_context_takeover")
+ # if client_notakeover:
+ # enabledext.append('client_no_context_takeover')
+ return "; ".join(enabledext)
+
+
+class WSParserState(IntEnum):
+ READ_HEADER = 1
+ READ_PAYLOAD_LENGTH = 2
+ READ_PAYLOAD_MASK = 3
+ READ_PAYLOAD = 4
+
+
+class WebSocketReader:
+ def __init__(
+ self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True
+ ) -> None:
+ self.queue = queue
+ self._max_msg_size = max_msg_size
+
+ self._exc = None # type: Optional[BaseException]
+ self._partial = bytearray()
+ self._state = WSParserState.READ_HEADER
+
+ self._opcode = None # type: Optional[int]
+ self._frame_fin = False
+ self._frame_opcode = None # type: Optional[int]
+ self._frame_payload = bytearray()
+
+ self._tail = b""
+ self._has_mask = False
+ self._frame_mask = None # type: Optional[bytes]
+ self._payload_length = 0
+ self._payload_length_flag = 0
+ self._compressed = None # type: Optional[bool]
+ self._decompressobj = None # type: Any # zlib.decompressobj actually
+ self._compress = compress
+
+ def feed_eof(self) -> None:
+ self.queue.feed_eof()
+
+ def feed_data(self, data: bytes) -> Tuple[bool, bytes]:
+ if self._exc:
+ return True, data
+
+ try:
+ return self._feed_data(data)
+ except Exception as exc:
+ self._exc = exc
+ self.queue.set_exception(exc)
+ return True, b""
+
    def _feed_data(self, data: bytes) -> Tuple[bool, bytes]:
        """Parse ``data`` into frames and push complete messages onto the queue.

        Always returns ``(False, b"")``: unconsumed bytes are buffered
        inside ``parse_frame`` (``self._tail``), not returned here.
        """
        for fin, opcode, payload, compressed in self.parse_frame(data):
            # Lazily create the shared decompressor on the first compressed frame.
            if compressed and not self._decompressobj:
                self._decompressobj = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
            if opcode == WSMsgType.CLOSE:
                if len(payload) >= 2:
                    close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
                    # Codes below 3000 must come from the allowed/registered set.
                    if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            f"Invalid close code: {close_code}",
                        )
                    try:
                        close_message = payload[2:].decode("utf-8")
                    except UnicodeDecodeError as exc:
                        raise WebSocketError(
                            WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
                        ) from exc
                    msg = WSMessage(WSMsgType.CLOSE, close_code, close_message)
                elif payload:
                    # A 1-byte close payload is malformed: the code needs 2 bytes.
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        f"Invalid close frame: {fin} {opcode} {payload!r}",
                    )
                else:
                    msg = WSMessage(WSMsgType.CLOSE, 0, "")

                self.queue.feed_data(msg, 0)

            elif opcode == WSMsgType.PING:
                self.queue.feed_data(
                    WSMessage(WSMsgType.PING, payload, ""), len(payload)
                )

            elif opcode == WSMsgType.PONG:
                self.queue.feed_data(
                    WSMessage(WSMsgType.PONG, payload, ""), len(payload)
                )

            elif (
                opcode not in (WSMsgType.TEXT, WSMsgType.BINARY)
                and self._opcode is None
            ):
                raise WebSocketError(
                    WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"
                )
            else:
                # load text/binary
                if not fin:
                    # got partial frame payload
                    if opcode != WSMsgType.CONTINUATION:
                        self._opcode = opcode
                    self._partial.extend(payload)
                    if self._max_msg_size and len(self._partial) >= self._max_msg_size:
                        raise WebSocketError(
                            WSCloseCode.MESSAGE_TOO_BIG,
                            "Message size {} exceeds limit {}".format(
                                len(self._partial), self._max_msg_size
                            ),
                        )
                else:
                    # previous frame was non finished
                    # we should get continuation opcode
                    if self._partial:
                        if opcode != WSMsgType.CONTINUATION:
                            raise WebSocketError(
                                WSCloseCode.PROTOCOL_ERROR,
                                "The opcode in non-fin frame is expected "
                                "to be zero, got {!r}".format(opcode),
                            )

                    if opcode == WSMsgType.CONTINUATION:
                        assert self._opcode is not None
                        opcode = self._opcode
                        self._opcode = None

                    self._partial.extend(payload)
                    if self._max_msg_size and len(self._partial) >= self._max_msg_size:
                        raise WebSocketError(
                            WSCloseCode.MESSAGE_TOO_BIG,
                            "Message size {} exceeds limit {}".format(
                                len(self._partial), self._max_msg_size
                            ),
                        )

                    # Decompress process must to be done after all packets
                    # received.
                    if compressed:
                        self._partial.extend(_WS_DEFLATE_TRAILING)
                        payload_merged = self._decompressobj.decompress(
                            self._partial, self._max_msg_size
                        )
                        # unconsumed_tail is non-empty only when the
                        # max_length cap above truncated decompression.
                        if self._decompressobj.unconsumed_tail:
                            left = len(self._decompressobj.unconsumed_tail)
                            raise WebSocketError(
                                WSCloseCode.MESSAGE_TOO_BIG,
                                "Decompressed message size {} exceeds limit {}".format(
                                    self._max_msg_size + left, self._max_msg_size
                                ),
                            )
                    else:
                        payload_merged = bytes(self._partial)

                    self._partial.clear()

                    if opcode == WSMsgType.TEXT:
                        try:
                            text = payload_merged.decode("utf-8")
                            self.queue.feed_data(
                                WSMessage(WSMsgType.TEXT, text, ""), len(text)
                            )
                        except UnicodeDecodeError as exc:
                            raise WebSocketError(
                                WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
                            ) from exc
                    else:
                        self.queue.feed_data(
                            WSMessage(WSMsgType.BINARY, payload_merged, ""),
                            len(payload_merged),
                        )

        return False, b""
+
    def parse_frame(
        self, buf: bytes
    ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]:
        """Return the next frame from the socket.

        Incremental state machine: parses as many complete
        ``(fin, opcode, payload, compressed)`` frames from ``buf`` as
        possible; leftover bytes and mid-frame state are kept on ``self``
        (``_tail``, ``_state``, ...) for the next call.
        """
        frames = []
        if self._tail:
            # Prepend bytes left over from the previous call.
            buf, self._tail = self._tail + buf, b""

        start_pos = 0
        buf_length = len(buf)

        while True:
            # read header
            if self._state == WSParserState.READ_HEADER:
                if buf_length - start_pos >= 2:
                    data = buf[start_pos : start_pos + 2]
                    start_pos += 2
                    first_byte, second_byte = data

                    fin = (first_byte >> 7) & 1
                    rsv1 = (first_byte >> 6) & 1
                    rsv2 = (first_byte >> 5) & 1
                    rsv3 = (first_byte >> 4) & 1
                    opcode = first_byte & 0xF

                    # frame-fin = %x0 ; more frames of this message follow
                    #           / %x1 ; final frame of this message
                    # frame-rsv1 = %x0 ;
                    #    1 bit, MUST be 0 unless negotiated otherwise
                    # frame-rsv2 = %x0 ;
                    #    1 bit, MUST be 0 unless negotiated otherwise
                    # frame-rsv3 = %x0 ;
                    #    1 bit, MUST be 0 unless negotiated otherwise
                    #
                    # Remove rsv1 from this test for deflate development
                    if rsv2 or rsv3 or (rsv1 and not self._compress):
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received frame with non-zero reserved bits",
                        )

                    # Control frames (opcode > 0x7) must not be fragmented.
                    if opcode > 0x7 and fin == 0:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received fragmented control frame",
                        )

                    has_mask = (second_byte >> 7) & 1
                    length = second_byte & 0x7F

                    # Control frames MUST have a payload
                    # length of 125 bytes or less
                    if opcode > 0x7 and length > 125:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Control frame payload cannot be " "larger than 125 bytes",
                        )

                    # Set compress status if last package is FIN
                    # OR set compress status if this is first fragment
                    # Raise error if not first fragment with rsv1 = 0x1
                    if self._frame_fin or self._compressed is None:
                        self._compressed = True if rsv1 else False
                    elif rsv1:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received frame with non-zero reserved bits",
                        )

                    self._frame_fin = bool(fin)
                    self._frame_opcode = opcode
                    self._has_mask = bool(has_mask)
                    self._payload_length_flag = length
                    self._state = WSParserState.READ_PAYLOAD_LENGTH
                else:
                    break

            # read payload length
            if self._state == WSParserState.READ_PAYLOAD_LENGTH:
                length = self._payload_length_flag
                if length == 126:
                    # 126 flags a 16-bit extended length field.
                    if buf_length - start_pos >= 2:
                        data = buf[start_pos : start_pos + 2]
                        start_pos += 2
                        length = UNPACK_LEN2(data)[0]
                        self._payload_length = length
                        self._state = (
                            WSParserState.READ_PAYLOAD_MASK
                            if self._has_mask
                            else WSParserState.READ_PAYLOAD
                        )
                    else:
                        break
                elif length > 126:
                    # 127 flags a 64-bit extended length field.
                    if buf_length - start_pos >= 8:
                        data = buf[start_pos : start_pos + 8]
                        start_pos += 8
                        length = UNPACK_LEN3(data)[0]
                        self._payload_length = length
                        self._state = (
                            WSParserState.READ_PAYLOAD_MASK
                            if self._has_mask
                            else WSParserState.READ_PAYLOAD
                        )
                    else:
                        break
                else:
                    self._payload_length = length
                    self._state = (
                        WSParserState.READ_PAYLOAD_MASK
                        if self._has_mask
                        else WSParserState.READ_PAYLOAD
                    )

            # read payload mask
            if self._state == WSParserState.READ_PAYLOAD_MASK:
                if buf_length - start_pos >= 4:
                    self._frame_mask = buf[start_pos : start_pos + 4]
                    start_pos += 4
                    self._state = WSParserState.READ_PAYLOAD
                else:
                    break

            if self._state == WSParserState.READ_PAYLOAD:
                length = self._payload_length
                payload = self._frame_payload

                chunk_len = buf_length - start_pos
                if length >= chunk_len:
                    # Frame payload continues past this buffer.
                    self._payload_length = length - chunk_len
                    payload.extend(buf[start_pos:])
                    start_pos = buf_length
                else:
                    self._payload_length = 0
                    payload.extend(buf[start_pos : start_pos + length])
                    start_pos = start_pos + length

                if self._payload_length == 0:
                    if self._has_mask:
                        assert self._frame_mask is not None
                        _websocket_mask(self._frame_mask, payload)

                    frames.append(
                        (self._frame_fin, self._frame_opcode, payload, self._compressed)
                    )

                    self._frame_payload = bytearray()
                    self._state = WSParserState.READ_HEADER
                else:
                    break

        self._tail = buf[start_pos:]

        return frames
+
+
class WebSocketWriter:
    """Serialize outgoing WebSocket frames and write them to the transport."""

    def __init__(
        self,
        protocol: BaseProtocol,
        transport: asyncio.Transport,
        *,
        use_mask: bool = False,
        limit: int = DEFAULT_LIMIT,
        random: Any = random.Random(),
        compress: int = 0,
        notakeover: bool = False,
    ) -> None:
        # NOTE(review): the default ``random.Random()`` is evaluated once at
        # import time and shared by all writers that do not pass their own.
        self.protocol = protocol
        self.transport = transport
        self.use_mask = use_mask
        self.randrange = random.randrange
        self.compress = compress
        self.notakeover = notakeover
        self._closing = False
        self._limit = limit
        self._output_size = 0
        self._compressobj = None  # type: Any  # actually compressobj

    async def _send_frame(
        self, message: bytes, opcode: int, compress: Optional[int] = None
    ) -> None:
        """Send a frame over the websocket with message as its payload."""
        # NOTE(review): this is a bitwise AND, not an equality test — any
        # opcode sharing a bit with WSMsgType.CLOSE passes the check while
        # closing; confirm that is intended.
        if self._closing and not (opcode & WSMsgType.CLOSE):
            raise ConnectionResetError("Cannot write to closing transport")

        rsv = 0

        # Only compress larger packets (disabled)
        # Does small packet needs to be compressed?
        # if self.compress and opcode < 8 and len(message) > 124:
        if (compress or self.compress) and opcode < 8:
            if compress:
                # Do not set self._compress if compressing is for this frame
                compressobj = zlib.compressobj(level=zlib.Z_BEST_SPEED, wbits=-compress)
            else:  # self.compress
                if not self._compressobj:
                    self._compressobj = zlib.compressobj(
                        level=zlib.Z_BEST_SPEED, wbits=-self.compress
                    )
                compressobj = self._compressobj

            message = compressobj.compress(message)
            message = message + compressobj.flush(
                zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH
            )
            # Strip the flush trailer; the receiver re-appends it.
            if message.endswith(_WS_DEFLATE_TRAILING):
                message = message[:-4]
            # Mark the frame as compressed via the RSV1 bit.
            rsv = rsv | 0x40

        msg_length = len(message)

        use_mask = self.use_mask
        if use_mask:
            mask_bit = 0x80
        else:
            mask_bit = 0

        # Pick the header layout by payload size: 7-bit, 16-bit or 64-bit length.
        if msg_length < 126:
            header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit)
        elif msg_length < (1 << 16):
            header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length)
        else:
            header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length)
        if use_mask:
            mask = self.randrange(0, 0xFFFFFFFF)
            mask = mask.to_bytes(4, "big")
            message = bytearray(message)
            _websocket_mask(mask, message)
            self._write(header + mask + message)
            self._output_size += len(header) + len(mask) + len(message)
        else:
            # Large payloads are written separately from the header to
            # avoid concatenating (copying) big buffers.
            if len(message) > MSG_SIZE:
                self._write(header)
                self._write(message)
            else:
                self._write(header + message)

            self._output_size += len(header) + len(message)

        if self._output_size > self._limit:
            self._output_size = 0
            await self.protocol._drain_helper()

    def _write(self, data: bytes) -> None:
        if self.transport is None or self.transport.is_closing():
            raise ConnectionResetError("Cannot write to closing transport")
        self.transport.write(data)

    async def pong(self, message: bytes = b"") -> None:
        """Send pong message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        await self._send_frame(message, WSMsgType.PONG)

    async def ping(self, message: bytes = b"") -> None:
        """Send ping message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        await self._send_frame(message, WSMsgType.PING)

    async def send(
        self,
        message: Union[str, bytes],
        binary: bool = False,
        compress: Optional[int] = None,
    ) -> None:
        """Send a frame over the websocket with message as its payload."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        if binary:
            await self._send_frame(message, WSMsgType.BINARY, compress)
        else:
            await self._send_frame(message, WSMsgType.TEXT, compress)

    async def close(self, code: int = 1000, message: bytes = b"") -> None:
        """Close the websocket, sending the specified code and message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        try:
            await self._send_frame(
                PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE
            )
        finally:
            # Mark closing even if the final frame failed to send.
            self._closing = True
diff --git a/contrib/python/aiohttp/aiohttp/http_writer.py b/contrib/python/aiohttp/aiohttp/http_writer.py
new file mode 100644
index 0000000000..e09144736c
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/http_writer.py
@@ -0,0 +1,200 @@
+"""Http related parsers and protocol."""
+
+import asyncio
+import zlib
+from typing import Any, Awaitable, Callable, NamedTuple, Optional, Union # noqa
+
+from multidict import CIMultiDict
+
+from .abc import AbstractStreamWriter
+from .base_protocol import BaseProtocol
+from .helpers import NO_EXTENSIONS
+
+__all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11")
+
+
class HttpVersion(NamedTuple):
    """HTTP protocol version as an ordered, comparable (major, minor) pair."""

    major: int
    minor: int
+
+
HttpVersion10 = HttpVersion(1, 0)
HttpVersion11 = HttpVersion(1, 1)


# Optional callbacks awaited by StreamWriter just before a body chunk /
# the header block is written to the transport.
_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
_T_OnHeadersSent = Optional[Callable[["CIMultiDict[str]"], Awaitable[None]]]
+
+
class StreamWriter(AbstractStreamWriter):
    """Write an HTTP message body to the transport.

    Supports optional chunked transfer encoding, optional zlib/gzip
    compression and truncation to a fixed Content-Length (``self.length``).
    """

    def __init__(
        self,
        protocol: BaseProtocol,
        loop: asyncio.AbstractEventLoop,
        on_chunk_sent: _T_OnChunkSent = None,
        on_headers_sent: _T_OnHeadersSent = None,
    ) -> None:
        self._protocol = protocol
        self._transport = protocol.transport

        self.loop = loop
        # Remaining body-byte budget; None means unbounded.
        self.length = None
        self.chunked = False
        # Bytes written since the last drain() / total bytes written.
        self.buffer_size = 0
        self.output_size = 0

        self._eof = False
        self._compress = None  # type: Any
        self._drain_waiter = None

        self._on_chunk_sent = on_chunk_sent  # type: _T_OnChunkSent
        self._on_headers_sent = on_headers_sent  # type: _T_OnHeadersSent

    @property
    def transport(self) -> Optional[asyncio.Transport]:
        return self._transport

    @property
    def protocol(self) -> BaseProtocol:
        return self._protocol

    def enable_chunking(self) -> None:
        """Switch the writer to chunked transfer encoding."""
        self.chunked = True

    def enable_compression(
        self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
    ) -> None:
        """Compress the body with deflate or gzip."""
        # 16 + MAX_WBITS selects the gzip container in zlib.
        zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
        self._compress = zlib.compressobj(wbits=zlib_mode, strategy=strategy)

    def _write(self, chunk: bytes) -> None:
        size = len(chunk)
        self.buffer_size += size
        self.output_size += size

        if self._transport is None or self._transport.is_closing():
            raise ConnectionResetError("Cannot write to closing transport")
        self._transport.write(chunk)

    async def write(
        self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000
    ) -> None:
        """Writes chunk of data to a stream.

        write_eof() indicates end of stream.
        writer can't be used after write_eof() method being called.
        write() return drain future.
        """
        if self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if isinstance(chunk, memoryview):
            if chunk.nbytes != len(chunk):
                # just reshape it
                chunk = chunk.cast("c")

        if self._compress is not None:
            chunk = self._compress.compress(chunk)
            if not chunk:
                return

        if self.length is not None:
            chunk_len = len(chunk)
            if self.length >= chunk_len:
                self.length = self.length - chunk_len
            else:
                # Truncate to the declared Content-Length.
                chunk = chunk[: self.length]
                self.length = 0
                if not chunk:
                    return

        if chunk:
            if self.chunked:
                # Wrap in a chunked-encoding envelope: <hex-size>\r\n<data>\r\n
                chunk_len_pre = ("%x\r\n" % len(chunk)).encode("ascii")
                chunk = chunk_len_pre + chunk + b"\r\n"

            self._write(chunk)

            if self.buffer_size > LIMIT and drain:
                self.buffer_size = 0
                await self.drain()

    async def write_headers(
        self, status_line: str, headers: "CIMultiDict[str]"
    ) -> None:
        """Write request/response status and headers."""
        if self._on_headers_sent is not None:
            await self._on_headers_sent(headers)

        # status + headers
        buf = _serialize_headers(status_line, headers)
        self._write(buf)

    async def write_eof(self, chunk: bytes = b"") -> None:
        """Flush compression, write the final chunk and terminate the body."""
        if self._eof:
            return

        if chunk and self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if self._compress:
            if chunk:
                chunk = self._compress.compress(chunk)

            chunk = chunk + self._compress.flush()
            if chunk and self.chunked:
                chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
                chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
        else:
            if self.chunked:
                if chunk:
                    chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
                    chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
                else:
                    # Zero-length terminating chunk.
                    chunk = b"0\r\n\r\n"

        if chunk:
            self._write(chunk)

        await self.drain()

        self._eof = True
        self._transport = None

    async def drain(self) -> None:
        """Flush the write buffer.

        The intended use is to write

          await w.write(data)
          await w.drain()
        """
        if self._protocol.transport is not None:
            await self._protocol._drain_helper()
+
+
+def _safe_header(string: str) -> str:
+ if "\r" in string or "\n" in string:
+ raise ValueError(
+ "Newline or carriage return detected in headers. "
+ "Potential header injection attack."
+ )
+ return string
+
+
def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes:
    """Serialize status line and headers to wire format (pure-Python path)."""
    joined = "\r\n".join(
        _safe_header(name) + ": " + _safe_header(value)
        for name, value in headers.items()
    )
    return (status_line + "\r\n" + joined + "\r\n\r\n").encode("utf-8")
+
+
# Default to the pure-Python serializer; swapped for the C extension below
# when it is importable and extensions are not disabled.
_serialize_headers = _py_serialize_headers

try:
    import aiohttp._http_writer as _http_writer  # type: ignore[import]

    _c_serialize_headers = _http_writer._serialize_headers
    if not NO_EXTENSIONS:
        _serialize_headers = _c_serialize_headers
except ImportError:
    # C extension not built — keep the pure-Python fallback.
    pass
diff --git a/contrib/python/aiohttp/aiohttp/locks.py b/contrib/python/aiohttp/aiohttp/locks.py
new file mode 100644
index 0000000000..df65e3e47d
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/locks.py
@@ -0,0 +1,41 @@
+import asyncio
+import collections
+from typing import Any, Deque, Optional
+
+
class EventResultOrError:
    """Event asyncio lock helper class.

    Wraps an asyncio.Event so that waiters can be woken either normally
    or by raising an exception in each of them.

    thanks to @vorpalsmith for the simple design.
    """

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._exc = None  # type: Optional[BaseException]
        self._event = asyncio.Event()
        self._waiters = collections.deque()  # type: Deque[asyncio.Future[Any]]

    def set(self, exc: Optional[BaseException] = None) -> None:
        """Wake all waiters; they re-raise ``exc`` when it is not None."""
        self._exc = exc
        self._event.set()

    async def wait(self) -> Any:
        """Block until set(); return the event result or raise the stored exception."""
        task = self._loop.create_task(self._event.wait())
        self._waiters.append(task)
        try:
            result = await task
        finally:
            self._waiters.remove(task)

        pending_exc = self._exc
        if pending_exc is not None:
            raise pending_exc

        return result

    def cancel(self) -> None:
        """Cancel all waiters"""
        for fut in list(self._waiters):
            fut.cancel()
diff --git a/contrib/python/aiohttp/aiohttp/log.py b/contrib/python/aiohttp/aiohttp/log.py
new file mode 100644
index 0000000000..3cecea2bac
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/log.py
@@ -0,0 +1,8 @@
+import logging
+
# Shared module-level loggers, one per aiohttp subsystem.
access_logger = logging.getLogger("aiohttp.access")
client_logger = logging.getLogger("aiohttp.client")
internal_logger = logging.getLogger("aiohttp.internal")
server_logger = logging.getLogger("aiohttp.server")
web_logger = logging.getLogger("aiohttp.web")
ws_logger = logging.getLogger("aiohttp.websocket")
diff --git a/contrib/python/aiohttp/aiohttp/multipart.py b/contrib/python/aiohttp/aiohttp/multipart.py
new file mode 100644
index 0000000000..c84e20044f
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/multipart.py
@@ -0,0 +1,963 @@
+import base64
+import binascii
+import json
+import re
+import uuid
+import warnings
+import zlib
+from collections import deque
+from types import TracebackType
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ AsyncIterator,
+ Deque,
+ Dict,
+ Iterator,
+ List,
+ Mapping,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+from urllib.parse import parse_qsl, unquote, urlencode
+
+from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping
+
+from .hdrs import (
+ CONTENT_DISPOSITION,
+ CONTENT_ENCODING,
+ CONTENT_LENGTH,
+ CONTENT_TRANSFER_ENCODING,
+ CONTENT_TYPE,
+)
+from .helpers import CHAR, TOKEN, parse_mimetype, reify
+from .http import HeadersParser
+from .payload import (
+ JsonPayload,
+ LookupError,
+ Order,
+ Payload,
+ StringPayload,
+ get_payload,
+ payload_type,
+)
+from .streams import StreamReader
+
+__all__ = (
+ "MultipartReader",
+ "MultipartWriter",
+ "BodyPartReader",
+ "BadContentDispositionHeader",
+ "BadContentDispositionParam",
+ "parse_content_disposition",
+ "content_disposition_filename",
+)
+
+
+if TYPE_CHECKING: # pragma: no cover
+ from .client_reqrep import ClientResponse
+
+
class BadContentDispositionHeader(RuntimeWarning):
    """Warned when the whole Content-Disposition header is malformed (header rejected)."""

    pass


class BadContentDispositionParam(RuntimeWarning):
    """Warned when a single Content-Disposition parameter is malformed (param skipped)."""

    pass
+
+
def parse_content_disposition(
    header: Optional[str],
) -> Tuple[Optional[str], Dict[str, str]]:
    """Parse a Content-Disposition header into ``(disposition-type, params)``.

    A malformed header yields ``(None, {})`` with a
    BadContentDispositionHeader warning; a malformed individual parameter
    is skipped with a BadContentDispositionParam warning.
    """

    def is_token(string: str) -> bool:
        return bool(string) and TOKEN >= set(string)

    def is_quoted(string: str) -> bool:
        return string[0] == string[-1] == '"'

    def is_rfc5987(string: str) -> bool:
        # RFC 5987 ext-value: charset'language'value-chars
        return is_token(string) and string.count("'") == 2

    def is_extended_param(string: str) -> bool:
        return string.endswith("*")

    def is_continuous_param(string: str) -> bool:
        # RFC 2231 continuation: name*0, name*1, ... (optionally name*0*)
        pos = string.find("*") + 1
        if not pos:
            return False
        substring = string[pos:-1] if string.endswith("*") else string[pos:]
        return substring.isdigit()

    def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str:
        return re.sub(f"\\\\([{chars}])", "\\1", text)

    if not header:
        return None, {}

    disptype, *parts = header.split(";")
    if not is_token(disptype):
        warnings.warn(BadContentDispositionHeader(header))
        return None, {}

    params = {}  # type: Dict[str, str]
    while parts:
        item = parts.pop(0)

        if "=" not in item:
            warnings.warn(BadContentDispositionHeader(header))
            return None, {}

        key, value = item.split("=", 1)
        key = key.lower().strip()
        value = value.lstrip()

        # Duplicate parameter names invalidate the whole header.
        if key in params:
            warnings.warn(BadContentDispositionHeader(header))
            return None, {}

        if not is_token(key):
            warnings.warn(BadContentDispositionParam(item))
            continue

        elif is_continuous_param(key):
            if is_quoted(value):
                value = unescape(value[1:-1])
            elif not is_token(value):
                warnings.warn(BadContentDispositionParam(item))
                continue

        elif is_extended_param(key):
            if is_rfc5987(value):
                encoding, _, value = value.split("'", 2)
                encoding = encoding or "utf-8"
            else:
                warnings.warn(BadContentDispositionParam(item))
                continue

            try:
                value = unquote(value, encoding, "strict")
            except UnicodeDecodeError:  # pragma: nocover
                warnings.warn(BadContentDispositionParam(item))
                continue

        else:
            failed = True
            if is_quoted(value):
                failed = False
                value = unescape(value[1:-1].lstrip("\\/"))
            elif is_token(value):
                failed = False
            elif parts:
                # maybe just ; in filename, in any case this is just
                # one case fix, for proper fix we need to redesign parser
                _value = f"{value};{parts[0]}"
                if is_quoted(_value):
                    parts.pop(0)
                    value = unescape(_value[1:-1].lstrip("\\/"))
                    failed = False

            if failed:
                warnings.warn(BadContentDispositionHeader(header))
                return None, {}

        params[key] = value

    return disptype.lower(), params
+
+
def content_disposition_filename(
    params: Mapping[str, str], name: str = "filename"
) -> Optional[str]:
    """Extract the ``name`` value from parsed Content-Disposition params.

    Lookup order: the extended ``name*`` key, the plain ``name`` key, then
    RFC 2231 continuation segments (``name*0``, ``name*1``, ...), which are
    joined and percent-decoded when they carry an encoding prefix.
    """
    key_ext = "%s*" % name
    if not params:
        return None
    if key_ext in params:
        return params[key_ext]
    if name in params:
        return params[name]

    # Collect continuation segments in key order.
    segments = []
    candidates = sorted(
        (key, value) for key, value in params.items() if key.startswith(key_ext)
    )
    for index, (key, value) in enumerate(candidates):
        _, tail = key.split("*", 1)
        if tail.endswith("*"):
            tail = tail[:-1]
        if tail != str(index):
            # Segment numbering gap: stop collecting.
            break
        segments.append(value)
    if not segments:
        return None

    joined = "".join(segments)
    if "'" not in joined:
        return joined
    encoding, _, joined = joined.split("'", 2)
    return unquote(joined, encoding or "utf-8", "strict")
+
+
class MultipartResponseWrapper:
    """Wrapper around the MultipartReader.

    Owns the underlying response connection and releases it once the
    multipart stream is exhausted.
    """

    def __init__(self, resp: "ClientResponse", stream: "MultipartReader") -> None:
        self.resp = resp
        self.stream = stream

    def __aiter__(self) -> "MultipartResponseWrapper":
        return self

    async def __anext__(self) -> Union["MultipartReader", "BodyPartReader"]:
        nxt = await self.next()
        if nxt is None:
            raise StopAsyncIteration
        return nxt

    def at_eof(self) -> bool:
        """Returns True when all response data had been read."""
        return self.resp.content.at_eof()

    async def next(self) -> Optional[Union["MultipartReader", "BodyPartReader"]]:
        """Emits next multipart reader object."""
        part = await self.stream.next()
        if self.stream.at_eof():
            # Stream finished: give the connection back.
            await self.release()
        return part

    async def release(self) -> None:
        """Release the connection gracefully.

        All remaining content is read to the void.
        """
        await self.resp.release()
+
+
class BodyPartReader:
    """Multipart reader for single body part."""

    # Default read granularity in bytes.
    chunk_size = 8192

    def __init__(
        self, boundary: bytes, headers: "CIMultiDictProxy[str]", content: StreamReader
    ) -> None:
        self.headers = headers
        self._boundary = boundary
        self._content = content
        self._at_eof = False
        length = self.headers.get(CONTENT_LENGTH, None)
        # Declared part length, or None when the part has no Content-Length.
        self._length = int(length) if length is not None else None
        self._read_bytes = 0
        # TODO: typing.Deque is not supported by Python 3.5
        self._unread: Deque[bytes] = deque()
        self._prev_chunk = None  # type: Optional[bytes]
        self._content_eof = 0
        self._cache = {}  # type: Dict[str, Any]

    def __aiter__(self) -> AsyncIterator["BodyPartReader"]:
        return self  # type: ignore[return-value]

    async def __anext__(self) -> bytes:
        part = await self.next()
        if part is None:
            raise StopAsyncIteration
        return part

    async def next(self) -> Optional[bytes]:
        item = await self.read()
        if not item:
            return None
        return item

    async def read(self, *, decode: bool = False) -> bytes:
        """Reads body part data.

        decode: Decodes data following by encoding
                method from Content-Encoding header. If it missed
                data remains untouched
        """
        if self._at_eof:
            return b""
        data = bytearray()
        while not self._at_eof:
            data.extend(await self.read_chunk(self.chunk_size))
        if decode:
            return self.decode(data)
        return data

    async def read_chunk(self, size: int = chunk_size) -> bytes:
        """Reads body part content chunk of the specified size.

        size: chunk size
        """
        if self._at_eof:
            return b""
        if self._length:
            chunk = await self._read_chunk_from_length(size)
        else:
            chunk = await self._read_chunk_from_stream(size)

        self._read_bytes += len(chunk)
        if self._read_bytes == self._length:
            self._at_eof = True
        if self._at_eof:
            # Consume the CRLF that separates the part body from the boundary.
            clrf = await self._content.readline()
            assert (
                b"\r\n" == clrf
            ), "reader did not read all the data or it is malformed"
        return chunk

    async def _read_chunk_from_length(self, size: int) -> bytes:
        # Reads body part content chunk of the specified size.
        # The body part must has Content-Length header with proper value.
        assert self._length is not None, "Content-Length required for chunked read"
        chunk_size = min(size, self._length - self._read_bytes)
        chunk = await self._content.read(chunk_size)
        return chunk

    async def _read_chunk_from_stream(self, size: int) -> bytes:
        # Reads content chunk of body part with unknown length.
        # The Content-Length header for body part is not necessary.
        assert (
            size >= len(self._boundary) + 2
        ), "Chunk size must be greater or equal than boundary length + 2"
        first_chunk = self._prev_chunk is None
        if first_chunk:
            self._prev_chunk = await self._content.read(size)

        chunk = await self._content.read(size)
        self._content_eof += int(self._content.at_eof())
        assert self._content_eof < 3, "Reading after EOF"
        assert self._prev_chunk is not None
        # Scan a two-chunk window so a boundary split across chunks is found.
        window = self._prev_chunk + chunk
        sub = b"\r\n" + self._boundary
        if first_chunk:
            idx = window.find(sub)
        else:
            idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub)))
        if idx >= 0:
            # pushing boundary back to content
            with warnings.catch_warnings():
                warnings.filterwarnings("ignore", category=DeprecationWarning)
                self._content.unread_data(window[idx:])
            if size > idx:
                self._prev_chunk = self._prev_chunk[:idx]
            chunk = window[len(self._prev_chunk) : idx]
            if not chunk:
                self._at_eof = True
        result = self._prev_chunk
        self._prev_chunk = chunk
        return result

    async def readline(self) -> bytes:
        """Reads body part by line by line."""
        if self._at_eof:
            return b""

        if self._unread:
            line = self._unread.popleft()
        else:
            line = await self._content.readline()

        if line.startswith(self._boundary):
            # the very last boundary may not come with \r\n,
            # so set single rules for everyone
            sline = line.rstrip(b"\r\n")
            boundary = self._boundary
            last_boundary = self._boundary + b"--"
            # ensure that we read exactly the boundary, not something alike
            if sline == boundary or sline == last_boundary:
                self._at_eof = True
                self._unread.append(line)
                return b""
        else:
            next_line = await self._content.readline()
            if next_line.startswith(self._boundary):
                line = line[:-2]  # strip CRLF but only once
            self._unread.append(next_line)

        return line

    async def release(self) -> None:
        """Like read(), but reads all the data to the void."""
        if self._at_eof:
            return
        while not self._at_eof:
            await self.read_chunk(self.chunk_size)

    async def text(self, *, encoding: Optional[str] = None) -> str:
        """Like read(), but assumes that body part contains text data."""
        data = await self.read(decode=True)
        # see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm # NOQA
        # and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send # NOQA
        encoding = encoding or self.get_charset(default="utf-8")
        return data.decode(encoding)

    async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]:
        """Like read(), but assumes that body parts contains JSON data."""
        data = await self.read(decode=True)
        if not data:
            return None
        encoding = encoding or self.get_charset(default="utf-8")
        return cast(Dict[str, Any], json.loads(data.decode(encoding)))

    async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]:
        """Like read(), but assumes that body parts contain form urlencoded data."""
        data = await self.read(decode=True)
        if not data:
            return []
        if encoding is not None:
            real_encoding = encoding
        else:
            real_encoding = self.get_charset(default="utf-8")
        return parse_qsl(
            data.rstrip().decode(real_encoding),
            keep_blank_values=True,
            encoding=real_encoding,
        )

    def at_eof(self) -> bool:
        """Returns True if the boundary was reached or False otherwise."""
        return self._at_eof

    def decode(self, data: bytes) -> bytes:
        """Decodes data.

        Decoding is done according the specified Content-Encoding
        or Content-Transfer-Encoding headers value.
        """
        if CONTENT_TRANSFER_ENCODING in self.headers:
            data = self._decode_content_transfer(data)
        if CONTENT_ENCODING in self.headers:
            return self._decode_content(data)
        return data

    def _decode_content(self, data: bytes) -> bytes:
        encoding = self.headers.get(CONTENT_ENCODING, "").lower()

        if encoding == "deflate":
            return zlib.decompress(data, -zlib.MAX_WBITS)
        elif encoding == "gzip":
            return zlib.decompress(data, 16 + zlib.MAX_WBITS)
        elif encoding == "identity":
            return data
        else:
            raise RuntimeError(f"unknown content encoding: {encoding}")

    def _decode_content_transfer(self, data: bytes) -> bytes:
        encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower()

        if encoding == "base64":
            return base64.b64decode(data)
        elif encoding == "quoted-printable":
            return binascii.a2b_qp(data)
        elif encoding in ("binary", "8bit", "7bit"):
            return data
        else:
            raise RuntimeError(
                "unknown content transfer encoding: {}" "".format(encoding)
            )

    def get_charset(self, default: str) -> str:
        """Returns charset parameter from Content-Type header or default."""
        ctype = self.headers.get(CONTENT_TYPE, "")
        mimetype = parse_mimetype(ctype)
        return mimetype.parameters.get("charset", default)

    @reify
    def name(self) -> Optional[str]:
        """Returns name specified in Content-Disposition header.

        If the header is missing or malformed, returns None.
        """
        _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
        return content_disposition_filename(params, "name")

    @reify
    def filename(self) -> Optional[str]:
        """Returns filename specified in Content-Disposition header.

        Returns None if the header is missing or malformed.
        """
        _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
        return content_disposition_filename(params, "filename")
+
+
@payload_type(BodyPartReader, order=Order.try_first)
class BodyPartReaderPayload(Payload):
    """Payload adapter that streams a BodyPartReader as request body."""

    def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None:
        super().__init__(value, *args, **kwargs)

        params = {}  # type: Dict[str, str]
        if value.name is not None:
            params["name"] = value.name
        if value.filename is not None:
            params["filename"] = value.filename

        if params:
            self.set_content_disposition("attachment", True, **params)

    async def write(self, writer: Any) -> None:
        # Stream the source part in 64 KiB chunks, decoding
        # content/transfer encodings before forwarding.
        field = self._value
        chunk = await field.read_chunk(size=2 ** 16)
        while chunk:
            await writer.write(field.decode(chunk))
            chunk = await field.read_chunk(size=2 ** 16)
+
+
class MultipartReader:
    """Multipart body reader."""

    #: Response wrapper, used when multipart readers constructs from response.
    response_wrapper_cls = MultipartResponseWrapper
    #: Multipart reader class, used to handle multipart/* body parts.
    #: None points to type(self)
    multipart_reader_cls = None
    #: Body part reader class for non multipart/* content types.
    part_reader_cls = BodyPartReader

    def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None:
        self.headers = headers
        self._boundary = ("--" + self._get_boundary()).encode()
        self._content = content
        self._last_part = (
            None
        )  # type: Optional[Union['MultipartReader', BodyPartReader]]
        self._at_eof = False
        # True until the first boundary has been consumed.
        self._at_bof = True
        self._unread = []  # type: List[bytes]

    def __aiter__(
        self,
    ) -> AsyncIterator["BodyPartReader"]:
        return self  # type: ignore[return-value]

    async def __anext__(
        self,
    ) -> Optional[Union["MultipartReader", BodyPartReader]]:
        part = await self.next()
        if part is None:
            raise StopAsyncIteration
        return part

    @classmethod
    def from_response(
        cls,
        response: "ClientResponse",
    ) -> MultipartResponseWrapper:
        """Constructs reader instance from HTTP response.

        :param response: :class:`~aiohttp.client.ClientResponse` instance
        """
        obj = cls.response_wrapper_cls(
            response, cls(response.headers, response.content)
        )
        return obj

    def at_eof(self) -> bool:
        """Returns True if the final boundary was reached, false otherwise."""
        return self._at_eof

    async def next(
        self,
    ) -> Optional[Union["MultipartReader", BodyPartReader]]:
        """Emits the next multipart body part."""
        # So, if we're at BOF, we need to skip till the boundary.
        if self._at_eof:
            return None
        await self._maybe_release_last_part()
        if self._at_bof:
            await self._read_until_first_boundary()
            self._at_bof = False
        else:
            await self._read_boundary()
        if self._at_eof:  # we just read the last boundary, nothing to do there
            return None
        self._last_part = await self.fetch_next_part()
        return self._last_part
+
+ async def release(self) -> None:
+ """Reads all the body parts to the void till the final boundary."""
+ while not self._at_eof:
+ item = await self.next()
+ if item is None:
+ break
+ await item.release()
+
+ async def fetch_next_part(
+ self,
+ ) -> Union["MultipartReader", BodyPartReader]:
+ """Returns the next body part reader."""
+ headers = await self._read_headers()
+ return self._get_part_reader(headers)
+
+ def _get_part_reader(
+ self,
+ headers: "CIMultiDictProxy[str]",
+ ) -> Union["MultipartReader", BodyPartReader]:
+ """Dispatches the response by the `Content-Type` header.
+
+ Returns a suitable reader instance.
+
+ :param dict headers: Response headers
+ """
+ ctype = headers.get(CONTENT_TYPE, "")
+ mimetype = parse_mimetype(ctype)
+
+ if mimetype.type == "multipart":
+ if self.multipart_reader_cls is None:
+ return type(self)(headers, self._content)
+ return self.multipart_reader_cls(headers, self._content)
+ else:
+ return self.part_reader_cls(self._boundary, headers, self._content)
+
+ def _get_boundary(self) -> str:
+ mimetype = parse_mimetype(self.headers[CONTENT_TYPE])
+
+ assert mimetype.type == "multipart", "multipart/* content type expected"
+
+ if "boundary" not in mimetype.parameters:
+ raise ValueError(
+ "boundary missed for Content-Type: %s" % self.headers[CONTENT_TYPE]
+ )
+
+ boundary = mimetype.parameters["boundary"]
+ if len(boundary) > 70:
+ raise ValueError("boundary %r is too long (70 chars max)" % boundary)
+
+ return boundary
+
+ async def _readline(self) -> bytes:
+ if self._unread:
+ return self._unread.pop()
+ return await self._content.readline()
+
+ async def _read_until_first_boundary(self) -> None:
+ while True:
+ chunk = await self._readline()
+ if chunk == b"":
+ raise ValueError(
+ "Could not find starting boundary %r" % (self._boundary)
+ )
+ chunk = chunk.rstrip()
+ if chunk == self._boundary:
+ return
+ elif chunk == self._boundary + b"--":
+ self._at_eof = True
+ return
+
+ async def _read_boundary(self) -> None:
+ chunk = (await self._readline()).rstrip()
+ if chunk == self._boundary:
+ pass
+ elif chunk == self._boundary + b"--":
+ self._at_eof = True
+ epilogue = await self._readline()
+ next_line = await self._readline()
+
+ # the epilogue is expected and then either the end of input or the
+ # parent multipart boundary, if the parent boundary is found then
+ # it should be marked as unread and handed to the parent for
+ # processing
+ if next_line[:2] == b"--":
+ self._unread.append(next_line)
+ # otherwise the request is likely missing an epilogue and both
+ # lines should be passed to the parent for processing
+ # (this handles the old behavior gracefully)
+ else:
+ self._unread.extend([next_line, epilogue])
+ else:
+ raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}")
+
+ async def _read_headers(self) -> "CIMultiDictProxy[str]":
+ lines = [b""]
+ while True:
+ chunk = await self._content.readline()
+ chunk = chunk.strip()
+ lines.append(chunk)
+ if not chunk:
+ break
+ parser = HeadersParser()
+ headers, raw_headers = parser.parse_headers(lines)
+ return headers
+
+ async def _maybe_release_last_part(self) -> None:
+ """Ensures that the last read body part is read completely."""
+ if self._last_part is not None:
+ if not self._last_part.at_eof():
+ await self._last_part.release()
+ self._unread.extend(self._last_part._unread)
+ self._last_part = None
+
+
# One appended part as stored by MultipartWriter:
# (payload, content-encoding, content-transfer-encoding).
_Part = Tuple[Payload, str, str]
+
+
class MultipartWriter(Payload):
    """Multipart body writer."""

    def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None:
        boundary = boundary if boundary is not None else uuid.uuid4().hex
        # The underlying Payload API demands a str (utf-8), not bytes,
        # so we need to ensure we don't lose anything during conversion.
        # As a result, require the boundary to be ASCII only.
        # In both situations.

        try:
            self._boundary = boundary.encode("ascii")
        except UnicodeEncodeError:
            raise ValueError("boundary should contain ASCII only chars") from None
        ctype = f"multipart/{subtype}; boundary={self._boundary_value}"

        super().__init__(None, content_type=ctype)

        # Parts are stored with their (optional) Content-Encoding and
        # Content-Transfer-Encoding so write() can apply them later.
        self._parts = []  # type: List[_Part]

    def __enter__(self) -> "MultipartWriter":
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        pass

    def __iter__(self) -> Iterator[_Part]:
        return iter(self._parts)

    def __len__(self) -> int:
        return len(self._parts)

    def __bool__(self) -> bool:
        # Always truthy, even with zero parts appended.
        return True

    # RFC 7230 grammar: token characters, and characters that may not
    # appear even inside a quoted string.
    _valid_tchar_regex = re.compile(br"\A[!#$%&'*+\-.^_`|~\w]+\Z")
    _invalid_qdtext_char_regex = re.compile(br"[\x00-\x08\x0A-\x1F\x7F]")

    @property
    def _boundary_value(self) -> str:
        """Wrap boundary parameter value in quotes, if necessary.

        Reads self.boundary and returns a unicode string.
        """
        # Refer to RFCs 7231, 7230, 5234.
        #
        # parameter = token "=" ( token / quoted-string )
        # token = 1*tchar
        # quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE
        # qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text
        # obs-text = %x80-FF
        # quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text )
        # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*"
        #         / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
        #         / DIGIT / ALPHA
        #         ; any VCHAR, except delimiters
        # VCHAR = %x21-7E
        value = self._boundary
        if re.match(self._valid_tchar_regex, value):
            return value.decode("ascii")  # cannot fail

        if re.search(self._invalid_qdtext_char_regex, value):
            raise ValueError("boundary value contains invalid characters")

        # escape %x5C and %x22
        quoted_value_content = value.replace(b"\\", b"\\\\")
        quoted_value_content = quoted_value_content.replace(b'"', b'\\"')

        return '"' + quoted_value_content.decode("ascii") + '"'

    @property
    def boundary(self) -> str:
        """The boundary token as text."""
        return self._boundary.decode("ascii")

    def append(self, obj: Any, headers: Optional[MultiMapping[str]] = None) -> Payload:
        """Append *obj*, adapting it to a Payload via the registry if needed."""
        if headers is None:
            headers = CIMultiDict()

        if isinstance(obj, Payload):
            obj.headers.update(headers)
            return self.append_payload(obj)
        else:
            try:
                payload = get_payload(obj, headers=headers)
            except LookupError:
                raise TypeError("Cannot create payload from %r" % obj)
            else:
                return self.append_payload(payload)

    def append_payload(self, payload: Payload) -> Payload:
        """Adds a new body part to multipart writer."""
        # compression
        encoding = payload.headers.get(
            CONTENT_ENCODING,
            "",
        ).lower()  # type: Optional[str]
        if encoding and encoding not in ("deflate", "gzip", "identity"):
            raise RuntimeError(f"unknown content encoding: {encoding}")
        if encoding == "identity":
            encoding = None

        # te encoding
        te_encoding = payload.headers.get(
            CONTENT_TRANSFER_ENCODING,
            "",
        ).lower()  # type: Optional[str]
        if te_encoding not in ("", "base64", "quoted-printable", "binary"):
            raise RuntimeError(
                "unknown content transfer encoding: {}" "".format(te_encoding)
            )
        if te_encoding == "binary":
            te_encoding = None

        # size
        # Content-Length can only be set when the part is written verbatim,
        # i.e. no compression or transfer encoding will change its size.
        size = payload.size
        if size is not None and not (encoding or te_encoding):
            payload.headers[CONTENT_LENGTH] = str(size)

        self._parts.append((payload, encoding, te_encoding))  # type: ignore[arg-type]
        return payload

    def append_json(
        self, obj: Any, headers: Optional[MultiMapping[str]] = None
    ) -> Payload:
        """Helper to append JSON part."""
        if headers is None:
            headers = CIMultiDict()

        return self.append_payload(JsonPayload(obj, headers=headers))

    def append_form(
        self,
        obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]],
        headers: Optional[MultiMapping[str]] = None,
    ) -> Payload:
        """Helper to append form urlencoded part."""
        assert isinstance(obj, (Sequence, Mapping))

        if headers is None:
            headers = CIMultiDict()

        if isinstance(obj, Mapping):
            obj = list(obj.items())
        data = urlencode(obj, doseq=True)

        return self.append_payload(
            StringPayload(
                data, headers=headers, content_type="application/x-www-form-urlencoded"
            )
        )

    @property
    def size(self) -> Optional[int]:
        """Size of the payload."""
        total = 0
        for part, encoding, te_encoding in self._parts:
            # Encoded parts have an unpredictable final size.
            if encoding or te_encoding or part.size is None:
                return None

            total += int(
                2
                + len(self._boundary)
                + 2
                + part.size  # b'--'+self._boundary+b'\r\n'
                + len(part._binary_headers)
                + 2  # b'\r\n'
            )

        total += 2 + len(self._boundary) + 4  # b'--'+self._boundary+b'--\r\n'
        return total

    async def write(self, writer: Any, close_boundary: bool = True) -> None:
        """Write body."""
        for part, encoding, te_encoding in self._parts:
            await writer.write(b"--" + self._boundary + b"\r\n")
            await writer.write(part._binary_headers)

            if encoding or te_encoding:
                # Route the part through a wrapper that applies compression
                # and/or transfer encoding on the fly.
                w = MultipartPayloadWriter(writer)
                if encoding:
                    w.enable_compression(encoding)
                if te_encoding:
                    w.enable_encoding(te_encoding)
                await part.write(w)  # type: ignore[arg-type]
                await w.write_eof()
            else:
                await part.write(writer)

            await writer.write(b"\r\n")

        # When close_boundary is False the terminating boundary is not
        # written, leaving the body open for further parts.
        if close_boundary:
            await writer.write(b"--" + self._boundary + b"--\r\n")
+
+
class MultipartPayloadWriter:
    """Writer wrapper applying compression and/or transfer encoding to a part."""

    def __init__(self, writer: Any) -> None:
        self._writer = writer
        self._encoding = None  # type: Optional[str]
        self._compress = None  # type: Any
        self._encoding_buffer = None  # type: Optional[bytearray]

    def enable_encoding(self, encoding: str) -> None:
        # base64 needs a buffer so encoding happens on 3-byte boundaries;
        # quoted-printable is stateless.  Other values are ignored here
        # (append_payload validates them beforehand).
        if encoding == "base64":
            self._encoding = encoding
            self._encoding_buffer = bytearray()
        elif encoding == "quoted-printable":
            self._encoding = "quoted-printable"

    def enable_compression(
        self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
    ) -> None:
        # wbits: +16 selects a gzip wrapper, a negative value raw deflate.
        zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else -zlib.MAX_WBITS
        self._compress = zlib.compressobj(wbits=zlib_mode, strategy=strategy)

    async def write_eof(self) -> None:
        # Flush pending compressed data first, then the (< 3 byte) base64
        # remainder still held in the buffer.
        if self._compress is not None:
            chunk = self._compress.flush()
            if chunk:
                self._compress = None
                await self.write(chunk)

        if self._encoding == "base64":
            if self._encoding_buffer:
                await self._writer.write(base64.b64encode(self._encoding_buffer))

    async def write(self, chunk: bytes) -> None:
        if self._compress is not None:
            if chunk:
                chunk = self._compress.compress(chunk)
                if not chunk:
                    return

        if self._encoding == "base64":
            buf = self._encoding_buffer
            assert buf is not None
            buf.extend(chunk)

            if buf:
                # Emit only whole 3-byte groups; the remainder stays
                # buffered until more data arrives or write_eof().
                div, mod = divmod(len(buf), 3)
                enc_chunk, self._encoding_buffer = (buf[: div * 3], buf[div * 3 :])
                if enc_chunk:
                    b64chunk = base64.b64encode(enc_chunk)
                    await self._writer.write(b64chunk)
        elif self._encoding == "quoted-printable":
            await self._writer.write(binascii.b2a_qp(chunk))
        else:
            await self._writer.write(chunk)
diff --git a/contrib/python/aiohttp/aiohttp/payload.py b/contrib/python/aiohttp/aiohttp/payload.py
new file mode 100644
index 0000000000..2ee90beea8
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/payload.py
@@ -0,0 +1,465 @@
+import asyncio
+import enum
+import io
+import json
+import mimetypes
+import os
+import warnings
+from abc import ABC, abstractmethod
+from itertools import chain
+from typing import (
+ IO,
+ TYPE_CHECKING,
+ Any,
+ ByteString,
+ Dict,
+ Iterable,
+ Optional,
+ TextIO,
+ Tuple,
+ Type,
+ Union,
+)
+
+from multidict import CIMultiDict
+
+from . import hdrs
+from .abc import AbstractStreamWriter
+from .helpers import (
+ PY_36,
+ content_disposition_header,
+ guess_filename,
+ parse_mimetype,
+ sentinel,
+)
+from .streams import StreamReader
+from .typedefs import Final, JSONEncoder, _CIMultiDict
+
# Public API of this module, re-exported by the package.
__all__ = (
    "PAYLOAD_REGISTRY",
    "get_payload",
    "payload_type",
    "Payload",
    "BytesPayload",
    "StringPayload",
    "IOBasePayload",
    "BytesIOPayload",
    "BufferedReaderPayload",
    "TextIOPayload",
    "StringIOPayload",
    "JsonPayload",
    "AsyncIterablePayload",
)

# Raw bytes bodies above this size trigger a ResourceWarning suggesting an
# io.BytesIO wrapper instead (see BytesPayload).
TOO_LARGE_BYTES_BODY: Final[int] = 2 ** 20  # 1 MB
+
+if TYPE_CHECKING: # pragma: no cover
+ from typing import List
+
+
class LookupError(Exception):
    """Raised by PayloadRegistry.get when no factory matches the data.

    Note: this shadows the builtin ``LookupError`` within this module.
    """

    pass
+
+
class Order(str, enum.Enum):
    """Priority bucket a payload factory is registered into."""

    normal = "normal"
    try_first = "try_first"
    try_last = "try_last"
+
+
def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload":
    """Adapt *data* to a Payload via the global registry."""
    return PAYLOAD_REGISTRY.get(data, *args, **kwargs)
+
+
def register_payload(
    factory: Type["Payload"], type: Any, *, order: Order = Order.normal
) -> None:
    """Register *factory* for *type* in the global payload registry."""
    PAYLOAD_REGISTRY.register(factory, type, order=order)
+
+
class payload_type:
    """Class decorator registering the decorated Payload class for ``type``."""

    def __init__(self, type: Any, *, order: Order = Order.normal) -> None:
        self.type = type
        self.order = order

    def __call__(self, factory: Type["Payload"]) -> Type["Payload"]:
        # Register as a side effect, return the class unchanged.
        register_payload(factory, self.type, order=self.order)
        return factory
+
+
+PayloadType = Type["Payload"]
+_PayloadRegistryItem = Tuple[PayloadType, Any]
+
+
class PayloadRegistry:
    """Payload registry.

    note: we need zope.interface for more efficient adapter search
    """

    def __init__(self) -> None:
        self._first = []  # type: List[_PayloadRegistryItem]
        self._normal = []  # type: List[_PayloadRegistryItem]
        self._last = []  # type: List[_PayloadRegistryItem]

    def get(
        self,
        data: Any,
        *args: Any,
        _CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain,
        **kwargs: Any,
    ) -> "Payload":
        # Ready-made payloads pass through untouched.
        if isinstance(data, Payload):
            return data
        # First matching factory wins; buckets are scanned in priority
        # order: try_first, then normal, then try_last.
        for factory, accepted in _CHAIN(self._first, self._normal, self._last):
            if isinstance(data, accepted):
                return factory(data, *args, **kwargs)
        raise LookupError()

    def register(
        self, factory: PayloadType, type: Any, *, order: Order = Order.normal
    ) -> None:
        # Identity checks keep the behavior strict: only genuine Order
        # members select a bucket.
        if order is Order.try_first:
            bucket = self._first
        elif order is Order.normal:
            bucket = self._normal
        elif order is Order.try_last:
            bucket = self._last
        else:
            raise ValueError(f"Unsupported order {order!r}")
        bucket.append((factory, type))
+
+
class Payload(ABC):
    """Abstract base class for HTTP body payloads.

    Subclasses implement :meth:`write` to stream ``value`` to an
    :class:`AbstractStreamWriter`.
    """

    # Content-Type used when none is given and none can be guessed.
    _default_content_type = "application/octet-stream"  # type: str
    # Byte size of the payload when known; None means unknown/streaming.
    _size = None  # type: Optional[int]

    def __init__(
        self,
        value: Any,
        headers: Optional[
            Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]]
        ] = None,
        content_type: Optional[str] = sentinel,
        filename: Optional[str] = None,
        encoding: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        self._encoding = encoding
        self._filename = filename
        self._headers = CIMultiDict()  # type: _CIMultiDict
        self._value = value
        # Content-Type resolution order: explicit argument, then a guess
        # from the filename, then the class default.  ``sentinel``
        # distinguishes "not given" from an explicit None.
        if content_type is not sentinel and content_type is not None:
            self._headers[hdrs.CONTENT_TYPE] = content_type
        elif self._filename is not None:
            content_type = mimetypes.guess_type(self._filename)[0]
            if content_type is None:
                content_type = self._default_content_type
            self._headers[hdrs.CONTENT_TYPE] = content_type
        else:
            self._headers[hdrs.CONTENT_TYPE] = self._default_content_type
        # Caller-supplied headers are applied last.
        self._headers.update(headers or {})

    @property
    def size(self) -> Optional[int]:
        """Size of the payload."""
        return self._size

    @property
    def filename(self) -> Optional[str]:
        """Filename of the payload."""
        return self._filename

    @property
    def headers(self) -> _CIMultiDict:
        """Custom item headers"""
        return self._headers

    @property
    def _binary_headers(self) -> bytes:
        # Headers serialized as a wire-ready UTF-8 block, terminated by the
        # empty line that separates headers from the body.
        return (
            "".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode(
                "utf-8"
            )
            + b"\r\n"
        )

    @property
    def encoding(self) -> Optional[str]:
        """Payload encoding"""
        return self._encoding

    @property
    def content_type(self) -> str:
        """Content type"""
        return self._headers[hdrs.CONTENT_TYPE]

    def set_content_disposition(
        self,
        disptype: str,
        quote_fields: bool = True,
        _charset: str = "utf-8",
        **params: Any,
    ) -> None:
        """Sets ``Content-Disposition`` header."""
        self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header(
            disptype, quote_fields=quote_fields, _charset=_charset, **params
        )

    @abstractmethod
    async def write(self, writer: AbstractStreamWriter) -> None:
        """Write payload.

        writer is an AbstractStreamWriter instance:
        """
+
+
class BytesPayload(Payload):
    """Payload wrapping an in-memory bytes-like object."""

    def __init__(self, value: ByteString, *args: Any, **kwargs: Any) -> None:
        if not isinstance(value, (bytes, bytearray, memoryview)):
            raise TypeError(f"value argument must be byte-ish, not {type(value)!r}")

        kwargs.setdefault("content_type", "application/octet-stream")

        super().__init__(value, *args, **kwargs)

        # memoryview has no len() in the byte sense; use nbytes there.
        self._size = value.nbytes if isinstance(value, memoryview) else len(value)

        if self._size > TOO_LARGE_BYTES_BODY:
            # The ``source`` argument to warnings.warn exists on 3.6+ only.
            warn_kwargs = {"source": self} if PY_36 else {}
            warnings.warn(
                "Sending a large body directly with raw bytes might"
                " lock the event loop. You should probably pass an "
                "io.BytesIO object instead",
                ResourceWarning,
                **warn_kwargs,
            )

    async def write(self, writer: AbstractStreamWriter) -> None:
        await writer.write(self._value)
+
+
class StringPayload(BytesPayload):
    """Payload for a text string, encoded to bytes with a resolved charset."""

    def __init__(
        self,
        value: str,
        *args: Any,
        encoding: Optional[str] = None,
        content_type: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        # Resolve the effective charset and Content-Type together so the
        # two can never disagree.
        if encoding is not None:
            real_encoding = encoding
            if content_type is None:
                content_type = "text/plain; charset=%s" % encoding
        elif content_type is not None:
            mimetype = parse_mimetype(content_type)
            real_encoding = mimetype.parameters.get("charset", "utf-8")
        else:
            real_encoding = "utf-8"
            content_type = "text/plain; charset=utf-8"

        super().__init__(
            value.encode(real_encoding),
            encoding=real_encoding,
            content_type=content_type,
            *args,
            **kwargs,
        )
+
+
class StringIOPayload(StringPayload):
    """Payload for a text stream; reads the whole buffer eagerly."""

    def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None:
        super().__init__(value.read(), *args, **kwargs)
+
+
class IOBasePayload(Payload):
    """Payload streaming from a file-like object.

    ``write`` closes the underlying file object when done, so the payload
    can only be written once.
    """

    _value: IO[Any]

    def __init__(
        self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any
    ) -> None:
        # Derive a filename from the file object unless the caller supplied
        # one explicitly.
        if "filename" not in kwargs:
            kwargs["filename"] = guess_filename(value)

        super().__init__(value, *args, **kwargs)

        # Advertise the filename via Content-Disposition unless that header
        # was already set by the caller.
        if self._filename is not None and disposition is not None:
            if hdrs.CONTENT_DISPOSITION not in self.headers:
                self.set_content_disposition(disposition, filename=self._filename)

    async def write(self, writer: AbstractStreamWriter) -> None:
        loop = asyncio.get_event_loop()
        try:
            # Blocking reads run in the default executor in 64 KiB chunks
            # to keep the event loop responsive.
            chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
            while chunk:
                await writer.write(chunk)
                chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
        finally:
            await loop.run_in_executor(None, self._value.close)
+
+
class TextIOPayload(IOBasePayload):
    """Payload streaming from a text-mode file object, encoding on the fly."""

    _value: TextIO

    def __init__(
        self,
        value: TextIO,
        *args: Any,
        encoding: Optional[str] = None,
        content_type: Optional[str] = None,
        **kwargs: Any,
    ) -> None:

        # Keep the charset and the Content-Type header consistent whichever
        # of the two the caller supplied.
        if encoding is None:
            if content_type is None:
                encoding = "utf-8"
                content_type = "text/plain; charset=utf-8"
            else:
                mimetype = parse_mimetype(content_type)
                encoding = mimetype.parameters.get("charset", "utf-8")
        else:
            if content_type is None:
                content_type = "text/plain; charset=%s" % encoding

        super().__init__(
            value,
            content_type=content_type,
            encoding=encoding,
            *args,
            **kwargs,
        )

    @property
    def size(self) -> Optional[int]:
        try:
            # NOTE(review): tell() on a text-mode file returns an opaque
            # cookie, not necessarily a byte offset, so this value may be
            # approximate for multi-byte encodings — confirm.
            return os.fstat(self._value.fileno()).st_size - self._value.tell()
        except OSError:
            return None

    async def write(self, writer: AbstractStreamWriter) -> None:
        loop = asyncio.get_event_loop()
        try:
            # Read text in the executor (blocking I/O), encode it, then
            # hand bytes to the async writer; the file is always closed.
            chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
            while chunk:
                data = (
                    chunk.encode(encoding=self._encoding)
                    if self._encoding
                    else chunk.encode()
                )
                await writer.write(data)
                chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
        finally:
            await loop.run_in_executor(None, self._value.close)
+
+
class BytesIOPayload(IOBasePayload):
    """IOBasePayload specialization for in-memory binary buffers."""

    @property
    def size(self) -> int:
        # Remaining bytes between the current position and the end of the
        # buffer; the read position is restored before returning.
        here = self._value.tell()
        total = self._value.seek(0, os.SEEK_END)
        self._value.seek(here)
        return total - here
+
+
class BufferedReaderPayload(IOBasePayload):
    """IOBasePayload for buffered binary readers backed by a real fd."""

    @property
    def size(self) -> Optional[int]:
        try:
            # Remaining bytes = file size minus the current position.
            return os.fstat(self._value.fileno()).st_size - self._value.tell()
        except OSError:
            # data.fileno() is not supported, e.g.
            # io.BufferedReader(io.BytesIO(b'data'))
            return None
+
+
class JsonPayload(BytesPayload):
    """Payload serializing an arbitrary object to JSON bytes."""

    def __init__(
        self,
        value: Any,
        encoding: str = "utf-8",
        content_type: str = "application/json",
        dumps: JSONEncoder = json.dumps,
        *args: Any,
        **kwargs: Any,
    ) -> None:
        # Serialize eagerly; the encoded body is what BytesPayload stores.
        body = dumps(value).encode(encoding)
        super().__init__(
            body,
            content_type=content_type,
            encoding=encoding,
            *args,
            **kwargs,
        )
+
+
# Under the type checker the aliases are parameterized with bytes; at
# runtime the plain ABCs from collections.abc are used instead.
# NOTE(review): presumably the runtime branch avoids subscripting the
# collections.abc ABCs for older-Python support — confirm.
if TYPE_CHECKING:  # pragma: no cover
    from typing import AsyncIterable, AsyncIterator

    _AsyncIterator = AsyncIterator[bytes]
    _AsyncIterable = AsyncIterable[bytes]
else:
    from collections.abc import AsyncIterable, AsyncIterator

    _AsyncIterator = AsyncIterator
    _AsyncIterable = AsyncIterable
+
+
class AsyncIterablePayload(Payload):
    """Payload streaming chunks from an asynchronous iterable of bytes."""

    # Iterator over the remaining chunks; None once exhausted.
    _iter = None  # type: Optional[_AsyncIterator]

    def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None:
        if not isinstance(value, AsyncIterable):
            # Fixed error message: was "collections.abc.AsyncIterablebe".
            raise TypeError(
                "value argument must support "
                "collections.abc.AsyncIterable interface, "
                "got {!r}".format(type(value))
            )

        if "content_type" not in kwargs:
            kwargs["content_type"] = "application/octet-stream"

        super().__init__(value, *args, **kwargs)

        self._iter = value.__aiter__()

    async def write(self, writer: AbstractStreamWriter) -> None:
        # _iter is reset to None once exhausted so writing the same payload
        # twice is a no-op instead of re-iterating a spent iterator.
        if self._iter:
            try:
                while True:
                    chunk = await self._iter.__anext__()
                    await writer.write(chunk)
            except StopAsyncIteration:
                self._iter = None
+
+
class StreamReaderPayload(AsyncIterablePayload):
    """Payload streaming from a StreamReader via its iter_any() iterator."""

    def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None:
        super().__init__(value.iter_any(), *args, **kwargs)
+
+
# Global registry pre-populated with adapters for common Python types.
# More specific classes are registered before the generic io.IOBase and
# AsyncIterable fallbacks.
PAYLOAD_REGISTRY = PayloadRegistry()
PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview))
PAYLOAD_REGISTRY.register(StringPayload, str)
PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO)
PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase)
PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO)
PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase)
PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader)
# try_last gives a chance for more specialized async iterables like
# aiohttp.multipart.BodyPartReaderPayload to override the default
PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last)
diff --git a/contrib/python/aiohttp/aiohttp/payload_streamer.py b/contrib/python/aiohttp/aiohttp/payload_streamer.py
new file mode 100644
index 0000000000..9f8b8bc57c
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/payload_streamer.py
@@ -0,0 +1,75 @@
+"""
Payload implementation for coroutines as a data provider.
+
+As a simple case, you can upload data from file::
+
+ @aiohttp.streamer
+ async def file_sender(writer, file_name=None):
+ with open(file_name, 'rb') as f:
+ chunk = f.read(2**16)
+ while chunk:
+ await writer.write(chunk)
+
+ chunk = f.read(2**16)
+
+Then you can use `file_sender` like this:
+
+ async with session.post('http://httpbin.org/post',
+ data=file_sender(file_name='huge_file')) as resp:
+ print(await resp.text())
+
.. note:: Coroutine must accept `writer` as first argument
+
+"""
+
+import types
+import warnings
+from typing import Any, Awaitable, Callable, Dict, Tuple
+
+from .abc import AbstractStreamWriter
+from .payload import Payload, payload_type
+
+__all__ = ("streamer",)
+
+
class _stream_wrapper:
    """Bundles a data-producer coroutine with its call arguments."""

    def __init__(
        self,
        coro: Callable[..., Awaitable[None]],
        args: Tuple[Any, ...],
        kwargs: Dict[str, Any],
    ) -> None:
        # types.coroutine accepts generator-based producers as well as
        # native coroutine functions.
        self.coro = types.coroutine(coro)
        self.args = args
        self.kwargs = kwargs

    async def __call__(self, writer: AbstractStreamWriter) -> None:
        # The writer is always the first argument the producer receives.
        await self.coro(writer, *self.args, **self.kwargs)  # type: ignore[operator]
+
+
class streamer:
    """Decorator converting a coroutine into a streaming payload factory.

    Deprecated: use async generators instead.
    """

    def __init__(self, coro: Callable[..., Awaitable[None]]) -> None:
        warnings.warn(
            "@streamer is deprecated, use async generators instead",
            DeprecationWarning,
            stacklevel=2,
        )
        self.coro = coro

    def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper:
        # Execution is deferred: the arguments are bound now, the coroutine
        # only runs when the resulting payload is written.
        return _stream_wrapper(self.coro, args, kwargs)
+
+
@payload_type(_stream_wrapper)
class StreamWrapperPayload(Payload):
    """Payload whose value is an awaitable taking the stream writer."""

    async def write(self, writer: AbstractStreamWriter) -> None:
        await self._value(writer)
+
+
@payload_type(streamer)
class StreamPayload(StreamWrapperPayload):
    """Payload produced from a :class:`streamer`-decorated coroutine.

    ``write`` is inherited from :class:`StreamWrapperPayload`; the previous
    byte-identical override was redundant and has been removed.
    """

    def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None:
        # Calling the streamer builds the _stream_wrapper that feeds the
        # writer when the payload is written.
        super().__init__(value(), *args, **kwargs)
diff --git a/contrib/python/aiohttp/aiohttp/py.typed b/contrib/python/aiohttp/aiohttp/py.typed
new file mode 100644
index 0000000000..f5642f79f2
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/py.typed
@@ -0,0 +1 @@
+Marker
diff --git a/contrib/python/aiohttp/aiohttp/pytest_plugin.py b/contrib/python/aiohttp/aiohttp/pytest_plugin.py
new file mode 100644
index 0000000000..dd9a9f6179
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/pytest_plugin.py
@@ -0,0 +1,391 @@
+import asyncio
+import contextlib
+import warnings
+from collections.abc import Callable
+from typing import Any, Awaitable, Callable, Dict, Generator, Optional, Union
+
+import pytest
+
+from aiohttp.helpers import PY_37, isasyncgenfunction
+from aiohttp.web import Application
+
+from .test_utils import (
+ BaseTestServer,
+ RawTestServer,
+ TestClient,
+ TestServer,
+ loop_context,
+ setup_test_loop,
+ teardown_test_loop,
+ unused_port as _unused_port,
+)
+
+try:
+ import uvloop
+except ImportError: # pragma: no cover
+ uvloop = None
+
+try:
+ import tokio
+except ImportError: # pragma: no cover
+ tokio = None
+
+AiohttpClient = Callable[[Union[Application, BaseTestServer]], Awaitable[TestClient]]
+
+
def pytest_addoption(parser):  # type: ignore[no-untyped-def]
    """Register aiohttp's command line options with pytest."""
    # (name, action, default, help) — registered in this fixed order.
    options = (
        (
            "--aiohttp-fast",
            "store_true",
            False,
            "run tests faster by disabling extra checks",
        ),
        (
            "--aiohttp-loop",
            "store",
            "pyloop",
            "run tests with specific loop: pyloop, uvloop, tokio or all",
        ),
        (
            "--aiohttp-enable-loop-debug",
            "store_true",
            False,
            "enable event loop debug mode",
        ),
    )
    for name, action, default, help_text in options:
        parser.addoption(name, action=action, default=default, help=help_text)
+
+
def pytest_fixture_setup(fixturedef):  # type: ignore[no-untyped-def]
    """Set up pytest fixture.

    Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
    """
    func = fixturedef.func

    if isasyncgenfunction(func):
        # async generator fixture
        is_async_gen = True
    elif asyncio.iscoroutinefunction(func):
        # regular async fixture
        is_async_gen = False
    else:
        # not an async fixture, nothing to do
        return

    # The wrapper below needs access to the pytest ``request`` object;
    # inject it into the fixture's argument list if the fixture itself
    # did not declare it, and remember to strip it back out on call.
    strip_request = False
    if "request" not in fixturedef.argnames:
        fixturedef.argnames += ("request",)
        strip_request = True

    def wrapper(*args, **kwargs):  # type: ignore[no-untyped-def]
        # Synchronous wrapper that drives the async fixture on the 'loop'
        # fixture's event loop.
        request = kwargs["request"]
        if strip_request:
            del kwargs["request"]

        # if neither the fixture nor the test use the 'loop' fixture,
        # 'getfixturevalue' will fail because the test is not parameterized
        # (this can be removed someday if 'loop' is no longer parameterized)
        if "loop" not in request.fixturenames:
            raise Exception(
                "Asynchronous fixtures must depend on the 'loop' fixture or "
                "be used in tests depending from it."
            )

        _loop = request.getfixturevalue("loop")

        if is_async_gen:
            # for async generators, we need to advance the generator once,
            # then advance it again in a finalizer
            gen = func(*args, **kwargs)

            def finalizer():  # type: ignore[no-untyped-def]
                # Resume the generator past its yield to run teardown code.
                try:
                    return _loop.run_until_complete(gen.__anext__())
                except StopAsyncIteration:
                    pass

            request.addfinalizer(finalizer)
            return _loop.run_until_complete(gen.__anext__())
        else:
            return _loop.run_until_complete(func(*args, **kwargs))

    # Replace the fixture function so pytest calls the sync wrapper.
    fixturedef.func = wrapper
+
+
@pytest.fixture
def fast(request):  # type: ignore[no-untyped-def]
    """Return the value of the --aiohttp-fast command line option."""
    config = request.config
    return config.getoption("--aiohttp-fast")
+
+
@pytest.fixture
def loop_debug(request):  # type: ignore[no-untyped-def]
    """Return the value of the --aiohttp-enable-loop-debug option."""
    config = request.config
    return config.getoption("--aiohttp-enable-loop-debug")
+
+
+@contextlib.contextmanager
+def _runtime_warning_context(): # type: ignore[no-untyped-def]
+ """Context manager which checks for RuntimeWarnings.
+
+ This exists specifically to
+ avoid "coroutine 'X' was never awaited" warnings being missed.
+
+ If RuntimeWarnings occur in the context a RuntimeError is raised.
+ """
+ with warnings.catch_warnings(record=True) as _warnings:
+ yield
+ rw = [
+ "{w.filename}:{w.lineno}:{w.message}".format(w=w)
+ for w in _warnings
+ if w.category == RuntimeWarning
+ ]
+ if rw:
+ raise RuntimeError(
+ "{} Runtime Warning{},\n{}".format(
+ len(rw), "" if len(rw) == 1 else "s", "\n".join(rw)
+ )
+ )
+
+
@contextlib.contextmanager
def _passthrough_loop_context(loop, fast=False):  # type: ignore[no-untyped-def]
    """Passthrough loop context.

    Sets up and tears down a loop unless one is passed in via the loop
    argument when it's passed straight through.
    """
    if not loop:
        # No loop supplied: own the lifecycle of a temporary test loop.
        fresh = setup_test_loop()
        yield fresh
        teardown_test_loop(fresh, fast=fast)
    else:
        # A loop already exists; hand it straight through untouched.
        yield loop
+
+
def pytest_pycollect_makeitem(collector, name, obj):  # type: ignore[no-untyped-def]
    """Fix pytest collecting for coroutines."""
    # Only intercept names pytest would collect that are coroutine functions;
    # everything else falls through to the default collection (returns None).
    if not (collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj)):
        return None
    return list(collector._genfunctions(name, obj))
+
+
def pytest_pyfunc_call(pyfuncitem):  # type: ignore[no-untyped-def]
    """Run coroutines in an event loop instead of a normal function call.

    Returns ``True`` (claiming the call, per pytest's firstresult hook
    protocol) only for coroutine test functions; for regular functions it
    returns ``None`` so pytest's default call protocol executes them.
    """
    fast = pyfuncitem.config.getoption("--aiohttp-fast")
    if asyncio.iscoroutinefunction(pyfuncitem.function):
        # Reuse an already-created loop fixture if the test requested one.
        existing_loop = pyfuncitem.funcargs.get(
            "proactor_loop"
        ) or pyfuncitem.funcargs.get("loop", None)
        with _runtime_warning_context():
            with _passthrough_loop_context(existing_loop, fast=fast) as _loop:
                testargs = {
                    arg: pyfuncitem.funcargs[arg]
                    for arg in pyfuncitem._fixtureinfo.argnames
                }
                _loop.run_until_complete(pyfuncitem.obj(**testargs))

        # BUG FIX: this return was previously at function level, which made
        # the hook claim EVERY test call — synchronous tests were reported
        # as passing without ever executing. It must apply only to the
        # coroutine branch handled above.
        return True
    return None
+
+
def pytest_generate_tests(metafunc):  # type: ignore[no-untyped-def]
    """Parametrize the ``loop_factory`` fixture from the --aiohttp-loop option.

    Loop names may carry a trailing ``?`` meaning "optional": unknown
    optional loops are skipped, unknown required loops raise ValueError.
    """
    if "loop_factory" not in metafunc.fixturenames:
        return

    loops = metafunc.config.option.aiohttp_loop
    avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy}

    if uvloop is not None:  # pragma: no cover
        avail_factories["uvloop"] = uvloop.EventLoopPolicy

    if tokio is not None:  # pragma: no cover
        avail_factories["tokio"] = tokio.EventLoopPolicy

    if loops == "all":
        loops = "pyloop,uvloop?,tokio?"

    factories = {}  # type: ignore[var-annotated]
    for name in loops.split(","):
        required = not name.endswith("?")
        name = name.strip(" ?")
        if name not in avail_factories:  # pragma: no cover
            if required:
                # BUG FIX: report the loops that actually exist
                # (avail_factories), not the partial selection accumulated
                # in ``factories`` so far — that list could even be empty.
                raise ValueError(
                    "Unknown loop '%s', available loops: %s"
                    % (name, list(avail_factories.keys()))
                )
            else:
                continue
        factories[name] = avail_factories[name]
    metafunc.parametrize(
        "loop_factory", list(factories.values()), ids=list(factories.keys())
    )
+
+
@pytest.fixture
def loop(loop_factory, fast, loop_debug):  # type: ignore[no-untyped-def]
    """Return an instance of the event loop."""
    # Install the parametrized policy before creating the loop.
    asyncio.set_event_loop_policy(loop_factory())
    with loop_context(fast=fast) as _loop:
        if loop_debug:
            _loop.set_debug(True)  # pragma: no cover
        asyncio.set_event_loop(_loop)
        yield _loop
+
+
@pytest.fixture
def proactor_loop():  # type: ignore[no-untyped-def]
    """Event loop fixture using a proactor-based loop.

    NOTE(review): ProactorEventLoop / WindowsProactorEventLoopPolicy are
    Windows-only loop implementations — presumably this fixture is only
    requested on Windows; confirm with callers.
    """
    if not PY_37:
        # Pre-3.7: no dedicated proactor policy; patch the default
        # policy's private loop factory instead.
        policy = asyncio.get_event_loop_policy()
        policy._loop_factory = asyncio.ProactorEventLoop  # type: ignore[attr-defined]
    else:
        policy = asyncio.WindowsProactorEventLoopPolicy()  # type: ignore[attr-defined]
    asyncio.set_event_loop_policy(policy)

    with loop_context(policy.new_event_loop) as _loop:
        asyncio.set_event_loop(_loop)
        yield _loop
+
+
@pytest.fixture
def unused_port(aiohttp_unused_port):  # type: ignore[no-untyped-def] # pragma: no cover
    """Deprecated alias for the ``aiohttp_unused_port`` fixture."""
    msg = "Deprecated, use aiohttp_unused_port fixture instead"
    warnings.warn(msg, DeprecationWarning, stacklevel=2)
    return aiohttp_unused_port
+
+
@pytest.fixture
def aiohttp_unused_port():  # type: ignore[no-untyped-def]
    """Return a port that is unused on the current host."""
    # Returns the helper function itself, so tests call it per-port.
    return _unused_port
+
+
@pytest.fixture
def aiohttp_server(loop):  # type: ignore[no-untyped-def]
    """Factory to create a TestServer instance, given an app.

    aiohttp_server(app, **kwargs)
    """
    started = []

    async def go(app, *, port=None, **kwargs):  # type: ignore[no-untyped-def]
        server = TestServer(app, port=port)
        await server.start_server(loop=loop, **kwargs)
        started.append(server)
        return server

    yield go

    # Tear down every server created during the test.
    async def shutdown() -> None:
        while started:
            await started.pop().close()

    loop.run_until_complete(shutdown())
+
+
@pytest.fixture
def test_server(aiohttp_server):  # type: ignore[no-untyped-def] # pragma: no cover
    """Deprecated alias for the ``aiohttp_server`` fixture."""
    msg = "Deprecated, use aiohttp_server fixture instead"
    warnings.warn(msg, DeprecationWarning, stacklevel=2)
    return aiohttp_server
+
+
@pytest.fixture
def aiohttp_raw_server(loop):  # type: ignore[no-untyped-def]
    """Factory to create a RawTestServer instance, given a web handler.

    aiohttp_raw_server(handler, **kwargs)
    """
    started = []

    async def go(handler, *, port=None, **kwargs):  # type: ignore[no-untyped-def]
        server = RawTestServer(handler, port=port)
        await server.start_server(loop=loop, **kwargs)
        started.append(server)
        return server

    yield go

    # Tear down every raw server created during the test.
    async def shutdown() -> None:
        while started:
            await started.pop().close()

    loop.run_until_complete(shutdown())
+
+
@pytest.fixture
def raw_test_server(  # type: ignore[no-untyped-def] # pragma: no cover
    aiohttp_raw_server,
):
    """Deprecated alias for the ``aiohttp_raw_server`` fixture."""
    msg = "Deprecated, use aiohttp_raw_server fixture instead"
    warnings.warn(msg, DeprecationWarning, stacklevel=2)
    return aiohttp_raw_server
+
+
@pytest.fixture
def aiohttp_client(
    loop: asyncio.AbstractEventLoop,
) -> Generator[AiohttpClient, None, None]:
    """Factory to create a TestClient instance.

    aiohttp_client(app, **kwargs)
    aiohttp_client(server, **kwargs)
    aiohttp_client(raw_server, **kwargs)
    """
    clients = []

    async def go(
        __param: Union[Application, BaseTestServer],
        *args: Any,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any
    ) -> TestClient:

        # Legacy calling convention: a factory callable that builds the
        # app/server is invoked with the loop and the positional args;
        # the kwargs are consumed by the factory, not the client.
        if isinstance(__param, Callable) and not isinstance(  # type: ignore[arg-type]
            __param, (Application, BaseTestServer)
        ):
            __param = __param(loop, *args, **kwargs)
            kwargs = {}
        else:
            assert not args, "args should be empty"

        if isinstance(__param, Application):
            server_kwargs = server_kwargs or {}
            server = TestServer(__param, loop=loop, **server_kwargs)
            client = TestClient(server, loop=loop, **kwargs)
        elif isinstance(__param, BaseTestServer):
            client = TestClient(__param, loop=loop, **kwargs)
        else:
            raise ValueError("Unknown argument type: %r" % type(__param))

        await client.start_server()
        clients.append(client)
        return client

    yield go

    # Close every client created during the test once it finishes.
    async def finalize() -> None:
        while clients:
            await clients.pop().close()

    loop.run_until_complete(finalize())
+
+
@pytest.fixture
def test_client(aiohttp_client):  # type: ignore[no-untyped-def] # pragma: no cover
    """Deprecated alias for the ``aiohttp_client`` fixture."""
    msg = "Deprecated, use aiohttp_client fixture instead"
    warnings.warn(msg, DeprecationWarning, stacklevel=2)
    return aiohttp_client
diff --git a/contrib/python/aiohttp/aiohttp/resolver.py b/contrib/python/aiohttp/aiohttp/resolver.py
new file mode 100644
index 0000000000..6668fa80ec
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/resolver.py
@@ -0,0 +1,160 @@
+import asyncio
+import socket
+from typing import Any, Dict, List, Optional, Type, Union
+
+from .abc import AbstractResolver
+from .helpers import get_running_loop
+
+__all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver")
+
+try:
+ import aiodns
+
+ # aiodns_default = hasattr(aiodns.DNSResolver, 'gethostbyname')
+except ImportError: # pragma: no cover
+ aiodns = None
+
+aiodns_default = False
+
+
class ThreadedResolver(AbstractResolver):
    """Threaded resolver.

    Uses an Executor for synchronous getaddrinfo() calls.
    concurrent.futures.ThreadPoolExecutor is used by default.
    """

    def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        self._loop = get_running_loop(loop)

    async def resolve(
        self, hostname: str, port: int = 0, family: int = socket.AF_INET
    ) -> List[Dict[str, Any]]:
        """Resolve *hostname* into a list of address dicts via getaddrinfo().

        Each dict carries hostname/host/port/family/proto/flags keys.
        """
        infos = await self._loop.getaddrinfo(
            hostname,
            port,
            type=socket.SOCK_STREAM,
            family=family,
            # flags=socket.AI_ADDRCONFIG,
        )

        hosts = []
        # NOTE: this loop deliberately rebinds the ``family`` and ``port``
        # parameters with per-entry values from getaddrinfo().
        for family, _, proto, _, address in infos:
            if family == socket.AF_INET6:
                if len(address) < 3:
                    # IPv6 is not supported by Python build,
                    # or IPv6 is not enabled in the host
                    continue
                if address[3]:  # type: ignore[misc]
                    # This is essential for link-local IPv6 addresses.
                    # LL IPv6 is a VERY rare case. Strictly speaking, we should use
                    # getnameinfo() unconditionally, but performance makes sense.
                    host, _port = socket.getnameinfo(
                        address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV
                    )
                    port = int(_port)
                else:
                    host, port = address[:2]
            else:  # IPv4
                assert family == socket.AF_INET
                host, port = address  # type: ignore[misc]
            hosts.append(
                {
                    "hostname": hostname,
                    "host": host,
                    "port": port,
                    "family": family,
                    "proto": proto,
                    "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
                }
            )

        return hosts

    async def close(self) -> None:
        # No-op: this resolver holds no resources of its own.
        pass
+
+
class AsyncResolver(AbstractResolver):
    """Use the `aiodns` package to make asynchronous DNS lookups"""

    def __init__(
        self,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        *args: Any,
        **kwargs: Any
    ) -> None:
        if aiodns is None:
            raise RuntimeError("Resolver requires aiodns library")

        self._loop = get_running_loop(loop)
        self._resolver = aiodns.DNSResolver(*args, loop=loop, **kwargs)

        if not hasattr(self._resolver, "gethostbyname"):
            # aiodns 1.1 is not available, fallback to DNSResolver.query
            self.resolve = self._resolve_with_query  # type: ignore

    async def resolve(
        self, host: str, port: int = 0, family: int = socket.AF_INET
    ) -> List[Dict[str, Any]]:
        """Resolve *host* with aiodns, returning a list of address dicts.

        Raises OSError on lookup failure or when no address is returned.
        """
        try:
            resp = await self._resolver.gethostbyname(host, family)
        except aiodns.error.DNSError as exc:
            # BUG FIX: the message lives at args[1], so the guard must
            # require at least TWO args; ``>= 1`` raised IndexError for
            # single-argument DNSError instances, masking the real error.
            msg = exc.args[1] if len(exc.args) >= 2 else "DNS lookup failed"
            raise OSError(msg) from exc
        hosts = []
        for address in resp.addresses:
            hosts.append(
                {
                    "hostname": host,
                    "host": address,
                    "port": port,
                    "family": family,
                    "proto": 0,
                    "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
                }
            )

        if not hosts:
            raise OSError("DNS lookup failed")

        return hosts

    async def _resolve_with_query(
        self, host: str, port: int = 0, family: int = socket.AF_INET
    ) -> List[Dict[str, Any]]:
        """Fallback for aiodns < 1.1: resolve via raw A/AAAA queries."""
        if family == socket.AF_INET6:
            qtype = "AAAA"
        else:
            qtype = "A"

        try:
            resp = await self._resolver.query(host, qtype)
        except aiodns.error.DNSError as exc:
            # BUG FIX: same off-by-one guard as in resolve() above.
            msg = exc.args[1] if len(exc.args) >= 2 else "DNS lookup failed"
            raise OSError(msg) from exc

        hosts = []
        for rr in resp:
            hosts.append(
                {
                    "hostname": host,
                    "host": rr.host,
                    "port": port,
                    "family": family,
                    "proto": 0,
                    "flags": socket.AI_NUMERICHOST,
                }
            )

        if not hosts:
            raise OSError("DNS lookup failed")

        return hosts

    async def close(self) -> None:
        """Cancel all in-flight aiodns queries."""
        self._resolver.cancel()
+
+
# aiodns_default is hard-coded to False above, so the threaded
# getaddrinfo()-based resolver is the effective default.
_DefaultType = Type[Union[AsyncResolver, ThreadedResolver]]
DefaultResolver: _DefaultType = AsyncResolver if aiodns_default else ThreadedResolver
diff --git a/contrib/python/aiohttp/aiohttp/streams.py b/contrib/python/aiohttp/aiohttp/streams.py
new file mode 100644
index 0000000000..055848877e
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/streams.py
@@ -0,0 +1,660 @@
+import asyncio
+import collections
+import warnings
+from typing import Awaitable, Callable, Deque, Generic, List, Optional, Tuple, TypeVar
+
+from .base_protocol import BaseProtocol
+from .helpers import BaseTimerContext, set_exception, set_result
+from .log import internal_logger
+from .typedefs import Final
+
+__all__ = (
+ "EMPTY_PAYLOAD",
+ "EofStream",
+ "StreamReader",
+ "DataQueue",
+ "FlowControlDataQueue",
+)
+
+_T = TypeVar("_T")
+
+
class EofStream(Exception):
    """eof stream indication: raised by reads when the stream hit EOF."""
+
+
class AsyncStreamIterator(Generic[_T]):
    """Async iterator adapter around a zero-argument read coroutine.

    Iteration stops when the read function raises EofStream or
    returns an empty bytes object.
    """

    def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None:
        self.read_func = read_func

    def __aiter__(self) -> "AsyncStreamIterator[_T]":
        return self

    async def __anext__(self) -> _T:
        try:
            item = await self.read_func()
        except EofStream:
            raise StopAsyncIteration
        if item == b"":
            raise StopAsyncIteration
        return item
+
+
class ChunkTupleAsyncStreamIterator:
    """Async iterator of (data, end_of_http_chunk) tuples from a stream."""

    def __init__(self, stream: "StreamReader") -> None:
        self._stream = stream

    def __aiter__(self) -> "ChunkTupleAsyncStreamIterator":
        return self

    async def __anext__(self) -> Tuple[bytes, bool]:
        chunk = await self._stream.readchunk()
        # (b"", False) is the stream's end-of-data sentinel.
        if chunk == (b"", False):
            raise StopAsyncIteration
        return chunk
+
+
class AsyncStreamReaderMixin:
    """Adds async-iteration helpers on top of a reader implementation."""

    def __aiter__(self) -> AsyncStreamIterator[bytes]:
        # Default iteration is line by line.
        return AsyncStreamIterator(self.readline)  # type: ignore[attr-defined]

    def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]:
        """Return an async iterator yielding chunks of at most *n* bytes."""
        return AsyncStreamIterator(
            lambda: self.read(n)  # type: ignore[attr-defined,no-any-return]
        )

    def iter_any(self) -> AsyncStreamIterator[bytes]:
        """Return an async iterator yielding data as soon as it arrives."""
        return AsyncStreamIterator(self.readany)  # type: ignore[attr-defined]

    def iter_chunks(self) -> ChunkTupleAsyncStreamIterator:
        """Return an async iterator over (bytes, end_of_http_chunk) tuples,
        as produced by the reader's readchunk() method."""
        return ChunkTupleAsyncStreamIterator(self)  # type: ignore[arg-type]
+
+
class StreamReader(AsyncStreamReaderMixin):
    """An enhancement of asyncio.StreamReader.

    Supports asynchronous iteration by line, chunk or as available::

        async for line in reader:
            ...
        async for chunk in reader.iter_chunked(1024):
            ...
        async for slice in reader.iter_any():
            ...

    """

    # Running count of all bytes ever fed into this reader.
    total_bytes = 0

    def __init__(
        self,
        protocol: BaseProtocol,
        limit: int,
        *,
        timer: Optional[BaseTimerContext] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        self._protocol = protocol
        # Flow control watermarks: pause reading above high, resume below low.
        self._low_water = limit
        self._high_water = limit * 2
        if loop is None:
            loop = asyncio.get_event_loop()
        self._loop = loop
        self._size = 0
        self._cursor = 0
        self._http_chunk_splits = None  # type: Optional[List[int]]
        self._buffer = collections.deque()  # type: Deque[bytes]
        self._buffer_offset = 0
        self._eof = False
        self._waiter = None  # type: Optional[asyncio.Future[None]]
        self._eof_waiter = None  # type: Optional[asyncio.Future[None]]
        self._exception = None  # type: Optional[BaseException]
        self._timer = timer
        self._eof_callbacks = []  # type: List[Callable[[], None]]

    def __repr__(self) -> str:
        info = [self.__class__.__name__]
        if self._size:
            info.append("%d bytes" % self._size)
        if self._eof:
            info.append("eof")
        if self._low_water != 2 ** 16:  # default limit
            info.append("low=%d high=%d" % (self._low_water, self._high_water))
        if self._waiter:
            info.append("w=%r" % self._waiter)
        if self._exception:
            info.append("e=%r" % self._exception)
        return "<%s>" % " ".join(info)

    def get_read_buffer_limits(self) -> Tuple[int, int]:
        """Return the (low, high) flow-control watermarks."""
        return (self._low_water, self._high_water)

    def exception(self) -> Optional[BaseException]:
        return self._exception

    def set_exception(self, exc: BaseException) -> None:
        """Mark the stream broken: pending and future reads raise *exc*."""
        self._exception = exc
        self._eof_callbacks.clear()

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_exception(waiter, exc)

        waiter = self._eof_waiter
        if waiter is not None:
            self._eof_waiter = None
            set_exception(waiter, exc)

    def on_eof(self, callback: Callable[[], None]) -> None:
        """Register *callback* to run at EOF (immediately if already there)."""
        if self._eof:
            try:
                callback()
            except Exception:
                internal_logger.exception("Exception in eof callback")
        else:
            self._eof_callbacks.append(callback)

    def feed_eof(self) -> None:
        """Signal EOF: wake waiters and run registered EOF callbacks."""
        self._eof = True

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

        waiter = self._eof_waiter
        if waiter is not None:
            self._eof_waiter = None
            set_result(waiter, None)

        for cb in self._eof_callbacks:
            try:
                cb()
            except Exception:
                internal_logger.exception("Exception in eof callback")

        self._eof_callbacks.clear()

    def is_eof(self) -> bool:
        """Return True if 'feed_eof' was called."""
        return self._eof

    def at_eof(self) -> bool:
        """Return True if the buffer is empty and 'feed_eof' was called."""
        return self._eof and not self._buffer

    async def wait_eof(self) -> None:
        """Block until EOF is signalled (returns immediately if already at EOF)."""
        if self._eof:
            return

        assert self._eof_waiter is None
        self._eof_waiter = self._loop.create_future()
        try:
            await self._eof_waiter
        finally:
            self._eof_waiter = None

    def unread_data(self, data: bytes) -> None:
        """rollback reading some data from stream, inserting it to buffer head."""
        warnings.warn(
            "unread_data() is deprecated "
            "and will be removed in future releases (#3260)",
            DeprecationWarning,
            stacklevel=2,
        )
        if not data:
            return

        if self._buffer_offset:
            # Materialize the partially-consumed head chunk first.
            self._buffer[0] = self._buffer[0][self._buffer_offset :]
            self._buffer_offset = 0
        self._size += len(data)
        self._cursor -= len(data)
        self._buffer.appendleft(data)
        self._eof_counter = 0

    # TODO: size is ignored, remove the param later
    def feed_data(self, data: bytes, size: int = 0) -> None:
        """Append *data* to the buffer, waking any pending reader."""
        assert not self._eof, "feed_data after feed_eof"

        if not data:
            return

        self._size += len(data)
        self._buffer.append(data)
        self.total_bytes += len(data)

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

        if self._size > self._high_water and not self._protocol._reading_paused:
            self._protocol.pause_reading()

    def begin_http_chunk_receiving(self) -> None:
        """Switch on HTTP-chunk boundary tracking (idempotent)."""
        if self._http_chunk_splits is None:
            if self.total_bytes:
                # BUG FIX: the two string fragments previously concatenated
                # without a separating space ("whensome data").
                raise RuntimeError(
                    "Called begin_http_chunk_receiving when "
                    "some data was already fed"
                )
            self._http_chunk_splits = []

    def end_http_chunk_receiving(self) -> None:
        """Record the end of the current HTTP chunk, waking readchunk()."""
        if self._http_chunk_splits is None:
            raise RuntimeError(
                "Called end_chunk_receiving without calling "
                "begin_chunk_receiving first"
            )

        # self._http_chunk_splits contains logical byte offsets from start of
        # the body transfer. Each offset is the offset of the end of a chunk.
        # "Logical" means bytes, accessible for a user.
        # If no chunks containing logical data were received, current position
        # is definitely zero.
        pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0

        if self.total_bytes == pos:
            # We should not add empty chunks here. So we check for that.
            # Note, when chunked + gzip is used, we can receive a chunk
            # of compressed data, but that data may not be enough for gzip FSM
            # to yield any uncompressed data. That's why current position may
            # not change after receiving a chunk.
            return

        self._http_chunk_splits.append(self.total_bytes)

        # wake up readchunk when end of http chunk received
        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

    async def _wait(self, func_name: str) -> None:
        # StreamReader uses a future to link the protocol feed_data() method
        # to a read coroutine. Running two read coroutines at the same time
        # would have an unexpected behaviour. It would not possible to know
        # which coroutine would get the next data.
        if self._waiter is not None:
            raise RuntimeError(
                "%s() called while another coroutine is "
                "already waiting for incoming data" % func_name
            )

        waiter = self._waiter = self._loop.create_future()
        try:
            if self._timer:
                with self._timer:
                    await waiter
            else:
                await waiter
        finally:
            self._waiter = None

    async def readline(self) -> bytes:
        """Read one line (up to and including b'\\n')."""
        return await self.readuntil()

    async def readuntil(self, separator: bytes = b"\n") -> bytes:
        """Read until *separator* (inclusive) or EOF; caps at the high water mark."""
        seplen = len(separator)
        if seplen == 0:
            raise ValueError("Separator should be at least one-byte string")

        if self._exception is not None:
            raise self._exception

        chunk = b""
        chunk_size = 0
        not_enough = True

        while not_enough:
            while self._buffer and not_enough:
                offset = self._buffer_offset
                ichar = self._buffer[0].find(separator, offset) + 1
                # Read from current offset to found separator or to the end.
                data = self._read_nowait_chunk(ichar - offset if ichar else -1)
                chunk += data
                chunk_size += len(data)
                if ichar:
                    not_enough = False

                if chunk_size > self._high_water:
                    raise ValueError("Chunk too big")

            if self._eof:
                break

            if not_enough:
                await self._wait("readuntil")

        return chunk

    async def read(self, n: int = -1) -> bytes:
        """Read up to *n* bytes (all remaining data when n < 0)."""
        if self._exception is not None:
            raise self._exception

        # migration problem; with DataQueue you have to catch
        # EofStream exception, so common way is to run payload.read() inside
        # infinite loop. what can cause real infinite loop with StreamReader
        # lets keep this code one major release.
        if __debug__:
            if self._eof and not self._buffer:
                self._eof_counter = getattr(self, "_eof_counter", 0) + 1
                if self._eof_counter > 5:
                    internal_logger.warning(
                        "Multiple access to StreamReader in eof state, "
                        "might be infinite loop.",
                        stack_info=True,
                    )

        if not n:
            return b""

        if n < 0:
            # This used to just loop creating a new waiter hoping to
            # collect everything in self._buffer, but that would
            # deadlock if the subprocess sends more than self.limit
            # bytes. So just call self.readany() until EOF.
            blocks = []
            while True:
                block = await self.readany()
                if not block:
                    break
                blocks.append(block)
            return b"".join(blocks)

        # TODO: should be `if` instead of `while`
        # because waiter maybe triggered on chunk end,
        # without feeding any data
        while not self._buffer and not self._eof:
            await self._wait("read")

        return self._read_nowait(n)

    async def readany(self) -> bytes:
        """Read whatever data is available, waiting for at least one byte."""
        if self._exception is not None:
            raise self._exception

        # TODO: should be `if` instead of `while`
        # because waiter maybe triggered on chunk end,
        # without feeding any data
        while not self._buffer and not self._eof:
            await self._wait("readany")

        return self._read_nowait(-1)

    async def readchunk(self) -> Tuple[bytes, bool]:
        """Returns a tuple of (data, end_of_http_chunk).

        When chunked transfer
        encoding is used, end_of_http_chunk is a boolean indicating if the end
        of the data corresponds to the end of a HTTP chunk , otherwise it is
        always False.
        """
        while True:
            if self._exception is not None:
                raise self._exception

            while self._http_chunk_splits:
                pos = self._http_chunk_splits.pop(0)
                if pos == self._cursor:
                    return (b"", True)
                if pos > self._cursor:
                    return (self._read_nowait(pos - self._cursor), True)
                internal_logger.warning(
                    "Skipping HTTP chunk end due to data "
                    "consumption beyond chunk boundary"
                )

            if self._buffer:
                return (self._read_nowait_chunk(-1), False)
                # return (self._read_nowait(-1), False)

            if self._eof:
                # Special case for signifying EOF.
                # (b'', True) is not a final return value actually.
                return (b"", False)

            await self._wait("readchunk")

    async def readexactly(self, n: int) -> bytes:
        """Read exactly *n* bytes or raise IncompleteReadError at EOF."""
        if self._exception is not None:
            raise self._exception

        blocks = []  # type: List[bytes]
        while n > 0:
            block = await self.read(n)
            if not block:
                partial = b"".join(blocks)
                raise asyncio.IncompleteReadError(partial, len(partial) + n)
            blocks.append(block)
            n -= len(block)

        return b"".join(blocks)

    def read_nowait(self, n: int = -1) -> bytes:
        """Synchronously read buffered data without waiting."""
        # default was changed to be consistent with .read(-1)
        #
        # I believe the most users don't know about the method and
        # they are not affected.
        if self._exception is not None:
            raise self._exception

        if self._waiter and not self._waiter.done():
            raise RuntimeError(
                "Called while some coroutine is waiting for incoming data."
            )

        return self._read_nowait(n)

    def _read_nowait_chunk(self, n: int) -> bytes:
        # Pop up to n bytes from the head buffer chunk (all of it when n == -1).
        first_buffer = self._buffer[0]
        offset = self._buffer_offset
        if n != -1 and len(first_buffer) - offset > n:
            data = first_buffer[offset : offset + n]
            self._buffer_offset += n

        elif offset:
            self._buffer.popleft()
            data = first_buffer[offset:]
            self._buffer_offset = 0

        else:
            data = self._buffer.popleft()

        self._size -= len(data)
        self._cursor += len(data)

        chunk_splits = self._http_chunk_splits
        # Prevent memory leak: drop useless chunk splits
        while chunk_splits and chunk_splits[0] < self._cursor:
            chunk_splits.pop(0)

        if self._size < self._low_water and self._protocol._reading_paused:
            self._protocol.resume_reading()
        return data

    def _read_nowait(self, n: int) -> bytes:
        """Read not more than n bytes, or whole buffer if n == -1"""
        chunks = []

        while self._buffer:
            chunk = self._read_nowait_chunk(n)
            chunks.append(chunk)
            if n != -1:
                n -= len(chunk)
                if n == 0:
                    break

        return b"".join(chunks) if chunks else b""
+
+
class EmptyStreamReader(StreamReader):  # lgtm [py/missing-call-to-init]
    """Stream reader that is permanently empty and already at EOF."""

    def __init__(self) -> None:
        # Intentionally skips StreamReader.__init__: no loop, buffer or
        # protocol is needed for an always-empty stream.
        pass

    def exception(self) -> Optional[BaseException]:
        return None

    def set_exception(self, exc: BaseException) -> None:
        pass

    def on_eof(self, callback: Callable[[], None]) -> None:
        # Already at EOF, so the callback runs immediately.
        try:
            callback()
        except Exception:
            internal_logger.exception("Exception in eof callback")

    def feed_eof(self) -> None:
        pass

    def is_eof(self) -> bool:
        return True

    def at_eof(self) -> bool:
        return True

    async def wait_eof(self) -> None:
        return

    def feed_data(self, data: bytes, n: int = 0) -> None:
        pass

    async def readline(self) -> bytes:
        return b""

    async def read(self, n: int = -1) -> bytes:
        return b""

    # TODO add async def readuntil

    async def readany(self) -> bytes:
        return b""

    async def readchunk(self) -> Tuple[bytes, bool]:
        return (b"", True)

    async def readexactly(self, n: int) -> bytes:
        # Nothing buffered, so any positive request is incomplete.
        raise asyncio.IncompleteReadError(b"", n)

    def read_nowait(self, n: int = -1) -> bytes:
        return b""
+
+
# Shared singleton used wherever a message carries no body.
EMPTY_PAYLOAD: Final[StreamReader] = EmptyStreamReader()
+
+
class DataQueue(Generic[_T]):
    """DataQueue is a general-purpose blocking queue with one reader."""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._eof = False
        self._waiter = None  # type: Optional[asyncio.Future[None]]
        self._exception = None  # type: Optional[BaseException]
        self._size = 0
        self._buffer = collections.deque()  # type: Deque[Tuple[_T, int]]

    def __len__(self) -> int:
        return len(self._buffer)

    def is_eof(self) -> bool:
        """True once feed_eof() has been called."""
        return self._eof

    def at_eof(self) -> bool:
        """True when EOF was fed and all buffered items were consumed."""
        return self._eof and not self._buffer

    def exception(self) -> Optional[BaseException]:
        return self._exception

    def _release_waiter(self) -> None:
        # Wake up the (single) pending read(), if there is one.
        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

    def set_exception(self, exc: BaseException) -> None:
        """Mark the queue broken: once drained, read() raises *exc*."""
        self._eof = True
        self._exception = exc

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_exception(waiter, exc)

    def feed_data(self, data: _T, size: int = 0) -> None:
        """Append one item (accounted with *size*) and wake the reader."""
        self._size += size
        self._buffer.append((data, size))
        self._release_waiter()

    def feed_eof(self) -> None:
        """Signal that no more data will arrive."""
        self._eof = True
        self._release_waiter()

    async def read(self) -> _T:
        """Pop the next item, waiting for data; raise EofStream at EOF."""
        if not self._buffer and not self._eof:
            assert not self._waiter
            self._waiter = self._loop.create_future()
            try:
                await self._waiter
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._waiter = None
                raise

        if not self._buffer:
            # Drained: surface a stored error, otherwise signal EOF.
            if self._exception is not None:
                raise self._exception
            raise EofStream

        data, size = self._buffer.popleft()
        self._size -= size
        return data

    def __aiter__(self) -> AsyncStreamIterator[_T]:
        return AsyncStreamIterator(self.read)
+
+
class FlowControlDataQueue(DataQueue[_T]):
    """FlowControlDataQueue resumes and pauses an underlying stream.

    It is a destination for parsed data.
    """

    def __init__(
        self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
    ) -> None:
        super().__init__(loop=loop)
        self._protocol = protocol
        # Pause reading once the buffered size exceeds twice the soft limit.
        self._limit = limit * 2

    def feed_data(self, data: _T, size: int = 0) -> None:
        """Enqueue parsed data, pausing the protocol when over the limit."""
        super().feed_data(data, size)

        over_limit = self._size > self._limit
        if over_limit and not self._protocol._reading_paused:
            self._protocol.pause_reading()

    async def read(self) -> _T:
        """Dequeue one item, resuming the protocol when back under the limit."""
        try:
            return await super().read()
        finally:
            if self._protocol._reading_paused and self._size < self._limit:
                self._protocol.resume_reading()
diff --git a/contrib/python/aiohttp/aiohttp/tcp_helpers.py b/contrib/python/aiohttp/aiohttp/tcp_helpers.py
new file mode 100644
index 0000000000..0e1dbf1655
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/tcp_helpers.py
@@ -0,0 +1,38 @@
+"""Helper methods to tune a TCP connection"""
+
+import asyncio
+import socket
+from contextlib import suppress
+from typing import Optional # noqa
+
+__all__ = ("tcp_keepalive", "tcp_nodelay")
+
+
+if hasattr(socket, "SO_KEEPALIVE"):
+
+ def tcp_keepalive(transport: asyncio.Transport) -> None:
+ sock = transport.get_extra_info("socket")
+ if sock is not None:
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+
+
+else:
+
+ def tcp_keepalive(transport: asyncio.Transport) -> None: # pragma: no cover
+ pass
+
+
+def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None:
+ sock = transport.get_extra_info("socket")
+
+ if sock is None:
+ return
+
+ if sock.family not in (socket.AF_INET, socket.AF_INET6):
+ return
+
+ value = bool(value)
+
+ # socket may be closed already, on windows OSError get raised
+ with suppress(OSError):
+ sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, value)
diff --git a/contrib/python/aiohttp/aiohttp/test_utils.py b/contrib/python/aiohttp/aiohttp/test_utils.py
new file mode 100644
index 0000000000..361dae486c
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/test_utils.py
@@ -0,0 +1,698 @@
+"""Utilities shared by tests."""
+
+import asyncio
+import contextlib
+import gc
+import inspect
+import ipaddress
+import os
+import socket
+import sys
+import warnings
+from abc import ABC, abstractmethod
+from types import TracebackType
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Iterator,
+ List,
+ Optional,
+ Type,
+ Union,
+ cast,
+)
+from unittest import mock
+
+from aiosignal import Signal
+from multidict import CIMultiDict, CIMultiDictProxy
+from yarl import URL
+
+import aiohttp
+from aiohttp.client import _RequestContextManager, _WSRequestContextManager
+
+from . import ClientSession, hdrs
+from .abc import AbstractCookieJar
+from .client_reqrep import ClientResponse
+from .client_ws import ClientWebSocketResponse
+from .helpers import PY_38, sentinel
+from .http import HttpVersion, RawRequestMessage
+from .web import (
+ Application,
+ AppRunner,
+ BaseRunner,
+ Request,
+ Server,
+ ServerRunner,
+ SockSite,
+ UrlMappingMatchInfo,
+)
+from .web_protocol import _RequestHandler
+
+if TYPE_CHECKING: # pragma: no cover
+ from ssl import SSLContext
+else:
+ SSLContext = None
+
+if PY_38:
+ from unittest import IsolatedAsyncioTestCase as TestCase
+else:
+ from asynctest import TestCase # type: ignore[no-redef]
+
+REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin"
+
+
+def get_unused_port_socket(
+ host: str, family: socket.AddressFamily = socket.AF_INET
+) -> socket.socket:
+ return get_port_socket(host, 0, family)
+
+
+def get_port_socket(
+ host: str, port: int, family: socket.AddressFamily
+) -> socket.socket:
+ s = socket.socket(family, socket.SOCK_STREAM)
+ if REUSE_ADDRESS:
+ # Windows has different semantics for SO_REUSEADDR,
+ # so don't set it. Ref:
+ # https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse
+ s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ s.bind((host, port))
+ return s
+
+
+def unused_port() -> int:
+ """Return a port that is unused on the current host."""
+ with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+ s.bind(("127.0.0.1", 0))
+ return cast(int, s.getsockname()[1])
+
+
+class BaseTestServer(ABC):
+ __test__ = False
+
+ def __init__(
+ self,
+ *,
+ scheme: Union[str, object] = sentinel,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ host: str = "127.0.0.1",
+ port: Optional[int] = None,
+ skip_url_asserts: bool = False,
+ socket_factory: Callable[
+ [str, int, socket.AddressFamily], socket.socket
+ ] = get_port_socket,
+ **kwargs: Any,
+ ) -> None:
+ self._loop = loop
+ self.runner = None # type: Optional[BaseRunner]
+ self._root = None # type: Optional[URL]
+ self.host = host
+ self.port = port
+ self._closed = False
+ self.scheme = scheme
+ self.skip_url_asserts = skip_url_asserts
+ self.socket_factory = socket_factory
+
+ async def start_server(
+ self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any
+ ) -> None:
+ if self.runner:
+ return
+ self._loop = loop
+ self._ssl = kwargs.pop("ssl", None)
+ self.runner = await self._make_runner(**kwargs)
+ await self.runner.setup()
+ if not self.port:
+ self.port = 0
+ try:
+ version = ipaddress.ip_address(self.host).version
+ except ValueError:
+ version = 4
+ family = socket.AF_INET6 if version == 6 else socket.AF_INET
+ _sock = self.socket_factory(self.host, self.port, family)
+ self.host, self.port = _sock.getsockname()[:2]
+ site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl)
+ await site.start()
+ server = site._server
+ assert server is not None
+ sockets = server.sockets
+ assert sockets is not None
+ self.port = sockets[0].getsockname()[1]
+ if self.scheme is sentinel:
+ if self._ssl:
+ scheme = "https"
+ else:
+ scheme = "http"
+ self.scheme = scheme
+ self._root = URL(f"{self.scheme}://{self.host}:{self.port}")
+
+ @abstractmethod # pragma: no cover
+ async def _make_runner(self, **kwargs: Any) -> BaseRunner:
+ pass
+
+ def make_url(self, path: str) -> URL:
+ assert self._root is not None
+ url = URL(path)
+ if not self.skip_url_asserts:
+ assert not url.is_absolute()
+ return self._root.join(url)
+ else:
+ return URL(str(self._root) + path)
+
+ @property
+ def started(self) -> bool:
+ return self.runner is not None
+
+ @property
+ def closed(self) -> bool:
+ return self._closed
+
+ @property
+ def handler(self) -> Server:
+ # for backward compatibility
+ # web.Server instance
+ runner = self.runner
+ assert runner is not None
+ assert runner.server is not None
+ return runner.server
+
+ async def close(self) -> None:
+ """Close all fixtures created by the test client.
+
+ After that point, the TestClient is no longer usable.
+
+ This is an idempotent function: running close multiple times
+ will not have any additional effects.
+
+ close is also run when the object is garbage collected, and on
+ exit when used as a context manager.
+
+ """
+ if self.started and not self.closed:
+ assert self.runner is not None
+ await self.runner.cleanup()
+ self._root = None
+ self.port = None
+ self._closed = True
+
+ def __enter__(self) -> None:
+ raise TypeError("Use async with instead")
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_value: Optional[BaseException],
+ traceback: Optional[TracebackType],
+ ) -> None:
+ # __exit__ should exist in pair with __enter__ but never executed
+ pass # pragma: no cover
+
+ async def __aenter__(self) -> "BaseTestServer":
+ await self.start_server(loop=self._loop)
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_value: Optional[BaseException],
+ traceback: Optional[TracebackType],
+ ) -> None:
+ await self.close()
+
+
+class TestServer(BaseTestServer):
+ def __init__(
+ self,
+ app: Application,
+ *,
+ scheme: Union[str, object] = sentinel,
+ host: str = "127.0.0.1",
+ port: Optional[int] = None,
+ **kwargs: Any,
+ ):
+ self.app = app
+ super().__init__(scheme=scheme, host=host, port=port, **kwargs)
+
+ async def _make_runner(self, **kwargs: Any) -> BaseRunner:
+ return AppRunner(self.app, **kwargs)
+
+
+class RawTestServer(BaseTestServer):
+ def __init__(
+ self,
+ handler: _RequestHandler,
+ *,
+ scheme: Union[str, object] = sentinel,
+ host: str = "127.0.0.1",
+ port: Optional[int] = None,
+ **kwargs: Any,
+ ) -> None:
+ self._handler = handler
+ super().__init__(scheme=scheme, host=host, port=port, **kwargs)
+
+ async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner:
+ srv = Server(self._handler, loop=self._loop, debug=debug, **kwargs)
+ return ServerRunner(srv, debug=debug, **kwargs)
+
+
+class TestClient:
+ """
+ A test client implementation.
+
+ To write functional tests for aiohttp based servers.
+
+ """
+
+ __test__ = False
+
+ def __init__(
+ self,
+ server: BaseTestServer,
+ *,
+ cookie_jar: Optional[AbstractCookieJar] = None,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ **kwargs: Any,
+ ) -> None:
+ if not isinstance(server, BaseTestServer):
+ raise TypeError(
+ "server must be TestServer " "instance, found type: %r" % type(server)
+ )
+ self._server = server
+ self._loop = loop
+ if cookie_jar is None:
+ cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop)
+ self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs)
+ self._closed = False
+ self._responses = [] # type: List[ClientResponse]
+ self._websockets = [] # type: List[ClientWebSocketResponse]
+
+ async def start_server(self) -> None:
+ await self._server.start_server(loop=self._loop)
+
+ @property
+ def host(self) -> str:
+ return self._server.host
+
+ @property
+ def port(self) -> Optional[int]:
+ return self._server.port
+
+ @property
+ def server(self) -> BaseTestServer:
+ return self._server
+
+ @property
+ def app(self) -> Optional[Application]:
+ return cast(Optional[Application], getattr(self._server, "app", None))
+
+ @property
+ def session(self) -> ClientSession:
+ """An internal aiohttp.ClientSession.
+
+ Unlike the methods on the TestClient, client session requests
+ do not automatically include the host in the url queried, and
+ will require an absolute path to the resource.
+
+ """
+ return self._session
+
+ def make_url(self, path: str) -> URL:
+ return self._server.make_url(path)
+
+ async def _request(self, method: str, path: str, **kwargs: Any) -> ClientResponse:
+ resp = await self._session.request(method, self.make_url(path), **kwargs)
+ # save it to close later
+ self._responses.append(resp)
+ return resp
+
+ def request(self, method: str, path: str, **kwargs: Any) -> _RequestContextManager:
+ """Routes a request to tested http server.
+
+ The interface is identical to aiohttp.ClientSession.request,
+ except the loop kwarg is overridden by the instance used by the
+ test server.
+
+ """
+ return _RequestContextManager(self._request(method, path, **kwargs))
+
+ def get(self, path: str, **kwargs: Any) -> _RequestContextManager:
+ """Perform an HTTP GET request."""
+ return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs))
+
+ def post(self, path: str, **kwargs: Any) -> _RequestContextManager:
+ """Perform an HTTP POST request."""
+ return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs))
+
+ def options(self, path: str, **kwargs: Any) -> _RequestContextManager:
+ """Perform an HTTP OPTIONS request."""
+ return _RequestContextManager(self._request(hdrs.METH_OPTIONS, path, **kwargs))
+
+ def head(self, path: str, **kwargs: Any) -> _RequestContextManager:
+ """Perform an HTTP HEAD request."""
+ return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs))
+
+ def put(self, path: str, **kwargs: Any) -> _RequestContextManager:
+ """Perform an HTTP PUT request."""
+ return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs))
+
+ def patch(self, path: str, **kwargs: Any) -> _RequestContextManager:
+ """Perform an HTTP PATCH request."""
+ return _RequestContextManager(self._request(hdrs.METH_PATCH, path, **kwargs))
+
+ def delete(self, path: str, **kwargs: Any) -> _RequestContextManager:
+        """Perform an HTTP DELETE request."""
+ return _RequestContextManager(self._request(hdrs.METH_DELETE, path, **kwargs))
+
+ def ws_connect(self, path: str, **kwargs: Any) -> _WSRequestContextManager:
+ """Initiate websocket connection.
+
+ The api corresponds to aiohttp.ClientSession.ws_connect.
+
+ """
+ return _WSRequestContextManager(self._ws_connect(path, **kwargs))
+
+ async def _ws_connect(self, path: str, **kwargs: Any) -> ClientWebSocketResponse:
+ ws = await self._session.ws_connect(self.make_url(path), **kwargs)
+ self._websockets.append(ws)
+ return ws
+
+ async def close(self) -> None:
+ """Close all fixtures created by the test client.
+
+ After that point, the TestClient is no longer usable.
+
+ This is an idempotent function: running close multiple times
+ will not have any additional effects.
+
+ close is also run on exit when used as a(n) (asynchronous)
+ context manager.
+
+ """
+ if not self._closed:
+ for resp in self._responses:
+ resp.close()
+ for ws in self._websockets:
+ await ws.close()
+ await self._session.close()
+ await self._server.close()
+ self._closed = True
+
+ def __enter__(self) -> None:
+ raise TypeError("Use async with instead")
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc: Optional[BaseException],
+ tb: Optional[TracebackType],
+ ) -> None:
+ # __exit__ should exist in pair with __enter__ but never executed
+ pass # pragma: no cover
+
+ async def __aenter__(self) -> "TestClient":
+ await self.start_server()
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc: Optional[BaseException],
+ tb: Optional[TracebackType],
+ ) -> None:
+ await self.close()
+
+
+class AioHTTPTestCase(TestCase):
+ """A base class to allow for unittest web applications using aiohttp.
+
+ Provides the following:
+
+ * self.client (aiohttp.test_utils.TestClient): an aiohttp test client.
+ * self.loop (asyncio.BaseEventLoop): the event loop in which the
+ application and server are running.
+ * self.app (aiohttp.web.Application): the application returned by
+ self.get_application()
+
+ Note that the TestClient's methods are asynchronous: you have to
+ execute function on the test client using asynchronous methods.
+ """
+
+ async def get_application(self) -> Application:
+ """Get application.
+
+ This method should be overridden
+ to return the aiohttp.web.Application
+ object to test.
+ """
+ return self.get_app()
+
+ def get_app(self) -> Application:
+        """Obsolete method used for constructing a web application.
+
+ Use .get_application() coroutine instead.
+ """
+ raise RuntimeError("Did you forget to define get_application()?")
+
+ def setUp(self) -> None:
+ try:
+ self.loop = asyncio.get_running_loop()
+ except (AttributeError, RuntimeError): # AttributeError->py36
+ self.loop = asyncio.get_event_loop_policy().get_event_loop()
+
+ self.loop.run_until_complete(self.setUpAsync())
+
+ async def setUpAsync(self) -> None:
+ self.app = await self.get_application()
+ self.server = await self.get_server(self.app)
+ self.client = await self.get_client(self.server)
+
+ await self.client.start_server()
+
+ def tearDown(self) -> None:
+ self.loop.run_until_complete(self.tearDownAsync())
+
+ async def tearDownAsync(self) -> None:
+ await self.client.close()
+
+ async def get_server(self, app: Application) -> TestServer:
+ """Return a TestServer instance."""
+ return TestServer(app, loop=self.loop)
+
+ async def get_client(self, server: TestServer) -> TestClient:
+ """Return a TestClient instance."""
+ return TestClient(server, loop=self.loop)
+
+
+def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any:
+ """
+ A decorator dedicated to use with asynchronous AioHTTPTestCase test methods.
+
+ In 3.8+, this does nothing.
+ """
+ warnings.warn(
+ "Decorator `@unittest_run_loop` is no longer needed in aiohttp 3.8+",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return func
+
+
+_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop]
+
+
+@contextlib.contextmanager
+def loop_context(
+ loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False
+) -> Iterator[asyncio.AbstractEventLoop]:
+ """A contextmanager that creates an event_loop, for test purposes.
+
+ Handles the creation and cleanup of a test loop.
+ """
+ loop = setup_test_loop(loop_factory)
+ yield loop
+ teardown_test_loop(loop, fast=fast)
+
+
+def setup_test_loop(
+ loop_factory: _LOOP_FACTORY = asyncio.new_event_loop,
+) -> asyncio.AbstractEventLoop:
+ """Create and return an asyncio.BaseEventLoop instance.
+
+ The caller should also call teardown_test_loop,
+ once they are done with the loop.
+ """
+ loop = loop_factory()
+ try:
+ module = loop.__class__.__module__
+ skip_watcher = "uvloop" in module
+ except AttributeError: # pragma: no cover
+ # Just in case
+ skip_watcher = True
+ asyncio.set_event_loop(loop)
+ if sys.platform != "win32" and not skip_watcher:
+ policy = asyncio.get_event_loop_policy()
+ watcher: asyncio.AbstractChildWatcher
+ try: # Python >= 3.8
+ # Refs:
+ # * https://github.com/pytest-dev/pytest-xdist/issues/620
+ # * https://stackoverflow.com/a/58614689/595220
+ # * https://bugs.python.org/issue35621
+ # * https://github.com/python/cpython/pull/14344
+ watcher = asyncio.ThreadedChildWatcher()
+ except AttributeError: # Python < 3.8
+ watcher = asyncio.SafeChildWatcher()
+ watcher.attach_loop(loop)
+ with contextlib.suppress(NotImplementedError):
+ policy.set_child_watcher(watcher)
+ return loop
+
+
+def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None:
+ """Teardown and cleanup an event_loop created by setup_test_loop."""
+ closed = loop.is_closed()
+ if not closed:
+ loop.call_soon(loop.stop)
+ loop.run_forever()
+ loop.close()
+
+ if not fast:
+ gc.collect()
+
+ asyncio.set_event_loop(None)
+
+
+def _create_app_mock() -> mock.MagicMock:
+ def get_dict(app: Any, key: str) -> Any:
+ return app.__app_dict[key]
+
+ def set_dict(app: Any, key: str, value: Any) -> None:
+ app.__app_dict[key] = value
+
+ app = mock.MagicMock()
+ app.__app_dict = {}
+ app.__getitem__ = get_dict
+ app.__setitem__ = set_dict
+
+ app._debug = False
+ app.on_response_prepare = Signal(app)
+ app.on_response_prepare.freeze()
+ return app
+
+
+def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock:
+ transport = mock.Mock()
+
+ def get_extra_info(key: str) -> Optional[SSLContext]:
+ if key == "sslcontext":
+ return sslcontext
+ else:
+ return None
+
+ transport.get_extra_info.side_effect = get_extra_info
+ return transport
+
+
+def make_mocked_request(
+ method: str,
+ path: str,
+ headers: Any = None,
+ *,
+ match_info: Any = sentinel,
+ version: HttpVersion = HttpVersion(1, 1),
+ closing: bool = False,
+ app: Any = None,
+ writer: Any = sentinel,
+ protocol: Any = sentinel,
+ transport: Any = sentinel,
+ payload: Any = sentinel,
+ sslcontext: Optional[SSLContext] = None,
+ client_max_size: int = 1024 ** 2,
+ loop: Any = ...,
+) -> Request:
+    """Creates a mocked web.Request for testing purposes.
+
+ Useful in unit tests, when spinning full web server is overkill or
+ specific conditions and errors are hard to trigger.
+ """
+ task = mock.Mock()
+ if loop is ...:
+ loop = mock.Mock()
+ loop.create_future.return_value = ()
+
+ if version < HttpVersion(1, 1):
+ closing = True
+
+ if headers:
+ headers = CIMultiDictProxy(CIMultiDict(headers))
+ raw_hdrs = tuple(
+ (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
+ )
+ else:
+ headers = CIMultiDictProxy(CIMultiDict())
+ raw_hdrs = ()
+
+ chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower()
+
+ message = RawRequestMessage(
+ method,
+ path,
+ version,
+ headers,
+ raw_hdrs,
+ closing,
+ None,
+ False,
+ chunked,
+ URL(path),
+ )
+ if app is None:
+ app = _create_app_mock()
+
+ if transport is sentinel:
+ transport = _create_transport(sslcontext)
+
+ if protocol is sentinel:
+ protocol = mock.Mock()
+ protocol.transport = transport
+
+ if writer is sentinel:
+ writer = mock.Mock()
+ writer.write_headers = make_mocked_coro(None)
+ writer.write = make_mocked_coro(None)
+ writer.write_eof = make_mocked_coro(None)
+ writer.drain = make_mocked_coro(None)
+ writer.transport = transport
+
+ protocol.transport = transport
+ protocol.writer = writer
+
+ if payload is sentinel:
+ payload = mock.Mock()
+
+ req = Request(
+ message, payload, protocol, writer, task, loop, client_max_size=client_max_size
+ )
+
+ match_info = UrlMappingMatchInfo(
+ {} if match_info is sentinel else match_info, mock.Mock()
+ )
+ match_info.add_app(app)
+ req._match_info = match_info
+
+ return req
+
+
+def make_mocked_coro(
+ return_value: Any = sentinel, raise_exception: Any = sentinel
+) -> Any:
+ """Creates a coroutine mock."""
+
+ async def mock_coro(*args: Any, **kwargs: Any) -> Any:
+ if raise_exception is not sentinel:
+ raise raise_exception
+ if not inspect.isawaitable(return_value):
+ return return_value
+ await return_value
+
+ return mock.Mock(wraps=mock_coro)
diff --git a/contrib/python/aiohttp/aiohttp/tracing.py b/contrib/python/aiohttp/aiohttp/tracing.py
new file mode 100644
index 0000000000..0e118a3997
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/tracing.py
@@ -0,0 +1,472 @@
+from types import SimpleNamespace
+from typing import TYPE_CHECKING, Awaitable, Optional, Type, TypeVar
+
+import attr
+from aiosignal import Signal
+from multidict import CIMultiDict
+from yarl import URL
+
+from .client_reqrep import ClientResponse
+
+if TYPE_CHECKING: # pragma: no cover
+ from .client import ClientSession
+ from .typedefs import Protocol
+
+ _ParamT_contra = TypeVar("_ParamT_contra", contravariant=True)
+
+ class _SignalCallback(Protocol[_ParamT_contra]):
+ def __call__(
+ self,
+ __client_session: ClientSession,
+ __trace_config_ctx: SimpleNamespace,
+ __params: _ParamT_contra,
+ ) -> Awaitable[None]:
+ ...
+
+
+__all__ = (
+ "TraceConfig",
+ "TraceRequestStartParams",
+ "TraceRequestEndParams",
+ "TraceRequestExceptionParams",
+ "TraceConnectionQueuedStartParams",
+ "TraceConnectionQueuedEndParams",
+ "TraceConnectionCreateStartParams",
+ "TraceConnectionCreateEndParams",
+ "TraceConnectionReuseconnParams",
+ "TraceDnsResolveHostStartParams",
+ "TraceDnsResolveHostEndParams",
+ "TraceDnsCacheHitParams",
+ "TraceDnsCacheMissParams",
+ "TraceRequestRedirectParams",
+ "TraceRequestChunkSentParams",
+ "TraceResponseChunkReceivedParams",
+ "TraceRequestHeadersSentParams",
+)
+
+
+class TraceConfig:
+ """First-class used to trace requests launched via ClientSession objects."""
+
+ def __init__(
+ self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace
+ ) -> None:
+ self._on_request_start = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceRequestStartParams]]
+ self._on_request_chunk_sent = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceRequestChunkSentParams]]
+ self._on_response_chunk_received = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceResponseChunkReceivedParams]]
+ self._on_request_end = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceRequestEndParams]]
+ self._on_request_exception = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceRequestExceptionParams]]
+ self._on_request_redirect = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceRequestRedirectParams]]
+ self._on_connection_queued_start = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceConnectionQueuedStartParams]]
+ self._on_connection_queued_end = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceConnectionQueuedEndParams]]
+ self._on_connection_create_start = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceConnectionCreateStartParams]]
+ self._on_connection_create_end = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceConnectionCreateEndParams]]
+ self._on_connection_reuseconn = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceConnectionReuseconnParams]]
+ self._on_dns_resolvehost_start = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceDnsResolveHostStartParams]]
+ self._on_dns_resolvehost_end = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceDnsResolveHostEndParams]]
+ self._on_dns_cache_hit = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceDnsCacheHitParams]]
+ self._on_dns_cache_miss = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceDnsCacheMissParams]]
+ self._on_request_headers_sent = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceRequestHeadersSentParams]]
+
+ self._trace_config_ctx_factory = trace_config_ctx_factory
+
+ def trace_config_ctx(
+ self, trace_request_ctx: Optional[SimpleNamespace] = None
+ ) -> SimpleNamespace:
+ """Return a new trace_config_ctx instance"""
+ return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx)
+
+ def freeze(self) -> None:
+ self._on_request_start.freeze()
+ self._on_request_chunk_sent.freeze()
+ self._on_response_chunk_received.freeze()
+ self._on_request_end.freeze()
+ self._on_request_exception.freeze()
+ self._on_request_redirect.freeze()
+ self._on_connection_queued_start.freeze()
+ self._on_connection_queued_end.freeze()
+ self._on_connection_create_start.freeze()
+ self._on_connection_create_end.freeze()
+ self._on_connection_reuseconn.freeze()
+ self._on_dns_resolvehost_start.freeze()
+ self._on_dns_resolvehost_end.freeze()
+ self._on_dns_cache_hit.freeze()
+ self._on_dns_cache_miss.freeze()
+ self._on_request_headers_sent.freeze()
+
+ @property
+ def on_request_start(self) -> "Signal[_SignalCallback[TraceRequestStartParams]]":
+ return self._on_request_start
+
+ @property
+ def on_request_chunk_sent(
+ self,
+ ) -> "Signal[_SignalCallback[TraceRequestChunkSentParams]]":
+ return self._on_request_chunk_sent
+
+ @property
+ def on_response_chunk_received(
+ self,
+ ) -> "Signal[_SignalCallback[TraceResponseChunkReceivedParams]]":
+ return self._on_response_chunk_received
+
+ @property
+ def on_request_end(self) -> "Signal[_SignalCallback[TraceRequestEndParams]]":
+ return self._on_request_end
+
+ @property
+ def on_request_exception(
+ self,
+ ) -> "Signal[_SignalCallback[TraceRequestExceptionParams]]":
+ return self._on_request_exception
+
+ @property
+ def on_request_redirect(
+ self,
+ ) -> "Signal[_SignalCallback[TraceRequestRedirectParams]]":
+ return self._on_request_redirect
+
+ @property
+ def on_connection_queued_start(
+ self,
+ ) -> "Signal[_SignalCallback[TraceConnectionQueuedStartParams]]":
+ return self._on_connection_queued_start
+
+ @property
+ def on_connection_queued_end(
+ self,
+ ) -> "Signal[_SignalCallback[TraceConnectionQueuedEndParams]]":
+ return self._on_connection_queued_end
+
+ @property
+ def on_connection_create_start(
+ self,
+ ) -> "Signal[_SignalCallback[TraceConnectionCreateStartParams]]":
+ return self._on_connection_create_start
+
+ @property
+ def on_connection_create_end(
+ self,
+ ) -> "Signal[_SignalCallback[TraceConnectionCreateEndParams]]":
+ return self._on_connection_create_end
+
+ @property
+ def on_connection_reuseconn(
+ self,
+ ) -> "Signal[_SignalCallback[TraceConnectionReuseconnParams]]":
+ return self._on_connection_reuseconn
+
+ @property
+ def on_dns_resolvehost_start(
+ self,
+ ) -> "Signal[_SignalCallback[TraceDnsResolveHostStartParams]]":
+ return self._on_dns_resolvehost_start
+
+ @property
+ def on_dns_resolvehost_end(
+ self,
+ ) -> "Signal[_SignalCallback[TraceDnsResolveHostEndParams]]":
+ return self._on_dns_resolvehost_end
+
+ @property
+ def on_dns_cache_hit(self) -> "Signal[_SignalCallback[TraceDnsCacheHitParams]]":
+ return self._on_dns_cache_hit
+
+ @property
+ def on_dns_cache_miss(self) -> "Signal[_SignalCallback[TraceDnsCacheMissParams]]":
+ return self._on_dns_cache_miss
+
+ @property
+ def on_request_headers_sent(
+ self,
+ ) -> "Signal[_SignalCallback[TraceRequestHeadersSentParams]]":
+ return self._on_request_headers_sent
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestStartParams:
+ """Parameters sent by the `on_request_start` signal"""
+
+ method: str
+ url: URL
+ headers: "CIMultiDict[str]"
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestChunkSentParams:
+ """Parameters sent by the `on_request_chunk_sent` signal"""
+
+ method: str
+ url: URL
+ chunk: bytes
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceResponseChunkReceivedParams:
+ """Parameters sent by the `on_response_chunk_received` signal"""
+
+ method: str
+ url: URL
+ chunk: bytes
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestEndParams:
+ """Parameters sent by the `on_request_end` signal"""
+
+ method: str
+ url: URL
+ headers: "CIMultiDict[str]"
+ response: ClientResponse
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestExceptionParams:
+ """Parameters sent by the `on_request_exception` signal"""
+
+ method: str
+ url: URL
+ headers: "CIMultiDict[str]"
+ exception: BaseException
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestRedirectParams:
+ """Parameters sent by the `on_request_redirect` signal"""
+
+ method: str
+ url: URL
+ headers: "CIMultiDict[str]"
+ response: ClientResponse
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceConnectionQueuedStartParams:
+ """Parameters sent by the `on_connection_queued_start` signal"""
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceConnectionQueuedEndParams:
+ """Parameters sent by the `on_connection_queued_end` signal"""
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceConnectionCreateStartParams:
+ """Parameters sent by the `on_connection_create_start` signal"""
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceConnectionCreateEndParams:
+ """Parameters sent by the `on_connection_create_end` signal"""
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceConnectionReuseconnParams:
+ """Parameters sent by the `on_connection_reuseconn` signal"""
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceDnsResolveHostStartParams:
+ """Parameters sent by the `on_dns_resolvehost_start` signal"""
+
+ host: str
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceDnsResolveHostEndParams:
+ """Parameters sent by the `on_dns_resolvehost_end` signal"""
+
+ host: str
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceDnsCacheHitParams:
+ """Parameters sent by the `on_dns_cache_hit` signal"""
+
+ host: str
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceDnsCacheMissParams:
+ """Parameters sent by the `on_dns_cache_miss` signal"""
+
+ host: str
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestHeadersSentParams:
+ """Parameters sent by the `on_request_headers_sent` signal"""
+
+ method: str
+ url: URL
+ headers: "CIMultiDict[str]"
+
+
+class Trace:
+ """Internal dependency holder class.
+
+ Used to keep together the main dependencies used
+    at the moment of sending a signal.
+ """
+
+ def __init__(
+ self,
+ session: "ClientSession",
+ trace_config: TraceConfig,
+ trace_config_ctx: SimpleNamespace,
+ ) -> None:
+ self._trace_config = trace_config
+ self._trace_config_ctx = trace_config_ctx
+ self._session = session
+
+ async def send_request_start(
+ self, method: str, url: URL, headers: "CIMultiDict[str]"
+ ) -> None:
+ return await self._trace_config.on_request_start.send(
+ self._session,
+ self._trace_config_ctx,
+ TraceRequestStartParams(method, url, headers),
+ )
+
+ async def send_request_chunk_sent(
+ self, method: str, url: URL, chunk: bytes
+ ) -> None:
+ return await self._trace_config.on_request_chunk_sent.send(
+ self._session,
+ self._trace_config_ctx,
+ TraceRequestChunkSentParams(method, url, chunk),
+ )
+
+ async def send_response_chunk_received(
+ self, method: str, url: URL, chunk: bytes
+ ) -> None:
+ return await self._trace_config.on_response_chunk_received.send(
+ self._session,
+ self._trace_config_ctx,
+ TraceResponseChunkReceivedParams(method, url, chunk),
+ )
+
+ async def send_request_end(
+ self,
+ method: str,
+ url: URL,
+ headers: "CIMultiDict[str]",
+ response: ClientResponse,
+ ) -> None:
+ return await self._trace_config.on_request_end.send(
+ self._session,
+ self._trace_config_ctx,
+ TraceRequestEndParams(method, url, headers, response),
+ )
+
+ async def send_request_exception(
+ self,
+ method: str,
+ url: URL,
+ headers: "CIMultiDict[str]",
+ exception: BaseException,
+ ) -> None:
+ return await self._trace_config.on_request_exception.send(
+ self._session,
+ self._trace_config_ctx,
+ TraceRequestExceptionParams(method, url, headers, exception),
+ )
+
+ async def send_request_redirect(
+ self,
+ method: str,
+ url: URL,
+ headers: "CIMultiDict[str]",
+ response: ClientResponse,
+ ) -> None:
+ return await self._trace_config._on_request_redirect.send(
+ self._session,
+ self._trace_config_ctx,
+ TraceRequestRedirectParams(method, url, headers, response),
+ )
+
+ async def send_connection_queued_start(self) -> None:
+ return await self._trace_config.on_connection_queued_start.send(
+ self._session, self._trace_config_ctx, TraceConnectionQueuedStartParams()
+ )
+
+ async def send_connection_queued_end(self) -> None:
+ return await self._trace_config.on_connection_queued_end.send(
+ self._session, self._trace_config_ctx, TraceConnectionQueuedEndParams()
+ )
+
+ async def send_connection_create_start(self) -> None:
+ return await self._trace_config.on_connection_create_start.send(
+ self._session, self._trace_config_ctx, TraceConnectionCreateStartParams()
+ )
+
+ async def send_connection_create_end(self) -> None:
+ return await self._trace_config.on_connection_create_end.send(
+ self._session, self._trace_config_ctx, TraceConnectionCreateEndParams()
+ )
+
+ async def send_connection_reuseconn(self) -> None:
+ return await self._trace_config.on_connection_reuseconn.send(
+ self._session, self._trace_config_ctx, TraceConnectionReuseconnParams()
+ )
+
+ async def send_dns_resolvehost_start(self, host: str) -> None:
+ return await self._trace_config.on_dns_resolvehost_start.send(
+ self._session, self._trace_config_ctx, TraceDnsResolveHostStartParams(host)
+ )
+
+ async def send_dns_resolvehost_end(self, host: str) -> None:
+ return await self._trace_config.on_dns_resolvehost_end.send(
+ self._session, self._trace_config_ctx, TraceDnsResolveHostEndParams(host)
+ )
+
+ async def send_dns_cache_hit(self, host: str) -> None:
+ return await self._trace_config.on_dns_cache_hit.send(
+ self._session, self._trace_config_ctx, TraceDnsCacheHitParams(host)
+ )
+
+ async def send_dns_cache_miss(self, host: str) -> None:
+ return await self._trace_config.on_dns_cache_miss.send(
+ self._session, self._trace_config_ctx, TraceDnsCacheMissParams(host)
+ )
+
+ async def send_request_headers(
+ self, method: str, url: URL, headers: "CIMultiDict[str]"
+ ) -> None:
+ return await self._trace_config._on_request_headers_sent.send(
+ self._session,
+ self._trace_config_ctx,
+ TraceRequestHeadersSentParams(method, url, headers),
+ )
diff --git a/contrib/python/aiohttp/aiohttp/typedefs.py b/contrib/python/aiohttp/aiohttp/typedefs.py
new file mode 100644
index 0000000000..84283d9a46
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/typedefs.py
@@ -0,0 +1,64 @@
"""Common type aliases shared across the aiohttp package."""

import json
import os
import sys
from typing import (
    TYPE_CHECKING,
    Any,
    Awaitable,
    Callable,
    Iterable,
    Mapping,
    Tuple,
    Union,
)

from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr
from yarl import URL

# These are for other modules to use (to avoid repeating the conditional import).
if sys.version_info >= (3, 8):
    from typing import Final as Final, Protocol as Protocol, TypedDict as TypedDict
else:
    from typing_extensions import (  # noqa: F401
        Final,
        Protocol as Protocol,
        TypedDict as TypedDict,
    )

# Default JSON (de)serializers used when the caller does not supply custom ones.
DEFAULT_JSON_ENCODER = json.dumps
DEFAULT_JSON_DECODER = json.loads

if TYPE_CHECKING:  # pragma: no cover
    # Subscripted multidict forms are only valid for static type checkers.
    _CIMultiDict = CIMultiDict[str]
    _CIMultiDictProxy = CIMultiDictProxy[str]
    _MultiDict = MultiDict[str]
    _MultiDictProxy = MultiDictProxy[str]
    from http.cookies import BaseCookie, Morsel

    from .web import Request, StreamResponse
else:
    # At runtime the bare classes are used instead of subscripted generics.
    _CIMultiDict = CIMultiDict
    _CIMultiDictProxy = CIMultiDictProxy
    _MultiDict = MultiDict
    _MultiDictProxy = MultiDictProxy

Byteish = Union[bytes, bytearray, memoryview]
JSONEncoder = Callable[[Any], str]
JSONDecoder = Callable[[str], Any]
# Anything accepted where HTTP headers are expected.
LooseHeaders = Union[Mapping[Union[str, istr], str], _CIMultiDict, _CIMultiDictProxy]
RawHeaders = Tuple[Tuple[bytes, bytes], ...]
StrOrURL = Union[str, URL]

# Anything accepted where cookies are expected.
LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
LooseCookiesIterables = Iterable[
    Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
]
LooseCookies = Union[
    LooseCookiesMappings,
    LooseCookiesIterables,
    "BaseCookie[str]",
]

# Signature of a web handler: a Request in, an (awaitable) StreamResponse out.
Handler = Callable[["Request"], Awaitable["StreamResponse"]]

PathLike = Union[str, "os.PathLike[str]"]
diff --git a/contrib/python/aiohttp/aiohttp/web.py b/contrib/python/aiohttp/aiohttp/web.py
new file mode 100644
index 0000000000..864428b49b
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web.py
@@ -0,0 +1,586 @@
+import asyncio
+import logging
+import socket
+import sys
+from argparse import ArgumentParser
+from collections.abc import Iterable
+from importlib import import_module
+from typing import (
+ Any,
+ Awaitable,
+ Callable,
+ Iterable as TypingIterable,
+ List,
+ Optional,
+ Set,
+ Type,
+ Union,
+ cast,
+)
+
+from .abc import AbstractAccessLogger
+from .helpers import all_tasks
+from .log import access_logger
+from .web_app import Application as Application, CleanupError as CleanupError
+from .web_exceptions import (
+ HTTPAccepted as HTTPAccepted,
+ HTTPBadGateway as HTTPBadGateway,
+ HTTPBadRequest as HTTPBadRequest,
+ HTTPClientError as HTTPClientError,
+ HTTPConflict as HTTPConflict,
+ HTTPCreated as HTTPCreated,
+ HTTPError as HTTPError,
+ HTTPException as HTTPException,
+ HTTPExpectationFailed as HTTPExpectationFailed,
+ HTTPFailedDependency as HTTPFailedDependency,
+ HTTPForbidden as HTTPForbidden,
+ HTTPFound as HTTPFound,
+ HTTPGatewayTimeout as HTTPGatewayTimeout,
+ HTTPGone as HTTPGone,
+ HTTPInsufficientStorage as HTTPInsufficientStorage,
+ HTTPInternalServerError as HTTPInternalServerError,
+ HTTPLengthRequired as HTTPLengthRequired,
+ HTTPMethodNotAllowed as HTTPMethodNotAllowed,
+ HTTPMisdirectedRequest as HTTPMisdirectedRequest,
+ HTTPMovedPermanently as HTTPMovedPermanently,
+ HTTPMultipleChoices as HTTPMultipleChoices,
+ HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired,
+ HTTPNoContent as HTTPNoContent,
+ HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation,
+ HTTPNotAcceptable as HTTPNotAcceptable,
+ HTTPNotExtended as HTTPNotExtended,
+ HTTPNotFound as HTTPNotFound,
+ HTTPNotImplemented as HTTPNotImplemented,
+ HTTPNotModified as HTTPNotModified,
+ HTTPOk as HTTPOk,
+ HTTPPartialContent as HTTPPartialContent,
+ HTTPPaymentRequired as HTTPPaymentRequired,
+ HTTPPermanentRedirect as HTTPPermanentRedirect,
+ HTTPPreconditionFailed as HTTPPreconditionFailed,
+ HTTPPreconditionRequired as HTTPPreconditionRequired,
+ HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired,
+ HTTPRedirection as HTTPRedirection,
+ HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge,
+ HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge,
+ HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable,
+ HTTPRequestTimeout as HTTPRequestTimeout,
+ HTTPRequestURITooLong as HTTPRequestURITooLong,
+ HTTPResetContent as HTTPResetContent,
+ HTTPSeeOther as HTTPSeeOther,
+ HTTPServerError as HTTPServerError,
+ HTTPServiceUnavailable as HTTPServiceUnavailable,
+ HTTPSuccessful as HTTPSuccessful,
+ HTTPTemporaryRedirect as HTTPTemporaryRedirect,
+ HTTPTooManyRequests as HTTPTooManyRequests,
+ HTTPUnauthorized as HTTPUnauthorized,
+ HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons,
+ HTTPUnprocessableEntity as HTTPUnprocessableEntity,
+ HTTPUnsupportedMediaType as HTTPUnsupportedMediaType,
+ HTTPUpgradeRequired as HTTPUpgradeRequired,
+ HTTPUseProxy as HTTPUseProxy,
+ HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates,
+ HTTPVersionNotSupported as HTTPVersionNotSupported,
+)
+from .web_fileresponse import FileResponse as FileResponse
+from .web_log import AccessLogger
+from .web_middlewares import (
+ middleware as middleware,
+ normalize_path_middleware as normalize_path_middleware,
+)
+from .web_protocol import (
+ PayloadAccessError as PayloadAccessError,
+ RequestHandler as RequestHandler,
+ RequestPayloadError as RequestPayloadError,
+)
+from .web_request import (
+ BaseRequest as BaseRequest,
+ FileField as FileField,
+ Request as Request,
+)
+from .web_response import (
+ ContentCoding as ContentCoding,
+ Response as Response,
+ StreamResponse as StreamResponse,
+ json_response as json_response,
+)
+from .web_routedef import (
+ AbstractRouteDef as AbstractRouteDef,
+ RouteDef as RouteDef,
+ RouteTableDef as RouteTableDef,
+ StaticDef as StaticDef,
+ delete as delete,
+ get as get,
+ head as head,
+ options as options,
+ patch as patch,
+ post as post,
+ put as put,
+ route as route,
+ static as static,
+ view as view,
+)
+from .web_runner import (
+ AppRunner as AppRunner,
+ BaseRunner as BaseRunner,
+ BaseSite as BaseSite,
+ GracefulExit as GracefulExit,
+ NamedPipeSite as NamedPipeSite,
+ ServerRunner as ServerRunner,
+ SockSite as SockSite,
+ TCPSite as TCPSite,
+ UnixSite as UnixSite,
+)
+from .web_server import Server as Server
+from .web_urldispatcher import (
+ AbstractResource as AbstractResource,
+ AbstractRoute as AbstractRoute,
+ DynamicResource as DynamicResource,
+ PlainResource as PlainResource,
+ Resource as Resource,
+ ResourceRoute as ResourceRoute,
+ StaticResource as StaticResource,
+ UrlDispatcher as UrlDispatcher,
+ UrlMappingMatchInfo as UrlMappingMatchInfo,
+ View as View,
+)
+from .web_ws import (
+ WebSocketReady as WebSocketReady,
+ WebSocketResponse as WebSocketResponse,
+ WSMsgType as WSMsgType,
+)
+
+__all__ = (
+ # web_app
+ "Application",
+ "CleanupError",
+ # web_exceptions
+ "HTTPAccepted",
+ "HTTPBadGateway",
+ "HTTPBadRequest",
+ "HTTPClientError",
+ "HTTPConflict",
+ "HTTPCreated",
+ "HTTPError",
+ "HTTPException",
+ "HTTPExpectationFailed",
+ "HTTPFailedDependency",
+ "HTTPForbidden",
+ "HTTPFound",
+ "HTTPGatewayTimeout",
+ "HTTPGone",
+ "HTTPInsufficientStorage",
+ "HTTPInternalServerError",
+ "HTTPLengthRequired",
+ "HTTPMethodNotAllowed",
+ "HTTPMisdirectedRequest",
+ "HTTPMovedPermanently",
+ "HTTPMultipleChoices",
+ "HTTPNetworkAuthenticationRequired",
+ "HTTPNoContent",
+ "HTTPNonAuthoritativeInformation",
+ "HTTPNotAcceptable",
+ "HTTPNotExtended",
+ "HTTPNotFound",
+ "HTTPNotImplemented",
+ "HTTPNotModified",
+ "HTTPOk",
+ "HTTPPartialContent",
+ "HTTPPaymentRequired",
+ "HTTPPermanentRedirect",
+ "HTTPPreconditionFailed",
+ "HTTPPreconditionRequired",
+ "HTTPProxyAuthenticationRequired",
+ "HTTPRedirection",
+ "HTTPRequestEntityTooLarge",
+ "HTTPRequestHeaderFieldsTooLarge",
+ "HTTPRequestRangeNotSatisfiable",
+ "HTTPRequestTimeout",
+ "HTTPRequestURITooLong",
+ "HTTPResetContent",
+ "HTTPSeeOther",
+ "HTTPServerError",
+ "HTTPServiceUnavailable",
+ "HTTPSuccessful",
+ "HTTPTemporaryRedirect",
+ "HTTPTooManyRequests",
+ "HTTPUnauthorized",
+ "HTTPUnavailableForLegalReasons",
+ "HTTPUnprocessableEntity",
+ "HTTPUnsupportedMediaType",
+ "HTTPUpgradeRequired",
+ "HTTPUseProxy",
+ "HTTPVariantAlsoNegotiates",
+ "HTTPVersionNotSupported",
+ # web_fileresponse
+ "FileResponse",
+ # web_middlewares
+ "middleware",
+ "normalize_path_middleware",
+ # web_protocol
+ "PayloadAccessError",
+ "RequestHandler",
+ "RequestPayloadError",
+ # web_request
+ "BaseRequest",
+ "FileField",
+ "Request",
+ # web_response
+ "ContentCoding",
+ "Response",
+ "StreamResponse",
+ "json_response",
+ # web_routedef
+ "AbstractRouteDef",
+ "RouteDef",
+ "RouteTableDef",
+ "StaticDef",
+ "delete",
+ "get",
+ "head",
+ "options",
+ "patch",
+ "post",
+ "put",
+ "route",
+ "static",
+ "view",
+ # web_runner
+ "AppRunner",
+ "BaseRunner",
+ "BaseSite",
+ "GracefulExit",
+ "ServerRunner",
+ "SockSite",
+ "TCPSite",
+ "UnixSite",
+ "NamedPipeSite",
+ # web_server
+ "Server",
+ # web_urldispatcher
+ "AbstractResource",
+ "AbstractRoute",
+ "DynamicResource",
+ "PlainResource",
+ "Resource",
+ "ResourceRoute",
+ "StaticResource",
+ "UrlDispatcher",
+ "UrlMappingMatchInfo",
+ "View",
+ # web_ws
+ "WebSocketReady",
+ "WebSocketResponse",
+ "WSMsgType",
+ # web
+ "run_app",
+)
+
+
try:
    from ssl import SSLContext
except ImportError:  # pragma: no cover
    # ssl support can be compiled out of Python; fall back to Any so the
    # annotations below still resolve.
    SSLContext = Any  # type: ignore[misc,assignment]

# A single host name or an iterable of host names to bind to.
HostSequence = TypingIterable[str]
+
+
async def _run_app(
    app: Union[Application, Awaitable[Application]],
    *,
    host: Optional[Union[str, HostSequence]] = None,
    port: Optional[int] = None,
    path: Optional[str] = None,
    sock: Optional[socket.socket] = None,
    shutdown_timeout: float = 60.0,
    keepalive_timeout: float = 75.0,
    ssl_context: Optional[SSLContext] = None,
    print: Callable[..., None] = print,
    backlog: int = 128,
    access_log_class: Type[AbstractAccessLogger] = AccessLogger,
    access_log_format: str = AccessLogger.LOG_FORMAT,
    access_log: Optional[logging.Logger] = access_logger,
    handle_signals: bool = True,
    reuse_address: Optional[bool] = None,
    reuse_port: Optional[bool] = None,
) -> None:
    """Set up the runner and sites for *app* and serve until cancelled.

    An internal function that does all the dirty job of application running:
    builds an :class:`AppRunner`, creates TCP/Unix/socket sites from the
    ``host``/``path``/``sock`` arguments, starts them, then sleeps forever.
    Cleanup of the runner is guaranteed via ``finally``.
    """
    if asyncio.iscoroutine(app):
        # run_app accepts either an Application or an awaitable producing one.
        app = await app  # type: ignore[misc]

    app = cast(Application, app)

    runner = AppRunner(
        app,
        handle_signals=handle_signals,
        access_log_class=access_log_class,
        access_log_format=access_log_format,
        access_log=access_log,
        keepalive_timeout=keepalive_timeout,
    )

    await runner.setup()

    sites = []  # type: List[BaseSite]

    try:
        if host is not None:
            # A single host string gets one TCP site; an iterable gets one per host.
            if isinstance(host, (str, bytes, bytearray, memoryview)):
                sites.append(
                    TCPSite(
                        runner,
                        host,
                        port,
                        shutdown_timeout=shutdown_timeout,
                        ssl_context=ssl_context,
                        backlog=backlog,
                        reuse_address=reuse_address,
                        reuse_port=reuse_port,
                    )
                )
            else:
                for h in host:
                    sites.append(
                        TCPSite(
                            runner,
                            h,
                            port,
                            shutdown_timeout=shutdown_timeout,
                            ssl_context=ssl_context,
                            backlog=backlog,
                            reuse_address=reuse_address,
                            reuse_port=reuse_port,
                        )
                    )
        # NOTE: `and` binds tighter than `or` -- a default TCP site is added
        # when neither path nor sock is given, OR when an explicit port is set.
        elif path is None and sock is None or port is not None:
            sites.append(
                TCPSite(
                    runner,
                    port=port,
                    shutdown_timeout=shutdown_timeout,
                    ssl_context=ssl_context,
                    backlog=backlog,
                    reuse_address=reuse_address,
                    reuse_port=reuse_port,
                )
            )

        if path is not None:
            # Same single-vs-iterable treatment for Unix socket paths.
            if isinstance(path, (str, bytes, bytearray, memoryview)):
                sites.append(
                    UnixSite(
                        runner,
                        path,
                        shutdown_timeout=shutdown_timeout,
                        ssl_context=ssl_context,
                        backlog=backlog,
                    )
                )
            else:
                for p in path:
                    sites.append(
                        UnixSite(
                            runner,
                            p,
                            shutdown_timeout=shutdown_timeout,
                            ssl_context=ssl_context,
                            backlog=backlog,
                        )
                    )

        if sock is not None:
            # Same single-vs-iterable treatment for pre-made sockets.
            if not isinstance(sock, Iterable):
                sites.append(
                    SockSite(
                        runner,
                        sock,
                        shutdown_timeout=shutdown_timeout,
                        ssl_context=ssl_context,
                        backlog=backlog,
                    )
                )
            else:
                for s in sock:
                    sites.append(
                        SockSite(
                            runner,
                            s,
                            shutdown_timeout=shutdown_timeout,
                            ssl_context=ssl_context,
                            backlog=backlog,
                        )
                    )
        for site in sites:
            await site.start()

        if print:  # pragma: no branch
            names = sorted(str(s.name) for s in runner.sites)
            print(
                "======== Running on {} ========\n"
                "(Press CTRL+C to quit)".format(", ".join(names))
            )

        # sleep forever by 1 hour intervals,
        # on Windows before Python 3.8 wake up every 1 second to handle
        # Ctrl+C smoothly
        if sys.platform == "win32" and sys.version_info < (3, 8):
            delay = 1
        else:
            delay = 3600

        while True:
            await asyncio.sleep(delay)
    finally:
        await runner.cleanup()
+
+
+def _cancel_tasks(
+ to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop
+) -> None:
+ if not to_cancel:
+ return
+
+ for task in to_cancel:
+ task.cancel()
+
+ loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True))
+
+ for task in to_cancel:
+ if task.cancelled():
+ continue
+ if task.exception() is not None:
+ loop.call_exception_handler(
+ {
+ "message": "unhandled exception during asyncio.run() shutdown",
+ "exception": task.exception(),
+ "task": task,
+ }
+ )
+
+
def run_app(
    app: Union[Application, Awaitable[Application]],
    *,
    host: Optional[Union[str, HostSequence]] = None,
    port: Optional[int] = None,
    path: Optional[str] = None,
    sock: Optional[socket.socket] = None,
    shutdown_timeout: float = 60.0,
    keepalive_timeout: float = 75.0,
    ssl_context: Optional[SSLContext] = None,
    print: Callable[..., None] = print,
    backlog: int = 128,
    access_log_class: Type[AbstractAccessLogger] = AccessLogger,
    access_log_format: str = AccessLogger.LOG_FORMAT,
    access_log: Optional[logging.Logger] = access_logger,
    handle_signals: bool = True,
    reuse_address: Optional[bool] = None,
    reuse_port: Optional[bool] = None,
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
    """Run an app locally.

    Blocking entry point: drives :func:`_run_app` on *loop* (a new event
    loop if none is given) until ``GracefulExit`` or ``KeyboardInterrupt``,
    then cancels outstanding tasks and closes the loop.
    """
    if loop is None:
        loop = asyncio.new_event_loop()

    # Configure if and only if in debugging mode and using the default logger
    if loop.get_debug() and access_log and access_log.name == "aiohttp.access":
        if access_log.level == logging.NOTSET:
            access_log.setLevel(logging.DEBUG)
        if not access_log.hasHandlers():
            access_log.addHandler(logging.StreamHandler())

    main_task = loop.create_task(
        _run_app(
            app,
            host=host,
            port=port,
            path=path,
            sock=sock,
            shutdown_timeout=shutdown_timeout,
            keepalive_timeout=keepalive_timeout,
            ssl_context=ssl_context,
            print=print,
            backlog=backlog,
            access_log_class=access_log_class,
            access_log_format=access_log_format,
            access_log=access_log,
            handle_signals=handle_signals,
            reuse_address=reuse_address,
            reuse_port=reuse_port,
        )
    )

    try:
        asyncio.set_event_loop(loop)
        loop.run_until_complete(main_task)
    except (GracefulExit, KeyboardInterrupt):  # pragma: no cover
        pass
    finally:
        # Cancel the main task first, then any remaining tasks, then shut
        # down async generators before closing the loop.
        _cancel_tasks({main_task}, loop)
        _cancel_tasks(all_tasks(loop), loop)
        loop.run_until_complete(loop.shutdown_asyncgens())
        loop.close()
+
+
def main(argv: List[str]) -> None:
    """CLI entry point: ``python -m aiohttp.web module:function [options]``.

    Imports the given ``module:function`` entry point, calls it with any
    unrecognized arguments, and serves the returned Application.
    """
    arg_parser = ArgumentParser(
        description="aiohttp.web Application server", prog="aiohttp.web"
    )
    arg_parser.add_argument(
        "entry_func",
        help=(
            "Callable returning the `aiohttp.web.Application` instance to "
            "run. Should be specified in the 'module:function' syntax."
        ),
        metavar="entry-func",
    )
    arg_parser.add_argument(
        "-H",
        "--hostname",
        help="TCP/IP hostname to serve on (default: %(default)r)",
        default="localhost",
    )
    arg_parser.add_argument(
        "-P",
        "--port",
        help="TCP/IP port to serve on (default: %(default)r)",
        type=int,
        # argparse passes a string default through `type`, so this becomes an int.
        default="8080",
    )
    arg_parser.add_argument(
        "-U",
        "--path",
        help="Unix file system path to serve on. Specifying a path will cause "
        "hostname and port arguments to be ignored.",
    )
    args, extra_argv = arg_parser.parse_known_args(argv)

    # Import logic
    mod_str, _, func_str = args.entry_func.partition(":")
    if not func_str or not mod_str:
        arg_parser.error("'entry-func' not in 'module:function' syntax")
    if mod_str.startswith("."):
        arg_parser.error("relative module names not supported")
    try:
        module = import_module(mod_str)
    except ImportError as ex:
        arg_parser.error(f"unable to import {mod_str}: {ex}")
    try:
        func = getattr(module, func_str)
    except AttributeError:
        arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}")

    # Compatibility logic
    if args.path is not None and not hasattr(socket, "AF_UNIX"):
        arg_parser.error(
            "file system paths not supported by your operating" " environment"
        )

    logging.basicConfig(level=logging.DEBUG)

    # Unrecognized CLI args are forwarded to the user's entry function.
    app = func(extra_argv)
    run_app(app, host=args.hostname, port=args.port, path=args.path)
    arg_parser.exit(message="Stopped\n")
+
+
+if __name__ == "__main__": # pragma: no branch
+ main(sys.argv[1:]) # pragma: no cover
diff --git a/contrib/python/aiohttp/aiohttp/web_app.py b/contrib/python/aiohttp/aiohttp/web_app.py
new file mode 100644
index 0000000000..d5dc90ed42
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_app.py
@@ -0,0 +1,557 @@
+import asyncio
+import logging
+import warnings
+from functools import partial, update_wrapper
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ AsyncIterator,
+ Awaitable,
+ Callable,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Mapping,
+ MutableMapping,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+
+from aiosignal import Signal
+from frozenlist import FrozenList
+
+from . import hdrs
+from .abc import (
+ AbstractAccessLogger,
+ AbstractMatchInfo,
+ AbstractRouter,
+ AbstractStreamWriter,
+)
+from .helpers import DEBUG
+from .http_parser import RawRequestMessage
+from .log import web_logger
+from .streams import StreamReader
+from .web_log import AccessLogger
+from .web_middlewares import _fix_request_current_app
+from .web_protocol import RequestHandler
+from .web_request import Request
+from .web_response import StreamResponse
+from .web_routedef import AbstractRouteDef
+from .web_server import Server
+from .web_urldispatcher import (
+ AbstractResource,
+ AbstractRoute,
+ Domain,
+ MaskDomain,
+ MatchedSubAppResource,
+ PrefixedSubAppResource,
+ UrlDispatcher,
+)
+
+__all__ = ("Application", "CleanupError")
+
+
if TYPE_CHECKING:  # pragma: no cover
    from .typedefs import Handler

    # Precise signal/middleware types for static analysis only.
    _AppSignal = Signal[Callable[["Application"], Awaitable[None]]]
    _RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]]
    _Middleware = Union[
        Callable[[Request, Handler], Awaitable[StreamResponse]],
        Callable[["Application", Handler], Awaitable[Handler]],  # old-style
    ]
    _Middlewares = FrozenList[_Middleware]
    _MiddlewaresHandlers = Optional[Sequence[Tuple[_Middleware, bool]]]
    _Subapps = List["Application"]
else:
    # No type checker mode, skip types
    _AppSignal = Signal
    _RespPrepareSignal = Signal
    _Middleware = Callable
    _Middlewares = FrozenList
    _MiddlewaresHandlers = Optional[Sequence]
    _Subapps = List
+
+
+class Application(MutableMapping[str, Any]):
+ ATTRS = frozenset(
+ [
+ "logger",
+ "_debug",
+ "_router",
+ "_loop",
+ "_handler_args",
+ "_middlewares",
+ "_middlewares_handlers",
+ "_run_middlewares",
+ "_state",
+ "_frozen",
+ "_pre_frozen",
+ "_subapps",
+ "_on_response_prepare",
+ "_on_startup",
+ "_on_shutdown",
+ "_on_cleanup",
+ "_client_max_size",
+ "_cleanup_ctx",
+ ]
+ )
+
    def __init__(
        self,
        *,
        logger: logging.Logger = web_logger,
        router: Optional[UrlDispatcher] = None,
        middlewares: Iterable[_Middleware] = (),
        handler_args: Optional[Mapping[str, Any]] = None,
        client_max_size: int = 1024 ** 2,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        debug: Any = ...,  # mypy doesn't support ellipsis
    ) -> None:
        """Initialize the application.

        ``router``, ``loop`` and ``debug`` are deprecated arguments kept for
        backward compatibility; passing them emits ``DeprecationWarning``.
        """
        if router is None:
            router = UrlDispatcher()
        else:
            warnings.warn(
                "router argument is deprecated", DeprecationWarning, stacklevel=2
            )
        assert isinstance(router, AbstractRouter), router

        if loop is not None:
            warnings.warn(
                "loop argument is deprecated", DeprecationWarning, stacklevel=2
            )

        if debug is not ...:
            warnings.warn(
                "debug argument is deprecated", DeprecationWarning, stacklevel=2
            )
        self._debug = debug
        self._router = router  # type: UrlDispatcher
        self._loop = loop
        self._handler_args = handler_args
        self.logger = logger

        self._middlewares = FrozenList(middlewares)  # type: _Middlewares

        # initialized on freezing
        self._middlewares_handlers = None  # type: _MiddlewaresHandlers
        # initialized on freezing
        self._run_middlewares = None  # type: Optional[bool]

        # per-application key/value store exposed via the MutableMapping API
        self._state = {}  # type: Dict[str, Any]
        self._frozen = False
        self._pre_frozen = False
        self._subapps = []  # type: _Subapps

        # lifecycle signals; cleanup contexts hook into startup/cleanup
        self._on_response_prepare = Signal(self)  # type: _RespPrepareSignal
        self._on_startup = Signal(self)  # type: _AppSignal
        self._on_shutdown = Signal(self)  # type: _AppSignal
        self._on_cleanup = Signal(self)  # type: _AppSignal
        self._cleanup_ctx = CleanupContext()
        self._on_startup.append(self._cleanup_ctx._on_startup)
        self._on_cleanup.append(self._cleanup_ctx._on_cleanup)
        self._client_max_size = client_max_size
+
+ def __init_subclass__(cls: Type["Application"]) -> None:
+ warnings.warn(
+ "Inheritance class {} from web.Application "
+ "is discouraged".format(cls.__name__),
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ if DEBUG: # pragma: no cover
+
+ def __setattr__(self, name: str, val: Any) -> None:
+ if name not in self.ATTRS:
+ warnings.warn(
+ "Setting custom web.Application.{} attribute "
+ "is discouraged".format(name),
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ super().__setattr__(name, val)
+
+ # MutableMapping API
+
+ def __eq__(self, other: object) -> bool:
+ return self is other
+
+ def __getitem__(self, key: str) -> Any:
+ return self._state[key]
+
+ def _check_frozen(self) -> None:
+ if self._frozen:
+ warnings.warn(
+ "Changing state of started or joined " "application is deprecated",
+ DeprecationWarning,
+ stacklevel=3,
+ )
+
+ def __setitem__(self, key: str, value: Any) -> None:
+ self._check_frozen()
+ self._state[key] = value
+
+ def __delitem__(self, key: str) -> None:
+ self._check_frozen()
+ del self._state[key]
+
+ def __len__(self) -> int:
+ return len(self._state)
+
+ def __iter__(self) -> Iterator[str]:
+ return iter(self._state)
+
+ ########
    @property
    def loop(self) -> asyncio.AbstractEventLoop:
        """Deprecated: the event loop this application is bound to."""
        # Technically the loop can be None
        # but we mask it by explicit type cast
        # to provide more convenient type annotation
        warnings.warn("loop property is deprecated", DeprecationWarning, stacklevel=2)
        return cast(asyncio.AbstractEventLoop, self._loop)
+
+ def _set_loop(self, loop: Optional[asyncio.AbstractEventLoop]) -> None:
+ if loop is None:
+ loop = asyncio.get_event_loop()
+ if self._loop is not None and self._loop is not loop:
+ raise RuntimeError(
+ "web.Application instance initialized with different loop"
+ )
+
+ self._loop = loop
+
+ # set loop debug
+ if self._debug is ...:
+ self._debug = loop.get_debug()
+
+ # set loop to sub applications
+ for subapp in self._subapps:
+ subapp._set_loop(loop)
+
+ @property
+ def pre_frozen(self) -> bool:
+ return self._pre_frozen
+
    def pre_freeze(self) -> None:
        """Partially freeze the app: lock middlewares, router and signals.

        Idempotent; recursively pre-freezes sub-applications and computes
        whether the middleware machinery needs to run at all.
        """
        if self._pre_frozen:
            return

        self._pre_frozen = True
        self._middlewares.freeze()
        self._router.freeze()
        self._on_response_prepare.freeze()
        self._cleanup_ctx.freeze()
        self._on_startup.freeze()
        self._on_shutdown.freeze()
        self._on_cleanup.freeze()
        self._middlewares_handlers = tuple(self._prepare_middleware())

        # If current app and any subapp do not have middlewares avoid run all
        # of the code footprint that it implies, which have a middleware
        # hardcoded per app that sets up the current_app attribute. If no
        # middlewares are configured the handler will receive the proper
        # current_app without needing all of this code.
        self._run_middlewares = True if self.middlewares else False

        for subapp in self._subapps:
            subapp.pre_freeze()
            self._run_middlewares = self._run_middlewares or subapp._run_middlewares
+
+ @property
+ def frozen(self) -> bool:
+ return self._frozen
+
+ def freeze(self) -> None:
+ if self._frozen:
+ return
+
+ self.pre_freeze()
+ self._frozen = True
+ for subapp in self._subapps:
+ subapp.freeze()
+
+ @property
+ def debug(self) -> bool:
+ warnings.warn("debug property is deprecated", DeprecationWarning, stacklevel=2)
+ return self._debug # type: ignore[no-any-return]
+
+ def _reg_subapp_signals(self, subapp: "Application") -> None:
+ def reg_handler(signame: str) -> None:
+ subsig = getattr(subapp, signame)
+
+ async def handler(app: "Application") -> None:
+ await subsig.send(subapp)
+
+ appsig = getattr(self, signame)
+ appsig.append(handler)
+
+ reg_handler("on_startup")
+ reg_handler("on_shutdown")
+ reg_handler("on_cleanup")
+
+ def add_subapp(self, prefix: str, subapp: "Application") -> AbstractResource:
+ if not isinstance(prefix, str):
+ raise TypeError("Prefix must be str")
+ prefix = prefix.rstrip("/")
+ if not prefix:
+ raise ValueError("Prefix cannot be empty")
+ factory = partial(PrefixedSubAppResource, prefix, subapp)
+ return self._add_subapp(factory, subapp)
+
    def _add_subapp(
        self, resource_factory: Callable[[], AbstractResource], subapp: "Application"
    ) -> AbstractResource:
        """Register *subapp* via a resource built by *resource_factory*.

        The sub-app is pre-frozen, its lifecycle signals are re-emitted from
        this (parent) application, and it inherits the parent's loop if set.
        """
        if self.frozen:
            raise RuntimeError("Cannot add sub application to frozen application")
        if subapp.frozen:
            raise RuntimeError("Cannot add frozen application")
        resource = resource_factory()
        self.router.register_resource(resource)
        self._reg_subapp_signals(subapp)
        self._subapps.append(subapp)
        subapp.pre_freeze()
        if self._loop is not None:
            subapp._set_loop(self._loop)
        return resource
+
+ def add_domain(self, domain: str, subapp: "Application") -> AbstractResource:
+ if not isinstance(domain, str):
+ raise TypeError("Domain must be str")
+ elif "*" in domain:
+ rule = MaskDomain(domain) # type: Domain
+ else:
+ rule = Domain(domain)
+ factory = partial(MatchedSubAppResource, rule, subapp)
+ return self._add_subapp(factory, subapp)
+
+ def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]:
+ return self.router.add_routes(routes)
+
+ @property
+ def on_response_prepare(self) -> _RespPrepareSignal:
+ return self._on_response_prepare
+
+ @property
+ def on_startup(self) -> _AppSignal:
+ return self._on_startup
+
+ @property
+ def on_shutdown(self) -> _AppSignal:
+ return self._on_shutdown
+
+ @property
+ def on_cleanup(self) -> _AppSignal:
+ return self._on_cleanup
+
+ @property
+ def cleanup_ctx(self) -> "CleanupContext":
+ return self._cleanup_ctx
+
+ @property
+ def router(self) -> UrlDispatcher:
+ return self._router
+
+ @property
+ def middlewares(self) -> _Middlewares:
+ return self._middlewares
+
+ def _make_handler(
+ self,
+ *,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ access_log_class: Type[AbstractAccessLogger] = AccessLogger,
+ **kwargs: Any,
+ ) -> Server:
+
+ if not issubclass(access_log_class, AbstractAccessLogger):
+ raise TypeError(
+ "access_log_class must be subclass of "
+ "aiohttp.abc.AbstractAccessLogger, got {}".format(access_log_class)
+ )
+
+ self._set_loop(loop)
+ self.freeze()
+
+ kwargs["debug"] = self._debug
+ kwargs["access_log_class"] = access_log_class
+ if self._handler_args:
+ for k, v in self._handler_args.items():
+ kwargs[k] = v
+
+ return Server(
+ self._handle, # type: ignore[arg-type]
+ request_factory=self._make_request,
+ loop=self._loop,
+ **kwargs,
+ )
+
+ def make_handler(
+ self,
+ *,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ access_log_class: Type[AbstractAccessLogger] = AccessLogger,
+ **kwargs: Any,
+ ) -> Server:
+
+ warnings.warn(
+ "Application.make_handler(...) is deprecated, " "use AppRunner API instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ return self._make_handler(
+ loop=loop, access_log_class=access_log_class, **kwargs
+ )
+
+ async def startup(self) -> None:
+ """Causes on_startup signal
+
+ Should be called in the event loop along with the request handler.
+ """
+ await self.on_startup.send(self)
+
+ async def shutdown(self) -> None:
+ """Causes on_shutdown signal
+
+ Should be called before cleanup()
+ """
+ await self.on_shutdown.send(self)
+
    async def cleanup(self) -> None:
        """Causes on_cleanup signal

        Should be called after shutdown()
        """
        if self.on_cleanup.frozen:
            await self.on_cleanup.send(self)
        else:
            # If an exception occurs in startup, ensure cleanup contexts are completed.
            await self._cleanup_ctx._on_cleanup(self)
+
+ def _make_request(
+ self,
+ message: RawRequestMessage,
+ payload: StreamReader,
+ protocol: RequestHandler,
+ writer: AbstractStreamWriter,
+ task: "asyncio.Task[None]",
+ _cls: Type[Request] = Request,
+ ) -> Request:
+ return _cls(
+ message,
+ payload,
+ protocol,
+ writer,
+ task,
+ self._loop,
+ client_max_size=self._client_max_size,
+ )
+
    def _prepare_middleware(self) -> Iterator[Tuple[_Middleware, bool]]:
        """Yield ``(middleware, is_new_style)`` pairs in reverse registration order.

        Old-style (factory) middlewares emit a ``DeprecationWarning``.  The
        internal ``current_app``-fixing middleware is always yielded last.
        """
        for m in reversed(self._middlewares):
            if getattr(m, "__middleware_version__", None) == 1:
                yield m, True
            else:
                warnings.warn(
                    'old-style middleware "{!r}" deprecated, ' "see #2252".format(m),
                    DeprecationWarning,
                    stacklevel=2,
                )
                yield m, False

        yield _fix_request_current_app(self), True
+
    async def _handle(self, request: Request) -> StreamResponse:
        """Resolve *request* to a handler, apply middlewares, and run it."""
        loop = asyncio.get_event_loop()
        debug = loop.get_debug()
        match_info = await self._router.resolve(request)
        if debug:  # pragma: no cover
            if not isinstance(match_info, AbstractMatchInfo):
                raise TypeError(
                    "match_info should be AbstractMatchInfo "
                    "instance, not {!r}".format(match_info)
                )
        match_info.add_app(self)

        match_info.freeze()

        resp = None
        request._match_info = match_info
        # Honor `Expect:` headers (e.g. 100-continue) before the main handler.
        expect = request.headers.get(hdrs.EXPECT)
        if expect:
            resp = await match_info.expect_handler(request)
            await request.writer.drain()

        if resp is None:
            handler = match_info.handler

            if self._run_middlewares:
                # Wrap the handler with each app's middlewares, innermost app first.
                for app in match_info.apps[::-1]:
                    for m, new_style in app._middlewares_handlers:  # type: ignore[union-attr] # noqa
                        if new_style:
                            handler = update_wrapper(
                                partial(m, handler=handler), handler
                            )
                        else:
                            # old-style middleware factory: returns a new handler
                            handler = await m(app, handler)  # type: ignore[arg-type]

            resp = await handler(request)

        return resp
+
+ def __call__(self) -> "Application":
+ """gunicorn compatibility"""
+ return self
+
+ def __repr__(self) -> str:
+ return f"<Application 0x{id(self):x}>"
+
+ def __bool__(self) -> bool:
+ return True
+
+
class CleanupError(RuntimeError):
    """Raised when several cleanup-context callbacks fail during cleanup."""

    @property
    def exceptions(self) -> List[BaseException]:
        """The underlying exceptions (second positional constructor argument)."""
        collected = self.args[1]
        return cast(List[BaseException], collected)
+
+
if TYPE_CHECKING:  # pragma: no cover
    # Parametrized base for type checkers only (FrozenList is generic there).
    _CleanupContextBase = FrozenList[Callable[[Application], AsyncIterator[None]]]
else:
    _CleanupContextBase = FrozenList
+
+
class CleanupContext(_CleanupContextBase):
    """Ordered list of cleanup-context callbacks (async generators).

    Each callback is an async generator expected to yield exactly once:
    the code before ``yield`` runs on startup, the code after it runs on
    cleanup, in reverse registration order.
    """

    def __init__(self) -> None:
        super().__init__()
        # Iterators already advanced past their first `yield`; drained on cleanup.
        self._exits = []  # type: List[AsyncIterator[None]]

    async def _on_startup(self, app: Application) -> None:
        # Advance each context to its first `yield` (the startup phase).
        for cb in self:
            it = cb(app).__aiter__()
            await it.__anext__()
            self._exits.append(it)

    async def _on_cleanup(self, app: Application) -> None:
        # Run the post-`yield` phase of each context in reverse order,
        # collecting errors so one failure does not skip the rest.
        errors = []
        for it in reversed(self._exits):
            try:
                await it.__anext__()
            except StopAsyncIteration:
                pass
            except Exception as exc:
                errors.append(exc)
            else:
                # A second successful __anext__ means the generator has more
                # than one `yield`, which violates the contract.
                errors.append(RuntimeError(f"{it!r} has more than one 'yield'"))
        if errors:
            if len(errors) == 1:
                raise errors[0]
            else:
                raise CleanupError("Multiple errors on cleanup stage", errors)
diff --git a/contrib/python/aiohttp/aiohttp/web_exceptions.py b/contrib/python/aiohttp/aiohttp/web_exceptions.py
new file mode 100644
index 0000000000..2eadca0386
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_exceptions.py
@@ -0,0 +1,441 @@
+import warnings
+from typing import Any, Dict, Iterable, List, Optional, Set # noqa
+
+from yarl import URL
+
+from .typedefs import LooseHeaders, StrOrURL
+from .web_response import Response
+
+__all__ = (
+ "HTTPException",
+ "HTTPError",
+ "HTTPRedirection",
+ "HTTPSuccessful",
+ "HTTPOk",
+ "HTTPCreated",
+ "HTTPAccepted",
+ "HTTPNonAuthoritativeInformation",
+ "HTTPNoContent",
+ "HTTPResetContent",
+ "HTTPPartialContent",
+ "HTTPMultipleChoices",
+ "HTTPMovedPermanently",
+ "HTTPFound",
+ "HTTPSeeOther",
+ "HTTPNotModified",
+ "HTTPUseProxy",
+ "HTTPTemporaryRedirect",
+ "HTTPPermanentRedirect",
+ "HTTPClientError",
+ "HTTPBadRequest",
+ "HTTPUnauthorized",
+ "HTTPPaymentRequired",
+ "HTTPForbidden",
+ "HTTPNotFound",
+ "HTTPMethodNotAllowed",
+ "HTTPNotAcceptable",
+ "HTTPProxyAuthenticationRequired",
+ "HTTPRequestTimeout",
+ "HTTPConflict",
+ "HTTPGone",
+ "HTTPLengthRequired",
+ "HTTPPreconditionFailed",
+ "HTTPRequestEntityTooLarge",
+ "HTTPRequestURITooLong",
+ "HTTPUnsupportedMediaType",
+ "HTTPRequestRangeNotSatisfiable",
+ "HTTPExpectationFailed",
+ "HTTPMisdirectedRequest",
+ "HTTPUnprocessableEntity",
+ "HTTPFailedDependency",
+ "HTTPUpgradeRequired",
+ "HTTPPreconditionRequired",
+ "HTTPTooManyRequests",
+ "HTTPRequestHeaderFieldsTooLarge",
+ "HTTPUnavailableForLegalReasons",
+ "HTTPServerError",
+ "HTTPInternalServerError",
+ "HTTPNotImplemented",
+ "HTTPBadGateway",
+ "HTTPServiceUnavailable",
+ "HTTPGatewayTimeout",
+ "HTTPVersionNotSupported",
+ "HTTPVariantAlsoNegotiates",
+ "HTTPInsufficientStorage",
+ "HTTPNotExtended",
+ "HTTPNetworkAuthenticationRequired",
+)
+
+
+############################################################
+# HTTP Exceptions
+############################################################
+
+
+class HTTPException(Response, Exception):
+
+ # You should set in subclasses:
+ # status = 200
+
+ status_code = -1
+ empty_body = False
+
+ __http_exception__ = True
+
+ def __init__(
+ self,
+ *,
+ headers: Optional[LooseHeaders] = None,
+ reason: Optional[str] = None,
+ body: Any = None,
+ text: Optional[str] = None,
+ content_type: Optional[str] = None,
+ ) -> None:
+ if body is not None:
+ warnings.warn(
+ "body argument is deprecated for http web exceptions",
+ DeprecationWarning,
+ )
+ Response.__init__(
+ self,
+ status=self.status_code,
+ headers=headers,
+ reason=reason,
+ body=body,
+ text=text,
+ content_type=content_type,
+ )
+ Exception.__init__(self, self.reason)
+ if self.body is None and not self.empty_body:
+ self.text = f"{self.status}: {self.reason}"
+
+ def __bool__(self) -> bool:
+ return True
+
+
+class HTTPError(HTTPException):
+ """Base class for exceptions with status codes in the 400s and 500s."""
+
+
+class HTTPRedirection(HTTPException):
+ """Base class for exceptions with status codes in the 300s."""
+
+
+class HTTPSuccessful(HTTPException):
+ """Base class for exceptions with status codes in the 200s."""
+
+
+class HTTPOk(HTTPSuccessful):
+ status_code = 200
+
+
+class HTTPCreated(HTTPSuccessful):
+ status_code = 201
+
+
+class HTTPAccepted(HTTPSuccessful):
+ status_code = 202
+
+
+class HTTPNonAuthoritativeInformation(HTTPSuccessful):
+ status_code = 203
+
+
+class HTTPNoContent(HTTPSuccessful):
+ status_code = 204
+ empty_body = True
+
+
+class HTTPResetContent(HTTPSuccessful):
+ status_code = 205
+ empty_body = True
+
+
+class HTTPPartialContent(HTTPSuccessful):
+ status_code = 206
+
+
+############################################################
+# 3xx redirection
+############################################################
+
+
+class _HTTPMove(HTTPRedirection):
+ def __init__(
+ self,
+ location: StrOrURL,
+ *,
+ headers: Optional[LooseHeaders] = None,
+ reason: Optional[str] = None,
+ body: Any = None,
+ text: Optional[str] = None,
+ content_type: Optional[str] = None,
+ ) -> None:
+ if not location:
+ raise ValueError("HTTP redirects need a location to redirect to.")
+ super().__init__(
+ headers=headers,
+ reason=reason,
+ body=body,
+ text=text,
+ content_type=content_type,
+ )
+ self.headers["Location"] = str(URL(location))
+ self.location = location
+
+
+class HTTPMultipleChoices(_HTTPMove):
+ status_code = 300
+
+
+class HTTPMovedPermanently(_HTTPMove):
+ status_code = 301
+
+
+class HTTPFound(_HTTPMove):
+ status_code = 302
+
+
+# This one is safe after a POST (the redirected location will be
+# retrieved with GET):
+class HTTPSeeOther(_HTTPMove):
+ status_code = 303
+
+
+class HTTPNotModified(HTTPRedirection):
+ # FIXME: this should include a date or etag header
+ status_code = 304
+ empty_body = True
+
+
+class HTTPUseProxy(_HTTPMove):
+ # Not a move, but looks a little like one
+ status_code = 305
+
+
+class HTTPTemporaryRedirect(_HTTPMove):
+ status_code = 307
+
+
+class HTTPPermanentRedirect(_HTTPMove):
+ status_code = 308
+
+
+############################################################
+# 4xx client error
+############################################################
+
+
+class HTTPClientError(HTTPError):
+ pass
+
+
+class HTTPBadRequest(HTTPClientError):
+ status_code = 400
+
+
+class HTTPUnauthorized(HTTPClientError):
+ status_code = 401
+
+
+class HTTPPaymentRequired(HTTPClientError):
+ status_code = 402
+
+
+class HTTPForbidden(HTTPClientError):
+ status_code = 403
+
+
+class HTTPNotFound(HTTPClientError):
+ status_code = 404
+
+
+class HTTPMethodNotAllowed(HTTPClientError):
+ status_code = 405
+
+ def __init__(
+ self,
+ method: str,
+ allowed_methods: Iterable[str],
+ *,
+ headers: Optional[LooseHeaders] = None,
+ reason: Optional[str] = None,
+ body: Any = None,
+ text: Optional[str] = None,
+ content_type: Optional[str] = None,
+ ) -> None:
+ allow = ",".join(sorted(allowed_methods))
+ super().__init__(
+ headers=headers,
+ reason=reason,
+ body=body,
+ text=text,
+ content_type=content_type,
+ )
+ self.headers["Allow"] = allow
+ self.allowed_methods = set(allowed_methods) # type: Set[str]
+ self.method = method.upper()
+
+
+class HTTPNotAcceptable(HTTPClientError):
+ status_code = 406
+
+
+class HTTPProxyAuthenticationRequired(HTTPClientError):
+ status_code = 407
+
+
+class HTTPRequestTimeout(HTTPClientError):
+ status_code = 408
+
+
+class HTTPConflict(HTTPClientError):
+ status_code = 409
+
+
+class HTTPGone(HTTPClientError):
+ status_code = 410
+
+
+class HTTPLengthRequired(HTTPClientError):
+ status_code = 411
+
+
+class HTTPPreconditionFailed(HTTPClientError):
+ status_code = 412
+
+
+class HTTPRequestEntityTooLarge(HTTPClientError):
+ status_code = 413
+
+ def __init__(self, max_size: float, actual_size: float, **kwargs: Any) -> None:
+ kwargs.setdefault(
+ "text",
+ "Maximum request body size {} exceeded, "
+ "actual body size {}".format(max_size, actual_size),
+ )
+ super().__init__(**kwargs)
+
+
+class HTTPRequestURITooLong(HTTPClientError):
+ status_code = 414
+
+
+class HTTPUnsupportedMediaType(HTTPClientError):
+ status_code = 415
+
+
+class HTTPRequestRangeNotSatisfiable(HTTPClientError):
+ status_code = 416
+
+
+class HTTPExpectationFailed(HTTPClientError):
+ status_code = 417
+
+
+class HTTPMisdirectedRequest(HTTPClientError):
+ status_code = 421
+
+
+class HTTPUnprocessableEntity(HTTPClientError):
+ status_code = 422
+
+
+class HTTPFailedDependency(HTTPClientError):
+ status_code = 424
+
+
+class HTTPUpgradeRequired(HTTPClientError):
+ status_code = 426
+
+
+class HTTPPreconditionRequired(HTTPClientError):
+ status_code = 428
+
+
+class HTTPTooManyRequests(HTTPClientError):
+ status_code = 429
+
+
+class HTTPRequestHeaderFieldsTooLarge(HTTPClientError):
+ status_code = 431
+
+
+class HTTPUnavailableForLegalReasons(HTTPClientError):
+ status_code = 451
+
+ def __init__(
+ self,
+ link: str,
+ *,
+ headers: Optional[LooseHeaders] = None,
+ reason: Optional[str] = None,
+ body: Any = None,
+ text: Optional[str] = None,
+ content_type: Optional[str] = None,
+ ) -> None:
+ super().__init__(
+ headers=headers,
+ reason=reason,
+ body=body,
+ text=text,
+ content_type=content_type,
+ )
+ self.headers["Link"] = '<%s>; rel="blocked-by"' % link
+ self.link = link
+
+
+############################################################
+# 5xx Server Error
+############################################################
+# Response status codes beginning with the digit "5" indicate cases in
+# which the server is aware that it has erred or is incapable of
+# performing the request. Except when responding to a HEAD request, the
+# server SHOULD include an entity containing an explanation of the error
+# situation, and whether it is a temporary or permanent condition. User
+# agents SHOULD display any included entity to the user. These response
+# codes are applicable to any request method.
+
+
+class HTTPServerError(HTTPError):
+ pass
+
+
+class HTTPInternalServerError(HTTPServerError):
+ status_code = 500
+
+
+class HTTPNotImplemented(HTTPServerError):
+ status_code = 501
+
+
+class HTTPBadGateway(HTTPServerError):
+ status_code = 502
+
+
+class HTTPServiceUnavailable(HTTPServerError):
+ status_code = 503
+
+
+class HTTPGatewayTimeout(HTTPServerError):
+ status_code = 504
+
+
+class HTTPVersionNotSupported(HTTPServerError):
+ status_code = 505
+
+
+class HTTPVariantAlsoNegotiates(HTTPServerError):
+ status_code = 506
+
+
+class HTTPInsufficientStorage(HTTPServerError):
+ status_code = 507
+
+
+class HTTPNotExtended(HTTPServerError):
+ status_code = 510
+
+
+class HTTPNetworkAuthenticationRequired(HTTPServerError):
+ status_code = 511
diff --git a/contrib/python/aiohttp/aiohttp/web_fileresponse.py b/contrib/python/aiohttp/aiohttp/web_fileresponse.py
new file mode 100644
index 0000000000..f41ed3fd0a
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_fileresponse.py
@@ -0,0 +1,288 @@
+import asyncio
+import mimetypes
+import os
+import pathlib
+import sys
+from typing import ( # noqa
+ IO,
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ Iterator,
+ List,
+ Optional,
+ Tuple,
+ Union,
+ cast,
+)
+
+from . import hdrs
+from .abc import AbstractStreamWriter
+from .helpers import ETAG_ANY, ETag
+from .typedefs import Final, LooseHeaders
+from .web_exceptions import (
+ HTTPNotModified,
+ HTTPPartialContent,
+ HTTPPreconditionFailed,
+ HTTPRequestRangeNotSatisfiable,
+)
+from .web_response import StreamResponse
+
+__all__ = ("FileResponse",)
+
+if TYPE_CHECKING: # pragma: no cover
+ from .web_request import BaseRequest
+
+
+_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
+
+
+NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE"))
+
+
+class FileResponse(StreamResponse):
+ """A response object can be used to send files."""
+
+ def __init__(
+ self,
+ path: Union[str, pathlib.Path],
+ chunk_size: int = 256 * 1024,
+ status: int = 200,
+ reason: Optional[str] = None,
+ headers: Optional[LooseHeaders] = None,
+ ) -> None:
+ super().__init__(status=status, reason=reason, headers=headers)
+
+ if isinstance(path, str):
+ path = pathlib.Path(path)
+
+ self._path = path
+ self._chunk_size = chunk_size
+
+ async def _sendfile_fallback(
+ self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int
+ ) -> AbstractStreamWriter:
+ # To keep memory usage low,fobj is transferred in chunks
+ # controlled by the constructor's chunk_size argument.
+
+ chunk_size = self._chunk_size
+ loop = asyncio.get_event_loop()
+
+ await loop.run_in_executor(None, fobj.seek, offset)
+
+ chunk = await loop.run_in_executor(None, fobj.read, chunk_size)
+ while chunk:
+ await writer.write(chunk)
+ count = count - chunk_size
+ if count <= 0:
+ break
+ chunk = await loop.run_in_executor(None, fobj.read, min(chunk_size, count))
+
+ await writer.drain()
+ return writer
+
+ async def _sendfile(
+ self, request: "BaseRequest", fobj: IO[Any], offset: int, count: int
+ ) -> AbstractStreamWriter:
+ writer = await super().prepare(request)
+ assert writer is not None
+
+ if NOSENDFILE or sys.version_info < (3, 7) or self.compression:
+ return await self._sendfile_fallback(writer, fobj, offset, count)
+
+ loop = request._loop
+ transport = request.transport
+ assert transport is not None
+
+ try:
+ await loop.sendfile(transport, fobj, offset, count)
+ except NotImplementedError:
+ return await self._sendfile_fallback(writer, fobj, offset, count)
+
+ await super().write_eof()
+ return writer
+
+ @staticmethod
+ def _strong_etag_match(etag_value: str, etags: Tuple[ETag, ...]) -> bool:
+ if len(etags) == 1 and etags[0].value == ETAG_ANY:
+ return True
+ return any(etag.value == etag_value for etag in etags if not etag.is_weak)
+
+ async def _not_modified(
+ self, request: "BaseRequest", etag_value: str, last_modified: float
+ ) -> Optional[AbstractStreamWriter]:
+ self.set_status(HTTPNotModified.status_code)
+ self._length_check = False
+ self.etag = etag_value # type: ignore[assignment]
+ self.last_modified = last_modified # type: ignore[assignment]
+ # Delete any Content-Length headers provided by user. HTTP 304
+ # should always have empty response body
+ return await super().prepare(request)
+
+ async def _precondition_failed(
+ self, request: "BaseRequest"
+ ) -> Optional[AbstractStreamWriter]:
+ self.set_status(HTTPPreconditionFailed.status_code)
+ self.content_length = 0
+ return await super().prepare(request)
+
+ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
+ filepath = self._path
+
+ gzip = False
+ if "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, ""):
+ gzip_path = filepath.with_name(filepath.name + ".gz")
+
+ if gzip_path.is_file():
+ filepath = gzip_path
+ gzip = True
+
+ loop = asyncio.get_event_loop()
+ st: os.stat_result = await loop.run_in_executor(None, filepath.stat)
+
+ etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}"
+ last_modified = st.st_mtime
+
+ # https://tools.ietf.org/html/rfc7232#section-6
+ ifmatch = request.if_match
+ if ifmatch is not None and not self._strong_etag_match(etag_value, ifmatch):
+ return await self._precondition_failed(request)
+
+ unmodsince = request.if_unmodified_since
+ if (
+ unmodsince is not None
+ and ifmatch is None
+ and st.st_mtime > unmodsince.timestamp()
+ ):
+ return await self._precondition_failed(request)
+
+ ifnonematch = request.if_none_match
+ if ifnonematch is not None and self._strong_etag_match(etag_value, ifnonematch):
+ return await self._not_modified(request, etag_value, last_modified)
+
+ modsince = request.if_modified_since
+ if (
+ modsince is not None
+ and ifnonematch is None
+ and st.st_mtime <= modsince.timestamp()
+ ):
+ return await self._not_modified(request, etag_value, last_modified)
+
+ if hdrs.CONTENT_TYPE not in self.headers:
+ ct, encoding = mimetypes.guess_type(str(filepath))
+ if not ct:
+ ct = "application/octet-stream"
+ should_set_ct = True
+ else:
+ encoding = "gzip" if gzip else None
+ should_set_ct = False
+
+ status = self._status
+ file_size = st.st_size
+ count = file_size
+
+ start = None
+
+ ifrange = request.if_range
+ if ifrange is None or st.st_mtime <= ifrange.timestamp():
+ # If-Range header check:
+ # condition = cached date >= last modification date
+ # return 206 if True else 200.
+ # if False:
+ # Range header would not be processed, return 200
+ # if True but Range header missing
+ # return 200
+ try:
+ rng = request.http_range
+ start = rng.start
+ end = rng.stop
+ except ValueError:
+ # https://tools.ietf.org/html/rfc7233:
+ # A server generating a 416 (Range Not Satisfiable) response to
+ # a byte-range request SHOULD send a Content-Range header field
+ # with an unsatisfied-range value.
+ # The complete-length in a 416 response indicates the current
+ # length of the selected representation.
+ #
+ # Will do the same below. Many servers ignore this and do not
+ # send a Content-Range header with HTTP 416
+ self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
+ self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
+ return await super().prepare(request)
+
+ # If a range request has been made, convert start, end slice
+ # notation into file pointer offset and count
+ if start is not None or end is not None:
+ if start < 0 and end is None: # return tail of file
+ start += file_size
+ if start < 0:
+ # if Range:bytes=-1000 in request header but file size
+ # is only 200, there would be trouble without this
+ start = 0
+ count = file_size - start
+ else:
+ # rfc7233:If the last-byte-pos value is
+ # absent, or if the value is greater than or equal to
+ # the current length of the representation data,
+ # the byte range is interpreted as the remainder
+ # of the representation (i.e., the server replaces the
+ # value of last-byte-pos with a value that is one less than
+ # the current length of the selected representation).
+ count = (
+ min(end if end is not None else file_size, file_size) - start
+ )
+
+ if start >= file_size:
+ # HTTP 416 should be returned in this case.
+ #
+ # According to https://tools.ietf.org/html/rfc7233:
+ # If a valid byte-range-set includes at least one
+ # byte-range-spec with a first-byte-pos that is less than
+ # the current length of the representation, or at least one
+ # suffix-byte-range-spec with a non-zero suffix-length,
+ # then the byte-range-set is satisfiable. Otherwise, the
+ # byte-range-set is unsatisfiable.
+ self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
+ self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
+ return await super().prepare(request)
+
+ status = HTTPPartialContent.status_code
+ # Even though you are sending the whole file, you should still
+ # return a HTTP 206 for a Range request.
+ self.set_status(status)
+
+ if should_set_ct:
+ self.content_type = ct # type: ignore[assignment]
+ if encoding:
+ self.headers[hdrs.CONTENT_ENCODING] = encoding
+ if gzip:
+ self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING
+
+ self.etag = etag_value # type: ignore[assignment]
+ self.last_modified = st.st_mtime # type: ignore[assignment]
+ self.content_length = count
+
+ self.headers[hdrs.ACCEPT_RANGES] = "bytes"
+
+ real_start = cast(int, start)
+
+ if status == HTTPPartialContent.status_code:
+ self.headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format(
+ real_start, real_start + count - 1, file_size
+ )
+
+ # If we are sending 0 bytes calling sendfile() will throw a ValueError
+ if count == 0 or request.method == hdrs.METH_HEAD or self.status in [204, 304]:
+ return await super().prepare(request)
+
+ fobj = await loop.run_in_executor(None, filepath.open, "rb")
+ if start: # be aware that start could be None or int=0 here.
+ offset = start
+ else:
+ offset = 0
+
+ try:
+ return await self._sendfile(request, fobj, offset, count)
+ finally:
+ await loop.run_in_executor(None, fobj.close)
diff --git a/contrib/python/aiohttp/aiohttp/web_log.py b/contrib/python/aiohttp/aiohttp/web_log.py
new file mode 100644
index 0000000000..a977c1ba5c
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_log.py
@@ -0,0 +1,208 @@
+import datetime
+import functools
+import logging
+import os
+import re
+from collections import namedtuple
+from typing import Any, Callable, Dict, Iterable, List, Tuple # noqa
+
+from .abc import AbstractAccessLogger
+from .web_request import BaseRequest
+from .web_response import StreamResponse
+
+KeyMethod = namedtuple("KeyMethod", "key method")
+
+
+class AccessLogger(AbstractAccessLogger):
+ """Helper object to log access.
+
+ Usage:
+ log = logging.getLogger("spam")
+ log_format = "%a %{User-Agent}i"
+ access_logger = AccessLogger(log, log_format)
+ access_logger.log(request, response, time)
+
+ Format:
+ %% The percent sign
+ %a Remote IP-address (IP-address of proxy if using reverse proxy)
+ %t Time when the request was started to process
+ %P The process ID of the child that serviced the request
+ %r First line of request
+ %s Response status code
+ %b Size of response in bytes, including HTTP headers
+ %T Time taken to serve the request, in seconds
+ %Tf Time taken to serve the request, in seconds with floating fraction
+ in .06f format
+ %D Time taken to serve the request, in microseconds
+ %{FOO}i request.headers['FOO']
+ %{FOO}o response.headers['FOO']
+ %{FOO}e os.environ['FOO']
+
+ """
+
+ LOG_FORMAT_MAP = {
+ "a": "remote_address",
+ "t": "request_start_time",
+ "P": "process_id",
+ "r": "first_request_line",
+ "s": "response_status",
+ "b": "response_size",
+ "T": "request_time",
+ "Tf": "request_time_frac",
+ "D": "request_time_micro",
+ "i": "request_header",
+ "o": "response_header",
+ }
+
+ LOG_FORMAT = '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"'
+ FORMAT_RE = re.compile(r"%(\{([A-Za-z0-9\-_]+)\}([ioe])|[atPrsbOD]|Tf?)")
+ CLEANUP_RE = re.compile(r"(%[^s])")
+ _FORMAT_CACHE = {} # type: Dict[str, Tuple[str, List[KeyMethod]]]
+
+ def __init__(self, logger: logging.Logger, log_format: str = LOG_FORMAT) -> None:
+ """Initialise the logger.
+
+ logger is a logger object to be used for logging.
+ log_format is a string with apache compatible log format description.
+
+ """
+ super().__init__(logger, log_format=log_format)
+
+ _compiled_format = AccessLogger._FORMAT_CACHE.get(log_format)
+ if not _compiled_format:
+ _compiled_format = self.compile_format(log_format)
+ AccessLogger._FORMAT_CACHE[log_format] = _compiled_format
+
+ self._log_format, self._methods = _compiled_format
+
+ def compile_format(self, log_format: str) -> Tuple[str, List[KeyMethod]]:
+ """Translate log_format into form usable by modulo formatting
+
+ All known atoms will be replaced with %s
+ Also methods for formatting of those atoms will be added to
+ _methods in appropriate order
+
+ For example we have log_format = "%a %t"
+ This format will be translated to "%s %s"
+ Also contents of _methods will be
+ [self._format_a, self._format_t]
+ These method will be called and results will be passed
+ to translated string format.
+
+ Each _format_* method receive 'args' which is list of arguments
+ given to self.log
+
+ Exceptions are _format_e, _format_i and _format_o methods which
+ also receive key name (by functools.partial)
+
+ """
+ # list of (key, method) tuples, we don't use an OrderedDict as users
+ # can repeat the same key more than once
+ methods = list()
+
+ for atom in self.FORMAT_RE.findall(log_format):
+ if atom[1] == "":
+ format_key1 = self.LOG_FORMAT_MAP[atom[0]]
+ m = getattr(AccessLogger, "_format_%s" % atom[0])
+ key_method = KeyMethod(format_key1, m)
+ else:
+ format_key2 = (self.LOG_FORMAT_MAP[atom[2]], atom[1])
+ m = getattr(AccessLogger, "_format_%s" % atom[2])
+ key_method = KeyMethod(format_key2, functools.partial(m, atom[1]))
+
+ methods.append(key_method)
+
+ log_format = self.FORMAT_RE.sub(r"%s", log_format)
+ log_format = self.CLEANUP_RE.sub(r"%\1", log_format)
+ return log_format, methods
+
+ @staticmethod
+ def _format_i(
+ key: str, request: BaseRequest, response: StreamResponse, time: float
+ ) -> str:
+ if request is None:
+ return "(no headers)"
+
+ # suboptimal, make istr(key) once
+ return request.headers.get(key, "-")
+
+ @staticmethod
+ def _format_o(
+ key: str, request: BaseRequest, response: StreamResponse, time: float
+ ) -> str:
+ # suboptimal, make istr(key) once
+ return response.headers.get(key, "-")
+
+ @staticmethod
+ def _format_a(request: BaseRequest, response: StreamResponse, time: float) -> str:
+ if request is None:
+ return "-"
+ ip = request.remote
+ return ip if ip is not None else "-"
+
+ @staticmethod
+ def _format_t(request: BaseRequest, response: StreamResponse, time: float) -> str:
+ now = datetime.datetime.utcnow()
+ start_time = now - datetime.timedelta(seconds=time)
+ return start_time.strftime("[%d/%b/%Y:%H:%M:%S +0000]")
+
+ @staticmethod
+ def _format_P(request: BaseRequest, response: StreamResponse, time: float) -> str:
+ return "<%s>" % os.getpid()
+
+ @staticmethod
+ def _format_r(request: BaseRequest, response: StreamResponse, time: float) -> str:
+ if request is None:
+ return "-"
+ return "{} {} HTTP/{}.{}".format(
+ request.method,
+ request.path_qs,
+ request.version.major,
+ request.version.minor,
+ )
+
+ @staticmethod
+ def _format_s(request: BaseRequest, response: StreamResponse, time: float) -> int:
+ return response.status
+
+ @staticmethod
+ def _format_b(request: BaseRequest, response: StreamResponse, time: float) -> int:
+ return response.body_length
+
+ @staticmethod
+ def _format_T(request: BaseRequest, response: StreamResponse, time: float) -> str:
+ return str(round(time))
+
+ @staticmethod
+ def _format_Tf(request: BaseRequest, response: StreamResponse, time: float) -> str:
+ return "%06f" % time
+
+ @staticmethod
+ def _format_D(request: BaseRequest, response: StreamResponse, time: float) -> str:
+ return str(round(time * 1000000))
+
+ def _format_line(
+ self, request: BaseRequest, response: StreamResponse, time: float
+ ) -> Iterable[Tuple[str, Callable[[BaseRequest, StreamResponse, float], str]]]:
+ return [(key, method(request, response, time)) for key, method in self._methods]
+
+ def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
+ try:
+ fmt_info = self._format_line(request, response, time)
+
+ values = list()
+ extra = dict()
+ for key, value in fmt_info:
+ values.append(value)
+
+ if key.__class__ is str:
+ extra[key] = value
+ else:
+ k1, k2 = key # type: ignore[misc]
+ dct = extra.get(k1, {}) # type: ignore[var-annotated,has-type]
+ dct[k2] = value # type: ignore[index,has-type]
+ extra[k1] = dct # type: ignore[has-type,assignment]
+
+ self.logger.info(self._log_format % tuple(values), extra=extra)
+ except Exception:
+ self.logger.exception("Error in logging")
diff --git a/contrib/python/aiohttp/aiohttp/web_middlewares.py b/contrib/python/aiohttp/aiohttp/web_middlewares.py
new file mode 100644
index 0000000000..fabcc449a2
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_middlewares.py
@@ -0,0 +1,119 @@
+import re
+from typing import TYPE_CHECKING, Awaitable, Callable, Tuple, Type, TypeVar
+
+from .typedefs import Handler
+from .web_exceptions import HTTPPermanentRedirect, _HTTPMove
+from .web_request import Request
+from .web_response import StreamResponse
+from .web_urldispatcher import SystemRoute
+
+__all__ = (
+ "middleware",
+ "normalize_path_middleware",
+)
+
+if TYPE_CHECKING: # pragma: no cover
+ from .web_app import Application
+
+_Func = TypeVar("_Func")
+
+
+async def _check_request_resolves(request: Request, path: str) -> Tuple[bool, Request]:
+ alt_request = request.clone(rel_url=path)
+
+ match_info = await request.app.router.resolve(alt_request)
+ alt_request._match_info = match_info
+
+ if match_info.http_exception is None:
+ return True, alt_request
+
+ return False, request
+
+
+def middleware(f: _Func) -> _Func:
+ f.__middleware_version__ = 1 # type: ignore[attr-defined]
+ return f
+
+
+_Middleware = Callable[[Request, Handler], Awaitable[StreamResponse]]
+
+
+def normalize_path_middleware(
+ *,
+ append_slash: bool = True,
+ remove_slash: bool = False,
+ merge_slashes: bool = True,
+ redirect_class: Type[_HTTPMove] = HTTPPermanentRedirect,
+) -> _Middleware:
+ """Factory for producing a middleware that normalizes the path of a request.
+
+ Normalizing means:
+ - Add or remove a trailing slash to the path.
+ - Double slashes are replaced by one.
+
+ The middleware returns as soon as it finds a path that resolves
+ correctly. The order if both merge and append/remove are enabled is
+ 1) merge slashes
+ 2) append/remove slash
+ 3) both merge slashes and append/remove slash.
+ If the path resolves with at least one of those conditions, it will
+ redirect to the new path.
+
+ Only one of `append_slash` and `remove_slash` can be enabled. If both
+ are `True` the factory will raise an assertion error
+
+ If `append_slash` is `True` the middleware will append a slash when
+ needed. If a resource is defined with trailing slash and the request
+ comes without it, it will append it automatically.
+
+ If `remove_slash` is `True`, `append_slash` must be `False`. When enabled
+ the middleware will remove trailing slashes and redirect if the resource
+ is defined
+
+ If merge_slashes is True, merge multiple consecutive slashes in the
+ path into one.
+ """
+ correct_configuration = not (append_slash and remove_slash)
+ assert correct_configuration, "Cannot both remove and append slash"
+
+ @middleware
+ async def impl(request: Request, handler: Handler) -> StreamResponse:
+ if isinstance(request.match_info.route, SystemRoute):
+ paths_to_check = []
+ if "?" in request.raw_path:
+ path, query = request.raw_path.split("?", 1)
+ query = "?" + query
+ else:
+ query = ""
+ path = request.raw_path
+
+ if merge_slashes:
+ paths_to_check.append(re.sub("//+", "/", path))
+ if append_slash and not request.path.endswith("/"):
+ paths_to_check.append(path + "/")
+ if remove_slash and request.path.endswith("/"):
+ paths_to_check.append(path[:-1])
+ if merge_slashes and append_slash:
+ paths_to_check.append(re.sub("//+", "/", path + "/"))
+ if merge_slashes and remove_slash:
+ merged_slashes = re.sub("//+", "/", path)
+ paths_to_check.append(merged_slashes[:-1])
+
+ for path in paths_to_check:
+ path = re.sub("^//+", "/", path) # SECURITY: GHSA-v6wp-4m6f-gcjg
+ resolves, request = await _check_request_resolves(request, path)
+ if resolves:
+ raise redirect_class(request.raw_path + query)
+
+ return await handler(request)
+
+ return impl
+
+
+def _fix_request_current_app(app: "Application") -> _Middleware:
+ @middleware
+ async def impl(request: Request, handler: Handler) -> StreamResponse:
+ with request.match_info.set_current_app(app):
+ return await handler(request)
+
+ return impl
diff --git a/contrib/python/aiohttp/aiohttp/web_protocol.py b/contrib/python/aiohttp/aiohttp/web_protocol.py
new file mode 100644
index 0000000000..ad0c0498e3
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_protocol.py
@@ -0,0 +1,681 @@
+import asyncio
+import asyncio.streams
+import traceback
+import warnings
+from collections import deque
+from contextlib import suppress
+from html import escape as html_escape
+from http import HTTPStatus
+from logging import Logger
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ Deque,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+
+import attr
+import yarl
+
+from .abc import AbstractAccessLogger, AbstractStreamWriter
+from .base_protocol import BaseProtocol
+from .helpers import ceil_timeout
+from .http import (
+ HttpProcessingError,
+ HttpRequestParser,
+ HttpVersion10,
+ RawRequestMessage,
+ StreamWriter,
+)
+from .log import access_logger, server_logger
+from .streams import EMPTY_PAYLOAD, StreamReader
+from .tcp_helpers import tcp_keepalive
+from .web_exceptions import HTTPException
+from .web_log import AccessLogger
+from .web_request import BaseRequest
+from .web_response import Response, StreamResponse
+
+__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError")
+
+if TYPE_CHECKING: # pragma: no cover
+ from .web_server import Server
+
+
+_RequestFactory = Callable[
+ [
+ RawRequestMessage,
+ StreamReader,
+ "RequestHandler",
+ AbstractStreamWriter,
+ "asyncio.Task[None]",
+ ],
+ BaseRequest,
+]
+
+_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]]
+
+ERROR = RawRequestMessage(
+ "UNKNOWN",
+ "/",
+ HttpVersion10,
+ {}, # type: ignore[arg-type]
+ {}, # type: ignore[arg-type]
+ True,
+ None,
+ False,
+ False,
+ yarl.URL("/"),
+)
+
+
+class RequestPayloadError(Exception):
+ """Payload parsing error."""
+
+
+class PayloadAccessError(Exception):
+ """Payload was accessed after response was sent."""
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class _ErrInfo:
+ """Immutable record of an HTTP parse failure.
+
+ Queued in place of a RawRequestMessage (see data_received) so the
+ request loop can turn the failure into an error response through
+ handle_error().
+ """
+ # HTTP status to answer with (400 for malformed input).
+ status: int
+ # The parser exception that triggered the error.
+ exc: BaseException
+ # Human-readable message forwarded to handle_error().
+ message: str
+
+
+_MsgType = Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader]
+
+
+class RequestHandler(BaseProtocol):
+ """HTTP protocol implementation.
+
+ RequestHandler handles incoming HTTP request. It reads request line,
+ request headers and request payload and calls handle_request() method.
+ By default it always returns with 404 response.
+
+ RequestHandler handles errors in incoming request, like bad
+ status line, bad headers or incomplete payload. If any error occurs,
+ connection gets closed.
+
+ keepalive_timeout -- number of seconds before closing
+ keep-alive connection
+
+ tcp_keepalive -- TCP keep-alive is on, default is on
+
+ debug -- enable debug mode
+
+ logger -- custom logger object
+
+ access_log_class -- custom class for access_logger
+
+ access_log -- custom logging object
+
+ access_log_format -- access log format string
+
+ loop -- Optional event loop
+
+ max_line_size -- Optional maximum header line size
+
+ max_field_size -- Optional maximum header field size
+
+ max_headers -- Optional maximum header size
+
+ """
+
+ KEEPALIVE_RESCHEDULE_DELAY = 1
+
+ __slots__ = (
+ "_request_count",
+ "_keepalive",
+ "_manager",
+ "_request_handler",
+ "_request_factory",
+ "_tcp_keepalive",
+ "_keepalive_time",
+ "_keepalive_handle",
+ "_keepalive_timeout",
+ "_lingering_time",
+ "_messages",
+ "_message_tail",
+ "_waiter",
+ "_task_handler",
+ "_upgrade",
+ "_payload_parser",
+ "_request_parser",
+ "_reading_paused",
+ "logger",
+ "debug",
+ "access_log",
+ "access_logger",
+ "_close",
+ "_force_close",
+ "_current_request",
+ )
+
+ def __init__(
+ self,
+ manager: "Server",
+ *,
+ loop: asyncio.AbstractEventLoop,
+ keepalive_timeout: float = 75.0, # NGINX default is 75 secs
+ tcp_keepalive: bool = True,
+ logger: Logger = server_logger,
+ access_log_class: Type[AbstractAccessLogger] = AccessLogger,
+ access_log: Logger = access_logger,
+ access_log_format: str = AccessLogger.LOG_FORMAT,
+ debug: bool = False,
+ max_line_size: int = 8190,
+ max_headers: int = 32768,
+ max_field_size: int = 8190,
+ lingering_time: float = 10.0,
+ read_bufsize: int = 2 ** 16,
+ auto_decompress: bool = True,
+ ):
+ # See the class docstring for parameter semantics; all limits are
+ # forwarded to HttpRequestParser below.
+ super().__init__(loop)
+
+ self._request_count = 0
+ self._keepalive = False
+ self._current_request = None # type: Optional[BaseRequest]
+ # _manager / handler / factory are nulled in connection_lost().
+ self._manager = manager # type: Optional[Server]
+ self._request_handler: Optional[_RequestHandler] = manager.request_handler
+ self._request_factory: Optional[_RequestFactory] = manager.request_factory
+
+ self._tcp_keepalive = tcp_keepalive
+ # placeholder to be replaced on keepalive timeout setup
+ self._keepalive_time = 0.0
+ self._keepalive_handle = None # type: Optional[asyncio.Handle]
+ self._keepalive_timeout = keepalive_timeout
+ self._lingering_time = float(lingering_time)
+
+ # Pipelined (message, payload) pairs awaiting processing by start().
+ self._messages: Deque[_MsgType] = deque()
+ self._message_tail = b""
+
+ self._waiter = None # type: Optional[asyncio.Future[None]]
+ self._task_handler = None # type: Optional[asyncio.Task[None]]
+
+ self._upgrade = False
+ self._payload_parser = None # type: Any
+ self._request_parser = HttpRequestParser(
+ self,
+ loop,
+ read_bufsize,
+ max_line_size=max_line_size,
+ max_field_size=max_field_size,
+ max_headers=max_headers,
+ payload_exception=RequestPayloadError,
+ auto_decompress=auto_decompress,
+ ) # type: Optional[HttpRequestParser]
+
+ self.logger = logger
+ self.debug = debug
+ self.access_log = access_log
+ # Access logging is disabled entirely when access_log is falsy.
+ if access_log:
+ self.access_logger = access_log_class(
+ access_log, access_log_format
+ ) # type: Optional[AbstractAccessLogger]
+ else:
+ self.access_logger = None
+
+ self._close = False
+ self._force_close = False
+
+ def __repr__(self) -> str:
+ # Debug representation: reports only connection state, derived from
+ # whether a transport is currently attached.
+ return "<{} {}>".format(
+ self.__class__.__name__,
+ "connected" if self.transport is not None else "disconnected",
+ )
+
+ @property
+ def keepalive_timeout(self) -> float:
+ # Read-only view of the keep-alive timeout given at construction.
+ return self._keepalive_timeout
+
+ async def shutdown(self, timeout: Optional[float] = 15.0) -> None:
+ """Do worker process exit preparations.
+
+ We need to clean up everything and stop accepting requests.
+ It is especially important for keep-alive connections.
+
+ :param timeout: grace period (seconds) to wait for the in-flight
+ handler task before force-cancelling it; None waits forever.
+ """
+ self._force_close = True
+
+ if self._keepalive_handle is not None:
+ self._keepalive_handle.cancel()
+
+ # Cancelling the waiter wakes an idle start() loop so it can exit.
+ if self._waiter:
+ self._waiter.cancel()
+
+ # wait for handlers
+ with suppress(asyncio.CancelledError, asyncio.TimeoutError):
+ async with ceil_timeout(timeout):
+ if self._current_request is not None:
+ self._current_request._cancel(asyncio.CancelledError())
+
+ if self._task_handler is not None and not self._task_handler.done():
+ await self._task_handler
+
+ # force-close non-idle handler
+ if self._task_handler is not None:
+ self._task_handler.cancel()
+
+ if self.transport is not None:
+ self.transport.close()
+ self.transport = None
+
+ def connection_made(self, transport: asyncio.BaseTransport) -> None:
+ # asyncio Protocol callback: enable TCP keep-alive if requested and
+ # spawn the per-connection request loop (start()) as a task.
+ super().connection_made(transport)
+
+ real_transport = cast(asyncio.Transport, transport)
+ if self._tcp_keepalive:
+ tcp_keepalive(real_transport)
+
+ self._task_handler = self._loop.create_task(self.start())
+ assert self._manager is not None
+ # Register this connection with the owning Server.
+ self._manager.connection_made(self, real_transport)
+
+ def connection_lost(self, exc: Optional[BaseException]) -> None:
+ # asyncio Protocol callback: tear down all per-connection state.
+ # A second call (manager already detached) is a no-op.
+ if self._manager is None:
+ return
+ self._manager.connection_lost(self, exc)
+
+ super().connection_lost(exc)
+
+ # Drop references so the handler loop cannot start new work.
+ self._manager = None
+ self._force_close = True
+ self._request_factory = None
+ self._request_handler = None
+ self._request_parser = None
+
+ if self._keepalive_handle is not None:
+ self._keepalive_handle.cancel()
+
+ # Propagate the disconnect into a request currently being handled.
+ if self._current_request is not None:
+ if exc is None:
+ exc = ConnectionResetError("Connection lost")
+ self._current_request._cancel(exc)
+
+ if self._task_handler is not None:
+ self._task_handler.cancel()
+ if self._waiter is not None:
+ self._waiter.cancel()
+
+ self._task_handler = None
+
+ # Signal EOF to an active payload parser (e.g. websocket reader).
+ if self._payload_parser is not None:
+ self._payload_parser.feed_eof()
+ self._payload_parser = None
+
+ def set_parser(self, parser: Any) -> None:
+ # Actual type is WebReader
+ # Install a payload parser (used after protocol upgrade) and flush
+ # any bytes buffered while no parser was installed.
+ assert self._payload_parser is None
+
+ self._payload_parser = parser
+
+ if self._message_tail:
+ self._payload_parser.feed_data(self._message_tail)
+ self._message_tail = b""
+
+ def eof_received(self) -> None:
+ # Intentionally empty: returning None lets asyncio close the
+ # transport; cleanup happens in connection_lost().
+ pass
+
+ def data_received(self, data: bytes) -> None:
+ # asyncio Protocol callback. Three modes:
+ # 1) normal: feed the HTTP request parser;
+ # 2) upgraded with no payload parser yet: buffer raw bytes;
+ # 3) payload parser installed: feed it directly.
+ if self._force_close or self._close:
+ return
+ # parse http messages
+ messages: Sequence[_MsgType]
+ if self._payload_parser is None and not self._upgrade:
+ assert self._request_parser is not None
+ try:
+ messages, upgraded, tail = self._request_parser.feed_data(data)
+ except HttpProcessingError as exc:
+ # Malformed input: queue an _ErrInfo so start() produces a
+ # 400 response instead of dropping the connection silently.
+ messages = [
+ (_ErrInfo(status=400, exc=exc, message=exc.message), EMPTY_PAYLOAD)
+ ]
+ upgraded = False
+ tail = b""
+
+ for msg, payload in messages or ():
+ self._request_count += 1
+ self._messages.append((msg, payload))
+
+ # Wake the start() loop if it is idle waiting for a request.
+ waiter = self._waiter
+ if messages and waiter is not None and not waiter.done():
+ # don't set result twice
+ waiter.set_result(None)
+
+ self._upgrade = upgraded
+ if upgraded and tail:
+ self._message_tail = tail
+
+ # no parser, just store
+ elif self._payload_parser is None and self._upgrade and data:
+ self._message_tail += data
+
+ # feed payload
+ elif data:
+ eof, tail = self._payload_parser.feed_data(data)
+ if eof:
+ self.close()
+
+ def keep_alive(self, val: bool) -> None:
+ """Set keep-alive connection mode.
+
+ :param bool val: new state.
+ """
+ self._keepalive = val
+ # Any pending keep-alive timer is stale once the mode changes;
+ # start() reschedules it as needed.
+ if self._keepalive_handle:
+ self._keepalive_handle.cancel()
+ self._keepalive_handle = None
+
+ def close(self) -> None:
+ """Close connection.
+
+ Stop accepting new pipelining messages and close
+ connection when handlers done processing messages.
+ """
+ self._close = True
+ # Wake an idle start() loop so it observes the close request.
+ if self._waiter:
+ self._waiter.cancel()
+
+ def force_close(self) -> None:
+ """Forcefully close connection.
+
+ Unlike close(), does not wait for in-flight handlers: the
+ transport is closed immediately.
+ """
+ self._force_close = True
+ if self._waiter:
+ self._waiter.cancel()
+ if self.transport is not None:
+ self.transport.close()
+ self.transport = None
+
+ def log_access(
+ self, request: BaseRequest, response: StreamResponse, time: float
+ ) -> None:
+ # *time* is the loop-time at request start; the logger receives the
+ # elapsed duration. No-op when access logging is disabled.
+ if self.access_logger is not None:
+ self.access_logger.log(request, response, self._loop.time() - time)
+
+ def log_debug(self, *args: Any, **kw: Any) -> None:
+ # Debug logging is gated on the handler's debug flag, not the
+ # logger's level.
+ if self.debug:
+ self.logger.debug(*args, **kw)
+
+ def log_exception(self, *args: Any, **kw: Any) -> None:
+ # Thin pass-through to the server logger (always enabled).
+ self.logger.exception(*args, **kw)
+
+ def _process_keepalive(self) -> None:
+ # Keep-alive timer callback scheduled by start(). Closes the
+ # connection once it has been idle past the keep-alive deadline.
+ if self._force_close or not self._keepalive:
+ return
+
+ # NOTE(review): local name shadows the `next` builtin; harmless
+ # here but worth renaming upstream.
+ next = self._keepalive_time + self._keepalive_timeout
+
+ # handler in idle state
+ if self._waiter:
+ if self._loop.time() > next:
+ self.force_close()
+ return
+
+ # not all request handlers are done,
+ # reschedule itself to next second
+ self._keepalive_handle = self._loop.call_later(
+ self.KEEPALIVE_RESCHEDULE_DELAY, self._process_keepalive
+ )
+
+ async def _handle_request(
+ self,
+ request: BaseRequest,
+ start_time: float,
+ request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]],
+ ) -> Tuple[StreamResponse, bool]:
+ """Run one request through the user handler and finish the response.
+
+ Returns (response, reset) where *reset* is True when the client
+ disconnected before the response could be completed.
+ Exceptions are mapped: HTTPException -> itself as the response,
+ TimeoutError -> 504, any other Exception -> 500;
+ CancelledError is re-raised untouched.
+ """
+ assert self._request_handler is not None
+ try:
+ try:
+ # Track the in-flight request so shutdown()/connection_lost()
+ # can cancel it.
+ self._current_request = request
+ resp = await request_handler(request)
+ finally:
+ self._current_request = None
+ except HTTPException as exc:
+ resp = exc
+ reset = await self.finish_response(request, resp, start_time)
+ except asyncio.CancelledError:
+ raise
+ except asyncio.TimeoutError as exc:
+ self.log_debug("Request handler timed out.", exc_info=exc)
+ resp = self.handle_error(request, 504)
+ reset = await self.finish_response(request, resp, start_time)
+ except Exception as exc:
+ resp = self.handle_error(request, 500, exc)
+ reset = await self.finish_response(request, resp, start_time)
+ else:
+ # Deprecation warning (See #2415)
+ if getattr(resp, "__http_exception__", False):
+ warnings.warn(
+ "returning HTTPException object is deprecated "
+ "(#2415) and will be removed, "
+ "please raise the exception instead",
+ DeprecationWarning,
+ )
+
+ reset = await self.finish_response(request, resp, start_time)
+
+ return resp, reset
+
+ async def start(self) -> None:
+ """Process incoming request.
+
+ It reads request line, request headers and request payload, then
+ calls handle_request() method. Subclass has to override
+ handle_request(). start() handles various exceptions in request
+ or response handling. Connection is being closed always unless
+ keep_alive(True) specified.
+
+ This is the per-connection main loop, spawned as a task in
+ connection_made(); it drains self._messages (fed by
+ data_received) one request at a time.
+ """
+ loop = self._loop
+ handler = self._task_handler
+ assert handler is not None
+ manager = self._manager
+ assert manager is not None
+ keepalive_timeout = self._keepalive_timeout
+ resp = None
+ assert self._request_factory is not None
+ assert self._request_handler is not None
+
+ while not self._force_close:
+ if not self._messages:
+ try:
+ # wait for next request
+ self._waiter = loop.create_future()
+ await self._waiter
+ except asyncio.CancelledError:
+ # close()/force_close()/shutdown() cancel the waiter.
+ break
+ finally:
+ self._waiter = None
+
+ message, payload = self._messages.popleft()
+
+ start = loop.time()
+
+ manager.requests_count += 1
+ writer = StreamWriter(self, loop)
+ if isinstance(message, _ErrInfo):
+ # make request_factory work
+ # A parse failure was queued: substitute a handler that
+ # renders the error and a placeholder ERROR message.
+ request_handler = self._make_error_handler(message)
+ message = ERROR
+ else:
+ request_handler = self._request_handler
+
+ request = self._request_factory(message, payload, self, writer, handler)
+ try:
+ # a new task is used for copy context vars (#3406)
+ task = self._loop.create_task(
+ self._handle_request(request, start, request_handler)
+ )
+ try:
+ resp, reset = await task
+ except (asyncio.CancelledError, ConnectionError):
+ self.log_debug("Ignored premature client disconnection")
+ break
+
+ # Drop the processed task from asyncio.Task.all_tasks() early
+ del task
+ if reset:
+ self.log_debug("Ignored premature client disconnection 2")
+ break
+
+ # notify server about keep-alive
+ self._keepalive = bool(resp.keep_alive)
+
+ # check payload
+ if not payload.is_eof():
+ # Handler did not consume the body: linger-read and
+ # discard it so the next pipelined request parses cleanly.
+ lingering_time = self._lingering_time
+ if not self._force_close and lingering_time:
+ self.log_debug(
+ "Start lingering close timer for %s sec.", lingering_time
+ )
+
+ now = loop.time()
+ end_t = now + lingering_time
+
+ with suppress(asyncio.TimeoutError, asyncio.CancelledError):
+ while not payload.is_eof() and now < end_t:
+ async with ceil_timeout(end_t - now):
+ # read and ignore
+ await payload.readany()
+ now = loop.time()
+
+ # if payload still uncompleted
+ if not payload.is_eof() and not self._force_close:
+ self.log_debug("Uncompleted request.")
+ self.close()
+
+ # Any later access to the stale payload is a bug.
+ payload.set_exception(PayloadAccessError())
+
+ except asyncio.CancelledError:
+ self.log_debug("Ignored premature client disconnection ")
+ break
+ except RuntimeError as exc:
+ if self.debug:
+ self.log_exception("Unhandled runtime exception", exc_info=exc)
+ self.force_close()
+ except Exception as exc:
+ self.log_exception("Unhandled exception", exc_info=exc)
+ self.force_close()
+ finally:
+ if self.transport is None and resp is not None:
+ self.log_debug("Ignored premature client disconnection.")
+ elif not self._force_close:
+ if self._keepalive and not self._close:
+ # start keep-alive timer
+ if keepalive_timeout is not None:
+ now = self._loop.time()
+ self._keepalive_time = now
+ if self._keepalive_handle is None:
+ self._keepalive_handle = loop.call_at(
+ now + keepalive_timeout, self._process_keepalive
+ )
+ else:
+ break
+
+ # remove handler, close transport if no handlers left
+ if not self._force_close:
+ self._task_handler = None
+ if self.transport is not None:
+ self.transport.close()
+
+ async def finish_response(
+ self, request: BaseRequest, resp: StreamResponse, start_time: float
+ ) -> bool:
+ """Prepare the response and write_eof, then log access.
+
+ This has to
+ be called within the context of any exception so the access logger
+ can get exception information. Returns True if the client disconnects
+ prematurely.
+ """
+ # After an upgrade attempt that produced a normal response, rearm
+ # the request parser and replay any buffered bytes.
+ if self._request_parser is not None:
+ self._request_parser.set_upgraded(False)
+ self._upgrade = False
+ if self._message_tail:
+ self._request_parser.feed_data(self._message_tail)
+ self._message_tail = b""
+ try:
+ prepare_meth = resp.prepare
+ except AttributeError:
+ # Handler returned None or a non-response object; raise a
+ # descriptive error instead of an opaque AttributeError.
+ if resp is None:
+ raise RuntimeError("Missing return " "statement on request handler")
+ else:
+ raise RuntimeError(
+ "Web-handler should return "
+ "a response instance, "
+ "got {!r}".format(resp)
+ )
+ try:
+ await prepare_meth(request)
+ await resp.write_eof()
+ except ConnectionError:
+ # Client went away mid-write: still log, report reset=True.
+ self.log_access(request, resp, start_time)
+ return True
+ else:
+ self.log_access(request, resp, start_time)
+ return False
+
+ def handle_error(
+ self,
+ request: BaseRequest,
+ status: int = 500,
+ exc: Optional[BaseException] = None,
+ message: Optional[str] = None,
+ ) -> StreamResponse:
+ """Handle errors.
+
+ Returns HTTP response with specific status code. Logs additional
+ information. It always closes current connection.
+
+ For 500 errors a friendly page is built (HTML if the client
+ accepts it); in debug mode it includes the traceback.
+ """
+ self.log_exception("Error handling request", exc_info=exc)
+
+ # some data already got sent, connection is broken
+ if request.writer.output_size > 0:
+ raise ConnectionError(
+ "Response is sent already, cannot send another response "
+ "with the error message"
+ )
+
+ ct = "text/plain"
+ if status == HTTPStatus.INTERNAL_SERVER_ERROR:
+ title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR)
+ msg = HTTPStatus.INTERNAL_SERVER_ERROR.description
+ tb = None
+ if self.debug:
+ # format_exc() can itself fail outside an except block.
+ with suppress(Exception):
+ tb = traceback.format_exc()
+
+ if "text/html" in request.headers.get("Accept", ""):
+ if tb:
+ # Escape the traceback before embedding it in HTML.
+ tb = html_escape(tb)
+ msg = f"<h2>Traceback:</h2>\n<pre>{tb}</pre>"
+ message = (
+ "<html><head>"
+ "<title>{title}</title>"
+ "</head><body>\n<h1>{title}</h1>"
+ "\n{msg}\n</body></html>\n"
+ ).format(title=title, msg=msg)
+ ct = "text/html"
+ else:
+ if tb:
+ msg = tb
+ message = title + "\n\n" + msg
+
+ resp = Response(status=status, text=message, content_type=ct)
+ # Error responses never keep the connection alive.
+ resp.force_close()
+
+ return resp
+
+ def _make_error_handler(
+ self, err_info: _ErrInfo
+ ) -> Callable[[BaseRequest], Awaitable[StreamResponse]]:
+ # Wrap a queued parse failure in a handler-shaped coroutine so the
+ # start() loop can process it like a normal request.
+ async def handler(request: BaseRequest) -> StreamResponse:
+ return self.handle_error(
+ request, err_info.status, err_info.exc, err_info.message
+ )
+
+ return handler
diff --git a/contrib/python/aiohttp/aiohttp/web_request.py b/contrib/python/aiohttp/aiohttp/web_request.py
new file mode 100644
index 0000000000..b3574cafb3
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_request.py
@@ -0,0 +1,874 @@
+import asyncio
+import datetime
+import io
+import re
+import socket
+import string
+import tempfile
+import types
+import warnings
+from http.cookies import SimpleCookie
+from types import MappingProxyType
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Dict,
+ Iterator,
+ Mapping,
+ MutableMapping,
+ Optional,
+ Pattern,
+ Tuple,
+ Union,
+ cast,
+)
+from urllib.parse import parse_qsl
+
+import attr
+from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
+from yarl import URL
+
+from . import hdrs
+from .abc import AbstractStreamWriter
+from .helpers import (
+ DEBUG,
+ ETAG_ANY,
+ LIST_QUOTED_ETAG_RE,
+ ChainMapProxy,
+ ETag,
+ HeadersMixin,
+ parse_http_date,
+ reify,
+ sentinel,
+)
+from .http_parser import RawRequestMessage
+from .http_writer import HttpVersion
+from .multipart import BodyPartReader, MultipartReader
+from .streams import EmptyStreamReader, StreamReader
+from .typedefs import (
+ DEFAULT_JSON_DECODER,
+ Final,
+ JSONDecoder,
+ LooseHeaders,
+ RawHeaders,
+ StrOrURL,
+)
+from .web_exceptions import HTTPRequestEntityTooLarge
+from .web_response import StreamResponse
+
+__all__ = ("BaseRequest", "FileField", "Request")
+
+
+if TYPE_CHECKING: # pragma: no cover
+ from .web_app import Application
+ from .web_protocol import RequestHandler
+ from .web_urldispatcher import UrlMappingMatchInfo
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class FileField:
+ """Immutable descriptor of one uploaded file in a multipart POST body."""
+ # Form field name.
+ name: str
+ # Client-supplied file name.
+ filename: str
+ # Spooled file object holding the uploaded bytes.
+ file: io.BufferedReader
+ # Content-Type of the part.
+ content_type: str
+ # Headers of the individual multipart part.
+ headers: "CIMultiDictProxy[str]"
+
+
+_TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-"
+# '-' at the end to prevent interpretation as range in a char class
+
+_TOKEN: Final[str] = fr"[{_TCHAR}]+"
+
+_QDTEXT: Final[str] = r"[{}]".format(
+ r"".join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F)))
+)
+# qdtext includes 0x5C to escape 0x5D ('\]')
+# qdtext excludes obs-text (because obsoleted, and encoding not specified)
+
+_QUOTED_PAIR: Final[str] = r"\\[\t !-~]"
+
+_QUOTED_STRING: Final[str] = r'"(?:{quoted_pair}|{qdtext})*"'.format(
+ qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR
+)
+
+_FORWARDED_PAIR: Final[
+ str
+] = r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format(
+ token=_TOKEN, quoted_string=_QUOTED_STRING
+)
+
+_QUOTED_PAIR_REPLACE_RE: Final[Pattern[str]] = re.compile(r"\\([\t !-~])")
+# same pattern as _QUOTED_PAIR but contains a capture group
+
+_FORWARDED_PAIR_RE: Final[Pattern[str]] = re.compile(_FORWARDED_PAIR)
+
+############################################################
+# HTTP Request
+############################################################
+
+
+class BaseRequest(MutableMapping[str, Any], HeadersMixin):
+
+ POST_METHODS = {
+ hdrs.METH_PATCH,
+ hdrs.METH_POST,
+ hdrs.METH_PUT,
+ hdrs.METH_TRACE,
+ hdrs.METH_DELETE,
+ }
+
+ ATTRS = HeadersMixin.ATTRS | frozenset(
+ [
+ "_message",
+ "_protocol",
+ "_payload_writer",
+ "_payload",
+ "_headers",
+ "_method",
+ "_version",
+ "_rel_url",
+ "_post",
+ "_read_bytes",
+ "_state",
+ "_cache",
+ "_task",
+ "_client_max_size",
+ "_loop",
+ "_transport_sslcontext",
+ "_transport_peername",
+ ]
+ )
+
+ def __init__(
+ self,
+ message: RawRequestMessage,
+ payload: StreamReader,
+ protocol: "RequestHandler",
+ payload_writer: AbstractStreamWriter,
+ task: "asyncio.Task[None]",
+ loop: asyncio.AbstractEventLoop,
+ *,
+ client_max_size: int = 1024 ** 2,
+ state: Optional[Dict[str, Any]] = None,
+ scheme: Optional[str] = None,
+ host: Optional[str] = None,
+ remote: Optional[str] = None,
+ ) -> None:
+ if state is None:
+ state = {}
+ self._message = message
+ self._protocol = protocol
+ self._payload_writer = payload_writer
+
+ self._payload = payload
+ self._headers = message.headers
+ self._method = message.method
+ self._version = message.version
+ self._rel_url = message.url
+ self._post = (
+ None
+ ) # type: Optional[MultiDictProxy[Union[str, bytes, FileField]]]
+ self._read_bytes = None # type: Optional[bytes]
+
+ self._state = state
+ self._cache = {} # type: Dict[str, Any]
+ self._task = task
+ self._client_max_size = client_max_size
+ self._loop = loop
+
+ transport = self._protocol.transport
+ assert transport is not None
+ self._transport_sslcontext = transport.get_extra_info("sslcontext")
+ self._transport_peername = transport.get_extra_info("peername")
+
+ if scheme is not None:
+ self._cache["scheme"] = scheme
+ if host is not None:
+ self._cache["host"] = host
+ if remote is not None:
+ self._cache["remote"] = remote
+
+ def clone(
+ self,
+ *,
+ method: str = sentinel,
+ rel_url: StrOrURL = sentinel,
+ headers: LooseHeaders = sentinel,
+ scheme: str = sentinel,
+ host: str = sentinel,
+ remote: str = sentinel,
+ ) -> "BaseRequest":
+ """Clone itself with replacement some attributes.
+
+ Creates and returns a new instance of Request object. If no parameters
+ are given, an exact copy is returned. If a parameter is not passed, it
+ will reuse the one from the current request object.
+
+ The sentinel default distinguishes "not passed" from any real
+ value (including None).
+ """
+ # Cloning after the payload was consumed would yield a request
+ # whose body can no longer be read.
+ if self._read_bytes:
+ raise RuntimeError("Cannot clone request " "after reading its content")
+
+ dct = {} # type: Dict[str, Any]
+ if method is not sentinel:
+ dct["method"] = method
+ if rel_url is not sentinel:
+ new_url = URL(rel_url)
+ dct["url"] = new_url
+ dct["path"] = str(new_url)
+ if headers is not sentinel:
+ # a copy semantic
+ dct["headers"] = CIMultiDictProxy(CIMultiDict(headers))
+ dct["raw_headers"] = tuple(
+ (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
+ )
+
+ # RawRequestMessage is a namedtuple-style record; _replace builds
+ # a new message with the overrides applied.
+ message = self._message._replace(**dct)
+
+ kwargs = {}
+ if scheme is not sentinel:
+ kwargs["scheme"] = scheme
+ if host is not sentinel:
+ kwargs["host"] = host
+ if remote is not sentinel:
+ kwargs["remote"] = remote
+
+ return self.__class__(
+ message,
+ self._payload,
+ self._protocol,
+ self._payload_writer,
+ self._task,
+ self._loop,
+ client_max_size=self._client_max_size,
+ state=self._state.copy(),
+ **kwargs,
+ )
+
+ @property
+ def task(self) -> "asyncio.Task[None]":
+ return self._task
+
+ @property
+ def protocol(self) -> "RequestHandler":
+ return self._protocol
+
+ @property
+ def transport(self) -> Optional[asyncio.Transport]:
+ if self._protocol is None:
+ return None
+ return self._protocol.transport
+
+ @property
+ def writer(self) -> AbstractStreamWriter:
+ return self._payload_writer
+
+ @reify
+ def message(self) -> RawRequestMessage:
+ warnings.warn("Request.message is deprecated", DeprecationWarning, stacklevel=3)
+ return self._message
+
+ @reify
+ def rel_url(self) -> URL:
+ return self._rel_url
+
+ @reify
+ def loop(self) -> asyncio.AbstractEventLoop:
+ warnings.warn(
+ "request.loop property is deprecated", DeprecationWarning, stacklevel=2
+ )
+ return self._loop
+
+ # MutableMapping API
+
+ def __getitem__(self, key: str) -> Any:
+ return self._state[key]
+
+ def __setitem__(self, key: str, value: Any) -> None:
+ self._state[key] = value
+
+ def __delitem__(self, key: str) -> None:
+ del self._state[key]
+
+ def __len__(self) -> int:
+ return len(self._state)
+
+ def __iter__(self) -> Iterator[str]:
+ return iter(self._state)
+
+ ########
+
+ @reify
+ def secure(self) -> bool:
+ """A bool indicating if the request is handled with SSL."""
+ return self.scheme == "https"
+
+ @reify
+ def forwarded(self) -> Tuple[Mapping[str, str], ...]:
+ """A tuple containing all parsed Forwarded header(s).
+
+ Makes an effort to parse Forwarded headers as specified by RFC 7239:
+
+ - It adds one (immutable) dictionary per Forwarded 'field-value', ie
+ per proxy. The element corresponds to the data in the Forwarded
+ field-value added by the first proxy encountered by the client. Each
+ subsequent item corresponds to those added by later proxies.
+ - It checks that every value has valid syntax in general as specified
+ in section 4: either a 'token' or a 'quoted-string'.
+ - It un-escapes found escape sequences.
+ - It does NOT validate 'by' and 'for' contents as specified in section
+ 6.
+ - It does NOT validate 'host' contents (Host ABNF).
+ - It does NOT validate 'proto' contents for valid URI scheme names.
+
+ Returns a tuple containing one or more immutable dicts
+ """
+ elems = []
+ for field_value in self._message.headers.getall(hdrs.FORWARDED, ()):
+ length = len(field_value)
+ pos = 0
+ need_separator = False
+ elem = {} # type: Dict[str, str]
+ elems.append(types.MappingProxyType(elem))
+ while 0 <= pos < length:
+ match = _FORWARDED_PAIR_RE.match(field_value, pos)
+ if match is not None: # got a valid forwarded-pair
+ if need_separator:
+ # bad syntax here, skip to next comma
+ pos = field_value.find(",", pos)
+ else:
+ name, value, port = match.groups()
+ if value[0] == '"':
+ # quoted string: remove quotes and unescape
+ value = _QUOTED_PAIR_REPLACE_RE.sub(r"\1", value[1:-1])
+ if port:
+ value += port
+ elem[name.lower()] = value
+ pos += len(match.group(0))
+ need_separator = True
+ elif field_value[pos] == ",": # next forwarded-element
+ need_separator = False
+ elem = {}
+ elems.append(types.MappingProxyType(elem))
+ pos += 1
+ elif field_value[pos] == ";": # next forwarded-pair
+ need_separator = False
+ pos += 1
+ elif field_value[pos] in " \t":
+ # Allow whitespace even between forwarded-pairs, though
+ # RFC 7239 doesn't. This simplifies code and is in line
+ # with Postel's law.
+ pos += 1
+ else:
+ # bad syntax here, skip to next comma
+ pos = field_value.find(",", pos)
+ return tuple(elems)
+
+ @reify
+ def scheme(self) -> str:
+ """A string representing the scheme of the request.
+
+ Hostname is resolved in this order:
+
+ - overridden value by .clone(scheme=new_scheme) call.
+ - type of connection to peer: HTTPS if socket is SSL, HTTP otherwise.
+
+ 'http' or 'https'.
+ """
+ if self._transport_sslcontext:
+ return "https"
+ else:
+ return "http"
+
+ @reify
+ def method(self) -> str:
+ """Read only property for getting HTTP method.
+
+ The value is upper-cased str like 'GET', 'POST', 'PUT' etc.
+ """
+ return self._method
+
+ @reify
+ def version(self) -> HttpVersion:
+ """Read only property for getting HTTP version of request.
+
+ Returns aiohttp.protocol.HttpVersion instance.
+ """
+ return self._version
+
+ @reify
+ def host(self) -> str:
+ """Hostname of the request.
+
+ Hostname is resolved in this order:
+
+ - overridden value by .clone(host=new_host) call.
+ - HOST HTTP header
+ - socket.getfqdn() value
+ """
+ host = self._message.headers.get(hdrs.HOST)
+ if host is not None:
+ return host
+ return socket.getfqdn()
+
+ @reify
+ def remote(self) -> Optional[str]:
+ """Remote IP of client initiated HTTP request.
+
+ The IP is resolved in this order:
+
+ - overridden value by .clone(remote=new_remote) call.
+ - peername of opened socket
+ """
+ if self._transport_peername is None:
+ return None
+ if isinstance(self._transport_peername, (list, tuple)):
+ return str(self._transport_peername[0])
+ return str(self._transport_peername)
+
+ @reify
+ def url(self) -> URL:
+ url = URL.build(scheme=self.scheme, host=self.host)
+ return url.join(self._rel_url)
+
+ @reify
+ def path(self) -> str:
+ """The URL including *PATH INFO* without the host or scheme.
+
+ E.g., ``/app/blog``
+ """
+ return self._rel_url.path
+
+ @reify
+ def path_qs(self) -> str:
+ """The URL including PATH_INFO and the query string.
+
+ E.g, /app/blog?id=10
+ """
+ return str(self._rel_url)
+
+ @reify
+ def raw_path(self) -> str:
+ """The URL including raw *PATH INFO* without the host or scheme.
+
+ Warning, the path is unquoted and may contains non valid URL characters
+
+ E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters``
+ """
+ return self._message.path
+
+ @reify
+ def query(self) -> "MultiDictProxy[str]":
+ """A multidict with all the variables in the query string."""
+ return MultiDictProxy(self._rel_url.query)
+
+ @reify
+ def query_string(self) -> str:
+ """The query string in the URL.
+
+ E.g., id=10
+ """
+ return self._rel_url.query_string
+
+ @reify
+ def headers(self) -> "CIMultiDictProxy[str]":
+ """A case-insensitive multidict proxy with all headers."""
+ return self._headers
+
+ @reify
+ def raw_headers(self) -> RawHeaders:
+ """A sequence of pairs for all headers."""
+ return self._message.raw_headers
+
+ @reify
+ def if_modified_since(self) -> Optional[datetime.datetime]:
+ """The value of If-Modified-Since HTTP header, or None.
+
+ This header is represented as a `datetime` object.
+ """
+ return parse_http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE))
+
+ @reify
+ def if_unmodified_since(self) -> Optional[datetime.datetime]:
+ """The value of If-Unmodified-Since HTTP header, or None.
+
+ This header is represented as a `datetime` object.
+ """
+ return parse_http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE))
+
+ @staticmethod
+ def _etag_values(etag_header: str) -> Iterator[ETag]:
+ """Extract `ETag` objects from raw header.
+
+ Yields one ETag per well-formed entry; parsing stops at the first
+ malformed token.
+ """
+ # "*" matches any entity and is represented as a single ETag.
+ if etag_header == ETAG_ANY:
+ yield ETag(
+ is_weak=False,
+ value=ETAG_ANY,
+ )
+ else:
+ for match in LIST_QUOTED_ETAG_RE.finditer(etag_header):
+ is_weak, value, garbage = match.group(2, 3, 4)
+ # Any symbol captured by 4th group means
+ # that the following sequence is invalid.
+ if garbage:
+ break
+
+ yield ETag(
+ is_weak=bool(is_weak),
+ value=value,
+ )
+
+ @classmethod
+ def _if_match_or_none_impl(
+ cls, header_value: Optional[str]
+ ) -> Optional[Tuple[ETag, ...]]:
+ if not header_value:
+ return None
+
+ return tuple(cls._etag_values(header_value))
+
+ @reify
+ def if_match(self) -> Optional[Tuple[ETag, ...]]:
+ """The value of If-Match HTTP header, or None.
+
+ This header is represented as a `tuple` of `ETag` objects.
+ """
+ return self._if_match_or_none_impl(self.headers.get(hdrs.IF_MATCH))
+
+ @reify
+ def if_none_match(self) -> Optional[Tuple[ETag, ...]]:
+ """The value of If-None-Match HTTP header, or None.
+
+ This header is represented as a `tuple` of `ETag` objects.
+ """
+ return self._if_match_or_none_impl(self.headers.get(hdrs.IF_NONE_MATCH))
+
+ @reify
+ def if_range(self) -> Optional[datetime.datetime]:
+ """The value of If-Range HTTP header, or None.
+
+ This header is represented as a `datetime` object.
+ """
+ return parse_http_date(self.headers.get(hdrs.IF_RANGE))
+
+ @reify
+ def keep_alive(self) -> bool:
+ """Is keepalive enabled by client?"""
+ return not self._message.should_close
+
+ @reify
+ def cookies(self) -> Mapping[str, str]:
+ """Return request cookies.
+
+ A read-only dictionary-like object.
+ """
+ raw = self.headers.get(hdrs.COOKIE, "")
+ parsed = SimpleCookie(raw) # type: SimpleCookie[str]
+ return MappingProxyType({key: val.value for key, val in parsed.items()})
+
    @reify
    def http_range(self) -> slice:
        """The content of Range HTTP header.

        Return a slice instance.

        Raises ValueError for malformed, inverted, or empty ranges.
        """
        rng = self._headers.get(hdrs.RANGE)
        start, end = None, None
        if rng is not None:
            try:
                # Only the single-range "bytes=start-end" form is supported;
                # multi-range headers do not match and raise below.
                pattern = r"^bytes=(\d*)-(\d*)$"
                start, end = re.findall(pattern, rng)[0]
            except IndexError:  # pattern was not found in header
                raise ValueError("range not in acceptable format")

            # Empty captures mean "open" on that side.
            end = int(end) if end else None
            start = int(start) if start else None

            if start is None and end is not None:
                # end with no start is to return tail of content
                start = -end
                end = None

            if start is not None and end is not None:
                # end is inclusive in range header, exclusive for slice
                end += 1

                if start >= end:
                    raise ValueError("start cannot be after end")

            if start is end is None:  # No valid range supplied
                raise ValueError("No start or end of range specified")

        return slice(start, end, 1)
+
+ @reify
+ def content(self) -> StreamReader:
+ """Return raw payload stream."""
+ return self._payload
+
+ @property
+ def has_body(self) -> bool:
+ """Return True if request's HTTP BODY can be read, False otherwise."""
+ warnings.warn(
+ "Deprecated, use .can_read_body #2005", DeprecationWarning, stacklevel=2
+ )
+ return not self._payload.at_eof()
+
+ @property
+ def can_read_body(self) -> bool:
+ """Return True if request's HTTP BODY can be read, False otherwise."""
+ return not self._payload.at_eof()
+
+ @reify
+ def body_exists(self) -> bool:
+ """Return True if request has HTTP BODY, False otherwise."""
+ return type(self._payload) is not EmptyStreamReader
+
+ async def release(self) -> None:
+ """Release request.
+
+ Eat unread part of HTTP BODY if present.
+ """
+ while not self._payload.at_eof():
+ await self._payload.readany()
+
    async def read(self) -> bytes:
        """Read request body if present.

        Returns bytes object with full request content.  The result is
        cached, so repeated calls do not re-read the payload stream.
        """
        if self._read_bytes is None:
            body = bytearray()
            while True:
                chunk = await self._payload.readany()
                body.extend(chunk)
                # Enforce the limit while streaming, before buffering more;
                # a falsy client_max_size (0/None) disables the check.
                if self._client_max_size:
                    body_size = len(body)
                    if body_size >= self._client_max_size:
                        raise HTTPRequestEntityTooLarge(
                            max_size=self._client_max_size, actual_size=body_size
                        )
                if not chunk:
                    break
            self._read_bytes = bytes(body)
        return self._read_bytes
+
+ async def text(self) -> str:
+ """Return BODY as text using encoding from .charset."""
+ bytes_body = await self.read()
+ encoding = self.charset or "utf-8"
+ return bytes_body.decode(encoding)
+
    async def json(self, *, loads: JSONDecoder = DEFAULT_JSON_DECODER) -> Any:
        """Return BODY as JSON.

        Reads the full body as text first; propagates whatever ``loads``
        raises on invalid input.
        """
        body = await self.text()
        return loads(body)
+
+ async def multipart(self) -> MultipartReader:
+ """Return async iterator to process BODY as multipart."""
+ return MultipartReader(self._headers, self._payload)
+
    async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]":
        """Return POST parameters.

        Parses the body as form data on first use and caches the result in
        ``self._post``.  Non-POST methods and non-form content types yield
        an empty read-only multidict.
        """
        if self._post is not None:
            return self._post
        if self._method not in self.POST_METHODS:
            self._post = MultiDictProxy(MultiDict())
            return self._post

        content_type = self.content_type
        if content_type not in (
            "",
            "application/x-www-form-urlencoded",
            "multipart/form-data",
        ):
            self._post = MultiDictProxy(MultiDict())
            return self._post

        out = MultiDict()  # type: MultiDict[Union[str, bytes, FileField]]

        if content_type == "multipart/form-data":
            multipart = await self.multipart()
            max_size = self._client_max_size

            field = await multipart.next()
            while field is not None:
                size = 0
                field_ct = field.headers.get(hdrs.CONTENT_TYPE)

                if isinstance(field, BodyPartReader):
                    assert field.name is not None

                    # Note that according to RFC 7578, the Content-Type header
                    # is optional, even for files, so we can't assume it's
                    # present.
                    # https://tools.ietf.org/html/rfc7578#section-4.4
                    if field.filename:
                        # store file in temp file
                        # (spooled to disk; client_max_size enforced per chunk)
                        tmp = tempfile.TemporaryFile()
                        chunk = await field.read_chunk(size=2 ** 16)
                        while chunk:
                            chunk = field.decode(chunk)
                            tmp.write(chunk)
                            size += len(chunk)
                            if 0 < max_size < size:
                                tmp.close()
                                raise HTTPRequestEntityTooLarge(
                                    max_size=max_size, actual_size=size
                                )
                            chunk = await field.read_chunk(size=2 ** 16)
                        tmp.seek(0)

                        if field_ct is None:
                            field_ct = "application/octet-stream"

                        ff = FileField(
                            field.name,
                            field.filename,
                            cast(io.BufferedReader, tmp),
                            field_ct,
                            field.headers,
                        )
                        out.add(field.name, ff)
                    else:
                        # deal with ordinary data
                        # Text-ish parts are decoded; everything else stays bytes.
                        value = await field.read(decode=True)
                        if field_ct is None or field_ct.startswith("text/"):
                            charset = field.get_charset(default="utf-8")
                            out.add(field.name, value.decode(charset))
                        else:
                            out.add(field.name, value)
                        size += len(value)
                        if 0 < max_size < size:
                            raise HTTPRequestEntityTooLarge(
                                max_size=max_size, actual_size=size
                            )
                else:
                    raise ValueError(
                        "To decode nested multipart you need " "to use custom reader",
                    )

                field = await multipart.next()
        else:
            # "" or application/x-www-form-urlencoded: parse as a query string.
            data = await self.read()
            if data:
                charset = self.charset or "utf-8"
                out.extend(
                    parse_qsl(
                        data.rstrip().decode(charset),
                        keep_blank_values=True,
                        encoding=charset,
                    )
                )

        self._post = MultiDictProxy(out)
        return self._post
+
+ def get_extra_info(self, name: str, default: Any = None) -> Any:
+ """Extra info from protocol transport"""
+ protocol = self._protocol
+ if protocol is None:
+ return default
+
+ transport = protocol.transport
+ if transport is None:
+ return default
+
+ return transport.get_extra_info(name, default)
+
+ def __repr__(self) -> str:
+ ascii_encodable_path = self.path.encode("ascii", "backslashreplace").decode(
+ "ascii"
+ )
+ return "<{} {} {} >".format(
+ self.__class__.__name__, self._method, ascii_encodable_path
+ )
+
+ def __eq__(self, other: object) -> bool:
+ return id(self) == id(other)
+
    def __bool__(self) -> bool:
        # A request object is always truthy.
        return True
+
+ async def _prepare_hook(self, response: StreamResponse) -> None:
+ return
+
+ def _cancel(self, exc: BaseException) -> None:
+ self._payload.set_exception(exc)
+
+
class Request(BaseRequest):
    """A web request bound to the routing layer.

    Extends ``BaseRequest`` with the match info produced by route
    resolving (``_match_info``), set after dispatch.
    """

    # _match_info is declared so the DEBUG __setattr__ guard below allows it.
    ATTRS = BaseRequest.ATTRS | frozenset(["_match_info"])

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)

        # matchdict, route_name, handler
        # or information about traversal lookup

        # initialized after route resolving
        self._match_info = None  # type: Optional[UrlMappingMatchInfo]

    if DEBUG:

        def __setattr__(self, name: str, val: Any) -> None:
            # In debug mode, warn when setting attributes not declared in
            # ATTRS — usually accidental per-request state.
            if name not in self.ATTRS:
                warnings.warn(
                    "Setting custom {}.{} attribute "
                    "is discouraged".format(self.__class__.__name__, name),
                    DeprecationWarning,
                    stacklevel=2,
                )
            super().__setattr__(name, val)

    def clone(
        self,
        *,
        method: str = sentinel,
        rel_url: StrOrURL = sentinel,
        headers: LooseHeaders = sentinel,
        scheme: str = sentinel,
        host: str = sentinel,
        remote: str = sentinel,
    ) -> "Request":
        """Clone the request, carrying over the resolved match info."""
        ret = super().clone(
            method=method,
            rel_url=rel_url,
            headers=headers,
            scheme=scheme,
            host=host,
            remote=remote,
        )
        new_ret = cast(Request, ret)
        new_ret._match_info = self._match_info
        return new_ret

    @reify
    def match_info(self) -> "UrlMappingMatchInfo":
        """Result of route resolving."""
        match_info = self._match_info
        assert match_info is not None
        return match_info

    @property
    def app(self) -> "Application":
        """Application instance."""
        match_info = self._match_info
        assert match_info is not None
        return match_info.current_app

    @property
    def config_dict(self) -> ChainMapProxy:
        """Chain of app mappings from the current app up to the root app."""
        match_info = self._match_info
        assert match_info is not None
        lst = match_info.apps
        app = self.app
        idx = lst.index(app)
        sublist = list(reversed(lst[: idx + 1]))
        return ChainMapProxy(sublist)

    async def _prepare_hook(self, response: StreamResponse) -> None:
        # Fire on_response_prepare for every app in the match chain;
        # a no-op when routing has not resolved this request yet.
        match_info = self._match_info
        if match_info is None:
            return
        for app in match_info._apps:
            await app.on_response_prepare.send(self, response)
diff --git a/contrib/python/aiohttp/aiohttp/web_response.py b/contrib/python/aiohttp/aiohttp/web_response.py
new file mode 100644
index 0000000000..7880ab2d02
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_response.py
@@ -0,0 +1,825 @@
+import asyncio
+import collections.abc
+import datetime
+import enum
+import json
+import math
+import time
+import warnings
+import zlib
+from concurrent.futures import Executor
+from http.cookies import Morsel, SimpleCookie
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Dict,
+ Iterator,
+ Mapping,
+ MutableMapping,
+ Optional,
+ Tuple,
+ Union,
+ cast,
+)
+
+from multidict import CIMultiDict, istr
+
+from . import hdrs, payload
+from .abc import AbstractStreamWriter
+from .helpers import (
+ ETAG_ANY,
+ PY_38,
+ QUOTED_ETAG_RE,
+ ETag,
+ HeadersMixin,
+ parse_http_date,
+ rfc822_formatted_time,
+ sentinel,
+ validate_etag_value,
+)
+from .http import RESPONSES, SERVER_SOFTWARE, HttpVersion10, HttpVersion11
+from .payload import Payload
+from .typedefs import JSONEncoder, LooseHeaders
+
+__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response")
+
+
+if TYPE_CHECKING: # pragma: no cover
+ from .web_request import BaseRequest
+
+ BaseClass = MutableMapping[str, Any]
+else:
+ BaseClass = collections.abc.MutableMapping
+
+
+if not PY_38:
+ # allow samesite to be used in python < 3.8
+ # already permitted in python 3.8, see https://bugs.python.org/issue29613
+ Morsel._reserved["samesite"] = "SameSite" # type: ignore[attr-defined]
+
+
+class ContentCoding(enum.Enum):
+ # The content codings that we have support for.
+ #
+ # Additional registered codings are listed at:
+ # https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding
+ deflate = "deflate"
+ gzip = "gzip"
+ identity = "identity"
+
+
+############################################################
+# HTTP Response classes
+############################################################
+
+
+class StreamResponse(BaseClass, HeadersMixin):
+
+ _length_check = True
+
+ def __init__(
+ self,
+ *,
+ status: int = 200,
+ reason: Optional[str] = None,
+ headers: Optional[LooseHeaders] = None,
+ ) -> None:
+ self._body = None
+ self._keep_alive = None # type: Optional[bool]
+ self._chunked = False
+ self._compression = False
+ self._compression_force = None # type: Optional[ContentCoding]
+ self._cookies = SimpleCookie() # type: SimpleCookie[str]
+
+ self._req = None # type: Optional[BaseRequest]
+ self._payload_writer = None # type: Optional[AbstractStreamWriter]
+ self._eof_sent = False
+ self._body_length = 0
+ self._state = {} # type: Dict[str, Any]
+
+ if headers is not None:
+ self._headers = CIMultiDict(headers) # type: CIMultiDict[str]
+ else:
+ self._headers = CIMultiDict()
+
+ self.set_status(status, reason)
+
+ @property
+ def prepared(self) -> bool:
+ return self._payload_writer is not None
+
+ @property
+ def task(self) -> "Optional[asyncio.Task[None]]":
+ if self._req:
+ return self._req.task
+ else:
+ return None
+
+ @property
+ def status(self) -> int:
+ return self._status
+
+ @property
+ def chunked(self) -> bool:
+ return self._chunked
+
+ @property
+ def compression(self) -> bool:
+ return self._compression
+
+ @property
+ def reason(self) -> str:
+ return self._reason
+
+ def set_status(
+ self,
+ status: int,
+ reason: Optional[str] = None,
+ _RESPONSES: Mapping[int, Tuple[str, str]] = RESPONSES,
+ ) -> None:
+ assert not self.prepared, (
+ "Cannot change the response status code after " "the headers have been sent"
+ )
+ self._status = int(status)
+ if reason is None:
+ try:
+ reason = _RESPONSES[self._status][0]
+ except Exception:
+ reason = ""
+ self._reason = reason
+
+ @property
+ def keep_alive(self) -> Optional[bool]:
+ return self._keep_alive
+
+ def force_close(self) -> None:
+ self._keep_alive = False
+
+ @property
+ def body_length(self) -> int:
+ return self._body_length
+
+ @property
+ def output_length(self) -> int:
+ warnings.warn("output_length is deprecated", DeprecationWarning)
+ assert self._payload_writer
+ return self._payload_writer.buffer_size
+
+ def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None:
+ """Enables automatic chunked transfer encoding."""
+ self._chunked = True
+
+ if hdrs.CONTENT_LENGTH in self._headers:
+ raise RuntimeError(
+ "You can't enable chunked encoding when " "a content length is set"
+ )
+ if chunk_size is not None:
+ warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)
+
+ def enable_compression(
+ self, force: Optional[Union[bool, ContentCoding]] = None
+ ) -> None:
+ """Enables response compression encoding."""
+ # Backwards compatibility for when force was a bool <0.17.
+ if type(force) == bool:
+ force = ContentCoding.deflate if force else ContentCoding.identity
+ warnings.warn(
+ "Using boolean for force is deprecated #3318", DeprecationWarning
+ )
+ elif force is not None:
+ assert isinstance(force, ContentCoding), (
+ "force should one of " "None, bool or " "ContentEncoding"
+ )
+
+ self._compression = True
+ self._compression_force = force
+
+ @property
+ def headers(self) -> "CIMultiDict[str]":
+ return self._headers
+
+ @property
+ def cookies(self) -> "SimpleCookie[str]":
+ return self._cookies
+
+ def set_cookie(
+ self,
+ name: str,
+ value: str,
+ *,
+ expires: Optional[str] = None,
+ domain: Optional[str] = None,
+ max_age: Optional[Union[int, str]] = None,
+ path: str = "/",
+ secure: Optional[bool] = None,
+ httponly: Optional[bool] = None,
+ version: Optional[str] = None,
+ samesite: Optional[str] = None,
+ ) -> None:
+ """Set or update response cookie.
+
+ Sets new cookie or updates existent with new value.
+ Also updates only those params which are not None.
+ """
+ old = self._cookies.get(name)
+ if old is not None and old.coded_value == "":
+ # deleted cookie
+ self._cookies.pop(name, None)
+
+ self._cookies[name] = value
+ c = self._cookies[name]
+
+ if expires is not None:
+ c["expires"] = expires
+ elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT":
+ del c["expires"]
+
+ if domain is not None:
+ c["domain"] = domain
+
+ if max_age is not None:
+ c["max-age"] = str(max_age)
+ elif "max-age" in c:
+ del c["max-age"]
+
+ c["path"] = path
+
+ if secure is not None:
+ c["secure"] = secure
+ if httponly is not None:
+ c["httponly"] = httponly
+ if version is not None:
+ c["version"] = version
+ if samesite is not None:
+ c["samesite"] = samesite
+
+ def del_cookie(
+ self, name: str, *, domain: Optional[str] = None, path: str = "/"
+ ) -> None:
+ """Delete cookie.
+
+ Creates new empty expired cookie.
+ """
+ # TODO: do we need domain/path here?
+ self._cookies.pop(name, None)
+ self.set_cookie(
+ name,
+ "",
+ max_age=0,
+ expires="Thu, 01 Jan 1970 00:00:00 GMT",
+ domain=domain,
+ path=path,
+ )
+
+ @property
+ def content_length(self) -> Optional[int]:
+ # Just a placeholder for adding setter
+ return super().content_length
+
+ @content_length.setter
+ def content_length(self, value: Optional[int]) -> None:
+ if value is not None:
+ value = int(value)
+ if self._chunked:
+ raise RuntimeError(
+ "You can't set content length when " "chunked encoding is enable"
+ )
+ self._headers[hdrs.CONTENT_LENGTH] = str(value)
+ else:
+ self._headers.pop(hdrs.CONTENT_LENGTH, None)
+
+ @property
+ def content_type(self) -> str:
+ # Just a placeholder for adding setter
+ return super().content_type
+
+ @content_type.setter
+ def content_type(self, value: str) -> None:
+ self.content_type # read header values if needed
+ self._content_type = str(value)
+ self._generate_content_type_header()
+
+ @property
+ def charset(self) -> Optional[str]:
+ # Just a placeholder for adding setter
+ return super().charset
+
+ @charset.setter
+ def charset(self, value: Optional[str]) -> None:
+ ctype = self.content_type # read header values if needed
+ if ctype == "application/octet-stream":
+ raise RuntimeError(
+ "Setting charset for application/octet-stream "
+ "doesn't make sense, setup content_type first"
+ )
+ assert self._content_dict is not None
+ if value is None:
+ self._content_dict.pop("charset", None)
+ else:
+ self._content_dict["charset"] = str(value).lower()
+ self._generate_content_type_header()
+
+ @property
+ def last_modified(self) -> Optional[datetime.datetime]:
+ """The value of Last-Modified HTTP header, or None.
+
+ This header is represented as a `datetime` object.
+ """
+ return parse_http_date(self._headers.get(hdrs.LAST_MODIFIED))
+
+ @last_modified.setter
+ def last_modified(
+ self, value: Optional[Union[int, float, datetime.datetime, str]]
+ ) -> None:
+ if value is None:
+ self._headers.pop(hdrs.LAST_MODIFIED, None)
+ elif isinstance(value, (int, float)):
+ self._headers[hdrs.LAST_MODIFIED] = time.strftime(
+ "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value))
+ )
+ elif isinstance(value, datetime.datetime):
+ self._headers[hdrs.LAST_MODIFIED] = time.strftime(
+ "%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple()
+ )
+ elif isinstance(value, str):
+ self._headers[hdrs.LAST_MODIFIED] = value
+
+ @property
+ def etag(self) -> Optional[ETag]:
+ quoted_value = self._headers.get(hdrs.ETAG)
+ if not quoted_value:
+ return None
+ elif quoted_value == ETAG_ANY:
+ return ETag(value=ETAG_ANY)
+ match = QUOTED_ETAG_RE.fullmatch(quoted_value)
+ if not match:
+ return None
+ is_weak, value = match.group(1, 2)
+ return ETag(
+ is_weak=bool(is_weak),
+ value=value,
+ )
+
+ @etag.setter
+ def etag(self, value: Optional[Union[ETag, str]]) -> None:
+ if value is None:
+ self._headers.pop(hdrs.ETAG, None)
+ elif (isinstance(value, str) and value == ETAG_ANY) or (
+ isinstance(value, ETag) and value.value == ETAG_ANY
+ ):
+ self._headers[hdrs.ETAG] = ETAG_ANY
+ elif isinstance(value, str):
+ validate_etag_value(value)
+ self._headers[hdrs.ETAG] = f'"{value}"'
+ elif isinstance(value, ETag) and isinstance(value.value, str):
+ validate_etag_value(value.value)
+ hdr_value = f'W/"{value.value}"' if value.is_weak else f'"{value.value}"'
+ self._headers[hdrs.ETAG] = hdr_value
+ else:
+ raise ValueError(
+ f"Unsupported etag type: {type(value)}. "
+ f"etag must be str, ETag or None"
+ )
+
+ def _generate_content_type_header(
+ self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE
+ ) -> None:
+ assert self._content_dict is not None
+ assert self._content_type is not None
+ params = "; ".join(f"{k}={v}" for k, v in self._content_dict.items())
+ if params:
+ ctype = self._content_type + "; " + params
+ else:
+ ctype = self._content_type
+ self._headers[CONTENT_TYPE] = ctype
+
+ async def _do_start_compression(self, coding: ContentCoding) -> None:
+ if coding != ContentCoding.identity:
+ assert self._payload_writer is not None
+ self._headers[hdrs.CONTENT_ENCODING] = coding.value
+ self._payload_writer.enable_compression(coding.value)
+ # Compressed payload may have different content length,
+ # remove the header
+ self._headers.popall(hdrs.CONTENT_LENGTH, None)
+
+ async def _start_compression(self, request: "BaseRequest") -> None:
+ if self._compression_force:
+ await self._do_start_compression(self._compression_force)
+ else:
+ accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
+ for coding in ContentCoding:
+ if coding.value in accept_encoding:
+ await self._do_start_compression(coding)
+ return
+
+ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
+ if self._eof_sent:
+ return None
+ if self._payload_writer is not None:
+ return self._payload_writer
+
+ return await self._start(request)
+
+ async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
+ self._req = request
+ writer = self._payload_writer = request._payload_writer
+
+ await self._prepare_headers()
+ await request._prepare_hook(self)
+ await self._write_headers()
+
+ return writer
+
+ async def _prepare_headers(self) -> None:
+ request = self._req
+ assert request is not None
+ writer = self._payload_writer
+ assert writer is not None
+ keep_alive = self._keep_alive
+ if keep_alive is None:
+ keep_alive = request.keep_alive
+ self._keep_alive = keep_alive
+
+ version = request.version
+
+ headers = self._headers
+ for cookie in self._cookies.values():
+ value = cookie.output(header="")[1:]
+ headers.add(hdrs.SET_COOKIE, value)
+
+ if self._compression:
+ await self._start_compression(request)
+
+ if self._chunked:
+ if version != HttpVersion11:
+ raise RuntimeError(
+ "Using chunked encoding is forbidden "
+ "for HTTP/{0.major}.{0.minor}".format(request.version)
+ )
+ writer.enable_chunking()
+ headers[hdrs.TRANSFER_ENCODING] = "chunked"
+ if hdrs.CONTENT_LENGTH in headers:
+ del headers[hdrs.CONTENT_LENGTH]
+ elif self._length_check:
+ writer.length = self.content_length
+ if writer.length is None:
+ if version >= HttpVersion11 and self.status != 204:
+ writer.enable_chunking()
+ headers[hdrs.TRANSFER_ENCODING] = "chunked"
+ if hdrs.CONTENT_LENGTH in headers:
+ del headers[hdrs.CONTENT_LENGTH]
+ else:
+ keep_alive = False
+ # HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2
+ # HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4
+ elif version >= HttpVersion11 and self.status in (100, 101, 102, 103, 204):
+ del headers[hdrs.CONTENT_LENGTH]
+
+ if self.status not in (204, 304):
+ headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream")
+ headers.setdefault(hdrs.DATE, rfc822_formatted_time())
+ headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE)
+
+ # connection header
+ if hdrs.CONNECTION not in headers:
+ if keep_alive:
+ if version == HttpVersion10:
+ headers[hdrs.CONNECTION] = "keep-alive"
+ else:
+ if version == HttpVersion11:
+ headers[hdrs.CONNECTION] = "close"
+
+ async def _write_headers(self) -> None:
+ request = self._req
+ assert request is not None
+ writer = self._payload_writer
+ assert writer is not None
+ # status line
+ version = request.version
+ status_line = "HTTP/{}.{} {} {}".format(
+ version[0], version[1], self._status, self._reason
+ )
+ await writer.write_headers(status_line, self._headers)
+
+ async def write(self, data: bytes) -> None:
+ assert isinstance(
+ data, (bytes, bytearray, memoryview)
+ ), "data argument must be byte-ish (%r)" % type(data)
+
+ if self._eof_sent:
+ raise RuntimeError("Cannot call write() after write_eof()")
+ if self._payload_writer is None:
+ raise RuntimeError("Cannot call write() before prepare()")
+
+ await self._payload_writer.write(data)
+
+ async def drain(self) -> None:
+ assert not self._eof_sent, "EOF has already been sent"
+ assert self._payload_writer is not None, "Response has not been started"
+ warnings.warn(
+ "drain method is deprecated, use await resp.write()",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ await self._payload_writer.drain()
+
+ async def write_eof(self, data: bytes = b"") -> None:
+ assert isinstance(
+ data, (bytes, bytearray, memoryview)
+ ), "data argument must be byte-ish (%r)" % type(data)
+
+ if self._eof_sent:
+ return
+
+ assert self._payload_writer is not None, "Response has not been started"
+
+ await self._payload_writer.write_eof(data)
+ self._eof_sent = True
+ self._req = None
+ self._body_length = self._payload_writer.output_size
+ self._payload_writer = None
+
+ def __repr__(self) -> str:
+ if self._eof_sent:
+ info = "eof"
+ elif self.prepared:
+ assert self._req is not None
+ info = f"{self._req.method} {self._req.path} "
+ else:
+ info = "not prepared"
+ return f"<{self.__class__.__name__} {self.reason} {info}>"
+
+ def __getitem__(self, key: str) -> Any:
+ return self._state[key]
+
+ def __setitem__(self, key: str, value: Any) -> None:
+ self._state[key] = value
+
+ def __delitem__(self, key: str) -> None:
+ del self._state[key]
+
+ def __len__(self) -> int:
+ return len(self._state)
+
+ def __iter__(self) -> Iterator[str]:
+ return iter(self._state)
+
+ def __hash__(self) -> int:
+ return hash(id(self))
+
+ def __eq__(self, other: object) -> bool:
+ return self is other
+
+
+class Response(StreamResponse):
+ def __init__(
+ self,
+ *,
+ body: Any = None,
+ status: int = 200,
+ reason: Optional[str] = None,
+ text: Optional[str] = None,
+ headers: Optional[LooseHeaders] = None,
+ content_type: Optional[str] = None,
+ charset: Optional[str] = None,
+ zlib_executor_size: Optional[int] = None,
+ zlib_executor: Optional[Executor] = None,
+ ) -> None:
+ if body is not None and text is not None:
+ raise ValueError("body and text are not allowed together")
+
+ if headers is None:
+ real_headers = CIMultiDict() # type: CIMultiDict[str]
+ elif not isinstance(headers, CIMultiDict):
+ real_headers = CIMultiDict(headers)
+ else:
+ real_headers = headers # = cast('CIMultiDict[str]', headers)
+
+ if content_type is not None and "charset" in content_type:
+ raise ValueError("charset must not be in content_type " "argument")
+
+ if text is not None:
+ if hdrs.CONTENT_TYPE in real_headers:
+ if content_type or charset:
+ raise ValueError(
+ "passing both Content-Type header and "
+ "content_type or charset params "
+ "is forbidden"
+ )
+ else:
+ # fast path for filling headers
+ if not isinstance(text, str):
+ raise TypeError("text argument must be str (%r)" % type(text))
+ if content_type is None:
+ content_type = "text/plain"
+ if charset is None:
+ charset = "utf-8"
+ real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset
+ body = text.encode(charset)
+ text = None
+ else:
+ if hdrs.CONTENT_TYPE in real_headers:
+ if content_type is not None or charset is not None:
+ raise ValueError(
+ "passing both Content-Type header and "
+ "content_type or charset params "
+ "is forbidden"
+ )
+ else:
+ if content_type is not None:
+ if charset is not None:
+ content_type += "; charset=" + charset
+ real_headers[hdrs.CONTENT_TYPE] = content_type
+
+ super().__init__(status=status, reason=reason, headers=real_headers)
+
+ if text is not None:
+ self.text = text
+ else:
+ self.body = body
+
+ self._compressed_body = None # type: Optional[bytes]
+ self._zlib_executor_size = zlib_executor_size
+ self._zlib_executor = zlib_executor
+
+ @property
+ def body(self) -> Optional[Union[bytes, Payload]]:
+ return self._body
+
+ @body.setter
+ def body(
+ self,
+ body: bytes,
+ CONTENT_TYPE: istr = hdrs.CONTENT_TYPE,
+ CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
+ ) -> None:
+ if body is None:
+ self._body = None # type: Optional[bytes]
+ self._body_payload = False # type: bool
+ elif isinstance(body, (bytes, bytearray)):
+ self._body = body
+ self._body_payload = False
+ else:
+ try:
+ self._body = body = payload.PAYLOAD_REGISTRY.get(body)
+ except payload.LookupError:
+ raise ValueError("Unsupported body type %r" % type(body))
+
+ self._body_payload = True
+
+ headers = self._headers
+
+ # set content-length header if needed
+ if not self._chunked and CONTENT_LENGTH not in headers:
+ size = body.size
+ if size is not None:
+ headers[CONTENT_LENGTH] = str(size)
+
+ # set content-type
+ if CONTENT_TYPE not in headers:
+ headers[CONTENT_TYPE] = body.content_type
+
+ # copy payload headers
+ if body.headers:
+ for (key, value) in body.headers.items():
+ if key not in headers:
+ headers[key] = value
+
+ self._compressed_body = None
+
+ @property
+ def text(self) -> Optional[str]:
+ if self._body is None:
+ return None
+ return self._body.decode(self.charset or "utf-8")
+
+ @text.setter
+ def text(self, text: str) -> None:
+ assert text is None or isinstance(
+ text, str
+ ), "text argument must be str (%r)" % type(text)
+
+ if self.content_type == "application/octet-stream":
+ self.content_type = "text/plain"
+ if self.charset is None:
+ self.charset = "utf-8"
+
+ self._body = text.encode(self.charset)
+ self._body_payload = False
+ self._compressed_body = None
+
+ @property
+ def content_length(self) -> Optional[int]:
+ if self._chunked:
+ return None
+
+ if hdrs.CONTENT_LENGTH in self._headers:
+ return super().content_length
+
+ if self._compressed_body is not None:
+ # Return length of the compressed body
+ return len(self._compressed_body)
+ elif self._body_payload:
+ # A payload without content length, or a compressed payload
+ return None
+ elif self._body is not None:
+ return len(self._body)
+ else:
+ return 0
+
+ @content_length.setter
+ def content_length(self, value: Optional[int]) -> None:
+ raise RuntimeError("Content length is set automatically")
+
+ async def write_eof(self, data: bytes = b"") -> None:
+ if self._eof_sent:
+ return
+ if self._compressed_body is None:
+ body = self._body # type: Optional[Union[bytes, Payload]]
+ else:
+ body = self._compressed_body
+ assert not data, f"data arg is not supported, got {data!r}"
+ assert self._req is not None
+ assert self._payload_writer is not None
+ if body is not None:
+ if self._req._method == hdrs.METH_HEAD or self._status in [204, 304]:
+ await super().write_eof()
+ elif self._body_payload:
+ payload = cast(Payload, body)
+ await payload.write(self._payload_writer)
+ await super().write_eof()
+ else:
+ await super().write_eof(cast(bytes, body))
+ else:
+ await super().write_eof()
+
+ async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
+ if not self._chunked and hdrs.CONTENT_LENGTH not in self._headers:
+ if not self._body_payload:
+ if self._body is not None:
+ self._headers[hdrs.CONTENT_LENGTH] = str(len(self._body))
+ else:
+ self._headers[hdrs.CONTENT_LENGTH] = "0"
+
+ return await super()._start(request)
+
+ def _compress_body(self, zlib_mode: int) -> None:
+ assert zlib_mode > 0
+ compressobj = zlib.compressobj(wbits=zlib_mode)
+ body_in = self._body
+ assert body_in is not None
+ self._compressed_body = compressobj.compress(body_in) + compressobj.flush()
+
+ async def _do_start_compression(self, coding: ContentCoding) -> None:
+ if self._body_payload or self._chunked:
+ return await super()._do_start_compression(coding)
+
+ if coding != ContentCoding.identity:
+ # Instead of using _payload_writer.enable_compression,
+ # compress the whole body
+ zlib_mode = (
+ 16 + zlib.MAX_WBITS if coding == ContentCoding.gzip else zlib.MAX_WBITS
+ )
+ body_in = self._body
+ assert body_in is not None
+ if (
+ self._zlib_executor_size is not None
+ and len(body_in) > self._zlib_executor_size
+ ):
+ await asyncio.get_event_loop().run_in_executor(
+ self._zlib_executor, self._compress_body, zlib_mode
+ )
+ else:
+ self._compress_body(zlib_mode)
+
+ body_out = self._compressed_body
+ assert body_out is not None
+
+ self._headers[hdrs.CONTENT_ENCODING] = coding.value
+ self._headers[hdrs.CONTENT_LENGTH] = str(len(body_out))
+
+
def json_response(
    data: Any = sentinel,
    *,
    text: Optional[str] = None,
    body: Optional[bytes] = None,
    status: int = 200,
    reason: Optional[str] = None,
    headers: Optional[LooseHeaders] = None,
    content_type: str = "application/json",
    dumps: JSONEncoder = json.dumps,
) -> Response:
    """Build a JSON ``Response``.

    ``data`` is serialized with ``dumps``; alternatively a pre-serialized
    ``text`` or ``body`` may be given, but not together with ``data``.
    """
    if data is not sentinel:
        if text or body:
            raise ValueError("only one of data, text, or body should be specified")
        text = dumps(data)
    return Response(
        text=text,
        body=body,
        status=status,
        reason=reason,
        headers=headers,
        content_type=content_type,
    )
diff --git a/contrib/python/aiohttp/aiohttp/web_routedef.py b/contrib/python/aiohttp/aiohttp/web_routedef.py
new file mode 100644
index 0000000000..671e5c7f46
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_routedef.py
@@ -0,0 +1,213 @@
+import abc
+import os # noqa
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Dict,
+ Iterator,
+ List,
+ Optional,
+ Sequence,
+ Type,
+ Union,
+ overload,
+)
+
+import attr
+
+from . import hdrs
+from .abc import AbstractView
+from .typedefs import Handler, PathLike
+
+if TYPE_CHECKING: # pragma: no cover
+ from .web_request import Request
+ from .web_response import StreamResponse
+ from .web_urldispatcher import AbstractRoute, UrlDispatcher
+else:
+ Request = StreamResponse = UrlDispatcher = AbstractRoute = None
+
+
+__all__ = (
+ "AbstractRouteDef",
+ "RouteDef",
+ "StaticDef",
+ "RouteTableDef",
+ "head",
+ "options",
+ "get",
+ "post",
+ "patch",
+ "put",
+ "delete",
+ "route",
+ "view",
+ "static",
+)
+
+
class AbstractRouteDef(abc.ABC):
    """Interface for route definitions that can register themselves."""

    @abc.abstractmethod
    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        """Add this definition to *router*; return the routes created."""
        pass  # pragma: no cover
+
+
+_HandlerType = Union[Type[AbstractView], Handler]
+
+
@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
class RouteDef(AbstractRouteDef):
    """Immutable description of one route: HTTP method, path and handler."""

    method: str
    path: str
    handler: _HandlerType
    # Extra keyword arguments forwarded verbatim to the router's add_* call.
    kwargs: Dict[str, Any]

    def __repr__(self) -> str:
        info = []
        for name, value in sorted(self.kwargs.items()):
            info.append(f", {name}={value!r}")
        return "<RouteDef {method} {path} -> {handler.__name__!r}" "{info}>".format(
            method=self.method, path=self.path, handler=self.handler, info="".join(info)
        )

    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        # Known methods have dedicated helpers (add_get, add_post, ...);
        # anything else goes through the generic add_route().
        if self.method in hdrs.METH_ALL:
            reg = getattr(router, "add_" + self.method.lower())
            return [reg(self.path, self.handler, **self.kwargs)]
        else:
            return [
                router.add_route(self.method, self.path, self.handler, **self.kwargs)
            ]
+
+
@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
class StaticDef(AbstractRouteDef):
    """Immutable description of a static-files mount."""

    prefix: str
    path: PathLike
    # Extra keyword arguments forwarded verbatim to router.add_static().
    kwargs: Dict[str, Any]

    def __repr__(self) -> str:
        info = []
        for name, value in sorted(self.kwargs.items()):
            info.append(f", {name}={value!r}")
        return "<StaticDef {prefix} -> {path}" "{info}>".format(
            prefix=self.prefix, path=self.path, info="".join(info)
        )

    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        resource = router.add_static(self.prefix, self.path, **self.kwargs)
        # add_static creates one resource with several routes (GET/HEAD).
        routes = resource.get_info().get("routes", {})
        return list(routes.values())
+
+
def route(method: str, path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Create a RouteDef for *method*/*path* served by *handler*."""
    return RouteDef(method, path, handler, kwargs)
+
+
def head(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for ``route(hdrs.METH_HEAD, ...)``."""
    return route(hdrs.METH_HEAD, path, handler, **kwargs)
+
+
def options(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for ``route(hdrs.METH_OPTIONS, ...)``."""
    return route(hdrs.METH_OPTIONS, path, handler, **kwargs)
+
+
def get(
    path: str,
    handler: _HandlerType,
    *,
    name: Optional[str] = None,
    allow_head: bool = True,
    **kwargs: Any,
) -> RouteDef:
    """Create a GET RouteDef; *name*/*allow_head* are forwarded to add_get()."""
    return route(
        hdrs.METH_GET, path, handler, name=name, allow_head=allow_head, **kwargs
    )
+
+
def post(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for ``route(hdrs.METH_POST, ...)``."""
    return route(hdrs.METH_POST, path, handler, **kwargs)
+
+
def put(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for ``route(hdrs.METH_PUT, ...)``."""
    return route(hdrs.METH_PUT, path, handler, **kwargs)
+
+
def patch(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for ``route(hdrs.METH_PATCH, ...)``."""
    return route(hdrs.METH_PATCH, path, handler, **kwargs)
+
+
def delete(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for ``route(hdrs.METH_DELETE, ...)``."""
    return route(hdrs.METH_DELETE, path, handler, **kwargs)
+
+
def view(path: str, handler: Type[AbstractView], **kwargs: Any) -> RouteDef:
    """Route every HTTP method (METH_ANY) of *path* to a class-based view."""
    return route(hdrs.METH_ANY, path, handler, **kwargs)
+
+
def static(prefix: str, path: PathLike, **kwargs: Any) -> StaticDef:
    """Create a StaticDef serving files from *path* under URL *prefix*."""
    return StaticDef(prefix, path, kwargs)
+
+
+_Deco = Callable[[_HandlerType], _HandlerType]
+
+
class RouteTableDef(Sequence[AbstractRouteDef]):
    """Route definition table.

    Collects RouteDef/StaticDef entries via decorator methods so they can
    later be registered on an application's router in bulk.
    """

    def __init__(self) -> None:
        # Registered definitions, preserved in declaration order.
        self._items = []  # type: List[AbstractRouteDef]

    def __repr__(self) -> str:
        return f"<RouteTableDef count={len(self._items)}>"

    @overload
    def __getitem__(self, index: int) -> AbstractRouteDef:
        ...

    @overload
    def __getitem__(self, index: slice) -> List[AbstractRouteDef]:
        ...

    def __getitem__(self, index):  # type: ignore[no-untyped-def]
        return self._items[index]

    def __iter__(self) -> Iterator[AbstractRouteDef]:
        return iter(self._items)

    def __len__(self) -> int:
        return len(self._items)

    def __contains__(self, item: object) -> bool:
        return item in self._items

    def route(self, method: str, path: str, **kwargs: Any) -> _Deco:
        """Return a decorator registering its target for *method* and *path*."""

        def inner(handler: _HandlerType) -> _HandlerType:
            self._items.append(RouteDef(method, path, handler, kwargs))
            return handler

        return inner

    def head(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_HEAD, path, **kwargs)

    def get(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_GET, path, **kwargs)

    def post(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_POST, path, **kwargs)

    def put(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_PUT, path, **kwargs)

    def patch(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_PATCH, path, **kwargs)

    def delete(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_DELETE, path, **kwargs)

    def view(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_ANY, path, **kwargs)

    def static(self, prefix: str, path: PathLike, **kwargs: Any) -> None:
        """Register a static-files mount (plain call, not a decorator)."""
        self._items.append(StaticDef(prefix, path, kwargs))
diff --git a/contrib/python/aiohttp/aiohttp/web_runner.py b/contrib/python/aiohttp/aiohttp/web_runner.py
new file mode 100644
index 0000000000..f4a64bff66
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_runner.py
@@ -0,0 +1,381 @@
+import asyncio
+import signal
+import socket
+from abc import ABC, abstractmethod
+from typing import Any, List, Optional, Set
+
+from yarl import URL
+
+from .web_app import Application
+from .web_server import Server
+
+try:
+ from ssl import SSLContext
+except ImportError:
+ SSLContext = object # type: ignore[misc,assignment]
+
+
+__all__ = (
+ "BaseSite",
+ "TCPSite",
+ "UnixSite",
+ "NamedPipeSite",
+ "SockSite",
+ "BaseRunner",
+ "AppRunner",
+ "ServerRunner",
+ "GracefulExit",
+)
+
+
class GracefulExit(SystemExit):
    # SystemExit subclass raised from signal handlers so the serving loop
    # unwinds with a non-zero exit code.
    code = 1
+
+
def _raise_graceful_exit() -> None:
    """Signal-handler callback: stop serving by raising GracefulExit."""
    raise GracefulExit()
+
+
class BaseSite(ABC):
    """One listening endpoint attached to a BaseRunner."""

    __slots__ = ("_runner", "_shutdown_timeout", "_ssl_context", "_backlog", "_server")

    def __init__(
        self,
        runner: "BaseRunner",
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        """Attach to *runner*; raises RuntimeError if runner.setup() not done."""
        if runner.server is None:
            raise RuntimeError("Call runner.setup() before making a site")
        self._runner = runner
        self._shutdown_timeout = shutdown_timeout
        self._ssl_context = ssl_context
        self._backlog = backlog
        self._server = None  # type: Optional[asyncio.AbstractServer]

    @property
    @abstractmethod
    def name(self) -> str:
        """Human-readable endpoint URL/identifier."""
        pass  # pragma: no cover

    @abstractmethod
    async def start(self) -> None:
        # Subclasses must call super().start() first to register the site.
        self._runner._reg_site(self)

    async def stop(self) -> None:
        """Close the listener, then drain the runner within shutdown_timeout."""
        self._runner._check_site(self)
        if self._server is None:
            self._runner._unreg_site(self)
            return  # not started yet
        self._server.close()
        # named pipes do not have wait_closed property
        if hasattr(self._server, "wait_closed"):
            await self._server.wait_closed()
        # Shut the application down before unregistering so in-flight
        # handlers get up to _shutdown_timeout seconds to finish.
        await self._runner.shutdown()
        assert self._runner.server
        await self._runner.server.shutdown(self._shutdown_timeout)
        self._runner._unreg_site(self)
+
+
class TCPSite(BaseSite):
    """Site listening on a TCP host/port."""

    __slots__ = ("_host", "_port", "_reuse_address", "_reuse_port")

    def __init__(
        self,
        runner: "BaseRunner",
        host: Optional[str] = None,
        port: Optional[int] = None,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
        reuse_address: Optional[bool] = None,
        reuse_port: Optional[bool] = None,
    ) -> None:
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._host = host
        # Default port depends on whether TLS is enabled.
        if port is None:
            port = 8443 if self._ssl_context else 8080
        self._port = port
        self._reuse_address = reuse_address
        self._reuse_port = reuse_port

    @property
    def name(self) -> str:
        scheme = "https" if self._ssl_context else "http"
        # host=None means "bind all interfaces"; displayed as 0.0.0.0.
        host = "0.0.0.0" if self._host is None else self._host
        return str(URL.build(scheme=scheme, host=host, port=self._port))

    async def start(self) -> None:
        await super().start()
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        self._server = await loop.create_server(
            server,
            self._host,
            self._port,
            ssl=self._ssl_context,
            backlog=self._backlog,
            reuse_address=self._reuse_address,
            reuse_port=self._reuse_port,
        )
+
+
class UnixSite(BaseSite):
    """Site listening on a Unix domain socket at *path*."""

    __slots__ = ("_path",)

    def __init__(
        self,
        runner: "BaseRunner",
        path: str,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._path = path

    @property
    def name(self) -> str:
        proto = "https" if self._ssl_context else "http"
        return f"{proto}://unix:{self._path}:"

    async def start(self) -> None:
        await super().start()
        server = self._runner.server
        assert server is not None
        loop = asyncio.get_event_loop()
        self._server = await loop.create_unix_server(
            server,
            self._path,
            ssl=self._ssl_context,
            backlog=self._backlog,
        )
+
+
class NamedPipeSite(BaseSite):
    """Site serving on a Windows named pipe (proactor event loop only)."""

    __slots__ = ("_path",)

    def __init__(
        self, runner: "BaseRunner", path: str, *, shutdown_timeout: float = 60.0
    ) -> None:
        loop = asyncio.get_event_loop()
        if not isinstance(
            loop, asyncio.ProactorEventLoop  # type: ignore[attr-defined]
        ):
            # Fix: the original adjacent string literals concatenated to
            # "...available in proactorloop under windows" (missing space).
            raise RuntimeError(
                "Named Pipes only available in proactor loop under windows"
            )
        super().__init__(runner, shutdown_timeout=shutdown_timeout)
        self._path = path

    @property
    def name(self) -> str:
        return self._path

    async def start(self) -> None:
        await super().start()
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        # start_serving_pipe returns a one-element list of pipe servers.
        _server = await loop.start_serving_pipe(  # type: ignore[attr-defined]
            server, self._path
        )
        self._server = _server[0]
+
+
class SockSite(BaseSite):
    """Site serving on an already-created (and bound) socket object."""

    __slots__ = ("_sock", "_name")

    def __init__(
        self,
        runner: "BaseRunner",
        sock: socket.socket,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._sock = sock
        scheme = "https" if self._ssl_context else "http"
        # Precompute the display name: unix-socket URLs differ from TCP ones.
        if hasattr(socket, "AF_UNIX") and sock.family == socket.AF_UNIX:
            name = f"{scheme}://unix:{sock.getsockname()}:"
        else:
            host, port = sock.getsockname()[:2]
            name = str(URL.build(scheme=scheme, host=host, port=port))
        self._name = name

    @property
    def name(self) -> str:
        return self._name

    async def start(self) -> None:
        await super().start()
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        self._server = await loop.create_server(
            server, sock=self._sock, ssl=self._ssl_context, backlog=self._backlog
        )
+
+
class BaseRunner(ABC):
    """Owns a low-level Server and the set of sites listening for it."""

    __slots__ = ("_handle_signals", "_kwargs", "_server", "_sites")

    def __init__(self, *, handle_signals: bool = False, **kwargs: Any) -> None:
        self._handle_signals = handle_signals
        # Extra kwargs are forwarded by subclasses when creating the server.
        self._kwargs = kwargs
        self._server = None  # type: Optional[Server]
        self._sites = []  # type: List[BaseSite]

    @property
    def server(self) -> Optional[Server]:
        return self._server

    @property
    def addresses(self) -> List[Any]:
        """Socket addresses of every started site (getsockname() values)."""
        ret = []  # type: List[Any]
        for site in self._sites:
            server = site._server
            if server is not None:
                sockets = server.sockets
                if sockets is not None:
                    for sock in sockets:
                        ret.append(sock.getsockname())
        return ret

    @property
    def sites(self) -> Set[BaseSite]:
        return set(self._sites)

    async def setup(self) -> None:
        """Install optional signal handlers and create the server."""
        loop = asyncio.get_event_loop()

        if self._handle_signals:
            try:
                loop.add_signal_handler(signal.SIGINT, _raise_graceful_exit)
                loop.add_signal_handler(signal.SIGTERM, _raise_graceful_exit)
            except NotImplementedError:  # pragma: no cover
                # add_signal_handler is not implemented on Windows
                pass

        self._server = await self._make_server()

    @abstractmethod
    async def shutdown(self) -> None:
        pass  # pragma: no cover

    async def cleanup(self) -> None:
        """Stop all sites, tear down the server, remove signal handlers."""
        loop = asyncio.get_event_loop()

        # The loop over sites is intentional, an exception on gather()
        # leaves self._sites in unpredictable state.
        # The loop guaranties that a site is either deleted on success or
        # still present on failure
        for site in list(self._sites):
            await site.stop()
        await self._cleanup_server()
        self._server = None
        if self._handle_signals:
            try:
                loop.remove_signal_handler(signal.SIGINT)
                loop.remove_signal_handler(signal.SIGTERM)
            except NotImplementedError:  # pragma: no cover
                # remove_signal_handler is not implemented on Windows
                pass

    @abstractmethod
    async def _make_server(self) -> Server:
        pass  # pragma: no cover

    @abstractmethod
    async def _cleanup_server(self) -> None:
        pass  # pragma: no cover

    def _reg_site(self, site: BaseSite) -> None:
        # Registration bookkeeping used by BaseSite.start()/stop().
        if site in self._sites:
            raise RuntimeError(f"Site {site} is already registered in runner {self}")
        self._sites.append(site)

    def _check_site(self, site: BaseSite) -> None:
        if site not in self._sites:
            raise RuntimeError(f"Site {site} is not registered in runner {self}")

    def _unreg_site(self, site: BaseSite) -> None:
        if site not in self._sites:
            raise RuntimeError(f"Site {site} is not registered in runner {self}")
        self._sites.remove(site)
+
+
class ServerRunner(BaseRunner):
    """Low-level web server runner"""

    __slots__ = ("_web_server",)

    def __init__(
        self, web_server: Server, *, handle_signals: bool = False, **kwargs: Any
    ) -> None:
        super().__init__(handle_signals=handle_signals, **kwargs)
        self._web_server = web_server

    async def shutdown(self) -> None:
        # Nothing to drain: the Server instance is managed by the caller.
        pass

    async def _make_server(self) -> Server:
        return self._web_server

    async def _cleanup_server(self) -> None:
        pass
+
+
class AppRunner(BaseRunner):
    """Web Application runner"""

    __slots__ = ("_app",)

    def __init__(
        self, app: Application, *, handle_signals: bool = False, **kwargs: Any
    ) -> None:
        super().__init__(handle_signals=handle_signals, **kwargs)
        if not isinstance(app, Application):
            raise TypeError(
                "The first argument should be web.Application "
                "instance, got {!r}".format(app)
            )
        self._app = app

    @property
    def app(self) -> Application:
        return self._app

    async def shutdown(self) -> None:
        await self._app.shutdown()

    async def _make_server(self) -> Server:
        """Freeze the app, run its startup hooks, and build its handler."""
        loop = asyncio.get_event_loop()
        self._app._set_loop(loop)
        self._app.on_startup.freeze()
        await self._app.startup()
        self._app.freeze()

        return self._app._make_handler(loop=loop, **self._kwargs)

    async def _cleanup_server(self) -> None:
        await self._app.cleanup()
diff --git a/contrib/python/aiohttp/aiohttp/web_server.py b/contrib/python/aiohttp/aiohttp/web_server.py
new file mode 100644
index 0000000000..5657ed9c80
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_server.py
@@ -0,0 +1,62 @@
+"""Low level HTTP server."""
+import asyncio
+from typing import Any, Awaitable, Callable, Dict, List, Optional # noqa
+
+from .abc import AbstractStreamWriter
+from .helpers import get_running_loop
+from .http_parser import RawRequestMessage
+from .streams import StreamReader
+from .web_protocol import RequestHandler, _RequestFactory, _RequestHandler
+from .web_request import BaseRequest
+
+__all__ = ("Server",)
+
+
class Server:
    """Protocol factory for the low-level HTTP server.

    An instance is passed as the protocol factory to
    ``loop.create_server()``; each call to the instance produces a new
    RequestHandler bound to this server.  Live connections are tracked so
    they can be shut down gracefully.
    """

    def __init__(
        self,
        handler: _RequestHandler,
        *,
        request_factory: Optional[_RequestFactory] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        **kwargs: Any
    ) -> None:
        self._loop = get_running_loop(loop)
        # Live connections: protocol instance -> its transport.
        self._connections: Dict[RequestHandler, asyncio.Transport] = {}
        self._kwargs = kwargs
        self.requests_count = 0
        self.request_handler = handler
        self.request_factory = request_factory or self._make_request

    @property
    def connections(self) -> List[RequestHandler]:
        return list(self._connections.keys())

    def connection_made(
        self, handler: RequestHandler, transport: asyncio.Transport
    ) -> None:
        self._connections[handler] = transport

    def connection_lost(
        self, handler: RequestHandler, exc: Optional[BaseException] = None
    ) -> None:
        # pop() with a default replaces the original `in` + `del` pair:
        # one lookup instead of two, same behavior for unknown handlers.
        self._connections.pop(handler, None)

    def _make_request(
        self,
        message: RawRequestMessage,
        payload: StreamReader,
        protocol: RequestHandler,
        writer: AbstractStreamWriter,
        task: "asyncio.Task[None]",
    ) -> BaseRequest:
        """Default request factory producing a BaseRequest."""
        return BaseRequest(message, payload, protocol, writer, task, self._loop)

    async def shutdown(self, timeout: Optional[float] = None) -> None:
        """Gracefully shut down every live connection concurrently."""
        coros = [conn.shutdown(timeout) for conn in self._connections]
        await asyncio.gather(*coros)
        self._connections.clear()

    def __call__(self) -> RequestHandler:
        return RequestHandler(self, loop=self._loop, **self._kwargs)
diff --git a/contrib/python/aiohttp/aiohttp/web_urldispatcher.py b/contrib/python/aiohttp/aiohttp/web_urldispatcher.py
new file mode 100644
index 0000000000..73ec4c05d0
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_urldispatcher.py
@@ -0,0 +1,1220 @@
+import abc
+import asyncio
+import base64
+import hashlib
+import inspect
+import keyword
+import os
+import re
+import warnings
+from contextlib import contextmanager
+from functools import wraps
+from pathlib import Path
+from types import MappingProxyType
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ Container,
+ Dict,
+ Generator,
+ Iterable,
+ Iterator,
+ List,
+ Mapping,
+ Optional,
+ Pattern,
+ Set,
+ Sized,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+
+from yarl import URL, __version__ as yarl_version # type: ignore[attr-defined]
+
+from . import hdrs
+from .abc import AbstractMatchInfo, AbstractRouter, AbstractView
+from .helpers import DEBUG
+from .http import HttpVersion11
+from .typedefs import Final, Handler, PathLike, TypedDict
+from .web_exceptions import (
+ HTTPException,
+ HTTPExpectationFailed,
+ HTTPForbidden,
+ HTTPMethodNotAllowed,
+ HTTPNotFound,
+)
+from .web_fileresponse import FileResponse
+from .web_request import Request
+from .web_response import Response, StreamResponse
+from .web_routedef import AbstractRouteDef
+
+__all__ = (
+ "UrlDispatcher",
+ "UrlMappingMatchInfo",
+ "AbstractResource",
+ "Resource",
+ "PlainResource",
+ "DynamicResource",
+ "AbstractRoute",
+ "ResourceRoute",
+ "StaticResource",
+ "View",
+)
+
+
if TYPE_CHECKING:  # pragma: no cover
    from .web_app import Application

    BaseDict = Dict[str, str]
else:
    BaseDict = dict

# Major/minor of the installed yarl, used for version-dependent quoting.
YARL_VERSION: Final[Tuple[int, ...]] = tuple(map(int, yarl_version.split(".")[:2]))

# Set of characters accepted in an HTTP method name.
HTTP_METHOD_RE: Final[Pattern[str]] = re.compile(
    r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$"
)
# Splits a route path on {variable} placeholders (one nesting level inside).
ROUTE_RE: Final[Pattern[str]] = re.compile(
    r"(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})"
)
PATH_SEP: Final[str] = re.escape("/")


_ExpectHandler = Callable[[Request], Awaitable[None]]
# Resolution result: (match info or None, HTTP methods the resource allows).
_Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]]
+
+
class _InfoDict(TypedDict, total=False):
    """Union of the keys returned by the various get_info() implementations."""

    # PlainResource
    path: str

    # DynamicResource
    formatter: str
    pattern: Pattern[str]

    # StaticResource
    directory: Path
    prefix: str
    routes: Mapping[str, "AbstractRoute"]

    # Sub-application resources
    app: "Application"

    # Presumably domain-based routing resources (defined later in this
    # module) — confirm against the full file.
    domain: str

    rule: "AbstractRuleMatching"

    # SystemRoute / error match info
    http_exception: HTTPException
+
+
class AbstractResource(Sized, Iterable["AbstractRoute"]):
    """Base interface of every routable resource (sized, iterable of routes)."""

    def __init__(self, *, name: Optional[str] = None) -> None:
        self._name = name

    @property
    def name(self) -> Optional[str]:
        return self._name

    @property
    @abc.abstractmethod
    def canonical(self) -> str:
        """Exposes the resource's canonical path.

        For example '/foo/bar/{name}'

        """

    @abc.abstractmethod  # pragma: no branch
    def url_for(self, **kwargs: str) -> URL:
        """Construct url for resource with additional params."""

    @abc.abstractmethod  # pragma: no branch
    async def resolve(self, request: Request) -> _Resolve:
        """Resolve resource.

        Return (UrlMappingMatchInfo, allowed_methods) pair.
        """

    @abc.abstractmethod
    def add_prefix(self, prefix: str) -> None:
        """Add a prefix to processed URLs.

        Required for subapplications support.
        """

    @abc.abstractmethod
    def get_info(self) -> _InfoDict:
        """Return a dict with additional info useful for introspection"""

    def freeze(self) -> None:
        # Default: nothing to finalize; subclasses may normalize state here.
        pass

    @abc.abstractmethod
    def raw_match(self, path: str) -> bool:
        """Perform a raw match against path"""
+
+
class AbstractRoute(abc.ABC):
    """One HTTP-method/handler pair, optionally attached to a resource."""

    def __init__(
        self,
        method: str,
        handler: Union[Handler, Type[AbstractView]],
        *,
        expect_handler: Optional[_ExpectHandler] = None,
        resource: Optional[AbstractResource] = None,
    ) -> None:

        if expect_handler is None:
            expect_handler = _default_expect_handler

        assert asyncio.iscoroutinefunction(
            expect_handler
        ), f"Coroutine is expected, got {expect_handler!r}"

        # Method names are normalized to upper case and validated against
        # the HTTP token grammar.
        method = method.upper()
        if not HTTP_METHOD_RE.match(method):
            raise ValueError(f"{method} is not allowed HTTP method")

        assert callable(handler), handler
        if asyncio.iscoroutinefunction(handler):
            pass
        elif inspect.isgeneratorfunction(handler):
            warnings.warn(
                "Bare generators are deprecated, " "use @coroutine wrapper",
                DeprecationWarning,
            )
        elif isinstance(handler, type) and issubclass(handler, AbstractView):
            pass
        else:
            warnings.warn(
                "Bare functions are deprecated, " "use async ones", DeprecationWarning
            )

            # Legacy plain-function handlers are wrapped so the framework
            # can always await the handler uniformly.
            @wraps(handler)
            async def handler_wrapper(request: Request) -> StreamResponse:
                result = old_handler(request)
                if asyncio.iscoroutine(result):
                    return await result
                return result  # type: ignore[return-value]

            old_handler = handler
            handler = handler_wrapper

        self._method = method
        self._handler = handler
        self._expect_handler = expect_handler
        self._resource = resource

    @property
    def method(self) -> str:
        return self._method

    @property
    def handler(self) -> Handler:
        return self._handler

    @property
    @abc.abstractmethod
    def name(self) -> Optional[str]:
        """Optional route's name, always equals to resource's name."""

    @property
    def resource(self) -> Optional[AbstractResource]:
        return self._resource

    @abc.abstractmethod
    def get_info(self) -> _InfoDict:
        """Return a dict with additional info useful for introspection"""

    @abc.abstractmethod  # pragma: no branch
    def url_for(self, *args: str, **kwargs: str) -> URL:
        """Construct url for route with additional params."""

    async def handle_expect_header(self, request: Request) -> None:
        """Delegate Expect-header processing to the configured handler."""
        await self._expect_handler(request)
+
+
class UrlMappingMatchInfo(BaseDict, AbstractMatchInfo):
    """Dict of captured path variables plus the route that matched."""

    def __init__(self, match_dict: Dict[str, str], route: AbstractRoute):
        super().__init__(match_dict)
        self._route = route
        # Application stack, innermost app first (filled via add_app()).
        self._apps = []  # type: List[Application]
        self._current_app = None  # type: Optional[Application]
        self._frozen = False

    @property
    def handler(self) -> Handler:
        return self._route.handler

    @property
    def route(self) -> AbstractRoute:
        return self._route

    @property
    def expect_handler(self) -> _ExpectHandler:
        return self._route.handle_expect_header

    @property
    def http_exception(self) -> Optional[HTTPException]:
        # Successful matches carry no error; see MatchInfoError override.
        return None

    def get_info(self) -> _InfoDict:  # type: ignore[override]
        return self._route.get_info()

    @property
    def apps(self) -> Tuple["Application", ...]:
        return tuple(self._apps)

    def add_app(self, app: "Application") -> None:
        if self._frozen:
            raise RuntimeError("Cannot change apps stack after .freeze() call")
        # First added app becomes the current one; later apps are outer.
        if self._current_app is None:
            self._current_app = app
        self._apps.insert(0, app)

    @property
    def current_app(self) -> "Application":
        app = self._current_app
        assert app is not None
        return app

    @contextmanager
    def set_current_app(self, app: "Application") -> Generator[None, None, None]:
        """Temporarily switch current_app while handling sub-app middleware."""
        if DEBUG:  # pragma: no cover
            if app not in self._apps:
                raise RuntimeError(
                    "Expected one of the following apps {!r}, got {!r}".format(
                        self._apps, app
                    )
                )
        prev = self._current_app
        self._current_app = app
        try:
            yield
        finally:
            self._current_app = prev

    def freeze(self) -> None:
        self._frozen = True

    def __repr__(self) -> str:
        return f"<MatchInfo {super().__repr__()}: {self._route}>"
+
+
class MatchInfoError(UrlMappingMatchInfo):
    """Match info representing a failed resolution (404/405 etc.).

    Wraps the HTTPException in a SystemRoute (defined elsewhere in this
    module) so the dispatch machinery can treat it like a normal match.
    """

    def __init__(self, http_exception: HTTPException) -> None:
        self._exception = http_exception
        super().__init__({}, SystemRoute(self._exception))

    @property
    def http_exception(self) -> HTTPException:
        return self._exception

    def __repr__(self) -> str:
        return "<MatchInfoError {}: {}>".format(
            self._exception.status, self._exception.reason
        )
+
+
async def _default_expect_handler(request: Request) -> None:
    """Default handler for the Expect header.

    Sends "100 Continue" when the client asks for it; any other Expect
    value on an HTTP/1.1 request is rejected with HTTPExpectationFailed.
    Requests below HTTP/1.1 are ignored.
    """
    if request.version != HttpVersion11:
        return
    expect = request.headers.get(hdrs.EXPECT, "")
    if expect.lower() != "100-continue":
        raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect)
    await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n")
+
+
class Resource(AbstractResource):
    """Base class for resources holding a list of method-specific routes."""

    def __init__(self, *, name: Optional[str] = None) -> None:
        super().__init__(name=name)
        self._routes = []  # type: List[ResourceRoute]

    def add_route(
        self,
        method: str,
        handler: Union[Type[AbstractView], Handler],
        *,
        expect_handler: Optional[_ExpectHandler] = None,
    ) -> "ResourceRoute":
        """Create and register a route; rejects duplicate/shadowed methods."""

        for route_obj in self._routes:
            if route_obj.method == method or route_obj.method == hdrs.METH_ANY:
                raise RuntimeError(
                    "Added route will never be executed, "
                    "method {route.method} is already "
                    "registered".format(route=route_obj)
                )

        route_obj = ResourceRoute(method, handler, self, expect_handler=expect_handler)
        self.register_route(route_obj)
        return route_obj

    def register_route(self, route: "ResourceRoute") -> None:
        assert isinstance(
            route, ResourceRoute
        ), f"Instance of Route class is required, got {route!r}"
        self._routes.append(route)

    async def resolve(self, request: Request) -> _Resolve:
        """Match the path via _match(), then select a route by HTTP method."""
        allowed_methods = set()  # type: Set[str]

        match_dict = self._match(request.rel_url.raw_path)
        if match_dict is None:
            return None, allowed_methods

        for route_obj in self._routes:
            route_method = route_obj.method
            allowed_methods.add(route_method)

            if route_method == request.method or route_method == hdrs.METH_ANY:
                return (UrlMappingMatchInfo(match_dict, route_obj), allowed_methods)
        else:
            # for/else: path matched but no method did — caller may use
            # allowed_methods to build a 405 response.
            return None, allowed_methods

    @abc.abstractmethod
    def _match(self, path: str) -> Optional[Dict[str, str]]:
        pass  # pragma: no cover

    def __len__(self) -> int:
        return len(self._routes)

    def __iter__(self) -> Iterator[AbstractRoute]:
        return iter(self._routes)

    # TODO: implement all abstract methods
+
+
class PlainResource(Resource):
    """Resource matched by exact string comparison against the request path."""

    def __init__(self, path: str, *, name: Optional[str] = None) -> None:
        super().__init__(name=name)
        assert not path or path.startswith("/")
        self._path = path

    @property
    def canonical(self) -> str:
        return self._path

    def freeze(self) -> None:
        # An empty mount path collapses to the root path on freeze.
        self._path = self._path or "/"

    def add_prefix(self, prefix: str) -> None:
        assert prefix.startswith("/")
        assert not prefix.endswith("/")
        assert len(prefix) > 1
        self._path = prefix + self._path

    def _match(self, path: str) -> Optional[Dict[str, str]]:
        # Plain string comparison beats regexp matching by ~10x; a match
        # carries no captured variables, hence the empty dict.
        return {} if path == self._path else None

    def raw_match(self, path: str) -> bool:
        return path == self._path

    def get_info(self) -> _InfoDict:
        return {"path": self._path}

    def url_for(self) -> URL:  # type: ignore[override]
        return URL.build(path=self._path, encoded=True)

    def __repr__(self) -> str:
        label = "'" + self.name + "' " if self.name is not None else ""
        return f"<PlainResource {label} {self._path}>"
+
+
class DynamicResource(Resource):
    """Resource whose path contains ``{var}`` / ``{var:regex}`` placeholders."""

    # {var} with the default character class
    DYN = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*)\}")
    # {var:custom-regex}
    DYN_WITH_RE = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*):(?P<re>.+)\}")
    # Default pattern for a path segment variable: no '{', '}' or '/'.
    GOOD = r"[^{}/]+"

    def __init__(self, path: str, *, name: Optional[str] = None) -> None:
        super().__init__(name=name)
        # Build, in lockstep, a regex (for matching) and a format string
        # (for url_for) from the literal and placeholder parts of *path*.
        pattern = ""
        formatter = ""
        for part in ROUTE_RE.split(path):
            match = self.DYN.fullmatch(part)
            if match:
                pattern += "(?P<{}>{})".format(match.group("var"), self.GOOD)
                formatter += "{" + match.group("var") + "}"
                continue

            match = self.DYN_WITH_RE.fullmatch(part)
            if match:
                pattern += "(?P<{var}>{re})".format(**match.groupdict())
                formatter += "{" + match.group("var") + "}"
                continue

            # Stray braces outside a well-formed placeholder are an error.
            if "{" in part or "}" in part:
                raise ValueError(f"Invalid path '{path}'['{part}']")

            part = _requote_path(part)
            formatter += part
            pattern += re.escape(part)

        try:
            compiled = re.compile(pattern)
        except re.error as exc:
            raise ValueError(f"Bad pattern '{pattern}': {exc}") from None
        assert compiled.pattern.startswith(PATH_SEP)
        assert formatter.startswith("/")
        self._pattern = compiled
        self._formatter = formatter

    @property
    def canonical(self) -> str:
        return self._formatter

    def add_prefix(self, prefix: str) -> None:
        assert prefix.startswith("/")
        assert not prefix.endswith("/")
        assert len(prefix) > 1
        self._pattern = re.compile(re.escape(prefix) + self._pattern.pattern)
        self._formatter = prefix + self._formatter

    def _match(self, path: str) -> Optional[Dict[str, str]]:
        match = self._pattern.fullmatch(path)
        if match is None:
            return None
        else:
            # Captured values arrive percent-encoded; decode for handlers.
            return {
                key: _unquote_path(value) for key, value in match.groupdict().items()
            }

    def raw_match(self, path: str) -> bool:
        return self._formatter == path

    def get_info(self) -> _InfoDict:
        return {"formatter": self._formatter, "pattern": self._pattern}

    def url_for(self, **parts: str) -> URL:
        url = self._formatter.format_map({k: _quote_path(v) for k, v in parts.items()})
        return URL.build(path=url, encoded=True)

    def __repr__(self) -> str:
        name = "'" + self.name + "' " if self.name is not None else ""
        return "<DynamicResource {name} {formatter}>".format(
            name=name, formatter=self._formatter
        )
+
+
class PrefixResource(AbstractResource):
    """Base for resources matching every path under a common prefix."""

    def __init__(self, prefix: str, *, name: Optional[str] = None) -> None:
        assert not prefix or prefix.startswith("/"), prefix
        assert prefix in ("", "/") or not prefix.endswith("/"), prefix
        super().__init__(name=name)
        self._prefix = _requote_path(prefix)
        # Prefix with trailing slash, precomputed for sub-path checks.
        self._prefix2 = self._prefix + "/"

    @property
    def canonical(self) -> str:
        return self._prefix

    def add_prefix(self, prefix: str) -> None:
        assert prefix.startswith("/")
        assert not prefix.endswith("/")
        assert len(prefix) > 1
        self._prefix = prefix + self._prefix
        self._prefix2 = self._prefix + "/"

    def raw_match(self, prefix: str) -> bool:
        # Prefix resources never participate in raw (exact-path) matching.
        return False

    # TODO: impl missing abstract methods
+
+
class StaticResource(PrefixResource):
    """Resource serving files from a local directory under a URL prefix.

    Supports optional HTML directory indexes (*show_index*), symlink
    traversal (*follow_symlinks*) and cache-busting ``?v=<hash>`` query
    parameters (*append_version*).
    """

    # Query-string key used for the cache-busting file-hash parameter.
    VERSION_KEY = "v"

    def __init__(
        self,
        prefix: str,
        directory: PathLike,
        *,
        name: Optional[str] = None,
        expect_handler: Optional[_ExpectHandler] = None,
        chunk_size: int = 256 * 1024,
        show_index: bool = False,
        follow_symlinks: bool = False,
        append_version: bool = False,
    ) -> None:
        super().__init__(prefix, name=name)
        try:
            directory = Path(directory)
            if str(directory).startswith("~"):
                # Expand "~" home-directory shorthand before resolving.
                directory = Path(os.path.expanduser(str(directory)))
            directory = directory.resolve()
            if not directory.is_dir():
                raise ValueError("Not a directory")
        except (FileNotFoundError, ValueError) as error:
            raise ValueError(f"No directory exists at '{directory}'") from error
        self._directory = directory
        self._show_index = show_index
        self._chunk_size = chunk_size
        self._follow_symlinks = follow_symlinks
        self._expect_handler = expect_handler
        self._append_version = append_version

        # Static files are served only via GET/HEAD; an OPTIONS route may be
        # added later through set_options_route().
        self._routes = {
            "GET": ResourceRoute(
                "GET", self._handle, self, expect_handler=expect_handler
            ),
            "HEAD": ResourceRoute(
                "HEAD", self._handle, self, expect_handler=expect_handler
            ),
        }

    def url_for(  # type: ignore[override]
        self,
        *,
        filename: Union[str, Path],
        append_version: Optional[bool] = None,
    ) -> URL:
        """Build a URL for *filename* under this resource's prefix.

        When *append_version* is true (defaults to the constructor setting)
        a ``?v=<base64 sha256>`` query parameter is appended for cache
        busting, but only if the file exists and is reachable.
        """
        if append_version is None:
            append_version = self._append_version
        if isinstance(filename, Path):
            filename = str(filename)
        filename = filename.lstrip("/")

        url = URL.build(path=self._prefix, encoded=True)
        # filename is not encoded
        if YARL_VERSION < (1, 6):
            url = url / filename.replace("%", "%25")
        else:
            url = url / filename

        if append_version:
            try:
                filepath = self._directory.joinpath(filename).resolve()
                if not self._follow_symlinks:
                    # Raises ValueError when filepath escapes the root dir.
                    filepath.relative_to(self._directory)
            except (ValueError, FileNotFoundError):
                # ValueError for case when path point to symlink
                # with follow_symlinks is False
                return url  # relatively safe
            if filepath.is_file():
                # TODO cache file content
                # with file watcher for cache invalidation
                with filepath.open("rb") as f:
                    file_bytes = f.read()
                h = self._get_file_hash(file_bytes)
                url = url.with_query({self.VERSION_KEY: h})
                return url
        return url

    @staticmethod
    def _get_file_hash(byte_array: bytes) -> str:
        """Return the URL-safe base64 SHA-256 digest of *byte_array*."""
        m = hashlib.sha256()  # todo sha256 can be configurable param
        m.update(byte_array)
        b64 = base64.urlsafe_b64encode(m.digest())
        return b64.decode("ascii")

    def get_info(self) -> _InfoDict:
        """Expose the served directory, URL prefix and routes for introspection."""
        return {
            "directory": self._directory,
            "prefix": self._prefix,
            "routes": self._routes,
        }

    def set_options_route(self, handler: Handler) -> None:
        """Register an OPTIONS handler (e.g. for CORS); may be set only once."""
        if "OPTIONS" in self._routes:
            raise RuntimeError("OPTIONS route was set already")
        self._routes["OPTIONS"] = ResourceRoute(
            "OPTIONS", handler, self, expect_handler=self._expect_handler
        )

    async def resolve(self, request: Request) -> _Resolve:
        """Resolve *request* by prefix match; the remainder becomes 'filename'."""
        path = request.rel_url.raw_path
        method = request.method
        allowed_methods = set(self._routes)
        if not path.startswith(self._prefix2) and path != self._prefix:
            return None, set()

        if method not in allowed_methods:
            return None, allowed_methods

        match_dict = {"filename": _unquote_path(path[len(self._prefix) + 1 :])}
        return (UrlMappingMatchInfo(match_dict, self._routes[method]), allowed_methods)

    def __len__(self) -> int:
        return len(self._routes)

    def __iter__(self) -> Iterator[AbstractRoute]:
        return iter(self._routes.values())

    async def _handle(self, request: Request) -> StreamResponse:
        """Serve the requested file or directory index.

        Raises HTTPForbidden for absolute paths and disallowed listings,
        HTTPNotFound for escapes outside the root, missing files and
        unexpected filesystem errors.
        """
        rel_url = request.match_info["filename"]
        try:
            filename = Path(rel_url)
            if filename.anchor:
                # rel_url is an absolute name like
                # /static/\\machine_name\c$ or /static/D:\path
                # where the static dir is totally different
                raise HTTPForbidden()
            filepath = self._directory.joinpath(filename).resolve()
            if not self._follow_symlinks:
                filepath.relative_to(self._directory)
        except (ValueError, FileNotFoundError) as error:
            # relatively safe
            raise HTTPNotFound() from error
        except HTTPForbidden:
            raise
        except Exception as error:
            # perm error or other kind!
            request.app.logger.exception(error)
            raise HTTPNotFound() from error

        # on opening a dir, load its contents if allowed
        if filepath.is_dir():
            if self._show_index:
                try:
                    return Response(
                        text=self._directory_as_html(filepath), content_type="text/html"
                    )
                except PermissionError:
                    raise HTTPForbidden()
            else:
                raise HTTPForbidden()
        elif filepath.is_file():
            return FileResponse(filepath, chunk_size=self._chunk_size)
        else:
            raise HTTPNotFound

    def _directory_as_html(self, filepath: Path) -> str:
        """Render *filepath*'s listing as a minimal HTML index page."""
        # returns directory's index as html

        # sanity check
        assert filepath.is_dir()

        relative_path_to_dir = filepath.relative_to(self._directory).as_posix()
        index_of = f"Index of /{relative_path_to_dir}"
        h1 = f"<h1>{index_of}</h1>"

        index_list = []
        dir_index = filepath.iterdir()
        for _file in sorted(dir_index):
            # show file url as relative to static path
            rel_path = _file.relative_to(self._directory).as_posix()
            file_url = self._prefix + "/" + rel_path

            # if file is a directory, add '/' to the end of the name
            if _file.is_dir():
                file_name = f"{_file.name}/"
            else:
                file_name = _file.name

            index_list.append(
                '<li><a href="{url}">{name}</a></li>'.format(
                    url=file_url, name=file_name
                )
            )
        ul = "<ul>\n{}\n</ul>".format("\n".join(index_list))
        body = f"<body>\n{h1}\n{ul}\n</body>"

        head_str = f"<head>\n<title>{index_of}</title>\n</head>"
        html = f"<html>\n{head_str}\n{body}\n</html>"

        return html

    def __repr__(self) -> str:
        name = "'" + self.name + "'" if self.name is not None else ""
        return "<StaticResource {name} {path} -> {directory!r}>".format(
            name=name, path=self._prefix, directory=self._directory
        )
+
+
class PrefixedSubAppResource(PrefixResource):
    """Resource that mounts a whole sub-application under a URL prefix."""

    def __init__(self, prefix: str, app: "Application") -> None:
        super().__init__(prefix)
        self._app = app
        # Push the mount point into every resource of the sub-application.
        for res in app.router.resources():
            res.add_prefix(prefix)

    def add_prefix(self, prefix: str) -> None:
        super().add_prefix(prefix)
        for res in self._app.router.resources():
            res.add_prefix(prefix)

    def url_for(self, *args: str, **kwargs: str) -> URL:
        raise RuntimeError(".url_for() is not supported by sub-application root")

    def get_info(self) -> _InfoDict:
        return {"app": self._app, "prefix": self._prefix}

    async def resolve(self, request: Request) -> _Resolve:
        raw = request.url.raw_path
        if not raw.startswith(self._prefix2) and raw != self._prefix:
            return None, set()
        # Delegate to the sub-application's own router.
        match_info = await self._app.router.resolve(request)
        match_info.add_app(self._app)
        exc = match_info.http_exception
        if isinstance(exc, HTTPMethodNotAllowed):
            methods = exc.allowed_methods
        else:
            methods = set()
        return match_info, methods

    def __len__(self) -> int:
        return len(self._app.router.routes())

    def __iter__(self) -> Iterator[AbstractRoute]:
        return iter(self._app.router.routes())

    def __repr__(self) -> str:
        return "<PrefixedSubAppResource {prefix} -> {app!r}>".format(
            prefix=self._prefix, app=self._app
        )
+
+
class AbstractRuleMatching(abc.ABC):
    """Interface for request-matching rules used by MatchedSubAppResource."""

    @abc.abstractmethod  # pragma: no branch
    async def match(self, request: Request) -> bool:
        """Return bool if the request satisfies the criteria"""

    @abc.abstractmethod  # pragma: no branch
    def get_info(self) -> _InfoDict:
        """Return a dict with additional info useful for introspection"""

    @property
    @abc.abstractmethod  # pragma: no branch
    def canonical(self) -> str:
        """Return a str"""
+
+
class Domain(AbstractRuleMatching):
    """Rule matching requests whose Host header equals a fixed domain."""

    # One DNS label: 1-63 chars, no leading/trailing hyphen.
    re_part = re.compile(r"(?!-)[a-z\d-]{1,63}(?<!-)")

    def __init__(self, domain: str) -> None:
        super().__init__()
        self._domain = self.validation(domain)

    @property
    def canonical(self) -> str:
        return self._domain

    def validation(self, domain: str) -> str:
        """Normalize *domain* and validate each label; raise on bad input."""
        if not isinstance(domain, str):
            raise TypeError("Domain must be str")
        domain = domain.rstrip(".").lower()
        if not domain:
            raise ValueError("Domain cannot be empty")
        if "://" in domain:
            raise ValueError("Scheme not supported")
        url = URL("http://" + domain)
        assert url.raw_host is not None
        labels = url.raw_host.split(".")
        if not all(self.re_part.fullmatch(label) for label in labels):
            raise ValueError("Domain not valid")
        # The default HTTP port is left out of the canonical form.
        if url.port == 80:
            return url.raw_host
        return f"{url.raw_host}:{url.port}"

    async def match(self, request: Request) -> bool:
        host = request.headers.get(hdrs.HOST)
        if not host:
            return False
        return self.match_domain(host)

    def match_domain(self, host: str) -> bool:
        return host.lower() == self._domain

    def get_info(self) -> _InfoDict:
        return {"domain": self._domain}
+
+
class MaskDomain(Domain):
    """Domain rule supporting '*' wildcards in the host name."""

    re_part = re.compile(r"(?!-)[a-z\d\*-]{1,63}(?<!-)")

    def __init__(self, domain: str) -> None:
        super().__init__(domain)
        # Translate the validated domain into a regular expression:
        # literal dots, '*' becomes '.*'.
        pattern = self._domain.replace(".", r"\.").replace("*", ".*")
        self._mask = re.compile(pattern)

    @property
    def canonical(self) -> str:
        return self._mask.pattern

    def match_domain(self, host: str) -> bool:
        return self._mask.fullmatch(host) is not None
+
+
class MatchedSubAppResource(PrefixedSubAppResource):
    """Sub-app resource selected by an arbitrary rule instead of a prefix."""

    def __init__(self, rule: AbstractRuleMatching, app: "Application") -> None:
        # Deliberately bypass PrefixedSubAppResource.__init__: there is no
        # prefix to propagate into the sub-application's resources.
        AbstractResource.__init__(self)
        self._prefix = ""
        self._app = app
        self._rule = rule

    @property
    def canonical(self) -> str:
        return self._rule.canonical

    def get_info(self) -> _InfoDict:
        return {"app": self._app, "rule": self._rule}

    async def resolve(self, request: Request) -> _Resolve:
        if not await self._rule.match(request):
            return None, set()
        match_info = await self._app.router.resolve(request)
        match_info.add_app(self._app)
        exc = match_info.http_exception
        if isinstance(exc, HTTPMethodNotAllowed):
            methods = exc.allowed_methods
        else:
            methods = set()
        return match_info, methods

    def __repr__(self) -> str:
        return "<MatchedSubAppResource -> {app!r}>".format(app=self._app)
+
+
class ResourceRoute(AbstractRoute):
    """A route with resource"""

    def __init__(
        self,
        method: str,
        handler: Union[Handler, Type[AbstractView]],
        resource: AbstractResource,
        *,
        expect_handler: Optional[_ExpectHandler] = None,
    ) -> None:
        super().__init__(
            method, handler, expect_handler=expect_handler, resource=resource
        )

    def __repr__(self) -> str:
        # Fix: the format string lacked the closing ">", unlike every other
        # __repr__ in this module (DynamicResource, StaticResource, ...).
        return "<ResourceRoute [{method}] {resource} -> {handler!r}>".format(
            method=self.method, resource=self._resource, handler=self.handler
        )

    @property
    def name(self) -> Optional[str]:
        """Name of the owning resource, or None for an unnamed route."""
        if self._resource is None:
            return None
        return self._resource.name

    def url_for(self, *args: str, **kwargs: str) -> URL:
        """Construct url for route with additional params."""
        assert self._resource is not None
        return self._resource.url_for(*args, **kwargs)

    def get_info(self) -> _InfoDict:
        """Delegate introspection info to the owning resource."""
        assert self._resource is not None
        return self._resource.get_info()
+
+
class SystemRoute(AbstractRoute):
    """Synthetic route whose handler raises a fixed HTTP error (404/405)."""

    def __init__(self, http_exception: HTTPException) -> None:
        super().__init__(hdrs.METH_ANY, self._handle)
        self._http_exception = http_exception

    def url_for(self, *args: str, **kwargs: str) -> URL:
        raise RuntimeError(".url_for() is not allowed for SystemRoute")

    @property
    def name(self) -> Optional[str]:
        # System routes are never named.
        return None

    def get_info(self) -> _InfoDict:
        return {"http_exception": self._http_exception}

    async def _handle(self, request: Request) -> StreamResponse:
        raise self._http_exception

    @property
    def status(self) -> int:
        return self._http_exception.status

    @property
    def reason(self) -> str:
        return self._http_exception.reason

    def __repr__(self) -> str:
        return f"<SystemRoute {self.status}: {self.reason}>"
+
+
class View(AbstractView):
    """Class-based handler dispatching on the lowercase HTTP method name."""

    async def _iter(self) -> StreamResponse:
        req_method = self.request.method
        if req_method not in hdrs.METH_ALL:
            self._raise_allowed_methods()
        handler: Callable[[], Awaitable[StreamResponse]] = getattr(
            self, req_method.lower(), None
        )
        if handler is None:
            self._raise_allowed_methods()
        return await handler()

    def __await__(self) -> Generator[Any, None, StreamResponse]:
        return self._iter().__await__()

    def _raise_allowed_methods(self) -> None:
        # Advertise every method for which a lowercase handler exists.
        allowed = {m for m in hdrs.METH_ALL if hasattr(self, m.lower())}
        raise HTTPMethodNotAllowed(self.request.method, allowed)
+
+
class ResourcesView(Sized, Iterable[AbstractResource], Container[AbstractResource]):
    """Read-only sequence view over the router's registered resources."""

    def __init__(self, resources: List[AbstractResource]) -> None:
        self._resources = resources

    def __len__(self) -> int:
        return len(self._resources)

    def __iter__(self) -> Iterator[AbstractResource]:
        yield from self._resources

    def __contains__(self, resource: object) -> bool:
        return resource in self._resources
+
+
class RoutesView(Sized, Iterable[AbstractRoute], Container[AbstractRoute]):
    """Flat, read-only view over every route of every resource."""

    def __init__(self, resources: List[AbstractRoute]):
        # Flatten all routes eagerly; each resource iterates its routes.
        self._routes = [
            route for resource in resources for route in resource
        ]  # type: List[AbstractRoute]

    def __len__(self) -> int:
        return len(self._routes)

    def __iter__(self) -> Iterator[AbstractRoute]:
        yield from self._routes

    def __contains__(self, route: object) -> bool:
        return route in self._routes
+
+
class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]):
    """Default aiohttp router: an ordered list of resources plus a
    name -> resource mapping used for URL construction."""

    # Route names may be sequences of identifiers separated by '.', ':' or '-'.
    NAME_SPLIT_RE = re.compile(r"[.:-]")

    def __init__(self) -> None:
        super().__init__()
        self._resources = []  # type: List[AbstractResource]
        self._named_resources = {}  # type: Dict[str, AbstractResource]

    async def resolve(self, request: Request) -> UrlMappingMatchInfo:
        """Return the match info of the first resource matching *request*.

        Falls back to 405 when some resource matched the path but not the
        method, otherwise to 404.
        """
        method = request.method
        allowed_methods = set()  # type: Set[str]

        for resource in self._resources:
            match_dict, allowed = await resource.resolve(request)
            if match_dict is not None:
                return match_dict
            else:
                # Accumulate methods other resources would have accepted.
                allowed_methods |= allowed

        if allowed_methods:
            return MatchInfoError(HTTPMethodNotAllowed(method, allowed_methods))
        else:
            return MatchInfoError(HTTPNotFound())

    def __iter__(self) -> Iterator[str]:
        # Mapping interface iterates over resource names only.
        return iter(self._named_resources)

    def __len__(self) -> int:
        return len(self._named_resources)

    def __contains__(self, resource: object) -> bool:
        return resource in self._named_resources

    def __getitem__(self, name: str) -> AbstractResource:
        return self._named_resources[name]

    def resources(self) -> ResourcesView:
        """Read-only view over all registered resources (named or not)."""
        return ResourcesView(self._resources)

    def routes(self) -> RoutesView:
        """Read-only view over all routes of all resources."""
        return RoutesView(self._resources)

    def named_resources(self) -> Mapping[str, AbstractResource]:
        """Immutable mapping of route name -> resource."""
        return MappingProxyType(self._named_resources)

    def register_resource(self, resource: AbstractResource) -> None:
        """Append *resource*, validating its (optional) name for uniqueness
        and identifier syntax. Raises RuntimeError once the router is frozen.
        """
        assert isinstance(
            resource, AbstractResource
        ), f"Instance of AbstractResource class is required, got {resource!r}"
        if self.frozen:
            raise RuntimeError("Cannot register a resource into frozen router.")

        name = resource.name

        if name is not None:
            parts = self.NAME_SPLIT_RE.split(name)
            for part in parts:
                if keyword.iskeyword(part):
                    raise ValueError(
                        f"Incorrect route name {name!r}, "
                        "python keywords cannot be used "
                        "for route name"
                    )
                if not part.isidentifier():
                    raise ValueError(
                        "Incorrect route name {!r}, "
                        "the name should be a sequence of "
                        "python identifiers separated "
                        "by dash, dot or column".format(name)
                    )
            if name in self._named_resources:
                raise ValueError(
                    "Duplicate {!r}, "
                    "already handled by {!r}".format(name, self._named_resources[name])
                )
            self._named_resources[name] = resource
        self._resources.append(resource)

    def add_resource(self, path: str, *, name: Optional[str] = None) -> Resource:
        """Create (or reuse) a plain/dynamic resource for *path*."""
        if path and not path.startswith("/"):
            raise ValueError("path should be started with / or be empty")
        # Reuse last added resource if path and name are the same
        if self._resources:
            resource = self._resources[-1]
            if resource.name == name and resource.raw_match(path):
                return cast(Resource, resource)
        # Paths without placeholders get the cheaper PlainResource.
        if not ("{" in path or "}" in path or ROUTE_RE.search(path)):
            resource = PlainResource(_requote_path(path), name=name)
            self.register_resource(resource)
            return resource
        resource = DynamicResource(path, name=name)
        self.register_resource(resource)
        return resource

    def add_route(
        self,
        method: str,
        path: str,
        handler: Union[Handler, Type[AbstractView]],
        *,
        name: Optional[str] = None,
        expect_handler: Optional[_ExpectHandler] = None,
    ) -> AbstractRoute:
        """Register *handler* for *method* on *path*, creating the resource
        if needed."""
        resource = self.add_resource(path, name=name)
        return resource.add_route(method, handler, expect_handler=expect_handler)

    def add_static(
        self,
        prefix: str,
        path: PathLike,
        *,
        name: Optional[str] = None,
        expect_handler: Optional[_ExpectHandler] = None,
        chunk_size: int = 256 * 1024,
        show_index: bool = False,
        follow_symlinks: bool = False,
        append_version: bool = False,
    ) -> AbstractResource:
        """Add static files view.

        prefix - url prefix
        path - folder with files

        """
        assert prefix.startswith("/")
        if prefix.endswith("/"):
            prefix = prefix[:-1]
        resource = StaticResource(
            prefix,
            path,
            name=name,
            expect_handler=expect_handler,
            chunk_size=chunk_size,
            show_index=show_index,
            follow_symlinks=follow_symlinks,
            append_version=append_version,
        )
        self.register_resource(resource)
        return resource

    def add_head(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
        """Shortcut for add_route with method HEAD."""
        return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs)

    def add_options(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
        """Shortcut for add_route with method OPTIONS."""
        return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs)

    def add_get(
        self,
        path: str,
        handler: Handler,
        *,
        name: Optional[str] = None,
        allow_head: bool = True,
        **kwargs: Any,
    ) -> AbstractRoute:
        """Shortcut for add_route with method GET.

        If allow_head is true, another
        route is added allowing head requests to the same endpoint.
        """
        resource = self.add_resource(path, name=name)
        if allow_head:
            resource.add_route(hdrs.METH_HEAD, handler, **kwargs)
        return resource.add_route(hdrs.METH_GET, handler, **kwargs)

    def add_post(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
        """Shortcut for add_route with method POST."""
        return self.add_route(hdrs.METH_POST, path, handler, **kwargs)

    def add_put(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
        """Shortcut for add_route with method PUT."""
        return self.add_route(hdrs.METH_PUT, path, handler, **kwargs)

    def add_patch(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
        """Shortcut for add_route with method PATCH."""
        return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs)

    def add_delete(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
        """Shortcut for add_route with method DELETE."""
        return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs)

    def add_view(
        self, path: str, handler: Type[AbstractView], **kwargs: Any
    ) -> AbstractRoute:
        """Shortcut for add_route with ANY methods for a class-based view."""
        return self.add_route(hdrs.METH_ANY, path, handler, **kwargs)

    def freeze(self) -> None:
        """Freeze the router and all its resources; no further registration."""
        super().freeze()
        for resource in self._resources:
            resource.freeze()

    def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]:
        """Append routes to route table.

        Parameter should be a sequence of RouteDef objects.

        Returns a list of registered AbstractRoute instances.
        """
        registered_routes = []
        for route_def in routes:
            registered_routes.extend(route_def.register(self))
        return registered_routes
+
+
def _quote_path(value: str) -> str:
    """Percent-encode *value* as a URL path component."""
    if YARL_VERSION < (1, 6):
        # Older yarl versions decoded %-sequences unconditionally; double
        # existing "%" so literal percent signs survive the round trip.
        value = value.replace("%", "%25")
    return URL.build(path=value, encoded=False).raw_path
+
+
def _unquote_path(value: str) -> str:
    """Decode %-sequences in an already-encoded URL path."""
    return URL.build(path=value, encoded=True).path
+
+
def _requote_path(value: str) -> str:
    """Quote characters that require quoting while preserving any
    %-sequences already present in *value*."""
    quoted = _quote_path(value)
    # _quote_path doubled pre-existing "%" signs; undo that so the
    # original %-escapes come through untouched.
    if "%" in value:
        return quoted.replace("%25", "%")
    return quoted
diff --git a/contrib/python/aiohttp/aiohttp/web_ws.py b/contrib/python/aiohttp/aiohttp/web_ws.py
new file mode 100644
index 0000000000..16b0a1747c
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_ws.py
@@ -0,0 +1,487 @@
+import asyncio
+import base64
+import binascii
+import hashlib
+import json
+from typing import Any, Iterable, Optional, Tuple, cast
+
+import async_timeout
+import attr
+from multidict import CIMultiDict
+
+from . import hdrs
+from .abc import AbstractStreamWriter
+from .helpers import call_later, set_result
+from .http import (
+ WS_CLOSED_MESSAGE,
+ WS_CLOSING_MESSAGE,
+ WS_KEY,
+ WebSocketError,
+ WebSocketReader,
+ WebSocketWriter,
+ WSCloseCode,
+ WSMessage,
+ WSMsgType as WSMsgType,
+ ws_ext_gen,
+ ws_ext_parse,
+)
+from .log import ws_logger
+from .streams import EofStream, FlowControlDataQueue
+from .typedefs import Final, JSONDecoder, JSONEncoder
+from .web_exceptions import HTTPBadRequest, HTTPException
+from .web_request import BaseRequest
+from .web_response import StreamResponse
+
+__all__ = (
+ "WebSocketResponse",
+ "WebSocketReady",
+ "WSMsgType",
+)
+
+THRESHOLD_CONNLOST_ACCESS: Final[int] = 5
+
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class WebSocketReady:
    # Result of WebSocketResponse.can_prepare(): *ok* is True when the
    # request can be upgraded; *protocol* is the negotiated sub-protocol.
    ok: bool
    protocol: Optional[str]

    def __bool__(self) -> bool:
        # Allows "if ready:" checks directly on the handshake result.
        return self.ok
+
+
class WebSocketResponse(StreamResponse):
    """Server-side websocket response.

    Performs the RFC 6455 opening handshake in prepare(), then exposes
    send_*/receive* APIs plus optional heartbeat (ping/pong) keep-alive.
    """

    # Websocket frames have no Content-Length; skip the length check.
    _length_check = False

    def __init__(
        self,
        *,
        timeout: float = 10.0,
        receive_timeout: Optional[float] = None,
        autoclose: bool = True,
        autoping: bool = True,
        heartbeat: Optional[float] = None,
        protocols: Iterable[str] = (),
        compress: bool = True,
        max_msg_size: int = 4 * 1024 * 1024,
    ) -> None:
        super().__init__(status=101)
        self._protocols = protocols
        self._ws_protocol = None  # type: Optional[str]
        self._writer = None  # type: Optional[WebSocketWriter]
        self._reader = None  # type: Optional[FlowControlDataQueue[WSMessage]]
        self._closed = False
        self._closing = False
        # Counts receive() calls after close; guards against busy loops.
        self._conn_lost = 0
        self._close_code = None  # type: Optional[int]
        self._loop = None  # type: Optional[asyncio.AbstractEventLoop]
        self._waiting = None  # type: Optional[asyncio.Future[bool]]
        self._exception = None  # type: Optional[BaseException]
        self._timeout = timeout
        self._receive_timeout = receive_timeout
        self._autoclose = autoclose
        self._autoping = autoping
        self._heartbeat = heartbeat
        self._heartbeat_cb: Optional[asyncio.TimerHandle] = None
        if heartbeat is not None:
            # Expect a pong within half the heartbeat interval.
            self._pong_heartbeat = heartbeat / 2.0
        self._pong_response_cb: Optional[asyncio.TimerHandle] = None
        self._compress = compress
        self._max_msg_size = max_msg_size

    def _cancel_heartbeat(self) -> None:
        """Cancel both the ping timer and the pending pong-timeout timer."""
        if self._pong_response_cb is not None:
            self._pong_response_cb.cancel()
            self._pong_response_cb = None

        if self._heartbeat_cb is not None:
            self._heartbeat_cb.cancel()
            self._heartbeat_cb = None

    def _reset_heartbeat(self) -> None:
        """Restart the heartbeat timer (called after every received message)."""
        self._cancel_heartbeat()

        if self._heartbeat is not None:
            assert self._loop is not None
            self._heartbeat_cb = call_later(
                self._send_heartbeat, self._heartbeat, self._loop
            )

    def _send_heartbeat(self) -> None:
        """Timer callback: send a ping and arm the pong-timeout timer."""
        if self._heartbeat is not None and not self._closed:
            assert self._loop is not None
            # fire-and-forget a task is not perfect but maybe ok for
            # sending ping. Otherwise we need a long-living heartbeat
            # task in the class.
            self._loop.create_task(self._writer.ping())  # type: ignore[union-attr]

            if self._pong_response_cb is not None:
                self._pong_response_cb.cancel()
            self._pong_response_cb = call_later(
                self._pong_not_received, self._pong_heartbeat, self._loop
            )

    def _pong_not_received(self) -> None:
        """Timer callback: the peer missed its pong deadline; drop the
        transport and record an abnormal closure."""
        if self._req is not None and self._req.transport is not None:
            self._closed = True
            self._close_code = WSCloseCode.ABNORMAL_CLOSURE
            self._exception = asyncio.TimeoutError()
            self._req.transport.close()

    async def prepare(self, request: BaseRequest) -> AbstractStreamWriter:
        """Perform the websocket handshake and start the response.

        Idempotent: returns the existing payload writer if already prepared.
        """
        # make pre-check to don't hide it by do_handshake() exceptions
        if self._payload_writer is not None:
            return self._payload_writer

        protocol, writer = self._pre_start(request)
        payload_writer = await super().prepare(request)
        assert payload_writer is not None
        self._post_start(request, protocol, writer)
        await payload_writer.drain()
        return payload_writer

    def _handshake(
        self, request: BaseRequest
    ) -> Tuple["CIMultiDict[str]", str, bool, bool]:
        """Validate the upgrade request and build the 101 response headers.

        Returns (response headers, chosen sub-protocol, compress window
        bits or 0, notakeover flag). Raises HTTPBadRequest on any
        handshake violation.
        """
        headers = request.headers
        if "websocket" != headers.get(hdrs.UPGRADE, "").lower().strip():
            raise HTTPBadRequest(
                text=(
                    "No WebSocket UPGRADE hdr: {}\n Can "
                    '"Upgrade" only to "WebSocket".'
                ).format(headers.get(hdrs.UPGRADE))
            )

        if "upgrade" not in headers.get(hdrs.CONNECTION, "").lower():
            raise HTTPBadRequest(
                text="No CONNECTION upgrade hdr: {}".format(
                    headers.get(hdrs.CONNECTION)
                )
            )

        # find common sub-protocol between client and server
        protocol = None
        if hdrs.SEC_WEBSOCKET_PROTOCOL in headers:
            req_protocols = [
                str(proto.strip())
                for proto in headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
            ]

            for proto in req_protocols:
                if proto in self._protocols:
                    protocol = proto
                    break
            else:
                # No overlap found: Return no protocol as per spec
                ws_logger.warning(
                    "Client protocols %r don’t overlap server-known ones %r",
                    req_protocols,
                    self._protocols,
                )

        # check supported version
        version = headers.get(hdrs.SEC_WEBSOCKET_VERSION, "")
        if version not in ("13", "8", "7"):
            raise HTTPBadRequest(text=f"Unsupported version: {version}")

        # check client handshake for validity
        key = headers.get(hdrs.SEC_WEBSOCKET_KEY)
        try:
            # Sec-WebSocket-Key must be 16 random bytes, base64-encoded.
            if not key or len(base64.b64decode(key)) != 16:
                raise HTTPBadRequest(text=f"Handshake error: {key!r}")
        except binascii.Error:
            raise HTTPBadRequest(text=f"Handshake error: {key!r}") from None

        # Sec-WebSocket-Accept = base64(sha1(key + magic GUID)).
        accept_val = base64.b64encode(
            hashlib.sha1(key.encode() + WS_KEY).digest()
        ).decode()
        response_headers = CIMultiDict(  # type: ignore[var-annotated]
            {
                hdrs.UPGRADE: "websocket",  # type: ignore[arg-type]
                hdrs.CONNECTION: "upgrade",
                hdrs.SEC_WEBSOCKET_ACCEPT: accept_val,
            }
        )

        notakeover = False
        compress = 0
        if self._compress:
            extensions = headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)
            # Server side always get return with no exception.
            # If something happened, just drop compress extension
            compress, notakeover = ws_ext_parse(extensions, isserver=True)
            if compress:
                enabledext = ws_ext_gen(
                    compress=compress, isserver=True, server_notakeover=notakeover
                )
                response_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = enabledext

        if protocol:
            response_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = protocol
        return (
            response_headers,
            protocol,
            compress,
            notakeover,
        )  # type: ignore[return-value]

    def _pre_start(self, request: BaseRequest) -> Tuple[str, WebSocketWriter]:
        """Run the handshake and create the frame writer (before super().prepare)."""
        self._loop = request._loop

        headers, protocol, compress, notakeover = self._handshake(request)

        self.set_status(101)
        self.headers.update(headers)
        self.force_close()
        self._compress = compress
        transport = request._protocol.transport
        assert transport is not None
        writer = WebSocketWriter(
            request._protocol, transport, compress=compress, notakeover=notakeover
        )

        return protocol, writer

    def _post_start(
        self, request: BaseRequest, protocol: str, writer: WebSocketWriter
    ) -> None:
        """Install the frame reader/parser after the 101 has been started."""
        self._ws_protocol = protocol
        self._writer = writer

        self._reset_heartbeat()

        loop = self._loop
        assert loop is not None
        self._reader = FlowControlDataQueue(request._protocol, 2 ** 16, loop=loop)
        request.protocol.set_parser(
            WebSocketReader(self._reader, self._max_msg_size, compress=self._compress)
        )
        # disable HTTP keepalive for WebSocket
        request.protocol.keep_alive(False)

    def can_prepare(self, request: BaseRequest) -> WebSocketReady:
        """Dry-run the handshake; report whether prepare() would succeed."""
        if self._writer is not None:
            raise RuntimeError("Already started")
        try:
            _, protocol, _, _ = self._handshake(request)
        except HTTPException:
            return WebSocketReady(False, None)
        else:
            return WebSocketReady(True, protocol)

    @property
    def closed(self) -> bool:
        # True once close() completed or the connection was lost.
        return self._closed

    @property
    def close_code(self) -> Optional[int]:
        # WSCloseCode received/recorded on close, None while open.
        return self._close_code

    @property
    def ws_protocol(self) -> Optional[str]:
        # Sub-protocol agreed during the handshake, if any.
        return self._ws_protocol

    @property
    def compress(self) -> bool:
        # After prepare() this holds the negotiated compress setting.
        return self._compress

    def exception(self) -> Optional[BaseException]:
        """Last error recorded while sending/receiving, if any."""
        return self._exception

    async def ping(self, message: bytes = b"") -> None:
        """Send a ping frame; requires prepare() first."""
        if self._writer is None:
            raise RuntimeError("Call .prepare() first")
        await self._writer.ping(message)

    async def pong(self, message: bytes = b"") -> None:
        # unsolicited pong
        if self._writer is None:
            raise RuntimeError("Call .prepare() first")
        await self._writer.pong(message)

    async def send_str(self, data: str, compress: Optional[bool] = None) -> None:
        """Send a TEXT frame; *data* must be str."""
        if self._writer is None:
            raise RuntimeError("Call .prepare() first")
        if not isinstance(data, str):
            raise TypeError("data argument must be str (%r)" % type(data))
        await self._writer.send(data, binary=False, compress=compress)

    async def send_bytes(self, data: bytes, compress: Optional[bool] = None) -> None:
        """Send a BINARY frame; *data* must be bytes-like."""
        if self._writer is None:
            raise RuntimeError("Call .prepare() first")
        if not isinstance(data, (bytes, bytearray, memoryview)):
            raise TypeError("data argument must be byte-ish (%r)" % type(data))
        await self._writer.send(data, binary=True, compress=compress)

    async def send_json(
        self,
        data: Any,
        compress: Optional[bool] = None,
        *,
        dumps: JSONEncoder = json.dumps,
    ) -> None:
        """Serialize *data* with *dumps* and send it as a TEXT frame."""
        await self.send_str(dumps(data), compress=compress)

    async def write_eof(self) -> None:  # type: ignore[override]
        """Finish the response by performing the websocket close handshake."""
        if self._eof_sent:
            return
        if self._payload_writer is None:
            raise RuntimeError("Response has not been started")

        await self.close()
        self._eof_sent = True

    async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
        """Close the websocket.

        Sends a close frame, then waits (up to self._timeout) for the
        peer's close frame. Returns True if this call performed the close,
        False if the connection was already closed.
        """
        if self._writer is None:
            raise RuntimeError("Call .prepare() first")

        self._cancel_heartbeat()
        reader = self._reader
        assert reader is not None

        # we need to break `receive()` cycle first,
        # `close()` may be called from different task
        if self._waiting is not None and not self._closed:
            reader.feed_data(WS_CLOSING_MESSAGE, 0)
            await self._waiting

        if not self._closed:
            self._closed = True
            try:
                await self._writer.close(code, message)
                writer = self._payload_writer
                assert writer is not None
                await writer.drain()
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                raise
            except Exception as exc:
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                self._exception = exc
                return True

            if self._closing:
                # Peer's close frame already seen; handshake is complete.
                return True

            reader = self._reader
            assert reader is not None
            try:
                async with async_timeout.timeout(self._timeout):
                    msg = await reader.read()
            except asyncio.CancelledError:
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                raise
            except Exception as exc:
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                self._exception = exc
                return True

            if msg.type == WSMsgType.CLOSE:
                self._close_code = msg.data
                return True

            # Anything other than a close frame here is a protocol failure.
            self._close_code = WSCloseCode.ABNORMAL_CLOSURE
            self._exception = asyncio.TimeoutError()
            return True
        else:
            return False

    async def receive(self, timeout: Optional[float] = None) -> WSMessage:
        """Receive the next message, transparently answering pings and
        handling close frames.

        Returns WS_CLOSED_MESSAGE / WS_CLOSING_MESSAGE sentinels when the
        connection is (being) closed.
        """
        if self._reader is None:
            raise RuntimeError("Call .prepare() first")

        loop = self._loop
        assert loop is not None
        while True:
            if self._waiting is not None:
                raise RuntimeError("Concurrent call to receive() is not allowed")

            if self._closed:
                self._conn_lost += 1
                if self._conn_lost >= THRESHOLD_CONNLOST_ACCESS:
                    raise RuntimeError("WebSocket connection is closed.")
                return WS_CLOSED_MESSAGE
            elif self._closing:
                return WS_CLOSING_MESSAGE

            try:
                # _waiting lets close() (from another task) wait for this
                # read cycle to finish before tearing down.
                self._waiting = loop.create_future()
                try:
                    async with async_timeout.timeout(timeout or self._receive_timeout):
                        msg = await self._reader.read()
                    self._reset_heartbeat()
                finally:
                    waiter = self._waiting
                    set_result(waiter, True)
                    self._waiting = None
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                raise
            except EofStream:
                self._close_code = WSCloseCode.OK
                await self.close()
                return WSMessage(WSMsgType.CLOSED, None, None)
            except WebSocketError as exc:
                self._close_code = exc.code
                await self.close(code=exc.code)
                return WSMessage(WSMsgType.ERROR, exc, None)
            except Exception as exc:
                self._exception = exc
                self._closing = True
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                await self.close()
                return WSMessage(WSMsgType.ERROR, exc, None)

            if msg.type == WSMsgType.CLOSE:
                self._closing = True
                self._close_code = msg.data
                if not self._closed and self._autoclose:
                    await self.close()
            elif msg.type == WSMsgType.CLOSING:
                self._closing = True
            elif msg.type == WSMsgType.PING and self._autoping:
                await self.pong(msg.data)
                continue
            elif msg.type == WSMsgType.PONG and self._autoping:
                continue

            return msg

    async def receive_str(self, *, timeout: Optional[float] = None) -> str:
        """Receive a message and require it to be TEXT; raise TypeError otherwise."""
        msg = await self.receive(timeout)
        if msg.type != WSMsgType.TEXT:
            raise TypeError(
                "Received message {}:{!r} is not WSMsgType.TEXT".format(
                    msg.type, msg.data
                )
            )
        return cast(str, msg.data)

    async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
        """Receive a message and require it to be BINARY; raise TypeError otherwise."""
        msg = await self.receive(timeout)
        if msg.type != WSMsgType.BINARY:
            raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
        return cast(bytes, msg.data)

    async def receive_json(
        self, *, loads: JSONDecoder = json.loads, timeout: Optional[float] = None
    ) -> Any:
        """Receive a TEXT message and decode it with *loads*."""
        data = await self.receive_str(timeout=timeout)
        return loads(data)

    async def write(self, data: bytes) -> None:
        # Raw body writes make no sense on a websocket; use send_*().
        raise RuntimeError("Cannot call .write() for websocket")

    def __aiter__(self) -> "WebSocketResponse":
        # Supports "async for msg in ws:" iteration.
        return self

    async def __anext__(self) -> WSMessage:
        msg = await self.receive()
        if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
            raise StopAsyncIteration
        return msg

    def _cancel(self, exc: BaseException) -> None:
        # Called by the protocol on connection loss to wake up readers.
        if self._reader is not None:
            self._reader.set_exception(exc)
diff --git a/contrib/python/aiohttp/aiohttp/worker.py b/contrib/python/aiohttp/aiohttp/worker.py
new file mode 100644
index 0000000000..08945bcb4b
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/worker.py
@@ -0,0 +1,269 @@
+"""Async gunicorn worker for aiohttp.web"""
+
+import asyncio
+import os
+import re
+import signal
+import sys
+from types import FrameType
+from typing import Any, Awaitable, Callable, Optional, Union # noqa
+
+from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat
+from gunicorn.workers import base
+
+from aiohttp import web
+
+from .helpers import set_result
+from .web_app import Application
+from .web_log import AccessLogger
+
+try:
+ import ssl
+
+ SSLContext = ssl.SSLContext
+except ImportError: # pragma: no cover
+ ssl = None # type: ignore[assignment]
+ SSLContext = object # type: ignore[misc,assignment]
+
+
+__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker", "GunicornTokioWebWorker")
+
+
+class GunicornWebWorker(base.Worker):  # type: ignore[misc,no-any-unimported]
+    """Gunicorn worker that serves an aiohttp web Application on a
+    per-process asyncio event loop."""
+
+    # Default access-log formats for aiohttp and gunicorn respectively;
+    # used by _get_valid_log_format() to translate between the two.
+    DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT
+    DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default
+
+    def __init__(self, *args: Any, **kw: Any) -> None:  # pragma: no cover
+        super().__init__(*args, **kw)
+
+        # Task wrapping _run(); created in run().
+        self._task = None  # type: Optional[asyncio.Task[None]]
+        self.exit_code = 0
+        # Future used to sleep ~1s between gunicorn heartbeat notifications;
+        # completed early by signal handlers to wake the main loop.
+        self._notify_waiter = None  # type: Optional[asyncio.Future[bool]]
+
+    def init_process(self) -> None:
+        # create new event_loop after fork
+        asyncio.get_event_loop().close()
+
+        self.loop = asyncio.new_event_loop()
+        asyncio.set_event_loop(self.loop)
+
+        super().init_process()
+
+    def run(self) -> None:
+        """Gunicorn entry point: drive _run() to completion, then exit the
+        process with the accumulated exit code."""
+        self._task = self.loop.create_task(self._run())
+
+        try:  # ignore all finalization problems
+            self.loop.run_until_complete(self._task)
+        except Exception:
+            self.log.exception("Exception in gunicorn worker")
+        self.loop.run_until_complete(self.loop.shutdown_asyncgens())
+        self.loop.close()
+
+        sys.exit(self.exit_code)
+
+    async def _run(self) -> None:
+        """Build/obtain an AppRunner, serve on gunicorn's sockets, and loop
+        until shutdown is requested, then clean up."""
+        runner = None
+        # self.wsgi may be an Application, or an async factory returning
+        # either an AppRunner or an Application.
+        if isinstance(self.wsgi, Application):
+            app = self.wsgi
+        elif asyncio.iscoroutinefunction(self.wsgi):
+            wsgi = await self.wsgi()
+            if isinstance(wsgi, web.AppRunner):
+                runner = wsgi
+                app = runner.app
+            else:
+                app = wsgi
+        else:
+            raise RuntimeError(
+                "wsgi app should be either Application or "
+                "async function returning Application, got {}".format(self.wsgi)
+            )
+
+        if runner is None:
+            access_log = self.log.access_log if self.cfg.accesslog else None
+            runner = web.AppRunner(
+                app,
+                logger=self.log,
+                keepalive_timeout=self.cfg.keepalive,
+                access_log=access_log,
+                access_log_format=self._get_valid_log_format(
+                    self.cfg.access_log_format
+                ),
+            )
+        await runner.setup()
+
+        ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None
+
+        # NOTE(review): self-assignment is a no-op; kept byte-identical to
+        # the vendored upstream source.
+        runner = runner
+        assert runner is not None
+        server = runner.server
+        assert server is not None
+        for sock in self.sockets:
+            site = web.SockSite(
+                runner,
+                sock,
+                ssl_context=ctx,
+                # leave 5% of graceful_timeout as headroom for cleanup below
+                shutdown_timeout=self.cfg.graceful_timeout / 100 * 95,
+            )
+            await site.start()
+
+        # If our parent changed then we shut down.
+        pid = os.getpid()
+        try:
+            while self.alive:  # type: ignore[has-type]
+                # Heartbeat so the gunicorn arbiter doesn't kill this worker.
+                self.notify()
+
+                cnt = server.requests_count
+                if self.cfg.max_requests and cnt > self.cfg.max_requests:
+                    self.alive = False
+                    self.log.info("Max requests, shutting down: %s", self)
+
+                elif pid == os.getpid() and self.ppid != os.getppid():
+                    self.alive = False
+                    self.log.info("Parent changed, shutting down: %s", self)
+                else:
+                    await self._wait_next_notify()
+        except BaseException:
+            # NOTE(review): deliberately swallows everything (including
+            # cancellation) so runner.cleanup() below always runs.
+            pass
+
+        await runner.cleanup()
+
+    def _wait_next_notify(self) -> "asyncio.Future[bool]":
+        """Return a future that completes after ~1 second, or sooner when a
+        signal handler calls _notify_waiter_done()."""
+        self._notify_waiter_done()
+
+        loop = self.loop
+        assert loop is not None
+        self._notify_waiter = waiter = loop.create_future()
+        self.loop.call_later(1.0, self._notify_waiter_done, waiter)
+
+        return waiter
+
+    def _notify_waiter_done(
+        self, waiter: Optional["asyncio.Future[bool]"] = None
+    ) -> None:
+        """Complete *waiter* (default: the current notify waiter) with True;
+        safe to call when the waiter is already done or absent."""
+        if waiter is None:
+            waiter = self._notify_waiter
+        if waiter is not None:
+            set_result(waiter, True)
+
+        if waiter is self._notify_waiter:
+            self._notify_waiter = None
+
+    def init_signals(self) -> None:
+        # Set up signals through the event loop API.
+
+        self.loop.add_signal_handler(
+            signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None
+        )
+
+        self.loop.add_signal_handler(
+            signal.SIGTERM, self.handle_exit, signal.SIGTERM, None
+        )
+
+        self.loop.add_signal_handler(
+            signal.SIGINT, self.handle_quit, signal.SIGINT, None
+        )
+
+        self.loop.add_signal_handler(
+            signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None
+        )
+
+        self.loop.add_signal_handler(
+            signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None
+        )
+
+        self.loop.add_signal_handler(
+            signal.SIGABRT, self.handle_abort, signal.SIGABRT, None
+        )
+
+        # Don't let SIGTERM and SIGUSR1 disturb active requests
+        # by interrupting system calls
+        signal.siginterrupt(signal.SIGTERM, False)
+        signal.siginterrupt(signal.SIGUSR1, False)
+        # Reset signals so Gunicorn doesn't swallow subprocess return codes
+        # See: https://github.com/aio-libs/aiohttp/issues/6130
+        if sys.version_info < (3, 8):
+            # Starting from Python 3.8,
+            # the default child watcher is ThreadedChildWatcher.
+            # The watcher doesn't depend on SIGCHLD signal,
+            # there is no need to reset it.
+            signal.signal(signal.SIGCHLD, signal.SIG_DFL)
+
+    def handle_quit(self, sig: int, frame: FrameType) -> None:
+        """Graceful stop: flag the loop to exit and wake the notify waiter."""
+        self.alive = False
+
+        # worker_int callback
+        self.cfg.worker_int(self)
+
+        # wakeup closing process
+        self._notify_waiter_done()
+
+    def handle_abort(self, sig: int, frame: FrameType) -> None:
+        """Hard abort: mark failure (exit code 1) and terminate immediately."""
+        self.alive = False
+        self.exit_code = 1
+        self.cfg.worker_abort(self)
+        sys.exit(1)
+
+    @staticmethod
+    def _create_ssl_context(cfg: Any) -> "SSLContext":
+        """Creates SSLContext instance for usage in asyncio.create_server.
+
+        See ssl.SSLSocket.__init__ for more details.
+        """
+        if ssl is None:  # pragma: no cover
+            raise RuntimeError("SSL is not supported.")
+
+        ctx = ssl.SSLContext(cfg.ssl_version)
+        ctx.load_cert_chain(cfg.certfile, cfg.keyfile)
+        ctx.verify_mode = cfg.cert_reqs
+        if cfg.ca_certs:
+            ctx.load_verify_locations(cfg.ca_certs)
+        if cfg.ciphers:
+            ctx.set_ciphers(cfg.ciphers)
+        return ctx
+
+    def _get_valid_log_format(self, source_format: str) -> str:
+        """Map gunicorn's default access-log format to aiohttp's; reject any
+        other gunicorn-style ``%(name)s`` format with a helpful error."""
+        if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT:
+            return self.DEFAULT_AIOHTTP_LOG_FORMAT
+        elif re.search(r"%\([^\)]+\)", source_format):
+            raise ValueError(
+                "Gunicorn's style options in form of `%(name)s` are not "
+                "supported for the log formatting. Please use aiohttp's "
+                "format specification to configure access log formatting: "
+                "http://docs.aiohttp.org/en/stable/logging.html"
+                "#format-specification"
+            )
+        else:
+            return source_format
+
+
+class GunicornUVLoopWebWorker(GunicornWebWorker):
+    """GunicornWebWorker variant that installs the uvloop event-loop policy
+    before process initialization."""
+
+    def init_process(self) -> None:
+        import uvloop
+
+        # Close any existing event loop before setting a
+        # new policy.
+        asyncio.get_event_loop().close()
+
+        # Setup uvloop policy, so that every
+        # asyncio.get_event_loop() will create an instance
+        # of uvloop event loop.
+        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
+
+        super().init_process()
+
+
+class GunicornTokioWebWorker(GunicornWebWorker):
+    """GunicornWebWorker variant that installs the tokio event-loop policy
+    before process initialization."""
+
+    def init_process(self) -> None:  # pragma: no cover
+        import tokio
+
+        # Close any existing event loop before setting a
+        # new policy.
+        asyncio.get_event_loop().close()
+
+        # Setup tokio policy, so that every
+        # asyncio.get_event_loop() will create an instance
+        # of tokio event loop.
+        asyncio.set_event_loop_policy(tokio.EventLoopPolicy())
+
+        super().init_process()
diff --git a/contrib/python/aiohttp/ya.make b/contrib/python/aiohttp/ya.make
new file mode 100644
index 0000000000..9fd0c0df74
--- /dev/null
+++ b/contrib/python/aiohttp/ya.make
@@ -0,0 +1,101 @@
+# Generated by devtools/yamaker (pypi).
+
+PY3_LIBRARY()
+
+VERSION(3.8.1)
+
+LICENSE(Apache-2.0)
+
+PEERDIR(
+ contrib/python/aiosignal
+ contrib/python/async-timeout
+ contrib/python/attrs
+ contrib/python/charset-normalizer
+ contrib/python/frozenlist
+ contrib/python/multidict
+ contrib/python/yarl
+ contrib/restricted/llhttp
+)
+
+ADDINCL(
+ contrib/restricted/llhttp/include
+ contrib/python/aiohttp/aiohttp
+ FOR cython contrib/python/aiohttp
+)
+
+NO_COMPILER_WARNINGS()
+
+NO_LINT()
+
+NO_CHECK_IMPORTS(
+ aiohttp.pytest_plugin
+ aiohttp.worker
+)
+
+SRCS(
+ aiohttp/_find_header.c
+)
+
+PY_SRCS(
+ TOP_LEVEL
+ aiohttp/__init__.py
+ aiohttp/_helpers.pyi
+ aiohttp/abc.py
+ aiohttp/base_protocol.py
+ aiohttp/client.py
+ aiohttp/client_exceptions.py
+ aiohttp/client_proto.py
+ aiohttp/client_reqrep.py
+ aiohttp/client_ws.py
+ aiohttp/connector.py
+ aiohttp/cookiejar.py
+ aiohttp/formdata.py
+ aiohttp/hdrs.py
+ aiohttp/helpers.py
+ aiohttp/http.py
+ aiohttp/http_exceptions.py
+ aiohttp/http_parser.py
+ aiohttp/http_websocket.py
+ aiohttp/http_writer.py
+ aiohttp/locks.py
+ aiohttp/log.py
+ aiohttp/multipart.py
+ aiohttp/payload.py
+ aiohttp/payload_streamer.py
+ aiohttp/pytest_plugin.py
+ aiohttp/resolver.py
+ aiohttp/streams.py
+ aiohttp/tcp_helpers.py
+ aiohttp/test_utils.py
+ aiohttp/tracing.py
+ aiohttp/typedefs.py
+ aiohttp/web.py
+ aiohttp/web_app.py
+ aiohttp/web_exceptions.py
+ aiohttp/web_fileresponse.py
+ aiohttp/web_log.py
+ aiohttp/web_middlewares.py
+ aiohttp/web_protocol.py
+ aiohttp/web_request.py
+ aiohttp/web_response.py
+ aiohttp/web_routedef.py
+ aiohttp/web_runner.py
+ aiohttp/web_server.py
+ aiohttp/web_urldispatcher.py
+ aiohttp/web_ws.py
+ aiohttp/worker.py
+ CYTHON_C
+ aiohttp/_helpers.pyx
+ aiohttp/_http_parser.pyx
+ aiohttp/_http_writer.pyx
+ aiohttp/_websocket.pyx
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/python/aiohttp/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+ aiohttp/py.typed
+)
+
+END()
diff --git a/contrib/python/aiosignal/.dist-info/METADATA b/contrib/python/aiosignal/.dist-info/METADATA
new file mode 100644
index 0000000000..fc964525f0
--- /dev/null
+++ b/contrib/python/aiosignal/.dist-info/METADATA
@@ -0,0 +1,128 @@
+Metadata-Version: 2.1
+Name: aiosignal
+Version: 1.3.1
+Summary: aiosignal: a list of registered asynchronous callbacks
+Home-page: https://github.com/aio-libs/aiosignal
+Maintainer: aiohttp team <team@aiohttp.org>
+Maintainer-email: team@aiohttp.org
+License: Apache 2.0
+Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby
+Project-URL: CI: GitHub Actions, https://github.com/aio-libs/aiosignal/actions
+Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiosignal
+Project-URL: Docs: RTD, https://docs.aiosignal.org
+Project-URL: GitHub: issues, https://github.com/aio-libs/aiosignal/issues
+Project-URL: GitHub: repo, https://github.com/aio-libs/aiosignal
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Intended Audience :: Developers
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Framework :: AsyncIO
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: frozenlist (>=1.1.0)
+
+=========
+aiosignal
+=========
+
+.. image:: https://github.com/aio-libs/aiosignal/workflows/CI/badge.svg
+ :target: https://github.com/aio-libs/aiosignal/actions?query=workflow%3ACI
+ :alt: GitHub status for master branch
+
+.. image:: https://codecov.io/gh/aio-libs/aiosignal/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/aio-libs/aiosignal
+ :alt: codecov.io status for master branch
+
+.. image:: https://badge.fury.io/py/aiosignal.svg
+ :target: https://pypi.org/project/aiosignal
+ :alt: Latest PyPI package version
+
+.. image:: https://readthedocs.org/projects/aiosignal/badge/?version=latest
+ :target: https://aiosignal.readthedocs.io/
+ :alt: Latest Read The Docs
+
+.. image:: https://img.shields.io/discourse/topics?server=https%3A%2F%2Faio-libs.discourse.group%2F
+ :target: https://aio-libs.discourse.group/
+ :alt: Discourse group for io-libs
+
+.. image:: https://badges.gitter.im/Join%20Chat.svg
+ :target: https://gitter.im/aio-libs/Lobby
+ :alt: Chat on Gitter
+
+Introduction
+============
+
+A project to manage callbacks in `asyncio` projects.
+
+``Signal`` is a list of registered asynchronous callbacks.
+
+The signal's life-cycle has two stages: after creation its content
+could be filled by using standard list operations: ``sig.append()``
+etc.
+
+After you call ``sig.freeze()`` the signal is *frozen*: adding, removing
+and dropping callbacks is forbidden.
+
+The only available operation is calling the previously registered
+callbacks by using ``await sig.send(data)``.
+
+For concrete usage examples see the `Signals
+<https://docs.aiohttp.org/en/stable/web_advanced.html#aiohttp-web-signals>`_
+section of the `Web Server Advanced
+<https://docs.aiohttp.org/en/stable/web_advanced.html>`_ chapter of the `aiohttp
+documentation`_.
+
+
+Installation
+------------
+
+::
+
+ $ pip install aiosignal
+
+The library requires Python 3.7 or newer.
+
+
+Documentation
+=============
+
+https://aiosignal.readthedocs.io/
+
+Communication channels
+======================
+
+*gitter chat* https://gitter.im/aio-libs/Lobby
+
+Requirements
+============
+
+- Python >= 3.7
+- frozenlist >= 1.1.0
+
+License
+=======
+
+``aiosignal`` is offered under the Apache 2 license.
+
+Source code
+===========
+
+The project is hosted on GitHub_
+
+Please file an issue in the `bug tracker
+<https://github.com/aio-libs/aiosignal/issues>`_ if you have found a bug
+or have some suggestions to improve the library.
+
+.. _GitHub: https://github.com/aio-libs/aiosignal
+.. _aiohttp documentation: https://docs.aiohttp.org/
diff --git a/contrib/python/aiosignal/.dist-info/top_level.txt b/contrib/python/aiosignal/.dist-info/top_level.txt
new file mode 100644
index 0000000000..ac6df3afe7
--- /dev/null
+++ b/contrib/python/aiosignal/.dist-info/top_level.txt
@@ -0,0 +1 @@
+aiosignal
diff --git a/contrib/python/aiosignal/LICENSE b/contrib/python/aiosignal/LICENSE
new file mode 100644
index 0000000000..7082a2d5b9
--- /dev/null
+++ b/contrib/python/aiosignal/LICENSE
@@ -0,0 +1,201 @@
+Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2013-2019 Nikolay Kim and Andrew Svetlov
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/contrib/python/aiosignal/README.rst b/contrib/python/aiosignal/README.rst
new file mode 100644
index 0000000000..d21fc96bd6
--- /dev/null
+++ b/contrib/python/aiosignal/README.rst
@@ -0,0 +1,94 @@
+=========
+aiosignal
+=========
+
+.. image:: https://github.com/aio-libs/aiosignal/workflows/CI/badge.svg
+ :target: https://github.com/aio-libs/aiosignal/actions?query=workflow%3ACI
+ :alt: GitHub status for master branch
+
+.. image:: https://codecov.io/gh/aio-libs/aiosignal/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/aio-libs/aiosignal
+ :alt: codecov.io status for master branch
+
+.. image:: https://badge.fury.io/py/aiosignal.svg
+ :target: https://pypi.org/project/aiosignal
+ :alt: Latest PyPI package version
+
+.. image:: https://readthedocs.org/projects/aiosignal/badge/?version=latest
+ :target: https://aiosignal.readthedocs.io/
+ :alt: Latest Read The Docs
+
+.. image:: https://img.shields.io/discourse/topics?server=https%3A%2F%2Faio-libs.discourse.group%2F
+ :target: https://aio-libs.discourse.group/
+ :alt: Discourse group for io-libs
+
+.. image:: https://badges.gitter.im/Join%20Chat.svg
+ :target: https://gitter.im/aio-libs/Lobby
+ :alt: Chat on Gitter
+
+Introduction
+============
+
+A project to manage callbacks in `asyncio` projects.
+
+``Signal`` is a list of registered asynchronous callbacks.
+
+The signal's life-cycle has two stages: after creation its content
+could be filled by using standard list operations: ``sig.append()``
+etc.
+
+After you call ``sig.freeze()`` the signal is *frozen*: adding, removing
+and dropping callbacks is forbidden.
+
+The only available operation is calling the previously registered
+callbacks by using ``await sig.send(data)``.
+
+For concrete usage examples see the `Signals
+<https://docs.aiohttp.org/en/stable/web_advanced.html#aiohttp-web-signals>`_
+section of the `Web Server Advanced
+<https://docs.aiohttp.org/en/stable/web_advanced.html>`_ chapter of the `aiohttp
+documentation`_.
+
+
+Installation
+------------
+
+::
+
+ $ pip install aiosignal
+
+The library requires Python 3.7 or newer.
+
+
+Documentation
+=============
+
+https://aiosignal.readthedocs.io/
+
+Communication channels
+======================
+
+*gitter chat* https://gitter.im/aio-libs/Lobby
+
+Requirements
+============
+
+- Python >= 3.7
+- frozenlist >= 1.1.0
+
+License
+=======
+
+``aiosignal`` is offered under the Apache 2 license.
+
+Source code
+===========
+
+The project is hosted on GitHub_
+
+Please file an issue in the `bug tracker
+<https://github.com/aio-libs/aiosignal/issues>`_ if you have found a bug
+or have some suggestions to improve the library.
+
+.. _GitHub: https://github.com/aio-libs/aiosignal
+.. _aiohttp documentation: https://docs.aiohttp.org/
diff --git a/contrib/python/aiosignal/aiosignal/__init__.py b/contrib/python/aiosignal/aiosignal/__init__.py
new file mode 100644
index 0000000000..3d288e6ede
--- /dev/null
+++ b/contrib/python/aiosignal/aiosignal/__init__.py
@@ -0,0 +1,36 @@
+from frozenlist import FrozenList
+
+__version__ = "1.3.1"
+
+__all__ = ("Signal",)
+
+
+class Signal(FrozenList):
+    """Coroutine-based signal implementation.
+
+    To connect a callback to a signal, use any list method.
+
+    Signals are fired using the send() coroutine, which takes named
+    arguments.
+    """
+
+    __slots__ = ("_owner",)
+
+    def __init__(self, owner):
+        super().__init__()
+        # Owning object, kept only for repr/debugging purposes.
+        self._owner = owner
+
+    def __repr__(self):
+        return "<Signal owner={}, frozen={}, {!r}>".format(
+            self._owner, self.frozen, list(self)
+        )
+
+    async def send(self, *args, **kwargs):
+        """
+        Sends data to all registered receivers.
+
+        The signal must be frozen first; receivers are awaited sequentially
+        in registration order.
+        """
+        if not self.frozen:
+            raise RuntimeError("Cannot send non-frozen signal.")
+
+        for receiver in self:
+            await receiver(*args, **kwargs)  # type: ignore
diff --git a/contrib/python/aiosignal/aiosignal/py.typed b/contrib/python/aiosignal/aiosignal/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/aiosignal/aiosignal/py.typed
diff --git a/contrib/python/aiosignal/tests/conftest.py b/contrib/python/aiosignal/tests/conftest.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/aiosignal/tests/conftest.py
diff --git a/contrib/python/aiosignal/tests/test_signals.py b/contrib/python/aiosignal/tests/test_signals.py
new file mode 100644
index 0000000000..a040a78ae5
--- /dev/null
+++ b/contrib/python/aiosignal/tests/test_signals.py
@@ -0,0 +1,160 @@
+import re
+from unittest import mock
+
+import pytest
+
+from aiosignal import Signal
+
+
+class Owner:
+    """Dummy signal owner with a fixed repr for deterministic assertions."""
+
+    def __repr__(self) -> str:
+        return "<Owner 0xdeadbeef>"
+
+
+@pytest.fixture
+def owner() -> Owner:
+    """Provide a fresh Owner instance per test."""
+    return Owner()
+
+
+@pytest.mark.asyncio
+async def test_add_signal_handler_not_a_callable(owner: Owner) -> None:
+    """Sending to a signal holding a non-callable raises TypeError."""
+    callback = True
+    signal = Signal(owner)
+    signal.append(callback)
+    signal.freeze()
+    with pytest.raises(TypeError):
+        await signal.send()
+
+
+@pytest.mark.asyncio
+async def test_function_signal_dispatch_kwargs(owner: Owner) -> None:
+    """send() forwards keyword arguments to registered coroutine callbacks."""
+    signal = Signal(owner)
+    kwargs = {"foo": 1, "bar": 2}
+
+    callback_mock = mock.Mock()
+
+    async def callback(**kwargs):
+        callback_mock(**kwargs)
+
+    signal.append(callback)
+    signal.freeze()
+
+    await signal.send(**kwargs)
+    callback_mock.assert_called_once_with(**kwargs)
+
+
+@pytest.mark.asyncio
+async def test_function_signal_dispatch_args_kwargs(owner: Owner) -> None:
+    """send() forwards both positional and keyword arguments to callbacks."""
+    signal = Signal(owner)
+    args = {"a", "b"}
+    kwargs = {"foo": 1, "bar": 2}
+
+    callback_mock = mock.Mock()
+
+    async def callback(*args, **kwargs):
+        callback_mock(*args, **kwargs)
+
+    signal.append(callback)
+    signal.freeze()
+
+    await signal.send(*args, **kwargs)
+    callback_mock.assert_called_once_with(*args, **kwargs)
+
+
+@pytest.mark.asyncio
+async def test_non_coroutine(owner: Owner) -> None:
+    """Awaiting a non-coroutine callback's result raises TypeError."""
+    signal = Signal(owner)
+    kwargs = {"foo": 1, "bar": 2}
+
+    callback = mock.Mock()
+
+    signal.append(callback)
+    signal.freeze()
+
+    with pytest.raises(TypeError):
+        await signal.send(**kwargs)
+
+
+def test_setitem(owner: Owner) -> None:
+    """Item assignment replaces a callback on a non-frozen signal."""
+    signal = Signal(owner)
+    m1 = mock.Mock()
+    signal.append(m1)
+    assert signal[0] is m1
+    m2 = mock.Mock()
+    signal[0] = m2
+    assert signal[0] is m2
+
+
+def test_delitem(owner: Owner) -> None:
+    """del removes a callback from a non-frozen signal."""
+    signal = Signal(owner)
+    m1 = mock.Mock()
+    signal.append(m1)
+    assert len(signal) == 1
+    del signal[0]
+    assert len(signal) == 0
+
+
+def test_cannot_append_to_frozen_signal(owner: Owner) -> None:
+    """append() on a frozen signal raises RuntimeError and keeps contents."""
+    signal = Signal(owner)
+    m1 = mock.Mock()
+    m2 = mock.Mock()
+    signal.append(m1)
+    signal.freeze()
+    with pytest.raises(RuntimeError):
+        signal.append(m2)
+
+    assert list(signal) == [m1]
+
+
+def test_cannot_setitem_in_frozen_signal(owner: Owner) -> None:
+    """Item assignment on a frozen signal raises RuntimeError, no mutation."""
+    signal = Signal(owner)
+    m1 = mock.Mock()
+    m2 = mock.Mock()
+    signal.append(m1)
+    signal.freeze()
+    with pytest.raises(RuntimeError):
+        signal[0] = m2
+
+    assert list(signal) == [m1]
+
+
+def test_cannot_delitem_in_frozen_signal(owner: Owner) -> None:
+    """del on a frozen signal raises RuntimeError, no mutation."""
+    signal = Signal(owner)
+    m1 = mock.Mock()
+    signal.append(m1)
+    signal.freeze()
+    with pytest.raises(RuntimeError):
+        del signal[0]
+
+    assert list(signal) == [m1]
+
+
+@pytest.mark.asyncio
+async def test_cannot_send_non_frozen_signal(owner: Owner) -> None:
+    """send() before freeze() raises RuntimeError; no callback is invoked."""
+    signal = Signal(owner)
+
+    callback_mock = mock.Mock()
+
+    async def callback(**kwargs):
+        callback_mock(**kwargs)
+
+    signal.append(callback)
+
+    with pytest.raises(RuntimeError):
+        await signal.send()
+
+    assert not callback_mock.called
+
+
+def test_repr(owner: Owner) -> None:
+    """repr(signal) includes the owner, frozen flag, and callback list."""
+    signal = Signal(owner)
+
+    signal.append(mock.Mock(__repr__=lambda *a: "<callback>"))
+
+    assert (
+        re.match(
+            r"<Signal owner=<Owner 0xdeadbeef>, frozen=False, " r"\[<callback>\]>",
+            repr(signal),
+        )
+        is not None
+    )
diff --git a/contrib/python/aiosignal/tests/ya.make b/contrib/python/aiosignal/tests/ya.make
new file mode 100644
index 0000000000..edba214080
--- /dev/null
+++ b/contrib/python/aiosignal/tests/ya.make
@@ -0,0 +1,14 @@
+PY3TEST()
+
+PEERDIR(
+ contrib/python/aiosignal
+)
+
+TEST_SRCS(
+ conftest.py
+ test_signals.py
+)
+
+NO_LINT()
+
+END()
diff --git a/contrib/python/aiosignal/ya.make b/contrib/python/aiosignal/ya.make
new file mode 100644
index 0000000000..6094942c66
--- /dev/null
+++ b/contrib/python/aiosignal/ya.make
@@ -0,0 +1,32 @@
+# Generated by devtools/yamaker (pypi).
+
+PY3_LIBRARY()
+
+VERSION(1.3.1)
+
+LICENSE(Apache-2.0)
+
+PEERDIR(
+ contrib/python/frozenlist
+)
+
+NO_LINT()
+
+PY_SRCS(
+ TOP_LEVEL
+ aiosignal/__init__.py
+ aiosignal/__init__.pyi
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/python/aiosignal/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+ aiosignal/py.typed
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/contrib/python/async-timeout/.dist-info/METADATA b/contrib/python/async-timeout/.dist-info/METADATA
new file mode 100644
index 0000000000..d8dd6d12d6
--- /dev/null
+++ b/contrib/python/async-timeout/.dist-info/METADATA
@@ -0,0 +1,131 @@
+Metadata-Version: 2.1
+Name: async-timeout
+Version: 4.0.3
+Summary: Timeout context manager for asyncio programs
+Home-page: https://github.com/aio-libs/async-timeout
+Author: Andrew Svetlov <andrew.svetlov@gmail.com>
+Author-email: andrew.svetlov@gmail.com
+License: Apache 2
+Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby
+Project-URL: CI: GitHub Actions, https://github.com/aio-libs/async-timeout/actions
+Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/async-timeout
+Project-URL: GitHub: issues, https://github.com/aio-libs/async-timeout/issues
+Project-URL: GitHub: repo, https://github.com/aio-libs/async-timeout
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Framework :: AsyncIO
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: typing-extensions >=3.6.5 ; python_version < "3.8"
+
+async-timeout
+=============
+.. image:: https://travis-ci.com/aio-libs/async-timeout.svg?branch=master
+ :target: https://travis-ci.com/aio-libs/async-timeout
+.. image:: https://codecov.io/gh/aio-libs/async-timeout/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/aio-libs/async-timeout
+.. image:: https://img.shields.io/pypi/v/async-timeout.svg
+ :target: https://pypi.python.org/pypi/async-timeout
+.. image:: https://badges.gitter.im/Join%20Chat.svg
+ :target: https://gitter.im/aio-libs/Lobby
+ :alt: Chat on Gitter
+
+asyncio-compatible timeout context manager.
+
+
+Usage example
+-------------
+
+
+The context manager is useful in cases when you want to apply timeout
+logic around block of code or in cases when ``asyncio.wait_for()`` is
+not suitable. Also it's much faster than ``asyncio.wait_for()``
+because ``timeout`` doesn't create a new task.
+
+The ``timeout(delay, *, loop=None)`` call returns a context manager
+that cancels a block on *timeout* expiring::
+
+ from async_timeout import timeout
+ async with timeout(1.5):
+ await inner()
+
+1. If ``inner()`` is executed faster than in ``1.5`` seconds nothing
+ happens.
+2. Otherwise ``inner()`` is cancelled internally by sending
+ ``asyncio.CancelledError`` into but ``asyncio.TimeoutError`` is
+ raised outside of context manager scope.
+
+*timeout* parameter could be ``None`` for skipping timeout functionality.
+
+
+Alternatively, ``timeout_at(when)`` can be used for scheduling
+at the absolute time::
+
+ loop = asyncio.get_event_loop()
+ now = loop.time()
+
+ async with timeout_at(now + 1.5):
+ await inner()
+
+
+Please note: it is not POSIX time but a time with
+undefined starting base, e.g. the time of the system power on.
+
+
+Context manager has ``.expired`` property for check if timeout happens
+exactly in context manager::
+
+ async with timeout(1.5) as cm:
+ await inner()
+ print(cm.expired)
+
+The property is ``True`` if ``inner()`` execution is cancelled by
+timeout context manager.
+
+If ``inner()`` call explicitly raises ``TimeoutError`` ``cm.expired``
+is ``False``.
+
+The scheduled deadline time is available as ``.deadline`` property::
+
+ async with timeout(1.5) as cm:
+ cm.deadline
+
+Not finished yet timeout can be rescheduled by ``shift_by()``
+or ``shift_to()`` methods::
+
+ async with timeout(1.5) as cm:
+ cm.shift(1) # add another second on waiting
+ cm.update(loop.time() + 5) # reschedule to now+5 seconds
+
+Rescheduling is forbidden if the timeout is expired or after exit from ``async with``
+code block.
+
+
+Installation
+------------
+
+::
+
+ $ pip install async-timeout
+
+The library is Python 3 only!
+
+
+
+Authors and License
+-------------------
+
+The module is written by Andrew Svetlov.
+
+It's *Apache 2* licensed and freely available.
diff --git a/contrib/python/async-timeout/.dist-info/top_level.txt b/contrib/python/async-timeout/.dist-info/top_level.txt
new file mode 100644
index 0000000000..ad29955ef9
--- /dev/null
+++ b/contrib/python/async-timeout/.dist-info/top_level.txt
@@ -0,0 +1 @@
+async_timeout
diff --git a/contrib/python/async-timeout/LICENSE b/contrib/python/async-timeout/LICENSE
new file mode 100644
index 0000000000..033c86b7a4
--- /dev/null
+++ b/contrib/python/async-timeout/LICENSE
@@ -0,0 +1,13 @@
+Copyright 2016-2020 aio-libs collaboration.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/contrib/python/async-timeout/README.rst b/contrib/python/async-timeout/README.rst
new file mode 100644
index 0000000000..5ed02e4e93
--- /dev/null
+++ b/contrib/python/async-timeout/README.rst
@@ -0,0 +1,100 @@
+async-timeout
+=============
+.. image:: https://travis-ci.com/aio-libs/async-timeout.svg?branch=master
+ :target: https://travis-ci.com/aio-libs/async-timeout
+.. image:: https://codecov.io/gh/aio-libs/async-timeout/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/aio-libs/async-timeout
+.. image:: https://img.shields.io/pypi/v/async-timeout.svg
+ :target: https://pypi.python.org/pypi/async-timeout
+.. image:: https://badges.gitter.im/Join%20Chat.svg
+ :target: https://gitter.im/aio-libs/Lobby
+ :alt: Chat on Gitter
+
+asyncio-compatible timeout context manager.
+
+
+Usage example
+-------------
+
+
+The context manager is useful in cases when you want to apply timeout
+logic around block of code or in cases when ``asyncio.wait_for()`` is
+not suitable. Also it's much faster than ``asyncio.wait_for()``
+because ``timeout`` doesn't create a new task.
+
+The ``timeout(delay, *, loop=None)`` call returns a context manager
+that cancels a block on *timeout* expiring::
+
+ from async_timeout import timeout
+ async with timeout(1.5):
+ await inner()
+
+1. If ``inner()`` is executed faster than in ``1.5`` seconds nothing
+ happens.
+2. Otherwise ``inner()`` is cancelled internally by sending
+ ``asyncio.CancelledError`` into but ``asyncio.TimeoutError`` is
+ raised outside of context manager scope.
+
+*timeout* parameter could be ``None`` for skipping timeout functionality.
+
+
+Alternatively, ``timeout_at(when)`` can be used for scheduling
+at the absolute time::
+
+ loop = asyncio.get_event_loop()
+ now = loop.time()
+
+ async with timeout_at(now + 1.5):
+ await inner()
+
+
+Please note: it is not POSIX time but a time with
+undefined starting base, e.g. the time of the system power on.
+
+
+Context manager has ``.expired`` property for check if timeout happens
+exactly in context manager::
+
+ async with timeout(1.5) as cm:
+ await inner()
+ print(cm.expired)
+
+The property is ``True`` if ``inner()`` execution is cancelled by
+timeout context manager.
+
+If ``inner()`` call explicitly raises ``TimeoutError`` ``cm.expired``
+is ``False``.
+
+The scheduled deadline time is available as ``.deadline`` property::
+
+ async with timeout(1.5) as cm:
+ cm.deadline
+
+Not finished yet timeout can be rescheduled by ``shift_by()``
+or ``shift_to()`` methods::
+
+ async with timeout(1.5) as cm:
+ cm.shift(1) # add another second on waiting
+ cm.update(loop.time() + 5) # reschedule to now+5 seconds
+
+Rescheduling is forbidden if the timeout is expired or after exit from ``async with``
+code block.
+
+
+Installation
+------------
+
+::
+
+ $ pip install async-timeout
+
+The library is Python 3 only!
+
+
+
+Authors and License
+-------------------
+
+The module is written by Andrew Svetlov.
+
+It's *Apache 2* licensed and freely available.
diff --git a/contrib/python/async-timeout/async_timeout/__init__.py b/contrib/python/async-timeout/async_timeout/__init__.py
new file mode 100644
index 0000000000..1ffb069fce
--- /dev/null
+++ b/contrib/python/async-timeout/async_timeout/__init__.py
@@ -0,0 +1,239 @@
+import asyncio
+import enum
+import sys
+import warnings
+from types import TracebackType
+from typing import Optional, Type
+
+
+if sys.version_info >= (3, 8):
+ from typing import final
+else:
+ from typing_extensions import final
+
+
+if sys.version_info >= (3, 11):
+
+ def _uncancel_task(task: "asyncio.Task[object]") -> None:
+ task.uncancel()
+
+else:
+
+ def _uncancel_task(task: "asyncio.Task[object]") -> None:
+ pass
+
+
+__version__ = "4.0.3"
+
+
+__all__ = ("timeout", "timeout_at", "Timeout")
+
+
+def timeout(delay: Optional[float]) -> "Timeout":
+ """timeout context manager.
+
+ Useful in cases when you want to apply timeout logic around block
+ of code or in cases when asyncio.wait_for is not suitable. For example:
+
+ >>> async with timeout(0.001):
+ ... async with aiohttp.get('https://github.com') as r:
+ ... await r.text()
+
+
+ delay - value in seconds or None to disable timeout logic
+ """
+ loop = asyncio.get_running_loop()
+ if delay is not None:
+ deadline = loop.time() + delay # type: Optional[float]
+ else:
+ deadline = None
+ return Timeout(deadline, loop)
+
+
+def timeout_at(deadline: Optional[float]) -> "Timeout":
+ """Schedule the timeout at absolute time.
+
+ deadline argument points on the time in the same clock system
+ as loop.time().
+
+ Please note: it is not POSIX time but a time with
+ undefined starting base, e.g. the time of the system power on.
+
+ >>> async with timeout_at(loop.time() + 10):
+ ... async with aiohttp.get('https://github.com') as r:
+ ... await r.text()
+
+
+ """
+ loop = asyncio.get_running_loop()
+ return Timeout(deadline, loop)
+
+
+class _State(enum.Enum):
+ INIT = "INIT"
+ ENTER = "ENTER"
+ TIMEOUT = "TIMEOUT"
+ EXIT = "EXIT"
+
+
+@final
+class Timeout:
+ # Internal class, please don't instantiate it directly
+ # Use timeout() and timeout_at() public factories instead.
+ #
+ # Implementation note: `async with timeout()` is preferred
+ # over `with timeout()`.
+ # While technically the Timeout class implementation
+ # doesn't need to be async at all,
+ # the `async with` statement explicitly points that
+ # the context manager should be used from async function context.
+ #
+ # This design allows to avoid many silly misusages.
+ #
+ # TimeoutError is raised immediately when scheduled
+ # if the deadline is passed.
+ # The purpose is to time out as soon as possible
+ # without waiting for the next await expression.
+
+ __slots__ = ("_deadline", "_loop", "_state", "_timeout_handler", "_task")
+
+ def __init__(
+ self, deadline: Optional[float], loop: asyncio.AbstractEventLoop
+ ) -> None:
+ self._loop = loop
+ self._state = _State.INIT
+
+ self._task: Optional["asyncio.Task[object]"] = None
+ self._timeout_handler = None # type: Optional[asyncio.Handle]
+ if deadline is None:
+ self._deadline = None # type: Optional[float]
+ else:
+ self.update(deadline)
+
+ def __enter__(self) -> "Timeout":
+ warnings.warn(
+ "with timeout() is deprecated, use async with timeout() instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ self._do_enter()
+ return self
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType],
+ ) -> Optional[bool]:
+ self._do_exit(exc_type)
+ return None
+
+ async def __aenter__(self) -> "Timeout":
+ self._do_enter()
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType],
+ ) -> Optional[bool]:
+ self._do_exit(exc_type)
+ return None
+
+ @property
+ def expired(self) -> bool:
+ """Is timeout expired during execution?"""
+ return self._state == _State.TIMEOUT
+
+ @property
+ def deadline(self) -> Optional[float]:
+ return self._deadline
+
+ def reject(self) -> None:
+ """Reject scheduled timeout if any."""
+ # cancel is maybe better name but
+ # task.cancel() raises CancelledError in asyncio world.
+ if self._state not in (_State.INIT, _State.ENTER):
+ raise RuntimeError(f"invalid state {self._state.value}")
+ self._reject()
+
+ def _reject(self) -> None:
+ self._task = None
+ if self._timeout_handler is not None:
+ self._timeout_handler.cancel()
+ self._timeout_handler = None
+
+ def shift(self, delay: float) -> None:
+ """Advance timeout on delay seconds.
+
+ The delay can be negative.
+
+ Raise RuntimeError if shift is called when deadline is not scheduled
+ """
+ deadline = self._deadline
+ if deadline is None:
+ raise RuntimeError("cannot shift timeout if deadline is not scheduled")
+ self.update(deadline + delay)
+
+ def update(self, deadline: float) -> None:
+ """Set deadline to absolute value.
+
+ deadline argument points on the time in the same clock system
+ as loop.time().
+
+ If new deadline is in the past the timeout is raised immediately.
+
+ Please note: it is not POSIX time but a time with
+ undefined starting base, e.g. the time of the system power on.
+ """
+ if self._state == _State.EXIT:
+ raise RuntimeError("cannot reschedule after exit from context manager")
+ if self._state == _State.TIMEOUT:
+ raise RuntimeError("cannot reschedule expired timeout")
+ if self._timeout_handler is not None:
+ self._timeout_handler.cancel()
+ self._deadline = deadline
+ if self._state != _State.INIT:
+ self._reschedule()
+
+ def _reschedule(self) -> None:
+ assert self._state == _State.ENTER
+ deadline = self._deadline
+ if deadline is None:
+ return
+
+ now = self._loop.time()
+ if self._timeout_handler is not None:
+ self._timeout_handler.cancel()
+
+ self._task = asyncio.current_task()
+ if deadline <= now:
+ self._timeout_handler = self._loop.call_soon(self._on_timeout)
+ else:
+ self._timeout_handler = self._loop.call_at(deadline, self._on_timeout)
+
+ def _do_enter(self) -> None:
+ if self._state != _State.INIT:
+ raise RuntimeError(f"invalid state {self._state.value}")
+ self._state = _State.ENTER
+ self._reschedule()
+
+ def _do_exit(self, exc_type: Optional[Type[BaseException]]) -> None:
+ if exc_type is asyncio.CancelledError and self._state == _State.TIMEOUT:
+ assert self._task is not None
+ _uncancel_task(self._task)
+ self._timeout_handler = None
+ self._task = None
+ raise asyncio.TimeoutError
+ # timeout has not expired
+ self._state = _State.EXIT
+ self._reject()
+ return None
+
+ def _on_timeout(self) -> None:
+ assert self._task is not None
+ self._task.cancel()
+ self._state = _State.TIMEOUT
+ # drop the reference early
+ self._timeout_handler = None
diff --git a/contrib/python/async-timeout/async_timeout/py.typed b/contrib/python/async-timeout/async_timeout/py.typed
new file mode 100644
index 0000000000..3b94f91573
--- /dev/null
+++ b/contrib/python/async-timeout/async_timeout/py.typed
@@ -0,0 +1 @@
+Placeholder
diff --git a/contrib/python/async-timeout/ya.make b/contrib/python/async-timeout/ya.make
new file mode 100644
index 0000000000..6bc2c940a1
--- /dev/null
+++ b/contrib/python/async-timeout/ya.make
@@ -0,0 +1,23 @@
+# Generated by devtools/yamaker (pypi).
+
+PY3_LIBRARY()
+
+VERSION(4.0.3)
+
+LICENSE(Apache-2.0)
+
+NO_LINT()
+
+PY_SRCS(
+ TOP_LEVEL
+ async_timeout/__init__.py
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/python/async-timeout/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+ async_timeout/py.typed
+)
+
+END()
diff --git a/contrib/python/frozenlist/.dist-info/METADATA b/contrib/python/frozenlist/.dist-info/METADATA
new file mode 100644
index 0000000000..0c9ca3b06f
--- /dev/null
+++ b/contrib/python/frozenlist/.dist-info/METADATA
@@ -0,0 +1,150 @@
+Metadata-Version: 2.1
+Name: frozenlist
+Version: 1.4.0
+Summary: A list-like structure which implements collections.abc.MutableSequence
+Home-page: https://github.com/aio-libs/frozenlist
+Maintainer: aiohttp team <team@aiohttp.org>
+Maintainer-email: team@aiohttp.org
+License: Apache 2
+Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org
+Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org
+Project-URL: CI: Github Actions, https://github.com/aio-libs/frozenlist/actions
+Project-URL: Code of Conduct, https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md
+Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/frozenlist
+Project-URL: Docs: Changelog, https://github.com/aio-libs/frozenlist/blob/master/CHANGES.rst#changelog
+Project-URL: Docs: RTD, https://frozenlist.aio-libs.org
+Project-URL: GitHub: issues, https://github.com/aio-libs/frozenlist/issues
+Project-URL: GitHub: repo, https://github.com/aio-libs/frozenlist
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Intended Audience :: Developers
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: Microsoft :: Windows
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+
+==========
+frozenlist
+==========
+
+.. image:: https://github.com/aio-libs/frozenlist/workflows/CI/badge.svg
+ :target: https://github.com/aio-libs/frozenlist/actions
+ :alt: GitHub status for master branch
+
+.. image:: https://codecov.io/gh/aio-libs/frozenlist/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/aio-libs/frozenlist
+ :alt: codecov.io status for master branch
+
+.. image:: https://img.shields.io/pypi/v/frozenlist.svg?logo=Python&logoColor=white
+ :target: https://pypi.org/project/frozenlist
+ :alt: frozenlist @ PyPI
+
+.. image:: https://readthedocs.org/projects/frozenlist/badge/?version=latest
+ :target: https://frozenlist.aio-libs.org
+ :alt: Read The Docs build status badge
+
+.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs:matrix.org
+ :alt: Matrix Room — #aio-libs:matrix.org
+
+.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs-space:matrix.org
+ :alt: Matrix Space — #aio-libs-space:matrix.org
+
+Introduction
+============
+
+``frozenlist.FrozenList`` is a list-like structure which implements
+``collections.abc.MutableSequence``. The list is *mutable* until ``FrozenList.freeze``
+is called, after which list modifications raise ``RuntimeError``:
+
+
+>>> from frozenlist import FrozenList
+>>> fl = FrozenList([17, 42])
+>>> fl.append('spam')
+>>> fl.append('Vikings')
+>>> fl
+<FrozenList(frozen=False, [17, 42, 'spam', 'Vikings'])>
+>>> fl.freeze()
+>>> fl
+<FrozenList(frozen=True, [17, 42, 'spam', 'Vikings'])>
+>>> fl.frozen
+True
+>>> fl.append("Monty")
+Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ File "frozenlist/_frozenlist.pyx", line 97, in frozenlist._frozenlist.FrozenList.append
+ self._check_frozen()
+ File "frozenlist/_frozenlist.pyx", line 19, in frozenlist._frozenlist.FrozenList._check_frozen
+ raise RuntimeError("Cannot modify frozen list.")
+RuntimeError: Cannot modify frozen list.
+
+
+FrozenList is also hashable, but only when frozen. Otherwise it also throws a RuntimeError:
+
+
+>>> fl = FrozenList([17, 42, 'spam'])
+>>> hash(fl)
+Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ File "frozenlist/_frozenlist.pyx", line 111, in frozenlist._frozenlist.FrozenList.__hash__
+ raise RuntimeError("Cannot hash unfrozen list.")
+RuntimeError: Cannot hash unfrozen list.
+>>> fl.freeze()
+>>> hash(fl)
+3713081631934410656
+>>> dictionary = {fl: 'Vikings'} # frozen fl can be a dict key
+>>> dictionary
+{<FrozenList(frozen=True, [1, 2])>: 'Vikings'}
+
+
+Installation
+------------
+
+::
+
+ $ pip install frozenlist
+
+The library requires Python 3.8 or newer.
+
+
+Documentation
+=============
+
+https://frozenlist.aio-libs.org
+
+Communication channels
+======================
+
+We have a *Matrix Space* `#aio-libs-space:matrix.org
+<https://matrix.to/#/%23aio-libs-space:matrix.org>`_ which is
+also accessible via Gitter.
+
+Requirements
+============
+
+- Python >= 3.8
+
+License
+=======
+
+``frozenlist`` is offered under the Apache 2 license.
+
+Source code
+===========
+
+The project is hosted on GitHub_
+
+Please file an issue in the `bug tracker
+<https://github.com/aio-libs/frozenlist/issues>`_ if you have found a bug
+or have some suggestions to improve the library.
+
+.. _GitHub: https://github.com/aio-libs/frozenlist
diff --git a/contrib/python/frozenlist/.dist-info/top_level.txt b/contrib/python/frozenlist/.dist-info/top_level.txt
new file mode 100644
index 0000000000..52f13fc459
--- /dev/null
+++ b/contrib/python/frozenlist/.dist-info/top_level.txt
@@ -0,0 +1 @@
+frozenlist
diff --git a/contrib/python/frozenlist/LICENSE b/contrib/python/frozenlist/LICENSE
new file mode 100644
index 0000000000..7082a2d5b9
--- /dev/null
+++ b/contrib/python/frozenlist/LICENSE
@@ -0,0 +1,201 @@
+Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2013-2019 Nikolay Kim and Andrew Svetlov
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/contrib/python/frozenlist/README.rst b/contrib/python/frozenlist/README.rst
new file mode 100644
index 0000000000..0e864cf28d
--- /dev/null
+++ b/contrib/python/frozenlist/README.rst
@@ -0,0 +1,117 @@
+==========
+frozenlist
+==========
+
+.. image:: https://github.com/aio-libs/frozenlist/workflows/CI/badge.svg
+ :target: https://github.com/aio-libs/frozenlist/actions
+ :alt: GitHub status for master branch
+
+.. image:: https://codecov.io/gh/aio-libs/frozenlist/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/aio-libs/frozenlist
+ :alt: codecov.io status for master branch
+
+.. image:: https://img.shields.io/pypi/v/frozenlist.svg?logo=Python&logoColor=white
+ :target: https://pypi.org/project/frozenlist
+ :alt: frozenlist @ PyPI
+
+.. image:: https://readthedocs.org/projects/frozenlist/badge/?version=latest
+ :target: https://frozenlist.aio-libs.org
+ :alt: Read The Docs build status badge
+
+.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs:matrix.org
+ :alt: Matrix Room — #aio-libs:matrix.org
+
+.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs-space:matrix.org
+ :alt: Matrix Space — #aio-libs-space:matrix.org
+
+Introduction
+============
+
+``frozenlist.FrozenList`` is a list-like structure which implements
+``collections.abc.MutableSequence``. The list is *mutable* until ``FrozenList.freeze``
+is called, after which list modifications raise ``RuntimeError``:
+
+
+>>> from frozenlist import FrozenList
+>>> fl = FrozenList([17, 42])
+>>> fl.append('spam')
+>>> fl.append('Vikings')
+>>> fl
+<FrozenList(frozen=False, [17, 42, 'spam', 'Vikings'])>
+>>> fl.freeze()
+>>> fl
+<FrozenList(frozen=True, [17, 42, 'spam', 'Vikings'])>
+>>> fl.frozen
+True
+>>> fl.append("Monty")
+Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ File "frozenlist/_frozenlist.pyx", line 97, in frozenlist._frozenlist.FrozenList.append
+ self._check_frozen()
+ File "frozenlist/_frozenlist.pyx", line 19, in frozenlist._frozenlist.FrozenList._check_frozen
+ raise RuntimeError("Cannot modify frozen list.")
+RuntimeError: Cannot modify frozen list.
+
+
+FrozenList is also hashable, but only when frozen. Otherwise it also throws a RuntimeError:
+
+
+>>> fl = FrozenList([17, 42, 'spam'])
+>>> hash(fl)
+Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ File "frozenlist/_frozenlist.pyx", line 111, in frozenlist._frozenlist.FrozenList.__hash__
+ raise RuntimeError("Cannot hash unfrozen list.")
+RuntimeError: Cannot hash unfrozen list.
+>>> fl.freeze()
+>>> hash(fl)
+3713081631934410656
+>>> dictionary = {fl: 'Vikings'} # frozen fl can be a dict key
+>>> dictionary
+{<FrozenList(frozen=True, [17, 42, 'spam'])>: 'Vikings'}
+
+
+Installation
+------------
+
+::
+
+ $ pip install frozenlist
+
+The library requires Python 3.8 or newer.
+
+
+Documentation
+=============
+
+https://frozenlist.aio-libs.org
+
+Communication channels
+======================
+
+We have a *Matrix Space* `#aio-libs-space:matrix.org
+<https://matrix.to/#/%23aio-libs-space:matrix.org>`_ which is
+also accessible via Gitter.
+
+Requirements
+============
+
+- Python >= 3.8
+
+License
+=======
+
+``frozenlist`` is offered under the Apache 2 license.
+
+Source code
+===========
+
+The project is hosted on GitHub_
+
+Please file an issue in the `bug tracker
+<https://github.com/aio-libs/frozenlist/issues>`_ if you have found a bug
+or have some suggestions to improve the library.
+
+.. _GitHub: https://github.com/aio-libs/frozenlist
diff --git a/contrib/python/frozenlist/frozenlist/__init__.py b/contrib/python/frozenlist/frozenlist/__init__.py
new file mode 100644
index 0000000000..152356588d
--- /dev/null
+++ b/contrib/python/frozenlist/frozenlist/__init__.py
@@ -0,0 +1,95 @@
+import os
+import sys
+import types
+from collections.abc import MutableSequence
+from functools import total_ordering
+from typing import Type
+
+__version__ = "1.4.0"
+
+__all__ = ("FrozenList", "PyFrozenList") # type: Tuple[str, ...]
+
+
+NO_EXTENSIONS = bool(os.environ.get("FROZENLIST_NO_EXTENSIONS")) # type: bool
+
+
+@total_ordering
+class FrozenList(MutableSequence):
+ __slots__ = ("_frozen", "_items")
+
+ if sys.version_info >= (3, 9):
+ __class_getitem__ = classmethod(types.GenericAlias)
+ else:
+
+ @classmethod
+ def __class_getitem__(cls: Type["FrozenList"]) -> Type["FrozenList"]:
+ return cls
+
+ def __init__(self, items=None):
+ self._frozen = False
+ if items is not None:
+ items = list(items)
+ else:
+ items = []
+ self._items = items
+
+ @property
+ def frozen(self):
+ return self._frozen
+
+ def freeze(self):
+ self._frozen = True
+
+ def __getitem__(self, index):
+ return self._items[index]
+
+ def __setitem__(self, index, value):
+ if self._frozen:
+ raise RuntimeError("Cannot modify frozen list.")
+ self._items[index] = value
+
+ def __delitem__(self, index):
+ if self._frozen:
+ raise RuntimeError("Cannot modify frozen list.")
+ del self._items[index]
+
+ def __len__(self):
+ return self._items.__len__()
+
+ def __iter__(self):
+ return self._items.__iter__()
+
+ def __reversed__(self):
+ return self._items.__reversed__()
+
+ def __eq__(self, other):
+ return list(self) == other
+
+ def __le__(self, other):
+ return list(self) <= other
+
+ def insert(self, pos, item):
+ if self._frozen:
+ raise RuntimeError("Cannot modify frozen list.")
+ self._items.insert(pos, item)
+
+ def __repr__(self):
+ return f"<FrozenList(frozen={self._frozen}, {self._items!r})>"
+
+ def __hash__(self):
+ if self._frozen:
+ return hash(tuple(self))
+ else:
+ raise RuntimeError("Cannot hash unfrozen list.")
+
+
+PyFrozenList = FrozenList
+
+
+try:
+ from ._frozenlist import FrozenList as CFrozenList # type: ignore
+
+ if not NO_EXTENSIONS: # pragma: no cover
+ FrozenList = CFrozenList # type: ignore
+except ImportError: # pragma: no cover
+ pass
diff --git a/contrib/python/frozenlist/frozenlist/_frozenlist.pyx b/contrib/python/frozenlist/frozenlist/_frozenlist.pyx
new file mode 100644
index 0000000000..9ee846c1ae
--- /dev/null
+++ b/contrib/python/frozenlist/frozenlist/_frozenlist.pyx
@@ -0,0 +1,123 @@
+import sys
+import types
+from collections.abc import MutableSequence
+
+
+cdef class FrozenList:
+
+ if sys.version_info >= (3, 9):
+ __class_getitem__ = classmethod(types.GenericAlias)
+ else:
+ @classmethod
+ def __class_getitem__(cls):
+ return cls
+
+ cdef readonly bint frozen
+ cdef list _items
+
+ def __init__(self, items=None):
+ self.frozen = False
+ if items is not None:
+ items = list(items)
+ else:
+ items = []
+ self._items = items
+
+ cdef object _check_frozen(self):
+ if self.frozen:
+ raise RuntimeError("Cannot modify frozen list.")
+
+ cdef inline object _fast_len(self):
+ return len(self._items)
+
+ def freeze(self):
+ self.frozen = True
+
+ def __getitem__(self, index):
+ return self._items[index]
+
+ def __setitem__(self, index, value):
+ self._check_frozen()
+ self._items[index] = value
+
+ def __delitem__(self, index):
+ self._check_frozen()
+ del self._items[index]
+
+ def __len__(self):
+ return self._fast_len()
+
+ def __iter__(self):
+ return self._items.__iter__()
+
+ def __reversed__(self):
+ return self._items.__reversed__()
+
+ def __richcmp__(self, other, op):
+ if op == 0: # <
+ return list(self) < other
+ if op == 1: # <=
+ return list(self) <= other
+ if op == 2: # ==
+ return list(self) == other
+ if op == 3: # !=
+ return list(self) != other
+ if op == 4: # >
+ return list(self) > other
+ if op == 5: # >=
+ return list(self) >= other
+
+ def insert(self, pos, item):
+ self._check_frozen()
+ self._items.insert(pos, item)
+
+ def __contains__(self, item):
+ return item in self._items
+
+ def __iadd__(self, items):
+ self._check_frozen()
+ self._items += list(items)
+ return self
+
+ def index(self, item):
+ return self._items.index(item)
+
+ def remove(self, item):
+ self._check_frozen()
+ self._items.remove(item)
+
+ def clear(self):
+ self._check_frozen()
+ self._items.clear()
+
+ def extend(self, items):
+ self._check_frozen()
+ self._items += list(items)
+
+ def reverse(self):
+ self._check_frozen()
+ self._items.reverse()
+
+ def pop(self, index=-1):
+ self._check_frozen()
+ return self._items.pop(index)
+
+ def append(self, item):
+ self._check_frozen()
+ return self._items.append(item)
+
+ def count(self, item):
+ return self._items.count(item)
+
+ def __repr__(self):
+ return '<FrozenList(frozen={}, {!r})>'.format(self.frozen,
+ self._items)
+
+ def __hash__(self):
+ if self.frozen:
+ return hash(tuple(self._items))
+ else:
+ raise RuntimeError("Cannot hash unfrozen list.")
+
+
+MutableSequence.register(FrozenList)
diff --git a/contrib/python/frozenlist/frozenlist/py.typed b/contrib/python/frozenlist/frozenlist/py.typed
new file mode 100644
index 0000000000..f5642f79f2
--- /dev/null
+++ b/contrib/python/frozenlist/frozenlist/py.typed
@@ -0,0 +1 @@
+Marker
diff --git a/contrib/python/frozenlist/tests/conftest.py b/contrib/python/frozenlist/tests/conftest.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/frozenlist/tests/conftest.py
diff --git a/contrib/python/frozenlist/tests/test_frozenlist.py b/contrib/python/frozenlist/tests/test_frozenlist.py
new file mode 100644
index 0000000000..f3b1a8a4df
--- /dev/null
+++ b/contrib/python/frozenlist/tests/test_frozenlist.py
@@ -0,0 +1,246 @@
+from collections.abc import MutableSequence
+
+import pytest
+
+from frozenlist import FrozenList, PyFrozenList
+
+
+class FrozenListMixin:
+ FrozenList = NotImplemented
+
+ SKIP_METHODS = {"__abstractmethods__", "__slots__"}
+
+ def test_subclass(self) -> None:
+ assert issubclass(self.FrozenList, MutableSequence)
+
+ def test_iface(self) -> None:
+ for name in set(dir(MutableSequence)) - self.SKIP_METHODS:
+ if name.startswith("_") and not name.endswith("_"):
+ continue
+ assert hasattr(self.FrozenList, name)
+
+ def test_ctor_default(self) -> None:
+ _list = self.FrozenList([])
+ assert not _list.frozen
+
+ def test_ctor(self) -> None:
+ _list = self.FrozenList([1])
+ assert not _list.frozen
+
+ def test_ctor_copy_list(self) -> None:
+ orig = [1]
+ _list = self.FrozenList(orig)
+ del _list[0]
+ assert _list != orig
+
+ def test_freeze(self) -> None:
+ _list = self.FrozenList()
+ _list.freeze()
+ assert _list.frozen
+
+ def test_repr(self) -> None:
+ _list = self.FrozenList([1])
+ assert repr(_list) == "<FrozenList(frozen=False, [1])>"
+ _list.freeze()
+ assert repr(_list) == "<FrozenList(frozen=True, [1])>"
+
+ def test_getitem(self) -> None:
+ _list = self.FrozenList([1, 2])
+ assert _list[1] == 2
+
+ def test_setitem(self) -> None:
+ _list = self.FrozenList([1, 2])
+ _list[1] = 3
+ assert _list[1] == 3
+
+ def test_delitem(self) -> None:
+ _list = self.FrozenList([1, 2])
+ del _list[0]
+ assert len(_list) == 1
+ assert _list[0] == 2
+
+ def test_len(self) -> None:
+ _list = self.FrozenList([1])
+ assert len(_list) == 1
+
+ def test_iter(self) -> None:
+ _list = self.FrozenList([1, 2])
+ assert list(iter(_list)) == [1, 2]
+
+ def test_reversed(self) -> None:
+ _list = self.FrozenList([1, 2])
+ assert list(reversed(_list)) == [2, 1]
+
+ def test_eq(self) -> None:
+ _list = self.FrozenList([1])
+ assert _list == [1]
+
+ def test_ne(self) -> None:
+ _list = self.FrozenList([1])
+ assert _list != [2]
+
+ def test_le(self) -> None:
+ _list = self.FrozenList([1])
+ assert _list <= [1]
+
+ def test_lt(self) -> None:
+ _list = self.FrozenList([1])
+ assert _list <= [3]
+
+ def test_ge(self) -> None:
+ _list = self.FrozenList([1])
+ assert _list >= [1]
+
+ def test_gt(self) -> None:
+ _list = self.FrozenList([2])
+ assert _list > [1]
+
+ def test_insert(self) -> None:
+ _list = self.FrozenList([2])
+ _list.insert(0, 1)
+ assert _list == [1, 2]
+
+ def test_frozen_setitem(self) -> None:
+ _list = self.FrozenList([1])
+ _list.freeze()
+ with pytest.raises(RuntimeError):
+ _list[0] = 2
+
+ def test_frozen_delitem(self) -> None:
+ _list = self.FrozenList([1])
+ _list.freeze()
+ with pytest.raises(RuntimeError):
+ del _list[0]
+
+ def test_frozen_insert(self) -> None:
+ _list = self.FrozenList([1])
+ _list.freeze()
+ with pytest.raises(RuntimeError):
+ _list.insert(0, 2)
+
+ def test_contains(self) -> None:
+ _list = self.FrozenList([2])
+ assert 2 in _list
+
+ def test_iadd(self) -> None:
+ _list = self.FrozenList([1])
+ _list += [2]
+ assert _list == [1, 2]
+
+ def test_iadd_frozen(self) -> None:
+ _list = self.FrozenList([1])
+ _list.freeze()
+ with pytest.raises(RuntimeError):
+ _list += [2]
+ assert _list == [1]
+
+ def test_index(self) -> None:
+ _list = self.FrozenList([1])
+ assert _list.index(1) == 0
+
+ def test_remove(self) -> None:
+ _list = self.FrozenList([1])
+ _list.remove(1)
+ assert len(_list) == 0
+
+ def test_remove_frozen(self) -> None:
+ _list = self.FrozenList([1])
+ _list.freeze()
+ with pytest.raises(RuntimeError):
+ _list.remove(1)
+ assert _list == [1]
+
+ def test_clear(self) -> None:
+ _list = self.FrozenList([1])
+ _list.clear()
+ assert len(_list) == 0
+
+ def test_clear_frozen(self) -> None:
+ _list = self.FrozenList([1])
+ _list.freeze()
+ with pytest.raises(RuntimeError):
+ _list.clear()
+ assert _list == [1]
+
+ def test_extend(self) -> None:
+ _list = self.FrozenList([1])
+ _list.extend([2])
+ assert _list == [1, 2]
+
+ def test_extend_frozen(self) -> None:
+ _list = self.FrozenList([1])
+ _list.freeze()
+ with pytest.raises(RuntimeError):
+ _list.extend([2])
+ assert _list == [1]
+
+ def test_reverse(self) -> None:
+ _list = self.FrozenList([1, 2])
+ _list.reverse()
+ assert _list == [2, 1]
+
+ def test_reverse_frozen(self) -> None:
+ _list = self.FrozenList([1, 2])
+ _list.freeze()
+ with pytest.raises(RuntimeError):
+ _list.reverse()
+ assert _list == [1, 2]
+
+ def test_pop(self) -> None:
+ _list = self.FrozenList([1, 2])
+ assert _list.pop(0) == 1
+ assert _list == [2]
+
+ def test_pop_default(self) -> None:
+ _list = self.FrozenList([1, 2])
+ assert _list.pop() == 2
+ assert _list == [1]
+
+ def test_pop_frozen(self) -> None:
+ _list = self.FrozenList([1, 2])
+ _list.freeze()
+ with pytest.raises(RuntimeError):
+ _list.pop()
+ assert _list == [1, 2]
+
+ def test_append(self) -> None:
+ _list = self.FrozenList([1, 2])
+ _list.append(3)
+ assert _list == [1, 2, 3]
+
+ def test_append_frozen(self) -> None:
+ _list = self.FrozenList([1, 2])
+ _list.freeze()
+ with pytest.raises(RuntimeError):
+ _list.append(3)
+ assert _list == [1, 2]
+
+ def test_hash(self) -> None:
+ _list = self.FrozenList([1, 2])
+ with pytest.raises(RuntimeError):
+ hash(_list)
+
+ def test_hash_frozen(self) -> None:
+ _list = self.FrozenList([1, 2])
+ _list.freeze()
+ h = hash(_list)
+ assert h == hash((1, 2))
+
+ def test_dict_key(self) -> None:
+ _list = self.FrozenList([1, 2])
+ with pytest.raises(RuntimeError):
+ {_list: "hello"}
+ _list.freeze()
+ {_list: "hello"}
+
+ def test_count(self) -> None:
+ _list = self.FrozenList([1, 2])
+ assert _list.count(1) == 1
+
+
+class TestFrozenList(FrozenListMixin):
+ FrozenList = FrozenList
+
+
+class TestFrozenListPy(FrozenListMixin):
+ FrozenList = PyFrozenList
diff --git a/contrib/python/frozenlist/tests/ya.make b/contrib/python/frozenlist/tests/ya.make
new file mode 100644
index 0000000000..148532159a
--- /dev/null
+++ b/contrib/python/frozenlist/tests/ya.make
@@ -0,0 +1,14 @@
+PY3TEST()
+
+PEERDIR(
+ contrib/python/frozenlist
+)
+
+TEST_SRCS(
+ conftest.py
+ test_frozenlist.py
+)
+
+NO_LINT()
+
+END()
diff --git a/contrib/python/frozenlist/ya.make b/contrib/python/frozenlist/ya.make
new file mode 100644
index 0000000000..4928a9f7da
--- /dev/null
+++ b/contrib/python/frozenlist/ya.make
@@ -0,0 +1,32 @@
+# Generated by devtools/yamaker (pypi).
+
+PY3_LIBRARY()
+
+VERSION(1.4.0)
+
+LICENSE(Apache-2.0)
+
+NO_COMPILER_WARNINGS()
+
+NO_LINT()
+
+PY_SRCS(
+ TOP_LEVEL
+ frozenlist/__init__.py
+ frozenlist/__init__.pyi
+ CYTHON_CPP
+ frozenlist/_frozenlist.pyx
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/python/frozenlist/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+ frozenlist/py.typed
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/contrib/python/multidict/.dist-info/METADATA b/contrib/python/multidict/.dist-info/METADATA
new file mode 100644
index 0000000000..55377fcd85
--- /dev/null
+++ b/contrib/python/multidict/.dist-info/METADATA
@@ -0,0 +1,130 @@
+Metadata-Version: 2.1
+Name: multidict
+Version: 6.0.4
+Summary: multidict implementation
+Home-page: https://github.com/aio-libs/multidict
+Author: Andrew Svetlov
+Author-email: andrew.svetlov@gmail.com
+License: Apache 2
+Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby
+Project-URL: CI: GitHub, https://github.com/aio-libs/multidict/actions
+Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/multidict
+Project-URL: Docs: RTD, https://multidict.readthedocs.io
+Project-URL: GitHub: issues, https://github.com/aio-libs/multidict/issues
+Project-URL: GitHub: repo, https://github.com/aio-libs/multidict
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Intended Audience :: Developers
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Development Status :: 5 - Production/Stable
+Requires-Python: >=3.7
+License-File: LICENSE
+
+=========
+multidict
+=========
+
+.. image:: https://github.com/aio-libs/multidict/workflows/CI/badge.svg
+ :target: https://github.com/aio-libs/multidict/actions?query=workflow%3ACI
+ :alt: GitHub status for master branch
+
+.. image:: https://codecov.io/gh/aio-libs/multidict/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/aio-libs/multidict
+ :alt: Coverage metrics
+
+.. image:: https://img.shields.io/pypi/v/multidict.svg
+ :target: https://pypi.org/project/multidict
+ :alt: PyPI
+
+.. image:: https://readthedocs.org/projects/multidict/badge/?version=latest
+ :target: http://multidict.readthedocs.org/en/latest/?badge=latest
+ :alt: Documentation
+
+.. image:: https://img.shields.io/pypi/pyversions/multidict.svg
+ :target: https://pypi.org/project/multidict
+ :alt: Python versions
+
+.. image:: https://badges.gitter.im/Join%20Chat.svg
+ :target: https://gitter.im/aio-libs/Lobby
+ :alt: Chat on Gitter
+
+Multidict is dict-like collection of *key-value pairs* where key
+might occur more than once in the container.
+
+Introduction
+------------
+
+*HTTP Headers* and *URL query string* require specific data structure:
+*multidict*. It behaves mostly like a regular ``dict`` but it may have
+several *values* for the same *key* and *preserves insertion ordering*.
+
+The *key* is ``str`` (or ``istr`` for case-insensitive dictionaries).
+
+``multidict`` has four multidict classes:
+``MultiDict``, ``MultiDictProxy``, ``CIMultiDict``
+and ``CIMultiDictProxy``.
+
+Immutable proxies (``MultiDictProxy`` and
+``CIMultiDictProxy``) provide a dynamic view for the
+proxied multidict, the view reflects underlying collection changes. They
+implement the ``collections.abc.Mapping`` interface.
+
+Regular mutable (``MultiDict`` and ``CIMultiDict``) classes
+implement ``collections.abc.MutableMapping`` and allows them to change
+their own content.
+
+
+*Case insensitive* (``CIMultiDict`` and
+``CIMultiDictProxy``) assume the *keys* are case
+insensitive, e.g.::
+
+ >>> dct = CIMultiDict(key='val')
+ >>> 'Key' in dct
+ True
+ >>> dct['Key']
+ 'val'
+
+*Keys* should be ``str`` or ``istr`` instances.
+
+The library has optional C Extensions for speed.
+
+
+License
+-------
+
+Apache 2
+
+Library Installation
+--------------------
+
+.. code-block:: bash
+
+ $ pip install multidict
+
+The library is Python 3 only!
+
+PyPI contains binary wheels for Linux, Windows and MacOS. If you want to install
+``multidict`` on another operating system (or *Alpine Linux* inside a Docker) the
+tarball will be used to compile the library from source. It requires a C compiler and
+Python headers to be installed.
+
+To skip the compilation, please use the `MULTIDICT_NO_EXTENSIONS` environment variable,
+e.g.:
+
+.. code-block:: bash
+
+ $ MULTIDICT_NO_EXTENSIONS=1 pip install multidict
+
+Please note, the pure Python (uncompiled) version is about 20-50 times slower depending on
+the usage scenario!!!
+
+
+
+Changelog
+---------
+See `RTD page <http://multidict.readthedocs.org/en/latest/changes.html>`_.
diff --git a/contrib/python/multidict/.dist-info/top_level.txt b/contrib/python/multidict/.dist-info/top_level.txt
new file mode 100644
index 0000000000..afcecdff08
--- /dev/null
+++ b/contrib/python/multidict/.dist-info/top_level.txt
@@ -0,0 +1 @@
+multidict
diff --git a/contrib/python/multidict/LICENSE b/contrib/python/multidict/LICENSE
new file mode 100644
index 0000000000..305eef6003
--- /dev/null
+++ b/contrib/python/multidict/LICENSE
@@ -0,0 +1,13 @@
+ Copyright 2016-2021 Andrew Svetlov and aio-libs team
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/contrib/python/multidict/README.rst b/contrib/python/multidict/README.rst
new file mode 100644
index 0000000000..0fb146c446
--- /dev/null
+++ b/contrib/python/multidict/README.rst
@@ -0,0 +1,103 @@
+=========
+multidict
+=========
+
+.. image:: https://github.com/aio-libs/multidict/workflows/CI/badge.svg
+ :target: https://github.com/aio-libs/multidict/actions?query=workflow%3ACI
+ :alt: GitHub status for master branch
+
+.. image:: https://codecov.io/gh/aio-libs/multidict/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/aio-libs/multidict
+ :alt: Coverage metrics
+
+.. image:: https://img.shields.io/pypi/v/multidict.svg
+ :target: https://pypi.org/project/multidict
+ :alt: PyPI
+
+.. image:: https://readthedocs.org/projects/multidict/badge/?version=latest
+ :target: http://multidict.readthedocs.org/en/latest/?badge=latest
+ :alt: Documentation
+
+.. image:: https://img.shields.io/pypi/pyversions/multidict.svg
+ :target: https://pypi.org/project/multidict
+ :alt: Python versions
+
+.. image:: https://badges.gitter.im/Join%20Chat.svg
+ :target: https://gitter.im/aio-libs/Lobby
+ :alt: Chat on Gitter
+
+Multidict is dict-like collection of *key-value pairs* where key
+might occur more than once in the container.
+
+Introduction
+------------
+
+*HTTP Headers* and *URL query string* require specific data structure:
+*multidict*. It behaves mostly like a regular ``dict`` but it may have
+several *values* for the same *key* and *preserves insertion ordering*.
+
+The *key* is ``str`` (or ``istr`` for case-insensitive dictionaries).
+
+``multidict`` has four multidict classes:
+``MultiDict``, ``MultiDictProxy``, ``CIMultiDict``
+and ``CIMultiDictProxy``.
+
+Immutable proxies (``MultiDictProxy`` and
+``CIMultiDictProxy``) provide a dynamic view for the
+proxied multidict, the view reflects underlying collection changes. They
+implement the ``collections.abc.Mapping`` interface.
+
+Regular mutable (``MultiDict`` and ``CIMultiDict``) classes
+implement ``collections.abc.MutableMapping`` and allows them to change
+their own content.
+
+
+*Case insensitive* (``CIMultiDict`` and
+``CIMultiDictProxy``) assume the *keys* are case
+insensitive, e.g.::
+
+ >>> dct = CIMultiDict(key='val')
+ >>> 'Key' in dct
+ True
+ >>> dct['Key']
+ 'val'
+
+*Keys* should be ``str`` or ``istr`` instances.
+
+The library has optional C Extensions for speed.
+
+
+License
+-------
+
+Apache 2
+
+Library Installation
+--------------------
+
+.. code-block:: bash
+
+ $ pip install multidict
+
+The library is Python 3 only!
+
+PyPI contains binary wheels for Linux, Windows and MacOS. If you want to install
+``multidict`` on another operating system (or *Alpine Linux* inside a Docker) the
+tarball will be used to compile the library from source. It requires a C compiler and
+Python headers to be installed.
+
+To skip the compilation, please use the `MULTIDICT_NO_EXTENSIONS` environment variable,
+e.g.:
+
+.. code-block:: bash
+
+ $ MULTIDICT_NO_EXTENSIONS=1 pip install multidict
+
+Please note, the pure Python (uncompiled) version is about 20-50 times slower depending on
+the usage scenario!!!
+
+
+
+Changelog
+---------
+See `RTD page <http://multidict.readthedocs.org/en/latest/changes.html>`_.
diff --git a/contrib/python/multidict/multidict/__init__.py b/contrib/python/multidict/multidict/__init__.py
new file mode 100644
index 0000000000..d9ea722167
--- /dev/null
+++ b/contrib/python/multidict/multidict/__init__.py
@@ -0,0 +1,48 @@
+"""Multidict implementation.
+
+HTTP Headers and URL query string require specific data structure:
+multidict. It behaves mostly like a dict but it can have
+several values for the same key.
+"""
+
+from ._abc import MultiMapping, MutableMultiMapping
+from ._compat import USE_EXTENSIONS
+
+__all__ = (
+ "MultiMapping",
+ "MutableMultiMapping",
+ "MultiDictProxy",
+ "CIMultiDictProxy",
+ "MultiDict",
+ "CIMultiDict",
+ "upstr",
+ "istr",
+ "getversion",
+)
+
+__version__ = "6.0.4"
+
+
+try:
+ if not USE_EXTENSIONS:
+ raise ImportError
+ from ._multidict import (
+ CIMultiDict,
+ CIMultiDictProxy,
+ MultiDict,
+ MultiDictProxy,
+ getversion,
+ istr,
+ )
+except ImportError: # pragma: no cover
+ from ._multidict_py import (
+ CIMultiDict,
+ CIMultiDictProxy,
+ MultiDict,
+ MultiDictProxy,
+ getversion,
+ istr,
+ )
+
+
+upstr = istr
diff --git a/contrib/python/multidict/multidict/_abc.py b/contrib/python/multidict/multidict/_abc.py
new file mode 100644
index 0000000000..0603cdd244
--- /dev/null
+++ b/contrib/python/multidict/multidict/_abc.py
@@ -0,0 +1,48 @@
+import abc
+import sys
+import types
+from collections.abc import Mapping, MutableMapping
+
+
+class _TypingMeta(abc.ABCMeta):
+ # A fake metaclass to satisfy typing deps in runtime
+ # basically MultiMapping[str] and other generic-like type instantiations
+ # are emulated.
+ # Note: real type hints are provided by __init__.pyi stub file
+ if sys.version_info >= (3, 9):
+
+ def __getitem__(self, key):
+ return types.GenericAlias(self, key)
+
+ else:
+
+ def __getitem__(self, key):
+ return self
+
+
+class MultiMapping(Mapping, metaclass=_TypingMeta):
+ @abc.abstractmethod
+ def getall(self, key, default=None):
+ raise KeyError
+
+ @abc.abstractmethod
+ def getone(self, key, default=None):
+ raise KeyError
+
+
+class MutableMultiMapping(MultiMapping, MutableMapping):
+ @abc.abstractmethod
+ def add(self, key, value):
+ raise NotImplementedError
+
+ @abc.abstractmethod
+ def extend(self, *args, **kwargs):
+ raise NotImplementedError
+
+ @abc.abstractmethod
+ def popone(self, key, default=None):
+ raise KeyError
+
+ @abc.abstractmethod
+ def popall(self, key, default=None):
+ raise KeyError
diff --git a/contrib/python/multidict/multidict/_compat.py b/contrib/python/multidict/multidict/_compat.py
new file mode 100644
index 0000000000..d1ff392b25
--- /dev/null
+++ b/contrib/python/multidict/multidict/_compat.py
@@ -0,0 +1,14 @@
+import os
+import platform
+
+NO_EXTENSIONS = bool(os.environ.get("MULTIDICT_NO_EXTENSIONS"))
+
+PYPY = platform.python_implementation() == "PyPy"
+
+USE_EXTENSIONS = not NO_EXTENSIONS and not PYPY
+
+if USE_EXTENSIONS:
+ try:
+ from . import _multidict # noqa
+ except ImportError:
+ USE_EXTENSIONS = False
diff --git a/contrib/python/multidict/multidict/_multidict.c b/contrib/python/multidict/multidict/_multidict.c
new file mode 100644
index 0000000000..1ba79df304
--- /dev/null
+++ b/contrib/python/multidict/multidict/_multidict.c
@@ -0,0 +1,1824 @@
+#include "Python.h"
+#include "structmember.h"
+
+// Include order important
+#include "_multilib/defs.h"
+#include "_multilib/istr.h"
+#include "_multilib/pair_list.h"
+#include "_multilib/dict.h"
+#include "_multilib/iter.h"
+#include "_multilib/views.h"
+
+#ifndef _PyArg_UnpackKeywords
+#define FASTCALL_OLD
+#endif
+
+
+static PyObject *collections_abc_mapping;
+static PyObject *collections_abc_mut_mapping;
+static PyObject *collections_abc_mut_multi_mapping;
+
+static PyTypeObject multidict_type;
+static PyTypeObject cimultidict_type;
+static PyTypeObject multidict_proxy_type;
+static PyTypeObject cimultidict_proxy_type;
+
+static PyObject *repr_func;
+
+#define MultiDict_CheckExact(o) (Py_TYPE(o) == &multidict_type)
+#define CIMultiDict_CheckExact(o) (Py_TYPE(o) == &cimultidict_type)
+#define MultiDictProxy_CheckExact(o) (Py_TYPE(o) == &multidict_proxy_type)
+#define CIMultiDictProxy_CheckExact(o) (Py_TYPE(o) == &cimultidict_proxy_type)
+
+/* Helper macro for something like isinstance(obj, Base) */
+#define _MultiDict_Check(o) \
+ ((MultiDict_CheckExact(o)) || \
+ (CIMultiDict_CheckExact(o)) || \
+ (MultiDictProxy_CheckExact(o)) || \
+ (CIMultiDictProxy_CheckExact(o)))
+
+/******************** Internal Methods ********************/
+
+/* Forward declaration */
+static PyObject *multidict_items(MultiDictObject *self);
+
+static inline PyObject *
+_multidict_getone(MultiDictObject *self, PyObject *key, PyObject *_default)
+{
+ PyObject *val = pair_list_get_one(&self->pairs, key);
+
+ if (val == NULL &&
+ PyErr_ExceptionMatches(PyExc_KeyError) &&
+ _default != NULL)
+ {
+ PyErr_Clear();
+ Py_INCREF(_default);
+ return _default;
+ }
+
+ return val;
+}
+
+static inline int
+_multidict_eq(MultiDictObject *self, MultiDictObject *other)
+{
+ Py_ssize_t pos1 = 0,
+ pos2 = 0;
+
+ Py_hash_t h1 = 0,
+ h2 = 0;
+
+ PyObject *identity1 = NULL,
+ *identity2 = NULL,
+ *value1 = NULL,
+ *value2 = NULL;
+
+ int cmp_identity = 0,
+ cmp_value = 0;
+
+ if (self == other) {
+ return 1;
+ }
+
+ if (pair_list_len(&self->pairs) != pair_list_len(&other->pairs)) {
+ return 0;
+ }
+
+ while (_pair_list_next(&self->pairs, &pos1, &identity1, NULL, &value1, &h1) &&
+ _pair_list_next(&other->pairs, &pos2, &identity2, NULL, &value2, &h2))
+ {
+ if (h1 != h2) {
+ return 0;
+ }
+ cmp_identity = PyObject_RichCompareBool(identity1, identity2, Py_NE);
+ if (cmp_identity < 0) {
+ return -1;
+ }
+ cmp_value = PyObject_RichCompareBool(value1, value2, Py_NE);
+ if (cmp_value < 0) {
+ return -1;
+ }
+ if (cmp_identity || cmp_value) {
+ return 0;
+ }
+ }
+
+ return 1;
+}
+
+static inline int
+_multidict_update_items(MultiDictObject *self, pair_list_t *pairs)
+{
+ return pair_list_update(&self->pairs, pairs);
+}
+
+static inline int
+_multidict_append_items(MultiDictObject *self, pair_list_t *pairs)
+{
+ PyObject *key = NULL,
+ *value = NULL;
+
+ Py_ssize_t pos = 0;
+
+ while (_pair_list_next(pairs, &pos, NULL, &key, &value, NULL)) {
+ if (pair_list_add(&self->pairs, key, value) < 0) {
+ return -1;
+ }
+ }
+
+ return 0;
+}
+
+static inline int
+_multidict_append_items_seq(MultiDictObject *self, PyObject *arg,
+ const char *name)
+{
+ PyObject *key = NULL,
+ *value = NULL,
+ *item = NULL,
+ *iter = PyObject_GetIter(arg);
+
+ if (iter == NULL) {
+ return -1;
+ }
+
+ while ((item = PyIter_Next(iter)) != NULL) {
+ if (PyTuple_CheckExact(item)) {
+ if (PyTuple_GET_SIZE(item) != 2) {
+ goto invalid_type;
+ }
+ key = PyTuple_GET_ITEM(item, 0);
+ Py_INCREF(key);
+ value = PyTuple_GET_ITEM(item, 1);
+ Py_INCREF(value);
+ }
+ else if (PyList_CheckExact(item)) {
+ if (PyList_GET_SIZE(item) != 2) {
+ goto invalid_type;
+ }
+ key = PyList_GET_ITEM(item, 0);
+ Py_INCREF(key);
+ value = PyList_GET_ITEM(item, 1);
+ Py_INCREF(value);
+ }
+ else if (PySequence_Check(item)) {
+ if (PySequence_Size(item) != 2) {
+ goto invalid_type;
+ }
+ key = PySequence_GetItem(item, 0);
+ value = PySequence_GetItem(item, 1);
+ } else {
+ goto invalid_type;
+ }
+
+ if (pair_list_add(&self->pairs, key, value) < 0) {
+ goto fail;
+ }
+ Py_CLEAR(key);
+ Py_CLEAR(value);
+ Py_CLEAR(item);
+ }
+
+ Py_DECREF(iter);
+
+ if (PyErr_Occurred()) {
+ return -1;
+ }
+
+ return 0;
+invalid_type:
+ PyErr_Format(
+ PyExc_TypeError,
+ "%s takes either dict or list of (key, value) pairs",
+ name,
+ NULL
+ );
+ goto fail;
+fail:
+ Py_XDECREF(key);
+ Py_XDECREF(value);
+ Py_XDECREF(item);
+ Py_DECREF(iter);
+ return -1;
+}
+
+static inline int
+_multidict_list_extend(PyObject *list, PyObject *target_list)
+{
+ PyObject *item = NULL,
+ *iter = PyObject_GetIter(target_list);
+
+ if (iter == NULL) {
+ return -1;
+ }
+
+ while ((item = PyIter_Next(iter)) != NULL) {
+ if (PyList_Append(list, item) < 0) {
+ Py_DECREF(item);
+ Py_DECREF(iter);
+ return -1;
+ }
+ Py_DECREF(item);
+ }
+
+ Py_DECREF(iter);
+
+ if (PyErr_Occurred()) {
+ return -1;
+ }
+
+ return 0;
+}
+
+static inline int
+_multidict_extend_with_args(MultiDictObject *self, PyObject *arg,
+ PyObject *kwds, const char *name, int do_add)
+{
+ PyObject *arg_items = NULL, /* tracked by GC */
+ *kwds_items = NULL; /* new reference */
+ pair_list_t *pairs = NULL;
+
+ int err = 0;
+
+ if (kwds && !PyArg_ValidateKeywordArguments(kwds)) {
+ return -1;
+ }
+
+ // TODO: mb can be refactored more clear
+ if (_MultiDict_Check(arg) && kwds == NULL) {
+ if (MultiDict_CheckExact(arg) || CIMultiDict_CheckExact(arg)) {
+ pairs = &((MultiDictObject*)arg)->pairs;
+ } else if (MultiDictProxy_CheckExact(arg) || CIMultiDictProxy_CheckExact(arg)) {
+ pairs = &((MultiDictProxyObject*)arg)->md->pairs;
+ }
+
+ if (do_add) {
+ return _multidict_append_items(self, pairs);
+ }
+
+ return _multidict_update_items(self, pairs);
+ }
+
+ if (PyObject_HasAttrString(arg, "items")) {
+ if (_MultiDict_Check(arg)) {
+ arg_items = multidict_items((MultiDictObject*)arg);
+ } else {
+ arg_items = PyMapping_Items(arg);
+ }
+ if (arg_items == NULL) {
+ return -1;
+ }
+ } else {
+ arg_items = arg;
+ Py_INCREF(arg_items);
+ }
+
+ if (kwds) {
+ PyObject *tmp = PySequence_List(arg_items);
+ Py_DECREF(arg_items);
+ arg_items = tmp;
+ if (arg_items == NULL) {
+ return -1;
+ }
+
+ kwds_items = PyDict_Items(kwds);
+ if (kwds_items == NULL) {
+ Py_DECREF(arg_items);
+ return -1;
+ }
+ err = _multidict_list_extend(arg_items, kwds_items);
+ Py_DECREF(kwds_items);
+ if (err < 0) {
+ Py_DECREF(arg_items);
+ return -1;
+ }
+ }
+
+ if (do_add) {
+ err = _multidict_append_items_seq(self, arg_items, name);
+ } else {
+ err = pair_list_update_from_seq(&self->pairs, arg_items);
+ }
+
+ Py_DECREF(arg_items);
+
+ return err;
+}
+
+static inline int
+_multidict_extend_with_kwds(MultiDictObject *self, PyObject *kwds,
+ const char *name, int do_add)
+{
+ PyObject *arg = NULL;
+
+ int err = 0;
+
+ if (!PyArg_ValidateKeywordArguments(kwds)) {
+ return -1;
+ }
+
+ arg = PyDict_Items(kwds);
+ if (do_add) {
+ err = _multidict_append_items_seq(self, arg, name);
+ } else {
+ err = pair_list_update_from_seq(&self->pairs, arg);
+ }
+
+ Py_DECREF(arg);
+ return err;
+}
+
+static inline int
+_multidict_extend(MultiDictObject *self, PyObject *args, PyObject *kwds,
+ const char *name, int do_add)
+{
+ PyObject *arg = NULL;
+
+ if (args && PyObject_Length(args) > 1) {
+ PyErr_Format(
+ PyExc_TypeError,
+ "%s takes at most 1 positional argument (%zd given)",
+ name, PyObject_Length(args), NULL
+ );
+ return -1;
+ }
+
+ if (args && PyObject_Length(args) > 0) {
+ if (!PyArg_UnpackTuple(args, name, 0, 1, &arg)) {
+ return -1;
+ }
+ if (_multidict_extend_with_args(self, arg, kwds, name, do_add) < 0) {
+ return -1;
+ }
+ } else if (kwds && PyObject_Length(kwds) > 0) {
+ if (_multidict_extend_with_kwds(self, kwds, name, do_add) < 0) {
+ return -1;
+ }
+ }
+
+ return 0;
+}
+
+static inline PyObject *
+_multidict_copy(MultiDictObject *self, PyTypeObject *multidict_tp_object)
+{
+ MultiDictObject *new_multidict = NULL;
+
+ PyObject *arg_items = NULL,
+ *items = NULL;
+
+ new_multidict = (MultiDictObject*)PyType_GenericNew(
+ multidict_tp_object, NULL, NULL);
+ if (new_multidict == NULL) {
+ return NULL;
+ }
+
+ if (multidict_tp_object->tp_init(
+ (PyObject*)new_multidict, NULL, NULL) < 0)
+ {
+ return NULL;
+ }
+
+ items = multidict_items(self);
+ if (items == NULL) {
+ goto fail;
+ }
+
+ // TODO: "Implementation looks as slow as possible ..."
+ arg_items = PyTuple_New(1);
+ if (arg_items == NULL) {
+ goto fail;
+ }
+
+ Py_INCREF(items);
+ PyTuple_SET_ITEM(arg_items, 0, items);
+
+ if (_multidict_extend(
+ new_multidict, arg_items, NULL, "copy", 1) < 0)
+ {
+ goto fail;
+ }
+
+ Py_DECREF(items);
+ Py_DECREF(arg_items);
+
+ return (PyObject*)new_multidict;
+
+fail:
+ Py_XDECREF(items);
+ Py_XDECREF(arg_items);
+
+ Py_DECREF(new_multidict);
+
+ return NULL;
+}
+
+static inline PyObject *
+_multidict_proxy_copy(MultiDictProxyObject *self, PyTypeObject *type)
+{
+ PyObject *new_multidict = PyType_GenericNew(type, NULL, NULL);
+ if (new_multidict == NULL) {
+ goto fail;
+ }
+ if (type->tp_init(new_multidict, NULL, NULL) < 0) {
+ goto fail;
+ }
+ if (_multidict_extend_with_args(
+ (MultiDictObject*)new_multidict, (PyObject*)self, NULL, "copy", 1) < 0)
+ {
+ goto fail;
+ }
+
+ return new_multidict;
+
+fail:
+ Py_XDECREF(new_multidict);
+ return NULL;
+}
+
+
+/******************** Base Methods ********************/
+
+static inline PyObject *
+multidict_getall(MultiDictObject *self, PyObject *const *args,
+ Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *list = NULL,
+ *key = NULL,
+ *_default = NULL;
+
+ static const char * const _keywords[] = {"key", "default", NULL};
+#ifdef FASTCALL_OLD
+ static _PyArg_Parser _parser = {"O|O:getall", _keywords, 0};
+ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser,
+ &key, &_default)) {
+ return NULL;
+ }
+#else
+ static _PyArg_Parser _parser = {NULL, _keywords, "getall", 0};
+ PyObject *argsbuf[2];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1;
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames,
+ &_parser, 1, 2, 0, argsbuf);
+ if (!args) {
+ return NULL;
+ }
+ key = args[0];
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+
+ _default = args[1];
+skip_optional_pos:
+#endif
+ list = pair_list_get_all(&self->pairs, key);
+
+ if (list == NULL &&
+ PyErr_ExceptionMatches(PyExc_KeyError) &&
+ _default != NULL)
+ {
+ PyErr_Clear();
+ Py_INCREF(_default);
+ return _default;
+ }
+
+ return list;
+}
+
+static inline PyObject *
+multidict_getone(MultiDictObject *self, PyObject *const *args,
+ Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *key = NULL,
+ *_default = NULL;
+
+ static const char * const _keywords[] = {"key", "default", NULL};
+#ifdef FASTCALL_OLD
+ static _PyArg_Parser _parser = {"O|O:getone", _keywords, 0};
+ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser,
+ &key, &_default)) {
+ return NULL;
+ }
+#else
+ static _PyArg_Parser _parser = {NULL, _keywords, "getone", 0};
+ PyObject *argsbuf[2];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1;
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames,
+ &_parser, 1, 2, 0, argsbuf);
+ if (!args) {
+ return NULL;
+ }
+ key = args[0];
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+
+ _default = args[1];
+skip_optional_pos:
+#endif
+ return _multidict_getone(self, key, _default);
+}
+
+static inline PyObject *
+multidict_get(MultiDictObject *self, PyObject *const *args,
+ Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *key = NULL,
+ *_default = Py_None,
+ *ret;
+
+ static const char * const _keywords[] = {"key", "default", NULL};
+#ifdef FASTCALL_OLD
+ static _PyArg_Parser _parser = {"O|O:get", _keywords, 0};
+ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser,
+ &key, &_default)) {
+ return NULL;
+ }
+#else
+ static _PyArg_Parser _parser = {NULL, _keywords, "get", 0};
+ PyObject *argsbuf[2];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1;
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames,
+ &_parser, 1, 2, 0, argsbuf);
+ if (!args) {
+ return NULL;
+ }
+ key = args[0];
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+
+ _default = args[1];
+skip_optional_pos:
+#endif
+ ret = _multidict_getone(self, key, _default);
+ return ret;
+}
+
+static inline PyObject *
+multidict_keys(MultiDictObject *self)
+{
+ return multidict_keysview_new((PyObject*)self);
+}
+
+static inline PyObject *
+multidict_items(MultiDictObject *self)
+{
+ return multidict_itemsview_new((PyObject*)self);
+}
+
+static inline PyObject *
+multidict_values(MultiDictObject *self)
+{
+ return multidict_valuesview_new((PyObject*)self);
+}
+
+static inline PyObject *
+multidict_reduce(MultiDictObject *self)
+{
+ PyObject *items = NULL,
+ *items_list = NULL,
+ *args = NULL,
+ *result = NULL;
+
+ items = multidict_items(self);
+ if (items == NULL) {
+ goto ret;
+ }
+
+ items_list = PySequence_List(items);
+ if (items_list == NULL) {
+ goto ret;
+ }
+
+ args = PyTuple_Pack(1, items_list);
+ if (args == NULL) {
+ goto ret;
+ }
+
+ result = PyTuple_Pack(2, Py_TYPE(self), args);
+
+ret:
+ Py_XDECREF(args);
+ Py_XDECREF(items_list);
+ Py_XDECREF(items);
+
+ return result;
+}
+
+static inline PyObject *
+multidict_repr(PyObject *self)
+{
+ return PyObject_CallFunctionObjArgs(
+ repr_func, self, NULL);
+}
+
+static inline Py_ssize_t
+multidict_mp_len(MultiDictObject *self)
+{
+ return pair_list_len(&self->pairs);
+}
+
+static inline PyObject *
+multidict_mp_subscript(MultiDictObject *self, PyObject *key)
+{
+ return _multidict_getone(self, key, NULL);
+}
+
+static inline int
+multidict_mp_as_subscript(MultiDictObject *self, PyObject *key, PyObject *val)
+{
+ if (val == NULL) {
+ return pair_list_del(&self->pairs, key);
+ } else {
+ return pair_list_replace(&self->pairs, key, val);
+ }
+}
+
+static inline int
+multidict_sq_contains(MultiDictObject *self, PyObject *key)
+{
+ return pair_list_contains(&self->pairs, key);
+}
+
+static inline PyObject *
+multidict_tp_iter(MultiDictObject *self)
+{
+ return multidict_keys_iter_new(self);
+}
+
+static inline PyObject *
+multidict_tp_richcompare(PyObject *self, PyObject *other, int op)
+{
+ // TODO: refactoring me with love
+
+ int cmp = 0;
+
+ if (op != Py_EQ && op != Py_NE) {
+ Py_RETURN_NOTIMPLEMENTED;
+ }
+
+ if (MultiDict_CheckExact(other) || CIMultiDict_CheckExact(other)) {
+ cmp = _multidict_eq(
+ (MultiDictObject*)self,
+ (MultiDictObject*)other
+ );
+ if (cmp < 0) {
+ return NULL;
+ }
+ if (op == Py_NE) {
+ cmp = !cmp;
+ }
+ return PyBool_FromLong(cmp);
+ }
+
+ if (MultiDictProxy_CheckExact(other) || CIMultiDictProxy_CheckExact(other)) {
+ cmp = _multidict_eq(
+ (MultiDictObject*)self,
+ ((MultiDictProxyObject*)other)->md
+ );
+ if (cmp < 0) {
+ return NULL;
+ }
+ if (op == Py_NE) {
+ cmp = !cmp;
+ }
+ return PyBool_FromLong(cmp);
+ }
+
+ cmp = PyObject_IsInstance(other, (PyObject*)collections_abc_mapping);
+ if (cmp < 0) {
+ return NULL;
+ }
+
+ if (cmp) {
+ cmp = pair_list_eq_to_mapping(&((MultiDictObject*)self)->pairs, other);
+ if (cmp < 0) {
+ return NULL;
+ }
+ if (op == Py_NE) {
+ cmp = !cmp;
+ }
+ return PyBool_FromLong(cmp);
+ }
+
+ Py_RETURN_NOTIMPLEMENTED;
+}
+
+static inline void
+multidict_tp_dealloc(MultiDictObject *self)
+{
+ PyObject_GC_UnTrack(self);
+ Py_TRASHCAN_SAFE_BEGIN(self);
+ if (self->weaklist != NULL) {
+ PyObject_ClearWeakRefs((PyObject *)self);
+ };
+ pair_list_dealloc(&self->pairs);
+ Py_TYPE(self)->tp_free((PyObject *)self);
+ Py_TRASHCAN_SAFE_END(self);
+}
+
+static inline int
+multidict_tp_traverse(MultiDictObject *self, visitproc visit, void *arg)
+{
+ return pair_list_traverse(&self->pairs, visit, arg);
+}
+
+static inline int
+multidict_tp_clear(MultiDictObject *self)
+{
+ return pair_list_clear(&self->pairs);
+}
+
+PyDoc_STRVAR(multidict_getall_doc,
+"Return a list of all values matching the key.");
+
+PyDoc_STRVAR(multidict_getone_doc,
+"Get first value matching the key.");
+
+PyDoc_STRVAR(multidict_get_doc,
+"Get first value matching the key.\n\nThe method is alias for .getone().");
+
+PyDoc_STRVAR(multidict_keys_doc,
+"Return a new view of the dictionary's keys.");
+
+PyDoc_STRVAR(multidict_items_doc,
+"Return a new view of the dictionary's items *(key, value) pairs).");
+
+PyDoc_STRVAR(multidict_values_doc,
+"Return a new view of the dictionary's values.");
+
+/******************** MultiDict ********************/
+
+static inline int
+multidict_tp_init(MultiDictObject *self, PyObject *args, PyObject *kwds)
+{
+ if (pair_list_init(&self->pairs) < 0) {
+ return -1;
+ }
+ if (_multidict_extend(self, args, kwds, "MultiDict", 1) < 0) {
+ return -1;
+ }
+ return 0;
+}
+
+static inline PyObject *
+multidict_add(MultiDictObject *self, PyObject *const *args,
+ Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *key = NULL,
+ *val = NULL;
+
+ static const char * const _keywords[] = {"key", "value", NULL};
+#ifdef FASTCALL_OLD
+ static _PyArg_Parser _parser = {"OO:add", _keywords, 0};
+ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser,
+ &key, &val)) {
+ return NULL;
+ }
+#else
+ static _PyArg_Parser _parser = {NULL, _keywords, "add", 0};
+ PyObject *argsbuf[2];
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames,
+ &_parser, 2, 2, 0, argsbuf);
+ if (!args) {
+ return NULL;
+ }
+ key = args[0];
+ val = args[1];
+#endif
+ if (pair_list_add(&self->pairs, key, val) < 0) {
+ return NULL;
+ }
+
+ Py_RETURN_NONE;
+}
+
+static inline PyObject *
+multidict_copy(MultiDictObject *self)
+{
+ return _multidict_copy(self, &multidict_type);
+}
+
+static inline PyObject *
+multidict_extend(MultiDictObject *self, PyObject *args, PyObject *kwds)
+{
+ if (_multidict_extend(self, args, kwds, "extend", 1) < 0) {
+ return NULL;
+ }
+
+ Py_RETURN_NONE;
+}
+
+static inline PyObject *
+multidict_clear(MultiDictObject *self)
+{
+ if (pair_list_clear(&self->pairs) < 0) {
+ return NULL;
+ }
+
+ Py_RETURN_NONE;
+}
+
+static inline PyObject *
+multidict_setdefault(MultiDictObject *self, PyObject *const *args,
+ Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *key = NULL,
+ *_default = NULL;
+
+ static const char * const _keywords[] = {"key", "default", NULL};
+#ifdef FASTCALL_OLD
+ static _PyArg_Parser _parser = {"O|O:setdefault", _keywords, 0};
+ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser,
+ &key, &_default)) {
+ return NULL;
+ }
+#else
+ static _PyArg_Parser _parser = {NULL, _keywords, "setdefault", 0};
+ PyObject *argsbuf[3];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames,
+ &_parser, 1, 2, 0, argsbuf);
+ if (!args) {
+ return NULL;
+ }
+ key = args[0];
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+ _default = args[1];
+
+skip_optional_pos:
+#endif
+ return pair_list_set_default(&self->pairs, key, _default);
+}
+
+static inline PyObject *
+multidict_popone(MultiDictObject *self, PyObject *const *args,
+ Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *key = NULL,
+ *_default = NULL,
+ *ret_val = NULL;
+
+ static const char * const _keywords[] = {"key", "default", NULL};
+#ifdef FASTCALL_OLD
+ static _PyArg_Parser _parser = {"O|O:popone", _keywords, 0};
+ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser,
+ &key, &_default)) {
+ return NULL;
+ }
+#else
+ static _PyArg_Parser _parser = {NULL, _keywords, "popone", 0};
+ PyObject *argsbuf[3];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames,
+ &_parser, 1, 2, 0, argsbuf);
+ if (!args) {
+ return NULL;
+ }
+ key = args[0];
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+ _default = args[1];
+
+skip_optional_pos:
+#endif
+ ret_val = pair_list_pop_one(&self->pairs, key);
+
+ if (ret_val == NULL &&
+ PyErr_ExceptionMatches(PyExc_KeyError) &&
+ _default != NULL)
+ {
+ PyErr_Clear();
+ Py_INCREF(_default);
+ return _default;
+ }
+
+ return ret_val;
+}
+
+static inline PyObject *
+multidict_pop(MultiDictObject *self, PyObject *const *args,
+ Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *key = NULL,
+ *_default = NULL,
+ *ret_val = NULL;
+
+ static const char * const _keywords[] = {"key", "default", NULL};
+#ifdef FASTCALL_OLD
+ static _PyArg_Parser _parser = {"O|O:pop", _keywords, 0};
+ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser,
+ &key, &_default)) {
+ return NULL;
+ }
+#else
+ static _PyArg_Parser _parser = {NULL, _keywords, "pop", 0};
+ PyObject *argsbuf[3];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames,
+ &_parser, 1, 2, 0, argsbuf);
+ if (!args) {
+ return NULL;
+ }
+ key = args[0];
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+ _default = args[1];
+
+skip_optional_pos:
+#endif
+ ret_val = pair_list_pop_one(&self->pairs, key);
+
+ if (ret_val == NULL &&
+ PyErr_ExceptionMatches(PyExc_KeyError) &&
+ _default != NULL)
+ {
+ PyErr_Clear();
+ Py_INCREF(_default);
+ return _default;
+ }
+
+ return ret_val;
+}
+
+static inline PyObject *
+multidict_popall(MultiDictObject *self, PyObject *const *args,
+ Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *key = NULL,
+ *_default = NULL,
+ *ret_val = NULL;
+
+
+ static const char * const _keywords[] = {"key", "default", NULL};
+#ifdef FASTCALL_OLD
+ static _PyArg_Parser _parser = {"O|O:popall", _keywords, 0};
+ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser,
+ &key, &_default)) {
+ return NULL;
+ }
+#else
+ static _PyArg_Parser _parser = {NULL, _keywords, "popall", 0};
+ PyObject *argsbuf[3];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames,
+ &_parser, 1, 2, 0, argsbuf);
+ if (!args) {
+ return NULL;
+ }
+ key = args[0];
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+ _default = args[1];
+
+skip_optional_pos:
+#endif
+ ret_val = pair_list_pop_all(&self->pairs, key);
+
+ if (ret_val == NULL &&
+ PyErr_ExceptionMatches(PyExc_KeyError) &&
+ _default != NULL)
+ {
+ PyErr_Clear();
+ Py_INCREF(_default);
+ return _default;
+ }
+
+ return ret_val;
+}
+
+static inline PyObject *
+multidict_popitem(MultiDictObject *self)
+{
+ return pair_list_pop_item(&self->pairs);
+}
+
+static inline PyObject *
+multidict_update(MultiDictObject *self, PyObject *args, PyObject *kwds)
+{
+ if (_multidict_extend(self, args, kwds, "update", 0) < 0) {
+ return NULL;
+ }
+ Py_RETURN_NONE;
+}
+
+PyDoc_STRVAR(multidict_add_doc,
+"Add the key and value, not overwriting any previous value.");
+
+PyDoc_STRVAR(multidict_copy_doc,
+"Return a copy of itself.");
+
+PyDoc_STRVAR(multdicit_method_extend_doc,
+"Extend current MultiDict with more values.\n\
+This method must be used instead of update.");
+
+PyDoc_STRVAR(multidict_clear_doc,
+"Remove all items from MultiDict");
+
+PyDoc_STRVAR(multidict_setdefault_doc,
+"Return value for key, set value to default if key is not present.");
+
+PyDoc_STRVAR(multidict_popone_doc,
+"Remove the last occurrence of key and return the corresponding value.\n\n\
+If key is not found, default is returned if given, otherwise KeyError is \
+raised.\n");
+
+PyDoc_STRVAR(multidict_pop_doc,
+"Remove the last occurrence of key and return the corresponding value.\n\n\
+If key is not found, default is returned if given, otherwise KeyError is \
+raised.\n");
+
+PyDoc_STRVAR(multidict_popall_doc,
+"Remove all occurrences of key and return the list of corresponding values.\n\n\
+If key is not found, default is returned if given, otherwise KeyError is \
+raised.\n");
+
+PyDoc_STRVAR(multidict_popitem_doc,
+"Remove and return an arbitrary (key, value) pair.");
+
+PyDoc_STRVAR(multidict_update_doc,
+"Update the dictionary from *other*, overwriting existing keys.");
+
+
+#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 9
+#define multidict_class_getitem Py_GenericAlias
+#else
+static inline PyObject *
+multidict_class_getitem(PyObject *self, PyObject *arg)
+{
+ Py_INCREF(self);
+ return self;
+}
+#endif
+
+
+PyDoc_STRVAR(sizeof__doc__,
+"D.__sizeof__() -> size of D in memory, in bytes");
+
+static inline PyObject *
+_multidict_sizeof(MultiDictObject *self)
+{
+ Py_ssize_t size = sizeof(MultiDictObject);
+ if (self->pairs.pairs != self->pairs.buffer) {
+ size += (Py_ssize_t)sizeof(pair_t) * self->pairs.capacity;
+ }
+ return PyLong_FromSsize_t(size);
+}
+
+
+static PySequenceMethods multidict_sequence = {
+ .sq_contains = (objobjproc)multidict_sq_contains,
+};
+
+static PyMappingMethods multidict_mapping = {
+ .mp_length = (lenfunc)multidict_mp_len,
+ .mp_subscript = (binaryfunc)multidict_mp_subscript,
+ .mp_ass_subscript = (objobjargproc)multidict_mp_as_subscript,
+};
+
+static PyMethodDef multidict_methods[] = {
+ {
+ "getall",
+ (PyCFunction)multidict_getall,
+ METH_FASTCALL | METH_KEYWORDS,
+ multidict_getall_doc
+ },
+ {
+ "getone",
+ (PyCFunction)multidict_getone,
+ METH_FASTCALL | METH_KEYWORDS,
+ multidict_getone_doc
+ },
+ {
+ "get",
+ (PyCFunction)multidict_get,
+ METH_FASTCALL | METH_KEYWORDS,
+ multidict_get_doc
+ },
+ {
+ "keys",
+ (PyCFunction)multidict_keys,
+ METH_NOARGS,
+ multidict_keys_doc
+ },
+ {
+ "items",
+ (PyCFunction)multidict_items,
+ METH_NOARGS,
+ multidict_items_doc
+ },
+ {
+ "values",
+ (PyCFunction)multidict_values,
+ METH_NOARGS,
+ multidict_values_doc
+ },
+ {
+ "add",
+ (PyCFunction)multidict_add,
+ METH_FASTCALL | METH_KEYWORDS,
+ multidict_add_doc
+ },
+ {
+ "copy",
+ (PyCFunction)multidict_copy,
+ METH_NOARGS,
+ multidict_copy_doc
+ },
+ {
+ "extend",
+ (PyCFunction)multidict_extend,
+ METH_VARARGS | METH_KEYWORDS,
+ multdicit_method_extend_doc
+ },
+ {
+ "clear",
+ (PyCFunction)multidict_clear,
+ METH_NOARGS,
+ multidict_clear_doc
+ },
+ {
+ "setdefault",
+ (PyCFunction)multidict_setdefault,
+ METH_FASTCALL | METH_KEYWORDS,
+ multidict_setdefault_doc
+ },
+ {
+ "popone",
+ (PyCFunction)multidict_popone,
+ METH_FASTCALL | METH_KEYWORDS,
+ multidict_popone_doc
+ },
+ {
+ "pop",
+ (PyCFunction)multidict_pop,
+ METH_FASTCALL | METH_KEYWORDS,
+ multidict_pop_doc
+ },
+ {
+ "popall",
+ (PyCFunction)multidict_popall,
+ METH_FASTCALL | METH_KEYWORDS,
+ multidict_popall_doc
+ },
+ {
+ "popitem",
+ (PyCFunction)multidict_popitem,
+ METH_NOARGS,
+ multidict_popitem_doc
+ },
+ {
+ "update",
+ (PyCFunction)multidict_update,
+ METH_VARARGS | METH_KEYWORDS,
+ multidict_update_doc
+ },
+ {
+ "__reduce__",
+ (PyCFunction)multidict_reduce,
+ METH_NOARGS,
+ NULL,
+ },
+ {
+ "__class_getitem__",
+ (PyCFunction)multidict_class_getitem,
+ METH_O | METH_CLASS,
+ NULL
+ },
+ {
+ "__sizeof__",
+ (PyCFunction)_multidict_sizeof,
+ METH_NOARGS,
+ sizeof__doc__,
+ },
+ {
+ NULL,
+ NULL
+ } /* sentinel */
+};
+
+
+PyDoc_STRVAR(MultDict_doc,
+"Dictionary with the support for duplicate keys.");
+
+
+static PyTypeObject multidict_type = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ "multidict._multidict.MultiDict", /* tp_name */
+ sizeof(MultiDictObject), /* tp_basicsize */
+ .tp_dealloc = (destructor)multidict_tp_dealloc,
+ .tp_repr = (reprfunc)multidict_repr,
+ .tp_as_sequence = &multidict_sequence,
+ .tp_as_mapping = &multidict_mapping,
+ .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC,
+ .tp_doc = MultDict_doc,
+ .tp_traverse = (traverseproc)multidict_tp_traverse,
+ .tp_clear = (inquiry)multidict_tp_clear,
+ .tp_richcompare = (richcmpfunc)multidict_tp_richcompare,
+ .tp_weaklistoffset = offsetof(MultiDictObject, weaklist),
+ .tp_iter = (getiterfunc)multidict_tp_iter,
+ .tp_methods = multidict_methods,
+ .tp_init = (initproc)multidict_tp_init,
+ .tp_alloc = PyType_GenericAlloc,
+ .tp_new = PyType_GenericNew,
+ .tp_free = PyObject_GC_Del,
+};
+
+/******************** CIMultiDict ********************/
+
+static inline int
+cimultidict_tp_init(MultiDictObject *self, PyObject *args, PyObject *kwds)
+{
+ if (ci_pair_list_init(&self->pairs) < 0) {
+ return -1;
+ }
+ if (_multidict_extend(self, args, kwds, "CIMultiDict", 1) < 0) {
+ return -1;
+ }
+ return 0;
+}
+
+static inline PyObject *
+cimultidict_copy(MultiDictObject *self)
+{
+ return _multidict_copy(self, &cimultidict_type);
+}
+
+PyDoc_STRVAR(cimultidict_copy_doc,
+"Return a copy of itself.");
+
+static PyMethodDef cimultidict_methods[] = {
+ {
+ "copy",
+ (PyCFunction)cimultidict_copy,
+ METH_NOARGS,
+ cimultidict_copy_doc
+ },
+ {
+ NULL,
+ NULL
+ } /* sentinel */
+};
+
+PyDoc_STRVAR(CIMultDict_doc,
+"Dictionary with the support for duplicate case-insensitive keys.");
+
+
+static PyTypeObject cimultidict_type = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ "multidict._multidict.CIMultiDict", /* tp_name */
+ sizeof(MultiDictObject), /* tp_basicsize */
+ .tp_dealloc = (destructor)multidict_tp_dealloc,
+ .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC,
+ .tp_doc = CIMultDict_doc,
+ .tp_traverse = (traverseproc)multidict_tp_traverse,
+ .tp_clear = (inquiry)multidict_tp_clear,
+ .tp_weaklistoffset = offsetof(MultiDictObject, weaklist),
+ .tp_methods = cimultidict_methods,
+ .tp_base = &multidict_type,
+ .tp_init = (initproc)cimultidict_tp_init,
+ .tp_alloc = PyType_GenericAlloc,
+ .tp_new = PyType_GenericNew,
+ .tp_free = PyObject_GC_Del,
+};
+
+/******************** MultiDictProxy ********************/
+
+static inline int
+multidict_proxy_tp_init(MultiDictProxyObject *self, PyObject *args,
+ PyObject *kwds)
+{
+ PyObject *arg = NULL;
+ MultiDictObject *md = NULL;
+
+ if (!PyArg_UnpackTuple(args, "multidict._multidict.MultiDictProxy",
+ 0, 1, &arg))
+ {
+ return -1;
+ }
+ if (arg == NULL) {
+ PyErr_Format(
+ PyExc_TypeError,
+ "__init__() missing 1 required positional argument: 'arg'"
+ );
+ return -1;
+ }
+ if (!MultiDictProxy_CheckExact(arg) &&
+ !CIMultiDict_CheckExact(arg) &&
+ !MultiDict_CheckExact(arg))
+ {
+ PyErr_Format(
+ PyExc_TypeError,
+ "ctor requires MultiDict or MultiDictProxy instance, "
+ "not <class '%s'>",
+ Py_TYPE(arg)->tp_name
+ );
+ return -1;
+ }
+
+ md = (MultiDictObject*)arg;
+ if (MultiDictProxy_CheckExact(arg)) {
+ md = ((MultiDictProxyObject*)arg)->md;
+ }
+ Py_INCREF(md);
+ self->md = md;
+
+ return 0;
+}
+
+static inline PyObject *
+multidict_proxy_getall(MultiDictProxyObject *self, PyObject *const *args,
+ Py_ssize_t nargs, PyObject *kwnames)
+{
+ return multidict_getall(self->md, args, nargs, kwnames);
+}
+
+static inline PyObject *
+multidict_proxy_getone(MultiDictProxyObject *self, PyObject *const *args,
+ Py_ssize_t nargs, PyObject *kwnames)
+{
+ return multidict_getone(self->md, args, nargs, kwnames);
+}
+
+static inline PyObject *
+multidict_proxy_get(MultiDictProxyObject *self, PyObject *const *args,
+ Py_ssize_t nargs, PyObject *kwnames)
+{
+ return multidict_get(self->md, args, nargs, kwnames);
+}
+
+static inline PyObject *
+multidict_proxy_keys(MultiDictProxyObject *self)
+{
+ return multidict_keys(self->md);
+}
+
+static inline PyObject *
+multidict_proxy_items(MultiDictProxyObject *self)
+{
+ return multidict_items(self->md);
+}
+
+static inline PyObject *
+multidict_proxy_values(MultiDictProxyObject *self)
+{
+ return multidict_values(self->md);
+}
+
+static inline PyObject *
+multidict_proxy_copy(MultiDictProxyObject *self)
+{
+ return _multidict_proxy_copy(self, &multidict_type);
+}
+
+static inline PyObject *
+multidict_proxy_reduce(MultiDictProxyObject *self)
+{
+ PyErr_Format(
+ PyExc_TypeError,
+ "can't pickle %s objects", Py_TYPE(self)->tp_name
+ );
+
+ return NULL;
+}
+
+static inline Py_ssize_t
+multidict_proxy_mp_len(MultiDictProxyObject *self)
+{
+ return multidict_mp_len(self->md);
+}
+
+static inline PyObject *
+multidict_proxy_mp_subscript(MultiDictProxyObject *self, PyObject *key)
+{
+ return multidict_mp_subscript(self->md, key);
+}
+
+static inline int
+multidict_proxy_sq_contains(MultiDictProxyObject *self, PyObject *key)
+{
+ return multidict_sq_contains(self->md, key);
+}
+
+static inline PyObject *
+multidict_proxy_tp_iter(MultiDictProxyObject *self)
+{
+ return multidict_tp_iter(self->md);
+}
+
+static inline PyObject *
+multidict_proxy_tp_richcompare(MultiDictProxyObject *self, PyObject *other,
+ int op)
+{
+ return multidict_tp_richcompare((PyObject*)self->md, other, op);
+}
+
+static inline void
+multidict_proxy_tp_dealloc(MultiDictProxyObject *self)
+{
+ PyObject_GC_UnTrack(self);
+ if (self->weaklist != NULL) {
+ PyObject_ClearWeakRefs((PyObject *)self);
+ };
+ Py_XDECREF(self->md);
+ Py_TYPE(self)->tp_free((PyObject *)self);
+}
+
+static inline int
+multidict_proxy_tp_traverse(MultiDictProxyObject *self, visitproc visit,
+ void *arg)
+{
+ Py_VISIT(self->md);
+ return 0;
+}
+
+static inline int
+multidict_proxy_tp_clear(MultiDictProxyObject *self)
+{
+ Py_CLEAR(self->md);
+ return 0;
+}
+
+static PySequenceMethods multidict_proxy_sequence = {
+ .sq_contains = (objobjproc)multidict_proxy_sq_contains,
+};
+
+static PyMappingMethods multidict_proxy_mapping = {
+ .mp_length = (lenfunc)multidict_proxy_mp_len,
+ .mp_subscript = (binaryfunc)multidict_proxy_mp_subscript,
+};
+
+static PyMethodDef multidict_proxy_methods[] = {
+ {
+ "getall",
+ (PyCFunction)multidict_proxy_getall,
+ METH_FASTCALL | METH_KEYWORDS,
+ multidict_getall_doc
+ },
+ {
+ "getone",
+ (PyCFunction)multidict_proxy_getone,
+ METH_FASTCALL | METH_KEYWORDS,
+ multidict_getone_doc
+ },
+ {
+ "get",
+ (PyCFunction)multidict_proxy_get,
+ METH_FASTCALL | METH_KEYWORDS,
+ multidict_get_doc
+ },
+ {
+ "keys",
+ (PyCFunction)multidict_proxy_keys,
+ METH_NOARGS,
+ multidict_keys_doc
+ },
+ {
+ "items",
+ (PyCFunction)multidict_proxy_items,
+ METH_NOARGS,
+ multidict_items_doc
+ },
+ {
+ "values",
+ (PyCFunction)multidict_proxy_values,
+ METH_NOARGS,
+ multidict_values_doc
+ },
+ {
+ "copy",
+ (PyCFunction)multidict_proxy_copy,
+ METH_NOARGS,
+ multidict_copy_doc
+ },
+ {
+ "__reduce__",
+ (PyCFunction)multidict_proxy_reduce,
+ METH_NOARGS,
+ NULL
+ },
+ {
+ "__class_getitem__",
+ (PyCFunction)multidict_class_getitem,
+ METH_O | METH_CLASS,
+ NULL
+ },
+ {
+ NULL,
+ NULL
+ } /* sentinel */
+};
+
+
+PyDoc_STRVAR(MultDictProxy_doc,
+"Read-only proxy for MultiDict instance.");
+
+
+static PyTypeObject multidict_proxy_type = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ "multidict._multidict.MultiDictProxy", /* tp_name */
+ sizeof(MultiDictProxyObject), /* tp_basicsize */
+ .tp_dealloc = (destructor)multidict_proxy_tp_dealloc,
+ .tp_repr = (reprfunc)multidict_repr,
+ .tp_as_sequence = &multidict_proxy_sequence,
+ .tp_as_mapping = &multidict_proxy_mapping,
+ .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC,
+ .tp_doc = MultDictProxy_doc,
+ .tp_traverse = (traverseproc)multidict_proxy_tp_traverse,
+ .tp_clear = (inquiry)multidict_proxy_tp_clear,
+ .tp_richcompare = (richcmpfunc)multidict_proxy_tp_richcompare,
+ .tp_weaklistoffset = offsetof(MultiDictProxyObject, weaklist),
+ .tp_iter = (getiterfunc)multidict_proxy_tp_iter,
+ .tp_methods = multidict_proxy_methods,
+ .tp_init = (initproc)multidict_proxy_tp_init,
+ .tp_alloc = PyType_GenericAlloc,
+ .tp_new = PyType_GenericNew,
+ .tp_free = PyObject_GC_Del,
+};
+
+/******************** CIMultiDictProxy ********************/
+
+static inline int
+cimultidict_proxy_tp_init(MultiDictProxyObject *self, PyObject *args,
+ PyObject *kwds)
+{
+ PyObject *arg = NULL;
+ MultiDictObject *md = NULL;
+
+ if (!PyArg_UnpackTuple(args, "multidict._multidict.CIMultiDictProxy",
+ 1, 1, &arg))
+ {
+ return -1;
+ }
+ if (arg == NULL) {
+ PyErr_Format(
+ PyExc_TypeError,
+ "__init__() missing 1 required positional argument: 'arg'"
+ );
+ return -1;
+ }
+ if (!CIMultiDictProxy_CheckExact(arg) && !CIMultiDict_CheckExact(arg)) {
+ PyErr_Format(
+ PyExc_TypeError,
+ "ctor requires CIMultiDict or CIMultiDictProxy instance, "
+ "not <class '%s'>",
+ Py_TYPE(arg)->tp_name
+ );
+ return -1;
+ }
+
+ md = (MultiDictObject*)arg;
+ if (CIMultiDictProxy_CheckExact(arg)) {
+ md = ((MultiDictProxyObject*)arg)->md;
+ }
+ Py_INCREF(md);
+ self->md = md;
+
+ return 0;
+}
+
+static inline PyObject *
+cimultidict_proxy_copy(MultiDictProxyObject *self)
+{
+ return _multidict_proxy_copy(self, &cimultidict_type);
+}
+
+
+PyDoc_STRVAR(CIMultDictProxy_doc,
+"Read-only proxy for CIMultiDict instance.");
+
+PyDoc_STRVAR(cimultidict_proxy_copy_doc,
+"Return copy of itself");
+
+static PyMethodDef cimultidict_proxy_methods[] = {
+ {
+ "copy",
+ (PyCFunction)cimultidict_proxy_copy,
+ METH_NOARGS,
+ cimultidict_proxy_copy_doc
+ },
+ {
+ NULL,
+ NULL
+ } /* sentinel */
+};
+
+static PyTypeObject cimultidict_proxy_type = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ "multidict._multidict.CIMultiDictProxy", /* tp_name */
+ sizeof(MultiDictProxyObject), /* tp_basicsize */
+ .tp_dealloc = (destructor)multidict_proxy_tp_dealloc,
+ .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC,
+ .tp_doc = CIMultDictProxy_doc,
+ .tp_traverse = (traverseproc)multidict_proxy_tp_traverse,
+ .tp_clear = (inquiry)multidict_proxy_tp_clear,
+ .tp_richcompare = (richcmpfunc)multidict_proxy_tp_richcompare,
+ .tp_weaklistoffset = offsetof(MultiDictProxyObject, weaklist),
+ .tp_methods = cimultidict_proxy_methods,
+ .tp_base = &multidict_proxy_type,
+ .tp_init = (initproc)cimultidict_proxy_tp_init,
+ .tp_alloc = PyType_GenericAlloc,
+ .tp_new = PyType_GenericNew,
+ .tp_free = PyObject_GC_Del,
+};
+
+/******************** Other functions ********************/
+
+static inline PyObject *
+getversion(PyObject *self, PyObject *md)
+{
+ pair_list_t *pairs = NULL;
+ if (MultiDict_CheckExact(md) || CIMultiDict_CheckExact(md)) {
+ pairs = &((MultiDictObject*)md)->pairs;
+ } else if (MultiDictProxy_CheckExact(md) || CIMultiDictProxy_CheckExact(md)) {
+ pairs = &((MultiDictProxyObject*)md)->md->pairs;
+ } else {
+ PyErr_Format(PyExc_TypeError, "unexpected type");
+ return NULL;
+ }
+ return PyLong_FromUnsignedLong(pair_list_version(pairs));
+}
+
+/******************** Module ********************/
+
+static inline void
+module_free(void *m)
+{
+ Py_CLEAR(collections_abc_mapping);
+ Py_CLEAR(collections_abc_mut_mapping);
+ Py_CLEAR(collections_abc_mut_multi_mapping);
+}
+
+static PyMethodDef multidict_module_methods[] = {
+ {
+ "getversion",
+ (PyCFunction)getversion,
+ METH_O
+ },
+ {
+ NULL,
+ NULL
+ } /* sentinel */
+};
+
+static PyModuleDef multidict_module = {
+ PyModuleDef_HEAD_INIT, /* m_base */
+ "_multidict", /* m_name */
+ .m_size = -1,
+ .m_methods = multidict_module_methods,
+ .m_free = (freefunc)module_free,
+};
+
+PyMODINIT_FUNC
+PyInit__multidict()
+{
+ PyObject *module = NULL,
+ *reg_func_call_result = NULL;
+
+#define WITH_MOD(NAME) \
+ Py_CLEAR(module); \
+ module = PyImport_ImportModule(NAME); \
+ if (module == NULL) { \
+ goto fail; \
+ }
+
+#define GET_MOD_ATTR(VAR, NAME) \
+ VAR = PyObject_GetAttrString(module, NAME); \
+ if (VAR == NULL) { \
+ goto fail; \
+ }
+
+ if (multidict_views_init() < 0) {
+ goto fail;
+ }
+
+ if (multidict_iter_init() < 0) {
+ goto fail;
+ }
+
+ if (istr_init() < 0) {
+ goto fail;
+ }
+
+ if (PyType_Ready(&multidict_type) < 0 ||
+ PyType_Ready(&cimultidict_type) < 0 ||
+ PyType_Ready(&multidict_proxy_type) < 0 ||
+ PyType_Ready(&cimultidict_proxy_type) < 0)
+ {
+ goto fail;
+ }
+
+ WITH_MOD("collections.abc");
+ GET_MOD_ATTR(collections_abc_mapping, "Mapping");
+
+ WITH_MOD("multidict._abc");
+ GET_MOD_ATTR(collections_abc_mut_mapping, "MultiMapping");
+
+ WITH_MOD("multidict._abc");
+ GET_MOD_ATTR(collections_abc_mut_multi_mapping, "MutableMultiMapping");
+
+ WITH_MOD("multidict._multidict_base");
+ GET_MOD_ATTR(repr_func, "_mdrepr");
+
+ /* Register in _abc mappings (CI)MultiDict and (CI)MultiDictProxy */
+ reg_func_call_result = PyObject_CallMethod(
+ collections_abc_mut_mapping,
+ "register", "O",
+ (PyObject*)&multidict_proxy_type
+ );
+ if (reg_func_call_result == NULL) {
+ goto fail;
+ }
+ Py_DECREF(reg_func_call_result);
+
+ reg_func_call_result = PyObject_CallMethod(
+ collections_abc_mut_mapping,
+ "register", "O",
+ (PyObject*)&cimultidict_proxy_type
+ );
+ if (reg_func_call_result == NULL) {
+ goto fail;
+ }
+ Py_DECREF(reg_func_call_result);
+
+ reg_func_call_result = PyObject_CallMethod(
+ collections_abc_mut_multi_mapping,
+ "register", "O",
+ (PyObject*)&multidict_type
+ );
+ if (reg_func_call_result == NULL) {
+ goto fail;
+ }
+ Py_DECREF(reg_func_call_result);
+
+ reg_func_call_result = PyObject_CallMethod(
+ collections_abc_mut_multi_mapping,
+ "register", "O",
+ (PyObject*)&cimultidict_type
+ );
+ if (reg_func_call_result == NULL) {
+ goto fail;
+ }
+ Py_DECREF(reg_func_call_result);
+
+ /* Instantiate this module */
+ module = PyModule_Create(&multidict_module);
+
+ Py_INCREF(&istr_type);
+ if (PyModule_AddObject(
+ module, "istr", (PyObject*)&istr_type) < 0)
+ {
+ goto fail;
+ }
+
+ Py_INCREF(&multidict_type);
+ if (PyModule_AddObject(
+ module, "MultiDict", (PyObject*)&multidict_type) < 0)
+ {
+ goto fail;
+ }
+
+ Py_INCREF(&cimultidict_type);
+ if (PyModule_AddObject(
+ module, "CIMultiDict", (PyObject*)&cimultidict_type) < 0)
+ {
+ goto fail;
+ }
+
+ Py_INCREF(&multidict_proxy_type);
+ if (PyModule_AddObject(
+ module, "MultiDictProxy", (PyObject*)&multidict_proxy_type) < 0)
+ {
+ goto fail;
+ }
+
+ Py_INCREF(&cimultidict_proxy_type);
+ if (PyModule_AddObject(
+ module, "CIMultiDictProxy", (PyObject*)&cimultidict_proxy_type) < 0)
+ {
+ goto fail;
+ }
+
+ return module;
+
+fail:
+ Py_XDECREF(collections_abc_mapping);
+ Py_XDECREF(collections_abc_mut_mapping);
+ Py_XDECREF(collections_abc_mut_multi_mapping);
+
+ return NULL;
+
+#undef WITH_MOD
+#undef GET_MOD_ATTR
+}
diff --git a/contrib/python/multidict/multidict/_multidict_base.py b/contrib/python/multidict/multidict/_multidict_base.py
new file mode 100644
index 0000000000..394466548c
--- /dev/null
+++ b/contrib/python/multidict/multidict/_multidict_base.py
@@ -0,0 +1,144 @@
+from collections.abc import ItemsView, Iterable, KeysView, Set, ValuesView
+
+
+def _abc_itemsview_register(view_cls):
+ ItemsView.register(view_cls)
+
+
+def _abc_keysview_register(view_cls):
+ KeysView.register(view_cls)
+
+
+def _abc_valuesview_register(view_cls):
+ ValuesView.register(view_cls)
+
+
+def _viewbaseset_richcmp(view, other, op):
+ if op == 0: # <
+ if not isinstance(other, Set):
+ return NotImplemented
+ return len(view) < len(other) and view <= other
+ elif op == 1: # <=
+ if not isinstance(other, Set):
+ return NotImplemented
+ if len(view) > len(other):
+ return False
+ for elem in view:
+ if elem not in other:
+ return False
+ return True
+ elif op == 2: # ==
+ if not isinstance(other, Set):
+ return NotImplemented
+ return len(view) == len(other) and view <= other
+ elif op == 3: # !=
+ return not view == other
+ elif op == 4: # >
+ if not isinstance(other, Set):
+ return NotImplemented
+ return len(view) > len(other) and view >= other
+ elif op == 5: # >=
+ if not isinstance(other, Set):
+ return NotImplemented
+ if len(view) < len(other):
+ return False
+ for elem in other:
+ if elem not in view:
+ return False
+ return True
+
+
+def _viewbaseset_and(view, other):
+ if not isinstance(other, Iterable):
+ return NotImplemented
+ if isinstance(view, Set):
+ view = set(iter(view))
+ if isinstance(other, Set):
+ other = set(iter(other))
+ if not isinstance(other, Set):
+ other = set(iter(other))
+ return view & other
+
+
+def _viewbaseset_or(view, other):
+ if not isinstance(other, Iterable):
+ return NotImplemented
+ if isinstance(view, Set):
+ view = set(iter(view))
+ if isinstance(other, Set):
+ other = set(iter(other))
+ if not isinstance(other, Set):
+ other = set(iter(other))
+ return view | other
+
+
+def _viewbaseset_sub(view, other):
+ if not isinstance(other, Iterable):
+ return NotImplemented
+ if isinstance(view, Set):
+ view = set(iter(view))
+ if isinstance(other, Set):
+ other = set(iter(other))
+ if not isinstance(other, Set):
+ other = set(iter(other))
+ return view - other
+
+
+def _viewbaseset_xor(view, other):
+ if not isinstance(other, Iterable):
+ return NotImplemented
+ if isinstance(view, Set):
+ view = set(iter(view))
+ if isinstance(other, Set):
+ other = set(iter(other))
+ if not isinstance(other, Set):
+ other = set(iter(other))
+ return view ^ other
+
+
+def _itemsview_isdisjoint(view, other):
+ "Return True if two sets have a null intersection."
+ for v in other:
+ if v in view:
+ return False
+ return True
+
+
+def _itemsview_repr(view):
+ lst = []
+ for k, v in view:
+ lst.append("{!r}: {!r}".format(k, v))
+ body = ", ".join(lst)
+ return "{}({})".format(view.__class__.__name__, body)
+
+
+def _keysview_isdisjoint(view, other):
+ "Return True if two sets have a null intersection."
+ for k in other:
+ if k in view:
+ return False
+ return True
+
+
+def _keysview_repr(view):
+ lst = []
+ for k in view:
+ lst.append("{!r}".format(k))
+ body = ", ".join(lst)
+ return "{}({})".format(view.__class__.__name__, body)
+
+
+def _valuesview_repr(view):
+ lst = []
+ for v in view:
+ lst.append("{!r}".format(v))
+ body = ", ".join(lst)
+ return "{}({})".format(view.__class__.__name__, body)
+
+
+def _mdrepr(md):
+ lst = []
+ for k, v in md.items():
+ lst.append("'{}': {!r}".format(k, v))
+ body = ", ".join(lst)
+ return "<{}({})>".format(md.__class__.__name__, body)
diff --git a/contrib/python/multidict/multidict/_multidict_py.py b/contrib/python/multidict/multidict/_multidict_py.py
new file mode 100644
index 0000000000..cdbc328903
--- /dev/null
+++ b/contrib/python/multidict/multidict/_multidict_py.py
@@ -0,0 +1,526 @@
+import sys
+import types
+from array import array
+from collections import abc
+
+from ._abc import MultiMapping, MutableMultiMapping
+
+_marker = object()
+
+if sys.version_info >= (3, 9):
+ GenericAlias = types.GenericAlias
+else:
+ def GenericAlias(cls):
+ return cls
+
+
+class istr(str):
+
+ """Case insensitive str."""
+
+ __is_istr__ = True
+
+
+upstr = istr # for relaxing backward compatibility problems
+
+
+def getversion(md):
+ if not isinstance(md, _Base):
+ raise TypeError("Parameter should be multidict or proxy")
+ return md._impl._version
+
+
+_version = array("Q", [0])
+
+
+class _Impl:
+ __slots__ = ("_items", "_version")
+
+ def __init__(self):
+ self._items = []
+ self.incr_version()
+
+ def incr_version(self):
+ global _version
+ v = _version
+ v[0] += 1
+ self._version = v[0]
+
+ if sys.implementation.name != "pypy":
+
+ def __sizeof__(self):
+ return object.__sizeof__(self) + sys.getsizeof(self._items)
+
+
+class _Base:
+ def _title(self, key):
+ return key
+
+ def getall(self, key, default=_marker):
+ """Return a list of all values matching the key."""
+ identity = self._title(key)
+ res = [v for i, k, v in self._impl._items if i == identity]
+ if res:
+ return res
+ if not res and default is not _marker:
+ return default
+ raise KeyError("Key not found: %r" % key)
+
+ def getone(self, key, default=_marker):
+ """Get first value matching the key.
+
+ Raises KeyError if the key is not found and no default is provided.
+ """
+ identity = self._title(key)
+ for i, k, v in self._impl._items:
+ if i == identity:
+ return v
+ if default is not _marker:
+ return default
+ raise KeyError("Key not found: %r" % key)
+
+ # Mapping interface #
+
+ def __getitem__(self, key):
+ return self.getone(key)
+
+ def get(self, key, default=None):
+ """Get first value matching the key.
+
+ If the key is not found, returns the default (or None if no default is provided)
+ """
+ return self.getone(key, default)
+
+ def __iter__(self):
+ return iter(self.keys())
+
+ def __len__(self):
+ return len(self._impl._items)
+
+ def keys(self):
+ """Return a new view of the dictionary's keys."""
+ return _KeysView(self._impl)
+
+ def items(self):
+ """Return a new view of the dictionary's items *(key, value) pairs)."""
+ return _ItemsView(self._impl)
+
+ def values(self):
+ """Return a new view of the dictionary's values."""
+ return _ValuesView(self._impl)
+
+ def __eq__(self, other):
+ if not isinstance(other, abc.Mapping):
+ return NotImplemented
+ if isinstance(other, _Base):
+ lft = self._impl._items
+ rht = other._impl._items
+ if len(lft) != len(rht):
+ return False
+ for (i1, k2, v1), (i2, k2, v2) in zip(lft, rht):
+ if i1 != i2 or v1 != v2:
+ return False
+ return True
+ if len(self._impl._items) != len(other):
+ return False
+ for k, v in self.items():
+ nv = other.get(k, _marker)
+ if v != nv:
+ return False
+ return True
+
+ def __contains__(self, key):
+ identity = self._title(key)
+ for i, k, v in self._impl._items:
+ if i == identity:
+ return True
+ return False
+
+ def __repr__(self):
+ body = ", ".join("'{}': {!r}".format(k, v) for k, v in self.items())
+ return "<{}({})>".format(self.__class__.__name__, body)
+
+ __class_getitem__ = classmethod(GenericAlias)
+
+
+class MultiDictProxy(_Base, MultiMapping):
+ """Read-only proxy for MultiDict instance."""
+
+ def __init__(self, arg):
+ if not isinstance(arg, (MultiDict, MultiDictProxy)):
+ raise TypeError(
+ "ctor requires MultiDict or MultiDictProxy instance"
+ ", not {}".format(type(arg))
+ )
+
+ self._impl = arg._impl
+
+ def __reduce__(self):
+ raise TypeError("can't pickle {} objects".format(self.__class__.__name__))
+
+ def copy(self):
+ """Return a copy of itself."""
+ return MultiDict(self.items())
+
+
+class CIMultiDictProxy(MultiDictProxy):
+ """Read-only proxy for CIMultiDict instance."""
+
+ def __init__(self, arg):
+ if not isinstance(arg, (CIMultiDict, CIMultiDictProxy)):
+ raise TypeError(
+ "ctor requires CIMultiDict or CIMultiDictProxy instance"
+ ", not {}".format(type(arg))
+ )
+
+ self._impl = arg._impl
+
+ def _title(self, key):
+ return key.title()
+
+ def copy(self):
+ """Return a copy of itself."""
+ return CIMultiDict(self.items())
+
+
+class MultiDict(_Base, MutableMultiMapping):
+ """Dictionary with the support for duplicate keys."""
+
+ def __init__(self, *args, **kwargs):
+ self._impl = _Impl()
+
+ self._extend(args, kwargs, self.__class__.__name__, self._extend_items)
+
+ if sys.implementation.name != "pypy":
+
+ def __sizeof__(self):
+ return object.__sizeof__(self) + sys.getsizeof(self._impl)
+
+ def __reduce__(self):
+ return (self.__class__, (list(self.items()),))
+
+ def _title(self, key):
+ return key
+
+ def _key(self, key):
+ if isinstance(key, str):
+ return key
+ else:
+ raise TypeError(
+ "MultiDict keys should be either str " "or subclasses of str"
+ )
+
+ def add(self, key, value):
+ identity = self._title(key)
+ self._impl._items.append((identity, self._key(key), value))
+ self._impl.incr_version()
+
+ def copy(self):
+ """Return a copy of itself."""
+ cls = self.__class__
+ return cls(self.items())
+
+ __copy__ = copy
+
+ def extend(self, *args, **kwargs):
+ """Extend current MultiDict with more values.
+
+ This method must be used instead of update.
+ """
+ self._extend(args, kwargs, "extend", self._extend_items)
+
+ def _extend(self, args, kwargs, name, method):
+ if len(args) > 1:
+ raise TypeError(
+ "{} takes at most 1 positional argument"
+ " ({} given)".format(name, len(args))
+ )
+ if args:
+ arg = args[0]
+ if isinstance(args[0], (MultiDict, MultiDictProxy)) and not kwargs:
+ items = arg._impl._items
+ else:
+ if hasattr(arg, "items"):
+ arg = arg.items()
+ if kwargs:
+ arg = list(arg)
+ arg.extend(list(kwargs.items()))
+ items = []
+ for item in arg:
+ if not len(item) == 2:
+ raise TypeError(
+ "{} takes either dict or list of (key, value) "
+ "tuples".format(name)
+ )
+ items.append((self._title(item[0]), self._key(item[0]), item[1]))
+
+ method(items)
+ else:
+ method(
+ [
+ (self._title(key), self._key(key), value)
+ for key, value in kwargs.items()
+ ]
+ )
+
+ def _extend_items(self, items):
+ for identity, key, value in items:
+ self.add(key, value)
+
+ def clear(self):
+ """Remove all items from MultiDict."""
+ self._impl._items.clear()
+ self._impl.incr_version()
+
+ # Mapping interface #
+
+ def __setitem__(self, key, value):
+ self._replace(key, value)
+
+ def __delitem__(self, key):
+ identity = self._title(key)
+ items = self._impl._items
+ found = False
+ for i in range(len(items) - 1, -1, -1):
+ if items[i][0] == identity:
+ del items[i]
+ found = True
+ if not found:
+ raise KeyError(key)
+ else:
+ self._impl.incr_version()
+
+ def setdefault(self, key, default=None):
+ """Return value for key, set value to default if key is not present."""
+ identity = self._title(key)
+ for i, k, v in self._impl._items:
+ if i == identity:
+ return v
+ self.add(key, default)
+ return default
+
+ def popone(self, key, default=_marker):
+ """Remove specified key and return the corresponding value.
+
+ If key is not found, d is returned if given, otherwise
+ KeyError is raised.
+
+ """
+ identity = self._title(key)
+ for i in range(len(self._impl._items)):
+ if self._impl._items[i][0] == identity:
+ value = self._impl._items[i][2]
+ del self._impl._items[i]
+ self._impl.incr_version()
+ return value
+ if default is _marker:
+ raise KeyError(key)
+ else:
+ return default
+
+ pop = popone # type: ignore
+
+ def popall(self, key, default=_marker):
+ """Remove all occurrences of key and return the list of corresponding
+ values.
+
+ If key is not found, default is returned if given, otherwise
+ KeyError is raised.
+
+ """
+ found = False
+ identity = self._title(key)
+ ret = []
+ for i in range(len(self._impl._items) - 1, -1, -1):
+ item = self._impl._items[i]
+ if item[0] == identity:
+ ret.append(item[2])
+ del self._impl._items[i]
+ self._impl.incr_version()
+ found = True
+ if not found:
+ if default is _marker:
+ raise KeyError(key)
+ else:
+ return default
+ else:
+ ret.reverse()
+ return ret
+
+ def popitem(self):
+ """Remove and return an arbitrary (key, value) pair."""
+ if self._impl._items:
+ i = self._impl._items.pop(0)
+ self._impl.incr_version()
+ return i[1], i[2]
+ else:
+ raise KeyError("empty multidict")
+
+ def update(self, *args, **kwargs):
+ """Update the dictionary from *other*, overwriting existing keys."""
+ self._extend(args, kwargs, "update", self._update_items)
+
+ def _update_items(self, items):
+ if not items:
+ return
+ used_keys = {}
+ for identity, key, value in items:
+ start = used_keys.get(identity, 0)
+ for i in range(start, len(self._impl._items)):
+ item = self._impl._items[i]
+ if item[0] == identity:
+ used_keys[identity] = i + 1
+ self._impl._items[i] = (identity, key, value)
+ break
+ else:
+ self._impl._items.append((identity, key, value))
+ used_keys[identity] = len(self._impl._items)
+
+ # drop tails
+ i = 0
+ while i < len(self._impl._items):
+ item = self._impl._items[i]
+ identity = item[0]
+ pos = used_keys.get(identity)
+ if pos is None:
+ i += 1
+ continue
+ if i >= pos:
+ del self._impl._items[i]
+ else:
+ i += 1
+
+ self._impl.incr_version()
+
+ def _replace(self, key, value):
+ key = self._key(key)
+ identity = self._title(key)
+ items = self._impl._items
+
+ for i in range(len(items)):
+ item = items[i]
+ if item[0] == identity:
+ items[i] = (identity, key, value)
+ # i points to last found item
+ rgt = i
+ self._impl.incr_version()
+ break
+ else:
+ self._impl._items.append((identity, key, value))
+ self._impl.incr_version()
+ return
+
+ # remove all tail items
+ i = rgt + 1
+ while i < len(items):
+ item = items[i]
+ if item[0] == identity:
+ del items[i]
+ else:
+ i += 1
+
+
+class CIMultiDict(MultiDict):
+ """Dictionary with the support for duplicate case-insensitive keys."""
+
+ def _title(self, key):
+ return key.title()
+
+
+class _Iter:
+ __slots__ = ("_size", "_iter")
+
+ def __init__(self, size, iterator):
+ self._size = size
+ self._iter = iterator
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ return next(self._iter)
+
+ def __length_hint__(self):
+ return self._size
+
+
+class _ViewBase:
+ def __init__(self, impl):
+ self._impl = impl
+
+ def __len__(self):
+ return len(self._impl._items)
+
+
+class _ItemsView(_ViewBase, abc.ItemsView):
+ def __contains__(self, item):
+ assert isinstance(item, tuple) or isinstance(item, list)
+ assert len(item) == 2
+ for i, k, v in self._impl._items:
+ if item[0] == k and item[1] == v:
+ return True
+ return False
+
+ def __iter__(self):
+ return _Iter(len(self), self._iter(self._impl._version))
+
+ def _iter(self, version):
+ for i, k, v in self._impl._items:
+ if version != self._impl._version:
+ raise RuntimeError("Dictionary changed during iteration")
+ yield k, v
+
+ def __repr__(self):
+ lst = []
+ for item in self._impl._items:
+ lst.append("{!r}: {!r}".format(item[1], item[2]))
+ body = ", ".join(lst)
+ return "{}({})".format(self.__class__.__name__, body)
+
+
+class _ValuesView(_ViewBase, abc.ValuesView):
+ def __contains__(self, value):
+ for item in self._impl._items:
+ if item[2] == value:
+ return True
+ return False
+
+ def __iter__(self):
+ return _Iter(len(self), self._iter(self._impl._version))
+
+ def _iter(self, version):
+ for item in self._impl._items:
+ if version != self._impl._version:
+ raise RuntimeError("Dictionary changed during iteration")
+ yield item[2]
+
+ def __repr__(self):
+ lst = []
+ for item in self._impl._items:
+ lst.append("{!r}".format(item[2]))
+ body = ", ".join(lst)
+ return "{}({})".format(self.__class__.__name__, body)
+
+
+class _KeysView(_ViewBase, abc.KeysView):
+ def __contains__(self, key):
+ for item in self._impl._items:
+ if item[1] == key:
+ return True
+ return False
+
+ def __iter__(self):
+ return _Iter(len(self), self._iter(self._impl._version))
+
+ def _iter(self, version):
+ for item in self._impl._items:
+ if version != self._impl._version:
+ raise RuntimeError("Dictionary changed during iteration")
+ yield item[1]
+
+ def __repr__(self):
+ lst = []
+ for item in self._impl._items:
+ lst.append("{!r}".format(item[1]))
+ body = ", ".join(lst)
+ return "{}({})".format(self.__class__.__name__, body)
diff --git a/contrib/python/multidict/multidict/_multilib/defs.h b/contrib/python/multidict/multidict/_multilib/defs.h
new file mode 100644
index 0000000000..c7027c817e
--- /dev/null
+++ b/contrib/python/multidict/multidict/_multilib/defs.h
@@ -0,0 +1,22 @@
+#ifndef _MULTIDICT_DEFS_H
+#define _MULTIDICT_DEFS_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+_Py_IDENTIFIER(lower);
+
+/* We link this module statically for convenience. If compiled as a shared
+ library instead, some compilers don't allow addresses of Python objects
+ defined in other libraries to be used in static initializers here. The
+ DEFERRED_ADDRESS macro is used to tag the slots where such addresses
+ appear; the module init function must fill in the tagged slots at runtime.
+ The argument is for documentation -- the macro ignores it.
+*/
+#define DEFERRED_ADDRESS(ADDR) 0
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/contrib/python/multidict/multidict/_multilib/dict.h b/contrib/python/multidict/multidict/_multilib/dict.h
new file mode 100644
index 0000000000..3caf83e5b4
--- /dev/null
+++ b/contrib/python/multidict/multidict/_multilib/dict.h
@@ -0,0 +1,24 @@
+#ifndef _MULTIDICT_C_H
+#define _MULTIDICT_C_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct { // 16 or 24 for GC prefix
+ PyObject_HEAD // 16
+ PyObject *weaklist;
+ pair_list_t pairs;
+} MultiDictObject;
+
+typedef struct {
+ PyObject_HEAD
+ PyObject *weaklist;
+ MultiDictObject *md;
+} MultiDictProxyObject;
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/contrib/python/multidict/multidict/_multilib/istr.h b/contrib/python/multidict/multidict/_multilib/istr.h
new file mode 100644
index 0000000000..2688f48914
--- /dev/null
+++ b/contrib/python/multidict/multidict/_multilib/istr.h
@@ -0,0 +1,85 @@
+#ifndef _MULTIDICT_ISTR_H
+#define _MULTIDICT_ISTR_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct {
+ PyUnicodeObject str;
+ PyObject * canonical;
+} istrobject;
+
+PyDoc_STRVAR(istr__doc__, "istr class implementation");
+
+static PyTypeObject istr_type;
+
+static inline void
+istr_dealloc(istrobject *self)
+{
+ Py_XDECREF(self->canonical);
+ PyUnicode_Type.tp_dealloc((PyObject*)self);
+}
+
+static inline PyObject *
+istr_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
+{
+ PyObject *x = NULL;
+ static char *kwlist[] = {"object", "encoding", "errors", 0};
+ PyObject *encoding = NULL;
+ PyObject *errors = NULL;
+ PyObject *s = NULL;
+ PyObject * ret = NULL;
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOO:str",
+ kwlist, &x, &encoding, &errors)) {
+ return NULL;
+ }
+ if (x != NULL && Py_TYPE(x) == &istr_type) {
+ Py_INCREF(x);
+ return x;
+ }
+ ret = PyUnicode_Type.tp_new(type, args, kwds);
+ if (!ret) {
+ goto fail;
+ }
+ s =_PyObject_CallMethodId(ret, &PyId_lower, NULL);
+ if (!s) {
+ goto fail;
+ }
+ ((istrobject*)ret)->canonical = s;
+ s = NULL; /* the reference is stollen by .canonical */
+ return ret;
+fail:
+ Py_XDECREF(ret);
+ return NULL;
+}
+
+static PyTypeObject istr_type = {
+ PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0)
+ "multidict._multidict.istr",
+ sizeof(istrobject),
+ .tp_dealloc = (destructor)istr_dealloc,
+ .tp_flags = Py_TPFLAGS_DEFAULT
+ | Py_TPFLAGS_BASETYPE
+ | Py_TPFLAGS_UNICODE_SUBCLASS,
+ .tp_doc = istr__doc__,
+ .tp_base = DEFERRED_ADDRESS(&PyUnicode_Type),
+ .tp_new = (newfunc)istr_new,
+};
+
+
+static inline int
+istr_init(void)
+{
+ istr_type.tp_base = &PyUnicode_Type;
+ if (PyType_Ready(&istr_type) < 0) {
+ return -1;
+ }
+ return 0;
+}
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/contrib/python/multidict/multidict/_multilib/iter.h b/contrib/python/multidict/multidict/_multilib/iter.h
new file mode 100644
index 0000000000..4e2e32b387
--- /dev/null
+++ b/contrib/python/multidict/multidict/_multilib/iter.h
@@ -0,0 +1,238 @@
+#ifndef _MULTIDICT_ITER_H
+#define _MULTIDICT_ITER_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+static PyTypeObject multidict_items_iter_type;
+static PyTypeObject multidict_values_iter_type;
+static PyTypeObject multidict_keys_iter_type;
+
+typedef struct multidict_iter {
+ PyObject_HEAD
+ MultiDictObject *md; // MultiDict or CIMultiDict
+ Py_ssize_t current;
+ uint64_t version;
+} MultidictIter;
+
+static inline void
+_init_iter(MultidictIter *it, MultiDictObject *md)
+{
+ Py_INCREF(md);
+
+ it->md = md;
+ it->current = 0;
+ it->version = pair_list_version(&md->pairs);
+}
+
+static inline PyObject *
+multidict_items_iter_new(MultiDictObject *md)
+{
+ MultidictIter *it = PyObject_GC_New(
+ MultidictIter, &multidict_items_iter_type);
+ if (it == NULL) {
+ return NULL;
+ }
+
+ _init_iter(it, md);
+
+ PyObject_GC_Track(it);
+ return (PyObject *)it;
+}
+
+static inline PyObject *
+multidict_keys_iter_new(MultiDictObject *md)
+{
+ MultidictIter *it = PyObject_GC_New(
+ MultidictIter, &multidict_keys_iter_type);
+ if (it == NULL) {
+ return NULL;
+ }
+
+ _init_iter(it, md);
+
+ PyObject_GC_Track(it);
+ return (PyObject *)it;
+}
+
+static inline PyObject *
+multidict_values_iter_new(MultiDictObject *md)
+{
+ MultidictIter *it = PyObject_GC_New(
+ MultidictIter, &multidict_values_iter_type);
+ if (it == NULL) {
+ return NULL;
+ }
+
+ _init_iter(it, md);
+
+ PyObject_GC_Track(it);
+ return (PyObject *)it;
+}
+
+static inline PyObject *
+multidict_items_iter_iternext(MultidictIter *self)
+{
+ PyObject *key = NULL;
+ PyObject *value = NULL;
+ PyObject *ret = NULL;
+
+ if (self->version != pair_list_version(&self->md->pairs)) {
+ PyErr_SetString(PyExc_RuntimeError, "Dictionary changed during iteration");
+ return NULL;
+ }
+
+ if (!_pair_list_next(&self->md->pairs, &self->current, NULL, &key, &value, NULL)) {
+ PyErr_SetNone(PyExc_StopIteration);
+ return NULL;
+ }
+
+ ret = PyTuple_Pack(2, key, value);
+ if (ret == NULL) {
+ return NULL;
+ }
+
+ return ret;
+}
+
+static inline PyObject *
+multidict_values_iter_iternext(MultidictIter *self)
+{
+ PyObject *value = NULL;
+
+ if (self->version != pair_list_version(&self->md->pairs)) {
+ PyErr_SetString(PyExc_RuntimeError, "Dictionary changed during iteration");
+ return NULL;
+ }
+
+ if (!pair_list_next(&self->md->pairs, &self->current, NULL, NULL, &value)) {
+ PyErr_SetNone(PyExc_StopIteration);
+ return NULL;
+ }
+
+ Py_INCREF(value);
+
+ return value;
+}
+
+static inline PyObject *
+multidict_keys_iter_iternext(MultidictIter *self)
+{
+ PyObject *key = NULL;
+
+ if (self->version != pair_list_version(&self->md->pairs)) {
+ PyErr_SetString(PyExc_RuntimeError, "Dictionary changed during iteration");
+ return NULL;
+ }
+
+ if (!pair_list_next(&self->md->pairs, &self->current, NULL, &key, NULL)) {
+ PyErr_SetNone(PyExc_StopIteration);
+ return NULL;
+ }
+
+ Py_INCREF(key);
+
+ return key;
+}
+
+static inline void
+multidict_iter_dealloc(MultidictIter *self)
+{
+ PyObject_GC_UnTrack(self);
+ Py_XDECREF(self->md);
+ PyObject_GC_Del(self);
+}
+
+static inline int
+multidict_iter_traverse(MultidictIter *self, visitproc visit, void *arg)
+{
+ Py_VISIT(self->md);
+ return 0;
+}
+
+static inline int
+multidict_iter_clear(MultidictIter *self)
+{
+ Py_CLEAR(self->md);
+ return 0;
+}
+
+static inline PyObject *
+multidict_iter_len(MultidictIter *self)
+{
+ return PyLong_FromLong(pair_list_len(&self->md->pairs));
+}
+
+PyDoc_STRVAR(length_hint_doc,
+ "Private method returning an estimate of len(list(it)).");
+
+static PyMethodDef multidict_iter_methods[] = {
+ {
+ "__length_hint__",
+ (PyCFunction)(void(*)(void))multidict_iter_len,
+ METH_NOARGS,
+ length_hint_doc
+ },
+ {
+ NULL,
+ NULL
+ } /* sentinel */
+};
+
+/***********************************************************************/
+
+static PyTypeObject multidict_items_iter_type = {
+ PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0)
+ "multidict._multidict._itemsiter", /* tp_name */
+ sizeof(MultidictIter), /* tp_basicsize */
+ .tp_dealloc = (destructor)multidict_iter_dealloc,
+ .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,
+ .tp_traverse = (traverseproc)multidict_iter_traverse,
+ .tp_clear = (inquiry)multidict_iter_clear,
+ .tp_iter = PyObject_SelfIter,
+ .tp_iternext = (iternextfunc)multidict_items_iter_iternext,
+ .tp_methods = multidict_iter_methods,
+};
+
+static PyTypeObject multidict_values_iter_type = {
+ PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0)
+ "multidict._multidict._valuesiter", /* tp_name */
+ sizeof(MultidictIter), /* tp_basicsize */
+ .tp_dealloc = (destructor)multidict_iter_dealloc,
+ .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,
+ .tp_traverse = (traverseproc)multidict_iter_traverse,
+ .tp_clear = (inquiry)multidict_iter_clear,
+ .tp_iter = PyObject_SelfIter,
+ .tp_iternext = (iternextfunc)multidict_values_iter_iternext,
+ .tp_methods = multidict_iter_methods,
+};
+
+static PyTypeObject multidict_keys_iter_type = {
+ PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0)
+ "multidict._multidict._keysiter", /* tp_name */
+ sizeof(MultidictIter), /* tp_basicsize */
+ .tp_dealloc = (destructor)multidict_iter_dealloc,
+ .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,
+ .tp_traverse = (traverseproc)multidict_iter_traverse,
+ .tp_clear = (inquiry)multidict_iter_clear,
+ .tp_iter = PyObject_SelfIter,
+ .tp_iternext = (iternextfunc)multidict_keys_iter_iternext,
+ .tp_methods = multidict_iter_methods,
+};
+
+static inline int
+multidict_iter_init()
+{
+ if (PyType_Ready(&multidict_items_iter_type) < 0 ||
+ PyType_Ready(&multidict_values_iter_type) < 0 ||
+ PyType_Ready(&multidict_keys_iter_type) < 0) {
+ return -1;
+ }
+ return 0;
+}
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/contrib/python/multidict/multidict/_multilib/pair_list.h b/contrib/python/multidict/multidict/_multilib/pair_list.h
new file mode 100644
index 0000000000..7eafd215b5
--- /dev/null
+++ b/contrib/python/multidict/multidict/_multilib/pair_list.h
@@ -0,0 +1,1244 @@
+#ifndef _MULTIDICT_PAIR_LIST_H
+#define _MULTIDICT_PAIR_LIST_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <string.h>
+#include <stddef.h>
+#include <stdint.h>
+
+typedef PyObject * (*calc_identity_func)(PyObject *key);
+
+typedef struct pair {
+ PyObject *identity; // 8
+ PyObject *key; // 8
+ PyObject *value; // 8
+ Py_hash_t hash; // 8
+} pair_t;
+
+/* Note about the structure size
+With 29 pairs the MultiDict object size is slightly less than 1KiB
+(1000-1008 bytes depending on Python version,
+plus extra 12 bytes for memory allocator internal structures).
+As the result the max reserved size is 1020 bytes at most.
+
+To fit into 512 bytes, the structure can contain only 13 pairs
+which is too small, e.g. https://www.python.org returns 16 headers
+(9 of them are caching proxy information though).
+
+The embedded buffer intention is to fit the vast majority of possible
+HTTP headers into the buffer without allocating an extra memory block.
+*/
+
+#if (PY_VERSION_HEX < 0x03080000)
+#define EMBEDDED_CAPACITY 28
+#else
+#define EMBEDDED_CAPACITY 29
+#endif
+
+typedef struct pair_list { // 40
+ Py_ssize_t capacity; // 8
+ Py_ssize_t size; // 8
+ uint64_t version; // 8
+ calc_identity_func calc_identity; // 8
+ pair_t *pairs; // 8
+ pair_t buffer[EMBEDDED_CAPACITY];
+} pair_list_t;
+
+#define MIN_CAPACITY 63
+#define CAPACITY_STEP 64
+
+/* Global counter used to set ma_version_tag field of dictionary.
+ * It is incremented each time that a dictionary is created and each
+ * time that a dictionary is modified. */
+static uint64_t pair_list_global_version = 0;
+
+#define NEXT_VERSION() (++pair_list_global_version)
+
+
+static inline int
+str_cmp(PyObject *s1, PyObject *s2)
+{
+    // Equality test for two unicode objects.
+    // Returns 1 if equal, 0 if not equal, -1 with an exception set on error.
+    PyObject *ret = PyUnicode_RichCompare(s1, s2, Py_EQ);
+    if (ret == Py_True) {
+        Py_DECREF(ret);
+        return 1;
+    }
+    else if (ret == NULL) {
+        // Comparison itself failed; propagate.
+        return -1;
+    }
+    else {
+        Py_DECREF(ret);
+        return 0;
+    }
+}
+
+
+static inline PyObject *
+key_to_str(PyObject *key)
+{
+    // Case-sensitive identity function: return a NEW reference to an
+    // exact-str form of `key`, or NULL with TypeError set for non-str keys.
+    PyObject *ret;
+    PyTypeObject *type = Py_TYPE(key);
+    if (type == &istr_type) {
+        // istr objects cache their canonical form; reuse it.
+        ret = ((istrobject*)key)->canonical;
+        Py_INCREF(ret);
+        return ret;
+    }
+    if (PyUnicode_CheckExact(key)) {
+        Py_INCREF(key);
+        return key;
+    }
+    if (PyUnicode_Check(key)) {
+        // str subclass: normalize to an exact str instance.
+        return PyObject_Str(key);
+    }
+    PyErr_SetString(PyExc_TypeError,
+                    "MultiDict keys should be either str "
+                    "or subclasses of str");
+    return NULL;
+}
+
+
+static inline PyObject *
+ci_key_to_str(PyObject *key)
+{
+ PyObject *ret;
+ PyTypeObject *type = Py_TYPE(key);
+ if (type == &istr_type) {
+ ret = ((istrobject*)key)->canonical;
+ Py_INCREF(ret);
+ return ret;
+ }
+ if (PyUnicode_Check(key)) {
+ return _PyObject_CallMethodId(key, &PyId_lower, NULL);
+ }
+ PyErr_SetString(PyExc_TypeError,
+ "CIMultiDict keys should be either str "
+ "or subclasses of str");
+ return NULL;
+}
+
+static inline pair_t *
+pair_list_get(pair_list_t *list, Py_ssize_t i)
+{
+ pair_t *item = list->pairs + i;
+ return item;
+}
+
+
+static inline int
+pair_list_grow(pair_list_t *list)
+{
+    // Ensure room for at least one more pair.
+    // Returns 0 on success, -1 with MemoryError set on allocation failure.
+    Py_ssize_t new_capacity;
+    pair_t *new_pairs;
+
+    if (list->size < list->capacity) {
+        return 0;
+    }
+
+    if (list->pairs == list->buffer) {
+        // First spill from the embedded buffer to a heap allocation.
+        new_pairs = PyMem_New(pair_t, MIN_CAPACITY);
+        if (NULL == new_pairs) {
+            // BUGFIX: the original passed a NULL result straight to memcpy.
+            // PyMem_New does not set an exception; do it here.
+            PyErr_NoMemory();
+            return -1;
+        }
+        memcpy(new_pairs, list->buffer, (size_t)list->capacity * sizeof(pair_t));
+
+        list->pairs = new_pairs;
+        list->capacity = MIN_CAPACITY;
+        return 0;
+    } else {
+        new_capacity = list->capacity + CAPACITY_STEP;
+        new_pairs = PyMem_Resize(list->pairs, pair_t, (size_t)new_capacity);
+
+        if (NULL == new_pairs) {
+            // Resizing error; the old block is still valid and owned.
+            // PyMem_Resize does not set an exception either.
+            PyErr_NoMemory();
+            return -1;
+        }
+
+        list->pairs = new_pairs;
+        list->capacity = new_capacity;
+        return 0;
+    }
+}
+
+
+static inline int
+pair_list_shrink(pair_list_t *list)
+{
+ // Shrink by one element if needed.
+ // Optimization is applied to prevent jitter
+ // (grow-shrink-grow-shrink on adding-removing the single element
+ // when the buffer is full).
+ // To prevent this, the buffer is resized if the size is less than the capacity
+ // by 2*CAPACITY_STEP factor.
+ // The switch back to embedded buffer is never performed for both reasons:
+ // the code simplicity and the jitter prevention.
+
+ pair_t *new_pairs;
+ Py_ssize_t new_capacity;
+
+ if (list->capacity - list->size < 2 * CAPACITY_STEP) {
+ return 0;
+ }
+ new_capacity = list->capacity - CAPACITY_STEP;
+ if (new_capacity < MIN_CAPACITY) {
+ return 0;
+ }
+
+ new_pairs = PyMem_Resize(list->pairs, pair_t, (size_t)new_capacity);
+
+ if (NULL == new_pairs) {
+ // Resizing error
+ return -1;
+ }
+
+ list->pairs = new_pairs;
+ list->capacity = new_capacity;
+
+ return 0;
+}
+
+
+static inline int
+_pair_list_init(pair_list_t *list, calc_identity_func calc_identity)
+{
+ list->pairs = list->buffer;
+ list->capacity = EMBEDDED_CAPACITY;
+ list->size = 0;
+ list->version = NEXT_VERSION();
+ list->calc_identity = calc_identity;
+ return 0;
+}
+
+static inline int
+pair_list_init(pair_list_t *list)
+{
+ return _pair_list_init(list, key_to_str);
+}
+
+
+static inline int
+ci_pair_list_init(pair_list_t *list)
+{
+ return _pair_list_init(list, ci_key_to_str);
+}
+
+
+static inline void
+pair_list_dealloc(pair_list_t *list)
+{
+    // Drop every stored reference and free the heap buffer, if one was
+    // allocated.  Safe to call more than once.
+    pair_t *pair;
+    Py_ssize_t pos;
+
+    for (pos = 0; pos < list->size; pos++) {
+        pair = pair_list_get(list, pos);
+
+        Py_XDECREF(pair->identity);
+        Py_XDECREF(pair->key);
+        Py_XDECREF(pair->value);
+    }
+
+    /*
+    Strictly speaking, resetting size and capacity and
+    assigning pairs to buffer is not necessary.
+    Do it for consistency and idempotency.
+    The cleanup doesn't hurt performance.
+    !!!
+    !!! The buffer deletion is crucial though.
+    !!!
+    */
+    list->size = 0;
+    if (list->pairs != list->buffer) {
+        PyMem_Del(list->pairs);
+        list->pairs = list->buffer;
+        list->capacity = EMBEDDED_CAPACITY;
+    }
+}
+
+
+static inline Py_ssize_t
+pair_list_len(pair_list_t *list)
+{
+ return list->size;
+}
+
+
+static inline int
+_pair_list_add_with_hash(pair_list_t *list,
+ PyObject *identity,
+ PyObject *key,
+ PyObject *value,
+ Py_hash_t hash)
+{
+ pair_t *pair;
+
+ if (pair_list_grow(list) < 0) {
+ return -1;
+ }
+
+ pair = pair_list_get(list, list->size);
+
+ Py_INCREF(identity);
+ pair->identity = identity;
+
+ Py_INCREF(key);
+ pair->key = key;
+
+ Py_INCREF(value);
+ pair->value = value;
+
+ pair->hash = hash;
+
+ list->version = NEXT_VERSION();
+ list->size += 1;
+
+ return 0;
+}
+
+
+static inline int
+pair_list_add(pair_list_t *list,
+ PyObject *key,
+ PyObject *value)
+{
+ Py_hash_t hash;
+ PyObject *identity = NULL;
+ int ret;
+
+ identity = list->calc_identity(key);
+ if (identity == NULL) {
+ goto fail;
+ }
+ hash = PyObject_Hash(identity);
+ if (hash == -1) {
+ goto fail;
+ }
+ ret = _pair_list_add_with_hash(list, identity, key, value, hash);
+ Py_DECREF(identity);
+ return ret;
+fail:
+ Py_XDECREF(identity);
+ return -1;
+}
+
+
+static inline int
+pair_list_del_at(pair_list_t *list, Py_ssize_t pos)
+{
+    // Delete the pair at index `pos`, shifting the tail down by one slot.
+    // Returns 0 on success, -1 on failure.  (The original comment claimed
+    // "1 on success", but the function returns 0 and every caller only
+    // checks for `< 0`.)
+    Py_ssize_t tail;
+    pair_t *pair;
+
+    pair = pair_list_get(list, pos);
+    Py_DECREF(pair->identity);
+    Py_DECREF(pair->key);
+    Py_DECREF(pair->value);
+
+    list->size -= 1;
+    list->version = NEXT_VERSION();
+
+    if (list->size == pos) {
+        // remove from tail, no need to shift body
+        return 0;
+    }
+
+    tail = list->size - pos;
+    // TODO: raise an error if tail < 0
+    memmove((void *)pair_list_get(list, pos),
+            (void *)pair_list_get(list, pos + 1),
+            sizeof(pair_t) * (size_t)tail);
+
+    return pair_list_shrink(list);
+}
+
+
+static inline int
+_pair_list_drop_tail(pair_list_t *list, PyObject *identity, Py_hash_t hash,
+                     Py_ssize_t pos)
+{
+    // Delete every pair whose identity matches, scanning from `pos` to the
+    // end.  Returns 1 if at least one pair was deleted, 0 if none matched,
+    // -1 with an exception set on comparison failure.
+    pair_t *pair;
+    int ret;
+    int found = 0;
+
+    if (pos >= list->size) {
+        return 0;
+    }
+
+    for (; pos < list->size; pos++) {
+        pair = pair_list_get(list, pos);
+        if (pair->hash != hash) {
+            // Cheap hash filter before the (potentially raising) compare.
+            continue;
+        }
+        ret = str_cmp(pair->identity, identity);
+        if (ret > 0) {
+            if (pair_list_del_at(list, pos) < 0) {
+                return -1;
+            }
+            found = 1;
+            // Deletion shifted the tail down; revisit the same index.
+            pos--;
+        }
+        else if (ret == -1) {
+            return -1;
+        }
+    }
+
+    return found;
+}
+
+static inline int
+_pair_list_del_hash(pair_list_t *list, PyObject *identity,
+ PyObject *key, Py_hash_t hash)
+{
+ int ret = _pair_list_drop_tail(list, identity, hash, 0);
+
+ if (ret < 0) {
+ return -1;
+ }
+ else if (ret == 0) {
+ PyErr_SetObject(PyExc_KeyError, key);
+ return -1;
+ }
+ else {
+ list->version = NEXT_VERSION();
+ return 0;
+ }
+}
+
+
+static inline int
+pair_list_del(pair_list_t *list, PyObject *key)
+{
+ PyObject *identity = NULL;
+ Py_hash_t hash;
+ int ret;
+
+ identity = list->calc_identity(key);
+ if (identity == NULL) {
+ goto fail;
+ }
+
+ hash = PyObject_Hash(identity);
+ if (hash == -1) {
+ goto fail;
+ }
+
+ ret = _pair_list_del_hash(list, identity, key, hash);
+ Py_DECREF(identity);
+ return ret;
+fail:
+ Py_XDECREF(identity);
+ return -1;
+}
+
+
+static inline uint64_t
+pair_list_version(pair_list_t *list)
+{
+ return list->version;
+}
+
+
+static inline int
+_pair_list_next(pair_list_t *list, Py_ssize_t *ppos, PyObject **pidentity,
+                PyObject **pkey, PyObject **pvalue, Py_hash_t *phash)
+{
+    // Iteration primitive: fill each non-NULL out-parameter with a
+    // *borrowed* reference (or the stored hash) for the pair at *ppos,
+    // then advance *ppos.  Returns 1 while pairs remain, 0 when exhausted.
+    pair_t *pair;
+
+    if (*ppos >= list->size) {
+        return 0;
+    }
+
+    pair = pair_list_get(list, *ppos);
+
+    if (pidentity) {
+        *pidentity = pair->identity;
+    }
+    if (pkey) {
+        *pkey = pair->key;
+    }
+    if (pvalue) {
+        *pvalue = pair->value;
+    }
+    if (phash) {
+        *phash = pair->hash;
+    }
+
+    *ppos += 1;
+    return 1;
+}
+
+
+static inline int
+pair_list_next(pair_list_t *list, Py_ssize_t *ppos, PyObject **pidentity,
+ PyObject **pkey, PyObject **pvalue)
+{
+ Py_hash_t hash;
+ return _pair_list_next(list, ppos, pidentity, pkey, pvalue, &hash);
+}
+
+
+static inline int
+pair_list_contains(pair_list_t *list, PyObject *key)
+{
+ Py_hash_t hash1, hash2;
+ Py_ssize_t pos = 0;
+ PyObject *ident = NULL;
+ PyObject *identity = NULL;
+ int tmp;
+
+ ident = list->calc_identity(key);
+ if (ident == NULL) {
+ goto fail;
+ }
+
+ hash1 = PyObject_Hash(ident);
+ if (hash1 == -1) {
+ goto fail;
+ }
+
+ while (_pair_list_next(list, &pos, &identity, NULL, NULL, &hash2)) {
+ if (hash1 != hash2) {
+ continue;
+ }
+ tmp = str_cmp(ident, identity);
+ if (tmp > 0) {
+ Py_DECREF(ident);
+ return 1;
+ }
+ else if (tmp < 0) {
+ goto fail;
+ }
+ }
+
+ Py_DECREF(ident);
+ return 0;
+fail:
+ Py_XDECREF(ident);
+ return -1;
+}
+
+
+static inline PyObject *
+pair_list_get_one(pair_list_t *list, PyObject *key)
+{
+ Py_hash_t hash1, hash2;
+ Py_ssize_t pos = 0;
+ PyObject *ident = NULL;
+ PyObject *identity = NULL;
+ PyObject *value = NULL;
+ int tmp;
+
+ ident = list->calc_identity(key);
+ if (ident == NULL) {
+ goto fail;
+ }
+
+ hash1 = PyObject_Hash(ident);
+ if (hash1 == -1) {
+ goto fail;
+ }
+
+ while (_pair_list_next(list, &pos, &identity, NULL, &value, &hash2)) {
+ if (hash1 != hash2) {
+ continue;
+ }
+ tmp = str_cmp(ident, identity);
+ if (tmp > 0) {
+ Py_INCREF(value);
+ Py_DECREF(ident);
+ return value;
+ }
+ else if (tmp < 0) {
+ goto fail;
+ }
+ }
+
+ Py_DECREF(ident);
+ PyErr_SetObject(PyExc_KeyError, key);
+ return NULL;
+fail:
+ Py_XDECREF(ident);
+ return NULL;
+}
+
+
+static inline PyObject *
+pair_list_get_all(pair_list_t *list, PyObject *key)
+{
+ Py_hash_t hash1, hash2;
+ Py_ssize_t pos = 0;
+ PyObject *ident = NULL;
+ PyObject *identity = NULL;
+ PyObject *value = NULL;
+ PyObject *res = NULL;
+ int tmp;
+
+ ident = list->calc_identity(key);
+ if (ident == NULL) {
+ goto fail;
+ }
+
+ hash1 = PyObject_Hash(ident);
+ if (hash1 == -1) {
+ goto fail;
+ }
+
+ while (_pair_list_next(list, &pos, &identity, NULL, &value, &hash2)) {
+ if (hash1 != hash2) {
+ continue;
+ }
+ tmp = str_cmp(ident, identity);
+ if (tmp > 0) {
+ if (res == NULL) {
+ res = PyList_New(1);
+ if (res == NULL) {
+ goto fail;
+ }
+ if (PyList_SetItem(res, 0, value) < 0) {
+ goto fail;
+ }
+ Py_INCREF(value);
+ }
+ else if (PyList_Append(res, value) < 0) {
+ goto fail;
+ }
+ }
+ else if (tmp < 0) {
+ goto fail;
+ }
+ }
+
+ if (res == NULL) {
+ PyErr_SetObject(PyExc_KeyError, key);
+ }
+ Py_DECREF(ident);
+ return res;
+
+fail:
+ Py_XDECREF(ident);
+ Py_XDECREF(res);
+ return NULL;
+}
+
+
+static inline PyObject *
+pair_list_set_default(pair_list_t *list, PyObject *key, PyObject *value)
+{
+ Py_hash_t hash1, hash2;
+ Py_ssize_t pos = 0;
+ PyObject *ident = NULL;
+ PyObject *identity = NULL;
+ PyObject *value2 = NULL;
+ int tmp;
+
+ ident = list->calc_identity(key);
+ if (ident == NULL) {
+ goto fail;
+ }
+
+ hash1 = PyObject_Hash(ident);
+ if (hash1 == -1) {
+ goto fail;
+ }
+
+ while (_pair_list_next(list, &pos, &identity, NULL, &value2, &hash2)) {
+ if (hash1 != hash2) {
+ continue;
+ }
+ tmp = str_cmp(ident, identity);
+ if (tmp > 0) {
+ Py_INCREF(value2);
+ Py_DECREF(ident);
+ return value2;
+ }
+ else if (tmp < 0) {
+ goto fail;
+ }
+ }
+
+ if (_pair_list_add_with_hash(list, ident, key, value, hash1) < 0) {
+ goto fail;
+ }
+
+ Py_INCREF(value);
+ Py_DECREF(ident);
+ return value;
+fail:
+ Py_XDECREF(ident);
+ return NULL;
+}
+
+
+static inline PyObject *
+pair_list_pop_one(pair_list_t *list, PyObject *key)
+{
+ pair_t *pair;
+
+ Py_hash_t hash;
+ Py_ssize_t pos;
+ PyObject *value = NULL;
+ int tmp;
+ PyObject *ident = NULL;
+
+ ident = list->calc_identity(key);
+ if (ident == NULL) {
+ goto fail;
+ }
+
+ hash = PyObject_Hash(ident);
+ if (hash == -1) {
+ goto fail;
+ }
+
+ for (pos=0; pos < list->size; pos++) {
+ pair = pair_list_get(list, pos);
+ if (pair->hash != hash) {
+ continue;
+ }
+ tmp = str_cmp(ident, pair->identity);
+ if (tmp > 0) {
+ value = pair->value;
+ Py_INCREF(value);
+ if (pair_list_del_at(list, pos) < 0) {
+ goto fail;
+ }
+ Py_DECREF(ident);
+ return value;
+ }
+ else if (tmp < 0) {
+ goto fail;
+ }
+ }
+
+ PyErr_SetObject(PyExc_KeyError, key);
+ goto fail;
+
+fail:
+ Py_XDECREF(value);
+ Py_XDECREF(ident);
+ return NULL;
+}
+
+
+static inline PyObject *
+pair_list_pop_all(pair_list_t *list, PyObject *key)
+{
+    // Remove every pair matching `key` and return their values as a new
+    // list in insertion order.  Raises KeyError(key) if nothing matched.
+    Py_hash_t hash;
+    Py_ssize_t pos;
+    pair_t *pair;
+    int tmp;
+    PyObject *res = NULL;
+    PyObject *ident = NULL;
+
+    ident = list->calc_identity(key);
+    if (ident == NULL) {
+        goto fail;
+    }
+
+    hash = PyObject_Hash(ident);
+    if (hash == -1) {
+        goto fail;
+    }
+
+    if (list->size == 0) {
+        // BUGFIX(consistency): report the caller's key, not the derived
+        // identity — matching the not-found branch below and pop_one().
+        PyErr_SetObject(PyExc_KeyError, key);
+        goto fail;
+    }
+
+    // Walk backwards so pair_list_del_at() never shifts positions that
+    // are still to be visited.
+    for (pos = list->size - 1; pos >= 0; pos--) {
+        pair = pair_list_get(list, pos);
+        if (hash != pair->hash) {
+            continue;
+        }
+        tmp = str_cmp(ident, pair->identity);
+        if (tmp > 0) {
+            if (res == NULL) {
+                res = PyList_New(1);
+                if (res == NULL) {
+                    goto fail;
+                }
+                // PyList_SetItem steals a reference; compensate right after.
+                if (PyList_SetItem(res, 0, pair->value) < 0) {
+                    goto fail;
+                }
+                Py_INCREF(pair->value);
+            } else if (PyList_Append(res, pair->value) < 0) {
+                goto fail;
+            }
+            if (pair_list_del_at(list, pos) < 0) {
+                goto fail;
+            }
+        }
+        else if (tmp < 0) {
+            goto fail;
+        }
+    }
+
+    if (res == NULL) {
+        PyErr_SetObject(PyExc_KeyError, key);
+    } else if (PyList_Reverse(res) < 0) {
+        // Values were collected back-to-front; restore insertion order.
+        goto fail;
+    }
+    Py_DECREF(ident);
+    return res;
+
+fail:
+    Py_XDECREF(ident);
+    Py_XDECREF(res);
+    return NULL;
+}
+
+
+static inline PyObject *
+pair_list_pop_item(pair_list_t *list)
+{
+ PyObject *ret;
+ pair_t *pair;
+
+ if (list->size == 0) {
+ PyErr_SetString(PyExc_KeyError, "empty multidict");
+ return NULL;
+ }
+
+ pair = pair_list_get(list, 0);
+ ret = PyTuple_Pack(2, pair->key, pair->value);
+ if (ret == NULL) {
+ return NULL;
+ }
+
+ if (pair_list_del_at(list, 0) < 0) {
+ Py_DECREF(ret);
+ return NULL;
+ }
+
+ return ret;
+}
+
+
+static inline int
+pair_list_replace(pair_list_t *list, PyObject * key, PyObject *value)
+{
+ pair_t *pair;
+
+ Py_ssize_t pos;
+ int tmp;
+ int found = 0;
+
+ PyObject *identity = NULL;
+ Py_hash_t hash;
+
+ identity = list->calc_identity(key);
+ if (identity == NULL) {
+ goto fail;
+ }
+
+ hash = PyObject_Hash(identity);
+ if (hash == -1) {
+ goto fail;
+ }
+
+
+ for (pos = 0; pos < list->size; pos++) {
+ pair = pair_list_get(list, pos);
+ if (hash != pair->hash) {
+ continue;
+ }
+ tmp = str_cmp(identity, pair->identity);
+ if (tmp > 0) {
+ found = 1;
+ Py_INCREF(key);
+ Py_DECREF(pair->key);
+ pair->key = key;
+ Py_INCREF(value);
+ Py_DECREF(pair->value);
+ pair->value = value;
+ break;
+ }
+ else if (tmp < 0) {
+ goto fail;
+ }
+ }
+
+ if (!found) {
+ if (_pair_list_add_with_hash(list, identity, key, value, hash) < 0) {
+ goto fail;
+ }
+ Py_DECREF(identity);
+ return 0;
+ }
+ else {
+ list->version = NEXT_VERSION();
+ if (_pair_list_drop_tail(list, identity, hash, pos+1) < 0) {
+ goto fail;
+ }
+ Py_DECREF(identity);
+ return 0;
+ }
+fail:
+ Py_XDECREF(identity);
+ return -1;
+}
+
+
+static inline int
+_dict_set_number(PyObject *dict, PyObject *key, Py_ssize_t num)
+{
+    // Store `num` into `dict[key]` as a Python int.
+    // Returns 0 on success, -1 with an exception set on failure.
+    PyObject *tmp = PyLong_FromSsize_t(num);
+    if (tmp == NULL) {
+        return -1;
+    }
+
+    if (PyDict_SetItem(dict, key, tmp) < 0) {
+        Py_DECREF(tmp);
+        return -1;
+    }
+
+    // BUGFIX: PyDict_SetItem does NOT steal the value reference, so our
+    // own reference must be released on the success path too; the original
+    // leaked one int object per call.
+    Py_DECREF(tmp);
+    return 0;
+}
+
+
+static inline int
+_pair_list_post_update(pair_list_t *list, PyObject* used_keys, Py_ssize_t pos)
+{
+ pair_t *pair;
+ PyObject *tmp;
+ Py_ssize_t num;
+
+ for (; pos < list->size; pos++) {
+ pair = pair_list_get(list, pos);
+ tmp = PyDict_GetItem(used_keys, pair->identity);
+ if (tmp == NULL) {
+ // not found
+ continue;
+ }
+
+ num = PyLong_AsSsize_t(tmp);
+ if (num == -1) {
+ if (!PyErr_Occurred()) {
+ PyErr_SetString(PyExc_RuntimeError, "invalid internal state");
+ }
+ return -1;
+ }
+
+ if (pos >= num) {
+ // del self[pos]
+ if (pair_list_del_at(list, pos) < 0) {
+ return -1;
+ }
+ pos--;
+ }
+ }
+
+ list->version = NEXT_VERSION();
+ return 0;
+}
+
+// TODO: need refactoring function name
+static inline int
+_pair_list_update(pair_list_t *list, PyObject *key,
+ PyObject *value, PyObject *used_keys,
+ PyObject *identity, Py_hash_t hash)
+{
+ PyObject *item = NULL;
+ pair_t *pair = NULL;
+ Py_ssize_t pos;
+ int found;
+ int ident_cmp_res;
+
+ item = PyDict_GetItem(used_keys, identity);
+ if (item == NULL) {
+ pos = 0;
+ }
+ else {
+ pos = PyLong_AsSsize_t(item);
+ if (pos == -1) {
+ if (!PyErr_Occurred()) {
+ PyErr_SetString(PyExc_RuntimeError, "invalid internal state");
+ }
+ return -1;
+ }
+ }
+
+ found = 0;
+ for (; pos < list->size; pos++) {
+ pair = pair_list_get(list, pos);
+ if (pair->hash != hash) {
+ continue;
+ }
+
+ ident_cmp_res = str_cmp(pair->identity, identity);
+ if (ident_cmp_res > 0) {
+ Py_INCREF(key);
+ Py_DECREF(pair->key);
+ pair->key = key;
+
+ Py_INCREF(value);
+ Py_DECREF(pair->value);
+ pair->value = value;
+
+ if (_dict_set_number(used_keys, pair->identity, pos + 1) < 0) {
+ return -1;
+ }
+
+ found = 1;
+ break;
+ }
+ else if (ident_cmp_res < 0) {
+ return -1;
+ }
+ }
+
+ if (!found) {
+ if (_pair_list_add_with_hash(list, identity, key, value, hash) < 0) {
+ return -1;
+ }
+ if (_dict_set_number(used_keys, identity, list->size) < 0) {
+ return -1;
+ }
+ }
+
+ return 0;
+}
+
+
+static inline int
+pair_list_update(pair_list_t *list, pair_list_t *other)
+{
+ PyObject *used_keys = NULL;
+ pair_t *pair = NULL;
+
+ Py_ssize_t pos;
+
+ if (other->size == 0) {
+ return 0;
+ }
+
+ used_keys = PyDict_New();
+ if (used_keys == NULL) {
+ return -1;
+ }
+
+ for (pos = 0; pos < other->size; pos++) {
+ pair = pair_list_get(other, pos);
+ if (_pair_list_update(list, pair->key, pair->value, used_keys,
+ pair->identity, pair->hash) < 0) {
+ goto fail;
+ }
+ }
+
+ if (_pair_list_post_update(list, used_keys, 0) < 0) {
+ goto fail;
+ }
+
+ Py_DECREF(used_keys);
+ return 0;
+
+fail:
+ Py_XDECREF(used_keys);
+ return -1;
+}
+
+
+static inline int
+pair_list_update_from_seq(pair_list_t *list, PyObject *seq)
+{
+ PyObject *it = NULL; // iter(seq)
+ PyObject *fast = NULL; // item as a 2-tuple or 2-list
+ PyObject *item = NULL; // seq[i]
+ PyObject *used_keys = NULL; // dict(<Identitty: Pos>)
+
+ PyObject *key = NULL;
+ PyObject *value = NULL;
+ PyObject *identity = NULL;
+
+ Py_hash_t hash;
+
+ Py_ssize_t i;
+ Py_ssize_t n;
+
+ it = PyObject_GetIter(seq);
+ if (it == NULL) {
+ return -1;
+ }
+
+ used_keys = PyDict_New();
+ if (used_keys == NULL) {
+ goto fail_1;
+ }
+
+ for (i = 0; ; ++i) { // i - index into seq of current element
+ fast = NULL;
+ item = PyIter_Next(it);
+ if (item == NULL) {
+ if (PyErr_Occurred()) {
+ goto fail_1;
+ }
+ break;
+ }
+
+ // Convert item to sequence, and verify length 2.
+ fast = PySequence_Fast(item, "");
+ if (fast == NULL) {
+ if (PyErr_ExceptionMatches(PyExc_TypeError)) {
+ PyErr_Format(PyExc_TypeError,
+ "multidict cannot convert sequence element #%zd"
+ " to a sequence",
+ i);
+ }
+ goto fail_1;
+ }
+
+ n = PySequence_Fast_GET_SIZE(fast);
+ if (n != 2) {
+ PyErr_Format(PyExc_ValueError,
+ "multidict update sequence element #%zd "
+ "has length %zd; 2 is required",
+ i, n);
+ goto fail_1;
+ }
+
+ key = PySequence_Fast_GET_ITEM(fast, 0);
+ value = PySequence_Fast_GET_ITEM(fast, 1);
+ Py_INCREF(key);
+ Py_INCREF(value);
+
+ identity = list->calc_identity(key);
+ if (identity == NULL) {
+ goto fail_1;
+ }
+
+ hash = PyObject_Hash(identity);
+ if (hash == -1) {
+ goto fail_1;
+ }
+
+ if (_pair_list_update(list, key, value, used_keys, identity, hash) < 0) {
+ goto fail_1;
+ }
+
+ Py_DECREF(key);
+ Py_DECREF(value);
+ Py_DECREF(fast);
+ Py_DECREF(item);
+ Py_DECREF(identity);
+ }
+
+ if (_pair_list_post_update(list, used_keys, 0) < 0) {
+ goto fail_2;
+ }
+
+ Py_DECREF(it);
+ Py_DECREF(used_keys);
+ return 0;
+
+fail_1:
+ Py_XDECREF(key);
+ Py_XDECREF(value);
+ Py_XDECREF(fast);
+ Py_XDECREF(item);
+ Py_XDECREF(identity);
+
+fail_2:
+ Py_XDECREF(it);
+ Py_XDECREF(used_keys);
+ return -1;
+}
+
+static inline int
+pair_list_eq_to_mapping(pair_list_t *list, PyObject *other)
+{
+ PyObject *key = NULL;
+ PyObject *avalue = NULL;
+ PyObject *bvalue;
+
+ Py_ssize_t pos, other_len;
+
+ int eq;
+
+ if (!PyMapping_Check(other)) {
+ PyErr_Format(PyExc_TypeError,
+ "other argument must be a mapping, not %s",
+ Py_TYPE(other)->tp_name);
+ return -1;
+ }
+
+ other_len = PyMapping_Size(other);
+ if (other_len < 0) {
+ return -1;
+ }
+ if (pair_list_len(list) != other_len) {
+ return 0;
+ }
+
+ pos = 0;
+ while (pair_list_next(list, &pos, NULL, &key, &avalue)) {
+ bvalue = PyObject_GetItem(other, key);
+ if (bvalue == NULL) {
+ if (PyErr_ExceptionMatches(PyExc_KeyError)) {
+ PyErr_Clear();
+ return 0;
+ }
+ return -1;
+ }
+
+ eq = PyObject_RichCompareBool(avalue, bvalue, Py_EQ);
+ Py_DECREF(bvalue);
+
+ if (eq <= 0) {
+ return eq;
+ }
+ }
+
+ return 1;
+}
+
+
+/***********************************************************************/
+
+static inline int
+pair_list_traverse(pair_list_t *list, visitproc visit, void *arg)
+{
+ pair_t *pair = NULL;
+ Py_ssize_t pos;
+
+ for (pos = 0; pos < list->size; pos++) {
+ pair = pair_list_get(list, pos);
+ // Don't need traverse the identity: it is a terminal
+ Py_VISIT(pair->key);
+ Py_VISIT(pair->value);
+ }
+
+ return 0;
+}
+
+
+static inline int
+pair_list_clear(pair_list_t *list)
+{
+ pair_t *pair = NULL;
+ Py_ssize_t pos;
+
+ if (list->size == 0) {
+ return 0;
+ }
+
+ list->version = NEXT_VERSION();
+ for (pos = 0; pos < list->size; pos++) {
+ pair = pair_list_get(list, pos);
+ Py_CLEAR(pair->key);
+ Py_CLEAR(pair->identity);
+ Py_CLEAR(pair->value);
+ }
+ list->size = 0;
+ if (list->pairs != list->buffer) {
+ PyMem_Del(list->pairs);
+ list->pairs = list->buffer;
+ }
+
+ return 0;
+}
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/contrib/python/multidict/multidict/_multilib/views.h b/contrib/python/multidict/multidict/_multilib/views.h
new file mode 100644
index 0000000000..5b1ebfe77c
--- /dev/null
+++ b/contrib/python/multidict/multidict/_multilib/views.h
@@ -0,0 +1,464 @@
+#ifndef _MULTIDICT_VIEWS_H
+#define _MULTIDICT_VIEWS_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+static PyTypeObject multidict_itemsview_type;
+static PyTypeObject multidict_valuesview_type;
+static PyTypeObject multidict_keysview_type;
+
+static PyObject *viewbaseset_richcmp_func;
+static PyObject *viewbaseset_and_func;
+static PyObject *viewbaseset_or_func;
+static PyObject *viewbaseset_sub_func;
+static PyObject *viewbaseset_xor_func;
+
+static PyObject *abc_itemsview_register_func;
+static PyObject *abc_keysview_register_func;
+static PyObject *abc_valuesview_register_func;
+
+static PyObject *itemsview_isdisjoint_func;
+static PyObject *itemsview_repr_func;
+
+static PyObject *keysview_repr_func;
+static PyObject *keysview_isdisjoint_func;
+
+static PyObject *valuesview_repr_func;
+
+typedef struct {
+ PyObject_HEAD
+ PyObject *md;
+} _Multidict_ViewObject;
+
+
+/********** Base **********/
+
+static inline void
+_init_view(_Multidict_ViewObject *self, PyObject *md)
+{
+ Py_INCREF(md);
+ self->md = md;
+}
+
+static inline void
+multidict_view_dealloc(_Multidict_ViewObject *self)
+{
+ PyObject_GC_UnTrack(self);
+ Py_XDECREF(self->md);
+ PyObject_GC_Del(self);
+}
+
+static inline int
+multidict_view_traverse(_Multidict_ViewObject *self, visitproc visit, void *arg)
+{
+ Py_VISIT(self->md);
+ return 0;
+}
+
+static inline int
+multidict_view_clear(_Multidict_ViewObject *self)
+{
+ Py_CLEAR(self->md);
+ return 0;
+}
+
+static inline Py_ssize_t
+multidict_view_len(_Multidict_ViewObject *self)
+{
+ return pair_list_len(&((MultiDictObject*)self->md)->pairs);
+}
+
+static inline PyObject *
+multidict_view_richcompare(PyObject *self, PyObject *other, int op)
+{
+    /* Delegate rich comparison to the pure-python helper,
+       passing the comparison opcode boxed as a Python int. */
+    PyObject *result = NULL;
+    PyObject *opcode = PyLong_FromLong(op);
+    if (opcode != NULL) {
+        result = PyObject_CallFunctionObjArgs(
+            viewbaseset_richcmp_func, self, other, opcode, NULL);
+        Py_DECREF(opcode);
+    }
+    return result;
+}
+
+static inline PyObject *
+multidict_view_and(PyObject *self, PyObject *other)
+{
+ return PyObject_CallFunctionObjArgs(
+ viewbaseset_and_func, self, other, NULL);
+}
+
+static inline PyObject *
+multidict_view_or(PyObject *self, PyObject *other)
+{
+ return PyObject_CallFunctionObjArgs(
+ viewbaseset_or_func, self, other, NULL);
+}
+
+static inline PyObject *
+multidict_view_sub(PyObject *self, PyObject *other)
+{
+ return PyObject_CallFunctionObjArgs(
+ viewbaseset_sub_func, self, other, NULL);
+}
+
+static inline PyObject *
+multidict_view_xor(PyObject *self, PyObject *other)
+{
+ return PyObject_CallFunctionObjArgs(
+ viewbaseset_xor_func, self, other, NULL);
+}
+
+static PyNumberMethods multidict_view_as_number = {
+ .nb_subtract = (binaryfunc)multidict_view_sub,
+ .nb_and = (binaryfunc)multidict_view_and,
+ .nb_xor = (binaryfunc)multidict_view_xor,
+ .nb_or = (binaryfunc)multidict_view_or,
+};
+
+/********** Items **********/
+
+static inline PyObject *
+multidict_itemsview_new(PyObject *md)
+{
+ _Multidict_ViewObject *mv = PyObject_GC_New(
+ _Multidict_ViewObject, &multidict_itemsview_type);
+ if (mv == NULL) {
+ return NULL;
+ }
+
+ _init_view(mv, md);
+
+ PyObject_GC_Track(mv);
+ return (PyObject *)mv;
+}
+
+static inline PyObject *
+multidict_itemsview_iter(_Multidict_ViewObject *self)
+{
+ return multidict_items_iter_new((MultiDictObject*)self->md);
+}
+
+static inline PyObject *
+multidict_itemsview_repr(_Multidict_ViewObject *self)
+{
+ return PyObject_CallFunctionObjArgs(
+ itemsview_repr_func, self, NULL);
+}
+
+static inline PyObject *
+multidict_itemsview_isdisjoint(_Multidict_ViewObject *self, PyObject *other)
+{
+ return PyObject_CallFunctionObjArgs(
+ itemsview_isdisjoint_func, self, other, NULL);
+}
+
+PyDoc_STRVAR(itemsview_isdisjoint_doc,
+ "Return True if two sets have a null intersection.");
+
+static PyMethodDef multidict_itemsview_methods[] = {
+ {
+ "isdisjoint",
+ (PyCFunction)multidict_itemsview_isdisjoint,
+ METH_O,
+ itemsview_isdisjoint_doc
+ },
+ {
+ NULL,
+ NULL
+ } /* sentinel */
+};
+
+static inline int
+multidict_itemsview_contains(_Multidict_ViewObject *self, PyObject *obj)
+{
+    // sq_contains implementation for `(key, value) in items_view`.
+    // Returns 1/0, or -1 with an exception set on error.
+    PyObject *akey = NULL,
+             *aval = NULL,
+             *bkey = NULL,
+             *bval = NULL,
+             *iter = NULL,
+             *item = NULL;
+    int ret1, ret2;
+
+    if (!PyTuple_Check(obj) || PyTuple_GET_SIZE(obj) != 2) {
+        // Anything that is not a 2-tuple can never be an item.
+        return 0;
+    }
+
+    bkey = PyTuple_GET_ITEM(obj, 0);
+    bval = PyTuple_GET_ITEM(obj, 1);
+
+    iter = multidict_itemsview_iter(self);
+    if (iter == NULL) {
+        // BUGFIX: iterator creation failed with an exception set; the
+        // original returned 0, silently swallowing the error.  The
+        // sq_contains contract requires -1 here.
+        return -1;
+    }
+
+    while ((item = PyIter_Next(iter)) != NULL) {
+        // Items are 2-tuples produced by our own iterator; borrowed refs.
+        akey = PyTuple_GET_ITEM(item, 0);
+        aval = PyTuple_GET_ITEM(item, 1);
+
+        ret1 = PyObject_RichCompareBool(akey, bkey, Py_EQ);
+        if (ret1 < 0) {
+            Py_DECREF(iter);
+            Py_DECREF(item);
+            return -1;
+        }
+        ret2 = PyObject_RichCompareBool(aval, bval, Py_EQ);
+        if (ret2 < 0) {
+            Py_DECREF(iter);
+            Py_DECREF(item);
+            return -1;
+        }
+        if (ret1 > 0 && ret2 > 0)
+        {
+            Py_DECREF(iter);
+            Py_DECREF(item);
+            return 1;
+        }
+
+        Py_DECREF(item);
+    }
+
+    Py_DECREF(iter);
+
+    if (PyErr_Occurred()) {
+        // PyIter_Next returned NULL because of an error, not exhaustion.
+        return -1;
+    }
+
+    return 0;
+}
+
+static PySequenceMethods multidict_itemsview_as_sequence = {
+ .sq_length = (lenfunc)multidict_view_len,
+ .sq_contains = (objobjproc)multidict_itemsview_contains,
+};
+
+static PyTypeObject multidict_itemsview_type = {
+ PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0)
+ "multidict._multidict._ItemsView", /* tp_name */
+ sizeof(_Multidict_ViewObject), /* tp_basicsize */
+ .tp_dealloc = (destructor)multidict_view_dealloc,
+ .tp_repr = (reprfunc)multidict_itemsview_repr,
+ .tp_as_number = &multidict_view_as_number,
+ .tp_as_sequence = &multidict_itemsview_as_sequence,
+ .tp_getattro = PyObject_GenericGetAttr,
+ .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,
+ .tp_traverse = (traverseproc)multidict_view_traverse,
+ .tp_clear = (inquiry)multidict_view_clear,
+ .tp_richcompare = multidict_view_richcompare,
+ .tp_iter = (getiterfunc)multidict_itemsview_iter,
+ .tp_methods = multidict_itemsview_methods,
+};
+
+
+/********** Keys **********/
+
+static inline PyObject *
+multidict_keysview_new(PyObject *md)
+{
+ _Multidict_ViewObject *mv = PyObject_GC_New(
+ _Multidict_ViewObject, &multidict_keysview_type);
+ if (mv == NULL) {
+ return NULL;
+ }
+
+ _init_view(mv, md);
+
+ PyObject_GC_Track(mv);
+ return (PyObject *)mv;
+}
+
+static inline PyObject *
+multidict_keysview_iter(_Multidict_ViewObject *self)
+{
+ return multidict_keys_iter_new(((MultiDictObject*)self->md));
+}
+
+static inline PyObject *
+multidict_keysview_repr(_Multidict_ViewObject *self)
+{
+ return PyObject_CallFunctionObjArgs(
+ keysview_repr_func, self, NULL);
+}
+
+static inline PyObject *
+multidict_keysview_isdisjoint(_Multidict_ViewObject *self, PyObject *other)
+{
+ return PyObject_CallFunctionObjArgs(
+ keysview_isdisjoint_func, self, other, NULL);
+}
+
+PyDoc_STRVAR(keysview_isdisjoint_doc,
+ "Return True if two sets have a null intersection.");
+
+static PyMethodDef multidict_keysview_methods[] = {
+ {
+ "isdisjoint",
+ (PyCFunction)multidict_keysview_isdisjoint,
+ METH_O,
+ keysview_isdisjoint_doc
+ },
+ {
+ NULL,
+ NULL
+ } /* sentinel */
+};
+
+static inline int
+multidict_keysview_contains(_Multidict_ViewObject *self, PyObject *key)
+{
+ return pair_list_contains(&((MultiDictObject*)self->md)->pairs, key);
+}
+
+static PySequenceMethods multidict_keysview_as_sequence = {
+ .sq_length = (lenfunc)multidict_view_len,
+ .sq_contains = (objobjproc)multidict_keysview_contains,
+};
+
+static PyTypeObject multidict_keysview_type = {
+ PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0)
+ "multidict._multidict._KeysView", /* tp_name */
+ sizeof(_Multidict_ViewObject), /* tp_basicsize */
+ .tp_dealloc = (destructor)multidict_view_dealloc,
+ .tp_repr = (reprfunc)multidict_keysview_repr,
+ .tp_as_number = &multidict_view_as_number,
+ .tp_as_sequence = &multidict_keysview_as_sequence,
+ .tp_getattro = PyObject_GenericGetAttr,
+ .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,
+ .tp_traverse = (traverseproc)multidict_view_traverse,
+ .tp_clear = (inquiry)multidict_view_clear,
+ .tp_richcompare = multidict_view_richcompare,
+ .tp_iter = (getiterfunc)multidict_keysview_iter,
+ .tp_methods = multidict_keysview_methods,
+};
+
+
+/********** Values **********/
+
+static inline PyObject *
+multidict_valuesview_new(PyObject *md)
+{
+ _Multidict_ViewObject *mv = PyObject_GC_New(
+ _Multidict_ViewObject, &multidict_valuesview_type);
+ if (mv == NULL) {
+ return NULL;
+ }
+
+ _init_view(mv, md);
+
+ PyObject_GC_Track(mv);
+ return (PyObject *)mv;
+}
+
+static inline PyObject *
+multidict_valuesview_iter(_Multidict_ViewObject *self)
+{
+ return multidict_values_iter_new(((MultiDictObject*)self->md));
+}
+
+static inline PyObject *
+multidict_valuesview_repr(_Multidict_ViewObject *self)
+{
+ return PyObject_CallFunctionObjArgs(
+ valuesview_repr_func, self, NULL);
+}
+
+static PySequenceMethods multidict_valuesview_as_sequence = {
+ .sq_length = (lenfunc)multidict_view_len,
+};
+
+static PyTypeObject multidict_valuesview_type = {
+ PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0)
+ "multidict._multidict._ValuesView", /* tp_name */
+ sizeof(_Multidict_ViewObject), /* tp_basicsize */
+ .tp_dealloc = (destructor)multidict_view_dealloc,
+ .tp_repr = (reprfunc)multidict_valuesview_repr,
+ .tp_as_sequence = &multidict_valuesview_as_sequence,
+ .tp_getattro = PyObject_GenericGetAttr,
+ .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,
+ .tp_traverse = (traverseproc)multidict_view_traverse,
+ .tp_clear = (inquiry)multidict_view_clear,
+ .tp_iter = (getiterfunc)multidict_valuesview_iter,
+};
+
+
+static inline int
+multidict_views_init()
+{
+    // Load helper callables from multidict._multidict_base, finalize the
+    // three view types, and register them with their collections.abc ABCs.
+    // Returns 0 on success, -1 with an exception set on failure.
+    PyObject *reg_func_call_result = NULL;
+    PyObject *module = PyImport_ImportModule("multidict._multidict_base");
+    if (module == NULL) {
+        goto fail;
+    }
+
+#define GET_MOD_ATTR(VAR, NAME)                 \
+    VAR = PyObject_GetAttrString(module, NAME); \
+    if (VAR == NULL) {                          \
+        goto fail;                              \
+    }
+
+    GET_MOD_ATTR(viewbaseset_richcmp_func, "_viewbaseset_richcmp");
+    GET_MOD_ATTR(viewbaseset_and_func, "_viewbaseset_and");
+    GET_MOD_ATTR(viewbaseset_or_func, "_viewbaseset_or");
+    GET_MOD_ATTR(viewbaseset_sub_func, "_viewbaseset_sub");
+    GET_MOD_ATTR(viewbaseset_xor_func, "_viewbaseset_xor");
+
+    GET_MOD_ATTR(abc_itemsview_register_func, "_abc_itemsview_register");
+    GET_MOD_ATTR(abc_keysview_register_func, "_abc_keysview_register");
+    GET_MOD_ATTR(abc_valuesview_register_func, "_abc_valuesview_register");
+
+    // BUGFIX: the original loaded "_itemsview_isdisjoint" into
+    // itemsview_repr_func (immediately overwritten by the next line),
+    // leaving itemsview_isdisjoint_func NULL — a crash/misbehavior when
+    // _ItemsView.isdisjoint() is called.
+    GET_MOD_ATTR(itemsview_isdisjoint_func, "_itemsview_isdisjoint");
+    GET_MOD_ATTR(itemsview_repr_func, "_itemsview_repr");
+
+    GET_MOD_ATTR(keysview_repr_func, "_keysview_repr");
+    GET_MOD_ATTR(keysview_isdisjoint_func, "_keysview_isdisjoint");
+
+    GET_MOD_ATTR(valuesview_repr_func, "_valuesview_repr");
+
+    if (PyType_Ready(&multidict_itemsview_type) < 0 ||
+        PyType_Ready(&multidict_valuesview_type) < 0 ||
+        PyType_Ready(&multidict_keysview_type) < 0)
+    {
+        goto fail;
+    }
+
+    // abc.ItemsView.register(_ItemsView)
+    reg_func_call_result = PyObject_CallFunctionObjArgs(
+        abc_itemsview_register_func, (PyObject*)&multidict_itemsview_type, NULL);
+    if (reg_func_call_result == NULL) {
+        goto fail;
+    }
+    Py_DECREF(reg_func_call_result);
+
+    // abc.KeysView.register(_KeysView)
+    reg_func_call_result = PyObject_CallFunctionObjArgs(
+        abc_keysview_register_func, (PyObject*)&multidict_keysview_type, NULL);
+    if (reg_func_call_result == NULL) {
+        goto fail;
+    }
+    Py_DECREF(reg_func_call_result);
+
+    // abc.ValuesView.register(_ValuesView)
+    reg_func_call_result = PyObject_CallFunctionObjArgs(
+        abc_valuesview_register_func, (PyObject*)&multidict_valuesview_type, NULL);
+    if (reg_func_call_result == NULL) {
+        goto fail;
+    }
+    Py_DECREF(reg_func_call_result);
+
+    Py_DECREF(module);
+    return 0;
+
+fail:
+    Py_CLEAR(module);
+    return -1;
+
+#undef GET_MOD_ATTR
+}
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/contrib/python/multidict/multidict/py.typed b/contrib/python/multidict/multidict/py.typed
new file mode 100644
index 0000000000..dfe8cc048e
--- /dev/null
+++ b/contrib/python/multidict/multidict/py.typed
@@ -0,0 +1 @@
+PEP-561 marker. \ No newline at end of file
diff --git a/contrib/python/multidict/tests/__init__.py b/contrib/python/multidict/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/multidict/tests/__init__.py
diff --git a/contrib/python/multidict/tests/cimultidict.pickle.0 b/contrib/python/multidict/tests/cimultidict.pickle.0
new file mode 100644
index 0000000000..7b2ed00845
--- /dev/null
+++ b/contrib/python/multidict/tests/cimultidict.pickle.0
@@ -0,0 +1,14 @@
+cmultidict._multidict
+CIMultiDict
+p0
+((lp1
+(Va
+p2
+L1L
+tp3
+a(g2
+L2L
+tp4
+atp5
+Rp6
+. \ No newline at end of file
diff --git a/contrib/python/multidict/tests/cimultidict.pickle.1 b/contrib/python/multidict/tests/cimultidict.pickle.1
new file mode 100644
index 0000000000..225458ba29
--- /dev/null
+++ b/contrib/python/multidict/tests/cimultidict.pickle.1
Binary files differ
diff --git a/contrib/python/multidict/tests/cimultidict.pickle.2 b/contrib/python/multidict/tests/cimultidict.pickle.2
new file mode 100644
index 0000000000..d33600e615
--- /dev/null
+++ b/contrib/python/multidict/tests/cimultidict.pickle.2
Binary files differ
diff --git a/contrib/python/multidict/tests/cimultidict.pickle.3 b/contrib/python/multidict/tests/cimultidict.pickle.3
new file mode 100644
index 0000000000..cbb8624db0
--- /dev/null
+++ b/contrib/python/multidict/tests/cimultidict.pickle.3
Binary files differ
diff --git a/contrib/python/multidict/tests/cimultidict.pickle.4 b/contrib/python/multidict/tests/cimultidict.pickle.4
new file mode 100644
index 0000000000..1f5164ca37
--- /dev/null
+++ b/contrib/python/multidict/tests/cimultidict.pickle.4
Binary files differ
diff --git a/contrib/python/multidict/tests/cimultidict.pickle.5 b/contrib/python/multidict/tests/cimultidict.pickle.5
new file mode 100644
index 0000000000..11bf552c43
--- /dev/null
+++ b/contrib/python/multidict/tests/cimultidict.pickle.5
Binary files differ
diff --git a/contrib/python/multidict/tests/conftest.py b/contrib/python/multidict/tests/conftest.py
new file mode 100644
index 0000000000..bd3b4de027
--- /dev/null
+++ b/contrib/python/multidict/tests/conftest.py
@@ -0,0 +1,29 @@
+import pickle
+
+import pytest
+
+from multidict._compat import USE_EXTENSIONS
+
+OPTIONAL_CYTHON = (
+ ()
+ if USE_EXTENSIONS
+ else pytest.mark.skip(reason="No extensions available")
+)
+
+
+@pytest.fixture( # type: ignore[call-overload]
+ scope="session",
+ params=[
+ pytest.param("multidict._multidict", marks=OPTIONAL_CYTHON), # type: ignore
+ "multidict._multidict_py",
+ ],
+)
+def _multidict(request):
+ return pytest.importorskip(request.param)
+
+
+def pytest_generate_tests(metafunc):
+ if "pickle_protocol" in metafunc.fixturenames:
+ metafunc.parametrize(
+ "pickle_protocol", list(range(pickle.HIGHEST_PROTOCOL + 1)), scope="session"
+ )
diff --git a/contrib/python/multidict/tests/gen_pickles.py b/contrib/python/multidict/tests/gen_pickles.py
new file mode 100644
index 0000000000..028a01f8cc
--- /dev/null
+++ b/contrib/python/multidict/tests/gen_pickles.py
@@ -0,0 +1,32 @@
+import pickle
+
+from multidict._compat import USE_EXTENSIONS
+from multidict._multidict_py import CIMultiDict as PyCIMultiDict # noqa
+from multidict._multidict_py import MultiDict as PyMultiDict # noqa
+
+try:
+ from multidict._multidict import ( # type: ignore # noqa
+ CIMultiDict,
+ MultiDict,
+ )
+except ImportError:
+ pass
+
+
+def write(name, proto):
+ cls = globals()[name]
+ d = cls([("a", 1), ("a", 2)])
+ with open("{}.pickle.{}".format(name.lower(), proto), "wb") as f:
+ pickle.dump(d, f, proto)
+
+
+def generate():
+ if not USE_EXTENSIONS:
+ raise RuntimeError("C Extension is required")
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ for name in ("MultiDict", "CIMultiDict", "PyMultiDict", "PyCIMultiDict"):
+ write(name, proto)
+
+
+if __name__ == "__main__":
+ generate()
diff --git a/contrib/python/multidict/tests/multidict.pickle.0 b/contrib/python/multidict/tests/multidict.pickle.0
new file mode 100644
index 0000000000..eb979fcf72
--- /dev/null
+++ b/contrib/python/multidict/tests/multidict.pickle.0
@@ -0,0 +1,14 @@
+cmultidict._multidict
+MultiDict
+p0
+((lp1
+(Va
+p2
+L1L
+tp3
+a(g2
+L2L
+tp4
+atp5
+Rp6
+. \ No newline at end of file
diff --git a/contrib/python/multidict/tests/multidict.pickle.1 b/contrib/python/multidict/tests/multidict.pickle.1
new file mode 100644
index 0000000000..a4f211d7b1
--- /dev/null
+++ b/contrib/python/multidict/tests/multidict.pickle.1
Binary files differ
diff --git a/contrib/python/multidict/tests/multidict.pickle.2 b/contrib/python/multidict/tests/multidict.pickle.2
new file mode 100644
index 0000000000..b4563f879d
--- /dev/null
+++ b/contrib/python/multidict/tests/multidict.pickle.2
Binary files differ
diff --git a/contrib/python/multidict/tests/multidict.pickle.3 b/contrib/python/multidict/tests/multidict.pickle.3
new file mode 100644
index 0000000000..415960a3ee
--- /dev/null
+++ b/contrib/python/multidict/tests/multidict.pickle.3
Binary files differ
diff --git a/contrib/python/multidict/tests/multidict.pickle.4 b/contrib/python/multidict/tests/multidict.pickle.4
new file mode 100644
index 0000000000..00ef17c3f7
--- /dev/null
+++ b/contrib/python/multidict/tests/multidict.pickle.4
Binary files differ
diff --git a/contrib/python/multidict/tests/multidict.pickle.5 b/contrib/python/multidict/tests/multidict.pickle.5
new file mode 100644
index 0000000000..2c4ae0a0d3
--- /dev/null
+++ b/contrib/python/multidict/tests/multidict.pickle.5
Binary files differ
diff --git a/contrib/python/multidict/tests/pycimultidict.pickle.0 b/contrib/python/multidict/tests/pycimultidict.pickle.0
new file mode 100644
index 0000000000..bd39b6db20
--- /dev/null
+++ b/contrib/python/multidict/tests/pycimultidict.pickle.0
@@ -0,0 +1,14 @@
+cmultidict._multidict_py
+CIMultiDict
+p0
+((lp1
+(Va
+p2
+L1L
+tp3
+a(g2
+L2L
+tp4
+atp5
+Rp6
+. \ No newline at end of file
diff --git a/contrib/python/multidict/tests/pycimultidict.pickle.1 b/contrib/python/multidict/tests/pycimultidict.pickle.1
new file mode 100644
index 0000000000..866003d26c
--- /dev/null
+++ b/contrib/python/multidict/tests/pycimultidict.pickle.1
Binary files differ
diff --git a/contrib/python/multidict/tests/pycimultidict.pickle.2 b/contrib/python/multidict/tests/pycimultidict.pickle.2
new file mode 100644
index 0000000000..c9e43fef9c
--- /dev/null
+++ b/contrib/python/multidict/tests/pycimultidict.pickle.2
Binary files differ
diff --git a/contrib/python/multidict/tests/pycimultidict.pickle.3 b/contrib/python/multidict/tests/pycimultidict.pickle.3
new file mode 100644
index 0000000000..821659fe0c
--- /dev/null
+++ b/contrib/python/multidict/tests/pycimultidict.pickle.3
Binary files differ
diff --git a/contrib/python/multidict/tests/pycimultidict.pickle.4 b/contrib/python/multidict/tests/pycimultidict.pickle.4
new file mode 100644
index 0000000000..a17c6e9b73
--- /dev/null
+++ b/contrib/python/multidict/tests/pycimultidict.pickle.4
Binary files differ
diff --git a/contrib/python/multidict/tests/pycimultidict.pickle.5 b/contrib/python/multidict/tests/pycimultidict.pickle.5
new file mode 100644
index 0000000000..479bfe3a5d
--- /dev/null
+++ b/contrib/python/multidict/tests/pycimultidict.pickle.5
Binary files differ
diff --git a/contrib/python/multidict/tests/pymultidict.pickle.0 b/contrib/python/multidict/tests/pymultidict.pickle.0
new file mode 100644
index 0000000000..e91023ecf9
--- /dev/null
+++ b/contrib/python/multidict/tests/pymultidict.pickle.0
@@ -0,0 +1,14 @@
+cmultidict._multidict_py
+MultiDict
+p0
+((lp1
+(Va
+p2
+L1L
+tp3
+a(g2
+L2L
+tp4
+atp5
+Rp6
+. \ No newline at end of file
diff --git a/contrib/python/multidict/tests/pymultidict.pickle.1 b/contrib/python/multidict/tests/pymultidict.pickle.1
new file mode 100644
index 0000000000..acce9bf793
--- /dev/null
+++ b/contrib/python/multidict/tests/pymultidict.pickle.1
Binary files differ
diff --git a/contrib/python/multidict/tests/pymultidict.pickle.2 b/contrib/python/multidict/tests/pymultidict.pickle.2
new file mode 100644
index 0000000000..900446ad8d
--- /dev/null
+++ b/contrib/python/multidict/tests/pymultidict.pickle.2
Binary files differ
diff --git a/contrib/python/multidict/tests/pymultidict.pickle.3 b/contrib/python/multidict/tests/pymultidict.pickle.3
new file mode 100644
index 0000000000..9b9073515a
--- /dev/null
+++ b/contrib/python/multidict/tests/pymultidict.pickle.3
Binary files differ
diff --git a/contrib/python/multidict/tests/pymultidict.pickle.4 b/contrib/python/multidict/tests/pymultidict.pickle.4
new file mode 100644
index 0000000000..db363f8d3c
--- /dev/null
+++ b/contrib/python/multidict/tests/pymultidict.pickle.4
Binary files differ
diff --git a/contrib/python/multidict/tests/pymultidict.pickle.5 b/contrib/python/multidict/tests/pymultidict.pickle.5
new file mode 100644
index 0000000000..7dc772d58f
--- /dev/null
+++ b/contrib/python/multidict/tests/pymultidict.pickle.5
Binary files differ
diff --git a/contrib/python/multidict/tests/test_abc.py b/contrib/python/multidict/tests/test_abc.py
new file mode 100644
index 0000000000..4636b3bc1a
--- /dev/null
+++ b/contrib/python/multidict/tests/test_abc.py
@@ -0,0 +1,132 @@
+from collections.abc import Mapping, MutableMapping
+
+import pytest
+
+from multidict import MultiMapping, MutableMultiMapping
+from multidict._compat import USE_EXTENSIONS
+from multidict._multidict_py import CIMultiDict as PyCIMultiDict
+from multidict._multidict_py import CIMultiDictProxy as PyCIMultiDictProxy
+from multidict._multidict_py import MultiDict as PyMultiDict # noqa: E402
+from multidict._multidict_py import MultiDictProxy as PyMultiDictProxy
+
+if USE_EXTENSIONS:
+ from multidict._multidict import ( # type: ignore
+ CIMultiDict,
+ CIMultiDictProxy,
+ MultiDict,
+ MultiDictProxy,
+ )
+
+
+@pytest.fixture(
+ params=([MultiDict, CIMultiDict] if USE_EXTENSIONS else [])
+ + [PyMultiDict, PyCIMultiDict],
+ ids=(["MultiDict", "CIMultiDict"] if USE_EXTENSIONS else [])
+ + ["PyMultiDict", "PyCIMultiDict"],
+)
+def cls(request):
+ return request.param
+
+
+@pytest.fixture(
+ params=(
+ [(MultiDictProxy, MultiDict), (CIMultiDictProxy, CIMultiDict)]
+ if USE_EXTENSIONS
+ else []
+ )
+ + [(PyMultiDictProxy, PyMultiDict), (PyCIMultiDictProxy, PyCIMultiDict)],
+ ids=(["MultiDictProxy", "CIMultiDictProxy"] if USE_EXTENSIONS else [])
+ + ["PyMultiDictProxy", "PyCIMultiDictProxy"],
+)
+def proxy_classes(request):
+ return request.param
+
+
+def test_abc_inheritance():
+ assert issubclass(MultiMapping, Mapping)
+ assert not issubclass(MultiMapping, MutableMapping)
+ assert issubclass(MutableMultiMapping, Mapping)
+ assert issubclass(MutableMultiMapping, MutableMapping)
+
+
+class A(MultiMapping):
+ def __getitem__(self, key):
+ pass
+
+ def __iter__(self):
+ pass
+
+ def __len__(self):
+ pass
+
+ def getall(self, key, default=None):
+ super().getall(key, default)
+
+ def getone(self, key, default=None):
+ super().getone(key, default)
+
+
+def test_abc_getall():
+ with pytest.raises(KeyError):
+ A().getall("key")
+
+
+def test_abc_getone():
+ with pytest.raises(KeyError):
+ A().getone("key")
+
+
+class B(A, MutableMultiMapping):
+ def __setitem__(self, key, value):
+ pass
+
+ def __delitem__(self, key):
+ pass
+
+ def add(self, key, value):
+ super().add(key, value)
+
+ def extend(self, *args, **kwargs):
+ super().extend(*args, **kwargs)
+
+ def popall(self, key, default=None):
+ super().popall(key, default)
+
+ def popone(self, key, default=None):
+ super().popone(key, default)
+
+
+def test_abc_add():
+ with pytest.raises(NotImplementedError):
+ B().add("key", "val")
+
+
+def test_abc_extend():
+ with pytest.raises(NotImplementedError):
+ B().extend()
+
+
+def test_abc_popone():
+ with pytest.raises(KeyError):
+ B().popone("key")
+
+
+def test_abc_popall():
+ with pytest.raises(KeyError):
+ B().popall("key")
+
+
+def test_multidict_inheritance(cls):
+ assert issubclass(cls, MultiMapping)
+ assert issubclass(cls, MutableMultiMapping)
+
+
+def test_proxy_inheritance(proxy_classes):
+ proxy, _ = proxy_classes
+ assert issubclass(proxy, MultiMapping)
+ assert not issubclass(proxy, MutableMultiMapping)
+
+
+def test_generic_type_in_runtime():
+ MultiMapping[str]
+ MutableMultiMapping[str]
diff --git a/contrib/python/multidict/tests/test_copy.py b/contrib/python/multidict/tests/test_copy.py
new file mode 100644
index 0000000000..564cdde597
--- /dev/null
+++ b/contrib/python/multidict/tests/test_copy.py
@@ -0,0 +1,79 @@
+import copy
+
+import pytest
+
+from multidict._compat import USE_EXTENSIONS
+from multidict._multidict_py import CIMultiDict as PyCIMultiDict
+from multidict._multidict_py import CIMultiDictProxy as PyCIMultiDictProxy
+from multidict._multidict_py import MultiDict as PyMultiDict # noqa: E402
+from multidict._multidict_py import MultiDictProxy as PyMultiDictProxy
+
+if USE_EXTENSIONS:
+ from multidict._multidict import ( # type: ignore
+ CIMultiDict,
+ CIMultiDictProxy,
+ MultiDict,
+ MultiDictProxy,
+ )
+
+
+@pytest.fixture(
+ params=([MultiDict, CIMultiDict] if USE_EXTENSIONS else [])
+ + [PyMultiDict, PyCIMultiDict],
+ ids=(["MultiDict", "CIMultiDict"] if USE_EXTENSIONS else [])
+ + ["PyMultiDict", "PyCIMultiDict"],
+)
+def cls(request):
+ return request.param
+
+
+@pytest.fixture(
+ params=(
+ [(MultiDictProxy, MultiDict), (CIMultiDictProxy, CIMultiDict)]
+ if USE_EXTENSIONS
+ else []
+ )
+ + [(PyMultiDictProxy, PyMultiDict), (PyCIMultiDictProxy, PyCIMultiDict)],
+ ids=(["MultiDictProxy", "CIMultiDictProxy"] if USE_EXTENSIONS else [])
+ + ["PyMultiDictProxy", "PyCIMultiDictProxy"],
+)
+def proxy_classes(request):
+ return request.param
+
+
+def test_copy(cls):
+ d = cls()
+ d["foo"] = 6
+ d2 = d.copy()
+ d2["foo"] = 7
+ assert d["foo"] == 6
+ assert d2["foo"] == 7
+
+
+def test_copy_proxy(proxy_classes):
+ proxy_cls, dict_cls = proxy_classes
+ d = dict_cls()
+ d["foo"] = 6
+ p = proxy_cls(d)
+ d2 = p.copy()
+ d2["foo"] = 7
+ assert d["foo"] == 6
+ assert p["foo"] == 6
+ assert d2["foo"] == 7
+
+
+def test_copy_std_copy(cls):
+ d = cls()
+ d["foo"] = 6
+ d2 = copy.copy(d)
+ d2["foo"] = 7
+ assert d["foo"] == 6
+ assert d2["foo"] == 7
+
+
+def test_ci_multidict_clone(cls):
+ d = cls(foo=6)
+ d2 = cls(d)
+ d2["foo"] = 7
+ assert d["foo"] == 6
+ assert d2["foo"] == 7
diff --git a/contrib/python/multidict/tests/test_guard.py b/contrib/python/multidict/tests/test_guard.py
new file mode 100644
index 0000000000..823cc1afb8
--- /dev/null
+++ b/contrib/python/multidict/tests/test_guard.py
@@ -0,0 +1,39 @@
+import pytest
+
+from multidict._compat import USE_EXTENSIONS
+from multidict._multidict_py import MultiDict as PyMultiDict # noqa: E402
+
+if USE_EXTENSIONS:
+ from multidict._multidict import MultiDict # type: ignore
+
+
+@pytest.fixture(
+ params=([MultiDict] if USE_EXTENSIONS else []) + [PyMultiDict],
+ ids=(["MultiDict"] if USE_EXTENSIONS else []) + ["PyMultiDict"],
+)
+def cls(request):
+ return request.param
+
+
+def test_guard_items(cls):
+ md = cls({"a": "b"})
+ it = iter(md.items())
+ md["a"] = "c"
+ with pytest.raises(RuntimeError):
+ next(it)
+
+
+def test_guard_keys(cls):
+ md = cls({"a": "b"})
+ it = iter(md.keys())
+ md["a"] = "c"
+ with pytest.raises(RuntimeError):
+ next(it)
+
+
+def test_guard_values(cls):
+ md = cls({"a": "b"})
+ it = iter(md.values())
+ md["a"] = "c"
+ with pytest.raises(RuntimeError):
+ next(it)
diff --git a/contrib/python/multidict/tests/test_istr.py b/contrib/python/multidict/tests/test_istr.py
new file mode 100644
index 0000000000..caae397f2d
--- /dev/null
+++ b/contrib/python/multidict/tests/test_istr.py
@@ -0,0 +1,83 @@
+import gc
+import sys
+from typing import Type
+
+import pytest
+
+from multidict._compat import USE_EXTENSIONS
+from multidict._multidict_py import istr as _istr # noqa: E402
+
+if USE_EXTENSIONS:
+ from multidict._multidict import istr # type: ignore
+else:
+ from multidict import istr
+
+
+IMPLEMENTATION = getattr(sys, "implementation") # to suppress mypy error
+
+
+class IStrMixin:
+
+ cls = Type[istr]
+
+ def test_ctor(self):
+ s = self.cls()
+ assert "" == s
+
+ def test_ctor_str(self):
+ s = self.cls("aBcD")
+ assert "aBcD" == s
+
+ def test_ctor_istr(self):
+ s = self.cls("A")
+ s2 = self.cls(s)
+ assert "A" == s
+ assert s == s2
+
+ def test_ctor_buffer(self):
+ s = self.cls(b"aBc")
+ assert "b'aBc'" == s
+
+ def test_ctor_repr(self):
+ s = self.cls(None)
+ assert "None" == s
+
+ def test_str(self):
+ s = self.cls("aBcD")
+ s1 = str(s)
+ assert s1 == "aBcD"
+ assert type(s1) is str
+
+ def test_eq(self):
+ s1 = "Abc"
+ s2 = self.cls(s1)
+ assert s1 == s2
+
+
+class TestPyIStr(IStrMixin):
+ cls = _istr
+
+ @staticmethod
+ def _create_strs():
+ _istr("foobarbaz")
+ istr2 = _istr()
+ _istr(istr2)
+
+ @pytest.mark.skipif(
+ IMPLEMENTATION.name != "cpython", reason="PyPy has different GC implementation"
+ )
+ def test_leak(self):
+ gc.collect()
+ cnt = len(gc.get_objects())
+ for _ in range(10000):
+ self._create_strs()
+
+ gc.collect()
+ cnt2 = len(gc.get_objects())
+ assert abs(cnt - cnt2) < 10 # on PyPy these numbers are not equal
+
+
+if USE_EXTENSIONS:
+
+ class TestIStr(IStrMixin):
+ cls = istr
diff --git a/contrib/python/multidict/tests/test_multidict.py b/contrib/python/multidict/tests/test_multidict.py
new file mode 100644
index 0000000000..706fc93e75
--- /dev/null
+++ b/contrib/python/multidict/tests/test_multidict.py
@@ -0,0 +1,602 @@
+import gc
+import operator
+import sys
+import weakref
+from collections import deque
+from collections.abc import Mapping
+from functools import reduce
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Mapping,
+ Set,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+)
+
+import pytest
+
+import multidict
+from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
+
+_MultiDictClasses = Union[Type[MultiDict[str]], Type[CIMultiDict[str]]]
+
+
+def chained_callable(
+ module: object, callables: Union[str, Iterable[str]]
+) -> Callable[..., Any]:
+ """
+ Returns callable that will get and call all given objects in module in
+ exact order. If `names` is a single object's name function will return
+ object itself.
+
+ Will treat `names` of type `str` as a list of single element.
+ """
+ callables = (callables,) if isinstance(callables, str) else callables
+ _callable, *rest = (getattr(module, name) for name in callables)
+
+ def chained_call(*args: object, **kwargs: object) -> Any:
+ return reduce(lambda res, c: c(res), rest, _callable(*args, **kwargs))
+
+ return chained_call if len(rest) > 0 else _callable # type: ignore[no-any-return]
+
+
+@pytest.fixture(scope="function")
+def cls(request: Any, _multidict: Any) -> Any:
+ return chained_callable(_multidict, request.param)
+
+
+@pytest.fixture(scope="function")
+def classes(request: Any, _multidict: Any) -> Any:
+ return tuple(chained_callable(_multidict, n) for n in request.param)
+
+
+@pytest.mark.parametrize("cls", ["MultiDict", "CIMultiDict"], indirect=True)
+def test_exposed_names(
+ cls: Union[Type[MultiDict[object]], Type[CIMultiDict[object]]]
+) -> None:
+ name = cls.__name__
+
+ while name.startswith("_"):
+ name = name[1:]
+
+ assert name in multidict.__all__ # type: ignore[attr-defined]
+
+
+@pytest.mark.parametrize(
+ "cls, key_cls",
+ [("MultiDict", str), (("MultiDict", "MultiDictProxy"), str)],
+ indirect=["cls"],
+)
+def test__iter__types(
+ cls: Type[MultiDict[Union[str, int]]], key_cls: Type[object]
+) -> None:
+ d = cls([("key", "one"), ("key2", "two"), ("key", 3)])
+ for i in d:
+ assert type(i) is key_cls, (type(i), key_cls)
+
+
+_ClsPair = TypeVar(
+ "_ClsPair",
+ Tuple[Type[MultiDict[str]], Type[MultiDictProxy[str]]],
+ Tuple[Type[CIMultiDict[str]], Type[CIMultiDictProxy[str]]],
+)
+
+
+@pytest.mark.parametrize(
+ "classes",
+ [("MultiDict", "MultiDictProxy"), ("CIMultiDict", "CIMultiDictProxy")],
+ indirect=True,
+)
+def test_proxy_copy(classes: _ClsPair) -> None:
+ dict_cls, proxy_cls = classes
+ d1 = dict_cls(key="value", a="b")
+ p1 = proxy_cls(d1)
+
+ d2 = p1.copy()
+ assert d1 == d2
+ assert d1 is not d2
+
+
+@pytest.mark.parametrize(
+ "cls",
+ ["MultiDict", "CIMultiDict", "MultiDictProxy", "CIMultiDictProxy"],
+ indirect=True,
+)
+def test_subclassing(cls: Any) -> None:
+ class MyClass(cls): # type: ignore[valid-type,misc]
+ pass
+
+
+class BaseMultiDictTest:
+ def test_instantiate__empty(self, cls: _MultiDictClasses) -> None:
+ d = cls()
+ empty: Mapping[str, str] = {}
+ assert d == empty
+ assert len(d) == 0
+ assert list(d.keys()) == []
+ assert list(d.values()) == []
+ assert list(d.items()) == []
+
+ assert cls() != list() # type: ignore[comparison-overlap]
+ with pytest.raises(TypeError, match=r"(2 given)"):
+ cls(("key1", "value1"), ("key2", "value2")) # type: ignore[arg-type,call-arg] # noqa: E501
+
+ @pytest.mark.parametrize("arg0", [[("key", "value1")], {"key": "value1"}])
+ def test_instantiate__from_arg0(
+ self,
+ cls: _MultiDictClasses,
+ arg0: Union[List[Tuple[str, str]], Dict[str, str]],
+ ) -> None:
+ d = cls(arg0)
+
+ assert d == {"key": "value1"}
+ assert len(d) == 1
+ assert list(d.keys()) == ["key"]
+ assert list(d.values()) == ["value1"]
+ assert list(d.items()) == [("key", "value1")]
+
+ def test_instantiate__with_kwargs(self, cls: _MultiDictClasses) -> None:
+ d = cls([("key", "value1")], key2="value2")
+
+ assert d == {"key": "value1", "key2": "value2"}
+ assert len(d) == 2
+ assert sorted(d.keys()) == ["key", "key2"]
+ assert sorted(d.values()) == ["value1", "value2"]
+ assert sorted(d.items()) == [("key", "value1"), ("key2", "value2")]
+
+ def test_instantiate__from_generator(
+ self, cls: Union[Type[MultiDict[int]], Type[CIMultiDict[int]]]
+ ) -> None:
+ d = cls((str(i), i) for i in range(2))
+
+ assert d == {"0": 0, "1": 1}
+ assert len(d) == 2
+ assert sorted(d.keys()) == ["0", "1"]
+ assert sorted(d.values()) == [0, 1]
+ assert sorted(d.items()) == [("0", 0), ("1", 1)]
+
+ def test_instantiate__from_list_of_lists(self, cls: _MultiDictClasses) -> None:
+ # Should work at runtime, but won't type check.
+ d = cls([["key", "value1"]]) # type: ignore[list-item]
+ assert d == {"key": "value1"}
+
+ def test_instantiate__from_list_of_custom_pairs(
+ self, cls: _MultiDictClasses
+ ) -> None:
+ class Pair:
+ def __len__(self) -> int:
+ return 2
+
+ def __getitem__(self, pos: int) -> str:
+ if pos == 0:
+ return "key"
+ elif pos == 1:
+ return "value1"
+ else:
+ raise IndexError
+
+ # Works at runtime, but won't type check.
+ d = cls([Pair()]) # type: ignore[list-item]
+ assert d == {"key": "value1"}
+
+ def test_getone(self, cls: _MultiDictClasses) -> None:
+ d = cls([("key", "value1")], key="value2")
+
+ assert d.getone("key") == "value1"
+ assert d.get("key") == "value1"
+ assert d["key"] == "value1"
+
+ with pytest.raises(KeyError, match="key2"):
+ d["key2"]
+ with pytest.raises(KeyError, match="key2"):
+ d.getone("key2")
+
+ assert d.getone("key2", "default") == "default"
+
+ def test__iter__(
+ self,
+ cls: Union[Type[MultiDict[Union[str, int]]], Type[CIMultiDict[Union[str, int]]]]
+ ) -> None:
+ d = cls([("key", "one"), ("key2", "two"), ("key", 3)])
+ assert list(d) == ["key", "key2", "key"]
+
+ def test_keys__contains(
+ self,
+ cls: Union[Type[MultiDict[Union[str, int]]], Type[CIMultiDict[Union[str, int]]]]
+ ) -> None:
+ d = cls([("key", "one"), ("key2", "two"), ("key", 3)])
+
+ assert list(d.keys()) == ["key", "key2", "key"]
+
+ assert "key" in d.keys()
+ assert "key2" in d.keys()
+
+ assert "foo" not in d.keys()
+
+ def test_values__contains(
+ self,
+ cls: Union[Type[MultiDict[Union[str, int]]], Type[CIMultiDict[Union[str, int]]]]
+ ) -> None:
+ d = cls([("key", "one"), ("key", "two"), ("key", 3)])
+
+ assert list(d.values()) == ["one", "two", 3]
+
+ assert "one" in d.values()
+ assert "two" in d.values()
+ assert 3 in d.values()
+
+ assert "foo" not in d.values()
+
+ def test_items__contains(
+ self,
+ cls: Union[Type[MultiDict[Union[str, int]]], Type[CIMultiDict[Union[str, int]]]]
+ ) -> None:
+ d = cls([("key", "one"), ("key", "two"), ("key", 3)])
+
+ assert list(d.items()) == [("key", "one"), ("key", "two"), ("key", 3)]
+
+ assert ("key", "one") in d.items()
+ assert ("key", "two") in d.items()
+ assert ("key", 3) in d.items()
+
+ assert ("foo", "bar") not in d.items()
+
+ def test_cannot_create_from_unaccepted(self, cls: _MultiDictClasses) -> None:
+ with pytest.raises(TypeError):
+ cls([(1, 2, 3)]) # type: ignore[list-item]
+
+ def test_keys_is_set_less(self, cls: _MultiDictClasses) -> None:
+ d = cls([("key", "value1")])
+
+ assert d.keys() < {"key", "key2"}
+
+ def test_keys_is_set_less_equal(self, cls: _MultiDictClasses) -> None:
+ d = cls([("key", "value1")])
+
+ assert d.keys() <= {"key"}
+
+ def test_keys_is_set_equal(self, cls: _MultiDictClasses) -> None:
+ d = cls([("key", "value1")])
+
+ assert d.keys() == {"key"}
+
+ def test_keys_is_set_greater(self, cls: _MultiDictClasses) -> None:
+ d = cls([("key", "value1")])
+
+ assert {"key", "key2"} > d.keys()
+
+ def test_keys_is_set_greater_equal(self, cls: _MultiDictClasses) -> None:
+ d = cls([("key", "value1")])
+
+ assert {"key"} >= d.keys()
+
+ def test_keys_is_set_not_equal(self, cls: _MultiDictClasses) -> None:
+ d = cls([("key", "value1")])
+
+ assert d.keys() != {"key2"}
+
+ def test_eq(self, cls: _MultiDictClasses) -> None:
+ d = cls([("key", "value1")])
+
+ assert {"key": "value1"} == d
+
+ def test_eq2(self, cls: _MultiDictClasses) -> None:
+ d1 = cls([("key", "value1")])
+ d2 = cls([("key2", "value1")])
+
+ assert d1 != d2
+
+ def test_eq3(self, cls: _MultiDictClasses) -> None:
+ d1 = cls([("key", "value1")])
+ d2 = cls()
+
+ assert d1 != d2
+
+ def test_eq_other_mapping_contains_more_keys(self, cls: _MultiDictClasses) -> None:
+ d1 = cls(foo="bar")
+ d2 = dict(foo="bar", bar="baz")
+
+ assert d1 != d2
+
+ def test_eq_bad_mapping_len(
+ self, cls: Union[Type[MultiDict[int]], Type[CIMultiDict[int]]]
+ ) -> None:
+ class BadMapping(Mapping[str, int]):
+ def __getitem__(self, key: str) -> int:
+ return 1
+
+ def __iter__(self) -> Iterator[str]:
+ yield "a"
+
+ def __len__(self) -> int: # type: ignore[return]
+ 1 / 0
+
+ d1 = cls(a=1)
+ d2 = BadMapping()
+ with pytest.raises(ZeroDivisionError):
+ d1 == d2
+
+ def test_eq_bad_mapping_getitem(
+ self,
+ cls: Union[Type[MultiDict[int]], Type[CIMultiDict[int]]]
+ ) -> None:
+ class BadMapping(Mapping[str, int]):
+ def __getitem__(self, key: str) -> int: # type: ignore[return]
+ 1 / 0
+
+ def __iter__(self) -> Iterator[str]:
+ yield "a"
+
+ def __len__(self) -> int:
+ return 1
+
+ d1 = cls(a=1)
+ d2 = BadMapping()
+ with pytest.raises(ZeroDivisionError):
+ d1 == d2
+
+ def test_ne(self, cls: _MultiDictClasses) -> None:
+ d = cls([("key", "value1")])
+
+ assert d != {"key": "another_value"}
+
+ def test_and(self, cls: _MultiDictClasses) -> None:
+ d = cls([("key", "value1")])
+
+ assert {"key"} == d.keys() & {"key", "key2"}
+
+ def test_and2(self, cls: _MultiDictClasses) -> None:
+ d = cls([("key", "value1")])
+
+ assert {"key"} == {"key", "key2"} & d.keys()
+
+ def test_or(self, cls: _MultiDictClasses) -> None:
+ d = cls([("key", "value1")])
+
+ assert {"key", "key2"} == d.keys() | {"key2"}
+
+ def test_or2(self, cls: _MultiDictClasses) -> None:
+ d = cls([("key", "value1")])
+
+ assert {"key", "key2"} == {"key2"} | d.keys()
+
+ def test_sub(self, cls: _MultiDictClasses) -> None:
+ d = cls([("key", "value1"), ("key2", "value2")])
+
+ assert {"key"} == d.keys() - {"key2"}
+
+ def test_sub2(self, cls: _MultiDictClasses) -> None:
+ d = cls([("key", "value1"), ("key2", "value2")])
+
+ assert {"key3"} == {"key", "key2", "key3"} - d.keys()
+
+ def test_xor(self, cls: _MultiDictClasses) -> None:
+ d = cls([("key", "value1"), ("key2", "value2")])
+
+ assert {"key", "key3"} == d.keys() ^ {"key2", "key3"}
+
+ def test_xor2(self, cls: _MultiDictClasses) -> None:
+ d = cls([("key", "value1"), ("key2", "value2")])
+
+ assert {"key", "key3"} == {"key2", "key3"} ^ d.keys()
+
+ @pytest.mark.parametrize("_set, expected", [({"key2"}, True), ({"key"}, False)])
+ def test_isdisjoint(
+ self, cls: _MultiDictClasses, _set: Set[str], expected: bool
+ ) -> None:
+ d = cls([("key", "value1")])
+
+ assert d.keys().isdisjoint(_set) == expected
+
+ def test_repr_issue_410(self, cls: _MultiDictClasses) -> None:
+ d = cls()
+
+ try:
+ raise Exception
+ pytest.fail("Should never happen") # pragma: no cover
+ except Exception as e:
+ repr(d)
+
+ assert sys.exc_info()[1] == e
+
+ @pytest.mark.parametrize(
+ "op", [operator.or_, operator.and_, operator.sub, operator.xor]
+ )
+ @pytest.mark.parametrize("other", [{"other"}])
+ def test_op_issue_410(
+ self,
+ cls: _MultiDictClasses,
+ op: Callable[[object, object], object],
+ other: Set[str],
+ ) -> None:
+ d = cls([("key", "value")])
+
+ try:
+ raise Exception
+ pytest.fail("Should never happen") # pragma: no cover
+ except Exception as e:
+ op(d.keys(), other)
+
+ assert sys.exc_info()[1] == e
+
+ def test_weakref(self, cls: _MultiDictClasses) -> None:
+ called = False
+
+ def cb(wr: object) -> None:
+ nonlocal called
+ called = True
+
+ d = cls()
+ wr = weakref.ref(d, cb)
+ del d
+ gc.collect()
+ assert called
+ del wr
+
+ def test_iter_length_hint_keys(
+ self,
+ cls: Union[Type[MultiDict[int]], Type[CIMultiDict[int]]]
+ ) -> None:
+ md = cls(a=1, b=2)
+ it = iter(md.keys())
+ assert it.__length_hint__() == 2 # type: ignore[attr-defined]
+
+ def test_iter_length_hint_items(
+ self,
+ cls: Union[Type[MultiDict[int]], Type[CIMultiDict[int]]]
+ ) -> None:
+ md = cls(a=1, b=2)
+ it = iter(md.items())
+ assert it.__length_hint__() == 2 # type: ignore[attr-defined]
+
+ def test_iter_length_hint_values(
+ self,
+ cls: Union[Type[MultiDict[int]], Type[CIMultiDict[int]]]
+ ) -> None:
+ md = cls(a=1, b=2)
+ it = iter(md.values())
+ assert it.__length_hint__() == 2 # type: ignore[attr-defined]
+
+ def test_ctor_list_arg_and_kwds(
+ self,
+ cls: Union[Type[MultiDict[int]], Type[CIMultiDict[int]]]
+ ) -> None:
+ arg = [("a", 1)]
+ obj = cls(arg, b=2)
+ assert list(obj.items()) == [("a", 1), ("b", 2)]
+ assert arg == [("a", 1)]
+
+ def test_ctor_tuple_arg_and_kwds(
+ self,
+ cls: Union[Type[MultiDict[int]], Type[CIMultiDict[int]]]
+ ) -> None:
+ arg = (("a", 1),)
+ obj = cls(arg, b=2)
+ assert list(obj.items()) == [("a", 1), ("b", 2)]
+ assert arg == (("a", 1),)
+
+ def test_ctor_deque_arg_and_kwds(
+ self,
+ cls: Union[Type[MultiDict[int]], Type[CIMultiDict[int]]]
+ ) -> None:
+ arg = deque([("a", 1)])
+ obj = cls(arg, b=2)
+ assert list(obj.items()) == [("a", 1), ("b", 2)]
+ assert arg == deque([("a", 1)])
+
+
+class TestMultiDict(BaseMultiDictTest):
+ @pytest.fixture(params=["MultiDict", ("MultiDict", "MultiDictProxy")])
+ def cls(self, request: Any, _multidict: Any) -> Any:
+ return chained_callable(_multidict, request.param)
+
+ def test__repr__(self, cls: Type[MultiDict[str]]) -> None:
+ d = cls()
+ _cls = type(d)
+
+ assert str(d) == "<%s()>" % _cls.__name__
+
+ d = cls([("key", "one"), ("key", "two")])
+
+ assert str(d) == "<%s('key': 'one', 'key': 'two')>" % _cls.__name__
+
+ def test_getall(self, cls: Type[MultiDict[str]]) -> None:
+ d = cls([("key", "value1")], key="value2")
+
+ assert d != {"key": "value1"}
+ assert len(d) == 2
+
+ assert d.getall("key") == ["value1", "value2"]
+
+ with pytest.raises(KeyError, match="some_key"):
+ d.getall("some_key")
+
+ default = object()
+ assert d.getall("some_key", default) is default
+
+ def test_preserve_stable_ordering(
+ self, cls: Type[MultiDict[Union[str, int]]]
+ ) -> None:
+ d = cls([("a", 1), ("b", "2"), ("a", 3)])
+ s = "&".join("{}={}".format(k, v) for k, v in d.items())
+
+ assert s == "a=1&b=2&a=3"
+
+ def test_get(self, cls: Type[MultiDict[int]]) -> None:
+ d = cls([("a", 1), ("a", 2)])
+ assert d["a"] == 1
+
+ def test_items__repr__(self, cls: Type[MultiDict[str]]) -> None:
+ d = cls([("key", "value1")], key="value2")
+ expected = "_ItemsView('key': 'value1', 'key': 'value2')"
+ assert repr(d.items()) == expected
+
+ def test_keys__repr__(self, cls: Type[MultiDict[str]]) -> None:
+ d = cls([("key", "value1")], key="value2")
+ assert repr(d.keys()) == "_KeysView('key', 'key')"
+
+ def test_values__repr__(self, cls: Type[MultiDict[str]]) -> None:
+ d = cls([("key", "value1")], key="value2")
+ assert repr(d.values()) == "_ValuesView('value1', 'value2')"
+
+
+class TestCIMultiDict(BaseMultiDictTest):
+ @pytest.fixture(params=["CIMultiDict", ("CIMultiDict", "CIMultiDictProxy")])
+ def cls(self, request: Any, _multidict: Any) -> Any:
+ return chained_callable(_multidict, request.param)
+
+ def test_basics(self, cls: Type[CIMultiDict[str]]) -> None:
+ d = cls([("KEY", "value1")], KEY="value2")
+
+ assert d.getone("key") == "value1"
+ assert d.get("key") == "value1"
+ assert d.get("key2", "val") == "val"
+ assert d["key"] == "value1"
+ assert "key" in d
+
+ with pytest.raises(KeyError, match="key2"):
+ d["key2"]
+ with pytest.raises(KeyError, match="key2"):
+ d.getone("key2")
+
+ def test_getall(self, cls: Type[CIMultiDict[str]]) -> None:
+ d = cls([("KEY", "value1")], KEY="value2")
+
+ assert not d == {"KEY": "value1"}
+ assert len(d) == 2
+
+ assert d.getall("key") == ["value1", "value2"]
+
+ with pytest.raises(KeyError, match="some_key"):
+ d.getall("some_key")
+
+ def test_get(self, cls: Type[CIMultiDict[int]]) -> None:
+ d = cls([("A", 1), ("a", 2)])
+ assert 1 == d["a"]
+
+ def test__repr__(self, cls: Type[CIMultiDict[str]]) -> None:
+ d = cls([("KEY", "value1")], key="value2")
+ _cls = type(d)
+
+ expected = "<%s('KEY': 'value1', 'key': 'value2')>" % _cls.__name__
+ assert str(d) == expected
+
+ def test_items__repr__(self, cls: Type[CIMultiDict[str]]) -> None:
+ d = cls([("KEY", "value1")], key="value2")
+ expected = "_ItemsView('KEY': 'value1', 'key': 'value2')"
+ assert repr(d.items()) == expected
+
+ def test_keys__repr__(self, cls: Type[CIMultiDict[str]]) -> None:
+ d = cls([("KEY", "value1")], key="value2")
+ assert repr(d.keys()) == "_KeysView('KEY', 'key')"
+
+ def test_values__repr__(self, cls: Type[CIMultiDict[str]]) -> None:
+ d = cls([("KEY", "value1")], key="value2")
+ assert repr(d.values()) == "_ValuesView('value1', 'value2')"
diff --git a/contrib/python/multidict/tests/test_mutable_multidict.py b/contrib/python/multidict/tests/test_mutable_multidict.py
new file mode 100644
index 0000000000..3d4d16ac03
--- /dev/null
+++ b/contrib/python/multidict/tests/test_mutable_multidict.py
@@ -0,0 +1,510 @@
+import string
+import sys
+
+import pytest
+
+
+class TestMutableMultiDict:
+ @pytest.fixture
+ def cls(self, _multidict):
+ return _multidict.MultiDict
+
+ @pytest.fixture
+ def proxy_cls(self, _multidict):
+ return _multidict.MultiDictProxy
+
+ @pytest.fixture
+ def istr(self, _multidict):
+ return _multidict.istr
+
+ def test_copy(self, cls):
+ d1 = cls(key="value", a="b")
+
+ d2 = d1.copy()
+ assert d1 == d2
+ assert d1 is not d2
+
+ def test__repr__(self, cls):
+ d = cls()
+ assert str(d) == "<%s()>" % cls.__name__
+
+ d = cls([("key", "one"), ("key", "two")])
+
+ expected = "<%s('key': 'one', 'key': 'two')>" % cls.__name__
+ assert str(d) == expected
+
+ def test_getall(self, cls):
+ d = cls([("key", "value1")], key="value2")
+ assert len(d) == 2
+
+ assert d.getall("key") == ["value1", "value2"]
+
+ with pytest.raises(KeyError, match="some_key"):
+ d.getall("some_key")
+
+ default = object()
+ assert d.getall("some_key", default) is default
+
+ def test_add(self, cls):
+ d = cls()
+
+ assert d == {}
+ d["key"] = "one"
+ assert d == {"key": "one"}
+ assert d.getall("key") == ["one"]
+
+ d["key"] = "two"
+ assert d == {"key": "two"}
+ assert d.getall("key") == ["two"]
+
+ d.add("key", "one")
+ assert 2 == len(d)
+ assert d.getall("key") == ["two", "one"]
+
+ d.add("foo", "bar")
+ assert 3 == len(d)
+ assert d.getall("foo") == ["bar"]
+
+ def test_extend(self, cls):
+ d = cls()
+ assert d == {}
+
+ d.extend([("key", "one"), ("key", "two")], key=3, foo="bar")
+ assert d != {"key": "one", "foo": "bar"}
+ assert 4 == len(d)
+ itms = d.items()
+ # we can't guarantee order of kwargs
+ assert ("key", "one") in itms
+ assert ("key", "two") in itms
+ assert ("key", 3) in itms
+ assert ("foo", "bar") in itms
+
+ other = cls(bar="baz")
+ assert other == {"bar": "baz"}
+
+ d.extend(other)
+ assert ("bar", "baz") in d.items()
+
+ d.extend({"foo": "moo"})
+ assert ("foo", "moo") in d.items()
+
+ d.extend()
+ assert 6 == len(d)
+
+ with pytest.raises(TypeError):
+ d.extend("foo", "bar")
+
+ def test_extend_from_proxy(self, cls, proxy_cls):
+ d = cls([("a", "a"), ("b", "b")])
+ proxy = proxy_cls(d)
+
+ d2 = cls()
+ d2.extend(proxy)
+
+ assert [("a", "a"), ("b", "b")] == list(d2.items())
+
+ def test_clear(self, cls):
+ d = cls([("key", "one")], key="two", foo="bar")
+
+ d.clear()
+ assert d == {}
+ assert list(d.items()) == []
+
+ def test_del(self, cls):
+ d = cls([("key", "one"), ("key", "two")], foo="bar")
+ assert list(d.keys()) == ["key", "key", "foo"]
+
+ del d["key"]
+ assert d == {"foo": "bar"}
+ assert list(d.items()) == [("foo", "bar")]
+
+ with pytest.raises(KeyError, match="key"):
+ del d["key"]
+
+ def test_set_default(self, cls):
+ d = cls([("key", "one"), ("key", "two")], foo="bar")
+ assert "one" == d.setdefault("key", "three")
+ assert "three" == d.setdefault("otherkey", "three")
+ assert "otherkey" in d
+ assert "three" == d["otherkey"]
+
+ def test_popitem(self, cls):
+ d = cls()
+ d.add("key", "val1")
+ d.add("key", "val2")
+
+ assert ("key", "val1") == d.popitem()
+ assert [("key", "val2")] == list(d.items())
+
+ def test_popitem_empty_multidict(self, cls):
+ d = cls()
+
+ with pytest.raises(KeyError):
+ d.popitem()
+
+ def test_pop(self, cls):
+ d = cls()
+ d.add("key", "val1")
+ d.add("key", "val2")
+
+ assert "val1" == d.pop("key")
+ assert {"key": "val2"} == d
+
+ def test_pop2(self, cls):
+ d = cls()
+ d.add("key", "val1")
+ d.add("key2", "val2")
+ d.add("key", "val3")
+
+ assert "val1" == d.pop("key")
+ assert [("key2", "val2"), ("key", "val3")] == list(d.items())
+
+ def test_pop_default(self, cls):
+ d = cls(other="val")
+
+ assert "default" == d.pop("key", "default")
+ assert "other" in d
+
+ def test_pop_raises(self, cls):
+ d = cls(other="val")
+
+ with pytest.raises(KeyError, match="key"):
+ d.pop("key")
+
+ assert "other" in d
+
+ def test_replacement_order(self, cls):
+ d = cls()
+ d.add("key1", "val1")
+ d.add("key2", "val2")
+ d.add("key1", "val3")
+ d.add("key2", "val4")
+
+ d["key1"] = "val"
+
+ expected = [("key1", "val"), ("key2", "val2"), ("key2", "val4")]
+
+ assert expected == list(d.items())
+
+ def test_nonstr_key(self, cls):
+ d = cls()
+ with pytest.raises(TypeError):
+ d[1] = "val"
+
+ def test_istr_key(self, cls, istr):
+ d = cls()
+ d[istr("1")] = "val"
+ assert type(list(d.keys())[0]) is istr
+
+ def test_str_derived_key(self, cls):
+ class A(str):
+ pass
+
+ d = cls()
+ d[A("1")] = "val"
+ assert type(list(d.keys())[0]) is A
+
+ def test_istr_key_add(self, cls, istr):
+ d = cls()
+ d.add(istr("1"), "val")
+ assert type(list(d.keys())[0]) is istr
+
+ def test_str_derived_key_add(self, cls):
+ class A(str):
+ pass
+
+ d = cls()
+ d.add(A("1"), "val")
+ assert type(list(d.keys())[0]) is A
+
+ def test_popall(self, cls):
+ d = cls()
+ d.add("key1", "val1")
+ d.add("key2", "val2")
+ d.add("key1", "val3")
+ ret = d.popall("key1")
+ assert ["val1", "val3"] == ret
+ assert {"key2": "val2"} == d
+
+ def test_popall_default(self, cls):
+ d = cls()
+ assert "val" == d.popall("key", "val")
+
+ def test_popall_key_error(self, cls):
+ d = cls()
+ with pytest.raises(KeyError, match="key"):
+ d.popall("key")
+
+ def test_large_multidict_resizing(self, cls):
+ SIZE = 1024
+ d = cls()
+ for i in range(SIZE):
+ d["key" + str(i)] = i
+
+ for i in range(SIZE - 1):
+ del d["key" + str(i)]
+
+ assert {"key" + str(SIZE - 1): SIZE - 1} == d
+
+
+class TestCIMutableMultiDict:
+ @pytest.fixture
+ def cls(self, _multidict):
+ return _multidict.CIMultiDict
+
+ @pytest.fixture
+ def proxy_cls(self, _multidict):
+ return _multidict.CIMultiDictProxy
+
+ @pytest.fixture
+ def istr(self, _multidict):
+ return _multidict.istr
+
+ def test_getall(self, cls):
+ d = cls([("KEY", "value1")], KEY="value2")
+
+ assert d != {"KEY": "value1"}
+ assert len(d) == 2
+
+ assert d.getall("key") == ["value1", "value2"]
+
+ with pytest.raises(KeyError, match="some_key"):
+ d.getall("some_key")
+
+ def test_ctor(self, cls):
+ d = cls(k1="v1")
+ assert "v1" == d["K1"]
+ assert ("k1", "v1") in d.items()
+
+ def test_setitem(self, cls):
+ d = cls()
+ d["k1"] = "v1"
+ assert "v1" == d["K1"]
+ assert ("k1", "v1") in d.items()
+
+ def test_delitem(self, cls):
+ d = cls()
+ d["k1"] = "v1"
+ assert "K1" in d
+ del d["k1"]
+ assert "K1" not in d
+
+ def test_copy(self, cls):
+ d1 = cls(key="KEY", a="b")
+
+ d2 = d1.copy()
+ assert d1 == d2
+ assert d1.items() == d2.items()
+ assert d1 is not d2
+
+ def test__repr__(self, cls):
+ d = cls()
+ assert str(d) == "<%s()>" % cls.__name__
+
+ d = cls([("KEY", "one"), ("KEY", "two")])
+
+ expected = "<%s('KEY': 'one', 'KEY': 'two')>" % cls.__name__
+ assert str(d) == expected
+
+ def test_add(self, cls):
+ d = cls()
+
+ assert d == {}
+ d["KEY"] = "one"
+ assert ("KEY", "one") in d.items()
+ assert d == cls({"Key": "one"})
+ assert d.getall("key") == ["one"]
+
+ d["KEY"] = "two"
+ assert ("KEY", "two") in d.items()
+ assert d == cls({"Key": "two"})
+ assert d.getall("key") == ["two"]
+
+ d.add("KEY", "one")
+ assert ("KEY", "one") in d.items()
+ assert 2 == len(d)
+ assert d.getall("key") == ["two", "one"]
+
+ d.add("FOO", "bar")
+ assert ("FOO", "bar") in d.items()
+ assert 3 == len(d)
+ assert d.getall("foo") == ["bar"]
+
+ d.add(key="test", value="test")
+ assert ("test", "test") in d.items()
+ assert 4 == len(d)
+ assert d.getall("test") == ["test"]
+
+ def test_extend(self, cls):
+ d = cls()
+ assert d == {}
+
+ d.extend([("KEY", "one"), ("key", "two")], key=3, foo="bar")
+ assert 4 == len(d)
+ itms = d.items()
+ # we can't guarantee order of kwargs
+ assert ("KEY", "one") in itms
+ assert ("key", "two") in itms
+ assert ("key", 3) in itms
+ assert ("foo", "bar") in itms
+
+ other = cls(Bar="baz")
+ assert other == {"Bar": "baz"}
+
+ d.extend(other)
+ assert ("Bar", "baz") in d.items()
+ assert "bar" in d
+
+ d.extend({"Foo": "moo"})
+ assert ("Foo", "moo") in d.items()
+ assert "foo" in d
+
+ d.extend()
+ assert 6 == len(d)
+
+ with pytest.raises(TypeError):
+ d.extend("foo", "bar")
+
+ def test_extend_from_proxy(self, cls, proxy_cls):
+ d = cls([("a", "a"), ("b", "b")])
+ proxy = proxy_cls(d)
+
+ d2 = cls()
+ d2.extend(proxy)
+
+ assert [("a", "a"), ("b", "b")] == list(d2.items())
+
+ def test_clear(self, cls):
+ d = cls([("KEY", "one")], key="two", foo="bar")
+
+ d.clear()
+ assert d == {}
+ assert list(d.items()) == []
+
+ def test_del(self, cls):
+ d = cls([("KEY", "one"), ("key", "two")], foo="bar")
+
+ del d["key"]
+ assert d == {"foo": "bar"}
+ assert list(d.items()) == [("foo", "bar")]
+
+ with pytest.raises(KeyError, match="key"):
+ del d["key"]
+
+ def test_set_default(self, cls):
+ d = cls([("KEY", "one"), ("key", "two")], foo="bar")
+ assert "one" == d.setdefault("key", "three")
+ assert "three" == d.setdefault("otherkey", "three")
+ assert "otherkey" in d
+ assert ("otherkey", "three") in d.items()
+ assert "three" == d["OTHERKEY"]
+
+ def test_popitem(self, cls):
+ d = cls()
+ d.add("KEY", "val1")
+ d.add("key", "val2")
+
+ pair = d.popitem()
+ assert ("KEY", "val1") == pair
+ assert isinstance(pair[0], str)
+ assert [("key", "val2")] == list(d.items())
+
+ def test_popitem_empty_multidict(self, cls):
+ d = cls()
+
+ with pytest.raises(KeyError):
+ d.popitem()
+
+ def test_pop(self, cls):
+ d = cls()
+ d.add("KEY", "val1")
+ d.add("key", "val2")
+
+ assert "val1" == d.pop("KEY")
+ assert {"key": "val2"} == d
+
+ def test_pop_lowercase(self, cls):
+ d = cls()
+ d.add("KEY", "val1")
+ d.add("key", "val2")
+
+ assert "val1" == d.pop("key")
+ assert {"key": "val2"} == d
+
+ def test_pop_default(self, cls):
+ d = cls(OTHER="val")
+
+ assert "default" == d.pop("key", "default")
+ assert "other" in d
+
+ def test_pop_raises(self, cls):
+ d = cls(OTHER="val")
+
+ with pytest.raises(KeyError, match="KEY"):
+ d.pop("KEY")
+
+ assert "other" in d
+
+ def test_extend_with_istr(self, cls, istr):
+ us = istr("aBc")
+ d = cls()
+
+ d.extend([(us, "val")])
+ assert [("aBc", "val")] == list(d.items())
+
+ def test_copy_istr(self, cls, istr):
+ d = cls({istr("Foo"): "bar"})
+ d2 = d.copy()
+ assert d == d2
+
+ def test_eq(self, cls):
+ d1 = cls(Key="val")
+ d2 = cls(KEY="val")
+
+ assert d1 == d2
+
+ @pytest.mark.skipif(
+ sys.implementation.name == "pypy",
+ reason="getsizeof() is not implemented on PyPy",
+ )
+ def test_sizeof(self, cls):
+ md = cls()
+ s1 = sys.getsizeof(md)
+ for i in string.ascii_lowercase:
+ for j in string.ascii_uppercase:
+ md[i + j] = i + j
+ # multidict should be resized
+ s2 = sys.getsizeof(md)
+ assert s2 > s1
+
+ @pytest.mark.skipif(
+ sys.implementation.name == "pypy",
+ reason="getsizeof() is not implemented on PyPy",
+ )
+ def test_min_sizeof(self, cls):
+ md = cls()
+ assert sys.getsizeof(md) < 1024
+
+ def test_issue_620_items(self, cls):
+ # https://github.com/aio-libs/multidict/issues/620
+ d = cls({"a": "123, 456", "b": "789"})
+ before_mutation_items = d.items()
+ d["c"] = "000"
+ # This causes an error on pypy.
+ list(before_mutation_items)
+
+ def test_issue_620_keys(self, cls):
+ # https://github.com/aio-libs/multidict/issues/620
+ d = cls({"a": "123, 456", "b": "789"})
+ before_mutation_keys = d.keys()
+ d["c"] = "000"
+ # This causes an error on pypy.
+ list(before_mutation_keys)
+
+ def test_issue_620_values(self, cls):
+ # https://github.com/aio-libs/multidict/issues/620
+ d = cls({"a": "123, 456", "b": "789"})
+ before_mutation_values = d.values()
+ d["c"] = "000"
+ # This causes an error on pypy.
+ list(before_mutation_values)
diff --git a/contrib/python/multidict/tests/test_mypy.py b/contrib/python/multidict/tests/test_mypy.py
new file mode 100644
index 0000000000..62bb62e52d
--- /dev/null
+++ b/contrib/python/multidict/tests/test_mypy.py
@@ -0,0 +1,278 @@
+# This file doesn't use test parametrization because mypy doesn't know anything about it.
+# Concrete types are required
+
+import multidict
+
+
+def test_classes_not_abstract() -> None:
+ d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"})
+ d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"})
+
+ multidict.MultiDictProxy(d1)
+ multidict.CIMultiDictProxy(d2)
+
+
+def test_getitem() -> None:
+ d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"})
+ d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"})
+
+ d3 = multidict.MultiDictProxy(d1)
+ d4 = multidict.CIMultiDictProxy(d2)
+
+ key = multidict.istr("a")
+
+ assert d1["a"] == "b"
+ assert d2["a"] == "b"
+ assert d3["a"] == "b"
+ assert d4["a"] == "b"
+
+ assert d1[key] == "b"
+ assert d2[key] == "b"
+ assert d3[key] == "b"
+ assert d4[key] == "b"
+
+
+def test_get() -> None:
+ d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"})
+ d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"})
+
+ d3 = multidict.MultiDictProxy(d1)
+ d4 = multidict.CIMultiDictProxy(d2)
+
+ key = multidict.istr("a")
+
+ assert d1.get("a") == "b"
+ assert d2.get("a") == "b"
+ assert d3.get("a") == "b"
+ assert d4.get("a") == "b"
+
+ assert d1.get(key) == "b"
+ assert d2.get(key) == "b"
+ assert d3.get(key) == "b"
+ assert d4.get(key) == "b"
+
+
+def test_get_default() -> None:
+ d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"})
+ d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"})
+
+ d3 = multidict.MultiDictProxy(d1)
+ d4 = multidict.CIMultiDictProxy(d2)
+
+ key = multidict.istr("b")
+
+ assert d1.get("b", "d") == "d"
+ assert d2.get("b", "d") == "d"
+ assert d3.get("b", "d") == "d"
+ assert d4.get("b", "d") == "d"
+
+ assert d1.get(key, "d") == "d"
+ assert d2.get(key, "d") == "d"
+ assert d3.get(key, "d") == "d"
+ assert d4.get(key, "d") == "d"
+
+
+def test_getone() -> None:
+ d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"})
+ d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"})
+
+ d3 = multidict.MultiDictProxy(d1)
+ d4 = multidict.CIMultiDictProxy(d2)
+
+ key = multidict.istr("a")
+
+ assert d1.getone("a") == "b"
+ assert d2.getone("a") == "b"
+ assert d3.getone("a") == "b"
+ assert d4.getone("a") == "b"
+
+ assert d1.getone(key) == "b"
+ assert d2.getone(key) == "b"
+ assert d3.getone(key) == "b"
+ assert d4.getone(key) == "b"
+
+
+def test_getone_default() -> None:
+ d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"})
+ d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"})
+
+ d3 = multidict.MultiDictProxy(d1)
+ d4 = multidict.CIMultiDictProxy(d2)
+
+ key = multidict.istr("b")
+
+ assert d1.getone("b", 1) == 1
+ assert d2.getone("b", 1) == 1
+ assert d3.getone("b", 1) == 1
+ assert d4.getone("b", 1) == 1
+
+ assert d1.getone(key, 1) == 1
+ assert d2.getone(key, 1) == 1
+ assert d3.getone(key, 1) == 1
+ assert d4.getone(key, 1) == 1
+
+
+def test_getall() -> None:
+ d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"})
+ d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"})
+
+ d3 = multidict.MultiDictProxy(d1)
+ d4 = multidict.CIMultiDictProxy(d2)
+
+ key = multidict.istr("a")
+
+ assert d1.getall("a") == ["b"]
+ assert d2.getall("a") == ["b"]
+ assert d3.getall("a") == ["b"]
+ assert d4.getall("a") == ["b"]
+
+ assert d1.getall(key) == ["b"]
+ assert d2.getall(key) == ["b"]
+ assert d3.getall(key) == ["b"]
+ assert d4.getall(key) == ["b"]
+
+
+def test_getall_default() -> None:
+ d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"})
+ d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"})
+
+ d3 = multidict.MultiDictProxy(d1)
+ d4 = multidict.CIMultiDictProxy(d2)
+
+ key = multidict.istr("b")
+
+ assert d1.getall("b", 1) == 1
+ assert d2.getall("b", 1) == 1
+ assert d3.getall("b", 1) == 1
+ assert d4.getall("b", 1) == 1
+
+ assert d1.getall(key, 1) == 1
+ assert d2.getall(key, 1) == 1
+ assert d3.getall(key, 1) == 1
+ assert d4.getall(key, 1) == 1
+
+
+def test_copy() -> None:
+ d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"})
+ d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"})
+
+ d3 = multidict.MultiDictProxy(d1)
+ d4 = multidict.CIMultiDictProxy(d2)
+
+ assert d1.copy() == d1
+ assert d2.copy() == d2
+ assert d3.copy() == d1
+ assert d4.copy() == d2
+
+
+def test_iter() -> None:
+ d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"})
+ d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"})
+
+ d3 = multidict.MultiDictProxy(d1)
+ d4 = multidict.CIMultiDictProxy(d2)
+
+ for i in d1:
+ i.lower() # str-specific class
+ for i in d2:
+ i.lower() # str-specific class
+ for i in d3:
+ i.lower() # str-specific class
+ for i in d4:
+ i.lower() # str-specific class
+
+
+def test_setitem() -> None:
+ d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"})
+ d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"})
+
+ key = multidict.istr("a")
+
+ d1["a"] = "b"
+ d2["a"] = "b"
+
+ d1[key] = "b"
+ d2[key] = "b"
+
+def test_delitem() -> None:
+ d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"})
+ d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"})
+
+ del d1["a"]
+ del d2["a"]
+
+ key = multidict.istr("a")
+
+ d3: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"})
+ d4: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"})
+
+ del d3[key]
+ del d4[key]
+
+
+def test_additem() -> None:
+ d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"})
+ d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"})
+
+ key = multidict.istr("a")
+
+ d1.add("a", "b")
+ d2.add("a", "b")
+
+ d1.add(key, "b")
+ d2.add(key, "b")
+
+
+def test_extend_mapping() -> None:
+ d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"})
+ d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"})
+
+ key = multidict.istr("a")
+
+ d1.extend({"a": "b"})
+ d2.extend({"a": "b"})
+
+ d1.extend({key: "b"})
+ d2.extend({key: "b"})
+
+
+def test_update_mapping() -> None:
+ d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"})
+ d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"})
+
+ key = multidict.istr("a")
+
+ d1.update({"a": "b"})
+ d2.update({"a": "b"})
+
+ d1.update({key: "b"})
+ d2.update({key: "b"})
+
+def test_popone() -> None:
+ d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"})
+ d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"})
+
+ assert d1.popone("a") == "b"
+ assert d2.popone("a") == "b"
+
+ key = multidict.istr("a")
+ d1 = multidict.MultiDict({"a": "b"})
+ d2 = multidict.CIMultiDict({"a": "b"})
+
+ assert d1.popone(key) == "b"
+ assert d2.popone(key) == "b"
+
+
+def test_popall() -> None:
+ d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"})
+ d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"})
+
+ assert d1.popall("a") == ["b"]
+ assert d2.popall("a") == ["b"]
+
+ key = multidict.istr("a")
+ d1 = multidict.MultiDict({"a": "b"})
+ d2 = multidict.CIMultiDict({"a": "b"})
+
+ assert d1.popall(key) == ["b"]
+ assert d2.popall(key) == ["b"]
diff --git a/contrib/python/multidict/tests/test_pickle.py b/contrib/python/multidict/tests/test_pickle.py
new file mode 100644
index 0000000000..ce7383684f
--- /dev/null
+++ b/contrib/python/multidict/tests/test_pickle.py
@@ -0,0 +1,82 @@
+import pickle
+from pathlib import Path
+
+import pytest
+
+from multidict._compat import USE_EXTENSIONS
+from multidict._multidict_py import CIMultiDict as PyCIMultiDict
+from multidict._multidict_py import CIMultiDictProxy as PyCIMultiDictProxy
+from multidict._multidict_py import MultiDict as PyMultiDict # noqa: E402
+from multidict._multidict_py import MultiDictProxy as PyMultiDictProxy
+
+import yatest.common
+
+if USE_EXTENSIONS:
+ from multidict._multidict import ( # type: ignore
+ CIMultiDict,
+ CIMultiDictProxy,
+ MultiDict,
+ MultiDictProxy,
+ )
+
+
+here = Path(yatest.common.test_source_path()).resolve()
+
+
+@pytest.fixture(
+ params=(["MultiDict", "CIMultiDict"] if USE_EXTENSIONS else [])
+ + ["PyMultiDict", "PyCIMultiDict"]
+)
+def cls_name(request):
+ return request.param
+
+
+@pytest.fixture(
+ params=([MultiDict, CIMultiDict] if USE_EXTENSIONS else [])
+ + [PyMultiDict, PyCIMultiDict],
+ ids=(["MultiDict", "CIMultiDict"] if USE_EXTENSIONS else [])
+ + ["PyMultiDict", "PyCIMultiDict"],
+)
+def cls(request):
+ return request.param
+
+
+@pytest.fixture(
+ params=(
+ [(MultiDictProxy, MultiDict), (CIMultiDictProxy, CIMultiDict)]
+ if USE_EXTENSIONS
+ else []
+ )
+ + [(PyMultiDictProxy, PyMultiDict), (PyCIMultiDictProxy, PyCIMultiDict)],
+ ids=(["MultiDictProxy", "CIMultiDictProxy"] if USE_EXTENSIONS else [])
+ + ["PyMultiDictProxy", "PyCIMultiDictProxy"],
+)
+def proxy_classes(request):
+ return request.param
+
+
+def test_pickle(cls, pickle_protocol):
+ d = cls([("a", 1), ("a", 2)])
+ pbytes = pickle.dumps(d, pickle_protocol)
+ obj = pickle.loads(pbytes)
+ assert d == obj
+ assert isinstance(obj, cls)
+
+
+def test_pickle_proxy(proxy_classes):
+ proxy_cls, dict_cls = proxy_classes
+ d = dict_cls([("a", 1), ("a", 2)])
+ proxy = proxy_cls(d)
+ with pytest.raises(TypeError):
+ pickle.dumps(proxy)
+
+
+def test_load_from_file(pickle_protocol, cls_name):
+ cls = globals()[cls_name]
+ d = cls([("a", 1), ("a", 2)])
+ fname = "{}.pickle.{}".format(cls_name.lower(), pickle_protocol)
+ p = here / fname
+ with p.open("rb") as f:
+ obj = pickle.load(f)
+ assert d == obj
+ assert isinstance(obj, cls)
diff --git a/contrib/python/multidict/tests/test_types.py b/contrib/python/multidict/tests/test_types.py
new file mode 100644
index 0000000000..3ae2cbb844
--- /dev/null
+++ b/contrib/python/multidict/tests/test_types.py
@@ -0,0 +1,109 @@
+import sys
+import types
+
+import pytest
+
+
+def test_proxies(_multidict):
+ assert issubclass(_multidict.CIMultiDictProxy, _multidict.MultiDictProxy)
+
+
+def test_dicts(_multidict):
+ assert issubclass(_multidict.CIMultiDict, _multidict.MultiDict)
+
+
+def test_proxy_not_inherited_from_dict(_multidict):
+ assert not issubclass(_multidict.MultiDictProxy, _multidict.MultiDict)
+
+
+def test_dict_not_inherited_from_proxy(_multidict):
+ assert not issubclass(_multidict.MultiDict, _multidict.MultiDictProxy)
+
+
+def test_multidict_proxy_copy_type(_multidict):
+ d = _multidict.MultiDict(key="val")
+ p = _multidict.MultiDictProxy(d)
+ assert isinstance(p.copy(), _multidict.MultiDict)
+
+
+def test_cimultidict_proxy_copy_type(_multidict):
+ d = _multidict.CIMultiDict(key="val")
+ p = _multidict.CIMultiDictProxy(d)
+ assert isinstance(p.copy(), _multidict.CIMultiDict)
+
+
+def test_create_multidict_proxy_from_nonmultidict(_multidict):
+ with pytest.raises(TypeError):
+ _multidict.MultiDictProxy({})
+
+
+def test_create_multidict_proxy_from_cimultidict(_multidict):
+ d = _multidict.CIMultiDict(key="val")
+ p = _multidict.MultiDictProxy(d)
+ assert p == d
+
+
+def test_create_multidict_proxy_from_multidict_proxy_from_mdict(_multidict):
+ d = _multidict.MultiDict(key="val")
+ p = _multidict.MultiDictProxy(d)
+ assert p == d
+ p2 = _multidict.MultiDictProxy(p)
+ assert p2 == p
+
+
+def test_create_cimultidict_proxy_from_cimultidict_proxy_from_ci(_multidict):
+ d = _multidict.CIMultiDict(key="val")
+ p = _multidict.CIMultiDictProxy(d)
+ assert p == d
+ p2 = _multidict.CIMultiDictProxy(p)
+ assert p2 == p
+
+
+def test_create_cimultidict_proxy_from_nonmultidict(_multidict):
+ with pytest.raises(
+ TypeError,
+ match=(
+ "ctor requires CIMultiDict or CIMultiDictProxy instance, "
+ "not <class 'dict'>"
+ ),
+ ):
+ _multidict.CIMultiDictProxy({})
+
+
+def test_create_ci_multidict_proxy_from_multidict(_multidict):
+ d = _multidict.MultiDict(key="val")
+ with pytest.raises(
+ TypeError,
+ match=(
+ "ctor requires CIMultiDict or CIMultiDictProxy instance, "
+ "not <class 'multidict._multidict.*.MultiDict'>"
+ ),
+ ):
+ _multidict.CIMultiDictProxy(d)
+
+
+@pytest.mark.skipif(
+ sys.version_info >= (3, 9), reason="Python 3.9 uses GenericAlias which is different"
+)
+def test_generic_exists(_multidict) -> None:
+ assert _multidict.MultiDict[int] is _multidict.MultiDict
+ assert _multidict.MultiDictProxy[int] is _multidict.MultiDictProxy
+ assert _multidict.CIMultiDict[int] is _multidict.CIMultiDict
+ assert _multidict.CIMultiDictProxy[int] is _multidict.CIMultiDictProxy
+
+
+@pytest.mark.skipif(
+ sys.version_info < (3, 9), reason="Python 3.9 is required for GenericAlias"
+)
+def test_generic_alias(_multidict) -> None:
+
+ assert _multidict.MultiDict[int] == types.GenericAlias(_multidict.MultiDict, (int,))
+ assert _multidict.MultiDictProxy[int] == types.GenericAlias(
+ _multidict.MultiDictProxy, (int,)
+ )
+ assert _multidict.CIMultiDict[int] == types.GenericAlias(
+ _multidict.CIMultiDict, (int,)
+ )
+ assert _multidict.CIMultiDictProxy[int] == types.GenericAlias(
+ _multidict.CIMultiDictProxy, (int,)
+ )
diff --git a/contrib/python/multidict/tests/test_update.py b/contrib/python/multidict/tests/test_update.py
new file mode 100644
index 0000000000..4bacdbce77
--- /dev/null
+++ b/contrib/python/multidict/tests/test_update.py
@@ -0,0 +1,147 @@
+from collections import deque
+
+import pytest
+
+from multidict._compat import USE_EXTENSIONS
+from multidict._multidict_py import CIMultiDict as PyCIMultiDict
+from multidict._multidict_py import MultiDict as PyMultiDict # noqa: E402
+
+if USE_EXTENSIONS:
+ from multidict._multidict import CIMultiDict, MultiDict # type: ignore
+
+
+@pytest.fixture(
+ params=([MultiDict, CIMultiDict] if USE_EXTENSIONS else [])
+ + [PyMultiDict, PyCIMultiDict],
+ ids=(["MultiDict", "CIMultiDict"] if USE_EXTENSIONS else [])
+ + ["PyMultiDict", "PyCIMultiDict"],
+)
+def cls(request):
+ return request.param
+
+
+@pytest.fixture
+def md_cls(_multidict):
+ return _multidict.MultiDict
+
+
+@pytest.fixture
+def ci_md_cls(_multidict):
+ return _multidict.CIMultiDict
+
+
+@pytest.fixture
+def istr(_multidict):
+ return _multidict.istr
+
+
+def test_update_replace(cls):
+ obj1 = cls([("a", 1), ("b", 2), ("a", 3), ("c", 10)])
+ obj2 = cls([("a", 4), ("b", 5), ("a", 6)])
+ obj1.update(obj2)
+ expected = [("a", 4), ("b", 5), ("a", 6), ("c", 10)]
+ assert list(obj1.items()) == expected
+
+
+def test_update_append(cls):
+ obj1 = cls([("a", 1), ("b", 2), ("a", 3), ("c", 10)])
+ obj2 = cls([("a", 4), ("a", 5), ("a", 6)])
+ obj1.update(obj2)
+ expected = [("a", 4), ("b", 2), ("a", 5), ("c", 10), ("a", 6)]
+ assert list(obj1.items()) == expected
+
+
+def test_update_remove(cls):
+ obj1 = cls([("a", 1), ("b", 2), ("a", 3), ("c", 10)])
+ obj2 = cls([("a", 4)])
+ obj1.update(obj2)
+ expected = [("a", 4), ("b", 2), ("c", 10)]
+ assert list(obj1.items()) == expected
+
+
+def test_update_replace_seq(cls):
+ obj1 = cls([("a", 1), ("b", 2), ("a", 3), ("c", 10)])
+ obj2 = [("a", 4), ("b", 5), ("a", 6)]
+ obj1.update(obj2)
+ expected = [("a", 4), ("b", 5), ("a", 6), ("c", 10)]
+ assert list(obj1.items()) == expected
+
+
+def test_update_replace_seq2(cls):
+ obj1 = cls([("a", 1), ("b", 2), ("a", 3), ("c", 10)])
+ obj1.update([("a", 4)], b=5, a=6)
+ expected = [("a", 4), ("b", 5), ("a", 6), ("c", 10)]
+ assert list(obj1.items()) == expected
+
+
+def test_update_append_seq(cls):
+ obj1 = cls([("a", 1), ("b", 2), ("a", 3), ("c", 10)])
+ obj2 = [("a", 4), ("a", 5), ("a", 6)]
+ obj1.update(obj2)
+ expected = [("a", 4), ("b", 2), ("a", 5), ("c", 10), ("a", 6)]
+ assert list(obj1.items()) == expected
+
+
+def test_update_remove_seq(cls):
+ obj1 = cls([("a", 1), ("b", 2), ("a", 3), ("c", 10)])
+ obj2 = [("a", 4)]
+ obj1.update(obj2)
+ expected = [("a", 4), ("b", 2), ("c", 10)]
+ assert list(obj1.items()) == expected
+
+
+def test_update_md(md_cls):
+ d = md_cls()
+ d.add("key", "val1")
+ d.add("key", "val2")
+ d.add("key2", "val3")
+
+ d.update(key="val")
+
+ assert [("key", "val"), ("key2", "val3")] == list(d.items())
+
+
+def test_update_istr_ci_md(ci_md_cls, istr):
+ d = ci_md_cls()
+ d.add(istr("KEY"), "val1")
+ d.add("key", "val2")
+ d.add("key2", "val3")
+
+ d.update({istr("key"): "val"})
+
+ assert [("key", "val"), ("key2", "val3")] == list(d.items())
+
+
+def test_update_ci_md(ci_md_cls):
+ d = ci_md_cls()
+ d.add("KEY", "val1")
+ d.add("key", "val2")
+ d.add("key2", "val3")
+
+ d.update(Key="val")
+
+ assert [("Key", "val"), ("key2", "val3")] == list(d.items())
+
+
+def test_update_list_arg_and_kwds(cls):
+ obj = cls()
+ arg = [("a", 1)]
+ obj.update(arg, b=2)
+ assert list(obj.items()) == [("a", 1), ("b", 2)]
+ assert arg == [("a", 1)]
+
+
+def test_update_tuple_arg_and_kwds(cls):
+ obj = cls()
+ arg = (("a", 1),)
+ obj.update(arg, b=2)
+ assert list(obj.items()) == [("a", 1), ("b", 2)]
+ assert arg == (("a", 1),)
+
+
+def test_update_deque_arg_and_kwds(cls):
+ obj = cls()
+ arg = deque([("a", 1)])
+ obj.update(arg, b=2)
+ assert list(obj.items()) == [("a", 1), ("b", 2)]
+ assert arg == deque([("a", 1)])
diff --git a/contrib/python/multidict/tests/test_version.py b/contrib/python/multidict/tests/test_version.py
new file mode 100644
index 0000000000..067d6210ce
--- /dev/null
+++ b/contrib/python/multidict/tests/test_version.py
@@ -0,0 +1,199 @@
+from typing import Type
+
+import pytest
+
+from multidict import MultiMapping
+from multidict._compat import USE_EXTENSIONS
+from multidict._multidict_py import CIMultiDict as _CIMultiDict
+from multidict._multidict_py import MultiDict as _MultiDict # noqa: E402
+from multidict._multidict_py import getversion as _getversion
+
+if USE_EXTENSIONS:
+ from multidict._multidict import ( # type: ignore
+ CIMultiDict,
+ MultiDict,
+ getversion,
+ )
+
+
+class VersionMixin:
+ cls: Type[MultiMapping[str]]
+
+ def getver(self, md):
+ raise NotImplementedError
+
+ def test_getversion_bad_param(self):
+ with pytest.raises(TypeError):
+ self.getver(1)
+
+ def test_ctor(self):
+ m1 = self.cls()
+ v1 = self.getver(m1)
+ m2 = self.cls()
+ v2 = self.getver(m2)
+ assert v1 != v2
+
+ def test_add(self):
+ m = self.cls()
+ v = self.getver(m)
+ m.add("key", "val")
+ assert self.getver(m) > v
+
+ def test_delitem(self):
+ m = self.cls()
+ m.add("key", "val")
+ v = self.getver(m)
+ del m["key"]
+ assert self.getver(m) > v
+
+ def test_delitem_not_found(self):
+ m = self.cls()
+ m.add("key", "val")
+ v = self.getver(m)
+ with pytest.raises(KeyError):
+ del m["notfound"]
+ assert self.getver(m) == v
+
+ def test_setitem(self):
+ m = self.cls()
+ m.add("key", "val")
+ v = self.getver(m)
+ m["key"] = "val2"
+ assert self.getver(m) > v
+
+ def test_setitem_not_found(self):
+ m = self.cls()
+ m.add("key", "val")
+ v = self.getver(m)
+ m["notfound"] = "val2"
+ assert self.getver(m) > v
+
+ def test_clear(self):
+ m = self.cls()
+ m.add("key", "val")
+ v = self.getver(m)
+ m.clear()
+ assert self.getver(m) > v
+
+ def test_setdefault(self):
+ m = self.cls()
+ m.add("key", "val")
+ v = self.getver(m)
+ m.setdefault("key2", "val2")
+ assert self.getver(m) > v
+
+ def test_popone(self):
+ m = self.cls()
+ m.add("key", "val")
+ v = self.getver(m)
+ m.popone("key")
+ assert self.getver(m) > v
+
+ def test_popone_default(self):
+ m = self.cls()
+ m.add("key", "val")
+ v = self.getver(m)
+ m.popone("key2", "default")
+ assert self.getver(m) == v
+
+ def test_popone_key_error(self):
+ m = self.cls()
+ m.add("key", "val")
+ v = self.getver(m)
+ with pytest.raises(KeyError):
+ m.popone("key2")
+ assert self.getver(m) == v
+
+ def test_pop(self):
+ m = self.cls()
+ m.add("key", "val")
+ v = self.getver(m)
+ m.pop("key")
+ assert self.getver(m) > v
+
+ def test_pop_default(self):
+ m = self.cls()
+ m.add("key", "val")
+ v = self.getver(m)
+ m.pop("key2", "default")
+ assert self.getver(m) == v
+
+ def test_pop_key_error(self):
+ m = self.cls()
+ m.add("key", "val")
+ v = self.getver(m)
+ with pytest.raises(KeyError):
+ m.pop("key2")
+ assert self.getver(m) == v
+
+ def test_popall(self):
+ m = self.cls()
+ m.add("key", "val")
+ v = self.getver(m)
+ m.popall("key")
+ assert self.getver(m) > v
+
+ def test_popall_default(self):
+ m = self.cls()
+ m.add("key", "val")
+ v = self.getver(m)
+ m.popall("key2", "default")
+ assert self.getver(m) == v
+
+ def test_popall_key_error(self):
+ m = self.cls()
+ m.add("key", "val")
+ v = self.getver(m)
+ with pytest.raises(KeyError):
+ m.popall("key2")
+ assert self.getver(m) == v
+
+ def test_popitem(self):
+ m = self.cls()
+ m.add("key", "val")
+ v = self.getver(m)
+ m.popitem()
+ assert self.getver(m) > v
+
+ def test_popitem_key_error(self):
+ m = self.cls()
+ v = self.getver(m)
+ with pytest.raises(KeyError):
+ m.popitem()
+ assert self.getver(m) == v
+
+
+if USE_EXTENSIONS:
+
+ class TestMultiDict(VersionMixin):
+
+ cls = MultiDict
+
+ def getver(self, md):
+ return getversion(md)
+
+
+if USE_EXTENSIONS:
+
+ class TestCIMultiDict(VersionMixin):
+
+ cls = CIMultiDict
+
+ def getver(self, md):
+ return getversion(md)
+
+
+class TestPyMultiDict(VersionMixin):
+
+ cls = _MultiDict # type: ignore[assignment]
+
+ def getver(self, md):
+ return _getversion(md)
+
+
+class TestPyCIMultiDict(VersionMixin):
+
+ cls = _CIMultiDict # type: ignore[assignment]
+
+ def getver(self, md):
+ return _getversion(md)
diff --git a/contrib/python/multidict/tests/ya.make b/contrib/python/multidict/tests/ya.make
new file mode 100644
index 0000000000..f9d823fb92
--- /dev/null
+++ b/contrib/python/multidict/tests/ya.make
@@ -0,0 +1,29 @@
+PY3TEST()
+
+PEERDIR(
+ contrib/python/multidict
+)
+
+TEST_SRCS(
+ conftest.py
+ test_abc.py
+ test_copy.py
+ test_guard.py
+ test_istr.py
+ test_multidict.py
+ test_mutable_multidict.py
+ test_mypy.py
+ test_pickle.py
+ test_types.py
+ test_update.py
+ test_version.py
+)
+
+DATA(
+ arcadia/contrib/python/multidict/tests
+)
+
+
+NO_LINT()
+
+END()
diff --git a/contrib/python/multidict/ya.make b/contrib/python/multidict/ya.make
new file mode 100644
index 0000000000..bdba18c601
--- /dev/null
+++ b/contrib/python/multidict/ya.make
@@ -0,0 +1,46 @@
+# Generated by devtools/yamaker (pypi).
+
+PY3_LIBRARY()
+
+VERSION(6.0.4)
+
+LICENSE(Apache-2.0)
+
+ADDINCL(
+ contrib/python/multidict/multidict
+)
+
+NO_COMPILER_WARNINGS()
+
+NO_LINT()
+
+SRCS(
+ multidict/_multidict.c
+)
+
+PY_REGISTER(
+ multidict._multidict
+)
+
+PY_SRCS(
+ TOP_LEVEL
+ multidict/__init__.py
+ multidict/__init__.pyi
+ multidict/_abc.py
+ multidict/_compat.py
+ multidict/_multidict_base.py
+ multidict/_multidict_py.py
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/python/multidict/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+ multidict/py.typed
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/contrib/python/yarl/.dist-info/METADATA b/contrib/python/yarl/.dist-info/METADATA
new file mode 100644
index 0000000000..8585ab738d
--- /dev/null
+++ b/contrib/python/yarl/.dist-info/METADATA
@@ -0,0 +1,1010 @@
+Metadata-Version: 2.1
+Name: yarl
+Version: 1.9.3
+Summary: Yet another URL library
+Home-page: https://github.com/aio-libs/yarl
+Author: Andrew Svetlov
+Author-email: andrew.svetlov@gmail.com
+Maintainer: aiohttp team <team@aiohttp.org>
+Maintainer-email: team@aiohttp.org
+License: Apache-2.0
+Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org
+Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org
+Project-URL: CI: GitHub Workflows, https://github.com/aio-libs/yarl/actions?query=branch:master
+Project-URL: Code of Conduct, https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md
+Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/yarl
+Project-URL: Docs: Changelog, https://yarl.aio-libs.org/en/latest/changes/
+Project-URL: Docs: RTD, https://yarl.aio-libs.org
+Project-URL: GitHub: issues, https://github.com/aio-libs/yarl/issues
+Project-URL: GitHub: repo, https://github.com/aio-libs/yarl
+Keywords: cython,cext,yarl
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Cython
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Topic :: Internet :: WWW/HTTP
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+License-File: NOTICE
+Requires-Dist: idna >=2.0
+Requires-Dist: multidict >=4.0
+Requires-Dist: typing-extensions >=3.7.4 ; python_version < "3.8"
+
+yarl
+====
+
+The module provides handy URL class for URL parsing and changing.
+
+.. image:: https://github.com/aio-libs/yarl/workflows/CI/badge.svg
+ :target: https://github.com/aio-libs/yarl/actions?query=workflow%3ACI
+ :align: right
+
+.. image:: https://codecov.io/gh/aio-libs/yarl/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/aio-libs/yarl
+
+.. image:: https://badge.fury.io/py/yarl.svg
+ :target: https://badge.fury.io/py/yarl
+
+
+.. image:: https://readthedocs.org/projects/yarl/badge/?version=latest
+ :target: https://yarl.aio-libs.org
+
+
+.. image:: https://img.shields.io/pypi/pyversions/yarl.svg
+ :target: https://pypi.python.org/pypi/yarl
+
+.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs:matrix.org
+ :alt: Matrix Room — #aio-libs:matrix.org
+
+.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs-space:matrix.org
+ :alt: Matrix Space — #aio-libs-space:matrix.org
+
+Introduction
+------------
+
+Url is constructed from ``str``:
+
+.. code-block:: pycon
+
+ >>> from yarl import URL
+ >>> url = URL('https://www.python.org/~guido?arg=1#frag')
+ >>> url
+ URL('https://www.python.org/~guido?arg=1#frag')
+
+All url parts: *scheme*, *user*, *password*, *host*, *port*, *path*,
+*query* and *fragment* are accessible by properties:
+
+.. code-block:: pycon
+
+ >>> url.scheme
+ 'https'
+ >>> url.host
+ 'www.python.org'
+ >>> url.path
+ '/~guido'
+ >>> url.query_string
+ 'arg=1'
+ >>> url.query
+ <MultiDictProxy('arg': '1')>
+ >>> url.fragment
+ 'frag'
+
+All url manipulations produce a new url object:
+
+.. code-block:: pycon
+
+ >>> url = URL('https://www.python.org')
+ >>> url / 'foo' / 'bar'
+ URL('https://www.python.org/foo/bar')
+ >>> url / 'foo' % {'bar': 'baz'}
+ URL('https://www.python.org/foo?bar=baz')
+
+Strings passed to constructor and modification methods are
+automatically encoded giving canonical representation as result:
+
+.. code-block:: pycon
+
+ >>> url = URL('https://www.python.org/путь')
+ >>> url
+ URL('https://www.python.org/%D0%BF%D1%83%D1%82%D1%8C')
+
+Regular properties are *percent-decoded*, use ``raw_`` versions for
+getting *encoded* strings:
+
+.. code-block:: pycon
+
+ >>> url.path
+ '/путь'
+
+ >>> url.raw_path
+ '/%D0%BF%D1%83%D1%82%D1%8C'
+
+Human readable representation of URL is available as ``.human_repr()``:
+
+.. code-block:: pycon
+
+ >>> url.human_repr()
+ 'https://www.python.org/путь'
+
+For full documentation please read https://yarl.aio-libs.org.
+
+
+Installation
+------------
+
+::
+
+ $ pip install yarl
+
+The library is Python 3 only!
+
+PyPI contains binary wheels for Linux, Windows and MacOS. If you want to install
+``yarl`` on another operating system (like *Alpine Linux*, which is not
+manylinux-compliant because of the missing glibc and therefore, cannot be
+used with our wheels) the tarball will be used to compile the library from
+the source code. It requires a C compiler and Python headers installed.
+
+To skip the compilation you must explicitly opt-in by using a PEP 517
+configuration setting ``--pure-python``, or setting the ``YARL_NO_EXTENSIONS``
+environment variable to a non-empty value, e.g.:
+
+.. code-block:: console
+
+ $ pip install yarl --config-settings=--pure-python=
+
+Please note that the pure-Python (uncompiled) version is much slower. However,
+PyPy always uses a pure-Python implementation, and, as such, it is unaffected
+by this variable.
+
+Dependencies
+------------
+
+YARL requires multidict_ library.
+
+
+API documentation
+------------------
+
+The documentation is located at https://yarl.aio-libs.org.
+
+
+Why isn't boolean supported by the URL query API?
+-------------------------------------------------
+
+There is no standard for the representation of boolean values.
+
+Some systems prefer ``true``/``false``, others like ``yes``/``no``, ``on``/``off``,
+``Y``/``N``, ``1``/``0``, etc.
+
+``yarl`` cannot make an unambiguous decision on how to serialize ``bool`` values because
+it is specific to how the end-user's application is built and would be different for
+different apps. The library doesn't accept booleans in the API; a user should convert
+bools into strings using their own preferred translation protocol.
+
+
+Comparison with other URL libraries
+------------------------------------
+
+* furl (https://pypi.python.org/pypi/furl)
+
+ The library has rich functionality but the ``furl`` object is mutable.
+
+ I'm afraid to pass this object into foreign code: who knows if the
+ code will modify my url in a terrible way while I just want to send URL
+ with handy helpers for accessing URL properties.
+
+ ``furl`` has other non-obvious tricky things but the main objection
+ is mutability.
+
+* URLObject (https://pypi.python.org/pypi/URLObject)
+
+ URLObject is immutable, that's pretty good.
+
+ Every URL change generates a new URL object.
+
+ But the library doesn't do any decode/encode transformations leaving the
+ end user to cope with these gory details.
+
+
+Source code
+-----------
+
+The project is hosted on GitHub_
+
+Please file an issue on the `bug tracker
+<https://github.com/aio-libs/yarl/issues>`_ if you have found a bug
+or have some suggestion in order to improve the library.
+
+The library uses `Azure Pipelines <https://dev.azure.com/aio-libs/yarl>`_ for
+Continuous Integration.
+
+Discussion list
+---------------
+
+*aio-libs* google group: https://groups.google.com/forum/#!forum/aio-libs
+
+Feel free to post your questions and ideas here.
+
+
+Authors and License
+-------------------
+
+The ``yarl`` package is written by Andrew Svetlov.
+
+It's *Apache 2* licensed and freely available.
+
+
+.. _GitHub: https://github.com/aio-libs/yarl
+
+.. _multidict: https://github.com/aio-libs/multidict
+
+..
+ You should *NOT* be adding new change log entries to this file, this
+ file is managed by towncrier. You *may* edit previous change logs to
+ fix problems like typo corrections or such.
+ To add a new change log entry, please see
+ https://pip.pypa.io/en/latest/development/#adding-a-news-entry
+ we named the news folder "changes".
+
+ WARNING: Don't drop the next directive!
+
+.. towncrier release notes start
+
+1.9.3 (2023-11-20)
+==================
+
+Bug fixes
+---------
+
+- Stopped dropping trailing slashes in ``yarl.URL.joinpath()`` -- by `@mjpieters <https://github.com/sponsors/mjpieters>`__. (`#862 <https://github.com/aio-libs/yarl/issues/862>`__, `#866 <https://github.com/aio-libs/yarl/issues/866>`__)
+- Started accepting string subclasses in ``__truediv__()`` operations (``URL / segment``) -- by `@mjpieters <https://github.com/sponsors/mjpieters>`__. (`#871 <https://github.com/aio-libs/yarl/issues/871>`__, `#884 <https://github.com/aio-libs/yarl/issues/884>`__)
+- Fixed the human representation of URLs with square brackets in usernames and passwords -- by `@mjpieters <https://github.com/sponsors/mjpieters>`__. (`#876 <https://github.com/aio-libs/yarl/issues/876>`__, `#882 <https://github.com/aio-libs/yarl/issues/882>`__)
+- Updated type hints to include ``URL.missing_port()``, ``URL.__bytes__()``
+ and the ``encoding`` argument to ``yarl.URL.joinpath()``
+ -- by `@mjpieters <https://github.com/sponsors/mjpieters>`__. (`#891 <https://github.com/aio-libs/yarl/issues/891>`__)
+
+
+Packaging updates and notes for downstreams
+-------------------------------------------
+
+- Integrated Cython 3 to enable building *yarl* under Python 3.12 -- by `@mjpieters <https://github.com/sponsors/mjpieters>`__. (`#829 <https://github.com/aio-libs/yarl/issues/829>`__, `#881 <https://github.com/aio-libs/yarl/issues/881>`__)
+- Declared modern ``setuptools.build_meta`` as the PEP ``517`` build
+  backend in ``pyproject.toml`` explicitly -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__. (`#886 <https://github.com/aio-libs/yarl/issues/886>`__)
+- Converted most of the packaging setup into a declarative ``setup.cfg``
+ config -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__. (`#890 <https://github.com/aio-libs/yarl/issues/890>`__)
+- Replaced the old-fashioned ``setup.py`` packaging with an
+  in-tree PEP ``517`` build backend -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.
+
+ Whenever the end-users or downstream packagers need to build ``yarl`` from
+ source (a Git checkout or an sdist), they may pass a ``config_settings``
+ flag ``--pure-python``. If this flag is not set, a C-extension will be built
+ and included into the distribution.
+
+ Here is how this can be done with ``pip``:
+
+ .. code-block:: console
+
+ $ python -m pip install . --config-settings=--pure-python=
+
+ This will also work with ``-e | --editable``.
+
+ The same can be achieved via ``pypa/build``:
+
+ .. code-block:: console
+
+ $ python -m build --config-setting=--pure-python=
+
+ Adding ``-w | --wheel`` can force ``pypa/build`` produce a wheel from source
+ directly, as opposed to building an ``sdist`` and then building from it. (`#893 <https://github.com/aio-libs/yarl/issues/893>`__)
+- Declared Python 3.12 supported officially in the distribution package metadata
+ -- by `@edgarrmondragon <https://github.com/sponsors/edgarrmondragon>`__. (`#942 <https://github.com/aio-libs/yarl/issues/942>`__)
+
+
+Contributor-facing changes
+--------------------------
+
+- A regression test for no-host URLs was added per `#821 <https://github.com/aio-libs/yarl/issues/821>`__
+  and RFC ``3986`` -- by `@kenballus <https://github.com/sponsors/kenballus>`__. (`#821 <https://github.com/aio-libs/yarl/issues/821>`__, `#822 <https://github.com/aio-libs/yarl/issues/822>`__)
+- Started testing *yarl* against Python 3.12 in CI -- by `@mjpieters <https://github.com/sponsors/mjpieters>`__. (`#881 <https://github.com/aio-libs/yarl/issues/881>`__)
+- All Python 3.12 jobs are now marked as required to pass in CI
+ -- by `@edgarrmondragon <https://github.com/sponsors/edgarrmondragon>`__. (`#942 <https://github.com/aio-libs/yarl/issues/942>`__)
+- MyST is now integrated in Sphinx -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.
+
+ This allows the contributors to author new documents in Markdown
+ when they have difficulties with going straight RST. (`#953 <https://github.com/aio-libs/yarl/issues/953>`__)
+
+
+1.9.2 (2023-04-25)
+==================
+
+Bugfixes
+--------
+
+- Fix regression with ``__truediv__`` and absolute URLs with empty paths causing the raw path to lack the leading ``/``.
+ (`#854 <https://github.com/aio-libs/yarl/issues/854>`_)
+
+
+1.9.1 (2023-04-21)
+==================
+
+Bugfixes
+--------
+
+- Marked tests that fail on older Python patch releases (< 3.7.10, < 3.8.8 and < 3.9.2) as expected to fail due to missing a security fix for CVE-2021-23336. (`#850 <https://github.com/aio-libs/yarl/issues/850>`_)
+
+
+1.9.0 (2023-04-19)
+==================
+
+This release was never published to PyPI, due to issues with the build process.
+
+Features
+--------
+
+- Added ``URL.joinpath(*elements)``, to create a new URL appending multiple path elements. (`#704 <https://github.com/aio-libs/yarl/issues/704>`_)
+- Made ``URL.__truediv__()`` return ``NotImplemented`` if called with an
+ unsupported type — by `@michaeljpeters <https://github.com/sponsors/michaeljpeters>`__.
+ (`#832 <https://github.com/aio-libs/yarl/issues/832>`_)
+
+
+Bugfixes
+--------
+
+- Path normalization for absolute URLs no longer raises a ValueError exception
+ when ``..`` segments would otherwise go beyond the URL path root.
+ (`#536 <https://github.com/aio-libs/yarl/issues/536>`_)
+- Fixed an issue with update_query() not getting rid of the query when argument is None. (`#792 <https://github.com/aio-libs/yarl/issues/792>`_)
+- Added some input restrictions on with_port() function to prevent invalid boolean inputs or out of valid port inputs; handled incorrect 0 port representation. (`#793 <https://github.com/aio-libs/yarl/issues/793>`_)
+- Made ``yarl.URL.build()`` raise a ``TypeError`` if the ``host`` argument is ``None`` — by `@paulpapacz <https://github.com/sponsors/paulpapacz>`__. (`#808 <https://github.com/aio-libs/yarl/issues/808>`_)
+- Fixed an issue with ``update_query()`` getting rid of the query when the argument
+ is empty but not ``None``. (`#845 <https://github.com/aio-libs/yarl/issues/845>`_)
+
+
+Misc
+----
+
+- `#220 <https://github.com/aio-libs/yarl/issues/220>`_
+
+
+1.8.2 (2022-12-03)
+==================
+
+This is the first release that started shipping wheels for Python 3.11.
+
+
+1.8.1 (2022-08-01)
+==================
+
+Misc
+----
+
+- `#694 <https://github.com/aio-libs/yarl/issues/694>`_, `#699 <https://github.com/aio-libs/yarl/issues/699>`_, `#700 <https://github.com/aio-libs/yarl/issues/700>`_, `#701 <https://github.com/aio-libs/yarl/issues/701>`_, `#702 <https://github.com/aio-libs/yarl/issues/702>`_, `#703 <https://github.com/aio-libs/yarl/issues/703>`_, `#739 <https://github.com/aio-libs/yarl/issues/739>`_
+
+
+1.8.0 (2022-08-01)
+==================
+
+Features
+--------
+
+- Added ``URL.raw_suffix``, ``URL.suffix``, ``URL.raw_suffixes``, ``URL.suffixes``, ``URL.with_suffix``. (`#613 <https://github.com/aio-libs/yarl/issues/613>`_)
+
+
+Improved Documentation
+----------------------
+
+- Fixed broken internal references to ``yarl.URL.human_repr()``.
+ (`#665 <https://github.com/aio-libs/yarl/issues/665>`_)
+- Fixed broken external references to ``multidict:index`` docs. (`#665 <https://github.com/aio-libs/yarl/issues/665>`_)
+
+
+Deprecations and Removals
+-------------------------
+
+- Dropped Python 3.6 support. (`#672 <https://github.com/aio-libs/yarl/issues/672>`_)
+
+
+Misc
+----
+
+- `#646 <https://github.com/aio-libs/yarl/issues/646>`_, `#699 <https://github.com/aio-libs/yarl/issues/699>`_, `#701 <https://github.com/aio-libs/yarl/issues/701>`_
+
+
+1.7.2 (2021-11-01)
+==================
+
+Bugfixes
+--------
+
+- Changed call in ``with_port()`` to stop reencoding parts of the URL that were already encoded. (`#623 <https://github.com/aio-libs/yarl/issues/623>`_)
+
+
+1.7.1 (2021-10-07)
+==================
+
+Bugfixes
+--------
+
+- Fix 1.7.0 build error
+
+1.7.0 (2021-10-06)
+==================
+
+Features
+--------
+
+- Add ``__bytes__()`` magic method so that ``bytes(url)`` will work and use optimal ASCII encoding.
+ (`#582 <https://github.com/aio-libs/yarl/issues/582>`_)
+- Started shipping platform-specific arm64 wheels for Apple Silicon. (`#622 <https://github.com/aio-libs/yarl/issues/622>`_)
+- Started shipping platform-specific wheels with the ``musl`` tag targeting typical Alpine Linux runtimes. (`#622 <https://github.com/aio-libs/yarl/issues/622>`_)
+- Added support for Python 3.10. (`#622 <https://github.com/aio-libs/yarl/issues/622>`_)
+
+
+1.6.3 (2020-11-14)
+==================
+
+Bugfixes
+--------
+
+- No longer lose characters when decoding incorrect percent-sequences (like ``%e2%82%f8``). All non-decodable percent-sequences are now preserved.
+ `#517 <https://github.com/aio-libs/yarl/issues/517>`_
+- Provide x86 Windows wheels.
+ `#535 <https://github.com/aio-libs/yarl/issues/535>`_
+
+
+----
+
+
+1.6.2 (2020-10-12)
+==================
+
+
+Bugfixes
+--------
+
+- Provide generated ``.c`` files in TarBall distribution.
+ `#530 <https://github.com/aio-libs/multidict/issues/530>`_
+
+1.6.1 (2020-10-12)
+==================
+
+Features
+--------
+
+- Provide wheels for ``aarch64``, ``i686``, ``ppc64le``, ``s390x`` architectures on
+ Linux as well as ``x86_64``.
+ `#507 <https://github.com/aio-libs/yarl/issues/507>`_
+- Provide wheels for Python 3.9.
+ `#526 <https://github.com/aio-libs/yarl/issues/526>`_
+
+Bugfixes
+--------
+
+- ``human_repr()`` now always produces valid representation equivalent to the original URL (if the original URL is valid).
+ `#511 <https://github.com/aio-libs/yarl/issues/511>`_
+- Fixed requoting a single percent followed by a percent-encoded character in the Cython implementation.
+ `#514 <https://github.com/aio-libs/yarl/issues/514>`_
+- Fix ValueError when decoding ``%`` which is not followed by two hexadecimal digits.
+ `#516 <https://github.com/aio-libs/yarl/issues/516>`_
+- Fix decoding ``%`` followed by a space and hexadecimal digit.
+ `#520 <https://github.com/aio-libs/yarl/issues/520>`_
+- Fix annotation of ``with_query()``/``update_query()`` methods for ``key=[val1, val2]`` case.
+ `#528 <https://github.com/aio-libs/yarl/issues/528>`_
+
+Removal
+-------
+
+- Drop Python 3.5 support; Python 3.6 is the minimal supported Python version.
+
+
+----
+
+
+1.6.0 (2020-09-23)
+==================
+
+Features
+--------
+
+- Allow for int and float subclasses in query, while still denying bool.
+ `#492 <https://github.com/aio-libs/yarl/issues/492>`_
+
+
+Bugfixes
+--------
+
+- Do not requote arguments in ``URL.build()``, ``with_xxx()`` and in ``/`` operator.
+ `#502 <https://github.com/aio-libs/yarl/issues/502>`_
+- Keep IPv6 brackets in ``origin()``.
+ `#504 <https://github.com/aio-libs/yarl/issues/504>`_
+
+
+----
+
+
+1.5.1 (2020-08-01)
+==================
+
+Bugfixes
+--------
+
+- Fix including relocated internal ``yarl._quoting_c`` C-extension into published PyPI dists.
+ `#485 <https://github.com/aio-libs/yarl/issues/485>`_
+
+
+Misc
+----
+
+- `#484 <https://github.com/aio-libs/yarl/issues/484>`_
+
+
+----
+
+
+1.5.0 (2020-07-26)
+==================
+
+Features
+--------
+
+- Convert host to lowercase on URL building.
+ `#386 <https://github.com/aio-libs/yarl/issues/386>`_
+- Allow using ``mod`` operator (``%``) for updating query string (an alias for ``update_query()`` method).
+ `#435 <https://github.com/aio-libs/yarl/issues/435>`_
+- Allow use of sequences such as ``list`` and ``tuple`` in the values
+ of a mapping such as ``dict`` to represent that a key has many values::
+
+ url = URL("http://example.com")
+ assert url.with_query({"a": [1, 2]}) == URL("http://example.com/?a=1&a=2")
+
+ `#443 <https://github.com/aio-libs/yarl/issues/443>`_
+- Support ``URL.build()`` with scheme and path (creates a relative URL).
+ `#464 <https://github.com/aio-libs/yarl/issues/464>`_
+- Cache slow IDNA encode/decode calls.
+ `#476 <https://github.com/aio-libs/yarl/issues/476>`_
+- Add ``@final`` / ``Final`` type hints
+ `#477 <https://github.com/aio-libs/yarl/issues/477>`_
+- Support URL authority/raw_authority properties and authority argument of ``URL.build()`` method.
+ `#478 <https://github.com/aio-libs/yarl/issues/478>`_
+- Hide the library implementation details, make the exposed public list very clean.
+ `#483 <https://github.com/aio-libs/yarl/issues/483>`_
+
+
+Bugfixes
+--------
+
+- Fix tests with newer Python (3.7.6, 3.8.1 and 3.9.0+).
+ `#409 <https://github.com/aio-libs/yarl/issues/409>`_
+- Fix a bug where query component, passed in a form of mapping or sequence, is unquoted in unexpected way.
+ `#426 <https://github.com/aio-libs/yarl/issues/426>`_
+- Hide ``Query`` and ``QueryVariable`` type aliases in ``__init__.pyi``, now they are prefixed with underscore.
+ `#431 <https://github.com/aio-libs/yarl/issues/431>`_
+- Keep IPv6 brackets after updating port/user/password.
+ `#451 <https://github.com/aio-libs/yarl/issues/451>`_
+
+
+----
+
+
+1.4.2 (2019-12-05)
+==================
+
+Features
+--------
+
+- Workaround for missing ``str.isascii()`` in Python 3.6
+ `#389 <https://github.com/aio-libs/yarl/issues/389>`_
+
+
+----
+
+
+1.4.1 (2019-11-29)
+==================
+
+* Fix regression, make the library work on Python 3.5 and 3.6 again.
+
+1.4.0 (2019-11-29)
+==================
+
+* Distinguish an empty password in URL from a password not provided at all (#262)
+
+* Fixed annotations for optional parameters of ``URL.build`` (#309)
+
+* Use None as default value of ``user`` parameter of ``URL.build`` (#309)
+
+* Enforce building C Accelerated modules when installing from source tarball, use
+ ``YARL_NO_EXTENSIONS`` environment variable for falling back to (slower) Pure Python
+ implementation (#329)
+
+* Drop Python 3.5 support
+
+* Fix quoting of plus in path by pure python version (#339)
+
+* Don't create a new URL if fragment is unchanged (#292)
+
+* Included in error message the path that produces starting slash forbidden error (#376)
+
+* Skip slow IDNA encoding for ASCII-only strings (#387)
+
+
+1.3.0 (2018-12-11)
+==================
+
+* Fix annotations for ``query`` parameter (#207)
+
+* An incoming query sequence can have int variables (the same as for
+ Mapping type) (#208)
+
+* Add ``URL.explicit_port`` property (#218)
+
+* Give a friendlier error when port can't be converted to int (#168)
+
+* ``bool(URL())`` now returns ``False`` (#272)
+
+1.2.6 (2018-06-14)
+==================
+
+* Drop Python 3.4 trove classifier (#205)
+
+1.2.5 (2018-05-23)
+==================
+
+* Fix annotations for ``build`` (#199)
+
+1.2.4 (2018-05-08)
+==================
+
+* Fix annotations for ``cached_property`` (#195)
+
+1.2.3 (2018-05-03)
+==================
+
+* Accept ``str`` subclasses in ``URL`` constructor (#190)
+
+1.2.2 (2018-05-01)
+==================
+
+* Fix build
+
+1.2.1 (2018-04-30)
+==================
+
+* Pin minimal required Python to 3.5.3 (#189)
+
+1.2.0 (2018-04-30)
+==================
+
+* Forbid inheritance, replace ``__init__`` with ``__new__`` (#171)
+
+* Support PEP-561 (provide type hinting marker) (#182)
+
+1.1.1 (2018-02-17)
+==================
+
+* Fix performance regression: don't encode empty ``netloc`` (#170)
+
+1.1.0 (2018-01-21)
+==================
+
+* Make pure Python quoter consistent with Cython version (#162)
+
+1.0.0 (2018-01-15)
+==================
+
+* Use fast path if quoted string does not need requoting (#154)
+
+* Speed up quoting/unquoting by ``_Quoter`` and ``_Unquoter`` classes (#155)
+
+* Drop ``yarl.quote`` and ``yarl.unquote`` public functions (#155)
+
+* Add custom string writer, reuse static buffer if available (#157)
+ Code is 50-80 times faster than Pure Python version (was 4-5 times faster)
+
+* Don't recode IP zone (#144)
+
+* Support ``encoded=True`` in ``yarl.URL.build()`` (#158)
+
+* Fix updating query with multiple keys (#160)
+
+0.18.0 (2018-01-10)
+===================
+
+* Fallback to IDNA 2003 if domain name is not IDNA 2008 compatible (#152)
+
+0.17.0 (2017-12-30)
+===================
+
+* Use IDNA 2008 for domain name processing (#149)
+
+0.16.0 (2017-12-07)
+===================
+
+* Fix raising ``TypeError`` by ``url.query_string()`` after
+ ``url.with_query({})`` (empty mapping) (#141)
+
+0.15.0 (2017-11-23)
+===================
+
+* Add ``raw_path_qs`` attribute (#137)
+
+0.14.2 (2017-11-14)
+===================
+
+* Restore ``strict`` parameter as no-op in ``quote`` / ``unquote``
+
+0.14.1 (2017-11-13)
+===================
+
+* Restore ``strict`` parameter as no-op for sake of compatibility with
+ aiohttp 2.2
+
+0.14.0 (2017-11-11)
+===================
+
+* Drop strict mode (#123)
+
+* Fix ``"ValueError: Unallowed PCT %"`` when there's a ``"%"`` in the URL (#124)
+
+0.13.0 (2017-10-01)
+===================
+
+* Document ``encoded`` parameter (#102)
+
+* Support relative URLs like ``'?key=value'`` (#100)
+
+* Unsafe encoding for QS fixed. Encode ``;`` character in value parameter (#104)
+
+* Process passwords without user names (#95)
+
+0.12.0 (2017-06-26)
+===================
+
+* Properly support paths without leading slash in ``URL.with_path()`` (#90)
+
+* Enable type annotation checks
+
+0.11.0 (2017-06-26)
+===================
+
+* Normalize path (#86)
+
+* Clear query and fragment parts in ``.with_path()`` (#85)
+
+0.10.3 (2017-06-13)
+===================
+
+* Prevent double URL arguments unquoting (#83)
+
+0.10.2 (2017-05-05)
+===================
+
+* Unexpected hash behavior (#75)
+
+
+0.10.1 (2017-05-03)
+===================
+
+* Unexpected compare behavior (#73)
+
+* Do not quote or unquote + if not a query string. (#74)
+
+
+0.10.0 (2017-03-14)
+===================
+
+* Added ``URL.build`` class method (#58)
+
+* Added ``path_qs`` attribute (#42)
+
+
+0.9.8 (2017-02-16)
+==================
+
+* Do not quote ``:`` in path
+
+
+0.9.7 (2017-02-16)
+==================
+
+* Load from pickle without _cache (#56)
+
+* Percent-encoded pluses in path variables become spaces (#59)
+
+
+0.9.6 (2017-02-15)
+==================
+
+* Revert backward incompatible change (BaseURL)
+
+
+0.9.5 (2017-02-14)
+==================
+
+* Fix BaseURL rich comparison support
+
+
+0.9.4 (2017-02-14)
+==================
+
+* Use BaseURL
+
+
+0.9.3 (2017-02-14)
+==================
+
+* Added BaseURL
+
+
+0.9.2 (2017-02-08)
+==================
+
+* Remove debug print
+
+
+0.9.1 (2017-02-07)
+==================
+
+* Do not lose tail chars (#45)
+
+
+0.9.0 (2017-02-07)
+==================
+
+* Allow to quote ``%`` in non strict mode (#21)
+
+* Incorrect parsing of query parameters with %3B (;) inside (#34)
+
+* Fix core dumps (#41)
+
+* ``tmpbuf`` - compiling error (#43)
+
+* Added ``URL.update_path()`` method
+
+* Added ``URL.update_query()`` method (#47)
+
+
+0.8.1 (2016-12-03)
+==================
+
+* Fix broken aiohttp: revert back ``quote`` / ``unquote``.
+
+
+0.8.0 (2016-12-03)
+==================
+
+* Support more verbose error messages in ``.with_query()`` (#24)
+
+* Don't percent-encode ``@`` and ``:`` in path (#32)
+
+* Don't expose ``yarl.quote`` and ``yarl.unquote``, these functions are
+ part of private API
+
+0.7.1 (2016-11-18)
+==================
+
+* Accept not only ``str`` but all classes inherited from ``str`` also (#25)
+
+0.7.0 (2016-11-07)
+==================
+
+* Accept ``int`` as value for ``.with_query()``
+
+0.6.0 (2016-11-07)
+==================
+
+* Explicitly use UTF8 encoding in ``setup.py`` (#20)
+* Properly unquote non-UTF8 strings (#19)
+
+0.5.3 (2016-11-02)
+==================
+
+* Don't use ``typing.NamedTuple`` fields but indexes on URL construction
+
+0.5.2 (2016-11-02)
+==================
+
+* Inline ``_encode`` class method
+
+0.5.1 (2016-11-02)
+==================
+
+* Make URL construction faster by removing extra classmethod calls
+
+0.5.0 (2016-11-02)
+==================
+
+* Add Cython optimization for quoting/unquoting
+* Provide binary wheels
+
+0.4.3 (2016-09-29)
+==================
+
+* Fix typing stubs
+
+0.4.2 (2016-09-29)
+==================
+
+* Expose ``quote()`` and ``unquote()`` as public API
+
+0.4.1 (2016-09-28)
+==================
+
+* Support empty values in query (``'/path?arg'``)
+
+0.4.0 (2016-09-27)
+==================
+
+* Introduce ``relative()`` (#16)
+
+0.3.2 (2016-09-27)
+==================
+
+* Typo fixes #15
+
+0.3.1 (2016-09-26)
+==================
+
+* Support sequence of pairs as ``with_query()`` parameter
+
+0.3.0 (2016-09-26)
+==================
+
+* Introduce ``is_default_port()``
+
+0.2.1 (2016-09-26)
+==================
+
+* Raise ValueError for URLs like 'http://:8080/'
+
+0.2.0 (2016-09-18)
+==================
+
+* Avoid doubling slashes when joining paths (#13)
+
+* Appending path starting from slash is forbidden (#12)
+
+0.1.4 (2016-09-09)
+==================
+
+* Add ``kwargs`` support for ``with_query()`` (#10)
+
+0.1.3 (2016-09-07)
+==================
+
+* Document ``with_query()``, ``with_fragment()`` and ``origin()``
+
+* Allow ``None`` for ``with_query()`` and ``with_fragment()``
+
+0.1.2 (2016-09-07)
+==================
+
+* Fix links, tune docs theme.
+
+0.1.1 (2016-09-06)
+==================
+
+* Update README, old version used obsolete API
+
+0.1.0 (2016-09-06)
+==================
+
+* The library was deeply refactored, bytes are gone away but all
+ accepted strings are encoded if needed.
+
+0.0.1 (2016-08-30)
+==================
+
+* The first release.
diff --git a/contrib/python/yarl/.dist-info/top_level.txt b/contrib/python/yarl/.dist-info/top_level.txt
new file mode 100644
index 0000000000..e93e8bddef
--- /dev/null
+++ b/contrib/python/yarl/.dist-info/top_level.txt
@@ -0,0 +1 @@
+yarl
diff --git a/contrib/python/yarl/LICENSE b/contrib/python/yarl/LICENSE
new file mode 100644
index 0000000000..d645695673
--- /dev/null
+++ b/contrib/python/yarl/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/contrib/python/yarl/NOTICE b/contrib/python/yarl/NOTICE
new file mode 100644
index 0000000000..fa53b2b138
--- /dev/null
+++ b/contrib/python/yarl/NOTICE
@@ -0,0 +1,13 @@
+ Copyright 2016-2021, Andrew Svetlov and aio-libs team
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/contrib/python/yarl/README.rst b/contrib/python/yarl/README.rst
new file mode 100644
index 0000000000..a1032b206a
--- /dev/null
+++ b/contrib/python/yarl/README.rst
@@ -0,0 +1,209 @@
+yarl
+====
+
+The module provides handy URL class for URL parsing and changing.
+
+.. image:: https://github.com/aio-libs/yarl/workflows/CI/badge.svg
+ :target: https://github.com/aio-libs/yarl/actions?query=workflow%3ACI
+ :align: right
+
+.. image:: https://codecov.io/gh/aio-libs/yarl/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/aio-libs/yarl
+
+.. image:: https://badge.fury.io/py/yarl.svg
+ :target: https://badge.fury.io/py/yarl
+
+
+.. image:: https://readthedocs.org/projects/yarl/badge/?version=latest
+ :target: https://yarl.aio-libs.org
+
+
+.. image:: https://img.shields.io/pypi/pyversions/yarl.svg
+ :target: https://pypi.python.org/pypi/yarl
+
+.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs:matrix.org
+ :alt: Matrix Room — #aio-libs:matrix.org
+
+.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs-space:matrix.org
+ :alt: Matrix Space — #aio-libs-space:matrix.org
+
+Introduction
+------------
+
+Url is constructed from ``str``:
+
+.. code-block:: pycon
+
+ >>> from yarl import URL
+ >>> url = URL('https://www.python.org/~guido?arg=1#frag')
+ >>> url
+ URL('https://www.python.org/~guido?arg=1#frag')
+
+All url parts: *scheme*, *user*, *password*, *host*, *port*, *path*,
+*query* and *fragment* are accessible by properties:
+
+.. code-block:: pycon
+
+ >>> url.scheme
+ 'https'
+ >>> url.host
+ 'www.python.org'
+ >>> url.path
+ '/~guido'
+ >>> url.query_string
+ 'arg=1'
+ >>> url.query
+ <MultiDictProxy('arg': '1')>
+ >>> url.fragment
+ 'frag'
+
+All url manipulations produce a new url object:
+
+.. code-block:: pycon
+
+ >>> url = URL('https://www.python.org')
+ >>> url / 'foo' / 'bar'
+ URL('https://www.python.org/foo/bar')
+ >>> url / 'foo' % {'bar': 'baz'}
+ URL('https://www.python.org/foo?bar=baz')
+
+Strings passed to constructor and modification methods are
+automatically encoded giving canonical representation as result:
+
+.. code-block:: pycon
+
+ >>> url = URL('https://www.python.org/путь')
+ >>> url
+ URL('https://www.python.org/%D0%BF%D1%83%D1%82%D1%8C')
+
+Regular properties are *percent-decoded*, use ``raw_`` versions for
+getting *encoded* strings:
+
+.. code-block:: pycon
+
+ >>> url.path
+ '/путь'
+
+ >>> url.raw_path
+ '/%D0%BF%D1%83%D1%82%D1%8C'
+
+Human readable representation of URL is available as ``.human_repr()``:
+
+.. code-block:: pycon
+
+ >>> url.human_repr()
+ 'https://www.python.org/путь'
+
+For full documentation please read https://yarl.aio-libs.org.
+
+
+Installation
+------------
+
+::
+
+ $ pip install yarl
+
+The library is Python 3 only!
+
+PyPI contains binary wheels for Linux, Windows and MacOS. If you want to install
+``yarl`` on another operating system (like *Alpine Linux*, which is not
+manylinux-compliant because of the missing glibc and therefore, cannot be
+used with our wheels) the tarball will be used to compile the library from
+the source code. It requires a C compiler and Python headers installed.
+
+To skip the compilation you must explicitly opt-in by using a PEP 517
+configuration setting ``--pure-python``, or setting the ``YARL_NO_EXTENSIONS``
+environment variable to a non-empty value, e.g.:
+
+.. code-block:: console
+
+ $ pip install yarl --config-settings=--pure-python=
+
+Please note that the pure-Python (uncompiled) version is much slower. However,
+PyPy always uses a pure-Python implementation, and, as such, it is unaffected
+by this variable.
+
+Dependencies
+------------
+
+YARL requires multidict_ library.
+
+
+API documentation
+------------------
+
+The documentation is located at https://yarl.aio-libs.org.
+
+
+Why isn't boolean supported by the URL query API?
+-------------------------------------------------
+
+There is no standard for boolean representation of boolean values.
+
+Some systems prefer ``true``/``false``, others like ``yes``/``no``, ``on``/``off``,
+``Y``/``N``, ``1``/``0``, etc.
+
+``yarl`` cannot make an unambiguous decision on how to serialize ``bool`` values because
+it is specific to how the end-user's application is built and would be different for
+different apps. The library doesn't accept booleans in the API; a user should convert
+bools into strings using own preferred translation protocol.
+
+
+Comparison with other URL libraries
+------------------------------------
+
+* furl (https://pypi.python.org/pypi/furl)
+
+ The library has rich functionality but the ``furl`` object is mutable.
+
+ I'm afraid to pass this object into foreign code: who knows if the
+ code will modify my url in a terrible way while I just want to send URL
+ with handy helpers for accessing URL properties.
+
+ ``furl`` has other non-obvious tricky things but the main objection
+ is mutability.
+
+* URLObject (https://pypi.python.org/pypi/URLObject)
+
+ URLObject is immutable, that's pretty good.
+
+ Every URL change generates a new URL object.
+
+ But the library doesn't do any decode/encode transformations leaving the
+ end user to cope with these gory details.
+
+
+Source code
+-----------
+
+The project is hosted on GitHub_
+
+Please file an issue on the `bug tracker
+<https://github.com/aio-libs/yarl/issues>`_ if you have found a bug
+or have some suggestion in order to improve the library.
+
+The library uses `Azure Pipelines <https://dev.azure.com/aio-libs/yarl>`_ for
+Continuous Integration.
+
+Discussion list
+---------------
+
+*aio-libs* google group: https://groups.google.com/forum/#!forum/aio-libs
+
+Feel free to post your questions and ideas here.
+
+
+Authors and License
+-------------------
+
+The ``yarl`` package is written by Andrew Svetlov.
+
+It's *Apache 2* licensed and freely available.
+
+
+.. _GitHub: https://github.com/aio-libs/yarl
+
+.. _multidict: https://github.com/aio-libs/multidict
diff --git a/contrib/python/yarl/tests/test_cache.py b/contrib/python/yarl/tests/test_cache.py
new file mode 100644
index 0000000000..22141dd085
--- /dev/null
+++ b/contrib/python/yarl/tests/test_cache.py
@@ -0,0 +1,28 @@
+import yarl
+
+# Don't check the actual behavior but make sure that calls are allowed
+
+
+def teardown_module():
+ yarl.cache_configure()
+
+
+def test_cache_clear() -> None:
+ yarl.cache_clear()
+
+
+def test_cache_info() -> None:
+ info = yarl.cache_info()
+ assert info.keys() == {"idna_encode", "idna_decode"}
+
+
+def test_cache_configure_default() -> None:
+ yarl.cache_configure()
+
+
+def test_cache_configure_None() -> None:
+ yarl.cache_configure(idna_encode_size=None, idna_decode_size=None)
+
+
+def test_cache_configure_explicit() -> None:
+ yarl.cache_configure(idna_encode_size=128, idna_decode_size=128)
diff --git a/contrib/python/yarl/tests/test_cached_property.py b/contrib/python/yarl/tests/test_cached_property.py
new file mode 100644
index 0000000000..5dcb5ece23
--- /dev/null
+++ b/contrib/python/yarl/tests/test_cached_property.py
@@ -0,0 +1,45 @@
+import pytest
+
+from yarl._url import cached_property
+
+
+def test_reify():
+ class A:
+ def __init__(self):
+ self._cache = {}
+
+ @cached_property
+ def prop(self):
+ return 1
+
+ a = A()
+ assert 1 == a.prop
+
+
+def test_reify_class():
+ class A:
+ def __init__(self):
+ self._cache = {}
+
+ @cached_property
+ def prop(self):
+ """Docstring."""
+ return 1
+
+ assert isinstance(A.prop, cached_property)
+ assert "Docstring." == A.prop.__doc__
+
+
+def test_reify_assignment():
+ class A:
+ def __init__(self):
+ self._cache = {}
+
+ @cached_property
+ def prop(self):
+ return 1
+
+ a = A()
+
+ with pytest.raises(AttributeError):
+ a.prop = 123
diff --git a/contrib/python/yarl/tests/test_normalize_path.py b/contrib/python/yarl/tests/test_normalize_path.py
new file mode 100644
index 0000000000..defc4d8dd7
--- /dev/null
+++ b/contrib/python/yarl/tests/test_normalize_path.py
@@ -0,0 +1,34 @@
+import pytest
+
+from yarl import URL
+
+PATHS = [
+ # No dots
+ ("", ""),
+ ("/", "/"),
+ ("//", "//"),
+ ("///", "///"),
+ # Single-dot
+ ("path/to", "path/to"),
+ ("././path/to", "path/to"),
+ ("path/./to", "path/to"),
+ ("path/././to", "path/to"),
+ ("path/to/.", "path/to/"),
+ ("path/to/./.", "path/to/"),
+ # Double-dots
+ ("../path/to", "path/to"),
+ ("path/../to", "to"),
+ ("path/../../to", "to"),
+ # absolute path root / is maintained; tests based on two
+ # tests from web-platform-tests project's urltestdata.json
+ ("/foo/../../../ton", "/ton"),
+ ("/foo/../../../..bar", "/..bar"),
+ # Non-ASCII characters
+ ("μονοπάτι/../../να/ᴜɴɪ/ᴄᴏᴅᴇ", "να/ᴜɴɪ/ᴄᴏᴅᴇ"),
+ ("μονοπάτι/../../να/𝕦𝕟𝕚/𝕔𝕠𝕕𝕖/.", "να/𝕦𝕟𝕚/𝕔𝕠𝕕𝕖/"),
+]
+
+
+@pytest.mark.parametrize("original,expected", PATHS)
+def test__normalize_path(original, expected):
+ assert URL._normalize_path(original) == expected
diff --git a/contrib/python/yarl/tests/test_pickle.py b/contrib/python/yarl/tests/test_pickle.py
new file mode 100644
index 0000000000..a1f29ab68c
--- /dev/null
+++ b/contrib/python/yarl/tests/test_pickle.py
@@ -0,0 +1,23 @@
+import pickle
+
+from yarl import URL
+
+# serialize
+
+
+def test_pickle():
+ u1 = URL("test")
+ hash(u1)
+ v = pickle.dumps(u1)
+ u2 = pickle.loads(v)
+ assert u1._cache
+ assert not u2._cache
+ assert hash(u1) == hash(u2)
+
+
+def test_default_style_state():
+ u = URL("test")
+ hash(u)
+ u.__setstate__((None, {"_val": "test", "_strict": False, "_cache": {"hash": 1}}))
+ assert not u._cache
+ assert u._val == "test"
diff --git a/contrib/python/yarl/tests/test_quoting.py b/contrib/python/yarl/tests/test_quoting.py
new file mode 100644
index 0000000000..7ebc0f9b04
--- /dev/null
+++ b/contrib/python/yarl/tests/test_quoting.py
@@ -0,0 +1,450 @@
+import pytest
+
+from yarl._quoting import NO_EXTENSIONS
+from yarl._quoting_py import _Quoter as _PyQuoter
+from yarl._quoting_py import _Unquoter as _PyUnquoter
+
+if not NO_EXTENSIONS:
+ from yarl._quoting_c import _Quoter as _CQuoter
+ from yarl._quoting_c import _Unquoter as _CUnquoter
+
+ @pytest.fixture(params=[_PyQuoter, _CQuoter], ids=["py_quoter", "c_quoter"])
+ def quoter(request):
+ return request.param
+
+ @pytest.fixture(params=[_PyUnquoter, _CUnquoter], ids=["py_unquoter", "c_unquoter"])
+ def unquoter(request):
+ return request.param
+
+else:
+
+ @pytest.fixture(params=[_PyQuoter], ids=["py_quoter"])
+ def quoter(request):
+ return request.param
+
+ @pytest.fixture(params=[_PyUnquoter], ids=["py_unquoter"])
+ def unquoter(request):
+ return request.param
+
+
+def hexescape(char):
+ """Escape char as RFC 2396 specifies"""
+ hex_repr = hex(ord(char))[2:].upper()
+ if len(hex_repr) == 1:
+ hex_repr = "0%s" % hex_repr
+ return "%" + hex_repr
+
+
+def test_quote_not_allowed_non_strict(quoter):
+ assert quoter()("%HH") == "%25HH"
+
+
+def test_quote_unfinished_tail_percent_non_strict(quoter):
+ assert quoter()("%") == "%25"
+
+
+def test_quote_unfinished_tail_digit_non_strict(quoter):
+ assert quoter()("%2") == "%252"
+
+
+def test_quote_unfinished_tail_safe_non_strict(quoter):
+ assert quoter()("%x") == "%25x"
+
+
+def test_quote_unfinished_tail_unsafe_non_strict(quoter):
+ assert quoter()("%#") == "%25%23"
+
+
+def test_quote_unfinished_tail_non_ascii_non_strict(quoter):
+ assert quoter()("%ß") == "%25%C3%9F"
+
+
+def test_quote_unfinished_tail_non_ascii2_non_strict(quoter):
+ assert quoter()("%€") == "%25%E2%82%AC"
+
+
+def test_quote_unfinished_tail_non_ascii3_non_strict(quoter):
+ assert quoter()("%🐍") == "%25%F0%9F%90%8D"
+
+
+def test_quote_from_bytes(quoter):
+ assert quoter()("archaeological arcana") == "archaeological%20arcana"
+ assert quoter()("") == ""
+
+
+def test_quote_ignore_broken_unicode(quoter):
+ s = quoter()(
+ "j\u001a\udcf4q\udcda/\udc97g\udcee\udccb\u000ch\udccb"
+ "\u0018\udce4v\u001b\udce2\udcce\udccecom/y\udccepj\u0016"
+ )
+
+ assert s == "j%1Aq%2Fg%0Ch%18v%1Bcom%2Fypj%16"
+ assert quoter()(s) == s
+
+
+def test_unquote_to_bytes(unquoter):
+ assert unquoter()("abc%20def") == "abc def"
+ assert unquoter()("") == ""
+
+
+def test_never_quote(quoter):
+ # Make sure quote() does not quote letters, digits, and "_,.-~"
+ do_not_quote = (
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "abcdefghijklmnopqrstuvwxyz" "0123456789" "_.-~"
+ )
+ assert quoter()(do_not_quote) == do_not_quote
+ assert quoter(qs=True)(do_not_quote) == do_not_quote
+
+
+def test_safe(quoter):
+ # Test setting 'safe' parameter does what it should do
+ quote_by_default = "<>"
+ assert quoter(safe=quote_by_default)(quote_by_default) == quote_by_default
+
+ ret = quoter(safe=quote_by_default, qs=True)(quote_by_default)
+ assert ret == quote_by_default
+
+
+_SHOULD_QUOTE = [chr(num) for num in range(32)]
+_SHOULD_QUOTE.append(r'<>#"{}|\^[]`')
+_SHOULD_QUOTE.append(chr(127)) # For 0x7F
+SHOULD_QUOTE = "".join(_SHOULD_QUOTE)
+
+
+@pytest.mark.parametrize("char", SHOULD_QUOTE)
+def test_default_quoting(char, quoter):
+ # Make sure all characters that should be quoted are by default sans
+ # space (separate test for that).
+ result = quoter()(char)
+ assert hexescape(char) == result
+ result = quoter(qs=True)(char)
+ assert hexescape(char) == result
+
+
+# TODO: should it encode percent?
+def test_default_quoting_percent(quoter):
+ result = quoter()("%25")
+ assert "%25" == result
+ result = quoter(qs=True)("%25")
+ assert "%25" == result
+ result = quoter(requote=False)("%25")
+ assert "%2525" == result
+
+
+def test_default_quoting_partial(quoter):
+ partial_quote = "ab[]cd"
+ expected = "ab%5B%5Dcd"
+ result = quoter()(partial_quote)
+ assert expected == result
+ result = quoter(qs=True)(partial_quote)
+ assert expected == result
+
+
+def test_quoting_space(quoter):
+ # Make sure quote() and quote_plus() handle spaces as specified in
+ # their unique way
+ result = quoter()(" ")
+ assert result == hexescape(" ")
+ result = quoter(qs=True)(" ")
+ assert result == "+"
+
+ given = "a b cd e f"
+ expect = given.replace(" ", hexescape(" "))
+ result = quoter()(given)
+ assert expect == result
+ expect = given.replace(" ", "+")
+ result = quoter(qs=True)(given)
+ assert expect == result
+
+
+def test_quoting_plus(quoter):
+ assert quoter(qs=False)("alpha+beta gamma") == "alpha+beta%20gamma"
+ assert quoter(qs=True)("alpha+beta gamma") == "alpha%2Bbeta+gamma"
+ assert quoter(safe="+", qs=True)("alpha+beta gamma") == "alpha+beta+gamma"
+
+
+def test_quote_with_unicode(quoter):
+ # Characters in Latin-1 range, encoded by default in UTF-8
+ given = "\u00a2\u00d8ab\u00ff"
+ expect = "%C2%A2%C3%98ab%C3%BF"
+ result = quoter()(given)
+ assert expect == result
+ # Characters in BMP, encoded by default in UTF-8
+ given = "\u6f22\u5b57" # "Kanji"
+ expect = "%E6%BC%A2%E5%AD%97"
+ result = quoter()(given)
+ assert expect == result
+
+
+def test_quote_plus_with_unicode(quoter):
+ # Characters in Latin-1 range, encoded by default in UTF-8
+ given = "\u00a2\u00d8ab\u00ff"
+ expect = "%C2%A2%C3%98ab%C3%BF"
+ result = quoter(qs=True)(given)
+ assert expect == result
+ # Characters in BMP, encoded by default in UTF-8
+ given = "\u6f22\u5b57" # "Kanji"
+ expect = "%E6%BC%A2%E5%AD%97"
+ result = quoter(qs=True)(given)
+ assert expect == result
+
+
+@pytest.mark.parametrize("num", list(range(128)))
+def test_unquoting(num, unquoter):
+ # Make sure unquoting of all ASCII values works
+ given = hexescape(chr(num))
+ expect = chr(num)
+ result = unquoter()(given)
+ assert expect == result
+ if expect not in "+=&;":
+ result = unquoter(qs=True)(given)
+ assert expect == result
+
+
+# Expected value should be the same as given.
+# See https://url.spec.whatwg.org/#percent-encoded-bytes
+@pytest.mark.parametrize(
+ ("input", "expected"),
+ [
+ ("%", "%"),
+ ("%2", "%2"),
+ ("%x", "%x"),
+ ("%€", "%€"),
+ ("%2x", "%2x"),
+ ("%2 ", "%2 "),
+ ("% 2", "% 2"),
+ ("%xa", "%xa"),
+ ("%%", "%%"),
+ ("%%3f", "%?"),
+ ("%2%", "%2%"),
+ ("%2%3f", "%2?"),
+ ("%x%3f", "%x?"),
+ ("%€%3f", "%€?"),
+ ],
+)
+def test_unquoting_bad_percent_escapes(unquoter, input, expected):
+ assert unquoter()(input) == expected
+
+
+@pytest.mark.xfail
+# FIXME: After conversion to bytes, should not cause UTF-8 decode fail.
+# See https://url.spec.whatwg.org/#percent-encoded-bytes
+def test_unquoting_invalid_utf8_sequence(unquoter):
+ with pytest.raises(ValueError):
+ unquoter()("%AB")
+ with pytest.raises(ValueError):
+ unquoter()("%AB%AB")
+
+
+def test_unquoting_mixed_case_percent_escapes(unquoter):
+ expected = "𝕦"
+ assert expected == unquoter()("%F0%9D%95%A6")
+ assert expected == unquoter()("%F0%9d%95%a6")
+ assert expected == unquoter()("%f0%9D%95%a6")
+ assert expected == unquoter()("%f0%9d%95%a6")
+
+
+def test_unquoting_parts(unquoter):
+ # Make sure unquoting works when have non-quoted characters
+ # interspersed
+ given = "ab" + hexescape("c") + "d"
+ expect = "abcd"
+ result = unquoter()(given)
+ assert expect == result
+ result = unquoter(qs=True)(given)
+ assert expect == result
+
+
+def test_quote_None(quoter):
+ assert quoter()(None) is None
+
+
+def test_unquote_None(unquoter):
+ assert unquoter()(None) is None
+
+
+def test_quote_empty_string(quoter):
+ assert quoter()("") == ""
+
+
+def test_unquote_empty_string(unquoter):
+ assert unquoter()("") == ""
+
+
+def test_quote_bad_types(quoter):
+ with pytest.raises(TypeError):
+ quoter()(123)
+
+
+def test_unquote_bad_types(unquoter):
+ with pytest.raises(TypeError):
+ unquoter()(123)
+
+
+def test_quote_lowercase(quoter):
+ assert quoter()("%d1%84") == "%D1%84"
+
+
+def test_quote_unquoted(quoter):
+ assert quoter()("%41") == "A"
+
+
+def test_quote_space(quoter):
+ assert quoter()(" ") == "%20" # NULL
+
+
+# test to see if this would work to fix
+# coverage on this file.
+def test_quote_percent_last_character(quoter):
+ # % is last character in this case.
+ assert quoter()("%") == "%25"
+
+
+def test_unquote_unsafe(unquoter):
+ assert unquoter(unsafe="@")("%40") == "%40"
+
+
+def test_unquote_unsafe2(unquoter):
+ assert unquoter(unsafe="@")("%40abc") == "%40abc"
+
+
+def test_unquote_unsafe3(unquoter):
+ assert unquoter(qs=True)("a%2Bb=?%3D%2B%26") == "a%2Bb=?%3D%2B%26"
+
+
+def test_unquote_unsafe4(unquoter):
+ assert unquoter(unsafe="@")("a@b") == "a%40b"
+
+
+@pytest.mark.parametrize(
+ ("input", "expected"),
+ [
+ ("%e2%82", "%e2%82"),
+ ("%e2%82ac", "%e2%82ac"),
+ ("%e2%82%f8", "%e2%82%f8"),
+ ("%e2%82%2b", "%e2%82+"),
+ ("%e2%82%e2%82%ac", "%e2%82€"),
+ ("%e2%82%e2%82", "%e2%82%e2%82"),
+ ],
+)
+def test_unquote_non_utf8(unquoter, input, expected):
+ assert unquoter()(input) == expected
+
+
+def test_unquote_unsafe_non_utf8(unquoter):
+ assert unquoter(unsafe="\n")("%e2%82%0a") == "%e2%82%0A"
+
+
+def test_unquote_plus_non_utf8(unquoter):
+ assert unquoter(qs=True)("%e2%82%2b") == "%e2%82%2B"
+
+
+def test_quote_non_ascii(quoter):
+ assert quoter()("%F8") == "%F8"
+
+
+def test_quote_non_ascii2(quoter):
+ assert quoter()("a%F8b") == "a%F8b"
+
+
+def test_quote_percent_percent_encoded(quoter):
+ assert quoter()("%%3f") == "%25%3F"
+
+
+def test_quote_percent_digit_percent_encoded(quoter):
+ assert quoter()("%2%3f") == "%252%3F"
+
+
+def test_quote_percent_safe_percent_encoded(quoter):
+ assert quoter()("%x%3f") == "%25x%3F"
+
+
+def test_quote_percent_unsafe_percent_encoded(quoter):
+ assert quoter()("%#%3f") == "%25%23%3F"
+
+
+def test_quote_percent_non_ascii_percent_encoded(quoter):
+ assert quoter()("%ß%3f") == "%25%C3%9F%3F"
+
+
+def test_quote_percent_non_ascii2_percent_encoded(quoter):
+ assert quoter()("%€%3f") == "%25%E2%82%AC%3F"
+
+
+def test_quote_percent_non_ascii3_percent_encoded(quoter):
+ assert quoter()("%🐍%3f") == "%25%F0%9F%90%8D%3F"
+
+
+class StrLike(str):
+ pass
+
+
+def test_quote_str_like(quoter):
+ assert quoter()(StrLike("abc")) == "abc"
+
+
+def test_unquote_str_like(unquoter):
+ assert unquoter()(StrLike("abc")) == "abc"
+
+
+def test_quote_sub_delims(quoter):
+ assert quoter()("!$&'()*+,;=") == "!$&'()*+,;="
+
+
+def test_requote_sub_delims(quoter):
+ assert quoter()("%21%24%26%27%28%29%2A%2B%2C%3B%3D") == "!$&'()*+,;="
+
+
+def test_unquoting_plus(unquoter):
+ assert unquoter(qs=False)("a+b") == "a+b"
+
+
+def test_unquote_plus_to_space(unquoter):
+ assert unquoter(qs=True)("a+b") == "a b"
+
+
+def test_unquote_plus_to_space_unsafe(unquoter):
+ assert unquoter(unsafe="+", qs=True)("a+b") == "a+b"
+
+
+def test_quote_qs_with_colon(quoter):
+ s = quoter(safe="=+&?/:@", qs=True)("next=http%3A//example.com/")
+ assert s == "next=http://example.com/"
+
+
+def test_quote_protected(quoter):
+ s = quoter(protected="/")("/path%2fto/three")
+ assert s == "/path%2Fto/three"
+
+
+def test_quote_fastpath_safe(quoter):
+ s1 = "/path/to"
+ s2 = quoter(safe="/")(s1)
+ assert s1 is s2
+
+
+def test_quote_fastpath_pct(quoter):
+ s1 = "abc%A0"
+ s2 = quoter()(s1)
+ assert s1 is s2
+
+
+def test_quote_very_large_string(quoter):
+ # more than 8 KiB
+ s = "abcфух%30%0a" * 1024
+ assert quoter()(s) == "abc%D1%84%D1%83%D1%850%0A" * 1024
+
+
+def test_space(quoter):
+ s = "% A"
+ assert quoter()(s) == "%25%20A"
+
+
+def test_quoter_path_with_plus(quoter):
+ s = "/test/x+y%2Bz/:+%2B/"
+ assert "/test/x+y%2Bz/:+%2B/" == quoter(safe="@:", protected="/+")(s)
+
+
+def test_unquoter_path_with_plus(unquoter):
+ s = "/test/x+y%2Bz/:+%2B/"
+ assert "/test/x+y+z/:++/" == unquoter(unsafe="+")(s)
diff --git a/contrib/python/yarl/tests/test_update_query.py b/contrib/python/yarl/tests/test_update_query.py
new file mode 100644
index 0000000000..e47c468341
--- /dev/null
+++ b/contrib/python/yarl/tests/test_update_query.py
@@ -0,0 +1,366 @@
+import enum
+
+import pytest
+from multidict import MultiDict
+
+from yarl import URL
+
+# with_query
+
+
def test_with_query():
    url = URL("http://example.com").with_query({"a": "1"})
    assert str(url) == "http://example.com/?a=1"


def test_update_query():
    base = URL("http://example.com/")
    assert str(base.update_query({"a": "1"})) == "http://example.com/?a=1"
    assert str(URL("test").update_query(a=1)) == "test?a=1"

    original = URL("http://example.com/?foo=bar")
    expected = URL("http://example.com/?foo=bar&baz=foo")

    # Mapping, keyword and raw-string forms behave identically.
    assert original.update_query({"baz": "foo"}) == expected
    assert original.update_query(baz="foo") == expected
    assert original.update_query("baz=foo") == expected


def test_update_query_with_args_and_kwargs():
    # Positional and keyword query arguments are mutually exclusive.
    with pytest.raises(ValueError):
        URL("http://example.com/").update_query("a", foo="bar")


def test_update_query_with_multiple_args():
    with pytest.raises(ValueError):
        URL("http://example.com/").update_query("a", "b")


def test_update_query_with_none_arg():
    # None drops the whole query string.
    url = URL("http://example.com/?foo=bar&baz=foo")
    assert url.update_query(None) == URL("http://example.com/")


def test_update_query_with_empty_dict():
    # An empty mapping is a no-op.
    url = URL("http://example.com/?foo=bar&baz=foo")
    assert url.update_query({}) == url
+
+
def test_with_query_list_of_pairs():
    url = URL("http://example.com").with_query([("a", "1")])
    assert str(url) == "http://example.com/?a=1"


def test_with_query_list_non_pairs():
    # Bare "k=v" strings inside a list are rejected.
    with pytest.raises(ValueError):
        URL("http://example.com").with_query(["a=1", "b=2", "c=3"])


def test_with_query_kwargs():
    url = URL("http://example.com").with_query(query="1", query2="1")
    assert url.query == {"query": "1", "query2": "1"}


def test_with_query_kwargs_and_args_are_mutually_exclusive():
    with pytest.raises(ValueError):
        URL("http://example.com").with_query({"a": "2", "b": "4"}, a="1")


def test_with_query_only_single_arg_is_supported():
    url = URL("http://example.com")
    assert url.with_query(b=3) == URL("http://example.com/?b=3")
    with pytest.raises(ValueError):
        url.with_query("a=1", "a=b")


def test_with_query_empty_dict():
    replaced = URL("http://example.com/?a=b").with_query({})
    assert replaced.query_string == ""
    assert str(replaced) == "http://example.com/"


def test_with_query_empty_str():
    replaced = URL("http://example.com/?a=b").with_query("")
    assert str(replaced) == "http://example.com/"
+
+
def test_with_query_empty_value():
    url = URL("http://example.com/").with_query({"a": ""})
    assert str(url) == "http://example.com/?a="


def test_with_query_str():
    url = URL("http://example.com").with_query("a=1&b=2")
    assert str(url) == "http://example.com/?a=1&b=2"


def test_with_query_str_non_ascii_and_spaces():
    url = URL("http://example.com").with_query("a=1 2&b=знач")
    # Raw form is plus/percent encoded; decoded form keeps the input text.
    assert url.raw_query_string == "a=1+2&b=%D0%B7%D0%BD%D0%B0%D1%87"
    assert url.query_string == "a=1 2&b=знач"


def test_with_query_int():
    url = URL("http://example.com").with_query({"a": 1})
    assert url == URL("http://example.com/?a=1")


def test_with_query_kwargs_int():
    url = URL("http://example.com").with_query(b=2)
    assert url == URL("http://example.com/?b=2")


def test_with_query_list_int():
    url = URL("http://example.com").with_query([("a", 1)])
    assert str(url) == "http://example.com/?a=1"
+
+
# Sequence values in a mapping expand into one query pair per element.
@pytest.mark.parametrize(
    ("query", "expected"),
    [
        pytest.param({"a": []}, "", id="empty list"),
        pytest.param({"a": ()}, "", id="empty tuple"),
        pytest.param({"a": [1]}, "/?a=1", id="single list"),
        pytest.param({"a": (1,)}, "/?a=1", id="single tuple"),
        pytest.param({"a": [1, 2]}, "/?a=1&a=2", id="list"),
        pytest.param({"a": (1, 2)}, "/?a=1&a=2", id="tuple"),
        pytest.param({"a[]": [1, 2]}, "/?a%5B%5D=1&a%5B%5D=2", id="key with braces"),
        pytest.param({"&": [1, 2]}, "/?%26=1&%26=2", id="quote key"),
        pytest.param({"a": ["1", 2]}, "/?a=1&a=2", id="mixed types"),
        pytest.param({"&": ["=", 2]}, "/?%26=%3D&%26=2", id="quote key and value"),
        pytest.param({"a": 1, "b": [2, 3]}, "/?a=1&b=2&b=3", id="single then list"),
        pytest.param({"a": [1, 2], "b": 3}, "/?a=1&a=2&b=3", id="list then single"),
        pytest.param({"a": ["1&a=2", 3]}, "/?a=1%26a%3D2&a=3", id="ampersand then int"),
        pytest.param({"a": [1, "2&a=3"]}, "/?a=1&a=2%26a%3D3", id="int then ampersand"),
    ],
)
def test_with_query_sequence(query, expected):
    """List/tuple values produce repeated query keys in input order."""
    url = URL("http://example.com")
    expected = "http://example.com{expected}".format_map(locals())
    assert str(url.with_query(query)) == expected


# Nested sequences are rejected: only flat scalar values are allowed.
@pytest.mark.parametrize(
    "query",
    [
        pytest.param({"a": [[1]]}, id="nested"),
        pytest.param([("a", [1, 2])], id="tuple list"),
    ],
)
def test_with_query_sequence_invalid_use(query):
    url = URL("http://example.com")
    with pytest.raises(TypeError, match="Invalid variable type"):
        url.with_query(query)
+
+
class _CStr(str):
    """str subclass used as a "custom str" query value."""


class _EmptyStrEr:
    """Mixin whose __str__ returns "" on purpose.

    If stringification of subclassed numbers went through str() instead of
    the underlying numeric type, tests using it would visibly break.
    """

    def __str__(self):
        return ""


class _CInt(int, _EmptyStrEr):
    """int subclass mixed with the empty-__str__ helper."""


class _CFloat(float, _EmptyStrEr):
    """float subclass mixed with the empty-__str__ helper."""
+
+
# str, int and float (including subclasses) are accepted as query values.
@pytest.mark.parametrize(
    ("value", "expected"),
    [
        pytest.param("1", "1", id="str"),
        pytest.param(_CStr("1"), "1", id="custom str"),
        pytest.param(1, "1", id="int"),
        pytest.param(_CInt(1), "1", id="custom int"),
        pytest.param(1.1, "1.1", id="float"),
        pytest.param(_CFloat(1.1), "1.1", id="custom float"),
    ],
)
def test_with_query_valid_type(value, expected):
    url = URL("http://example.com")
    expected = "http://example.com/?a={expected}".format_map(locals())
    assert str(url.with_query({"a": value})) == expected


# bool and None raise TypeError; non-finite floats raise ValueError.
@pytest.mark.parametrize(
    ("value", "exc_type"),
    [
        pytest.param(True, TypeError, id="bool"),
        pytest.param(None, TypeError, id="none"),
        pytest.param(float("inf"), ValueError, id="non-finite float"),
        pytest.param(float("nan"), ValueError, id="NaN float"),
    ],
)
def test_with_query_invalid_type(value, exc_type):
    url = URL("http://example.com")
    with pytest.raises(exc_type):
        url.with_query({"a": value})


# The same scalar-type rules apply to the list-of-pairs form.
@pytest.mark.parametrize(
    ("value", "expected"),
    [
        pytest.param("1", "1", id="str"),
        pytest.param(_CStr("1"), "1", id="custom str"),
        pytest.param(1, "1", id="int"),
        pytest.param(_CInt(1), "1", id="custom int"),
        pytest.param(1.1, "1.1", id="float"),
        pytest.param(_CFloat(1.1), "1.1", id="custom float"),
    ],
)
def test_with_query_list_valid_type(value, expected):
    url = URL("http://example.com")
    expected = "http://example.com/?a={expected}".format_map(locals())
    assert str(url.with_query([("a", value)])) == expected


@pytest.mark.parametrize(
    ("value"), [pytest.param(True, id="bool"), pytest.param(None, id="none")]
)
def test_with_query_list_invalid_type(value):
    url = URL("http://example.com")
    with pytest.raises(TypeError):
        url.with_query([("a", value)])
+
+
def test_with_int_enum():
    class _IntEnum(int, enum.Enum):
        A = 1

    url = URL("http://example.com/path").with_query(a=_IntEnum.A)
    assert str(url) == "http://example.com/path?a=1"


def test_with_float_enum():
    class _FloatEnum(float, enum.Enum):
        A = 1.1

    url = URL("http://example.com/path").with_query(a=_FloatEnum.A)
    assert str(url) == "http://example.com/path?a=1.1"


def test_with_query_multidict():
    query = MultiDict([("a", "b"), ("c", "d")])
    url = URL("http://example.com/path").with_query(query)
    assert str(url) == "http://example.com/path?a=b&c=d"


def test_with_multidict_with_spaces_and_non_ascii():
    url = URL("http://example.com").with_query({"a b": "ю б"})
    assert url.raw_query_string == "a+b=%D1%8E+%D0%B1"


def test_with_query_multidict_with_unsafe():
    url = URL("http://example.com/path").with_query({"a+b": "?=+&;"})
    assert url.raw_query_string == "a%2Bb=?%3D%2B%26%3B"
    assert url.query_string == "a%2Bb=?%3D%2B%26%3B"
    assert url.query == {"a+b": "?=+&;"}
+
+
def test_with_query_None():
    # None clears the query entirely.
    assert URL("http://example.com/path?a=b").with_query(None).query_string == ""


def test_with_query_bad_type():
    with pytest.raises(TypeError):
        URL("http://example.com").with_query(123)


def test_with_query_bytes():
    with pytest.raises(TypeError):
        URL("http://example.com").with_query(b"123")


def test_with_query_bytearray():
    with pytest.raises(TypeError):
        URL("http://example.com").with_query(bytearray(b"123"))


def test_with_query_memoryview():
    with pytest.raises(TypeError):
        URL("http://example.com").with_query(memoryview(b"123"))
+
+
# Sub-delimiters (";", "&", "=") and brackets inside keys/values must be
# percent-encoded regardless of the input form (mapping vs. pair list).
@pytest.mark.parametrize(
    ("query", "expected"),
    [
        pytest.param([("key", "1;2;3")], "?key=1%3B2%3B3", id="tuple list semicolon"),
        pytest.param({"key": "1;2;3"}, "?key=1%3B2%3B3", id="mapping semicolon"),
        pytest.param([("key", "1&a=2")], "?key=1%26a%3D2", id="tuple list ampersand"),
        pytest.param({"key": "1&a=2"}, "?key=1%26a%3D2", id="mapping ampersand"),
        pytest.param([("&", "=")], "?%26=%3D", id="tuple list quote key"),
        pytest.param({"&": "="}, "?%26=%3D", id="mapping quote key"),
        pytest.param(
            [("a[]", "3")],
            "?a%5B%5D=3",
            id="quote one key braces",
        ),
        pytest.param(
            [("a[]", "3"), ("a[]", "4")],
            "?a%5B%5D=3&a%5B%5D=4",
            id="quote many key braces",
        ),
    ],
)
def test_with_query_params(query, expected):
    url = URL("http://example.com/get")
    url2 = url.with_query(query)
    assert str(url2) == ("http://example.com/get" + expected)
+
+
def test_with_query_only():
    url = URL().with_query(key="value")
    assert str(url) == "?key=value"


def test_with_query_complex_url():
    target_url = "http://example.com/?game=bulls+%26+cows"
    url = URL("/redir").with_query({"t": target_url})
    # The encoded target string round-trips through the query unchanged.
    assert url.query["t"] == target_url


def test_update_query_multiple_keys():
    updated = URL("http://example.com/path?a=1&a=2").update_query(
        [("a", "3"), ("a", "4")]
    )
    assert str(updated) == "http://example.com/path?a=3&a=4"


# mod operator


def test_update_query_with_mod_operator():
    # "%" is shorthand for update_query and composes with "/".
    url = URL("http://example.com/")
    assert str(url % {"a": "1"}) == "http://example.com/?a=1"
    assert str(url % [("a", "1")]) == "http://example.com/?a=1"
    assert str(url % "a=1&b=2") == "http://example.com/?a=1&b=2"
    assert str(url % {"a": "1"} % {"b": "2"}) == "http://example.com/?a=1&b=2"
    assert str(url % {"a": "1"} % {"a": "3", "b": "2"}) == "http://example.com/?a=3&b=2"
    assert str(url / "foo" % {"a": "1"}) == "http://example.com/foo?a=1"
diff --git a/contrib/python/yarl/tests/test_url.py b/contrib/python/yarl/tests/test_url.py
new file mode 100644
index 0000000000..af13d0b5d5
--- /dev/null
+++ b/contrib/python/yarl/tests/test_url.py
@@ -0,0 +1,1732 @@
+from enum import Enum
+from urllib.parse import SplitResult
+
+import pytest
+
+from yarl import URL
+
+
def test_inheritance():
    # URL forbids subclassing; the error must fire at class creation time.
    with pytest.raises(TypeError) as ctx:

        class MyURL(URL):  # type: ignore[misc]
            pass

    assert str(ctx.value) == (
        "Inheriting a class "
        "<class '__tests__.test_url.test_inheritance.<locals>.MyURL'> "
        "from URL is forbidden"
    )


def test_str_subclass():
    class S(str):
        pass

    assert str(URL(S("http://example.com"))) == "http://example.com"


def test_is():
    # Wrapping an URL in URL() returns the very same object.
    original = URL("http://example.com")
    assert URL(original) is original
+
+
def test_bool():
    assert URL("http://example.com")
    assert not URL()
    assert not URL("")


def test_absolute_url_without_host():
    # A port without a host is invalid.
    with pytest.raises(ValueError):
        URL("http://:8080/")


def test_url_is_not_str():
    assert not isinstance(URL("http://example.com"), str)


def test_str():
    text = "http://example.com:8888/path/to?a=1&b=2"
    assert str(URL(text)) == text


def test_repr():
    assert repr(URL("http://example.com")) == "URL('http://example.com')"


def test_origin():
    # origin() strips credentials, path, query and fragment.
    url = URL("http://user:password@example.com:8888/path/to?a=1&b=2")
    assert url.origin() == URL("http://example.com:8888")


def test_origin_nonascii():
    url = URL("http://user:password@историк.рф:8888/path/to?a=1&b=2")
    assert str(url.origin()) == "http://xn--h1aagokeh.xn--p1ai:8888"


def test_origin_ipv6():
    url = URL("http://user:password@[::1]:8888/path/to?a=1&b=2")
    assert str(url.origin()) == "http://[::1]:8888"


def test_origin_not_absolute_url():
    with pytest.raises(ValueError):
        URL("/path/to?a=1&b=2").origin()


def test_origin_no_scheme():
    with pytest.raises(ValueError):
        URL("//user:password@example.com:8888/path/to?a=1&b=2").origin()


def test_drop_dots():
    assert str(URL("http://example.com/path/../to")) == "http://example.com/to"


def test_abs_cmp():
    # A trailing slash on the root path does not affect equality.
    assert URL("http://example.com:8888") == URL("http://example.com:8888")
    assert URL("http://example.com:8888/") == URL("http://example.com:8888/")
    assert URL("http://example.com:8888/") == URL("http://example.com:8888")
    assert URL("http://example.com:8888") == URL("http://example.com:8888/")


def test_abs_hash():
    # ...and hashes agree with that equality.
    plain = URL("http://example.com:8888")
    trailing = URL("http://example.com:8888/")
    assert hash(plain) == hash(trailing)
+
+# properties
+
+
def test_scheme():
    assert URL("http://example.com").scheme == "http"


def test_raw_user():
    assert URL("http://user@example.com").raw_user == "user"


def test_raw_user_non_ascii():
    # raw_* accessors expose the percent-encoded form.
    url = URL("http://вася@example.com")
    assert url.raw_user == "%D0%B2%D0%B0%D1%81%D1%8F"


def test_no_user():
    assert URL("http://example.com").user is None


def test_user_non_ascii():
    assert URL("http://вася@example.com").user == "вася"


def test_raw_password():
    assert URL("http://user:password@example.com").raw_password == "password"


def test_raw_password_non_ascii():
    url = URL("http://user:пароль@example.com")
    assert url.raw_password == "%D0%BF%D0%B0%D1%80%D0%BE%D0%BB%D1%8C"


def test_password_non_ascii():
    assert URL("http://user:пароль@example.com").password == "пароль"


def test_password_without_user():
    url = URL("http://:password@example.com")
    assert url.user is None
    assert url.password == "password"


def test_user_empty_password():
    url = URL("http://user:@example.com")
    assert url.user == "user"
    assert url.password == ""
+
+
def test_raw_host():
    assert URL("http://example.com").raw_host == "example.com"


def test_raw_host_non_ascii():
    # Non-ASCII hosts are stored IDNA-encoded.
    assert URL("http://историк.рф").raw_host == "xn--h1aagokeh.xn--p1ai"


def test_host_non_ascii():
    assert URL("http://историк.рф").host == "историк.рф"


def test_localhost():
    assert URL("http://[::1]").host == "::1"


def test_host_with_underscore():
    assert URL("http://abc_def.com").host == "abc_def.com"


def test_raw_host_when_port_is_specified():
    assert URL("http://example.com:8888").raw_host == "example.com"


def test_raw_host_from_str_with_ipv4():
    assert URL("http://127.0.0.1:80").raw_host == "127.0.0.1"


def test_raw_host_from_str_with_ipv6():
    assert URL("http://[::1]:80").raw_host == "::1"


def test_authority_full() -> None:
    url = URL("http://user:passwd@host.com:8080/path")
    assert url.raw_authority == "user:passwd@host.com:8080"
    assert url.authority == "user:passwd@host.com:8080"


def test_authority_short() -> None:
    assert URL("http://host.com/path").raw_authority == "host.com"


def test_authority_full_nonasci() -> None:
    url = URL("http://ваня:пароль@айдеко.рф:8080/path")
    # Credentials are percent-encoded, the host is IDNA-encoded.
    assert url.raw_authority == (
        "%D0%B2%D0%B0%D0%BD%D1%8F:%D0%BF%D0%B0%D1%80%D0%BE%D0%BB%D1%8C@"
        "xn--80aidohy.xn--p1ai:8080"
    )
    assert url.authority == "ваня:пароль@айдеко.рф:8080"


def test_lowercase():
    url = URL("http://gitHUB.com")
    assert url.raw_host == "github.com"
    assert url.host == url.raw_host


def test_lowercase_nonascii():
    url = URL("http://Айдеко.Рф")
    assert url.raw_host == "xn--80aidohy.xn--p1ai"
    assert url.host == "айдеко.рф"


def test_compressed_ipv6():
    # IPv6 literals are normalized to the compressed lowercase form.
    url = URL("http://[1DEC:0:0:0::1]")
    assert url.raw_host == "1dec::1"
    assert url.host == url.raw_host


def test_ipv4_zone():
    # I'm unsure if it is correct.
    url = URL("http://1.2.3.4%тест%42:123")
    assert url.raw_host == "1.2.3.4%тест%42"
    assert url.host == url.raw_host
+
+
def test_port_for_explicit_port():
    assert URL("http://example.com:8888").port == 8888


def test_port_for_implicit_port():
    # .port falls back to the scheme's default.
    assert URL("http://example.com").port == 80


def test_port_for_relative_url():
    assert URL("/path/to").port is None


def test_port_for_unknown_scheme():
    assert URL("unknown://example.com").port is None


def test_explicit_port_for_explicit_port():
    assert URL("http://example.com:8888").explicit_port == 8888


def test_explicit_port_for_implicit_port():
    # Unlike .port, .explicit_port never uses the scheme default.
    assert URL("http://example.com").explicit_port is None


def test_explicit_port_for_relative_url():
    assert URL("/path/to").explicit_port is None


def test_explicit_port_for_unknown_scheme():
    assert URL("unknown://example.com").explicit_port is None
+
+
def test_raw_path_string_empty():
    # An absolute URL without a path gets "/".
    assert URL("http://example.com").raw_path == "/"


def test_raw_path():
    assert URL("http://example.com/path/to").raw_path == "/path/to"


def test_raw_path_non_ascii():
    url = URL("http://example.com/путь/сюда")
    assert url.raw_path == "/%D0%BF%D1%83%D1%82%D1%8C/%D1%81%D1%8E%D0%B4%D0%B0"


def test_path_non_ascii():
    assert URL("http://example.com/путь/сюда").path == "/путь/сюда"


def test_path_with_spaces():
    assert URL("http://example.com/a b/test").path == "/a b/test"
    assert URL("http://example.com/a b").path == "/a b"


def test_raw_path_for_empty_url():
    assert URL().raw_path == ""


def test_raw_path_for_colon_and_at():
    # ":" and "@" are legal inside a path segment.
    assert URL("http://example.com/path:abc@123").raw_path == "/path:abc@123"


def test_raw_query_string():
    assert URL("http://example.com?a=1&b=2").raw_query_string == "a=1&b=2"


def test_raw_query_string_non_ascii():
    url = URL("http://example.com?б=в&ю=к")
    assert url.raw_query_string == "%D0%B1=%D0%B2&%D1%8E=%D0%BA"


def test_query_string_non_ascii():
    assert URL("http://example.com?б=в&ю=к").query_string == "б=в&ю=к"


def test_path_qs():
    assert URL("http://example.com/").path_qs == "/"
    assert URL("http://example.com/?б=в&ю=к").path_qs == "/?б=в&ю=к"
    assert URL("http://example.com/path?б=в&ю=к").path_qs == "/path?б=в&ю=к"


def test_raw_path_qs():
    assert URL("http://example.com/").raw_path_qs == "/"
    assert URL("http://example.com/?б=в&ю=к").raw_path_qs == (
        "/?%D0%B1=%D0%B2&%D1%8E=%D0%BA"
    )
    assert URL("http://example.com/path?б=в&ю=к").raw_path_qs == (
        "/path?%D0%B1=%D0%B2&%D1%8E=%D0%BA"
    )
    assert URL("http://example.com/путь?a=1&b=2").raw_path_qs == (
        "/%D0%BF%D1%83%D1%82%D1%8C?a=1&b=2"
    )


def test_query_string_spaces():
    url = URL("http://example.com?a+b=c+d&e=f+g")
    assert url.query_string == "a b=c d&e=f g"
+
+
+# raw fragment
+
+
def test_raw_fragment_empty():
    assert URL("http://example.com").raw_fragment == ""


def test_raw_fragment():
    assert URL("http://example.com/path#anchor").raw_fragment == "anchor"


def test_raw_fragment_non_ascii():
    url = URL("http://example.com/path#якорь")
    assert url.raw_fragment == "%D1%8F%D0%BA%D0%BE%D1%80%D1%8C"


def test_raw_fragment_safe():
    # "?", "/", ":" and "@" need no escaping inside a fragment.
    assert URL("http://example.com/path#a?b/c:d@e").raw_fragment == "a?b/c:d@e"


def test_fragment_non_ascii():
    assert URL("http://example.com/path#якорь").fragment == "якорь"
+
+
def test_raw_parts_empty():
    assert URL("http://example.com").raw_parts == ("/",)


def test_raw_parts():
    assert URL("http://example.com/path/to").raw_parts == ("/", "path", "to")


def test_raw_parts_without_path():
    assert URL("http://example.com").raw_parts == ("/",)


def test_raw_path_parts_with_2F_in_path():
    # An encoded slash does not split the segment.
    url = URL("http://example.com/path%2Fto/three")
    assert url.raw_parts == ("/", "path%2Fto", "three")


def test_raw_path_parts_with_2f_in_path():
    # Lowercase hex digits are normalized to uppercase.
    url = URL("http://example.com/path%2fto/three")
    assert url.raw_parts == ("/", "path%2Fto", "three")


def test_raw_parts_for_relative_path():
    assert URL("path/to").raw_parts == ("path", "to")


def test_raw_parts_for_relative_path_starting_from_slash():
    assert URL("/path/to").raw_parts == ("/", "path", "to")


def test_raw_parts_for_relative_double_path():
    assert URL("path/to").raw_parts == ("path", "to")


def test_parts_for_empty_url():
    assert URL().raw_parts == ("",)


def test_raw_parts_non_ascii():
    url = URL("http://example.com/путь/сюда")
    assert url.raw_parts == (
        "/",
        "%D0%BF%D1%83%D1%82%D1%8C",
        "%D1%81%D1%8E%D0%B4%D0%B0",
    )


def test_parts_non_ascii():
    assert URL("http://example.com/путь/сюда").parts == ("/", "путь", "сюда")
+
+
def test_name_for_empty_url():
    assert URL().raw_name == ""


def test_raw_name():
    # The name is the final path segment, without query or fragment.
    assert URL("http://example.com/path/to#frag").raw_name == "to"


def test_raw_name_root():
    assert URL("http://example.com/#frag").raw_name == ""


def test_raw_name_root2():
    assert URL("http://example.com").raw_name == ""


def test_raw_name_root3():
    assert URL("http://example.com/").raw_name == ""


def test_relative_raw_name():
    assert URL("path/to").raw_name == "to"


def test_relative_raw_name_starting_from_slash():
    assert URL("/path/to").raw_name == "to"


def test_relative_raw_name_slash():
    assert URL("/").raw_name == ""


def test_name_non_ascii():
    assert URL("http://example.com/путь").name == "путь"
+
+
def test_suffix_for_empty_url():
    assert URL().raw_suffix == ""


def test_raw_suffix():
    # The suffix is the extension of the final path segment.
    assert URL("http://example.com/path/to.txt#frag").raw_suffix == ".txt"


def test_raw_suffix_root():
    assert URL("http://example.com/#frag").raw_suffix == ""


def test_raw_suffix_root2():
    assert URL("http://example.com").raw_suffix == ""


def test_raw_suffix_root3():
    assert URL("http://example.com/").raw_suffix == ""


def test_relative_raw_suffix():
    assert URL("path/to").raw_suffix == ""


def test_relative_raw_suffix_starting_from_slash():
    assert URL("/path/to").raw_suffix == ""


def test_relative_raw_suffix_dot():
    assert URL(".").raw_suffix == ""


def test_suffix_non_ascii():
    assert URL("http://example.com/путь.суффикс").suffix == ".суффикс"


def test_suffix_with_empty_name():
    # A leading dot marks a hidden name, not a suffix.
    assert URL("http://example.com/.hgrc").raw_suffix == ""


def test_suffix_multi_dot():
    assert URL("http://example.com/doc.tar.gz").raw_suffix == ".gz"


def test_suffix_with_dot_name():
    assert URL("http://example.com/doc.").raw_suffix == ""
+
+
def test_suffixes_for_empty_url():
    assert URL().raw_suffixes == ()


def test_raw_suffixes():
    assert URL("http://example.com/path/to.txt#frag").raw_suffixes == (".txt",)


def test_raw_suffixes_root():
    assert URL("http://example.com/#frag").raw_suffixes == ()


def test_raw_suffixes_root2():
    assert URL("http://example.com").raw_suffixes == ()


def test_raw_suffixes_root3():
    assert URL("http://example.com/").raw_suffixes == ()


def test_relative_raw_suffixes():
    assert URL("path/to").raw_suffixes == ()


def test_relative_raw_suffixes_starting_from_slash():
    assert URL("/path/to").raw_suffixes == ()


def test_relative_raw_suffixes_dot():
    assert URL(".").raw_suffixes == ()


def test_suffixes_non_ascii():
    assert URL("http://example.com/путь.суффикс").suffixes == (".суффикс",)


def test_suffixes_with_empty_name():
    # A hidden-file style name has no suffixes.
    assert URL("http://example.com/.hgrc").raw_suffixes == ()


def test_suffixes_multi_dot():
    assert URL("http://example.com/doc.tar.gz").raw_suffixes == (".tar", ".gz")


def test_suffixes_with_dot_name():
    assert URL("http://example.com/doc.").raw_suffixes == ()
+
+
def test_plus_in_path():
    url = URL("http://example.com/test/x+y%2Bz/:+%2B/")
    assert url.path == "/test/x+y+z/:++/"


def test_nonascii_in_qs():
    url = URL("http://example.com").with_query({"f\xf8\xf8": "f\xf8\xf8"})
    assert str(url) == "http://example.com/?f%C3%B8%C3%B8=f%C3%B8%C3%B8"


def test_percent_encoded_in_qs():
    # A literal "%" in keys/values is itself escaped as "%25".
    url = URL("http://example.com").with_query({"k%cf%80": "v%cf%80"})
    assert str(url) == "http://example.com/?k%25cf%2580=v%25cf%2580"
    assert url.raw_query_string == "k%25cf%2580=v%25cf%2580"
    assert url.query_string == "k%cf%80=v%cf%80"
    assert url.query == {"k%cf%80": "v%cf%80"}
+
+
+# modifiers
+
+
def test_parent_raw_path():
    assert URL("http://example.com/path/to").parent.raw_path == "/path"


def test_parent_raw_parts():
    assert URL("http://example.com/path/to").parent.raw_parts == ("/", "path")


def test_double_parent_raw_path():
    assert URL("http://example.com/path/to").parent.parent.raw_path == "/"


def test_empty_parent_raw_path():
    # The root is its own parent.
    assert URL("http://example.com/").parent.parent.raw_path == "/"


def test_empty_parent_raw_path2():
    assert URL("http://example.com").parent.parent.raw_path == "/"


def test_clear_fragment_on_getting_parent():
    url = URL("http://example.com/path/to#frag")
    assert url.parent == URL("http://example.com/path")


def test_clear_fragment_on_getting_parent_toplevel():
    url = URL("http://example.com/#frag")
    assert url.parent == URL("http://example.com/")


def test_clear_query_on_getting_parent():
    url = URL("http://example.com/path/to?a=b")
    assert url.parent == URL("http://example.com/path")


def test_clear_query_on_getting_parent_toplevel():
    url = URL("http://example.com/?a=b")
    assert url.parent == URL("http://example.com/")
+
+
+# truediv
+
+
def test_div_root():
    base = URL("http://example.com")
    joined = base / "path" / "to"
    assert str(joined) == "http://example.com/path/to"
    assert joined.raw_path == "/path/to"


def test_div_root_with_slash():
    base = URL("http://example.com/")
    joined = base / "path" / "to"
    assert str(joined) == "http://example.com/path/to"
    assert joined.raw_path == "/path/to"


def test_div():
    joined = URL("http://example.com/path") / "to"
    assert str(joined) == "http://example.com/path/to"
    assert joined.raw_path == "/path/to"


def test_div_with_slash():
    joined = URL("http://example.com/path/") / "to"
    assert str(joined) == "http://example.com/path/to"
    assert joined.raw_path == "/path/to"


def test_div_path_starting_from_slash_is_forbidden():
    with pytest.raises(ValueError):
        URL("http://example.com/path/") / "/to/others"


class StrEnum(str, Enum):
    spam = "ham"

    def __str__(self):
        return self.value


def test_div_path_srting_subclass():
    # NOTE(review): "srting" is a typo for "string"; the name is kept to
    # preserve the upstream test id.
    joined = URL("http://example.com/path/") / StrEnum.spam
    assert str(joined) == "http://example.com/path/ham"


def test_div_bad_type():
    with pytest.raises(TypeError):
        URL("http://example.com/path/") / 3


def test_div_cleanup_query_and_fragment():
    url = URL("http://example.com/path?a=1#frag")
    assert str(url / "to") == "http://example.com/path/to"
+
+
def test_div_for_empty_url():
    assert (URL() / "a").raw_parts == ("a",)


def test_div_for_relative_url():
    assert (URL("a") / "b").raw_parts == ("a", "b")


def test_div_for_relative_url_started_with_slash():
    assert (URL("/a") / "b").raw_parts == ("/", "a", "b")


def test_div_non_ascii():
    joined = URL("http://example.com/сюда") / "туда"
    assert joined.path == "/сюда/туда"
    assert joined.raw_path == "/%D1%81%D1%8E%D0%B4%D0%B0/%D1%82%D1%83%D0%B4%D0%B0"
    assert joined.parts == ("/", "сюда", "туда")
    assert joined.raw_parts == (
        "/",
        "%D1%81%D1%8E%D0%B4%D0%B0",
        "%D1%82%D1%83%D0%B4%D0%B0",
    )


def test_div_percent_encoded():
    # Appended segments are treated as raw text: "%" itself gets escaped.
    joined = URL("http://example.com/path") / "%cf%80"
    assert joined.path == "/path/%cf%80"
    assert joined.raw_path == "/path/%25cf%2580"
    assert joined.parts == ("/", "path", "%cf%80")
    assert joined.raw_parts == ("/", "path", "%25cf%2580")


def test_div_with_colon_and_at():
    joined = URL("http://example.com/base") / "path:abc@123"
    assert joined.raw_path == "/base/path:abc@123"


def test_div_with_dots():
    # Dot segments are resolved during the join.
    joined = URL("http://example.com/base") / "../path/./to"
    assert joined.raw_path == "/path/to"
+
+
+# joinpath
+
+
# joinpath() mirrors the "/" operator but accepts many segments at once.
@pytest.mark.parametrize(
    "base,to_join,expected",
    [
        pytest.param("", ("path", "to"), "http://example.com/path/to", id="root"),
        pytest.param(
            "/", ("path", "to"), "http://example.com/path/to", id="root-with-slash"
        ),
        pytest.param("/path", ("to",), "http://example.com/path/to", id="path"),
        pytest.param(
            "/path/", ("to",), "http://example.com/path/to", id="path-with-slash"
        ),
        pytest.param(
            "/path?a=1#frag",
            ("to",),
            "http://example.com/path/to",
            id="cleanup-query-and-fragment",
        ),
        pytest.param("", ("path/",), "http://example.com/path/", id="trailing-slash"),
        pytest.param(
            "", ("path/", "to/"), "http://example.com/path/to/", id="duplicate-slash"
        ),
        pytest.param("", (), "http://example.com", id="empty-segments"),
        pytest.param(
            "/", ("path/",), "http://example.com/path/", id="base-slash-trailing-slash"
        ),
        pytest.param(
            "/",
            ("path/", "to/"),
            "http://example.com/path/to/",
            id="base-slash-duplicate-slash",
        ),
        pytest.param("/", (), "http://example.com", id="base-slash-empty-segments"),
    ],
)
def test_joinpath(base, to_join, expected):
    url = URL(f"http://example.com{base}")
    assert str(url.joinpath(*to_join)) == expected


@pytest.mark.parametrize(
    "url,to_join,expected",
    [
        pytest.param(URL(), ("a",), ("a",), id="empty-url"),
        pytest.param(URL("a"), ("b",), ("a", "b"), id="relative-path"),
        pytest.param(URL("a"), ("b", "", "c"), ("a", "b", "c"), id="empty-element"),
        # NOTE(review): ("b") is a bare string, not a 1-tuple; the test still
        # works because unpacking the 1-char string yields the same segment.
        pytest.param(URL("/a"), ("b"), ("/", "a", "b"), id="absolute-path"),
        pytest.param(URL(), ("a/",), ("a", ""), id="trailing-slash"),
        pytest.param(URL(), ("a/", "b/"), ("a", "b", ""), id="duplicate-slash"),
        pytest.param(URL(), (), ("",), id="empty-segments"),
    ],
)
def test_joinpath_relative(url, to_join, expected):
    assert url.joinpath(*to_join).raw_parts == expected


# encoded=True means "trust the caller": appended segments are not re-quoted.
@pytest.mark.parametrize(
    "url,to_join,encoded,e_path,e_raw_path,e_parts,e_raw_parts",
    [
        pytest.param(
            "http://example.com/сюда",
            ("туда",),
            False,
            "/сюда/туда",
            "/%D1%81%D1%8E%D0%B4%D0%B0/%D1%82%D1%83%D0%B4%D0%B0",
            ("/", "сюда", "туда"),
            ("/", "%D1%81%D1%8E%D0%B4%D0%B0", "%D1%82%D1%83%D0%B4%D0%B0"),
            id="non-ascii",
        ),
        pytest.param(
            "http://example.com/path",
            ("%cf%80",),
            False,
            "/path/%cf%80",
            "/path/%25cf%2580",
            ("/", "path", "%cf%80"),
            ("/", "path", "%25cf%2580"),
            id="percent-encoded",
        ),
        pytest.param(
            "http://example.com/path",
            ("%cf%80",),
            True,
            "/path/π",
            "/path/%cf%80",
            ("/", "path", "π"),
            ("/", "path", "%cf%80"),
            id="encoded-percent-encoded",
        ),
    ],
)
def test_joinpath_encoding(
    url, to_join, encoded, e_path, e_raw_path, e_parts, e_raw_parts
):
    joined = URL(url).joinpath(*to_join, encoded=encoded)
    assert joined.path == e_path
    assert joined.raw_path == e_raw_path
    assert joined.parts == e_parts
    assert joined.raw_parts == e_raw_parts


@pytest.mark.parametrize(
    "to_join,expected",
    [
        pytest.param(("path:abc@123",), "/base/path:abc@123", id="with-colon-and-at"),
        pytest.param(("..", "path", ".", "to"), "/path/to", id="with-dots"),
    ],
)
def test_joinpath_edgecases(to_join, expected):
    url = URL("http://example.com/base").joinpath(*to_join)
    assert url.raw_path == expected


def test_joinpath_path_starting_from_slash_is_forbidden():
    url = URL("http://example.com/path/")
    with pytest.raises(
        ValueError, match="Appending path .* starting from slash is forbidden"
    ):
        assert url.joinpath("/to/others")
+
+
+# with_path
+
+
+def test_with_path():
+ url = URL("http://example.com")
+ url2 = url.with_path("/test")
+ assert str(url2) == "http://example.com/test"
+ assert url2.raw_path == "/test"
+ assert url2.path == "/test"
+
+
+def test_with_path_nonascii():
+ url = URL("http://example.com")
+ url2 = url.with_path("/π")
+ assert str(url2) == "http://example.com/%CF%80"
+ assert url2.raw_path == "/%CF%80"
+ assert url2.path == "/π"
+
+
+def test_with_path_percent_encoded():
+ url = URL("http://example.com")
+ url2 = url.with_path("/%cf%80")
+ assert str(url2) == "http://example.com/%25cf%2580"
+ assert url2.raw_path == "/%25cf%2580"
+ assert url2.path == "/%cf%80"
+
+
+def test_with_path_encoded():
+ url = URL("http://example.com")
+ url2 = url.with_path("/test", encoded=True)
+ assert str(url2) == "http://example.com/test"
+ assert url2.raw_path == "/test"
+ assert url2.path == "/test"
+
+
+def test_with_path_encoded_nonascii():
+ url = URL("http://example.com")
+ url2 = url.with_path("/π", encoded=True)
+ assert str(url2) == "http://example.com/π"
+ assert url2.raw_path == "/π"
+ assert url2.path == "/π"
+
+
+def test_with_path_encoded_percent_encoded():
+ url = URL("http://example.com")
+ url2 = url.with_path("/%cf%80", encoded=True)
+ assert str(url2) == "http://example.com/%cf%80"
+ assert url2.raw_path == "/%cf%80"
+ assert url2.path == "/π"
+
+
+def test_with_path_dots():
+ url = URL("http://example.com")
+ assert str(url.with_path("/test/.")) == "http://example.com/test/"
+
+
+def test_with_path_relative():
+ url = URL("/path")
+ assert str(url.with_path("/new")) == "/new"
+
+
+def test_with_path_query():
+ url = URL("http://example.com?a=b")
+ assert str(url.with_path("/test")) == "http://example.com/test"
+
+
+def test_with_path_fragment():
+ url = URL("http://example.com#frag")
+ assert str(url.with_path("/test")) == "http://example.com/test"
+
+
+def test_with_path_empty():
+ url = URL("http://example.com/test")
+ assert str(url.with_path("")) == "http://example.com"
+
+
+def test_with_path_leading_slash():
+ url = URL("http://example.com")
+ assert url.with_path("test").path == "/test"
+
+
+# with_fragment
+
+
+def test_with_fragment():
+ url = URL("http://example.com")
+ url2 = url.with_fragment("frag")
+ assert str(url2) == "http://example.com/#frag"
+ assert url2.raw_fragment == "frag"
+ assert url2.fragment == "frag"
+
+
+def test_with_fragment_safe():
+ url = URL("http://example.com")
+ u2 = url.with_fragment("a:b?c@d/e")
+ assert str(u2) == "http://example.com/#a:b?c@d/e"
+
+
+def test_with_fragment_non_ascii():
+ url = URL("http://example.com")
+ url2 = url.with_fragment("фрагм")
+ assert url2.raw_fragment == "%D1%84%D1%80%D0%B0%D0%B3%D0%BC"
+ assert url2.fragment == "фрагм"
+
+
+def test_with_fragment_percent_encoded():
+ url = URL("http://example.com")
+ url2 = url.with_fragment("%cf%80")
+ assert str(url2) == "http://example.com/#%25cf%2580"
+ assert url2.raw_fragment == "%25cf%2580"
+ assert url2.fragment == "%cf%80"
+
+
+def test_with_fragment_None():
+ url = URL("http://example.com/path#frag")
+ url2 = url.with_fragment(None)
+ assert str(url2) == "http://example.com/path"
+
+
+def test_with_fragment_None_matching():
+ url = URL("http://example.com/path")
+ url2 = url.with_fragment(None)
+ assert url is url2
+
+
+def test_with_fragment_matching():
+ url = URL("http://example.com/path#frag")
+ url2 = url.with_fragment("frag")
+ assert url is url2
+
+
+def test_with_fragment_bad_type():
+ url = URL("http://example.com")
+ with pytest.raises(TypeError):
+ url.with_fragment(123)
+
+
+# with_name
+
+
+def test_with_name():
+ url = URL("http://example.com/a/b")
+ assert url.raw_parts == ("/", "a", "b")
+ url2 = url.with_name("c")
+ assert url2.raw_parts == ("/", "a", "c")
+ assert url2.parts == ("/", "a", "c")
+ assert url2.raw_path == "/a/c"
+ assert url2.path == "/a/c"
+
+
+def test_with_name_for_naked_path():
+ url = URL("http://example.com")
+ url2 = url.with_name("a")
+ assert url2.raw_parts == ("/", "a")
+
+
+def test_with_name_for_relative_path():
+ url = URL("a")
+ url2 = url.with_name("b")
+ assert url2.raw_parts == ("b",)
+
+
+def test_with_name_for_relative_path2():
+ url = URL("a/b")
+ url2 = url.with_name("c")
+ assert url2.raw_parts == ("a", "c")
+
+
+def test_with_name_for_relative_path_starting_from_slash():
+ url = URL("/a")
+ url2 = url.with_name("b")
+ assert url2.raw_parts == ("/", "b")
+
+
+def test_with_name_for_relative_path_starting_from_slash2():
+ url = URL("/a/b")
+ url2 = url.with_name("c")
+ assert url2.raw_parts == ("/", "a", "c")
+
+
+def test_with_name_empty():
+ url = URL("http://example.com/path/to").with_name("")
+ assert str(url) == "http://example.com/path/"
+
+
+def test_with_name_non_ascii():
+ url = URL("http://example.com/path").with_name("путь")
+ assert url.path == "/путь"
+ assert url.raw_path == "/%D0%BF%D1%83%D1%82%D1%8C"
+ assert url.parts == ("/", "путь")
+ assert url.raw_parts == ("/", "%D0%BF%D1%83%D1%82%D1%8C")
+
+
+def test_with_name_percent_encoded():
+ url = URL("http://example.com/path")
+ url2 = url.with_name("%cf%80")
+ assert url2.raw_parts == ("/", "%25cf%2580")
+ assert url2.parts == ("/", "%cf%80")
+ assert url2.raw_path == "/%25cf%2580"
+ assert url2.path == "/%cf%80"
+
+
+def test_with_name_with_slash():
+ with pytest.raises(ValueError):
+ URL("http://example.com").with_name("a/b")
+
+
+def test_with_name_non_str():
+ with pytest.raises(TypeError):
+ URL("http://example.com").with_name(123)
+
+
+def test_with_name_within_colon_and_at():
+ url = URL("http://example.com/oldpath").with_name("path:abc@123")
+ assert url.raw_path == "/path:abc@123"
+
+
+def test_with_name_dot():
+ with pytest.raises(ValueError):
+ URL("http://example.com").with_name(".")
+
+
+def test_with_name_double_dot():
+ with pytest.raises(ValueError):
+ URL("http://example.com").with_name("..")
+
+
+# with_suffix
+
+
+def test_with_suffix():
+ url = URL("http://example.com/a/b")
+ assert url.raw_parts == ("/", "a", "b")
+ url2 = url.with_suffix(".c")
+ assert url2.raw_parts == ("/", "a", "b.c")
+ assert url2.parts == ("/", "a", "b.c")
+ assert url2.raw_path == "/a/b.c"
+ assert url2.path == "/a/b.c"
+
+
+def test_with_suffix_for_naked_path():
+ url = URL("http://example.com")
+ with pytest.raises(ValueError) as excinfo:
+ url.with_suffix(".a")
+ (msg,) = excinfo.value.args
+ assert msg == f"{url!r} has an empty name"
+
+
+def test_with_suffix_for_relative_path():
+ url = URL("a")
+ url2 = url.with_suffix(".b")
+ assert url2.raw_parts == ("a.b",)
+
+
+def test_with_suffix_for_relative_path2():
+ url = URL("a/b")
+ url2 = url.with_suffix(".c")
+ assert url2.raw_parts == ("a", "b.c")
+
+
+def test_with_suffix_for_relative_path_starting_from_slash():
+ url = URL("/a")
+ url2 = url.with_suffix(".b")
+ assert url2.raw_parts == ("/", "a.b")
+
+
+def test_with_suffix_for_relative_path_starting_from_slash2():
+ url = URL("/a/b")
+ url2 = url.with_suffix(".c")
+ assert url2.raw_parts == ("/", "a", "b.c")
+
+
+def test_with_suffix_empty():
+ url = URL("http://example.com/path/to").with_suffix("")
+ assert str(url) == "http://example.com/path/to"
+
+
+def test_with_suffix_non_ascii():
+ url = URL("http://example.com/path").with_suffix(".путь")
+ assert url.path == "/path.путь"
+ assert url.raw_path == "/path.%D0%BF%D1%83%D1%82%D1%8C"
+ assert url.parts == ("/", "path.путь")
+ assert url.raw_parts == ("/", "path.%D0%BF%D1%83%D1%82%D1%8C")
+
+
+def test_with_suffix_percent_encoded():
+ url = URL("http://example.com/path")
+ url2 = url.with_suffix(".%cf%80")
+ assert url2.raw_parts == ("/", "path.%25cf%2580")
+ assert url2.parts == ("/", "path.%cf%80")
+ assert url2.raw_path == "/path.%25cf%2580"
+ assert url2.path == "/path.%cf%80"
+
+
+def test_with_suffix_without_dot():
+ with pytest.raises(ValueError) as excinfo:
+ URL("http://example.com/a").with_suffix("b")
+ (msg,) = excinfo.value.args
+ assert msg == "Invalid suffix 'b'"
+
+
+def test_with_suffix_non_str():
+ with pytest.raises(TypeError) as excinfo:
+ URL("http://example.com").with_suffix(123)
+ (msg,) = excinfo.value.args
+ assert msg == "Invalid suffix type"
+
+
+def test_with_suffix_dot():
+ with pytest.raises(ValueError) as excinfo:
+ URL("http://example.com").with_suffix(".")
+ (msg,) = excinfo.value.args
+ assert msg == "Invalid suffix '.'"
+
+
+def test_with_suffix_with_slash():
+ with pytest.raises(ValueError) as excinfo:
+ URL("http://example.com/a").with_suffix("/.b")
+ (msg,) = excinfo.value.args
+ assert msg == "Invalid suffix '/.b'"
+
+
+def test_with_suffix_with_slash2():
+ with pytest.raises(ValueError) as excinfo:
+ URL("http://example.com/a").with_suffix(".b/.d")
+ (msg,) = excinfo.value.args
+ assert msg == "Slash in name is not allowed"
+
+
+def test_with_suffix_replace():
+ url = URL("/a.b")
+ url2 = url.with_suffix(".c")
+ assert url2.raw_parts == ("/", "a.c")
+
+
+# is_absolute
+
+
+def test_is_absolute_for_relative_url():
+ url = URL("/path/to")
+ assert not url.is_absolute()
+
+
+def test_is_absolute_for_absolute_url():
+ url = URL("http://example.com")
+ assert url.is_absolute()
+
+
+def test_is_non_absolute_for_empty_url():
+ url = URL()
+ assert not url.is_absolute()
+
+
+def test_is_non_absolute_for_empty_url2():
+ url = URL("")
+ assert not url.is_absolute()
+
+
+def test_is_absolute_path_starting_from_double_slash():
+ url = URL("//www.python.org")
+ assert url.is_absolute()
+
+
+# is_default_port
+
+
+def test_is_default_port_for_relative_url():
+ url = URL("/path/to")
+ assert not url.is_default_port()
+
+
+def test_is_default_port_for_absolute_url_without_port():
+ url = URL("http://example.com")
+ assert url.is_default_port()
+
+
+def test_is_default_port_for_absolute_url_with_default_port():
+ url = URL("http://example.com:80")
+ assert url.is_default_port()
+
+
+def test_is_default_port_for_absolute_url_with_nondefault_port():
+ url = URL("http://example.com:8080")
+ assert not url.is_default_port()
+
+
+def test_is_default_port_for_unknown_scheme():
+ url = URL("unknown://example.com:8080")
+ assert not url.is_default_port()
+
+
+#
+
+
+def test_no_scheme():
+ url = URL("example.com")
+ assert url.raw_host is None
+ assert url.raw_path == "example.com"
+ assert str(url) == "example.com"
+
+
+def test_no_scheme2():
+ url = URL("example.com/a/b")
+ assert url.raw_host is None
+ assert url.raw_path == "example.com/a/b"
+ assert str(url) == "example.com/a/b"
+
+
+def test_from_non_allowed():
+ with pytest.raises(TypeError):
+ URL(1234)
+
+
+def test_from_idna():
+ url = URL("http://xn--jxagkqfkduily1i.eu")
+ assert "http://xn--jxagkqfkduily1i.eu" == str(url)
+ url = URL("http://xn--einla-pqa.de/") # needs idna 2008
+ assert "http://xn--einla-pqa.de/" == str(url)
+
+
+def test_to_idna():
+ url = URL("http://εμπορικόσήμα.eu")
+ assert "http://xn--jxagkqfkduily1i.eu" == str(url)
+ url = URL("http://einlaß.de/")
+ assert "http://xn--einla-pqa.de/" == str(url)
+
+
+def test_from_ascii_login():
+ url = URL("http://" "%D0%B2%D0%B0%D1%81%D1%8F" "@host:1234/")
+ assert ("http://" "%D0%B2%D0%B0%D1%81%D1%8F" "@host:1234/") == str(url)
+
+
+def test_from_non_ascii_login():
+ url = URL("http://вася@host:1234/")
+ assert ("http://" "%D0%B2%D0%B0%D1%81%D1%8F" "@host:1234/") == str(url)
+
+
+def test_from_ascii_login_and_password():
+ url = URL(
+ "http://"
+ "%D0%B2%D0%B0%D1%81%D1%8F"
+ ":%D0%BF%D0%B0%D1%80%D0%BE%D0%BB%D1%8C"
+ "@host:1234/"
+ )
+ assert (
+ "http://"
+ "%D0%B2%D0%B0%D1%81%D1%8F"
+ ":%D0%BF%D0%B0%D1%80%D0%BE%D0%BB%D1%8C"
+ "@host:1234/"
+ ) == str(url)
+
+
+def test_from_non_ascii_login_and_password():
+ url = URL("http://вася:пароль@host:1234/")
+ assert (
+ "http://"
+ "%D0%B2%D0%B0%D1%81%D1%8F"
+ ":%D0%BF%D0%B0%D1%80%D0%BE%D0%BB%D1%8C"
+ "@host:1234/"
+ ) == str(url)
+
+
+def test_from_ascii_path():
+ url = URL("http://example.com/" "%D0%BF%D1%83%D1%82%D1%8C/%D1%82%D1%83%D0%B4%D0%B0")
+ assert (
+ "http://example.com/" "%D0%BF%D1%83%D1%82%D1%8C/%D1%82%D1%83%D0%B4%D0%B0"
+ ) == str(url)
+
+
+def test_from_ascii_path_lower_case():
+ url = URL("http://example.com/" "%d0%bf%d1%83%d1%82%d1%8c/%d1%82%d1%83%d0%b4%d0%b0")
+ assert (
+ "http://example.com/" "%D0%BF%D1%83%D1%82%D1%8C/%D1%82%D1%83%D0%B4%D0%B0"
+ ) == str(url)
+
+
+def test_from_non_ascii_path():
+ url = URL("http://example.com/путь/туда")
+ assert (
+ "http://example.com/" "%D0%BF%D1%83%D1%82%D1%8C/%D1%82%D1%83%D0%B4%D0%B0"
+ ) == str(url)
+
+
+def test_bytes():
+ url = URL("http://example.com/путь/туда")
+ assert (
+ b"http://example.com/%D0%BF%D1%83%D1%82%D1%8C/%D1%82%D1%83%D0%B4%D0%B0"
+ == bytes(url)
+ )
+
+
+def test_from_ascii_query_parts():
+ url = URL(
+ "http://example.com/"
+ "?%D0%BF%D0%B0%D1%80%D0%B0%D0%BC"
+ "=%D0%B7%D0%BD%D0%B0%D1%87"
+ )
+ assert (
+ "http://example.com/"
+ "?%D0%BF%D0%B0%D1%80%D0%B0%D0%BC"
+ "=%D0%B7%D0%BD%D0%B0%D1%87"
+ ) == str(url)
+
+
+def test_from_non_ascii_query_parts():
+ url = URL("http://example.com/?парам=знач")
+ assert (
+ "http://example.com/"
+ "?%D0%BF%D0%B0%D1%80%D0%B0%D0%BC"
+ "=%D0%B7%D0%BD%D0%B0%D1%87"
+ ) == str(url)
+
+
+def test_from_non_ascii_query_parts2():
+ url = URL("http://example.com/?п=з&ю=б")
+ assert "http://example.com/?%D0%BF=%D0%B7&%D1%8E=%D0%B1" == str(url)
+
+
+def test_from_ascii_fragment():
+ url = URL("http://example.com/" "#%D1%84%D1%80%D0%B0%D0%B3%D0%BC%D0%B5%D0%BD%D1%82")
+ assert (
+ "http://example.com/" "#%D1%84%D1%80%D0%B0%D0%B3%D0%BC%D0%B5%D0%BD%D1%82"
+ ) == str(url)
+
+
+def test_from_bytes_with_non_ascii_fragment():
+ url = URL("http://example.com/#фрагмент")
+ assert (
+ "http://example.com/" "#%D1%84%D1%80%D0%B0%D0%B3%D0%BC%D0%B5%D0%BD%D1%82"
+ ) == str(url)
+
+
+def test_to_str():
+ url = URL("http://εμπορικόσήμα.eu/")
+ assert "http://xn--jxagkqfkduily1i.eu/" == str(url)
+
+
+def test_to_str_long():
+ url = URL(
+ "https://host-12345678901234567890123456789012345678901234567890" "-name:8888/"
+ )
+ expected = (
+ "https://host-"
+ "12345678901234567890123456789012345678901234567890"
+ "-name:8888/"
+ )
+ assert expected == str(url)
+
+
+def test_decoding_with_2F_in_path():
+ url = URL("http://example.com/path%2Fto")
+ assert "http://example.com/path%2Fto" == str(url)
+ assert url == URL(str(url))
+
+
+def test_decoding_with_26_and_3D_in_query():
+ url = URL("http://example.com/?%26=%3D")
+ assert "http://example.com/?%26=%3D" == str(url)
+ assert url == URL(str(url))
+
+
+def test_fragment_only_url():
+ url = URL("#frag")
+ assert str(url) == "#frag"
+
+
+def test_url_from_url():
+ url = URL("http://example.com")
+ assert URL(url) == url
+ assert URL(url).raw_parts == ("/",)
+
+
+def test_lowercase_scheme():
+ url = URL("HTTP://example.com")
+ assert str(url) == "http://example.com"
+
+
+def test_str_for_empty_url():
+ url = URL()
+ assert "" == str(url)
+
+
+def test_parent_for_empty_url():
+ url = URL()
+ assert url is url.parent
+
+
+def test_empty_value_for_query():
+ url = URL("http://example.com/path").with_query({"a": ""})
+ assert str(url) == "http://example.com/path?a="
+
+
+def test_none_value_for_query():
+ with pytest.raises(TypeError):
+ URL("http://example.com/path").with_query({"a": None})
+
+
+def test_decode_pct_in_path():
+ url = URL("http://www.python.org/%7Eguido")
+ assert "http://www.python.org/~guido" == str(url)
+
+
+def test_decode_pct_in_path_lower_case():
+ url = URL("http://www.python.org/%7eguido")
+ assert "http://www.python.org/~guido" == str(url)
+
+
+# join
+
+
+def test_join():
+ base = URL("http://www.cwi.nl/%7Eguido/Python.html")
+ url = URL("FAQ.html")
+ url2 = base.join(url)
+ assert str(url2) == "http://www.cwi.nl/~guido/FAQ.html"
+
+
+def test_join_absolute():
+ base = URL("http://www.cwi.nl/%7Eguido/Python.html")
+ url = URL("//www.python.org/%7Eguido")
+ url2 = base.join(url)
+ assert str(url2) == "http://www.python.org/~guido"
+
+
+def test_join_non_url():
+ base = URL("http://example.com")
+ with pytest.raises(TypeError):
+ base.join("path/to")
+
+
+NORMAL = [
+ ("g:h", "g:h"),
+ ("g", "http://a/b/c/g"),
+ ("./g", "http://a/b/c/g"),
+ ("g/", "http://a/b/c/g/"),
+ ("/g", "http://a/g"),
+ ("//g", "http://g"),
+ ("?y", "http://a/b/c/d;p?y"),
+ ("g?y", "http://a/b/c/g?y"),
+ ("#s", "http://a/b/c/d;p?q#s"),
+ ("g#s", "http://a/b/c/g#s"),
+ ("g?y#s", "http://a/b/c/g?y#s"),
+ (";x", "http://a/b/c/;x"),
+ ("g;x", "http://a/b/c/g;x"),
+ ("g;x?y#s", "http://a/b/c/g;x?y#s"),
+ ("", "http://a/b/c/d;p?q"),
+ (".", "http://a/b/c/"),
+ ("./", "http://a/b/c/"),
+ ("..", "http://a/b/"),
+ ("../", "http://a/b/"),
+ ("../g", "http://a/b/g"),
+ ("../..", "http://a/"),
+ ("../../", "http://a/"),
+ ("../../g", "http://a/g"),
+]
+
+
+@pytest.mark.parametrize("url,expected", NORMAL)
+def test_join_from_rfc_3986_normal(url, expected):
+ # test case from https://tools.ietf.org/html/rfc3986.html#section-5.4
+ base = URL("http://a/b/c/d;p?q")
+ url = URL(url)
+ expected = URL(expected)
+ assert base.join(url) == expected
+
+
+ABNORMAL = [
+ ("../../../g", "http://a/g"),
+ ("../../../../g", "http://a/g"),
+ ("/./g", "http://a/g"),
+ ("/../g", "http://a/g"),
+ ("g.", "http://a/b/c/g."),
+ (".g", "http://a/b/c/.g"),
+ ("g..", "http://a/b/c/g.."),
+ ("..g", "http://a/b/c/..g"),
+ ("./../g", "http://a/b/g"),
+ ("./g/.", "http://a/b/c/g/"),
+ ("g/./h", "http://a/b/c/g/h"),
+ ("g/../h", "http://a/b/c/h"),
+ ("g;x=1/./y", "http://a/b/c/g;x=1/y"),
+ ("g;x=1/../y", "http://a/b/c/y"),
+ ("g?y/./x", "http://a/b/c/g?y/./x"),
+ ("g?y/../x", "http://a/b/c/g?y/../x"),
+ ("g#s/./x", "http://a/b/c/g#s/./x"),
+ ("g#s/../x", "http://a/b/c/g#s/../x"),
+]
+
+
+@pytest.mark.parametrize("url,expected", ABNORMAL)
+def test_join_from_rfc_3986_abnormal(url, expected):
+ # test case from https://tools.ietf.org/html/rfc3986.html#section-5.4.2
+ base = URL("http://a/b/c/d;p?q")
+ url = URL(url)
+ expected = URL(expected)
+ assert base.join(url) == expected
+
+
+def test_split_result_non_decoded():
+ with pytest.raises(ValueError):
+ URL(SplitResult("http", "example.com", "path", "qs", "frag"))
+
+
+def test_human_repr():
+ url = URL("http://вася:пароль@хост.домен:8080/путь/сюда?арг=вал#фраг")
+ s = url.human_repr()
+ assert URL(s) == url
+ assert s == "http://вася:пароль@хост.домен:8080/путь/сюда?арг=вал#фраг"
+
+
+def test_human_repr_defaults():
+ url = URL("путь")
+ s = url.human_repr()
+ assert s == "путь"
+
+
+def test_human_repr_default_port():
+ url = URL("http://вася:пароль@хост.домен/путь/сюда?арг=вал#фраг")
+ s = url.human_repr()
+ assert URL(s) == url
+ assert s == "http://вася:пароль@хост.домен/путь/сюда?арг=вал#фраг"
+
+
+def test_human_repr_ipv6():
+ url = URL("http://[::1]:8080/path")
+ s = url.human_repr()
+ url2 = URL(s)
+ assert url2 == url
+ assert url2.host == "::1"
+ assert s == "http://[::1]:8080/path"
+
+
+def test_human_repr_delimiters():
+ url = URL.build(
+ scheme="http",
+ user=" !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~",
+ password=" !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~",
+ host="хост.домен",
+ port=8080,
+ path="/ !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~",
+ query={
+ " !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~": " !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~"
+ },
+ fragment=" !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~",
+ )
+ s = url.human_repr()
+ assert URL(s) == url
+ assert (
+ s == "http:// !\"%23$%25&'()*+,-.%2F%3A;<=>%3F%40%5B\\%5D^_`{|}~"
+ ": !\"%23$%25&'()*+,-.%2F%3A;<=>%3F%40%5B\\%5D^_`{|}~"
+ "@хост.домен:8080"
+ "/ !\"%23$%25&'()*+,-./:;<=>%3F@[\\]^_`{|}~"
+ "? !\"%23$%25%26'()*%2B,-./:%3B<%3D>?@[\\]^_`{|}~"
+ "= !\"%23$%25%26'()*%2B,-./:%3B<%3D>?@[\\]^_`{|}~"
+ "# !\"#$%25&'()*+,-./:;<=>?@[\\]^_`{|}~"
+ )
+
+
+def test_human_repr_non_printable():
+ url = URL.build(
+ scheme="http",
+ user="вася\n\xad\u200b",
+ password="пароль\n\xad\u200b",
+ host="хост.домен",
+ port=8080,
+ path="/путь\n\xad\u200b",
+ query={"арг\n\xad\u200b": "вал\n\xad\u200b"},
+ fragment="фраг\n\xad\u200b",
+ )
+ s = url.human_repr()
+ assert URL(s) == url
+ assert (
+ s == "http://вася%0A%C2%AD%E2%80%8B:пароль%0A%C2%AD%E2%80%8B"
+ "@хост.домен:8080"
+ "/путь%0A%C2%AD%E2%80%8B"
+ "?арг%0A%C2%AD%E2%80%8B=вал%0A%C2%AD%E2%80%8B"
+ "#фраг%0A%C2%AD%E2%80%8B"
+ )
+
+
+# relative
+
+
+def test_relative():
+ url = URL("http://user:pass@example.com:8080/path?a=b#frag")
+ rel = url.relative()
+ assert str(rel) == "/path?a=b#frag"
+
+
+def test_relative_is_relative():
+ url = URL("http://user:pass@example.com:8080/path?a=b#frag")
+ rel = url.relative()
+ assert not rel.is_absolute()
+
+
+def test_relative_abs_parts_are_removed():
+ url = URL("http://user:pass@example.com:8080/path?a=b#frag")
+ rel = url.relative()
+ assert not rel.scheme
+ assert not rel.user
+ assert not rel.password
+ assert not rel.host
+ assert not rel.port
+
+
+def test_relative_fails_on_rel_url():
+ with pytest.raises(ValueError):
+ URL("/path?a=b#frag").relative()
+
+
+def test_slash_and_question_in_query():
+ u = URL("http://example.com/path?http://example.com/p?a#b")
+ assert u.query_string == "http://example.com/p?a"
+
+
+def test_slash_and_question_in_fragment():
+ u = URL("http://example.com/path#http://example.com/p?a")
+ assert u.fragment == "http://example.com/p?a"
+
+
+def test_requoting():
+ u = URL("http://127.0.0.1/?next=http%3A//example.com/")
+ assert u.raw_query_string == "next=http://example.com/"
+ assert str(u) == "http://127.0.0.1/?next=http://example.com/"
diff --git a/contrib/python/yarl/tests/test_url_build.py b/contrib/python/yarl/tests/test_url_build.py
new file mode 100644
index 0000000000..51969fa849
--- /dev/null
+++ b/contrib/python/yarl/tests/test_url_build.py
@@ -0,0 +1,259 @@
+import pytest
+
+from yarl import URL
+
+# build classmethod
+
+
+def test_build_without_arguments():
+ u = URL.build()
+ assert str(u) == ""
+
+
+def test_build_simple():
+ u = URL.build(scheme="http", host="127.0.0.1")
+ assert str(u) == "http://127.0.0.1"
+
+
+def test_build_with_scheme():
+ u = URL.build(scheme="blob", path="path")
+ assert str(u) == "blob:path"
+
+
+def test_build_with_host():
+ u = URL.build(host="127.0.0.1")
+ assert str(u) == "//127.0.0.1"
+ assert u == URL("//127.0.0.1")
+
+
+def test_build_with_scheme_and_host():
+ u = URL.build(scheme="http", host="127.0.0.1")
+ assert str(u) == "http://127.0.0.1"
+ assert u == URL("http://127.0.0.1")
+
+
+def test_build_with_port():
+ with pytest.raises(ValueError):
+ URL.build(port=8000)
+
+ u = URL.build(scheme="http", host="127.0.0.1", port=8000)
+ assert str(u) == "http://127.0.0.1:8000"
+
+
+def test_build_with_user():
+ u = URL.build(scheme="http", host="127.0.0.1", user="foo")
+ assert str(u) == "http://foo@127.0.0.1"
+
+
+def test_build_with_user_password():
+ u = URL.build(scheme="http", host="127.0.0.1", user="foo", password="bar")
+ assert str(u) == "http://foo:bar@127.0.0.1"
+
+
+def test_build_with_query_and_query_string():
+ with pytest.raises(ValueError):
+ URL.build(
+ scheme="http",
+ host="127.0.0.1",
+ user="foo",
+ password="bar",
+ port=8000,
+ path="/index.html",
+ query=dict(arg="value1"),
+ query_string="arg=value1",
+ fragment="top",
+ )
+
+
+def test_build_with_all():
+ u = URL.build(
+ scheme="http",
+ host="127.0.0.1",
+ user="foo",
+ password="bar",
+ port=8000,
+ path="/index.html",
+ query_string="arg=value1",
+ fragment="top",
+ )
+ assert str(u) == "http://foo:bar@127.0.0.1:8000/index.html?arg=value1#top"
+
+
+def test_build_with_authority_and_host():
+ with pytest.raises(ValueError):
+ URL.build(authority="host.com", host="example.com")
+
+
+def test_build_with_authority():
+ url = URL.build(scheme="http", authority="ваня:bar@host.com:8000", path="path")
+ assert str(url) == "http://%D0%B2%D0%B0%D0%BD%D1%8F:bar@host.com:8000/path"
+
+
+def test_build_with_authority_without_encoding():
+ url = URL.build(
+ scheme="http", authority="foo:bar@host.com:8000", path="path", encoded=True
+ )
+ assert str(url) == "http://foo:bar@host.com:8000/path"
+
+
+def test_query_str():
+ u = URL.build(scheme="http", host="127.0.0.1", path="/", query_string="arg=value1")
+ assert str(u) == "http://127.0.0.1/?arg=value1"
+
+
+def test_query_dict():
+ u = URL.build(scheme="http", host="127.0.0.1", path="/", query=dict(arg="value1"))
+
+ assert str(u) == "http://127.0.0.1/?arg=value1"
+
+
+def test_build_path_quoting():
+ u = URL.build(
+ scheme="http", host="127.0.0.1", path="/файл.jpg", query=dict(arg="Привет")
+ )
+
+ assert u == URL("http://127.0.0.1/файл.jpg?arg=Привет")
+ assert str(u) == (
+ "http://127.0.0.1/%D1%84%D0%B0%D0%B9%D0%BB.jpg?"
+ "arg=%D0%9F%D1%80%D0%B8%D0%B2%D0%B5%D1%82"
+ )
+
+
+def test_build_query_quoting():
+ u = URL.build(scheme="http", host="127.0.0.1", path="/файл.jpg", query="arg=Привет")
+
+ assert u == URL("http://127.0.0.1/файл.jpg?arg=Привет")
+ assert str(u) == (
+ "http://127.0.0.1/%D1%84%D0%B0%D0%B9%D0%BB.jpg?"
+ "arg=%D0%9F%D1%80%D0%B8%D0%B2%D0%B5%D1%82"
+ )
+
+
+def test_build_query_only():
+ u = URL.build(query={"key": "value"})
+
+ assert str(u) == "?key=value"
+
+
+def test_build_drop_dots():
+ u = URL.build(scheme="http", host="example.com", path="/path/../to")
+ assert str(u) == "http://example.com/to"
+
+
+def test_build_encode():
+ u = URL.build(
+ scheme="http",
+ host="историк.рф",
+ path="/путь/файл",
+ query_string="ключ=знач",
+ fragment="фраг",
+ )
+ expected = (
+ "http://xn--h1aagokeh.xn--p1ai"
+ "/%D0%BF%D1%83%D1%82%D1%8C/%D1%84%D0%B0%D0%B9%D0%BB"
+ "?%D0%BA%D0%BB%D1%8E%D1%87=%D0%B7%D0%BD%D0%B0%D1%87"
+ "#%D1%84%D1%80%D0%B0%D0%B3"
+ )
+ assert str(u) == expected
+
+
+def test_build_already_encoded():
+ # resulting URL is invalid but not encoded
+ u = URL.build(
+ scheme="http",
+ host="историк.рф",
+ path="/путь/файл",
+ query_string="ключ=знач",
+ fragment="фраг",
+ encoded=True,
+ )
+ assert str(u) == "http://историк.рф/путь/файл?ключ=знач#фраг"
+
+
+def test_build_percent_encoded():
+ u = URL.build(
+ scheme="http",
+ host="%2d.org",
+ user="u%2d",
+ password="p%2d",
+ path="/%2d",
+ query_string="k%2d=v%2d",
+ fragment="f%2d",
+ )
+ assert str(u) == "http://u%252d:p%252d@%2d.org/%252d?k%252d=v%252d#f%252d"
+ assert u.raw_host == "%2d.org"
+ assert u.host == "%2d.org"
+ assert u.raw_user == "u%252d"
+ assert u.user == "u%2d"
+ assert u.raw_password == "p%252d"
+ assert u.password == "p%2d"
+ assert u.raw_authority == "u%252d:p%252d@%2d.org"
+ assert u.authority == "u%2d:p%2d@%2d.org:80"
+ assert u.raw_path == "/%252d"
+ assert u.path == "/%2d"
+ assert u.query == {"k%2d": "v%2d"}
+ assert u.raw_query_string == "k%252d=v%252d"
+ assert u.query_string == "k%2d=v%2d"
+ assert u.raw_fragment == "f%252d"
+ assert u.fragment == "f%2d"
+
+
+def test_build_with_authority_percent_encoded():
+ u = URL.build(scheme="http", authority="u%2d:p%2d@%2d.org")
+ assert str(u) == "http://u%252d:p%252d@%2d.org"
+ assert u.raw_host == "%2d.org"
+ assert u.host == "%2d.org"
+ assert u.raw_user == "u%252d"
+ assert u.user == "u%2d"
+ assert u.raw_password == "p%252d"
+ assert u.password == "p%2d"
+ assert u.raw_authority == "u%252d:p%252d@%2d.org"
+ assert u.authority == "u%2d:p%2d@%2d.org:80"
+
+
+def test_build_with_authority_percent_encoded_already_encoded():
+ u = URL.build(scheme="http", authority="u%2d:p%2d@%2d.org", encoded=True)
+ assert str(u) == "http://u%2d:p%2d@%2d.org"
+ assert u.raw_host == "%2d.org"
+ assert u.host == "%2d.org"
+ assert u.user == "u-"
+ assert u.raw_user == "u%2d"
+ assert u.password == "p-"
+ assert u.raw_password == "p%2d"
+ assert u.authority == "u-:p-@%2d.org:80"
+ assert u.raw_authority == "u%2d:p%2d@%2d.org"
+
+
+def test_build_with_authority_with_path_with_leading_slash():
+ u = URL.build(scheme="http", host="example.com", path="/path_with_leading_slash")
+ assert str(u) == "http://example.com/path_with_leading_slash"
+
+
+def test_build_with_authority_with_empty_path():
+ u = URL.build(scheme="http", host="example.com", path="")
+ assert str(u) == "http://example.com"
+
+
+def test_build_with_authority_with_path_without_leading_slash():
+ with pytest.raises(ValueError):
+ URL.build(scheme="http", host="example.com", path="path_without_leading_slash")
+
+
+def test_build_with_none_host():
+ with pytest.raises(TypeError, match="NoneType is illegal for.*host"):
+ URL.build(scheme="http", host=None)
+
+
+def test_build_with_none_path():
+ with pytest.raises(TypeError):
+ URL.build(scheme="http", host="example.com", path=None)
+
+
+def test_build_with_none_query_string():
+ with pytest.raises(TypeError):
+ URL.build(scheme="http", host="example.com", query_string=None)
+
+
+def test_build_with_none_fragment():
+ with pytest.raises(TypeError):
+ URL.build(scheme="http", host="example.com", fragment=None)
diff --git a/contrib/python/yarl/tests/test_url_cmp_and_hash.py b/contrib/python/yarl/tests/test_url_cmp_and_hash.py
new file mode 100644
index 0000000000..17c42e3566
--- /dev/null
+++ b/contrib/python/yarl/tests/test_url_cmp_and_hash.py
@@ -0,0 +1,88 @@
+from yarl import URL
+
+# comparison and hashing
+
+
+def test_ne_str():
+ url = URL("http://example.com/")
+ assert url != "http://example.com/"
+
+
+def test_eq():
+ url = URL("http://example.com/")
+ assert url == URL("http://example.com/")
+
+
+def test_hash():
+ assert hash(URL("http://example.com/")) == hash(URL("http://example.com/"))
+
+
+def test_hash_double_call():
+ url = URL("http://example.com/")
+ assert hash(url) == hash(url)
+
+
+def test_le_less():
+ url1 = URL("http://example1.com/")
+ url2 = URL("http://example2.com/")
+
+ assert url1 <= url2
+
+
+def test_le_eq():
+ url1 = URL("http://example.com/")
+ url2 = URL("http://example.com/")
+
+ assert url1 <= url2
+
+
+def test_le_not_implemented():
+ url = URL("http://example1.com/")
+
+ assert url.__le__(123) is NotImplemented
+
+
+def test_lt():
+ url1 = URL("http://example1.com/")
+ url2 = URL("http://example2.com/")
+
+ assert url1 < url2
+
+
+def test_lt_not_implemented():
+ url = URL("http://example1.com/")
+
+ assert url.__lt__(123) is NotImplemented
+
+
+def test_ge_more():
+ url1 = URL("http://example1.com/")
+ url2 = URL("http://example2.com/")
+
+ assert url2 >= url1
+
+
+def test_ge_eq():
+ url1 = URL("http://example.com/")
+ url2 = URL("http://example.com/")
+
+ assert url2 >= url1
+
+
+def test_ge_not_implemented():
+ url = URL("http://example1.com/")
+
+ assert url.__ge__(123) is NotImplemented
+
+
+def test_gt():
+ url1 = URL("http://example1.com/")
+ url2 = URL("http://example2.com/")
+
+ assert url2 > url1
+
+
+def test_gt_not_implemented():
+ url = URL("http://example1.com/")
+
+ assert url.__gt__(123) is NotImplemented
diff --git a/contrib/python/yarl/tests/test_url_parsing.py b/contrib/python/yarl/tests/test_url_parsing.py
new file mode 100644
index 0000000000..cc753fcd0c
--- /dev/null
+++ b/contrib/python/yarl/tests/test_url_parsing.py
@@ -0,0 +1,582 @@
+import sys
+
+import pytest
+
+from yarl import URL
+
+
+class TestScheme:
+ def test_scheme_path(self):
+ u = URL("scheme:path")
+ assert u.scheme == "scheme"
+ assert u.host is None
+ assert u.path == "path"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_scheme_path_other(self):
+ u = URL("scheme:path:other")
+ assert u.scheme == "scheme"
+ assert u.host is None
+ assert u.path == "path:other"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_complex_scheme(self):
+ u = URL("allow+chars-33.:path")
+ assert u.scheme == "allow+chars-33."
+ assert u.host is None
+ assert u.path == "path"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_scheme_only(self):
+ u = URL("simple:")
+ assert u.scheme == "simple"
+ assert u.host is None
+ assert u.path == ""
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_no_scheme1(self):
+ u = URL("google.com:80")
+ # See: https://bugs.python.org/issue27657
+ if (
+ sys.version_info[:3] == (3, 7, 6)
+ or sys.version_info[:3] == (3, 8, 1)
+ or sys.version_info >= (3, 9, 0)
+ ):
+ assert u.scheme == "google.com"
+ assert u.host is None
+ assert u.path == "80"
+ else:
+ assert u.scheme == ""
+ assert u.host is None
+ assert u.path == "google.com:80"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_no_scheme2(self):
+ u = URL("google.com:80/root")
+ assert u.scheme == "google.com"
+ assert u.host is None
+ assert u.path == "80/root"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_not_a_scheme1(self):
+ u = URL("not_cheme:path")
+ assert u.scheme == ""
+ assert u.host is None
+ assert u.path == "not_cheme:path"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_not_a_scheme2(self):
+ u = URL("signals37:book")
+ assert u.scheme == "signals37"
+ assert u.host is None
+ assert u.path == "book"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_scheme_rel_path1(self):
+ u = URL(":relative-path")
+ assert u.scheme == ""
+ assert u.host is None
+ assert u.path == ":relative-path"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_scheme_rel_path2(self):
+ u = URL(":relative/path")
+ assert u.scheme == ""
+ assert u.host is None
+ assert u.path == ":relative/path"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_scheme_weird(self):
+ u = URL("://and-this")
+ assert u.scheme == ""
+ assert u.host is None
+ assert u.path == "://and-this"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+
+class TestHost:
+ def test_canonical(self):
+ u = URL("scheme://host/path")
+ assert u.scheme == "scheme"
+ assert u.host == "host"
+ assert u.path == "/path"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_absolute_no_scheme(self):
+ u = URL("//host/path")
+ assert u.scheme == ""
+ assert u.host == "host"
+ assert u.path == "/path"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_absolute_no_scheme_complex_host(self):
+ u = URL("//host+path")
+ assert u.scheme == ""
+ assert u.host == "host+path"
+ assert u.path == "/"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_absolute_no_scheme_simple_host(self):
+ u = URL("//host")
+ assert u.scheme == ""
+ assert u.host == "host"
+ assert u.path == "/"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_weird_host(self):
+ u = URL("//this+is$also&host!")
+ assert u.scheme == ""
+ assert u.host == "this+is$also&host!"
+ assert u.path == "/"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_scheme_no_host(self):
+ u = URL("scheme:/host/path")
+ assert u.scheme == "scheme"
+ assert u.host is None
+ assert u.path == "/host/path"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_scheme_no_host2(self):
+ u = URL("scheme:///host/path")
+ assert u.scheme == "scheme"
+ assert u.host is None
+ assert u.path == "/host/path"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_no_scheme_no_host(self):
+ u = URL("scheme//host/path")
+ assert u.scheme == ""
+ assert u.host is None
+ assert u.path == "scheme//host/path"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_ipv4(self):
+ u = URL("//127.0.0.1/")
+ assert u.scheme == ""
+ assert u.host == "127.0.0.1"
+ assert u.path == "/"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_ipv6(self):
+ u = URL("//[::1]/")
+ assert u.scheme == ""
+ assert u.host == "::1"
+ assert u.path == "/"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_ipvfuture_address(self):
+ u = URL("//[v1.-1]/")
+ assert u.scheme == ""
+ assert u.host == "v1.-1"
+ assert u.path == "/"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+
+class TestPort:
+ def test_canonical(self):
+ u = URL("//host:80/path")
+ assert u.scheme == ""
+ assert u.host == "host"
+ assert u.port == 80
+ assert u.path == "/path"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_no_path(self):
+ u = URL("//host:80")
+ assert u.scheme == ""
+ assert u.host == "host"
+ assert u.port == 80
+ assert u.path == "/"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ @pytest.mark.xfail(reason="https://github.com/aio-libs/yarl/issues/821")
+ def test_no_host(self):
+ u = URL("//:80")
+ assert u.scheme == ""
+ assert u.host == ""
+ assert u.port == 80
+ assert u.path == "/"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_double_port(self):
+ with pytest.raises(ValueError):
+ URL("//h:22:80/")
+
+ def test_bad_port(self):
+ with pytest.raises(ValueError):
+ URL("//h:no/path")
+
+ def test_another_bad_port(self):
+ with pytest.raises(ValueError):
+ URL("//h:22:no/path")
+
+ def test_bad_port_again(self):
+ with pytest.raises(ValueError):
+ URL("//h:-80/path")
+
+
+class TestUserInfo:
+ def test_canonical(self):
+ u = URL("sch://user@host/")
+ assert u.scheme == "sch"
+ assert u.user == "user"
+ assert u.host == "host"
+ assert u.path == "/"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_user_pass(self):
+ u = URL("//user:pass@host")
+ assert u.scheme == ""
+ assert u.user == "user"
+ assert u.password == "pass"
+ assert u.host == "host"
+ assert u.path == "/"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_complex_userinfo(self):
+ u = URL("//user:pas:and:more@host")
+ assert u.scheme == ""
+ assert u.user == "user"
+ assert u.password == "pas:and:more"
+ assert u.host == "host"
+ assert u.path == "/"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_no_user(self):
+ u = URL("//:pas:@host")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password == "pas:"
+ assert u.host == "host"
+ assert u.path == "/"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_weird_user(self):
+ u = URL("//!($&')*+,;=@host")
+ assert u.scheme == ""
+ assert u.user == "!($&')*+,;="
+ assert u.password is None
+ assert u.host == "host"
+ assert u.path == "/"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_weird_user2(self):
+ u = URL("//user@info@ya.ru")
+ assert u.scheme == ""
+ assert u.user == "user@info"
+ assert u.password is None
+ assert u.host == "ya.ru"
+ assert u.path == "/"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_weird_user3(self):
+ u = URL("//%5Bsome%5D@host")
+ assert u.scheme == ""
+ assert u.user == "[some]"
+ assert u.password is None
+ assert u.host == "host"
+ assert u.path == "/"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+
+class TestQuery_String:
+ def test_simple(self):
+ u = URL("?query")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host is None
+ assert u.path == ""
+ assert u.query_string == "query"
+ assert u.fragment == ""
+
+ def test_scheme_query(self):
+ u = URL("http:?query")
+ assert u.scheme == "http"
+ assert u.user is None
+ assert u.password is None
+ assert u.host is None
+ assert u.path == ""
+ assert u.query_string == "query"
+ assert u.fragment == ""
+
+ def test_abs_url_query(self):
+ u = URL("//host?query")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host == "host"
+ assert u.path == "/"
+ assert u.query_string == "query"
+ assert u.fragment == ""
+
+ def test_abs_url_path_query(self):
+ u = URL("//host/path?query")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host == "host"
+ assert u.path == "/path"
+ assert u.query_string == "query"
+ assert u.fragment == ""
+
+ def test_double_question_mark(self):
+ u = URL("//ho?st/path?query")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host == "ho"
+ assert u.path == "/"
+ assert u.query_string == "st/path?query"
+ assert u.fragment == ""
+
+ def test_complex_query(self):
+ u = URL("?a://b:c@d.e/f?g#h")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host is None
+ assert u.path == ""
+ assert u.query_string == "a://b:c@d.e/f?g"
+ assert u.fragment == "h"
+
+ def test_query_in_fragment(self):
+ u = URL("#?query")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host is None
+ assert u.path == ""
+ assert u.query_string == ""
+ assert u.fragment == "?query"
+
+
+class TestFragment:
+ def test_simple(self):
+ u = URL("#frag")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host is None
+ assert u.path == ""
+ assert u.query_string == ""
+ assert u.fragment == "frag"
+
+ def test_scheme_frag(self):
+ u = URL("http:#frag")
+ assert u.scheme == "http"
+ assert u.user is None
+ assert u.password is None
+ assert u.host is None
+ assert u.path == ""
+ assert u.query_string == ""
+ assert u.fragment == "frag"
+
+ def test_host_frag(self):
+ u = URL("//host#frag")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host == "host"
+ assert u.path == "/"
+ assert u.query_string == ""
+ assert u.fragment == "frag"
+
+ def test_scheme_path_frag(self):
+ u = URL("//host/path#frag")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host == "host"
+ assert u.path == "/path"
+ assert u.query_string == ""
+ assert u.fragment == "frag"
+
+ def test_scheme_query_frag(self):
+ u = URL("//host?query#frag")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host == "host"
+ assert u.path == "/"
+ assert u.query_string == "query"
+ assert u.fragment == "frag"
+
+ def test_host_frag_query(self):
+ u = URL("//ho#st/path?query")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host == "ho"
+ assert u.path == "/"
+ assert u.query_string == ""
+ assert u.fragment == "st/path?query"
+
+ def test_complex_frag(self):
+ u = URL("#a://b:c@d.e/f?g#h")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host is None
+ assert u.path == ""
+ assert u.query_string == ""
+ assert u.fragment == "a://b:c@d.e/f?g#h"
+
+
+class TestStripEmptyParts:
+ def test_all_empty(self):
+ with pytest.raises(ValueError):
+ URL("//@:?#")
+
+ def test_path_only(self):
+ u = URL("///path")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host is None
+ assert u.path == "/path"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_empty_user(self):
+ u = URL("//@host")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host == "host"
+ assert u.path == "/"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_empty_port(self):
+ u = URL("//host:")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host == "host"
+ assert u.path == "/"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_empty_port_and_path(self):
+ u = URL("//host:/")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host == "host"
+ assert u.path == "/"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_empty_path_only(self):
+ u = URL("/")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host is None
+ assert u.path == "/"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_relative_path_only(self):
+ u = URL("path")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host is None
+ assert u.path == "path"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_path(self):
+ u = URL("/path")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host is None
+ assert u.path == "/path"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_empty_query_with_path(self):
+ u = URL("/path?")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host is None
+ assert u.path == "/path"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_empty_query(self):
+ u = URL("?")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host is None
+ assert u.path == ""
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_empty_query_with_frag(self):
+ u = URL("?#frag")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host is None
+ assert u.path == ""
+ assert u.query_string == ""
+ assert u.fragment == "frag"
+
+ def test_path_empty_frag(self):
+ u = URL("/path#")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host is None
+ assert u.path == "/path"
+ assert u.query_string == ""
+ assert u.fragment == ""
+
+ def test_empty_path(self):
+ u = URL("#")
+ assert u.scheme == ""
+ assert u.user is None
+ assert u.password is None
+ assert u.host is None
+ assert u.path == ""
+ assert u.query_string == ""
+ assert u.fragment == ""
diff --git a/contrib/python/yarl/tests/test_url_query.py b/contrib/python/yarl/tests/test_url_query.py
new file mode 100644
index 0000000000..bcd2433cbc
--- /dev/null
+++ b/contrib/python/yarl/tests/test_url_query.py
@@ -0,0 +1,173 @@
+from typing import List, Tuple
+from urllib.parse import parse_qs, urlencode
+
+import pytest
+from multidict import MultiDict, MultiDictProxy
+
+from yarl import URL
+
+# ========================================
+# Basic chars in query values
+# ========================================
+
+URLS_WITH_BASIC_QUERY_VALUES: List[Tuple[URL, MultiDict]] = [
+ # Empty strings, keys and values
+ (
+ URL("http://example.com"),
+ MultiDict(),
+ ),
+ (
+ URL("http://example.com?a="),
+ MultiDict([("a", "")]),
+ ),
+ # ASCII chars
+ (
+ URL("http://example.com?a+b=c+d"),
+ MultiDict({"a b": "c d"}),
+ ),
+ (
+ URL("http://example.com?a=1&b=2"),
+ MultiDict([("a", "1"), ("b", "2")]),
+ ),
+ (
+ URL("http://example.com?a=1&b=2&a=3"),
+ MultiDict([("a", "1"), ("b", "2"), ("a", "3")]),
+ ),
+ # Non-ASCI BMP chars
+ (
+ URL("http://example.com?ключ=знач"),
+ MultiDict({"ключ": "знач"}),
+ ),
+ (
+ URL("http://example.com?foo=ᴜɴɪᴄᴏᴅᴇ"),
+ MultiDict({"foo": "ᴜɴɪᴄᴏᴅᴇ"}),
+ ),
+ # Non-BMP chars
+ (
+ URL("http://example.com?bar=𝕦𝕟𝕚𝕔𝕠𝕕𝕖"),
+ MultiDict({"bar": "𝕦𝕟𝕚𝕔𝕠𝕕𝕖"}),
+ ),
+]
+
+
+@pytest.mark.parametrize(
+ "original_url, expected_query",
+ URLS_WITH_BASIC_QUERY_VALUES,
+)
+def test_query_basic_parsing(original_url, expected_query):
+ assert isinstance(original_url.query, MultiDictProxy)
+ assert original_url.query == expected_query
+
+
+@pytest.mark.parametrize(
+ "original_url, expected_query",
+ URLS_WITH_BASIC_QUERY_VALUES,
+)
+def test_query_basic_update_query(original_url, expected_query):
+ new_url = original_url.update_query({})
+ assert new_url == original_url
+
+
+def test_query_dont_unqoute_twice():
+ sample_url = "http://base.place?" + urlencode({"a": "/////"})
+ query = urlencode({"url": sample_url})
+ full_url = "http://test_url.aha?" + query
+
+ url = URL(full_url)
+ assert url.query["url"] == sample_url
+
+
+# ========================================
+# Reserved chars in query values
+# ========================================
+
+# See https://github.com/python/cpython#87133, which introduced a new
+# `separator` keyword argument to `urllib.parse.parse_qs` (among others).
+# If the name doesn't exist as a variable in the function bytecode, the
+# test is expected to fail.
+_SEMICOLON_XFAIL = pytest.mark.xfail(
+ condition="separator" not in parse_qs.__code__.co_varnames,
+ reason=(
+ "Python versions < 3.7.10, < 3.8.8 and < 3.9.2 lack a fix for "
+ 'CVE-2021-23336 dropping ";" as a valid query parameter separator, '
+ "making this test fail."
+ ),
+ strict=True,
+)
+
+
+URLS_WITH_RESERVED_CHARS_IN_QUERY_VALUES = [
+ # Ampersand
+ (URL("http://127.0.0.1/?a=10&b=20"), 2, "10"),
+ (URL("http://127.0.0.1/?a=10%26b=20"), 1, "10&b=20"),
+ (URL("http://127.0.0.1/?a=10%3Bb=20"), 1, "10;b=20"),
+ # Semicolon, which is *not* a query parameter separator as of RFC3986
+ (URL("http://127.0.0.1/?a=10;b=20"), 1, "10;b=20"),
+ (URL("http://127.0.0.1/?a=10%26b=20"), 1, "10&b=20"),
+ (URL("http://127.0.0.1/?a=10%3Bb=20"), 1, "10;b=20"),
+]
+URLS_WITH_RESERVED_CHARS_IN_QUERY_VALUES_W_XFAIL = [
+ # Ampersand
+ *URLS_WITH_RESERVED_CHARS_IN_QUERY_VALUES[:3],
+ # Semicolon, which is *not* a query parameter separator as of RFC3986
+ # Mark the first of these as expecting to fail on old Python patch releases.
+ pytest.param(*URLS_WITH_RESERVED_CHARS_IN_QUERY_VALUES[3], marks=_SEMICOLON_XFAIL),
+ *URLS_WITH_RESERVED_CHARS_IN_QUERY_VALUES[4:],
+]
+
+
+@pytest.mark.parametrize(
+ "original_url, expected_query_len, expected_value_a",
+ URLS_WITH_RESERVED_CHARS_IN_QUERY_VALUES_W_XFAIL,
+)
+def test_query_separators_from_parsing(
+ original_url,
+ expected_query_len,
+ expected_value_a,
+):
+ assert len(original_url.query) == expected_query_len
+ assert original_url.query["a"] == expected_value_a
+
+
+@pytest.mark.parametrize(
+ "original_url, expected_query_len, expected_value_a",
+ URLS_WITH_RESERVED_CHARS_IN_QUERY_VALUES_W_XFAIL,
+)
+def test_query_separators_from_update_query(
+ original_url,
+ expected_query_len,
+ expected_value_a,
+):
+ new_url = original_url.update_query({"c": expected_value_a})
+ assert new_url.query["a"] == expected_value_a
+ assert new_url.query["c"] == expected_value_a
+
+
+@pytest.mark.parametrize(
+ "original_url, expected_query_len, expected_value_a",
+ URLS_WITH_RESERVED_CHARS_IN_QUERY_VALUES,
+)
+def test_query_separators_from_with_query(
+ original_url,
+ expected_query_len,
+ expected_value_a,
+):
+ new_url = original_url.with_query({"c": expected_value_a})
+ assert new_url.query["c"] == expected_value_a
+
+
+@pytest.mark.parametrize(
+ "original_url, expected_query_len, expected_value_a",
+ URLS_WITH_RESERVED_CHARS_IN_QUERY_VALUES,
+)
+def test_query_from_empty_update_query(
+ original_url,
+ expected_query_len,
+ expected_value_a,
+):
+ new_url = original_url.update_query({})
+
+ assert new_url.query["a"] == original_url.query["a"]
+
+ if "b" in original_url.query:
+ assert new_url.query["b"] == original_url.query["b"]
diff --git a/contrib/python/yarl/tests/test_url_update_netloc.py b/contrib/python/yarl/tests/test_url_update_netloc.py
new file mode 100644
index 0000000000..cf0cc1c44c
--- /dev/null
+++ b/contrib/python/yarl/tests/test_url_update_netloc.py
@@ -0,0 +1,228 @@
+import pytest
+
+from yarl import URL
+
+# with_*
+
+
+def test_with_scheme():
+ url = URL("http://example.com")
+ assert str(url.with_scheme("https")) == "https://example.com"
+
+
+def test_with_scheme_uppercased():
+ url = URL("http://example.com")
+ assert str(url.with_scheme("HTTPS")) == "https://example.com"
+
+
+def test_with_scheme_for_relative_url():
+ with pytest.raises(ValueError):
+ URL("path/to").with_scheme("http")
+
+
+def test_with_scheme_invalid_type():
+ url = URL("http://example.com")
+ with pytest.raises(TypeError):
+ assert str(url.with_scheme(123))
+
+
+def test_with_user():
+ url = URL("http://example.com")
+ assert str(url.with_user("john")) == "http://john@example.com"
+
+
+def test_with_user_non_ascii():
+ url = URL("http://example.com")
+ url2 = url.with_user("вася")
+ assert url2.raw_user == "%D0%B2%D0%B0%D1%81%D1%8F"
+ assert url2.user == "вася"
+ assert url2.raw_authority == "%D0%B2%D0%B0%D1%81%D1%8F@example.com"
+ assert url2.authority == "вася@example.com:80"
+
+
+def test_with_user_percent_encoded():
+ url = URL("http://example.com")
+ url2 = url.with_user("%cf%80")
+ assert url2.raw_user == "%25cf%2580"
+ assert url2.user == "%cf%80"
+ assert url2.raw_authority == "%25cf%2580@example.com"
+ assert url2.authority == "%cf%80@example.com:80"
+
+
+def test_with_user_for_relative_url():
+ with pytest.raises(ValueError):
+ URL("path/to").with_user("user")
+
+
+def test_with_user_invalid_type():
+ url = URL("http://example.com:123")
+ with pytest.raises(TypeError):
+ url.with_user(123)
+
+
+def test_with_user_None():
+ url = URL("http://john@example.com")
+ assert str(url.with_user(None)) == "http://example.com"
+
+
+def test_with_user_ipv6():
+ url = URL("http://john:pass@[::1]:8080/")
+ assert str(url.with_user(None)) == "http://[::1]:8080/"
+
+
+def test_with_user_None_when_password_present():
+ url = URL("http://john:pass@example.com")
+ assert str(url.with_user(None)) == "http://example.com"
+
+
+def test_with_password():
+ url = URL("http://john@example.com")
+ assert str(url.with_password("pass")) == "http://john:pass@example.com"
+
+
+def test_with_password_ipv6():
+ url = URL("http://john:pass@[::1]:8080/")
+ assert str(url.with_password(None)) == "http://john@[::1]:8080/"
+
+
+def test_with_password_non_ascii():
+ url = URL("http://john@example.com")
+ url2 = url.with_password("пароль")
+ assert url2.raw_password == "%D0%BF%D0%B0%D1%80%D0%BE%D0%BB%D1%8C"
+ assert url2.password == "пароль"
+ assert url2.raw_authority == "john:%D0%BF%D0%B0%D1%80%D0%BE%D0%BB%D1%8C@example.com"
+ assert url2.authority == "john:пароль@example.com:80"
+
+
+def test_with_password_percent_encoded():
+ url = URL("http://john@example.com")
+ url2 = url.with_password("%cf%80")
+ assert url2.raw_password == "%25cf%2580"
+ assert url2.password == "%cf%80"
+ assert url2.raw_authority == "john:%25cf%2580@example.com"
+ assert url2.authority == "john:%cf%80@example.com:80"
+
+
+def test_with_password_non_ascii_with_colon():
+ url = URL("http://john@example.com")
+ url2 = url.with_password("п:а")
+ assert url2.raw_password == "%D0%BF%3A%D0%B0"
+ assert url2.password == "п:а"
+
+
+def test_with_password_for_relative_url():
+ with pytest.raises(ValueError):
+ URL("path/to").with_password("pass")
+
+
+def test_with_password_None():
+ url = URL("http://john:pass@example.com")
+ assert str(url.with_password(None)) == "http://john@example.com"
+
+
+def test_with_password_invalid_type():
+ url = URL("http://example.com:123")
+ with pytest.raises(TypeError):
+ url.with_password(123)
+
+
+def test_with_password_and_empty_user():
+ url = URL("http://example.com")
+ url2 = url.with_password("pass")
+ assert url2.password == "pass"
+ assert url2.user is None
+ assert str(url2) == "http://:pass@example.com"
+
+
+def test_from_str_with_host_ipv4():
+ url = URL("http://host:80")
+ url = url.with_host("192.168.1.1")
+ assert url.raw_host == "192.168.1.1"
+
+
+def test_from_str_with_host_ipv6():
+ url = URL("http://host:80")
+ url = url.with_host("::1")
+ assert url.raw_host == "::1"
+
+
+def test_with_host():
+ url = URL("http://example.com:123")
+ assert str(url.with_host("example.org")) == "http://example.org:123"
+
+
+def test_with_host_empty():
+ url = URL("http://example.com:123")
+ with pytest.raises(ValueError):
+ url.with_host("")
+
+
+def test_with_host_non_ascii():
+ url = URL("http://example.com:123")
+ url2 = url.with_host("историк.рф")
+ assert url2.raw_host == "xn--h1aagokeh.xn--p1ai"
+ assert url2.host == "историк.рф"
+ assert url2.raw_authority == "xn--h1aagokeh.xn--p1ai:123"
+ assert url2.authority == "историк.рф:123"
+
+
+def test_with_host_percent_encoded():
+ url = URL("http://%25cf%2580%cf%80:%25cf%2580%cf%80@example.com:123")
+ url2 = url.with_host("%cf%80.org")
+ assert url2.raw_host == "%cf%80.org"
+ assert url2.host == "%cf%80.org"
+ assert url2.raw_authority == "%25cf%2580%CF%80:%25cf%2580%CF%80@%cf%80.org:123"
+ assert url2.authority == "%cf%80π:%cf%80π@%cf%80.org:123"
+
+
+def test_with_host_for_relative_url():
+ with pytest.raises(ValueError):
+ URL("path/to").with_host("example.com")
+
+
+def test_with_host_invalid_type():
+ url = URL("http://example.com:123")
+ with pytest.raises(TypeError):
+ url.with_host(None)
+
+
+def test_with_port():
+ url = URL("http://example.com")
+ assert str(url.with_port(8888)) == "http://example.com:8888"
+
+
+def test_with_port_with_no_port():
+ url = URL("http://example.com")
+ assert str(url.with_port(None)) == "http://example.com"
+
+
+def test_with_port_ipv6():
+ url = URL("http://[::1]:8080/")
+ assert str(url.with_port(80)) == "http://[::1]:80/"
+
+
+def test_with_port_keeps_query_and_fragment():
+ url = URL("http://example.com/?a=1#frag")
+ assert str(url.with_port(8888)) == "http://example.com:8888/?a=1#frag"
+
+
+def test_with_port_percent_encoded():
+ url = URL("http://user%name:pass%word@example.com/")
+ assert str(url.with_port(808)) == "http://user%25name:pass%25word@example.com:808/"
+
+
+def test_with_port_for_relative_url():
+ with pytest.raises(ValueError):
+ URL("path/to").with_port(1234)
+
+
+def test_with_port_invalid_type():
+ with pytest.raises(TypeError):
+ URL("http://example.com").with_port("123")
+ with pytest.raises(TypeError):
+ URL("http://example.com").with_port(True)
+
+
+def test_with_port_invalid_range():
+ with pytest.raises(ValueError):
+ URL("http://example.com").with_port(-1)
diff --git a/contrib/python/yarl/tests/ya.make b/contrib/python/yarl/tests/ya.make
new file mode 100644
index 0000000000..f86b0f6380
--- /dev/null
+++ b/contrib/python/yarl/tests/ya.make
@@ -0,0 +1,24 @@
+PY3TEST()
+
+PEERDIR(
+ contrib/python/yarl
+)
+
+TEST_SRCS(
+ test_cache.py
+ test_cached_property.py
+ test_normalize_path.py
+ test_pickle.py
+ test_quoting.py
+ test_update_query.py
+ test_url.py
+ test_url_build.py
+ test_url_cmp_and_hash.py
+ test_url_parsing.py
+ test_url_query.py
+ test_url_update_netloc.py
+)
+
+NO_LINT()
+
+END()
diff --git a/contrib/python/yarl/ya.make b/contrib/python/yarl/ya.make
new file mode 100644
index 0000000000..0c3d0ce434
--- /dev/null
+++ b/contrib/python/yarl/ya.make
@@ -0,0 +1,41 @@
+# Generated by devtools/yamaker (pypi).
+
+PY3_LIBRARY()
+
+VERSION(1.9.3)
+
+LICENSE(Apache-2.0)
+
+PEERDIR(
+ contrib/python/idna
+ contrib/python/multidict
+)
+
+NO_COMPILER_WARNINGS()
+
+NO_LINT()
+
+PY_SRCS(
+ TOP_LEVEL
+ yarl/__init__.py
+ yarl/__init__.pyi
+ yarl/_quoting.py
+ yarl/_quoting_c.pyi
+ yarl/_quoting_py.py
+ yarl/_url.py
+ CYTHON_C
+ yarl/_quoting_c.pyx
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/python/yarl/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+ yarl/py.typed
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/contrib/python/yarl/yarl/__init__.py b/contrib/python/yarl/yarl/__init__.py
new file mode 100644
index 0000000000..f43aecbc92
--- /dev/null
+++ b/contrib/python/yarl/yarl/__init__.py
@@ -0,0 +1,5 @@
+from ._url import URL, cache_clear, cache_configure, cache_info
+
+__version__ = "1.9.3"
+
+__all__ = ("URL", "cache_clear", "cache_configure", "cache_info")
diff --git a/contrib/python/yarl/yarl/_quoting.py b/contrib/python/yarl/yarl/_quoting.py
new file mode 100644
index 0000000000..8d1c705ff2
--- /dev/null
+++ b/contrib/python/yarl/yarl/_quoting.py
@@ -0,0 +1,18 @@
+import os
+import sys
+
+__all__ = ("_Quoter", "_Unquoter")
+
+
+NO_EXTENSIONS = bool(os.environ.get("YARL_NO_EXTENSIONS")) # type: bool
+if sys.implementation.name != "cpython":
+ NO_EXTENSIONS = True
+
+
+if not NO_EXTENSIONS: # pragma: no branch
+ try:
+ from ._quoting_c import _Quoter, _Unquoter # type: ignore[assignment]
+ except ImportError: # pragma: no cover
+ from ._quoting_py import _Quoter, _Unquoter # type: ignore[assignment]
+else:
+ from ._quoting_py import _Quoter, _Unquoter # type: ignore[assignment]
diff --git a/contrib/python/yarl/yarl/_quoting_c.pyx b/contrib/python/yarl/yarl/_quoting_c.pyx
new file mode 100644
index 0000000000..5335d17365
--- /dev/null
+++ b/contrib/python/yarl/yarl/_quoting_c.pyx
@@ -0,0 +1,371 @@
+# cython: language_level=3
+
+from cpython.exc cimport PyErr_NoMemory
+from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc
+from cpython.unicode cimport PyUnicode_DecodeASCII, PyUnicode_DecodeUTF8Stateful
+from libc.stdint cimport uint8_t, uint64_t
+from libc.string cimport memcpy, memset
+
+from string import ascii_letters, digits
+
+
+cdef str GEN_DELIMS = ":/?#[]@"
+cdef str SUB_DELIMS_WITHOUT_QS = "!$'()*,"
+cdef str SUB_DELIMS = SUB_DELIMS_WITHOUT_QS + '+?=;'
+cdef str RESERVED = GEN_DELIMS + SUB_DELIMS
+cdef str UNRESERVED = ascii_letters + digits + '-._~'
+cdef str ALLOWED = UNRESERVED + SUB_DELIMS_WITHOUT_QS
+cdef str QS = '+&=;'
+
+DEF BUF_SIZE = 8 * 1024 # 8KiB
+cdef char BUFFER[BUF_SIZE]
+
+cdef inline Py_UCS4 _to_hex(uint8_t v):
+ if v < 10:
+ return <Py_UCS4>(v+0x30) # ord('0') == 0x30
+ else:
+ return <Py_UCS4>(v+0x41-10) # ord('A') == 0x41
+
+
+cdef inline int _from_hex(Py_UCS4 v):
+ if '0' <= v <= '9':
+ return <int>(v) - 0x30 # ord('0') == 0x30
+ elif 'A' <= v <= 'F':
+ return <int>(v) - 0x41 + 10 # ord('A') == 0x41
+ elif 'a' <= v <= 'f':
+ return <int>(v) - 0x61 + 10 # ord('a') == 0x61
+ else:
+ return -1
+
+
+cdef inline int _is_lower_hex(Py_UCS4 v):
+ return 'a' <= v <= 'f'
+
+
+cdef inline Py_UCS4 _restore_ch(Py_UCS4 d1, Py_UCS4 d2):
+ cdef int digit1 = _from_hex(d1)
+ if digit1 < 0:
+ return <Py_UCS4>-1
+ cdef int digit2 = _from_hex(d2)
+ if digit2 < 0:
+ return <Py_UCS4>-1
+ return <Py_UCS4>(digit1 << 4 | digit2)
+
+
+cdef uint8_t ALLOWED_TABLE[16]
+cdef uint8_t ALLOWED_NOTQS_TABLE[16]
+
+
+cdef inline bint bit_at(uint8_t array[], uint64_t ch):
+ return array[ch >> 3] & (1 << (ch & 7))
+
+
+cdef inline void set_bit(uint8_t array[], uint64_t ch):
+ array[ch >> 3] |= (1 << (ch & 7))
+
+
+memset(ALLOWED_TABLE, 0, sizeof(ALLOWED_TABLE))
+memset(ALLOWED_NOTQS_TABLE, 0, sizeof(ALLOWED_NOTQS_TABLE))
+
+for i in range(128):
+ if chr(i) in ALLOWED:
+ set_bit(ALLOWED_TABLE, i)
+ set_bit(ALLOWED_NOTQS_TABLE, i)
+ if chr(i) in QS:
+ set_bit(ALLOWED_NOTQS_TABLE, i)
+
+# ----------------- writer ---------------------------
+
+cdef struct Writer:
+ char *buf
+ Py_ssize_t size
+ Py_ssize_t pos
+ bint changed
+
+
+cdef inline void _init_writer(Writer* writer):
+ writer.buf = &BUFFER[0]
+ writer.size = BUF_SIZE
+ writer.pos = 0
+ writer.changed = 0
+
+
+cdef inline void _release_writer(Writer* writer):
+ if writer.buf != BUFFER:
+ PyMem_Free(writer.buf)
+
+
+cdef inline int _write_char(Writer* writer, Py_UCS4 ch, bint changed):
+ cdef char * buf
+ cdef Py_ssize_t size
+
+ if writer.pos == writer.size:
+ # reallocate
+ size = writer.size + BUF_SIZE
+ if writer.buf == BUFFER:
+ buf = <char*>PyMem_Malloc(size)
+ if buf == NULL:
+ PyErr_NoMemory()
+ return -1
+ memcpy(buf, writer.buf, writer.size)
+ else:
+ buf = <char*>PyMem_Realloc(writer.buf, size)
+ if buf == NULL:
+ PyErr_NoMemory()
+ return -1
+ writer.buf = buf
+ writer.size = size
+ writer.buf[writer.pos] = <char>ch
+ writer.pos += 1
+ writer.changed |= changed
+ return 0
+
+
+cdef inline int _write_pct(Writer* writer, uint8_t ch, bint changed):
+ if _write_char(writer, '%', changed) < 0:
+ return -1
+ if _write_char(writer, _to_hex(<uint8_t>ch >> 4), changed) < 0:
+ return -1
+ return _write_char(writer, _to_hex(<uint8_t>ch & 0x0f), changed)
+
+
+cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
+ cdef uint64_t utf = <uint64_t> symbol
+
+ if utf < 0x80:
+ return _write_pct(writer, <uint8_t>utf, True)
+ elif utf < 0x800:
+ if _write_pct(writer, <uint8_t>(0xc0 | (utf >> 6)), True) < 0:
+ return -1
+ return _write_pct(writer, <uint8_t>(0x80 | (utf & 0x3f)), True)
+ elif 0xD800 <= utf <= 0xDFFF:
+ # surogate pair, ignored
+ return 0
+ elif utf < 0x10000:
+ if _write_pct(writer, <uint8_t>(0xe0 | (utf >> 12)), True) < 0:
+ return -1
+ if _write_pct(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f)),
+ True) < 0:
+ return -1
+ return _write_pct(writer, <uint8_t>(0x80 | (utf & 0x3f)), True)
+ elif utf > 0x10FFFF:
+ # symbol is too large
+ return 0
+ else:
+ if _write_pct(writer, <uint8_t>(0xf0 | (utf >> 18)), True) < 0:
+ return -1
+ if _write_pct(writer, <uint8_t>(0x80 | ((utf >> 12) & 0x3f)),
+ True) < 0:
+ return -1
+ if _write_pct(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f)),
+ True) < 0:
+ return -1
+ return _write_pct(writer, <uint8_t>(0x80 | (utf & 0x3f)), True)
+
+
+# --------------------- end writer --------------------------
+
+
+cdef class _Quoter:
+ cdef bint _qs
+ cdef bint _requote
+
+ cdef uint8_t _safe_table[16]
+ cdef uint8_t _protected_table[16]
+
+ def __init__(
+ self, *, str safe='', str protected='', bint qs=False, bint requote=True,
+ ):
+ cdef Py_UCS4 ch
+
+ self._qs = qs
+ self._requote = requote
+
+ if not self._qs:
+ memcpy(self._safe_table,
+ ALLOWED_NOTQS_TABLE,
+ sizeof(self._safe_table))
+ else:
+ memcpy(self._safe_table,
+ ALLOWED_TABLE,
+ sizeof(self._safe_table))
+ for ch in safe:
+ if ord(ch) > 127:
+ raise ValueError("Only safe symbols with ORD < 128 are allowed")
+ set_bit(self._safe_table, ch)
+
+ memset(self._protected_table, 0, sizeof(self._protected_table))
+ for ch in protected:
+ if ord(ch) > 127:
+ raise ValueError("Only safe symbols with ORD < 128 are allowed")
+ set_bit(self._safe_table, ch)
+ set_bit(self._protected_table, ch)
+
+ def __call__(self, val):
+ cdef Writer writer
+ if val is None:
+ return None
+ if type(val) is not str:
+ if isinstance(val, str):
+ # derived from str
+ val = str(val)
+ else:
+ raise TypeError("Argument should be str")
+ _init_writer(&writer)
+ try:
+ return self._do_quote(<str>val, &writer)
+ finally:
+ _release_writer(&writer)
+
+ cdef str _do_quote(self, str val, Writer *writer):
+ cdef Py_UCS4 ch
+ cdef int changed
+ cdef int idx = 0
+ cdef int length = len(val)
+
+ while idx < length:
+ ch = val[idx]
+ idx += 1
+ if ch == '%' and self._requote and idx <= length - 2:
+ ch = _restore_ch(val[idx], val[idx + 1])
+ if ch != <Py_UCS4>-1:
+ idx += 2
+ if ch < 128:
+ if bit_at(self._protected_table, ch):
+ if _write_pct(writer, ch, True) < 0:
+ raise
+ continue
+
+ if bit_at(self._safe_table, ch):
+ if _write_char(writer, ch, True) < 0:
+ raise
+ continue
+
+ changed = (_is_lower_hex(val[idx - 2]) or
+ _is_lower_hex(val[idx - 1]))
+ if _write_pct(writer, ch, changed) < 0:
+ raise
+ continue
+ else:
+ ch = '%'
+
+ if self._write(writer, ch) < 0:
+ raise
+
+ if not writer.changed:
+ return val
+ else:
+ return PyUnicode_DecodeASCII(writer.buf, writer.pos, "strict")
+
+ cdef inline int _write(self, Writer *writer, Py_UCS4 ch):
+ if self._qs:
+ if ch == ' ':
+ return _write_char(writer, '+', True)
+
+ if ch < 128 and bit_at(self._safe_table, ch):
+ return _write_char(writer, ch, False)
+
+ return _write_utf8(writer, ch)
+
+
+cdef class _Unquoter:
+ cdef str _unsafe
+ cdef bint _qs
+ cdef _Quoter _quoter
+ cdef _Quoter _qs_quoter
+
+ def __init__(self, *, unsafe='', qs=False):
+ self._unsafe = unsafe
+ self._qs = qs
+ self._quoter = _Quoter()
+ self._qs_quoter = _Quoter(qs=True)
+
+ def __call__(self, val):
+ if val is None:
+ return None
+ if type(val) is not str:
+ if isinstance(val, str):
+ # derived from str
+ val = str(val)
+ else:
+ raise TypeError("Argument should be str")
+ return self._do_unquote(<str>val)
+
+ cdef str _do_unquote(self, str val):
+ if len(val) == 0:
+ return val
+ cdef list ret = []
+ cdef char buffer[4]
+ cdef Py_ssize_t buflen = 0
+ cdef Py_ssize_t consumed
+ cdef str unquoted
+ cdef Py_UCS4 ch = 0
+ cdef Py_ssize_t idx = 0
+ cdef Py_ssize_t length = len(val)
+ cdef Py_ssize_t start_pct
+
+ while idx < length:
+ ch = val[idx]
+ idx += 1
+ if ch == '%' and idx <= length - 2:
+ ch = _restore_ch(val[idx], val[idx + 1])
+ if ch != <Py_UCS4>-1:
+ idx += 2
+ assert buflen < 4
+ buffer[buflen] = ch
+ buflen += 1
+ try:
+ unquoted = PyUnicode_DecodeUTF8Stateful(buffer, buflen,
+ NULL, &consumed)
+ except UnicodeDecodeError:
+ start_pct = idx - buflen * 3
+ buffer[0] = ch
+ buflen = 1
+ ret.append(val[start_pct : idx - 3])
+ try:
+ unquoted = PyUnicode_DecodeUTF8Stateful(buffer, buflen,
+ NULL, &consumed)
+ except UnicodeDecodeError:
+ buflen = 0
+ ret.append(val[idx - 3 : idx])
+ continue
+ if not unquoted:
+ assert consumed == 0
+ continue
+ assert consumed == buflen
+ buflen = 0
+ if self._qs and unquoted in '+=&;':
+ ret.append(self._qs_quoter(unquoted))
+ elif unquoted in self._unsafe:
+ ret.append(self._quoter(unquoted))
+ else:
+ ret.append(unquoted)
+ continue
+ else:
+ ch = '%'
+
+ if buflen:
+ start_pct = idx - 1 - buflen * 3
+ ret.append(val[start_pct : idx - 1])
+ buflen = 0
+
+ if ch == '+':
+ if not self._qs or ch in self._unsafe:
+ ret.append('+')
+ else:
+ ret.append(' ')
+ continue
+
+ if ch in self._unsafe:
+ ret.append('%')
+ h = hex(ord(ch)).upper()[2:]
+ for ch in h:
+ ret.append(ch)
+ continue
+
+ ret.append(ch)
+
+ if buflen:
+ ret.append(val[length - buflen * 3 : length])
+
+ return ''.join(ret)
diff --git a/contrib/python/yarl/yarl/_quoting_py.py b/contrib/python/yarl/yarl/_quoting_py.py
new file mode 100644
index 0000000000..585a1da804
--- /dev/null
+++ b/contrib/python/yarl/yarl/_quoting_py.py
@@ -0,0 +1,197 @@
+import codecs
+import re
+from string import ascii_letters, ascii_lowercase, digits
+from typing import Optional, cast
+
+BASCII_LOWERCASE = ascii_lowercase.encode("ascii")
+BPCT_ALLOWED = {f"%{i:02X}".encode("ascii") for i in range(256)}
+GEN_DELIMS = ":/?#[]@"
+SUB_DELIMS_WITHOUT_QS = "!$'()*,"
+SUB_DELIMS = SUB_DELIMS_WITHOUT_QS + "+&=;"
+RESERVED = GEN_DELIMS + SUB_DELIMS
+UNRESERVED = ascii_letters + digits + "-._~"
+ALLOWED = UNRESERVED + SUB_DELIMS_WITHOUT_QS
+
+
+_IS_HEX = re.compile(b"[A-Z0-9][A-Z0-9]")
+_IS_HEX_STR = re.compile("[A-Fa-f0-9][A-Fa-f0-9]")
+
+utf8_decoder = codecs.getincrementaldecoder("utf-8")
+
+
+class _Quoter:
+ def __init__(
+ self,
+ *,
+ safe: str = "",
+ protected: str = "",
+ qs: bool = False,
+ requote: bool = True,
+ ) -> None:
+ self._safe = safe
+ self._protected = protected
+ self._qs = qs
+ self._requote = requote
+
+ def __call__(self, val: Optional[str]) -> Optional[str]:
+ if val is None:
+ return None
+ if not isinstance(val, str):
+ raise TypeError("Argument should be str")
+ if not val:
+ return ""
+ bval = cast(str, val).encode("utf8", errors="ignore")
+ ret = bytearray()
+ pct = bytearray()
+ safe = self._safe
+ safe += ALLOWED
+ if not self._qs:
+ safe += "+&=;"
+ safe += self._protected
+ bsafe = safe.encode("ascii")
+ idx = 0
+ while idx < len(bval):
+ ch = bval[idx]
+ idx += 1
+
+ if pct:
+ if ch in BASCII_LOWERCASE:
+ ch = ch - 32 # convert to uppercase
+ pct.append(ch)
+ if len(pct) == 3: # pragma: no branch # peephole optimizer
+ buf = pct[1:]
+ if not _IS_HEX.match(buf):
+ ret.extend(b"%25")
+ pct.clear()
+ idx -= 2
+ continue
+ try:
+ unquoted = chr(int(pct[1:].decode("ascii"), base=16))
+ except ValueError:
+ ret.extend(b"%25")
+ pct.clear()
+ idx -= 2
+ continue
+
+ if unquoted in self._protected:
+ ret.extend(pct)
+ elif unquoted in safe:
+ ret.append(ord(unquoted))
+ else:
+ ret.extend(pct)
+ pct.clear()
+
+ # special case, if we have only one char after "%"
+ elif len(pct) == 2 and idx == len(bval):
+ ret.extend(b"%25")
+ pct.clear()
+ idx -= 1
+
+ continue
+
+ elif ch == ord("%") and self._requote:
+ pct.clear()
+ pct.append(ch)
+
+ # special case if "%" is last char
+ if idx == len(bval):
+ ret.extend(b"%25")
+
+ continue
+
+ if self._qs:
+ if ch == ord(" "):
+ ret.append(ord("+"))
+ continue
+ if ch in bsafe:
+ ret.append(ch)
+ continue
+
+ ret.extend((f"%{ch:02X}").encode("ascii"))
+
+ ret2 = ret.decode("ascii")
+ if ret2 == val:
+ return val
+ return ret2
+
+
+class _Unquoter:
+ def __init__(self, *, unsafe: str = "", qs: bool = False) -> None:
+ self._unsafe = unsafe
+ self._qs = qs
+ self._quoter = _Quoter()
+ self._qs_quoter = _Quoter(qs=True)
+
+ def __call__(self, val: Optional[str]) -> Optional[str]:
+ if val is None:
+ return None
+ if not isinstance(val, str):
+ raise TypeError("Argument should be str")
+ if not val:
+ return ""
+ decoder = cast(codecs.BufferedIncrementalDecoder, utf8_decoder())
+ ret = []
+ idx = 0
+ while idx < len(val):
+ ch = val[idx]
+ idx += 1
+ if ch == "%" and idx <= len(val) - 2:
+ pct = val[idx : idx + 2]
+ if _IS_HEX_STR.fullmatch(pct):
+ b = bytes([int(pct, base=16)])
+ idx += 2
+ try:
+ unquoted = decoder.decode(b)
+ except UnicodeDecodeError:
+ start_pct = idx - 3 - len(decoder.buffer) * 3
+ ret.append(val[start_pct : idx - 3])
+ decoder.reset()
+ try:
+ unquoted = decoder.decode(b)
+ except UnicodeDecodeError:
+ ret.append(val[idx - 3 : idx])
+ continue
+ if not unquoted:
+ continue
+ if self._qs and unquoted in "+=&;":
+ to_add = self._qs_quoter(unquoted)
+ if to_add is None: # pragma: no cover
+ raise RuntimeError("Cannot quote None")
+ ret.append(to_add)
+ elif unquoted in self._unsafe:
+ to_add = self._quoter(unquoted)
+ if to_add is None: # pragma: no cover
+ raise RuntimeError("Cannot quote None")
+ ret.append(to_add)
+ else:
+ ret.append(unquoted)
+ continue
+
+ if decoder.buffer:
+ start_pct = idx - 1 - len(decoder.buffer) * 3
+ ret.append(val[start_pct : idx - 1])
+ decoder.reset()
+
+ if ch == "+":
+ if not self._qs or ch in self._unsafe:
+ ret.append("+")
+ else:
+ ret.append(" ")
+ continue
+
+ if ch in self._unsafe:
+ ret.append("%")
+ h = hex(ord(ch)).upper()[2:]
+ for ch in h:
+ ret.append(ch)
+ continue
+
+ ret.append(ch)
+
+ if decoder.buffer:
+ ret.append(val[-len(decoder.buffer) * 3 :])
+
+ ret2 = "".join(ret)
+ if ret2 == val:
+ return val
+ return ret2
diff --git a/contrib/python/yarl/yarl/_url.py b/contrib/python/yarl/yarl/_url.py
new file mode 100644
index 0000000000..c8f2acb39b
--- /dev/null
+++ b/contrib/python/yarl/yarl/_url.py
@@ -0,0 +1,1198 @@
+import functools
+import math
+import warnings
+from collections.abc import Mapping, Sequence
+from contextlib import suppress
+from ipaddress import ip_address
+from urllib.parse import SplitResult, parse_qsl, quote, urljoin, urlsplit, urlunsplit
+
+import idna
+from multidict import MultiDict, MultiDictProxy
+
+from ._quoting import _Quoter, _Unquoter
+
+DEFAULT_PORTS = {"http": 80, "https": 443, "ws": 80, "wss": 443}
+
+sentinel = object()
+
+
+def rewrite_module(obj: object) -> object:
+ obj.__module__ = "yarl"
+ return obj
+
+
+class cached_property:
+ """Use as a class method decorator. It operates almost exactly like
+ the Python `@property` decorator, but it puts the result of the
+ method it decorates into the instance dict after the first call,
+ effectively replacing the function it decorates with an instance
+ variable. It is, in Python parlance, a data descriptor.
+
+ """
+
+ def __init__(self, wrapped):
+ self.wrapped = wrapped
+ try:
+ self.__doc__ = wrapped.__doc__
+ except AttributeError: # pragma: no cover
+ self.__doc__ = ""
+ self.name = wrapped.__name__
+
+ def __get__(self, inst, owner, _sentinel=sentinel):
+ if inst is None:
+ return self
+ val = inst._cache.get(self.name, _sentinel)
+ if val is not _sentinel:
+ return val
+ val = self.wrapped(inst)
+ inst._cache[self.name] = val
+ return val
+
+ def __set__(self, inst, value):
+ raise AttributeError("cached property is read-only")
+
+
+def _normalize_path_segments(segments):
+ """Drop '.' and '..' from a sequence of str segments"""
+
+ resolved_path = []
+
+ for seg in segments:
+ if seg == "..":
+ # ignore any .. segments that would otherwise cause an
+ # IndexError when popped from resolved_path if
+ # resolving for rfc3986
+ with suppress(IndexError):
+ resolved_path.pop()
+ elif seg != ".":
+ resolved_path.append(seg)
+
+ if segments and segments[-1] in (".", ".."):
+ # do some post-processing here.
+ # if the last segment was a relative dir,
+ # then we need to append the trailing '/'
+ resolved_path.append("")
+
+ return resolved_path
+
+
+@rewrite_module
+class URL:
+ # Don't derive from str
+ # follow pathlib.Path design
+ # probably URL will not suffer from pathlib problems:
+ # it's intended for libraries like aiohttp,
+ # not to be passed into standard library functions like os.open etc.
+
+ # URL grammar (RFC 3986)
+ # pct-encoded = "%" HEXDIG HEXDIG
+ # reserved = gen-delims / sub-delims
+ # gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@"
+ # sub-delims = "!" / "$" / "&" / "'" / "(" / ")"
+ # / "*" / "+" / "," / ";" / "="
+ # unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
+ # URI = scheme ":" hier-part [ "?" query ] [ "#" fragment ]
+ # hier-part = "//" authority path-abempty
+ # / path-absolute
+ # / path-rootless
+ # / path-empty
+ # scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
+ # authority = [ userinfo "@" ] host [ ":" port ]
+ # userinfo = *( unreserved / pct-encoded / sub-delims / ":" )
+ # host = IP-literal / IPv4address / reg-name
+ # IP-literal = "[" ( IPv6address / IPvFuture ) "]"
+ # IPvFuture = "v" 1*HEXDIG "." 1*( unreserved / sub-delims / ":" )
+ # IPv6address = 6( h16 ":" ) ls32
+ # / "::" 5( h16 ":" ) ls32
+ # / [ h16 ] "::" 4( h16 ":" ) ls32
+ # / [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32
+ # / [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32
+ # / [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32
+ # / [ *4( h16 ":" ) h16 ] "::" ls32
+ # / [ *5( h16 ":" ) h16 ] "::" h16
+ # / [ *6( h16 ":" ) h16 ] "::"
+ # ls32 = ( h16 ":" h16 ) / IPv4address
+ # ; least-significant 32 bits of address
+ # h16 = 1*4HEXDIG
+ # ; 16 bits of address represented in hexadecimal
+ # IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet
+ # dec-octet = DIGIT ; 0-9
+ # / %x31-39 DIGIT ; 10-99
+ # / "1" 2DIGIT ; 100-199
+ # / "2" %x30-34 DIGIT ; 200-249
+ # / "25" %x30-35 ; 250-255
+ # reg-name = *( unreserved / pct-encoded / sub-delims )
+ # port = *DIGIT
+ # path = path-abempty ; begins with "/" or is empty
+ # / path-absolute ; begins with "/" but not "//"
+ # / path-noscheme ; begins with a non-colon segment
+ # / path-rootless ; begins with a segment
+ # / path-empty ; zero characters
+ # path-abempty = *( "/" segment )
+ # path-absolute = "/" [ segment-nz *( "/" segment ) ]
+ # path-noscheme = segment-nz-nc *( "/" segment )
+ # path-rootless = segment-nz *( "/" segment )
+ # path-empty = 0<pchar>
+ # segment = *pchar
+ # segment-nz = 1*pchar
+ # segment-nz-nc = 1*( unreserved / pct-encoded / sub-delims / "@" )
+ # ; non-zero-length segment without any colon ":"
+ # pchar = unreserved / pct-encoded / sub-delims / ":" / "@"
+ # query = *( pchar / "/" / "?" )
+ # fragment = *( pchar / "/" / "?" )
+ # URI-reference = URI / relative-ref
+ # relative-ref = relative-part [ "?" query ] [ "#" fragment ]
+ # relative-part = "//" authority path-abempty
+ # / path-absolute
+ # / path-noscheme
+ # / path-empty
+ # absolute-URI = scheme ":" hier-part [ "?" query ]
+ __slots__ = ("_cache", "_val")
+
+ _QUOTER = _Quoter(requote=False)
+ _REQUOTER = _Quoter()
+ _PATH_QUOTER = _Quoter(safe="@:", protected="/+", requote=False)
+ _PATH_REQUOTER = _Quoter(safe="@:", protected="/+")
+ _QUERY_QUOTER = _Quoter(safe="?/:@", protected="=+&;", qs=True, requote=False)
+ _QUERY_REQUOTER = _Quoter(safe="?/:@", protected="=+&;", qs=True)
+ _QUERY_PART_QUOTER = _Quoter(safe="?/:@", qs=True, requote=False)
+ _FRAGMENT_QUOTER = _Quoter(safe="?/:@", requote=False)
+ _FRAGMENT_REQUOTER = _Quoter(safe="?/:@")
+
+ _UNQUOTER = _Unquoter()
+ _PATH_UNQUOTER = _Unquoter(unsafe="+")
+ _QS_UNQUOTER = _Unquoter(qs=True)
+
+ def __new__(cls, val="", *, encoded=False, strict=None):
+ if strict is not None: # pragma: no cover
+ warnings.warn("strict parameter is ignored")
+ if type(val) is cls:
+ return val
+ if type(val) is str:
+ val = urlsplit(val)
+ elif type(val) is SplitResult:
+ if not encoded:
+ raise ValueError("Cannot apply decoding to SplitResult")
+ elif isinstance(val, str):
+ val = urlsplit(str(val))
+ else:
+ raise TypeError("Constructor parameter should be str")
+
+ if not encoded:
+ if not val[1]: # netloc
+ netloc = ""
+ host = ""
+ else:
+ host = val.hostname
+ if host is None:
+ raise ValueError("Invalid URL: host is required for absolute urls")
+
+ try:
+ port = val.port
+ except ValueError as e:
+ raise ValueError(
+ "Invalid URL: port can't be converted to integer"
+ ) from e
+
+ netloc = cls._make_netloc(
+ val.username, val.password, host, port, encode=True, requote=True
+ )
+ path = cls._PATH_REQUOTER(val[2])
+ if netloc:
+ path = cls._normalize_path(path)
+
+ cls._validate_authority_uri_abs_path(host=host, path=path)
+ query = cls._QUERY_REQUOTER(val[3])
+ fragment = cls._FRAGMENT_REQUOTER(val[4])
+ val = SplitResult(val[0], netloc, path, query, fragment)
+
+ self = object.__new__(cls)
+ self._val = val
+ self._cache = {}
+ return self
+
+ @classmethod
+ def build(
+ cls,
+ *,
+ scheme="",
+ authority="",
+ user=None,
+ password=None,
+ host="",
+ port=None,
+ path="",
+ query=None,
+ query_string="",
+ fragment="",
+ encoded=False,
+ ):
+ """Creates and returns a new URL"""
+
+ if authority and (user or password or host or port):
+ raise ValueError(
+ 'Can\'t mix "authority" with "user", "password", "host" or "port".'
+ )
+ if port and not host:
+ raise ValueError('Can\'t build URL with "port" but without "host".')
+ if query and query_string:
+ raise ValueError('Only one of "query" or "query_string" should be passed')
+ if (
+ scheme is None
+ or authority is None
+ or host is None
+ or path is None
+ or query_string is None
+ or fragment is None
+ ):
+ raise TypeError(
+ 'NoneType is illegal for "scheme", "authority", "host", "path", '
+ '"query_string", and "fragment" args, use empty string instead.'
+ )
+
+ if authority:
+ if encoded:
+ netloc = authority
+ else:
+ tmp = SplitResult("", authority, "", "", "")
+ netloc = cls._make_netloc(
+ tmp.username, tmp.password, tmp.hostname, tmp.port, encode=True
+ )
+ elif not user and not password and not host and not port:
+ netloc = ""
+ else:
+ netloc = cls._make_netloc(
+ user, password, host, port, encode=not encoded, encode_host=not encoded
+ )
+ if not encoded:
+ path = cls._PATH_QUOTER(path)
+ if netloc:
+ path = cls._normalize_path(path)
+
+ cls._validate_authority_uri_abs_path(host=host, path=path)
+ query_string = cls._QUERY_QUOTER(query_string)
+ fragment = cls._FRAGMENT_QUOTER(fragment)
+
+ url = cls(
+ SplitResult(scheme, netloc, path, query_string, fragment), encoded=True
+ )
+
+ if query:
+ return url.with_query(query)
+ else:
+ return url
+
+ def __init_subclass__(cls):
+ raise TypeError(f"Inheriting a class {cls!r} from URL is forbidden")
+
+ def __str__(self):
+ val = self._val
+ if not val.path and self.is_absolute() and (val.query or val.fragment):
+ val = val._replace(path="/")
+ return urlunsplit(val)
+
+ def __repr__(self):
+ return f"{self.__class__.__name__}('{str(self)}')"
+
+ def __bytes__(self):
+ return str(self).encode("ascii")
+
+ def __eq__(self, other):
+ if not type(other) is URL:
+ return NotImplemented
+
+ val1 = self._val
+ if not val1.path and self.is_absolute():
+ val1 = val1._replace(path="/")
+
+ val2 = other._val
+ if not val2.path and other.is_absolute():
+ val2 = val2._replace(path="/")
+
+ return val1 == val2
+
+ def __hash__(self):
+ ret = self._cache.get("hash")
+ if ret is None:
+ val = self._val
+ if not val.path and self.is_absolute():
+ val = val._replace(path="/")
+ ret = self._cache["hash"] = hash(val)
+ return ret
+
+ def __le__(self, other):
+ if not type(other) is URL:
+ return NotImplemented
+ return self._val <= other._val
+
+ def __lt__(self, other):
+ if not type(other) is URL:
+ return NotImplemented
+ return self._val < other._val
+
+ def __ge__(self, other):
+ if not type(other) is URL:
+ return NotImplemented
+ return self._val >= other._val
+
+ def __gt__(self, other):
+ if not type(other) is URL:
+ return NotImplemented
+ return self._val > other._val
+
+ def __truediv__(self, name):
+ if not isinstance(name, str):
+ return NotImplemented
+ return self._make_child((str(name),))
+
+ def __mod__(self, query):
+ return self.update_query(query)
+
+ def __bool__(self) -> bool:
+ return bool(
+ self._val.netloc or self._val.path or self._val.query or self._val.fragment
+ )
+
+ def __getstate__(self):
+ return (self._val,)
+
+ def __setstate__(self, state):
+ if state[0] is None and isinstance(state[1], dict):
+ # default style pickle
+ self._val = state[1]["_val"]
+ else:
+ self._val, *unused = state
+ self._cache = {}
+
+ def is_absolute(self):
+ """A check for absolute URLs.
+
+ Return True for absolute ones (having scheme or starting
+ with //), False otherwise.
+
+ """
+ return self.raw_host is not None
+
+ def is_default_port(self):
+ """A check for default port.
+
+ Return True if port is default for specified scheme,
+ e.g. 'http://python.org' or 'http://python.org:80', False
+ otherwise.
+
+ """
+ if self.port is None:
+ return False
+ default = DEFAULT_PORTS.get(self.scheme)
+ if default is None:
+ return False
+ return self.port == default
+
+ def origin(self):
+ """Return an URL with scheme, host and port parts only.
+
+ user, password, path, query and fragment are removed.
+
+ """
+ # TODO: add a keyword-only option for keeping user/pass maybe?
+ if not self.is_absolute():
+ raise ValueError("URL should be absolute")
+ if not self._val.scheme:
+ raise ValueError("URL should have scheme")
+ v = self._val
+ netloc = self._make_netloc(None, None, v.hostname, v.port)
+ val = v._replace(netloc=netloc, path="", query="", fragment="")
+ return URL(val, encoded=True)
+
+ def relative(self):
+ """Return a relative part of the URL.
+
+ scheme, user, password, host and port are removed.
+
+ """
+ if not self.is_absolute():
+ raise ValueError("URL should be absolute")
+ val = self._val._replace(scheme="", netloc="")
+ return URL(val, encoded=True)
+
+ @property
+ def scheme(self):
+ """Scheme for absolute URLs.
+
+ Empty string for relative URLs or URLs starting with //
+
+ """
+ return self._val.scheme
+
+ @property
+ def raw_authority(self):
+ """Encoded authority part of URL.
+
+ Empty string for relative URLs.
+
+ """
+ return self._val.netloc
+
+ @cached_property
+ def authority(self):
+ """Decoded authority part of URL.
+
+ Empty string for relative URLs.
+
+ """
+ return self._make_netloc(
+ self.user, self.password, self.host, self.port, encode_host=False
+ )
+
+ @property
+ def raw_user(self):
+ """Encoded user part of URL.
+
+ None if user is missing.
+
+ """
+ # not .username
+ ret = self._val.username
+ if not ret:
+ return None
+ return ret
+
+ @cached_property
+ def user(self):
+ """Decoded user part of URL.
+
+ None if user is missing.
+
+ """
+ return self._UNQUOTER(self.raw_user)
+
+ @property
+ def raw_password(self):
+ """Encoded password part of URL.
+
+ None if password is missing.
+
+ """
+ return self._val.password
+
+ @cached_property
+ def password(self):
+ """Decoded password part of URL.
+
+ None if password is missing.
+
+ """
+ return self._UNQUOTER(self.raw_password)
+
+ @property
+ def raw_host(self):
+ """Encoded host part of URL.
+
+ None for relative URLs.
+
+ """
+ # Use host instead of hostname for sake of shortness
+ # May add .hostname prop later
+ return self._val.hostname
+
+ @cached_property
+ def host(self):
+ """Decoded host part of URL.
+
+ None for relative URLs.
+
+ """
+ raw = self.raw_host
+ if raw is None:
+ return None
+ if "%" in raw:
+ # Hack for scoped IPv6 addresses like
+ # fe80::2%Проверка
+ # presence of '%' sign means only IPv6 address, so idna is useless.
+ return raw
+ return _idna_decode(raw)
+
+ @property
+ def port(self):
+ """Port part of URL, with scheme-based fallback.
+
+ None for relative URLs or URLs without explicit port and
+ scheme without default port substitution.
+
+ """
+ return self._val.port or DEFAULT_PORTS.get(self._val.scheme)
+
+ @property
+ def explicit_port(self):
+ """Port part of URL, without scheme-based fallback.
+
+ None for relative URLs or URLs without explicit port.
+
+ """
+ return self._val.port
+
+ @property
+ def raw_path(self):
+ """Encoded path of URL.
+
+ / for absolute URLs without path part.
+
+ """
+ ret = self._val.path
+ if not ret and self.is_absolute():
+ ret = "/"
+ return ret
+
+ @cached_property
+ def path(self):
+ """Decoded path of URL.
+
+ / for absolute URLs without path part.
+
+ """
+ return self._PATH_UNQUOTER(self.raw_path)
+
+ @cached_property
+ def query(self):
+ """A MultiDictProxy representing parsed query parameters in decoded
+ representation.
+
+ Empty value if URL has no query part.
+
+ """
+ ret = MultiDict(parse_qsl(self.raw_query_string, keep_blank_values=True))
+ return MultiDictProxy(ret)
+
+ @property
+ def raw_query_string(self):
+ """Encoded query part of URL.
+
+ Empty string if query is missing.
+
+ """
+ return self._val.query
+
+ @cached_property
+ def query_string(self):
+ """Decoded query part of URL.
+
+ Empty string if query is missing.
+
+ """
+ return self._QS_UNQUOTER(self.raw_query_string)
+
+ @cached_property
+ def path_qs(self):
+ """Decoded path of URL with query."""
+ if not self.query_string:
+ return self.path
+ return f"{self.path}?{self.query_string}"
+
+ @cached_property
+ def raw_path_qs(self):
+ """Encoded path of URL with query."""
+ if not self.raw_query_string:
+ return self.raw_path
+ return f"{self.raw_path}?{self.raw_query_string}"
+
+ @property
+ def raw_fragment(self):
+ """Encoded fragment part of URL.
+
+ Empty string if fragment is missing.
+
+ """
+ return self._val.fragment
+
+ @cached_property
+ def fragment(self):
+ """Decoded fragment part of URL.
+
+ Empty string if fragment is missing.
+
+ """
+ return self._UNQUOTER(self.raw_fragment)
+
+ @cached_property
+ def raw_parts(self):
+ """A tuple containing encoded *path* parts.
+
+ ('/',) for absolute URLs if *path* is missing.
+
+ """
+ path = self._val.path
+ if self.is_absolute():
+ if not path:
+ parts = ["/"]
+ else:
+ parts = ["/"] + path[1:].split("/")
+ else:
+ if path.startswith("/"):
+ parts = ["/"] + path[1:].split("/")
+ else:
+ parts = path.split("/")
+ return tuple(parts)
+
+ @cached_property
+ def parts(self):
+ """A tuple containing decoded *path* parts.
+
+ ('/',) for absolute URLs if *path* is missing.
+
+ """
+ return tuple(self._UNQUOTER(part) for part in self.raw_parts)
+
+ @cached_property
+ def parent(self):
+ """A new URL with last part of path removed and cleaned up query and
+ fragment.
+
+ """
+ path = self.raw_path
+ if not path or path == "/":
+ if self.raw_fragment or self.raw_query_string:
+ return URL(self._val._replace(query="", fragment=""), encoded=True)
+ return self
+ parts = path.split("/")
+ val = self._val._replace(path="/".join(parts[:-1]), query="", fragment="")
+ return URL(val, encoded=True)
+
+ @cached_property
+ def raw_name(self):
+ """The last part of raw_parts."""
+ parts = self.raw_parts
+ if self.is_absolute():
+ parts = parts[1:]
+ if not parts:
+ return ""
+ else:
+ return parts[-1]
+ else:
+ return parts[-1]
+
+ @cached_property
+ def name(self):
+ """The last part of parts."""
+ return self._UNQUOTER(self.raw_name)
+
+ @cached_property
+ def raw_suffix(self):
+ name = self.raw_name
+ i = name.rfind(".")
+ if 0 < i < len(name) - 1:
+ return name[i:]
+ else:
+ return ""
+
+ @cached_property
+ def suffix(self):
+ return self._UNQUOTER(self.raw_suffix)
+
+ @cached_property
+ def raw_suffixes(self):
+ name = self.raw_name
+ if name.endswith("."):
+ return ()
+ name = name.lstrip(".")
+ return tuple("." + suffix for suffix in name.split(".")[1:])
+
+ @cached_property
+ def suffixes(self):
+ return tuple(self._UNQUOTER(suffix) for suffix in self.raw_suffixes)
+
+ @staticmethod
+ def _validate_authority_uri_abs_path(host, path):
+ """Ensure that path in URL with authority starts with a leading slash.
+
+ Raise ValueError if not.
+ """
+ if len(host) > 0 and len(path) > 0 and not path.startswith("/"):
+ raise ValueError(
+ "Path in a URL with authority should start with a slash ('/') if set"
+ )
+
def _make_child(self, segments, encoded=False):
    """add segments to self._val.path, accounting for absolute vs relative paths"""
    # keep the trailing slash if the last segment ends with /
    parsed = [""] if segments and segments[-1][-1:] == "/" else []
    # Build the new tail in reverse so the trailing-slash marker added
    # above ends up last after the final reverse().
    for seg in reversed(segments):
        if not seg:
            continue
        if seg[0] == "/":
            raise ValueError(
                f"Appending path {seg!r} starting from slash is forbidden"
            )
        seg = seg if encoded else self._PATH_QUOTER(seg)
        if "/" in seg:
            # A segment may itself contain slashes; split it, dropping
            # empty and "." pseudo-segments.
            parsed += (
                sub for sub in reversed(seg.split("/")) if sub and sub != "."
            )
        elif seg != ".":
            parsed.append(seg)
    parsed.reverse()
    old_path = self._val.path
    if old_path:
        # Graft the new tail onto the existing path, minus any trailing slash.
        parsed = [*old_path.rstrip("/").split("/"), *parsed]
    if self.is_absolute():
        parsed = _normalize_path_segments(parsed)
        if parsed and parsed[0] != "":
            # inject a leading slash when adding a path to an absolute URL
            # where there was none before
            parsed = ["", *parsed]
    new_path = "/".join(parsed)
    # Query and fragment are dropped, matching pathlib-style joining.
    return URL(
        self._val._replace(path=new_path, query="", fragment=""), encoded=True
    )
+
@classmethod
def _normalize_path(cls, path):
    """Drop '.' and '..' segments from a string path."""
    # preserve the "/" root element of absolute paths, copying it to the
    # normalised output as per sections 5.2.4 and 6.2.2.3 of rfc3986.
    prefix = "/" if path.startswith("/") else ""
    if prefix:
        path = path[1:]
    return prefix + "/".join(_normalize_path_segments(path.split("/")))
+
@classmethod
def _encode_host(cls, host, human=False):
    """Encode *host* for the netloc: IDNA for names, compressed form for IPs."""
    try:
        # Split off an IPv6 zone id ("%eth0") before parsing the address.
        ip, sep, zone = host.partition("%")
        ip = ip_address(ip)
    except ValueError:
        # Not an IP literal -- treat it as a host name.
        host = host.lower()
        # IDNA encoding is slow,
        # skip it for ASCII-only strings
        # Don't move the check into _idna_encode() helper
        # to reduce the cache size
        if human or host.isascii():
            return host
        host = _idna_encode(host)
    else:
        host = ip.compressed
        if sep:
            host += "%" + zone
        if ip.version == 6:
            # IPv6 literals must be bracketed inside a netloc.
            host = "[" + host + "]"
    return host
+
+ @classmethod
+ def _make_netloc(
+ cls, user, password, host, port, encode=False, encode_host=True, requote=False
+ ):
+ quoter = cls._REQUOTER if requote else cls._QUOTER
+ if encode_host:
+ ret = cls._encode_host(host)
+ else:
+ ret = host
+ if port is not None:
+ ret = ret + ":" + str(port)
+ if password is not None:
+ if not user:
+ user = ""
+ else:
+ if encode:
+ user = quoter(user)
+ if encode:
+ password = quoter(password)
+ user = user + ":" + password
+ elif user and encode:
+ user = quoter(user)
+ if user:
+ ret = user + "@" + ret
+ return ret
+
def with_scheme(self, scheme):
    """Return a new URL with scheme replaced."""
    # N.B. doesn't cleanup query/fragment
    if not isinstance(scheme, str):
        raise TypeError("Invalid scheme type")
    if not self.is_absolute():
        raise ValueError("scheme replacement is not allowed for relative URLs")
    # Schemes are case-insensitive; store them lowercased.
    return URL(self._val._replace(scheme=scheme.lower()), encoded=True)
+
def with_user(self, user):
    """Return a new URL with user replaced.

    Autoencode user if needed.

    Clear user/password if user is None.

    """
    # N.B. doesn't cleanup query/fragment
    val = self._val
    if user is None:
        # Dropping the user also drops the password.
        password = None
    elif isinstance(user, str):
        user = self._QUOTER(user)
        password = val.password
    else:
        raise TypeError("Invalid user type")
    # Type validation above deliberately precedes the relative-URL check,
    # so a bad type raises TypeError even for relative URLs.
    if not self.is_absolute():
        raise ValueError("user replacement is not allowed for relative URLs")
    return URL(
        self._val._replace(
            netloc=self._make_netloc(user, password, val.hostname, val.port)
        ),
        encoded=True,
    )
+
def with_password(self, password):
    """Return a new URL with password replaced.

    Autoencode password if needed.

    Clear password if argument is None.

    """
    # N.B. doesn't cleanup query/fragment
    if password is None:
        pass
    elif isinstance(password, str):
        password = self._QUOTER(password)
    else:
        raise TypeError("Invalid password type")
    # Type validation precedes the relative-URL check (TypeError wins).
    if not self.is_absolute():
        raise ValueError("password replacement is not allowed for relative URLs")
    val = self._val
    return URL(
        self._val._replace(
            netloc=self._make_netloc(val.username, password, val.hostname, val.port)
        ),
        encoded=True,
    )
+
def with_host(self, host):
    """Return a new URL with host replaced.

    Autoencode host if needed.

    Changing host for relative URLs is not allowed, use .join()
    instead.

    """
    # N.B. doesn't cleanup query/fragment
    if not isinstance(host, str):
        raise TypeError("Invalid host type")
    if not self.is_absolute():
        raise ValueError("host replacement is not allowed for relative URLs")
    if not host:
        # An absolute URL without an authority is not representable here.
        raise ValueError("host removing is not allowed")
    val = self._val
    return URL(
        self._val._replace(
            netloc=self._make_netloc(val.username, val.password, host, val.port)
        ),
        encoded=True,
    )
+
def with_port(self, port):
    """Return a new URL with port replaced.

    Clear port to default if None is passed.

    """
    # N.B. doesn't cleanup query/fragment
    if port is not None:
        # bool is an int subclass; reject it explicitly.
        if isinstance(port, bool) or not isinstance(port, int):
            raise TypeError(f"port should be int or None, got {type(port)}")
        if port < 0 or port > 65535:
            raise ValueError(f"port must be between 0 and 65535, got {port}")
    if not self.is_absolute():
        raise ValueError("port replacement is not allowed for relative URLs")
    val = self._val
    return URL(
        self._val._replace(
            netloc=self._make_netloc(val.username, val.password, val.hostname, port)
        ),
        encoded=True,
    )
+
def with_path(self, path, *, encoded=False):
    """Return a new URL with path replaced.

    Query and fragment are dropped; unless *encoded* is true the path
    is percent-quoted first.
    """
    if not encoded:
        path = self._PATH_QUOTER(path)
    if self.is_absolute():
        path = self._normalize_path(path)
        if len(path) > 0 and path[0] != "/":
            # An absolute URL always carries a rooted path.
            path = "/" + path
    return URL(self._val._replace(path=path, query="", fragment=""), encoded=True)
+
+ @classmethod
+ def _query_seq_pairs(cls, quoter, pairs):
+ for key, val in pairs:
+ if isinstance(val, (list, tuple)):
+ for v in val:
+ yield quoter(key) + "=" + quoter(cls._query_var(v))
+ else:
+ yield quoter(key) + "=" + quoter(cls._query_var(val))
+
+ @staticmethod
+ def _query_var(v):
+ cls = type(v)
+ if issubclass(cls, str):
+ return v
+ if issubclass(cls, float):
+ if math.isinf(v):
+ raise ValueError("float('inf') is not supported")
+ if math.isnan(v):
+ raise ValueError("float('nan') is not supported")
+ return str(float(v))
+ if issubclass(cls, int) and cls is not bool:
+ return str(int(v))
+ raise TypeError(
+ "Invalid variable type: value "
+ "should be str, int or float, got {!r} "
+ "of type {}".format(v, cls)
+ )
+
+ def _get_str_query(self, *args, **kwargs):
+ if kwargs:
+ if len(args) > 0:
+ raise ValueError(
+ "Either kwargs or single query parameter must be present"
+ )
+ query = kwargs
+ elif len(args) == 1:
+ query = args[0]
+ else:
+ raise ValueError("Either kwargs or single query parameter must be present")
+
+ if query is None:
+ query = None
+ elif isinstance(query, Mapping):
+ quoter = self._QUERY_PART_QUOTER
+ query = "&".join(self._query_seq_pairs(quoter, query.items()))
+ elif isinstance(query, str):
+ query = self._QUERY_QUOTER(query)
+ elif isinstance(query, (bytes, bytearray, memoryview)):
+ raise TypeError(
+ "Invalid query type: bytes, bytearray and memoryview are forbidden"
+ )
+ elif isinstance(query, Sequence):
+ quoter = self._QUERY_PART_QUOTER
+ # We don't expect sequence values if we're given a list of pairs
+ # already; only mappings like builtin `dict` which can't have the
+ # same key pointing to multiple values are allowed to use
+ # `_query_seq_pairs`.
+ query = "&".join(
+ quoter(k) + "=" + quoter(self._query_var(v)) for k, v in query
+ )
+ else:
+ raise TypeError(
+ "Invalid query type: only str, mapping or "
+ "sequence of (key, value) pairs is allowed"
+ )
+
+ return query
+
def with_query(self, *args, **kwargs):
    """Return a new URL with query part replaced.

    Accepts any Mapping (e.g. dict, multidict.MultiDict instances)
    or str, autoencode the argument if needed.

    A sequence of (key, value) pairs is supported as well.

    It also can take an arbitrary number of keyword arguments.

    Clear query if None is passed.

    """
    # N.B. doesn't cleanup query/fragment
    # None from _get_str_query means "clear", hence the `or ""`.
    new_query = self._get_str_query(*args, **kwargs) or ""
    return URL(
        self._val._replace(path=self._val.path, query=new_query), encoded=True
    )
+
def update_query(self, *args, **kwargs):
    """Return a new URL with query part updated (merged, not replaced)."""
    s = self._get_str_query(*args, **kwargs)
    query = None
    if s is not None:
        # Merge the new pairs over the existing ones; MultiDict keeps
        # duplicate keys.
        new_query = MultiDict(parse_qsl(s, keep_blank_values=True))
        query = MultiDict(self.query)
        query.update(new_query)

    # When None was passed, query stays None and the string query ends
    # up empty, i.e. the whole query is cleared.
    return URL(
        self._val._replace(query=self._get_str_query(query) or ""), encoded=True
    )
+
def with_fragment(self, fragment):
    """Return a new URL with fragment replaced.

    Autoencode fragment if needed.

    Clear fragment to default if None is passed.

    """
    if fragment is None:
        raw = ""
    elif isinstance(fragment, str):
        raw = self._FRAGMENT_QUOTER(fragment)
    else:
        raise TypeError("Invalid fragment type")
    # Avoid allocating a new URL when nothing would change.
    if raw == self.raw_fragment:
        return self
    return URL(self._val._replace(fragment=raw), encoded=True)
+
def with_name(self, name):
    """Return a new URL with name (last part of path) replaced.

    Query and fragment parts are cleaned up.

    Name is encoded if needed.

    """
    # N.B. DOES cleanup query/fragment
    if not isinstance(name, str):
        raise TypeError("Invalid name type")
    if "/" in name:
        raise ValueError("Slash in name is not allowed")
    name = self._PATH_QUOTER(name)
    if name in (".", ".."):
        raise ValueError(". and .. values are forbidden")
    parts = list(self.raw_parts)
    if self.is_absolute():
        if len(parts) == 1:
            # Absolute URL with a bare "/": the new name becomes the
            # only path segment.
            parts.append(name)
        else:
            parts[-1] = name
        parts[0] = ""  # replace leading '/'
    else:
        parts[-1] = name
        if parts[0] == "/":
            parts[0] = ""  # replace leading '/'
    return URL(
        self._val._replace(path="/".join(parts), query="", fragment=""),
        encoded=True,
    )
+
def with_suffix(self, suffix):
    """Return a new URL with suffix (file extension of name) replaced.

    Query and fragment parts are cleaned up.

    suffix is encoded if needed.
    """
    if not isinstance(suffix, str):
        raise TypeError("Invalid suffix type")
    # A non-empty suffix must start with "." and a bare "." is invalid.
    if suffix == "." or (suffix and not suffix.startswith(".")):
        raise ValueError(f"Invalid suffix {suffix!r}")
    name = self.raw_name
    if not name:
        raise ValueError(f"{self!r} has an empty name")
    old = self.raw_suffix
    stem = name if not old else name[: -len(old)]
    return self.with_name(stem + suffix)
+
def join(self, url):
    """Join URLs

    Construct a full (“absolute”) URL by combining a “base URL”
    (self) with another URL (url).

    Informally, this uses components of the base URL, in
    particular the addressing scheme, the network location and
    (part of) the path, to provide missing components in the
    relative URL.

    """
    # See docs for urllib.parse.urljoin
    if not isinstance(url, URL):
        raise TypeError("url should be URL")
    # Both operands are already percent-encoded, so the joined string
    # can be wrapped verbatim (encoded=True).
    return URL(urljoin(str(self), str(url)), encoded=True)
+
def joinpath(self, *other, encoded=False):
    """Return a new URL with the elements in other appended to the path.

    Query and fragment are dropped by the underlying _make_child().
    """
    return self._make_child(other, encoded=encoded)
+
def human_repr(self):
    """Return decoded human readable string for URL representation."""
    # Re-quote only the characters that stay ambiguous in each component.
    user = _human_quote(self.user, "#/:?@[]")
    password = _human_quote(self.password, "#/:?@[]")
    host = self.host
    if host:
        host = self._encode_host(self.host, human=True)
    path = _human_quote(self.path, "#?")
    query_string = "&".join(
        "{}={}".format(_human_quote(k, "#&+;="), _human_quote(v, "#&+;="))
        for k, v in self.query.items()
    )
    fragment = _human_quote(self.fragment, "")
    # host is already encoded above, so _make_netloc must not re-encode it.
    return urlunsplit(
        SplitResult(
            self.scheme,
            self._make_netloc(
                user,
                password,
                host,
                self._val.port,
                encode_host=False,
            ),
            path,
            query_string,
            fragment,
        )
    )
+
+
+def _human_quote(s, unsafe):
+ if not s:
+ return s
+ for c in "%" + unsafe:
+ if c in s:
+ s = s.replace(c, f"%{ord(c):02X}")
+ if s.isprintable():
+ return s
+ return "".join(c if c.isprintable() else quote(c) for c in s)
+
+
+_MAXCACHE = 256
+
+
@functools.lru_cache(_MAXCACHE)
def _idna_decode(raw):
    """Decode an IDNA host name, cached; stdlib codec as fallback."""
    try:
        return idna.decode(raw.encode("ascii"))
    except UnicodeError:  # e.g. '::1'
        return raw.encode("ascii").decode("idna")
+
+
@functools.lru_cache(_MAXCACHE)
def _idna_encode(host):
    """Encode a host name via the idna package (uts46 mapping), cached.

    Falls back to the stdlib "idna" codec when the package refuses it.
    """
    try:
        return idna.encode(host, uts46=True).decode("ascii")
    except UnicodeError:
        return host.encode("idna").decode("ascii")
+
+
@rewrite_module
def cache_clear():
    """Drop all cached IDNA encode/decode results."""
    _idna_decode.cache_clear()
    _idna_encode.cache_clear()
+
+
@rewrite_module
def cache_info():
    """Return lru_cache statistics for the IDNA caches."""
    return {
        "idna_encode": _idna_encode.cache_info(),
        "idna_decode": _idna_decode.cache_info(),
    }
+
+
@rewrite_module
def cache_configure(*, idna_encode_size=_MAXCACHE, idna_decode_size=_MAXCACHE):
    """Re-wrap the IDNA helpers with new cache sizes (resets contents)."""
    global _idna_decode, _idna_encode

    _idna_encode = functools.lru_cache(idna_encode_size)(_idna_encode.__wrapped__)
    _idna_decode = functools.lru_cache(idna_decode_size)(_idna_decode.__wrapped__)
diff --git a/contrib/python/yarl/yarl/py.typed b/contrib/python/yarl/yarl/py.typed
new file mode 100644
index 0000000000..dcf2c804da
--- /dev/null
+++ b/contrib/python/yarl/yarl/py.typed
@@ -0,0 +1 @@
+# Placeholder
diff --git a/contrib/python/ydb/py2/.dist-info/METADATA b/contrib/python/ydb/py2/.dist-info/METADATA
new file mode 100644
index 0000000000..1efcb264c4
--- /dev/null
+++ b/contrib/python/ydb/py2/.dist-info/METADATA
@@ -0,0 +1,63 @@
+Metadata-Version: 2.1
+Name: ydb
+Version: 2.15.1
+Summary: YDB Python SDK
+Home-page: http://github.com/ydb-platform/ydb-python-sdk
+Author: Yandex LLC
+Author-email: ydb@yandex-team.ru
+License: Apache 2.0
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.6
+Description-Content-Type: text/markdown
+License-File: LICENSE
+License-File: AUTHORS
+Requires-Dist: aiohttp ==3.7.4
+Requires-Dist: enum-compat >=0.0.1
+Requires-Dist: grpcio >=1.5.0
+Requires-Dist: packaging
+Requires-Dist: protobuf <5.0.0,>3.13.0
+Requires-Dist: pytest ==6.2.4
+Requires-Dist: six <2
+Provides-Extra: yc
+Requires-Dist: yandexcloud ; extra == 'yc'
+
+YDB Python SDK
+---
+[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://github.com/ydb-platform/ydb/blob/main/LICENSE)
+[![PyPI version](https://badge.fury.io/py/ydb.svg)](https://badge.fury.io/py/ydb)
+[![Functional tests](https://github.com/ydb-platform/ydb-python-sdk/actions/workflows/tests.yaml/badge.svg)](https://github.com/ydb-platform/ydb-python-sdk/actions/workflows/tests.yaml)
+[![Style checks](https://github.com/ydb-platform/ydb-python-sdk/actions/workflows/style.yaml/badge.svg)](https://github.com/ydb-platform/ydb-python-sdk/actions/workflows/style.yaml)
+
+Officially supported Python client for YDB.
+
+## Quickstart
+
+### Prerequisites
+
+- Python 3.8 or higher
+- `pip` version 9.0.1 or higher
+
+If necessary, upgrade your version of `pip`:
+
+```sh
+$ python -m pip install --upgrade pip
+```
+
+If you cannot upgrade `pip` due to a system-owned installation, you can
+run the example in a virtualenv:
+
+```sh
+$ python -m pip install virtualenv
+$ virtualenv venv
+$ source venv/bin/activate
+$ python -m pip install --upgrade pip
+```
+
+Install the YDB Python SDK:
+
+```sh
+$ python -m pip install ydb
+```
diff --git a/contrib/python/ydb/py2/.dist-info/top_level.txt b/contrib/python/ydb/py2/.dist-info/top_level.txt
new file mode 100644
index 0000000000..7298732379
--- /dev/null
+++ b/contrib/python/ydb/py2/.dist-info/top_level.txt
@@ -0,0 +1,2 @@
+kikimr
+ydb
diff --git a/contrib/python/ydb/py2/AUTHORS b/contrib/python/ydb/py2/AUTHORS
new file mode 100644
index 0000000000..200343e364
--- /dev/null
+++ b/contrib/python/ydb/py2/AUTHORS
@@ -0,0 +1,4 @@
+The following authors have created the source code of "YDB Python SDK"
+published and distributed by YANDEX LLC as the owner:
+
+Vitalii Gridnev <gridnevvvit@gmail.com>
diff --git a/contrib/python/ydb/py2/LICENSE b/contrib/python/ydb/py2/LICENSE
new file mode 100644
index 0000000000..cabac2dec9
--- /dev/null
+++ b/contrib/python/ydb/py2/LICENSE
@@ -0,0 +1,202 @@
+Copyright 2022 YANDEX LLC
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright 2022 YANDEX LLC
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/contrib/python/ydb/py2/README.md b/contrib/python/ydb/py2/README.md
new file mode 100644
index 0000000000..cfc57eb276
--- /dev/null
+++ b/contrib/python/ydb/py2/README.md
@@ -0,0 +1,37 @@
+YDB Python SDK
+---
+[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://github.com/ydb-platform/ydb/blob/main/LICENSE)
+[![PyPI version](https://badge.fury.io/py/ydb.svg)](https://badge.fury.io/py/ydb)
+[![Functional tests](https://github.com/ydb-platform/ydb-python-sdk/actions/workflows/tests.yaml/badge.svg)](https://github.com/ydb-platform/ydb-python-sdk/actions/workflows/tests.yaml)
+[![Style checks](https://github.com/ydb-platform/ydb-python-sdk/actions/workflows/style.yaml/badge.svg)](https://github.com/ydb-platform/ydb-python-sdk/actions/workflows/style.yaml)
+
+Officially supported Python client for YDB.
+
+## Quickstart
+
+### Prerequisites
+
+- Python 3.8 or higher
+- `pip` version 9.0.1 or higher
+
+If necessary, upgrade your version of `pip`:
+
+```sh
+$ python -m pip install --upgrade pip
+```
+
+If you cannot upgrade `pip` due to a system-owned installation, you can
+run the example in a virtualenv:
+
+```sh
+$ python -m pip install virtualenv
+$ virtualenv venv
+$ source venv/bin/activate
+$ python -m pip install --upgrade pip
+```
+
+Install the YDB Python SDK:
+
+```sh
+$ python -m pip install ydb
+```
diff --git a/contrib/python/ydb/py2/ya.make b/contrib/python/ydb/py2/ya.make
new file mode 100644
index 0000000000..f88f935402
--- /dev/null
+++ b/contrib/python/ydb/py2/ya.make
@@ -0,0 +1,71 @@
+# Generated by devtools/yamaker (pypi).
+
+PY2_LIBRARY()
+
+VERSION(2.15.1)
+
+LICENSE(Apache-2.0)
+
+PEERDIR(
+ contrib/deprecated/python/enum34
+ contrib/python/grpcio
+ contrib/python/packaging
+ contrib/python/protobuf
+ contrib/python/pytest
+ contrib/python/six
+)
+
+NO_LINT()
+
+NO_CHECK_IMPORTS(
+ ydb.public.api.grpc
+ ydb.public.api.grpc.*
+)
+
+PY_SRCS(
+ TOP_LEVEL
+ ydb/__init__.py
+ ydb/_apis.py
+ ydb/_errors.py
+ ydb/_session_impl.py
+ ydb/_sp_impl.py
+ ydb/_tx_ctx_impl.py
+ ydb/_utilities.py
+ ydb/auth_helpers.py
+ ydb/connection.py
+ ydb/convert.py
+ ydb/credentials.py
+ ydb/dbapi/__init__.py
+ ydb/dbapi/connection.py
+ ydb/dbapi/cursor.py
+ ydb/dbapi/errors.py
+ ydb/default_pem.py
+ ydb/driver.py
+ ydb/export.py
+ ydb/global_settings.py
+ ydb/iam/__init__.py
+ ydb/iam/auth.py
+ ydb/import_client.py
+ ydb/interceptor.py
+ ydb/issues.py
+ ydb/operation.py
+ ydb/pool.py
+ ydb/resolver.py
+ ydb/scheme.py
+ ydb/scripting.py
+ ydb/settings.py
+ ydb/sqlalchemy/__init__.py
+ ydb/sqlalchemy/types.py
+ ydb/table.py
+ ydb/tracing.py
+ ydb/types.py
+ ydb/ydb_version.py
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/python/ydb/py2/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+)
+
+END()
diff --git a/contrib/python/ydb/py2/ydb/__init__.py b/contrib/python/ydb/py2/ydb/__init__.py
new file mode 100644
index 0000000000..56b73478eb
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/__init__.py
@@ -0,0 +1,20 @@
+from .credentials import * # noqa
+from .driver import * # noqa
+from .global_settings import * # noqa
+from .table import * # noqa
+from .issues import * # noqa
+from .types import * # noqa
+from .scheme import * # noqa
+from .settings import * # noqa
+from .resolver import * # noqa
+from .export import * # noqa
+from .auth_helpers import * # noqa
+from .operation import * # noqa
+from .scripting import * # noqa
+from .import_client import * # noqa
+from .tracing import * # noqa
+
+try:
+ import ydb.aio as aio # noqa
+except Exception:
+ pass
diff --git a/contrib/python/ydb/py2/ydb/_apis.py b/contrib/python/ydb/py2/ydb/_apis.py
new file mode 100644
index 0000000000..89efac6041
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/_apis.py
@@ -0,0 +1,96 @@
+# -*- coding: utf-8 -*-
+try:
+ from ydb.public.api.grpc import (
+ ydb_cms_v1_pb2_grpc,
+ ydb_discovery_v1_pb2_grpc,
+ ydb_scheme_v1_pb2_grpc,
+ ydb_table_v1_pb2_grpc,
+ ydb_operation_v1_pb2_grpc,
+ )
+
+ from ydb.public.api.protos import (
+ ydb_status_codes_pb2,
+ ydb_discovery_pb2,
+ ydb_scheme_pb2,
+ ydb_table_pb2,
+ ydb_value_pb2,
+ ydb_operation_pb2,
+ ydb_common_pb2,
+ )
+except ImportError:
+ from contrib.ydb.public.api.grpc import (
+ ydb_cms_v1_pb2_grpc,
+ ydb_discovery_v1_pb2_grpc,
+ ydb_scheme_v1_pb2_grpc,
+ ydb_table_v1_pb2_grpc,
+ ydb_operation_v1_pb2_grpc,
+ )
+
+ from contrib.ydb.public.api.protos import (
+ ydb_status_codes_pb2,
+ ydb_discovery_pb2,
+ ydb_scheme_pb2,
+ ydb_table_pb2,
+ ydb_value_pb2,
+ ydb_operation_pb2,
+ ydb_common_pb2,
+ )
+
+
# Convenience aliases so the rest of the SDK does not care whether the
# generated protos came from `ydb.public` or `contrib.ydb.public`.
StatusIds = ydb_status_codes_pb2.StatusIds
FeatureFlag = ydb_common_pb2.FeatureFlag
primitive_types = ydb_value_pb2.Type.PrimitiveTypeId
ydb_value = ydb_value_pb2
ydb_scheme = ydb_scheme_pb2
ydb_table = ydb_table_pb2
ydb_discovery = ydb_discovery_pb2
ydb_operation = ydb_operation_pb2
+
+
class CmsService(object):
    # gRPC stub class for the YDB CMS service.
    Stub = ydb_cms_v1_pb2_grpc.CmsServiceStub
+
+
class DiscoveryService(object):
    # gRPC stub class and RPC method name constants for endpoint discovery.
    Stub = ydb_discovery_v1_pb2_grpc.DiscoveryServiceStub
    ListEndpoints = "ListEndpoints"
+
+
class OperationService(object):
    # gRPC stub class and RPC method name constants for long-running
    # operation management.
    Stub = ydb_operation_v1_pb2_grpc.OperationServiceStub
    ForgetOperation = "ForgetOperation"
    GetOperation = "GetOperation"
    CancelOperation = "CancelOperation"
+
+
class SchemeService(object):
    # gRPC stub class and RPC method name constants for scheme
    # (directory/permission) operations.
    Stub = ydb_scheme_v1_pb2_grpc.SchemeServiceStub
    MakeDirectory = "MakeDirectory"
    RemoveDirectory = "RemoveDirectory"
    ListDirectory = "ListDirectory"
    DescribePath = "DescribePath"
    ModifyPermissions = "ModifyPermissions"
+
+
class TableService(object):
    # gRPC stub class and RPC method name constants for the table service
    # (sessions, queries, transactions, bulk operations).
    Stub = ydb_table_v1_pb2_grpc.TableServiceStub

    StreamExecuteScanQuery = "StreamExecuteScanQuery"
    ExplainDataQuery = "ExplainDataQuery"
    CreateTable = "CreateTable"
    DropTable = "DropTable"
    AlterTable = "AlterTable"
    CopyTables = "CopyTables"
    RenameTables = "RenameTables"
    DescribeTable = "DescribeTable"
    CreateSession = "CreateSession"
    DeleteSession = "DeleteSession"
    ExecuteSchemeQuery = "ExecuteSchemeQuery"
    PrepareDataQuery = "PrepareDataQuery"
    ExecuteDataQuery = "ExecuteDataQuery"
    BeginTransaction = "BeginTransaction"
    CommitTransaction = "CommitTransaction"
    RollbackTransaction = "RollbackTransaction"
    KeepAlive = "KeepAlive"
    StreamReadTable = "StreamReadTable"
    BulkUpsert = "BulkUpsert"
diff --git a/contrib/python/ydb/py2/ydb/_errors.py b/contrib/python/ydb/py2/ydb/_errors.py
new file mode 100644
index 0000000000..84ac23db76
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/_errors.py
@@ -0,0 +1,60 @@
+from . import issues
+
+# Errors retried immediately with the short (fast) backoff policy.
+_errors_retriable_fast_backoff_types = [
+    issues.Unavailable,
+]
+# Errors retried with the longer (slow) backoff policy, regardless of
+# operation idempotency.
+_errors_retriable_slow_backoff_types = [
+    issues.Aborted,
+    issues.BadSession,
+    issues.Overloaded,
+    issues.SessionPoolEmpty,
+    issues.ConnectionError,
+]
+# Errors retried with the slow backoff only for idempotent operations
+# (the outcome of the previous attempt is unknown).
+_errors_retriable_slow_backoff_idempotent_types = [
+    issues.Undetermined,
+]
+
+
+def check_retriable_error(err, retry_settings, attempt):
+    """Decide whether *err* may be retried and compute the backoff timeout.
+
+    :param err: Exception raised by a YDB call (an ``issues``-module error).
+    :param retry_settings: Retry settings with opt-in flags and the
+        ``fast_backoff``/``slow_backoff`` calculators.
+    :param attempt: Zero-based attempt number, used to scale the backoff.
+    :return: An ``ErrorRetryInfo`` with the verdict and sleep timeout.
+    """
+    # NotFound and InternalError retries are opt-in via retry_settings flags.
+    if isinstance(err, issues.NotFound):
+        if retry_settings.retry_not_found:
+            return ErrorRetryInfo(
+                True, retry_settings.fast_backoff.calc_timeout(attempt)
+            )
+        else:
+            return ErrorRetryInfo(False, None)
+
+    if isinstance(err, issues.InternalError):
+        if retry_settings.retry_internal_error:
+            return ErrorRetryInfo(
+                True, retry_settings.slow_backoff.calc_timeout(attempt)
+            )
+        else:
+            return ErrorRetryInfo(False, None)
+
+    for t in _errors_retriable_fast_backoff_types:
+        if isinstance(err, t):
+            return ErrorRetryInfo(
+                True, retry_settings.fast_backoff.calc_timeout(attempt)
+            )
+
+    for t in _errors_retriable_slow_backoff_types:
+        if isinstance(err, t):
+            return ErrorRetryInfo(
+                True, retry_settings.slow_backoff.calc_timeout(attempt)
+            )
+
+    # Undetermined-style errors are retried only for idempotent operations.
+    if retry_settings.idempotent:
+        for t in _errors_retriable_slow_backoff_idempotent_types:
+            if isinstance(err, t):
+                return ErrorRetryInfo(
+                    True, retry_settings.slow_backoff.calc_timeout(attempt)
+                )
+
+    return ErrorRetryInfo(False, None)
+
+
+class ErrorRetryInfo:
+    """Retry verdict: whether to retry and how long to sleep before it."""
+
+    def __init__(self, is_retriable, sleep_timeout_seconds=None):
+        # sleep_timeout_seconds is None when the error is not retriable.
+        self.is_retriable = is_retriable
+        self.sleep_timeout_seconds = sleep_timeout_seconds
diff --git a/contrib/python/ydb/py2/ydb/_session_impl.py b/contrib/python/ydb/py2/ydb/_session_impl.py
new file mode 100644
index 0000000000..27d7a3f6a0
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/_session_impl.py
@@ -0,0 +1,498 @@
+import functools
+from google.protobuf.empty_pb2 import Empty
+from . import issues, types, _apis, convert, scheme, operation, _utilities
+
+# Trailing-metadata key and the hint value the server uses to signal that
+# the session is about to be closed and should not be reused.
+X_YDB_SERVER_HINTS = "x-ydb-server-hints"
+X_YDB_SESSION_CLOSE = "session-close"
+
+
+def _check_session_is_closing(rpc_state, session_state):
+    """Mark the session as closing if the server hinted so in trailing metadata."""
+    metadata = rpc_state.trailing_metadata()
+    if X_YDB_SESSION_CLOSE in metadata.get(X_YDB_SERVER_HINTS, []):
+        session_state.set_closing()
+
+
+def bad_session_handler(func):
+    """Decorator for response wrappers: reset session state on BadSession.
+
+    Also inspects server hints before delegating, so a gracefully closing
+    session is flagged even when the call itself succeeds.
+    """
+
+    @functools.wraps(func)
+    def decorator(rpc_state, response_pb, session_state, *args, **kwargs):
+        try:
+            _check_session_is_closing(rpc_state, session_state)
+            return func(rpc_state, response_pb, session_state, *args, **kwargs)
+        except issues.BadSession:
+            session_state.reset()
+            raise
+
+    return decorator
+
+
+@bad_session_handler
+def wrap_prepare_query_response(rpc_state, response_pb, session_state, yql_text):
+    """Unpack a PrepareDataQuery response and cache the prepared query."""
+    session_state.complete_query()
+    issues._process_response(response_pb.operation)
+    message = _apis.ydb_table.PrepareQueryResult()
+    response_pb.operation.result.Unpack(message)
+    data_query = types.DataQuery(yql_text, message.parameters_types)
+    # Remember the server-issued query id for later ExecuteDataQuery calls.
+    session_state.keep(data_query, message.query_id)
+    return data_query
+
+
+def prepare_request_factory(session_state, yql_text):
+    """Build a PrepareDataQueryRequest bound to the current session."""
+    request = session_state.start_query().attach_request(
+        _apis.ydb_table.PrepareDataQueryRequest()
+    )
+    request.yql_text = yql_text
+    return request
+
+
+class AlterTableOperation(operation.Operation):
+    """Operation wrapper that additionally exposes the readiness flag."""
+
+    def __init__(self, rpc_state, response_pb, driver):
+        super(AlterTableOperation, self).__init__(rpc_state, response_pb, driver)
+        self.ready = response_pb.operation.ready
+
+
+def copy_tables_request_factory(session_state, source_destination_pairs):
+    """Build a CopyTablesRequest from (source_path, destination_path) pairs."""
+    request = session_state.attach_request(_apis.ydb_table.CopyTablesRequest())
+    for source_path, destination_path in source_destination_pairs:
+        table_item = request.tables.add()
+        table_item.source_path = source_path
+        table_item.destination_path = destination_path
+    return request
+
+
+def rename_tables_request_factory(session_state, rename_items):
+    """Build a RenameTablesRequest from items carrying source/destination info."""
+    request = session_state.attach_request(_apis.ydb_table.RenameTablesRequest())
+    for item in rename_items:
+        table_item = request.tables.add()
+        table_item.source_path = item.source_path
+        table_item.destination_path = item.destination_path
+        table_item.replace_destination = item.replace_destination
+    return request
+
+
+def explain_data_query_request_factory(session_state, yql_text):
+    """Build an ExplainDataQueryRequest bound to the current session."""
+    request = session_state.start_query().attach_request(
+        _apis.ydb_table.ExplainDataQueryRequest()
+    )
+    request.yql_text = yql_text
+    return request
+
+
+class _ExplainResponse(object):
+    """Holder of the query AST and plan returned by ExplainDataQuery."""
+
+    def __init__(self, ast, plan):
+        self.query_ast = ast
+        self.query_plan = plan
+
+
+def wrap_explain_response(rpc_state, response_pb, session_state):
+    """Unpack an ExplainDataQuery response into an ``_ExplainResponse``.
+
+    NOTE(review): unlike the sibling wrappers this one is not decorated with
+    @bad_session_handler -- confirm whether that is intentional.
+    """
+    session_state.complete_query()
+    issues._process_response(response_pb.operation)
+    message = _apis.ydb_table.ExplainQueryResult()
+    response_pb.operation.result.Unpack(message)
+    return _ExplainResponse(message.query_ast, message.query_plan)
+
+
+@bad_session_handler
+def wrap_execute_scheme_result(rpc_state, response_pb, session_state):
+    """Unpack an ExecuteSchemeQuery response into result sets."""
+    session_state.complete_query()
+    issues._process_response(response_pb.operation)
+    message = _apis.ydb_table.ExecuteQueryResult()
+    response_pb.operation.result.Unpack(message)
+    return convert.ResultSets(message.result_sets)
+
+
+def execute_scheme_request_factory(session_state, yql_text):
+    """Build an ExecuteSchemeQueryRequest bound to the current session."""
+    request = session_state.start_query().attach_request(
+        _apis.ydb_table.ExecuteSchemeQueryRequest()
+    )
+    request.yql_text = yql_text
+    return request
+
+
+@bad_session_handler
+def wrap_describe_table_response(
+ rpc_state, response_pb, sesssion_state, scheme_entry_cls
+):
+ issues._process_response(response_pb.operation)
+ message = _apis.ydb_table.DescribeTableResult()
+ response_pb.operation.result.Unpack(message)
+ return scheme._wrap_scheme_entry(
+ message.self,
+ scheme_entry_cls,
+ message.columns,
+ message.primary_key,
+ message.shard_key_bounds,
+ message.indexes,
+ message.table_stats if message.HasField("table_stats") else None,
+ message.ttl_settings if message.HasField("ttl_settings") else None,
+ message.attributes,
+ message.partitioning_settings
+ if message.HasField("partitioning_settings")
+ else None,
+ message.column_families,
+ message.key_bloom_filter,
+ message.read_replicas_settings
+ if message.HasField("read_replicas_settings")
+ else None,
+ message.storage_settings if message.HasField("storage_settings") else None,
+ )
+
+
+def explicit_partitions_factory(primary_key, columns, split_points):
+    """Build an ExplicitPartitions message from user-provided split points.
+
+    Each split point may cover only a prefix of the primary key; the tuple
+    type for a point is built from exactly the key columns it provides.
+    """
+    # Map primary-key column names to their declared types.
+    column_types = {}
+    pk = set(primary_key)
+    for column in columns:
+        if column.name in pk:
+            column_types[column.name] = column.type
+
+    explicit_partitions = _apis.ydb_table.ExplicitPartitions()
+    for split_point in split_points:
+        typed_value = explicit_partitions.split_points.add()
+        split_point_type = types.TupleType()
+        prefix_size = len(split_point.value)
+        for pl_el_id, pk_name in enumerate(primary_key):
+            if pl_el_id >= prefix_size:
+                break
+
+            split_point_type.add_element(column_types[pk_name])
+
+        typed_value.type.MergeFrom(split_point_type.proto)
+        typed_value.value.MergeFrom(
+            convert.from_native_value(split_point_type.proto, split_point.value)
+        )
+
+    return explicit_partitions
+
+
+def create_table_request_factory(session_state, path, table_description):
+    """Build a CreateTableRequest from a table description.
+
+    A ready-made CreateTableRequest is only attached to the session as-is;
+    otherwise each optional part of the description is copied into a fresh
+    request message.
+    """
+    if isinstance(table_description, _apis.ydb_table.CreateTableRequest):
+        request = session_state.attach_request(table_description)
+        return request
+
+    request = _apis.ydb_table.CreateTableRequest()
+    request.path = path
+    request.primary_key.extend(list(table_description.primary_key))
+    for column in table_description.columns:
+        request.columns.add(name=column.name, type=column.type_pb, family=column.family)
+
+    if table_description.profile is not None:
+        request.profile.MergeFrom(table_description.profile.to_pb(table_description))
+
+    for index in table_description.indexes:
+        request.indexes.add().MergeFrom(index.to_pb())
+
+    if table_description.ttl_settings is not None:
+        request.ttl_settings.MergeFrom(table_description.ttl_settings.to_pb())
+
+    request.attributes.update(table_description.attributes)
+
+    if table_description.column_families:
+        for column_family in table_description.column_families:
+            request.column_families.add().MergeFrom(column_family.to_pb())
+
+    if table_description.storage_settings is not None:
+        request.storage_settings.MergeFrom(table_description.storage_settings.to_pb())
+
+    if table_description.read_replicas_settings is not None:
+        request.read_replicas_settings.MergeFrom(
+            table_description.read_replicas_settings.to_pb()
+        )
+
+    if table_description.partitioning_settings is not None:
+        request.partitioning_settings.MergeFrom(
+            table_description.partitioning_settings.to_pb()
+        )
+
+    request.key_bloom_filter = table_description.key_bloom_filter
+    if table_description.compaction_policy is not None:
+        request.compaction_policy = table_description.compaction_policy
+    # Explicit split points take precedence over uniform partitioning.
+    if table_description.partition_at_keys is not None:
+        request.partition_at_keys.MergeFrom(
+            explicit_partitions_factory(
+                list(table_description.primary_key),
+                table_description.columns,
+                table_description.partition_at_keys.split_points,
+            )
+        )
+
+    elif table_description.uniform_partitions > 0:
+        request.uniform_partitions = table_description.uniform_partitions
+
+    return session_state.attach_request(request)
+
+
+def keep_alive_request_factory(session_state):
+    """Build a KeepAliveRequest bound to the current session."""
+    request = _apis.ydb_table.KeepAliveRequest()
+    return session_state.attach_request(request)
+
+
+@bad_session_handler
+def cleanup_session(rpc_state, response_pb, session_state, session):
+    """Process a DeleteSession response and reset local session state."""
+    issues._process_response(response_pb.operation)
+    session_state.reset()
+    return session
+
+
+@bad_session_handler
+def initialize_session(rpc_state, response_pb, session_state, session):
+    """Adopt the server-created session id and pin it to the RPC endpoint."""
+    issues._process_response(response_pb.operation)
+    message = _apis.ydb_table.CreateSessionResult()
+    response_pb.operation.result.Unpack(message)
+    session_state.set_id(message.session_id).attach_endpoint(rpc_state.endpoint_key)
+    return session
+
+
+@bad_session_handler
+def wrap_operation(rpc_state, response_pb, session_state, driver=None):
+    """Wrap a generic operation response (session-aware variant)."""
+    return operation.Operation(rpc_state, response_pb, driver)
+
+
+def wrap_operation_bulk_upsert(rpc_state, response_pb, driver=None):
+    """Wrap a BulkUpsert operation response (no session state is involved)."""
+    return operation.Operation(rpc_state, response_pb, driver)
+
+
+@bad_session_handler
+def wrap_keep_alive_response(rpc_state, response_pb, session_state, session):
+    """Validate a KeepAlive response and return the session unchanged."""
+    issues._process_response(response_pb.operation)
+    return session
+
+
+def describe_table_request_factory(session_state, path, settings=None):
+    """Build a DescribeTableRequest, honoring optional include_* settings.
+
+    hasattr() guards keep compatibility with plain BaseRequestSettings
+    objects that lack the describe-specific attributes.
+    """
+    request = session_state.attach_request(_apis.ydb_table.DescribeTableRequest())
+    request.path = path
+
+    if (
+        settings is not None
+        and hasattr(settings, "include_shard_key_bounds")
+        and settings.include_shard_key_bounds
+    ):
+        request.include_shard_key_bounds = settings.include_shard_key_bounds
+
+    if (
+        settings is not None
+        and hasattr(settings, "include_table_stats")
+        and settings.include_table_stats
+    ):
+        request.include_table_stats = settings.include_table_stats
+
+    return request
+
+
+def alter_table_request_factory(
+    session_state,
+    path,
+    add_columns,
+    drop_columns,
+    alter_attributes,
+    add_indexes,
+    drop_indexes,
+    set_ttl_settings,
+    drop_ttl_settings,
+    add_column_families,
+    alter_column_families,
+    alter_storage_settings,
+    set_compaction_policy,
+    alter_partitioning_settings,
+    set_key_bloom_filter,
+    set_read_replicas_settings,
+):
+    """Build an AlterTableRequest.
+
+    Every non-None argument is copied into the corresponding request field;
+    None arguments leave the table aspect untouched.
+    """
+    request = session_state.attach_request(_apis.ydb_table.AlterTableRequest(path=path))
+    if add_columns is not None:
+        for column in add_columns:
+            request.add_columns.add(name=column.name, type=column.type_pb)
+
+    if drop_columns is not None:
+        request.drop_columns.extend(list(drop_columns))
+
+    if drop_indexes is not None:
+        request.drop_indexes.extend(list(drop_indexes))
+
+    if add_indexes is not None:
+        for index in add_indexes:
+            request.add_indexes.add().MergeFrom(index.to_pb())
+
+    if alter_attributes is not None:
+        request.alter_attributes.update(alter_attributes)
+
+    if set_ttl_settings is not None:
+        request.set_ttl_settings.MergeFrom(set_ttl_settings.to_pb())
+
+    # drop_ttl_settings is a flag; an Empty message marks the drop request.
+    if drop_ttl_settings is not None and drop_ttl_settings:
+        request.drop_ttl_settings.MergeFrom(Empty())
+
+    if add_column_families is not None:
+        for column_family in add_column_families:
+            request.add_column_families.add().MergeFrom(column_family.to_pb())
+
+    if alter_column_families is not None:
+        for column_family in alter_column_families:
+            request.alter_column_families.add().MergeFrom(column_family.to_pb())
+
+    if alter_storage_settings is not None:
+        request.alter_storage_settings.MergeFrom(alter_storage_settings.to_pb())
+
+    if set_compaction_policy is not None:
+        request.set_compaction_policy = set_compaction_policy
+
+    if alter_partitioning_settings is not None:
+        request.alter_partitioning_settings.MergeFrom(
+            alter_partitioning_settings.to_pb()
+        )
+
+    if set_key_bloom_filter is not None:
+        request.set_key_bloom_filter = set_key_bloom_filter
+
+    if set_read_replicas_settings is not None:
+        request.set_read_replicas_settings.MergeFrom(set_read_replicas_settings.to_pb())
+
+    return request
+
+
+def read_table_request_factory(
+    session_state,
+    path,
+    key_range=None,
+    columns=None,
+    ordered=False,
+    row_limit=None,
+    use_snapshot=None,
+):
+    """Build a streaming ReadTableRequest.
+
+    Bound inclusivity maps to the greater_or_equal/greater and
+    less_or_equal/less oneof fields; use_snapshot accepts either a bool
+    (converted to a FeatureFlag) or a FeatureFlag value directly.
+    """
+    request = _apis.ydb_table.ReadTableRequest()
+    request.path = path
+    request.ordered = ordered
+    if key_range is not None and key_range.from_bound is not None:
+        target_attribute = (
+            "greater_or_equal" if key_range.from_bound.is_inclusive() else "greater"
+        )
+        getattr(request.key_range, target_attribute).MergeFrom(
+            convert.to_typed_value_from_native(
+                key_range.from_bound.type, key_range.from_bound.value
+            )
+        )
+
+    if key_range is not None and key_range.to_bound is not None:
+        target_attribute = (
+            "less_or_equal" if key_range.to_bound.is_inclusive() else "less"
+        )
+        getattr(request.key_range, target_attribute).MergeFrom(
+            convert.to_typed_value_from_native(
+                key_range.to_bound.type, key_range.to_bound.value
+            )
+        )
+
+    if columns is not None:
+        for column in columns:
+            request.columns.append(column)
+    if row_limit:
+        # NOTE(gvit): pylint cannot understand that row_limit is not None
+        request.row_limit = row_limit  # pylint: disable=E5903
+    if use_snapshot is not None:
+        if isinstance(use_snapshot, bool):
+            if use_snapshot:
+                request.use_snapshot = _apis.FeatureFlag.ENABLED
+            else:
+                request.use_snapshot = _apis.FeatureFlag.DISABLED
+        else:
+            request.use_snapshot = use_snapshot
+    return session_state.attach_request(request)
+
+
+def bulk_upsert_request_factory(table, rows, column_types):
+    """Build a BulkUpsertRequest, converting *rows* to a typed List value."""
+    request = _apis.ydb_table.BulkUpsertRequest()
+    request.table = table
+    request.rows.MergeFrom(
+        convert.to_typed_value_from_native(types.ListType(column_types).proto, rows)
+    )
+    return request
+
+
+def wrap_read_table_response(response):
+    """Convert one ReadTable stream response into a ResultSet."""
+    issues._process_response(response)
+    snapshot = response.snapshot if response.HasField("snapshot") else None
+    return convert.ResultSet.from_message(response.result.result_set, snapshot=snapshot)
+
+
+class SessionState(object):
+    """Mutable client-side state of a single YDB session.
+
+    Tracks the server-assigned session id, the endpoint the session is pinned
+    to, a small LRU cache of prepared queries and whether a query is
+    currently in flight.
+    """
+
+    def __init__(self, table_client_settings):
+        self._session_id = None
+        self._query_cache = _utilities.LRUCache(1000)
+        self._default = (None, None)
+        self._pending_query = False
+        self._endpoint = None
+        self._closing = False
+        self._client_cache_enabled = table_client_settings._client_query_cache_enabled
+        self.table_client_settings = table_client_settings
+
+    def __contains__(self, query):
+        return self.lookup(query) != self._default
+
+    def reset(self):
+        # Drop everything tied to the (now invalid) server-side session.
+        self._query_cache = _utilities.LRUCache(1000)
+        self._session_id = None
+        self._pending_query = False
+        self._endpoint = None
+
+    def attach_endpoint(self, endpoint):
+        self._endpoint = endpoint
+        return self
+
+    def set_closing(self):
+        # Set when the server hints the session is about to close.
+        self._closing = True
+        return self
+
+    def closing(self):
+        return self._closing
+
+    @property
+    def endpoint(self):
+        return self._endpoint
+
+    @property
+    def session_id(self):
+        return self._session_id
+
+    def pending_query(self):
+        return self._pending_query
+
+    def set_id(self, session_id):
+        self._session_id = session_id
+        return self
+
+    def keep(self, query, query_id):
+        # With the client cache disabled the query id is not stored, so the
+        # query text is re-sent on every execution.
+        if self._client_cache_enabled:
+            self._query_cache.put(query.name, (query, query_id))
+        else:
+            self._query_cache.put(query.name, (query, None))
+        return self
+
+    @staticmethod
+    def _query_key(query):
+        # DataQuery instances are keyed by name; raw YQL text by its hash.
+        return (
+            query.name
+            if isinstance(query, types.DataQuery)
+            else _utilities.get_query_hash(query)
+        )
+
+    def lookup(self, query):
+        return self._query_cache.get(self._query_key(query), self._default)
+
+    def erase(self, query):
+        query, _ = self.lookup(query)
+        self._query_cache.erase(query.name)
+
+    def complete_query(self):
+        self._pending_query = False
+        return self
+
+    def start_query(self):
+        if self._pending_query:
+            # don't invalidate session at this point
+            self.reset()
+            raise issues.BadSession("Pending previous query completion!")
+        self._pending_query = True
+        return self
+
+    def attach_request(self, request):
+        if self._session_id is None:
+            raise issues.BadSession("Empty session_id")
+        request.session_id = self._session_id
+        return request
diff --git a/contrib/python/ydb/py2/ydb/_sp_impl.py b/contrib/python/ydb/py2/ydb/_sp_impl.py
new file mode 100644
index 0000000000..a8529d7321
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/_sp_impl.py
@@ -0,0 +1,399 @@
+# -*- coding: utf-8 -*-
+import collections
+from concurrent import futures
+from six.moves import queue
+import time
+import threading
+from . import settings, issues, _utilities, tracing
+
+
+class SessionPoolImpl(object):
+    """Thread-safe pool of YDB sessions.
+
+    Keeps ready sessions in a priority queue ordered by keep-alive deadline,
+    hands sessions to waiters (futures) when the queue is empty and runs a
+    background events loop that keeps idle sessions alive.
+    """
+
+    def __init__(
+        self,
+        logger,
+        driver,
+        size,
+        workers_threads_count=4,
+        initializer=None,
+        min_pool_size=0,
+    ):
+        self._lock = threading.RLock()
+        # Waiters are futures keyed by themselves; OrderedDict gives FIFO.
+        self._waiters = collections.OrderedDict()
+        self._driver = driver
+        if hasattr(driver, "_driver_config"):
+            self.tracer = driver._driver_config.tracer
+        else:
+            self.tracer = tracing.Tracer(None)
+        # Ready sessions ordered by their next keep-alive deadline.
+        self._active_queue = queue.PriorityQueue()
+        self._active_count = 0
+        self._size = size
+        self._req_settings = settings.BaseRequestSettings().with_timeout(3)
+        self._tp = futures.ThreadPoolExecutor(workers_threads_count)
+        self._initializer = initializer
+        self._should_stop = threading.Event()
+        # Seconds before expiry at which a session becomes keep-alive eligible.
+        self._keep_alive_threshold = 4 * 60
+        self._spin_timeout = 30
+        self._event_queue = queue.Queue()
+        self._driver_await_timeout = 3
+        self._event_loop_thread = threading.Thread(target=self.events_loop)
+        self._event_loop_thread.daemon = True
+        self._event_loop_thread.start()
+        self._logger = logger
+        self._min_pool_size = min_pool_size
+        self._terminating = False
+        if self._min_pool_size > self._size:
+            raise ValueError("Invalid min pool size value!")
+        for _ in range(self._min_pool_size):
+            self._prepare(self._create())
+
+    def stop(self, timeout):
+        """Stop the pool: cancel waiters, destroy idle sessions, join the loop."""
+        with self._lock:
+            self._logger.debug("Requested session pool stop.")
+            self._event_queue.put(self._terminate_event)
+            self._should_stop.set()
+            self._terminating = True
+
+            self._logger.debug(
+                "Session pool is under stop, cancelling all in flight waiters."
+            )
+            while True:
+                try:
+                    _, waiter = self._waiters.popitem(last=False)
+                    session = self._create()
+                    waiter.set_result(session)
+                    self._logger.debug(
+                        "Waiter %s has been replied with empty session info. Session details: %s.",
+                        waiter,
+                        session,
+                    )
+                except KeyError:
+                    break
+
+            self._logger.debug("Destroying sessions in active queue")
+            while True:
+                try:
+                    _, session = self._active_queue.get(block=False)
+                    self._destroy(session, "session-pool-terminated")
+
+                except queue.Empty:
+                    break
+
+            self._logger.debug("Destroyed active sessions")
+
+        self._event_loop_thread.join(timeout)
+
+    def _terminate_event(self):
+        # Posted to the events loop; StopIteration makes the loop exit.
+        self._logger.debug("Terminated session pool.")
+        raise StopIteration()
+
+    def _delayed_prepare(self, session):
+        # Wait for the driver to become ready before retrying preparation.
+        try:
+            self._driver.wait(self._driver_await_timeout, fail_fast=False)
+        except Exception:
+            pass
+
+        self._prepare(session)
+
+    def pick(self):
+        """Pop a session that is due for keep-alive, or None."""
+        with self._lock:
+            try:
+                priority, session = self._active_queue.get_nowait()
+            except queue.Empty:
+                return None
+
+            till_expire = priority - time.time()
+            if till_expire < self._keep_alive_threshold:
+                return session
+            # Not due for keep-alive yet: put it back with its old priority.
+            self._active_queue.put((priority, session))
+            return None
+
+    def _create(self):
+        """Create a new (not yet server-initialized) session and count it."""
+        with self._lock:
+            session = self._driver.table_client.session()
+            self._logger.debug("Created session %s", session)
+            self._active_count += 1
+            return session
+
+    @property
+    def active_size(self):
+        with self._lock:
+            return self._active_count
+
+    @property
+    def free_size(self):
+        with self._lock:
+            return self._active_queue.qsize()
+
+    @property
+    def busy_size(self):
+        with self._lock:
+            return self._active_count - self._active_queue.qsize()
+
+    @property
+    def max_size(self):
+        return self._size
+
+    @property
+    def waiters_count(self):
+        with self._lock:
+            return len(self._waiters)
+
+    def _is_min_pool_size_satisfied(self, delta=0):
+        # During termination the minimum-size invariant is ignored.
+        if self._terminating:
+            return True
+        return self._active_count + delta >= self._min_pool_size
+
+    def _destroy(self, session, reason):
+        """Account for a dying session and prepare a replacement if needed."""
+        self._logger.debug("Requested session destroy: %s, reason: %s", session, reason)
+        with self._lock:
+            tracing.trace(self.tracer, {"destroy.reason": reason})
+            self._active_count -= 1
+            self._logger.debug(
+                "Session %s is no longer active. Current active count %d.",
+                session,
+                self._active_count,
+            )
+            cnt_waiters = len(self._waiters)
+            if cnt_waiters > 0:
+                self._logger.debug(
+                    "In flight waiters: %d, preparing session %s replacement.",
+                    cnt_waiters,
+                    session,
+                )
+                # we have a waiter that should be replied, so we have to prepare replacement
+                self._prepare(self._create())
+            elif not self._is_min_pool_size_satisfied():
+                self._logger.debug(
+                    "Current session pool size is less than %s, actual size %s",
+                    self._min_pool_size,
+                    self._active_count,
+                )
+                self._prepare(self._create())
+
+            if session.initialized():
+                session.async_delete(self._req_settings)
+                self._logger.debug("Sent delete on session %s", session)
+
+    def put(self, session):
+        """Return a session to the pool or hand it to a pending waiter."""
+        with self._lock:
+            self._logger.debug("Put on session %s", session)
+            if session.closing():
+                self._destroy(session, "session-close")
+                return False
+
+            if session.pending_query():
+                self._destroy(session, "pending-query")
+                return False
+
+            if not session.initialized() or self._should_stop.is_set():
+                self._destroy(session, "not-initialized")
+                # we should probably prepare replacement session here
+                return False
+
+            try:
+                _, waiter = self._waiters.popitem(last=False)
+                waiter.set_result(session)
+                tracing.trace(self.tracer, {"put.to_waiter": True})
+                self._logger.debug("Replying to waiter with a session %s", session)
+            except KeyError:
+                # No waiters: schedule the next keep-alive 10 minutes out.
+                priority = time.time() + 10 * 60
+                tracing.trace(
+                    self.tracer, {"put.to_pool": True, "session.new_priority": priority}
+                )
+                self._active_queue.put((priority, session))
+
+    def _on_session_create(self, session, f):
+        """Callback for session creation: run initializer or park for retry."""
+        with self._lock:
+            try:
+                f.result()
+                if self._initializer is None:
+                    return self.put(session)
+            except issues.Error as e:
+                self._logger.error(
+                    "Failed to create session. Put event to a delayed queue. Reason: %s",
+                    str(e),
+                )
+                return self._event_queue.put(lambda: self._delayed_prepare(session))
+
+            except Exception as e:
+                self._logger.exception(
+                    "Failed to create session. Put event to a delayed queue. Reason: %s",
+                    str(e),
+                )
+                return self._event_queue.put(lambda: self._delayed_prepare(session))
+
+            # Run the user-provided initializer in the worker thread pool.
+            init_f = self._tp.submit(self._initializer, session)
+
+            def _on_initialize(in_f):
+                try:
+                    in_f.result()
+                    self.put(session)
+                except Exception:
+                    self._prepare(session)
+
+            init_f.add_done_callback(_on_initialize)
+
+    def _prepare(self, session):
+        """Start server-side creation of *session* unless it is now useless."""
+        if self._should_stop.is_set():
+            self._destroy(session, "session-pool-terminated")
+            return
+
+        with self._lock:
+            self._logger.debug("Preparing session %s", session)
+            if len(self._waiters) < 1 and self._is_min_pool_size_satisfied(delta=-1):
+                self._logger.info("No pending waiters, will destroy session")
+                return self._destroy(session, "session-useless")
+
+            f = session.async_create(self._req_settings)
+            f.add_done_callback(lambda _: self._on_session_create(session, _))
+
+    def _waiter_cleanup(self, w):
+        # Done-callback: drop a cancelled/settled waiter from the registry.
+        with self._lock:
+            try:
+                self._waiters.pop(w)
+            except KeyError:
+                return None
+
+    def subscribe(self):
+        """Return a future that resolves with a session once one is free."""
+        with self._lock:
+            try:
+                _, session = self._active_queue.get(block=False)
+                tracing.trace(self.tracer, {"acquire.found_free_session": True})
+                return _utilities.wrap_result_in_future(session)
+            except queue.Empty:
+                self._logger.debug(
+                    "Active session queue is empty, subscribe waiter for a session"
+                )
+                waiter = _utilities.future()
+                self._logger.debug("Subscribe waiter %s", waiter)
+                if self._should_stop.is_set():
+                    tracing.trace(
+                        self.tracer,
+                        {
+                            "acquire.found_free_session": False,
+                            "acquire.empty_session": True,
+                        },
+                    )
+                    session = self._create()
+                    self._logger.debug(
+                        "Session pool is under stop, replying with empty session, %s",
+                        session,
+                    )
+                    waiter.set_result(session)
+                    return waiter
+
+                waiter.add_done_callback(self._waiter_cleanup)
+                self._waiters[waiter] = waiter
+                if self._active_count < self._size:
+                    self._logger.debug(
+                        "Session pool is not large enough (active_count < size: %d < %d). "
+                        "will create a new session.",
+                        self._active_count,
+                        self._size,
+                    )
+                    tracing.trace(
+                        self.tracer,
+                        {
+                            "acquire.found_free_session": False,
+                            "acquire.creating_new_session": True,
+                            "session_pool.active_size": self._active_count,
+                            "session_pool.size": self._size,
+                        },
+                    )
+                    session = self._create()
+                    self._prepare(session)
+                else:
+                    tracing.trace(
+                        self.tracer,
+                        {
+                            "acquire.found_free_session": False,
+                            "acquire.creating_new_session": False,
+                            "session_pool.active_size": self._active_count,
+                            "session_pool.size": self._size,
+                            "acquire.waiting_for_free_session": True,
+                        },
+                    )
+                return waiter
+
+    def unsubscribe(self, waiter):
+        """Withdraw a waiter; recycle its session if it was already signalled."""
+        with self._lock:
+            try:
+                # at first we remove waiter from list of the waiters to ensure
+                # we will not signal it right now
+                self._logger.debug("Unsubscribe on waiter %s", waiter)
+                self._waiters.pop(waiter)
+            except KeyError:
+                try:
+                    session = waiter.result(timeout=-1)
+                    self.put(session)
+                except (futures.CancelledError, futures.TimeoutError):
+                    # future is cancelled and not signalled
+                    pass
+
+    def _on_keep_alive(self, session, f):
+        try:
+            self.put(f.result())
+            # additional logic should be added to check
+            # current status of the session
+        except issues.Error:
+            self._destroy(session, "keep-alive-error")
+        except Exception:
+            self._destroy(session, "keep-alive-error")
+
+    def acquire(self, blocking=True, timeout=None):
+        """Acquire a session; raises SessionPoolEmpty on timeout or empty pool."""
+        waiter = self.subscribe()
+        has_result = False
+        if blocking:
+            tracing.trace(self.tracer, {"acquire.blocking": True})
+            try:
+                tracing.trace(self.tracer, {"acquire.blocking.wait": True})
+                session = waiter.result(timeout=timeout)
+                has_result = True
+                return session
+            except futures.TimeoutError:
+                tracing.trace(self.tracer, {"acquire.blocking.timeout": True})
+                raise issues.SessionPoolEmpty("Timeout on session acquire.")
+            finally:
+                if not has_result:
+                    self.unsubscribe(waiter)
+
+        else:
+            tracing.trace(self.tracer, {"acquire.nonblocking": True})
+            try:
+                session = waiter.result(timeout=-1)
+                has_result = True
+                return session
+            except futures.TimeoutError:
+                raise issues.SessionPoolEmpty("Session pool is empty.")
+            finally:
+                if not has_result:
+                    self.unsubscribe(waiter)
+
+    def events_loop(self):
+        """Background loop: run posted events; keep sessions alive when idle."""
+        while True:
+            try:
+                if self._should_stop.is_set():
+                    break
+
+                event = self._event_queue.get(timeout=self._spin_timeout)
+                event()
+            except StopIteration:
+                break
+
+            except queue.Empty:
+                # Idle window: keep alive every session close to expiry.
+                while True:
+                    if not self.send_keep_alive():
+                        break
+
+    def send_keep_alive(self):
+        """Issue one KeepAlive for a due session; False when none are due."""
+        session = self.pick()
+        if session is None:
+            return False
+
+        if self._should_stop.is_set():
+            self._destroy(session, "session-pool-terminated")
+            return False
+
+        f = session.async_keep_alive(self._req_settings)
+        f.add_done_callback(lambda q: self._on_keep_alive(session, q))
+        return True
diff --git a/contrib/python/ydb/py2/ydb/_tx_ctx_impl.py b/contrib/python/ydb/py2/ydb/_tx_ctx_impl.py
new file mode 100644
index 0000000000..925d74b441
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/_tx_ctx_impl.py
@@ -0,0 +1,179 @@
+from . import issues, _session_impl, _apis, types, convert
+import functools
+
+
+def reset_tx_id_handler(func):
+    """Decorator: mark the transaction dead and clear its id on any YDB error."""
+
+    @functools.wraps(func)
+    def decorator(rpc_state, response_pb, session_state, tx_state, *args, **kwargs):
+        try:
+            return func(
+                rpc_state, response_pb, session_state, tx_state, *args, **kwargs
+            )
+        except issues.Error:
+            tx_state.tx_id = None
+            tx_state.dead = True
+            raise
+
+    return decorator
+
+
+def not_found_handler(func):
+    """Decorator: evict the query from the session cache on NotFound.
+
+    NotFound here indicates a stale prepared-query id on the server side.
+    """
+
+    @functools.wraps(func)
+    def decorator(
+        rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs
+    ):
+        try:
+            return func(
+                rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs
+            )
+        except issues.NotFound:
+            session_state.erase(query)
+            raise
+
+    return decorator
+
+
+def wrap_tx_factory_handler(func):
+    """Decorator: refuse to build requests for an already broken transaction."""
+
+    @functools.wraps(func)
+    def decorator(session_state, tx_state, *args, **kwargs):
+        if tx_state.dead:
+            raise issues.PreconditionFailed(
+                "Failed to perform action on broken transaction context!"
+            )
+        return func(session_state, tx_state, *args, **kwargs)
+
+    return decorator
+
+
+@_session_impl.bad_session_handler
+@reset_tx_id_handler
+def wrap_result_on_rollback_or_commit_tx(
+    rpc_state, response_pb, session_state, tx_state, tx
+):
+    """Finish a commit/rollback: validate the response and clear the tx id."""
+    session_state.complete_query()
+    issues._process_response(response_pb.operation)
+    # transaction successfully committed or rolled back
+    tx_state.tx_id = None
+    return tx
+
+
+@_session_impl.bad_session_handler
+def wrap_tx_begin_response(rpc_state, response_pb, session_state, tx_state, tx):
+    """Store the server-assigned transaction id from a BeginTransaction reply."""
+    session_state.complete_query()
+    issues._process_response(response_pb.operation)
+    message = _apis.ydb_table.BeginTransactionResult()
+    response_pb.operation.result.Unpack(message)
+    tx_state.tx_id = message.tx_meta.id
+    return tx
+
+
+@wrap_tx_factory_handler
+def begin_request_factory(session_state, tx_state):
+    """Build a BeginTransactionRequest carrying the tx mode settings."""
+    request = _apis.ydb_table.BeginTransactionRequest()
+    request = session_state.start_query().attach_request(request)
+    request.tx_settings.MergeFrom(_construct_tx_settings(tx_state))
+    return request
+
+
+@wrap_tx_factory_handler
+def rollback_request_factory(session_state, tx_state):
+    """Build a RollbackTransactionRequest for the current transaction id."""
+    request = _apis.ydb_table.RollbackTransactionRequest()
+    request.tx_id = tx_state.tx_id
+    request = session_state.start_query().attach_request(request)
+    return request
+
+
+@wrap_tx_factory_handler
+def commit_request_factory(session_state, tx_state):
+    """Build a CommitTransactionRequest for the current transaction id."""
+    request = _apis.ydb_table.CommitTransactionRequest()
+    request.tx_id = tx_state.tx_id
+    request = session_state.start_query().attach_request(request)
+    return request
+
+
+class TxState(object):
+    __slots__ = ("tx_id", "tx_mode", "dead", "initialized")
+
+    def __init__(self, tx_mode):
+        """
+        Holds transaction context manager info.
+
+        :param tx_mode: A mode of transaction
+        """
+        self.tx_id = None  # server-assigned id; None until a tx is begun
+        self.tx_mode = tx_mode
+        self.dead = False  # set once the tx context becomes unusable
+        self.initialized = False
+
+
+def _construct_tx_settings(tx_state):
+    """Build TransactionSettings: the tx mode name selects the oneof field."""
+    tx_settings = _apis.ydb_table.TransactionSettings()
+    mode_property = getattr(tx_settings, tx_state.tx_mode.name)
+    mode_property.MergeFrom(tx_state.tx_mode.settings)
+    return tx_settings
+
+
+@wrap_tx_factory_handler
+def execute_request_factory(
+    session_state, tx_state, query, parameters, commit_tx, settings
+):
+    """Build an ExecuteDataQueryRequest with tx control and cache policy.
+
+    A cached prepared-query id is preferred; otherwise the query text is
+    sent inline and optionally asked to be kept in the server cache.
+    """
+    data_query, query_id = session_state.lookup(query)
+    parameters_types = {}
+
+    if query_id is not None:
+        query_pb = _apis.ydb_table.Query(id=query_id)
+        parameters_types = data_query.parameters_types
+    else:
+        if data_query is not None:
+            # client cache disabled for send query text every time
+            yql_text = data_query.yql_text
+            parameters_types = data_query.parameters_types
+        elif isinstance(query, types.DataQuery):
+            yql_text = query.yql_text
+            parameters_types = query.parameters_types
+        else:
+            yql_text = query
+        query_pb = _apis.ydb_table.Query(yql_text=yql_text)
+    request = _apis.ydb_table.ExecuteDataQueryRequest(
+        parameters=convert.parameters_to_pb(parameters_types, parameters)
+    )
+
+    if query_id is not None:
+        # SDK not send query text and nothing save to cache
+        keep_in_cache = False
+    elif settings is not None and hasattr(settings, "keep_in_cache"):
+        keep_in_cache = settings.keep_in_cache
+    elif parameters:
+        keep_in_cache = True
+    else:
+        keep_in_cache = False
+
+    if keep_in_cache:
+        request.query_cache_policy.keep_in_cache = True
+
+    request.query.MergeFrom(query_pb)
+    tx_control = _apis.ydb_table.TransactionControl()
+    tx_control.commit_tx = commit_tx
+    if tx_state.tx_id is not None:
+        tx_control.tx_id = tx_state.tx_id
+    else:
+        # No transaction yet: ask the server to begin one in the same call.
+        tx_control.begin_tx.MergeFrom(_construct_tx_settings(tx_state))
+    request.tx_control.MergeFrom(tx_control)
+    request = session_state.start_query().attach_request(request)
+    return request
+
+
+@_session_impl.bad_session_handler
+@reset_tx_id_handler
+@not_found_handler
+def wrap_result_and_tx_id(rpc_state, response_pb, session_state, tx_state, query):
+    """Unpack ExecuteDataQuery results; refresh the query cache and tx id."""
+    session_state.complete_query()
+    issues._process_response(response_pb.operation)
+    message = _apis.ydb_table.ExecuteQueryResult()
+    response_pb.operation.result.Unpack(message)
+    if message.query_meta.id:
+        session_state.keep(query, message.query_meta.id)
+    tx_state.tx_id = None if not message.tx_meta.id else message.tx_meta.id
+    return convert.ResultSets(message.result_sets, session_state.table_client_settings)
diff --git a/contrib/python/ydb/py2/ydb/_utilities.py b/contrib/python/ydb/py2/ydb/_utilities.py
new file mode 100644
index 0000000000..32419b1bf9
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/_utilities.py
@@ -0,0 +1,161 @@
+# -*- coding: utf-8 -*-
+import six
+import codecs
+from concurrent import futures
+import functools
+import hashlib
+import collections
+from . import ydb_version
+
+try:
+ from . import interceptor
+except ImportError:
+ interceptor = None
+
+
# Endpoint URI scheme prefixes recognized by this SDK.
_grpcs_protocol = "grpcs://"
_grpc_protocol = "grpc://"


def wrap_result_in_future(result):
    """Return an already-resolved Future holding *result*."""
    resolved = futures.Future()
    resolved.set_result(result)
    return resolved


def wrap_exception_in_future(exc):
    """Return an already-resolved Future that raises *exc* on ``.result()``."""
    failed = futures.Future()
    failed.set_exception(exc)
    return failed


def future():
    """Create a fresh, pending Future."""
    return futures.Future()
+
+
def x_ydb_sdk_build_info_header():
    """Metadata pair advertising this SDK's build version to the server."""
    value = "ydb-python-sdk/" + ydb_version.VERSION
    return ("x-ydb-sdk-build-info", value)
+
+
def is_secure_protocol(endpoint):
    """Return True when *endpoint* explicitly uses the TLS ("grpcs://") scheme."""
    # Consistency fix: reuse the module-level constant instead of duplicating
    # the literal, matching wrap_endpoint/parse_connection_string.
    return endpoint.startswith(_grpcs_protocol)
+
+
def wrap_endpoint(endpoint):
    """Strip a known scheme prefix ("grpcs://" or "grpc://") from *endpoint*."""
    for prefix in (_grpcs_protocol, _grpc_protocol):
        if endpoint.startswith(prefix):
            return endpoint[len(prefix):]
    return endpoint
+
+
def parse_connection_string(connection_string):
    """Split a connection string into ``(endpoint, database)``.

    A scheme-less string defaults to the secure "grpcs://" scheme.  The
    database must be supplied as a ``?database=...`` query parameter.

    :raises AssertionError: when no database parameter is present
    """
    cs = connection_string
    if not cs.startswith(_grpc_protocol) and not cs.startswith(_grpcs_protocol):
        # default is grpcs
        cs = _grpcs_protocol + cs

    # BUG FIX: parse the normalized string (cs).  Parsing the raw input made
    # the default-scheme branch above a dead store, so scheme-less strings
    # produced a wrong endpoint.
    p = six.moves.urllib.parse.urlparse(cs)
    b = six.moves.urllib.parse.parse_qs(p.query)
    database = b.get("database", [])
    assert len(database) > 0

    return p.scheme + "://" + p.netloc, database[0]
+
+
def wrap_async_call_exceptions(f):
    """Decorator: convert any exception raised by *f* into a failed Future.

    Ensures async-style callers always receive a Future, never a raised
    exception.
    """

    @functools.wraps(f)
    def safe_call(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except Exception as exc:  # deliberately broad: mirror the async contract
            return wrap_exception_in_future(exc)

    return safe_call
+
+
def get_query_hash(yql_text):
    """Return a stable sha256 hex digest of the query text.

    Accepts both byte strings and text; bytes are decoded as utf-8 first.
    """
    try:
        normalized = six.text_type(yql_text, "utf-8")
    except TypeError:
        # Already a text object - no decoding step is needed.
        normalized = six.text_type(yql_text)
    return hashlib.sha256(normalized.encode("utf-8")).hexdigest()
+
+
class LRUCache(object):
    """A minimal least-recently-used cache on top of OrderedDict.

    ``get`` and ``put`` refresh an entry's recency; eviction removes the
    oldest entries once ``capacity`` is exceeded.
    """

    def __init__(self, capacity=1000):
        # OrderedDict keeps insertion order: front = least recently used.
        self.items = collections.OrderedDict()
        self.capacity = capacity

    def put(self, key, value):
        """Insert or update *key*, refreshing its recency, then evict overflow."""
        # BUG FIX: overwriting an existing key previously kept its old
        # position in the order, so a freshly updated entry could still be
        # evicted first.  Re-inserting moves it to the most-recent end.
        self.items.pop(key, None)
        self.items[key] = value
        while len(self.items) > self.capacity:
            self.items.popitem(last=False)

    def get(self, key, _default):
        """Return the cached value for *key* (refreshing recency) or *_default*."""
        if key not in self.items:
            return _default
        value = self.items.pop(key)
        self.items[key] = value
        return value

    def erase(self, key):
        """Remove *key* from the cache; raises KeyError if absent."""
        self.items.pop(key)
+
+
def from_bytes(val):
    """
    Translates value into valid utf8 string
    :param val: A value to translate
    :return: A valid utf8 string, or *val* unchanged when it cannot be decoded
    """
    try:
        return codecs.decode(val, "utf8")
    except (UnicodeError, TypeError):
        # UnicodeError covers both UnicodeDecodeError (invalid utf-8 bytes,
        # previously uncaught and leaking out of this best-effort helper)
        # and the legacy py2 UnicodeEncodeError path.
        return val
+
+
class AsyncResponseIterator(object):
    """Iterates an async gRPC stream, applying *wrapper* to every message.

    Items are produced through the interceptor helper so cancellation and
    error translation stay consistent with the rest of the SDK.
    """

    def __init__(self, it, wrapper):
        self.it = it
        self.wrapper = wrapper

    def cancel(self):
        """Cancel the underlying stream call; returns self for chaining."""
        self.it.cancel()
        return self

    def __iter__(self):
        return self

    def __next__(self):
        return interceptor.operate_async_stream_call(self.it, self.wrapper)

    def _next(self):
        return self.__next__()

    def next(self):  # py2 iteration protocol
        return self.__next__()
+
+
class SyncResponseIterator(object):
    """Iterates a synchronous gRPC stream, applying *wrapper* to every message."""

    def __init__(self, it, wrapper):
        self.it = it
        self.wrapper = wrapper

    def cancel(self):
        """Cancel the underlying stream call; returns self for chaining."""
        self.it.cancel()
        return self

    def __iter__(self):
        return self

    def __next__(self):
        return self.wrapper(next(self.it))

    def _next(self):
        return self.__next__()

    def next(self):  # py2 iteration protocol
        return self.__next__()
diff --git a/contrib/python/ydb/py2/ydb/auth_helpers.py b/contrib/python/ydb/py2/ydb/auth_helpers.py
new file mode 100644
index 0000000000..5d889555df
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/auth_helpers.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+import os
+
+from . import credentials, tracing
+import warnings
+
+
def read_bytes(f):
    """Read the whole file at path *f* and return its raw bytes."""
    with open(f, "rb") as stream:
        return stream.read()
+
+
def load_ydb_root_certificate():
    """Return the CA bundle referenced by YDB_SSL_ROOT_CERTIFICATES_FILE.

    Returns None when the variable is unset or points at a missing file.
    """
    path = os.getenv("YDB_SSL_ROOT_CERTIFICATES_FILE", None)
    if path is None or not os.path.exists(path):
        return None
    return read_bytes(path)
+
+
def construct_credentials_from_environ(tracer=None):
    """Build credentials from process environment variables (deprecated).

    Checked in order: USE_METADATA_CREDENTIALS, YDB_TOKEN, SA_KEY_FILE.
    Returns None when none of them is configured.
    """
    tracer = tracer if tracer is not None else tracing.Tracer(None)
    warnings.warn(
        "using construct_credentials_from_environ method for credentials instantiation is deprecated and will be "
        "removed in the future major releases. Please instantialize credentials by default or provide correct credentials "
        "instance to the Driver."
    )

    use_metadata = os.getenv("USE_METADATA_CREDENTIALS")
    if use_metadata is not None and int(use_metadata) == 1:
        # dynamically import the required authentication library
        import ydb.iam

        tracing.trace(tracer, {"credentials.metadata": True})
        return ydb.iam.MetadataUrlCredentials()

    token = os.getenv("YDB_TOKEN")
    if token is not None:
        tracing.trace(tracer, {"credentials.access_token": True})
        return credentials.AuthTokenCredentials(token)

    sa_key_file = os.getenv("SA_KEY_FILE")
    if sa_key_file is not None:
        import ydb.iam

        tracing.trace(tracer, {"credentials.sa_key_file": True})
        root_certificates_file = os.getenv("SSL_ROOT_CERTIFICATES_FILE", None)
        iam_channel_credentials = {}
        if root_certificates_file is not None:
            iam_channel_credentials = {
                "root_certificates": read_bytes(root_certificates_file)
            }
        return ydb.iam.ServiceAccountCredentials.from_file(
            sa_key_file,
            iam_channel_credentials=iam_channel_credentials,
            iam_endpoint=os.getenv("IAM_ENDPOINT", "iam.api.cloud.yandex.net:443"),
        )
diff --git a/contrib/python/ydb/py2/ydb/connection.py b/contrib/python/ydb/py2/ydb/connection.py
new file mode 100644
index 0000000000..95db084a3c
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/connection.py
@@ -0,0 +1,550 @@
+# -*- coding: utf-8 -*-
+import logging
+import copy
+from concurrent import futures
+import uuid
+import threading
+import collections
+
+from google.protobuf import text_format
+import grpc
+from . import issues, _apis, _utilities
+from . import default_pem
+
# Service stubs that every Connection pre-instantiates at creation time.
_stubs_list = (
    _apis.TableService.Stub,
    _apis.SchemeService.Stub,
    _apis.DiscoveryService.Stub,
    _apis.CmsService.Stub,
)

logger = logging.getLogger(__name__)
# Default RPC timeout (seconds) used when request settings carry no timeout.
DEFAULT_TIMEOUT = 600
# gRPC metadata keys understood by the YDB server.
YDB_DATABASE_HEADER = "x-ydb-database"
YDB_TRACE_ID_HEADER = "x-ydb-trace-id"
YDB_REQUEST_TYPE_HEADER = "x-ydb-request-type"
+
+
+def _message_to_string(message):
+ """
+ Constructs a string representation of provided message or generator
+ :param message: A protocol buffer or generator instance
+ :return: A string
+ """
+ try:
+ return text_format.MessageToString(message, as_one_line=True)
+ except Exception:
+ return str(message)
+
+
def _log_response(rpc_state, response):
    """Debug-log *response* for the given RPC.

    Skips the (potentially expensive) protobuf formatting entirely when
    DEBUG logging is disabled.
    """
    if not logger.isEnabledFor(logging.DEBUG):
        return
    logger.debug("%s: response = { %s }", rpc_state, _message_to_string(response))
+
+
def _log_request(rpc_state, request):
    """Debug-log *request* for the given RPC.

    Skips the (potentially expensive) protobuf formatting entirely when
    DEBUG logging is disabled.
    """
    if not logger.isEnabledFor(logging.DEBUG):
        return
    logger.debug("%s: request = { %s }", rpc_state, _message_to_string(request))
+
+
def _rpc_error_handler(rpc_state, rpc_error, on_disconnected=None):
    """
    RPC call error handler, that translates gRPC error into YDB issue
    :param rpc_state: A state of rpc
    :param rpc_error: an underlying rpc error to handle
    :param on_disconnected: a handler to call on disconnected connection
    :return: the YDB issue to raise for this error
    """
    logger.info("%s: received error, %s", rpc_state, rpc_error)
    if isinstance(rpc_error, grpc.Call):
        code = rpc_error.code()
        if code == grpc.StatusCode.UNAUTHENTICATED:
            return issues.Unauthenticated(rpc_error.details())
        if code == grpc.StatusCode.DEADLINE_EXCEEDED:
            return issues.DeadlineExceed("Deadline exceeded on request")
        if code == grpc.StatusCode.UNIMPLEMENTED:
            return issues.Unimplemented(
                "Method or feature is not implemented on server!"
            )

    # Anything else is treated as a broken channel.
    logger.debug("%s: unhandled rpc error, disconnecting channel", rpc_state)
    if on_disconnected is not None:
        on_disconnected()

    return issues.ConnectionLost("Rpc error, reason %s" % str(rpc_error))
+
+
def _on_response_callback(
    rpc_state, call_state_unref, wrap_result=None, on_disconnected=None, wrap_args=()
):
    """
    Callback to be executed on received RPC response
    :param rpc_state: state of the finished RPC
    :param call_state_unref: callable that releases the call's registration
        on the owning Connection (always invoked, success or failure)
    :param wrap_result: A callable that wraps received response
    :param on_disconnected: A handler to executed on disconnected channel
    :param wrap_args: An arguments to be passed into wrap result callable
    :return: None
    """
    try:
        logger.debug("%s: on response callback started", rpc_state)
        response = rpc_state.rendezvous.result()
        _log_response(rpc_state, response)
        response = (
            response
            if wrap_result is None
            else wrap_result(rpc_state, response, *wrap_args)
        )
        rpc_state.result_future.set_result(response)
        logger.debug("%s: on response callback success", rpc_state)
    except grpc.FutureCancelledError as e:
        # Forward cancellation unless the consumer already cancelled the
        # result future itself (setting an exception then would raise).
        logger.debug("%s: request execution cancelled", rpc_state)
        if not rpc_state.result_future.cancelled():
            rpc_state.result_future.set_exception(e)

    except grpc.RpcError as rpc_call_error:
        # Translate the gRPC error into a YDB issue (may trigger on_disconnected).
        rpc_state.result_future.set_exception(
            _rpc_error_handler(rpc_state, rpc_call_error, on_disconnected)
        )

    except issues.Error as e:
        # wrap_result raised a YDB issue: propagate it as-is.
        logger.info("%s: received exception, %s", rpc_state, str(e))
        rpc_state.result_future.set_exception(e)

    except Exception as e:
        # Unexpected failure: surface it as a lost connection.
        logger.error("%s: received exception, %s", rpc_state, str(e))
        rpc_state.result_future.set_exception(issues.ConnectionLost(str(e)))

    call_state_unref()
+
+
def _construct_metadata(driver_config, settings):
    """
    Translates request settings into RPC metadata
    :param driver_config: A driver config
    :param settings: An instance of BaseRequestSettings
    :return: RPC metadata as a list of (key, value) pairs
    """
    metadata = []
    if driver_config.database is not None:
        metadata.append((YDB_DATABASE_HEADER, driver_config.database))

    if driver_config.credentials is not None and getattr(
        settings, "need_rpc_auth", True
    ):
        metadata.extend(driver_config.credentials.auth_metadata())

    if settings is not None:
        for header, value in (
            (YDB_TRACE_ID_HEADER, settings.trace_id),
            (YDB_REQUEST_TYPE_HEADER, settings.request_type),
        ):
            if value is not None:
                metadata.append((header, value))
        metadata.extend(getattr(settings, "headers", []))

    metadata.append(_utilities.x_ydb_sdk_build_info_header())
    return metadata
+
+
+def _get_request_timeout(settings):
+ """
+ Extracts RPC timeout from request settings
+ :param settings: an instance of BaseRequestSettings
+ :return: timeout of RPC execution
+ """
+ if settings is None or settings.timeout is None:
+ return DEFAULT_TIMEOUT
+ return settings.timeout
+
+
class EndpointOptions(object):
    """Per-endpoint channel tweaks: TLS name override and discovery node id."""

    __slots__ = ("ssl_target_name_override", "node_id")

    def __init__(self, ssl_target_name_override=None, node_id=None):
        self.ssl_target_name_override = ssl_target_name_override
        self.node_id = node_id
+
+
+def _construct_channel_options(driver_config, endpoint_options=None):
+ """
+ Constructs gRPC channel initialization options
+ :param driver_config: A driver config instance
+ :param endpoint_options: Endpoint options
+ :return: A channel initialization options
+ """
+ _max_message_size = 64 * 10**6
+ _default_connect_options = [
+ ("grpc.max_receive_message_length", _max_message_size),
+ ("grpc.max_send_message_length", _max_message_size),
+ ("grpc.primary_user_agent", driver_config.primary_user_agent),
+ (
+ "grpc.lb_policy_name",
+ getattr(driver_config, "grpc_lb_policy_name", "round_robin"),
+ ),
+ ]
+ if driver_config.grpc_keep_alive_timeout is not None:
+ _default_connect_options.extend(
+ [
+ ("grpc.keepalive_time_ms", driver_config.grpc_keep_alive_timeout >> 3),
+ ("grpc.keepalive_timeout_ms", driver_config.grpc_keep_alive_timeout),
+ ("grpc.http2.max_pings_without_data", 0),
+ ("grpc.keepalive_permit_without_calls", 0),
+ ]
+ )
+ if endpoint_options is not None:
+ if endpoint_options.ssl_target_name_override:
+ _default_connect_options.append(
+ (
+ "grpc.ssl_target_name_override",
+ endpoint_options.ssl_target_name_override,
+ )
+ )
+ if driver_config.channel_options is None:
+ return _default_connect_options
+ channel_options = copy.deepcopy(driver_config.channel_options)
+ custom_options_keys = set(i[0] for i in driver_config.channel_options)
+ for item in filter(
+ lambda x: x[0] not in custom_options_keys, _default_connect_options
+ ):
+ channel_options.append(item)
+ return channel_options
+
+
+class _RpcState(object):
+ __slots__ = (
+ "rpc",
+ "request_id",
+ "result_future",
+ "rpc_name",
+ "endpoint",
+ "rendezvous",
+ "metadata_kv",
+ "endpoint_key",
+ )
+
+ def __init__(self, stub_instance, rpc_name, endpoint, endpoint_key):
+ """Stores all RPC related data"""
+ self.rpc_name = rpc_name
+ self.rpc = getattr(stub_instance, rpc_name)
+ self.request_id = uuid.uuid4()
+ self.endpoint = endpoint
+ self.rendezvous = None
+ self.metadata_kv = None
+ self.endpoint_key = endpoint_key
+
+ def __str__(self):
+ return "RpcState(%s, %s, %s)" % (self.rpc_name, self.request_id, self.endpoint)
+
+ def __call__(self, *args, **kwargs):
+ """Execute a RPC."""
+ try:
+ response, rendezvous = self.rpc.with_call(*args, **kwargs)
+ self.rendezvous = rendezvous
+ return response
+ except AttributeError:
+ return self.rpc(*args, **kwargs)
+
+ def trailing_metadata(self):
+ """Trailing metadata of the call."""
+ if self.metadata_kv is None:
+
+ self.metadata_kv = collections.defaultdict(set)
+ for metadatum in self.rendezvous.trailing_metadata():
+ self.metadata_kv[metadatum.key].add(metadatum.value)
+
+ return self.metadata_kv
+
+ def future(self, *args, **kwargs):
+ self.rendezvous = self.rpc.future(*args, **kwargs)
+ self.result_future = futures.Future()
+
+ def _cancel_callback(f):
+ """forwards cancel to gPRC future"""
+ if f.cancelled():
+ self.rendezvous.cancel()
+
+ self.rendezvous.add_done_callback(_cancel_callback)
+ return self.rendezvous, self.result_future
+
+
+_nanos_in_second = 10**9
+
+
+def _set_duration(duration_value, seconds_float):
+ duration_value.seconds = int(seconds_float)
+ duration_value.nanos = int((seconds_float - int(seconds_float)) * _nanos_in_second)
+ return duration_value
+
+
def _set_server_timeouts(request, settings, default_value):
    """Propagate operation timeout / cancel-after into request.operation_params.

    Requests without operation_params are left untouched; a missing or None
    setting falls back to *default_value*.
    """
    if not hasattr(request, "operation_params"):
        return

    def _resolved(name):
        value = getattr(settings, name, default_value)
        return default_value if value is None else value

    _set_duration(
        request.operation_params.operation_timeout, _resolved("operation_timeout")
    )
    _set_duration(request.operation_params.cancel_after, _resolved("cancel_after"))
+
+
def channel_factory(
    endpoint, driver_config, channel_provider=None, endpoint_options=None
):
    """Create a gRPC channel (secure or insecure) for *endpoint*.

    TLS is used when the config supplies root certificates or sets
    secure_channel; a missing CA bundle falls back to the bundled default PEM.
    """
    provider = channel_provider if channel_provider is not None else grpc
    options = _construct_channel_options(driver_config, endpoint_options)
    logger.debug("Channel options: {}".format(options))
    compression = getattr(driver_config, "compression", None)

    use_tls = (
        driver_config.root_certificates is not None or driver_config.secure_channel
    )
    if not use_tls:
        return provider.insecure_channel(endpoint, options, compression=compression)

    root_certificates = driver_config.root_certificates
    if root_certificates is None:
        root_certificates = default_pem.load_default_pem()
    credentials = grpc.ssl_channel_credentials(
        root_certificates, driver_config.private_key, driver_config.certificate_chain
    )
    return provider.secure_channel(
        endpoint, credentials, options, compression=compression
    )
+
+
class EndpointKey(object):
    """Identity of a discovered endpoint: its address plus discovery node id."""

    __slots__ = ("endpoint", "node_id")

    def __init__(self, endpoint, node_id):
        self.endpoint = endpoint
        self.node_id = node_id
+
+
class Connection(object):
    # Wraps one gRPC channel plus the bookkeeping needed to close it safely:
    # `calls` counts in-flight RPCs (guarded by `lock`); once `closing` is set,
    # the channel is destroyed only after the last call finishes.
    __slots__ = (
        "endpoint",
        "_channel",
        "_call_states",
        "_stub_instances",
        "_driver_config",
        "_cleanup_callbacks",
        "__weakref__",
        "lock",
        "calls",
        "closing",
        "endpoint_key",
        "node_id",
    )

    def __init__(self, endpoint, driver_config=None, endpoint_options=None):
        """
        Object that wraps gRPC channel and encapsulates gRPC request execution logic
        :param endpoint: endpoint to connect (in pattern host:port), constructed by user or
        discovered by the YDB endpoint discovery mechanism
        :param driver_config: A driver config instance to be used for RPC call interception
        :param endpoint_options: optional EndpointOptions (TLS name override, node id)
        """
        global _stubs_list
        self.endpoint = endpoint
        self.node_id = getattr(endpoint_options, "node_id", None)
        self.endpoint_key = EndpointKey(
            endpoint, getattr(endpoint_options, "node_id", None)
        )
        self._channel = channel_factory(
            self.endpoint, driver_config, endpoint_options=endpoint_options
        )
        self._driver_config = driver_config
        self._call_states = {}
        self._stub_instances = {}
        self._cleanup_callbacks = []
        # pre-initialize stubs
        for stub in _stubs_list:
            self._stub_instances[stub] = stub(self._channel)
        self.lock = threading.RLock()
        self.calls = 0
        self.closing = False

    def _prepare_stub_instance(self, stub):
        # Lazily instantiate stubs that are not in the pre-initialized set.
        if stub not in self._stub_instances:
            self._stub_instances[stub] = stub(self._channel)

    def add_cleanup_callback(self, callback):
        # Registered callbacks run (with this connection) when close() starts.
        self._cleanup_callbacks.append(callback)

    def _prepare_call(self, stub, rpc_name, request, settings):
        """Resolve timeout/metadata and register the call; returns (state, timeout, metadata).

        Raises issues.ConnectionLost when the connection is already closing.
        """
        timeout, metadata = _get_request_timeout(settings), _construct_metadata(
            self._driver_config, settings
        )
        _set_server_timeouts(request, settings, timeout)
        self._prepare_stub_instance(stub)
        rpc_state = _RpcState(
            self._stub_instances[stub], rpc_name, self.endpoint, self.endpoint_key
        )
        logger.debug("%s: creating call state", rpc_state)
        with self.lock:
            if self.closing:
                raise issues.ConnectionLost("Couldn't start call")
            self.calls += 1
            self._call_states[rpc_state.request_id] = rpc_state
        # Call successfully prepared and registered
        _log_request(rpc_state, request)
        return rpc_state, timeout, metadata

    def _finish_call(self, call_state):
        """Unregister a finished call; destroy the channel if close() is pending."""
        with self.lock:
            self.calls -= 1
            self._call_states.pop(call_state.request_id, None)
            # Call successfully finished
            if self.closing and self.calls == 0:
                # Channel is closing and we have to destroy channel
                self.destroy()

    def future(
        self,
        request,
        stub,
        rpc_name,
        wrap_result=None,
        settings=None,
        wrap_args=(),
        on_disconnected=None,
    ):
        """
        Sends request constructed by client
        :param request: A request constructed by client
        :param stub: A stub instance to wrap channel
        :param rpc_name: A name of RPC to be executed
        :param wrap_result: A callable that intercepts call and wraps received response
        :param settings: An instance of BaseRequestSettings that can be used
        for RPC metadata construction
        :param on_disconnected: A callable to be executed when underlying channel becomes disconnected
        :param wrap_args: And arguments to be passed into wrap_result callable
        :return: A future of computation
        """
        rpc_state, timeout, metadata = self._prepare_call(
            stub, rpc_name, request, settings
        )
        rendezvous, result_future = rpc_state.future(
            request,
            timeout,
            metadata,
            compression=getattr(settings, "compression", None),
        )
        # The callback resolves result_future and unregisters the call.
        rendezvous.add_done_callback(
            lambda resp_future: _on_response_callback(
                rpc_state,
                lambda: self._finish_call(rpc_state),
                wrap_result,
                on_disconnected,
                wrap_args,
            )
        )
        return result_future

    def __call__(
        self,
        request,
        stub,
        rpc_name,
        wrap_result=None,
        settings=None,
        wrap_args=(),
        on_disconnected=None,
    ):
        """
        Synchronously sends request constructed by client library
        :param request: A request constructed by client
        :param stub: A stub instance to wrap channel
        :param rpc_name: A name of RPC to be executed
        :param wrap_result: A callable that intercepts call and wraps received response
        :param settings: An instance of BaseRequestSettings that can be used
        for RPC metadata construction
        :param on_disconnected: A callable to be executed when underlying channel becomes disconnected
        :param wrap_args: And arguments to be passed into wrap_result callable
        :return: A result of computation
        """
        rpc_state, timeout, metadata = self._prepare_call(
            stub, rpc_name, request, settings
        )
        try:
            response = rpc_state(
                request,
                timeout,
                metadata,
                compression=getattr(settings, "compression", None),
            )
            _log_response(rpc_state, response)
            return (
                response
                if wrap_result is None
                else wrap_result(rpc_state, response, *wrap_args)
            )
        except grpc.RpcError as rpc_error:
            # Translate into a YDB issue (may trigger on_disconnected).
            raise _rpc_error_handler(rpc_state, rpc_error, on_disconnected)
        finally:
            self._finish_call(rpc_state)

    @classmethod
    def ready_factory(
        cls, endpoint, driver_config, ready_timeout=10, endpoint_options=None
    ):
        """Create a connection and wait until its channel is ready.

        Returns None (and closes the candidate) on timeout or any failure.
        """
        candidate = cls(endpoint, driver_config, endpoint_options=endpoint_options)
        ready_future = candidate.ready_future()
        try:
            ready_future.result(timeout=ready_timeout)
            return candidate
        except grpc.FutureTimeoutError:
            ready_future.cancel()
            candidate.close()
            return None

        except Exception:
            candidate.close()
            return None

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()

    def close(self):
        """
        Closes the underlying gRPC channel
        :return: None
        """
        logger.info("Closing channel for endpoint %s", self.endpoint)
        with self.lock:
            self.closing = True

            for callback in self._cleanup_callbacks:
                callback(self)

            # potentially we should cancel in-flight calls here but currently
            # it is not required since gRPC can successfully cancel these calls manually.

            if self.calls == 0:
                # everything is cancelled/completed and channel can be destroyed
                self.destroy()

    def destroy(self):
        # Defensive hasattr checks: destroy() may run during a partially
        # failed __init__ or with a provider whose channel lacks close().
        if hasattr(self, "_channel") and hasattr(self._channel, "close"):
            self._channel.close()

    def ready_future(self):
        """
        Creates a future that tracks underlying gRPC channel is ready
        :return: A Future object that matures when the underlying channel is ready
        to receive request
        """
        return grpc.channel_ready_future(self._channel)
diff --git a/contrib/python/ydb/py2/ydb/convert.py b/contrib/python/ydb/py2/ydb/convert.py
new file mode 100644
index 0000000000..97d51cf1b6
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/convert.py
@@ -0,0 +1,514 @@
+# -*- coding: utf-8 -*-
+import decimal
+from google.protobuf import struct_pb2
+import six
+
+from . import issues, types, _apis
+
+
# Decimal values travel on the wire as an int128 split into two 64-bit halves.
_SHIFT_BIT_COUNT = 64
_SHIFT = 2**64
_SIGN_BIT = 2**63
# Sentinel int128 payloads representing non-finite Decimal values.
_DecimalNanRepr = 10**35 + 1
_DecimalInfRepr = 10**35
_DecimalSignedInfRepr = -(10**35)
# Filled by _initialize(): protobuf primitive type id -> PrimitiveType member.
_primitive_type_by_id = {}
_default_allow_truncated_result = True


def _initialize():
    # Index PrimitiveType members by protobuf type id for O(1) decode lookup.
    for pt in types.PrimitiveType:
        _primitive_type_by_id[pt._idn_] = pt


_initialize()
+
+
+class _DotDict(dict):
+ def __init__(self, *args, **kwargs):
+ super(_DotDict, self).__init__(*args, **kwargs)
+
+ def __getattr__(self, item):
+ return self[item]
+
+
def _is_decimal_signed(hi_value):
    """True when the int128 sign bit is set in the high 64-bit half."""
    return bool(hi_value & _SIGN_BIT)
+
+
def _pb_to_decimal(type_pb, value_pb, table_client_settings):
    """Decode a wire int128 (two 64-bit halves) into a decimal.Decimal."""
    # Sign-extend the high half when the int128 sign bit is set.
    hi = (
        (value_pb.high_128 - (1 << _SHIFT_BIT_COUNT))
        if _is_decimal_signed(value_pb.high_128)
        else value_pb.high_128
    )
    int128_value = value_pb.low_128 + (hi << _SHIFT_BIT_COUNT)
    # Reserved sentinel payloads map to the non-finite Decimal values.
    if int128_value == _DecimalNanRepr:
        return decimal.Decimal("Nan")
    elif int128_value == _DecimalInfRepr:
        return decimal.Decimal("Inf")
    elif int128_value == _DecimalSignedInfRepr:
        return decimal.Decimal("-Inf")
    # Finite values are scaled integers: shift the point left by `scale`.
    return decimal.Decimal(int128_value) / decimal.Decimal(
        10**type_pb.decimal_type.scale
    )
+
+
def _pb_to_primitive(type_pb, value_pb, table_client_settings):
    """Decode a primitive-typed value via its registered PrimitiveType handler."""
    primitive = _primitive_type_by_id.get(type_pb.type_id)
    return primitive.get_value(value_pb, table_client_settings)
+
+
def _pb_to_optional(type_pb, value_pb, table_client_settings):
    """Decode an optional value: None, an explicitly nested value, or a flattened one."""
    kind = value_pb.WhichOneof("value")
    if kind == "null_flag_value":
        return None
    inner_type = type_pb.optional_type.item
    if kind == "nested_value":
        return _to_native_value(inner_type, value_pb.nested_value, table_client_settings)
    return _to_native_value(inner_type, value_pb, table_client_settings)
+
+
def _pb_to_list(type_pb, value_pb, table_client_settings):
    """Decode a list value element-by-element."""
    item_type = type_pb.list_type.item
    return [
        _to_native_value(item_type, item, table_client_settings)
        for item in value_pb.items
    ]


def _pb_to_tuple(type_pb, value_pb, table_client_settings):
    """Decode a tuple value by pairing element types with element values."""
    pairs = six.moves.zip(type_pb.tuple_type.elements, value_pb.items)
    return tuple(
        _to_native_value(element_type, element, table_client_settings)
        for element_type, element in pairs
    )
+
+
def _pb_to_dict(type_pb, value_pb, table_client_settings):
    """Decode a dict value pair-by-pair into a plain Python dict."""
    key_type = type_pb.dict_type.key
    payload_type = type_pb.dict_type.payload
    result = {}
    for kv_pair in value_pb.pairs:
        key = _to_native_value(key_type, kv_pair.key, table_client_settings)
        result[key] = _to_native_value(
            payload_type, kv_pair.payload, table_client_settings
        )
    return result
+
+
class _Struct(_DotDict):
    # Marker subclass so struct rows are distinguishable from plain dot-dicts.
    pass


def _pb_to_struct(type_pb, value_pb, table_client_settings):
    """Decode a struct value into a _Struct keyed by member names."""
    result = _Struct()
    members = six.moves.zip(type_pb.struct_type.members, value_pb.items)
    for member, item in members:
        result[member.name] = _to_native_value(member.type, item, table_client_settings)
    return result
+
+
def _pb_to_void(type_pb, value_pb, table_client_settings):
    # Void carries no payload; it always decodes to None.
    return None


# Dispatch table: protobuf Type oneof field name -> decoder function.
# Empty list/dict types reuse the regular decoders (their payloads are empty).
_to_native_map = {
    "type_id": _pb_to_primitive,
    "decimal_type": _pb_to_decimal,
    "optional_type": _pb_to_optional,
    "list_type": _pb_to_list,
    "tuple_type": _pb_to_tuple,
    "dict_type": _pb_to_dict,
    "struct_type": _pb_to_struct,
    "void_type": _pb_to_void,
    "empty_list_type": _pb_to_list,
    "empty_dict_type": _pb_to_dict,
}
+
+
def _to_native_value(type_pb, value_pb, table_client_settings=None):
    """Dispatch decoding of *value_pb* based on the kind of *type_pb*."""
    decoder = _to_native_map.get(type_pb.WhichOneof("type"))
    return decoder(type_pb, value_pb, table_client_settings)
+
+
def _decimal_to_int128(value_type, value):
    """Encode a decimal.Decimal as the scaled int128 wire representation.

    Non-finite values map to reserved sentinel payloads; finite values are
    rendered as ``digits * 10**(scale + exponent)`` with the sign applied.

    :raises issues.GenericError: when the value does not fit the declared
        precision/scale of *value_type*
    """
    if value.is_nan():
        return _DecimalNanRepr
    elif value.is_infinite():
        if value.is_signed():
            return _DecimalSignedInfRepr
        return _DecimalInfRepr

    sign, digits, exponent = value.as_tuple()
    # Accumulate the significand as a plain integer, counting its digits.
    int128_value = 0
    digits_count = 0
    for digit in digits:
        int128_value *= 10
        int128_value += digit
        digits_count += 1

    # A negative (scale + exponent) would require fractional digits beyond
    # the declared scale - the value cannot be represented.
    if value_type.decimal_type.scale + exponent < 0:
        raise issues.GenericError("Couldn't parse decimal value, exponent is too large")

    # Shift the significand so the wire integer carries exactly `scale`
    # fractional digits.
    for _ in range(value_type.decimal_type.scale + exponent):
        int128_value *= 10
        digits_count += 1

    if digits_count > value_type.decimal_type.precision + value_type.decimal_type.scale:
        raise issues.GenericError("Couldn't parse decimal value, digits count > 35")

    if sign:
        int128_value *= -1

    return int128_value
+
+
def _decimal_to_pb(value_type, value):
    """Pack a decimal.Decimal into a Value protobuf (int128 in two halves)."""
    value_pb = _apis.ydb_value.Value()
    int128_value = _decimal_to_int128(value_type, value)
    if int128_value < 0:
        # Two's-complement style split: bias the (negative) high half into
        # the unsigned 64-bit range, then strip it from the running value.
        value_pb.high_128 = (int128_value >> _SHIFT_BIT_COUNT) + (1 << _SHIFT_BIT_COUNT)
        int128_value -= (int128_value >> _SHIFT_BIT_COUNT) << _SHIFT_BIT_COUNT
    else:
        value_pb.high_128 = int128_value >> _SHIFT_BIT_COUNT
        int128_value -= value_pb.high_128 << _SHIFT_BIT_COUNT
    # What remains is the low 64 bits.
    value_pb.low_128 = int128_value
    return value_pb


def _primitive_to_pb(type_pb, value):
    """Pack a primitive value via its registered PrimitiveType handler."""
    value_pb = _apis.ydb_value.Value()
    data_type = _primitive_type_by_id.get(type_pb.type_id)
    data_type.set_value(value_pb, value)
    return value_pb
+
+
def _optional_to_pb(type_pb, value):
    """Pack an optional value; None becomes an explicit null flag."""
    if value is None:
        return _apis.ydb_value.Value(null_flag_value=struct_pb2.NULL_VALUE)
    return _from_native_value(type_pb.optional_type.item, value)


def _list_to_pb(type_pb, value):
    """Pack an iterable into a repeated-items Value protobuf."""
    value_pb = _apis.ydb_value.Value()
    for element in value:
        value_item_proto = value_pb.items.add()
        value_item_proto.MergeFrom(_from_native_value(type_pb.list_type.item, element))
    return value_pb


def _tuple_to_pb(type_pb, value):
    """Pack a tuple, pairing declared element types with the given values."""
    value_pb = _apis.ydb_value.Value()
    for element_type, element_value in six.moves.zip(
        type_pb.tuple_type.elements, value
    ):
        value_item_proto = value_pb.items.add()
        value_item_proto.MergeFrom(_from_native_value(element_type, element_value))
    return value_pb


def _dict_to_pb(type_pb, value):
    """Pack a mapping into a repeated-pairs Value protobuf."""
    value_pb = _apis.ydb_value.Value()
    for key, payload in value.items():
        kv_pair = value_pb.pairs.add()
        kv_pair.key.MergeFrom(_from_native_value(type_pb.dict_type.key, key))
        # NOTE(review): truthiness test skips falsy payloads (0, "", False,
        # None), leaving the pair's payload as an empty default Value -
        # confirm this matches server-side defaulting before changing.
        if payload:
            kv_pair.payload.MergeFrom(
                _from_native_value(type_pb.dict_type.payload, payload)
            )
    return value_pb
+
+
def _struct_to_pb(type_pb, value):
    """Pack a struct from either a mapping or an attribute-bearing object.

    Members are emitted in the order declared by the struct type.
    """
    value_pb = _apis.ydb_value.Value()
    for member in type_pb.struct_type.members:
        value_item_proto = value_pb.items.add()
        value_item = (
            value[member.name]
            if isinstance(value, dict)
            else getattr(value, member.name)
        )
        value_item_proto.MergeFrom(_from_native_value(member.type, value_item))
    return value_pb


# Dispatch table: protobuf Type oneof field name -> encoder function.
_from_native_map = {
    "type_id": _primitive_to_pb,
    "decimal_type": _decimal_to_pb,
    "optional_type": _optional_to_pb,
    "list_type": _list_to_pb,
    "tuple_type": _tuple_to_pb,
    "dict_type": _dict_to_pb,
    "struct_type": _struct_to_pb,
}
+
+
def _decimal_type_to_native(type_pb):
    # Rebuild a DecimalType carrying the declared precision and scale.
    return types.DecimalType(type_pb.decimal_type.precision, type_pb.decimal_type.scale)


def _optional_type_to_native(type_pb):
    # Recursively translate the wrapped item type.
    return types.OptionalType(type_to_native(type_pb.optional_type.item))


def _primitive_type_to_native(type_pb):
    # Primitive ids map straight onto registered PrimitiveType members.
    return _primitive_type_by_id.get(type_pb.type_id)


def _null_type_factory(type_pb):
    return types.NullType()


# Dispatch table: protobuf Type oneof field name -> native type factory.
_type_to_native_map = {
    "optional_type": _optional_type_to_native,
    "type_id": _primitive_type_to_native,
    "decimal_type": _decimal_type_to_native,
    "null_type": _null_type_factory,
}


def type_to_native(type_pb):
    """Translate a protobuf Type into the SDK's native type representation."""
    return _type_to_native_map.get(type_pb.WhichOneof("type"))(type_pb)
+
+
def _from_native_value(type_pb, value):
    """Dispatch encoding of a native value based on the kind of *type_pb*."""
    encoder = _from_native_map.get(type_pb.WhichOneof("type"))
    return encoder(type_pb, value)
+
+
def to_typed_value_from_native(type_pb, value):
    """Pack a native Python value into a ydb TypedValue protobuf.

    :param type_pb: protobuf Type describing the value
    :param value: native Python value matching that type
    :return: an _apis.ydb_value.TypedValue
    """
    typed_value = _apis.ydb_value.TypedValue()
    typed_value.type.MergeFrom(type_pb)
    # BUG FIX: this module defines _from_native_value; the previous call to an
    # undefined `from_native_value` raised NameError at runtime.
    typed_value.value.MergeFrom(_from_native_value(type_pb, value))
    return typed_value
+
+
def parameters_to_pb(parameters_types, parameters_values):
    """Convert {name: type} plus {name: value} into {name: TypedValue} protobufs.

    Returns an empty dict when there are no values to send.
    """
    if not parameters_values:
        return {}

    params_pb = {}
    for name, declared_type in six.iteritems(parameters_types):
        type_pb = declared_type
        # Type builders and primitive enums expose their protobuf via .proto.
        if isinstance(declared_type, (types.AbstractTypeBuilder, types.PrimitiveType)):
            type_pb = declared_type.proto
        typed_value = _apis.ydb_value.TypedValue()
        typed_value.type.MergeFrom(type_pb)
        typed_value.value.MergeFrom(
            _from_native_value(type_pb, parameters_values[name])
        )
        params_pb[name] = typed_value
    return params_pb
+
+
def _unwrap_optionality(column):
    """Strip optional_type wrappers; return (decoder, innermost type proto)."""
    c_type = column.type
    while c_type.WhichOneof("type") == "optional_type":
        c_type = c_type.optional_type.item
    return _to_native_map.get(c_type.WhichOneof("type")), c_type
+
+
class _ResultSet(object):
    # One query result set: columns metadata, decoded rows, truncation flag
    # and (optionally) the snapshot the data was read at.
    __slots__ = ("columns", "rows", "truncated", "snapshot")

    def __init__(self, columns, rows, truncated, snapshot=None):
        self.columns = columns
        self.rows = rows
        self.truncated = truncated
        self.snapshot = snapshot

    @classmethod
    def from_message(cls, message, table_client_settings=None, snapshot=None):
        """Eagerly decode every row of a ResultSet protobuf."""
        rows = []
        # prepare column parsers before actual parsing
        column_parsers = []
        if len(message.rows) > 0:
            for column in message.columns:
                # (decoder, innermost type) with optional wrappers stripped.
                column_parsers.append(_unwrap_optionality(column))

        for row_proto in message.rows:
            row = _Row(message.columns)
            for column, value, column_info in six.moves.zip(
                message.columns, row_proto.items, column_parsers
            ):
                v_type = value.WhichOneof("value")
                if v_type == "null_flag_value":
                    row[column.name] = None
                    continue

                # Unwrap nested optional payloads to the concrete value.
                while v_type == "nested_value":
                    value = value.nested_value
                    v_type = value.WhichOneof("value")

                column_parser, unwrapped_type = column_info
                row[column.name] = column_parser(
                    unwrapped_type, value, table_client_settings
                )
            rows.append(row)
        return cls(message.columns, rows, message.truncated, snapshot)

    @classmethod
    def lazy_from_message(cls, message, table_client_settings=None, snapshot=None):
        """Wrap rows lazily: each cell is decoded on first access."""
        rows = _LazyRows(message.rows, table_client_settings, message.columns)
        return cls(message.columns, rows, message.truncated, snapshot)


ResultSet = _ResultSet
+
+
class _Row(_DotDict):
    """A result row: dict-like by column name, with positional and slice access."""

    def __init__(self, columns):
        super(_Row, self).__init__()
        # Column descriptors, kept to resolve integer and slice indexing.
        self._columns = columns

    def __getitem__(self, key):
        # Integer index -> look up by the corresponding column's name.
        if isinstance(key, int):
            return self[self._columns[key].name]
        # Slice -> tuple of values for the selected columns.
        if isinstance(key, slice):
            return tuple(self[column.name] for column in self._columns[key])
        return super(_Row, self).__getitem__(key)
+
+
+class _LazyRowItem:
+
+ __slots__ = ["_item", "_type", "_table_client_settings", "_processed", "_parser"]
+
+ def __init__(self, proto_item, proto_type, table_client_settings, parser):
+ self._item = proto_item
+ self._type = proto_type
+ self._table_client_settings = table_client_settings
+ self._processed = False
+ self._parser = parser
+
+ def get(self):
+ if not self._processed:
+
+ self._item = self._parser(
+ self._type, self._item, self._table_client_settings
+ )
+ self._processed = True
+ return self._item
+
+
class _LazyRow(_DotDict):
    """Read-only result row whose cells are parsed lazily on first access."""

    def __init__(self, columns, proto_row, table_client_settings, parsers):
        super(_LazyRow, self).__init__()
        self._columns = columns
        self._table_client_settings = table_client_settings
        for index, (column, proto_item) in enumerate(
            six.moves.zip(self._columns, proto_row.items)
        ):
            lazy_item = _LazyRowItem(
                proto_item, column.type, table_client_settings, parsers[index]
            )
            super(_LazyRow, self).__setitem__(column.name, lazy_item)

    def __setitem__(self, key, value):
        # The row is a read-only view over the protobuf message.
        raise NotImplementedError("Cannot insert values into lazy row")

    def __getitem__(self, key):
        # Integer index -> look up by the corresponding column's name.
        if isinstance(key, int):
            return self[self._columns[key].name]
        # Slice -> tuple of parsed values for the selected columns.
        if isinstance(key, slice):
            return tuple(self[column.name] for column in self._columns[key])
        # Name lookup: unwrap the lazy item to its native value.
        return super(_LazyRow, self).__getitem__(key).get()

    def __iter__(self):
        return super(_LazyRow, self).__iter__()

    def __next__(self):
        return super(_LazyRow, self).__next__().get()

    def next(self):
        # Python 2 iterator protocol.
        return self.__next__()
+
+
def from_native_value(type_pb, value):
    """Convert a native Python value into a protobuf ``Value`` of type ``type_pb``."""
    return _from_native_value(type_pb, value)
+
+
def to_native_value(typed_value):
    """Convert a protobuf ``TypedValue`` into the corresponding native Python value."""
    return _to_native_value(typed_value.type, typed_value.value)
+
+
class _LazyRows:
    """Sequence-like wrapper that builds ``_LazyRow`` views on demand."""

    def __init__(self, rows, table_client_settings, columns):
        self._rows = rows
        self._table_client_settings = table_client_settings
        self._columns = columns
        # One memoizing parser per column, shared by every produced row.
        self._parsers = [_LazyParser(columns, index) for index in range(len(columns))]

    def __len__(self):
        return len(self._rows)

    def __iter__(self):
        return self.fetchmany(len(self))

    def fetchone(self):
        """Return a lazy view over the first row (does not consume it)."""
        return _LazyRow(
            self._columns, self._rows[0], self._table_client_settings, self._parsers
        )

    def fetchmany(self, number):
        """Yield lazy views over the first ``number`` rows (at most ``len(self)``)."""
        for index in range(min(len(self), number)):
            yield _LazyRow(
                self._columns,
                self._rows[index],
                self._table_client_settings,
                self._parsers,
            )

    def fetchall(self):
        """Yield lazy views over every row."""
        for row in self:
            yield row
+
+
class _LazyParser:
    """Callable that resolves its column's value parser on first use."""

    __slots__ = ["_columns", "_column_index", "_prepared"]

    def __init__(self, columns, column_index):
        self._columns = columns
        self._column_index = column_index
        # Cached parser function, looked up lazily from _to_native_map.
        self._prepared = None

    def __call__(self, *args, **kwargs):
        if self._prepared is None:
            column_type = self._columns[self._column_index].type
            self._prepared = _to_native_map.get(column_type.WhichOneof("type"))
        return self._prepared(*args, **kwargs)
+
+
class ResultSets(list):
    """List of parsed result sets built from their protobuf counterparts.

    :raises issues.TruncatedResponseError: if a result set was truncated by
        the server and the client settings do not allow truncated results
    """

    def __init__(self, result_sets_pb, table_client_settings=None):
        make_lazy = False
        if table_client_settings is not None:
            make_lazy = table_client_settings._make_result_sets_lazy

        allow_truncated_result = _default_allow_truncated_result
        if table_client_settings:
            allow_truncated_result = table_client_settings._allow_truncated_result

        initializer = (
            _ResultSet.lazy_from_message if make_lazy else _ResultSet.from_message
        )
        parsed = []
        for result_set_pb in result_sets_pb:
            result_set = initializer(result_set_pb, table_client_settings)
            if result_set.truncated and not allow_truncated_result:
                raise issues.TruncatedResponseError(
                    "Response for the request was truncated by server"
                )
            parsed.append(result_set)
        super(ResultSets, self).__init__(parsed)
diff --git a/contrib/python/ydb/py2/ydb/credentials.py b/contrib/python/ydb/py2/ydb/credentials.py
new file mode 100644
index 0000000000..8547fbbd7b
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/credentials.py
@@ -0,0 +1,234 @@
+# -*- coding: utf-8 -*-
+import abc
+import six
+from . import tracing, issues, connection
+from . import settings as settings_impl
+import threading
+from concurrent import futures
+import logging
+import time
+
+try:
+ from ydb.public.api.protos import ydb_auth_pb2
+ from ydb.public.api.grpc import ydb_auth_v1_pb2_grpc
+except ImportError:
+ from contrib.ydb.public.api.protos import ydb_auth_pb2
+ from contrib.ydb.public.api.grpc import ydb_auth_v1_pb2_grpc
+
+
+YDB_AUTH_TICKET_HEADER = "x-ydb-auth-ticket"
+logger = logging.getLogger(__name__)
+
+
@six.add_metaclass(abc.ABCMeta)
class AbstractCredentials(object):
    """
    Abstract marker base class for objects that provide authentication
    metadata for YDB requests.
    """
+
+
@six.add_metaclass(abc.ABCMeta)
class Credentials(object):
    """Base class for credential providers; holds the tracer used by subclasses."""

    def __init__(self, tracer=None):
        # Fall back to a no-op tracer when none is supplied.
        self.tracer = tracer if tracer is not None else tracing.Tracer(None)

    @abc.abstractmethod
    def auth_metadata(self):
        """
        :return: An iterable with auth metadata (header name/value pairs)
        """
        pass
+
+
class OneToManyValue(object):
    """A value published by one thread and awaited by many reader threads."""

    def __init__(self):
        self._condition = threading.Condition()
        # Latest published value; None means "not published yet".
        self._value = None

    def consume(self, timeout=3):
        """Return the current value, waiting up to ``timeout`` seconds for the
        first publication. May return None when nothing arrives in time."""
        with self._condition:
            if self._value is None:
                self._condition.wait(timeout=timeout)
            return self._value

    def update(self, n_value):
        """Publish a new value, waking all waiters on the first publication."""
        with self._condition:
            first_publication = self._value is None
            self._value = n_value
            if first_publication:
                self._condition.notify_all()
+
+
class AtMostOneExecution(object):
    """Runs callbacks on a single background thread, at most one at a time.

    While a submitted callback is still running, further submissions are
    silently dropped; scheduling re-opens once the callback finishes.
    """

    def __init__(self):
        # True when a new callback may be scheduled (no callback in flight).
        self._can_schedule = True
        self._lock = threading.Lock()
        self._tp = futures.ThreadPoolExecutor(1)

    def wrapped_execution(self, callback):
        """Run ``callback`` and then re-enable scheduling.

        Exceptions must not escape (the worker thread is reused), but they
        are logged instead of being silently discarded, so background
        failures (e.g. token refresh errors) remain diagnosable.
        """
        try:
            callback()
        except Exception:
            logging.getLogger(__name__).warning(
                "Background execution failed", exc_info=True
            )
        finally:
            self.cleanup()

    def submit(self, callback):
        """Schedule ``callback`` unless another callback is already in flight."""
        with self._lock:
            if self._can_schedule:
                self._tp.submit(self.wrapped_execution, callback)
                self._can_schedule = False

    def cleanup(self):
        """Re-open scheduling for the next ``submit`` call."""
        with self._lock:
            self._can_schedule = True
+
+
@six.add_metaclass(abc.ABCMeta)
class AbstractExpiringTokenCredentials(Credentials):
    """Credentials backed by a token that expires and is refreshed in background.

    Subclasses implement :meth:`_make_token_request`. The token is cached in a
    :class:`OneToManyValue` and refreshed by a single background worker before
    the ``refresh_in`` deadline; the last refresh failure is kept in
    :attr:`last_error` for diagnostics.
    """

    def __init__(self, tracer=None):
        super(AbstractExpiringTokenCredentials, self).__init__(tracer)
        # Unix time after which the cached token is considered expired.
        self._expires_in = 0
        # Unix time after which a background refresh should be scheduled.
        self._refresh_in = 0
        self._hour = 60 * 60
        self._cached_token = OneToManyValue()
        # Ensures at most one refresh runs at any moment.
        self._tp = AtMostOneExecution()
        self.logger = logger.getChild(self.__class__.__name__)
        # Text of the last refresh failure, if any.
        self.last_error = None
        # Subclasses may set this to extend error messages raised to callers.
        self.extra_error_message = ""

    @abc.abstractmethod
    def _make_token_request(self):
        """Fetch a fresh token.

        Must return a dict with at least ``access_token`` and
        ``expires_in`` (lifetime in seconds) keys.
        """
        pass

    def _log_refresh_start(self, current_time):
        # Log at escalating levels depending on which deadline was crossed.
        self.logger.debug("Start refresh token from metadata")
        if current_time > self._refresh_in:
            self.logger.info(
                "Cached token reached refresh_in deadline, current time %s, deadline %s",
                current_time,
                self._refresh_in,
            )

        if current_time > self._expires_in and self._expires_in > 0:
            self.logger.error(
                "Cached token reached expires_in deadline, current time %s, deadline %s",
                current_time,
                self._expires_in,
            )

    def _update_expiration_info(self, auth_metadata):
        # Expire at half the reported lifetime (capped at one hour) and aim to
        # refresh at a quarter of it (capped at half an hour).
        self._expires_in = time.time() + min(
            self._hour, auth_metadata["expires_in"] / 2
        )
        self._refresh_in = time.time() + min(
            self._hour / 2, auth_metadata["expires_in"] / 4
        )

    def _refresh(self):
        """Fetch a new token and publish it; on failure record the error and retry."""
        current_time = time.time()
        self._log_refresh_start(current_time)
        try:
            token_response = self._make_token_request()
            self._cached_token.update(token_response["access_token"])
            self._update_expiration_info(token_response)
            self.logger.info(
                "Token refresh successful. current_time %s, refresh_in %s",
                current_time,
                self._refresh_in,
            )

        except (KeyboardInterrupt, SystemExit):
            return

        except Exception as e:
            self.last_error = str(e)
            # Back off briefly, then schedule another refresh attempt.
            time.sleep(1)
            self._tp.submit(self._refresh)

    @property
    @tracing.with_trace()
    def token(self):
        """Current auth token; triggers a background refresh when stale.

        :raises issues.ConnectionError: when no token arrives within the wait
            timeout (the last refresh error is included when known)
        """
        current_time = time.time()
        if current_time > self._refresh_in:
            tracing.trace(self.tracer, {"refresh": True})
            self._tp.submit(self._refresh)
        cached_token = self._cached_token.consume(timeout=3)
        tracing.trace(self.tracer, {"consumed": True})
        if cached_token is None:
            if self.last_error is None:
                raise issues.ConnectionError(
                    "%s: timeout occurred while waiting for token.\n%s"
                    % (
                        self.__class__.__name__,
                        self.extra_error_message,
                    )
                )
            raise issues.ConnectionError(
                "%s: %s.\n%s"
                % (self.__class__.__name__, self.last_error, self.extra_error_message)
            )
        return cached_token

    def auth_metadata(self):
        """Return the YDB auth header carrying the (possibly refreshed) token."""
        return [(YDB_AUTH_TICKET_HEADER, self.token)]
+
+
def _wrap_static_credentials_response(rpc_state, response):
    """Unpack a Login RPC response into a ``LoginResult``, raising on bad status."""
    issues._process_response(response.operation)
    result = ydb_auth_pb2.LoginResult()
    response.operation.result.Unpack(result)
    return result
+
+
class StaticCredentials(AbstractExpiringTokenCredentials):
    """User/password credentials that obtain a session token via the Login RPC."""

    def __init__(self, driver_config, user, password="", tracer=None):
        super(StaticCredentials, self).__init__(tracer)
        self.driver_config = driver_config
        self.user = user
        self.password = password
        # Per-request timeout for the Login RPC, in seconds.
        self.request_timeout = 10

    def _make_token_request(self):
        """Open a one-off connection, call Login and return the token dict."""
        conn = connection.Connection.ready_factory(
            self.driver_config.endpoint, self.driver_config
        )
        assert conn is not None, (
            "Failed to establish connection in to %s" % self.driver_config.endpoint
        )
        request = ydb_auth_pb2.LoginRequest(user=self.user, password=self.password)
        request_settings = (
            settings_impl.BaseRequestSettings()
            .with_timeout(self.request_timeout)
            .with_need_rpc_auth(False)
        )
        try:
            result = conn(
                request,
                ydb_auth_v1_pb2_grpc.AuthServiceStub,
                "Login",
                _wrap_static_credentials_response,
                request_settings,
            )
        finally:
            conn.close()
        # The Login result carries no expiry info; assume half an hour.
        return {"expires_in": 30 * 60, "access_token": result.token}
+
+
class AnonymousCredentials(Credentials):
    """Credentials that attach no auth metadata (anonymous access)."""

    @staticmethod
    def auth_metadata():
        return []
+
+
class AuthTokenCredentials(Credentials):
    """Credentials wrapping a pre-issued auth token that is never refreshed."""

    def __init__(self, token):
        self._token = token

    def auth_metadata(self):
        """Return the YDB auth header carrying the stored token."""
        return [(YDB_AUTH_TICKET_HEADER, self._token)]
+
+
class AccessTokenCredentials(Credentials):
    """Credentials wrapping a pre-issued access token that is never refreshed.

    NOTE(review): identical to AuthTokenCredentials; presumably both names are
    kept for API compatibility -- confirm before consolidating.
    """

    def __init__(self, token):
        self._token = token

    def auth_metadata(self):
        """Return the YDB auth header carrying the stored token."""
        return [(YDB_AUTH_TICKET_HEADER, self._token)]
diff --git a/contrib/python/ydb/py2/ydb/dbapi/__init__.py b/contrib/python/ydb/py2/ydb/dbapi/__init__.py
new file mode 100644
index 0000000000..7363921192
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/dbapi/__init__.py
@@ -0,0 +1,47 @@
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+from .connection import Connection
+from .errors import (
+ Warning,
+ Error,
+ InterfaceError,
+ DatabaseError,
+ DataError,
+ OperationalError,
+ IntegrityError,
+ InternalError,
+ ProgrammingError,
+ NotSupportedError,
+)
+
# Version of this dbapi package.
version = "0.0.31"

# NOTE(review): version_info does not match ``version`` above ("0.0.31");
# one of them looks stale -- confirm which is authoritative.
version_info = (
    1,
    0,
    0,
)

# DB-API 2.0 (PEP 249) required module globals.
apilevel = "1.0"

# 0: threads may not share the module.
threadsafety = 0

# Parameters are rendered into "?" placeholders.
paramstyle = "qmark"

# All exception types exported by this module, for callers that want
# a single catch-all tuple.
errors = (
    Warning,
    Error,
    InterfaceError,
    DatabaseError,
    DataError,
    OperationalError,
    IntegrityError,
    InternalError,
    ProgrammingError,
    NotSupportedError,
)
+
+
def connect(*args, **kwargs):
    """DB-API 2.0 entry point: create and return a new :class:`Connection`."""
    return Connection(*args, **kwargs)
diff --git a/contrib/python/ydb/py2/ydb/dbapi/connection.py b/contrib/python/ydb/py2/ydb/dbapi/connection.py
new file mode 100644
index 0000000000..c69381a92f
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/dbapi/connection.py
@@ -0,0 +1,92 @@
+from __future__ import absolute_import, unicode_literals
+
+import posixpath
+
+import ydb
+from .cursor import Cursor
+from .errors import DatabaseError
+
+
class Connection(object):
    """DB-API 2.0 style connection to a YDB endpoint/database.

    Creates a driver and a session pool on construction; ``close()`` stops
    both. Transactions are per-query, so ``commit``/``rollback`` are no-ops.
    """

    # Class-level defaults so attribute access is safe even if __init__
    # fails partway. Fixed typo: this attribute was "deiver", which left
    # Connection.driver undefined at class level.
    driver = None
    pool = None

    def __init__(self, endpoint, database=None, **conn_kwargs):
        """
        :param endpoint: YDB endpoint to connect to
        :param database: database path on that endpoint
        :param conn_kwargs: extra keyword arguments forwarded to ydb.DriverConfig
        """
        self.endpoint = endpoint
        self.database = database
        self._conn_kwargs = conn_kwargs
        driver, pool = self._create_driver(self.endpoint, self.database, **conn_kwargs)
        self.driver = driver
        self.pool = pool

    def cursor(self):
        """Create a new cursor bound to this connection."""
        return Cursor(self)

    def execute(self, sql, parameters=None):
        """Shortcut: execute ``sql`` on a fresh cursor."""
        return self.cursor().execute(sql, parameters)

    def executemany(self, sql, parameters):
        """Shortcut: execute ``sql`` once per parameter set on a fresh cursor."""
        return self.cursor().executemany(sql, parameters)

    def describe(self, table_path):
        """Return column descriptions of ``table_path`` (relative to the database).

        :raises DatabaseError: when the table cannot be described
        """
        full_path = posixpath.join(self.database, table_path)
        try:
            res = self.pool.retry_operation_sync(
                lambda cli: cli.describe_table(full_path)
            )
            return res.columns
        except ydb.Error as e:
            raise DatabaseError(e.message, e.issues, e.status)

        except Exception:
            raise DatabaseError("Failed to describe table %r" % (table_path,))

    def check_exists(self, table_path):
        """Return True when ``table_path`` exists in the scheme.

        NOTE(review): unlike describe(), the path is NOT joined with the
        database prefix here -- confirm callers pass a full path.
        """
        try:
            self.driver.scheme_client.describe_path(table_path)
            return True
        except ydb.SchemeError:
            return False

    def commit(self):
        # Transactions are managed per-query; nothing to commit.
        pass

    def rollback(self):
        # Transactions are managed per-query; nothing to roll back.
        pass

    def close(self):
        """Stop the session pool and the driver, when they were created."""
        if self.pool is not None:
            self.pool.stop()
        if self.driver is not None:
            self.driver.stop()

    @staticmethod
    def _create_endpoint(host, port):
        """Format a host and numeric port as a "host:port" endpoint string."""
        return "%s:%d" % (host, port)

    @staticmethod
    def _create_driver(endpoint, database, **conn_kwargs):
        """Create a connected ydb.Driver plus a SessionPool over it.

        :raises DatabaseError: when the driver fails to connect within 5 seconds
        """
        driver_config = ydb.DriverConfig(
            endpoint,
            database=database,
            table_client_settings=ydb.TableClientSettings()
            .with_native_date_in_result_sets(True)
            .with_native_datetime_in_result_sets(True)
            .with_native_json_in_result_sets(True),
            **conn_kwargs
        )
        driver = ydb.Driver(driver_config)
        try:
            driver.wait(timeout=5, fail_fast=True)
        except ydb.Error as e:
            # NOTE(review): the driver is not stopped on this path (only in
            # the generic-exception path below) -- confirm this is intended.
            raise DatabaseError(e.message, e.issues, e.status)

        except Exception:
            driver.stop()
            raise DatabaseError(
                "Failed to connect to YDB, details %s"
                % driver.discovery_debug_details()
            )

        return driver, ydb.SessionPool(driver)
diff --git a/contrib/python/ydb/py2/ydb/dbapi/cursor.py b/contrib/python/ydb/py2/ydb/dbapi/cursor.py
new file mode 100644
index 0000000000..71175abf4e
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/dbapi/cursor.py
@@ -0,0 +1,184 @@
+from __future__ import absolute_import, unicode_literals
+
+import collections
+import datetime
+import itertools
+import logging
+
+import six
+
+import ydb
+from .errors import DatabaseError
+
+
+LOGGER = logging.getLogger(__name__)
+
+
# (from_char, to_escaped) pairs applied in order when quoting a string
# literal; the backslash must be escaped first so later replacements are
# not double-escaped.
STR_QUOTE_MAP = (
    ("\\", "\\\\"),
    ("'", r"\'"),
    ("\0", r"\x00"),
    # To re-check: \b \f \r \n \t
)


def render_str(value):
    """Quote ``value`` as a single-quoted YQL string literal."""
    escaped = value
    for source, replacement in STR_QUOTE_MAP:
        escaped = escaped.replace(source, replacement)
    return "'" + escaped + "'"
+
+
def render_date(value):
    """Render a ``datetime.date`` as a YQL ``Date(...)`` literal."""
    return "Date({})".format(render_str(value.isoformat()))
+
+
def render_datetime(value):
    """Render a ``datetime.datetime`` as a YQL datetime expression.

    TODO: is there a better solution for this?
    """
    return "DateTime::MakeDatetime(DateTime::ParseIso8601({}))".format(
        render_str(value.isoformat())
    )
+
+
def render(value):
    """Render a Python value as a literal suitable for inlining into YQL."""
    if value is None:
        return "NULL"
    # Order matters: datetime is a subclass of date, so it is checked first.
    renderers = (
        (six.string_types, render_str),
        (datetime.datetime, render_datetime),
        (datetime.date, render_date),
    )
    for matched_types, renderer in renderers:
        if isinstance(value, matched_types):
            return renderer(value)
    # Numbers, booleans, etc. fall back to their repr.
    return repr(value)
+
+
def render_sql(sql, parameters):
    """Inline ``parameters`` into ``sql`` by substituting "?" placeholders.

    :raises AssertionError: if placeholder and parameter counts differ
    """
    if not parameters:
        return sql

    assert sql.count("?") == len(parameters), "num of placeholders != num of params"

    # Pad with one empty string so params zip 1:1 with the split pieces.
    rendered_params = [render(param) for param in parameters] + [""]
    sql_pieces = sql.split("?")
    assert len(sql_pieces) == len(rendered_params)
    interleaved = (
        piece for pair in zip(sql_pieces, rendered_params) for piece in pair if piece
    )
    return "".join(interleaved)
+
+
def named_result_for(column_names):
    """Build a namedtuple class over ``column_names`` for result rows.

    TODO fix: this doesn't allow column names starting with underscore,
    e.g. ``select 1 as _a``.
    """
    return collections.namedtuple("NamedResult", column_names)
+
+
def _get_column_type(type_obj):
    """Stringify an already-converted YDB type object for the cursor description."""
    return str(type_obj)
+
+
def get_column_type(type_obj):
    """Convert a protobuf column type to its display string for the description."""
    return _get_column_type(ydb.convert.type_to_native(type_obj))
+
+
class Cursor(object):
    """DB-API 2.0 style cursor that executes queries as YDB scan queries."""

    def __init__(self, connection):
        self.connection = connection
        # DB-API description: 7-tuples of (name, type_code, ...) per column.
        self.description = []
        self.arraysize = 1
        self.logger = LOGGER
        # Iterator over result rows; None until the first execute().
        self.rows = None
        # Materialized copy of the rows, built on demand for rowcount.
        self._rows_prefetched = None

    def execute(self, sql, parameters=None):
        """Render ``parameters`` into ``sql`` and run it as a scan query.

        :raises DatabaseError: on any YDB error
        """
        fsql = render_sql(sql, parameters)
        self.logger.debug("execute sql: %s", fsql)
        try:
            chunks = self.connection.driver.table_client.scan_query(fsql)
        except ydb.Error as e:
            raise DatabaseError(e.message, e.issues, e.status)

        self.description = []

        rows = self._rows_iterable(chunks)
        # Prefetch the description:
        try:
            first_row = next(rows)
        except StopIteration:
            pass
        else:
            rows = itertools.chain((first_row,), rows)
        # Rows from a previous execute are kept, so executemany accumulates.
        if self.rows is not None:
            rows = itertools.chain(self.rows, rows)

        self.rows = rows

    def _rows_iterable(self, chunks_iterable):
        """Yield rows from scan-query chunks; fills self.description from the
        first chunk that contains rows."""
        description = None
        try:
            for chunk in chunks_iterable:
                if description is None and len(chunk.result_set.rows) > 0:
                    description = [
                        (
                            col.name,
                            get_column_type(col.type),
                            None,
                            None,
                            None,
                            None,
                            None,
                        )
                        for col in chunk.result_set.columns
                    ]
                    self.description = description
                for row in chunk.result_set.rows:
                    # returns tuple to be compatible with SqlAlchemy and because
                    # of this PEP to return a sequence: https://www.python.org/dev/peps/pep-0249/#fetchmany
                    yield row[::]
        except ydb.Error as e:
            raise DatabaseError(e.message, e.issues, e.status)

    def _ensure_prefetched(self):
        """Materialize the remaining rows once so they can be re-iterated
        (used by rowcount)."""
        if self.rows is not None and self._rows_prefetched is None:
            self._rows_prefetched = list(self.rows)
            self.rows = iter(self._rows_prefetched)
        return self._rows_prefetched

    def executemany(self, sql, seq_of_parameters):
        """Execute ``sql`` once per parameter set; results accumulate in self.rows."""
        for parameters in seq_of_parameters:
            self.execute(sql, parameters)

    def executescript(self, script):
        """Alias for execute(); the script is passed through verbatim."""
        return self.execute(script)

    def fetchone(self):
        """Return the next row, or None when exhausted or nothing was executed."""
        if self.rows is None:
            return None
        try:
            return next(self.rows)
        except StopIteration:
            return None

    def fetchmany(self, size=None):
        """Return up to ``size`` rows (default: arraysize)."""
        if size is None:
            size = self.arraysize

        return list(itertools.islice(self.rows, size))

    def fetchall(self):
        """Return all remaining rows as a list."""
        return list(self.rows)

    def nextset(self):
        # Scan queries produce a single row stream; just drain it.
        self.fetchall()

    def setinputsizes(self, sizes):
        # Optional per DB-API; parameters are rendered into the SQL text.
        pass

    def setoutputsize(self, column=None):
        # Optional per DB-API; no-op.
        pass

    def close(self):
        """Drop references to any remaining rows."""
        self.rows = None
        self._rows_prefetched = None

    @property
    def rowcount(self):
        # Materializes all rows to count them.
        # NOTE(review): called before any execute(), _ensure_prefetched()
        # returns None and len(None) raises TypeError -- confirm callers
        # always execute first.
        return len(self._ensure_prefetched())
diff --git a/contrib/python/ydb/py2/ydb/dbapi/errors.py b/contrib/python/ydb/py2/ydb/dbapi/errors.py
new file mode 100644
index 0000000000..f4309f3812
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/dbapi/errors.py
@@ -0,0 +1,103 @@
class Warning(Exception):
    """DB-API 2.0 Warning exception (intentionally shadows the builtin name)."""

    pass
+
+
class Error(Exception):
    """Base DB-API exception; renders server issue trees into the message."""

    def __init__(self, message, issues=None, status=None):
        # When server issues are present and renderable, the multi-line
        # rendering replaces the plain message.
        pretty_issues = _pretty_issues(issues)
        if pretty_issues is not None:
            message = pretty_issues

        super(Error, self).__init__(message)
        self.issues = issues
        self.message = message
        self.status = status
+
+
class InterfaceError(Error):
    """DB-API: error related to the database interface, not the database itself."""

    pass


class DatabaseError(Error):
    """DB-API: error related to the database itself."""

    pass


class DataError(DatabaseError):
    """DB-API: error due to problems with the processed data."""

    pass


class OperationalError(DatabaseError):
    """DB-API: error related to the database's operation."""

    pass


class IntegrityError(DatabaseError):
    """DB-API: the relational integrity of the database is affected."""

    pass


class InternalError(DatabaseError):
    """DB-API: the database encountered an internal error."""

    pass


class ProgrammingError(DatabaseError):
    """DB-API: programming error, e.g. bad SQL or wrong parameter usage."""

    pass


class NotSupportedError(DatabaseError):
    """DB-API: a method or API is not supported by the database."""

    pass
+
+
+def _pretty_issues(issues):
+ if issues is None:
+ return None
+
+ children_messages = [_get_messages(issue, root=True) for issue in issues]
+
+ if None in children_messages:
+ return None
+
+ return "\n" + "\n".join(children_messages)
+
+
+def _get_messages(issue, max_depth=100, indent=2, depth=0, root=False):
+ if depth >= max_depth:
+ return None
+ margin_str = " " * depth * indent
+ pre_message = ""
+ children = ""
+ if issue.issues:
+ collapsed_messages = []
+ while not root and len(issue.issues) == 1:
+ collapsed_messages.append(issue.message)
+ issue = issue.issues[0]
+ if collapsed_messages:
+ pre_message = margin_str + ", ".join(collapsed_messages) + "\n"
+ depth += 1
+ margin_str = " " * depth * indent
+ else:
+ pre_message = ""
+
+ children_messages = [
+ _get_messages(iss, max_depth=max_depth, indent=indent, depth=depth + 1)
+ for iss in issue.issues
+ ]
+
+ if None in children_messages:
+ return None
+
+ children = "\n".join(children_messages)
+
+ return (
+ pre_message
+ + margin_str
+ + issue.message
+ + "\n"
+ + margin_str
+ + "severity level: "
+ + str(issue.severity)
+ + "\n"
+ + margin_str
+ + "issue code: "
+ + str(issue.issue_code)
+ + "\n"
+ + children
+ )
diff --git a/contrib/python/ydb/py2/ydb/default_pem.py b/contrib/python/ydb/py2/ydb/default_pem.py
new file mode 100644
index 0000000000..92286ba237
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/default_pem.py
@@ -0,0 +1,4691 @@
+import six
+
+
+data = """
+# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Label: "GlobalSign Root CA"
+# Serial: 4835703278459707669005204
+# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a
+# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
+# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99
+-----BEGIN CERTIFICATE-----
+MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
+A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
+b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
+MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
+YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
+aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
+jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
+xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
+1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
+snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
+U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
+9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
+AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
+yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
+38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
+AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
+DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
+HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Label: "GlobalSign Root CA - R2"
+# Serial: 4835703278459682885658125
+# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30
+# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe
+# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e
+-----BEGIN CERTIFICATE-----
+MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1
+MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL
+v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8
+eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq
+tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd
+C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa
+zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB
+mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH
+V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n
+bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG
+3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs
+J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO
+291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS
+ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd
+AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7
+TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Label: "Verisign Class 3 Public Primary Certification Authority - G3"
+# Serial: 206684696279472310254277870180966723415
+# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09
+# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6
+# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44
+-----BEGIN CERTIFICATE-----
+MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
+cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
+LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
+aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
+dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
+VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
+aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
+bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
+IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b
+N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t
+KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu
+kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm
+CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ
+Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu
+imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te
+2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe
+DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC
+/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p
+F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt
+TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Label: "Entrust.net Premium 2048 Secure Server CA"
+# Serial: 946069240
+# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90
+# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31
+# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
+RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
+bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
+IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3
+MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3
+LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp
+YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG
+A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq
+K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe
+sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX
+MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT
+XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/
+HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH
+4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub
+j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo
+U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf
+zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b
+u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+
+bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er
+fF6adulZkMV8gzURZVE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Label: "Baltimore CyberTrust Root"
+# Serial: 33554617
+# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4
+# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74
+# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ
+RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD
+VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX
+DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y
+ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy
+VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr
+mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr
+IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK
+mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu
+XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy
+dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye
+jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1
+BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3
+DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92
+9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx
+jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0
+Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
+R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Label: "AddTrust External Root"
+# Serial: 1
+# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f
+# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68
+# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2
+-----BEGIN CERTIFICATE-----
+MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs
+IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290
+MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux
+FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h
+bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v
+dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt
+H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9
+uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX
+mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX
+a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN
+E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0
+WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD
+VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0
+Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU
+cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx
+IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN
+AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH
+YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5
+6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC
+Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX
+c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a
+mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Label: "Entrust Root Certification Authority"
+# Serial: 1164660820
+# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4
+# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9
+# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c
+-----BEGIN CERTIFICATE-----
+MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
+Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW
+KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl
+cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw
+NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw
+NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy
+ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV
+BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ
+KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo
+Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4
+4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9
+KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI
+rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi
+94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB
+sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi
+gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo
+kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE
+vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA
+A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t
+O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua
+AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP
+9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/
+eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
+0vdXcDazv/wor3ElhVsT/h5/WrQ8
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Label: "GeoTrust Global CA"
+# Serial: 144470
+# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5
+# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12
+# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a
+-----BEGIN CERTIFICATE-----
+MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
+MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
+YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG
+EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg
+R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9
+9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq
+fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv
+iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU
+1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+
+bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW
+MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA
+ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l
+uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn
+Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS
+tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF
+PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un
+hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
+5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA"
+# Serial: 1
+# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48
+# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79
+# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12
+-----BEGIN CERTIFICATE-----
+MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE
+BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0
+IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV
+VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8
+cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT
+QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh
+F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v
+c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w
+mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd
+VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX
+teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ
+f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe
+Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+
+nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB
+/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY
+MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG
+9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc
+aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX
+IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn
+ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z
+uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN
+Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja
+QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW
+koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9
+ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt
+DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm
+bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA 2"
+# Serial: 1
+# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7
+# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79
+# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b
+-----BEGIN CERTIFICATE-----
+MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD
+VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1
+c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81
+WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG
+FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq
+XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL
+se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb
+KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd
+IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73
+y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt
+hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc
+QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4
+Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV
+HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ
+KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z
+dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ
+L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr
+Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo
+ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY
+T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz
+GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m
+1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV
+OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH
+6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX
+QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS
+-----END CERTIFICATE-----
+
+# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
+# Subject: CN=AAA Certificate Services O=Comodo CA Limited
+# Label: "Comodo AAA Services root"
+# Serial: 1
+# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0
+# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49
+# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4
+-----BEGIN CERTIFICATE-----
+MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj
+YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM
+GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua
+BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe
+3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4
+YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR
+rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm
+ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU
+oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF
+MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v
+QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t
+b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF
+AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q
+GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz
+Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2
+G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi
+l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3
+smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
+# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
+# Label: "QuoVadis Root CA"
+# Serial: 985026699
+# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24
+# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9
+# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73
+-----BEGIN CERTIFICATE-----
+MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz
+MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw
+IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR
+dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp
+li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D
+rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ
+WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug
+F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU
+xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC
+Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv
+dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw
+ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl
+IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh
+c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy
+ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh
+Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI
+KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T
+KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq
+y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p
+dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD
+VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL
+MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk
+fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8
+7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R
+cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y
+mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW
+xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK
+SnQ2+Q==
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2"
+# Serial: 1289
+# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b
+# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7
+# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86
+-----BEGIN CERTIFICATE-----
+MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa
+GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg
+Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J
+WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB
+rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp
++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1
+ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i
+Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz
+PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og
+/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH
+oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI
+yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud
+EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2
+A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL
+MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT
+ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f
+BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn
+g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl
+fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K
+WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha
+B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc
+hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR
+TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD
+mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z
+ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y
+4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza
+8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3"
+# Serial: 1478
+# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf
+# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85
+# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35
+-----BEGIN CERTIFICATE-----
+MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM
+V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB
+4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr
+H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd
+8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv
+vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT
+mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe
+btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc
+T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt
+WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ
+c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A
+4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD
+VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG
+CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0
+aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0
+aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu
+dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw
+czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G
+A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg
+Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0
+7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem
+d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd
++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B
+4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN
+t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x
+DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57
+k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s
+zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j
+Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT
+mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK
+4SVhM7JZG+Ju1zdXtg2pEto=
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1
+# Subject: O=SECOM Trust.net OU=Security Communication RootCA1
+# Label: "Security Communication Root CA"
+# Serial: 0
+# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a
+# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7
+# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c
+-----BEGIN CERTIFICATE-----
+MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY
+MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t
+dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5
+WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD
+VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8
+9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ
+DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9
+Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N
+QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ
+xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G
+A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG
+kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr
+Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5
+Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU
+JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot
+RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Sonera Class2 CA O=Sonera
+# Subject: CN=Sonera Class2 CA O=Sonera
+# Label: "Sonera Class 2 Root CA"
+# Serial: 29
+# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb
+# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27
+# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP
+MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx
+MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV
+BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o
+Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt
+5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s
+3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej
+vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu
+8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw
+DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG
+MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil
+zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/
+3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD
+FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6
+Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2
+ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M
+-----END CERTIFICATE-----
+
+# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Label: "XRamp Global CA Root"
+# Serial: 107108908803651509692980124233745014957
+# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
+# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
+# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
+gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
+MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
+UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
+dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
+dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
+dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
+38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
+KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
+DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
+qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
+JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
+PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
+BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
+jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
+eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
+ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
+vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
+qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
+IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
+i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
+O+7ETPTsJ3xCwnR8gooJybQDJbw=
+-----END CERTIFICATE-----
+
+# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Label: "Go Daddy Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
+# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
+# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
+MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
+YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
+MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
+ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
+MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
+ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
+PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
+wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
+EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
+avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
+YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
+sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
+/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
+IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
+ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
+OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
+TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
+HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
+dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
+ReYNnyicsbkqWletNw+vHX/bvZ8=
+-----END CERTIFICATE-----
+
+# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Label: "Starfield Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
+# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
+# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
+-----BEGIN CERTIFICATE-----
+MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
+MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
+U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
+NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
+ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
+ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
+DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
+8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
+X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
+K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
+1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
+A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
+zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
+YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
+bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
+DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
+L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
+eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
+xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
+VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
+WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
+-----END CERTIFICATE-----
+
+# Issuer: O=Government Root Certification Authority
+# Subject: O=Government Root Certification Authority
+# Label: "Taiwan GRCA"
+# Serial: 42023070807708724159991140556527066870
+# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e
+# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9
+# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3
+-----BEGIN CERTIFICATE-----
+MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/
+MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow
+PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
+AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR
+IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q
+gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy
+yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts
+F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2
+jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx
+ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC
+VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK
+YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH
+EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN
+Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud
+DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE
+MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK
+UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ
+TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf
+qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK
+ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE
+JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7
+hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1
+EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm
+nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX
+udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz
+ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe
+LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl
+pYYsfPQS
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root CA"
+# Serial: 17154717934120587862167794914071425081
+# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
+# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
+# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
+-----BEGIN CERTIFICATE-----
+MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
+JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
+mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
+VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
+AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
+AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
+pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
+dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
+fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
+H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root CA"
+# Serial: 10944719598952040374951832963794454346
+# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e
+# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36
+# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61
+-----BEGIN CERTIFICATE-----
+MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
+QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
+CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
+nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
+43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
+T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
+gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
+TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
+DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
+hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
+06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
+PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
+YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
+CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert High Assurance EV Root CA"
+# Serial: 3553400076410547919724730734378100087
+# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a
+# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25
+# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
+ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
+LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
+RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
+PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
+xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
+Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
+hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
+EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
+FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
+nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
+eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
+hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
+Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
+vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
++OkuE6N36B9K
+-----END CERTIFICATE-----
+
+# Issuer: CN=Class 2 Primary CA O=Certplus
+# Subject: CN=Class 2 Primary CA O=Certplus
+# Label: "Certplus Class 2 Primary CA"
+# Serial: 177770208045934040241468760488327595043
+# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b
+# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb
+# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb
+-----BEGIN CERTIFICATE-----
+MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw
+PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz
+cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9
+MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz
+IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ
+ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR
+VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL
+kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd
+EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas
+H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0
+HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud
+DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4
+QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu
+Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/
+AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8
+yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR
+FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA
+ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB
+kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7
+l7+ijrRU
+-----END CERTIFICATE-----
+
+# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co.
+# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co.
+# Label: "DST Root CA X3"
+# Serial: 91299735575339953335919266965803778155
+# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5
+# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13
+# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39
+-----BEGIN CERTIFICATE-----
+MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/
+MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT
+DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow
+PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD
+Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
+AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O
+rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq
+OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b
+xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw
+7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD
+aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV
+HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG
+SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69
+ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr
+AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz
+R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5
+JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo
+Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Label: "SwissSign Gold CA - G2"
+# Serial: 13492815561806991280
+# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93
+# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61
+# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95
+-----BEGIN CERTIFICATE-----
+MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV
+BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln
+biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF
+MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT
+d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
+CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8
+76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+
+bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c
+6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE
+emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd
+MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt
+MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y
+MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y
+FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi
+aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM
+gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB
+qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7
+lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn
+8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov
+L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6
+45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO
+UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5
+O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC
+bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv
+GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a
+77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC
+hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3
+92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp
+Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w
+ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt
+Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Label: "SwissSign Silver CA - G2"
+# Serial: 5700383053117599563
+# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13
+# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb
+# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5
+-----BEGIN CERTIFICATE-----
+MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE
+BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu
+IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow
+RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY
+U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A
+MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv
+Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br
+YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF
+nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH
+6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt
+eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/
+c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ
+MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH
+HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf
+jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6
+5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB
+rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
+F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c
+wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0
+cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB
+AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp
+WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9
+xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ
+2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ
+IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8
+aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X
+em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR
+dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/
+OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+
+hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy
+tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Label: "GeoTrust Primary Certification Authority"
+# Serial: 32798226551256963324313806436981982369
+# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf
+# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96
+# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c
+-----BEGIN CERTIFICATE-----
+MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY
+MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo
+R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx
+MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK
+Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9
+AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA
+ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0
+7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W
+kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI
+mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ
+KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1
+6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl
+4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K
+oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj
+UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU
+AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA"
+# Serial: 69529181992039203566298953787712940909
+# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12
+# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81
+# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f
+-----BEGIN CERTIFICATE-----
+MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB
+qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV
+BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw
+NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j
+LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG
+A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl
+IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs
+W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta
+3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk
+6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6
+Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J
+NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA
+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP
+r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU
+DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz
+YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX
+xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2
+/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/
+LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7
+jVaMaA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G5"
+# Serial: 33037644167568058970164719475676101450
+# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c
+# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5
+# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df
+-----BEGIN CERTIFICATE-----
+MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB
+yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW
+ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1
+nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex
+t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz
+SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG
+BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+
+rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/
+NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E
+BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH
+BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy
+aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv
+MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE
+p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y
+5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK
+WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ
+4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N
+hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureTrust CA O=SecureTrust Corporation
+# Subject: CN=SecureTrust CA O=SecureTrust Corporation
+# Label: "SecureTrust CA"
+# Serial: 17199774589125277788362757014266862032
+# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1
+# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11
+# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73
+-----BEGIN CERTIFICATE-----
+MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz
+MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv
+cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN
+AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz
+Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO
+0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao
+wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj
+7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS
+8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT
+BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg
+JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC
+NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/
+3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm
+D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS
+CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR
+3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Secure Global CA O=SecureTrust Corporation
+# Subject: CN=Secure Global CA O=SecureTrust Corporation
+# Label: "Secure Global CA"
+# Serial: 9751836167731051554232119481456978597
+# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de
+# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b
+# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69
+-----BEGIN CERTIFICATE-----
+MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx
+MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg
+Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ
+iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa
+/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ
+jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI
+HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7
+sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w
+gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw
+KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG
+AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L
+URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO
+H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm
+I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY
+iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc
+f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO Certification Authority O=COMODO CA Limited
+# Label: "COMODO Certification Authority"
+# Serial: 104350513648249232941998508985834464573
+# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75
+# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b
+# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66
+-----BEGIN CERTIFICATE-----
+MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
+gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV
+BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw
+MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl
+YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P
+RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3
+UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI
+2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp
++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O
+nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW
+/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g
+PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u
+QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY
+SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv
+IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/
+RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4
+zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd
+BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
+ZQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Label: "Network Solutions Certificate Authority"
+# Serial: 116697915152937497490437556386812487904
+# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e
+# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce
+# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c
+-----BEGIN CERTIFICATE-----
+MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi
+MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu
+MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp
+dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV
+UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO
+ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz
+c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP
+OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl
+mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF
+BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4
+qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw
+gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB
+BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu
+bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp
+dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8
+6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/
+h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH
+/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv
+wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN
+pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Label: "COMODO ECC Certification Authority"
+# Serial: 41578283867086692638256921589707938090
+# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23
+# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11
+# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7
+-----BEGIN CERTIFICATE-----
+MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw
+MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy
+ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N
+T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv
+biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR
+FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J
+cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW
+BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm
+fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
+GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GA CA"
+# Serial: 86718877871133159090080555911823548314
+# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93
+# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9
+# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5
+-----BEGIN CERTIFICATE-----
+MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB
+ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly
+aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl
+ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w
+NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G
+A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD
+VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX
+SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR
+VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2
+w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF
+mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg
+4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9
+4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw
+EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx
+SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2
+ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8
+vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa
+hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi
+Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ
+/L7fCg0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna O=Dhimyotis
+# Subject: CN=Certigna O=Dhimyotis
+# Label: "Certigna"
+# Serial: 18364802974209362175
+# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff
+# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97
+# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d
+-----BEGIN CERTIFICATE-----
+MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV
+BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X
+DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ
+BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4
+QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny
+gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw
+zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q
+130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2
+JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw
+ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT
+AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj
+AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG
+9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h
+bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc
+fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu
+HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w
+t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw
+WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
+# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
+# Label: "Deutsche Telekom Root CA 2"
+# Serial: 38
+# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08
+# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf
+# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3
+-----BEGIN CERTIFICATE-----
+MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc
+MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj
+IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB
+IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE
+RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl
+U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290
+IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU
+ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC
+QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr
+rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S
+NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc
+QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH
+txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP
+BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC
+AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp
+tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa
+IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl
+6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+
+xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU
+Cm26OWMohpLzGITY+9HPBVZkVw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Label: "Cybertrust Global Root"
+# Serial: 4835703278459682877484360
+# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1
+# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6
+# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3
+-----BEGIN CERTIFICATE-----
+MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG
+A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh
+bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE
+ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS
+b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5
+7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS
+J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y
+HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP
+t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz
+FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY
+XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/
+MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw
+hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js
+MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA
+A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj
+Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx
+XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o
+omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc
+A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW
+WL1WMRJOEcgh4LMRkWXbtKaIOM5V
+-----END CERTIFICATE-----
+
+# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Label: "ePKI Root Certification Authority"
+# Serial: 28956088682735189655030529057352760477
+# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3
+# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0
+# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5
+-----BEGIN CERTIFICATE-----
+MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe
+MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
+ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw
+IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL
+SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH
+SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh
+ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X
+DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1
+TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ
+fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA
+sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU
+WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS
+nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH
+dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip
+NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC
+AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF
+MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH
+ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB
+uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl
+PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP
+JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/
+gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2
+j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6
+5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB
+o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS
+/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z
+Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE
+W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D
+hNQ+IIX3Sj0rnP0qCglN6oH4EZw=
+-----END CERTIFICATE-----
+
+# Issuer: O=certSIGN OU=certSIGN ROOT CA
+# Subject: O=certSIGN OU=certSIGN ROOT CA
+# Label: "certSIGN ROOT CA"
+# Serial: 35210227249154
+# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17
+# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b
+# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb
+-----BEGIN CERTIFICATE-----
+MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT
+AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD
+QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP
+MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC
+ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do
+0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ
+UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d
+RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ
+OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv
+JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C
+AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O
+BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ
+LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY
+MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ
+44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I
+Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw
+i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN
+9u6wWk5JRFRYX0KD
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G3"
+# Serial: 28809105769928564313984085209975885599
+# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05
+# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd
+# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4
+-----BEGIN CERTIFICATE-----
+MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB
+mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT
+MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s
+eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv
+cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ
+BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg
+MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0
+BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz
++uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm
+hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn
+5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W
+JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL
+DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC
+huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw
+HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB
+AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB
+zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN
+kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD
+AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH
+SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G
+spki4cErx5z481+oghLrGREt
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G2"
+# Serial: 71758320672825410020661621085256472406
+# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f
+# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12
+# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57
+-----BEGIN CERTIFICATE-----
+MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp
+IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi
+BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw
+MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh
+d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig
+YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v
+dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/
+BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6
+papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K
+DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3
+KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox
+XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G3"
+# Serial: 127614157056681299805556476275995414779
+# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31
+# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2
+# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB
+rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV
+BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa
+Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl
+LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u
+MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl
+ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm
+gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8
+YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf
+b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9
+9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S
+zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk
+OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV
+HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA
+2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW
+oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu
+t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c
+KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM
+m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu
+MdRAGmI0Nj81Aa6sY6A=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G2"
+# Serial: 80682863203381065782177908751794619243
+# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a
+# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0
+# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL
+MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj
+KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2
+MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0
+eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV
+BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw
+NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV
+BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH
+MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL
+So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal
+tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG
+CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT
+qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz
+rD6ogRLQy7rQkgu2npaqBA+K
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Universal Root Certification Authority"
+# Serial: 85209574734084581917763752644031726877
+# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19
+# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54
+# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c
+-----BEGIN CERTIFICATE-----
+MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB
+vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W
+ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX
+MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0
+IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y
+IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh
+bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF
+AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF
+9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH
+H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H
+LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN
+/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT
+rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud
+EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw
+WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs
+exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud
+DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4
+sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+
+seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz
+4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+
+BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR
+lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3
+7M2CYfE45k+XmCpajQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G4"
+# Serial: 63143484348153506665311985501458640051
+# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41
+# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a
+# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79
+-----BEGIN CERTIFICATE-----
+MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG
+A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp
+U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg
+SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln
+biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5
+IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm
+GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve
+fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ
+aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj
+aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW
+kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC
+4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga
+FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=NetLock Arany (Class Gold) Főtanúsítvány O=NetLock Kft. OU=Tanúsítványkiadók (Certification Services)
+# Subject: CN=NetLock Arany (Class Gold) Főtanúsítvány O=NetLock Kft. OU=Tanúsítványkiadók (Certification Services)
+# Label: "NetLock Arany (Class Gold) Főtanúsítvány"
+# Serial: 80544274841616
+# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88
+# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91
+# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98
+-----BEGIN CERTIFICATE-----
+MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG
+EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3
+MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl
+cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR
+dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB
+pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM
+b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm
+aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz
+IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT
+lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz
+AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5
+VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG
+ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2
+BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG
+AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M
+U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh
+bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C
++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC
+bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F
+uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
+XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
+# Label: "Staat der Nederlanden Root CA - G2"
+# Serial: 10000012
+# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a
+# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16
+# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f
+-----BEGIN CERTIFICATE-----
+MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX
+DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
+ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
+b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291
+qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp
+uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU
+Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE
+pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp
+5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M
+UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN
+GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy
+5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv
+6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK
+eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6
+B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/
+BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov
+L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV
+HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG
+SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS
+CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen
+5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897
+IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK
+gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL
++63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL
+vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm
+bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk
+N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC
+Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z
+ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post
+# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post
+# Label: "Hongkong Post Root CA 1"
+# Serial: 1000
+# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca
+# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58
+# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2
+-----BEGIN CERTIFICATE-----
+MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx
+FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg
+Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG
+A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr
+b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ
+jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn
+PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh
+ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9
+nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h
+q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED
+MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC
+mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3
+7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB
+oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs
+EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO
+fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi
+AmvZWg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Label: "SecureSign RootCA11"
+# Serial: 1
+# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26
+# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3
+# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12
+-----BEGIN CERTIFICATE-----
+MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr
+MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG
+A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0
+MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp
+Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD
+QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz
+i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8
+h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV
+MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9
+UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni
+8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC
+h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD
+VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
+AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm
+KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ
+X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr
+QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5
+pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN
+QSdJQO7e5iNEOdyhIta6A/I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Label: "Microsec e-Szigno Root CA 2009"
+# Serial: 14014712776195784473
+# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1
+# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e
+# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78
+-----BEGIN CERTIFICATE-----
+MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD
+VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0
+ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G
+CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y
+OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx
+FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp
+Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o
+dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP
+kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc
+cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U
+fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7
+N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC
+xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1
++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
+A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM
+Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG
+SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h
+mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk
+ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775
+tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c
+2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t
+HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Label: "GlobalSign Root CA - R3"
+# Serial: 4835703278459759426209954
+# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28
+# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad
+# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b
+-----BEGIN CERTIFICATE-----
+MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
+MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
+RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
+gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
+KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
+QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
+XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
+LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
+RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
+jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
+6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
+mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
+Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
+WD9f
+-----END CERTIFICATE-----
+
+# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"
+# Serial: 6047274297262753887
+# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3
+# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa
+# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef
+-----BEGIN CERTIFICATE-----
+MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE
+BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h
+cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy
+MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg
+Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9
+thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM
+cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG
+L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i
+NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h
+X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b
+m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy
+Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja
+EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T
+KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF
+6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh
+OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD
+VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD
+VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp
+cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv
+ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl
+AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF
+661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9
+am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1
+ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481
+PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS
+3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k
+SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF
+3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM
+ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g
+StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz
+Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB
+jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V
+-----END CERTIFICATE-----
+
+# Issuer: CN=Izenpe.com O=IZENPE S.A.
+# Subject: CN=Izenpe.com O=IZENPE S.A.
+# Label: "Izenpe.com"
+# Serial: 917563065490389241595536686991402621
+# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73
+# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19
+# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f
+-----BEGIN CERTIFICATE-----
+MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4
+MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6
+ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD
+VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j
+b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq
+scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO
+xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H
+LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX
+uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD
+yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+
+JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q
+rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN
+BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L
+hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB
+QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+
+HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu
+Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg
+QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB
+BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx
+MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA
+A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb
+laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56
+awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo
+JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw
+LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT
+VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk
+LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb
+UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/
+QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+
+naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls
+QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
+# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
+# Label: "Chambers of Commerce Root - 2008"
+# Serial: 11806822484801597146
+# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7
+# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c
+# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0
+-----BEGIN CERTIFICATE-----
+MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD
+VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
+IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
+MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz
+IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz
+MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj
+dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw
+EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp
+MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G
+CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9
+28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq
+VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q
+DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR
+5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL
+ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a
+Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl
+UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s
++12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5
+Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj
+ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx
+hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV
+HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1
++HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN
+YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t
+L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy
+ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt
+IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV
+HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w
+DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW
+PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF
+5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1
+glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH
+FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2
+pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD
+xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG
+tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq
+jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De
+fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg
+OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ
+d0jQ
+-----END CERTIFICATE-----
+
+# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
+# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
+# Label: "Global Chambersign Root - 2008"
+# Serial: 14541511773111788494
+# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3
+# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c
+# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca
+-----BEGIN CERTIFICATE-----
+MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD
+VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
+IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
+MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD
+aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx
+MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy
+cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG
+A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl
+BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI
+hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed
+KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7
+G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2
+zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4
+ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG
+HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2
+Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V
+yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e
+beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r
+6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh
+wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog
+zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW
+BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr
+ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp
+ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk
+cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt
+YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC
+CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow
+KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI
+hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ
+UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz
+X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x
+fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz
+a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd
+Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd
+SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O
+AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso
+M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge
+v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z
+09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B
+-----END CERTIFICATE-----
+
+# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Label: "Go Daddy Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01
+# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b
+# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
+EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
+ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
+NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
+EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
+AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
+DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
+E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
+/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
+DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
+GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
+tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
+AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
+WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
+9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
+gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
+2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
+LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
+4uJEvlz36hz1
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96
+# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e
+# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5
+-----BEGIN CERTIFICATE-----
+MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs
+ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw
+MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6
+b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj
+aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp
+Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg
+nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1
+HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N
+Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN
+dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0
+HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G
+CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU
+sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3
+4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg
+8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K
+pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1
+mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Services Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2
+# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f
+# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5
+-----BEGIN CERTIFICATE-----
+MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
+ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
+MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
+VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
+ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
+dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
+OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
+8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
+Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
+hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
+6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
+AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
+bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
+ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
+qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
+iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
+0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
+sSi6
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Commercial O=AffirmTrust
+# Subject: CN=AffirmTrust Commercial O=AffirmTrust
+# Label: "AffirmTrust Commercial"
+# Serial: 8608355977964138876
+# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7
+# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7
+# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP
+Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr
+ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL
+MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1
+yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr
+VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/
+nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG
+XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj
+vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt
+Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g
+N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC
+nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Networking O=AffirmTrust
+# Subject: CN=AffirmTrust Networking O=AffirmTrust
+# Label: "AffirmTrust Networking"
+# Serial: 8957382827206547757
+# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f
+# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f
+# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y
+YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua
+kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL
+QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp
+6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG
+yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i
+QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO
+tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu
+QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ
+Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u
+olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48
+x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium O=AffirmTrust
+# Subject: CN=AffirmTrust Premium O=AffirmTrust
+# Label: "AffirmTrust Premium"
+# Serial: 7893706540734352110
+# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57
+# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27
+# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz
+dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG
+A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U
+cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf
+qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ
+JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ
++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS
+s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5
+HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7
+70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG
+V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S
+qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S
+5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia
+C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX
+OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE
+FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2
+KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg
+Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B
+8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ
+MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc
+0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF
+u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH
+YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8
+GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO
+RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e
+KeC2uAloGRwYQw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Label: "AffirmTrust Premium ECC"
+# Serial: 8401224907861490260
+# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d
+# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb
+# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23
+-----BEGIN CERTIFICATE-----
+MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
+VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
+cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ
+BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt
+VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D
+0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9
+ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G
+A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs
+aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
+flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA"
+# Serial: 279744
+# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78
+# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e
+# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e
+-----BEGIN CERTIFICATE-----
+MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM
+MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D
+ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU
+cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3
+WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg
+Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw
+IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH
+UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM
+TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU
+BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM
+kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x
+AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV
+HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y
+sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL
+I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8
+J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY
+VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI
+03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Root Certification Authority"
+# Serial: 1
+# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79
+# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48
+# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44
+-----BEGIN CERTIFICATE-----
+MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES
+MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU
+V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz
+WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO
+LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
+AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE
+AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH
+K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX
+RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z
+rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx
+3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq
+hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC
+MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls
+XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D
+lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn
+aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ
+YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw==
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Label: "Security Communication RootCA2"
+# Serial: 0
+# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43
+# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74
+# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl
+MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe
+U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX
+DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy
+dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj
+YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV
+OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr
+zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM
+VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ
+hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO
+ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw
+awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs
+OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3
+DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF
+coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc
+okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8
+t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy
+1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/
+SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions RootCA 2011"
+# Serial: 0
+# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9
+# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d
+# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71
+-----BEGIN CERTIFICATE-----
+MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix
+RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
+dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p
+YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw
+NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK
+EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl
+cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz
+dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ
+fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns
+bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD
+75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP
+FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV
+HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp
+5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu
+b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA
+A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p
+6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8
+TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7
+dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys
+Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI
+l7WdmplNsDz4SgCbZN2fOUvRJ9e4
+-----END CERTIFICATE-----
+
+# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Label: "Actalis Authentication Root CA"
+# Serial: 6271844772424770508
+# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6
+# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac
+# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66
+-----BEGIN CERTIFICATE-----
+MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE
+BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w
+MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290
+IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC
+SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1
+ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv
+UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX
+4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9
+KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/
+gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb
+rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ
+51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F
+be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe
+KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F
+v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn
+fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7
+jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz
+ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt
+ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL
+e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz
+WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V
+SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j
+pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX
+X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok
+fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R
+K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU
+ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU
+LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT
+LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg==
+-----END CERTIFICATE-----
+
+# Issuer: O=Trustis Limited OU=Trustis FPS Root CA
+# Subject: O=Trustis Limited OU=Trustis FPS Root CA
+# Label: "Trustis FPS Root CA"
+# Serial: 36053640375399034304724988975563710553
+# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d
+# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04
+# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d
+-----BEGIN CERTIFICATE-----
+MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF
+MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL
+ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx
+MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc
+MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD
+ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+
+AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH
+iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj
+vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA
+0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB
+OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/
+BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E
+FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01
+GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW
+zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4
+1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE
+f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F
+jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN
+ZetX2fNXlrtIzYE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 2 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29
+# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99
+# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr
+6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV
+L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91
+1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx
+MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ
+QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB
+arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr
+Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi
+FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS
+P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN
+9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz
+uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h
+9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s
+A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t
+OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo
++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7
+KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2
+DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us
+H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ
+I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7
+5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h
+3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz
+Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 3 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec
+# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57
+# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y
+ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E
+N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9
+tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX
+0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c
+/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X
+KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY
+zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS
+O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D
+34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP
+K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv
+Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj
+QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV
+cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS
+IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2
+HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa
+O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv
+033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u
+dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE
+kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41
+3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD
+u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq
+4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 3"
+# Serial: 1
+# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef
+# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1
+# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN
+8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/
+RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4
+hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5
+ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM
+EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1
+A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy
+WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30
+6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT
+91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml
+e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p
+TpPDpFQUWw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
+# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
+# Label: "EE Certification Centre Root CA"
+# Serial: 112324828676200291871926431888494945866
+# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f
+# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7
+# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76
+-----BEGIN CERTIFICATE-----
+MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1
+MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1
+czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG
+CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy
+MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl
+ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS
+b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB
+AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy
+euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO
+bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw
+WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d
+MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE
+1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD
+VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/
+zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB
+BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF
+BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV
+v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG
+E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u
+uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW
+iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v
+GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 2009"
+# Serial: 623603
+# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f
+# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0
+# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1
+-----BEGIN CERTIFICATE-----
+MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha
+ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM
+HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03
+UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42
+tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R
+ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM
+lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp
+/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G
+A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G
+A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj
+dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy
+MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl
+cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js
+L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL
+BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni
+acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0
+o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K
+zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8
+PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y
+Johw1+qRzT65ysCQblrGXnRl11z+o+I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 EV 2009"
+# Serial: 623604
+# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6
+# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83
+# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81
+-----BEGIN CERTIFICATE-----
+MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw
+NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV
+BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn
+ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0
+3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z
+qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR
+p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8
+HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea
+HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw
+Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh
+c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E
+RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt
+dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku
+Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp
+3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05
+nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF
+CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na
+xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX
+KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1
+-----END CERTIFICATE-----
+
+# Issuer: CN=CA Disig Root R2 O=Disig a.s.
+# Subject: CN=CA Disig Root R2 O=Disig a.s.
+# Label: "CA Disig Root R2"
+# Serial: 10572350602393338211
+# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03
+# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71
+# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03
+-----BEGIN CERTIFICATE-----
+MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV
+BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
+MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy
+MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
+EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw
+ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe
+NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH
+PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I
+x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe
+QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR
+yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO
+QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912
+H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ
+QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD
+i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs
+nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1
+rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
+DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI
+hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM
+tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf
+GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb
+lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka
++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal
+TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i
+nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3
+gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr
+G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os
+zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x
+L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL
+-----END CERTIFICATE-----
+
+# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Label: "ACCVRAIZ1"
+# Serial: 6828503384748696800
+# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02
+# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17
+# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13
+-----BEGIN CERTIFICATE-----
+MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE
+AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw
+CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ
+BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND
+VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb
+qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY
+HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo
+G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA
+lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr
+IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/
+0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH
+k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47
+4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO
+m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa
+cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl
+uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI
+KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls
+ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG
+AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2
+VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT
+VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG
+CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA
+cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA
+QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA
+7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA
+cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA
+QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA
+czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu
+aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt
+aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud
+DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF
+BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp
+D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU
+JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m
+AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD
+vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms
+tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH
+7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h
+I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA
+h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF
+d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H
+pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Global Root CA"
+# Serial: 3262
+# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96
+# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65
+# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx
+EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5
+NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT
+B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF
+10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz
+0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh
+MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH
+zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc
+46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2
+yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi
+laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP
+oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA
+BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE
+qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm
+4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL
+1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn
+LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF
+H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo
+RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+
+nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh
+15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW
+6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW
+nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j
+wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz
+aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy
+KwbQBM0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Label: "TeliaSonera Root CA v1"
+# Serial: 199041966741090107964904287217786801558
+# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c
+# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37
+# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89
+-----BEGIN CERTIFICATE-----
+MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw
+NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv
+b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD
+VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F
+VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1
+7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X
+Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+
+/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs
+81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm
+dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe
+Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu
+sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4
+pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs
+slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ
+arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD
+VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG
+9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl
+dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj
+TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed
+Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7
+Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI
+OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7
+vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW
+t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn
+HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx
+SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. OU=E-Tugra Sertifikasyon Merkezi
+# Subject: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. OU=E-Tugra Sertifikasyon Merkezi
+# Label: "E-Tugra Certification Authority"
+# Serial: 7667447206703254355
+# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49
+# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39
+# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c
+-----BEGIN CERTIFICATE-----
+MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV
+BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC
+aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV
+BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1
+Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz
+MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+
+BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp
+em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN
+ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY
+B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH
+D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF
+Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo
+q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D
+k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH
+fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut
+dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM
+ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8
+zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn
+rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX
+U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6
+Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5
+XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF
+Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR
+HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY
+GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c
+77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3
++GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK
+vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6
+FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl
+yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P
+AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD
+y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d
+NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 2"
+# Serial: 1
+# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a
+# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9
+# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd
+AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC
+FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi
+1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq
+jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ
+wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/
+WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy
+NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC
+uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw
+IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6
+g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN
+9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP
+BSeOE6Fuwg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot 2011 O=Atos
+# Subject: CN=Atos TrustedRoot 2011 O=Atos
+# Label: "Atos TrustedRoot 2011"
+# Serial: 6643877497813316402
+# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56
+# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21
+# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE
+AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG
+EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM
+FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC
+REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp
+Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM
+VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+
+SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L
+cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi
+eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG
+A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3
+DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j
+vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP
+DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc
+maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D
+lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv
+KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 1 G3"
+# Serial: 687049649626669250736271037606554624078720034195
+# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab
+# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67
+# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00
+MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV
+wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe
+rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341
+68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh
+4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp
+UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o
+abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc
+3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G
+KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt
+hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO
+Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt
+zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD
+ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC
+MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2
+cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN
+qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5
+YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv
+b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2
+8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k
+NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj
+ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp
+q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt
+nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2 G3"
+# Serial: 390156079458959257446133169266079962026824725800
+# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06
+# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36
+# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00
+MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf
+qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW
+n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym
+c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+
+O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1
+o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j
+IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq
+IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz
+8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh
+vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l
+7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG
+cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD
+ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66
+AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC
+roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga
+W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n
+lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE
++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV
+csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd
+dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg
+KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM
+HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4
+WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3 G3"
+# Serial: 268090761170461462463995952157327242137089239581
+# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7
+# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d
+# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00
+MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR
+/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu
+FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR
+U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c
+ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR
+FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k
+A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw
+eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl
+sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp
+VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+
+ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD
+ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px
+KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI
+FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv
+oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg
+u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP
+0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf
+3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl
+8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+
+DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN
+PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/
+ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G2"
+# Serial: 15385348160840213938643033620894905419
+# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d
+# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f
+# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85
+-----BEGIN CERTIFICATE-----
+MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA
+n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc
+biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp
+EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA
+bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu
+YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB
+AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW
+BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI
+QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I
+0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni
+lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9
+B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv
+ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo
+IhNzbM8m9Yop5w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G3"
+# Serial: 15459312981008553731928384953135426796
+# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb
+# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89
+# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2
+-----BEGIN CERTIFICATE-----
+MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq
+hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf
+Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q
+RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD
+AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY
+JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv
+6pZjamVFkpUBtA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G2"
+# Serial: 4293743540046975378534879503202253541
+# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44
+# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4
+# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f
+-----BEGIN CERTIFICATE-----
+MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
+MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI
+2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx
+1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ
+q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz
+tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ
+vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV
+5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY
+1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4
+NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG
+Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91
+8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe
+pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl
+MrY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G3"
+# Serial: 7089244469030293291760083333884364146
+# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca
+# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e
+# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0
+-----BEGIN CERTIFICATE-----
+MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe
+Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw
+EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x
+IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF
+K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG
+fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd
+BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx
+AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/
+oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8
+sycX
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Trusted Root G4"
+# Serial: 7451500558977370777930084869016614236
+# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49
+# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4
+# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88
+-----BEGIN CERTIFICATE-----
+MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg
+RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y
+ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If
+xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV
+ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO
+DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ
+jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/
+CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi
+EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM
+fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY
+uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK
+chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t
+9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD
+ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2
+SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd
++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc
+fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa
+sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N
+cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N
+0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie
+4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI
+r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1
+/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm
+gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Label: "COMODO RSA Certification Authority"
+# Serial: 101909084537582093308941363524873193117
+# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18
+# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4
+# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34
+-----BEGIN CERTIFICATE-----
+MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB
+hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV
+BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT
+EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR
+Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR
+6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X
+pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC
+9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV
+/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf
+Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z
++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w
+qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah
+SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC
+u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf
+Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq
+crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E
+FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB
+/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl
+wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM
+4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV
+2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna
+FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ
+CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK
+boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke
+jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL
+S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb
+QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl
+0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB
+NVOFBkpdn627G190
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Label: "USERTrust RSA Certification Authority"
+# Serial: 2645093764781058787591871645665788717
+# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5
+# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e
+# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2
+-----BEGIN CERTIFICATE-----
+MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB
+iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl
+cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV
+BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw
+MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV
+BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU
+aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B
+3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY
+tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/
+Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2
+VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT
+79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6
+c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l
+c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee
+UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE
+Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd
+BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G
+A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF
+Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO
+VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3
+ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs
+8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR
+iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze
+Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ
+XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/
+qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB
+VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB
+L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG
+jjxDah2nGN59PRbxYvnKkKj9
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Label: "USERTrust ECC Certification Authority"
+# Serial: 123013823720199481456569720443997572134
+# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1
+# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0
+# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a
+-----BEGIN CERTIFICATE-----
+MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL
+MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl
+eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT
+JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT
+Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg
+VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo
+I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng
+o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G
+A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB
+zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW
+RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Label: "GlobalSign ECC Root CA - R4"
+# Serial: 14367148294922964480859022125800977897474
+# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e
+# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb
+# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c
+-----BEGIN CERTIFICATE-----
+MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ
+FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F
+uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX
+kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs
+ewv4n4Q=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Label: "GlobalSign ECC Root CA - R5"
+# Serial: 32785792099990507226680698011560947931244
+# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08
+# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa
+# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24
+-----BEGIN CERTIFICATE-----
+MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke
+hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI
+KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg
+515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO
+xwy8p2Fp8fc74SrL+SvzZpA3
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
+# Label: "Staat der Nederlanden Root CA - G3"
+# Serial: 10003001
+# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37
+# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc
+# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28
+-----BEGIN CERTIFICATE-----
+MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX
+DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
+ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
+b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP
+cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW
+IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX
+xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy
+KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR
+9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az
+5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8
+6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7
+Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP
+bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt
+BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt
+XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF
+MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd
+INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD
+U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp
+LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8
+Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp
+gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh
+/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw
+0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A
+fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq
+4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR
+1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/
+QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM
+94B7IWcnMFk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
+# Label: "Staat der Nederlanden EV Root CA"
+# Serial: 10000013
+# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba
+# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb
+# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a
+-----BEGIN CERTIFICATE-----
+MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y
+MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg
+TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS
+b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS
+M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC
+UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d
+Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p
+rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l
+pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb
+j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC
+KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS
+/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X
+cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH
+1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP
+px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7
+MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI
+eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u
+2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS
+v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC
+wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy
+CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e
+vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6
+Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa
+Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL
+eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8
+FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc
+7uzXLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Label: "IdenTrust Commercial Root CA 1"
+# Serial: 13298821034946342390520003877796839426
+# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7
+# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25
+# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu
+VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw
+MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw
+JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT
+3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU
++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp
+S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1
+bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi
+T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL
+vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK
+Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK
+dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT
+c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv
+l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N
+iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD
+ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH
+6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt
+LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93
+nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3
++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK
+W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT
+AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq
+l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG
+4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ
+mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A
+7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Label: "IdenTrust Public Sector Root CA 1"
+# Serial: 13298821034946342390521976156843933698
+# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba
+# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd
+# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f
+-----BEGIN CERTIFICATE-----
+MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu
+VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0
+MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7
+ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy
+RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS
+bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF
+/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R
+3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw
+EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy
+9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V
+GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ
+2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV
+WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD
+W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN
+AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj
+t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV
+DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9
+TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G
+lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW
+mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df
+WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5
++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ
+tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA
+GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv
+8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - G2"
+# Serial: 1246989352
+# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2
+# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4
+# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39
+-----BEGIN CERTIFICATE-----
+MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50
+cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs
+IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz
+dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy
+NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu
+dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt
+dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0
+aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T
+RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN
+cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW
+wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1
+U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0
+jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP
+BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN
+BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/
+jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ
+Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v
+1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R
+nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH
+VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - EC1"
+# Serial: 51543124481930649114116133369
+# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc
+# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47
+# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5
+-----BEGIN CERTIFICATE-----
+MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG
+A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3
+d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu
+dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq
+RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy
+MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD
+VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0
+L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g
+Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD
+ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi
+A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt
+ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH
+Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
+BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC
+R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX
+hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G
+-----END CERTIFICATE-----
+
+# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Label: "CFCA EV ROOT"
+# Serial: 407555286
+# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30
+# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83
+# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd
+-----BEGIN CERTIFICATE-----
+MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD
+TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx
+MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j
+aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP
+T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03
+sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL
+TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5
+/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp
+7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz
+EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt
+hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP
+a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot
+aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg
+TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV
+PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv
+cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL
+tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd
+BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB
+ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT
+ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL
+jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS
+ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19
+xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d
+Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN
+5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe
+/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z
+AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ
+5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GB CA"
+# Serial: 157768595616588414422159278966750757568
+# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d
+# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed
+# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6
+-----BEGIN CERTIFICATE-----
+MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt
+MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg
+Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i
+YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x
+CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG
+b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh
+bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3
+HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx
+WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX
+1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk
+u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P
+99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r
+M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB
+BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh
+cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5
+gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO
+ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf
+aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic
+Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Label: "SZAFIR ROOT CA2"
+# Serial: 357043034767186914217277344587386743377558296292
+# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99
+# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de
+# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe
+-----BEGIN CERTIFICATE-----
+MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL
+BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6
+ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw
+NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L
+cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg
+Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN
+QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT
+3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw
+3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6
+3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5
+BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN
+XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF
+AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw
+8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG
+nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP
+oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy
+d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg
+LvWpCz/UXeHPhJ/iGcJfitYgHuNztw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA 2"
+# Serial: 44979900017204383099463764357512596969
+# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2
+# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92
+# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04
+-----BEGIN CERTIFICATE-----
+MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB
+gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu
+QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG
+A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz
+OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ
+VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3
+b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA
+DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn
+0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB
+OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE
+fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E
+Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m
+o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i
+sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW
+OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez
+Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS
+adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n
+3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC
+AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ
+F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf
+CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29
+XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm
+djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/
+WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb
+AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq
+P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko
+b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj
+XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P
+5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi
+DrW5viSP
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce
+# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6
+# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36
+-----BEGIN CERTIFICATE-----
+MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix
+DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k
+IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT
+N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v
+dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG
+A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh
+ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx
+QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
+dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA
+4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0
+AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10
+4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C
+ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV
+9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD
+gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6
+Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq
+NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko
+LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc
+Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd
+ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I
+XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI
+M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot
+9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V
+Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea
+j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh
+X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ
+l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf
+bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4
+pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK
+e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0
+vm9qp/UsQu0yrbYhnr68
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef
+# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66
+# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33
+-----BEGIN CERTIFICATE-----
+MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN
+BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl
+bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv
+b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ
+BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj
+YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5
+MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0
+dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg
+QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa
+jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC
+MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi
+C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep
+lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof
+TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR
+-----END CERTIFICATE-----
+
+# Issuer: CN=ISRG Root X1 O=Internet Security Research Group
+# Subject: CN=ISRG Root X1 O=Internet Security Research Group
+# Label: "ISRG Root X1"
+# Serial: 172886928669790476064670243504169061120
+# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e
+# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8
+# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6
+-----BEGIN CERTIFICATE-----
+MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
+TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
+cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
+WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
+ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
+h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
+0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
+A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
+T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
+B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
+B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
+KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
+OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
+jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
+qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
+rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
+hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
+ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
+3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
+NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
+ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
+TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
+jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
+4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
+mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
+emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
+-----END CERTIFICATE-----
+
+# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Label: "AC RAIZ FNMT-RCM"
+# Serial: 485876308206448804701554682760554759
+# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d
+# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20
+# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx
+CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ
+WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ
+BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG
+Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/
+yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf
+BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz
+WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF
+tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z
+374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC
+IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL
+mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7
+wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS
+MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2
+ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet
+UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H
+YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3
+LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD
+nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1
+RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM
+LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf
+77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N
+JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm
+fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp
+6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp
+1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B
+9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok
+RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv
+uu8wd+RU4riEmViAqhOLUTpPSPaLtrM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 1 O=Amazon
+# Subject: CN=Amazon Root CA 1 O=Amazon
+# Label: "Amazon Root CA 1"
+# Serial: 143266978916655856878034712317230054538369994
+# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6
+# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16
+# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e
+-----BEGIN CERTIFICATE-----
+MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM
+9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw
+IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6
+VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L
+93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm
+jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA
+A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI
+U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs
+N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv
+o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU
+5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy
+rqXRfboQnoZsG4q5WTP468SQvvG5
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 2 O=Amazon
+# Subject: CN=Amazon Root CA 2 O=Amazon
+# Label: "Amazon Root CA 2"
+# Serial: 143266982885963551818349160658925006970653239
+# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66
+# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a
+# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK
+gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ
+W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg
+1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K
+8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r
+2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me
+z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR
+8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj
+mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz
+7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6
++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI
+0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm
+UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2
+LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY
++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS
+k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl
+7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm
+btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl
+urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+
+fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63
+n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE
+76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H
+9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT
+4PsJYGw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 3 O=Amazon
+# Subject: CN=Amazon Root CA 3 O=Amazon
+# Label: "Amazon Root CA 3"
+# Serial: 143266986699090766294700635381230934788665930
+# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87
+# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e
+# SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4
+-----BEGIN CERTIFICATE-----
+MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl
+ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr
+ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr
+BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM
+YyRIHN8wfdVoOw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 4 O=Amazon
+# Subject: CN=Amazon Root CA 4 O=Amazon
+# Label: "Amazon Root CA 4"
+# Serial: 143266989758080763974105200630763877849284878
+# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd
+# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be
+# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92
+-----BEGIN CERTIFICATE-----
+MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi
+9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk
+M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB
+MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw
+CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW
+1KyLa2tJElMzrdfkviT8tQp21KW8EA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A.
+# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A.
+# Label: "LuxTrust Global Root 2"
+# Serial: 59914338225734147123941058376788110305822489521
+# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c
+# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f
+# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5
+-----BEGIN CERTIFICATE-----
+MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL
+BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV
+BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw
+MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B
+LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F
+ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem
+hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1
+EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn
+Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4
+zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ
+96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m
+j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g
+DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+
+8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j
+X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH
+hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB
+KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0
+Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT
++Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL
+BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9
+BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO
+jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9
+loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c
+qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+
+2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/
+JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre
+zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf
+LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+
+x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6
+oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr
+-----END CERTIFICATE-----
+
+# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1"
+# Serial: 1
+# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49
+# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca
+# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16
+-----BEGIN CERTIFICATE-----
+MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx
+GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp
+bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w
+KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0
+BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy
+dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG
+EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll
+IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU
+QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT
+TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg
+LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7
+a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr
+LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X
+YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/
+iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f
+AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH
+V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh
+AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf
+IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4
+lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c
+8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf
+lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Label: "GDCA TrustAUTH R5 ROOT"
+# Serial: 9009899650740120186
+# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4
+# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4
+# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93
+-----BEGIN CERTIFICATE-----
+MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE
+BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ
+IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0
+MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV
+BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w
+HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj
+Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj
+TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u
+KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj
+qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm
+MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12
+ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP
+zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk
+L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC
+jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA
+HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC
+AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB
+/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg
+p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm
+DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5
+COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry
+L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf
+JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg
+IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io
+2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV
+09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ
+XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq
+T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe
+MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor RootCert CA-1"
+# Serial: 15752444095811006489
+# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45
+# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a
+# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD
+VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk
+MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U
+cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y
+IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB
+pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h
+IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG
+A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU
+cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
+CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid
+RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V
+seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme
+9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV
+EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW
+hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/
+DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD
+ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I
+/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf
+ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ
+yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts
+L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN
+zl/HHk484IkzlQsPpTLWPFp5LBk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor RootCert CA-2"
+# Serial: 2711694510199101698
+# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64
+# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0
+# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65
+-----BEGIN CERTIFICATE-----
+MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV
+BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw
+IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy
+dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig
+Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk
+MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg
+Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD
+VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy
+dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+
+QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq
+1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp
+2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK
+DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape
+az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF
+3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88
+oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM
+g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3
+mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh
+8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd
+BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U
+nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw
+DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX
+dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+
+MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL
+/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX
+CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa
+ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW
+2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7
+N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3
+Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB
+As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp
+5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu
+1uwJ
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor ECA-1"
+# Serial: 9548242946988625984
+# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c
+# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd
+# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c
+-----BEGIN CERTIFICATE-----
+MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD
+VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk
+MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U
+cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y
+IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV
+BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw
+IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy
+dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig
+RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb
+3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA
+BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5
+3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou
+owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/
+wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF
+ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf
+BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/
+MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv
+civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2
+AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F
+hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50
+soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI
+WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi
+tJ/X5g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Label: "SSL.com Root Certification Authority RSA"
+# Serial: 8875640296558310041
+# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29
+# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb
+# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69
+-----BEGIN CERTIFICATE-----
+MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE
+BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK
+DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz
+OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv
+bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R
+xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX
+qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC
+C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3
+6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh
+/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF
+YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E
+JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc
+US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8
+ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm
++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi
+M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G
+A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV
+cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc
+Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs
+PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/
+q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0
+cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr
+a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I
+H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y
+K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu
+nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf
+oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY
+Ic2wBlX7Jz9TkHCpBB5XJ7k=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com Root Certification Authority ECC"
+# Serial: 8495723813297216424
+# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e
+# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a
+# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65
+-----BEGIN CERTIFICATE-----
+MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0
+aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz
+WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0
+b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS
+b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI
+7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg
+CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD
+VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T
+kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+
+gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority RSA R2"
+# Serial: 6248227494352943350
+# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95
+# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a
+# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c
+-----BEGIN CERTIFICATE-----
+MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV
+BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE
+CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy
+MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G
+A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD
+DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq
+M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf
+OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa
+4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9
+HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR
+aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA
+b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ
+Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV
+PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO
+pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu
+UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY
+MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV
+HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4
+9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW
+s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5
+Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg
+cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM
+79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz
+/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt
+ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm
+Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK
+QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ
+w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi
+S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07
+mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority ECC"
+# Serial: 3182246526754555285
+# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90
+# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d
+# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8
+-----BEGIN CERTIFICATE-----
+MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx
+NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv
+bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49
+AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA
+VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku
+WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP
+MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX
+5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ
+ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg
+h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Label: "GlobalSign Root CA - R6"
+# Serial: 1417766617973444989252670301619537
+# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae
+# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1
+# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg
+MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh
+bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx
+MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET
+MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ
+KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI
+xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k
+ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD
+aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw
+LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw
+1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX
+k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2
+SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h
+bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n
+WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY
+rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce
+MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu
+bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN
+nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt
+Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61
+55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj
+vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf
+cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz
+oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp
+nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs
+pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v
+JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R
+8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4
+5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GC CA"
+# Serial: 44084345621038548146064804565436152554
+# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23
+# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31
+# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d
+-----BEGIN CERTIFICATE-----
+MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw
+CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91
+bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg
+Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ
+BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu
+ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS
+b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni
+eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W
+p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T
+rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV
+57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg
+Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R1 O=Google Trust Services LLC
+# Subject: CN=GTS Root R1 O=Google Trust Services LLC
+# Label: "GTS Root R1"
+# Serial: 146587175971765017618439757810265552097
+# MD5 Fingerprint: 82:1a:ef:d4:d2:4a:f2:9f:e2:3d:97:06:14:70:72:85
+# SHA1 Fingerprint: e1:c9:50:e6:ef:22:f8:4c:56:45:72:8b:92:20:60:d7:d5:a7:a3:e8
+# SHA256 Fingerprint: 2a:57:54:71:e3:13:40:bc:21:58:1c:bd:2c:f1:3e:15:84:63:20:3e:ce:94:bc:f9:d3:cc:19:6b:f0:9a:54:72
+-----BEGIN CERTIFICATE-----
+MIIFWjCCA0KgAwIBAgIQbkepxUtHDA3sM9CJuRz04TANBgkqhkiG9w0BAQwFADBH
+MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM
+QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy
+MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl
+cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaM
+f/vo27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vX
+mX7wCl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7
+zUjwTcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0P
+fyblqAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtc
+vfaHszVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4
+Zor8Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUsp
+zBmkMiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOO
+Rc92wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYW
+k70paDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+
+DVrNVjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgF
+lQIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBADiW
+Cu49tJYeX++dnAsznyvgyv3SjgofQXSlfKqE1OXyHuY3UjKcC9FhHb8owbZEKTV1
+d5iyfNm9dKyKaOOpMQkpAWBz40d8U6iQSifvS9efk+eCNs6aaAyC58/UEBZvXw6Z
+XPYfcX3v73svfuo21pdwCxXu11xWajOl40k4DLh9+42FpLFZXvRq4d2h9mREruZR
+gyFmxhE+885H7pwoHyXa/6xmld01D1zvICxi/ZG6qcz8WpyTgYMpl0p8WnK0OdC3
+d8t5/Wk6kjftbjhlRn7pYL15iJdfOBL07q9bgsiG1eGZbYwE8na6SfZu6W0eX6Dv
+J4J2QPim01hcDyxC2kLGe4g0x8HYRZvBPsVhHdljUEn2NIVq4BjFbkerQUIpm/Zg
+DdIx02OYI5NaAIFItO/Nis3Jz5nu2Z6qNuFoS3FJFDYoOj0dzpqPJeaAcWErtXvM
++SUWgeExX6GjfhaknBZqlxi9dnKlC54dNuYvoS++cJEPqOba+MSSQGwlfnuzCdyy
+F62ARPBopY+Udf90WuioAnwMCeKpSwughQtiue+hMZL77/ZRBIls6Kl0obsXs7X9
+SQ98POyDGCBDTtWTurQ0sR8WNh8M5mQ5Fkzc4P4dyKliPUDqysU0ArSuiYgzNdws
+E3PYJ/HQcu51OyLemGhmW/HGY0dVHLqlCFF1pkgl
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R2 O=Google Trust Services LLC
+# Subject: CN=GTS Root R2 O=Google Trust Services LLC
+# Label: "GTS Root R2"
+# Serial: 146587176055767053814479386953112547951
+# MD5 Fingerprint: 44:ed:9a:0e:a4:09:3b:00:f2:ae:4c:a3:c6:61:b0:8b
+# SHA1 Fingerprint: d2:73:96:2a:2a:5e:39:9f:73:3f:e1:c7:1e:64:3f:03:38:34:fc:4d
+# SHA256 Fingerprint: c4:5d:7b:b0:8e:6d:67:e6:2e:42:35:11:0b:56:4e:5f:78:fd:92:ef:05:8c:84:0a:ea:4e:64:55:d7:58:5c:60
+-----BEGIN CERTIFICATE-----
+MIIFWjCCA0KgAwIBAgIQbkepxlqz5yDFMJo/aFLybzANBgkqhkiG9w0BAQwFADBH
+MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM
+QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy
+MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl
+cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3Lv
+CvptnfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3Kg
+GjSY6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9Bu
+XvAuMC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOd
+re7kRXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXu
+PuWgf9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1
+mKPV+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K
+8YzodDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqj
+x5RWIr9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsR
+nTKaG73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0
+kzCqgc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9Ok
+twIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBALZp
+8KZ3/p7uC4Gt4cCpx/k1HUCCq+YEtN/L9x0Pg/B+E02NjO7jMyLDOfxA325BS0JT
+vhaI8dI4XsRomRyYUpOM52jtG2pzegVATX9lO9ZY8c6DR2Dj/5epnGB3GFW1fgiT
+z9D2PGcDFWEJ+YF59exTpJ/JjwGLc8R3dtyDovUMSRqodt6Sm2T4syzFJ9MHwAiA
+pJiS4wGWAqoC7o87xdFtCjMwc3i5T1QWvwsHoaRc5svJXISPD+AVdyx+Jn7axEvb
+pxZ3B7DNdehyQtaVhJ2Gg/LkkM0JR9SLA3DaWsYDQvTtN6LwG1BUSw7YhN4ZKJmB
+R64JGz9I0cNv4rBgF/XuIwKl2gBbbZCr7qLpGzvpx0QnRY5rn/WkhLx3+WuXrD5R
+RaIRpsyF7gpo8j5QOHokYh4XIDdtak23CZvJ/KRY9bb7nE4Yu5UC56GtmwfuNmsk
+0jmGwZODUNKBRqhfYlcsu2xkiAhu7xNUX90txGdj08+JN7+dIPT7eoOboB6BAFDC
+5AwiWVIQ7UNWhwD4FFKnHYuTjKJNRn8nxnGbJN7k2oaLDX5rIMHAnuFl2GqjpuiF
+izoHCBy69Y9Vmhh1fuXsgWbRIXOhNUQLgD1bnF5vKheW0YMjiGZt5obicDIvUiLn
+yOd/xCxgXS/Dr55FBcOEArf9LAhST4Ldo/DUhgkC
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R3 O=Google Trust Services LLC
+# Subject: CN=GTS Root R3 O=Google Trust Services LLC
+# Label: "GTS Root R3"
+# Serial: 146587176140553309517047991083707763997
+# MD5 Fingerprint: 1a:79:5b:6b:04:52:9c:5d:c7:74:33:1b:25:9a:f9:25
+# SHA1 Fingerprint: 30:d4:24:6f:07:ff:db:91:89:8a:0b:e9:49:66:11:eb:8c:5e:46:e5
+# SHA256 Fingerprint: 15:d5:b8:77:46:19:ea:7d:54:ce:1c:a6:d0:b0:c4:03:e0:37:a9:17:f1:31:e8:a0:4e:1e:6b:7a:71:ba:bc:e5
+-----BEGIN CERTIFICATE-----
+MIICDDCCAZGgAwIBAgIQbkepx2ypcyRAiQ8DVd2NHTAKBggqhkjOPQQDAzBHMQsw
+CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
+MBIGA1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
+MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
+Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQA
+IgNiAAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout
+736GjOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2A
+DDL24CejQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
+DgQWBBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEAgFuk
+fCPAlaUs3L6JbyO5o91lAFJekazInXJ0glMLfalAvWhgxeG4VDvBNhcl2MG9AjEA
+njWSdIUlUfUk7GRSJFClH9voy8l27OyCbvWFGFPouOOaKaqW04MjyaR7YbPMAuhd
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R4 O=Google Trust Services LLC
+# Subject: CN=GTS Root R4 O=Google Trust Services LLC
+# Label: "GTS Root R4"
+# Serial: 146587176229350439916519468929765261721
+# MD5 Fingerprint: 5d:b6:6a:c4:60:17:24:6a:1a:99:a8:4b:ee:5e:b4:26
+# SHA1 Fingerprint: 2a:1d:60:27:d9:4a:b1:0a:1c:4d:91:5c:cd:33:a0:cb:3e:2d:54:cb
+# SHA256 Fingerprint: 71:cc:a5:39:1f:9e:79:4b:04:80:25:30:b3:63:e1:21:da:8a:30:43:bb:26:66:2f:ea:4d:ca:7f:c9:51:a4:bd
+-----BEGIN CERTIFICATE-----
+MIICCjCCAZGgAwIBAgIQbkepyIuUtui7OyrYorLBmTAKBggqhkjOPQQDAzBHMQsw
+CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
+MBIGA1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
+MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
+Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQA
+IgNiAATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzu
+hXyiQHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/l
+xKvRHYqjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
+DgQWBBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNnADBkAjBqUFJ0
+CMRw3J5QdCHojXohw0+WbhXRIjVhLfoIN+4Zba3bssx9BzT1YBkstTTZbyACMANx
+sbqjYAuG7ZoIapVon+Kz4ZNkfF6Tpt95LY2F45TPI11xzPKwTdb+mciUqXWi4w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=UCA Global G2 Root O=UniTrust
+# Subject: CN=UCA Global G2 Root O=UniTrust
+# Label: "UCA Global G2 Root"
+# Serial: 124779693093741543919145257850076631279
+# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8
+# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a
+# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9
+MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH
+bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x
+CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds
+b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr
+b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9
+kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm
+VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R
+VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc
+C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj
+tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY
+D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv
+j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl
+NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6
+iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP
+O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/
+BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV
+ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj
+L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5
+1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl
+1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU
+b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV
+PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj
+y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb
+EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg
+DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI
++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy
+YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX
+UB+K+wb1whnw0A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=UCA Extended Validation Root O=UniTrust
+# Subject: CN=UCA Extended Validation Root O=UniTrust
+# Label: "UCA Extended Validation Root"
+# Serial: 106100277556486529736699587978573607008
+# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2
+# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a
+# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24
+-----BEGIN CERTIFICATE-----
+MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH
+MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF
+eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx
+MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV
+BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog
+D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS
+sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop
+O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk
+sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi
+c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj
+VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz
+KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/
+TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G
+sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs
+1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD
+fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T
+AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN
+l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR
+ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ
+VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5
+c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp
+4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s
+t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj
+2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO
+vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C
+xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx
+cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM
+fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
+# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
+# Label: "Certigna Root CA"
+# Serial: 269714418870597844693661054334862075617
+# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77
+# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43
+# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68
+-----BEGIN CERTIFICATE-----
+MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw
+WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw
+MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x
+MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD
+VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX
+BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw
+ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO
+ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M
+CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu
+I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm
+TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh
+C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf
+ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz
+IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT
+Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k
+JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5
+hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB
+GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of
+1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov
+L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo
+dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr
+aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq
+hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L
+6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG
+HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6
+0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB
+lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi
+o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1
+gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v
+faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63
+Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh
+jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw
+3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI
+# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI
+# Label: "emSign Root CA - G1"
+# Serial: 235931866688319308814040
+# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac
+# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c
+# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67
+-----BEGIN CERTIFICATE-----
+MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD
+VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU
+ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH
+MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO
+MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv
+Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz
+f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO
+8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq
+d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM
+tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt
+Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB
+o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD
+AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x
+PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM
+wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d
+GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH
+6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby
+RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx
+iN66zB+Afko=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI
+# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI
+# Label: "emSign ECC Root CA - G3"
+# Serial: 287880440101571086945156
+# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40
+# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1
+# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b
+-----BEGIN CERTIFICATE-----
+MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG
+EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo
+bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g
+RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ
+TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s
+b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw
+djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0
+WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS
+fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB
+zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq
+hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB
+CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD
++JbNR6iC8hZVdyR+EhCVBCyj
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI
+# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI
+# Label: "emSign Root CA - C1"
+# Serial: 825510296613316004955058
+# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68
+# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01
+# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f
+-----BEGIN CERTIFICATE-----
+MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG
+A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg
+SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw
+MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln
+biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v
+dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ
+BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ
+HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH
+3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH
+GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c
+xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1
+aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq
+TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87
+/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4
+kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG
+YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT
++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo
+WXzhriKi4gp6D/piq1JM4fHfyr6DDUI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI
+# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI
+# Label: "emSign ECC Root CA - C3"
+# Serial: 582948710642506000014504
+# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5
+# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66
+# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3
+-----BEGIN CERTIFICATE-----
+MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG
+EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx
+IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw
+MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln
+biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND
+IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci
+MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti
+sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O
+BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB
+Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c
+3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J
+0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post
+# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post
+# Label: "Hongkong Post Root CA 3"
+# Serial: 46170865288971385588281144162979347873371282084
+# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0
+# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02
+# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6
+-----BEGIN CERTIFICATE-----
+MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL
+BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ
+SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n
+a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5
+NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT
+CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u
+Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO
+dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI
+VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV
+9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY
+2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY
+vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt
+bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb
+x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+
+l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK
+TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj
+Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e
+i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw
+DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG
+7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk
+MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr
+gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk
+GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS
+3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm
+Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+
+l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c
+JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP
+L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa
+LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG
+mpv0
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIFZTCCA02gAwIBAgIKUlD06gAAAAAAGDANBgkqhkiG9w0BAQ0FADAfMR0wGwYD
+VQQDExRZYW5kZXhJbnRlcm5hbFJvb3RDQTAeFw0xODA2MjgxMTE0NTdaFw0zMjA2
+MjgxMTI0NTdaMFsxEjAQBgoJkiaJk/IsZAEZFgJydTEWMBQGCgmSJomT8ixkARkW
+BnlhbmRleDESMBAGCgmSJomT8ixkARkWAmxkMRkwFwYDVQQDExBZYW5kZXhJbnRl
+cm5hbENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAy6Sab1PCbISk
+GSAUpr6JJKLXlf4O+cBhjALfQn2QpPL/cDjZ2+MPXuAUgE8KT+/mbAGA2rJID0KY
+RjDSkByxnhoX8jwWsmPYXoAmOMPkgKRG9/ZefnMrK4oVhGgLmxnpbEkNbGh88cJ1
+OVzgD5LVHSpDqm7iEuoUPOJCWXQ51+rZ0Lw9zBEU8v3yXXI345iWpLj92pOQDH0G
+Tqr7BnQywxcgb5BYdywayacIT7UTJZk7832m5k7Oa3qMIKKXHsx26rNVUVBfpzph
+OFvqkLetOKHk7827NDKr3I3OFXzQk4gy6tagv8PZNp+XGOBWfYkbLfI4xbTnjHIW
+n5q1gfKPOQIDAQABo4IBZTCCAWEwEAYJKwYBBAGCNxUBBAMCAQIwIwYJKwYBBAGC
+NxUCBBYEFNgaef9LcdQKs6qfsfiuWF5p/yqRMB0GA1UdDgQWBBSP3TKDCRNT3ZEa
+Zumz1DzFtPJnSDBZBgNVHSAEUjBQME4GBFUdIAAwRjBEBggrBgEFBQcCARY4aHR0
+cDovL2NybHMueWFuZGV4LnJ1L2Nwcy9ZYW5kZXhJbnRlcm5hbENBL3BvbGljaWVz
+Lmh0bWwwGQYJKwYBBAGCNxQCBAweCgBTAHUAYgBDAEEwCwYDVR0PBAQDAgGGMA8G
+A1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUq7nF/6Hv5lMdMzkihNF21DdOLWow
+VAYDVR0fBE0wSzBJoEegRYZDaHR0cDovL2NybHMueWFuZGV4LnJ1L1lhbmRleElu
+dGVybmFsUm9vdENBL1lhbmRleEludGVybmFsUm9vdENBLmNybDANBgkqhkiG9w0B
+AQ0FAAOCAgEAQnOiyykjwtSuCBV6rSiM8Q1rQIcfyqn1JBxSGeBMABc64loWSPaQ
+DtYPIW5rwNX7TQ94bjyYgCxhwHqUED/fcBOmXCQ2iBsdy5LOcNEZaC2kBHQuZ7dL
+0fSvpE98a41y9yY6CJGFXg8E/4GrQwgQEqT5Qbe9GHPadpRu+ptVvI6uLZG3ks2o
+oodjOm5C0SIo1pY4OtPAYE/AzTaYkTFbAqYcPfEfXHEOigBJBeXnQs7cANxX/RaF
+PnHEjZbGY57EtBP6p5ckndkfEmqp3PLXbsQteNOVpsUw5eVqEzinSisBmLc28nnr
+5QEojRontAaZd7ZzB5zaGkVuE+0laUUWSNBhfGE1R3LrTJEK9L7FEsBBprOxIWww
+CvLmAfglouwuNRc2TjRdfnZaEfPLD7NYIF4ahXPAMcfTii23Tlr2uB7LetNykSlX
+Z9S5/yf61VFEKnxuipFPNgtKqPcFgFUxlEb+wOeOfYZ7ex8VlpMBWbadj3Go025b
+KZUwKwHDQvgJ5pz9g3t+t5Xieu2pwyddWGu+1SItRohRhlyTiep7oW6yTps7Qt0e
+8pdLuLG7ZF19h1Pxi+dVbeaeNcsGEAOdRuCk+RTZHNe+J4yC8tNJOepnfYDul6SB
+RjFWthiFK45+TZRHAcsG9JuV8JNvgoKaL75v/GUsKaeJ3Cps3rBStfc=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIFGTCCAwGgAwIBAgIQJMM7ZIy2SYxCBgK7WcFwnjANBgkqhkiG9w0BAQ0FADAf
+MR0wGwYDVQQDExRZYW5kZXhJbnRlcm5hbFJvb3RDQTAeFw0xMzAyMTExMzQxNDNa
+Fw0zMzAyMTExMzUxNDJaMB8xHTAbBgNVBAMTFFlhbmRleEludGVybmFsUm9vdENB
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAgb4xoQjBQ7oEFk8EHVGy
+1pDEmPWw0Wgw5nX9RM7LL2xQWyUuEq+Lf9Dgh+O725aZ9+SO2oEs47DHHt81/fne
+5N6xOftRrCpy8hGtUR/A3bvjnQgjs+zdXvcO9cTuuzzPTFSts/iZATZsAruiepMx
+SGj9S1fGwvYws/yiXWNoNBz4Tu1Tlp0g+5fp/ADjnxc6DqNk6w01mJRDbx+6rlBO
+aIH2tQmJXDVoFdrhmBK9qOfjxWlIYGy83TnrvdXwi5mKTMtpEREMgyNLX75UjpvO
+NkZgBvEXPQq+g91wBGsWIE2sYlguXiBniQgAJOyRuSdTxcJoG8tZkLDPRi5RouWY
+gxXr13edn1TRDGco2hkdtSUBlajBMSvAq+H0hkslzWD/R+BXkn9dh0/DFnxVt4XU
+5JbFyd/sKV/rF4Vygfw9ssh1ZIWdqkfZ2QXOZ2gH4AEeoN/9vEfUPwqPVzL0XEZK
+r4s2WjU9mE5tHrVsQOZ80wnvYHYi2JHbl0hr5ghs4RIyJwx6LEEnj2tzMFec4f7o
+dQeSsZpgRJmpvpAfRTxhIRjZBrKxnMytedAkUPguBQwjVCn7+EaKiJfpu42JG8Mm
++/dHi+Q9Tc+0tX5pKOIpQMlMxMHw8MfPmUjC3AAd9lsmCtuybYoeN2IRdbzzchJ8
+l1ZuoI3gH7pcIeElfVSqSBkCAwEAAaNRME8wCwYDVR0PBAQDAgGGMA8GA1UdEwEB
+/wQFMAMBAf8wHQYDVR0OBBYEFKu5xf+h7+ZTHTM5IoTRdtQ3Ti1qMBAGCSsGAQQB
+gjcVAQQDAgEAMA0GCSqGSIb3DQEBDQUAA4ICAQAVpyJ1qLjqRLC34F1UXkC3vxpO
+nV6WgzpzA+DUNog4Y6RhTnh0Bsir+I+FTl0zFCm7JpT/3NP9VjfEitMkHehmHhQK
+c7cIBZSF62K477OTvLz+9ku2O/bGTtYv9fAvR4BmzFfyPDoAKOjJSghD1p/7El+1
+eSjvcUBzLnBUtxO/iYXRNo7B3+1qo4F5Hz7rPRLI0UWW/0UAfVCO2fFtyF6C1iEY
+/q0Ldbf3YIaMkf2WgGhnX9yH/8OiIij2r0LVNHS811apyycjep8y/NkG4q1Z9jEi
+VEX3P6NEL8dWtXQlvlNGMcfDT3lmB+tS32CPEUwce/Ble646rukbERRwFfxXojpf
+C6ium+LtJc7qnK6ygnYF4D6mz4H+3WaxJd1S1hGQxOb/3WVw63tZFnN62F6/nc5g
+6T44Yb7ND6y3nVcygLpbQsws6HsjX65CoSjrrPn0YhKxNBscF7M7tLTW/5LK9uhk
+yjRCkJ0YagpeLxfV1l1ZJZaTPZvY9+ylHnWHhzlq0FzcrooSSsp4i44DB2K7O2ID
+87leymZkKUY6PMDa4GkDJx0dG4UXDhRETMf+NkYgtLJ+UIzMNskwVDcxO4kVL+Hi
+Pj78bnC5yCw8P5YylR45LdxLzLO68unoXOyFz1etGXzszw8lJI9LNubYxk77mK8H
+LpuQKbSbIERsmR+QqQ==
+-----END CERTIFICATE-----
+"""
+
+
+def load_default_pem():
+ global data
+
+ if six.PY3:
+ return data.encode("utf-8")
+ return data
diff --git a/contrib/python/ydb/py2/ydb/driver.py b/contrib/python/ydb/py2/ydb/driver.py
new file mode 100644
index 0000000000..9b3fa99cfa
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/driver.py
@@ -0,0 +1,242 @@
+# -*- coding: utf-8 -*-
+from . import credentials as credentials_impl, table, scheme, pool
+from . import tracing
+import six
+import os
+import grpc
+from . import _utilities
+
+if six.PY2:
+ Any = None
+else:
+ from typing import Any # noqa
+
+
+class RPCCompression:
+ """Indicates the compression method to be used for an RPC."""
+
+ NoCompression = grpc.Compression.NoCompression
+ Deflate = grpc.Compression.Deflate
+ Gzip = grpc.Compression.Gzip
+
+
+def default_credentials(credentials=None, tracer=None):
+ tracer = tracer if tracer is not None else tracing.Tracer(None)
+ with tracer.trace("Driver.default_credentials") as ctx:
+ if credentials is not None:
+ ctx.trace({"credentials.prepared": True})
+ return credentials
+
+ service_account_key_file = os.getenv("YDB_SERVICE_ACCOUNT_KEY_FILE_CREDENTIALS")
+ if service_account_key_file is not None:
+ ctx.trace({"credentials.service_account_key_file": True})
+ import ydb.iam
+
+ return ydb.iam.ServiceAccountCredentials.from_file(service_account_key_file)
+
+ anonymous_credetials = os.getenv("YDB_ANONYMOUS_CREDENTIALS", "0") == "1"
+ if anonymous_credetials:
+ ctx.trace({"credentials.anonymous": True})
+ return credentials_impl.AnonymousCredentials()
+
+ metadata_credentials = os.getenv("YDB_METADATA_CREDENTIALS", "0") == "1"
+ if metadata_credentials:
+ ctx.trace({"credentials.metadata": True})
+ import ydb.iam
+
+ return ydb.iam.MetadataUrlCredentials(tracer=tracer)
+
+ access_token = os.getenv("YDB_ACCESS_TOKEN_CREDENTIALS")
+ if access_token is not None:
+ ctx.trace({"credentials.access_token": True})
+ return credentials_impl.AuthTokenCredentials(access_token)
+
+ import ydb.iam
+
+ return ydb.iam.MetadataUrlCredentials(tracer=tracer)
+
+
+class DriverConfig(object):
+ __slots__ = (
+ "endpoint",
+ "database",
+ "ca_cert",
+ "channel_options",
+ "credentials",
+ "use_all_nodes",
+ "root_certificates",
+ "certificate_chain",
+ "private_key",
+ "grpc_keep_alive_timeout",
+ "secure_channel",
+ "table_client_settings",
+ "endpoints",
+ "primary_user_agent",
+ "tracer",
+ "grpc_lb_policy_name",
+ "discovery_request_timeout",
+ "compression",
+ )
+
+ def __init__(
+ self,
+ endpoint,
+ database=None,
+ ca_cert=None,
+ auth_token=None,
+ channel_options=None,
+ credentials=None,
+ use_all_nodes=False,
+ root_certificates=None,
+ certificate_chain=None,
+ private_key=None,
+ grpc_keep_alive_timeout=None,
+ table_client_settings=None,
+ endpoints=None,
+ primary_user_agent="python-library",
+ tracer=None,
+ grpc_lb_policy_name="round_robin",
+ discovery_request_timeout=10,
+ compression=None,
+ ):
+ """
+ A driver config to initialize a driver instance
+
+ :param endpoint: An endpoint specified in pattern host:port to be used for initial channel initialization and for YDB endpoint discovery mechanism
+ :param database: A name of the database
+ :param ca_cert: A CA certificate when SSL should be used
+ :param auth_token: An authentication token
+ :param credentials: An instance of AbstractCredentials
+ :param use_all_nodes: A balancing policy that forces to use all available nodes.
+ :param root_certificates: The PEM-encoded root certificates as a byte string.
+ :param private_key: The PEM-encoded private key as a byte string, or None if no\
+ private key should be used.
+ :param certificate_chain: The PEM-encoded certificate chain as a byte string\
+ to use or None if no certificate chain should be used.
+ :param grpc_keep_alive_timeout: GRpc KeepAlive timeout, ms
+ :param ydb.Tracer tracer: ydb.Tracer instance to trace requests in driver.\
+ If tracing aio ScopeManager must be ContextVarsScopeManager
+ :param grpc_lb_policy_name: A load balancing policy to be used for discovery channel construction. Default value is `round_robin`
+ :param discovery_request_timeout: A default timeout to complete the discovery. The default value is 10 seconds.
+
+ """
+ self.endpoint = endpoint
+ self.database = database
+ self.ca_cert = ca_cert
+ self.channel_options = channel_options
+ self.secure_channel = _utilities.is_secure_protocol(endpoint)
+ self.endpoint = _utilities.wrap_endpoint(self.endpoint)
+ self.endpoints = []
+ if endpoints is not None:
+ self.endpoints = [_utilities.wrap_endpoint(endp) for endp in endpoints]
+ if auth_token is not None:
+ credentials = credentials_impl.AuthTokenCredentials(auth_token)
+ self.credentials = credentials
+ self.use_all_nodes = use_all_nodes
+ self.root_certificates = root_certificates
+ self.certificate_chain = certificate_chain
+ self.private_key = private_key
+ self.grpc_keep_alive_timeout = grpc_keep_alive_timeout
+ self.table_client_settings = table_client_settings
+ self.primary_user_agent = primary_user_agent
+ self.tracer = tracer if tracer is not None else tracing.Tracer(None)
+ self.grpc_lb_policy_name = grpc_lb_policy_name
+ self.discovery_request_timeout = discovery_request_timeout
+ self.compression = compression
+
+ def set_database(self, database):
+ self.database = database
+ return self
+
+ @classmethod
+ def default_from_endpoint_and_database(
+ cls, endpoint, database, root_certificates=None, credentials=None, **kwargs
+ ):
+ return cls(
+ endpoint,
+ database,
+ credentials=default_credentials(credentials),
+ root_certificates=root_certificates,
+ **kwargs
+ )
+
+ @classmethod
+ def default_from_connection_string(
+ cls, connection_string, root_certificates=None, credentials=None, **kwargs
+ ):
+ endpoint, database = _utilities.parse_connection_string(connection_string)
+ return cls(
+ endpoint,
+ database,
+ credentials=default_credentials(credentials),
+ root_certificates=root_certificates,
+ **kwargs
+ )
+
+ def set_grpc_keep_alive_timeout(self, timeout):
+ self.grpc_keep_alive_timeout = timeout
+ return self
+
+
+ConnectionParams = DriverConfig
+
+
+def get_config(
+ driver_config=None,
+ connection_string=None,
+ endpoint=None,
+ database=None,
+ root_certificates=None,
+ credentials=None,
+ config_class=DriverConfig,
+ **kwargs
+):
+ if driver_config is None:
+ if connection_string is not None:
+ driver_config = config_class.default_from_connection_string(
+ connection_string, root_certificates, credentials, **kwargs
+ )
+ else:
+ driver_config = config_class.default_from_endpoint_and_database(
+ endpoint, database, root_certificates, credentials, **kwargs
+ )
+ return driver_config
+ return driver_config
+
+
+class Driver(pool.ConnectionPool):
+ __slots__ = ("scheme_client", "table_client")
+
+ def __init__(
+ self,
+ driver_config=None,
+ connection_string=None,
+ endpoint=None,
+ database=None,
+ root_certificates=None,
+ credentials=None,
+ **kwargs
+ ):
+ """
+ Constructs a driver instance to be used in table and scheme clients.
+ It encapsulates endpoints discovery mechanism and provides ability to execute RPCs
+ on discovered endpoints
+
+ :param driver_config: A driver config
+ :param connection_string: A string in the following format: <protocol>://<hostname>:<port>/?database=/path/to/the/database
+ :param endpoint: An endpoint specified in the following format: <protocol>://<hostname>:<port>
+ :param database: A database path
+ :param credentials: Credentials. If not specified, credentials are constructed by default.
+ """
+ driver_config = get_config(
+ driver_config,
+ connection_string,
+ endpoint,
+ database,
+ root_certificates,
+ credentials,
+ )
+
+ super(Driver, self).__init__(driver_config)
+ self.scheme_client = scheme.SchemeClient(self)
+ self.table_client = table.TableClient(self, driver_config.table_client_settings)
diff --git a/contrib/python/ydb/py2/ydb/export.py b/contrib/python/ydb/py2/ydb/export.py
new file mode 100644
index 0000000000..aac31315ca
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/export.py
@@ -0,0 +1,280 @@
+import enum
+
+from . import _apis
+
+from . import settings_impl as s_impl
+
+try:
+ from ydb.public.api.protos import ydb_export_pb2
+ from ydb.public.api.grpc import ydb_export_v1_pb2_grpc
+except ImportError:
+ from contrib.ydb.public.api.protos import ydb_export_pb2
+ from contrib.ydb.public.api.grpc import ydb_export_v1_pb2_grpc
+
+from . import operation
+
+_ExportToYt = "ExportToYt"
+_ExportToS3 = "ExportToS3"
+_progresses = {}
+
+
+@enum.unique
+class ExportProgress(enum.IntEnum):
+ UNSPECIFIED = 0
+ PREPARING = 1
+ TRANSFER_DATA = 2
+ DONE = 3
+ CANCELLATION = 4
+ CANCELLED = 5
+
+
+def _initialize_progresses():
+ for key, value in ydb_export_pb2.ExportProgress.Progress.items():
+ _progresses[value] = getattr(ExportProgress, key[len("PROGRESS_") :])
+
+
+_initialize_progresses()
+
+
+class ExportToYTOperation(operation.Operation):
+ def __init__(self, rpc_state, response, driver):
+ super(ExportToYTOperation, self).__init__(rpc_state, response, driver)
+ metadata = ydb_export_pb2.ExportToYtMetadata()
+ response.operation.metadata.Unpack(metadata)
+ self.progress = _progresses.get(metadata.progress)
+ self.items_progress = metadata.items_progress
+
+ def __str__(self):
+ return "ExportToYTOperation<id: %s, progress: %s>" % (
+ self.id,
+ self.progress.name,
+ )
+
+ def __repr__(self):
+ return self.__str__()
+
+
+class ExportToS3Operation(operation.Operation):
+ def __init__(self, rpc_state, response, driver):
+ super(ExportToS3Operation, self).__init__(rpc_state, response, driver)
+ metadata = ydb_export_pb2.ExportToS3Metadata()
+ response.operation.metadata.Unpack(metadata)
+ self.progress = _progresses.get(metadata.progress)
+ self.items_progress = metadata.items_progress
+
+ def __str__(self):
+ return "ExportToS3Operation<id: %s, progress: %s>" % (
+ self.id,
+ self.progress.name,
+ )
+
+ def __repr__(self):
+ return self.__str__()
+
+
+class ExportToYTSettings(s_impl.BaseRequestSettings):
+ def __init__(self):
+ super(ExportToYTSettings, self).__init__()
+ self.items = []
+ self.number_of_retries = 0
+ self.token = None
+ self.host = None
+ self.port = None
+ self.uid = None
+
+ def with_port(self, port):
+ self.port = port
+ return self
+
+ def with_host(self, host):
+ self.host = host
+ return self
+
+ def with_uid(self, uid):
+ self.uid = uid
+ return self
+
+ def with_token(self, token):
+ self.token = token
+ return self
+
+ def with_item(self, item):
+ """
+ :param item: A source & destination tuple to export.
+ """
+ self.items.append(item)
+ return self
+
+ def with_source_and_destination(self, source_path, destination_path):
+ return self.with_item((source_path, destination_path))
+
+ def with_number_of_retries(self, number_of_retries):
+ self.number_of_retries = number_of_retries
+ return self
+
+ def with_items(self, *items):
+ self.items.extend(items)
+ return self
+
+
+class ExportToS3Settings(s_impl.BaseRequestSettings):
+ def __init__(self):
+ super(ExportToS3Settings, self).__init__()
+ self.items = []
+ self.bucket = None
+ self.endpoint = None
+ self.scheme = 2
+ self.uid = None
+ self.access_key = None
+ self.secret_key = None
+ self.number_of_retries = 0
+ self.storage_class = None
+ self.export_compression = None
+
+ def with_scheme(self, scheme):
+ self.scheme = scheme
+ return self
+
+ def with_storage_class(self, storage_class):
+ self.storage_class = storage_class
+ return self
+
+ def with_export_compression(self, compression):
+ self.export_compression = compression
+ return self
+
+ def with_bucket(self, bucket):
+ self.bucket = bucket
+ return self
+
+ def with_endpoint(self, endpoint):
+ self.endpoint = endpoint
+ return self
+
+ def with_access_key(self, access_key):
+ self.access_key = access_key
+ return self
+
+ def with_uid(self, uid):
+ self.uid = uid
+ return self
+
+ def with_secret_key(self, secret_key):
+ self.secret_key = secret_key
+ return self
+
+ def with_number_of_retries(self, number_of_retries):
+ self.number_of_retries = number_of_retries
+ return self
+
+ def with_source_and_destination(self, source_path, destination_prefix):
+ return self.with_item((source_path, destination_prefix))
+
+ def with_item(self, item):
+ self.items.append(item)
+ return self
+
+ def with_items(self, *items):
+ self.items.extend(items)
+ return self
+
+
+def _export_to_yt_request_factory(settings):
+ request = ydb_export_pb2.ExportToYtRequest(
+ settings=ydb_export_pb2.ExportToYtSettings(
+ host=settings.host, token=settings.token
+ )
+ )
+
+ if settings.number_of_retries > 0:
+ request.settings.number_of_retries = settings.number_of_retries
+
+ if settings.port:
+ request.settings.port = settings.port
+
+ for source_path, destination_path in settings.items:
+ request.settings.items.add(
+ source_path=source_path, destination_path=destination_path
+ )
+
+ return request
+
+
+def _get_operation_request(operation_id):
+ request = _apis.ydb_operation.GetOperationRequest(id=operation_id)
+ return request
+
+
+def _export_to_s3_request_factory(settings):
+ request = ydb_export_pb2.ExportToS3Request(
+ settings=ydb_export_pb2.ExportToS3Settings(
+ endpoint=settings.endpoint,
+ bucket=settings.bucket,
+ access_key=settings.access_key,
+ secret_key=settings.secret_key,
+ scheme=settings.scheme,
+ storage_class=settings.storage_class,
+ )
+ )
+
+ if settings.uid is not None:
+ request.operation_params.labels["uid"] = settings.uid
+
+ if settings.number_of_retries > 0:
+ request.settings.number_of_retries = settings.number_of_retries
+
+ if settings.export_compression is not None:
+ request.settings.compression = settings.export_compression
+
+ for source_path, destination_prefix in settings.items:
+ request.settings.items.add(
+ source_path=source_path,
+ destination_prefix=destination_prefix,
+ )
+
+ return request
+
+
+class ExportClient(object):
+ def __init__(self, driver):
+ self._driver = driver
+
+ def get_export_to_s3_operation(self, operation_id, settings=None):
+ return self._driver(
+ _get_operation_request(operation_id),
+ _apis.OperationService.Stub,
+ _apis.OperationService.GetOperation,
+ ExportToS3Operation,
+ settings,
+ (self._driver,),
+ )
+
+ def export_to_s3(self, settings):
+ return self._driver(
+ _export_to_s3_request_factory(settings),
+ ydb_export_v1_pb2_grpc.ExportServiceStub,
+ _ExportToS3,
+ ExportToS3Operation,
+ settings,
+ (self._driver,),
+ )
+
+ def export_to_yt(self, settings):
+ return self._driver(
+ _export_to_yt_request_factory(settings),
+ ydb_export_v1_pb2_grpc.ExportServiceStub,
+ _ExportToYt,
+ ExportToYTOperation,
+ settings,
+ (self._driver,),
+ )
+
+ def async_export_to_yt(self, settings):
+ return self._driver.future(
+ _export_to_yt_request_factory(settings),
+ ydb_export_v1_pb2_grpc.ExportServiceStub,
+ _ExportToYt,
+ ExportToYTOperation,
+ settings,
+ (self._driver,),
+ )
diff --git a/contrib/python/ydb/py2/ydb/global_settings.py b/contrib/python/ydb/py2/ydb/global_settings.py
new file mode 100644
index 0000000000..8a3ec9535e
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/global_settings.py
@@ -0,0 +1,16 @@
+from . import convert
+from . import table
+
+
+def global_allow_truncated_result(enabled=True):
+ """
+ call global_allow_truncated_result(False) for safer execution and compatibility with future changes
+ """
+ convert._default_allow_truncated_result = enabled
+
+
+def global_allow_split_transactions(enabled):
+ """
+ call global_allow_split_transactions(False) for safer execution and compatibility with future changes
+ """
+ table._allow_split_transaction = enabled
diff --git a/contrib/python/ydb/py2/ydb/iam/__init__.py b/contrib/python/ydb/py2/ydb/iam/__init__.py
new file mode 100644
index 0000000000..7167efe13e
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/iam/__init__.py
@@ -0,0 +1,3 @@
+# -*- coding: utf-8 -*-
+from .auth import ServiceAccountCredentials # noqa
+from .auth import MetadataUrlCredentials # noqa
diff --git a/contrib/python/ydb/py2/ydb/iam/auth.py b/contrib/python/ydb/py2/ydb/iam/auth.py
new file mode 100644
index 0000000000..06b07e917e
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/iam/auth.py
@@ -0,0 +1,197 @@
+# -*- coding: utf-8 -*-
+from ydb import credentials, tracing
+import grpc
+import time
+import abc
+import six
+from datetime import datetime
+import json
+import os
+
+try:
+ from yandex.cloud.iam.v1 import iam_token_service_pb2_grpc
+ from yandex.cloud.iam.v1 import iam_token_service_pb2
+ import jwt
+except ImportError:
+ jwt = None
+ iam_token_service_pb2_grpc = None
+ iam_token_service_pb2 = None
+
+try:
+ import requests
+except ImportError:
+ requests = None
+
+
+DEFAULT_METADATA_URL = (
+ "http://169.254.169.254/computeMetadata/v1/instance/service-accounts/default/token"
+)
+
+
+def get_jwt(account_id, access_key_id, private_key, jwt_expiration_timeout):
+ now = time.time()
+ now_utc = datetime.utcfromtimestamp(now)
+ exp_utc = datetime.utcfromtimestamp(now + jwt_expiration_timeout)
+ return jwt.encode(
+ key=private_key,
+ algorithm="PS256",
+ headers={"typ": "JWT", "alg": "PS256", "kid": access_key_id},
+ payload={
+ "iss": account_id,
+ "aud": "https://iam.api.cloud.yandex.net/iam/v1/tokens",
+ "iat": now_utc,
+ "exp": exp_utc,
+ },
+ )
+
+
+@six.add_metaclass(abc.ABCMeta)
+class TokenServiceCredentials(credentials.AbstractExpiringTokenCredentials):
+ def __init__(self, iam_endpoint=None, iam_channel_credentials=None, tracer=None):
+ super(TokenServiceCredentials, self).__init__(tracer)
+ assert (
+ iam_token_service_pb2_grpc is not None
+ ), "run pip install==ydb[yc] to use service account credentials"
+ self._get_token_request_timeout = 10
+ self._iam_token_service_pb2 = iam_token_service_pb2
+ self._iam_token_service_pb2_grpc = iam_token_service_pb2_grpc
+ self._iam_endpoint = (
+ "iam.api.cloud.yandex.net:443" if iam_endpoint is None else iam_endpoint
+ )
+ self._iam_channel_credentials = (
+ {} if iam_channel_credentials is None else iam_channel_credentials
+ )
+
+ def _channel_factory(self):
+ return grpc.secure_channel(
+ self._iam_endpoint,
+ grpc.ssl_channel_credentials(**self._iam_channel_credentials),
+ )
+
+ @abc.abstractmethod
+ def _get_token_request(self):
+ pass
+
+ @tracing.with_trace()
+ def _make_token_request(self):
+ with self._channel_factory() as channel:
+ tracing.trace(self.tracer, {"iam_token.from_service": True})
+ stub = self._iam_token_service_pb2_grpc.IamTokenServiceStub(channel)
+ response = stub.Create(
+ self._get_token_request(), timeout=self._get_token_request_timeout
+ )
+ expires_in = max(0, response.expires_at.seconds - int(time.time()))
+ return {"access_token": response.iam_token, "expires_in": expires_in}
+
+
+@six.add_metaclass(abc.ABCMeta)
+class BaseJWTCredentials(object):
+ def __init__(self, account_id, access_key_id, private_key):
+ self._account_id = account_id
+ self._jwt_expiration_timeout = 60.0 * 60
+ self._token_expiration_timeout = 120
+ self._access_key_id = access_key_id
+ self._private_key = private_key
+
+ def set_token_expiration_timeout(self, value):
+ self._token_expiration_timeout = value
+ return self
+
+ @classmethod
+ def from_file(cls, key_file, iam_endpoint=None, iam_channel_credentials=None):
+ with open(os.path.expanduser(key_file), "r") as r:
+ output = json.loads(r.read())
+ account_id = output.get("service_account_id", None)
+ if account_id is None:
+ account_id = output.get("user_account_id", None)
+ return cls(
+ account_id,
+ output["id"],
+ output["private_key"],
+ iam_endpoint=iam_endpoint,
+ iam_channel_credentials=iam_channel_credentials,
+ )
+
+
+class JWTIamCredentials(TokenServiceCredentials, BaseJWTCredentials):
+ def __init__(
+ self,
+ account_id,
+ access_key_id,
+ private_key,
+ iam_endpoint=None,
+ iam_channel_credentials=None,
+ ):
+ TokenServiceCredentials.__init__(self, iam_endpoint, iam_channel_credentials)
+ BaseJWTCredentials.__init__(self, account_id, access_key_id, private_key)
+
+ def _get_token_request(self):
+ return self._iam_token_service_pb2.CreateIamTokenRequest(
+ jwt=get_jwt(
+ self._account_id,
+ self._access_key_id,
+ self._private_key,
+ self._jwt_expiration_timeout,
+ )
+ )
+
+
+class YandexPassportOAuthIamCredentials(TokenServiceCredentials):
+ def __init__(
+ self,
+ yandex_passport_oauth_token,
+ iam_endpoint=None,
+ iam_channel_credentials=None,
+ ):
+ self._yandex_passport_oauth_token = yandex_passport_oauth_token
+ super(YandexPassportOAuthIamCredentials, self).__init__(
+ iam_endpoint, iam_channel_credentials
+ )
+
+ def _get_token_request(self):
+ return iam_token_service_pb2.CreateIamTokenRequest(
+ yandex_passport_oauth_token=self._yandex_passport_oauth_token
+ )
+
+
+class MetadataUrlCredentials(credentials.AbstractExpiringTokenCredentials):
+ def __init__(self, metadata_url=None, tracer=None):
+ """
+ :param metadata_url: Metadata url
+ :param ydb.Tracer tracer: ydb tracer
+ """
+ super(MetadataUrlCredentials, self).__init__(tracer)
+ assert (
+ requests is not None
+ ), "Install requests library to use metadata credentials provider"
+ self.extra_error_message = "Check that metadata service configured properly since we failed to fetch it from metadata_url."
+ self._metadata_url = (
+ DEFAULT_METADATA_URL if metadata_url is None else metadata_url
+ )
+ self._tp.submit(self._refresh)
+
+ @tracing.with_trace()
+ def _make_token_request(self):
+ response = requests.get(
+ self._metadata_url, headers={"Metadata-Flavor": "Google"}, timeout=3
+ )
+ response.raise_for_status()
+ return json.loads(response.text)
+
+
+class ServiceAccountCredentials(JWTIamCredentials):
+ def __init__(
+ self,
+ service_account_id,
+ access_key_id,
+ private_key,
+ iam_endpoint=None,
+ iam_channel_credentials=None,
+ ):
+ super(ServiceAccountCredentials, self).__init__(
+ service_account_id,
+ access_key_id,
+ private_key,
+ iam_endpoint,
+ iam_channel_credentials,
+ )
diff --git a/contrib/python/ydb/py2/ydb/import_client.py b/contrib/python/ydb/py2/ydb/import_client.py
new file mode 100644
index 0000000000..a11d77a0c6
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/import_client.py
@@ -0,0 +1,162 @@
+import enum
+
+from . import _apis
+
+from . import settings_impl as s_impl
+
+try:
+ from ydb.public.api.protos import ydb_import_pb2
+ from ydb.public.api.grpc import ydb_import_v1_pb2_grpc
+except ImportError:
+ from contrib.ydb.public.api.protos import ydb_import_pb2
+ from contrib.ydb.public.api.grpc import ydb_import_v1_pb2_grpc
+
+
+from . import operation
+
+_ImportFromS3 = "ImportFromS3"
+_progresses = {}
+
+
+@enum.unique
+class ImportProgress(enum.IntEnum):
+ UNSPECIFIED = 0
+ PREPARING = 1
+ TRANSFER_DATA = 2
+ BUILD_INDEXES = 3
+ DONE = 4
+ CANCELLATION = 5
+ CANCELLED = 6
+
+
+def _initialize_progresses():
+ for key, value in ydb_import_pb2.ImportProgress.Progress.items():
+ _progresses[value] = getattr(ImportProgress, key[len("PROGRESS_") :])
+
+
+_initialize_progresses()
+
+
+class ImportFromS3Operation(operation.Operation):
+ def __init__(self, rpc_state, response, driver):
+ super(ImportFromS3Operation, self).__init__(rpc_state, response, driver)
+ metadata = ydb_import_pb2.ImportFromS3Metadata()
+ response.operation.metadata.Unpack(metadata)
+ self.progress = _progresses.get(metadata.progress)
+
+ def __str__(self):
+ return "ImportFromS3Operation<id: %s, progress: %s>" % (
+ self.id,
+ self.progress.name,
+ )
+
+ def __repr__(self):
+ return self.__str__()
+
+
+class ImportFromS3Settings(s_impl.BaseRequestSettings):
+ def __init__(self):
+ super(ImportFromS3Settings, self).__init__()
+ self.items = []
+ self.bucket = None
+ self.endpoint = None
+ self.scheme = 2
+ self.uid = None
+ self.access_key = None
+ self.secret_key = None
+ self.number_of_retries = 0
+
+ def with_scheme(self, scheme):
+ self.scheme = scheme
+ return self
+
+ def with_bucket(self, bucket):
+ self.bucket = bucket
+ return self
+
+ def with_endpoint(self, endpoint):
+ self.endpoint = endpoint
+ return self
+
+ def with_access_key(self, access_key):
+ self.access_key = access_key
+ return self
+
+ def with_uid(self, uid):
+ self.uid = uid
+ return self
+
+ def with_secret_key(self, secret_key):
+ self.secret_key = secret_key
+ return self
+
+ def with_number_of_retries(self, number_of_retries):
+ self.number_of_retries = number_of_retries
+ return self
+
+ def with_source_and_destination(self, source_path, destination_prefix):
+ return self.with_item((source_path, destination_prefix))
+
+ def with_item(self, item):
+ self.items.append(item)
+ return self
+
+ def with_items(self, *items):
+ self.items.extend(items)
+ return self
+
+
+def _get_operation_request(operation_id):
+ request = _apis.ydb_operation.GetOperationRequest(id=operation_id)
+ return request
+
+
+def _import_from_s3_request_factory(settings):
+ request = ydb_import_pb2.ImportFromS3Request(
+ settings=ydb_import_pb2.ImportFromS3Settings(
+ endpoint=settings.endpoint,
+ bucket=settings.bucket,
+ access_key=settings.access_key,
+ secret_key=settings.secret_key,
+ scheme=settings.scheme,
+ )
+ )
+
+ if settings.uid is not None:
+ request.operation_params.labels["uid"] = settings.uid
+
+ if settings.number_of_retries > 0:
+ request.settings.number_of_retries = settings.number_of_retries
+
+ for source, destination in settings.items:
+ request.settings.items.add(
+ source_prefix=source,
+ destination_path=destination,
+ )
+
+ return request
+
+
+class ImportClient(object):
+ def __init__(self, driver):
+ self._driver = driver
+
+ def get_import_from_s3_operation(self, operation_id, settings=None):
+ return self._driver(
+ _get_operation_request(operation_id),
+ _apis.OperationService.Stub,
+ _apis.OperationService.GetOperation,
+ ImportFromS3Operation,
+ settings,
+ (self._driver,),
+ )
+
+ def import_from_s3(self, settings):
+ return self._driver(
+ _import_from_s3_request_factory(settings),
+ ydb_import_v1_pb2_grpc.ImportServiceStub,
+ _ImportFromS3,
+ ImportFromS3Operation,
+ settings,
+ (self._driver,),
+ )
diff --git a/contrib/python/ydb/py2/ydb/interceptor.py b/contrib/python/ydb/py2/ydb/interceptor.py
new file mode 100644
index 0000000000..5405177a8e
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/interceptor.py
@@ -0,0 +1,68 @@
+# -*- coding: utf-8 -*-
+import grpc
+from concurrent import futures
+from grpc._cython import cygrpc
+from grpc._channel import _handle_event, _EMPTY_FLAGS
+
+
+def _event_handler(state, response_deserializer):
+ def handle_event(event):
+ with state.condition:
+ callbacks = _handle_event(event, state, response_deserializer)
+ state.condition.notify_all()
+ done = not state.due
+ for callback in callbacks:
+ callback()
+
+ if getattr(state, "on_event_handler_callback", None) is not None:
+ state.on_event_handler_callback(state)
+
+ return done and state.fork_epoch >= cygrpc.get_fork_epoch()
+
+ return handle_event
+
+
+def on_event_callback(future, it, response_wrapper):
+ def _callback(state):
+ with state.condition:
+ if state.response is not None:
+ response = state.response
+ state.response = None
+ if not future.done():
+ try:
+ future.set_result(response_wrapper(response))
+ except Exception as e:
+ future.set_exception(e)
+ elif cygrpc.OperationType.receive_message not in state.due:
+ if state.code is grpc.StatusCode.OK:
+ if not future.done():
+ future.set_exception(StopIteration())
+ elif state.code is not None:
+ if not future.done():
+ future.set_exception(it)
+
+ return _callback
+
+
+def operate_async_stream_call(it, wrapper):
+ future = futures.Future()
+ callback = on_event_callback(future, it, wrapper)
+
+ with it._state.condition:
+ if it._state.code is None:
+ it._state.on_event_handler_callback = callback
+ operating = it._call.operate(
+ (cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),),
+ _event_handler(it._state, it._response_deserializer),
+ )
+ if operating:
+ it._state.due.add(cygrpc.OperationType.receive_message)
+ elif it._state.code is grpc.StatusCode.OK:
+ future.set_exception(StopIteration())
+ else:
+ future.set_exception(it)
+ return future
+
+
+def monkey_patch_event_handler():
+ grpc._channel._event_handler = _event_handler
diff --git a/contrib/python/ydb/py2/ydb/issues.py b/contrib/python/ydb/py2/ydb/issues.py
new file mode 100644
index 0000000000..611f15ecad
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/issues.py
@@ -0,0 +1,206 @@
+# -*- coding: utf-8 -*-
+from google.protobuf import text_format
+import enum
+from six.moves import queue
+
+from . import _apis
+
+
+_TRANSPORT_STATUSES_FIRST = 401000
+_CLIENT_STATUSES_FIRST = 402000
+
+
+@enum.unique
+class StatusCode(enum.IntEnum):
+ STATUS_CODE_UNSPECIFIED = _apis.StatusIds.STATUS_CODE_UNSPECIFIED
+ SUCCESS = _apis.StatusIds.SUCCESS
+ BAD_REQUEST = _apis.StatusIds.BAD_REQUEST
+ UNAUTHORIZED = _apis.StatusIds.UNAUTHORIZED
+ INTERNAL_ERROR = _apis.StatusIds.INTERNAL_ERROR
+ ABORTED = _apis.StatusIds.ABORTED
+ UNAVAILABLE = _apis.StatusIds.UNAVAILABLE
+ OVERLOADED = _apis.StatusIds.OVERLOADED
+ SCHEME_ERROR = _apis.StatusIds.SCHEME_ERROR
+ GENERIC_ERROR = _apis.StatusIds.GENERIC_ERROR
+ TIMEOUT = _apis.StatusIds.TIMEOUT
+ BAD_SESSION = _apis.StatusIds.BAD_SESSION
+ PRECONDITION_FAILED = _apis.StatusIds.PRECONDITION_FAILED
+ ALREADY_EXISTS = _apis.StatusIds.ALREADY_EXISTS
+ NOT_FOUND = _apis.StatusIds.NOT_FOUND
+ SESSION_EXPIRED = _apis.StatusIds.SESSION_EXPIRED
+ CANCELLED = _apis.StatusIds.CANCELLED
+ UNDETERMINED = _apis.StatusIds.UNDETERMINED
+ UNSUPPORTED = _apis.StatusIds.UNSUPPORTED
+ SESSION_BUSY = _apis.StatusIds.SESSION_BUSY
+ EXTERNAL_ERROR = _apis.StatusIds.EXTERNAL_ERROR
+
+ CONNECTION_LOST = _TRANSPORT_STATUSES_FIRST + 10
+ CONNECTION_FAILURE = _TRANSPORT_STATUSES_FIRST + 20
+ DEADLINE_EXCEEDED = _TRANSPORT_STATUSES_FIRST + 30
+ CLIENT_INTERNAL_ERROR = _TRANSPORT_STATUSES_FIRST + 40
+ UNIMPLEMENTED = _TRANSPORT_STATUSES_FIRST + 50
+
+ UNAUTHENTICATED = _CLIENT_STATUSES_FIRST + 30
+ SESSION_POOL_EMPTY = _CLIENT_STATUSES_FIRST + 40
+
+
+class Error(Exception):
+ status = None
+
+ def __init__(self, message, issues=None):
+ super(Error, self).__init__(message)
+ self.issues = issues
+ self.message = message
+
+
+class TruncatedResponseError(Error):
+ status = None
+
+
+class ConnectionError(Error):
+ status = None
+
+
+class ConnectionFailure(ConnectionError):
+ status = StatusCode.CONNECTION_FAILURE
+
+
+class ConnectionLost(ConnectionError):
+ status = StatusCode.CONNECTION_LOST
+
+
+class DeadlineExceed(ConnectionError):
+ status = StatusCode.DEADLINE_EXCEEDED
+
+
+class Unimplemented(ConnectionError):
+ status = StatusCode.UNIMPLEMENTED
+
+
+class Unauthenticated(Error):
+ status = StatusCode.UNAUTHENTICATED
+
+
+class BadRequest(Error):
+ status = StatusCode.BAD_REQUEST
+
+
+class Unauthorized(Error):
+ status = StatusCode.UNAUTHORIZED
+
+
+class InternalError(Error):
+ status = StatusCode.INTERNAL_ERROR
+
+
+class Aborted(Error):
+ status = StatusCode.ABORTED
+
+
+class Unavailable(Error):
+ status = StatusCode.UNAVAILABLE
+
+
+class Overloaded(Error):
+ status = StatusCode.OVERLOADED
+
+
+class SchemeError(Error):
+ status = StatusCode.SCHEME_ERROR
+
+
+class GenericError(Error):
+ status = StatusCode.GENERIC_ERROR
+
+
+class BadSession(Error):
+ status = StatusCode.BAD_SESSION
+
+
+class Timeout(Error):
+ status = StatusCode.TIMEOUT
+
+
+class PreconditionFailed(Error):
+ status = StatusCode.PRECONDITION_FAILED
+
+
+class NotFound(Error):
+ status = StatusCode.NOT_FOUND
+
+
+class AlreadyExists(Error):
+ status = StatusCode.ALREADY_EXISTS
+
+
+class SessionExpired(Error):
+ status = StatusCode.SESSION_EXPIRED
+
+
+class Cancelled(Error):
+ status = StatusCode.CANCELLED
+
+
+class Undetermined(Error):
+ status = StatusCode.UNDETERMINED
+
+
+class Unsupported(Error):
+ status = StatusCode.UNSUPPORTED
+
+
+class SessionBusy(Error):
+ status = StatusCode.SESSION_BUSY
+
+
+class ExternalError(Error):
+ status = StatusCode.EXTERNAL_ERROR
+
+
+class SessionPoolEmpty(Error, queue.Empty):
+ status = StatusCode.SESSION_POOL_EMPTY
+
+
+def _format_issues(issues):
+ if not issues:
+ return ""
+
+ return " ,".join(
+ text_format.MessageToString(issue, as_utf8=False, as_one_line=True)
+ for issue in issues
+ )
+
+
+def _format_response(response):
+ fmt_issues = _format_issues(response.issues)
+ return "{0} (server_code: {1})".format(fmt_issues, response.status)
+
+
+_success_status_codes = {StatusCode.STATUS_CODE_UNSPECIFIED, StatusCode.SUCCESS}
+_server_side_error_map = {
+ StatusCode.BAD_REQUEST: BadRequest,
+ StatusCode.UNAUTHORIZED: Unauthorized,
+ StatusCode.INTERNAL_ERROR: InternalError,
+ StatusCode.ABORTED: Aborted,
+ StatusCode.UNAVAILABLE: Unavailable,
+ StatusCode.OVERLOADED: Overloaded,
+ StatusCode.SCHEME_ERROR: SchemeError,
+ StatusCode.GENERIC_ERROR: GenericError,
+ StatusCode.TIMEOUT: Timeout,
+ StatusCode.BAD_SESSION: BadSession,
+ StatusCode.PRECONDITION_FAILED: PreconditionFailed,
+ StatusCode.ALREADY_EXISTS: AlreadyExists,
+ StatusCode.NOT_FOUND: NotFound,
+ StatusCode.SESSION_EXPIRED: SessionExpired,
+ StatusCode.CANCELLED: Cancelled,
+ StatusCode.UNDETERMINED: Undetermined,
+ StatusCode.UNSUPPORTED: Unsupported,
+ StatusCode.SESSION_BUSY: SessionBusy,
+ StatusCode.EXTERNAL_ERROR: ExternalError,
+}
+
+
+def _process_response(response_proto):
+ if response_proto.status not in _success_status_codes:
+ exc_obj = _server_side_error_map.get(response_proto.status)
+ raise exc_obj(_format_response(response_proto), response_proto.issues)
diff --git a/contrib/python/ydb/py2/ydb/operation.py b/contrib/python/ydb/py2/ydb/operation.py
new file mode 100644
index 0000000000..6084ef0f18
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/operation.py
@@ -0,0 +1,101 @@
+# -*- coding: utf-8 -*-
+from . import issues
+from . import _apis
+
+
def _forget_operation_request(operation_id):
    """Build a ForgetOperationRequest for the given operation id."""
    return _apis.ydb_operation.ForgetOperationRequest(id=operation_id)
+
+
def _forget_operation_response(rpc_state, response):  # pylint: disable=W0613
    # Raises the mapped YDB error if the response status is unsuccessful;
    # returns None otherwise.
    issues._process_response(response)
+
+
def _cancel_operation_request(operation_id):
    """Build a CancelOperationRequest for the given operation id."""
    return _apis.ydb_operation.CancelOperationRequest(id=operation_id)
+
+
def _cancel_operation_response(rpc_state, response):  # pylint: disable=W0613
    # Raises the mapped YDB error if the response status is unsuccessful;
    # returns None otherwise.
    issues._process_response(response)
+
+
def _get_operation_request(operation):
    """Build a GetOperationRequest for the given :class:`Operation` handle.

    The parameter was previously named ``self`` even though this is a
    module-level helper, not a method; it is renamed for clarity (the only
    caller passes it positionally).
    """
    return _apis.ydb_operation.GetOperationRequest(id=operation.id)
+
+
class OperationClient(object):
    """Synchronous client exposing cancel/forget for long-running operations."""

    def __init__(self, driver):
        self._driver = driver

    def cancel(self, operation_id, settings=None):
        """Issue a CancelOperation RPC for *operation_id*."""
        request = _cancel_operation_request(operation_id)
        return self._driver(
            request,
            _apis.OperationService.Stub,
            _apis.OperationService.CancelOperation,
            _cancel_operation_response,
            settings,
        )

    def forget(self, operation_id, settings=None):
        """Issue a ForgetOperation RPC for *operation_id*."""
        request = _forget_operation_request(operation_id)
        return self._driver(
            request,
            _apis.OperationService.Stub,
            _apis.OperationService.ForgetOperation,
            _forget_operation_response,
            settings,
        )
+
+
class Operation(object):
    """Handle for a long-running server-side operation.

    Construction validates the server response (raising the mapped YDB
    error on failure) and captures the operation id.  When created with a
    driver, the handle can also cancel, forget or re-fetch the operation.
    """

    __slots__ = ("id", "_driver", "self_cls")

    def __init__(self, rpc_state, response, driver=None):  # pylint: disable=W0613
        # TODO: implement a proper operation interface a bit later.
        issues._process_response(response.operation)
        self.id = response.operation.id
        self._driver = driver

    def __str__(self):
        return "<Operation %s>" % (self.id,)

    def __repr__(self):
        return str(self)

    def _ensure_implements(self):
        # Handles created without a driver cannot issue follow-up RPCs.
        if self._driver is None:
            raise ValueError("Operation doesn't implement request!")

    def cancel(self, settings=None):
        """Issue a CancelOperation RPC for this operation."""
        self._ensure_implements()
        request = _cancel_operation_request(self.id)
        return self._driver(
            request,
            _apis.OperationService.Stub,
            _apis.OperationService.CancelOperation,
            _cancel_operation_response,
            settings,
        )

    def forget(self, settings=None):
        """Issue a ForgetOperation RPC for this operation."""
        self._ensure_implements()
        request = _forget_operation_request(self.id)
        return self._driver(
            request,
            _apis.OperationService.Stub,
            _apis.OperationService.ForgetOperation,
            _forget_operation_response,
            settings,
        )

    def get(self, settings=None):
        """Fetch the current operation state; returns a fresh handle."""
        self._ensure_implements()
        request = _get_operation_request(self)
        return self._driver(
            request,
            _apis.OperationService.Stub,
            _apis.OperationService.GetOperation,
            self.__class__,
            settings,
            (self._driver,),
        )
diff --git a/contrib/python/ydb/py2/ydb/pool.py b/contrib/python/ydb/py2/ydb/pool.py
new file mode 100644
index 0000000000..dfda0adff2
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/pool.py
@@ -0,0 +1,528 @@
+# -*- coding: utf-8 -*-
+import threading
+import logging
+from concurrent import futures
+import collections
+import random
+
+import six
+
+from . import connection as connection_impl, issues, resolver, _utilities, tracing
+from abc import abstractmethod, ABCMeta
+
+
+logger = logging.getLogger(__name__)
+
+
class ConnectionsCache(object):
    """Thread-safe registry of ready connections discovered for the driver.

    Connections are indexed both by endpoint string and by node id.
    ``get`` serves connections round-robin, consulting the preferred
    (same-location) list first unless ``use_all_nodes`` is set.
    """

    def __init__(self, use_all_nodes=False, tracer=tracing.Tracer(None)):
        self.tracer = tracer
        self.lock = threading.RLock()
        self.connections = collections.OrderedDict()
        self.connections_by_node_id = collections.OrderedDict()
        self.outdated = collections.OrderedDict()
        self.subscriptions = set()
        self.preferred = collections.OrderedDict()
        self.logger = logging.getLogger(__name__)
        self.use_all_nodes = use_all_nodes
        # Lookup order for get(): preferred connections first unless the
        # caller asked to balance across all nodes.
        self.conn_lst_order = (
            (self.connections,)
            if self.use_all_nodes
            else (self.preferred, self.connections)
        )
        self.fast_fail_subscriptions = set()

    def add(self, connection, preferred=False):
        """Register a ready connection; returns True when one was added."""
        if connection is None:
            return False

        # Ensure the connection removes itself from the cache on teardown.
        connection.add_cleanup_callback(self.remove)
        with self.lock:
            if preferred:
                self.preferred[connection.endpoint] = connection

            self.connections_by_node_id[connection.node_id] = connection
            self.connections[connection.endpoint] = connection
            subscriptions = list(self.subscriptions)
            self.subscriptions.clear()

            if len(self.connections) > 0:
                self.complete_discovery(None)

            # Wake up everyone waiting for at least one connection.
            for subscription in subscriptions:
                subscription.set_result(None)
        return True

    def _on_done_callback(self, subscription):
        """
        A done callback for the subscription future
        :param subscription: A subscription
        :return: None
        """
        with self.lock:
            try:
                self.subscriptions.remove(subscription)
            except KeyError:
                # Already removed (e.g. fulfilled by add()); nothing to do.
                return subscription

    @property
    def size(self):
        # Number of usable (non-outdated) connections.
        with self.lock:
            return len(self.connections) - len(self.outdated)

    def already_exists(self, endpoint):
        with self.lock:
            return endpoint in self.connections

    def values(self):
        with self.lock:
            return list(self.connections.values())

    def make_outdated(self, connection):
        with self.lock:
            self.outdated[connection.endpoint] = connection
            return self

    def cleanup_outdated(self):
        with self.lock:
            outdated_connections = list(self.outdated.values())
            for outdated_connection in outdated_connections:
                # close() triggers the cleanup callback, which removes the
                # connection from every map including self.outdated.
                outdated_connection.close()
        return self

    def cleanup(self):
        with self.lock:
            actual_connections = list(self.connections.values())
            for connection in actual_connections:
                connection.close()

    def complete_discovery(self, error):
        # Resolve all fail-fast waiters with success (error is None) or the
        # discovery failure.
        with self.lock:
            for subscription in self.fast_fail_subscriptions:
                if error is None:
                    subscription.set_result(None)
                else:
                    subscription.set_exception(error)

            self.fast_fail_subscriptions.clear()

    def add_fast_fail(self):
        """Return a future resolved on the next completed discovery round."""
        with self.lock:
            subscription = futures.Future()
            if len(self.connections) > 0:
                subscription.set_result(None)
                return subscription

            self.fast_fail_subscriptions.add(subscription)
            return subscription

    def subscribe(self):
        """Return a future resolved once at least one connection exists."""
        with self.lock:
            subscription = futures.Future()
            if len(self.connections) > 0:
                subscription.set_result(None)
                return subscription
            self.subscriptions.add(subscription)
            subscription.add_done_callback(self._on_done_callback)
            return subscription

    @tracing.with_trace()
    def get(self, preferred_endpoint=None):
        """Pick a connection, honouring the preferred endpoint when cached.

        :raises issues.ConnectionLost: when no connection is available.
        """
        with self.lock:
            if (
                preferred_endpoint is not None
                and preferred_endpoint.node_id in self.connections_by_node_id
            ):
                return self.connections_by_node_id[preferred_endpoint.node_id]

            if (
                preferred_endpoint is not None
                and preferred_endpoint.endpoint in self.connections
            ):
                # Fix: the dict is keyed by endpoint strings, so index with
                # preferred_endpoint.endpoint; indexing with the endpoint
                # object itself always raised KeyError here.
                return self.connections[preferred_endpoint.endpoint]

            # Round-robin: pop the least recently used connection and
            # reinsert it at the tail.
            for conn_lst in self.conn_lst_order:
                try:
                    endpoint, connection = conn_lst.popitem(last=False)
                    conn_lst[endpoint] = connection
                    tracing.trace(self.tracer, {"found_in_lists": True})
                    return connection
                except KeyError:
                    continue

            raise issues.ConnectionLost("Couldn't find valid connection")

    def remove(self, connection):
        with self.lock:
            self.connections_by_node_id.pop(connection.node_id, None)
            self.preferred.pop(connection.endpoint, None)
            self.connections.pop(connection.endpoint, None)
            self.outdated.pop(connection.endpoint, None)
+
+
class Discovery(threading.Thread):
    """Daemon thread that keeps the connection cache in sync with discovery.

    Re-runs discovery every ~60-120 s after a successful round and retries
    within ~1-2 s after a failed one; ``notify_disconnected()`` wakes the
    loop early.
    """

    def __init__(self, store, driver_config):
        """
        A timer thread that implements endpoints discovery logic
        :param store: A store with endpoints
        :param driver_config: An instance of DriverConfig
        """
        super(Discovery, self).__init__()
        self.logger = logger.getChild(self.__class__.__name__)
        self.condition = threading.Condition()
        self.daemon = True
        self._cache = store
        self._driver_config = driver_config
        self._resolver = resolver.DiscoveryEndpointsResolver(self._driver_config)
        self._base_discovery_interval = 60
        self._ready_timeout = 4
        self._discovery_request_timeout = 2
        self._should_stop = threading.Event()
        self._max_size = 9  # cap on the number of cached connections
        self._base_emergency_retry_interval = 1
        # SSL is required when either root certificates are provided or a
        # secure channel was requested explicitly.
        self._ssl_required = False
        if driver_config.root_certificates is not None or driver_config.secure_channel:
            self._ssl_required = True

    def discovery_debug_details(self):
        # Last resolver errors, for diagnostics.
        return self._resolver.debug_details()

    def _emergency_retry_interval(self):
        # 1-2 s, jittered; used after a failed discovery round.
        return (1 + random.random()) * self._base_emergency_retry_interval

    def _discovery_interval(self):
        # 60-120 s, jittered; used after a successful discovery round.
        return (1 + random.random()) * self._base_discovery_interval

    def notify_disconnected(self):
        """Wake the discovery loop early (e.g. after a connection loss)."""
        self._send_wake_up()

    def _send_wake_up(self):
        # Non-blocking: if the loop currently holds the condition (i.e. it
        # is already running a round), skip the notification instead of
        # blocking the caller.
        acquired = self.condition.acquire(blocking=False)

        if not acquired:
            return

        self.condition.notify_all()
        self.condition.release()

    def _handle_empty_database(self):
        # Without a database there is nothing to discover: just ensure a
        # single connection to the configured endpoint exists.
        if self._cache.size > 0:
            return True

        return self._cache.add(
            connection_impl.Connection.ready_factory(
                self._driver_config.endpoint, self._driver_config, self._ready_timeout
            )
        )

    def execute_discovery(self):
        """Run one discovery round; returns True when usable connections exist."""
        if self._driver_config.database is None:
            return self._handle_empty_database()

        with self._resolver.context_resolve() as resolve_details:
            if resolve_details is None:
                return False

            # Mark cached connections that disappeared from the discovery
            # result as outdated; they are closed at the end of the round.
            resolved_endpoints = set(
                endpoint
                for resolved_endpoint in resolve_details.endpoints
                for endpoint, endpoint_options in resolved_endpoint.endpoints_with_options()
            )
            for cached_endpoint in self._cache.values():
                if cached_endpoint.endpoint not in resolved_endpoints:
                    self._cache.make_outdated(cached_endpoint)

            for resolved_endpoint in resolve_details.endpoints:
                # Keep only endpoints whose SSL mode matches the config.
                if self._ssl_required and not resolved_endpoint.ssl:
                    continue

                if not self._ssl_required and resolved_endpoint.ssl:
                    continue

                # Endpoints in the caller's own location are preferred.
                preferred = resolve_details.self_location == resolved_endpoint.location

                for (
                    endpoint,
                    endpoint_options,
                ) in resolved_endpoint.endpoints_with_options():
                    if (
                        self._cache.size >= self._max_size
                        or self._cache.already_exists(endpoint)
                    ):
                        continue

                    ready_connection = connection_impl.Connection.ready_factory(
                        endpoint,
                        self._driver_config,
                        self._ready_timeout,
                        endpoint_options=endpoint_options,
                    )
                    self._cache.add(ready_connection, preferred)

            self._cache.cleanup_outdated()

            return self._cache.size > 0

    def stop(self):
        """Signal the loop to exit and wake it up."""
        self._should_stop.set()
        self._send_wake_up()

    def run(self):
        with self.condition:
            while True:
                if self._should_stop.is_set():
                    break

                successful = self.execute_discovery()
                if successful:
                    self._cache.complete_discovery(None)
                else:
                    self._cache.complete_discovery(
                        issues.ConnectionFailure(str(self.discovery_debug_details()))
                    )

                if self._should_stop.is_set():
                    break

                # Sleep until the next round or an external wake-up.
                interval = (
                    self._discovery_interval()
                    if successful
                    else self._emergency_retry_interval()
                )
                self.condition.wait(interval)

            self._cache.cleanup()
            self.logger.info("Successfully terminated discovery process")
+
+
@six.add_metaclass(ABCMeta)
class IConnectionPool:
    """Abstract interface of a connection pool with endpoint discovery."""

    @abstractmethod
    def __init__(self, driver_config):
        """
        An object that encapsulates discovery logic and provides ability to execute user requests
        on discovered endpoints.
        :param driver_config: An instance of DriverConfig
        """
        pass

    @abstractmethod
    def stop(self, timeout=10):
        """
        Stops underlying discovery process and cleanups
        :param timeout: A timeout to wait for stop completion
        :return: None
        """
        pass

    @abstractmethod
    def wait(self, timeout=None, fail_fast=False):
        """
        Waits for endpoints to be available to serve user requests
        :param timeout: A timeout to wait in seconds
        :param fail_fast: Should wait fail fast?
        :return: None
        """

    @abstractmethod
    def discovery_debug_details(self):
        """
        Returns debug string about last errors
        :return:
        """
        pass

    @abstractmethod
    def __call__(
        self,
        request,
        stub,
        rpc_name,
        wrap_result=None,
        settings=None,
        wrap_args=(),
        preferred_endpoint=None,
    ):
        """
        Sends request constructed by client library
        :param request: A request constructed by client
        :param stub: A stub instance to wrap channel
        :param rpc_name: A name of RPC to be executed
        :param wrap_result: A callable that intercepts call and wraps received response
        :param settings: An instance of BaseRequestSettings that can be used
        for RPC metadata construction
        :param wrap_args: Any arguments to be passed into wrap_result callable
        :return: A result of computation
        """
        pass
+
+
class ConnectionPool(IConnectionPool):
    def __init__(self, driver_config):
        """
        An object that encapsulates discovery logic and provides ability to execute user requests
        on discovered endpoints.

        :param driver_config: An instance of DriverConfig
        """
        self._driver_config = driver_config
        self._store = ConnectionsCache(
            driver_config.use_all_nodes, driver_config.tracer
        )
        self.tracer = driver_config.tracer
        # A direct connection to the configured endpoint; closed in stop().
        self._grpc_init = connection_impl.Connection(
            self._driver_config.endpoint, self._driver_config
        )
        # Background thread that fills self._store with discovered endpoints.
        self._discovery_thread = Discovery(self._store, self._driver_config)
        self._discovery_thread.start()
        self._stopped = False
        self._stop_guard = threading.Lock()

    def stop(self, timeout=10):
        """
        Stops underlying discovery process and cleanups

        :param timeout: A timeout to wait for stop completion
        :return: None
        """
        with self._stop_guard:
            # Idempotent: only the first call performs the shutdown.
            if self._stopped:
                return

            self._stopped = True
            self._discovery_thread.stop()
            self._grpc_init.close()
            self._discovery_thread.join(timeout)

    def async_wait(self, fail_fast=False):
        """
        Returns a future to subscribe on endpoints availability.

        :param fail_fast: When True the future also fails on the first
            unsuccessful discovery round instead of waiting indefinitely.
        :return: A concurrent.futures.Future instance.
        """
        if fail_fast:
            return self._store.add_fast_fail()
        return self._store.subscribe()

    def wait(self, timeout=None, fail_fast=False):
        """
        Waits for endpoints to be available to serve user requests

        :param timeout: A timeout to wait in seconds
        :param fail_fast: When True raise on the first failed discovery
            round instead of waiting for the timeout.
        :return: None
        """
        if fail_fast:
            self._store.add_fast_fail().result(timeout)
        else:
            self._store.subscribe().result(timeout)

    def _on_disconnected(self, connection):
        """
        Removes bad discovered endpoint and triggers discovery process

        :param connection: A disconnected connection
        :return: None
        """
        connection.close()
        self._discovery_thread.notify_disconnected()

    def discovery_debug_details(self):
        # Last resolver errors, for diagnostics.
        return self._discovery_thread.discovery_debug_details()

    @tracing.with_trace()
    def __call__(
        self,
        request,
        stub,
        rpc_name,
        wrap_result=None,
        settings=None,
        wrap_args=(),
        preferred_endpoint=None,
    ):
        """
        Synchronously sends request constructed by client library

        :param request: A request constructed by client
        :param stub: A stub instance to wrap channel
        :param rpc_name: A name of RPC to be executed
        :param wrap_result: A callable that intercepts call and wraps received response
        :param settings: An instance of BaseRequestSettings that can be used
        for RPC metadata construction
        :param wrap_args: Any arguments to be passed into wrap_result callable

        :return: A result of computation
        """
        tracing.trace(
            self.tracer, {"request": request, "stub": stub, "rpc_name": rpc_name}
        )
        try:
            connection = self._store.get(preferred_endpoint)
        except Exception:
            # No usable connection: kick discovery before re-raising.
            self._discovery_thread.notify_disconnected()
            raise

        res = connection(
            request,
            stub,
            rpc_name,
            wrap_result,
            settings,
            wrap_args,
            lambda: self._on_disconnected(connection),
        )
        tracing.trace(
            self.tracer, {"response": res}, trace_level=tracing.TraceLevel.DEBUG
        )
        return res

    @_utilities.wrap_async_call_exceptions
    def future(
        self,
        request,
        stub,
        rpc_name,
        wrap_result=None,
        settings=None,
        wrap_args=(),
        preferred_endpoint=None,
    ):
        """
        Sends request constructed by client

        :param request: A request constructed by client
        :param stub: A stub instance to wrap channel
        :param rpc_name: A name of RPC to be executed
        :param wrap_result: A callable that intercepts call and wraps received response
        :param settings: An instance of BaseRequestSettings that can be used
        for RPC metadata construction
        :param wrap_args: Any arguments to be passed into wrap_result callable

        :return: A future of computation
        """
        try:
            connection = self._store.get(preferred_endpoint)
        except Exception:
            # No usable connection: kick discovery before re-raising.
            self._discovery_thread.notify_disconnected()
            raise

        return connection.future(
            request,
            stub,
            rpc_name,
            wrap_result,
            settings,
            wrap_args,
            lambda: self._on_disconnected(connection),
        )

    def __enter__(self):
        """
        In some cases (scripts, for example) this context manager can be used.

        :return:
        """
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.stop()
diff --git a/contrib/python/ydb/py2/ydb/resolver.py b/contrib/python/ydb/py2/ydb/resolver.py
new file mode 100644
index 0000000000..b40ae984dc
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/resolver.py
@@ -0,0 +1,209 @@
+# -*- coding: utf-8 -*-
+import contextlib
+import logging
+import threading
+import random
+import itertools
+from . import connection as conn_impl, issues, settings as settings_impl, _apis
+
+logger = logging.getLogger(__name__)
+
+
class EndpointInfo(object):
    """Native view over a discovery ``EndpointInfo`` protobuf message."""

    __slots__ = (
        "address",
        "endpoint",
        "location",
        "port",
        "ssl",
        "ipv4_addrs",
        "ipv6_addrs",
        "ssl_target_name_override",
        "node_id",
    )

    def __init__(self, endpoint_info):
        self.address = endpoint_info.address
        self.port = endpoint_info.port
        self.endpoint = "%s:%s" % (endpoint_info.address, endpoint_info.port)
        self.location = endpoint_info.location
        self.ssl = endpoint_info.ssl
        self.ipv4_addrs = tuple(endpoint_info.ip_v4)
        self.ipv6_addrs = tuple(endpoint_info.ip_v6)
        self.ssl_target_name_override = endpoint_info.ssl_target_name_override
        self.node_id = endpoint_info.node_id

    def endpoints_with_options(self):
        """Yield ``(target, EndpointOptions)`` pairs for each address."""
        override = None
        if self.ssl:
            if self.ssl_target_name_override:
                override = self.ssl_target_name_override
            elif self.ipv6_addrs or self.ipv4_addrs:
                # Raw IP targets will not match the certificate hostname.
                override = self.address

        options = conn_impl.EndpointOptions(
            ssl_target_name_override=override, node_id=self.node_id
        )

        if not (self.ipv6_addrs or self.ipv4_addrs):
            # No resolved addresses: fall back to the host:port endpoint.
            yield (self.endpoint, options)
            return

        for ipv6addr in self.ipv6_addrs:
            yield ("ipv6:[%s]:%s" % (ipv6addr, self.port), options)
        for ipv4addr in self.ipv4_addrs:
            yield ("ipv4:%s:%s" % (ipv4addr, self.port), options)

    def __str__(self):
        return "<Endpoint %s, location %s, ssl: %s>" % (
            self.endpoint,
            self.location,
            self.ssl,
        )

    def __repr__(self):
        return str(self)

    def __hash__(self):
        # Identity is the endpoint string; keeps hash consistent with __eq__.
        return hash(self.endpoint)

    def __eq__(self, other):
        return hasattr(other, "endpoint") and self.endpoint == other.endpoint
+
+
def _list_endpoints_request_factory(connection_params):
    """Build a ListEndpointsRequest for the configured database."""
    req = _apis.ydb_discovery.ListEndpointsRequest()
    req.database = connection_params.database
    return req
+
+
class DiscoveryResult(object):
    """Result of a ListEndpoints call: self location plus ordered endpoints."""

    def __init__(self, self_location, endpoints):
        self.self_location = self_location
        self.endpoints = endpoints

    def __str__(self):
        return "DiscoveryResult <self_location: %s, endpoints %s>" % (
            self.self_location,
            self.endpoints,
        )

    def __repr__(self):
        return str(self)

    @classmethod
    def from_response(cls, rpc_state, response, use_all_nodes=False):
        """Parse a ListEndpoints response into a DiscoveryResult.

        Endpoints in the caller's location are shuffled to the front unless
        *use_all_nodes* is set, in which case all endpoints are shuffled
        together.
        """
        issues._process_response(response.operation)
        message = _apis.ydb_discovery.ListEndpointsResult()
        response.operation.result.Unpack(message)

        # Deduplicate endpoints, split by locality.
        local, remote = set(), set()
        for info in message.endpoints:
            bucket = local if info.location == message.self_location else remote
            bucket.add(EndpointInfo(info))

        local, remote = list(local), list(remote)
        ordered = []
        if use_all_nodes:
            ordered.extend(local)
            ordered.extend(remote)
            random.shuffle(ordered)
        else:
            random.shuffle(local)
            random.shuffle(remote)
            ordered.extend(local)
            ordered.extend(remote)

        return cls(message.self_location, ordered)
+
+
class DiscoveryEndpointsResolver(object):
    """Resolves database endpoints by querying the discovery service,
    cycling through the configured bootstrap endpoints."""

    def __init__(self, driver_config):
        self.logger = logger.getChild(self.__class__.__name__)
        self._driver_config = driver_config
        self._ready_timeout = getattr(
            self._driver_config, "discovery_request_timeout", 10
        )
        self._lock = threading.Lock()
        self._debug_details_history_size = 20
        self._debug_details_items = []
        # Bootstrap endpoints: the primary endpoint plus any extras, shuffled
        # once and then cycled round-robin.
        self._endpoints = []
        self._endpoints.append(driver_config.endpoint)
        self._endpoints.extend(driver_config.endpoints)
        random.shuffle(self._endpoints)
        self._endpoints_iter = itertools.cycle(self._endpoints)

    def _add_debug_details(self, message, *args):
        # Log the message and keep it in a bounded history for debug_details().
        self.logger.debug(message, *args)
        message = message % args
        with self._lock:
            self._debug_details_items.append(message)
            if len(self._debug_details_items) > self._debug_details_history_size:
                # NOTE(review): pop() discards the just-appended (newest)
                # message, so once full the history keeps only the oldest 20
                # entries; pop(0) would keep the most recent ones — confirm
                # which behaviour is intended.
                self._debug_details_items.pop()

    def debug_details(self):
        """
        Returns last resolver errors as a debug string.
        """
        with self._lock:
            return "\n".join(self._debug_details_items)

    def resolve(self):
        # One-shot variant of context_resolve(); returns the result (or None)
        # after the underlying connection has been closed.
        with self.context_resolve() as result:
            return result

    @contextlib.contextmanager
    def context_resolve(self):
        """Context manager yielding a DiscoveryResult, or None on failure.

        The temporary bootstrap connection is closed when the context exits.
        """
        self.logger.debug("Preparing initial endpoint to resolve endpoints")
        endpoint = next(self._endpoints_iter)
        initial = conn_impl.Connection.ready_factory(
            endpoint, self._driver_config, ready_timeout=self._ready_timeout
        )
        if initial is None:
            self._add_debug_details(
                'Failed to establish connection to YDB discovery endpoint: "%s". Check endpoint correctness.'
                % endpoint
            )
            yield
            return

        self.logger.debug(
            "Resolving endpoints for database %s", self._driver_config.database
        )
        try:
            resolved = initial(
                _list_endpoints_request_factory(self._driver_config),
                _apis.DiscoveryService.Stub,
                _apis.DiscoveryService.ListEndpoints,
                DiscoveryResult.from_response,
                settings=settings_impl.BaseRequestSettings().with_timeout(
                    self._ready_timeout
                ),
                wrap_args=(self._driver_config.use_all_nodes,),
            )

            self._add_debug_details(
                "Resolved endpoints for database %s: %s",
                self._driver_config.database,
                resolved,
            )

            yield resolved
        except Exception as e:

            self._add_debug_details(
                'Failed to resolve endpoints for database %s. Endpoint: "%s". Error details:\n %s',
                self._driver_config.database,
                endpoint,
                e,
            )

            yield

        finally:
            initial.close()
diff --git a/contrib/python/ydb/py2/ydb/scheme.py b/contrib/python/ydb/py2/ydb/scheme.py
new file mode 100644
index 0000000000..4e8b716d5f
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/scheme.py
@@ -0,0 +1,493 @@
+# -*- coding: utf-8 -*-
+import abc
+import enum
+import six
+from abc import abstractmethod
+from . import issues, operation, settings as settings_impl, _apis
+
+
@enum.unique
class SchemeEntryType(enum.IntEnum):
    """
    Enumerates all available entry types.
    """

    # NOTE(review): values appear to mirror the Ydb.Scheme Entry.Type
    # protobuf enum; the numeric gaps (8-12, 14) would then be types not
    # exposed here — confirm against the protobuf definition.
    TYPE_UNSPECIFIED = 0
    DIRECTORY = 1
    TABLE = 2
    PERS_QUEUE_GROUP = 3
    DATABASE = 4
    RTMR_VOLUME = 5
    BLOCK_STORE_VOLUME = 6
    COORDINATION_NODE = 7
    COLUMN_TABLE = 13
    SEQUENCE = 15
    REPLICATION = 16
    TOPIC = 17

    @staticmethod
    def is_table(entry):
        """
        :param entry: A scheme entry to check
        :return: True if scheme entry is a table and False otherwise
        """
        return entry == SchemeEntryType.TABLE

    @staticmethod
    def is_directory(entry):
        """
        :param entry: A scheme entry to check
        :return: True if scheme entry is a directory and False otherwise
        """
        return entry == SchemeEntryType.DIRECTORY

    @staticmethod
    def is_database(entry):
        """
        :param entry: A scheme entry to check
        :return: True if scheme entry is a database and False otherwise
        """
        return entry == SchemeEntryType.DATABASE

    @staticmethod
    def is_coordination_node(entry):
        """
        :param entry: A scheme entry to check
        :return: True if scheme entry is a coordination node and False otherwise
        """
        return entry == SchemeEntryType.COORDINATION_NODE

    @staticmethod
    def is_directory_or_database(entry):
        """
        :param entry: A scheme entry to check
        :return: True if scheme entry is a directory or database and False otherwise
        """
        return entry == SchemeEntryType.DATABASE or entry == SchemeEntryType.DIRECTORY
+
+
class SchemeEntry(object):
    """A generic scheme entry (table, directory, database, ...).

    :param name: A name of a scheme entry
    :param owner: An owner of a scheme entry
    :param type: A type of scheme entry (SchemeEntryType)
    :param effective_permissions: Effective permissions applied to the entry
    :param permissions: Permissions applied to the entry
    :param size_bytes: Size of the entry in bytes
    """

    __slots__ = (
        "name",
        "owner",
        "type",
        "effective_permissions",
        "permissions",
        "size_bytes",
    )

    def __init__(
        self,
        name,
        owner,
        type,
        effective_permissions,
        permissions,
        size_bytes,
        *args,
        **kwargs
    ):
        # Extra *args/**kwargs are accepted (and ignored) so subclasses can
        # forward their own constructor arguments.
        self.name = name
        self.owner = owner
        self.type = type
        self.effective_permissions = effective_permissions
        self.permissions = permissions
        self.size_bytes = size_bytes

    def is_directory(self):
        """Return True when this entry is a directory."""
        return SchemeEntryType.is_directory(self.type)

    def is_table(self):
        """Return True when this entry is a table."""
        return SchemeEntryType.is_table(self.type)

    def is_database(self):
        """Return True when this entry is a database."""
        return SchemeEntryType.is_database(self.type)

    def is_directory_or_database(self):
        """Return True when this entry is a directory or a database."""
        return SchemeEntryType.is_directory_or_database(self.type)

    def is_coordination_node(self):
        """Return True when this entry is a coordination node."""
        return SchemeEntryType.is_coordination_node(self.type)
+
+
class Directory(SchemeEntry):
    """A directory scheme entry together with its immediate children.

    :param name: A name of a scheme entry
    :param owner: An owner of a scheme entry
    :param type: A type of scheme entry
    :param effective_permissions: Effective permissions applied to the entry
    :param permissions: Permissions applied to the entry
    :param children: The directory's children entries
    """

    __slots__ = ("children",)

    def __init__(
        self,
        name,
        owner,
        type,
        effective_permissions,
        permissions,
        children,
        *args,
        **kwargs
    ):
        # Directories always report a size of 0 bytes.
        super(Directory, self).__init__(
            name, owner, type, effective_permissions, permissions, 0
        )
        self.children = children
+
+
def _describe_path_request_factory(path):
    """Build a DescribePathRequest targeting *path*."""
    req = _apis.ydb_scheme.DescribePathRequest()
    req.path = path
    return req
+
+
def _list_directory_request_factory(path):
    """Build a ListDirectoryRequest targeting *path*."""
    req = _apis.ydb_scheme.ListDirectoryRequest()
    req.path = path
    return req
+
+
def _remove_directory_request_factory(path):
    """Build a RemoveDirectoryRequest targeting *path*."""
    req = _apis.ydb_scheme.RemoveDirectoryRequest()
    req.path = path
    return req
+
+
def _make_directory_request_factory(path):
    """Build a MakeDirectoryRequest targeting *path*."""
    req = _apis.ydb_scheme.MakeDirectoryRequest()
    req.path = path
    return req
+
+
class MakeDirectorySettings(settings_impl.BaseRequestSettings):
    # No extra options beyond the base request settings.
    pass
+
+
class RemoveDirectorySettings(settings_impl.BaseRequestSettings):
    # No extra options beyond the base request settings.
    pass
+
+
class ListDirectorySettings(settings_impl.BaseRequestSettings):
    # No extra options beyond the base request settings.
    pass
+
+
class DescribePathSettings(settings_impl.BaseRequestSettings):
    # No extra options beyond the base request settings.
    pass
+
+
class ModifyPermissionsSettings(settings_impl.BaseRequestSettings):
    """Fluent builder accumulating actions for a ModifyPermissionsRequest."""

    def __init__(self):
        super(ModifyPermissionsSettings, self).__init__()
        self._pb = _apis.ydb_scheme.ModifyPermissionsRequest()

    def _append_action(self, kind, subject, permission_names):
        # Append one permission action ('grant'/'revoke'/'set') to the request.
        action = self._pb.actions.add()
        getattr(action, kind).MergeFrom(
            Permissions(subject, permission_names).to_pb()
        )
        return self

    def grant_permissions(self, subject, permission_names):
        """Grant *permission_names* to *subject*; returns self for chaining."""
        return self._append_action("grant", subject, permission_names)

    def revoke_permissions(self, subject, permission_names):
        """Revoke *permission_names* from *subject*; returns self for chaining."""
        return self._append_action("revoke", subject, permission_names)

    def set_permissions(self, subject, permission_names):
        """Set *subject*'s permissions to exactly *permission_names*."""
        return self._append_action("set", subject, permission_names)

    def change_owner(self, owner):
        """Add an action changing the entry's owner; returns self."""
        action = self._pb.actions.add()
        action.change_owner = owner
        return self

    def clear_permissions(self):
        """Request clearing of existing permissions; returns self."""
        self._pb.clear_permissions = True
        return self

    def to_pb(self):
        """Return the accumulated ModifyPermissionsRequest protobuf."""
        return self._pb
+
+
class Permissions(object):
    """A subject together with the permission names attached to it.

    :param subject: The subject the permissions apply to
    :param permission_names: A list of permission names
    """

    __slots__ = ("subject", "permission_names")

    def __init__(self, subject, permission_names):
        self.subject = subject
        self.permission_names = permission_names

    def to_pb(self):
        """Serialize into an ``ydb_scheme.Permissions`` protobuf message."""
        message = _apis.ydb_scheme.Permissions()
        message.subject = self.subject
        message.permission_names.extend(self.permission_names)
        return message
+
+
+def _modify_permissions_request_factory(path, settings):
+ """
+ Constructs modify permissions request
+ :param path: A path to apply permissions
+ :param settings: An instance of ModifyPermissionsSettings
+ :return: A constructed request
+ """
+ modify_permissions_request = settings.to_pb()
+ modify_permissions_request.path = path
+ return modify_permissions_request
+
+
+def _wrap_permissions(permissions):
+ """
+ Wraps permissions protocol buffers into native Python objects
+ :param permissions: A protocol buffer representation of permissions
+ :return: A iterable of permissions
+ """
+ return tuple(
+ Permissions(permission.subject, permission.permission_names)
+ for permission in permissions
+ )
+
+
def _wrap_scheme_entry(entry_pb, scheme_entry_cls=None, *args, **kwargs):
    """Construct a native scheme-entry object from its protobuf form.

    :param entry_pb: A protobuf representation of a scheme entry
    :param scheme_entry_cls: Target class; defaults to :class:`SchemeEntry`
    :param args: Extra positional arguments forwarded to the class
    :param kwargs: Extra keyword arguments forwarded to the class
    :return: An instance of *scheme_entry_cls*
    """
    if scheme_entry_cls is None:
        scheme_entry_cls = SchemeEntry
    return scheme_entry_cls(
        entry_pb.name,
        entry_pb.owner,
        SchemeEntryType(entry_pb.type),
        _wrap_permissions(entry_pb.effective_permissions),
        _wrap_permissions(entry_pb.permissions),
        entry_pb.size_bytes,
        *args,
        **kwargs
    )
+
+
def _wrap_list_directory_response(rpc_state, response):
    """Unpack a ListDirectory response into a :class:`Directory`.

    Children with entry types unknown to this SDK version are skipped.
    """
    issues._process_response(response.operation)
    message = _apis.ydb_scheme.ListDirectoryResult()
    response.operation.result.Unpack(message)

    known_types = {member.value for member in SchemeEntryType}
    children = tuple(
        _wrap_scheme_entry(child)
        for child in message.children
        if child.type in known_types
    )

    return Directory(
        message.self.name,
        message.self.owner,
        SchemeEntryType(message.self.type),
        _wrap_permissions(message.self.effective_permissions),
        _wrap_permissions(message.self.permissions),
        children,
    )
+
+
def _wrap_describe_path_response(rpc_state, response):
    """Unpack a DescribePath response into a native :class:`SchemeEntry`."""
    issues._process_response(response.operation)
    result = _apis.ydb_scheme.DescribePathResult()
    response.operation.result.Unpack(result)
    return _wrap_scheme_entry(result.self)
+
+
@six.add_metaclass(abc.ABCMeta)
class ISchemeClient:
    """Abstract interface for scheme clients (directory and permission
    management)."""

    @abstractmethod
    def __init__(self, driver):
        pass

    @abstractmethod
    def make_directory(self, path, settings):
        """
        Creates a directory at the given path.

        :param path: A path of the directory to create
        :param settings: Request settings
        """
        pass

    @abstractmethod
    def remove_directory(self, path, settings):
        """
        Removes the directory at the given path.

        :param path: A path of the directory to remove
        :param settings: Request settings
        """
        pass

    @abstractmethod
    def list_directory(self, path, settings):
        """
        Lists the directory at the given path.

        :param path: A path of the directory to list
        :param settings: Request settings
        """
        pass

    @abstractmethod
    def describe_path(self, path, settings):
        """
        Describes the scheme entry at the given path.

        :param path: A path of the scheme entry
        :param settings: Request settings
        """
        pass

    @abstractmethod
    def modify_permissions(self, path, settings):
        """
        Modifies permissions for provided scheme entry

        :param path: A path of scheme entry
        :param settings: An instance of ModifyPermissionsSettings

        :return: An operation if success or exception on case of failure
        """
        pass
+
+
class BaseSchemeClient(ISchemeClient):
    """Synchronous scheme client built on top of a driver/connection pool."""

    __slots__ = ("_driver",)

    def __init__(self, driver):
        self._driver = driver

    def make_directory(self, path, settings=None):
        """Create a directory at *path*."""
        request = _make_directory_request_factory(path)
        return self._driver(
            request,
            _apis.SchemeService.Stub,
            _apis.SchemeService.MakeDirectory,
            operation.Operation,
            settings,
        )

    def remove_directory(self, path, settings=None):
        """Remove the directory at *path*."""
        request = _remove_directory_request_factory(path)
        return self._driver(
            request,
            _apis.SchemeService.Stub,
            _apis.SchemeService.RemoveDirectory,
            operation.Operation,
            settings,
        )

    def list_directory(self, path, settings=None):
        """List the directory at *path*; returns a Directory entry."""
        request = _list_directory_request_factory(path)
        return self._driver(
            request,
            _apis.SchemeService.Stub,
            _apis.SchemeService.ListDirectory,
            _wrap_list_directory_response,
            settings,
        )

    def describe_path(self, path, settings=None):
        """Describe the scheme entry at *path*; returns a SchemeEntry."""
        request = _describe_path_request_factory(path)
        return self._driver(
            request,
            _apis.SchemeService.Stub,
            _apis.SchemeService.DescribePath,
            _wrap_describe_path_response,
            settings,
        )

    def modify_permissions(self, path, settings):
        """
        Modifies permissions for provided scheme entry

        :param path: A path of scheme entry
        :param settings: An instance of ModifyPermissionsSettings

        :return: An operation if success or exception on case of failure
        """
        request = _modify_permissions_request_factory(path, settings)
        return self._driver(
            request,
            _apis.SchemeService.Stub,
            _apis.SchemeService.ModifyPermissions,
            operation.Operation,
            settings,
        )
+
+
class SchemeClient(BaseSchemeClient):
    """Scheme client adding asynchronous (future-returning) variants."""

    def async_make_directory(self, path, settings=None):
        """Asynchronously create a directory at *path*."""
        request = _make_directory_request_factory(path)
        return self._driver.future(
            request,
            _apis.SchemeService.Stub,
            _apis.SchemeService.MakeDirectory,
            operation.Operation,
            settings,
        )

    def async_remove_directory(self, path, settings=None):
        """Asynchronously remove the directory at *path*."""
        request = _remove_directory_request_factory(path)
        return self._driver.future(
            request,
            _apis.SchemeService.Stub,
            _apis.SchemeService.RemoveDirectory,
            operation.Operation,
            settings,
        )

    def async_list_directory(self, path, settings=None):
        """Asynchronously list the directory at *path*."""
        request = _list_directory_request_factory(path)
        return self._driver.future(
            request,
            _apis.SchemeService.Stub,
            _apis.SchemeService.ListDirectory,
            _wrap_list_directory_response,
            settings,
        )

    def async_describe_path(self, path, settings=None):
        """Asynchronously describe the scheme entry at *path*."""
        request = _describe_path_request_factory(path)
        return self._driver.future(
            request,
            _apis.SchemeService.Stub,
            _apis.SchemeService.DescribePath,
            _wrap_describe_path_response,
            settings,
        )

    def async_modify_permissions(self, path, settings):
        """
        Modifies permissions for provided scheme entry

        :param path: A path of scheme entry
        :param settings: An instance of ModifyPermissionsSettings

        :return: An future of computation
        """
        request = _modify_permissions_request_factory(path, settings)
        return self._driver.future(
            request,
            _apis.SchemeService.Stub,
            _apis.SchemeService.ModifyPermissions,
            operation.Operation,
            settings,
        )
diff --git a/contrib/python/ydb/py2/ydb/scripting.py b/contrib/python/ydb/py2/ydb/scripting.py
new file mode 100644
index 0000000000..27c4be5386
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/scripting.py
@@ -0,0 +1,109 @@
+try:
+ from ydb.public.api.protos import ydb_scripting_pb2
+ from ydb.public.api.grpc import ydb_scripting_v1_pb2_grpc
+except ImportError:
+ from contrib.ydb.public.api.protos import ydb_scripting_pb2
+ from contrib.ydb.public.api.grpc import ydb_scripting_v1_pb2_grpc
+
+
+from . import issues, convert, settings
+
+
class TypedParameters(object):
    """Pairs YQL parameter type declarations with their values.

    Consumed by _execute_yql_query_request_factory, which converts the pair
    to protobuf via convert.parameters_to_pb.
    """

    def __init__(self, parameters_types, parameters_values):
        # Stored verbatim; conversion happens when the request is built.
        self.parameters_types = parameters_types
        self.parameters_values = parameters_values
+
+
class ScriptingClientSettings(object):
    """Client-level settings controlling result-set value conversion."""

    def __init__(self):
        # Both conversions are off by default; presumably read by
        # convert.ResultSets when wrapping results — confirm against convert.py.
        self._native_date_in_result_sets = False
        self._native_datetime_in_result_sets = False

    def with_native_date_in_result_sets(self, enabled):
        """Toggle native date objects in result sets; returns self for chaining."""
        self._native_date_in_result_sets = enabled
        return self

    def with_native_datetime_in_result_sets(self, enabled):
        """Toggle native datetime objects in result sets; returns self for chaining."""
        self._native_datetime_in_result_sets = enabled
        return self
+
+
class ExplainYqlScriptSettings(settings.BaseRequestSettings):
    """Request settings for ExplainYql carrying the script-processing mode.

    The MODE_* constants presumably mirror the protobuf ExplainYqlRequest
    mode enum (0 = unspecified) — confirm against ydb_scripting_pb2.
    """

    MODE_UNSPECIFIED = 0
    MODE_PARSE = 1
    MODE_VALIDATE = 2
    MODE_EXPLAIN = 3

    def __init__(self):
        super(ExplainYqlScriptSettings, self).__init__()
        # Fix: previously initialized to the bool False where an int mode is
        # expected. False == 0 == MODE_UNSPECIFIED, so behavior is unchanged,
        # but the attribute now carries the intended enum type.
        self.mode = self.MODE_UNSPECIFIED

    def with_mode(self, val):
        """Set the processing mode (one of the MODE_* constants); returns self."""
        self.mode = val
        return self
+
+
def _execute_yql_query_request_factory(script, tp=None, settings=None):
    """Build an ExecuteYqlRequest for *script*.

    :param script: YQL script text
    :param tp: optional TypedParameters converted to protobuf parameters
    :param settings: unused here; kept for factory-signature uniformity
    """
    if tp is None:
        params = None
    else:
        params = convert.parameters_to_pb(tp.parameters_types, tp.parameters_values)
    return ydb_scripting_pb2.ExecuteYqlRequest(script=script, parameters=params)
+
+
class YqlQueryResult(object):
    """Wraps an ExecuteYqlResult protobuf as converted result sets."""

    def __init__(self, result, scripting_client_settings=None):
        # scripting_client_settings tunes value conversion inside ResultSets
        # (native date/datetime handling); None means defaults.
        self.result_sets = convert.ResultSets(
            result.result_sets, scripting_client_settings
        )
+
+
class YqlExplainResult(object):
    """Holds the query plan produced by ExplainYql."""

    def __init__(self, result):
        self.plan = result.plan
+
+
def _wrap_response(rpc_state, response, scripting_client_settings):
    """Unpack an ExecuteYql operation response into a YqlQueryResult.

    :param rpc_state: RPC state passed by the driver (unused here)
    :param response: operation response envelope
    :param scripting_client_settings: ScriptingClientSettings forwarded by the
        driver via ``wrap_args``
    :raises: a ydb issue error when the operation status is not successful
    """
    issues._process_response(response.operation)
    message = ydb_scripting_pb2.ExecuteYqlResult()
    response.operation.result.Unpack(message)
    # Fix: scripting_client_settings was accepted but never forwarded, so the
    # native date/datetime conversion settings were silently ignored.
    return YqlQueryResult(message, scripting_client_settings)
+
+
def _wrap_explain_response(rpc_state, response):
    """Unpack an ExplainYql operation response into a YqlExplainResult.

    Raises a ydb issue error when the operation status is not successful.
    """
    issues._process_response(response.operation)
    message = ydb_scripting_pb2.ExplainYqlResult()
    response.operation.result.Unpack(message)
    return YqlExplainResult(message)
+
+
class ScriptingClient(object):
    """Synchronous client for the YDB scripting service."""

    def __init__(self, driver, scripting_client_settings=None):
        # :param driver: a ydb driver used to issue RPCs
        # :param scripting_client_settings: optional ScriptingClientSettings;
        #   defaults to a fresh instance (native conversions disabled).
        self.driver = driver
        self.scripting_client_settings = (
            scripting_client_settings
            if scripting_client_settings is not None
            else ScriptingClientSettings()
        )

    def execute_yql(self, script, typed_parameters=None, settings=None):
        """Execute a YQL script; the driver resolves the call via _wrap_response.

        :param script: YQL text to execute
        :param typed_parameters: optional TypedParameters with query parameters
        :param settings: optional request settings
        """
        request = _execute_yql_query_request_factory(script, typed_parameters, settings)
        return self.driver(
            request,
            ydb_scripting_v1_pb2_grpc.ScriptingServiceStub,
            "ExecuteYql",
            _wrap_response,
            settings=settings,
            # Extra positional args appended to the wrapper call.
            wrap_args=(self.scripting_client_settings,),
        )

    def explain_yql(self, script, settings=None):
        """Explain a YQL script; the driver resolves via _wrap_explain_response.

        NOTE(review): despite the ``settings=None`` default, ``settings.mode``
        is read unconditionally, so callers must pass an
        ExplainYqlScriptSettings instance.
        """
        return self.driver(
            ydb_scripting_pb2.ExplainYqlRequest(script=script, mode=settings.mode),
            ydb_scripting_v1_pb2_grpc.ScriptingServiceStub,
            "ExplainYql",
            _wrap_explain_response,
            settings=settings,
        )
diff --git a/contrib/python/ydb/py2/ydb/settings.py b/contrib/python/ydb/py2/ydb/settings.py
new file mode 100644
index 0000000000..6739a46fab
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/settings.py
@@ -0,0 +1,119 @@
+# -*- coding: utf-8 -*-
+
+
class BaseRequestSettings(object):
    """Per-request settings applied to a single RPC execution."""

    __slots__ = (
        "trace_id",
        "request_type",
        "timeout",
        "cancel_after",
        "operation_timeout",
        "tracer",
        "compression",
        "headers",
        "need_rpc_auth",
    )

    def __init__(self):
        """
        Request settings to be used for RPC execution
        """
        self.trace_id = None
        self.request_type = None
        self.timeout = None
        self.cancel_after = None
        self.operation_timeout = None
        self.compression = None
        self.need_rpc_auth = True
        # Fix: "tracer" is declared in __slots__ but was never initialized,
        # so reading settings.tracer before assignment raised AttributeError.
        self.tracer = None
        self.headers = []

    def make_copy(self):
        """Return a new BaseRequestSettings carrying the same values.

        Note: the copy is always a plain BaseRequestSettings even when called
        on a subclass instance; subclass-specific attributes are not copied.
        """
        copy = (
            BaseRequestSettings()
            .with_trace_id(self.trace_id)
            .with_request_type(self.request_type)
            .with_timeout(self.timeout)
            .with_cancel_after(self.cancel_after)
            .with_operation_timeout(self.operation_timeout)
            .with_compression(self.compression)
            .with_need_rpc_auth(self.need_rpc_auth)
        )
        # Fix: headers were previously dropped by make_copy. Copy the list so
        # the original and the copy do not share mutable state.
        copy.headers = list(self.headers)
        return copy

    def with_compression(self, compression):
        """
        Enables compression for the specific RPC
        :param compression: An RPCCompression enum value.
        :return The self instance.
        """
        self.compression = compression
        return self

    def with_need_rpc_auth(self, need_rpc_auth):
        """Enable/disable authentication for this RPC; returns self."""
        self.need_rpc_auth = need_rpc_auth
        return self

    def with_header(self, key, value):
        """
        Adds a key-value pair to the request headers.
        :param key: A string with a header key.
        :param value: A string with a header value.
        :return The self instance.
        """
        self.headers.append((key, value))
        return self

    def with_trace_id(self, trace_id):
        """
        Includes trace id for RPC headers
        :param trace_id: A trace id string
        :return: The self instance
        """
        self.trace_id = trace_id
        return self

    def with_request_type(self, request_type):
        """
        Includes request type for RPC headers
        :param request_type: A request type string
        :return: The self instance
        """
        self.request_type = request_type
        return self

    def with_operation_timeout(self, timeout):
        """
        Indicates that client is no longer interested in the result of operation after the specified duration
        starting from the time operation arrives at the server.
        Server will try to stop the execution of operation and if no result is currently available the operation
        will receive TIMEOUT status code, which will be sent back to client if it was waiting for the operation result.
        Timeout of operation does not tell anything about its result, it might be completed successfully
        or cancelled on server.
        :param timeout:
        :return:
        """
        self.operation_timeout = timeout
        return self

    def with_cancel_after(self, timeout):
        """
        Server will try to cancel the operation after the specified duration starting from the time
        the operation arrives at server.
        In case of successful cancellation operation will receive CANCELLED status code, which will be
        sent back to client if it was waiting for the operation result.
        In case when cancellation isn't possible, no action will be performed.
        :param timeout:
        :return:
        """
        self.cancel_after = timeout
        return self

    def with_timeout(self, timeout):
        """
        Client-side timeout to complete request.
        Since YDB doesn't support request cancellation at this moment, this feature should be
        used properly to avoid server overload.
        :param timeout: timeout value in seconds
        :return: The self instance
        """
        self.timeout = timeout
        return self
diff --git a/contrib/python/ydb/py2/ydb/sqlalchemy/__init__.py b/contrib/python/ydb/py2/ydb/sqlalchemy/__init__.py
new file mode 100644
index 0000000000..aa9b2d006c
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/sqlalchemy/__init__.py
@@ -0,0 +1,298 @@
+"""
+Experimental
+Work in progress, breaking changes are possible.
+"""
+
+from __future__ import absolute_import, unicode_literals
+
+
+try:
+ import ydb
+ from ydb.dbapi.errors import NotSupportedError
+ from ydb.sqlalchemy.types import UInt32, UInt64
+
+ from sqlalchemy.engine.default import DefaultDialect
+ from sqlalchemy.sql.compiler import (
+ IdentifierPreparer,
+ GenericTypeCompiler,
+ SQLCompiler,
+ )
+ from sqlalchemy import Table
+ from sqlalchemy.sql.elements import ClauseList
+ from sqlalchemy.sql import functions
+ import sqlalchemy as sa
+ from sqlalchemy import exc
+ from sqlalchemy.util.compat import inspect_getfullargspec
+ from sqlalchemy.sql import literal_column
+
    # NOTE(review): this is a tuple of *strings*, so the comparison below is
    # lexicographic per component — e.g. ("1", "10") < ("1", "4"). It is
    # correct for distinguishing 1.4+ today but fragile for two-digit parts.
    SQLALCHEMY_VERSION = tuple(sa.__version__.split("."))
    SA_14 = SQLALCHEMY_VERSION >= ("1", "4")
+
+ class YqlIdentifierPreparer(IdentifierPreparer):
+ def __init__(self, dialect):
+ super(YqlIdentifierPreparer, self).__init__(
+ dialect,
+ initial_quote="`",
+ final_quote="`",
+ )
+
+ def _requires_quotes(self, value):
+ # Force all identifiers to get quoted unless already quoted.
+ return not (
+ value.startswith(self.initial_quote)
+ and value.endswith(self.final_quote)
+ )
+
    class YqlTypeCompiler(GenericTypeCompiler):
        """Renders SQLAlchemy types as YQL type names.

        Method names follow the SQLAlchemy visitor convention: a type whose
        ``__visit_name__`` is X is rendered by ``visit_X``; the uint* visitors
        serve the UInt32/UInt64/UInt8 types from ydb.sqlalchemy.types.
        """

        def visit_VARCHAR(self, type_, **kw):
            return "STRING"

        def visit_unicode(self, type_, **kw):
            return "UTF8"

        def visit_NVARCHAR(self, type_, **kw):
            return "UTF8"

        def visit_TEXT(self, type_, **kw):
            return "UTF8"

        def visit_FLOAT(self, type_, **kw):
            # YQL has no single-precision alias here; everything maps to DOUBLE.
            return "DOUBLE"

        def visit_BOOLEAN(self, type_, **kw):
            return "BOOL"

        def visit_uint32(self, type_, **kw):
            return "UInt32"

        def visit_uint64(self, type_, **kw):
            return "UInt64"

        def visit_uint8(self, type_, **kw):
            return "UInt8"
+
    class ParametrizedFunction(functions.Function):
        """A function with a parameter list, rendered as name(params)(args).

        Compiled by YqlCompiler.visit_parametrized_function (dispatched via
        ``__visit_name__``).
        """

        __visit_name__ = "parametrized_function"

        def __init__(self, name, params, *args, **kwargs):
            super(ParametrizedFunction, self).__init__(name, *args, **kwargs)
            self._func_name = name
            self._func_params = params
            # Pre-build the parameter list as a parenthesized, comma-joined
            # clause; rendered separately from the regular argument list.
            self.params_expr = ClauseList(
                operator=functions.operators.comma_op, group_contents=True, *params
            ).self_group()
+
    class YqlCompiler(SQLCompiler):
        """SQL-to-YQL statement compiler."""

        def group_by_clause(self, select, **kw):
            # Hack to ensure it is possible to define labels in groupby.
            kw.update(within_columns_clause=True)
            return super(YqlCompiler, self).group_by_clause(select, **kw)

        def visit_lambda(self, lambda_, **kw):
            """Render a Lambda element as a YQL lambda literal.

            The wrapped Python callable is invoked with one literal column
            per positional argument ("$arg"), and its return expression is
            compiled into the lambda body. *args/**kwargs are rejected since
            YQL lambdas take a fixed argument list.
            """
            func = lambda_.func
            spec = inspect_getfullargspec(func)

            if spec.varargs:
                raise exc.CompileError("Lambdas with *args are not supported")

            # Py2 argspec exposes "keywords"; Py3 fullargspec exposes "varkw".
            try:
                keywords = spec.keywords
            except AttributeError:
                keywords = spec.varkw

            if keywords:
                raise exc.CompileError("Lambdas with **kwargs are not supported")

            text = "(" + ", ".join("$" + arg for arg in spec.args) + ")" + " -> "

            args = [literal_column("$" + arg) for arg in spec.args]
            text += "{ RETURN " + self.process(func(*args), **kw) + " ;}"

            return text

        def visit_parametrized_function(self, func, **kwargs):
            """Render ParametrizedFunction as name(params)(args), quoting
            each ``::``-separated name component as needed."""
            name = func.name
            name_parts = []
            for name in name.split("::"):
                fname = (
                    self.preparer.quote(name)
                    if self.preparer._requires_quotes_illegal_chars(name)
                    or isinstance(name, sa.sql.elements.quoted_name)
                    else name
                )

                name_parts.append(fname)

            name = "::".join(name_parts)
            params = func.params_expr._compiler_dispatch(self, **kwargs)
            args = self.function_argspec(func, **kwargs)
            return "%(name)s%(params)s%(args)s" % dict(
                name=name, params=params, args=args
            )

        def visit_function(self, func, add_to_result_map=None, **kwargs):
            # Copypaste of `sa.sql.compiler.SQLCompiler.visit_function` with
            # `::` as namespace separator instead of `.`
            if add_to_result_map is not None:
                add_to_result_map(func.name, func.name, (), func.type)

            disp = getattr(self, "visit_%s_func" % func.name.lower(), None)
            if disp:
                return disp(func, **kwargs)
            else:
                name = sa.sql.compiler.FUNCTIONS.get(func.__class__, None)
                if name:
                    if func._has_args:
                        name += "%(expr)s"
                else:
                    name = func.name
                    name = (
                        self.preparer.quote(name)
                        if self.preparer._requires_quotes_illegal_chars(name)
                        or isinstance(name, sa.sql.elements.quoted_name)
                        else name
                    )
                    name = name + "%(expr)s"
                # NOTE(review): the isinstance check below tests `name`, not
                # `tok` — this mirrors the upstream SQLAlchemy code this was
                # copied from, so it is kept verbatim.
                return "::".join(
                    [
                        (
                            self.preparer.quote(tok)
                            if self.preparer._requires_quotes_illegal_chars(tok)
                            or isinstance(name, sa.sql.elements.quoted_name)
                            else tok
                        )
                        for tok in func.packagenames
                    ]
                    + [name]
                ) % {"expr": self.function_argspec(func, **kwargs)}
+
+ COLUMN_TYPES = {
+ ydb.PrimitiveType.Int8: sa.INTEGER,
+ ydb.PrimitiveType.Int16: sa.INTEGER,
+ ydb.PrimitiveType.Int32: sa.INTEGER,
+ ydb.PrimitiveType.Int64: sa.INTEGER,
+ ydb.PrimitiveType.Uint8: sa.INTEGER,
+ ydb.PrimitiveType.Uint16: sa.INTEGER,
+ ydb.PrimitiveType.Uint32: UInt32,
+ ydb.PrimitiveType.Uint64: UInt64,
+ ydb.PrimitiveType.Float: sa.FLOAT,
+ ydb.PrimitiveType.Double: sa.FLOAT,
+ ydb.PrimitiveType.String: sa.TEXT,
+ ydb.PrimitiveType.Utf8: sa.TEXT,
+ ydb.PrimitiveType.Json: sa.JSON,
+ ydb.PrimitiveType.JsonDocument: sa.JSON,
+ ydb.DecimalType: sa.DECIMAL,
+ ydb.PrimitiveType.Yson: sa.TEXT,
+ ydb.PrimitiveType.Date: sa.DATE,
+ ydb.PrimitiveType.Datetime: sa.DATETIME,
+ ydb.PrimitiveType.Timestamp: sa.DATETIME,
+ ydb.PrimitiveType.Interval: sa.INTEGER,
+ ydb.PrimitiveType.Bool: sa.BOOLEAN,
+ ydb.PrimitiveType.DyNumber: sa.TEXT,
+ }
+
+ def _get_column_info(t):
+ nullable = False
+ if isinstance(t, ydb.OptionalType):
+ nullable = True
+ t = t.item
+
+ if isinstance(t, ydb.DecimalType):
+ return sa.DECIMAL(precision=t.precision, scale=t.scale), nullable
+
+ return COLUMN_TYPES[t], nullable
+
    class YqlDialect(DefaultDialect):
        """SQLAlchemy dialect for YQL backed by the ydb.dbapi driver."""

        name = "yql"
        supports_alter = False
        max_identifier_length = 63
        supports_sane_rowcount = False
        supports_statement_cache = False

        supports_native_enum = False
        supports_native_boolean = True
        supports_smallserial = False

        supports_sequences = False
        sequences_optional = True
        preexecute_autoincrement_sequences = True
        postfetch_lastrowid = False

        supports_default_values = False
        supports_empty_insert = False
        supports_multivalues_insert = True
        default_paramstyle = "qmark"

        isolation_level = None

        preparer = YqlIdentifierPreparer
        statement_compiler = YqlCompiler
        type_compiler = YqlTypeCompiler

        @staticmethod
        def dbapi():
            # SQLAlchemy calls this to obtain the DB-API 2.0 module.
            import ydb.dbapi

            return ydb.dbapi

        def _check_unicode_returns(self, *args, **kwargs):
            # Normally, this would do 2 SQL queries, which isn't quite necessary.
            return "conditional"

        def get_columns(self, connection, table_name, schema=None, **kw):
            """Describe table columns via the raw ydb connection.

            :raises NotSupportedError: schemas are not supported by YQL.
            """
            if schema is not None:
                raise NotSupportedError

            if isinstance(table_name, Table):
                qt = table_name.name
            else:
                qt = table_name

            # SQLAlchemy 1.4 changed how the raw DBAPI connection is exposed.
            if SA_14:
                raw_conn = connection.connection
            else:
                raw_conn = connection.raw_connection()
            columns = raw_conn.describe(qt)
            as_compatible = []
            for column in columns:
                col_type, nullable = _get_column_info(column.type)
                as_compatible.append(
                    {
                        "name": column.name,
                        "type": col_type,
                        "nullable": nullable,
                    }
                )

            return as_compatible

        def has_table(self, connection, table_name, schema=None):
            """Probe table existence by attempting a SELECT against it."""
            if schema is not None:
                raise NotSupportedError

            quote = self.identifier_preparer.quote_identifier
            qtable = quote(table_name)

            # TODO: use `get_columns` instead.
            statement = "SELECT * FROM " + qtable
            try:
                connection.execute(statement)
                return True
            except Exception:
                # Any failure (missing table, permission, ...) reads as absent.
                return False
+
+except ImportError:
+
    class YqlDialect(object):
        """Fallback used when sqlalchemy is not installed; always raises."""

        def __init__(self):
            raise RuntimeError("could not import sqlalchemy")
+
+
def register_dialect(name="yql", module=__name__, cls="YqlDialect"):
    """Register YqlDialect in SQLAlchemy's dialect registry.

    :param name: dialect name used in database URLs
    :param module: module path holding the dialect class
    :param cls: dialect class name inside that module
    :return: whatever sqlalchemy's registry.register returns
    """
    import sqlalchemy as sa

    registry = sa.dialects.registry
    return registry.register(name, module, cls)
diff --git a/contrib/python/ydb/py2/ydb/sqlalchemy/types.py b/contrib/python/ydb/py2/ydb/sqlalchemy/types.py
new file mode 100644
index 0000000000..f6e10ccd6f
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/sqlalchemy/types.py
@@ -0,0 +1,32 @@
+try:
+ from sqlalchemy.types import Integer
+ from sqlalchemy.sql import type_api
+ from sqlalchemy.sql.elements import ColumnElement
+ from sqlalchemy import util, exc
+except ImportError:
+ Integer = object
+ ColumnElement = object
+
+
class UInt32(Integer):
    # Rendered by YqlTypeCompiler.visit_uint32 as "UInt32".
    __visit_name__ = "uint32"


class UInt64(Integer):
    # Rendered by YqlTypeCompiler.visit_uint64 as "UInt64".
    __visit_name__ = "uint64"


class UInt8(Integer):
    # Rendered by YqlTypeCompiler.visit_uint8 as "UInt8".
    __visit_name__ = "uint8"
+
+
class Lambda(ColumnElement):
    """A column element wrapping a Python callable as a YQL lambda.

    Compiled by YqlCompiler.visit_lambda. NOTE(review): ``util``, ``exc`` and
    ``type_api`` are only bound when the sqlalchemy import above succeeds;
    under the ImportError fallback, instantiating Lambda raises NameError.
    """

    __visit_name__ = "lambda"

    def __init__(self, func):
        if not util.callable(func):
            raise exc.ArgumentError("func must be callable")

        # No SQL type of its own; the compiled body determines semantics.
        self.type = type_api.NULLTYPE
        self.func = func
diff --git a/contrib/python/ydb/py2/ydb/table.py b/contrib/python/ydb/py2/ydb/table.py
new file mode 100644
index 0000000000..c06652610a
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/table.py
@@ -0,0 +1,2682 @@
+# -*- coding: utf-8 -*-
+import abc
+import ydb
+from abc import abstractmethod
+import logging
+import time
+import random
+import enum
+
+import six
+from . import (
+ issues,
+ convert,
+ settings as settings_impl,
+ scheme,
+ types,
+ _utilities,
+ _apis,
+ _sp_impl,
+ _session_impl,
+ _tx_ctx_impl,
+ tracing,
+)
+from ._errors import check_retriable_error
+
+try:
+ from . import interceptor
+except ImportError:
+ interceptor = None
+
+_allow_split_transaction = True
+
+logger = logging.getLogger(__name__)
+
+##################################################################
+# A deprecated aliases in case when direct import has been used #
+##################################################################
+SessionPoolEmpty = issues.SessionPoolEmpty
+DataQuery = types.DataQuery
+
+
class DescribeTableSettings(settings_impl.BaseRequestSettings):
    """Request settings for DescribeTable with optional payload flags."""

    def __init__(self):
        super(DescribeTableSettings, self).__init__()
        # Both extras are off by default to keep the response small.
        self.include_shard_key_bounds = False
        self.include_table_stats = False

    def with_include_shard_key_bounds(self, value):
        """Request shard key bounds in the description; returns self."""
        self.include_shard_key_bounds = value
        return self

    def with_include_table_stats(self, value):
        """Request table statistics in the description; returns self."""
        self.include_table_stats = value
        return self
+
+
class ExecDataQuerySettings(settings_impl.BaseRequestSettings):
    """Request settings for ExecuteDataQuery."""

    def __init__(self):
        super(ExecDataQuerySettings, self).__init__()
        # Server-side prepared-query caching is on by default.
        self.keep_in_cache = True

    def with_keep_in_cache(self, value):
        """Enable/disable server-side query caching; returns self."""
        self.keep_in_cache = value
        return self
+
+
class KeyBound(object):
    __slots__ = ("_equal", "value", "type")

    def __init__(self, key_value, key_type=None, inclusive=False):
        """
        Represents key bound.
        :param key_value: An iterable with key values
        :param key_type: A type of key
        :param inclusive: A flag that indicates bound includes key provided in the value.
        """

        try:
            iter(key_value)
        except TypeError:
            # Fix: was `assert False, "..."`, which is stripped under
            # `python -O` and would silently accept non-iterable values.
            # Raise explicitly, keeping AssertionError for compatibility.
            raise AssertionError("value must be iterable!")

        if isinstance(key_type, types.TupleType):
            # Store the raw protobuf type rather than the wrapper.
            key_type = key_type.proto

        self._equal = inclusive
        self.value = key_value
        self.type = key_type

    def is_inclusive(self):
        """True when the bound includes the key itself."""
        return self._equal

    def is_exclusive(self):
        """True when the bound excludes the key itself."""
        return not self._equal

    def __str__(self):
        if self._equal:
            return "InclusiveKeyBound(Tuple%s)" % str(self.value)
        return "ExclusiveKeyBound(Tuple%s)" % str(self.value)

    @classmethod
    def inclusive(cls, key_value, key_type):
        """Construct an inclusive bound."""
        return cls(key_value, key_type, True)

    @classmethod
    def exclusive(cls, key_value, key_type):
        """Construct an exclusive bound."""
        return cls(key_value, key_type, False)
+
+
class KeyRange(object):
    """A key range delimited by two bounds (typically KeyBound instances)."""

    __slots__ = ("from_bound", "to_bound")

    def __init__(self, from_bound, to_bound):
        self.from_bound = from_bound
        self.to_bound = to_bound

    def __str__(self):
        return "KeyRange(%s, %s)" % (str(self.from_bound), str(self.to_bound))

    # repr intentionally matches str.
    __repr__ = __str__
+
+
class Column(object):
    """A table column: read-only name/type plus an optional column family."""

    def __init__(self, name, type, family=None):
        self._name = name
        self._type = type
        self.family = family

    def __eq__(self, other):
        return self.name == other.name and self._type.item == other.type.item

    @property
    def name(self):
        """Column name (read-only)."""
        return self._name

    @property
    def type(self):
        """Column type as provided at construction (read-only)."""
        return self._type

    @property
    def type_pb(self):
        """Protobuf form of the type; falls back to the raw value when the
        stored type has no ``proto`` attribute."""
        try:
            return self._type.proto
        except Exception:
            return self._type

    def with_family(self, family):
        """Assign a column family name; returns self for chaining."""
        self.family = family
        return self
+
+
@enum.unique
class FeatureFlag(enum.IntEnum):
    # Tri-state toggle; presumably mirrors the ydb FeatureFlag protobuf enum.
    UNSPECIFIED = 0
    ENABLED = 1
    DISABLED = 2


@enum.unique
class AutoPartitioningPolicy(enum.IntEnum):
    # Values accepted by PartitioningPolicy.with_auto_partitioning.
    AUTO_PARTITIONING_POLICY_UNSPECIFIED = 0
    DISABLED = 1
    AUTO_SPLIT = 2
    AUTO_SPLIT_MERGE = 3


@enum.unique
class IndexStatus(enum.IntEnum):
    # Build state of a table index (output-only field of TableIndex).
    INDEX_STATUS_UNSPECIFIED = 0
    READY = 1
    BUILDING = 2
+
+
class CachingPolicy(object):
    """Builder for ydb_table.CachingPolicy selected by preset name."""

    def __init__(self):
        self._pb = _apis.ydb_table.CachingPolicy()
        self.preset_name = None

    def with_preset_name(self, preset_name):
        # Mirror the value into both the protobuf and the python attribute.
        self._pb.preset_name = preset_name
        self.preset_name = preset_name
        return self

    def to_pb(self):
        return self._pb


class ExecutionPolicy(object):
    """Builder for ydb_table.ExecutionPolicy selected by preset name."""

    def __init__(self):
        self._pb = _apis.ydb_table.ExecutionPolicy()
        self.preset_name = None

    def with_preset_name(self, preset_name):
        # Mirror the value into both the protobuf and the python attribute.
        self._pb.preset_name = preset_name
        self.preset_name = preset_name
        return self

    def to_pb(self):
        return self._pb


class CompactionPolicy(object):
    """Builder for ydb_table.CompactionPolicy selected by preset name."""

    def __init__(self):
        self._pb = _apis.ydb_table.CompactionPolicy()
        self.preset_name = None

    def with_preset_name(self, preset_name):
        # Mirror the value into both the protobuf and the python attribute.
        self._pb.preset_name = preset_name
        self.preset_name = preset_name
        return self

    def to_pb(self):
        return self._pb
+
+
class SplitPoint(object):
    """An explicit partition split point: a tuple of key-prefix values."""

    def __init__(self, *args):
        # *args already arrives as a tuple; store it directly.
        self._value = args

    @property
    def value(self):
        """The split-point values as a tuple (read-only)."""
        return self._value
+
+
class ExplicitPartitions(object):
    """Explicit partitioning scheme: a collection of SplitPoint objects,
    consumed by PartitioningPolicy.to_pb."""

    def __init__(self, split_points):
        self.split_points = split_points
+
+
class PartitioningPolicy(object):
    """Builder for ydb_table.PartitioningPolicy.

    Unlike the other builders, to_pb needs the full table description because
    explicit split points are typed against the table's primary-key columns.
    """

    def __init__(self):
        self._pb = _apis.ydb_table.PartitioningPolicy()
        self.preset_name = None
        self.uniform_partitions = None
        self.auto_partitioning = None
        self.explicit_partitions = None

    def with_preset_name(self, preset_name):
        self._pb.preset_name = preset_name
        self.preset_name = preset_name
        return self

    def with_uniform_partitions(self, uniform_partitions):
        self._pb.uniform_partitions = uniform_partitions
        self.uniform_partitions = uniform_partitions
        return self

    def with_explicit_partitions(self, explicit_partitions):
        # Only recorded here; serialized lazily in to_pb because split-point
        # types depend on the table description.
        self.explicit_partitions = explicit_partitions
        return self

    def with_auto_partitioning(self, auto_partitioning):
        self._pb.auto_partitioning = auto_partitioning
        self.auto_partitioning = auto_partitioning
        return self

    def to_pb(self, table_description):
        """Serialize to protobuf, typing each split point by the primary key.

        :param table_description: TableDescription supplying columns and
            primary key for explicit split points
        """
        if self.explicit_partitions is not None:
            # Collect the types of primary-key columns by name.
            column_types = {}
            pk = set(table_description.primary_key)
            for column in table_description.columns:
                if column.name in pk:
                    column_types[column.name] = column.type

            for split_point in self.explicit_partitions.split_points:
                typed_value = self._pb.explicit_partitions.split_points.add()
                split_point_type = types.TupleType()
                # A split point may cover only a prefix of the primary key.
                prefix_size = len(split_point.value)
                for pl_el_id, pk_name in enumerate(table_description.primary_key):
                    if pl_el_id >= prefix_size:
                        break

                    split_point_type.add_element(column_types[pk_name])

                typed_value.type.MergeFrom(split_point_type.proto)
                typed_value.value.MergeFrom(
                    convert.from_native_value(split_point_type.proto, split_point.value)
                )

        return self._pb
+
+
class TableIndex(object):
    """Builder for ydb_table.TableIndex (secondary index description)."""

    def __init__(self, name):
        self._pb = _apis.ydb_table.TableIndex()
        self._pb.name = name
        self.name = name
        self.index_columns = []
        # output only.
        self.status = None

    def with_global_index(self):
        """Mark the index as global; returns self."""
        # SetInParent materializes the empty global_index submessage, which
        # is how presence is expressed for this protobuf field.
        self._pb.global_index.SetInParent()
        return self

    def with_index_columns(self, *columns):
        """Append indexed column names; returns self."""
        for column in columns:
            self._pb.index_columns.append(column)
            self.index_columns.append(column)
        return self

    def to_pb(self):
        return self._pb
+
+
class ReplicationPolicy(object):
    """Builder for ydb_table.ReplicationPolicy."""

    def __init__(self):
        self._pb = _apis.ydb_table.ReplicationPolicy()
        self.preset_name = None
        self.replicas_count = None
        self.allow_promotion = None
        self.create_per_availability_zone = None

    def with_preset_name(self, preset_name):
        # Each setter mirrors the value into the protobuf and the attribute.
        self._pb.preset_name = preset_name
        self.preset_name = preset_name
        return self

    def with_replicas_count(self, replicas_count):
        self._pb.replicas_count = replicas_count
        self.replicas_count = replicas_count
        return self

    def with_create_per_availability_zone(self, create_per_availability_zone):
        self._pb.create_per_availability_zone = create_per_availability_zone
        self.create_per_availability_zone = create_per_availability_zone
        return self

    def with_allow_promotion(self, allow_promotion):
        self._pb.allow_promotion = allow_promotion
        self.allow_promotion = allow_promotion
        return self

    def to_pb(self):
        return self._pb
+
+
class StoragePool(object):
    """Names the storage media kind used by storage policy settings."""

    def __init__(self, media):
        self.media = media

    def to_pb(self):
        # Built fresh on each call (unlike the builders that cache a _pb).
        return _apis.ydb_table.StoragePool(media=self.media)
+
+
class StoragePolicy(object):
    """Builder for ydb_table.StoragePolicy (per-channel storage placement)."""

    def __init__(self):
        self._pb = _apis.ydb_table.StoragePolicy()
        self.preset_name = None
        self.syslog = None
        self.log = None
        self.data = None
        self.keep_in_memory = None
        self.external = None

    def with_preset_name(self, preset_name):
        self._pb.preset_name = preset_name
        self.preset_name = preset_name
        return self

    def with_syslog_storage_settings(self, syslog_settings):
        # MergeFrom copies the sub-message into the cached protobuf.
        self._pb.syslog.MergeFrom(syslog_settings.to_pb())
        self.syslog = syslog_settings
        return self

    def with_log_storage_settings(self, log_settings):
        self._pb.log.MergeFrom(log_settings.to_pb())
        self.log = log_settings
        return self

    def with_data_storage_settings(self, data_settings):
        self._pb.data.MergeFrom(data_settings.to_pb())
        self.data = data_settings
        return self

    def with_external_storage_settings(self, external_settings):
        self._pb.external.MergeFrom(external_settings.to_pb())
        self.external = external_settings
        return self

    def with_keep_in_memory(self, keep_in_memory):
        self._pb.keep_in_memory = keep_in_memory
        self.keep_in_memory = keep_in_memory
        return self

    def to_pb(self):
        return self._pb
+
+
class TableProfile(object):
    """Aggregates all per-table policies into a ydb_table.TableProfile."""

    def __init__(self):
        self.preset_name = None
        self.compaction_policy = None
        self.partitioning_policy = None
        self.storage_policy = None
        self.execution_policy = None
        self.replication_policy = None
        self.caching_policy = None

    def with_preset_name(self, preset_name):
        """Each with_* setter records a policy and returns self for chaining."""
        self.preset_name = preset_name
        return self

    def with_compaction_policy(self, compaction_policy):
        self.compaction_policy = compaction_policy
        return self

    def with_partitioning_policy(self, partitioning_policy):
        self.partitioning_policy = partitioning_policy
        return self

    def with_execution_policy(self, execution_policy):
        self.execution_policy = execution_policy
        return self

    def with_caching_policy(self, caching_policy):
        self.caching_policy = caching_policy
        return self

    def with_storage_policy(self, storage_policy):
        self.storage_policy = storage_policy
        return self

    def with_replication_policy(self, replication_policy):
        self.replication_policy = replication_policy
        return self

    def to_pb(self, table_description):
        """Serialize every configured policy; unset ones are left absent.

        :param table_description: forwarded to PartitioningPolicy.to_pb,
            which needs primary-key type information
        """
        pb = _apis.ydb_table.TableProfile()

        if self.preset_name is not None:
            pb.preset_name = self.preset_name

        if self.execution_policy is not None:
            pb.execution_policy.MergeFrom(self.execution_policy.to_pb())

        if self.storage_policy is not None:
            pb.storage_policy.MergeFrom(self.storage_policy.to_pb())

        if self.replication_policy is not None:
            pb.replication_policy.MergeFrom(self.replication_policy.to_pb())

        if self.caching_policy is not None:
            pb.caching_policy.MergeFrom(self.caching_policy.to_pb())

        if self.compaction_policy is not None:
            pb.compaction_policy.MergeFrom(self.compaction_policy.to_pb())

        if self.partitioning_policy is not None:
            pb.partitioning_policy.MergeFrom(
                self.partitioning_policy.to_pb(table_description)
            )

        return pb
+
+
class DateTypeColumnModeSettings(object):
    """TTL mode keyed on a date-typed column.

    :param column_name: name of the date column driving expiration
    :param expire_after_seconds: TTL delay; 0 means expire at the column value
    """

    def __init__(self, column_name, expire_after_seconds=0):
        self.column_name = column_name
        self.expire_after_seconds = expire_after_seconds

    def to_pb(self):
        pb = _apis.ydb_table.DateTypeColumnModeSettings()

        pb.column_name = self.column_name
        pb.expire_after_seconds = self.expire_after_seconds

        return pb
+
+
@enum.unique
class ColumnUnit(enum.IntEnum):
    # Time unit of a numeric TTL column, used by
    # ValueSinceUnixEpochModeSettings.
    UNIT_UNSPECIFIED = 0
    UNIT_SECONDS = 1
    UNIT_MILLISECONDS = 2
    UNIT_MICROSECONDS = 3
    UNIT_NANOSECONDS = 4
+
+
class ValueSinceUnixEpochModeSettings(object):
    """TTL mode keyed on a numeric unix-epoch column.

    :param column_name: name of the numeric column driving expiration
    :param column_unit: a ColumnUnit value describing the column's time unit
    :param expire_after_seconds: TTL delay; 0 means expire at the column value
    """

    def __init__(self, column_name, column_unit, expire_after_seconds=0):
        self.column_name = column_name
        self.column_unit = column_unit
        self.expire_after_seconds = expire_after_seconds

    def to_pb(self):
        pb = _apis.ydb_table.ValueSinceUnixEpochModeSettings()

        pb.column_name = self.column_name
        pb.column_unit = self.column_unit
        pb.expire_after_seconds = self.expire_after_seconds

        return pb
+
+
class TtlSettings(object):
    """TTL configuration choosing exactly one expiration mode."""

    def __init__(self):
        self.date_type_column = None
        self.value_since_unix_epoch = None

    def with_date_type_column(self, column_name, expire_after_seconds=0):
        """Use a date-typed column as the TTL source; returns self."""
        self.date_type_column = DateTypeColumnModeSettings(
            column_name, expire_after_seconds
        )
        return self

    def with_value_since_unix_epoch(
        self, column_name, column_unit, expire_after_seconds=0
    ):
        """Use a numeric unix-epoch column as the TTL source; returns self."""
        self.value_since_unix_epoch = ValueSinceUnixEpochModeSettings(
            column_name, column_unit, expire_after_seconds
        )
        return self

    def to_pb(self):
        """Serialize the selected mode; date_type_column wins if both are set.

        :raises RuntimeError: when neither mode has been configured
        """
        pb = _apis.ydb_table.TtlSettings()

        if self.date_type_column is not None:
            pb.date_type_column.MergeFrom(self.date_type_column.to_pb())
        elif self.value_since_unix_epoch is not None:
            pb.value_since_unix_epoch.MergeFrom(self.value_since_unix_epoch.to_pb())
        else:
            raise RuntimeError("Unspecified ttl settings mode")

        return pb
+
+
class TableStats(object):
    """Table statistics: partition count and total store size in bytes."""

    def __init__(self):
        self.partitions = None
        self.store_size = 0

    def with_partitions(self, partitions):
        """Set the partitions count; returns self for chaining."""
        self.partitions = partitions
        return self

    def with_store_size(self, store_size):
        """Set the store size; returns self for chaining."""
        self.store_size = store_size
        return self
+
+
class ReadReplicasSettings(object):
    """Builder for ydb_table.ReadReplicasSettings (read replica placement)."""

    def __init__(self):
        self.per_az_read_replicas_count = 0
        self.any_az_read_replicas_count = 0

    def with_any_az_read_replicas_count(self, any_az_read_replicas_count):
        """Set replicas spread across any availability zone; returns self."""
        self.any_az_read_replicas_count = any_az_read_replicas_count
        return self

    def with_per_az_read_replicas_count(self, per_az_read_replicas_count):
        """Set replicas per availability zone; returns self."""
        self.per_az_read_replicas_count = per_az_read_replicas_count
        return self

    def to_pb(self):
        # The protobuf field is a oneof: a positive per-AZ count takes
        # precedence over the any-AZ count; all-zero yields an empty message.
        pb = _apis.ydb_table.ReadReplicasSettings()
        if self.per_az_read_replicas_count > 0:
            pb.per_az_read_replicas_count = self.per_az_read_replicas_count
        elif self.any_az_read_replicas_count > 0:
            pb.any_az_read_replicas_count = self.any_az_read_replicas_count
        return pb
+
+
class PartitioningSettings(object):
    """Builder for ydb_table.PartitioningSettings (auto-partitioning knobs)."""

    def __init__(self):
        # Zeros mean "server default / not set" for every field.
        self.partitioning_by_size = 0
        self.partition_size_mb = 0
        self.partitioning_by_load = 0
        self.min_partitions_count = 0
        self.max_partitions_count = 0

    def with_max_partitions_count(self, max_partitions_count):
        """Each with_* setter stores a value and returns self for chaining."""
        self.max_partitions_count = max_partitions_count
        return self

    def with_min_partitions_count(self, min_partitions_count):
        self.min_partitions_count = min_partitions_count
        return self

    def with_partitioning_by_load(self, partitioning_by_load):
        self.partitioning_by_load = partitioning_by_load
        return self

    def with_partition_size_mb(self, partition_size_mb):
        self.partition_size_mb = partition_size_mb
        return self

    def with_partitioning_by_size(self, partitioning_by_size):
        self.partitioning_by_size = partitioning_by_size
        return self

    def to_pb(self):
        pb = _apis.ydb_table.PartitioningSettings()
        pb.partitioning_by_size = self.partitioning_by_size
        pb.partition_size_mb = self.partition_size_mb
        pb.partitioning_by_load = self.partitioning_by_load
        pb.min_partitions_count = self.min_partitions_count
        pb.max_partitions_count = self.max_partitions_count
        return pb
+
+
class StorageSettings(object):
    """Builder for ydb_table.StorageSettings (table-level storage channels)."""

    def __init__(self):
        self.tablet_commit_log0 = None
        self.tablet_commit_log1 = None
        self.external = None
        self.store_external_blobs = 0

    def with_store_external_blobs(self, store_external_blobs):
        """Each with_* setter stores a value and returns self for chaining."""
        self.store_external_blobs = store_external_blobs
        return self

    def with_external(self, external):
        self.external = external
        return self

    def with_tablet_commit_log1(self, tablet_commit_log1):
        self.tablet_commit_log1 = tablet_commit_log1
        return self

    def with_tablet_commit_log0(self, tablet_commit_log0):
        self.tablet_commit_log0 = tablet_commit_log0
        return self

    def to_pb(self):
        st = _apis.ydb_table.StorageSettings()
        st.store_external_blobs = self.store_external_blobs
        # Truthiness checks: None (unset) pools are simply omitted.
        if self.external:
            st.external.MergeFrom(self.external.to_pb())
        if self.tablet_commit_log0:
            st.tablet_commit_log0.MergeFrom(self.tablet_commit_log0.to_pb())
        if self.tablet_commit_log1:
            st.tablet_commit_log1.MergeFrom(self.tablet_commit_log1.to_pb())
        return st
+
+
@enum.unique
class Compression(enum.IntEnum):
    # Column-family compression codec (see ColumnFamily.with_compression).
    UNSPECIFIED = 0
    NONE = 1
    LZ4 = 2
+
+
class ColumnFamily(object):
    """Fluent builder describing a single table column family."""

    def __init__(self):
        self.compression = 0
        self.name = None
        self.data = None
        self.keep_in_memory = 0

    def with_name(self, name):
        """Set the family name; returns self for chaining."""
        self.name = name
        return self

    def with_compression(self, compression):
        """Set the compression codec (see Compression enum); returns self for chaining."""
        self.compression = compression
        return self

    def with_data(self, data):
        """Set the data storage pool; returns self for chaining."""
        self.data = data
        return self

    def with_keep_in_memory(self, keep_in_memory):
        """Set the keep-in-memory feature flag; returns self for chaining."""
        self.keep_in_memory = keep_in_memory
        return self

    def to_pb(self):
        """Serialize into an ydb_table.ColumnFamily protobuf."""
        pb = _apis.ydb_table.ColumnFamily()
        pb.keep_in_memory = self.keep_in_memory
        pb.compression = self.compression
        # Optional fields are written only when explicitly configured.
        if self.name is not None:
            pb.name = self.name
        if self.data is not None:
            pb.data.MergeFrom(self.data.to_pb())
        return pb
+
+
class TableDescription(object):
    """Fluent builder collecting everything needed to create a YDB table.

    Each ``with_*`` method mutates the description and returns ``self`` so
    calls can be chained; plural variants delegate to their singular
    counterparts.
    """

    def __init__(self):
        self.columns = []
        self.primary_key = []
        self.profile = None
        self.indexes = []
        self.column_families = []
        self.ttl_settings = None
        self.attributes = {}
        self.uniform_partitions = 0
        self.partition_at_keys = None
        self.compaction_policy = None
        self.key_bloom_filter = 0
        self.read_replicas_settings = None
        self.partitioning_settings = None
        self.storage_settings = None

    def with_column(self, column):
        """Append a single column definition."""
        self.columns.append(column)
        return self

    def with_columns(self, *columns):
        """Append several column definitions."""
        for col in columns:
            self.with_column(col)
        return self

    def with_primary_key(self, key):
        """Append a single primary-key column name."""
        self.primary_key.append(key)
        return self

    def with_primary_keys(self, *keys):
        """Append several primary-key column names."""
        for key_name in keys:
            self.with_primary_key(key_name)
        return self

    def with_column_family(self, column_family):
        """Append a single column family."""
        self.column_families.append(column_family)
        return self

    def with_column_families(self, *column_families):
        """Append several column families."""
        for family in column_families:
            self.with_column_family(family)
        return self

    def with_index(self, index):
        """Append a single secondary index."""
        self.indexes.append(index)
        return self

    def with_indexes(self, *indexes):
        """Append several secondary indexes."""
        for idx in indexes:
            self.with_index(idx)
        return self

    def with_storage_settings(self, storage_settings):
        """Set the storage settings."""
        self.storage_settings = storage_settings
        return self

    def with_profile(self, profile):
        """Set the table profile."""
        self.profile = profile
        return self

    def with_ttl(self, ttl_settings):
        """Set the TTL settings."""
        self.ttl_settings = ttl_settings
        return self

    def with_attributes(self, attributes):
        """Replace the user attributes dict."""
        self.attributes = attributes
        return self

    def with_uniform_partitions(self, uniform_partitions):
        """Set the number of uniform partitions to pre-split into."""
        self.uniform_partitions = uniform_partitions
        return self

    def with_partition_at_keys(self, partition_at_keys):
        """Set explicit split keys."""
        self.partition_at_keys = partition_at_keys
        return self

    def with_key_bloom_filter(self, key_bloom_filter):
        """Set the key bloom filter feature flag."""
        self.key_bloom_filter = key_bloom_filter
        return self

    def with_partitioning_settings(self, partitioning_settings):
        """Set the auto-partitioning settings."""
        self.partitioning_settings = partitioning_settings
        return self

    def with_read_replicas_settings(self, read_replicas_settings):
        """Set the read replicas settings."""
        self.read_replicas_settings = read_replicas_settings
        return self

    def with_compaction_policy(self, compaction_policy):
        """Set the compaction policy preset name."""
        self.compaction_policy = compaction_policy
        return self
+
+
@six.add_metaclass(abc.ABCMeta)
class AbstractTransactionModeBuilder(object):
    """Interface of a transaction-mode builder: a wire name plus its protobuf settings."""

    @property
    @abc.abstractmethod
    def name(self):
        # Wire-level mode name, e.g. "serializable_read_write".
        pass

    @property
    @abc.abstractmethod
    def settings(self):
        # The mode-specific protobuf settings message.
        pass
+
+
class SnapshotReadOnly(AbstractTransactionModeBuilder):
    """Transaction mode builder: snapshot read-only."""

    __slots__ = ("_pb", "_name")

    def __init__(self):
        self._name = "snapshot_read_only"
        self._pb = _apis.ydb_table.SnapshotModeSettings()

    @property
    def name(self):
        return self._name

    @property
    def settings(self):
        return self._pb
+
+
class SerializableReadWrite(AbstractTransactionModeBuilder):
    """Transaction mode builder: serializable read-write (the default mode)."""

    __slots__ = ("_pb", "_name")

    def __init__(self):
        self._pb = _apis.ydb_table.SerializableModeSettings()
        self._name = "serializable_read_write"

    @property
    def name(self):
        return self._name

    @property
    def settings(self):
        return self._pb
+
+
class OnlineReadOnly(AbstractTransactionModeBuilder):
    """Transaction mode builder: online read-only, consistent by default."""

    __slots__ = ("_pb", "_name")

    def __init__(self):
        self._name = "online_read_only"
        self._pb = _apis.ydb_table.OnlineModeSettings()
        self._pb.allow_inconsistent_reads = False

    def with_allow_inconsistent_reads(self):
        """Permit inconsistent reads; returns self for chaining."""
        self._pb.allow_inconsistent_reads = True
        return self

    @property
    def name(self):
        return self._name

    @property
    def settings(self):
        return self._pb
+
+
class StaleReadOnly(AbstractTransactionModeBuilder):
    """Transaction mode builder: stale read-only."""

    __slots__ = ("_pb", "_name")

    def __init__(self):
        self._name = "stale_read_only"
        self._pb = _apis.ydb_table.StaleModeSettings()

    @property
    def name(self):
        return self._name

    @property
    def settings(self):
        return self._pb
+
+
class BackoffSettings(object):
    """Truncated exponential backoff with randomized jitter.

    ``ceiling`` caps the exponent, ``slot_duration`` is the base slot in
    seconds, and ``uncertain_ratio`` is the fraction of the maximum duration
    that is randomized.
    """

    def __init__(self, ceiling=6, slot_duration=0.001, uncertain_ratio=0.5):
        self.ceiling = ceiling
        self.slot_duration = slot_duration
        self.uncertain_ratio = uncertain_ratio

    def calc_timeout(self, retry_number):
        """Return the sleep duration in seconds for the given attempt number."""
        exponent = min(retry_number, self.ceiling)
        slots_count = 1 << exponent
        max_duration_ms = slots_count * self.slot_duration * 1000.0
        # Jitter keeps the result inside
        # [max * (1 - uncertain_ratio), max): the deterministic floor plus a
        # random share of the uncertain part.
        jitter = random.random() * self.uncertain_ratio + 1.0 - self.uncertain_ratio
        duration_ms = max_duration_ms * jitter
        return duration_ms / 1000.0
+
+
class RetrySettings(object):
    """Configuration knobs consumed by retry_operation_impl / retry_operation_sync.

    Holds the retry budget, the fast/slow backoff schedules, error-class
    toggles and the user callbacks invoked on YDB and unknown errors.
    """

    def __init__(
        self,
        max_retries=10,
        max_session_acquire_timeout=None,
        on_ydb_error_callback=None,
        backoff_ceiling=6,
        backoff_slot_duration=1,
        get_session_client_timeout=5,
        fast_backoff_settings=None,
        slow_backoff_settings=None,
        idempotent=False,
    ):
        self.max_retries = max_retries
        self.max_session_acquire_timeout = max_session_acquire_timeout
        # Callbacks default to no-ops so callers never have to None-check.
        if on_ydb_error_callback is None:
            on_ydb_error_callback = lambda e: None
        self.on_ydb_error_callback = on_ydb_error_callback
        if fast_backoff_settings is None:
            fast_backoff_settings = BackoffSettings(10, 0.005)
        self.fast_backoff = fast_backoff_settings
        if slow_backoff_settings is None:
            slow_backoff_settings = BackoffSettings(
                backoff_ceiling, backoff_slot_duration
            )
        self.slow_backoff = slow_backoff_settings
        self.retry_not_found = True
        self.idempotent = idempotent
        self.retry_internal_error = True
        self.unknown_error_handler = lambda e: None
        # The session-acquire timeout never exceeds the overall acquire cap.
        self.get_session_client_timeout = get_session_client_timeout
        if max_session_acquire_timeout is not None:
            self.get_session_client_timeout = min(
                self.max_session_acquire_timeout, self.get_session_client_timeout
            )

    def with_fast_backoff(self, backoff_settings):
        """Replace the fast backoff schedule; returns self for chaining."""
        self.fast_backoff = backoff_settings
        return self

    def with_slow_backoff(self, backoff_settings):
        """Replace the slow backoff schedule; returns self for chaining."""
        self.slow_backoff = backoff_settings
        return self
+
+
class YdbRetryOperationSleepOpt(object):
    """Control-flow marker yielded by retry_operation_impl: sleep ``timeout`` seconds."""

    def __init__(self, timeout):
        self.timeout = timeout

    def __eq__(self, other):
        if type(self) != type(other):
            return False
        return self.timeout == other.timeout

    def __repr__(self):
        return "YdbRetryOperationSleepOpt(%s)" % self.timeout
+
+
class YdbRetryOperationFinalResult(object):
    """Terminal value yielded by retry_operation_impl.

    Wraps the callee's return value; the consumer may attach an exception via
    set_exception() to make the retry loop re-raise it.
    """

    def __init__(self, result):
        self.result = result
        self.exc = None

    def __eq__(self, other):
        if type(self) != type(other):
            return False
        return self.result == other.result and self.exc == other.exc

    def __repr__(self):
        return "YdbRetryOperationFinalResult(%s, exc=%s)" % (self.result, self.exc)

    def set_exception(self, exc):
        """Record an exception for the retry loop to raise."""
        self.exc = exc
+
+
def retry_operation_impl(callee, retry_settings=None, *args, **kwargs):
    """Generator driving the retry protocol shared by sync and async callers.

    Yields YdbRetryOperationFinalResult(result) on success (the consumer is
    expected to stop iterating then; continuing would invoke ``callee`` again)
    and YdbRetryOperationSleepOpt(timeout) when the consumer should sleep
    before the next attempt.  If the consumer attached an exception to the
    final result via set_exception(), it is raised here.

    :param callee: callable to retry
    :param retry_settings: RetrySettings; a default instance when None
    :raises: the last retriable issues.Error once the retry budget is spent,
        a non-retriable issues.Error immediately, or any unknown exception
        after routing it through retry_settings.unknown_error_handler
    """
    retry_settings = RetrySettings() if retry_settings is None else retry_settings
    status = None

    for attempt in six.moves.range(retry_settings.max_retries + 1):
        try:
            result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
            yield result

            if result.exc is not None:
                raise result.exc

        except issues.Error as e:
            status = e
            retry_settings.on_ydb_error_callback(e)

            retriable_info = check_retriable_error(e, retry_settings, attempt)
            if not retriable_info.is_retriable:
                raise

            # For these error classes the retry should happen immediately
            # (e.g. re-acquire a session) -- no sleep opt is yielded.
            skip_yield_error_types = [
                issues.Aborted,
                issues.BadSession,
                issues.NotFound,
                issues.InternalError,
            ]

            yield_sleep = True
            for t in skip_yield_error_types:
                if isinstance(e, t):
                    yield_sleep = False

            if yield_sleep:
                yield YdbRetryOperationSleepOpt(retriable_info.sleep_timeout_seconds)

        except Exception as e:
            # you should provide your own handler you want
            retry_settings.unknown_error_handler(e)
            raise

    raise status
+
+
def retry_operation_sync(callee, retry_settings=None, *args, **kwargs):
    """Run ``callee`` under the retry protocol, sleeping synchronously.

    Consumes retry_operation_impl: sleep opts are honored with time.sleep,
    and the first final result's value is returned.
    """
    for opt in retry_operation_impl(callee, retry_settings, *args, **kwargs):
        if isinstance(opt, YdbRetryOperationSleepOpt):
            time.sleep(opt.timeout)
        else:
            return opt.result
+
+
class TableClientSettings(object):
    """Client-side options for result-set decoding and query caching.

    All ``with_*`` methods are fluent setters returning self; the flags are
    read by the result-set conversion code (ydb.convert).
    """

    def __init__(self):
        self._client_query_cache_enabled = False
        # *_in_result_sets flags: presumably switch decoding of the matching
        # column types to native Python objects -- confirm in ydb.convert.
        self._native_datetime_in_result_sets = False
        self._native_date_in_result_sets = False
        self._make_result_sets_lazy = False
        self._native_json_in_result_sets = False
        self._native_interval_in_result_sets = False
        self._native_timestamp_in_result_sets = False
        # Default comes from the module-level setting in ydb.convert.
        self._allow_truncated_result = convert._default_allow_truncated_result

    def with_native_timestamp_in_result_sets(self, enabled):
        # type:(bool) -> ydb.TableClientSettings
        self._native_timestamp_in_result_sets = enabled
        return self

    def with_native_interval_in_result_sets(self, enabled):
        # type:(bool) -> ydb.TableClientSettings
        self._native_interval_in_result_sets = enabled
        return self

    def with_native_json_in_result_sets(self, enabled):
        # type:(bool) -> ydb.TableClientSettings
        self._native_json_in_result_sets = enabled
        return self

    def with_native_date_in_result_sets(self, enabled):
        # type:(bool) -> ydb.TableClientSettings
        self._native_date_in_result_sets = enabled
        return self

    def with_native_datetime_in_result_sets(self, enabled):
        # type:(bool) -> ydb.TableClientSettings
        self._native_datetime_in_result_sets = enabled
        return self

    def with_client_query_cache(self, enabled):
        # type:(bool) -> ydb.TableClientSettings
        self._client_query_cache_enabled = enabled
        return self

    def with_lazy_result_sets(self, enabled):
        # type:(bool) -> ydb.TableClientSettings
        self._make_result_sets_lazy = enabled
        return self

    def with_allow_truncated_result(self, enabled):
        # type:(bool) -> ydb.TableClientSettings
        self._allow_truncated_result = enabled
        return self
+
+
class ScanQueryResult(object):
    """One chunk of a streamed scan-query response: stats plus a decoded result set."""

    def __init__(self, result, table_client_settings):
        # Keep the raw protobuf so the result set can reference it lazily.
        self._result = result
        self.query_stats = result.query_stats
        self.result_set = convert.ResultSet.from_message(
            self._result.result_set, table_client_settings
        )
+
+
@enum.unique
class QueryStatsCollectionMode(enum.IntEnum):
    # Values are taken directly from the ydb_table protobuf enum, so they
    # stay in sync with the wire protocol.
    NONE = _apis.ydb_table.QueryStatsCollection.Mode.STATS_COLLECTION_NONE
    BASIC = _apis.ydb_table.QueryStatsCollection.Mode.STATS_COLLECTION_BASIC
    FULL = _apis.ydb_table.QueryStatsCollection.Mode.STATS_COLLECTION_FULL
+
+
class ScanQuerySettings(settings_impl.BaseRequestSettings):
    """Request settings for scan queries; adds a stats-collection mode."""

    def __init__(self):
        super(ScanQuerySettings, self).__init__()
        # None means "not set"; see _scan_query_request_factory for the default.
        self.collect_stats = None

    def with_collect_stats(self, collect_stats_mode):
        """Set the query-stats collection mode; returns self for chaining."""
        self.collect_stats = collect_stats_mode
        return self
+
+
class ScanQuery(object):
    """A scan query: YQL text plus the declared parameter types."""

    def __init__(self, yql_text, parameters_types):
        self.yql_text, self.parameters_types = yql_text, parameters_types
+
+
def _wrap_scan_query_response(response, table_client_settings):
    """Validate one stream response and wrap its payload into ScanQueryResult."""
    # Raises the mapped ydb.issues error when the response status is not success.
    issues._process_response(response)
    return ScanQueryResult(response.result, table_client_settings)
+
+
def _scan_query_request_factory(query, parameters=None, settings=None):
    """Build an ExecuteScanQueryRequest protobuf.

    :param query: a ScanQuery, or plain YQL text (wrapped into a ScanQuery
        with no declared parameter types)
    :param parameters: optional dict of query parameters
    :param settings: optional settings object; its ``collect_stats`` attribute
        is honored when present
    """
    if not isinstance(query, ScanQuery):
        query = ScanQuery(query, {})
    parameters = {} if parameters is None else parameters
    # Plain BaseRequestSettings carries no collect_stats attribute - fall
    # back to STATS_COLLECTION_NONE in that case.
    collect_stats = getattr(
        settings,
        "collect_stats",
        _apis.ydb_table.QueryStatsCollection.Mode.STATS_COLLECTION_NONE,
    )
    return _apis.ydb_table.ExecuteScanQueryRequest(
        mode=_apis.ydb_table.ExecuteScanQueryRequest.Mode.MODE_EXEC,
        query=_apis.ydb_table.Query(yql_text=query.yql_text),
        parameters=convert.parameters_to_pb(query.parameters_types, parameters),
        collect_stats=collect_stats,
    )
+
+
@six.add_metaclass(abc.ABCMeta)
class ISession:
    """Abstract interface of a YDB table-service session.

    Implementations (BaseSession/Session below) hold a server-side session id
    and expose DDL/DML operations bound to that session.
    """

    @abstractmethod
    def __init__(self, driver, table_client_settings):
        pass

    @abstractmethod
    def __lt__(self, other):
        pass

    @abstractmethod
    def __eq__(self, other):
        pass

    @property
    @abstractmethod
    def session_id(self):
        pass

    @abstractmethod
    def initialized(self):
        """
        Return True if session is successfully initialized with a session_id and False otherwise.
        """
        pass

    @abstractmethod
    def pending_query(self):
        pass

    @abstractmethod
    def reset(self):
        """
        Perform session state reset (that includes cleanup of the session_id, query cache, etc.)
        """
        pass

    @abstractmethod
    def read_table(
        self,
        path,
        key_range=None,
        columns=(),
        ordered=False,
        row_limit=None,
        settings=None,
        use_snapshot=None,
    ):
        """
        Perform a read table request.

        :param path: A path to the table
        :param key_range: (optional) A KeyRange instance that describes a range to read. The KeyRange instance\
        should include from_bound and/or to_bound. Each of the bounds (if provided) should specify a value of the\
        key bound, and type of the key prefix. See an example above.
        :param columns: (optional) An iterable with table columns to read.
        :param ordered: (optional) A flag that indicates that result should be ordered.
        :param row_limit: (optional) A number of rows to read.
        :param settings: Request settings
        :param use_snapshot: (optional) flag forwarded to the read-table request -- TODO confirm exact semantics

        :return: SyncResponseIterator instance
        """
        pass

    @abstractmethod
    def keep_alive(self, settings=None):
        pass

    @abstractmethod
    def create(self, settings=None):
        pass

    @abstractmethod
    def delete(self, settings=None):
        pass

    @abstractmethod
    def execute_scheme(self, yql_text, settings=None):
        pass

    @abstractmethod
    def transaction(
        self, tx_mode=None, allow_split_transactions=_allow_split_transaction
    ):
        pass

    @abstractmethod
    def has_prepared(self, query):
        pass

    @abstractmethod
    def prepare(self, query, settings=None):
        pass

    @abstractmethod
    def explain(self, yql_text, settings=None):
        """
        Experimental API.

        :param yql_text:
        :param settings:

        :return:
        """
        pass

    @abstractmethod
    def create_table(self, path, table_description, settings=None):
        """
        Create a YDB table.

        :param path: A table path
        :param table_description: A description of table to create. An instance TableDescription
        :param settings: An instance of BaseRequestSettings that describes how rpc should invoked.

        :return: A description of created scheme entry or error otherwise.
        """
        pass

    @abstractmethod
    def drop_table(self, path, settings=None):
        pass

    @abstractmethod
    def alter_table(
        self,
        path,
        add_columns=None,
        drop_columns=None,
        settings=None,
        alter_attributes=None,
        add_indexes=None,
        drop_indexes=None,
        set_ttl_settings=None,
        drop_ttl_settings=None,
        add_column_families=None,
        alter_column_families=None,
        alter_storage_settings=None,
        set_compaction_policy=None,
        alter_partitioning_settings=None,
        set_key_bloom_filter=None,
        set_read_replicas_settings=None,
    ):
        pass

    @abstractmethod
    def copy_table(self, source_path, destination_path, settings=None):
        pass

    @abstractmethod
    def copy_tables(self, source_destination_pairs, settings=None):
        pass

    # NOTE(review): unlike every other operation here this one is not marked
    # @abstractmethod -- looks like an oversight, but adding the decorator now
    # could break external implementers; confirm before changing.
    def describe_table(self, path, settings=None):
        """
        Returns a description of the table by provided path

        :param path: A table path
        :param settings: A request settings

        :return: Description of a table
        """
        pass
+
+
@six.add_metaclass(abc.ABCMeta)
class ITableClient:
    """Abstract interface of the table client: session factory plus bulk operations."""

    def __init__(self, driver, table_client_settings=None):
        pass

    @abstractmethod
    def session(self):
        # Returns a new, not-yet-created session bound to this client.
        pass

    @abstractmethod
    def scan_query(self, query, parameters=None, settings=None):
        pass

    @abstractmethod
    def bulk_upsert(self, table_path, rows, column_types, settings=None):
        """
        Bulk upsert data

        :param table_path: A table path.
        :param rows: A list of structures.
        :param column_types: Bulk upsert column types.

        """
        pass
+
+
class BaseTableClient(ITableClient):
    """Synchronous table client: creates sessions and runs streamed/bulk operations."""

    def __init__(self, driver, table_client_settings=None):
        # type:(ydb.Driver, ydb.TableClientSettings) -> None
        self._driver = driver
        # Fall back to default client settings when none are provided.
        self._table_client_settings = (
            TableClientSettings()
            if table_client_settings is None
            else table_client_settings
        )

    def session(self):
        # type: () -> ydb.Session
        """Return a fresh (not yet server-created) session."""
        return Session(self._driver, self._table_client_settings)

    def scan_query(self, query, parameters=None, settings=None):
        # type: (ydb.ScanQuery, tuple, ydb.BaseRequestSettings) -> ydb.SyncResponseIterator
        """Execute a scan query and return a synchronous iterator over result chunks."""
        request = _scan_query_request_factory(query, parameters, settings)
        stream_it = self._driver(
            request,
            _apis.TableService.Stub,
            _apis.TableService.StreamExecuteScanQuery,
            settings=settings,
        )
        # Each raw stream response is validated and decoded lazily, per chunk.
        return _utilities.SyncResponseIterator(
            stream_it,
            lambda resp: _wrap_scan_query_response(resp, self._table_client_settings),
        )

    def bulk_upsert(self, table_path, rows, column_types, settings=None):
        # type: (str, list, ydb.AbstractTypeBuilder | ydb.PrimitiveType, ydb.BaseRequestSettings) -> None
        """
        Bulk upsert data

        :param table_path: A table path.
        :param rows: A list of structures.
        :param column_types: Bulk upsert column types.

        """
        return self._driver(
            _session_impl.bulk_upsert_request_factory(table_path, rows, column_types),
            _apis.TableService.Stub,
            _apis.TableService.BulkUpsert,
            _session_impl.wrap_operation_bulk_upsert,
            settings,
            (),
        )
+
+
class TableClient(BaseTableClient):
    """Table client extending BaseTableClient with async (future-based) variants."""

    def async_scan_query(self, query, parameters=None, settings=None):
        # type: (ydb.ScanQuery, tuple, ydb.BaseRequestSettings) -> ydb.AsyncResponseIterator
        """Execute a scan query and return an asynchronous iterator over result chunks."""
        request = _scan_query_request_factory(query, parameters, settings)
        stream_it = self._driver(
            request,
            _apis.TableService.Stub,
            _apis.TableService.StreamExecuteScanQuery,
            settings=settings,
        )
        return _utilities.AsyncResponseIterator(
            stream_it,
            lambda resp: _wrap_scan_query_response(resp, self._table_client_settings),
        )

    @_utilities.wrap_async_call_exceptions
    def async_bulk_upsert(self, table_path, rows, column_types, settings=None):
        # type: (str, list, ydb.AbstractTypeBuilder | ydb.PrimitiveType, ydb.BaseRequestSettings) -> None
        """Bulk upsert data; returns a future instead of blocking."""
        return self._driver.future(
            _session_impl.bulk_upsert_request_factory(table_path, rows, column_types),
            _apis.TableService.Stub,
            _apis.TableService.BulkUpsert,
            _session_impl.wrap_operation_bulk_upsert,
            settings,
            (),
        )
+
+
def _make_index_description(index):
    """Build a TableIndex description from its protobuf message.

    :param index: a table-index protobuf with ``name``, ``index_columns`` and
        ``status`` fields
    :return: TableIndex with name, columns and status populated
    """
    # The repeated field unpacks directly; the original intermediate
    # tuple(genexpr) was redundant.
    result = TableIndex(index.name).with_index_columns(*index.index_columns)
    result.status = IndexStatus(index.status)
    return result
+
+
class TableSchemeEntry(scheme.SchemeEntry):
    """Scheme entry enriched with table metadata from a DescribeTable result.

    Converts the protobuf sub-messages of a DescribeTableResult into the
    value/builder objects declared in this module (Column, TableIndex,
    KeyRange, ColumnFamily, StorageSettings, PartitioningSettings,
    TtlSettings, TableStats, ReadReplicasSettings).
    """

    def __init__(
        self,
        name,
        owner,
        type,
        effective_permissions,
        permissions,
        size_bytes,
        columns,
        primary_key,
        shard_key_bounds,
        indexes,
        table_stats,
        ttl_settings,
        attributes,
        partitioning_settings,
        column_families,
        key_bloom_filter,
        read_replicas_settings,
        storage_settings,
        *args,
        **kwargs
    ):

        super(TableSchemeEntry, self).__init__(
            name,
            owner,
            type,
            effective_permissions,
            permissions,
            size_bytes,
            *args,
            **kwargs
        )
        self.primary_key = [pk for pk in primary_key]
        self.columns = [
            Column(column.name, convert.type_to_native(column.type), column.family)
            for column in columns
        ]
        self.indexes = [_make_index_description(index) for index in indexes]
        self.shard_key_ranges = []
        self.column_families = []
        self.key_bloom_filter = FeatureFlag(key_bloom_filter)
        left_key_bound = None
        for column_family in column_families:
            self.column_families.append(
                ColumnFamily()
                .with_name(column_family.name)
                .with_keep_in_memory(FeatureFlag(column_family.keep_in_memory))
                .with_compression(Compression(column_family.compression))
            )

            if column_family.HasField("data"):
                self.column_families[-1].with_data(
                    StoragePool(column_family.data.media)
                )

        # Rebuild the per-shard key ranges: each split key closes the previous
        # range (inclusive left bound, exclusive right bound).
        for shard_key_bound in shard_key_bounds:
            # for next key range
            key_bound_type = shard_key_bound.type
            current_bound = convert.to_native_value(shard_key_bound)
            self.shard_key_ranges.append(
                KeyRange(
                    None
                    if left_key_bound is None
                    else KeyBound.inclusive(left_key_bound, key_bound_type),
                    KeyBound.exclusive(current_bound, key_bound_type),
                )
            )
            left_key_bound = current_bound

            assert isinstance(left_key_bound, tuple)

        if len(shard_key_bounds) > 0:
            # Trailing shard: open on the right.
            self.shard_key_ranges.append(
                KeyRange(
                    KeyBound.inclusive(left_key_bound, shard_key_bounds[-1].type),
                    None,
                )
            )

        else:
            # Single-shard table: one fully unbounded range.
            self.shard_key_ranges.append(KeyRange(None, None))

        self.read_replicas_settings = None
        if read_replicas_settings is not None:
            self.read_replicas_settings = ReadReplicasSettings()
            # Only the oneof member actually present is copied over.
            for field in ("per_az_read_replicas_count", "any_az_read_replicas_count"):
                if read_replicas_settings.WhichOneof("settings") == field:
                    setattr(
                        self.read_replicas_settings,
                        field,
                        getattr(read_replicas_settings, field),
                    )

        self.storage_settings = None
        if storage_settings is not None:
            self.storage_settings = StorageSettings()
            # BUGFIX: wrap the flag from the protobuf message; the previous
            # code wrapped the just-created builder's default (0) instead of
            # the described table's actual value.
            self.storage_settings.store_external_blobs = FeatureFlag(
                storage_settings.store_external_blobs
            )
            if storage_settings.HasField("tablet_commit_log0"):
                self.storage_settings.with_tablet_commit_log0(
                    StoragePool(storage_settings.tablet_commit_log0.media)
                )

            if storage_settings.HasField("tablet_commit_log1"):
                self.storage_settings.with_tablet_commit_log1(
                    StoragePool(storage_settings.tablet_commit_log1.media)
                )

            if storage_settings.HasField("external"):
                self.storage_settings.with_external(
                    StoragePool(storage_settings.external.media)
                )

        self.partitioning_settings = None
        if partitioning_settings is not None:
            self.partitioning_settings = PartitioningSettings()
            for field in (
                "partitioning_by_size",
                "partitioning_by_load",
                "partition_size_mb",
                "min_partitions_count",
                "max_partitions_count",
            ):
                setattr(
                    self.partitioning_settings,
                    field,
                    getattr(partitioning_settings, field),
                )

        self.ttl_settings = None
        if ttl_settings is not None:
            # TTL is a oneof: either a date-typed column or a unix-epoch value.
            if ttl_settings.HasField("date_type_column"):
                self.ttl_settings = TtlSettings().with_date_type_column(
                    ttl_settings.date_type_column.column_name,
                    ttl_settings.date_type_column.expire_after_seconds,
                )
            elif ttl_settings.HasField("value_since_unix_epoch"):
                self.ttl_settings = TtlSettings().with_value_since_unix_epoch(
                    ttl_settings.value_since_unix_epoch.column_name,
                    ColumnUnit(ttl_settings.value_since_unix_epoch.column_unit),
                    ttl_settings.value_since_unix_epoch.expire_after_seconds,
                )

        self.table_stats = None
        if table_stats is not None:
            self.table_stats = TableStats()
            if table_stats.partitions != 0:
                self.table_stats = self.table_stats.with_partitions(
                    table_stats.partitions
                )

            if table_stats.store_size != 0:
                self.table_stats = self.table_stats.with_store_size(
                    table_stats.store_size
                )

        self.attributes = attributes
+
+
class RenameItem:
    """One (source, destination) pair for Session.rename_tables."""

    def __init__(self, source_path, destination_path, replace_destination=False):
        self._source_path = source_path
        self._destination_path = destination_path
        self._replace_destination = replace_destination

    @property
    def replace_destination(self):
        """Whether an existing destination table may be overwritten."""
        return self._replace_destination

    @property
    def destination_path(self):
        """New path of the table."""
        return self._destination_path

    @property
    def source_path(self):
        """Current path of the table to rename."""
        return self._source_path
+
+
class BaseSession(ISession):
    """Synchronous table-service session.

    All RPCs go through ``self._driver`` and, once the session exists, are
    pinned to its endpoint (``self._state.endpoint``).  ``self._state`` also
    holds the session id and the prepared-query cache.
    """

    def __init__(self, driver, table_client_settings):
        self._driver = driver
        self._state = _session_impl.SessionState(table_client_settings)

    def __lt__(self, other):
        # Sessions order/compare by their server-assigned id.
        return self.session_id < other.session_id

    def __eq__(self, other):
        return self.session_id == other.session_id

    @property
    def session_id(self):
        """
        Return session_id.
        """
        return self._state.session_id

    def initialized(self):
        """
        Return True if session is successfully initialized with a session_id and False otherwise.
        """
        return self._state.session_id is not None

    def pending_query(self):
        return self._state.pending_query()

    def closing(self):
        """Returns True if session is closing."""
        return self._state.closing()

    def reset(self):
        """
        Perform session state reset (that includes cleanup of the session_id, query cache, and etc.)
        """
        return self._state.reset()

    def read_table(
        self,
        path,
        key_range=None,
        columns=(),
        ordered=False,
        row_limit=None,
        settings=None,
        use_snapshot=None,
    ):
        """
        Perform a read table request.

        :param path: A path to the table
        :param key_range: (optional) A KeyRange instance that describes a range to read. The KeyRange instance\
        should include from_bound and/or to_bound. Each of the bounds (if provided) should specify a value of the\
        key bound, and type of the key prefix. See an example above.
        :param columns: (optional) An iterable with table columns to read.
        :param ordered: (optional) A flag that indicates that result should be ordered.
        :param row_limit: (optional) A number of rows to read.

        :return: SyncResponseIterator instance
        """
        request = _session_impl.read_table_request_factory(
            self._state,
            path,
            key_range,
            columns,
            ordered,
            row_limit,
            use_snapshot=use_snapshot,
        )
        stream_it = self._driver(
            request,
            _apis.TableService.Stub,
            _apis.TableService.StreamReadTable,
            settings=settings,
        )
        return _utilities.SyncResponseIterator(
            stream_it, _session_impl.wrap_read_table_response
        )

    def keep_alive(self, settings=None):
        """Ping the session to keep it alive on the server."""
        return self._driver(
            _session_impl.keep_alive_request_factory(self._state),
            _apis.TableService.Stub,
            _apis.TableService.KeepAlive,
            _session_impl.wrap_keep_alive_response,
            settings,
            (self._state, self),
            self._state.endpoint,
        )

    def create(self, settings=None):
        """Create the server-side session; no-op if already created."""
        if self._state.session_id is not None:
            return self
        create_settings = settings_impl.BaseRequestSettings()
        if settings is not None:
            create_settings = settings.make_copy()
        # The capabilities header lets the server balance sessions.
        create_settings = create_settings.with_header(
            "x-ydb-client-capabilities", "session-balancer"
        )
        return self._driver(
            _apis.ydb_table.CreateSessionRequest(),
            _apis.TableService.Stub,
            _apis.TableService.CreateSession,
            _session_impl.initialize_session,
            create_settings,
            (self._state, self),
            self._state.endpoint,
        )

    def delete(self, settings=None):
        """Delete the server-side session and clean up local state."""
        return self._driver(
            self._state.attach_request(_apis.ydb_table.DeleteSessionRequest()),
            _apis.TableService.Stub,
            _apis.TableService.DeleteSession,
            _session_impl.cleanup_session,
            settings,
            (self._state, self),
            self._state.endpoint,
        )

    def execute_scheme(self, yql_text, settings=None):
        """Execute a scheme (DDL) query in this session."""
        return self._driver(
            _session_impl.execute_scheme_request_factory(self._state, yql_text),
            _apis.TableService.Stub,
            _apis.TableService.ExecuteSchemeQuery,
            _session_impl.wrap_execute_scheme_result,
            settings,
            (self._state,),
            self._state.endpoint,
        )

    def transaction(
        self, tx_mode=None, allow_split_transactions=_allow_split_transaction
    ):
        """Create a transaction context bound to this session."""
        return TxContext(
            self._driver,
            self._state,
            self,
            tx_mode,
            allow_split_transactions=allow_split_transactions,
        )

    def has_prepared(self, query):
        """Return True if the query is already in this session's prepared-query cache."""
        return query in self._state

    def prepare(self, query, settings=None):
        """Prepare a data query, using the session-local cache when possible."""
        data_query, _ = self._state.lookup(query)
        if data_query is not None:
            return data_query
        return self._driver(
            _session_impl.prepare_request_factory(self._state, query),
            _apis.TableService.Stub,
            _apis.TableService.PrepareDataQuery,
            _session_impl.wrap_prepare_query_response,
            settings,
            (self._state, query),
            self._state.endpoint,
        )

    def explain(self, yql_text, settings=None):
        """
        Experimental API.

        :param yql_text:
        :param settings:

        :return:
        """
        return self._driver(
            _session_impl.explain_data_query_request_factory(self._state, yql_text),
            _apis.TableService.Stub,
            _apis.TableService.ExplainDataQuery,
            _session_impl.wrap_explain_response,
            settings,
            (self._state,),
            self._state.endpoint,
        )

    def create_table(self, path, table_description, settings=None):
        """
        Create a YDB table.

        :param path: A table path
        :param table_description: A description of table to create. An instance TableDescription
        :param settings: An instance of BaseRequestSettings that describes how rpc should invoked.

        :return: A description of created scheme entry or error otherwise.
        """
        return self._driver(
            _session_impl.create_table_request_factory(
                self._state, path, table_description
            ),
            _apis.TableService.Stub,
            _apis.TableService.CreateTable,
            _session_impl.wrap_operation,
            settings,
            # NOTE: the driver (not the session state) is passed to the
            # wrapper here -- presumably for operation polling; confirm in
            # _session_impl.wrap_operation.
            (self._driver,),
            self._state.endpoint,
        )

    def drop_table(self, path, settings=None):
        """Drop the table at the given path."""
        return self._driver(
            self._state.attach_request(_apis.ydb_table.DropTableRequest(path=path)),
            _apis.TableService.Stub,
            _apis.TableService.DropTable,
            _session_impl.wrap_operation,
            settings,
            (self._state,),
            self._state.endpoint,
        )

    def alter_table(
        self,
        path,
        add_columns=None,
        drop_columns=None,
        settings=None,
        alter_attributes=None,
        add_indexes=None,
        drop_indexes=None,
        set_ttl_settings=None,
        drop_ttl_settings=None,
        add_column_families=None,
        alter_column_families=None,
        alter_storage_settings=None,
        set_compaction_policy=None,
        alter_partitioning_settings=None,
        set_key_bloom_filter=None,
        set_read_replicas_settings=None,
    ):
        """Alter the table at the given path; only the provided aspects are changed."""
        return self._driver(
            _session_impl.alter_table_request_factory(
                self._state,
                path,
                add_columns,
                drop_columns,
                alter_attributes,
                add_indexes,
                drop_indexes,
                set_ttl_settings,
                drop_ttl_settings,
                add_column_families,
                alter_column_families,
                alter_storage_settings,
                set_compaction_policy,
                alter_partitioning_settings,
                set_key_bloom_filter,
                set_read_replicas_settings,
            ),
            _apis.TableService.Stub,
            _apis.TableService.AlterTable,
            _session_impl.AlterTableOperation,
            settings,
            (self._driver,),
            self._state.endpoint,
        )

    def describe_table(self, path, settings=None):
        """
        Returns a description of the table by provided path

        :param path: A table path
        :param settings: A request settings

        :return: Description of a table
        """
        return self._driver(
            _session_impl.describe_table_request_factory(self._state, path, settings),
            _apis.TableService.Stub,
            _apis.TableService.DescribeTable,
            _session_impl.wrap_describe_table_response,
            settings,
            (self._state, TableSchemeEntry),
            self._state.endpoint,
        )

    def copy_table(self, source_path, destination_path, settings=None):
        """Copy one table; thin wrapper over copy_tables."""
        return self.copy_tables([(source_path, destination_path)], settings=settings)

    def copy_tables(self, source_destination_pairs, settings=None):
        """Copy several tables in one request; pairs are (source, destination)."""
        return self._driver(
            _session_impl.copy_tables_request_factory(
                self._state, source_destination_pairs
            ),
            _apis.TableService.Stub,
            _apis.TableService.CopyTables,
            _session_impl.wrap_operation,
            settings,
            (self._state,),
            self._state.endpoint,
        )

    def rename_tables(self, rename_items, settings=None):
        """Rename several tables in one request; items are RenameItem instances."""
        return self._driver(
            _session_impl.rename_tables_request_factory(self._state, rename_items),
            _apis.TableService.Stub,
            _apis.TableService.RenameTables,
            _session_impl.wrap_operation,
            settings,
            (self._state,),
            self._state.endpoint,
        )
+
+
+class Session(BaseSession):
    def async_read_table(
        self,
        path,
        key_range=None,
        columns=(),
        ordered=False,
        row_limit=None,
        settings=None,
        use_snapshot=None,
    ):
        """
        Perform a read table request.

        :param path: A path to the table
        :param key_range: (optional) A KeyRange instance that describes a range to read. The KeyRange instance\
        should include from_bound and/or to_bound. Each of the bounds (if provided) should specify a value of the\
        key bound, and type of the key prefix. See an example above.
        :param columns: (optional) An iterable with table columns to read.
        :param ordered: (optional) A flag that indicates that result should be ordered.
        :param row_limit: (optional) A number of rows to read.

        :return: AsyncResponseIterator instance
        """
        # The async iterator relies on the optional grpc interceptor import.
        if interceptor is None:
            raise RuntimeError("Async read table is not available due to import issues")
        request = _session_impl.read_table_request_factory(
            self._state,
            path,
            key_range,
            columns,
            ordered,
            row_limit,
            use_snapshot=use_snapshot,
        )
        stream_it = self._driver(
            request,
            _apis.TableService.Stub,
            _apis.TableService.StreamReadTable,
            settings=settings,
        )
        return _utilities.AsyncResponseIterator(
            stream_it, _session_impl.wrap_read_table_response
        )
+
+ @_utilities.wrap_async_call_exceptions
+ def async_keep_alive(self, settings=None):
+ return self._driver.future(
+ _session_impl.keep_alive_request_factory(self._state),
+ _apis.TableService.Stub,
+ _apis.TableService.KeepAlive,
+ _session_impl.wrap_keep_alive_response,
+ settings,
+ (self._state, self),
+ self._state.endpoint,
+ )
+
+ @_utilities.wrap_async_call_exceptions
+ def async_create(self, settings=None):
+ if self._state.session_id is not None:
+ return _utilities.wrap_result_in_future(self)
+ create_settings = settings_impl.BaseRequestSettings()
+ if settings is not None:
+ create_settings = settings.make_copy()
+ create_settings = create_settings.with_header(
+ "x-ydb-client-capabilities", "session-balancer"
+ )
+ return self._driver.future(
+ _apis.ydb_table.CreateSessionRequest(),
+ _apis.TableService.Stub,
+ _apis.TableService.CreateSession,
+ _session_impl.initialize_session,
+ create_settings,
+ (self._state, self),
+ self._state.endpoint,
+ )
+
+ @_utilities.wrap_async_call_exceptions
+ def async_delete(self, settings=None):
+ return self._driver.future(
+ self._state.attach_request(_apis.ydb_table.DeleteSessionRequest()),
+ _apis.TableService.Stub,
+ _apis.TableService.DeleteSession,
+ _session_impl.cleanup_session,
+ settings,
+ (self._state, self),
+ self._state.endpoint,
+ )
+
+ @_utilities.wrap_async_call_exceptions
+ def async_execute_scheme(self, yql_text, settings=None):
+ return self._driver.future(
+ _session_impl.execute_scheme_request_factory(self._state, yql_text),
+ _apis.TableService.Stub,
+ _apis.TableService.ExecuteSchemeQuery,
+ _session_impl.wrap_execute_scheme_result,
+ settings,
+ (self._state,),
+ self._state.endpoint,
+ )
+
+ @_utilities.wrap_async_call_exceptions
+ def async_prepare(self, query, settings=None):
+ data_query, _ = self._state.lookup(query)
+ if data_query is not None:
+ return _utilities.wrap_result_in_future(data_query)
+ return self._driver.future(
+ _session_impl.prepare_request_factory(self._state, query),
+ _apis.TableService.Stub,
+ _apis.TableService.PrepareDataQuery,
+ _session_impl.wrap_prepare_query_response,
+ settings,
+ (
+ self._state,
+ query,
+ ),
+ self._state.endpoint,
+ )
+
+ @_utilities.wrap_async_call_exceptions
+ def async_create_table(self, path, table_description, settings=None):
+ return self._driver.future(
+ _session_impl.create_table_request_factory(
+ self._state, path, table_description
+ ),
+ _apis.TableService.Stub,
+ _apis.TableService.CreateTable,
+ _session_impl.wrap_operation,
+ settings,
+ (self._driver,),
+ self._state.endpoint,
+ )
+
+ @_utilities.wrap_async_call_exceptions
+ def async_drop_table(self, path, settings=None):
+ return self._driver.future(
+ self._state.attach_request(_apis.ydb_table.DropTableRequest(path=path)),
+ _apis.TableService.Stub,
+ _apis.TableService.DropTable,
+ _session_impl.wrap_operation,
+ settings,
+ (self._state,),
+ self._state.endpoint,
+ )
+
+ @_utilities.wrap_async_call_exceptions
+ def async_alter_table(
+ self,
+ path,
+ add_columns=None,
+ drop_columns=None,
+ settings=None,
+ alter_attributes=None,
+ add_indexes=None,
+ drop_indexes=None,
+ set_ttl_settings=None,
+ drop_ttl_settings=None,
+ add_column_families=None,
+ alter_column_families=None,
+ alter_storage_settings=None,
+ set_compaction_policy=None,
+ alter_partitioning_settings=None,
+ set_key_bloom_filter=None,
+ set_read_replicas_settings=None,
+ ):
+ return self._driver.future(
+ _session_impl.alter_table_request_factory(
+ self._state,
+ path,
+ add_columns,
+ drop_columns,
+ alter_attributes,
+ add_indexes,
+ drop_indexes,
+ set_ttl_settings,
+ drop_ttl_settings,
+ add_column_families,
+ alter_column_families,
+ alter_storage_settings,
+ set_compaction_policy,
+ alter_partitioning_settings,
+ set_key_bloom_filter,
+ set_read_replicas_settings,
+ ),
+ _apis.TableService.Stub,
+ _apis.TableService.AlterTable,
+ _session_impl.AlterTableOperation,
+ settings,
+ (self._driver,),
+ self._state.endpoint,
+ )
+
+ def async_copy_table(self, source_path, destination_path, settings=None):
+ return self.async_copy_tables(
+ [(source_path, destination_path)], settings=settings
+ )
+
+ @_utilities.wrap_async_call_exceptions
+ def async_copy_tables(self, source_destination_pairs, settings=None):
+ return self._driver.future(
+ _session_impl.copy_tables_request_factory(
+ self._state, source_destination_pairs
+ ),
+ _apis.TableService.Stub,
+ _apis.TableService.CopyTables,
+ _session_impl.wrap_operation,
+ settings,
+ (self._state,),
+ self._state.endpoint,
+ )
+
+ @_utilities.wrap_async_call_exceptions
+ def async_rename_tables(self, rename_tables, settings=None):
+ return self._driver.future(
+ _session_impl.rename_tables_request_factory(self._state, rename_tables),
+ _apis.TableService.Stub,
+ _apis.TableService.RenameTables,
+ _session_impl.wrap_operation,
+ settings,
+ (self._state,),
+ self._state.endpoint,
+ )
+
+ @_utilities.wrap_async_call_exceptions
+ def async_describe_table(self, path, settings=None):
+ return self._driver.future(
+ _session_impl.describe_table_request_factory(self._state, path, settings),
+ _apis.TableService.Stub,
+ _apis.TableService.DescribeTable,
+ _session_impl.wrap_describe_table_response,
+ settings,
+ (self._state, TableSchemeEntry),
+ self._state.endpoint,
+ )
+
+
@six.add_metaclass(abc.ABCMeta)
class ITxContext:
    """Abstract interface of a transaction context manager (see BaseTxContext)."""

    @abstractmethod
    def __init__(self, driver, session_state, session, tx_mode=None):
        """
        An object that provides a simple transaction context manager that allows statements execution
        in a transaction. You don't have to open transaction explicitly, because context manager encapsulates
        transaction control logic, and opens new transaction if:
        1) By explicit .begin();
        2) On execution of a first statement, which is strictly recommended method, because that avoids
        useless round trip

        This context manager is not thread-safe, so you should not manipulate on it concurrently.

        :param driver: A driver instance
        :param session_state: A state of session
        :param session: A session the transaction belongs to
        :param tx_mode: A transaction mode, which is one of the following choices:
         1) SerializableReadWrite() which is default mode;
         2) OnlineReadOnly();
         3) StaleReadOnly().
        """
        pass

    @abstractmethod
    def __enter__(self):
        """
        Enters a context manager and returns a session

        :return: A session instance
        """
        pass

    @abstractmethod
    def __exit__(self, *args, **kwargs):
        """
        Closes a transaction context manager and rollbacks transaction if
        it is not rolled back explicitly
        """
        pass

    @property
    @abstractmethod
    def session_id(self):
        """
        A transaction's session id

        :return: A transaction's session id
        """
        pass

    @property
    @abstractmethod
    def tx_id(self):
        """
        Returns the id of an open transaction or None otherwise

        :return: The id of an open transaction or None otherwise
        """
        pass

    @abstractmethod
    def execute(self, query, parameters=None, commit_tx=False, settings=None):
        """
        Sends a query (yql text or an instance of DataQuery) to be executed with parameters.
        Execution with parameters supported only for DataQuery instances and is not supported for yql text queries.

        :param query: A query, yql text or DataQuery instance.
        :param parameters: A dictionary with parameters values.
        :param commit_tx: A special flag that allows transaction commit
        :param settings: An additional request settings

        :return: A result sets or exception in case of execution errors
        """
        pass

    @abstractmethod
    def commit(self, settings=None):
        """
        Calls commit on a transaction if it is open; otherwise is a no-op. If transaction execution
        failed then this method raises PreconditionFailed.

        :param settings: A request settings

        :return: A committed transaction or exception if commit is failed
        """
        pass

    @abstractmethod
    def rollback(self, settings=None):
        """
        Calls rollback on a transaction if it is open; otherwise is a no-op. If transaction execution
        failed then this method raises PreconditionFailed.

        :param settings: A request settings

        :return: A rolled back transaction or exception if rollback is failed
        """
        pass

    @abstractmethod
    def begin(self, settings=None):
        """
        Explicitly begins a transaction

        :param settings: A request settings

        :return: An open transaction
        """
        pass
+
+
class BaseTxContext(ITxContext):
    """Synchronous transaction context manager over a YDB session.

    Tracks transaction state in a ``TxState`` and forwards begin/execute/
    commit/rollback RPCs through the driver.
    """

    __slots__ = (
        "_tx_state",
        "_session_state",
        "_driver",
        "session",
        "_finished",
        "_allow_split_transactions",
    )

    # Markers stored in self._finished once the transaction is finalized;
    # _check_split() uses them to still allow a repeated commit/rollback.
    _COMMIT = "commit"
    _ROLLBACK = "rollback"

    def __init__(
        self,
        driver,
        session_state,
        session,
        tx_mode=None,
        allow_split_transactions=_allow_split_transaction,
    ):
        """
        An object that provides a simple transaction context manager that allows statements execution
        in a transaction. You don't have to open transaction explicitly, because context manager encapsulates
        transaction control logic, and opens new transaction if:

        1) By explicit .begin() and .async_begin() methods;
        2) On execution of a first statement, which is strictly recommended method, because that avoids useless round trip

        This context manager is not thread-safe, so you should not manipulate on it concurrently.

        :param driver: A driver instance
        :param session_state: A state of session
        :param session: A session the transaction belongs to
        :param tx_mode: A transaction mode, which is one of the following choices:
         1) SerializableReadWrite() which is default mode;
         2) OnlineReadOnly();
         3) StaleReadOnly().
        :param allow_split_transactions: When falsy, any further operation on a
            finished transaction raises (see _check_split); defaults to the
            module-level _allow_split_transaction flag.
        """
        self._driver = driver
        # Default to the serializable read-write mode.
        tx_mode = SerializableReadWrite() if tx_mode is None else tx_mode
        self._tx_state = _tx_ctx_impl.TxState(tx_mode)
        self._session_state = session_state
        self.session = session
        # "" means not finished yet; otherwise _COMMIT or _ROLLBACK.
        self._finished = ""
        self._allow_split_transactions = allow_split_transactions

    def __enter__(self):
        """
        Enters a context manager and returns a session

        :return: A session instance
        """
        return self

    def __exit__(self, *args, **kwargs):
        """
        Closes a transaction context manager and rollbacks transaction if
        it is not rolled back explicitly
        """
        if self._tx_state.tx_id is not None:
            # It's strictly recommended to close transactions directly
            # by using commit_tx=True flag while executing statement or by
            # .commit() or .rollback() methods, but here we trying to do best
            # effort to avoid useless open transactions
            logger.warning("Potentially leaked tx: %s", self._tx_state.tx_id)
            try:
                self.rollback()
            except issues.Error:
                logger.warning("Failed to rollback leaked tx: %s", self._tx_state.tx_id)

            self._tx_state.tx_id = None

    @property
    def session_id(self):
        """
        A transaction's session id

        :return: A transaction's session id
        """
        return self._session_state.session_id

    @property
    def tx_id(self):
        """
        Returns a id of open transaction or None otherwise

        :return: A id of open transaction or None otherwise
        """
        return self._tx_state.tx_id

    def execute(self, query, parameters=None, commit_tx=False, settings=None):
        """
        Sends a query (yql text or an instance of DataQuery) to be executed with parameters.
        Execution with parameters supported only for DataQuery instances and is not supported yql text queries.

        :param query: A query, yql text or DataQuery instance.
        :param parameters: A dictionary with parameters values.
        :param commit_tx: A special flag that allows transaction commit
        :param settings: An additional request settings

        :return: A result sets or exception in case of execution errors
        """

        # Deny execution on an already-finished transaction (unless splitting
        # transactions is explicitly allowed).
        self._check_split()
        if commit_tx:
            self._set_finish(self._COMMIT)

        return self._driver(
            _tx_ctx_impl.execute_request_factory(
                self._session_state,
                self._tx_state,
                query,
                parameters,
                commit_tx,
                settings,
            ),
            _apis.TableService.Stub,
            _apis.TableService.ExecuteDataQuery,
            _tx_ctx_impl.wrap_result_and_tx_id,
            settings,
            (self._session_state, self._tx_state, query),
            self._session_state.endpoint,
        )

    def commit(self, settings=None):
        """
        Calls commit on a transaction if it is open otherwise is no-op. If transaction execution
        failed then this method raises PreconditionFailed.

        :param settings: A request settings

        :return: A committed transaction or exception if commit is failed
        """

        self._set_finish(self._COMMIT)

        # Nothing to commit when no transaction was opened (and it is not
        # marked dead): return self without an RPC.
        if self._tx_state.tx_id is None and not self._tx_state.dead:
            return self

        return self._driver(
            _tx_ctx_impl.commit_request_factory(self._session_state, self._tx_state),
            _apis.TableService.Stub,
            _apis.TableService.CommitTransaction,
            _tx_ctx_impl.wrap_result_on_rollback_or_commit_tx,
            settings,
            (self._session_state, self._tx_state, self),
            self._session_state.endpoint,
        )

    def rollback(self, settings=None):
        """
        Calls rollback on a transaction if it is open otherwise is no-op. If transaction execution
        failed then this method raises PreconditionFailed.

        :param settings: A request settings

        :return: A rolled back transaction or exception if rollback is failed
        """

        self._set_finish(self._ROLLBACK)

        # Nothing to roll back when no transaction was opened (and it is not
        # marked dead): return self without an RPC.
        if self._tx_state.tx_id is None and not self._tx_state.dead:
            return self

        return self._driver(
            _tx_ctx_impl.rollback_request_factory(self._session_state, self._tx_state),
            _apis.TableService.Stub,
            _apis.TableService.RollbackTransaction,
            _tx_ctx_impl.wrap_result_on_rollback_or_commit_tx,
            settings,
            (self._session_state, self._tx_state, self),
            self._session_state.endpoint,
        )

    def begin(self, settings=None):
        """
        Explicitly begins a transaction

        :param settings: A request settings

        :return: An open transaction
        """
        if self._tx_state.tx_id is not None:
            return self

        self._check_split()

        return self._driver(
            _tx_ctx_impl.begin_request_factory(self._session_state, self._tx_state),
            _apis.TableService.Stub,
            _apis.TableService.BeginTransaction,
            _tx_ctx_impl.wrap_tx_begin_response,
            settings,
            (self._session_state, self._tx_state, self),
            self._session_state.endpoint,
        )

    def _set_finish(self, val):
        """Mark the transaction as finished with ``val`` (_COMMIT or _ROLLBACK),
        first verifying the operation is allowed on the current state."""
        self._check_split(val)
        self._finished = val

    def _check_split(self, allow=""):
        """
        Deny all operations with transaction after commit/rollback.
        Exception: double commit and double rollbacks, because it is safe

        :param allow: the finishing operation that is still permitted to repeat
        """
        if self._allow_split_transactions:
            return

        if self._finished != "" and self._finished != allow:
            raise RuntimeError("Any operation with finished transaction is denied")
+
+
class TxContext(BaseTxContext):
    """Transaction context that adds ``async_*`` (future-returning) variants
    of the BaseTxContext operations."""

    @_utilities.wrap_async_call_exceptions
    def async_execute(self, query, parameters=None, commit_tx=False, settings=None):
        """
        Sends a query (yql text or an instance of DataQuery) to be executed with parameters.
        Execution with parameters supported only for DataQuery instances and not supported for YQL text.

        :param query: A query: YQL text or DataQuery instance.
        :param parameters: A dictionary with parameters values.
        :param commit_tx: A special flag that allows transaction commit
        :param settings: A request settings (an instance of ExecDataQuerySettings)

        :return: A future of query execution
        """

        self._check_split()

        return self._driver.future(
            _tx_ctx_impl.execute_request_factory(
                self._session_state,
                self._tx_state,
                query,
                parameters,
                commit_tx,
                settings,
            ),
            _apis.TableService.Stub,
            _apis.TableService.ExecuteDataQuery,
            _tx_ctx_impl.wrap_result_and_tx_id,
            settings,
            (
                self._session_state,
                self._tx_state,
                query,
            ),
            self._session_state.endpoint,
        )

    @_utilities.wrap_async_call_exceptions
    def async_commit(self, settings=None):
        """
        Calls commit on a transaction if it is open otherwise is no-op. If transaction execution
        failed then this method raises PreconditionFailed.

        :param settings: A request settings (an instance of BaseRequestSettings)

        :return: A future of commit call
        """
        self._set_finish(self._COMMIT)

        # No transaction was opened (and it is not dead): resolve immediately.
        if self._tx_state.tx_id is None and not self._tx_state.dead:
            return _utilities.wrap_result_in_future(self)

        return self._driver.future(
            _tx_ctx_impl.commit_request_factory(self._session_state, self._tx_state),
            _apis.TableService.Stub,
            _apis.TableService.CommitTransaction,
            _tx_ctx_impl.wrap_result_on_rollback_or_commit_tx,
            settings,
            (self._session_state, self._tx_state, self),
            self._session_state.endpoint,
        )

    @_utilities.wrap_async_call_exceptions
    def async_rollback(self, settings=None):
        """
        Calls rollback on a transaction if it is open otherwise is no-op. If transaction execution
        failed then this method raises PreconditionFailed.

        :param settings: A request settings

        :return: A future of rollback call
        """
        self._set_finish(self._ROLLBACK)

        # No transaction was opened (and it is not dead): resolve immediately.
        if self._tx_state.tx_id is None and not self._tx_state.dead:
            return _utilities.wrap_result_in_future(self)

        return self._driver.future(
            _tx_ctx_impl.rollback_request_factory(self._session_state, self._tx_state),
            _apis.TableService.Stub,
            _apis.TableService.RollbackTransaction,
            _tx_ctx_impl.wrap_result_on_rollback_or_commit_tx,
            settings,
            (self._session_state, self._tx_state, self),
            self._session_state.endpoint,
        )

    @_utilities.wrap_async_call_exceptions
    def async_begin(self, settings=None):
        """
        Explicitly begins a transaction

        :param settings: A request settings

        :return: A future of begin call
        """
        if self._tx_state.tx_id is not None:
            return _utilities.wrap_result_in_future(self)

        self._check_split()

        return self._driver.future(
            _tx_ctx_impl.begin_request_factory(self._session_state, self._tx_state),
            _apis.TableService.Stub,
            _apis.TableService.BeginTransaction,
            _tx_ctx_impl.wrap_tx_begin_response,
            settings,
            (self._session_state, self._tx_state, self),
            self._session_state.endpoint,
        )
+
+
class SessionPool(object):
    """A bounded pool of table-service sessions with tracing hooks."""

    def __init__(
        self,
        driver,
        size=100,
        workers_threads_count=4,
        initializer=None,
        min_pool_size=0,
    ):
        """
        An object that encapsulates session creation, deletion and etc. and maintains
        a pool of active sessions of specified size

        :param driver: A Driver instance
        :param size: A maximum number of sessions to maintain in the pool
        :param workers_threads_count: Number of worker threads for the pool implementation
        :param initializer: (optional) Callable applied to sessions by the pool impl -- TODO confirm signature
        :param min_pool_size: Lower bound of sessions the pool keeps ready
        """
        self._logger = logger.getChild(self.__class__.__name__)
        self._pool_impl = _sp_impl.SessionPoolImpl(
            self._logger,
            driver,
            size,
            workers_threads_count,
            initializer,
            min_pool_size,
        )
        # Reuse the driver's tracer when the driver exposes its config;
        # otherwise install a disabled (None-backed) tracer.
        if hasattr(driver, "_driver_config"):
            self.tracer = driver._driver_config.tracer
        else:
            self.tracer = ydb.Tracer(None)

    def retry_operation_sync(self, callee, retry_settings=None, *args, **kwargs):
        """Run ``callee(session, *args, **kwargs)`` with retries.

        A session is checked out of the pool for each attempt; delegates to
        the module-level ``retry_operation_sync`` helper of the same name.

        :param callee: Callable taking a session as its first argument
        :param retry_settings: (optional) RetrySettings; defaults are used when None
        """

        retry_settings = RetrySettings() if retry_settings is None else retry_settings

        def wrapped_callee():
            with self.checkout(
                timeout=retry_settings.get_session_client_timeout
            ) as session:
                return callee(session, *args, **kwargs)

        return retry_operation_sync(wrapped_callee, retry_settings)

    @property
    def active_size(self):
        # Number of sessions currently tracked by the pool implementation.
        return self._pool_impl.active_size

    @property
    def free_size(self):
        # Number of idle sessions available for checkout.
        return self._pool_impl.free_size

    @property
    def busy_size(self):
        # Number of sessions currently checked out.
        return self._pool_impl.busy_size

    @property
    def max_size(self):
        # Configured maximum pool size.
        return self._pool_impl.max_size

    @property
    def waiters_count(self):
        # Number of pending subscribers waiting for a session.
        return self._pool_impl.waiters_count

    @tracing.with_trace()
    def subscribe(self):
        """Subscribe for a session; returns a waiter resolved by the pool impl."""
        return self._pool_impl.subscribe()

    @tracing.with_trace()
    def unsubscribe(self, waiter):
        """Cancel a subscription previously returned by :meth:`subscribe`."""
        return self._pool_impl.unsubscribe(waiter)

    @tracing.with_trace()
    def acquire(self, blocking=True, timeout=None):
        """Take a session out of the pool.

        :param blocking: Whether to wait for a session to become available
        :param timeout: Wait timeout in seconds
        """
        return self._pool_impl.acquire(blocking, timeout)

    @tracing.with_trace()
    def release(self, session):
        """Return a session to the pool."""
        return self._pool_impl.put(session)

    def async_checkout(self):
        """
        Returns a context manager that asynchronously checkouts a session from the pool.

        """
        return AsyncSessionCheckout(self)

    def checkout(self, blocking=True, timeout=None):
        """Return a context manager that checks a session out for a ``with`` block."""
        return SessionCheckout(self, blocking, timeout)

    def stop(self, timeout=None):
        """Stop the pool implementation, waiting up to ``timeout`` seconds."""
        self._pool_impl.stop(timeout)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Pool is stopped when used as a context manager.
        self.stop()
+
+
class AsyncSessionCheckout(object):
    """Asynchronous pool checkout: subscribes on enter, unsubscribes on exit.

    The value yielded by the ``with`` statement is the pool subscription
    object returned by ``pool.subscribe()``.
    """

    __slots__ = ("subscription", "pool")

    def __init__(self, pool):
        """
        :param pool: A SessionPool instance.
        """
        self.pool = pool
        self.subscription = None

    def __enter__(self):
        waiter = self.pool.subscribe()
        self.subscription = waiter
        return waiter

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.pool.unsubscribe(self.subscription)
+
+
class SessionCheckout(object):
    """Checks a session out of a pool for the duration of a ``with`` block.

    The session acquired on enter is released back to the pool on exit;
    nothing is released when acquisition never happened.
    """

    __slots__ = ("_acquired", "_pool", "_blocking", "_timeout")

    def __init__(self, pool, blocking, timeout):
        """
        :param pool: A SessionPool instance
        :param blocking: A flag that specifies that session acquire method should blocks
        :param timeout: A timeout in seconds for session acquire
        """
        self._pool = pool
        self._blocking = blocking
        self._timeout = timeout
        self._acquired = None

    def __enter__(self):
        session = self._pool.acquire(self._blocking, self._timeout)
        self._acquired = session
        return session

    def __exit__(self, exc_type, exc_val, exc_tb):
        session = self._acquired
        if session is not None:
            self._pool.release(session)
diff --git a/contrib/python/ydb/py2/ydb/tracing.py b/contrib/python/ydb/py2/ydb/tracing.py
new file mode 100644
index 0000000000..798bab02a3
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/tracing.py
@@ -0,0 +1,188 @@
+from enum import IntEnum
+import functools
+
+
class TraceLevel(IntEnum):
    """Ordered levels attached to trace tags.

    ``_TracingCtx.trace`` drops tags whose level is greater than the tracer's
    configured verbose level (IntEnum value comparison), so a larger verbose
    level records more tags.
    """

    DEBUG = 0
    INFO = 1
    ERROR = 2
    NONE = 3
+
+
class _TracingCtx:
    """One tracing span, created via ``Tracer.trace``.

    Becomes a no-op context when the owning tracer has no underlying
    opentracing tracer configured.
    """

    def __init__(self, tracer, span_name):
        # Enabled only when an opentracing tracer instance was supplied.
        self._enabled = tracer._open_tracer is not None
        self._scope = None
        self._tracer = tracer
        self._span_name = span_name

    def __enter__(self):
        """
        Creates new span
        :return: self
        """
        if not self._enabled:
            return self
        self._scope = self._tracer._open_tracer.start_active_span(self._span_name)
        # Stash this ctx in the span baggage so the module-level trace()
        # helper can locate the active context later.
        self._scope.span.set_baggage_item("ctx", self)
        # Apply the tracer-wide tags configured via with_pre_tags().
        self.trace(self._tracer._pre_tags)
        return self

    @property
    def enabled(self):
        """
        :return: Is tracing enabled
        """
        return self._enabled

    def trace(self, tags, trace_level=TraceLevel.INFO):
        """
        Add tags to current span

        :param ydb.TraceLevel trace_level: level of tracing
        :param dict tags: Dict of tags
        """
        # Skip tags whose level exceeds the configured verbose level.
        if self._tracer._verbose_level < trace_level:
            return
        if not self.enabled or self._scope is None:
            return
        for key, value in tags.items():
            self._scope.span.set_tag(key, value)

    def __exit__(self, exc_type, exc_val, exc_tb):
        if not self.enabled:
            return
        if exc_val:
            # Record the failure tags and let the error callback attach
            # details before the scope closes.
            self.trace(self._tracer._post_tags_err, trace_level=TraceLevel.ERROR)
            self._tracer._on_err(self, exc_type, exc_val, exc_tb)
        else:
            self.trace(self._tracer._post_tags_ok)
        self._scope.close()
        self._scope = None
+
+
def with_trace(span_name=None):
    """Method decorator that runs the wrapped call inside a tracing span.

    The span name defaults to ``ClassName.method_name`` of the decorated
    method; pass *span_name* to override it. The owning instance must expose
    a ``tracer`` attribute whose ``trace(name)`` returns a context manager.
    """

    def decorator(f):
        @functools.wraps(f)
        def wrapper(self, *args, **kwargs):
            if span_name is None:
                name = "{}.{}".format(self.__class__.__name__, f.__name__)
            else:
                name = span_name
            with self.tracer.trace(name):
                return f(self, *args, **kwargs)

        return wrapper

    return decorator
+
+
def trace(tracer, tags, trace_level=TraceLevel.INFO):
    """Attach *tags* to the currently active span, if one exists.

    Returns the underlying ``ctx.trace()`` result when a bound context is
    found, ``False`` when there is no active scope or no bound context, and
    ``None`` when tracing is disabled.
    """
    if not tracer.enabled:
        return None

    active_scope = tracer._open_tracer.scope_manager.active
    if not active_scope:
        return False

    bound_ctx = active_scope.span.get_baggage_item("ctx")
    if bound_ctx is None:
        return False

    return bound_ctx.trace(tags, trace_level)
+
+
class Tracer:
    def __init__(self, tracer):
        """
        Wrapper around an (optional) opentracing tracer used by the driver.

        :param opentracing.Tracer tracer: opentracing.Tracer implementation. If None - tracing not enabled
        """
        self._open_tracer = tracer
        self._pre_tags = {}
        self._post_tags_ok = {}
        self._post_tags_err = {}
        # Default error hook does nothing; see with_on_error_callback().
        self._on_err = lambda *args, **kwargs: None
        self._verbose_level = TraceLevel.NONE

    @property
    def enabled(self):
        """Whether a real opentracing tracer is attached."""
        return self._open_tracer is not None

    def trace(self, span_name):
        """
        Open a tracing context (span) named *span_name*.

        :param str span_name:

        :return: A tracing context
        :rtype: _TracingCtx
        """
        return _TracingCtx(self, span_name)

    def with_pre_tags(self, tags):
        """
        Set the tags applied to every span right after it is opened.

        :param dict tags: tags dict

        :return: self
        """
        self._pre_tags = tags
        return self

    def with_post_tags(self, ok_tags, err_tags):
        """
        Set the tags applied just before a span closes.

        :param ok_tags: Tags added when no error was raised
        :param err_tags: Tags added when an exception occurred

        :return: self
        """
        self._post_tags_ok = ok_tags
        self._post_tags_err = err_tags
        return self

    def with_on_error_callback(self, callee):
        """
        Install a callback invoked when a span exits with an exception.

        :param callable[_TracingCtx, exc_type, exc_val, exc_tb] callee:

        :return: self
        """
        self._on_err = callee
        return self

    def with_verbose_level(self, level):
        """Set the maximum TraceLevel whose tags are recorded; returns self."""
        self._verbose_level = level
        return self

    @classmethod
    def default(cls, tracer):
        """
        Create a tracer preconfigured with the stock tags and error callback.

        :param tracer: opentracing tracer or None

        :return: new tracer
        """
        instance = cls(tracer)
        instance.with_pre_tags({"started": True})
        instance.with_post_tags({"ok": True}, {"ok": False})
        instance.with_on_error_callback(_default_on_error_callback)
        instance.with_verbose_level(TraceLevel.INFO)
        return instance
+
+
def _default_on_error_callback(ctx, exc_type, exc_val, exc_tb):
    """Record exception details as error-level tags on the failed span."""
    error_tags = {
        "error.type": exc_type.__name__,
        "error.value": exc_val,
        "error.traceback": exc_tb,
    }
    ctx.trace(error_tags, trace_level=TraceLevel.ERROR)
diff --git a/contrib/python/ydb/py2/ydb/types.py b/contrib/python/ydb/py2/ydb/types.py
new file mode 100644
index 0000000000..a62c8a74a0
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/types.py
@@ -0,0 +1,445 @@
+# -*- coding: utf-8 -*-
+import abc
+import enum
+import six
+import json
+from . import _utilities, _apis
+from datetime import date, datetime, timedelta
+import uuid
+import struct
+from google.protobuf import struct_pb2
+
+
+_SECONDS_IN_DAY = 60 * 60 * 24
+_EPOCH = datetime(1970, 1, 1)
+
if six.PY3:
    # Python 3: protobuf text fields already arrive as str, so no
    # bytes -> unicode conversion hook is needed.
    _from_bytes = None
else:

    def _from_bytes(x, table_client_settings):
        # Python 2: decode protobuf bytes into unicode via the shared helper.
        return _utilities.from_bytes(x)
+
+
def _from_date(x, table_client_settings):
    """Decode a protobuf Date value: raw days-since-epoch, or a
    ``datetime.date`` when native date results are enabled."""
    use_native = (
        table_client_settings is not None
        and table_client_settings._native_date_in_result_sets
    )
    if use_native:
        return _EPOCH.date() + timedelta(days=x.uint32_value)
    return x.uint32_value
+
+
def _to_date(pb, value):
    """Encode ``value`` (a ``datetime.date`` or raw day count) into ``pb.uint32_value``."""
    if not isinstance(value, date):
        pb.uint32_value = value
    else:
        pb.uint32_value = (value - _EPOCH.date()).days
+
+
def _from_datetime_number(x, table_client_settings):
    """Interpret ``x`` (seconds since epoch) as a naive UTC ``datetime`` when
    native datetime results are enabled; otherwise return ``x`` unchanged."""
    settings = table_client_settings
    if settings is None or not settings._native_datetime_in_result_sets:
        return x
    return datetime.utcfromtimestamp(x)
+
+
def _from_json(x, table_client_settings):
    """Return ``x`` decoded via ``json.loads`` when native JSON results are
    enabled; otherwise return the raw value (bytes-decoded on Python 2)."""
    native = (
        table_client_settings is not None
        and table_client_settings._native_json_in_result_sets
    )
    if native:
        return json.loads(x)
    if _from_bytes is None:
        return x
    return _from_bytes(x, table_client_settings)
+
+
def _to_uuid(value_pb, table_client_settings):
    # Reassemble the UUID from the two 64-bit halves stored in the protobuf.
    # NOTE(review): "QQ" uses native byte order; round-tripping with
    # _from_uuid presumably assumes a little-endian host — confirm for
    # big-endian platforms.
    return uuid.UUID(bytes_le=struct.pack("QQ", value_pb.low_128, value_pb.high_128))
+
+
def _from_uuid(pb, value):
    # Split the UUID's little-endian byte form into two native 64-bit halves
    # (inverse of _to_uuid).
    pb.low_128 = struct.unpack("Q", value.bytes_le[0:8])[0]
    pb.high_128 = struct.unpack("Q", value.bytes_le[8:16])[0]
+
+
def _from_interval(value_pb, table_client_settings):
    """Decode an Interval value: raw microseconds, or ``timedelta`` when
    native interval results are enabled."""
    micros = value_pb.int64_value
    if (
        table_client_settings is None
        or not table_client_settings._native_interval_in_result_sets
    ):
        return micros
    return timedelta(microseconds=micros)
+
+
def _timedelta_to_microseconds(value):
    # Exact integer arithmetic on days/seconds/microseconds instead of
    # total_seconds() to avoid float rounding.
    return (value.days * _SECONDS_IN_DAY + value.seconds) * 1000000 + value.microseconds
+
+
def _to_interval(pb, value):
    """Encode ``value`` (``timedelta`` or raw microsecond count) into ``pb.int64_value``."""
    if not isinstance(value, timedelta):
        pb.int64_value = value
    else:
        pb.int64_value = _timedelta_to_microseconds(value)
+
+
def _from_timestamp(value_pb, table_client_settings):
    """Decode a Timestamp value: raw microseconds since epoch, or a naive UTC
    ``datetime`` when native timestamp results are enabled."""
    micros = value_pb.uint64_value
    if (
        table_client_settings is not None
        and table_client_settings._native_timestamp_in_result_sets
    ):
        return _EPOCH + timedelta(microseconds=micros)
    return micros
+
+
def _to_timestamp(pb, value):
    """Encode ``value`` (``datetime`` or raw microsecond count) into ``pb.uint64_value``."""
    if not isinstance(value, datetime):
        pb.uint64_value = value
    else:
        pb.uint64_value = _timedelta_to_microseconds(value - _EPOCH)
+
+
@enum.unique
class PrimitiveType(enum.Enum):
    """
    Enumerates all available primitive types that can be used
    in computations.

    Each member's value is the tuple accepted by ``__init__``:
    ``(type id, proto value field name[, to_obj[, from_obj]])``, where
    ``to_obj``/``from_obj`` are optional converters between protobuf
    values and native Python objects.
    """

    Int32 = _apis.primitive_types.INT32, "int32_value"
    Uint32 = _apis.primitive_types.UINT32, "uint32_value"
    Int64 = _apis.primitive_types.INT64, "int64_value"
    Uint64 = _apis.primitive_types.UINT64, "uint64_value"
    Int8 = _apis.primitive_types.INT8, "int32_value"
    Uint8 = _apis.primitive_types.UINT8, "uint32_value"
    Int16 = _apis.primitive_types.INT16, "int32_value"
    Uint16 = _apis.primitive_types.UINT16, "uint32_value"
    Bool = _apis.primitive_types.BOOL, "bool_value"
    Double = _apis.primitive_types.DOUBLE, "double_value"
    Float = _apis.primitive_types.FLOAT, "float_value"

    String = _apis.primitive_types.STRING, "bytes_value"
    Utf8 = _apis.primitive_types.UTF8, "text_value", _from_bytes

    Yson = _apis.primitive_types.YSON, "bytes_value"
    Json = _apis.primitive_types.JSON, "text_value", _from_json
    JsonDocument = _apis.primitive_types.JSON_DOCUMENT, "text_value", _from_json
    # For the members below the proto field is None, so the converters receive
    # the whole value message (note: ``_to_uuid`` is the pb -> Python
    # direction despite its name).
    UUID = (_apis.primitive_types.UUID, None, _to_uuid, _from_uuid)
    Date = (
        _apis.primitive_types.DATE,
        None,
        _from_date,
        _to_date,
    )
    Datetime = (
        _apis.primitive_types.DATETIME,
        "uint32_value",
        _from_datetime_number,
    )
    Timestamp = (
        _apis.primitive_types.TIMESTAMP,
        None,
        _from_timestamp,
        _to_timestamp,
    )
    Interval = (
        _apis.primitive_types.INTERVAL,
        None,
        _from_interval,
        _to_interval,
    )

    DyNumber = _apis.primitive_types.DYNUMBER, "text_value", _from_bytes

    def __init__(self, idn, proto_field, to_obj=None, from_obj=None):
        # _idn_: protobuf PrimitiveTypeId; _proto_field: name of the Ydb.Value
        # field holding the payload (None when the converters work on the
        # whole message); _to_obj/_from_obj: optional pb <-> Python converters.
        self._idn_ = idn
        self._to_obj = to_obj
        self._from_obj = from_obj
        self._proto_field = proto_field

    def get_value(self, value_pb, table_client_settings):
        """
        Extracts value from protocol buffer
        :param value_pb: A protocol buffer
        :param table_client_settings: client settings that control conversion
            into native Python types (may be None)
        :return: A valid value of primitive type
        """
        if self._to_obj is not None and self._proto_field:
            return self._to_obj(
                getattr(value_pb, self._proto_field), table_client_settings
            )

        # Converter that consumes the whole value message (UUID, Date, ...).
        if self._to_obj is not None:
            return self._to_obj(value_pb, table_client_settings)

        # No converter: read the raw protobuf field.
        return getattr(value_pb, self._proto_field)

    def set_value(self, pb, value):
        """
        Sets value in a protocol buffer
        :param pb: A protocol buffer
        :param value: A valid value to set
        :return: None
        """
        if self._from_obj:
            self._from_obj(pb, value)
        else:
            setattr(pb, self._proto_field, value)

    def __str__(self):
        # Human-readable member name, e.g. "Int32".
        return self._name_

    @property
    def proto(self):
        """
        Returns protocol buffer representation of a primitive type
        :return: A protocol buffer representation
        """
        return _apis.ydb_value.Type(type_id=self._idn_)
+
+
class DataQuery(object):
    """A YQL query together with its parameter type map.

    ``name`` defaults to a hash of the query text when not supplied.
    """

    __slots__ = ("yql_text", "parameters_types", "name")

    def __init__(self, query_id, parameters_types, name=None):
        self.yql_text = query_id
        self.parameters_types = parameters_types
        if name is None:
            name = _utilities.get_query_hash(self.yql_text)
        self.name = name
+
+
#######################
# A deprecated alias  #
#######################
# Kept for backward compatibility; new code should use PrimitiveType.
DataType = PrimitiveType
+
+
class AbstractTypeBuilder(object):
    """Common interface for the composite type builders defined below."""

    # NOTE(review): Python 2 style metaclass declaration; on Python 3 the
    # ``__metaclass__`` attribute is ignored, so abstractness is not enforced
    # there (``six.add_metaclass`` would cover both) — confirm intent.
    __metaclass__ = abc.ABCMeta

    @property
    @abc.abstractmethod
    def proto(self):
        """
        Returns protocol buffer representation of a type
        :return: A protocol buffer representation
        """
        pass
+
+
class DecimalType(AbstractTypeBuilder):
    """Builder for the YDB Decimal type with the given precision and scale."""

    __slots__ = ("_proto", "_precision", "_scale")

    def __init__(self, precision=22, scale=9):
        """
        :param precision: A precision value
        :param scale: A scale value
        """
        self._precision = precision
        self._scale = scale
        self._proto = _apis.ydb_value.Type()
        self._proto.decimal_type.MergeFrom(
            _apis.ydb_value.DecimalType(precision=precision, scale=scale)
        )

    @property
    def precision(self):
        """Precision of this decimal type."""
        return self._precision

    @property
    def scale(self):
        """Scale of this decimal type."""
        return self._scale

    @property
    def proto(self):
        """
        Returns protocol buffer representation of a type
        :return: A protocol buffer representation
        """
        return self._proto

    def __eq__(self, other):
        return self._precision == other.precision and self._scale == other.scale

    def __str__(self):
        """Return the string representation, e.g. ``Decimal(22,9)``."""
        return "Decimal(%d,%d)" % (self._precision, self._scale)
+
+
class NullType(AbstractTypeBuilder):
    """Builder for the YDB Null type."""

    # NOTE(review): the "_repr" slot is declared but never assigned.
    __slots__ = ("_repr", "_proto")

    def __init__(self):
        self._proto = _apis.ydb_value.Type(null_type=struct_pb2.NULL_VALUE)

    @property
    def proto(self):
        """Protocol buffer representation of the type."""
        return self._proto

    def __str__(self):
        return "NullType"
+
+
class OptionalType(AbstractTypeBuilder):
    """Builder for ``Optional<item>``: wraps an inner type builder."""

    __slots__ = ("_repr", "_proto", "_item")

    def __init__(self, optional_type):
        """
        :param optional_type: An instance of an inner type builder
        """
        self._item = optional_type
        self._repr = "%s?" % str(optional_type)
        proto = _apis.ydb_value.Type()
        proto.optional_type.MergeFrom(
            _apis.ydb_value.OptionalType(item=optional_type.proto)
        )
        self._proto = proto

    @property
    def item(self):
        """The wrapped inner type builder."""
        return self._item

    @property
    def proto(self):
        """
        Returns protocol buffer representation of a type
        :return: A protocol buffer representation
        """
        return self._proto

    def __eq__(self, other):
        return self._item == other.item

    def __str__(self):
        return self._repr
+
+
class ListType(AbstractTypeBuilder):
    """Builder for ``List<item>``."""

    __slots__ = ("_repr", "_proto")

    def __init__(self, list_type):
        """
        :param list_type: List item type builder
        """
        self._repr = "List<%s>" % str(list_type)
        inner = _apis.ydb_value.ListType(item=list_type.proto)
        self._proto = _apis.ydb_value.Type(list_type=inner)

    @property
    def proto(self):
        """
        Returns protocol buffer representation of type
        :return: A protocol buffer representation
        """
        return self._proto

    def __str__(self):
        return self._repr
+
+
class DictType(AbstractTypeBuilder):
    """Builder for ``Dict<key, payload>``."""

    # Fixed: __slots__ previously listed ("__repr", "__proto"); after private
    # name mangling those slots never matched the "_repr"/"_proto" attributes
    # actually assigned in __init__ (it only worked because the base class has
    # no __slots__, so attributes silently landed in __dict__). Aligned with
    # ListType/OptionalType.
    __slots__ = ("_repr", "_proto")

    def __init__(self, key_type, payload_type):
        """
        :param key_type: Key type builder
        :param payload_type: Payload type builder
        """
        self._repr = "Dict<%s,%s>" % (str(key_type), str(payload_type))
        self._proto = _apis.ydb_value.Type(
            dict_type=_apis.ydb_value.DictType(
                key=key_type.proto,
                payload=payload_type.proto,
            )
        )

    @property
    def proto(self):
        """Protocol buffer representation of the type."""
        return self._proto

    def __str__(self):
        return self._repr
+
+
class TupleType(AbstractTypeBuilder):
    """Builder for ``Tuple<...>``; elements are appended via ``add_element``."""

    __slots__ = ("__elements_repr", "__proto")

    def __init__(self):
        self.__elements_repr = []
        self.__proto = _apis.ydb_value.Type(tuple_type=_apis.ydb_value.TupleType())

    def add_element(self, element_type):
        """
        Append one more element type to the tuple.

        :param element_type: Adds additional element of tuple
        :return: self, for chaining
        """
        self.__elements_repr.append(str(element_type))
        slot = self.__proto.tuple_type.elements.add()
        slot.MergeFrom(element_type.proto)
        return self

    @property
    def proto(self):
        """Protocol buffer representation of the type."""
        return self.__proto

    def __str__(self):
        return "Tuple<%s>" % ",".join(self.__elements_repr)
+
+
class StructType(AbstractTypeBuilder):
    """Builder for ``Struct<...>``; members are appended via ``add_member``."""

    __slots__ = ("__members_repr", "__proto")

    def __init__(self):
        self.__members_repr = []
        self.__proto = _apis.ydb_value.Type(struct_type=_apis.ydb_value.StructType())

    def add_member(self, name, member_type):
        """
        Append a named member to the struct.

        :param name: member name
        :param member_type: member type builder
        :return: self, for chaining
        """
        self.__members_repr.append("%s:%s" % (name, str(member_type)))
        slot = self.__proto.struct_type.members.add()
        slot.name = name
        slot.type.MergeFrom(member_type.proto)
        return self

    @property
    def proto(self):
        """Protocol buffer representation of the type."""
        return self.__proto

    def __str__(self):
        return "Struct<%s>" % ",".join(self.__members_repr)
+
+
class BulkUpsertColumns(AbstractTypeBuilder):
    """Builder describing the column structure for bulk upsert requests."""

    __slots__ = ("__columns_repr", "__proto")

    def __init__(self):
        self.__columns_repr = []
        self.__proto = _apis.ydb_value.Type(struct_type=_apis.ydb_value.StructType())

    def add_column(self, name, column_type):
        """
        Append a column description.

        :param name: A column name
        :param column_type: A column type
        :return: self, for chaining
        """
        self.__columns_repr.append("%s:%s" % (name, column_type))
        member = self.__proto.struct_type.members.add()
        member.name = name
        member.type.MergeFrom(column_type.proto)
        return self

    @property
    def proto(self):
        """Protocol buffer representation of the column struct."""
        return self.__proto

    def __str__(self):
        return "BulkUpsertColumns<%s>" % ",".join(self.__columns_repr)
diff --git a/contrib/python/ydb/py2/ydb/ydb_version.py b/contrib/python/ydb/py2/ydb/ydb_version.py
new file mode 100644
index 0000000000..c85cca1e48
--- /dev/null
+++ b/contrib/python/ydb/py2/ydb/ydb_version.py
@@ -0,0 +1 @@
+VERSION = "2.15.1"
diff --git a/contrib/python/ydb/py3/.dist-info/METADATA b/contrib/python/ydb/py3/.dist-info/METADATA
new file mode 100644
index 0000000000..058eb80b41
--- /dev/null
+++ b/contrib/python/ydb/py3/.dist-info/METADATA
@@ -0,0 +1,60 @@
+Metadata-Version: 2.1
+Name: ydb
+Version: 3.7.0
+Summary: YDB Python SDK
+Home-page: http://github.com/ydb-platform/ydb-python-sdk
+Author: Yandex LLC
+Author-email: ydb@yandex-team.ru
+License: Apache 2.0
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.6
+Description-Content-Type: text/markdown
+License-File: LICENSE
+License-File: AUTHORS
+Requires-Dist: grpcio >=1.42.0
+Requires-Dist: packaging
+Requires-Dist: protobuf <5.0.0,>=3.13.0
+Requires-Dist: aiohttp <4
+Provides-Extra: yc
+Requires-Dist: yandexcloud ; extra == 'yc'
+
+YDB Python SDK
+---
+[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://github.com/ydb-platform/ydb/blob/main/LICENSE)
+[![PyPI version](https://badge.fury.io/py/ydb.svg)](https://badge.fury.io/py/ydb)
+[![Functional tests](https://github.com/ydb-platform/ydb-python-sdk/actions/workflows/tests.yaml/badge.svg)](https://github.com/ydb-platform/ydb-python-sdk/actions/workflows/tests.yaml)
+[![Style checks](https://github.com/ydb-platform/ydb-python-sdk/actions/workflows/style.yaml/badge.svg)](https://github.com/ydb-platform/ydb-python-sdk/actions/workflows/style.yaml)
+
+Officially supported Python client for YDB.
+
+## Quickstart
+
+### Prerequisites
+
+- Python 3.8 or higher
+- `pip` version 9.0.1 or higher
+
+If necessary, upgrade your version of `pip`:
+
+```sh
+$ python -m pip install --upgrade pip
+```
+
+If you cannot upgrade `pip` due to a system-owned installation, you can
+run the example in a virtualenv:
+
+```sh
+$ python -m pip install virtualenv
+$ virtualenv venv
+$ source venv/bin/activate
+$ python -m pip install --upgrade pip
+```
+
+Install YDB python sdk:
+
+```sh
+$ python -m pip install ydb
+```
diff --git a/contrib/python/ydb/py3/.dist-info/top_level.txt b/contrib/python/ydb/py3/.dist-info/top_level.txt
new file mode 100644
index 0000000000..2578e07d93
--- /dev/null
+++ b/contrib/python/ydb/py3/.dist-info/top_level.txt
@@ -0,0 +1,2 @@
+tests
+ydb
diff --git a/contrib/python/ydb/py3/AUTHORS b/contrib/python/ydb/py3/AUTHORS
new file mode 100644
index 0000000000..69fee17e94
--- /dev/null
+++ b/contrib/python/ydb/py3/AUTHORS
@@ -0,0 +1,5 @@
+The following authors have created the source code of "Yandex Database Python SDK"
+published and distributed by YANDEX LLC as the owner:
+
+Vitalii Gridnev <gridnevvvit@gmail.com>
+Timofey Koolin <timofey.koolin@gmail.com>
diff --git a/contrib/python/ydb/py3/LICENSE b/contrib/python/ydb/py3/LICENSE
new file mode 100644
index 0000000000..cabac2dec9
--- /dev/null
+++ b/contrib/python/ydb/py3/LICENSE
@@ -0,0 +1,202 @@
+Copyright 2022 YANDEX LLC
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright 2022 YANDEX LLC
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/contrib/python/ydb/py3/README.md b/contrib/python/ydb/py3/README.md
new file mode 100644
index 0000000000..cfc57eb276
--- /dev/null
+++ b/contrib/python/ydb/py3/README.md
@@ -0,0 +1,37 @@
+YDB Python SDK
+---
+[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://github.com/ydb-platform/ydb/blob/main/LICENSE)
+[![PyPI version](https://badge.fury.io/py/ydb.svg)](https://badge.fury.io/py/ydb)
+[![Functional tests](https://github.com/ydb-platform/ydb-python-sdk/actions/workflows/tests.yaml/badge.svg)](https://github.com/ydb-platform/ydb-python-sdk/actions/workflows/tests.yaml)
+[![Style checks](https://github.com/ydb-platform/ydb-python-sdk/actions/workflows/style.yaml/badge.svg)](https://github.com/ydb-platform/ydb-python-sdk/actions/workflows/style.yaml)
+
+Officially supported Python client for YDB.
+
+## Quickstart
+
+### Prerequisites
+
+- Python 3.8 or higher
+- `pip` version 9.0.1 or higher
+
+If necessary, upgrade your version of `pip`:
+
+```sh
+$ python -m pip install --upgrade pip
+```
+
+If you cannot upgrade `pip` due to a system-owned installation, you can
+run the example in a virtualenv:
+
+```sh
+$ python -m pip install virtualenv
+$ virtualenv venv
+$ source venv/bin/activate
+$ python -m pip install --upgrade pip
+```
+
+Install YDB python sdk:
+
+```sh
+$ python -m pip install ydb
+```
diff --git a/contrib/python/ydb/py3/ya.make b/contrib/python/ydb/py3/ya.make
new file mode 100644
index 0000000000..f92572fa82
--- /dev/null
+++ b/contrib/python/ydb/py3/ya.make
@@ -0,0 +1,98 @@
+# Generated by devtools/yamaker (pypi).
+
+PY3_LIBRARY()
+
+VERSION(3.7.0)
+
+LICENSE(Apache-2.0)
+
+PEERDIR(
+ contrib/python/aiohttp
+ contrib/python/grpcio
+ contrib/python/packaging
+ contrib/python/protobuf
+)
+
+NO_LINT()
+
+NO_CHECK_IMPORTS(
+ ydb.public.api.grpc
+ ydb.public.api.grpc.*
+)
+
+PY_SRCS(
+ TOP_LEVEL
+ ydb/__init__.py
+ ydb/_apis.py
+ ydb/_errors.py
+ ydb/_grpc/__init__.py
+ ydb/_grpc/common/__init__.py
+ ydb/_grpc/grpcwrapper/__init__.py
+ ydb/_grpc/grpcwrapper/common_utils.py
+ ydb/_grpc/grpcwrapper/ydb_scheme.py
+ ydb/_grpc/grpcwrapper/ydb_topic.py
+ ydb/_grpc/grpcwrapper/ydb_topic_public_types.py
+ ydb/_session_impl.py
+ ydb/_sp_impl.py
+ ydb/_topic_common/__init__.py
+ ydb/_topic_common/common.py
+ ydb/_topic_reader/__init__.py
+ ydb/_topic_reader/datatypes.py
+ ydb/_topic_reader/topic_reader.py
+ ydb/_topic_reader/topic_reader_asyncio.py
+ ydb/_topic_reader/topic_reader_sync.py
+ ydb/_topic_writer/__init__.py
+ ydb/_topic_writer/topic_writer.py
+ ydb/_topic_writer/topic_writer_asyncio.py
+ ydb/_topic_writer/topic_writer_sync.py
+ ydb/_tx_ctx_impl.py
+ ydb/_utilities.py
+ ydb/aio/__init__.py
+ ydb/aio/_utilities.py
+ ydb/aio/connection.py
+ ydb/aio/credentials.py
+ ydb/aio/driver.py
+ ydb/aio/iam.py
+ ydb/aio/pool.py
+ ydb/aio/resolver.py
+ ydb/aio/scheme.py
+ ydb/aio/table.py
+ ydb/auth_helpers.py
+ ydb/connection.py
+ ydb/convert.py
+ ydb/credentials.py
+ ydb/dbapi/__init__.py
+ ydb/dbapi/connection.py
+ ydb/dbapi/cursor.py
+ ydb/dbapi/errors.py
+ ydb/default_pem.py
+ ydb/driver.py
+ ydb/export.py
+ ydb/global_settings.py
+ ydb/iam/__init__.py
+ ydb/iam/auth.py
+ ydb/import_client.py
+ ydb/interceptor.py
+ ydb/issues.py
+ ydb/operation.py
+ ydb/pool.py
+ ydb/resolver.py
+ ydb/scheme.py
+ ydb/scripting.py
+ ydb/settings.py
+ ydb/sqlalchemy/__init__.py
+ ydb/sqlalchemy/types.py
+ ydb/table.py
+ ydb/topic.py
+ ydb/tracing.py
+ ydb/types.py
+ ydb/ydb_version.py
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/python/ydb/py3/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+)
+
+END()
diff --git a/contrib/python/ydb/py3/ydb/__init__.py b/contrib/python/ydb/py3/ydb/__init__.py
new file mode 100644
index 0000000000..902b1e0850
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/__init__.py
@@ -0,0 +1,82 @@
+from pkgutil import extend_path
+
+__path__ = extend_path(__path__, __name__)
+
+from .credentials import * # noqa
+from .driver import * # noqa
+from .global_settings import * # noqa
+from .table import * # noqa
+from .issues import * # noqa
+from .types import * # noqa
+from .scheme import * # noqa
+from .settings import * # noqa
+from .resolver import * # noqa
+from .export import * # noqa
+from .auth_helpers import * # noqa
+from .operation import * # noqa
+from .scripting import * # noqa
+from .import_client import * # noqa
+from .tracing import * # noqa
+from .topic import * # noqa
+
# Best-effort import of the asyncio flavour of the SDK: any failure leaves
# only the synchronous API available.
try:
    import ydb.aio as aio  # noqa
except Exception:
    pass
+
# THIS AND BELOW IS AUTOGENERATED for arcadia only
# Selects SDK v3 behavior flags: if the opt-in marker package is present, the
# new strict defaults are used; otherwise fall back to the legacy behavior
# with a deprecation warning.
try:
    import kikimr.public.sdk.python.ydb_v3_new_behavior  # noqa
    global_allow_split_transactions(False)  # noqa
    global_allow_truncated_result(False)  # noqa
except ModuleNotFoundError:
    # Old, deprecated behavior.

    import warnings
    warnings.warn("Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior")

    global_allow_split_transactions(True)  # noqa
    global_allow_truncated_result(True)  # noqa
+
+
def construct_credentials_from_environ(tracer=None):
    """
    DEPRECATED: build credentials from environment variables; use
    ``ydb.credentials_from_env_variables`` instead.

    Recognized variables, checked in order:
      ``USE_METADATA_CREDENTIALS=1`` -> metadata credentials,
      ``YDB_TOKEN`` -> access-token credentials,
      ``SA_KEY_FILE`` -> service-account credentials (with optional
      ``SSL_ROOT_CERTIFICATES_FILE`` and ``IAM_ENDPOINT``).

    Returns None implicitly when none of the variables is set.
    """
    import os
    import warnings

    from . import credentials, tracing

    tracer = tracer if tracer is not None else tracing.Tracer(None)
    warnings.warn(
        "using construct_credentials_from_environ method DEPRECATED, use ydb.credentials_from_env_variables "
    )

    # dynamically import required authentication libraries
    use_metadata = os.getenv("USE_METADATA_CREDENTIALS")
    if use_metadata is not None and int(use_metadata) == 1:
        import ydb.iam

        tracing.trace(tracer, {"credentials.metadata": True})
        return ydb.iam.MetadataUrlCredentials()

    token = os.getenv("YDB_TOKEN")
    if token is not None:
        tracing.trace(tracer, {"credentials.access_token": True})
        return credentials.AuthTokenCredentials(token)

    sa_key_file = os.getenv("SA_KEY_FILE")
    if sa_key_file is not None:
        import ydb.iam

        tracing.trace(tracer, {"credentials.sa_key_file": True})
        root_certificates_file = os.getenv("SSL_ROOT_CERTIFICATES_FILE", None)
        iam_channel_credentials = {}
        if root_certificates_file is not None:
            iam_channel_credentials = {
                "root_certificates": read_bytes(root_certificates_file)  # noqa
            }
        return ydb.iam.ServiceAccountCredentials.from_file(
            sa_key_file,
            iam_channel_credentials=iam_channel_credentials,
            iam_endpoint=os.getenv("IAM_ENDPOINT", "iam.api.cloud.yandex.net:443"),
        )
diff --git a/contrib/python/ydb/py3/ydb/_apis.py b/contrib/python/ydb/py3/ydb/_apis.py
new file mode 100644
index 0000000000..27bc1bbec8
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_apis.py
@@ -0,0 +1,111 @@
+# -*- coding: utf-8 -*-
+import typing
+
+# Workaround for good IDE and universal for runtime
+if typing.TYPE_CHECKING:
+ from ._grpc.v4 import (
+ ydb_cms_v1_pb2_grpc,
+ ydb_discovery_v1_pb2_grpc,
+ ydb_scheme_v1_pb2_grpc,
+ ydb_table_v1_pb2_grpc,
+ ydb_operation_v1_pb2_grpc,
+ ydb_topic_v1_pb2_grpc,
+ )
+
+ from ._grpc.v4.protos import (
+ ydb_status_codes_pb2,
+ ydb_discovery_pb2,
+ ydb_scheme_pb2,
+ ydb_table_pb2,
+ ydb_value_pb2,
+ ydb_operation_pb2,
+ ydb_common_pb2,
+ )
+else:
+ from ._grpc.common import (
+ ydb_cms_v1_pb2_grpc,
+ ydb_discovery_v1_pb2_grpc,
+ ydb_scheme_v1_pb2_grpc,
+ ydb_table_v1_pb2_grpc,
+ ydb_operation_v1_pb2_grpc,
+ ydb_topic_v1_pb2_grpc,
+ )
+
+ from ._grpc.common.protos import (
+ ydb_status_codes_pb2,
+ ydb_discovery_pb2,
+ ydb_scheme_pb2,
+ ydb_table_pb2,
+ ydb_value_pb2,
+ ydb_operation_pb2,
+ ydb_common_pb2,
+ )
+
+
# Short module-level aliases for the generated protobuf enums/modules used
# throughout the SDK.
StatusIds = ydb_status_codes_pb2.StatusIds
FeatureFlag = ydb_common_pb2.FeatureFlag
primitive_types = ydb_value_pb2.Type.PrimitiveTypeId
ydb_value = ydb_value_pb2
ydb_scheme = ydb_scheme_pb2
ydb_table = ydb_table_pb2
ydb_discovery = ydb_discovery_pb2
ydb_operation = ydb_operation_pb2
+
+
class CmsService(object):
    # gRPC stub for the CMS (database management) service.
    Stub = ydb_cms_v1_pb2_grpc.CmsServiceStub
+
+
class DiscoveryService(object):
    # gRPC stub and RPC method names for endpoint discovery.
    Stub = ydb_discovery_v1_pb2_grpc.DiscoveryServiceStub
    ListEndpoints = "ListEndpoints"
+
+
class OperationService(object):
    # gRPC stub and RPC method names for long-running operation management.
    Stub = ydb_operation_v1_pb2_grpc.OperationServiceStub
    ForgetOperation = "ForgetOperation"
    GetOperation = "GetOperation"
    CancelOperation = "CancelOperation"
+
+
class SchemeService(object):
    # gRPC stub and RPC method names for scheme (directory/permissions) operations.
    Stub = ydb_scheme_v1_pb2_grpc.SchemeServiceStub
    MakeDirectory = "MakeDirectory"
    RemoveDirectory = "RemoveDirectory"
    ListDirectory = "ListDirectory"
    DescribePath = "DescribePath"
    ModifyPermissions = "ModifyPermissions"
+
+
class TableService(object):
    # gRPC stub and RPC method names for the table (data/query) service.
    Stub = ydb_table_v1_pb2_grpc.TableServiceStub

    StreamExecuteScanQuery = "StreamExecuteScanQuery"
    ExplainDataQuery = "ExplainDataQuery"
    CreateTable = "CreateTable"
    DropTable = "DropTable"
    AlterTable = "AlterTable"
    CopyTables = "CopyTables"
    RenameTables = "RenameTables"
    DescribeTable = "DescribeTable"
    CreateSession = "CreateSession"
    DeleteSession = "DeleteSession"
    ExecuteSchemeQuery = "ExecuteSchemeQuery"
    PrepareDataQuery = "PrepareDataQuery"
    ExecuteDataQuery = "ExecuteDataQuery"
    BeginTransaction = "BeginTransaction"
    CommitTransaction = "CommitTransaction"
    RollbackTransaction = "RollbackTransaction"
    KeepAlive = "KeepAlive"
    StreamReadTable = "StreamReadTable"
    BulkUpsert = "BulkUpsert"
+
+
class TopicService(object):
    # gRPC stub and RPC method names for the topic (pub/sub) service.
    Stub = ydb_topic_v1_pb2_grpc.TopicServiceStub

    CreateTopic = "CreateTopic"
    DescribeTopic = "DescribeTopic"
    DropTopic = "DropTopic"
    StreamRead = "StreamRead"
    StreamWrite = "StreamWrite"
diff --git a/contrib/python/ydb/py3/ydb/_errors.py b/contrib/python/ydb/py3/ydb/_errors.py
new file mode 100644
index 0000000000..17002d2574
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_errors.py
@@ -0,0 +1,53 @@
+from dataclasses import dataclass
+from typing import Optional
+
+from . import issues
+
# Error classes retried with the fast backoff policy.
_errors_retriable_fast_backoff_types = [
    issues.Unavailable,
]
# Error classes always retried with the slow backoff policy.
_errors_retriable_slow_backoff_types = [
    issues.Aborted,
    issues.BadSession,
    issues.Overloaded,
    issues.SessionPoolEmpty,
    issues.ConnectionError,
]
# Error classes retried (slow backoff) only when the operation is idempotent.
_errors_retriable_slow_backoff_idempotent_types = [
    issues.Undetermined,
]
+
+
def check_retriable_error(err, retry_settings, attempt):
    """Classify *err* and return an ErrorRetryInfo with the retry verdict
    and the sleep timeout (seconds) computed from the matching backoff
    policy for this *attempt*."""
    if isinstance(err, issues.NotFound):
        if not retry_settings.retry_not_found:
            return ErrorRetryInfo(False, None)
        return ErrorRetryInfo(True, retry_settings.fast_backoff.calc_timeout(attempt))

    if isinstance(err, issues.InternalError):
        if not retry_settings.retry_internal_error:
            return ErrorRetryInfo(False, None)
        return ErrorRetryInfo(True, retry_settings.slow_backoff.calc_timeout(attempt))

    if isinstance(err, tuple(_errors_retriable_fast_backoff_types)):
        return ErrorRetryInfo(True, retry_settings.fast_backoff.calc_timeout(attempt))

    if isinstance(err, tuple(_errors_retriable_slow_backoff_types)):
        return ErrorRetryInfo(True, retry_settings.slow_backoff.calc_timeout(attempt))

    if retry_settings.idempotent and isinstance(err, tuple(_errors_retriable_slow_backoff_idempotent_types)):
        return ErrorRetryInfo(True, retry_settings.slow_backoff.calc_timeout(attempt))

    return ErrorRetryInfo(False, None)
+
+
@dataclass
class ErrorRetryInfo:
    # Whether the failed operation may be retried.
    is_retriable: bool
    # Suggested sleep before the next attempt; None when not retriable.
    sleep_timeout_seconds: Optional[float]
diff --git a/contrib/python/ydb/py3/ydb/_grpc/__init__.py b/contrib/python/ydb/py3/ydb/_grpc/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_grpc/__init__.py
diff --git a/contrib/python/ydb/py3/ydb/_grpc/common/__init__.py b/contrib/python/ydb/py3/ydb/_grpc/common/__init__.py
new file mode 100644
index 0000000000..9dc3de8379
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_grpc/common/__init__.py
@@ -0,0 +1,13 @@
import sys
# Alias ydb._grpc.common(.protos) in sys.modules to the generated protobuf
# packages so the rest of the SDK imports them from one stable location,
# regardless of whether the generated code lives in ydb.public.api or in
# contrib.ydb.public.api (arcadia contrib build).
try:
    from ydb.public.api.grpc import *  # noqa
    sys.modules["ydb._grpc.common"] = sys.modules["ydb.public.api.grpc"]

    from ydb.public.api import protos
    sys.modules["ydb._grpc.common.protos"] = sys.modules["ydb.public.api.protos"]
except ImportError:
    from contrib.ydb.public.api.grpc import *  # noqa
    sys.modules["ydb._grpc.common"] = sys.modules["contrib.ydb.public.api.grpc"]

    from contrib.ydb.public.api import protos
    sys.modules["ydb._grpc.common.protos"] = sys.modules["contrib.ydb.public.api.protos"]
diff --git a/contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/__init__.py b/contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/__init__.py
diff --git a/contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/common_utils.py b/contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/common_utils.py
new file mode 100644
index 0000000000..bc294025f3
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/common_utils.py
@@ -0,0 +1,316 @@
+from __future__ import annotations
+
+import abc
+import asyncio
+import concurrent.futures
+import contextvars
+import datetime
+import functools
+import typing
+from typing import (
+ Optional,
+ Any,
+ Iterator,
+ AsyncIterator,
+ Callable,
+ Iterable,
+ Union,
+ Coroutine,
+)
+from dataclasses import dataclass
+
+import grpc
+from google.protobuf.message import Message
+from google.protobuf.duration_pb2 import Duration as ProtoDuration
+from google.protobuf.timestamp_pb2 import Timestamp as ProtoTimeStamp
+
+import ydb.aio
+
+# Workaround for good IDE and universal for runtime
+if typing.TYPE_CHECKING:
+ from ..v4.protos import ydb_topic_pb2, ydb_issue_message_pb2
+else:
+ from ..common.protos import ydb_topic_pb2, ydb_issue_message_pb2
+
+from ... import issues, connection
+
+
class IFromProto(abc.ABC):
    """Interface: the type can be constructed from a protobuf message."""

    @staticmethod
    @abc.abstractmethod
    def from_proto(msg: Message) -> Any:
        ...
+
+
class IFromProtoWithProtoType(IFromProto):
    """IFromProto that can also produce an empty instance of its proto type."""

    @staticmethod
    @abc.abstractmethod
    def empty_proto_message() -> Message:
        ...
+
+
class IToProto(abc.ABC):
    """Interface: the type can serialize itself into a protobuf message."""

    @abc.abstractmethod
    def to_proto(self) -> Message:
        ...
+
+
class IFromPublic(abc.ABC):
    """Interface: the type can be constructed from its public-API counterpart."""

    @staticmethod
    @abc.abstractmethod
    def from_public(o: typing.Any) -> typing.Any:
        ...
+
+
class IToPublic(abc.ABC):
    """Interface: the type can convert itself to its public-API counterpart."""

    @abc.abstractmethod
    def to_public(self) -> typing.Any:
        ...
+
+
class UnknownGrpcMessageError(issues.Error):
    """Raised when the server sends a stream message the wrapper does not recognize."""
    pass
+
+
# Sentinel pushed into the outgoing queue to terminate the request iterator.
_stop_grpc_connection_marker = object()
+
+
class QueueToIteratorAsyncIO:
    """Async iterator view of an asyncio.Queue.

    Iteration ends when the stop marker sentinel is dequeued.
    """

    __slots__ = ("_queue",)

    def __init__(self, q: asyncio.Queue):
        self._queue = q

    def __aiter__(self):
        return self

    async def __anext__(self):
        value = await self._queue.get()
        if value is not _stop_grpc_connection_marker:
            return value
        raise StopAsyncIteration()
+
+
class AsyncQueueToSyncIteratorAsyncIO:
    """Blocking iterator over an asyncio.Queue owned by the creating loop.

    Must be constructed inside a running event loop; __next__ may then be
    called from another thread and blocks until an item arrives.  Iteration
    ends when the stop marker sentinel is dequeued.
    """

    __slots__ = (
        "_loop",
        "_queue",
    )
    _queue: asyncio.Queue

    def __init__(self, q: asyncio.Queue):
        self._loop = asyncio.get_running_loop()
        self._queue = q

    def __iter__(self):
        return self

    def __next__(self):
        # Schedule the get() on the owning loop and block this thread on it.
        future = asyncio.run_coroutine_threadsafe(self._queue.get(), self._loop)
        value = future.result()
        if value is not _stop_grpc_connection_marker:
            return value
        raise StopIteration()
+
+
class SyncToAsyncIterator:
    """Wrap a blocking iterator as an async one by driving it on *executor*."""

    def __init__(self, sync_iterator: Iterator, executor: concurrent.futures.Executor):
        self._sync_iterator = sync_iterator
        self._executor = executor

    def __aiter__(self):
        return self

    async def __anext__(self):
        try:
            return await to_thread(self._sync_iterator.__next__, executor=self._executor)
        except StopIteration:
            # Translate the sync stop signal into the async protocol.
            raise StopAsyncIteration()
+
+
class IGrpcWrapperAsyncIO(abc.ABC):
    """Minimal asyncio interface of a bidirectional gRPC stream wrapper."""

    @abc.abstractmethod
    async def receive(self) -> Any:
        ...

    @abc.abstractmethod
    def write(self, wrap_message: IToProto):
        ...

    @abc.abstractmethod
    def close(self):
        ...
+
+
# Either the synchronous or the asyncio YDB driver.
SupportedDriverType = Union[ydb.Driver, ydb.aio.Driver]
+
+
class GrpcWrapperAsyncIO(IGrpcWrapperAsyncIO):
    """Bidirectional gRPC stream wrapper with an asyncio interface.

    Outgoing wrapper messages are converted to proto by write() and queued;
    the queue is drained by the request iterator handed to gRPC.  Incoming
    proto messages are awaited with receive() and converted back through
    convert_server_grpc_to_wrapper.  Works over the asyncio driver natively
    and over the sync driver via a dedicated one-thread executor.
    """

    from_client_grpc: asyncio.Queue
    from_server_grpc: AsyncIterator
    convert_server_grpc_to_wrapper: Callable[[Any], Any]
    # One of: "new" -> "started" -> "has_received_messages" (used for error reporting).
    _connection_state: str
    _stream_call: Optional[Union[grpc.aio.StreamStreamCall, "grpc._channel._MultiThreadedRendezvous"]]
    _wait_executor: Optional[concurrent.futures.ThreadPoolExecutor]

    def __init__(self, convert_server_grpc_to_wrapper):
        self.from_client_grpc = asyncio.Queue()
        self.convert_server_grpc_to_wrapper = convert_server_grpc_to_wrapper
        self._connection_state = "new"
        self._stream_call = None
        self._wait_executor = None

    def __del__(self):
        # Best-effort cleanup; never block garbage collection on the executor.
        self._clean_executor(wait=False)

    async def start(self, driver: SupportedDriverType, stub, method):
        # The asyncio driver is detected by its awaitable __call__.
        if asyncio.iscoroutinefunction(driver.__call__):
            await self._start_asyncio_driver(driver, stub, method)
        else:
            await self._start_sync_driver(driver, stub, method)
        self._connection_state = "started"

    def close(self):
        # Wake the request iterator so gRPC stops reading from the queue.
        self.from_client_grpc.put_nowait(_stop_grpc_connection_marker)
        if self._stream_call:
            self._stream_call.cancel()

        self._clean_executor(wait=True)

    def _clean_executor(self, wait: bool):
        if self._wait_executor:
            self._wait_executor.shutdown(wait)

    async def _start_asyncio_driver(self, driver: ydb.aio.Driver, stub, method):
        requests_iterator = QueueToIteratorAsyncIO(self.from_client_grpc)
        stream_call = await driver(
            requests_iterator,
            stub,
            method,
        )
        self._stream_call = stream_call
        self.from_server_grpc = stream_call.__aiter__()

    async def _start_sync_driver(self, driver: ydb.Driver, stub, method):
        requests_iterator = AsyncQueueToSyncIteratorAsyncIO(self.from_client_grpc)
        # One dedicated thread drives the blocking gRPC call and its iterator.
        self._wait_executor = concurrent.futures.ThreadPoolExecutor(max_workers=1)

        stream_call = await to_thread(driver, requests_iterator, stub, method, executor=self._wait_executor)
        self._stream_call = stream_call
        self.from_server_grpc = SyncToAsyncIterator(stream_call.__iter__(), self._wait_executor)

    async def receive(self) -> Any:
        # todo handle grpc exceptions and convert it to internal exceptions
        try:
            grpc_message = await self.from_server_grpc.__anext__()
        except grpc.RpcError as e:
            raise connection._rpc_error_handler(self._connection_state, e)

        issues._process_response(grpc_message)

        if self._connection_state != "has_received_messages":
            self._connection_state = "has_received_messages"

        return self.convert_server_grpc_to_wrapper(grpc_message)

    def write(self, wrap_message: IToProto):
        grpc_message = wrap_message.to_proto()
        self.from_client_grpc.put_nowait(grpc_message)
+
+
@dataclass(init=False)
class ServerStatus(IFromProto):
    """Operation status code plus the issue list reported by the server."""

    # NOTE(review): a previous __slots__ = ("_grpc_status_code", "_issues")
    # declaration was removed: its names never matched the attributes assigned
    # in __init__, so it had no effect (attributes landed in the __dict__
    # inherited from the slot-less ABC base) and only misled readers.

    def __init__(
        self,
        status: issues.StatusCode,
        issues: Iterable[Any],
    ):
        self.status = status
        self.issues = issues

    def __str__(self):
        return self.__repr__()

    @staticmethod
    def from_proto(
        msg: Union[
            ydb_topic_pb2.StreamReadMessage.FromServer,
            ydb_topic_pb2.StreamWriteMessage.FromServer,
        ]
    ) -> "ServerStatus":
        """Extract status/issues from any stream FromServer message."""
        return ServerStatus(msg.status, msg.issues)

    def is_success(self) -> bool:
        return self.status == issues.StatusCode.SUCCESS

    @classmethod
    def issue_to_str(cls, issue: ydb_issue_message_pb2.IssueMessage):
        """Render one IssueMessage as text, appending nested sub-issues."""
        res = """code: %s message: "%s" """ % (issue.issue_code, issue.message)
        if len(issue.issues) > 0:
            d = ", "
            res += d + d.join(str(sub_issue) for sub_issue in issue.issues)
        return res
+
+
def callback_from_asyncio(callback: Union[Callable, Coroutine]) -> Union[asyncio.Future, asyncio.Task]:
    """Schedule *callback* on the running loop.

    Coroutine functions are wrapped into a Task; plain callables run in the
    loop's default executor and are returned as a Future.
    """
    loop = asyncio.get_running_loop()

    if asyncio.iscoroutinefunction(callback):
        return loop.create_task(callback())
    else:
        return loop.run_in_executor(None, callback)
+
+
async def to_thread(func, *args, executor: Optional[concurrent.futures.Executor], **kwargs):
    """Run ``func(*args, **kwargs)`` in *executor* and await its result.

    The current :class:`contextvars.Context` is propagated into the worker
    thread, so context variables from the calling thread remain visible.
    This mirrors ``asyncio.to_thread`` from Python 3.10, extended with an
    explicit *executor* argument (None means the loop's default executor).
    """

    loop = asyncio.get_running_loop()
    bound = functools.partial(contextvars.copy_context().run, func, *args, **kwargs)
    return await loop.run_in_executor(executor, bound)
+
+
def proto_duration_from_timedelta(t: Optional[datetime.timedelta]) -> Optional[ProtoDuration]:
    """Convert a timedelta to a protobuf Duration; None passes through.

    Bug fix: the converted Duration was built but never returned, so the
    function previously always returned None.
    """
    if t is None:
        return None

    res = ProtoDuration()
    res.FromTimedelta(t)
    return res
+
+
def proto_timestamp_from_datetime(t: Optional[datetime.datetime]) -> Optional[ProtoTimeStamp]:
    """Convert a datetime to a protobuf Timestamp; None passes through.

    Bug fix: the converted Timestamp was built but never returned, so the
    function previously always returned None.
    """
    if t is None:
        return None

    res = ProtoTimeStamp()
    res.FromDatetime(t)
    return res
+
+
def datetime_from_proto_timestamp(
    ts: Optional[ProtoTimeStamp],
) -> Optional[datetime.datetime]:
    """Convert a protobuf Timestamp to a datetime; None passes through."""
    return None if ts is None else ts.ToDatetime()
+
+
def timedelta_from_proto_duration(
    d: Optional[ProtoDuration],
) -> Optional[datetime.timedelta]:
    """Convert a protobuf Duration to a timedelta; None passes through."""
    return None if d is None else d.ToTimedelta()
diff --git a/contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/ydb_scheme.py b/contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/ydb_scheme.py
new file mode 100644
index 0000000000..b992203570
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/ydb_scheme.py
@@ -0,0 +1,36 @@
+import datetime
+import enum
+from dataclasses import dataclass
+from typing import List
+
+
@dataclass
class Entry:
    """Description of a single scheme entry (directory, table, topic, ...)."""

    name: str
    owner: str
    type: "Entry.Type"
    effective_permissions: "Permissions"
    permissions: "Permissions"
    size_bytes: int
    created_at: datetime.datetime

    class Type(enum.IntEnum):
        # NOTE(review): the numeric codes presumably mirror the ydb scheme
        # proto entry-type enum (hence the gaps at 8-11 and 14) — confirm
        # against ydb_scheme_pb2 before relying on specific values.
        UNSPECIFIED = 0
        DIRECTORY = 1
        TABLE = 2
        PERS_QUEUE_GROUP = 3
        DATABASE = 4
        RTMR_VOLUME = 5
        BLOCK_STORE_VOLUME = 6
        COORDINATION_NODE = 7
        COLUMN_STORE = 12
        COLUMN_TABLE = 13
        SEQUENCE = 15
        REPLICATION = 16
        TOPIC = 17
+
+
@dataclass
class Permissions:
    """Set of permission names granted to one subject."""

    subject: str
    permission_names: List[str]
diff --git a/contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/ydb_topic.py b/contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/ydb_topic.py
new file mode 100644
index 0000000000..5b5e294a21
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/ydb_topic.py
@@ -0,0 +1,1159 @@
+from __future__ import annotations
+
+import datetime
+import enum
+import typing
+from dataclasses import dataclass, field
+from typing import List, Union, Dict, Optional
+
+from google.protobuf.message import Message
+
+from . import ydb_topic_public_types
+from ... import scheme
+from ... import issues
+
+# Workaround for good IDE and universal for runtime
+if typing.TYPE_CHECKING:
+ from ..v4.protos import ydb_scheme_pb2, ydb_topic_pb2
+else:
+ from ..common.protos import ydb_scheme_pb2, ydb_topic_pb2
+
+from .common_utils import (
+ IFromProto,
+ IFromProtoWithProtoType,
+ IToProto,
+ IToPublic,
+ IFromPublic,
+ ServerStatus,
+ UnknownGrpcMessageError,
+ proto_duration_from_timedelta,
+ proto_timestamp_from_datetime,
+ datetime_from_proto_timestamp,
+ timedelta_from_proto_duration,
+)
+
+
class Codec(int, IToPublic):
    """Topic message codec identifier (int subclass over the proto codec values)."""

    CODEC_UNSPECIFIED = 0
    CODEC_RAW = 1
    CODEC_GZIP = 2
    CODEC_LZOP = 3
    CODEC_ZSTD = 4

    @staticmethod
    def from_proto_iterable(codecs: typing.Iterable[int]) -> List["Codec"]:
        """Wrap raw proto codec ints into Codec instances."""
        result = []
        for raw in codecs:
            result.append(Codec(int(raw)))
        return result

    def to_public(self) -> ydb_topic_public_types.PublicCodec:
        """Convert to the public-API codec enum."""
        return ydb_topic_public_types.PublicCodec(int(self))
+
+
@dataclass
class SupportedCodecs(IToProto, IFromProto, IToPublic):
    """Wrapper around the list of codecs supported by a topic or writer."""

    codecs: List[Codec]

    def to_proto(self) -> ydb_topic_pb2.SupportedCodecs:
        return ydb_topic_pb2.SupportedCodecs(codecs=self.codecs)

    @staticmethod
    def from_proto(msg: Optional[ydb_topic_pb2.SupportedCodecs]) -> "SupportedCodecs":
        # A missing proto message maps to "no codecs".
        raw_codecs = [] if msg is None else msg.codecs
        return SupportedCodecs(codecs=Codec.from_proto_iterable(raw_codecs))

    def to_public(self) -> List[ydb_topic_public_types.PublicCodec]:
        return [codec.to_public() for codec in self.codecs]
+
+
@dataclass(order=True)
class OffsetsRange(IFromProto, IToProto):
    """
    half-opened interval, include [start, end) offsets
    """

    __slots__ = ("start", "end")

    start: int  # first offset
    end: int  # offset after last, included to range

    def __post_init__(self):
        # Enforce the start <= end invariant at construction time.
        if self.end < self.start:
            raise ValueError("offset end must be not less then start. Got start=%s end=%s" % (self.start, self.end))

    @staticmethod
    def from_proto(msg: ydb_topic_pb2.OffsetsRange) -> "OffsetsRange":
        return OffsetsRange(start=msg.start, end=msg.end)

    def to_proto(self) -> ydb_topic_pb2.OffsetsRange:
        return ydb_topic_pb2.OffsetsRange(start=self.start, end=self.end)

    def is_intersected_with(self, other: "OffsetsRange") -> bool:
        """True when any of the four boundary-containment cases holds."""
        cases = (
            self.start <= other.start < self.end,
            self.start < other.end <= self.end,
            other.start <= self.start < other.end,
            other.start < self.end <= other.end,
        )
        return any(cases)
+
+
@dataclass
class UpdateTokenRequest(IToProto):
    """Client message refreshing the auth token on a live stream."""

    token: str

    def to_proto(self) -> Message:
        proto = ydb_topic_pb2.UpdateTokenRequest()
        proto.token = self.token
        return proto
+
+
@dataclass
class UpdateTokenResponse(IFromProto):
    # Empty acknowledgement of UpdateTokenRequest; the proto carries no payload.
    @staticmethod
    def from_proto(msg: ydb_topic_pb2.UpdateTokenResponse) -> typing.Any:
        return UpdateTokenResponse()
+
+
+########################################################################################################################
+# StreamWrite
+########################################################################################################################
+
+
class StreamWriteMessage:
    """Namespace of wrapper types for the TopicService StreamWrite protocol.

    Each nested class mirrors one proto message of the bidirectional write
    stream: *Request types serialize to proto (IToProto), *Response types
    parse from proto (IFromProto).  FromClient/FromServer are the top-level
    envelope messages.
    """

    @dataclass()
    class InitRequest(IToProto):
        # First client message: identifies topic path, producer and partitioning.
        path: str
        producer_id: str
        write_session_meta: typing.Dict[str, str]
        partitioning: "StreamWriteMessage.PartitioningType"
        get_last_seq_no: bool

        def to_proto(self) -> ydb_topic_pb2.StreamWriteMessage.InitRequest:
            proto = ydb_topic_pb2.StreamWriteMessage.InitRequest()
            proto.path = self.path
            proto.producer_id = self.producer_id

            # partitioning is a oneof in the proto: message_group_id XOR partition_id.
            if self.partitioning is None:
                pass
            elif isinstance(self.partitioning, StreamWriteMessage.PartitioningMessageGroupID):
                proto.message_group_id = self.partitioning.message_group_id
            elif isinstance(self.partitioning, StreamWriteMessage.PartitioningPartitionID):
                proto.partition_id = self.partitioning.partition_id
            else:
                raise Exception("Bad partitioning type at StreamWriteMessage.InitRequest")

            if self.write_session_meta:
                for key in self.write_session_meta:
                    proto.write_session_meta[key] = self.write_session_meta[key]

            proto.get_last_seq_no = self.get_last_seq_no
            return proto

    @dataclass
    class InitResponse(IFromProto):
        last_seq_no: Union[int, None]
        session_id: str
        partition_id: int
        supported_codecs: typing.List[int]
        # Filled in afterwards by FromServer.from_proto, not by this parser.
        status: Optional[ServerStatus] = None

        @staticmethod
        def from_proto(
            msg: ydb_topic_pb2.StreamWriteMessage.InitResponse,
        ) -> "StreamWriteMessage.InitResponse":
            codecs = []  # type: typing.List[int]
            if msg.supported_codecs:
                for codec in msg.supported_codecs.codecs:
                    codecs.append(codec)

            return StreamWriteMessage.InitResponse(
                last_seq_no=msg.last_seq_no,
                session_id=msg.session_id,
                partition_id=msg.partition_id,
                supported_codecs=codecs,
            )

    @dataclass
    class WriteRequest(IToProto):
        # Batch of messages sharing one codec.
        messages: typing.List["StreamWriteMessage.WriteRequest.MessageData"]
        codec: int

        @dataclass
        class MessageData(IToProto):
            seq_no: int
            created_at: datetime.datetime
            data: bytes
            uncompressed_size: int
            partitioning: "StreamWriteMessage.PartitioningType"

            def to_proto(
                self,
            ) -> ydb_topic_pb2.StreamWriteMessage.WriteRequest.MessageData:
                proto = ydb_topic_pb2.StreamWriteMessage.WriteRequest.MessageData()
                proto.seq_no = self.seq_no
                proto.created_at.FromDatetime(self.created_at)
                proto.data = self.data
                proto.uncompressed_size = self.uncompressed_size

                # Same oneof handling as in InitRequest.to_proto.
                if self.partitioning is None:
                    pass
                elif isinstance(self.partitioning, StreamWriteMessage.PartitioningPartitionID):
                    proto.partition_id = self.partitioning.partition_id
                elif isinstance(self.partitioning, StreamWriteMessage.PartitioningMessageGroupID):
                    proto.message_group_id = self.partitioning.message_group_id
                else:
                    raise Exception("Bad partition at StreamWriteMessage.WriteRequest.MessageData")

                return proto

        def to_proto(self) -> ydb_topic_pb2.StreamWriteMessage.WriteRequest:
            proto = ydb_topic_pb2.StreamWriteMessage.WriteRequest()
            proto.codec = self.codec

            for message in self.messages:
                proto_mess = proto.messages.add()
                proto_mess.CopyFrom(message.to_proto())

            return proto

    @dataclass
    class WriteResponse(IFromProto):
        partition_id: int
        acks: typing.List["StreamWriteMessage.WriteResponse.WriteAck"]
        write_statistics: "StreamWriteMessage.WriteResponse.WriteStatistics"
        # Filled in afterwards by FromServer.from_proto, not by this parser.
        status: Optional[ServerStatus] = field(default=None)

        @staticmethod
        def from_proto(
            msg: ydb_topic_pb2.StreamWriteMessage.WriteResponse,
        ) -> "StreamWriteMessage.WriteResponse":
            acks = []
            for proto_ack in msg.acks:
                ack = StreamWriteMessage.WriteResponse.WriteAck.from_proto(proto_ack)
                acks.append(ack)
            write_statistics = StreamWriteMessage.WriteResponse.WriteStatistics(
                persisting_time=msg.write_statistics.persisting_time.ToTimedelta(),
                min_queue_wait_time=msg.write_statistics.min_queue_wait_time.ToTimedelta(),
                max_queue_wait_time=msg.write_statistics.max_queue_wait_time.ToTimedelta(),
                partition_quota_wait_time=msg.write_statistics.partition_quota_wait_time.ToTimedelta(),
                topic_quota_wait_time=msg.write_statistics.topic_quota_wait_time.ToTimedelta(),
            )
            return StreamWriteMessage.WriteResponse(
                partition_id=msg.partition_id,
                acks=acks,
                write_statistics=write_statistics,
                status=None,
            )

        @dataclass
        class WriteAck(IFromProto):
            seq_no: int
            # StatusWritten, StatusSkipped, or the raw proto reason code when
            # the server sends a skip reason this SDK version does not know.
            message_write_status: Union[
                "StreamWriteMessage.WriteResponse.WriteAck.StatusWritten",
                "StreamWriteMessage.WriteResponse.WriteAck.StatusSkipped",
                int,
            ]

            @classmethod
            def from_proto(cls, proto_ack: ydb_topic_pb2.StreamWriteMessage.WriteResponse.WriteAck):
                if proto_ack.HasField("written"):
                    message_write_status = StreamWriteMessage.WriteResponse.WriteAck.StatusWritten(
                        proto_ack.written.offset
                    )
                elif proto_ack.HasField("skipped"):
                    reason = proto_ack.skipped.reason
                    try:
                        message_write_status = StreamWriteMessage.WriteResponse.WriteAck.StatusSkipped(
                            reason=StreamWriteMessage.WriteResponse.WriteAck.StatusSkipped.Reason.from_protobuf_code(
                                reason
                            )
                        )
                    except ValueError:
                        # Unknown reason code: keep the raw int instead of failing.
                        message_write_status = reason
                else:
                    raise NotImplementedError("unexpected ack status")

                return StreamWriteMessage.WriteResponse.WriteAck(
                    seq_no=proto_ack.seq_no,
                    message_write_status=message_write_status,
                )

            @dataclass
            class StatusWritten:
                offset: int

            @dataclass
            class StatusSkipped:
                reason: "StreamWriteMessage.WriteResponse.WriteAck.StatusSkipped.Reason"

                class Reason(enum.Enum):
                    UNSPECIFIED = 0
                    ALREADY_WRITTEN = 1

                    @classmethod
                    def from_protobuf_code(
                        cls, code: int
                    ) -> Union["StreamWriteMessage.WriteResponse.WriteAck.StatusSkipped.Reason", int]:
                        # Returns the raw code when it is not a known enum member.
                        try:
                            return StreamWriteMessage.WriteResponse.WriteAck.StatusSkipped.Reason(code)
                        except ValueError:
                            return code

        @dataclass
        class WriteStatistics:
            persisting_time: datetime.timedelta
            min_queue_wait_time: datetime.timedelta
            max_queue_wait_time: datetime.timedelta
            partition_quota_wait_time: datetime.timedelta
            topic_quota_wait_time: datetime.timedelta

    @dataclass
    class PartitioningMessageGroupID:
        message_group_id: str

    @dataclass
    class PartitioningPartitionID:
        partition_id: int

    # Messages may be routed by message group, by explicit partition, or not at all.
    PartitioningType = Union[PartitioningMessageGroupID, PartitioningPartitionID, None]

    @dataclass
    class FromClient(IToProto):
        """Envelope for any client-to-server message of the write stream."""

        value: "WriterMessagesFromClientToServer"

        def __init__(self, value: "WriterMessagesFromClientToServer"):
            self.value = value

        def to_proto(self) -> Message:
            res = ydb_topic_pb2.StreamWriteMessage.FromClient()
            value = self.value
            if isinstance(value, StreamWriteMessage.WriteRequest):
                res.write_request.CopyFrom(value.to_proto())
            elif isinstance(value, StreamWriteMessage.InitRequest):
                res.init_request.CopyFrom(value.to_proto())
            elif isinstance(value, UpdateTokenRequest):
                res.update_token_request.CopyFrom(value.to_proto())
            else:
                raise Exception("Unknown outcoming grpc message: %s" % value)
            return res

    class FromServer(IFromProto):
        """Envelope parser for any server-to-client message of the write stream."""

        @staticmethod
        def from_proto(msg: ydb_topic_pb2.StreamWriteMessage.FromServer) -> typing.Any:
            message_type = msg.WhichOneof("server_message")
            if message_type == "write_response":
                res = StreamWriteMessage.WriteResponse.from_proto(msg.write_response)
            elif message_type == "init_response":
                res = StreamWriteMessage.InitResponse.from_proto(msg.init_response)
            elif message_type == "update_token_response":
                res = UpdateTokenResponse.from_proto(msg.update_token_response)
            else:
                # todo log instead of exception - for allow add messages in the future
                raise UnknownGrpcMessageError("Unexpected proto message: %s" % msg)

            # Attach the envelope-level status/issues to the parsed payload.
            res.status = ServerStatus(msg.status, msg.issues)
            return res
+
+
# Closed unions of the messages that may travel in each direction on the
# StreamWrite bidirectional stream.
WriterMessagesFromClientToServer = Union[
    StreamWriteMessage.InitRequest, StreamWriteMessage.WriteRequest, UpdateTokenRequest
]
WriterMessagesFromServerToClient = Union[
    StreamWriteMessage.InitResponse,
    StreamWriteMessage.WriteResponse,
    UpdateTokenResponse,
]
+
+
+########################################################################################################################
+# StreamRead
+########################################################################################################################
+
+
+class StreamReadMessage:
+ @dataclass
+ class PartitionSession(IFromProto):
+ partition_session_id: int
+ path: str
+ partition_id: int
+
+ @staticmethod
+ def from_proto(
+ msg: ydb_topic_pb2.StreamReadMessage.PartitionSession,
+ ) -> "StreamReadMessage.PartitionSession":
+ return StreamReadMessage.PartitionSession(
+ partition_session_id=msg.partition_session_id,
+ path=msg.path,
+ partition_id=msg.partition_id,
+ )
+
+ @dataclass
+ class InitRequest(IToProto):
+ topics_read_settings: List["StreamReadMessage.InitRequest.TopicReadSettings"]
+ consumer: str
+
+ def to_proto(self) -> ydb_topic_pb2.StreamReadMessage.InitRequest:
+ res = ydb_topic_pb2.StreamReadMessage.InitRequest()
+ res.consumer = self.consumer
+ for settings in self.topics_read_settings:
+ res.topics_read_settings.append(settings.to_proto())
+ return res
+
+ @dataclass
+ class TopicReadSettings(IToProto):
+ path: str
+ partition_ids: List[int] = field(default_factory=list)
+ max_lag: Optional[datetime.timedelta] = None
+ read_from: Optional[datetime.datetime] = None
+
+ def to_proto(
+ self,
+ ) -> ydb_topic_pb2.StreamReadMessage.InitRequest.TopicReadSettings:
+ res = ydb_topic_pb2.StreamReadMessage.InitRequest.TopicReadSettings()
+ res.path = self.path
+ res.partition_ids.extend(self.partition_ids)
+ max_lag = proto_duration_from_timedelta(self.max_lag)
+ if max_lag is not None:
+ res.max_lag = max_lag
+
+ read_from = proto_timestamp_from_datetime(self.read_from)
+ if read_from is not None:
+ res.read_from = read_from
+
+ return res
+
+ @dataclass
+ class InitResponse(IFromProto):
+ session_id: str
+
+ @staticmethod
+ def from_proto(
+ msg: ydb_topic_pb2.StreamReadMessage.InitResponse,
+ ) -> "StreamReadMessage.InitResponse":
+ return StreamReadMessage.InitResponse(session_id=msg.session_id)
+
+ @dataclass
+ class ReadRequest(IToProto):
+ bytes_size: int
+
+ def to_proto(self) -> ydb_topic_pb2.StreamReadMessage.ReadRequest:
+ res = ydb_topic_pb2.StreamReadMessage.ReadRequest()
+ res.bytes_size = self.bytes_size
+ return res
+
+ @dataclass
+ class ReadResponse(IFromProto):
+ partition_data: List["StreamReadMessage.ReadResponse.PartitionData"]
+ bytes_size: int
+
+ @staticmethod
+ def from_proto(
+ msg: ydb_topic_pb2.StreamReadMessage.ReadResponse,
+ ) -> "StreamReadMessage.ReadResponse":
+ partition_data = []
+ for proto_partition_data in msg.partition_data:
+ partition_data.append(StreamReadMessage.ReadResponse.PartitionData.from_proto(proto_partition_data))
+ return StreamReadMessage.ReadResponse(
+ partition_data=partition_data,
+ bytes_size=msg.bytes_size,
+ )
+
+ @dataclass
+ class MessageData(IFromProto):
+ offset: int
+ seq_no: int
+ created_at: datetime.datetime
+ data: bytes
+ uncompresed_size: int
+ message_group_id: str
+
+ @staticmethod
+ def from_proto(
+ msg: ydb_topic_pb2.StreamReadMessage.ReadResponse.MessageData,
+ ) -> "StreamReadMessage.ReadResponse.MessageData":
+ return StreamReadMessage.ReadResponse.MessageData(
+ offset=msg.offset,
+ seq_no=msg.seq_no,
+ created_at=msg.created_at.ToDatetime(),
+ data=msg.data,
+ uncompresed_size=msg.uncompressed_size,
+ message_group_id=msg.message_group_id,
+ )
+
+ @dataclass
+ class Batch(IFromProto):
+ message_data: List["StreamReadMessage.ReadResponse.MessageData"]
+ producer_id: str
+ write_session_meta: Dict[str, str]
+ codec: int
+ written_at: datetime.datetime
+
+ @staticmethod
+ def from_proto(
+ msg: ydb_topic_pb2.StreamReadMessage.ReadResponse.Batch,
+ ) -> "StreamReadMessage.ReadResponse.Batch":
+ message_data = []
+ for message in msg.message_data:
+ message_data.append(StreamReadMessage.ReadResponse.MessageData.from_proto(message))
+ return StreamReadMessage.ReadResponse.Batch(
+ message_data=message_data,
+ producer_id=msg.producer_id,
+ write_session_meta=dict(msg.write_session_meta),
+ codec=msg.codec,
+ written_at=msg.written_at.ToDatetime(),
+ )
+
+ @dataclass
+ class PartitionData(IFromProto):
+ partition_session_id: int
+ batches: List["StreamReadMessage.ReadResponse.Batch"]
+
+ @staticmethod
+ def from_proto(
+ msg: ydb_topic_pb2.StreamReadMessage.ReadResponse.PartitionData,
+ ) -> "StreamReadMessage.ReadResponse.PartitionData":
+ batches = []
+ for proto_batch in msg.batches:
+ batches.append(StreamReadMessage.ReadResponse.Batch.from_proto(proto_batch))
+ return StreamReadMessage.ReadResponse.PartitionData(
+ partition_session_id=msg.partition_session_id,
+ batches=batches,
+ )
+
+ @dataclass
+ class CommitOffsetRequest(IToProto):
+ commit_offsets: List["PartitionCommitOffset"]
+
+ def to_proto(self) -> ydb_topic_pb2.StreamReadMessage.CommitOffsetRequest:
+ res = ydb_topic_pb2.StreamReadMessage.CommitOffsetRequest(
+ commit_offsets=list(
+ map(
+ StreamReadMessage.CommitOffsetRequest.PartitionCommitOffset.to_proto,
+ self.commit_offsets,
+ )
+ ),
+ )
+ return res
+
+ @dataclass
+ class PartitionCommitOffset(IToProto):
+ partition_session_id: int
+ offsets: List["OffsetsRange"]
+
+ def to_proto(
+ self,
+ ) -> ydb_topic_pb2.StreamReadMessage.CommitOffsetRequest.PartitionCommitOffset:
+ res = ydb_topic_pb2.StreamReadMessage.CommitOffsetRequest.PartitionCommitOffset(
+ partition_session_id=self.partition_session_id,
+ offsets=list(map(OffsetsRange.to_proto, self.offsets)),
+ )
+ return res
+
+ @dataclass
+ class CommitOffsetResponse(IFromProto):
+ partitions_committed_offsets: List["StreamReadMessage.CommitOffsetResponse.PartitionCommittedOffset"]
+
+ @staticmethod
+ def from_proto(
+ msg: ydb_topic_pb2.StreamReadMessage.CommitOffsetResponse,
+ ) -> "StreamReadMessage.CommitOffsetResponse":
+ return StreamReadMessage.CommitOffsetResponse(
+ partitions_committed_offsets=list(
+ map(
+ StreamReadMessage.CommitOffsetResponse.PartitionCommittedOffset.from_proto,
+ msg.partitions_committed_offsets,
+ )
+ )
+ )
+
+ @dataclass
+ class PartitionCommittedOffset(IFromProto):
+ partition_session_id: int
+ committed_offset: int
+
+ @staticmethod
+ def from_proto(
+ msg: ydb_topic_pb2.StreamReadMessage.CommitOffsetResponse.PartitionCommittedOffset,
+ ) -> "StreamReadMessage.CommitOffsetResponse.PartitionCommittedOffset":
+ return StreamReadMessage.CommitOffsetResponse.PartitionCommittedOffset(
+ partition_session_id=msg.partition_session_id,
+ committed_offset=msg.committed_offset,
+ )
+
    @dataclass
    class PartitionSessionStatusRequest(IToProto):
        """Client request for the current status of one partition session."""

        partition_session_id: int

        def to_proto(self) -> ydb_topic_pb2.StreamReadMessage.PartitionSessionStatusRequest:
            """Serialize to the grpc message."""
            return ydb_topic_pb2.StreamReadMessage.PartitionSessionStatusRequest(
                partition_session_id=self.partition_session_id
            )
+
    @dataclass
    class PartitionSessionStatusResponse(IFromProto):
        """Server response with the current reading state of a partition session."""

        partition_session_id: int
        partition_offsets: "OffsetsRange"
        committed_offset: int
        # Copied verbatim from the proto field; presumably a server-side write
        # timestamp watermark — TODO(review): confirm units/semantics.
        write_time_high_watermark: float

        @staticmethod
        def from_proto(
            msg: ydb_topic_pb2.StreamReadMessage.PartitionSessionStatusResponse,
        ) -> "StreamReadMessage.PartitionSessionStatusResponse":
            """Parse from the grpc message."""
            return StreamReadMessage.PartitionSessionStatusResponse(
                partition_session_id=msg.partition_session_id,
                partition_offsets=OffsetsRange.from_proto(msg.partition_offsets),
                committed_offset=msg.committed_offset,
                write_time_high_watermark=msg.write_time_high_watermark,
            )
+
    @dataclass
    class StartPartitionSessionRequest(IFromProto):
        """Server request to start reading a partition within this session."""

        partition_session: "StreamReadMessage.PartitionSession"
        committed_offset: int
        partition_offsets: "OffsetsRange"

        @staticmethod
        def from_proto(
            msg: ydb_topic_pb2.StreamReadMessage.StartPartitionSessionRequest,
        ) -> "StreamReadMessage.StartPartitionSessionRequest":
            """Parse from the grpc message."""
            return StreamReadMessage.StartPartitionSessionRequest(
                partition_session=StreamReadMessage.PartitionSession.from_proto(msg.partition_session),
                committed_offset=msg.committed_offset,
                partition_offsets=OffsetsRange.from_proto(msg.partition_offsets),
            )
+
+ @dataclass
+ class StartPartitionSessionResponse(IToProto):
+ partition_session_id: int
+ read_offset: Optional[int]
+ commit_offset: Optional[int]
+
+ def to_proto(
+ self,
+ ) -> ydb_topic_pb2.StreamReadMessage.StartPartitionSessionResponse:
+ res = ydb_topic_pb2.StreamReadMessage.StartPartitionSessionResponse()
+ res.partition_session_id = self.partition_session_id
+ if self.read_offset is not None:
+ res.read_offset = self.read_offset
+ if self.commit_offset is not None:
+ res.commit_offset = self.commit_offset
+ return res
+
+ @dataclass
+ class StopPartitionSessionRequest(IFromProto):
+ partition_session_id: int
+ graceful: bool
+ committed_offset: int
+
+ @staticmethod
+ def from_proto(
+ msg: ydb_topic_pb2.StreamReadMessage.StopPartitionSessionRequest,
+ ) -> StreamReadMessage.StopPartitionSessionRequest:
+ return StreamReadMessage.StopPartitionSessionRequest(
+ partition_session_id=msg.partition_session_id,
+ graceful=msg.graceful,
+ committed_offset=msg.committed_offset,
+ )
+
    @dataclass
    class StopPartitionSessionResponse(IToProto):
        """Client confirmation that the partition session has been stopped."""

        partition_session_id: int

        def to_proto(self) -> ydb_topic_pb2.StreamReadMessage.StopPartitionSessionResponse:
            """Serialize to the grpc message."""
            return ydb_topic_pb2.StreamReadMessage.StopPartitionSessionResponse(
                partition_session_id=self.partition_session_id,
            )
+
+ @dataclass
+ class FromClient(IToProto):
+ client_message: "ReaderMessagesFromClientToServer"
+
+ def __init__(self, client_message: "ReaderMessagesFromClientToServer"):
+ self.client_message = client_message
+
+ def to_proto(self) -> ydb_topic_pb2.StreamReadMessage.FromClient:
+ res = ydb_topic_pb2.StreamReadMessage.FromClient()
+ if isinstance(self.client_message, StreamReadMessage.ReadRequest):
+ res.read_request.CopyFrom(self.client_message.to_proto())
+ elif isinstance(self.client_message, StreamReadMessage.CommitOffsetRequest):
+ res.commit_offset_request.CopyFrom(self.client_message.to_proto())
+ elif isinstance(self.client_message, StreamReadMessage.InitRequest):
+ res.init_request.CopyFrom(self.client_message.to_proto())
+ elif isinstance(self.client_message, UpdateTokenRequest):
+ res.update_token_request.CopyFrom(self.client_message.to_proto())
+ elif isinstance(self.client_message, StreamReadMessage.StartPartitionSessionResponse):
+ res.start_partition_session_response.CopyFrom(self.client_message.to_proto())
+ elif isinstance(self.client_message, StreamReadMessage.StopPartitionSessionResponse):
+ res.stop_partition_session_response.CopyFrom(self.client_message.to_proto())
+ elif isinstance(self.client_message, StreamReadMessage.PartitionSessionStatusRequest):
+ res.start_partition_session_response.CopyFrom(self.client_message.to_proto())
+ else:
+ raise NotImplementedError("Unknown message type: %s" % type(self.client_message))
+ return res
+
+ @dataclass
+ class FromServer(IFromProto):
+ server_message: "ReaderMessagesFromServerToClient"
+ server_status: ServerStatus
+
+ @staticmethod
+ def from_proto(
+ msg: ydb_topic_pb2.StreamReadMessage.FromServer,
+ ) -> "StreamReadMessage.FromServer":
+ mess_type = msg.WhichOneof("server_message")
+ server_status = ServerStatus.from_proto(msg)
+ if mess_type == "read_response":
+ return StreamReadMessage.FromServer(
+ server_status=server_status,
+ server_message=StreamReadMessage.ReadResponse.from_proto(msg.read_response),
+ )
+ elif mess_type == "commit_offset_response":
+ return StreamReadMessage.FromServer(
+ server_status=server_status,
+ server_message=StreamReadMessage.CommitOffsetResponse.from_proto(msg.commit_offset_response),
+ )
+ elif mess_type == "init_response":
+ return StreamReadMessage.FromServer(
+ server_status=server_status,
+ server_message=StreamReadMessage.InitResponse.from_proto(msg.init_response),
+ )
+ elif mess_type == "start_partition_session_request":
+ return StreamReadMessage.FromServer(
+ server_status=server_status,
+ server_message=StreamReadMessage.StartPartitionSessionRequest.from_proto(
+ msg.start_partition_session_request,
+ ),
+ )
+ elif mess_type == "stop_partition_session_request":
+ return StreamReadMessage.FromServer(
+ server_status=server_status,
+ server_message=StreamReadMessage.StopPartitionSessionRequest.from_proto(
+ msg.stop_partition_session_request
+ ),
+ )
+ elif mess_type == "update_token_response":
+ return StreamReadMessage.FromServer(
+ server_status=server_status,
+ server_message=UpdateTokenResponse.from_proto(msg.update_token_response),
+ )
+ elif mess_type == "partition_session_status_response":
+ return StreamReadMessage.FromServer(
+ server_status=server_status,
+ server_message=StreamReadMessage.PartitionSessionStatusResponse.from_proto(
+ msg.partition_session_status_response
+ ),
+ )
+ else:
+ raise issues.UnexpectedGrpcMessage(
+ "Unexpected message while parse ReaderMessagesFromServerToClient: '%s'" % mess_type
+ )
+
+
# All messages a reader client may send to the server; wrapped into the proto
# oneof by StreamReadMessage.FromClient.to_proto.
ReaderMessagesFromClientToServer = Union[
    StreamReadMessage.InitRequest,
    StreamReadMessage.ReadRequest,
    StreamReadMessage.CommitOffsetRequest,
    StreamReadMessage.PartitionSessionStatusRequest,
    UpdateTokenRequest,
    StreamReadMessage.StartPartitionSessionResponse,
    StreamReadMessage.StopPartitionSessionResponse,
]

# All messages the server may send to a reader client; produced by
# StreamReadMessage.FromServer.from_proto.
ReaderMessagesFromServerToClient = Union[
    StreamReadMessage.InitResponse,
    StreamReadMessage.ReadResponse,
    StreamReadMessage.CommitOffsetResponse,
    StreamReadMessage.PartitionSessionStatusResponse,
    UpdateTokenResponse,
    StreamReadMessage.StartPartitionSessionRequest,
    StreamReadMessage.StopPartitionSessionRequest,
]
+
+
@dataclass
class MultipleWindowsStat(IFromProto, IToPublic):
    """A counter aggregated over one-minute, one-hour and one-day windows."""

    per_minute: int
    per_hour: int
    per_day: int

    @staticmethod
    def from_proto(
        msg: Optional[ydb_topic_pb2.MultipleWindowsStat],
    ) -> Optional["MultipleWindowsStat"]:
        """Parse from the grpc message; a None message yields None."""
        if msg is None:
            return None
        return MultipleWindowsStat(
            per_minute=msg.per_minute,
            per_hour=msg.per_hour,
            per_day=msg.per_day,
        )

    def to_public(self) -> ydb_topic_public_types.PublicMultipleWindowsStat:
        """Convert to the public stat type."""
        return ydb_topic_public_types.PublicMultipleWindowsStat(
            per_minute=self.per_minute,
            per_hour=self.per_hour,
            per_day=self.per_day,
        )
+
+
@dataclass
class Consumer(IToProto, IFromProto, IFromPublic, IToPublic):
    """Internal representation of a topic consumer."""

    name: str
    important: bool
    read_from: typing.Optional[datetime.datetime]
    supported_codecs: SupportedCodecs
    attributes: Dict[str, str]
    consumer_stats: typing.Optional["Consumer.ConsumerStats"]

    def to_proto(self) -> ydb_topic_pb2.Consumer:
        """Serialize to the grpc message."""
        return ydb_topic_pb2.Consumer(
            name=self.name,
            important=self.important,
            read_from=proto_timestamp_from_datetime(self.read_from),
            supported_codecs=self.supported_codecs.to_proto(),
            attributes=self.attributes,
            # consumer_stats - readonly field
        )

    @staticmethod
    def from_proto(msg: Optional[ydb_topic_pb2.Consumer]) -> Optional["Consumer"]:
        """Parse from the grpc message.

        Fix: returns None for a None message, matching the declared Optional
        signature (previously a None message raised AttributeError), and
        consistent with MultipleWindowsStat.from_proto.
        """
        if msg is None:
            return None
        return Consumer(
            name=msg.name,
            important=msg.important,
            read_from=datetime_from_proto_timestamp(msg.read_from),
            supported_codecs=SupportedCodecs.from_proto(msg.supported_codecs),
            attributes=dict(msg.attributes),
            consumer_stats=Consumer.ConsumerStats.from_proto(msg.consumer_stats),
        )

    @staticmethod
    def from_public(consumer: ydb_topic_public_types.PublicConsumer):
        """Build from the public consumer type; None passes through."""
        if consumer is None:
            return None

        supported_codecs = []
        if consumer.supported_codecs is not None:
            supported_codecs = consumer.supported_codecs

        return Consumer(
            name=consumer.name,
            important=consumer.important,
            read_from=consumer.read_from,
            supported_codecs=SupportedCodecs(codecs=supported_codecs),
            attributes=consumer.attributes,
            # Stats are server-side only and never supplied by the user.
            consumer_stats=None,
        )

    def to_public(self) -> ydb_topic_public_types.PublicConsumer:
        """Convert to the public consumer type (stats are not exposed here)."""
        return ydb_topic_public_types.PublicConsumer(
            name=self.name,
            important=self.important,
            read_from=self.read_from,
            supported_codecs=self.supported_codecs.to_public(),
            attributes=self.attributes,
        )

    @dataclass
    class ConsumerStats(IFromProto):
        """Read-only server statistics for a consumer."""

        min_partitions_last_read_time: datetime.datetime
        max_read_time_lag: datetime.timedelta
        max_write_time_lag: datetime.timedelta
        bytes_read: MultipleWindowsStat

        @staticmethod
        def from_proto(
            msg: ydb_topic_pb2.Consumer.ConsumerStats,
        ) -> "Consumer.ConsumerStats":
            """Parse from the grpc message."""
            return Consumer.ConsumerStats(
                min_partitions_last_read_time=datetime_from_proto_timestamp(msg.min_partitions_last_read_time),
                max_read_time_lag=timedelta_from_proto_duration(msg.max_read_time_lag),
                max_write_time_lag=timedelta_from_proto_duration(msg.max_write_time_lag),
                bytes_read=MultipleWindowsStat.from_proto(msg.bytes_read),
            )
+
+
@dataclass
class PartitioningSettings(IToProto, IFromProto):
    """Partition-count limits of a topic."""

    min_active_partitions: int
    partition_count_limit: int

    def to_proto(self) -> ydb_topic_pb2.PartitioningSettings:
        """Serialize to the grpc message."""
        return ydb_topic_pb2.PartitioningSettings(
            min_active_partitions=self.min_active_partitions,
            partition_count_limit=self.partition_count_limit,
        )

    @staticmethod
    def from_proto(msg: ydb_topic_pb2.PartitioningSettings) -> "PartitioningSettings":
        """Parse from the grpc message."""
        return PartitioningSettings(
            min_active_partitions=msg.min_active_partitions,
            partition_count_limit=msg.partition_count_limit,
        )
+
+
class MeteringMode(int, IFromProto, IFromPublic, IToPublic):
    """Topic metering mode; values mirror the proto enum."""

    UNSPECIFIED = 0
    RESERVED_CAPACITY = 1
    REQUEST_UNITS = 2

    @staticmethod
    def from_public(
        m: Optional[ydb_topic_public_types.PublicMeteringMode],
    ) -> Optional["MeteringMode"]:
        """Convert from the public enum; None passes through."""
        if m is None:
            return None

        return MeteringMode(m)

    @staticmethod
    def from_proto(code: Optional[int]) -> Optional["MeteringMode"]:
        """Convert from a raw proto enum value; None passes through."""
        if code is None:
            return None

        return MeteringMode(code)

    def to_public(self) -> ydb_topic_public_types.PublicMeteringMode:
        """Convert to the public enum; unknown values map to UNSPECIFIED.

        Bug fixes: the converted value was computed but never returned (the
        method always returned None), and the except clause caught KeyError
        while an IntEnum lookup of an invalid value raises ValueError.
        """
        try:
            return ydb_topic_public_types.PublicMeteringMode(int(self))
        except ValueError:
            return ydb_topic_public_types.PublicMeteringMode.UNSPECIFIED
+
+
@dataclass
class CreateTopicRequest(IToProto, IFromPublic):
    """Internal representation of a CreateTopic call."""

    path: str
    partitioning_settings: "PartitioningSettings"
    retention_period: typing.Optional[datetime.timedelta]
    retention_storage_mb: typing.Optional[int]
    supported_codecs: "SupportedCodecs"
    partition_write_speed_bytes_per_second: typing.Optional[int]
    partition_write_burst_bytes: typing.Optional[int]
    attributes: Dict[str, str]
    consumers: List["Consumer"]
    metering_mode: "MeteringMode"

    def to_proto(self) -> ydb_topic_pb2.CreateTopicRequest:
        """Serialize to the grpc message."""
        return ydb_topic_pb2.CreateTopicRequest(
            path=self.path,
            partitioning_settings=self.partitioning_settings.to_proto(),
            retention_period=proto_duration_from_timedelta(self.retention_period),
            retention_storage_mb=self.retention_storage_mb,
            supported_codecs=self.supported_codecs.to_proto(),
            partition_write_speed_bytes_per_second=self.partition_write_speed_bytes_per_second,
            partition_write_burst_bytes=self.partition_write_burst_bytes,
            attributes=self.attributes,
            consumers=[consumer.to_proto() for consumer in self.consumers],
            # MeteringMode subclasses int, so the enum value is passed directly.
            metering_mode=self.metering_mode,
        )

    @staticmethod
    def from_public(req: ydb_topic_public_types.CreateTopicRequestParams):
        """Build from public request params, normalizing optional collections."""
        supported_codecs = []

        if req.supported_codecs is not None:
            supported_codecs = req.supported_codecs

        consumers = []
        if req.consumers is not None:
            for consumer in req.consumers:
                # A bare string is shorthand for a consumer with default settings.
                if isinstance(consumer, str):
                    consumer = ydb_topic_public_types.PublicConsumer(name=consumer)
                consumers.append(Consumer.from_public(consumer))

        return CreateTopicRequest(
            path=req.path,
            partitioning_settings=PartitioningSettings(
                min_active_partitions=req.min_active_partitions,
                partition_count_limit=req.partition_count_limit,
            ),
            retention_period=req.retention_period,
            retention_storage_mb=req.retention_storage_mb,
            supported_codecs=SupportedCodecs(
                codecs=supported_codecs,
            ),
            partition_write_speed_bytes_per_second=req.partition_write_speed_bytes_per_second,
            partition_write_burst_bytes=req.partition_write_burst_bytes,
            attributes=req.attributes,
            consumers=consumers,
            metering_mode=MeteringMode.from_public(req.metering_mode),
        )
+
+
@dataclass
class CreateTopicResult:
    """CreateTopic returns no payload; kept as an explicit empty result type."""

    pass
+
+
@dataclass
class DescribeTopicRequest:
    """Internal parameters of a DescribeTopic call."""

    path: str
    # When True the server also fills partition/topic statistics.
    include_stats: bool
+
+
@dataclass
class DescribeTopicResult(IFromProtoWithProtoType, IToPublic):
    """Internal mirror of ydb_topic_pb2.DescribeTopicResult."""

    # Raw scheme entry of the topic object ("self" in the proto).
    self_proto: ydb_scheme_pb2.Entry
    partitioning_settings: PartitioningSettings
    partitions: List["DescribeTopicResult.PartitionInfo"]
    retention_period: datetime.timedelta
    retention_storage_mb: int
    supported_codecs: SupportedCodecs
    partition_write_speed_bytes_per_second: int
    partition_write_burst_bytes: int
    attributes: Dict[str, str]
    consumers: List["Consumer"]
    metering_mode: MeteringMode
    topic_stats: "DescribeTopicResult.TopicStats"

    @staticmethod
    def from_proto(msg: ydb_topic_pb2.DescribeTopicResult) -> "DescribeTopicResult":
        """Parse from the grpc message."""
        return DescribeTopicResult(
            self_proto=msg.self,
            partitioning_settings=PartitioningSettings.from_proto(msg.partitioning_settings),
            partitions=list(map(DescribeTopicResult.PartitionInfo.from_proto, msg.partitions)),
            # NOTE(review): the raw proto Duration is stored as-is although the
            # field is annotated datetime.timedelta — confirm this is intended.
            retention_period=msg.retention_period,
            retention_storage_mb=msg.retention_storage_mb,
            supported_codecs=SupportedCodecs.from_proto(msg.supported_codecs),
            partition_write_speed_bytes_per_second=msg.partition_write_speed_bytes_per_second,
            partition_write_burst_bytes=msg.partition_write_burst_bytes,
            attributes=dict(msg.attributes),
            consumers=list(map(Consumer.from_proto, msg.consumers)),
            metering_mode=MeteringMode.from_proto(msg.metering_mode),
            topic_stats=DescribeTopicResult.TopicStats.from_proto(msg.topic_stats),
        )

    @staticmethod
    def empty_proto_message() -> ydb_topic_pb2.DescribeTopicResult:
        """Return an empty proto message for unpacking operation results."""
        return ydb_topic_pb2.DescribeTopicResult()

    def to_public(self) -> ydb_topic_public_types.PublicDescribeTopicResult:
        """Convert to the public describe result type."""
        return ydb_topic_public_types.PublicDescribeTopicResult(
            self=scheme._wrap_scheme_entry(self.self_proto),
            min_active_partitions=self.partitioning_settings.min_active_partitions,
            partition_count_limit=self.partitioning_settings.partition_count_limit,
            partitions=list(map(DescribeTopicResult.PartitionInfo.to_public, self.partitions)),
            retention_period=self.retention_period,
            retention_storage_mb=self.retention_storage_mb,
            supported_codecs=self.supported_codecs.to_public(),
            partition_write_speed_bytes_per_second=self.partition_write_speed_bytes_per_second,
            partition_write_burst_bytes=self.partition_write_burst_bytes,
            attributes=self.attributes,
            consumers=list(map(Consumer.to_public, self.consumers)),
            metering_mode=self.metering_mode.to_public(),
            topic_stats=self.topic_stats.to_public(),
        )

    @dataclass
    class PartitionInfo(IFromProto, IToPublic):
        """Description of one partition of the topic."""

        partition_id: int
        active: bool
        child_partition_ids: List[int]
        parent_partition_ids: List[int]
        partition_stats: "PartitionStats"

        @staticmethod
        def from_proto(
            msg: Optional[ydb_topic_pb2.DescribeTopicResult.PartitionInfo],
        ) -> Optional["DescribeTopicResult.PartitionInfo"]:
            """Parse from the grpc message; a None message yields None."""
            if msg is None:
                return None

            return DescribeTopicResult.PartitionInfo(
                partition_id=msg.partition_id,
                active=msg.active,
                child_partition_ids=list(msg.child_partition_ids),
                parent_partition_ids=list(msg.parent_partition_ids),
                partition_stats=PartitionStats.from_proto(msg.partition_stats),
            )

        def to_public(
            self,
        ) -> ydb_topic_public_types.PublicDescribeTopicResult.PartitionInfo:
            """Convert to the public partition info type."""
            # Stats are optional: present only when requested with include_stats.
            partition_stats = None
            if self.partition_stats is not None:
                partition_stats = self.partition_stats.to_public()
            return ydb_topic_public_types.PublicDescribeTopicResult.PartitionInfo(
                partition_id=self.partition_id,
                active=self.active,
                child_partition_ids=self.child_partition_ids,
                parent_partition_ids=self.parent_partition_ids,
                partition_stats=partition_stats,
            )

    @dataclass
    class TopicStats(IFromProto, IToPublic):
        """Aggregated statistics of the whole topic."""

        store_size_bytes: int
        min_last_write_time: datetime.datetime
        max_write_time_lag: datetime.timedelta
        bytes_written: "MultipleWindowsStat"

        @staticmethod
        def from_proto(
            msg: Optional[ydb_topic_pb2.DescribeTopicResult.TopicStats],
        ) -> Optional["DescribeTopicResult.TopicStats"]:
            """Parse from the grpc message; a None message yields None."""
            if msg is None:
                return None

            return DescribeTopicResult.TopicStats(
                store_size_bytes=msg.store_size_bytes,
                min_last_write_time=datetime_from_proto_timestamp(msg.min_last_write_time),
                max_write_time_lag=timedelta_from_proto_duration(msg.max_write_time_lag),
                bytes_written=MultipleWindowsStat.from_proto(msg.bytes_written),
            )

        def to_public(
            self,
        ) -> ydb_topic_public_types.PublicDescribeTopicResult.TopicStats:
            """Convert to the public topic stats type."""
            return ydb_topic_public_types.PublicDescribeTopicResult.TopicStats(
                store_size_bytes=self.store_size_bytes,
                min_last_write_time=self.min_last_write_time,
                max_write_time_lag=self.max_write_time_lag,
                bytes_written=self.bytes_written.to_public(),
            )
+
+
@dataclass
class PartitionStats(IFromProto, IToPublic):
    """Server statistics for a single partition."""

    partition_offsets: OffsetsRange
    store_size_bytes: int
    last_write_time: datetime.datetime
    max_write_time_lag: datetime.timedelta
    bytes_written: "MultipleWindowsStat"
    partition_node_id: int

    @staticmethod
    def from_proto(
        msg: Optional[ydb_topic_pb2.PartitionStats],
    ) -> Optional["PartitionStats"]:
        """Parse from the grpc message; a None message yields None."""
        if msg is None:
            return None
        return PartitionStats(
            partition_offsets=OffsetsRange.from_proto(msg.partition_offsets),
            store_size_bytes=msg.store_size_bytes,
            last_write_time=datetime_from_proto_timestamp(msg.last_write_time),
            max_write_time_lag=timedelta_from_proto_duration(msg.max_write_time_lag),
            bytes_written=MultipleWindowsStat.from_proto(msg.bytes_written),
            partition_node_id=msg.partition_node_id,
        )

    def to_public(self) -> ydb_topic_public_types.PublicPartitionStats:
        """Convert to the public stats type; the offsets range is flattened."""
        return ydb_topic_public_types.PublicPartitionStats(
            partition_start=self.partition_offsets.start,
            partition_end=self.partition_offsets.end,
            store_size_bytes=self.store_size_bytes,
            last_write_time=self.last_write_time,
            max_write_time_lag=self.max_write_time_lag,
            bytes_written=self.bytes_written.to_public(),
            partition_node_id=self.partition_node_id,
        )
diff --git a/contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/ydb_topic_public_types.py b/contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/ydb_topic_public_types.py
new file mode 100644
index 0000000000..df280a8bb5
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/ydb_topic_public_types.py
@@ -0,0 +1,198 @@
+import datetime
+import typing
+from dataclasses import dataclass, field
+from enum import IntEnum
+from typing import Optional, List, Union, Dict
+
+# Workaround for good IDE and universal for runtime
+if typing.TYPE_CHECKING:
+ from ..v4.protos import ydb_topic_pb2
+else:
+ from ..common.protos import ydb_topic_pb2
+
+from .common_utils import IToProto
+from ...scheme import SchemeEntry
+
+
# NOTE: keep this struct in sync with PublicDescribeTopicResult.
@dataclass
class CreateTopicRequestParams:
    """Public parameters of create_topic; None fields use server defaults."""

    path: str
    min_active_partitions: Optional[int]
    partition_count_limit: Optional[int]
    retention_period: Optional[datetime.timedelta]
    retention_storage_mb: Optional[int]
    supported_codecs: Optional[List[Union["PublicCodec", int]]]
    partition_write_speed_bytes_per_second: Optional[int]
    partition_write_burst_bytes: Optional[int]
    attributes: Optional[Dict[str, str]]
    # Consumers may be given as full PublicConsumer objects or as bare names.
    consumers: Optional[List[Union["PublicConsumer", str]]]
    metering_mode: Optional["PublicMeteringMode"]
+
+
class PublicCodec(int):
    """
    Codec value may contain any int number.

    Values below are only well-known predefined values,
    but the protocol supports custom codecs.
    """

    UNSPECIFIED = 0
    RAW = 1
    GZIP = 2
    LZOP = 3  # Has not supported codec in standard library
    ZSTD = 4  # Has not supported codec in standard library
+
+
class PublicMeteringMode(IntEnum):
    """Billing mode of a serverless topic; values mirror the proto enum."""

    UNSPECIFIED = 0
    RESERVED_CAPACITY = 1
    REQUEST_UNITS = 2
+
+
@dataclass
class PublicConsumer:
    """Public settings of a single topic consumer."""

    # Consumer name (the only required field).
    name: str

    # Consumer may be marked as 'important'. It means messages for this
    # consumer will never expire due to retention. User should take care that
    # such consumer never stalls, to prevent running out of disk space.
    important: bool = False

    # All messages with smaller server written_at timestamp will be skipped.
    read_from: Optional[datetime.datetime] = None

    # List of supported codecs by this consumer.
    # supported_codecs on topic must be contained inside this list.
    supported_codecs: List["PublicCodec"] = field(default_factory=list)

    # Attributes of consumer.
    attributes: Dict[str, str] = field(default_factory=dict)
+ "Attributes of consumer"
+
+
@dataclass
class DropTopicRequestParams(IToProto):
    """Public parameters of drop_topic."""

    path: str

    def to_proto(self) -> ydb_topic_pb2.DropTopicRequest:
        """Serialize to the grpc message."""
        return ydb_topic_pb2.DropTopicRequest(path=self.path)
+
+
@dataclass
class DescribeTopicRequestParams(IToProto):
    """Public parameters of describe_topic."""

    path: str
    # When True the server also fills partition/topic statistics.
    include_stats: bool

    def to_proto(self) -> ydb_topic_pb2.DescribeTopicRequest:
        """Serialize to the grpc message."""
        return ydb_topic_pb2.DescribeTopicRequest(path=self.path, include_stats=self.include_stats)
+
+
# Need similar struct to CreateTopicRequestParams
@dataclass
class PublicDescribeTopicResult:
    """Public result of describe_topic."""

    self: SchemeEntry
    "Description of scheme object"

    min_active_partitions: int
    "Minimum partition count auto merge would stop working at"

    partition_count_limit: int
    "Limit for total partition count, including active (open for write) and read-only partitions"

    partitions: List["PublicDescribeTopicResult.PartitionInfo"]
    "Partitions description"

    retention_period: datetime.timedelta
    "How long data in partition should be stored"

    retention_storage_mb: int
    "How much data in partition should be stored. Zero value means infinite limit"

    supported_codecs: List[PublicCodec]
    "List of allowed codecs for writers"

    partition_write_speed_bytes_per_second: int
    "Partition write speed in bytes per second"

    partition_write_burst_bytes: int
    "Burst size for write in partition, in bytes"

    attributes: Dict[str, str]
    """User and server attributes of topic. Server attributes starts from "_" and will be validated by server."""

    consumers: List[PublicConsumer]
    """List of consumers for this topic"""

    metering_mode: PublicMeteringMode
    "Metering settings"

    topic_stats: "PublicDescribeTopicResult.TopicStats"
    "Statistics of topic"

    @dataclass
    class PartitionInfo:
        """Public description of one partition."""

        partition_id: int
        "Partition identifier"

        active: bool
        "Is partition open for write"

        child_partition_ids: List[int]
        "Ids of partitions which was formed when this partition was split or merged"

        parent_partition_ids: List[int]
        "Ids of partitions from which this partition was formed by split or merge"

        partition_stats: Optional["PublicPartitionStats"]
        "Stats for partition, filled only when include_stats in request is true"

    @dataclass
    class TopicStats:
        """Public aggregated statistics of the whole topic."""

        store_size_bytes: int
        "Approximate size of topic"

        min_last_write_time: datetime.datetime
        "Minimum of timestamps of last write among all partitions."

        max_write_time_lag: datetime.timedelta
        """
        Maximum of differences between write timestamp and create timestamp for all messages,
        written during last minute.
        """

        bytes_written: "PublicMultipleWindowsStat"
        "How much bytes were written statistics."
+
+
@dataclass
class PublicPartitionStats:
    """Public statistics of a single partition."""

    partition_start: int
    "first message offset in the partition"

    partition_end: int
    "offset after last stored message offset in the partition (last offset + 1)"

    store_size_bytes: int
    "Approximate size of partition"

    last_write_time: datetime.datetime
    "Timestamp of last write"

    max_write_time_lag: datetime.timedelta
    "Maximum of differences between write timestamp and create timestamp for all messages, written during last minute."

    bytes_written: "PublicMultipleWindowsStat"
    "How much bytes were written during several windows in this partition."

    partition_node_id: int
    "Host where tablet for this partition works. Useful for debugging purposes."
+
+
@dataclass
class PublicMultipleWindowsStat:
    """A counter aggregated over one-minute, one-hour and one-day windows."""

    per_minute: int
    per_hour: int
    per_day: int
diff --git a/contrib/python/ydb/py3/ydb/_session_impl.py b/contrib/python/ydb/py3/ydb/_session_impl.py
new file mode 100644
index 0000000000..a61612bcd7
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_session_impl.py
@@ -0,0 +1,456 @@
+import functools
+from google.protobuf.empty_pb2 import Empty
+from . import issues, types, _apis, convert, scheme, operation, _utilities
+
+X_YDB_SERVER_HINTS = "x-ydb-server-hints"
+X_YDB_SESSION_CLOSE = "session-close"
+
+
def _check_session_is_closing(rpc_state, session_state):
    """Mark *session_state* as closing when the server requested it.

    The server signals a graceful session close by placing the
    ``session-close`` hint into the ``x-ydb-server-hints`` trailing
    metadata of a response.
    """
    hints = rpc_state.trailing_metadata().get(X_YDB_SERVER_HINTS, [])
    if X_YDB_SESSION_CLOSE in hints:
        session_state.set_closing()
+
+
def bad_session_handler(func):
    """Decorator for response handlers: reset session state on BadSession.

    Before delegating to the wrapped handler it also inspects trailing
    metadata so that server-initiated session-close hints are honored.
    The BadSession error is re-raised after the local state is reset.
    """

    @functools.wraps(func)
    def wrapper(rpc_state, response_pb, session_state, *args, **kwargs):
        try:
            _check_session_is_closing(rpc_state, session_state)
            return func(rpc_state, response_pb, session_state, *args, **kwargs)
        except issues.BadSession:
            session_state.reset()
            raise

    return wrapper
+
+
@bad_session_handler
def wrap_prepare_query_response(rpc_state, response_pb, session_state, yql_text):
    """Unpack a PrepareDataQuery response and cache the prepared query.

    Returns a DataQuery built from the original YQL text and the
    parameter types reported by the server.
    """
    session_state.complete_query()
    issues._process_response(response_pb.operation)
    result = _apis.ydb_table.PrepareQueryResult()
    response_pb.operation.result.Unpack(result)
    prepared = types.DataQuery(yql_text, result.parameters_types)
    session_state.keep(prepared, result.query_id)
    return prepared
+
+
def prepare_request_factory(session_state, yql_text):
    """Build a PrepareDataQueryRequest bound to the current session."""
    request = _apis.ydb_table.PrepareDataQueryRequest()
    request.yql_text = yql_text
    return session_state.start_query().attach_request(request)
+
+
class AlterTableOperation(operation.Operation):
    """Operation handle returned by AlterTable; remembers server readiness."""

    def __init__(self, rpc_state, response_pb, driver):
        super(AlterTableOperation, self).__init__(rpc_state, response_pb, driver)
        # True when the server already completed the alter, so no polling is needed.
        self.ready = response_pb.operation.ready
+
+
def copy_tables_request_factory(session_state, source_destination_pairs):
    """Build a CopyTablesRequest from (source_path, destination_path) pairs."""
    request = session_state.attach_request(_apis.ydb_table.CopyTablesRequest())
    for source, destination in source_destination_pairs:
        item = request.tables.add()
        item.source_path = source
        item.destination_path = destination
    return request
+
+
def rename_tables_request_factory(session_state, rename_items):
    """Build a RenameTablesRequest from items carrying source/destination/replace."""
    request = session_state.attach_request(_apis.ydb_table.RenameTablesRequest())
    for rename in rename_items:
        entry = request.tables.add()
        entry.source_path = rename.source_path
        entry.destination_path = rename.destination_path
        entry.replace_destination = rename.replace_destination
    return request
+
+
def explain_data_query_request_factory(session_state, yql_text):
    """Build an ExplainDataQueryRequest for *yql_text* on the session."""
    request = _apis.ydb_table.ExplainDataQueryRequest()
    request.yql_text = yql_text
    return session_state.start_query().attach_request(request)
+
+
+class _ExplainResponse(object):
+ def __init__(self, ast, plan):
+ self.query_ast = ast
+ self.query_plan = plan
+
+
def wrap_explain_response(rpc_state, response_pb, session_state):
    """Unpack an ExplainQueryResult into an _ExplainResponse."""
    session_state.complete_query()
    issues._process_response(response_pb.operation)
    result = _apis.ydb_table.ExplainQueryResult()
    response_pb.operation.result.Unpack(result)
    return _ExplainResponse(result.query_ast, result.query_plan)
+
+
@bad_session_handler
def wrap_execute_scheme_result(rpc_state, response_pb, session_state):
    """Unpack an ExecuteQueryResult and convert its result sets to public form."""
    session_state.complete_query()
    issues._process_response(response_pb.operation)
    result = _apis.ydb_table.ExecuteQueryResult()
    response_pb.operation.result.Unpack(result)
    return convert.ResultSets(result.result_sets)
+
+
def execute_scheme_request_factory(session_state, yql_text):
    """Build an ExecuteSchemeQueryRequest for *yql_text* on the session."""
    request = _apis.ydb_table.ExecuteSchemeQueryRequest()
    request.yql_text = yql_text
    return session_state.start_query().attach_request(request)
+
+
@bad_session_handler
def wrap_describe_table_response(rpc_state, response_pb, session_state, scheme_entry_cls):
    """Unpack a DescribeTable response into a scheme entry instance.

    Fixes the parameter typo ``sesssion_state`` -> ``session_state`` so the
    signature matches every other @bad_session_handler-decorated handler in
    this module (the decorator passes it positionally, so callers are
    unaffected).

    :param rpc_state: RPC state with trailing metadata (consumed by the decorator)
    :param response_pb: DescribeTableResponse protobuf message
    :param session_state: client-side session state
    :param scheme_entry_cls: class used to construct the returned entry
    :return: scheme entry built from the unpacked DescribeTableResult
    """
    issues._process_response(response_pb.operation)
    message = _apis.ydb_table.DescribeTableResult()
    response_pb.operation.result.Unpack(message)
    # Optional submessages are passed as None when absent so the wrapper can
    # distinguish "not returned by the server" from an empty value.
    return scheme._wrap_scheme_entry(
        message.self,
        scheme_entry_cls,
        message.columns,
        message.primary_key,
        message.shard_key_bounds,
        message.indexes,
        message.table_stats if message.HasField("table_stats") else None,
        message.ttl_settings if message.HasField("ttl_settings") else None,
        message.attributes,
        message.partitioning_settings if message.HasField("partitioning_settings") else None,
        message.column_families,
        message.key_bloom_filter,
        message.read_replicas_settings if message.HasField("read_replicas_settings") else None,
        message.storage_settings if message.HasField("storage_settings") else None,
    )
+
+
def explicit_partitions_factory(primary_key, columns, split_points):
    """Build an ExplicitPartitions proto from user-provided split points.

    :param primary_key: ordered collection of primary-key column names
    :param columns: table column descriptions (used to resolve key column types)
    :param split_points: objects whose ``.value`` is a tuple forming a prefix
        of the primary key
    :return: populated ``_apis.ydb_table.ExplicitPartitions`` message
    """
    # Map each primary-key column name to its declared type.
    column_types = {}
    pk = set(primary_key)
    for column in columns:
        if column.name in pk:
            column_types[column.name] = column.type

    explicit_partitions = _apis.ydb_table.ExplicitPartitions()
    for split_point in split_points:
        typed_value = explicit_partitions.split_points.add()
        split_point_type = types.TupleType()
        prefix_size = len(split_point.value)
        # A split point may cover only a prefix of the primary-key columns,
        # so build the tuple type from exactly that many key columns.
        for pl_el_id, pk_name in enumerate(primary_key):
            if pl_el_id >= prefix_size:
                break

            split_point_type.add_element(column_types[pk_name])

        typed_value.type.MergeFrom(split_point_type.proto)
        typed_value.value.MergeFrom(convert.from_native_value(split_point_type.proto, split_point.value))

    return explicit_partitions
+
+
def create_table_request_factory(session_state, path, table_description):
    """Build a CreateTableRequest for *path* from a table description.

    *table_description* may already be a ``CreateTableRequest`` proto, in
    which case it is attached to the session unchanged. Otherwise each
    populated field of the description is copied into a fresh request.
    """
    if isinstance(table_description, _apis.ydb_table.CreateTableRequest):
        request = session_state.attach_request(table_description)
        return request

    request = _apis.ydb_table.CreateTableRequest()
    request.path = path
    request.primary_key.extend(list(table_description.primary_key))
    for column in table_description.columns:
        request.columns.add(name=column.name, type=column.type_pb, family=column.family)

    if table_description.profile is not None:
        request.profile.MergeFrom(table_description.profile.to_pb(table_description))

    for index in table_description.indexes:
        request.indexes.add().MergeFrom(index.to_pb())

    if table_description.ttl_settings is not None:
        request.ttl_settings.MergeFrom(table_description.ttl_settings.to_pb())

    request.attributes.update(table_description.attributes)

    if table_description.column_families:
        for column_family in table_description.column_families:
            request.column_families.add().MergeFrom(column_family.to_pb())

    if table_description.storage_settings is not None:
        request.storage_settings.MergeFrom(table_description.storage_settings.to_pb())

    if table_description.read_replicas_settings is not None:
        request.read_replicas_settings.MergeFrom(table_description.read_replicas_settings.to_pb())

    if table_description.partitioning_settings is not None:
        request.partitioning_settings.MergeFrom(table_description.partitioning_settings.to_pb())

    request.key_bloom_filter = table_description.key_bloom_filter
    if table_description.compaction_policy is not None:
        request.compaction_policy = table_description.compaction_policy
    # Explicit split points take precedence over uniform partitioning.
    if table_description.partition_at_keys is not None:
        request.partition_at_keys.MergeFrom(
            explicit_partitions_factory(
                list(table_description.primary_key),
                table_description.columns,
                table_description.partition_at_keys.split_points,
            )
        )

    elif table_description.uniform_partitions > 0:
        request.uniform_partitions = table_description.uniform_partitions

    return session_state.attach_request(request)
+
+
def keep_alive_request_factory(session_state):
    """Build a KeepAliveRequest bound to the current session."""
    return session_state.attach_request(_apis.ydb_table.KeepAliveRequest())
+
+
@bad_session_handler
def cleanup_session(rpc_state, response_pb, session_state, session):
    """Handle a DeleteSession response: validate it and forget local state."""
    issues._process_response(response_pb.operation)
    session_state.reset()
    return session
+
+
@bad_session_handler
def initialize_session(rpc_state, response_pb, session_state, session):
    """Handle a CreateSession response: store the session id and its endpoint."""
    issues._process_response(response_pb.operation)
    message = _apis.ydb_table.CreateSessionResult()
    response_pb.operation.result.Unpack(message)
    # Pin the session to the endpoint that created it; later requests must go there.
    session_state.set_id(message.session_id).attach_endpoint(rpc_state.endpoint_key)
    return session
+
+
@bad_session_handler
def wrap_operation(rpc_state, response_pb, session_state, driver=None):
    """Wrap a session-scoped response into a generic Operation handle."""
    return operation.Operation(rpc_state, response_pb, driver)
+
+
def wrap_operation_bulk_upsert(rpc_state, response_pb, driver=None):
    """Wrap a BulkUpsert response into an Operation handle.

    Bulk upsert is session-less, so no bad-session handling is involved.
    """
    return operation.Operation(rpc_state, response_pb, driver)
+
+
@bad_session_handler
def wrap_keep_alive_response(rpc_state, response_pb, session_state, session):
    """Validate a KeepAlive response and return the (still alive) session."""
    issues._process_response(response_pb.operation)
    return session
+
+
def describe_table_request_factory(session_state, path, settings=None):
    """Build a DescribeTableRequest, honoring optional describe settings.

    Settings flags are copied onto the request only when the settings object
    is provided, exposes them, and they are truthy.
    """
    request = session_state.attach_request(_apis.ydb_table.DescribeTableRequest())
    request.path = path

    if settings is not None and getattr(settings, "include_shard_key_bounds", False):
        request.include_shard_key_bounds = settings.include_shard_key_bounds

    if settings is not None and getattr(settings, "include_table_stats", False):
        request.include_table_stats = settings.include_table_stats

    return request
+
+
def alter_table_request_factory(
    session_state,
    path,
    add_columns,
    drop_columns,
    alter_attributes,
    add_indexes,
    drop_indexes,
    set_ttl_settings,
    drop_ttl_settings,
    add_column_families,
    alter_column_families,
    alter_storage_settings,
    set_compaction_policy,
    alter_partitioning_settings,
    set_key_bloom_filter,
    set_read_replicas_settings,
):
    """Build an AlterTableRequest for *path*.

    Every argument except *path* is optional: pass ``None`` to leave the
    corresponding aspect of the table unchanged. Only non-None arguments
    are copied into the request.
    """
    request = session_state.attach_request(_apis.ydb_table.AlterTableRequest(path=path))
    if add_columns is not None:
        for column in add_columns:
            request.add_columns.add(name=column.name, type=column.type_pb)

    if drop_columns is not None:
        request.drop_columns.extend(list(drop_columns))

    if drop_indexes is not None:
        request.drop_indexes.extend(list(drop_indexes))

    if add_indexes is not None:
        for index in add_indexes:
            request.add_indexes.add().MergeFrom(index.to_pb())

    if alter_attributes is not None:
        request.alter_attributes.update(alter_attributes)

    if set_ttl_settings is not None:
        request.set_ttl_settings.MergeFrom(set_ttl_settings.to_pb())

    # drop_ttl_settings is a flag: request TTL removal only when truthy.
    if drop_ttl_settings is not None and drop_ttl_settings:
        request.drop_ttl_settings.MergeFrom(Empty())

    if add_column_families is not None:
        for column_family in add_column_families:
            request.add_column_families.add().MergeFrom(column_family.to_pb())

    if alter_column_families is not None:
        for column_family in alter_column_families:
            request.alter_column_families.add().MergeFrom(column_family.to_pb())

    if alter_storage_settings is not None:
        request.alter_storage_settings.MergeFrom(alter_storage_settings.to_pb())

    if set_compaction_policy is not None:
        request.set_compaction_policy = set_compaction_policy

    if alter_partitioning_settings is not None:
        request.alter_partitioning_settings.MergeFrom(alter_partitioning_settings.to_pb())

    if set_key_bloom_filter is not None:
        request.set_key_bloom_filter = set_key_bloom_filter

    if set_read_replicas_settings is not None:
        request.set_read_replicas_settings.MergeFrom(set_read_replicas_settings.to_pb())

    return request
+
+
def read_table_request_factory(
    session_state,
    path,
    key_range=None,
    columns=None,
    ordered=False,
    row_limit=None,
    use_snapshot=None,
):
    """Build a ReadTableRequest for streaming rows of the table at *path*.

    :param key_range: optional bounds; inclusive bounds map to the
        ``*_or_equal`` proto fields, exclusive bounds to the strict ones
    :param columns: optional subset of columns to read
    :param ordered: request ordered output
    :param row_limit: optional cap on the number of rows returned
    :param use_snapshot: bool (mapped to FeatureFlag) or a raw FeatureFlag value
    """
    request = _apis.ydb_table.ReadTableRequest()
    request.path = path
    request.ordered = ordered
    if key_range is not None and key_range.from_bound is not None:
        target_attribute = "greater_or_equal" if key_range.from_bound.is_inclusive() else "greater"
        getattr(request.key_range, target_attribute).MergeFrom(
            convert.to_typed_value_from_native(key_range.from_bound.type, key_range.from_bound.value)
        )

    if key_range is not None and key_range.to_bound is not None:
        target_attribute = "less_or_equal" if key_range.to_bound.is_inclusive() else "less"
        getattr(request.key_range, target_attribute).MergeFrom(
            convert.to_typed_value_from_native(key_range.to_bound.type, key_range.to_bound.value)
        )

    if columns is not None:
        for column in columns:
            request.columns.append(column)
    if row_limit:
        # NOTE(gvit): pylint cannot understand that row_limit is not None
        request.row_limit = row_limit  # pylint: disable=E5903
    if use_snapshot is not None:
        if isinstance(use_snapshot, bool):
            if use_snapshot:
                request.use_snapshot = _apis.FeatureFlag.ENABLED
            else:
                request.use_snapshot = _apis.FeatureFlag.DISABLED
        else:
            request.use_snapshot = use_snapshot
    return session_state.attach_request(request)
+
+
def bulk_upsert_request_factory(table, rows, column_types):
    """Build a BulkUpsertRequest with *rows* encoded as a typed list value."""
    request = _apis.ydb_table.BulkUpsertRequest()
    request.table = table
    row_list_type = types.ListType(column_types).proto
    request.rows.MergeFrom(convert.to_typed_value_from_native(row_list_type, rows))
    return request
+
+
def wrap_read_table_response(response):
    """Convert one streaming ReadTable response chunk into a ResultSet."""
    issues._process_response(response)
    # Snapshot info is only present when the server attached it to this chunk.
    snapshot = response.snapshot if response.HasField("snapshot") else None
    return convert.ResultSet.from_message(response.result.result_set, snapshot=snapshot)
+
+
class SessionState(object):
    """Mutable client-side state of a YDB table session.

    Tracks the server-assigned session id, the endpoint the session is
    pinned to, a bounded cache of prepared queries, whether the server asked
    to close the session, and whether a query is currently in flight (a
    session serves at most one query at a time).
    """

    def __init__(self, table_client_settings):
        self._session_id = None
        # Prepared-query cache: query name -> (DataQuery, server query_id or None).
        self._query_cache = _utilities.LRUCache(1000)
        self._default = (None, None)
        self._pending_query = False
        self._endpoint = None
        self._closing = False
        self._client_cache_enabled = table_client_settings._client_query_cache_enabled
        self.table_client_settings = table_client_settings

    def __contains__(self, query):
        """True when *query* has a cached prepared form."""
        return self.lookup(query) != self._default

    def reset(self):
        """Forget session id, endpoint and cached queries (e.g. on BadSession)."""
        self._query_cache = _utilities.LRUCache(1000)
        self._session_id = None
        self._pending_query = False
        self._endpoint = None

    def attach_endpoint(self, endpoint):
        """Remember the endpoint this session was created on; returns self."""
        self._endpoint = endpoint
        return self

    def set_closing(self):
        """Mark that the server asked to gracefully close this session."""
        self._closing = True
        return self

    def closing(self):
        return self._closing

    @property
    def endpoint(self):
        return self._endpoint

    @property
    def session_id(self):
        return self._session_id

    def pending_query(self):
        return self._pending_query

    def set_id(self, session_id):
        """Store the server-assigned session id; returns self."""
        self._session_id = session_id
        return self

    def keep(self, query, query_id):
        """Cache a prepared *query*; the server query_id is kept only when the
        client query cache is enabled."""
        if self._client_cache_enabled:
            self._query_cache.put(query.name, (query, query_id))
        else:
            self._query_cache.put(query.name, (query, None))
        return self

    @staticmethod
    def _query_key(query):
        # DataQuery instances are keyed by name, raw YQL text by its hash.
        return query.name if isinstance(query, types.DataQuery) else _utilities.get_query_hash(query)

    def lookup(self, query):
        return self._query_cache.get(self._query_key(query), self._default)

    def erase(self, query):
        # NOTE(review): assumes the query is present; on a cache miss lookup()
        # returns (None, None) and `query.name` would raise AttributeError —
        # confirm callers only erase cached queries.
        query, _ = self.lookup(query)
        self._query_cache.erase(query.name)

    def complete_query(self):
        """Mark the in-flight query as finished; returns self."""
        self._pending_query = False
        return self

    def start_query(self):
        """Mark a query as in flight; raises BadSession if one already is."""
        if self._pending_query:
            # don't invalidate session at this point
            self.reset()
            raise issues.BadSession("Pending previous query completion!")
        self._pending_query = True
        return self

    def attach_request(self, request):
        """Stamp the current session id onto *request*; raises if no session."""
        if self._session_id is None:
            raise issues.BadSession("Empty session_id")
        request.session_id = self._session_id
        return request
diff --git a/contrib/python/ydb/py3/ydb/_sp_impl.py b/contrib/python/ydb/py3/ydb/_sp_impl.py
new file mode 100644
index 0000000000..dfea89f9c4
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_sp_impl.py
@@ -0,0 +1,397 @@
+# -*- coding: utf-8 -*-
+import collections
+from concurrent import futures
+import queue
+import time
+import threading
+from . import settings, issues, _utilities, tracing
+
+
class SessionPoolImpl(object):
    """Thread-safe pool of YDB table sessions.

    Free sessions live in a priority queue keyed by an expiration timestamp;
    callers that find the queue empty register a waiter (a future) that is
    fulfilled when a session is returned or created. A background thread
    drains an event queue and sends keep-alive probes to idle sessions.
    """

    def __init__(
        self,
        logger,
        driver,
        size,
        workers_threads_count=4,
        initializer=None,
        min_pool_size=0,
    ):
        self._lock = threading.RLock()
        # Waiters in insertion order so sessions are handed out FIFO.
        self._waiters = collections.OrderedDict()
        self._driver = driver
        if hasattr(driver, "_driver_config"):
            self.tracer = driver._driver_config.tracer
        else:
            self.tracer = tracing.Tracer(None)
        # Free sessions, prioritized by expiration timestamp (see put()).
        self._active_queue = queue.PriorityQueue()
        self._active_count = 0
        self._size = size
        self._req_settings = settings.BaseRequestSettings().with_timeout(3)
        self._tp = futures.ThreadPoolExecutor(workers_threads_count)
        self._initializer = initializer
        self._should_stop = threading.Event()
        # Sessions within 4 minutes of their priority deadline get a keep-alive.
        self._keep_alive_threshold = 4 * 60
        self._spin_timeout = 30
        self._event_queue = queue.Queue()
        self._driver_await_timeout = 3
        self._event_loop_thread = threading.Thread(target=self.events_loop)
        self._event_loop_thread.daemon = True
        self._event_loop_thread.start()
        self._logger = logger
        self._min_pool_size = min_pool_size
        self._terminating = False
        if self._min_pool_size > self._size:
            raise ValueError("Invalid min pool size value!")
        for _ in range(self._min_pool_size):
            self._prepare(self._create())

    def stop(self, timeout):
        """Stop the pool: cancel waiters, destroy free sessions, join the
        event-loop thread (waiting up to *timeout* seconds)."""
        with self._lock:
            self._logger.debug("Requested session pool stop.")
            self._event_queue.put(self._terminate_event)
            self._should_stop.set()
            self._terminating = True

            self._logger.debug("Session pool is under stop, cancelling all in flight waiters.")
            while True:
                try:
                    _, waiter = self._waiters.popitem(last=False)
                    session = self._create()
                    waiter.set_result(session)
                    self._logger.debug(
                        "Waiter %s has been replied with empty session info. Session details: %s.",
                        waiter,
                        session,
                    )
                except KeyError:
                    break

            self._logger.debug("Destroying sessions in active queue")
            while True:
                try:
                    _, session = self._active_queue.get(block=False)
                    self._destroy(session, "session-pool-terminated")

                except queue.Empty:
                    break

            self._logger.debug("Destroyed active sessions")

        self._event_loop_thread.join(timeout)

    def _terminate_event(self):
        # Posted to the event queue; StopIteration breaks events_loop.
        self._logger.debug("Terminated session pool.")
        raise StopIteration()

    def _delayed_prepare(self, session):
        """Retry preparing *session* after waiting for the driver to be ready."""
        try:
            self._driver.wait(self._driver_await_timeout, fail_fast=False)
        except Exception:
            pass

        self._prepare(session)

    def pick(self):
        """Pop a free session that is close enough to expiry to need a
        keep-alive probe; return None when none qualifies."""
        with self._lock:
            try:
                priority, session = self._active_queue.get_nowait()
            except queue.Empty:
                return None

            till_expire = priority - time.time()
            if till_expire < self._keep_alive_threshold:
                return session
            # Not yet due for keep-alive: put it back unchanged.
            self._active_queue.put((priority, session))
            return None

    def _create(self):
        """Create an uninitialized session object and count it as active."""
        with self._lock:
            session = self._driver.table_client.session()
            self._logger.debug("Created session %s", session)
            self._active_count += 1
            return session

    @property
    def active_size(self):
        with self._lock:
            return self._active_count

    @property
    def free_size(self):
        with self._lock:
            return self._active_queue.qsize()

    @property
    def busy_size(self):
        with self._lock:
            return self._active_count - self._active_queue.qsize()

    @property
    def max_size(self):
        return self._size

    @property
    def waiters_count(self):
        with self._lock:
            return len(self._waiters)

    def _is_min_pool_size_satisfied(self, delta=0):
        # During termination the minimum-size invariant no longer applies.
        if self._terminating:
            return True
        return self._active_count + delta >= self._min_pool_size

    def _destroy(self, session, reason):
        """Drop *session* from the pool, replace it if waiters or the minimum
        pool size require one, and asynchronously delete it on the server."""
        self._logger.debug("Requested session destroy: %s, reason: %s", session, reason)
        with self._lock:
            tracing.trace(self.tracer, {"destroy.reason": reason})
            self._active_count -= 1
            self._logger.debug(
                "Session %s is no longer active. Current active count %d.",
                session,
                self._active_count,
            )
            cnt_waiters = len(self._waiters)
            if cnt_waiters > 0:
                self._logger.debug(
                    "In flight waiters: %d, preparing session %s replacement.",
                    cnt_waiters,
                    session,
                )
                # we have a waiter that should be replied, so we have to prepare replacement
                self._prepare(self._create())
            elif not self._is_min_pool_size_satisfied():
                self._logger.debug(
                    "Current session pool size is less than %s, actual size %s",
                    self._min_pool_size,
                    self._active_count,
                )
                self._prepare(self._create())

        if session.initialized():
            session.async_delete(self._req_settings)
            self._logger.debug("Sent delete on session %s", session)

    def put(self, session):
        """Return *session* to the pool: hand it to the oldest waiter, or
        requeue it; unusable sessions are destroyed and False is returned."""
        with self._lock:
            self._logger.debug("Put on session %s", session)
            if session.closing():
                self._destroy(session, "session-close")
                return False

            if session.pending_query():
                self._destroy(session, "pending-query")
                return False

            if not session.initialized() or self._should_stop.is_set():
                self._destroy(session, "not-initialized")
                # we should probably prepare replacement session here
                return False

            try:
                _, waiter = self._waiters.popitem(last=False)
                waiter.set_result(session)
                tracing.trace(self.tracer, {"put.to_waiter": True})
                self._logger.debug("Replying to waiter with a session %s", session)
            except KeyError:
                # No waiters: requeue with a fresh 10-minute expiration priority.
                priority = time.time() + 10 * 60
                tracing.trace(self.tracer, {"put.to_pool": True, "session.new_priority": priority})
                self._active_queue.put((priority, session))

    def _on_session_create(self, session, f):
        """Callback for async session creation; on failure the prepare is
        retried through the delayed event queue."""
        with self._lock:
            try:
                f.result()
                if self._initializer is None:
                    return self.put(session)
            except issues.Error as e:
                self._logger.error(
                    "Failed to create session. Put event to a delayed queue. Reason: %s",
                    str(e),
                )
                return self._event_queue.put(lambda: self._delayed_prepare(session))

            except Exception as e:
                self._logger.exception(
                    "Failed to create session. Put event to a delayed queue. Reason: %s",
                    str(e),
                )
                return self._event_queue.put(lambda: self._delayed_prepare(session))

            # Run the user-supplied initializer on the worker pool before
            # handing the session out.
            init_f = self._tp.submit(self._initializer, session)

            def _on_initialize(in_f):
                try:
                    in_f.result()
                    self.put(session)
                except Exception:
                    self._prepare(session)

            init_f.add_done_callback(_on_initialize)

    def _prepare(self, session):
        """Start async server-side creation of *session* unless it is no
        longer needed (pool stopping, or no waiters and minimum satisfied)."""
        if self._should_stop.is_set():
            self._destroy(session, "session-pool-terminated")
            return

        with self._lock:
            self._logger.debug("Preparing session %s", session)
            if len(self._waiters) < 1 and self._is_min_pool_size_satisfied(delta=-1):
                self._logger.info("No pending waiters, will destroy session")
                return self._destroy(session, "session-useless")

            f = session.async_create(self._req_settings)
            f.add_done_callback(lambda _: self._on_session_create(session, _))

    def _waiter_cleanup(self, w):
        # Done-callback: drop a finished/cancelled waiter from the registry.
        with self._lock:
            try:
                self._waiters.pop(w)
            except KeyError:
                return None

    def subscribe(self):
        """Return a future that resolves to a session: immediately if one is
        free, otherwise when a session is returned or created."""
        with self._lock:
            try:
                _, session = self._active_queue.get(block=False)
                tracing.trace(self.tracer, {"acquire.found_free_session": True})
                return _utilities.wrap_result_in_future(session)
            except queue.Empty:
                self._logger.debug("Active session queue is empty, subscribe waiter for a session")
                waiter = _utilities.future()
                self._logger.debug("Subscribe waiter %s", waiter)
                if self._should_stop.is_set():
                    tracing.trace(
                        self.tracer,
                        {
                            "acquire.found_free_session": False,
                            "acquire.empty_session": True,
                        },
                    )
                    # Pool is stopping: reply with an uninitialized session.
                    session = self._create()
                    self._logger.debug(
                        "Session pool is under stop, replying with empty session, %s",
                        session,
                    )
                    waiter.set_result(session)
                    return waiter

                waiter.add_done_callback(self._waiter_cleanup)
                self._waiters[waiter] = waiter
                if self._active_count < self._size:
                    self._logger.debug(
                        "Session pool is not large enough (active_count < size: %d < %d). "
                        "will create a new session.",
                        self._active_count,
                        self._size,
                    )
                    tracing.trace(
                        self.tracer,
                        {
                            "acquire.found_free_session": False,
                            "acquire.creating_new_session": True,
                            "session_pool.active_size": self._active_count,
                            "session_pool.size": self._size,
                        },
                    )
                    session = self._create()
                    self._prepare(session)
                else:
                    tracing.trace(
                        self.tracer,
                        {
                            "acquire.found_free_session": False,
                            "acquire.creating_new_session": False,
                            "session_pool.active_size": self._active_count,
                            "session_pool.size": self._size,
                            "acquire.waiting_for_free_session": True,
                        },
                    )
                return waiter

    def unsubscribe(self, waiter):
        """Cancel interest in *waiter*; if it was already fulfilled, return
        its session to the pool."""
        with self._lock:
            try:
                # at first we remove waiter from list of the waiters to ensure
                # we will not signal it right now
                self._logger.debug("Unsubscribe on waiter %s", waiter)
                self._waiters.pop(waiter)
            except KeyError:
                try:
                    session = waiter.result(timeout=-1)
                    self.put(session)
                except (futures.CancelledError, futures.TimeoutError):
                    # future is cancelled and not signalled
                    pass

    def _on_keep_alive(self, session, f):
        try:
            self.put(f.result())
            # additional logic should be added to check
            # current status of the session
        except issues.Error:
            self._destroy(session, "keep-alive-error")
        except Exception:
            self._destroy(session, "keep-alive-error")

    def acquire(self, blocking=True, timeout=None):
        """Take a session from the pool.

        :param blocking: wait up to *timeout* seconds for a free session;
            when False, fail immediately if none is free
        :raises ValueError: if the pool is already stopped
        :raises issues.SessionPoolEmpty: on timeout / empty pool
        """
        if self._should_stop.is_set():
            self._logger.error("Take session from closed session pool")
            raise ValueError("Take session from closed session pool.")

        waiter = self.subscribe()
        has_result = False
        if blocking:
            tracing.trace(self.tracer, {"acquire.blocking": True})
            try:
                tracing.trace(self.tracer, {"acquire.blocking.wait": True})
                session = waiter.result(timeout=timeout)
                has_result = True
                return session
            except futures.TimeoutError:
                tracing.trace(self.tracer, {"acquire.blocking.timeout": True})
                raise issues.SessionPoolEmpty("Timeout on session acquire.")
            finally:
                if not has_result:
                    self.unsubscribe(waiter)

        else:
            tracing.trace(self.tracer, {"acquire.nonblocking": True})
            try:
                session = waiter.result(timeout=-1)
                has_result = True
                return session
            except futures.TimeoutError:
                raise issues.SessionPoolEmpty("Session pool is empty.")
            finally:
                if not has_result:
                    self.unsubscribe(waiter)

    def events_loop(self):
        """Background thread: run queued events; on idle timeout, send
        keep-alive probes to sessions nearing expiry."""
        while True:
            try:
                if self._should_stop.is_set():
                    break

                event = self._event_queue.get(timeout=self._spin_timeout)
                event()
            except StopIteration:
                break

            except queue.Empty:
                while True:
                    if not self.send_keep_alive():
                        break

    def send_keep_alive(self):
        """Probe one idle session nearing expiry; return False when no
        session qualifies or the pool is stopping."""
        session = self.pick()
        if session is None:
            return False

        if self._should_stop.is_set():
            self._destroy(session, "session-pool-terminated")
            return False

        f = session.async_keep_alive(self._req_settings)
        f.add_done_callback(lambda q: self._on_keep_alive(session, q))
        return True
diff --git a/contrib/python/ydb/py3/ydb/_topic_common/__init__.py b/contrib/python/ydb/py3/ydb/_topic_common/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_topic_common/__init__.py
diff --git a/contrib/python/ydb/py3/ydb/_topic_common/common.py b/contrib/python/ydb/py3/ydb/_topic_common/common.py
new file mode 100644
index 0000000000..7a97336edf
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_topic_common/common.py
@@ -0,0 +1,145 @@
+import asyncio
+import concurrent.futures
+import threading
+import typing
+from typing import Optional
+
+from .. import operation, issues
+from .._grpc.grpcwrapper.common_utils import IFromProtoWithProtoType
+
+TimeoutType = typing.Union[int, float, None]
+
+
def wrap_operation(rpc_state, response_pb, driver=None):
    """Wrap a raw gRPC response into a ydb Operation handle."""
    return operation.Operation(rpc_state, response_pb, driver)
+
+
+ResultType = typing.TypeVar("ResultType", bound=IFromProtoWithProtoType)
+
+
def create_result_wrapper(
    result_type: typing.Type[ResultType],
) -> typing.Callable[[typing.Any, typing.Any, typing.Any], ResultType]:
    """Return a response wrapper that unpacks the operation result into *result_type*."""

    def wrapper(rpc_state, response_pb, driver=None):
        # Raise the appropriate ydb error if the operation reported issues.
        issues._process_response(response_pb.operation)
        msg = result_type.empty_proto_message()
        response_pb.operation.result.Unpack(msg)
        return result_type.from_proto(msg)

    return wrapper
+
+
+_shared_event_loop_lock = threading.Lock()
+_shared_event_loop: Optional[asyncio.AbstractEventLoop] = None
+
+
def _get_shared_event_loop() -> asyncio.AbstractEventLoop:
    """Return the process-wide event loop used by the sync topic helpers.

    Lazily starts a daemon thread running a dedicated loop on first use.
    Double-checked locking keeps subsequent calls lock-free.
    """
    global _shared_event_loop

    # Fast path: already initialized, no locking needed.
    if _shared_event_loop is not None:
        return _shared_event_loop

    with _shared_event_loop_lock:
        # Re-check under the lock: another thread may have won the race.
        if _shared_event_loop is not None:
            return _shared_event_loop

        event_loop_set_done = concurrent.futures.Future()

        def start_event_loop():
            event_loop = asyncio.new_event_loop()
            event_loop_set_done.set_result(event_loop)
            asyncio.set_event_loop(event_loop)
            event_loop.run_forever()

        t = threading.Thread(
            target=start_event_loop,
            name="Common ydb topic event loop",
            daemon=True,  # must not keep the process alive at exit
        )
        t.start()

        # Block until the loop object exists; run_forever keeps the thread busy.
        _shared_event_loop = event_loop_set_done.result()
        return _shared_event_loop
+
+
class CallFromSyncToAsync:
    """Bridge for calling coroutines on a background event loop from sync code."""

    _loop: asyncio.AbstractEventLoop

    def __init__(self, loop: asyncio.AbstractEventLoop):
        self._loop = loop

    def unsafe_call_with_future(self, coro: typing.Coroutine) -> concurrent.futures.Future:
        """
        Schedule *coro* on the loop and return its concurrent future.

        The returned result from coro may be lost (e.g. if the future is
        cancelled after the coroutine already produced a value).
        """
        return asyncio.run_coroutine_threadsafe(coro, self._loop)

    def unsafe_call_with_result(self, coro: typing.Coroutine, timeout: TimeoutType):
        """
        Run *coro* and wait up to *timeout* seconds for its result.

        The returned result from coro may be lost by a race between the
        future being cancelled on timeout and the coroutine returning a value.
        """
        f = self.unsafe_call_with_future(coro)
        try:
            return f.result(timeout)
        except concurrent.futures.TimeoutError:
            # Normalize to the builtin TimeoutError for sync callers.
            raise TimeoutError()
        finally:
            if not f.done():
                f.cancel()

    def safe_call_with_result(self, coro: typing.Coroutine, timeout: TimeoutType):
        """
        Run *coro* without losing its return value, at the cost of extra
        latency on timeout: cancellation of the coroutine is awaited.
        """

        if timeout is not None and timeout <= 0:
            return self._safe_call_fast(coro)

        async def call_coro():
            task = self._loop.create_task(coro)
            try:
                res = await asyncio.wait_for(task, timeout)
                return res
            except asyncio.TimeoutError:
                # Give the task one last chance to deliver a value before
                # reporting the timeout.
                try:
                    res = await task
                    return res
                except asyncio.CancelledError:
                    pass

            # return builtin TimeoutError instead of asyncio.TimeoutError
            raise TimeoutError()

        return asyncio.run_coroutine_threadsafe(call_coro(), self._loop).result()

    def _safe_call_fast(self, coro: typing.Coroutine):
        """
        Zero/negative-timeout variant of safe_call_with_result: waits for the
        coroutine result for only one loop iteration before cancelling.
        """
        res = concurrent.futures.Future()

        async def call_coro():
            try:
                res.set_result(await coro)
            except asyncio.CancelledError:
                res.set_exception(TimeoutError())

        coro_future = asyncio.run_coroutine_threadsafe(call_coro(), self._loop)
        # Let the loop run one iteration, then cancel the wrapper.
        asyncio.run_coroutine_threadsafe(asyncio.sleep(0), self._loop).result()
        coro_future.cancel()
        return res.result()

    def call_sync(self, callback: typing.Callable[[], typing.Any]) -> typing.Any:
        """Run a plain callable on the loop thread and return its result,
        re-raising any exception it raised."""
        result = concurrent.futures.Future()

        def call_callback():
            try:
                res = callback()
                result.set_result(res)
            except BaseException as err:
                result.set_exception(err)

        self._loop.call_soon_threadsafe(call_callback)

        return result.result()
diff --git a/contrib/python/ydb/py3/ydb/_topic_reader/__init__.py b/contrib/python/ydb/py3/ydb/_topic_reader/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_topic_reader/__init__.py
diff --git a/contrib/python/ydb/py3/ydb/_topic_reader/datatypes.py b/contrib/python/ydb/py3/ydb/_topic_reader/datatypes.py
new file mode 100644
index 0000000000..28155ea7a3
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_topic_reader/datatypes.py
@@ -0,0 +1,173 @@
+from __future__ import annotations
+
+import abc
+import asyncio
+import bisect
+import enum
+from collections import deque
+from dataclasses import dataclass, field
+import datetime
+from typing import Union, Any, List, Dict, Deque, Optional
+
+from ydb._grpc.grpcwrapper.ydb_topic import OffsetsRange, Codec
+from ydb._topic_reader import topic_reader_asyncio
+
+
class ICommittable(abc.ABC):
    """Interface of objects (messages/batches) that can be committed to the server."""

    @abc.abstractmethod
    def _commit_get_partition_session(self) -> PartitionSession:
        """Return the partition session the commit belongs to."""
        ...

    @abc.abstractmethod
    def _commit_get_offsets_range(self) -> OffsetsRange:
        """Return the offsets range covered by the commit."""
        ...
+
+
class ISessionAlive(abc.ABC):
    """Interface of objects bound to a partition session whose liveness can be checked."""

    @property
    @abc.abstractmethod
    def alive(self) -> bool:
        """True while the underlying partition session is not closed."""
        pass
+
+
@dataclass
class PublicMessage(ICommittable, ISessionAlive):
    """Single message received from a topic. Public API object."""

    seqno: int
    created_at: datetime.datetime
    message_group_id: str
    session_metadata: Dict[str, str]
    offset: int
    written_at: datetime.datetime
    producer_id: str
    data: Union[bytes, Any]  # set as original decompressed bytes or deserialized object if deserializer set in reader
    _partition_session: PartitionSession
    _commit_start_offset: int
    _commit_end_offset: int

    def _commit_get_partition_session(self) -> PartitionSession:
        return self._partition_session

    def _commit_get_offsets_range(self) -> OffsetsRange:
        # The commit covers [start, end) offsets for this single message.
        return OffsetsRange(self._commit_start_offset, self._commit_end_offset)

    # ISessionAlive implementation
    @property
    def alive(self) -> bool:
        return not self._partition_session.closed
+
+
@dataclass
class PartitionSession:
    """State of a single partition read session inside a reader stream.

    Tracks the server-acked committed offset and the ordered queue of
    commit-ack waiters for the partition.
    """

    id: int
    state: "PartitionSession.State"
    topic_path: str
    partition_id: int
    committed_offset: int  # last commit offset, acked from server. Processed messages up to the field-1 offset.
    reader_reconnector_id: int
    reader_stream_id: int
    _next_message_start_commit_offset: int = field(init=False)

    # todo: check if deque is optimal
    _ack_waiters: Deque["PartitionSession.CommitAckWaiter"] = field(init=False, default_factory=lambda: deque())

    _state_changed: asyncio.Event = field(init=False, default_factory=lambda: asyncio.Event(), compare=False)

    def __post_init__(self):
        # The first message of the session starts committing from the server-acked offset.
        self._next_message_start_commit_offset = self.committed_offset

    def add_waiter(self, end_offset: int) -> "PartitionSession.CommitAckWaiter":
        """Register a waiter for the server ack of a commit up to *end_offset*.

        Returns an already-finished waiter when end_offset is covered by
        committed_offset. Raises PublicTopicReaderPartitionExpiredError when the
        session is stopped.
        """
        self._ensure_not_closed()

        waiter = PartitionSession.CommitAckWaiter(end_offset, asyncio.Future())
        if end_offset <= self.committed_offset:
            waiter._finish_ok()
            return waiter

        # fast way
        if self._ack_waiters and self._ack_waiters[-1].end_offset < end_offset:
            self._ack_waiters.append(waiter)
        else:
            # keep the deque sorted by end_offset for out-of-order additions
            bisect.insort(self._ack_waiters, waiter)

        return waiter

    def ack_notify(self, offset: int):
        """Handle commit ack from server: finish all waiters with end_offset <= offset."""
        self._ensure_not_closed()

        self.committed_offset = offset

        if not self._ack_waiters:
            # todo log warning
            # must be never receive ack for not sended request
            return

        while self._ack_waiters:
            if self._ack_waiters[0].end_offset > offset:
                break
            waiter = self._ack_waiters.popleft()
            waiter._finish_ok()

    def close(self):
        """Stop the session and fail every pending waiter with partition-expired error."""
        if self.closed:
            return

        self.state = PartitionSession.State.Stopped
        exception = topic_reader_asyncio.PublicTopicReaderPartitionExpiredError()
        for waiter in self._ack_waiters:
            waiter._finish_error(exception)

    @property
    def closed(self):
        # The session is closed once it reached the Stopped state.
        return self.state == PartitionSession.State.Stopped

    def _ensure_not_closed(self):
        # Guard: operations on a stopped session raise partition-expired error.
        if self.state == PartitionSession.State.Stopped:
            raise topic_reader_asyncio.PublicTopicReaderPartitionExpiredError()

    class State(enum.Enum):
        Active = 1
        GracefulShutdown = 2
        Stopped = 3

    @dataclass(order=True)
    class CommitAckWaiter:
        # Ordered by end_offset only (other fields are excluded from comparison).
        end_offset: int
        future: asyncio.Future = field(compare=False)
        _done: bool = field(default=False, init=False)
        _exception: Optional[Exception] = field(default=None, init=False)

        def _finish_ok(self):
            self._done = True
            self.future.set_result(None)

        def _finish_error(self, error: Exception):
            # NOTE(review): unlike _finish_ok, _done stays False here - confirm
            # whether the flag means "finished" or "finished successfully".
            self._exception = error
            self.future.set_exception(error)
+
+
@dataclass
class PublicBatch(ICommittable, ISessionAlive):
    """Batch of messages from a single partition session. Public API object."""

    messages: List[PublicMessage]
    _partition_session: PartitionSession
    _bytes_size: int  # share of server-reported bytes, used for read-buffer accounting
    _codec: Codec

    def _commit_get_partition_session(self) -> PartitionSession:
        # All messages of the batch come from the same partition session.
        return self.messages[0]._commit_get_partition_session()

    def _commit_get_offsets_range(self) -> OffsetsRange:
        # The commit range spans from the first message's start to the last message's end.
        return OffsetsRange(
            self.messages[0]._commit_get_offsets_range().start,
            self.messages[-1]._commit_get_offsets_range().end,
        )

    def empty(self) -> bool:
        """True when every message was popped from the batch."""
        return len(self.messages) == 0

    # ISessionAlive implementation
    @property
    def alive(self) -> bool:
        return not self._partition_session.closed

    def pop_message(self) -> PublicMessage:
        """Remove and return the first message; raises IndexError on an empty batch."""
        return self.messages.pop(0)
diff --git a/contrib/python/ydb/py3/ydb/_topic_reader/topic_reader.py b/contrib/python/ydb/py3/ydb/_topic_reader/topic_reader.py
new file mode 100644
index 0000000000..17fb288555
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_topic_reader/topic_reader.py
@@ -0,0 +1,134 @@
+import concurrent.futures
+import enum
+import datetime
+from dataclasses import dataclass
+from typing import (
+ Union,
+ Optional,
+ List,
+ Mapping,
+ Callable,
+)
+
+from ..table import RetrySettings
+from .._grpc.grpcwrapper.ydb_topic import StreamReadMessage, OffsetsRange
+
+
+@dataclass
+class PublicTopicSelector:
+ path: str
+ partitions: Optional[Union[int, List[int]]] = None
+ read_from: Optional[datetime.datetime] = None
+ max_lag: Optional[datetime.timedelta] = None
+
+ def _to_topic_read_settings(self) -> StreamReadMessage.InitRequest.TopicReadSettings:
+ partitions = self.partitions
+ if partitions is None:
+ partitions = []
+
+ elif not isinstance(partitions, list):
+ partitions = [partitions]
+
+ return StreamReadMessage.InitRequest.TopicReadSettings(
+ path=self.path,
+ partition_ids=partitions,
+ max_lag=self.max_lag,
+ read_from=self.read_from,
+ )
+
+
+TopicSelectorTypes = Union[str, PublicTopicSelector, List[Union[str, PublicTopicSelector]]]
+
+
@dataclass
class PublicReaderSettings:
    """Settings for a topic reader. Public API object."""

    consumer: str
    topic: TopicSelectorTypes
    buffer_size_bytes: int = 50 * 1024 * 1024

    # decoders: map[codec_code] func(encoded_bytes)->decoded_bytes
    decoders: Union[Mapping[int, Callable[[bytes], bytes]], None] = None

    # decoder_executor, must be set for handle non raw messages
    decoder_executor: Optional[concurrent.futures.Executor] = None
    update_token_interval: Union[int, float] = 3600

    def __post_init__(self):
        # check possible create init message
        _ = self._init_message()

    def _init_message(self) -> StreamReadMessage.InitRequest:
        """Build the InitRequest for the read stream.

        Validates the consumer and topic field types; raises TypeError for
        unsupported values. Never mutates user-supplied data.
        """
        if not isinstance(self.consumer, str):
            # Fixed message: the field is named "consumer", not "customer".
            raise TypeError("Unsupported type for consumer field: '%s'" % type(self.consumer))

        # Work on a copy: do not mutate a caller-supplied list in self.topic.
        if isinstance(self.topic, list):
            selectors = list(self.topic)
        else:
            selectors = [self.topic]

        for index, selector in enumerate(selectors):
            if isinstance(selector, str):
                # Plain topic path - wrap into a selector with default filters.
                selectors[index] = PublicTopicSelector(path=selector)
            elif isinstance(selector, PublicTopicSelector):
                pass
            else:
                raise TypeError("Unsupported type for topic field: '%s'" % type(selector))

        return StreamReadMessage.InitRequest(
            topics_read_settings=list(map(PublicTopicSelector._to_topic_read_settings, selectors)),  # type: ignore
            consumer=self.consumer,
        )

    def _retry_settings(self) -> RetrySettings:
        """Retry policy used by the reader reconnector; reads are idempotent."""
        return RetrySettings(idempotent=True)
+
+
class Events:
    """Container namespace for reader event classes; all are plain data holders."""

    class OnCommit:
        topic: str
        offset: int

    class OnPartitionGetStartOffsetRequest:
        topic: str
        partition_id: int

    class OnPartitionGetStartOffsetResponse:
        start_offset: int

    class OnInitPartition:
        pass

    class OnShutdownPatition:
        # NOTE(review): "Patition" looks like a typo for "Partition", but renaming
        # would break external references - keeping the name as is.
        pass
+
+
class RetryPolicy:
    """Reader retry policy settings (plain data holder)."""

    connection_timeout_sec: float
    overload_timeout_sec: float
    retry_access_denied: bool = False
+
+
class CommitResult:
    """Result of a commit request for one partition offset (plain data holder)."""

    topic: str
    partition: int
    offset: int
    state: "CommitResult.State"
    details: str  # for humans only, content messages may be change in any time

    class State(enum.Enum):
        UNSENT = 1  # commit didn't send to the server
        SENT = 2  # commit was sent to server, but ack hasn't received
        ACKED = 3  # ack from server is received
+
+
class SessionStat:
    """Statistics snapshot of one partition read session (plain data holder)."""

    path: str
    partition_id: str  # NOTE(review): typed str here while partition ids are int elsewhere - confirm
    partition_offsets: OffsetsRange
    committed_offset: int
    write_time_high_watermark: datetime.datetime
    write_time_high_watermark_timestamp_nano: int
+
+
class StubEvent:
    """Placeholder event type."""

    pass
diff --git a/contrib/python/ydb/py3/ydb/_topic_reader/topic_reader_asyncio.py b/contrib/python/ydb/py3/ydb/_topic_reader/topic_reader_asyncio.py
new file mode 100644
index 0000000000..50684f7cf9
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_topic_reader/topic_reader_asyncio.py
@@ -0,0 +1,659 @@
+from __future__ import annotations
+
+import asyncio
+import concurrent.futures
+import gzip
+import typing
+from asyncio import Task
+from collections import deque
+from typing import Optional, Set, Dict, Union, Callable
+
+from .. import _apis, issues
+from .._utilities import AtomicCounter
+from ..aio import Driver
+from ..issues import Error as YdbError, _process_response
+from . import datatypes
+from . import topic_reader
+from .._grpc.grpcwrapper.common_utils import (
+ IGrpcWrapperAsyncIO,
+ SupportedDriverType,
+ GrpcWrapperAsyncIO,
+)
+from .._grpc.grpcwrapper.ydb_topic import (
+ StreamReadMessage,
+ UpdateTokenRequest,
+ UpdateTokenResponse,
+ Codec,
+)
+from .._errors import check_retriable_error
+import logging
+
+logger = logging.getLogger(__name__)
+
+
class TopicReaderError(YdbError):
    """Base error type of the topic reader."""

    pass
+
+
class TopicReaderUnexpectedCodec(YdbError):
    """A message batch arrived with a codec the reader has no decoder for.

    NOTE(review): inherits YdbError directly rather than TopicReaderError -
    confirm whether that is intentional (callers catching TopicReaderError
    will not catch this one).
    """

    pass
+
+
class PublicTopicReaderPartitionExpiredError(TopicReaderError):
    """
    Raised on commit after the partition read session has been dropped.

    This is not fatal: the message/batch will not be committed to the server
    and will be delivered again in another read session
    (of this or another reader).
    """

    def __init__(self, message: str = "Topic reader partition session is closed"):
        super().__init__(message)
+
+
class TopicReaderStreamClosedError(TopicReaderError):
    """Operation attempted on a closed reader stream."""

    def __init__(self):
        super().__init__("Topic reader stream is closed")
+
+
class TopicReaderClosedError(TopicReaderError):
    """Operation attempted on an already closed reader."""

    def __init__(self):
        super().__init__("Topic reader is closed already")
+
+
class PublicAsyncIOReader:
    """Asyncio topic reader. Public API object.

    Delegates all work to ReaderReconnector, which keeps the underlying read
    stream alive across reconnects.
    """

    _loop: asyncio.AbstractEventLoop
    _closed: bool
    _reconnector: ReaderReconnector
    _parent: typing.Any  # need for prevent close parent client by GC

    def __init__(
        self,
        driver: Driver,
        settings: topic_reader.PublicReaderSettings,
        *,
        _parent=None,
    ):
        self._loop = asyncio.get_running_loop()
        self._closed = False
        self._reconnector = ReaderReconnector(driver, settings)
        self._parent = _parent

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        await self.close()

    def __del__(self):
        # Best-effort close on GC; flush is skipped since nobody awaits it.
        # NOTE(review): loop.create_task from __del__ assumes the stored loop is
        # still running and we are in its thread - confirm.
        if not self._closed:
            self._loop.create_task(self.close(flush=False), name="close reader")

    async def wait_message(self):
        """
        Wait at least one message from reader.
        """
        await self._reconnector.wait_message()

    async def receive_batch(
        self,
    ) -> typing.Union[datatypes.PublicBatch, None]:
        """
        Get one messages batch from reader.
        All messages in a batch from same partition.

        use asyncio.wait_for for wait with timeout.
        """
        await self._reconnector.wait_message()
        return self._reconnector.receive_batch_nowait()

    async def receive_message(self) -> typing.Optional[datatypes.PublicMessage]:
        """
        Block until receive new message

        use asyncio.wait_for for wait with timeout.
        """
        await self._reconnector.wait_message()
        return self._reconnector.receive_message_nowait()

    def commit(self, batch: typing.Union[datatypes.PublicMessage, datatypes.PublicBatch]):
        """
        Write commit message to a buffer.

        For the method no way check the commit result
        (for example if lost connection - commits will not re-send and committed messages will receive again).
        """
        try:
            self._reconnector.commit(batch)
        except PublicTopicReaderPartitionExpiredError:
            # An expired partition is not an error for a fire-and-forget commit.
            pass

    async def commit_with_ack(self, batch: typing.Union[datatypes.PublicMessage, datatypes.PublicBatch]):
        """
        write commit message to a buffer and wait ack from the server.

        use asyncio.wait_for for wait with timeout.

        may raise ydb.TopicReaderPartitionExpiredError, the error mean reader partition closed from server
        before receive commit ack. Message may be acked or not (if not - it will send in other read session,
        to this or other reader).
        """
        waiter = self._reconnector.commit(batch)
        await waiter.future

    async def close(self, flush: bool = True):
        """Close the reader; with flush=True pending commit acks are awaited first.

        Raises TopicReaderClosedError on a second close.
        """
        if self._closed:
            raise TopicReaderClosedError()

        self._closed = True
        await self._reconnector.close(flush)
+
+
class ReaderReconnector:
    """Keeps a ReaderStream connected: recreates it on retriable errors.

    All read/commit operations are proxied to the current stream; the first
    non-retriable error is stored and re-raised to readers.
    """

    _static_reader_reconnector_counter = AtomicCounter()

    _id: int
    _settings: topic_reader.PublicReaderSettings
    _driver: Driver
    _background_tasks: Set[Task]

    _state_changed: asyncio.Event
    _stream_reader: Optional["ReaderStream"]
    _first_error: asyncio.Future[YdbError]

    def __init__(self, driver: Driver, settings: topic_reader.PublicReaderSettings):
        self._id = self._static_reader_reconnector_counter.inc_and_get()
        self._settings = settings
        self._driver = driver
        self._background_tasks = set()

        self._state_changed = asyncio.Event()
        self._stream_reader = None
        self._background_tasks.add(asyncio.create_task(self._connection_loop()))
        self._first_error = asyncio.get_running_loop().create_future()

    async def _connection_loop(self):
        # Reconnect forever until a non-retriable error happens.
        attempt = 0
        while True:
            try:
                self._stream_reader = await ReaderStream.create(self._id, self._driver, self._settings)
                attempt = 0
                self._state_changed.set()
                # Blocks until the stream fails, then raises its error.
                await self._stream_reader.wait_error()
            except BaseException as err:
                retry_info = check_retriable_error(err, self._settings._retry_settings(), attempt)
                if not retry_info.is_retriable:
                    # Non-retriable: surface the error to readers and stop reconnecting.
                    self._set_first_error(err)
                    return
                await asyncio.sleep(retry_info.sleep_timeout_seconds)

                attempt += 1
            finally:
                # Runs on every iteration: close the failed stream before retrying.
                if self._stream_reader is not None:
                    # noinspection PyBroadException
                    try:
                        await self._stream_reader.close()
                    except BaseException:
                        # supress any error on close stream reader
                        pass

    async def wait_message(self):
        """Block until the current stream has at least one message, or raise the first error."""
        while True:
            if self._first_error.done():
                raise self._first_error.result()

            if self._stream_reader:
                try:
                    await self._stream_reader.wait_messages()
                    return
                except YdbError:
                    pass  # handle errors in reconnection loop

            await self._state_changed.wait()
            self._state_changed.clear()

    def receive_batch_nowait(self):
        return self._stream_reader.receive_batch_nowait()

    def receive_message_nowait(self):
        return self._stream_reader.receive_message_nowait()

    def commit(self, batch: datatypes.ICommittable) -> datatypes.PartitionSession.CommitAckWaiter:
        return self._stream_reader.commit(batch)

    async def close(self, flush: bool):
        """Close the current stream (optionally flushing commits) and stop background tasks."""
        if self._stream_reader:
            if flush:
                await self.flush()
            await self._stream_reader.close()
        for task in self._background_tasks:
            task.cancel()

        await asyncio.wait(self._background_tasks)

    async def flush(self):
        """Wait until the pending commit acks of the current stream are received."""
        if self._stream_reader:
            await self._stream_reader.flush()

    def _set_first_error(self, err: issues.Error):
        try:
            self._first_error.set_result(err)
            self._state_changed.set()
        except asyncio.InvalidStateError:
            # skip if already has result
            pass
+
+
class ReaderStream:
    """Single grpc read stream session.

    Parses server messages, maintains partition sessions, queues raw batches
    for decoding and tracks commit acks. Created via ReaderStream.create();
    recreated by ReaderReconnector after errors.
    """

    _static_id_counter = AtomicCounter()

    _loop: asyncio.AbstractEventLoop
    _id: int
    _reader_reconnector_id: int
    _session_id: str
    _stream: Optional[IGrpcWrapperAsyncIO]
    _started: bool
    _background_tasks: Set[asyncio.Task]
    _partition_sessions: Dict[int, datatypes.PartitionSession]
    _buffer_size_bytes: int  # use for init request, then for debug purposes only
    _decode_executor: concurrent.futures.Executor
    _decoders: Dict[int, typing.Callable[[bytes], bytes]]  # dict[codec_code] func(encoded_bytes)->decoded_bytes

    if typing.TYPE_CHECKING:
        _batches_to_decode: asyncio.Queue[datatypes.PublicBatch]
    else:
        _batches_to_decode: asyncio.Queue

    _state_changed: asyncio.Event
    _closed: bool
    _message_batches: typing.Deque[datatypes.PublicBatch]
    _first_error: asyncio.Future[YdbError]

    _update_token_interval: Union[int, float]
    _update_token_event: asyncio.Event
    _get_token_function: Callable[[], str]

    def __init__(
        self,
        reader_reconnector_id: int,
        settings: topic_reader.PublicReaderSettings,
        get_token_function: Optional[Callable[[], str]] = None,
    ):
        self._loop = asyncio.get_running_loop()
        self._id = ReaderStream._static_id_counter.inc_and_get()
        self._reader_reconnector_id = reader_reconnector_id
        self._session_id = "not initialized"
        self._stream = None
        self._started = False
        self._background_tasks = set()
        self._partition_sessions = dict()
        self._buffer_size_bytes = settings.buffer_size_bytes
        self._decode_executor = settings.decoder_executor

        # gzip decoder is always available; user-supplied decoders may override it.
        self._decoders = {Codec.CODEC_GZIP: gzip.decompress}
        if settings.decoders:
            self._decoders.update(settings.decoders)

        self._state_changed = asyncio.Event()
        self._closed = False
        self._first_error = asyncio.get_running_loop().create_future()
        self._batches_to_decode = asyncio.Queue()
        self._message_batches = deque()

        self._update_token_interval = settings.update_token_interval
        self._get_token_function = get_token_function
        self._update_token_event = asyncio.Event()

    @staticmethod
    async def create(
        reader_reconnector_id: int,
        driver: SupportedDriverType,
        settings: topic_reader.PublicReaderSettings,
    ) -> "ReaderStream":
        """Open the grpc stream, perform the init handshake and start background loops."""
        stream = GrpcWrapperAsyncIO(StreamReadMessage.FromServer.from_proto)

        await stream.start(driver, _apis.TopicService.Stub, _apis.TopicService.StreamRead)

        creds = driver._credentials
        reader = ReaderStream(
            reader_reconnector_id,
            settings,
            get_token_function=creds.get_auth_token if creds else None,
        )
        await reader._start(stream, settings._init_message())
        return reader

    async def _start(self, stream: IGrpcWrapperAsyncIO, init_message: StreamReadMessage.InitRequest):
        """Send InitRequest, await InitResponse and spawn background tasks."""
        if self._started:
            raise TopicReaderError("Double start ReaderStream")

        self._started = True
        self._stream = stream

        stream.write(StreamReadMessage.FromClient(client_message=init_message))
        init_response = await stream.receive()  # type: StreamReadMessage.FromServer
        if isinstance(init_response.server_message, StreamReadMessage.InitResponse):
            self._session_id = init_response.server_message.session_id
        else:
            raise TopicReaderError("Unexpected message after InitRequest: %s", init_response)

        self._update_token_event.set()

        self._background_tasks.add(asyncio.create_task(self._read_messages_loop(), name="read_messages_loop"))
        self._background_tasks.add(asyncio.create_task(self._decode_batches_loop()))
        if self._get_token_function:
            self._background_tasks.add(asyncio.create_task(self._update_token_loop(), name="update_token_loop"))

    async def wait_error(self):
        """Block until the stream gets its first error, then raise it."""
        raise await self._first_error

    async def wait_messages(self):
        """Block until decoded message batches are available, or raise the first error."""
        while True:
            if self._get_first_error():
                raise self._get_first_error()

            if self._message_batches:
                return

            await self._state_changed.wait()
            self._state_changed.clear()

    def receive_batch_nowait(self):
        """Pop one decoded batch (or None) and return its buffer quota to the server."""
        if self._get_first_error():
            raise self._get_first_error()

        if not self._message_batches:
            return None

        batch = self._message_batches.popleft()
        self._buffer_release_bytes(batch._bytes_size)
        return batch

    def receive_message_nowait(self):
        """Pop a single message from the head batch, or return None when empty."""
        if self._get_first_error():
            raise self._get_first_error()

        try:
            batch = self._message_batches[0]
            message = batch.pop_message()
        except IndexError:
            return None

        if batch.empty():
            # Drop the exhausted batch and release its buffer bytes.
            self.receive_batch_nowait()

        return message

    def commit(self, batch: datatypes.ICommittable) -> datatypes.PartitionSession.CommitAckWaiter:
        """Send a commit request for the message/batch; returns the ack waiter.

        Raises TopicReaderError for foreign messages and
        PublicTopicReaderPartitionExpiredError for dead partition sessions.
        """
        partition_session = batch._commit_get_partition_session()

        if partition_session.reader_reconnector_id != self._reader_reconnector_id:
            raise TopicReaderError("reader can commit only self-produced messages")

        if partition_session.reader_stream_id != self._id:
            raise PublicTopicReaderPartitionExpiredError("commit messages after reconnect to server")

        if partition_session.id not in self._partition_sessions:
            raise PublicTopicReaderPartitionExpiredError("commit messages after server stop the partition read session")

        commit_range = batch._commit_get_offsets_range()
        waiter = partition_session.add_waiter(commit_range.end)

        if not waiter.future.done():
            # An already-acked range needs no request to the server.
            client_message = StreamReadMessage.CommitOffsetRequest(
                commit_offsets=[
                    StreamReadMessage.CommitOffsetRequest.PartitionCommitOffset(
                        partition_session_id=partition_session.id,
                        offsets=[commit_range],
                    )
                ]
            )
            self._stream.write(StreamReadMessage.FromClient(client_message=client_message))

        return waiter

    async def _read_messages_loop(self):
        """Background task: dispatch every server message to its handler."""
        try:
            self._stream.write(
                StreamReadMessage.FromClient(
                    client_message=StreamReadMessage.ReadRequest(
                        bytes_size=self._buffer_size_bytes,
                    ),
                )
            )
            while True:
                try:
                    message = await self._stream.receive()  # type: StreamReadMessage.FromServer
                    _process_response(message.server_status)

                    if isinstance(message.server_message, StreamReadMessage.ReadResponse):
                        self._on_read_response(message.server_message)

                    elif isinstance(message.server_message, StreamReadMessage.CommitOffsetResponse):
                        self._on_commit_response(message.server_message)

                    elif isinstance(
                        message.server_message,
                        StreamReadMessage.StartPartitionSessionRequest,
                    ):
                        self._on_start_partition_session(message.server_message)

                    elif isinstance(
                        message.server_message,
                        StreamReadMessage.StopPartitionSessionRequest,
                    ):
                        self._on_partition_session_stop(message.server_message)

                    elif isinstance(message.server_message, UpdateTokenResponse):
                        self._update_token_event.set()

                    else:
                        raise issues.UnexpectedGrpcMessage(
                            "Unexpected message in _read_messages_loop: %s" % type(message.server_message)
                        )
                except issues.UnexpectedGrpcMessage as e:
                    # An unknown message is logged and skipped; the loop continues.
                    logger.exception("unexpected message in stream reader: %s" % e)

                self._state_changed.set()
        except Exception as e:
            # Any other failure becomes the stream's first error.
            self._set_first_error(e)
            return

    async def _update_token_loop(self):
        """Background task: periodically refresh the auth token on the stream."""
        while True:
            await asyncio.sleep(self._update_token_interval)
            await self._update_token(token=self._get_token_function())

    async def _update_token(self, token: str):
        # Wait until the previous token update is confirmed before sending a new one.
        await self._update_token_event.wait()
        try:
            msg = StreamReadMessage.FromClient(UpdateTokenRequest(token))
            self._stream.write(msg)
        finally:
            self._update_token_event.clear()

    def _on_start_partition_session(self, message: StreamReadMessage.StartPartitionSessionRequest):
        """Register a new partition session and confirm it to the server."""
        try:
            if message.partition_session.partition_session_id in self._partition_sessions:
                raise TopicReaderError(
                    "Double start partition session: %s" % message.partition_session.partition_session_id
                )

            self._partition_sessions[message.partition_session.partition_session_id] = datatypes.PartitionSession(
                id=message.partition_session.partition_session_id,
                state=datatypes.PartitionSession.State.Active,
                topic_path=message.partition_session.path,
                partition_id=message.partition_session.partition_id,
                committed_offset=message.committed_offset,
                reader_reconnector_id=self._reader_reconnector_id,
                reader_stream_id=self._id,
            )
            self._stream.write(
                StreamReadMessage.FromClient(
                    client_message=StreamReadMessage.StartPartitionSessionResponse(
                        partition_session_id=message.partition_session.partition_session_id,
                        read_offset=None,
                        commit_offset=None,
                    )
                ),
            )
        except YdbError as err:
            self._set_first_error(err)

    def _on_partition_session_stop(self, message: StreamReadMessage.StopPartitionSessionRequest):
        """Close a partition session; confirm to the server when the stop is graceful."""
        if message.partition_session_id not in self._partition_sessions:
            # may if receive stop partition with graceful=false after response on stop partition
            # with graceful=true and remove partition from internal dictionary
            return

        partition = self._partition_sessions.pop(message.partition_session_id)
        partition.close()

        if message.graceful:
            self._stream.write(
                StreamReadMessage.FromClient(
                    client_message=StreamReadMessage.StopPartitionSessionResponse(
                        partition_session_id=message.partition_session_id,
                    )
                )
            )

    def _on_read_response(self, message: StreamReadMessage.ReadResponse):
        """Account consumed buffer bytes and queue raw batches for decoding."""
        self._buffer_consume_bytes(message.bytes_size)

        batches = self._read_response_to_batches(message)
        for batch in batches:
            self._batches_to_decode.put_nowait(batch)

    def _on_commit_response(self, message: StreamReadMessage.CommitOffsetResponse):
        """Forward commit acks to the corresponding partition sessions."""
        for partition_offset in message.partitions_committed_offsets:
            if partition_offset.partition_session_id not in self._partition_sessions:
                continue

            session = self._partition_sessions[partition_offset.partition_session_id]
            session.ack_notify(partition_offset.committed_offset)

    def _buffer_consume_bytes(self, bytes_size):
        # The server used part of the read buffer for delivered data.
        self._buffer_size_bytes -= bytes_size

    def _buffer_release_bytes(self, bytes_size):
        # Give the quota back to the server so it may send more data.
        self._buffer_size_bytes += bytes_size
        self._stream.write(
            StreamReadMessage.FromClient(
                client_message=StreamReadMessage.ReadRequest(
                    bytes_size=bytes_size,
                )
            )
        )

    def _read_response_to_batches(self, message: StreamReadMessage.ReadResponse) -> typing.List[datatypes.PublicBatch]:
        """Convert a ReadResponse into PublicBatch objects.

        message.bytes_size is spread evenly between batches; the division
        remainder is added to the last batch.
        """
        batches = []

        batch_count = sum(len(p.batches) for p in message.partition_data)
        if batch_count == 0:
            return batches

        bytes_per_batch = message.bytes_size // batch_count
        additional_bytes_to_last_batch = message.bytes_size - bytes_per_batch * batch_count

        for partition_data in message.partition_data:
            partition_session = self._partition_sessions[partition_data.partition_session_id]
            for server_batch in partition_data.batches:
                messages = []
                for message_data in server_batch.message_data:
                    mess = datatypes.PublicMessage(
                        seqno=message_data.seq_no,
                        created_at=message_data.created_at,
                        message_group_id=message_data.message_group_id,
                        session_metadata=server_batch.write_session_meta,
                        offset=message_data.offset,
                        written_at=server_batch.written_at,
                        producer_id=server_batch.producer_id,
                        data=message_data.data,
                        _partition_session=partition_session,
                        _commit_start_offset=partition_session._next_message_start_commit_offset,
                        _commit_end_offset=message_data.offset + 1,
                    )
                    messages.append(mess)
                    partition_session._next_message_start_commit_offset = mess._commit_end_offset

                if messages:
                    batch = datatypes.PublicBatch(
                        messages=messages,
                        _partition_session=partition_session,
                        _bytes_size=bytes_per_batch,
                        _codec=Codec(server_batch.codec),
                    )
                    batches.append(batch)

        # NOTE(review): if every server batch had empty message_data, `batches`
        # is empty here and this indexing raises IndexError - confirm whether
        # the server can send such a response.
        batches[-1]._bytes_size += additional_bytes_to_last_batch
        return batches

    async def _decode_batches_loop(self):
        """Background task: decode queued batches and publish them to readers."""
        while True:
            batch = await self._batches_to_decode.get()
            await self._decode_batch_inplace(batch)
            self._message_batches.append(batch)
            self._state_changed.set()

    async def _decode_batch_inplace(self, batch):
        """Decode all messages of the batch in the executor; batch becomes raw.

        Raises TopicReaderUnexpectedCodec when no decoder is registered for the codec.
        """
        if batch._codec == Codec.CODEC_RAW:
            return

        try:
            decode_func = self._decoders[batch._codec]
        except KeyError:
            raise TopicReaderUnexpectedCodec("Receive message with unexpected codec: %s" % batch._codec)

        decode_data_futures = []
        for message in batch.messages:
            future = self._loop.run_in_executor(self._decode_executor, decode_func, message.data)
            decode_data_futures.append(future)

        decoded_data = await asyncio.gather(*decode_data_futures)
        for index, message in enumerate(batch.messages):
            message.data = decoded_data[index]

        batch._codec = Codec.CODEC_RAW

    def _set_first_error(self, err: YdbError):
        try:
            self._first_error.set_result(err)
            self._state_changed.set()
        except asyncio.InvalidStateError:
            # skip later set errors
            pass

    def _get_first_error(self) -> Optional[YdbError]:
        # Returns None (implicitly) while no error happened.
        if self._first_error.done():
            return self._first_error.result()

    async def flush(self):
        """Wait until every pending commit ack is received."""
        if self._closed:
            raise RuntimeError("Flush on closed Stream")

        futures = []
        for session in self._partition_sessions.values():
            futures.extend(w.future for w in session._ack_waiters)

        if futures:
            await asyncio.wait(futures)

    async def close(self):
        """Close the stream, fail pending waiters and cancel background tasks."""
        if self._closed:
            return

        self._closed = True

        self._set_first_error(TopicReaderStreamClosedError())
        self._state_changed.set()
        self._stream.close()

        for session in self._partition_sessions.values():
            session.close()
        self._partition_sessions.clear()

        for task in self._background_tasks:
            task.cancel()

        if self._background_tasks:
            await asyncio.wait(self._background_tasks)
diff --git a/contrib/python/ydb/py3/ydb/_topic_reader/topic_reader_sync.py b/contrib/python/ydb/py3/ydb/_topic_reader/topic_reader_sync.py
new file mode 100644
index 0000000000..e5b4e1a2b4
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_topic_reader/topic_reader_sync.py
@@ -0,0 +1,155 @@
+import asyncio
+import concurrent.futures
+import typing
+from typing import List, Union, Optional
+
+from ydb._grpc.grpcwrapper.common_utils import SupportedDriverType
+from ydb._topic_common.common import (
+ _get_shared_event_loop,
+ CallFromSyncToAsync,
+ TimeoutType,
+)
+from ydb._topic_reader import datatypes
+from ydb._topic_reader.datatypes import PublicBatch
+from ydb._topic_reader.topic_reader import (
+ PublicReaderSettings,
+ CommitResult,
+)
+from ydb._topic_reader.topic_reader_asyncio import (
+ PublicAsyncIOReader,
+ TopicReaderClosedError,
+)
+
+
class TopicReaderSync:
    """Synchronous topic reader.

    Wraps PublicAsyncIOReader and proxies every call into the reader's event
    loop through CallFromSyncToAsync.
    """

    _caller: CallFromSyncToAsync
    _async_reader: PublicAsyncIOReader
    _closed: bool
    _parent: typing.Any  # need for prevent stop the client by GC

    def __init__(
        self,
        driver: SupportedDriverType,
        settings: PublicReaderSettings,
        *,
        eventloop: Optional[asyncio.AbstractEventLoop] = None,
        _parent=None,  # need for prevent stop the client by GC
    ):
        self._closed = False

        # An explicit event loop may be injected (e.g. for tests);
        # otherwise the process-wide shared loop is used.
        if eventloop:
            loop = eventloop
        else:
            loop = _get_shared_event_loop()

        self._caller = CallFromSyncToAsync(loop)

        async def create_reader():
            # The async reader must be created inside the loop it will run on.
            return PublicAsyncIOReader(driver, settings)

        self._async_reader = asyncio.run_coroutine_threadsafe(create_reader(), loop).result()

        self._parent = _parent

    def __del__(self):
        # Best-effort close on GC, without waiting for commit acks.
        self.close(flush=False)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()

    def receive_message(self, *, timeout: TimeoutType = None) -> datatypes.PublicMessage:
        """
        Block until receive new message
        It has no async_ version for prevent lost messages, use async_wait_message as signal for new batches available.
        receive_message(timeout=0) may return None even right after async_wait_message() is ok - because lost of partition
        or connection to server lost

        if no new message in timeout seconds (default - infinite): raise TimeoutError()
        if timeout <= 0 - it will fast wait only one event loop cycle - without wait any i/o operations or pauses, get messages from internal buffer only.
        """
        self._check_closed()

        return self._caller.safe_call_with_result(self._async_reader.receive_message(), timeout)

    def async_wait_message(self) -> concurrent.futures.Future:
        """
        Return future, which will completed when the reader has least one message in queue.
        If reader already has message - future will return completed.

        Possible situation when receive signal about message available, but no messages when try to receive a message.
        If message expired between send event and try to retrieve message (for example connection broken).
        """
        self._check_closed()

        return self._caller.unsafe_call_with_future(self._async_reader.wait_message())

    def receive_batch(
        self,
        *,
        max_messages: typing.Union[int, None] = None,
        max_bytes: typing.Union[int, None] = None,
        timeout: Union[float, None] = None,
    ) -> Union[PublicBatch, None]:
        """
        Get one messages batch from reader
        It has no async_ version for prevent lost messages, use async_wait_message as signal for new batches available.

        if no new message in timeout seconds (default - infinite): raise TimeoutError()
        if timeout <= 0 - it will fast wait only one event loop cycle - without wait any i/o operations or pauses, get messages from internal buffer only.
        """
        self._check_closed()

        # NOTE(review): max_messages and max_bytes are accepted but currently
        # unused - the async reader is called without them.
        return self._caller.safe_call_with_result(
            self._async_reader.receive_batch(),
            timeout,
        )

    def commit(self, mess: typing.Union[datatypes.PublicMessage, datatypes.PublicBatch]):
        """
        Put commit message to internal buffer.

        For the method no way check the commit result
        (for example if lost connection - commits will not re-send and committed messages will receive again)
        """
        self._check_closed()

        self._caller.call_sync(lambda: self._async_reader.commit(mess))

    def commit_with_ack(
        self,
        mess: typing.Union[datatypes.PublicMessage, datatypes.PublicBatch],
        timeout: TimeoutType = None,
    ) -> Union[CommitResult, List[CommitResult]]:
        """
        write commit message to a buffer and wait ack from the server.

        if receive in timeout seconds (default - infinite): raise TimeoutError()
        """
        self._check_closed()

        return self._caller.unsafe_call_with_result(self._async_reader.commit_with_ack(mess), timeout)

    def async_commit_with_ack(
        self, mess: typing.Union[datatypes.PublicMessage, datatypes.PublicBatch]
    ) -> concurrent.futures.Future:
        """
        write commit message to a buffer and return Future for wait result.
        """
        self._check_closed()

        return self._caller.unsafe_call_with_future(self._async_reader.commit_with_ack(mess))

    def close(self, *, flush: bool = True, timeout: TimeoutType = None):
        """Close the reader; with flush=True pending commit acks are awaited first.

        Safe to call multiple times - repeated calls are no-ops.
        """
        if self._closed:
            return

        self._closed = True

        self._caller.safe_call_with_result(self._async_reader.close(flush), timeout)

    def _check_closed(self):
        # Guard for all public methods: reject calls after close().
        if self._closed:
            raise TopicReaderClosedError()
diff --git a/contrib/python/ydb/py3/ydb/_topic_writer/__init__.py b/contrib/python/ydb/py3/ydb/_topic_writer/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_topic_writer/__init__.py
diff --git a/contrib/python/ydb/py3/ydb/_topic_writer/topic_writer.py b/contrib/python/ydb/py3/ydb/_topic_writer/topic_writer.py
new file mode 100644
index 0000000000..527bf03eac
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_topic_writer/topic_writer.py
@@ -0,0 +1,287 @@
+import concurrent.futures
+import datetime
+import enum
+import itertools
+import uuid
+from dataclasses import dataclass
+from enum import Enum
+from typing import List, Union, Optional, Any, Dict
+
+import typing
+
+import ydb.aio
+from .._grpc.grpcwrapper.ydb_topic import StreamWriteMessage
+from .._grpc.grpcwrapper.common_utils import IToProto
+from .._grpc.grpcwrapper.ydb_topic_public_types import PublicCodec
+from .. import connection
+
+Message = typing.Union["PublicMessage", "PublicMessage.SimpleMessageSourceType"]
+
+
+@dataclass
+class PublicWriterSettings:
+ """
+ Settings for topic writer.
+
+ order of fields IS NOT stable, use keywords only
+ """
+
+ topic: str
+ producer_id: Optional[str] = None
+ session_metadata: Optional[Dict[str, str]] = None
+ partition_id: Optional[int] = None
+ auto_seqno: bool = True
+ auto_created_at: bool = True
+    codec: Optional[PublicCodec] = None # default means auto-select
+ encoder_executor: Optional[concurrent.futures.Executor] = None # default shared client executor pool
+ encoders: Optional[typing.Mapping[PublicCodec, typing.Callable[[bytes], bytes]]] = None
+ update_token_interval: Union[int, float] = 3600
+
+ def __post_init__(self):
+ if self.producer_id is None:
+ self.producer_id = uuid.uuid4().hex
+
+
+@dataclass
+class PublicWriteResult:
+ @dataclass(eq=True)
+ class Written:
+ __slots__ = "offset"
+ offset: int
+
+ @dataclass(eq=True)
+ class Skipped:
+ pass
+
+
+PublicWriteResultTypes = Union[PublicWriteResult.Written, PublicWriteResult.Skipped]
+
+
+class WriterSettings(PublicWriterSettings):
+ def __init__(self, settings: PublicWriterSettings):
+ self.__dict__ = settings.__dict__.copy()
+
+ def create_init_request(self) -> StreamWriteMessage.InitRequest:
+ return StreamWriteMessage.InitRequest(
+ path=self.topic,
+ producer_id=self.producer_id,
+ write_session_meta=self.session_metadata,
+ partitioning=self.get_partitioning(),
+ get_last_seq_no=True,
+ )
+
+ def get_partitioning(self) -> StreamWriteMessage.PartitioningType:
+ if self.partition_id is not None:
+ return StreamWriteMessage.PartitioningPartitionID(self.partition_id)
+ return StreamWriteMessage.PartitioningMessageGroupID(self.producer_id)
+
+
+class SendMode(Enum):
+ ASYNC = 1
+ SYNC = 2
+
+
+@dataclass
+class PublicWriterInitInfo:
+ __slots__ = ("last_seqno", "supported_codecs")
+ last_seqno: Optional[int]
+ supported_codecs: List[PublicCodec]
+
+
+class PublicMessage:
+ seqno: Optional[int]
+ created_at: Optional[datetime.datetime]
+ data: "PublicMessage.SimpleMessageSourceType"
+
+    SimpleMessageSourceType = Union[str, bytes] # Will be extended
+
+ def __init__(
+ self,
+ data: SimpleMessageSourceType,
+ *,
+ seqno: Optional[int] = None,
+ created_at: Optional[datetime.datetime] = None,
+ ):
+ self.seqno = seqno
+ self.created_at = created_at
+ self.data = data
+
+ @staticmethod
+ def _create_message(data: Message) -> "PublicMessage":
+ if isinstance(data, PublicMessage):
+ return data
+ return PublicMessage(data=data)
+
+
+class InternalMessage(StreamWriteMessage.WriteRequest.MessageData, IToProto):
+ codec: PublicCodec
+
+ def __init__(self, mess: PublicMessage):
+ super().__init__(
+ seq_no=mess.seqno,
+ created_at=mess.created_at,
+ data=mess.data,
+ uncompressed_size=len(mess.data),
+ partitioning=None,
+ )
+ self.codec = PublicCodec.RAW
+
+ def get_bytes(self) -> bytes:
+ if self.data is None:
+ return bytes()
+ if isinstance(self.data, bytes):
+ return self.data
+ if isinstance(self.data, str):
+ return self.data.encode("utf-8")
+ raise ValueError("Bad data type")
+
+ def to_message_data(self) -> StreamWriteMessage.WriteRequest.MessageData:
+ data = self.get_bytes()
+ return StreamWriteMessage.WriteRequest.MessageData(
+ seq_no=self.seq_no,
+ created_at=self.created_at,
+ data=data,
+ uncompressed_size=len(data),
+ partitioning=None, # unsupported by server now
+ )
+
+
+class MessageSendResult:
+ offset: Optional[int]
+ write_status: "MessageWriteStatus"
+
+
+class MessageWriteStatus(enum.Enum):
+ Written = 1
+ AlreadyWritten = 2
+
+
+class RetryPolicy:
+ connection_timeout_sec: float
+ overload_timeout_sec: float
+ retry_access_denied: bool = False
+
+
+class TopicWriterError(ydb.Error):
+ def __init__(self, message: str):
+ super(TopicWriterError, self).__init__(message)
+
+
+class TopicWriterClosedError(ydb.Error):
+ def __init__(self):
+ super().__init__("Topic writer already closed")
+
+
+class TopicWriterRepeatableError(TopicWriterError):
+ pass
+
+
+class TopicWriterStopped(TopicWriterError):
+ def __init__(self):
+ super(TopicWriterStopped, self).__init__("topic writer was stopped by call close")
+
+
+def default_serializer_message_content(data: Any) -> bytes:
+ if data is None:
+ return bytes()
+ if isinstance(data, bytes):
+ return data
+ if isinstance(data, bytearray):
+ return bytes(data)
+ if isinstance(data, str):
+ return data.encode(encoding="utf-8")
+ raise ValueError("can't serialize type %s to bytes" % type(data))
+
+
+def messages_to_proto_requests(
+ messages: List[InternalMessage],
+) -> List[StreamWriteMessage.FromClient]:
+
+ gropus = _slit_messages_for_send(messages)
+
+ res = [] # type: List[StreamWriteMessage.FromClient]
+ for group in gropus:
+ req = StreamWriteMessage.FromClient(
+ StreamWriteMessage.WriteRequest(
+ messages=list(map(InternalMessage.to_message_data, group)),
+ codec=group[0].codec,
+ )
+ )
+ res.append(req)
+ return res
+
+
+_max_int = 2**63 - 1
+
+_message_data_overhead = (
+ StreamWriteMessage.FromClient(
+ StreamWriteMessage.WriteRequest(
+ messages=[
+ StreamWriteMessage.WriteRequest.MessageData(
+ seq_no=_max_int,
+ created_at=datetime.datetime(3000, 1, 1, 1, 1, 1, 1),
+ data=bytes(1),
+ uncompressed_size=_max_int,
+ partitioning=StreamWriteMessage.PartitioningMessageGroupID(
+ message_group_id="a" * 100,
+ ),
+ ),
+ ],
+ codec=20000,
+ )
+ )
+ .to_proto()
+ .ByteSize()
+)
+
+
+def _slit_messages_for_send(
+ messages: List[InternalMessage],
+) -> List[List[InternalMessage]]:
+ codec_groups = [] # type: List[List[InternalMessage]]
+ for _, messages in itertools.groupby(messages, lambda x: x.codec):
+ codec_groups.append(list(messages))
+
+ res = [] # type: List[List[InternalMessage]]
+ for codec_group in codec_groups:
+ group_by_size = _split_messages_by_size_with_default_overhead(codec_group)
+ res.extend(group_by_size)
+ return res
+
+
+def _split_messages_by_size_with_default_overhead(
+ messages: List[InternalMessage],
+) -> List[List[InternalMessage]]:
+ def get_message_size(msg: InternalMessage):
+ return len(msg.data) + _message_data_overhead
+
+ return _split_messages_by_size(messages, connection._DEFAULT_MAX_GRPC_MESSAGE_SIZE, get_message_size)
+
+
+def _split_messages_by_size(
+ messages: List[InternalMessage],
+ split_size: int,
+ get_msg_size: typing.Callable[[InternalMessage], int],
+) -> List[List[InternalMessage]]:
+ res = []
+ group = []
+ group_size = 0
+
+ for msg in messages:
+ msg_size = get_msg_size(msg)
+
+ if len(group) == 0:
+ group.append(msg)
+ group_size += msg_size
+ elif group_size + msg_size <= split_size:
+ group.append(msg)
+ group_size += msg_size
+ else:
+ res.append(group)
+ group = [msg]
+ group_size = msg_size
+
+ if len(group) > 0:
+ res.append(group)
+
+ return res
diff --git a/contrib/python/ydb/py3/ydb/_topic_writer/topic_writer_asyncio.py b/contrib/python/ydb/py3/ydb/_topic_writer/topic_writer_asyncio.py
new file mode 100644
index 0000000000..dd969c7e8e
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_topic_writer/topic_writer_asyncio.py
@@ -0,0 +1,677 @@
+import asyncio
+import concurrent.futures
+import datetime
+import functools
+import gzip
+import typing
+from collections import deque
+from typing import Deque, AsyncIterator, Union, List, Optional, Dict, Callable
+
+import logging
+
+import ydb
+from .topic_writer import (
+ PublicWriterSettings,
+ WriterSettings,
+ PublicMessage,
+ PublicWriterInitInfo,
+ InternalMessage,
+ TopicWriterStopped,
+ TopicWriterError,
+ messages_to_proto_requests,
+ PublicWriteResult,
+ PublicWriteResultTypes,
+ Message,
+)
+from .. import (
+ _apis,
+ issues,
+ check_retriable_error,
+ RetrySettings,
+)
+from .._grpc.grpcwrapper.ydb_topic_public_types import PublicCodec
+from .._grpc.grpcwrapper.ydb_topic import (
+ UpdateTokenRequest,
+ UpdateTokenResponse,
+ StreamWriteMessage,
+ WriterMessagesFromServerToClient,
+)
+from .._grpc.grpcwrapper.common_utils import (
+ IGrpcWrapperAsyncIO,
+ SupportedDriverType,
+ GrpcWrapperAsyncIO,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class WriterAsyncIO:
+ _loop: asyncio.AbstractEventLoop
+ _reconnector: "WriterAsyncIOReconnector"
+ _closed: bool
+ _parent: typing.Any # need for prevent close parent client by GC
+
+ def __init__(
+ self,
+ driver: SupportedDriverType,
+ settings: PublicWriterSettings,
+ _client=None,
+ ):
+ self._loop = asyncio.get_running_loop()
+ self._closed = False
+ self._reconnector = WriterAsyncIOReconnector(driver=driver, settings=WriterSettings(settings))
+ self._parent = _client
+
+ async def __aenter__(self) -> "WriterAsyncIO":
+ return self
+
+ async def __aexit__(self, exc_type, exc_val, exc_tb):
+ try:
+ await self.close()
+ except BaseException:
+ if exc_val is None:
+ raise
+
+ def __del__(self):
+ if self._closed or self._loop.is_closed():
+ return
+
+ self._loop.call_soon(functools.partial(self.close, flush=False))
+
+ async def close(self, *, flush: bool = True):
+ if self._closed:
+ return
+
+ self._closed = True
+
+ await self._reconnector.close(flush)
+
+ async def write_with_ack(
+ self,
+ messages: Union[Message, List[Message]],
+ ) -> Union[PublicWriteResultTypes, List[PublicWriteResultTypes]]:
+ """
+        IT IS A SLOW WAY. IT IS A BAD CHOICE IN MOST CASES.
+        It is recommended to use write with an optional flush, or write_with_ack_future, and receive acks by waiting on the futures.
+
+ send one or number of messages to server and wait acks.
+
+ For wait with timeout use asyncio.wait_for.
+ """
+ futures = await self.write_with_ack_future(messages)
+ if not isinstance(futures, list):
+ futures = [futures]
+
+ await asyncio.wait(futures)
+ results = [f.result() for f in futures]
+
+ return results if isinstance(messages, list) else results[0]
+
+ async def write_with_ack_future(
+ self,
+ messages: Union[Message, List[Message]],
+ ) -> Union[asyncio.Future, List[asyncio.Future]]:
+ """
+        send one or a number of messages to the server.
+        return a future, which can be awaited to check the send result.
+
+ Usually it is fast method, but can wait if internal buffer is full.
+
+ For wait with timeout use asyncio.wait_for.
+ """
+ input_single_message = not isinstance(messages, list)
+ converted_messages = []
+ if isinstance(messages, list):
+ for m in messages:
+ converted_messages.append(PublicMessage._create_message(m))
+ else:
+ converted_messages = [PublicMessage._create_message(messages)]
+
+ futures = await self._reconnector.write_with_ack_future(converted_messages)
+ if input_single_message:
+ return futures[0]
+ else:
+ return futures
+
+ async def write(
+ self,
+ messages: Union[Message, List[Message]],
+ ):
+ """
+        send one or a number of messages to the server.
+        it puts the messages into the internal buffer
+
+ For wait with timeout use asyncio.wait_for.
+ """
+ await self.write_with_ack_future(messages)
+
+ async def flush(self):
+ """
+ Force send all messages from internal buffer and wait acks from server for all
+ messages.
+
+ For wait with timeout use asyncio.wait_for.
+ """
+ return await self._reconnector.flush()
+
+ async def wait_init(self) -> PublicWriterInitInfo:
+ """
+        wait until a real connection to the server is established.
+
+ For wait with timeout use asyncio.wait_for()
+ """
+ return await self._reconnector.wait_init()
+
+
+class WriterAsyncIOReconnector:
+ _closed: bool
+ _loop: asyncio.AbstractEventLoop
+ _credentials: Union[ydb.credentials.Credentials, None]
+ _driver: ydb.aio.Driver
+ _init_message: StreamWriteMessage.InitRequest
+ _stream_connected: asyncio.Event
+ _settings: WriterSettings
+ _codec: PublicCodec
+ _codec_functions: Dict[PublicCodec, Callable[[bytes], bytes]]
+ _encode_executor: Optional[concurrent.futures.Executor]
+ _codec_selector_batch_num: int
+ _codec_selector_last_codec: Optional[PublicCodec]
+ _codec_selector_check_batches_interval: int
+
+ if typing.TYPE_CHECKING:
+ _messages_for_encode: asyncio.Queue[List[InternalMessage]]
+ else:
+ _messages_for_encode: asyncio.Queue
+ _messages: Deque[InternalMessage]
+ _messages_future: Deque[asyncio.Future]
+ _new_messages: asyncio.Queue
+ _background_tasks: List[asyncio.Task]
+
+ _state_changed: asyncio.Event
+ if typing.TYPE_CHECKING:
+ _stop_reason: asyncio.Future[BaseException]
+ else:
+ _stop_reason: asyncio.Future
+ _init_info: Optional[PublicWriterInitInfo]
+
+ def __init__(self, driver: SupportedDriverType, settings: WriterSettings):
+ self._closed = False
+ self._loop = asyncio.get_running_loop()
+ self._driver = driver
+ self._credentials = driver._credentials
+ self._init_message = settings.create_init_request()
+ self._new_messages = asyncio.Queue()
+ self._init_info = None
+ self._stream_connected = asyncio.Event()
+ self._settings = settings
+
+ self._codec_functions = {
+ PublicCodec.RAW: lambda data: data,
+ PublicCodec.GZIP: gzip.compress,
+ }
+
+ if settings.encoders:
+ self._codec_functions.update(settings.encoders)
+
+ self._encode_executor = settings.encoder_executor
+
+ self._codec_selector_batch_num = 0
+ self._codec_selector_last_codec = None
+ self._codec_selector_check_batches_interval = 10000
+
+ self._codec = self._settings.codec
+ if self._codec and self._codec not in self._codec_functions:
+ known_codecs = sorted(self._codec_functions.keys())
+ raise ValueError("Unknown codec for writer: %s, supported codecs: %s" % (self._codec, known_codecs))
+
+ self._last_known_seq_no = 0
+ self._messages_for_encode = asyncio.Queue()
+ self._messages = deque()
+ self._messages_future = deque()
+ self._new_messages = asyncio.Queue()
+ self._stop_reason = self._loop.create_future()
+ self._background_tasks = [
+ asyncio.create_task(self._connection_loop(), name="connection_loop"),
+ asyncio.create_task(self._encode_loop(), name="encode_loop"),
+ ]
+
+ self._state_changed = asyncio.Event()
+
+ async def close(self, flush: bool):
+ if self._closed:
+ return
+ self._closed = True
+ logger.debug("Close writer reconnector")
+
+ if flush:
+ await self.flush()
+
+ self._stop(TopicWriterStopped())
+
+ for task in self._background_tasks:
+ task.cancel()
+ await asyncio.wait(self._background_tasks)
+
+ # if work was stopped before close by error - raise the error
+ try:
+ self._check_stop()
+ except TopicWriterStopped:
+ pass
+
+ async def wait_init(self) -> PublicWriterInitInfo:
+ while True:
+ if self._stop_reason.done():
+ raise self._stop_reason.exception()
+
+ if self._init_info:
+ return self._init_info
+
+ await self._state_changed.wait()
+
+ async def wait_stop(self) -> BaseException:
+ try:
+ await self._stop_reason
+ except BaseException as stop_reason:
+ return stop_reason
+
+ async def write_with_ack_future(self, messages: List[PublicMessage]) -> List[asyncio.Future]:
+ # todo check internal buffer limit
+ self._check_stop()
+
+ if self._settings.auto_seqno:
+ await self.wait_init()
+
+ internal_messages = self._prepare_internal_messages(messages)
+ messages_future = [self._loop.create_future() for _ in internal_messages]
+
+ self._messages_future.extend(messages_future)
+
+ if self._codec == PublicCodec.RAW:
+ self._add_messages_to_send_queue(internal_messages)
+ else:
+ self._messages_for_encode.put_nowait(internal_messages)
+
+ return messages_future
+
+ def _add_messages_to_send_queue(self, internal_messages: List[InternalMessage]):
+ self._messages.extend(internal_messages)
+ for m in internal_messages:
+ self._new_messages.put_nowait(m)
+
+ def _prepare_internal_messages(self, messages: List[PublicMessage]) -> List[InternalMessage]:
+ if self._settings.auto_created_at:
+ now = datetime.datetime.now()
+ else:
+ now = None
+
+ res = []
+ for m in messages:
+ internal_message = InternalMessage(m)
+ if self._settings.auto_seqno:
+ if internal_message.seq_no is None:
+ self._last_known_seq_no += 1
+ internal_message.seq_no = self._last_known_seq_no
+ else:
+ raise TopicWriterError("Explicit seqno and auto_seq setting is mutual exclusive")
+ else:
+ if internal_message.seq_no is None or internal_message.seq_no == 0:
+ raise TopicWriterError("Empty seqno and auto_seq setting is disabled")
+ elif internal_message.seq_no <= self._last_known_seq_no:
+ raise TopicWriterError("Message seqno is duplicated: %s" % internal_message.seq_no)
+ else:
+ self._last_known_seq_no = internal_message.seq_no
+
+ if self._settings.auto_created_at:
+ if internal_message.created_at is not None:
+ raise TopicWriterError(
+ "Explicit set auto_created_at and setting auto_created_at is mutual exclusive"
+ )
+ else:
+ internal_message.created_at = now
+
+ res.append(internal_message)
+
+ return res
+
+ def _check_stop(self):
+ if self._stop_reason.done():
+ raise self._stop_reason.exception()
+
+ async def _connection_loop(self):
+ retry_settings = RetrySettings() # todo
+
+ while True:
+ attempt = 0 # todo calc and reset
+ tasks = []
+
+ # noinspection PyBroadException
+ stream_writer = None
+ try:
+ stream_writer = await WriterAsyncIOStream.create(
+ self._driver,
+ self._init_message,
+ self._settings.update_token_interval,
+ )
+ try:
+ if self._init_info is None:
+ self._last_known_seq_no = stream_writer.last_seqno
+ self._init_info = PublicWriterInitInfo(
+ last_seqno=stream_writer.last_seqno,
+ supported_codecs=stream_writer.supported_codecs,
+ )
+ self._state_changed.set()
+
+ except asyncio.InvalidStateError:
+ pass
+
+ self._stream_connected.set()
+
+ send_loop = asyncio.create_task(self._send_loop(stream_writer), name="writer send loop")
+ receive_loop = asyncio.create_task(self._read_loop(stream_writer), name="writer receive loop")
+
+ tasks = [send_loop, receive_loop]
+ done, _ = await asyncio.wait([send_loop, receive_loop], return_when=asyncio.FIRST_COMPLETED)
+ done.pop().result() # need for raise exception - reason of stop task
+ except issues.Error as err:
+ err_info = check_retriable_error(err, retry_settings, attempt)
+ if not err_info.is_retriable:
+ self._stop(err)
+ return
+
+ await asyncio.sleep(err_info.sleep_timeout_seconds)
+
+ except (asyncio.CancelledError, Exception) as err:
+ self._stop(err)
+ return
+ finally:
+ for task in tasks:
+ task.cancel()
+ if tasks:
+ await asyncio.wait(tasks)
+ if stream_writer:
+ await stream_writer.close()
+
+ async def _encode_loop(self):
+ try:
+ while True:
+ messages = await self._messages_for_encode.get()
+ while not self._messages_for_encode.empty():
+ messages.extend(self._messages_for_encode.get_nowait())
+
+ batch_codec = await self._codec_selector(messages)
+ await self._encode_data_inplace(batch_codec, messages)
+ self._add_messages_to_send_queue(messages)
+ except BaseException as err:
+ self._stop(err)
+
+ async def _encode_data_inplace(self, codec: PublicCodec, messages: List[InternalMessage]):
+ if codec == PublicCodec.RAW:
+ return
+
+ eventloop = asyncio.get_running_loop()
+ encode_waiters = []
+ encoder_function = self._codec_functions[codec]
+
+ for message in messages:
+ encoded_data_futures = eventloop.run_in_executor(
+ self._encode_executor, encoder_function, message.get_bytes()
+ )
+ encode_waiters.append(encoded_data_futures)
+
+ encoded_datas = await asyncio.gather(*encode_waiters)
+
+ for index, data in enumerate(encoded_datas):
+ message = messages[index]
+ message.codec = codec
+ message.data = data
+
+ async def _codec_selector(self, messages: List[InternalMessage]) -> PublicCodec:
+ if self._codec is not None:
+ return self._codec
+
+ if self._codec_selector_last_codec is None:
+ available_codecs = await self._get_available_codecs()
+
+                # use each of the available encoders at start to prevent problems
+                # with rarely used encoders (on the writer or reader side)
+ if self._codec_selector_batch_num < len(available_codecs):
+ codec = available_codecs[self._codec_selector_batch_num]
+ else:
+ codec = await self._codec_selector_by_check_compress(messages)
+ self._codec_selector_last_codec = codec
+ else:
+ if self._codec_selector_batch_num % self._codec_selector_check_batches_interval == 0:
+ self._codec_selector_last_codec = await self._codec_selector_by_check_compress(messages)
+ codec = self._codec_selector_last_codec
+ self._codec_selector_batch_num += 1
+ return codec
+
+ async def _get_available_codecs(self) -> List[PublicCodec]:
+ info = await self.wait_init()
+ topic_supported_codecs = info.supported_codecs
+ if not topic_supported_codecs:
+ topic_supported_codecs = [PublicCodec.RAW, PublicCodec.GZIP]
+
+ res = []
+ for codec in topic_supported_codecs:
+ if codec in self._codec_functions:
+ res.append(codec)
+
+ if not res:
+ raise TopicWriterError("Writer does not support topic's codecs")
+
+ res.sort()
+
+ return res
+
+ async def _codec_selector_by_check_compress(self, messages: List[InternalMessage]) -> PublicCodec:
+ """
+ Try to compress messages and choose codec with the smallest result size.
+ """
+
+ test_messages = messages[:10]
+
+ available_codecs = await self._get_available_codecs()
+ if len(available_codecs) == 1:
+ return available_codecs[0]
+
+ def get_compressed_size(codec) -> int:
+ s = 0
+ f = self._codec_functions[codec]
+
+ for m in test_messages:
+ encoded = f(m.get_bytes())
+ s += len(encoded)
+
+ return s
+
+ def select_codec() -> PublicCodec:
+ min_codec = available_codecs[0]
+ min_size = get_compressed_size(min_codec)
+ for codec in available_codecs[1:]:
+ size = get_compressed_size(codec)
+ if size < min_size:
+ min_codec = codec
+ min_size = size
+ return min_codec
+
+ loop = asyncio.get_running_loop()
+ codec = await loop.run_in_executor(self._encode_executor, select_codec)
+ return codec
+
+ async def _read_loop(self, writer: "WriterAsyncIOStream"):
+ while True:
+ resp = await writer.receive()
+
+ for ack in resp.acks:
+ self._handle_receive_ack(ack)
+
+ def _handle_receive_ack(self, ack):
+ current_message = self._messages.popleft()
+ message_future = self._messages_future.popleft()
+ if current_message.seq_no != ack.seq_no:
+ raise TopicWriterError(
+ "internal error - receive unexpected ack. Expected seqno: %s, received seqno: %s"
+ % (current_message.seq_no, ack.seq_no)
+ )
+ write_ack_msg = StreamWriteMessage.WriteResponse.WriteAck
+ status = ack.message_write_status
+ if isinstance(status, write_ack_msg.StatusSkipped):
+ result = PublicWriteResult.Skipped()
+ elif isinstance(status, write_ack_msg.StatusWritten):
+ result = PublicWriteResult.Written(offset=status.offset)
+ else:
+ raise TopicWriterError("internal error - receive unexpected ack message.")
+ message_future.set_result(result)
+
+ async def _send_loop(self, writer: "WriterAsyncIOStream"):
+ try:
+ messages = list(self._messages)
+
+ last_seq_no = 0
+ for m in messages:
+ writer.write([m])
+ last_seq_no = m.seq_no
+
+ while True:
+ m = await self._new_messages.get() # type: InternalMessage
+ if m.seq_no > last_seq_no:
+ writer.write([m])
+ except Exception as e:
+ self._stop(e)
+ raise
+
+ def _stop(self, reason: BaseException):
+ if reason is None:
+ raise Exception("writer stop reason can not be None")
+
+ if self._stop_reason.done():
+ return
+
+ self._stop_reason.set_exception(reason)
+
+ for f in self._messages_future:
+ f.set_exception(reason)
+
+ self._state_changed.set()
+ logger.info("Stop topic writer: %s" % reason)
+
+ async def flush(self):
+ if not self._messages_future:
+ return
+
+ # wait last message
+ await asyncio.wait(self._messages_future)
+
+
+class WriterAsyncIOStream:
+ # todo slots
+ _closed: bool
+
+ last_seqno: int
+ supported_codecs: Optional[List[PublicCodec]]
+
+ _stream: IGrpcWrapperAsyncIO
+ _requests: asyncio.Queue
+ _responses: AsyncIterator
+
+ _update_token_interval: Optional[Union[int, float]]
+ _update_token_task: Optional[asyncio.Task]
+ _update_token_event: asyncio.Event
+ _get_token_function: Optional[Callable[[], str]]
+
+ def __init__(
+ self,
+ update_token_interval: Optional[Union[int, float]] = None,
+ get_token_function: Optional[Callable[[], str]] = None,
+ ):
+ self._closed = False
+
+ self._update_token_interval = update_token_interval
+ self._get_token_function = get_token_function
+ self._update_token_event = asyncio.Event()
+ self._update_token_task = None
+
+ async def close(self):
+ if self._closed:
+ return
+ self._closed = True
+
+ if self._update_token_task:
+ self._update_token_task.cancel()
+ await asyncio.wait([self._update_token_task])
+
+ self._stream.close()
+
+ @staticmethod
+ async def create(
+ driver: SupportedDriverType,
+ init_request: StreamWriteMessage.InitRequest,
+ update_token_interval: Optional[Union[int, float]] = None,
+ ) -> "WriterAsyncIOStream":
+ stream = GrpcWrapperAsyncIO(StreamWriteMessage.FromServer.from_proto)
+
+ await stream.start(driver, _apis.TopicService.Stub, _apis.TopicService.StreamWrite)
+
+ creds = driver._credentials
+ writer = WriterAsyncIOStream(
+ update_token_interval=update_token_interval,
+ get_token_function=creds.get_auth_token if creds else lambda: "",
+ )
+ await writer._start(stream, init_request)
+ return writer
+
+ async def receive(self) -> StreamWriteMessage.WriteResponse:
+ while True:
+ item = await self._stream.receive()
+
+ if isinstance(item, StreamWriteMessage.WriteResponse):
+ return item
+ if isinstance(item, UpdateTokenResponse):
+ self._update_token_event.set()
+ continue
+
+ # todo log unknown messages instead of raise exception
+ raise Exception("Unknown message while read writer answers: %s" % item)
+
+ async def _start(self, stream: IGrpcWrapperAsyncIO, init_message: StreamWriteMessage.InitRequest):
+ stream.write(StreamWriteMessage.FromClient(init_message))
+
+ resp = await stream.receive()
+ self._ensure_ok(resp)
+ if not isinstance(resp, StreamWriteMessage.InitResponse):
+ raise TopicWriterError("Unexpected answer for init request: %s" % resp)
+
+ self.last_seqno = resp.last_seq_no
+ self.supported_codecs = [PublicCodec(codec) for codec in resp.supported_codecs]
+
+ self._stream = stream
+
+ if self._update_token_interval is not None:
+ self._update_token_event.set()
+ self._update_token_task = asyncio.create_task(self._update_token_loop(), name="update_token_loop")
+
+ @staticmethod
+ def _ensure_ok(message: WriterMessagesFromServerToClient):
+ if not message.status.is_success():
+ raise TopicWriterError(f"status error from server in writer: {message.status}")
+
+ def write(self, messages: List[InternalMessage]):
+ if self._closed:
+ raise RuntimeError("Can not write on closed stream.")
+
+ for request in messages_to_proto_requests(messages):
+ self._stream.write(request)
+
+ async def _update_token_loop(self):
+ while True:
+ await asyncio.sleep(self._update_token_interval)
+ await self._update_token(token=self._get_token_function())
+
+ async def _update_token(self, token: str):
+ await self._update_token_event.wait()
+ try:
+ msg = StreamWriteMessage.FromClient(UpdateTokenRequest(token))
+ self._stream.write(msg)
+ finally:
+ self._update_token_event.clear()
diff --git a/contrib/python/ydb/py3/ydb/_topic_writer/topic_writer_sync.py b/contrib/python/ydb/py3/ydb/_topic_writer/topic_writer_sync.py
new file mode 100644
index 0000000000..a5193caf7c
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_topic_writer/topic_writer_sync.py
@@ -0,0 +1,124 @@
+from __future__ import annotations
+
+import asyncio
+import typing
+from concurrent.futures import Future
+from typing import Union, List, Optional
+
+from .._grpc.grpcwrapper.common_utils import SupportedDriverType
+from .topic_writer import (
+ PublicWriterSettings,
+ PublicWriterInitInfo,
+ PublicWriteResult,
+ Message,
+ TopicWriterClosedError,
+)
+
+from .topic_writer_asyncio import WriterAsyncIO
+from .._topic_common.common import (
+ _get_shared_event_loop,
+ TimeoutType,
+ CallFromSyncToAsync,
+)
+
+
+class WriterSync:
+ _caller: CallFromSyncToAsync
+ _async_writer: WriterAsyncIO
+ _closed: bool
+ _parent: typing.Any # need for prevent close parent client by GC
+
+ def __init__(
+ self,
+ driver: SupportedDriverType,
+ settings: PublicWriterSettings,
+ *,
+ eventloop: Optional[asyncio.AbstractEventLoop] = None,
+ _parent=None,
+ ):
+
+ self._closed = False
+
+ if eventloop:
+ loop = eventloop
+ else:
+ loop = _get_shared_event_loop()
+
+ self._caller = CallFromSyncToAsync(loop)
+
+ async def create_async_writer():
+ return WriterAsyncIO(driver, settings)
+
+ self._async_writer = self._caller.safe_call_with_result(create_async_writer(), None)
+ self._parent = _parent
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ try:
+ self.close()
+ except BaseException:
+ if exc_val is None:
+ raise
+
+ def __del__(self):
+ self.close(flush=False)
+
+ def close(self, *, flush: bool = True, timeout: TimeoutType = None):
+ if self._closed:
+ return
+
+ self._closed = True
+
+ self._caller.safe_call_with_result(self._async_writer.close(flush=flush), timeout)
+
+ def _check_closed(self):
+ if self._closed:
+ raise TopicWriterClosedError()
+
+ def async_flush(self) -> Future:
+ self._check_closed()
+
+ return self._caller.unsafe_call_with_future(self._async_writer.flush())
+
+ def flush(self, *, timeout=None):
+ self._check_closed()
+
+ return self._caller.unsafe_call_with_result(self._async_writer.flush(), timeout)
+
+ def async_wait_init(self) -> Future[PublicWriterInitInfo]:
+ self._check_closed()
+
+ return self._caller.unsafe_call_with_future(self._async_writer.wait_init())
+
+ def wait_init(self, *, timeout: TimeoutType = None) -> PublicWriterInitInfo:
+ self._check_closed()
+
+ return self._caller.unsafe_call_with_result(self._async_writer.wait_init(), timeout)
+
+ def write(
+ self,
+ messages: Union[Message, List[Message]],
+ timeout: TimeoutType = None,
+ ):
+ self._check_closed()
+
+ self._caller.safe_call_with_result(self._async_writer.write(messages), timeout)
+
+ def async_write_with_ack(
+ self,
+ messages: Union[Message, List[Message]],
+ ) -> Future[Union[PublicWriteResult, List[PublicWriteResult]]]:
+ self._check_closed()
+
+ return self._caller.unsafe_call_with_future(self._async_writer.write_with_ack(messages))
+
+ def write_with_ack(
+ self,
+ messages: Union[Message, List[Message]],
+ timeout: Union[float, None] = None,
+ ) -> Union[PublicWriteResult, List[PublicWriteResult]]:
+ self._check_closed()
+
+ return self._caller.unsafe_call_with_result(self._async_writer.write_with_ack(messages), timeout=timeout)
diff --git a/contrib/python/ydb/py3/ydb/_tx_ctx_impl.py b/contrib/python/ydb/py3/ydb/_tx_ctx_impl.py
new file mode 100644
index 0000000000..3c7eec39e6
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_tx_ctx_impl.py
@@ -0,0 +1,171 @@
+from . import issues, _session_impl, _apis, types, convert
+import functools
+
+
def reset_tx_id_handler(func):
    """Decorate a response handler so that any YDB error invalidates the
    transaction (clears tx_id, marks it dead) before re-raising."""

    @functools.wraps(func)
    def wrapped(rpc_state, response_pb, session_state, tx_state, *args, **kwargs):
        try:
            return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs)
        except issues.Error:
            # The transaction can no longer be used after a server error.
            tx_state.tx_id = None
            tx_state.dead = True
            raise

    return wrapped
+
+
def not_found_handler(func):
    """Decorate a response handler so a NotFound error evicts the query from
    the session's prepared-query cache before re-raising."""

    @functools.wraps(func)
    def wrapped(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs):
        try:
            return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs)
        except issues.NotFound:
            # The server forgot the prepared query; drop our cached id too.
            session_state.erase(query)
            raise

    return wrapped
+
+
def wrap_tx_factory_handler(func):
    """Decorate a request factory so it refuses to build requests on a
    transaction that was already invalidated."""

    @functools.wraps(func)
    def wrapped(session_state, tx_state, *args, **kwargs):
        if tx_state.dead:
            raise issues.PreconditionFailed("Failed to perform action on broken transaction context!")
        return func(session_state, tx_state, *args, **kwargs)

    return wrapped
+
+
@_session_impl.bad_session_handler
@reset_tx_id_handler
def wrap_result_on_rollback_or_commit_tx(rpc_state, response_pb, session_state, tx_state, tx):
    """Response handler for commit/rollback: validate the operation status,
    clear the transaction id and return the original tx wrapper."""
    session_state.complete_query()
    issues._process_response(response_pb.operation)
    # transaction successfully committed or rolled back
    tx_state.tx_id = None
    return tx
+
+
@_session_impl.bad_session_handler
def wrap_tx_begin_response(rpc_state, response_pb, session_state, tx_state, tx):
    """Response handler for BeginTransaction: store the server-assigned
    transaction id on tx_state and return the tx wrapper."""
    session_state.complete_query()
    issues._process_response(response_pb.operation)
    message = _apis.ydb_table.BeginTransactionResult()
    response_pb.operation.result.Unpack(message)
    tx_state.tx_id = message.tx_meta.id
    return tx
+
+
@wrap_tx_factory_handler
def begin_request_factory(session_state, tx_state):
    """Build a BeginTransactionRequest carrying the configured tx mode."""
    request = session_state.start_query().attach_request(_apis.ydb_table.BeginTransactionRequest())
    request.tx_settings.MergeFrom(_construct_tx_settings(tx_state))
    return request
+
+
@wrap_tx_factory_handler
def rollback_request_factory(session_state, tx_state):
    """Build a RollbackTransactionRequest for the active transaction."""
    rollback_pb = _apis.ydb_table.RollbackTransactionRequest()
    rollback_pb.tx_id = tx_state.tx_id
    return session_state.start_query().attach_request(rollback_pb)
+
+
@wrap_tx_factory_handler
def commit_request_factory(session_state, tx_state):
    """Build a CommitTransactionRequest for the active transaction."""
    commit_pb = _apis.ydb_table.CommitTransactionRequest()
    commit_pb.tx_id = tx_state.tx_id
    return session_state.start_query().attach_request(commit_pb)
+
+
class TxState(object):
    __slots__ = ("tx_id", "tx_mode", "dead", "initialized")

    def __init__(self, tx_mode):
        """
        Holds transaction context manager info
        :param tx_mode: A mode of transaction
        """
        # Server-assigned id; None until the transaction is begun and again
        # after commit/rollback.
        self.tx_id = None
        self.tx_mode = tx_mode
        # Set to True by reset_tx_id_handler on server errors; request
        # factories then refuse to use this context.
        self.dead = False
        self.initialized = False
+
+
def _construct_tx_settings(tx_state):
    """Translate tx_state.tx_mode into a TransactionSettings protobuf."""
    settings_pb = _apis.ydb_table.TransactionSettings()
    # The mode name selects the matching oneof field on the settings message.
    getattr(settings_pb, tx_state.tx_mode.name).MergeFrom(tx_state.tx_mode.settings)
    return settings_pb
+
+
@wrap_tx_factory_handler
def execute_request_factory(session_state, tx_state, query, parameters, commit_tx, settings):
    """Build an ExecuteDataQueryRequest.

    Chooses between a cached prepared-query id and raw YQL text, decides
    whether the server should cache the compiled query, and attaches
    transaction control (an existing tx_id or an inline begin).
    """
    data_query, query_id = session_state.lookup(query)
    parameters_types = {}

    is_data_query = False

    if query_id is not None:
        # Query was prepared earlier: reference it by server-side id only.
        query_pb = _apis.ydb_table.Query(id=query_id)
        parameters_types = data_query.parameters_types
    else:
        if data_query is not None:
            # client cache disabled for send query text every time
            yql_text = data_query.yql_text
            parameters_types = data_query.parameters_types
            is_data_query = True
        elif isinstance(query, types.DataQuery):
            yql_text = query.yql_text
            parameters_types = query.parameters_types
            is_data_query = True
        else:
            # Plain string query.
            yql_text = query
        query_pb = _apis.ydb_table.Query(yql_text=yql_text)
    request = _apis.ydb_table.ExecuteDataQueryRequest(parameters=convert.parameters_to_pb(parameters_types, parameters))

    if query_id is not None:
        # SDK not send query text and nothing save to cache
        keep_in_cache = False
    elif settings is not None and hasattr(settings, "keep_in_cache"):
        # Explicit per-request override wins over the heuristics below.
        keep_in_cache = settings.keep_in_cache
    elif parameters:
        keep_in_cache = True
    elif is_data_query:
        keep_in_cache = True
    else:
        keep_in_cache = False

    if keep_in_cache:
        request.query_cache_policy.keep_in_cache = True

    request.query.MergeFrom(query_pb)
    tx_control = _apis.ydb_table.TransactionControl()
    tx_control.commit_tx = commit_tx
    if tx_state.tx_id is not None:
        tx_control.tx_id = tx_state.tx_id
    else:
        # No active transaction yet: ask the server to begin one inline.
        tx_control.begin_tx.MergeFrom(_construct_tx_settings(tx_state))
    request.tx_control.MergeFrom(tx_control)
    request = session_state.start_query().attach_request(request)
    return request
+
+
@_session_impl.bad_session_handler
@reset_tx_id_handler
@not_found_handler
def wrap_result_and_tx_id(rpc_state, response_pb, session_state, tx_state, query):
    """Response handler for ExecuteDataQuery: refresh the prepared-query
    cache and the transaction id, then return wrapped result sets."""
    session_state.complete_query()
    issues._process_response(response_pb.operation)
    message = _apis.ydb_table.ExecuteQueryResult()
    response_pb.operation.result.Unpack(message)
    if message.query_meta.id and isinstance(query, types.DataQuery):
        # The server prepared the query: remember its id for reuse.
        session_state.keep(query, message.query_meta.id)
    tx_state.tx_id = None if not message.tx_meta.id else message.tx_meta.id
    return convert.ResultSets(message.result_sets, session_state.table_client_settings)
diff --git a/contrib/python/ydb/py3/ydb/_utilities.py b/contrib/python/ydb/py3/ydb/_utilities.py
new file mode 100644
index 0000000000..e89b0af315
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/_utilities.py
@@ -0,0 +1,184 @@
+# -*- coding: utf-8 -*-
+import importlib.util
+import threading
+import codecs
+from concurrent import futures
+import functools
+import hashlib
+import collections
+import urllib.parse
+from . import ydb_version
+
+try:
+ from . import interceptor
+except ImportError:
+ interceptor = None
+
+
+_grpcs_protocol = "grpcs://"
+_grpc_protocol = "grpc://"
+
+
def wrap_result_in_future(result):
    """Return an already-completed Future holding *result*."""
    completed = futures.Future()
    completed.set_result(result)
    return completed
+
+
def wrap_exception_in_future(exc):
    """Return an already-failed Future holding *exc*."""
    failed = futures.Future()
    failed.set_exception(exc)
    return failed
+
+
def future():
    # Fresh pending Future; kept as a factory for symmetry with the
    # wrap_*_in_future helpers above.
    return futures.Future()
+
+
def x_ydb_sdk_build_info_header():
    # gRPC metadata pair identifying the SDK flavor and version to the server.
    return ("x-ydb-sdk-build-info", "ydb-python-sdk/" + ydb_version.VERSION)
+
+
def is_secure_protocol(endpoint):
    """Return True if *endpoint* explicitly requests TLS (grpcs:// scheme)."""
    # Consistency fix: reuse the module-level constant instead of duplicating
    # the "grpcs://" literal (matches wrap_endpoint / parse_connection_string).
    return endpoint.startswith(_grpcs_protocol)
+
+
def wrap_endpoint(endpoint):
    """Strip a leading grpc:// or grpcs:// scheme from *endpoint*, if any."""
    for scheme in (_grpcs_protocol, _grpc_protocol):
        if endpoint.startswith(scheme):
            return endpoint[len(scheme) :]
    return endpoint
+
+
def parse_connection_string(connection_string):
    """Split a YDB connection string into (endpoint, database).

    Accepts "grpc://host:port/?database=/path", "grpcs://..." or a bare
    "host:port/?database=/path"; grpcs is assumed when no scheme is given.

    :return: ("scheme://host:port", database_path)
    """
    cs = connection_string
    if not cs.startswith(_grpc_protocol) and not cs.startswith(_grpcs_protocol):
        # default is grpcs
        cs = _grpcs_protocol + cs

    # Bug fix: parse the normalized string (cs), not the raw input —
    # previously scheme-less connection strings were parsed incorrectly
    # because urlparse saw "host:port/..." without a scheme.
    p = urllib.parse.urlparse(cs)
    b = urllib.parse.parse_qs(p.query)
    database = b.get("database", [])
    assert len(database) > 0

    return p.scheme + "://" + p.netloc, database[0]
+
+
# Decorator: convert synchronous failures of an async-call helper into a
# failed Future, so callers never see a raised exception.
def wrap_async_call_exceptions(f):
    @functools.wraps(f)
    def wrapped(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except Exception as e:
            # Surface the failure through the returned Future instead.
            return wrap_exception_in_future(e)

    return wrapped
+
+
def check_module_exists(path: str) -> bool:
    """Return True if module *path* is importable, without importing it."""
    try:
        spec = importlib.util.find_spec(path)
    except ModuleNotFoundError:
        # Raised when a parent package of a dotted path is missing.
        return False
    return spec is not None
+
+
def get_query_hash(yql_text):
    """Return a stable sha256 hex digest of the query text.

    Accepts both bytes (decoded as utf-8) and str input.
    """
    try:
        text = str(yql_text, "utf-8")
    except TypeError:
        # Already a str (or other object): hash its string form.
        text = str(yql_text)
    return hashlib.sha256(text.encode("utf-8")).hexdigest()
+
+
class LRUCache(object):
    """Minimal least-recently-used cache on top of OrderedDict.

    get() refreshes an entry's recency; put() evicts the oldest entries
    once capacity is exceeded.
    """

    def __init__(self, capacity=1000):
        self.items = collections.OrderedDict()
        self.capacity = capacity

    def put(self, key, value):
        self.items[key] = value
        # Evict from the least-recently-used end until within capacity.
        while len(self.items) > self.capacity:
            self.items.popitem(last=False)

    def get(self, key, _default):
        if key not in self.items:
            return _default
        # Re-insert to mark the entry as most recently used.
        self.items[key] = self.items.pop(key)
        return self.items[key]

    def erase(self, key):
        # KeyError on a missing key, matching dict.pop without a default.
        self.items.pop(key)
+
+
def from_bytes(val):
    """
    Decode *val* from utf-8 bytes into a str.

    Values that are not bytes-like (already a str, None, ...) are returned
    unchanged.
    :param val: A value to translate
    :return: A valid utf8 string (or the original value)
    """
    try:
        return codecs.decode(val, "utf8")
    except (UnicodeEncodeError, TypeError):
        # Not bytes-like: pass through untouched.
        return val
+
+
class AsyncResponseIterator(object):
    """Iterates an async gRPC stream call, resolving each response through
    the interceptor and the user-supplied wrapper."""

    def __init__(self, it, wrapper):
        self.it = it
        self.wrapper = wrapper

    def cancel(self):
        # Cancel the underlying call; self is returned for chaining.
        self.it.cancel()
        return self

    def __iter__(self):
        return self

    def __next__(self):
        return interceptor.operate_async_stream_call(self.it, self.wrapper)

    # Legacy alias kept for callers using the Python 2 style protocol.
    next = __next__
+
+
class SyncResponseIterator(object):
    """Iterates a blocking gRPC stream call, applying *wrapper* to every
    received message."""

    def __init__(self, it, wrapper):
        self.it = it
        self.wrapper = wrapper

    def cancel(self):
        # Cancel the underlying call; self is returned for chaining.
        self.it.cancel()
        return self

    def __iter__(self):
        return self

    def __next__(self):
        return self.wrapper(next(self.it))

    # Legacy alias kept for callers using the Python 2 style protocol.
    next = __next__
+
+
class AtomicCounter:
    """Thread-safe monotonically increasing integer counter."""

    _lock: threading.Lock
    _value: int

    def __init__(self, initial_value: int = 0):
        self._value = initial_value
        self._lock = threading.Lock()

    def inc_and_get(self) -> int:
        """Atomically increment the counter and return the new value."""
        with self._lock:
            self._value += 1
            return self._value
diff --git a/contrib/python/ydb/py3/ydb/aio/__init__.py b/contrib/python/ydb/py3/ydb/aio/__init__.py
new file mode 100644
index 0000000000..acc44db57a
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/aio/__init__.py
@@ -0,0 +1,2 @@
+from .driver import Driver # noqa
+from .table import SessionPool, retry_operation # noqa
diff --git a/contrib/python/ydb/py3/ydb/aio/_utilities.py b/contrib/python/ydb/py3/ydb/aio/_utilities.py
new file mode 100644
index 0000000000..10cbead667
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/aio/_utilities.py
@@ -0,0 +1,20 @@
class AsyncResponseIterator(object):
    """Async iterator adapter: yields wrapper(item) for every item of the
    underlying async stream."""

    def __init__(self, it, wrapper):
        self.it = it.__aiter__()
        self.wrapper = wrapper

    def cancel(self):
        # Cancel the underlying stream; self is returned for chaining.
        self.it.cancel()
        return self

    def __aiter__(self):
        return self

    async def __anext__(self):
        item = await self.it.__anext__()
        return self.wrapper(item)

    # Explicit-name form of the protocol method, kept for existing callers.
    async def next(self):
        return await self.__anext__()
diff --git a/contrib/python/ydb/py3/ydb/aio/connection.py b/contrib/python/ydb/py3/ydb/aio/connection.py
new file mode 100644
index 0000000000..9c66149002
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/aio/connection.py
@@ -0,0 +1,254 @@
+import logging
+import asyncio
+import typing
+from typing import Any, Tuple, Callable, Iterable
+import collections
+import grpc
+
+from ydb import _apis, _utilities
+
+from ydb.connection import (
+ _log_request,
+ _log_response,
+ _rpc_error_handler,
+ _get_request_timeout,
+ _set_server_timeouts,
+ _RpcState as RpcState,
+ EndpointOptions,
+ channel_factory,
+ YDB_DATABASE_HEADER,
+ YDB_TRACE_ID_HEADER,
+ YDB_REQUEST_TYPE_HEADER,
+ EndpointKey,
+)
+from ydb.driver import DriverConfig
+from ydb.settings import BaseRequestSettings
+from ydb import issues
+
+# Workaround for good IDE and universal for runtime
+if typing.TYPE_CHECKING:
+ from ydb._grpc.v4 import ydb_topic_v1_pb2_grpc
+else:
+ from ydb._grpc.common import ydb_topic_v1_pb2_grpc
+
+
# Stubs instantiated eagerly for every new Connection; anything else is
# created lazily via Connection._prepare_stub_instance.
_stubs_list = (
    _apis.TableService.Stub,
    _apis.SchemeService.Stub,
    _apis.DiscoveryService.Stub,
    _apis.CmsService.Stub,
    ydb_topic_v1_pb2_grpc.TopicServiceStub,
)
logger = logging.getLogger(__name__)
+
+
async def _construct_metadata(driver_config, settings):
    """
    Translates request settings into RPC metadata
    :param driver_config: A driver config
    :param settings: An instance of BaseRequestSettings
    :return: RPC metadata
    """
    metadata = []

    if driver_config.database is not None:
        metadata.append((YDB_DATABASE_HEADER, driver_config.database))

    if driver_config.credentials is not None:
        auth_entries = driver_config.credentials.auth_metadata()
        # Credential providers may be sync or async; await only when needed.
        if asyncio.iscoroutine(auth_entries):
            auth_entries = await auth_entries
        metadata.extend(auth_entries)

    if settings is not None:
        if settings.trace_id is not None:
            metadata.append((YDB_TRACE_ID_HEADER, settings.trace_id))
        if settings.request_type is not None:
            metadata.append((YDB_REQUEST_TYPE_HEADER, settings.request_type))

    metadata.append(_utilities.x_ydb_sdk_build_info_header())
    return metadata
+
+
class _RpcState(RpcState):
    """Async flavor of the per-call RPC state: awaits unary responses and
    captures their trailing metadata."""

    __slots__ = (
        "rpc",
        "request_id",
        "rendezvous",
        "result_future",
        "rpc_name",
        "endpoint",
        "metadata_kv",
        "_trailing_metadata",
    )

    def __init__(self, stub_instance: Any, rpc_name: str, endpoint: str, endpoint_key):
        super().__init__(stub_instance, rpc_name, endpoint, endpoint_key)

    async def __call__(self, *args, **kwargs):
        resp = self.rpc(*args, **kwargs)
        if hasattr(resp, "__await__"):  # Check to support async iterators from streams
            response = await resp
            # Remember trailing metadata for later inspection via
            # trailing_metadata().
            self._trailing_metadata = await resp.trailing_metadata()
            return response
        return resp

    def trailing_metadata(self):
        # Lazily fold raw (key, value) pairs into a key -> {values} map.
        if self.metadata_kv is None:
            self.metadata_kv = collections.defaultdict(set)
            for key, value in self._trailing_metadata:
                self.metadata_kv[key].add(value)
        return self.metadata_kv

    def future(self, *args, **kwargs):
        # Part of the sync Connection API only; async callers await __call__.
        raise NotImplementedError
+
+
class Connection:
    """A single async gRPC channel to one YDB endpoint, plus the stub
    instances and bookkeeping needed to run calls over it."""

    __slots__ = (
        "endpoint",
        "_channel",
        "_call_states",
        "_stub_instances",
        "_driver_config",
        "_cleanup_callbacks",
        "__weakref__",
        "lock",
        "calls",
        "closing",
        "endpoint_key",
        "node_id",
    )

    def __init__(
        self,
        endpoint: str,
        driver_config: DriverConfig = None,
        endpoint_options: EndpointOptions = None,
    ):
        global _stubs_list
        self.endpoint = endpoint
        self.endpoint_key = EndpointKey(self.endpoint, getattr(endpoint_options, "node_id", None))
        self.node_id = getattr(endpoint_options, "node_id", None)
        self._channel = channel_factory(self.endpoint, driver_config, grpc.aio, endpoint_options=endpoint_options)
        self._driver_config = driver_config

        # Pre-create the commonly used stubs; others are added lazily.
        self._stub_instances = {}
        self._cleanup_callbacks = []
        for stub in _stubs_list:
            self._stub_instances[stub] = stub(self._channel)

        # request_id -> in-flight asyncio future, awaited by close().
        self.calls = {}
        self.closing = False

    def _prepare_stub_instance(self, stub: Any):
        # Lazily instantiate stubs that are not in _stubs_list.
        if stub not in self._stub_instances:
            self._stub_instances[stub] = stub(self._channel)

    async def _prepare_call(
        self, stub: Any, rpc_name: str, request: Any, settings: BaseRequestSettings
    ) -> Tuple[_RpcState, float, Any]:
        """Compute timeout/metadata and build the per-call RPC state."""

        timeout, metadata = _get_request_timeout(settings), await _construct_metadata(self._driver_config, settings)
        _set_server_timeouts(request, settings, timeout)
        self._prepare_stub_instance(stub)
        rpc_state = _RpcState(self._stub_instances[stub], rpc_name, self.endpoint, self.endpoint_key)
        logger.debug("%s: creating call state", rpc_state)

        if self.closing:
            raise issues.ConnectionLost("Couldn't start call")

        # Call successfully prepared and registered
        _log_request(rpc_state, request)
        return rpc_state, timeout, metadata

    async def __call__(
        self,
        request: Any,
        stub: Any,
        rpc_name: str,
        wrap_result: Callable = None,
        settings: BaseRequestSettings = None,
        wrap_args: Iterable = (),
        on_disconnected: Callable = None,
    ) -> Any:
        """
        Async method to execute request
        :param request: A request constructed by client
        :param stub: A stub instance to wrap channel
        :param rpc_name: A name of RPC to be executed
        :param wrap_result: A callable that intercepts call and wraps received response
        :param settings: An instance of BaseRequestSettings that can be used
        for RPC metadata construction
        :param on_disconnected: A callable to be executed when underlying channel becomes disconnected
        :param wrap_args: Any arguments to be passed into wrap_result callable
        :return: A result of computation
        """
        rpc_state, timeout, metadata = await self._prepare_call(stub, rpc_name, request, settings)
        try:
            feature = asyncio.ensure_future(rpc_state(request, timeout=timeout, metadata=metadata))

            # Add feature to dict to wait until it finished when close called
            self.calls[rpc_state.request_id] = feature

            response = await feature
            _log_response(rpc_state, response)
            return response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args)
        except grpc.RpcError as rpc_error:
            if on_disconnected:
                coro = on_disconnected()
                if asyncio.iscoroutine(coro):
                    await coro
                # Already invoked here; don't let the error handler call it again.
                on_disconnected = None
            raise _rpc_error_handler(rpc_state, rpc_error, on_disconnected)
        finally:
            self._finish_call(rpc_state)

    def _finish_call(self, call_state: _RpcState):
        # Drop the finished call from the close() bookkeeping.
        self.calls.pop(call_state.request_id)

    async def destroy(self, grace: float = 0):
        """
        Destroys the underlying gRPC channel
        This method does not cancel tasks, but destroys them.
        :param grace:
        :return: None
        """
        if hasattr(self, "_channel") and hasattr(self._channel, "close"):
            await self._channel.close(grace)

    def add_cleanup_callback(self, callback):
        # Invoked with this connection as argument when close() runs.
        self._cleanup_callbacks.append(callback)

    async def connection_ready(self, ready_timeout=10):
        """
        Awaits until channel is ready
        :return: None
        """

        await asyncio.wait_for(self._channel.channel_ready(), timeout=ready_timeout)

    async def close(self, grace: float = None):
        """
        Closes the underlying gRPC channel
        :param: grace: If a grace period is specified, this method waits until all active
        RPCs are finished, once the grace period is reached the ones that haven't
        been terminated are cancelled. If grace is None, this method will wait until all tasks are finished.
        :return: None
        """
        logger.info("Closing channel for endpoint %s", self.endpoint)

        self.closing = True

        if self.calls:
            await asyncio.wait(self.calls.values(), timeout=grace)

        for callback in self._cleanup_callbacks:
            callback(self)

        await self.destroy()

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        await self.close()
diff --git a/contrib/python/ydb/py3/ydb/aio/credentials.py b/contrib/python/ydb/py3/ydb/aio/credentials.py
new file mode 100644
index 0000000000..48db925eba
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/aio/credentials.py
@@ -0,0 +1,110 @@
+import time
+
+import abc
+import asyncio
+import logging
+from ydb import issues, credentials
+
+logger = logging.getLogger(__name__)
+
+
+class _OneToManyValue(object):
+ def __init__(self):
+ self._value = None
+ self._condition = asyncio.Condition()
+
+ async def consume(self, timeout=3):
+ async with self._condition:
+ if self._value is None:
+ try:
+ await asyncio.wait_for(self._condition.wait(), timeout=timeout)
+ except Exception:
+ return self._value
+ return self._value
+
+ async def update(self, n_value):
+ async with self._condition:
+ prev_value = self._value
+ self._value = n_value
+ if prev_value is None:
+ self._condition.notify_all()
+
+
+class _AtMostOneExecution(object):
+ def __init__(self):
+ self._can_schedule = True
+ self._lock = asyncio.Lock() # Lock to guarantee only one execution
+
+ async def _wrapped_execution(self, callback):
+ await self._lock.acquire()
+ try:
+ res = callback()
+ if asyncio.iscoroutine(res):
+ await res
+ except Exception:
+ pass
+
+ finally:
+ self._lock.release()
+ self._can_schedule = True
+
+ def submit(self, callback):
+ if self._can_schedule:
+ self._can_schedule = False
+ asyncio.ensure_future(self._wrapped_execution(callback))
+
+
class AbstractExpiringTokenCredentials(credentials.AbstractExpiringTokenCredentials):
    """Async variant of the expiring-token credentials base: refreshes the
    token in the background and shares it via a one-to-many holder."""

    def __init__(self):
        super(AbstractExpiringTokenCredentials, self).__init__()
        # At most one refresh runs at a time.
        self._tp = _AtMostOneExecution()
        self._cached_token = _OneToManyValue()

    @abc.abstractmethod
    async def _make_token_request(self):
        # Subclasses fetch {"access_token": ..., "expires_in": ...}.
        pass

    async def _refresh(self):
        """Fetch a fresh token and publish it; on failure, record the error
        and reschedule another attempt after a short pause."""
        current_time = time.time()
        self._log_refresh_start(current_time)

        try:
            auth_metadata = await self._make_token_request()
            await self._cached_token.update(auth_metadata["access_token"])
            self.update_expiration_info(auth_metadata)
            self.logger.info(
                "Token refresh successful. current_time %s, refresh_in %s",
                current_time,
                self._refresh_in,
            )

        except (KeyboardInterrupt, SystemExit):
            return

        except Exception as e:
            self.last_error = str(e)
            await asyncio.sleep(1)
            # Retry: schedule another refresh attempt.
            self._tp.submit(self._refresh)

    async def token(self):
        """Return a valid token, triggering a background refresh when stale.

        Raises ConnectionError when no token could be obtained in time.
        """
        current_time = time.time()
        if current_time > self._refresh_in:
            self._tp.submit(self._refresh)

        cached_token = await self._cached_token.consume(timeout=3)
        if cached_token is None:
            if self.last_error is None:
                raise issues.ConnectionError(
                    "%s: timeout occurred while waiting for token.\n%s"
                    % (
                        self.__class__.__name__,
                        self.extra_error_message,
                    )
                )
            raise issues.ConnectionError(
                "%s: %s.\n%s" % (self.__class__.__name__, self.last_error, self.extra_error_message)
            )
        return cached_token

    async def auth_metadata(self):
        # gRPC metadata pair consumed by _construct_metadata().
        return [(credentials.YDB_AUTH_TICKET_HEADER, await self.token())]
diff --git a/contrib/python/ydb/py3/ydb/aio/driver.py b/contrib/python/ydb/py3/ydb/aio/driver.py
new file mode 100644
index 0000000000..0f4f3630f9
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/aio/driver.py
@@ -0,0 +1,61 @@
+from . import pool, scheme, table
+import ydb
+from .. import _utilities
+from ydb.driver import get_config, default_credentials
+
+
class DriverConfig(ydb.DriverConfig):
    """Async driver configuration; adds convenience constructors that fill in
    default credentials."""

    @classmethod
    def default_from_endpoint_and_database(cls, endpoint, database, root_certificates=None, credentials=None, **kwargs):
        """Build a config from an endpoint and database path."""
        return cls(
            endpoint,
            database,
            credentials=default_credentials(credentials),
            root_certificates=root_certificates,
            **kwargs
        )

    @classmethod
    def default_from_connection_string(cls, connection_string, root_certificates=None, credentials=None, **kwargs):
        """Build a config from a "grpc[s]://host:port/?database=..." string."""
        endpoint, database = _utilities.parse_connection_string(connection_string)
        return cls(
            endpoint,
            database,
            credentials=default_credentials(credentials),
            root_certificates=root_certificates,
            **kwargs
        )
+
+
class Driver(pool.ConnectionPool):
    """Async YDB driver: a connection pool plus scheme/table/topic clients."""

    _credentials: ydb.Credentials  # used for topic clients

    def __init__(
        self,
        driver_config=None,
        connection_string=None,
        endpoint=None,
        database=None,
        root_certificates=None,
        credentials=None,
        **kwargs
    ):
        from .. import topic  # local import for prevent cycle import error

        # Merge the various configuration sources into one DriverConfig.
        config = get_config(
            driver_config,
            connection_string,
            endpoint,
            database,
            root_certificates,
            credentials,
            config_class=DriverConfig,
        )

        super(Driver, self).__init__(config)

        self._credentials = config.credentials

        self.scheme_client = scheme.SchemeClient(self)
        self.table_client = table.TableClient(self, config.table_client_settings)
        self.topic_client = topic.TopicClientAsyncIO(self, config.topic_client_settings)
diff --git a/contrib/python/ydb/py3/ydb/aio/iam.py b/contrib/python/ydb/py3/ydb/aio/iam.py
new file mode 100644
index 0000000000..eab8faffe0
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/aio/iam.py
@@ -0,0 +1,132 @@
+import grpc.aio
+import time
+
+import abc
+import logging
+from ydb.iam import auth
+from .credentials import AbstractExpiringTokenCredentials
+
+logger = logging.getLogger(__name__)
+
+try:
+ from yandex.cloud.iam.v1 import iam_token_service_pb2_grpc
+ from yandex.cloud.iam.v1 import iam_token_service_pb2
+ import jwt
+except ImportError:
+ jwt = None
+ iam_token_service_pb2_grpc = None
+ iam_token_service_pb2 = None
+
+try:
+ import aiohttp
+except ImportError:
+ aiohttp = None
+
+
class TokenServiceCredentials(AbstractExpiringTokenCredentials):
    """Credentials that exchange a subclass-provided request for an IAM token
    via the Yandex.Cloud IAM token service over gRPC."""

    def __init__(self, iam_endpoint=None, iam_channel_credentials=None):
        super(TokenServiceCredentials, self).__init__()
        assert iam_token_service_pb2_grpc is not None, 'run pip install "ydb[yc]" to use service account credentials'
        self._get_token_request_timeout = 10
        self._iam_endpoint = "iam.api.cloud.yandex.net:443" if iam_endpoint is None else iam_endpoint
        self._iam_channel_credentials = {} if iam_channel_credentials is None else iam_channel_credentials

    def _channel_factory(self):
        # Fresh TLS channel per token request; closed by the async with below.
        return grpc.aio.secure_channel(
            self._iam_endpoint,
            grpc.ssl_channel_credentials(**self._iam_channel_credentials),
        )

    @abc.abstractmethod
    def _get_token_request(self):
        # Subclasses build the concrete CreateIamTokenRequest.
        pass

    async def _make_token_request(self):
        async with self._channel_factory() as channel:
            stub = iam_token_service_pb2_grpc.IamTokenServiceStub(channel)
            response = await stub.Create(self._get_token_request(), timeout=self._get_token_request_timeout)
            self.logger.debug(str(response))
            # Convert the absolute expiration timestamp into a relative TTL.
            expires_in = max(0, response.expires_at.seconds - int(time.time()))
            return {"access_token": response.iam_token, "expires_in": expires_in}
+
+
# IamTokenCredentials is kept as an alias for backward compatibility with
# older SDK releases. Deprecated: use TokenServiceCredentials instead.
IamTokenCredentials = TokenServiceCredentials
+
+
class JWTIamCredentials(TokenServiceCredentials, auth.BaseJWTCredentials):
    """IAM credentials that authenticate with a signed JWT built from a
    service-account key pair."""

    def __init__(
        self,
        account_id,
        access_key_id,
        private_key,
        iam_endpoint=None,
        iam_channel_credentials=None,
    ):
        TokenServiceCredentials.__init__(self, iam_endpoint, iam_channel_credentials)
        auth.BaseJWTCredentials.__init__(self, account_id, access_key_id, private_key)

    def _get_token_request(self):
        # Sign a fresh JWT for every token exchange.
        return iam_token_service_pb2.CreateIamTokenRequest(
            jwt=auth.get_jwt(
                self._account_id,
                self._access_key_id,
                self._private_key,
                self._jwt_expiration_timeout,
            )
        )
+
+
class YandexPassportOAuthIamCredentials(TokenServiceCredentials):
    """IAM credentials that exchange a Yandex Passport OAuth token for an IAM
    token."""

    def __init__(
        self,
        yandex_passport_oauth_token,
        iam_endpoint=None,
        iam_channel_credentials=None,
    ):
        self._yandex_passport_oauth_token = yandex_passport_oauth_token
        super(YandexPassportOAuthIamCredentials, self).__init__(iam_endpoint, iam_channel_credentials)

    def _get_token_request(self):
        return iam_token_service_pb2.CreateIamTokenRequest(
            yandex_passport_oauth_token=self._yandex_passport_oauth_token
        )
+
+
class MetadataUrlCredentials(AbstractExpiringTokenCredentials):
    """Credentials fetched from the cloud VM/function metadata service."""

    def __init__(self, metadata_url=None):
        super(MetadataUrlCredentials, self).__init__()
        assert aiohttp is not None, "Install aiohttp library to use metadata credentials provider"
        self._metadata_url = auth.DEFAULT_METADATA_URL if metadata_url is None else metadata_url
        # Warm the token cache immediately.
        # NOTE(review): extra_error_message is assigned after submit(); a
        # refresh that fails very early may report without it — confirm
        # whether this ordering is intentional.
        self._tp.submit(self._refresh)
        self.extra_error_message = "Check that metadata service configured properly and application deployed in VM or function at Yandex.Cloud."

    async def _make_token_request(self):
        timeout = aiohttp.ClientTimeout(total=2)
        async with aiohttp.ClientSession(timeout=timeout) as session:
            async with session.get(self._metadata_url, headers={"Metadata-Flavor": "Google"}) as response:
                if not response.ok:
                    self.logger.error("Error while getting token from metadata: %s" % await response.text())
                    response.raise_for_status()
                # response from default metadata credentials provider
                # contains text/plain content type.
                return await response.json(content_type=None)
+
+
class ServiceAccountCredentials(JWTIamCredentials):
    """Public, conventionally-named alias of JWTIamCredentials for
    service-account key authentication."""

    def __init__(
        self,
        service_account_id,
        access_key_id,
        private_key,
        iam_endpoint=None,
        iam_channel_credentials=None,
    ):
        super(ServiceAccountCredentials, self).__init__(
            service_account_id,
            access_key_id,
            private_key,
            iam_endpoint,
            iam_channel_credentials,
        )
diff --git a/contrib/python/ydb/py3/ydb/aio/pool.py b/contrib/python/ydb/py3/ydb/aio/pool.py
new file mode 100644
index 0000000000..c637a7ca96
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/aio/pool.py
@@ -0,0 +1,262 @@
+import asyncio
+import logging
+import random
+import typing
+
+from ydb import issues
+from ydb.pool import ConnectionsCache as _ConnectionsCache, IConnectionPool
+
+from .connection import Connection, EndpointKey
+
+from . import resolver
+
+logger = logging.getLogger(__name__)
+
+
class ConnectionsCache(_ConnectionsCache):
    """Async variant of the connections cache: replaces thread primitives with
    asyncio events and awaits connection teardown."""

    def __init__(self, use_all_nodes: bool = False):
        super().__init__(use_all_nodes)
        self.lock = resolver._FakeLock()  # Mock lock to emulate thread safety
        # Set while at least one connection is available.
        self._event = asyncio.Event()
        # Set once the first discovery attempt completed (ok or failed).
        self._fast_fail_event = asyncio.Event()

        self._fast_fail_error = None

    async def get(self, preferred_endpoint: typing.Optional[EndpointKey] = None, fast_fail=False, wait_timeout=10):
        """Return a connection, preferring *preferred_endpoint* when cached.

        fast_fail=True raises the stored discovery error as soon as the first
        discovery round finished; otherwise waits for a usable connection.
        """

        if fast_fail:
            await asyncio.wait_for(self._fast_fail_event.wait(), timeout=wait_timeout)
            if self._fast_fail_error:
                raise self._fast_fail_error
        else:
            await asyncio.wait_for(self._event.wait(), timeout=wait_timeout)

        if preferred_endpoint is not None and preferred_endpoint.node_id in self.connections_by_node_id:
            return self.connections_by_node_id[preferred_endpoint.node_id]

        if preferred_endpoint is not None and preferred_endpoint.endpoint in self.connections:
            return self.connections[preferred_endpoint.endpoint]

        # Round-robin: rotate the first connection to the back of its list.
        for conn_lst in self.conn_lst_order:
            try:
                endpoint, connection = conn_lst.popitem(last=False)
                conn_lst[endpoint] = connection
                return connection
            except KeyError:
                continue

        raise issues.ConnectionLost("Couldn't find valid connection")

    def add(self, connection, preferred=False):
        """Register a ready connection; wakes waiters in get()."""

        if connection is None:
            return False

        # Ensure the cache drops the connection when it is closed.
        connection.add_cleanup_callback(self.remove)

        if preferred:
            self.preferred[connection.endpoint] = connection

        self.connections_by_node_id[connection.node_id] = connection
        self.connections[connection.endpoint] = connection

        self._event.set()

        if len(self.connections) > 0:
            self.complete_discovery(None)

        return True

    def complete_discovery(self, error):
        # Record the round's outcome and release fast_fail waiters.
        self._fast_fail_error = error
        self._fast_fail_event.set()

    def remove(self, connection):
        """Forget a connection from every index; clear readiness events when
        the cache becomes empty."""
        self.connections_by_node_id.pop(connection.node_id, None)
        self.preferred.pop(connection.endpoint, None)
        self.connections.pop(connection.endpoint, None)
        self.outdated.pop(connection.endpoint, None)
        if len(self.connections) == 0:
            self._event.clear()
            if not self._fast_fail_error:
                self._fast_fail_event.clear()

    async def cleanup(self):
        # Close every live connection.
        actual_connections = list(self.connections.values())
        for connection in actual_connections:
            await connection.close()

    async def cleanup_outdated(self):
        # Close connections that disappeared from the latest discovery result.
        outdated_connections = list(self.outdated.values())
        for outdated_connection in outdated_connections:
            await outdated_connection.close()
        return self
+
+
class Discovery:
    """Background endpoint-discovery loop: periodically resolves the cluster's
    endpoints and keeps the connections cache in sync."""

    def __init__(self, store: ConnectionsCache, driver_config):
        self.logger = logger.getChild(self.__class__.__name__)
        self._cache = store
        self._driver_config = driver_config
        self._resolver = resolver.DiscoveryEndpointsResolver(self._driver_config)
        self._base_discovery_interval = 60
        self._ready_timeout = 4
        self._discovery_request_timeout = 2
        self._should_stop = False
        # Set by notify_disconnected()/stop() to wake the sleeping loop early.
        self._wake_up_event = asyncio.Event()
        self._max_size = 9
        self._base_emergency_retry_interval = 1
        # TLS endpoints are required when certificates or a secure channel
        # were configured.
        self._ssl_required = False
        if driver_config.root_certificates is not None or driver_config.secure_channel:
            self._ssl_required = True

    def discovery_debug_details(self):
        return self._resolver.debug_details()

    def notify_disconnected(self):
        # Trigger an immediate re-discovery round.
        self._wake_up_event.set()

    def _emergency_retry_interval(self):
        # Jittered short retry after a failed round.
        return (1 + random.random()) * self._base_emergency_retry_interval

    def _discovery_interval(self):
        # Jittered regular interval after a successful round.
        return (1 + random.random()) * self._base_discovery_interval

    async def execute_discovery(self):
        """Run one discovery round; returns True when the cache ends up with
        at least one connection."""

        resolve_details = await self._resolver.resolve()

        if resolve_details is None:
            return False

        # Mark cached endpoints that are no longer advertised as outdated.
        resolved_endpoints = set(
            endpoint
            for resolved_endpoint in resolve_details.endpoints
            for endpoint, endpoint_options in resolved_endpoint.endpoints_with_options()
        )
        for cached_endpoint in self._cache.values():
            if cached_endpoint.endpoint not in resolved_endpoints:
                self._cache.make_outdated(cached_endpoint)

        for resolved_endpoint in resolve_details.endpoints:
            # Keep only endpoints matching the configured TLS mode.
            if self._ssl_required and not resolved_endpoint.ssl:
                continue

            if not self._ssl_required and resolved_endpoint.ssl:
                continue

            # Prefer endpoints co-located with the client.
            preferred = resolve_details.self_location == resolved_endpoint.location

            for (
                endpoint,
                endpoint_options,
            ) in resolved_endpoint.endpoints_with_options():
                if self._cache.size >= self._max_size or self._cache.already_exists(endpoint):
                    continue

                ready_connection = Connection(endpoint, self._driver_config, endpoint_options=endpoint_options)
                await ready_connection.connection_ready(ready_timeout=self._ready_timeout)

                self._cache.add(ready_connection, preferred)

        await self._cache.cleanup_outdated()
        return self._cache.size > 0

    def stop(self):
        self._should_stop = True
        self._wake_up_event.set()

    async def run(self):
        """Discovery loop body; exits (and cleans up the cache) on stop()."""
        while True:
            try:
                successful = await self.execute_discovery()
            except Exception:
                successful = False
            if successful:
                self._cache.complete_discovery(None)
            else:
                self._cache.complete_discovery(issues.ConnectionFailure(str(self.discovery_debug_details())))

            interval = self._discovery_interval() if successful else self._emergency_retry_interval()

            try:
                await asyncio.wait_for(self._wake_up_event.wait(), timeout=interval)
                if self._should_stop:
                    break
                else:
                    # Woken early (disconnect); run another round immediately.
                    self._wake_up_event.clear()
                    continue
            except asyncio.TimeoutError:
                continue

        await self._cache.cleanup()
        self.logger.info("Successfully terminated discovery process")
+
+
class ConnectionPool(IConnectionPool):
    """Async connection pool: owns the discovery task and dispatches RPCs to
    cached connections."""

    def __init__(self, driver_config):
        self._driver_config = driver_config
        self._store = ConnectionsCache(driver_config.use_all_nodes)
        # Bootstrap connection to the configured endpoint, used before
        # discovery has populated the cache.
        self._grpc_init = Connection(self._driver_config.endpoint, self._driver_config)
        self._stopped = False
        self._discovery = Discovery(self._store, self._driver_config)

        # NOTE(review): asyncio.get_event_loop() is deprecated outside a
        # running loop in newer Python versions — confirm the pool is always
        # constructed with a running loop.
        self._discovery_task = asyncio.get_event_loop().create_task(self._discovery.run())

    async def stop(self, timeout=10):
        """Stop discovery and close the bootstrap connection; cancel the
        discovery task if it does not finish within *timeout* seconds."""
        self._discovery.stop()
        await self._grpc_init.close()
        try:
            await asyncio.wait_for(self._discovery_task, timeout=timeout)
        except asyncio.TimeoutError:
            self._discovery_task.cancel()
        self._stopped = True

    def _on_disconnected(self, connection):
        # Per-call disconnect hook: close the connection and force rediscovery.
        async def __wrapper__():
            await connection.close()
            self._discovery.notify_disconnected()

        return __wrapper__

    async def wait(self, timeout=7, fail_fast=False):
        """Block until the pool has a usable connection (or discovery failed
        when fail_fast is set)."""
        await self._store.get(fast_fail=fail_fast, wait_timeout=timeout)

    def discovery_debug_details(self):
        return self._discovery.discovery_debug_details()

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc, tb):
        await self.stop()

    async def __call__(
        self,
        request,
        stub,
        rpc_name,
        wrap_result=None,
        settings=None,
        wrap_args=(),
        preferred_endpoint=None,
        fast_fail=False,
    ):
        """Pick a connection from the cache and execute the RPC on it."""
        if self._stopped:
            raise issues.Error("Driver was stopped")
        wait_timeout = settings.timeout if settings else 10
        try:
            connection = await self._store.get(preferred_endpoint, fast_fail=fast_fail, wait_timeout=wait_timeout)
        except Exception:
            # No usable connection: kick discovery before propagating.
            self._discovery.notify_disconnected()
            raise

        return await connection(
            request,
            stub,
            rpc_name,
            wrap_result,
            settings,
            wrap_args,
            self._on_disconnected(connection),
        )
diff --git a/contrib/python/ydb/py3/ydb/aio/resolver.py b/contrib/python/ydb/py3/ydb/aio/resolver.py
new file mode 100644
index 0000000000..e8d27bac58
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/aio/resolver.py
@@ -0,0 +1,68 @@
+from . import connection as conn_impl
+
+from ydb import _apis, settings as settings_impl
+from ydb.resolver import (
+ DiscoveryResult,
+ DiscoveryEndpointsResolver as _DiscoveryEndpointsResolver,
+ _list_endpoints_request_factory,
+)
+
+
+class _FakeLock:
+ def __init__(self):
+ pass
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ pass
+
+
class DiscoveryEndpointsResolver(_DiscoveryEndpointsResolver):
    """Async endpoint resolver: same flow as the synchronous base resolver,
    with the internal lock replaced by a no-op (all access happens on one
    event loop)."""

    def __init__(self, driver_config):
        super().__init__(driver_config)
        self._lock = _FakeLock()

    async def resolve(self):
        """Run ListEndpoints against one candidate endpoint.

        Returns a DiscoveryResult on success, or None when the connection
        cannot be established or the request fails; details are recorded via
        _add_debug_details in both failure cases.
        """
        # self.logger, self._endpoints_iter and self._ready_timeout are
        # provided by the base resolver.
        self.logger.debug("Preparing initial endpoint to resolve endpoints")
        endpoint = next(self._endpoints_iter)
        connection = conn_impl.Connection(endpoint, self._driver_config)
        try:
            await connection.connection_ready()
        except Exception:
            self._add_debug_details(
                'Failed to establish connection to YDB discovery endpoint: "%s". Check endpoint correctness.' % endpoint
            )
            return None
        self.logger.debug("Resolving endpoints for database %s", self._driver_config.database)

        try:
            resolved = await connection(
                _list_endpoints_request_factory(self._driver_config),
                _apis.DiscoveryService.Stub,
                _apis.DiscoveryService.ListEndpoints,
                DiscoveryResult.from_response,
                settings=settings_impl.BaseRequestSettings().with_timeout(self._ready_timeout),
            )

            self._add_debug_details(
                "Resolved endpoints for database %s: %s",
                self._driver_config.database,
                resolved,
            )

            return resolved
        except Exception as e:

            self._add_debug_details(
                'Failed to resolve endpoints for database %s. Endpoint: "%s". Error details:\n %s',
                self._driver_config.database,
                endpoint,
                e,
            )

        finally:
            # The discovery connection is short-lived: always close it.
            await connection.close()

        return None
diff --git a/contrib/python/ydb/py3/ydb/aio/scheme.py b/contrib/python/ydb/py3/ydb/aio/scheme.py
new file mode 100644
index 0000000000..8b1233ec74
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/aio/scheme.py
@@ -0,0 +1,21 @@
+from ydb import scheme
+
+
class SchemeClient(scheme.BaseSchemeClient):
    """Asyncio scheme client.

    Every operation defers to the shared BaseSchemeClient implementation and
    awaits its result.
    """

    def __init__(self, driver):
        super().__init__(driver)

    async def make_directory(self, path, settings=None):
        """Create directory *path*."""
        return await super().make_directory(path, settings)

    async def remove_directory(self, path, settings=None):
        """Remove directory *path*."""
        return await super().remove_directory(path, settings)

    async def list_directory(self, path, settings=None):
        """List the children of *path*."""
        return await super().list_directory(path, settings)

    async def describe_path(self, path, settings=None):
        """Describe the scheme entry at *path*."""
        return await super().describe_path(path, settings)

    async def modify_permissions(self, path, settings):
        """Apply the permission changes described by *settings* to *path*."""
        return await super().modify_permissions(path, settings)
diff --git a/contrib/python/ydb/py3/ydb/aio/table.py b/contrib/python/ydb/py3/ydb/aio/table.py
new file mode 100644
index 0000000000..2a33cf786e
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/aio/table.py
@@ -0,0 +1,499 @@
+import asyncio
+import logging
+import time
+import typing
+
+import ydb
+
+from ydb import issues, settings as settings_impl, table
+
+from ydb.table import (
+ BaseSession,
+ BaseTableClient,
+ _scan_query_request_factory,
+ _wrap_scan_query_response,
+ BaseTxContext,
+)
+from . import _utilities
+from ydb import _apis, _session_impl
+
+logger = logging.getLogger(__name__)
+
+
class Session(BaseSession):
    """Asyncio session: overrides the BaseSession operations with awaitable
    counterparts of identical signatures; read_table builds the stream itself
    so it can be wrapped in an AsyncResponseIterator."""

    async def read_table(
        self,
        path,
        key_range=None,
        columns=(),
        ordered=False,
        row_limit=None,
        settings=None,
        use_snapshot=None,
    ):  # pylint: disable=W0236
        """Start a StreamReadTable RPC and return an async iterator of
        wrapped result sets."""
        request = _session_impl.read_table_request_factory(
            self._state,
            path,
            key_range,
            columns,
            ordered,
            row_limit,
            use_snapshot=use_snapshot,
        )
        stream_it = await self._driver(
            request,
            _apis.TableService.Stub,
            _apis.TableService.StreamReadTable,
            settings=settings,
        )
        return _utilities.AsyncResponseIterator(stream_it, _session_impl.wrap_read_table_response)

    async def keep_alive(self, settings=None):  # pylint: disable=W0236
        return await super().keep_alive(settings)

    async def create(self, settings=None):  # pylint: disable=W0236
        # The base call may return either a coroutine or a plain value;
        # await only coroutines.
        res = super().create(settings)
        if asyncio.iscoroutine(res):
            res = await res
        return res

    async def delete(self, settings=None):  # pylint: disable=W0236
        return await super().delete(settings)

    async def execute_scheme(self, yql_text, settings=None):  # pylint: disable=W0236
        return await super().execute_scheme(yql_text, settings)

    async def prepare(self, query, settings=None):  # pylint: disable=W0236
        # Same pattern as create(): the base call may short-circuit to a
        # ready value instead of a coroutine.
        res = super().prepare(query, settings)
        if asyncio.iscoroutine(res):
            res = await res
        return res

    async def explain(self, yql_text, settings=None):  # pylint: disable=W0236
        return await super().explain(yql_text, settings)

    async def create_table(self, path, table_description, settings=None):  # pylint: disable=W0236
        return await super().create_table(path, table_description, settings)

    async def drop_table(self, path, settings=None):  # pylint: disable=W0236
        return await super().drop_table(path, settings)

    async def alter_table(
        self,
        path,
        add_columns=None,
        drop_columns=None,
        settings=None,
        alter_attributes=None,
        add_indexes=None,
        drop_indexes=None,
        set_ttl_settings=None,
        drop_ttl_settings=None,
        add_column_families=None,
        alter_column_families=None,
        alter_storage_settings=None,
        set_compaction_policy=None,
        alter_partitioning_settings=None,
        set_key_bloom_filter=None,
        set_read_replicas_settings=None,
    ):  # pylint: disable=W0236,R0913,R0914
        return await super().alter_table(
            path,
            add_columns,
            drop_columns,
            settings,
            alter_attributes,
            add_indexes,
            drop_indexes,
            set_ttl_settings,
            drop_ttl_settings,
            add_column_families,
            alter_column_families,
            alter_storage_settings,
            set_compaction_policy,
            alter_partitioning_settings,
            set_key_bloom_filter,
            set_read_replicas_settings,
        )

    def transaction(self, tx_mode=None, *, allow_split_transactions=None):
        # Synchronous factory: returns the asyncio TxContext defined in this
        # module instead of the sync one.
        return TxContext(
            self._driver,
            self._state,
            self,
            tx_mode,
            allow_split_transactions=allow_split_transactions,
        )

    async def describe_table(self, path, settings=None):  # pylint: disable=W0236
        return await super().describe_table(path, settings)

    async def copy_table(self, source_path, destination_path, settings=None):  # pylint: disable=W0236
        return await super().copy_table(source_path, destination_path, settings)

    async def copy_tables(self, source_destination_pairs, settings=None):  # pylint: disable=W0236
        return await super().copy_tables(source_destination_pairs, settings)

    async def rename_tables(self, rename_items, settings=None):  # pylint: disable=W0236
        return await super().rename_tables(rename_items, settings)
+
+
class TableClient(BaseTableClient):
    """Asyncio table client: produces async Sessions and async-iterable
    streaming scan-query results."""

    def session(self):
        # Return the asyncio Session flavour instead of the sync one.
        return Session(self._driver, self._table_client_settings)

    async def bulk_upsert(self, *args, **kwargs):  # pylint: disable=W0236
        return await super().bulk_upsert(*args, **kwargs)

    async def scan_query(self, query, parameters=None, settings=None):  # pylint: disable=W0236
        """Start a StreamExecuteScanQuery RPC and return an async iterator of
        wrapped responses."""
        request = _scan_query_request_factory(query, parameters, settings)
        response = await self._driver(
            request,
            _apis.TableService.Stub,
            _apis.TableService.StreamExecuteScanQuery,
            settings=settings,
        )
        return _utilities.AsyncResponseIterator(
            response,
            lambda resp: _wrap_scan_query_response(resp, self._table_client_settings),
        )
+
+
class TxContext(BaseTxContext):
    """Asyncio transaction context manager: best-effort rollback of
    transactions left open on exit."""

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        if self._tx_state.tx_id is not None:
            # It's strictly recommended to close transactions directly
            # by using the commit_tx=True flag while executing a statement or
            # by the .commit() or .rollback() methods, but here we try to do a
            # best effort to avoid useless open transactions.
            logger.warning("Potentially leaked tx: %s", self._tx_state.tx_id)
            try:
                await self.rollback()
            except issues.Error:
                logger.warning("Failed to rollback leaked tx: %s", self._tx_state.tx_id)

            self._tx_state.tx_id = None

    async def execute(self, query, parameters=None, commit_tx=False, settings=None):  # pylint: disable=W0236

        return await super().execute(query, parameters, commit_tx, settings)

    async def commit(self, settings=None):  # pylint: disable=W0236
        # The base call may return either a coroutine or a plain value;
        # await only coroutines.
        res = super().commit(settings)
        if asyncio.iscoroutine(res):
            res = await res
        return res

    async def rollback(self, settings=None):  # pylint: disable=W0236
        res = super().rollback(settings)
        if asyncio.iscoroutine(res):
            res = await res
        return res

    async def begin(self, settings=None):  # pylint: disable=W0236
        res = super().begin(settings)
        if asyncio.iscoroutine(res):
            res = await res
        return res
+
+
async def retry_operation(callee, retry_settings=None, *args, **kwargs):  # pylint: disable=W1113
    """
    The retry operation helper can be used to retry a coroutine that raises YDB specific
    exceptions.

    :param callee: A coroutine to retry.
    :param retry_settings: An instance of ydb.RetrySettings that describes how the coroutine
     should be retried. If None, default instance of retry settings will be used.
    :param args: A tuple with positional arguments to be passed into the coroutine.
    :param kwargs: A dictionary with keyword arguments to be passed into the coroutine.

    Returns awaitable result of coroutine. If retries are not successful exception is raised.
    """

    # Drive the synchronous retry state machine: it yields either a sleep
    # instruction (awaited here) or an attempt whose failure is fed back via
    # set_exception so the generator can decide whether to retry.
    opt_generator = ydb.retry_operation_impl(callee, retry_settings, *args, **kwargs)
    for next_opt in opt_generator:
        if isinstance(next_opt, ydb.YdbRetryOperationSleepOpt):
            await asyncio.sleep(next_opt.timeout)
        else:
            try:
                return await next_opt.result
            except Exception as e:  # pylint: disable=W0703
                next_opt.set_exception(e)
+
+
class SessionCheckout:
    __slots__ = ("_acquired", "_pool", "_blocking", "_timeout", "_retry_timeout")

    def __init__(self, pool, timeout, retry_timeout):
        """
        A context manager that checkouts a session from the specified pool and
        returns it on manager exit.

        :param pool: A SessionPool instance
        :param timeout: A timeout in seconds for session acquire
        :param retry_timeout: A timeout in seconds for session-creation retries
        """
        self._pool = pool
        self._acquired = None
        self._timeout = timeout
        self._retry_timeout = retry_timeout

    async def __aenter__(self):
        self._acquired = await self._pool.acquire(self._timeout, self._retry_timeout)
        return self._acquired

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        # Return the session even when the body raised; the pool decides
        # whether to requeue or destroy it.
        if self._acquired is not None:
            await self._pool.release(self._acquired)
+
+
class SessionPool:
    """Asyncio session pool.

    Ready sessions live in a priority queue keyed by an expiry timestamp, so
    the session closest to idle-expiry is handed out (and keep-alived) first.
    A background task keep-alives idle sessions; optional warm-up tasks
    pre-create ``min_pool_size`` sessions.
    """

    def __init__(self, driver: ydb.pool.IConnectionPool, size: int, min_pool_size: int = 0):
        self._driver_await_timeout = 3
        self._should_stop = asyncio.Event()
        self._waiters = 0  # acquire() calls currently blocked on the queue
        self._driver = driver
        self._active_queue = asyncio.PriorityQueue()  # items: (expiry_ts, session)
        self._active_count = 0  # sessions created and not yet destroyed
        self._size = size
        self._req_settings = settings_impl.BaseRequestSettings().with_timeout(3)
        self._logger = logger.getChild(self.__class__.__name__)
        self._min_pool_size = min_pool_size
        self._keep_alive_threshold = 4 * 60  # seconds before expiry to start keep-alive
        self._terminating = False
        self._init_session_timeout = 20

        self._keep_alive_task = asyncio.ensure_future(self._keep_alive_loop())

        self._min_pool_tasks = []

        for _ in range(self._min_pool_size):
            self._min_pool_tasks.append(asyncio.ensure_future(self._init_and_put(self._init_session_timeout)))

    async def retry_operation(
        self, callee: typing.Callable, *args, retry_settings: table.RetrySettings = None, **kwargs
    ):
        """Run ``callee(session, ...)`` with retries, checking out a session
        from this pool for each attempt."""

        if retry_settings is None:
            retry_settings = table.RetrySettings()

        async def wrapper_callee():
            async with self.checkout(timeout=retry_settings.get_session_client_timeout) as session:
                return await callee(session, *args, **kwargs)

        return await retry_operation(wrapper_callee, retry_settings)

    def _create(self) -> ydb.ISession:
        # Counts the session as active immediately, before server-side init.
        self._active_count += 1
        session = self._driver.table_client.session()
        self._logger.debug("Created session %s", session)
        return session

    async def _init_session_logic(self, session: ydb.ISession) -> typing.Optional[ydb.ISession]:
        """One server-side create attempt; returns None on failure."""
        try:
            await self._driver.wait(self._driver_await_timeout)
            session = await session.create(self._req_settings)
            return session
        except issues.Error as e:
            self._logger.error("Failed to create session. Reason: %s", str(e))
        except Exception as e:  # pylint: disable=W0703
            self._logger.exception("Failed to create session. Reason: %s", str(e))

        return None

    async def _init_session(self, session: ydb.ISession, retry_num: int = None) -> typing.Optional[ydb.ISession]:
        """
        :param retry_num: Number of retries. If None - retries until success.
        :return: The initialized session, or None after retry_num failures.
        """
        i = 0
        while retry_num is None or i < retry_num:
            curr_sess = await self._init_session_logic(session)
            if curr_sess:
                return curr_sess
            i += 1
        return None

    async def _prepare_session(self, timeout, retry_num) -> ydb.ISession:
        # Create + initialize; on failure/timeout the accounting done by
        # _create() is rolled back via _destroy().
        session = self._create()
        try:
            new_sess = await asyncio.wait_for(self._init_session(session, retry_num=retry_num), timeout=timeout)
            if not new_sess:
                self._destroy(session)
            return new_sess
        except Exception as e:
            self._destroy(session)
            raise e

    async def _get_session_from_queue(self, timeout: float):
        # Wait for either a queued session or pool shutdown; on shutdown the
        # waiter is handed a fresh (uninitialized) session object.
        task_wait = asyncio.ensure_future(asyncio.wait_for(self._active_queue.get(), timeout=timeout))
        task_should_stop = asyncio.ensure_future(self._should_stop.wait())
        done, _ = await asyncio.wait((task_wait, task_should_stop), return_when=asyncio.FIRST_COMPLETED)
        if task_should_stop in done:
            task_wait.cancel()
            return self._create()
        _, session = task_wait.result()
        return session

    async def acquire(self, timeout: float = None, retry_timeout: float = None, retry_num: int = None) -> ydb.ISession:
        """Take a session: reuse a queued one, create one while below
        capacity, otherwise wait up to *timeout* for a release.

        :raises ValueError: when the pool is stopped.
        :raises issues.SessionPoolEmpty: on creation/wait timeout.
        """

        if self._should_stop.is_set():
            self._logger.error("Take session from closed session pool")
            raise ValueError("Take session from closed session pool.")

        if retry_timeout is None:
            retry_timeout = timeout

        try:
            _, session = self._active_queue.get_nowait()
            self._logger.debug("Acquired active session from queue: %s", session.session_id)
            return session
        except asyncio.QueueEmpty:
            pass

        if self._active_count < self._size:
            self._logger.debug(
                "Session pool is not large enough (active_count < size: %d < %d). " "will create a new session.",
                self._active_count,
                self._size,
            )
            try:
                session = await self._prepare_session(timeout=retry_timeout, retry_num=retry_num)
            except asyncio.TimeoutError:
                raise issues.SessionPoolEmpty("Timeout when creating session") from None

            if session is not None:
                self._logger.debug("Acquired new created session: %s", session.session_id)
                return session

        try:
            self._waiters += 1
            session = await self._get_session_from_queue(timeout)
            return session
        except asyncio.TimeoutError:
            raise issues.SessionPoolEmpty("Timeout when wait") from None
        finally:
            self._waiters -= 1

    def _is_min_pool_size_satisfied(self, delta=0):
        # During shutdown the minimum is irrelevant.
        if self._terminating:
            return True
        return self._active_count + delta >= self._min_pool_size

    async def _init_and_put(self, timeout=10):
        # Warm-up helper: create a session and return it to the queue.
        sess = await self._prepare_session(timeout=timeout, retry_num=None)
        await self.release(session=sess)

    def _destroy(self, session: ydb.ISession, wait_for_del: bool = False):
        """Drop a session from accounting and, when it was initialized,
        delete it server-side; returns the delete coroutine when
        wait_for_del is True, otherwise None."""
        self._logger.debug("Requested session destroy: %s.", session)
        self._active_count -= 1
        self._logger.debug(
            "Session %s is no longer active. Current active count %d.",
            session,
            self._active_count,
        )

        # Backfill the pool if someone is waiting or we dropped below minimum.
        if self._waiters > 0 or not self._is_min_pool_size_satisfied():
            asyncio.ensure_future(self._init_and_put(self._init_session_timeout))

        if session.initialized():
            coro = session.delete(self._req_settings)
            if wait_for_del:
                self._logger.debug("Sent delete on session %s", session)
                return coro
            else:
                asyncio.ensure_future(coro)
        return None

    async def release(self, session: ydb.ISession):
        """Return a session to the queue, or destroy it when it is unusable
        (closing, mid-query, uninitialized) or the pool is stopping."""
        self._logger.debug("Put on session %s", session.session_id)
        if session.closing():
            self._destroy(session)
            return False

        if session.pending_query():
            self._destroy(session)
            return False
        if not session.initialized() or self._should_stop.is_set():
            self._destroy(session)
            return False

        # 10 minutes of idle validity before the session counts as expired.
        await self._active_queue.put((time.time() + 10 * 60, session))
        self._logger.debug("Session returned to queue: %s", session.session_id)

    async def _pick_for_keepalive(self):
        # Pop the soonest-to-expire session; keep it only when it is within
        # the keep-alive threshold, otherwise put it straight back.
        try:
            priority, session = self._active_queue.get_nowait()
        except asyncio.QueueEmpty:
            return None

        till_expire = priority - time.time()
        if till_expire < self._keep_alive_threshold:
            return session
        await self._active_queue.put((priority, session))
        return None

    async def _send_keep_alive(self, session: ydb.ISession):
        if session is None:
            return False
        if self._should_stop.is_set():
            self._destroy(session)
            return False
        # NOTE(review): an exception from keep_alive() here propagates out of
        # this background task instead of destroying the session -- confirm
        # that is intended.
        await session.keep_alive(self._req_settings)
        try:
            await self.release(session)
        except Exception:  # pylint: disable=W0703
            self._destroy(session)

    async def _keep_alive_loop(self):
        # Wake every threshold/4 seconds until stop is signalled, spawning a
        # keep-alive task for every session close to expiry.
        while True:
            try:
                await asyncio.wait_for(self._should_stop.wait(), timeout=self._keep_alive_threshold // 4)
                break
            except asyncio.TimeoutError:
                while True:
                    session = await self._pick_for_keepalive()
                    if not session:
                        break
                    asyncio.ensure_future(self._send_keep_alive(session))

    async def stop(self, timeout=None):
        """Stop the pool: cancel warm-up tasks, delete every queued session
        and wait for the keep-alive loop to finish."""
        self._logger.debug("Requested session pool stop.")
        self._should_stop.set()
        self._terminating = True

        for task in self._min_pool_tasks:
            task.cancel()

        self._logger.debug("Destroying sessions in active queue")

        tasks = []

        # Queued sessions are always initialized (see release), so _destroy
        # returns a delete coroutine for each of them, never None.
        while True:
            try:
                _, session = self._active_queue.get_nowait()
                tasks.append(self._destroy(session, wait_for_del=True))

            except asyncio.QueueEmpty:
                break

        await asyncio.gather(*tasks)

        self._logger.debug("Destroyed active sessions")

        await asyncio.wait_for(self._keep_alive_task, timeout=timeout)

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc, tb):
        await self.stop()

    async def wait_until_min_size(self):
        # Completes once every min_pool_size warm-up task has finished.
        await asyncio.gather(*self._min_pool_tasks)

    def checkout(self, timeout: float = None, retry_timeout: float = None):
        """Async context manager that acquires on enter and releases on exit."""
        return SessionCheckout(self, timeout, retry_timeout=retry_timeout)
diff --git a/contrib/python/ydb/py3/ydb/auth_helpers.py b/contrib/python/ydb/py3/ydb/auth_helpers.py
new file mode 100644
index 0000000000..6399c3cfdf
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/auth_helpers.py
@@ -0,0 +1,14 @@
+# -*- coding: utf-8 -*-
+import os
+
+
def read_bytes(f):
    """Read the file at path *f* fully and return its raw bytes."""
    with open(f, "rb") as stream:
        data = stream.read()
    return data
+
+
def load_ydb_root_certificate():
    """Return the CA bundle referenced by the YDB_SSL_ROOT_CERTIFICATES_FILE
    environment variable, or None when it is unset or the file is missing."""
    path = os.getenv("YDB_SSL_ROOT_CERTIFICATES_FILE", None)
    if path is None or not os.path.exists(path):
        return None
    return read_bytes(path)
diff --git a/contrib/python/ydb/py3/ydb/connection.py b/contrib/python/ydb/py3/ydb/connection.py
new file mode 100644
index 0000000000..1c4bd9c78d
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/connection.py
@@ -0,0 +1,517 @@
+# -*- coding: utf-8 -*-
+import logging
+import copy
+from concurrent import futures
+import uuid
+import threading
+import collections
+
+from google.protobuf import text_format
+import grpc
+from . import issues, _apis, _utilities
+from . import default_pem
+
# Service stubs pre-instantiated for every new channel in Connection.__init__.
_stubs_list = (
    _apis.TableService.Stub,
    _apis.SchemeService.Stub,
    _apis.DiscoveryService.Stub,
    _apis.CmsService.Stub,
)
+
logger = logging.getLogger(__name__)
DEFAULT_TIMEOUT = 600  # seconds; used when request settings carry no timeout
YDB_DATABASE_HEADER = "x-ydb-database"
YDB_TRACE_ID_HEADER = "x-ydb-trace-id"
YDB_REQUEST_TYPE_HEADER = "x-ydb-request-type"

_DEFAULT_MAX_GRPC_MESSAGE_SIZE = 64 * 10**6  # 64 MB cap for send and receive
+
+
+def _message_to_string(message):
+ """
+ Constructs a string representation of provided message or generator
+ :param message: A protocol buffer or generator instance
+ :return: A string
+ """
+ try:
+ return text_format.MessageToString(message, as_one_line=True)
+ except Exception:
+ return str(message)
+
+
def _log_response(rpc_state, response):
    """
    Writes a message with response into debug logs
    :param rpc_state: A state of rpc
    :param response: A received response
    :return: None
    """
    # Skip the (potentially expensive) message stringification unless the
    # debug level is actually enabled.
    if not logger.isEnabledFor(logging.DEBUG):
        return
    logger.debug("%s: response = { %s }", rpc_state, _message_to_string(response))
+
+
def _log_request(rpc_state, request):
    """
    Writes a message with request into debug logs
    :param rpc_state: A state of rpc
    :param request: A request to be sent
    :return: None
    """
    # Mirror of _log_response for the outgoing direction.
    if not logger.isEnabledFor(logging.DEBUG):
        return
    logger.debug("%s: request = { %s }", rpc_state, _message_to_string(request))
+
+
def _rpc_error_handler(rpc_state, rpc_error, on_disconnected=None):
    """
    RPC call error handler, that translates gRPC error into YDB issue
    :param rpc_state: A state of rpc
    :param rpc_error: an underlying rpc error to handle
    :param on_disconnected: a handler to call on disconnected connection
    :return: an issues.Error instance for the caller to raise
    """
    logger.info("%s: received error, %s", rpc_state, rpc_error)
    if isinstance(rpc_error, grpc.Call):
        if rpc_error.code() == grpc.StatusCode.UNAUTHENTICATED:
            return issues.Unauthenticated(rpc_error.details())
        elif rpc_error.code() == grpc.StatusCode.DEADLINE_EXCEEDED:
            return issues.DeadlineExceed("Deadline exceeded on request")
        elif rpc_error.code() == grpc.StatusCode.UNIMPLEMENTED:
            return issues.Unimplemented("Method or feature is not implemented on server!")

    # Any other gRPC failure is treated as a lost connection: notify the
    # owner so the channel can be re-established.
    logger.debug("%s: unhandled rpc error, disconnecting channel", rpc_state)
    if on_disconnected is not None:
        on_disconnected()

    return issues.ConnectionLost("Rpc error, reason %s" % str(rpc_error))
+
+
def _on_response_callback(rpc_state, call_state_unref, wrap_result=None, on_disconnected=None, wrap_args=()):
    """
    Callback to be executed on received RPC response
    :param rpc_state: A state of the rpc whose rendezvous completed
    :param call_state_unref: A callable that unregisters the call from its connection
    :param wrap_result: A callable that wraps received response
    :param on_disconnected: A handler to executed on disconnected channel
    :param wrap_args: An arguments to be passed into wrap result callable
    :return: None
    """
    try:
        logger.debug("%s: on response callback started", rpc_state)
        response = rpc_state.rendezvous.result()
        _log_response(rpc_state, response)
        response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args)
        rpc_state.result_future.set_result(response)
        logger.debug("%s: on response callback success", rpc_state)
    except grpc.FutureCancelledError as e:
        logger.debug("%s: request execution cancelled", rpc_state)
        if not rpc_state.result_future.cancelled():
            rpc_state.result_future.set_exception(e)

    except grpc.RpcError as rpc_call_error:
        rpc_state.result_future.set_exception(_rpc_error_handler(rpc_state, rpc_call_error, on_disconnected))

    except issues.Error as e:
        logger.info("%s: received exception, %s", rpc_state, str(e))
        rpc_state.result_future.set_exception(e)

    except Exception as e:
        # Failures raised by wrap_result (or anything unexpected) are
        # surfaced to the caller as ConnectionLost.
        logger.error("%s: received exception, %s", rpc_state, str(e))
        rpc_state.result_future.set_exception(issues.ConnectionLost(str(e)))

    # Always unregister the call so a closing connection can be destroyed
    # once it has no in-flight calls left.
    call_state_unref()
+
+
def _construct_metadata(driver_config, settings):
    """
    Translates request settings into RPC metadata
    :param driver_config: A driver config
    :param settings: An instance of BaseRequestSettings
    :return: RPC metadata as a list of (header, value) pairs
    """
    metadata = []
    if driver_config.database is not None:
        metadata.append((YDB_DATABASE_HEADER, driver_config.database))

    # Credential headers may be skipped when the settings opt out of RPC auth.
    need_rpc_auth = getattr(settings, "need_rpc_auth", True)
    if driver_config.credentials is not None and need_rpc_auth:
        metadata.extend(driver_config.credentials.auth_metadata())

    if settings is not None:
        if settings.trace_id is not None:
            metadata.append((YDB_TRACE_ID_HEADER, settings.trace_id))
        if settings.request_type is not None:
            metadata.append((YDB_REQUEST_TYPE_HEADER, settings.request_type))
        metadata.extend(getattr(settings, "headers", []))

    metadata.append(_utilities.x_ydb_sdk_build_info_header())
    return metadata
+
+
+def _get_request_timeout(settings):
+ """
+ Extracts RPC timeout from request settings
+ :param settings: an instance of BaseRequestSettings
+ :return: timeout of RPC execution
+ """
+ if settings is None or settings.timeout is None:
+ return DEFAULT_TIMEOUT
+ return settings.timeout
+
+
class EndpointOptions(object):
    """Per-endpoint channel tweaks: a TLS server-name override and an
    optional node id."""

    __slots__ = ("ssl_target_name_override", "node_id")

    def __init__(self, ssl_target_name_override=None, node_id=None):
        self.node_id = node_id
        self.ssl_target_name_override = ssl_target_name_override
+
+
def _construct_channel_options(driver_config, endpoint_options=None):
    """
    Constructs gRPC channel initialization options
    :param driver_config: A driver config instance
    :param endpoint_options: Endpoint options
    :return: A channel initialization options (list of (name, value) pairs)
    """
    _default_connect_options = [
        ("grpc.max_receive_message_length", _DEFAULT_MAX_GRPC_MESSAGE_SIZE),
        ("grpc.max_send_message_length", _DEFAULT_MAX_GRPC_MESSAGE_SIZE),
        ("grpc.primary_user_agent", driver_config.primary_user_agent),
        (
            "grpc.lb_policy_name",
            getattr(driver_config, "grpc_lb_policy_name", "round_robin"),
        ),
    ]
    if driver_config.grpc_keep_alive_timeout is not None:
        # Ping interval is timeout/8 (>> 3); pings are allowed even with no
        # active data streams.
        _default_connect_options.extend(
            [
                ("grpc.keepalive_time_ms", driver_config.grpc_keep_alive_timeout >> 3),
                ("grpc.keepalive_timeout_ms", driver_config.grpc_keep_alive_timeout),
                ("grpc.http2.max_pings_without_data", 0),
                ("grpc.keepalive_permit_without_calls", 0),
            ]
        )
    if endpoint_options is not None:
        if endpoint_options.ssl_target_name_override:
            _default_connect_options.append(
                (
                    "grpc.ssl_target_name_override",
                    endpoint_options.ssl_target_name_override,
                )
            )
    if driver_config.channel_options is None:
        return _default_connect_options
    # User-supplied options win: only append defaults whose keys are not
    # already overridden in driver_config.channel_options.
    channel_options = copy.deepcopy(driver_config.channel_options)
    custom_options_keys = set(i[0] for i in driver_config.channel_options)
    for item in filter(lambda x: x[0] not in custom_options_keys, _default_connect_options):
        channel_options.append(item)
    return channel_options
+
+
class _RpcState(object):
    """Per-call state: the bound stub method, a unique request id, the gRPC
    rendezvous (call object) and cached trailing metadata."""

    __slots__ = (
        "rpc",
        "request_id",
        "result_future",
        "rpc_name",
        "endpoint",
        "rendezvous",
        "metadata_kv",
        "endpoint_key",
    )

    def __init__(self, stub_instance, rpc_name, endpoint, endpoint_key):
        """Stores all RPC related data"""
        self.rpc_name = rpc_name
        self.rpc = getattr(stub_instance, rpc_name)
        self.request_id = uuid.uuid4()
        self.endpoint = endpoint
        self.rendezvous = None
        self.metadata_kv = None
        self.endpoint_key = endpoint_key

    def __str__(self):
        return "RpcState(%s, %s, %s)" % (self.rpc_name, self.request_id, self.endpoint)

    def __call__(self, *args, **kwargs):
        """Execute a RPC."""
        # Prefer with_call, which also yields the call object (rendezvous)
        # for trailing metadata; fall back to a plain invocation when the rpc
        # object has no with_call. NOTE: this also swallows AttributeErrors
        # raised during the with_call execution itself.
        try:
            response, rendezvous = self.rpc.with_call(*args, **kwargs)
            self.rendezvous = rendezvous
            return response
        except AttributeError:
            return self.rpc(*args, **kwargs)

    def trailing_metadata(self):
        """Trailing metadata of the call."""
        # Lazily fold (key, value) pairs into key -> {values}; cached after
        # the first access.
        if self.metadata_kv is None:

            self.metadata_kv = collections.defaultdict(set)
            for metadatum in self.rendezvous.trailing_metadata():
                self.metadata_kv[metadatum.key].add(metadatum.value)

        return self.metadata_kv

    def future(self, *args, **kwargs):
        # Asynchronous flavour: returns (rendezvous, result_future) where the
        # result future is completed by _on_response_callback.
        self.rendezvous = self.rpc.future(*args, **kwargs)
        self.result_future = futures.Future()

        def _cancel_callback(f):
            """forwards cancel to gPRC future"""
            if f.cancelled():
                self.rendezvous.cancel()

        self.rendezvous.add_done_callback(_cancel_callback)
        return self.rendezvous, self.result_future
+
+
+_nanos_in_second = 10**9
+
+
def _set_duration(duration_value, seconds_float):
    """Split *seconds_float* into whole seconds and nanoseconds, store both
    on the Duration-like *duration_value*, and return it for chaining."""
    whole_seconds = int(seconds_float)
    duration_value.seconds = whole_seconds
    duration_value.nanos = int((seconds_float - whole_seconds) * _nanos_in_second)
    return duration_value
+
+
def _set_server_timeouts(request, settings, default_value):
    """Fill request.operation_params with operation_timeout / cancel_after
    taken from *settings*, falling back to *default_value*; no-op when the
    request carries no operation_params."""
    if not hasattr(request, "operation_params"):
        return

    operation_timeout = getattr(settings, "operation_timeout", None)
    if operation_timeout is None:
        operation_timeout = default_value
    cancel_after = getattr(settings, "cancel_after", None)
    if cancel_after is None:
        cancel_after = default_value
    _set_duration(request.operation_params.operation_timeout, operation_timeout)
    _set_duration(request.operation_params.cancel_after, cancel_after)
+
+
def channel_factory(endpoint, driver_config, channel_provider=None, endpoint_options=None):
    """Create an insecure or TLS gRPC channel for *endpoint* according to the
    driver config; *channel_provider* defaults to the grpc module."""
    channel_provider = channel_provider if channel_provider is not None else grpc
    options = _construct_channel_options(driver_config, endpoint_options)
    logger.debug("Channel options: {}".format(options))

    if driver_config.root_certificates is None and not driver_config.secure_channel:
        return channel_provider.insecure_channel(
            endpoint, options, compression=getattr(driver_config, "compression", None)
        )

    # TLS path: fall back to the bundled default PEM when no CA is configured.
    root_certificates = driver_config.root_certificates
    if root_certificates is None:
        root_certificates = default_pem.load_default_pem()
    credentials = grpc.ssl_channel_credentials(
        root_certificates, driver_config.private_key, driver_config.certificate_chain
    )
    return channel_provider.secure_channel(
        endpoint,
        credentials,
        options,
        compression=getattr(driver_config, "compression", None),
    )
+
+
class EndpointKey(object):
    """Identity of a connection target: the endpoint address together with
    an optional node id."""

    __slots__ = ("endpoint", "node_id")

    def __init__(self, endpoint, node_id):
        self.node_id = node_id
        self.endpoint = endpoint
+
+
class Connection(object):
    """Wrapper around a single gRPC channel to one YDB endpoint.

    In-flight calls are reference-counted under ``lock`` so that close() can
    defer destroying the channel until the last call finishes (_finish_call
    destroys it when ``closing`` is set and ``calls`` drops to zero).
    """

    __slots__ = (
        "endpoint",
        "_channel",
        "_call_states",
        "_stub_instances",
        "_driver_config",
        "_cleanup_callbacks",
        "__weakref__",
        "lock",
        "calls",
        "closing",
        "endpoint_key",
        "node_id",
    )

    def __init__(self, endpoint, driver_config=None, endpoint_options=None):
        """
        Object that wraps gRPC channel and encapsulates gRPC request execution logic
        :param endpoint: endpoint to connect (in pattern host:port), constructed by user or
        discovered by the YDB endpoint discovery mechanism
        :param driver_config: A driver config instance to be used for RPC call interception
        :param endpoint_options: An optional EndpointOptions (TLS name override, node id)
        """
        global _stubs_list
        self.endpoint = endpoint
        self.node_id = getattr(endpoint_options, "node_id", None)
        self.endpoint_key = EndpointKey(endpoint, getattr(endpoint_options, "node_id", None))
        self._channel = channel_factory(self.endpoint, driver_config, endpoint_options=endpoint_options)
        self._driver_config = driver_config
        self._call_states = {}  # request_id -> _RpcState of in-flight calls
        self._stub_instances = {}
        self._cleanup_callbacks = []
        # pre-initialize stubs
        for stub in _stubs_list:
            self._stub_instances[stub] = stub(self._channel)
        self.lock = threading.RLock()  # guards calls / closing / _call_states
        self.calls = 0  # number of in-flight RPCs
        self.closing = False

    def _prepare_stub_instance(self, stub):
        # Stubs outside the pre-initialized _stubs_list are created on demand.
        if stub not in self._stub_instances:
            self._stub_instances[stub] = stub(self._channel)

    def add_cleanup_callback(self, callback):
        # Callbacks run (with this connection) when close() is invoked.
        self._cleanup_callbacks.append(callback)

    def _prepare_call(self, stub, rpc_name, request, settings):
        # Resolve timeout/metadata, register the call under the lock, and
        # refuse to start new calls on a closing connection.
        timeout, metadata = _get_request_timeout(settings), _construct_metadata(self._driver_config, settings)
        _set_server_timeouts(request, settings, timeout)
        self._prepare_stub_instance(stub)
        rpc_state = _RpcState(self._stub_instances[stub], rpc_name, self.endpoint, self.endpoint_key)
        logger.debug("%s: creating call state", rpc_state)
        with self.lock:
            if self.closing:
                raise issues.ConnectionLost("Couldn't start call")
            self.calls += 1
            self._call_states[rpc_state.request_id] = rpc_state
        # Call successfully prepared and registered
        _log_request(rpc_state, request)
        return rpc_state, timeout, metadata

    def _finish_call(self, call_state):
        with self.lock:
            self.calls -= 1
            self._call_states.pop(call_state.request_id, None)
            # Call successfully finished
            if self.closing and self.calls == 0:
                # Channel is closing and we have to destroy channel
                self.destroy()

    def future(
        self,
        request,
        stub,
        rpc_name,
        wrap_result=None,
        settings=None,
        wrap_args=(),
        on_disconnected=None,
    ):
        """
        Sends request constructed by client
        :param request: A request constructed by client
        :param stub: A stub instance to wrap channel
        :param rpc_name: A name of RPC to be executed
        :param wrap_result: A callable that intercepts call and wraps received response
        :param settings: An instance of BaseRequestSettings that can be used
        for RPC metadata construction
        :param on_disconnected: A callable to be executed when underlying channel becomes disconnected
        :param wrap_args: And arguments to be passed into wrap_result callable
        :return: A future of computation
        """
        rpc_state, timeout, metadata = self._prepare_call(stub, rpc_name, request, settings)
        rendezvous, result_future = rpc_state.future(
            request,
            timeout,
            metadata,
            compression=getattr(settings, "compression", None),
        )
        # The completion callback wraps the result, resolves result_future
        # and unregisters the call from this connection.
        rendezvous.add_done_callback(
            lambda resp_future: _on_response_callback(
                rpc_state,
                lambda: self._finish_call(rpc_state),
                wrap_result,
                on_disconnected,
                wrap_args,
            )
        )
        return result_future

    def __call__(
        self,
        request,
        stub,
        rpc_name,
        wrap_result=None,
        settings=None,
        wrap_args=(),
        on_disconnected=None,
    ):
        """
        Synchronously sends request constructed by client library
        :param request: A request constructed by client
        :param stub: A stub instance to wrap channel
        :param rpc_name: A name of RPC to be executed
        :param wrap_result: A callable that intercepts call and wraps received response
        :param settings: An instance of BaseRequestSettings that can be used
        for RPC metadata construction
        :param on_disconnected: A callable to be executed when underlying channel becomes disconnected
        :param wrap_args: And arguments to be passed into wrap_result callable
        :return: A result of computation
        """
        rpc_state, timeout, metadata = self._prepare_call(stub, rpc_name, request, settings)
        try:
            response = rpc_state(
                request,
                timeout,
                metadata,
                compression=getattr(settings, "compression", None),
            )
            _log_response(rpc_state, response)
            return response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args)
        except grpc.RpcError as rpc_error:
            # Translate gRPC errors to YDB issues (and possibly signal
            # disconnection) before raising.
            raise _rpc_error_handler(rpc_state, rpc_error, on_disconnected)
        finally:
            self._finish_call(rpc_state)

    @classmethod
    def ready_factory(cls, endpoint, driver_config, ready_timeout=10, endpoint_options=None):
        """Create a connection and wait up to *ready_timeout* seconds for its
        channel to become ready; returns None on timeout or failure."""
        candidate = cls(endpoint, driver_config, endpoint_options=endpoint_options)
        ready_future = candidate.ready_future()
        try:
            ready_future.result(timeout=ready_timeout)
            return candidate
        except grpc.FutureTimeoutError:
            ready_future.cancel()
            candidate.close()
            return None

        except Exception:
            candidate.close()
            return None

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()

    def close(self):
        """
        Closes the underlying gRPC channel
        :return: None
        """
        logger.info("Closing channel for endpoint %s", self.endpoint)
        with self.lock:
            self.closing = True

            for callback in self._cleanup_callbacks:
                callback(self)

            # potentially we should cancel in-flight calls here but currently
            # it is not required since gRPC can successfully cancel these calls manually.

            if self.calls == 0:
                # everything is cancelled/completed and channel can be destroyed
                self.destroy()

    def destroy(self):
        # Defensive hasattr checks: destroy may run during partially
        # constructed/closed states.
        if hasattr(self, "_channel") and hasattr(self._channel, "close"):
            self._channel.close()

    def ready_future(self):
        """
        Creates a future that tracks underlying gRPC channel is ready
        :return: A Future object that matures when the underlying channel is ready
        to receive request
        """
        return grpc.channel_ready_future(self._channel)
diff --git a/contrib/python/ydb/py3/ydb/convert.py b/contrib/python/ydb/py3/ydb/convert.py
new file mode 100644
index 0000000000..6c4164bc44
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/convert.py
@@ -0,0 +1,470 @@
+# -*- coding: utf-8 -*-
+import decimal
+from google.protobuf import struct_pb2
+
+from . import issues, types, _apis
+
+
# 128-bit decimal values travel as two 64-bit halves (low_128 / high_128);
# these constants drive their (de)composition.
_SHIFT_BIT_COUNT = 64
_SHIFT = 2**64
_SIGN_BIT = 2**63  # sign bit of the high 64-bit half
# Sentinel integer payloads YDB uses for non-finite Decimal values.
_DecimalNanRepr = 10**35 + 1
_DecimalInfRepr = 10**35
_DecimalSignedInfRepr = -(10**35)
# Maps primitive type protobuf ids to PrimitiveType members; filled below.
_primitive_type_by_id = {}
_default_allow_truncated_result = False


def _initialize():
    # Build the id -> PrimitiveType lookup used by the value converters.
    for pt in types.PrimitiveType:
        _primitive_type_by_id[pt._idn_] = pt


_initialize()
+
+
class _DotDict(dict):
    """A dict whose keys are also readable as attributes (d.foo == d["foo"]).

    The pass-through ``__init__`` of the original was dead code and is removed.

    NOTE(review): a missing key raises KeyError (not AttributeError) from
    attribute access — callers appear to rely on that.
    """

    def __getattr__(self, item):
        return self[item]
+
+
+def _is_decimal_signed(hi_value):
+ return (hi_value & _SIGN_BIT) == _SIGN_BIT
+
+
def _pb_to_decimal(type_pb, value_pb, table_client_settings):
    """Convert a protobuf 128-bit decimal value into decimal.Decimal."""
    # Reinterpret the unsigned high half as signed two's complement.
    hi = (value_pb.high_128 - (1 << _SHIFT_BIT_COUNT)) if _is_decimal_signed(value_pb.high_128) else value_pb.high_128
    # Recombine both halves into a single signed 128-bit integer.
    int128_value = value_pb.low_128 + (hi << _SHIFT_BIT_COUNT)
    # Sentinel payloads encode the non-finite values.
    if int128_value == _DecimalNanRepr:
        return decimal.Decimal("Nan")
    elif int128_value == _DecimalInfRepr:
        return decimal.Decimal("Inf")
    elif int128_value == _DecimalSignedInfRepr:
        return decimal.Decimal("-Inf")
    # Scale down by 10**scale to restore the fractional part.
    return decimal.Decimal(int128_value) / decimal.Decimal(10**type_pb.decimal_type.scale)
+
+
+def _pb_to_primitive(type_pb, value_pb, table_client_settings):
+ return _primitive_type_by_id.get(type_pb.type_id).get_value(value_pb, table_client_settings)
+
+
def _pb_to_optional(type_pb, value_pb, table_client_settings):
    """Decode an optional value: None, a nested value, or an inline value.

    The oneof discriminator is computed once instead of twice per call.
    """
    which = value_pb.WhichOneof("value")
    if which == "null_flag_value":
        return None
    item_type = type_pb.optional_type.item
    if which == "nested_value":
        return _to_native_value(item_type, value_pb.nested_value, table_client_settings)
    return _to_native_value(item_type, value_pb, table_client_settings)
+
+
def _pb_to_list(type_pb, value_pb, table_client_settings):
    """Decode a protobuf list value into a Python list of native values."""
    item_type = type_pb.list_type.item
    return [_to_native_value(item_type, item, table_client_settings) for item in value_pb.items]
+
+
def _pb_to_tuple(type_pb, value_pb, table_client_settings):
    """Decode a protobuf tuple value into a Python tuple of native values."""
    element_types = type_pb.tuple_type.elements
    return tuple(
        _to_native_value(element_type, element, table_client_settings)
        for element_type, element in zip(element_types, value_pb.items)
    )
+
+
def _pb_to_dict(type_pb, value_pb, table_client_settings):
    """Decode a protobuf dict value into a Python dict of native pairs."""
    key_type = type_pb.dict_type.key
    payload_type = type_pb.dict_type.payload
    return {
        _to_native_value(key_type, pair.key, table_client_settings): _to_native_value(
            payload_type, pair.payload, table_client_settings
        )
        for pair in value_pb.pairs
    }
+
+
+class _Struct(_DotDict):
+ pass
+
+
def _pb_to_struct(type_pb, value_pb, table_client_settings):
    """Decode a protobuf struct into a _Struct keyed by member name."""
    struct = _Struct()
    for member, item_value in zip(type_pb.struct_type.members, value_pb.items):
        struct[member.name] = _to_native_value(member.type, item_value, table_client_settings)
    return struct
+
+
def _pb_to_void(type_pb, value_pb, table_client_settings):
    """A void value carries no payload; it always decodes to None."""
    return None
+
+
# Dispatch table: protobuf Type oneof field name -> decoder function.
# Empty list/dict types reuse the regular decoders (their value payloads
# simply contain no items/pairs).
_to_native_map = {
    "type_id": _pb_to_primitive,
    "decimal_type": _pb_to_decimal,
    "optional_type": _pb_to_optional,
    "list_type": _pb_to_list,
    "tuple_type": _pb_to_tuple,
    "dict_type": _pb_to_dict,
    "struct_type": _pb_to_struct,
    "void_type": _pb_to_void,
    "empty_list_type": _pb_to_list,
    "empty_dict_type": _pb_to_dict,
}
+
+
def _to_native_value(type_pb, value_pb, table_client_settings=None):
    """Dispatch a protobuf value to its decoder based on the type oneof."""
    decoder = _to_native_map.get(type_pb.WhichOneof("type"))
    return decoder(type_pb, value_pb, table_client_settings)
+
+
def _decimal_to_int128(value_type, value):
    """Convert decimal.Decimal into its scaled 128-bit integer representation.

    :raises issues.GenericError: when the value does not fit the declared
        precision/scale.
    """
    # Non-finite values map to reserved sentinel integers.
    if value.is_nan():
        return _DecimalNanRepr
    elif value.is_infinite():
        if value.is_signed():
            return _DecimalSignedInfRepr
        return _DecimalInfRepr

    sign, digits, exponent = value.as_tuple()
    # Assemble the significand from its decimal digits.
    int128_value = 0
    digits_count = 0
    for digit in digits:
        int128_value *= 10
        int128_value += digit
        digits_count += 1

    if value_type.decimal_type.scale + exponent < 0:
        raise issues.GenericError("Couldn't parse decimal value, exponent is too large")

    # Shift so the integer carries exactly `scale` fractional digits.
    for _ in range(value_type.decimal_type.scale + exponent):
        int128_value *= 10
        digits_count += 1

    if digits_count > value_type.decimal_type.precision + value_type.decimal_type.scale:
        raise issues.GenericError("Couldn't parse decimal value, digits count > 35")

    if sign:
        int128_value *= -1

    return int128_value
+
+
def _decimal_to_pb(value_type, value):
    """Encode decimal.Decimal into a protobuf Value (low_128/high_128 halves)."""
    value_pb = _apis.ydb_value.Value()
    int128_value = _decimal_to_int128(value_type, value)
    if int128_value < 0:
        # Two's complement: bias the high half back into the unsigned range.
        value_pb.high_128 = (int128_value >> _SHIFT_BIT_COUNT) + (1 << _SHIFT_BIT_COUNT)
        int128_value -= (int128_value >> _SHIFT_BIT_COUNT) << _SHIFT_BIT_COUNT
    else:
        value_pb.high_128 = int128_value >> _SHIFT_BIT_COUNT
        int128_value -= value_pb.high_128 << _SHIFT_BIT_COUNT
    value_pb.low_128 = int128_value
    return value_pb
+
+
+def _primitive_to_pb(type_pb, value):
+ value_pb = _apis.ydb_value.Value()
+ data_type = _primitive_type_by_id.get(type_pb.type_id)
+ data_type.set_value(value_pb, value)
+ return value_pb
+
+
+def _optional_to_pb(type_pb, value):
+ if value is None:
+ return _apis.ydb_value.Value(null_flag_value=struct_pb2.NULL_VALUE)
+ return _from_native_value(type_pb.optional_type.item, value)
+
+
def _list_to_pb(type_pb, value):
    """Encode an iterable into a protobuf list value."""
    item_type = type_pb.list_type.item
    value_pb = _apis.ydb_value.Value()
    for element in value:
        value_pb.items.add().MergeFrom(_from_native_value(item_type, element))
    return value_pb
+
+
+def _tuple_to_pb(type_pb, value):
+ value_pb = _apis.ydb_value.Value()
+ for element_type, element_value in zip(type_pb.tuple_type.elements, value):
+ value_item_proto = value_pb.items.add()
+ value_item_proto.MergeFrom(_from_native_value(element_type, element_value))
+ return value_pb
+
+
def _dict_to_pb(type_pb, value):
    """Encode a mapping into a protobuf dict value."""
    value_pb = _apis.ydb_value.Value()
    for key, payload in value.items():
        kv_pair = value_pb.pairs.add()
        kv_pair.key.MergeFrom(_from_native_value(type_pb.dict_type.key, key))
        # NOTE(review): falsy payloads (0, "", False) are skipped here, leaving
        # the pair's payload at its protobuf default — confirm this truthiness
        # test is intended rather than `payload is not None`.
        if payload:
            kv_pair.payload.MergeFrom(_from_native_value(type_pb.dict_type.payload, payload))
    return value_pb
+
+
def _struct_to_pb(type_pb, value):
    """Encode a dict or attribute-bearing object into a protobuf struct."""
    value_pb = _apis.ydb_value.Value()
    is_mapping = isinstance(value, dict)
    for member in type_pb.struct_type.members:
        member_value = value[member.name] if is_mapping else getattr(value, member.name)
        value_pb.items.add().MergeFrom(_from_native_value(member.type, member_value))
    return value_pb
+
+
+_from_native_map = {
+ "type_id": _primitive_to_pb,
+ "decimal_type": _decimal_to_pb,
+ "optional_type": _optional_to_pb,
+ "list_type": _list_to_pb,
+ "tuple_type": _tuple_to_pb,
+ "dict_type": _dict_to_pb,
+ "struct_type": _struct_to_pb,
+}
+
+
+def _decimal_type_to_native(type_pb):
+ return types.DecimalType(type_pb.decimal_type.precision, type_pb.decimal_type.scale)
+
+
+def _optional_type_to_native(type_pb):
+ return types.OptionalType(type_to_native(type_pb.optional_type.item))
+
+
+def _list_type_to_native(type_pb):
+ return types.ListType(type_to_native(type_pb.list_type.item))
+
+
+def _primitive_type_to_native(type_pb):
+ return _primitive_type_by_id.get(type_pb.type_id)
+
+
+def _null_type_factory(type_pb):
+ return types.NullType()
+
+
+_type_to_native_map = {
+ "optional_type": _optional_type_to_native,
+ "type_id": _primitive_type_to_native,
+ "decimal_type": _decimal_type_to_native,
+ "null_type": _null_type_factory,
+ "list_type": _list_type_to_native,
+}
+
+
+def type_to_native(type_pb):
+ return _type_to_native_map.get(type_pb.WhichOneof("type"))(type_pb)
+
+
+def _from_native_value(type_pb, value):
+ return _from_native_map.get(type_pb.WhichOneof("type"))(type_pb, value)
+
+
def to_typed_value_from_native(type_pb, value):
    """Build a TypedValue protobuf from a protobuf Type and a native value.

    :param type_pb: A protobuf Type describing the value
    :param value: The native Python value to encode
    :return: An _apis.ydb_value.TypedValue
    """
    typed_value = _apis.ydb_value.TypedValue()
    typed_value.type.MergeFrom(type_pb)
    # Call the module-private encoder directly, consistent with the other
    # helpers in this module (from_native_value is merely its public alias).
    typed_value.value.MergeFrom(_from_native_value(type_pb, value))
    return typed_value
+
+
def parameters_to_pb(parameters_types, parameters_values):
    """Encode query parameters into a name -> TypedValue protobuf mapping.

    :param parameters_types: Mapping of parameter name to a type (protobuf
        Type, AbstractTypeBuilder or PrimitiveType)
    :param parameters_values: Mapping of parameter name to native value
    :return: dict of name -> _apis.ydb_value.TypedValue; empty when there are
        no values
    """
    # `not parameters_values` already covers the None case.
    if not parameters_values:
        return {}

    param_values_pb = {}
    for name, type_pb in parameters_types.items():
        result = _apis.ydb_value.TypedValue()
        ttype = type_pb
        # Builders and primitive enum members expose their protobuf form
        # via `.proto`; plain protobuf Types are used as-is.
        if isinstance(type_pb, (types.AbstractTypeBuilder, types.PrimitiveType)):
            ttype = type_pb.proto
        result.type.MergeFrom(ttype)
        result.value.MergeFrom(_from_native_value(ttype, parameters_values[name]))
        param_values_pb[name] = result
    return param_values_pb
+
+
def _unwrap_optionality(column):
    """Strip optional_type wrappers from a column's type.

    Returns the decoder for the innermost type together with that type.
    """
    inner_type = column.type
    while inner_type.WhichOneof("type") == "optional_type":
        inner_type = inner_type.optional_type.item
    return _to_native_map.get(inner_type.WhichOneof("type")), inner_type
+
+
class _ResultSet(object):
    """Query result: columns plus decoded (or lazily-decoded) rows."""

    __slots__ = ("columns", "rows", "truncated", "snapshot")

    def __init__(self, columns, rows, truncated, snapshot=None):
        self.columns = columns
        self.rows = rows
        self.truncated = truncated
        self.snapshot = snapshot

    @classmethod
    def from_message(cls, message, table_client_settings=None, snapshot=None):
        """Eagerly decode every row of a protobuf result-set message."""
        rows = []
        # prepare column parsers before actual parsing
        column_parsers = []
        if len(message.rows) > 0:
            for column in message.columns:
                column_parsers.append(_unwrap_optionality(column))

        for row_proto in message.rows:
            row = _Row(message.columns)
            for column, value, column_info in zip(message.columns, row_proto.items, column_parsers):
                v_type = value.WhichOneof("value")
                if v_type == "null_flag_value":
                    row[column.name] = None
                    continue

                # Unwrap nested optional layers down to the concrete value.
                while v_type == "nested_value":
                    value = value.nested_value
                    v_type = value.WhichOneof("value")

                column_parser, unwrapped_type = column_info
                row[column.name] = column_parser(unwrapped_type, value, table_client_settings)
            rows.append(row)
        return cls(message.columns, rows, message.truncated, snapshot)

    @classmethod
    def lazy_from_message(cls, message, table_client_settings=None, snapshot=None):
        """Wrap rows in a lazy container; values decode on first access."""
        rows = _LazyRows(message.rows, table_client_settings, message.columns)
        return cls(message.columns, rows, message.truncated, snapshot)


# Public alias of the result-set container.
ResultSet = _ResultSet
+
+
class _Row(_DotDict):
    """A result row: column-name dict that also supports index/slice access."""

    def __init__(self, columns):
        super(_Row, self).__init__()
        self._columns = columns

    def __getitem__(self, key):
        if isinstance(key, int):
            # Positional access resolves through the column order.
            return self[self._columns[key].name]
        if isinstance(key, slice):
            # Slices yield a tuple of the selected columns' values.
            return tuple(self[column.name] for column in self._columns[key])
        return super(_Row, self).__getitem__(key)
+
+
class _LazyRowItem:
    """Holds a raw protobuf item and decodes it on first access, caching the result."""

    __slots__ = ["_item", "_type", "_table_client_settings", "_processed", "_parser"]

    def __init__(self, proto_item, proto_type, table_client_settings, parser):
        self._item = proto_item
        self._type = proto_type
        self._table_client_settings = table_client_settings
        self._processed = False
        self._parser = parser

    def get(self):
        """Return the decoded value, parsing exactly once on first call."""
        if not self._processed:
            self._item = self._parser(self._type, self._item, self._table_client_settings)
            self._processed = True
        return self._item
+
+
class _LazyRow(_DotDict):
    """Row whose values stay as raw protobuf items until first access."""

    def __init__(self, columns, proto_row, table_client_settings, parsers):
        super(_LazyRow, self).__init__()
        self._columns = columns
        self._table_client_settings = table_client_settings
        for i, (column, row_item) in enumerate(zip(self._columns, proto_row.items)):
            # Store wrappers via the parent __setitem__ to bypass the
            # NotImplementedError guard defined below.
            super(_LazyRow, self).__setitem__(
                column.name,
                _LazyRowItem(row_item, column.type, table_client_settings, parsers[i]),
            )

    def __setitem__(self, key, value):
        # Lazy rows are read-only.
        raise NotImplementedError("Cannot insert values into lazy row")

    def __getitem__(self, key):
        # Supports name, positional index and slice access, like _Row;
        # name access triggers (and caches) the decode.
        if isinstance(key, int):
            return self[self._columns[key].name]
        elif isinstance(key, slice):
            return tuple(map(lambda x: self[x.name], self._columns[key]))
        else:
            return super(_LazyRow, self).__getitem__(key).get()

    def __iter__(self):
        return super(_LazyRow, self).__iter__()

    def __next__(self):
        # NOTE(review): dict provides no __next__, so this delegation looks
        # like it would raise AttributeError if ever invoked — confirm.
        return super(_LazyRow, self).__next__().get()

    def next(self):
        return self.__next__()
+
+
+def from_native_value(type_pb, value):
+ return _from_native_value(type_pb, value)
+
+
+def to_native_value(typed_value):
+ return _to_native_value(typed_value.type, typed_value.value)
+
+
class _LazyRows:
    """Container of raw protobuf rows that are decoded lazily on access."""

    def __init__(self, rows, table_client_settings, columns):
        self._rows = rows
        # One memoizing parser per column, shared by every row.
        self._parsers = [_LazyParser(columns, i) for i in range(len(columns))]
        self._table_client_settings = table_client_settings
        self._columns = columns

    def __len__(self):
        return len(self._rows)

    def fetchone(self):
        # NOTE(review): always wraps the first row and does not consume it,
        # so repeated calls return the same row — confirm this is intended.
        return _LazyRow(self._columns, self._rows[0], self._table_client_settings, self._parsers)

    def fetchmany(self, number):
        """Yield up to *number* lazily-decoded rows, starting at the first."""
        for index in range(min(len(self), number)):
            yield _LazyRow(
                self._columns,
                self._rows[index],
                self._table_client_settings,
                self._parsers,
            )

    def __iter__(self):
        for row in self.fetchmany(len(self)):
            yield row

    def fetchall(self):
        """Yield every row (generator; rows still decode lazily)."""
        for row in self:
            yield row
+
+
class _LazyParser:
    """Resolves a column's decoder on first use and memoizes it."""

    __slots__ = ["_columns", "_column_index", "_prepared"]

    def __init__(self, columns, column_index):
        self._columns = columns
        self._column_index = column_index
        self._prepared = None

    def __call__(self, *args, **kwargs):
        if self._prepared is None:
            column_type = self._columns[self._column_index].type
            self._prepared = _to_native_map.get(column_type.WhichOneof("type"))
        return self._prepared(*args, **kwargs)
+
+
class ResultSets(list):
    """A list of result sets built from a repeated protobuf field.

    :raises issues.TruncatedResponseError: when a result set is truncated and
        the client settings do not allow truncated results.
    """

    def __init__(self, result_sets_pb, table_client_settings=None):
        # Lazy decoding is opt-in via the client settings.
        make_lazy = False if table_client_settings is None else table_client_settings._make_result_sets_lazy

        allow_truncated_result = _default_allow_truncated_result
        if table_client_settings:
            allow_truncated_result = table_client_settings._allow_truncated_result

        result_sets = []
        initializer = _ResultSet.from_message if not make_lazy else _ResultSet.lazy_from_message
        for result_set in result_sets_pb:
            result_set = initializer(result_set, table_client_settings)
            if result_set.truncated and not allow_truncated_result:
                raise issues.TruncatedResponseError("Response for the request was truncated by server")
            result_sets.append(result_set)
        super(ResultSets, self).__init__(result_sets)
diff --git a/contrib/python/ydb/py3/ydb/credentials.py b/contrib/python/ydb/py3/ydb/credentials.py
new file mode 100644
index 0000000000..ab50279846
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/credentials.py
@@ -0,0 +1,228 @@
+# -*- coding: utf-8 -*-
+import abc
+import typing
+
+from . import tracing, issues, connection
+from . import settings as settings_impl
+import threading
+from concurrent import futures
+import logging
+import time
+
+# Workaround for good IDE and universal for runtime
+if typing.TYPE_CHECKING:
+ from ._grpc.v4.protos import ydb_auth_pb2
+ from ._grpc.v4 import ydb_auth_v1_pb2_grpc
+else:
+ from ._grpc.common.protos import ydb_auth_pb2
+ from ._grpc.common import ydb_auth_v1_pb2_grpc
+
+
+YDB_AUTH_TICKET_HEADER = "x-ydb-auth-ticket"
+logger = logging.getLogger(__name__)
+
+
+class AbstractCredentials(abc.ABC):
+ """
+ An abstract class that provides auth metadata
+ """
+
+
+class Credentials(abc.ABC):
+ def __init__(self, tracer=None):
+ self.tracer = tracer if tracer is not None else tracing.Tracer(None)
+
+ @abc.abstractmethod
+ def auth_metadata(self):
+ """
+ :return: An iterable with auth metadata
+ """
+ pass
+
+ def get_auth_token(self) -> str:
+ for header, token in self.auth_metadata():
+ if header == YDB_AUTH_TICKET_HEADER:
+ return token
+ return ""
+
+
class OneToManyValue(object):
    """A shared value holder: readers block until the first value is published."""

    def __init__(self):
        self._value = None
        self._condition = threading.Condition()

    def consume(self, timeout=3):
        """Return the current value, waiting up to *timeout* seconds for the
        first publication; may still return None on timeout."""
        with self._condition:
            if self._value is None:
                self._condition.wait(timeout=timeout)
            return self._value

    def update(self, n_value):
        """Publish a new value, waking all waiters on the first publication."""
        with self._condition:
            previous, self._value = self._value, n_value
            if previous is None:
                self._condition.notify_all()
+
+
class AtMostOneExecution(object):
    """Runs submitted callbacks on one worker thread, at most one at a time.

    While a callback is in flight, further submissions are silently dropped;
    the next submission is accepted once the current callback finishes.
    """

    def __init__(self):
        self._can_schedule = True
        self._lock = threading.Lock()
        self._tp = futures.ThreadPoolExecutor(1)

    def wrapped_execution(self, callback):
        try:
            callback()
        except Exception:
            # Best-effort execution: failures are intentionally swallowed
            # (callers such as the token refresh loop handle retries).
            pass

        finally:
            self.cleanup()

    def submit(self, callback):
        with self._lock:
            if self._can_schedule:
                self._tp.submit(self.wrapped_execution, callback)
                self._can_schedule = False

    def cleanup(self):
        # Re-arm scheduling once the in-flight callback has finished.
        with self._lock:
            self._can_schedule = True
+
+
class AbstractExpiringTokenCredentials(Credentials):
    """Base for credentials that fetch a token and refresh it before expiry."""

    def __init__(self, tracer=None):
        super(AbstractExpiringTokenCredentials, self).__init__(tracer)
        self._expires_in = 0  # hard deadline: token should not be used past this
        self._refresh_in = 0  # soft deadline: refresh proactively after this
        self._hour = 60 * 60
        self._cached_token = OneToManyValue()
        self._tp = AtMostOneExecution()  # at most one refresh in flight
        self.logger = logger.getChild(self.__class__.__name__)
        self.last_error = None
        self.extra_error_message = ""

    @abc.abstractmethod
    def _make_token_request(self):
        # Subclasses return a dict with "access_token" and "expires_in" keys.
        pass

    def _log_refresh_start(self, current_time):
        self.logger.debug("Start refresh token from metadata")
        if current_time > self._refresh_in:
            self.logger.info(
                "Cached token reached refresh_in deadline, current time %s, deadline %s",
                current_time,
                self._refresh_in,
            )

        if current_time > self._expires_in and self._expires_in > 0:
            self.logger.error(
                "Cached token reached expires_in deadline, current time %s, deadline %s",
                current_time,
                self._expires_in,
            )

    def _update_expiration_info(self, auth_metadata):
        # Expiry is half the reported lifetime (capped at 1 hour); the soft
        # refresh point is a quarter of it (capped at 30 minutes).
        self._expires_in = time.time() + min(self._hour, auth_metadata["expires_in"] / 2)
        self._refresh_in = time.time() + min(self._hour / 2, auth_metadata["expires_in"] / 4)

    def _refresh(self):
        current_time = time.time()
        self._log_refresh_start(current_time)
        try:
            token_response = self._make_token_request()
            self._cached_token.update(token_response["access_token"])
            self._update_expiration_info(token_response)
            self.logger.info(
                "Token refresh successful. current_time %s, refresh_in %s",
                current_time,
                self._refresh_in,
            )

        except (KeyboardInterrupt, SystemExit):
            return

        except Exception as e:
            # Remember the failure for error reporting, pause briefly, and
            # schedule another attempt.
            self.last_error = str(e)
            time.sleep(1)
            self._tp.submit(self._refresh)

    @property
    @tracing.with_trace()
    def token(self):
        current_time = time.time()
        if current_time > self._refresh_in:
            # Kick off a background refresh; the cached value (if any) is
            # still consumed below while the refresh runs.
            tracing.trace(self.tracer, {"refresh": True})
            self._tp.submit(self._refresh)
        cached_token = self._cached_token.consume(timeout=3)
        tracing.trace(self.tracer, {"consumed": True})
        if cached_token is None:
            if self.last_error is None:
                raise issues.ConnectionError(
                    "%s: timeout occurred while waiting for token.\n%s"
                    % (
                        self.__class__.__name__,
                        self.extra_error_message,
                    )
                )
            raise issues.ConnectionError(
                "%s: %s.\n%s" % (self.__class__.__name__, self.last_error, self.extra_error_message)
            )
        return cached_token

    def auth_metadata(self):
        return [(YDB_AUTH_TICKET_HEADER, self.token)]
+
+
def _wrap_static_credentials_response(rpc_state, response):
    """Validate the Login response and unpack its LoginResult payload."""
    issues._process_response(response.operation)
    login_result = ydb_auth_pb2.LoginResult()
    response.operation.result.Unpack(login_result)
    return login_result
+
+
class StaticCredentials(AbstractExpiringTokenCredentials):
    """User/password credentials: exchanges a login for a token via AuthService."""

    def __init__(self, driver_config, user, password="", tracer=None):
        super(StaticCredentials, self).__init__(tracer)
        self.driver_config = driver_config
        self.user = user
        self.password = password
        self.request_timeout = 10  # seconds allowed for the Login RPC

    def _make_token_request(self):
        # A dedicated connection is built per request and always closed.
        conn = connection.Connection.ready_factory(self.driver_config.endpoint, self.driver_config)
        assert conn is not None, "Failed to establish connection in to %s" % self.driver_config.endpoint
        try:
            result = conn(
                ydb_auth_pb2.LoginRequest(user=self.user, password=self.password),
                ydb_auth_v1_pb2_grpc.AuthServiceStub,
                "Login",
                _wrap_static_credentials_response,
                settings_impl.BaseRequestSettings().with_timeout(self.request_timeout).with_need_rpc_auth(False),
            )
        finally:
            conn.close()
        # The server does not report a lifetime; assume 30 minutes.
        return {"expires_in": 30 * 60, "access_token": result.token}
+
+
+class AnonymousCredentials(Credentials):
+ @staticmethod
+ def auth_metadata():
+ return []
+
+
+class AuthTokenCredentials(Credentials):
+ def __init__(self, token):
+ self._token = token
+
+ def auth_metadata(self):
+ return [(YDB_AUTH_TICKET_HEADER, self._token)]
+
+
+class AccessTokenCredentials(Credentials):
+ def __init__(self, token):
+ self._token = token
+
+ def auth_metadata(self):
+ return [(YDB_AUTH_TICKET_HEADER, self._token)]
diff --git a/contrib/python/ydb/py3/ydb/dbapi/__init__.py b/contrib/python/ydb/py3/ydb/dbapi/__init__.py
new file mode 100644
index 0000000000..7363921192
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/dbapi/__init__.py
@@ -0,0 +1,47 @@
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+from .connection import Connection
+from .errors import (
+ Warning,
+ Error,
+ InterfaceError,
+ DatabaseError,
+ DataError,
+ OperationalError,
+ IntegrityError,
+ InternalError,
+ ProgrammingError,
+ NotSupportedError,
+)
+
# Version of this DB-API implementation.
version = "0.0.31"

# NOTE(review): version_info does not match the `version` string above —
# confirm which one is authoritative.
version_info = (
    1,
    0,
    0,
)

# DB-API 2.0 (PEP 249) module attributes.
apilevel = "1.0"

threadsafety = 0  # threads may not share the module

paramstyle = "qmark"  # '?' positional placeholders

# Convenience tuple of every exception class exported by this module.
errors = (
    Warning,
    Error,
    InterfaceError,
    DatabaseError,
    DataError,
    OperationalError,
    IntegrityError,
    InternalError,
    ProgrammingError,
    NotSupportedError,
)
+
+
+def connect(*args, **kwargs):
+ return Connection(*args, **kwargs)
diff --git a/contrib/python/ydb/py3/ydb/dbapi/connection.py b/contrib/python/ydb/py3/ydb/dbapi/connection.py
new file mode 100644
index 0000000000..4c22acd132
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/dbapi/connection.py
@@ -0,0 +1,87 @@
+from __future__ import absolute_import, unicode_literals
+
+import posixpath
+
+import ydb
+from .cursor import Cursor
+from .errors import DatabaseError
+
+
class Connection(object):
    """DB-API 2.0 style connection backed by a YDB driver and session pool."""

    # Class-level defaults keep close() safe even if __init__ failed midway.
    # (Fixes the misspelled, dead `deiver` class attribute.)
    driver = None
    pool = None

    def __init__(self, endpoint, database=None, **conn_kwargs):
        self.endpoint = endpoint
        self.database = database
        self._conn_kwargs = conn_kwargs
        driver, pool = self._create_driver(self.endpoint, self.database, **conn_kwargs)
        self.driver = driver
        self.pool = pool

    def cursor(self):
        """Return a new Cursor bound to this connection."""
        return Cursor(self)

    def execute(self, sql, parameters=None):
        """Convenience: run *sql* on a fresh cursor."""
        return self.cursor().execute(sql, parameters)

    def executemany(self, sql, parameters):
        """Convenience: run *sql* for each parameter set on a fresh cursor."""
        return self.cursor().executemany(sql, parameters)

    def describe(self, table_path):
        """Return column descriptions of *table_path* (relative to the database).

        :raises DatabaseError: when the table cannot be described
        """
        full_path = posixpath.join(self.database, table_path)
        try:
            res = self.pool.retry_operation_sync(lambda cli: cli.describe_table(full_path))
            return res.columns
        except ydb.Error as e:
            raise DatabaseError(e.message, e.issues, e.status)

        except Exception:
            raise DatabaseError("Failed to describe table %r" % (table_path,))

    def check_exists(self, table_path):
        """Return True when *table_path* exists in the scheme."""
        try:
            self.driver.scheme_client.describe_path(table_path)
            return True
        except ydb.SchemeError:
            return False

    def commit(self):
        # Transactions are managed per query; commit is a no-op.
        pass

    def rollback(self):
        # Transactions are managed per query; rollback is a no-op.
        pass

    def close(self):
        """Stop the session pool and driver, when they were created."""
        if self.pool is not None:
            self.pool.stop()
        if self.driver is not None:
            self.driver.stop()

    @staticmethod
    def _create_endpoint(host, port):
        return "%s:%d" % (host, port)

    @staticmethod
    def _create_driver(endpoint, database, **conn_kwargs):
        """Create a ready driver and session pool, or raise DatabaseError."""
        driver_config = ydb.DriverConfig(
            endpoint,
            database=database,
            table_client_settings=ydb.TableClientSettings()
            .with_native_date_in_result_sets(True)
            .with_native_datetime_in_result_sets(True)
            .with_native_json_in_result_sets(True),
            **conn_kwargs
        )
        driver = ydb.Driver(driver_config)
        try:
            driver.wait(timeout=5, fail_fast=True)
        except ydb.Error as e:
            raise DatabaseError(e.message, e.issues, e.status)

        except Exception:
            driver.stop()
            raise DatabaseError("Failed to connect to YDB, details %s" % driver.discovery_debug_details())

        return driver, ydb.SessionPool(driver)
diff --git a/contrib/python/ydb/py3/ydb/dbapi/cursor.py b/contrib/python/ydb/py3/ydb/dbapi/cursor.py
new file mode 100644
index 0000000000..92c471d562
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/dbapi/cursor.py
@@ -0,0 +1,178 @@
+from __future__ import absolute_import, unicode_literals
+
+import collections
+import datetime
+import itertools
+import logging
+
+import ydb
+from .errors import DatabaseError
+
+
+LOGGER = logging.getLogger(__name__)
+
+
+STR_QUOTE_MAP = (
+ ("\\", "\\\\"),
+ ("'", r"\'"),
+ ("\0", r"\x00"),
+ # To re-check: \b \f \r \n \t
+)
+
+
def render_str(value):
    """Escape *value* using STR_QUOTE_MAP and wrap it in single quotes."""
    escaped = value
    for raw, quoted in STR_QUOTE_MAP:
        escaped = escaped.replace(raw, quoted)
    return "'{}'".format(escaped)
+
+
+def render_date(value):
+ return "Date({})".format(render_str(value.isoformat()))
+
+
+def render_datetime(value):
+ # TODO: is there a better solution for this?
+ return "DateTime::MakeDatetime(DateTime::ParseIso8601({}))".format(render_str(value.isoformat()))
+
+
+def render(value):
+ if value is None:
+ return "NULL"
+ if isinstance(value, str):
+ return render_str(value)
+ if isinstance(value, datetime.datetime):
+ return render_datetime(value)
+ if isinstance(value, datetime.date):
+ return render_date(value)
+ return repr(value)
+
+
def render_sql(sql, parameters):
    """Substitute '?' placeholders in *sql* with rendered *parameters*."""
    if not parameters:
        return sql

    assert sql.count("?") == len(parameters), "num of placeholders != num of params"

    rendered = [render(parameter) for parameter in parameters]
    # Pad with one empty string so pieces and parameters zip evenly.
    rendered.append("")
    pieces = sql.split("?")
    assert len(pieces) == len(rendered)
    return "".join(part for pair in zip(pieces, rendered) for part in pair if part)
+
+
def named_result_for(column_names):
    """Create a namedtuple class describing one result row.

    TODO fix: this doesn't allow columns names starting with underscore, e.g. `select 1 as _a`.
    """
    row_factory = collections.namedtuple("NamedResult", column_names)
    return row_factory
+
+
+def _get_column_type(type_obj):
+ return str(type_obj)
+
+
+def get_column_type(type_obj):
+ return _get_column_type(ydb.convert.type_to_native(type_obj))
+
+
class Cursor(object):
    """DB-API 2.0 style cursor that executes queries as YDB scan queries."""

    def __init__(self, connection):
        self.connection = connection
        self.description = []  # DB-API 7-tuples, populated during execute
        self.arraysize = 1
        self.logger = LOGGER
        self.rows = None  # iterator over fetched rows; None before execute
        self._rows_prefetched = None

    def execute(self, sql, parameters=None):
        """Render *parameters* into *sql* and run it as a scan query.

        :raises DatabaseError: when the query fails
        """
        fsql = render_sql(sql, parameters)
        self.logger.debug("execute sql: %s", fsql)
        try:
            chunks = self.connection.driver.table_client.scan_query(fsql)
        except ydb.Error as e:
            raise DatabaseError(e.message, e.issues, e.status)

        self.description = []

        rows = self._rows_iterable(chunks)
        # Prefetch the first row so self.description is filled eagerly.
        try:
            first_row = next(rows)
        except StopIteration:
            pass
        else:
            rows = itertools.chain((first_row,), rows)
        if self.rows is not None:
            # Keep any unread rows left over from a previous execute.
            rows = itertools.chain(self.rows, rows)

        self.rows = rows

    def _rows_iterable(self, chunks_iterable):
        """Yield rows (as tuples) from scan-query chunks, filling description."""
        description = None
        try:
            for chunk in chunks_iterable:
                if description is None and len(chunk.result_set.rows) > 0:
                    description = [
                        (
                            col.name,
                            get_column_type(col.type),
                            None,
                            None,
                            None,
                            None,
                            None,
                        )
                        for col in chunk.result_set.columns
                    ]
                    self.description = description
                for row in chunk.result_set.rows:
                    # returns tuple to be compatible with SqlAlchemy and because
                    # of this PEP to return a sequence: https://www.python.org/dev/peps/pep-0249/#fetchmany
                    yield row[::]
        except ydb.Error as e:
            raise DatabaseError(e.message, e.issues, e.status)

    def _ensure_prefetched(self):
        """Materialize the remaining rows once so rowcount can be computed."""
        if self.rows is not None and self._rows_prefetched is None:
            self._rows_prefetched = list(self.rows)
            self.rows = iter(self._rows_prefetched)
        return self._rows_prefetched

    def executemany(self, sql, seq_of_parameters):
        for parameters in seq_of_parameters:
            self.execute(sql, parameters)

    def executescript(self, script):
        return self.execute(script)

    def fetchone(self):
        """Return the next row, or None when exhausted or nothing was executed."""
        if self.rows is None:
            return None
        try:
            return next(self.rows)
        except StopIteration:
            return None

    def fetchmany(self, size=None):
        """Return up to *size* rows (defaults to arraysize).

        Fixed: previously raised TypeError when called before execute();
        now returns [], consistent with fetchone().
        """
        if size is None:
            size = self.arraysize

        if self.rows is None:
            return []
        return list(itertools.islice(self.rows, size))

    def fetchall(self):
        """Return all remaining rows ([] when nothing was executed).

        Fixed: previously raised TypeError when called before execute().
        """
        if self.rows is None:
            return []
        return list(self.rows)

    def nextset(self):
        self.fetchall()

    def setinputsizes(self, sizes):
        pass

    def setoutputsize(self, column=None):
        pass

    def close(self):
        self.rows = None
        self._rows_prefetched = None

    @property
    def rowcount(self):
        """Number of remaining rows; -1 before any execute (per PEP 249)."""
        prefetched = self._ensure_prefetched()
        return -1 if prefetched is None else len(prefetched)
diff --git a/contrib/python/ydb/py3/ydb/dbapi/errors.py b/contrib/python/ydb/py3/ydb/dbapi/errors.py
new file mode 100644
index 0000000000..65e453856a
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/dbapi/errors.py
@@ -0,0 +1,102 @@
+class Warning(Exception):
+ pass
+
+
+class Error(Exception):
+ def __init__(self, message, issues=None, status=None):
+
+ pretty_issues = _pretty_issues(issues)
+ message = message if pretty_issues is None else pretty_issues
+
+ super(Error, self).__init__(message)
+ self.issues = issues
+ self.message = message
+ self.status = status
+
+
+class InterfaceError(Error):
+ pass
+
+
+class DatabaseError(Error):
+ pass
+
+
+class DataError(DatabaseError):
+ pass
+
+
+class OperationalError(DatabaseError):
+ pass
+
+
+class IntegrityError(DatabaseError):
+ pass
+
+
+class InternalError(DatabaseError):
+ pass
+
+
+class ProgrammingError(DatabaseError):
+ pass
+
+
+class NotSupportedError(DatabaseError):
+ pass
+
+
def _pretty_issues(issues):
    """Render a collection of YDB issues as one multi-line message.

    Returns None when rendering is impossible (no issues, or a tree that is
    too deep), in which case the caller keeps its original message.
    """
    if issues is None:
        return None

    children_messages = [_get_messages(issue, root=True) for issue in issues]

    # Any child that exceeded the depth limit invalidates the rendering.
    if None in children_messages:
        return None

    return "\n" + "\n".join(children_messages)
+
+
def _get_messages(issue, max_depth=100, indent=2, depth=0, root=False):
    """Recursively render one issue (and its children) as indented text.

    Returns None when the tree is deeper than *max_depth*.
    """
    if depth >= max_depth:
        return None
    margin_str = " " * depth * indent
    pre_message = ""
    children = ""
    if issue.issues:
        collapsed_messages = []
        # Collapse chains of single-child issues into one comma-joined line
        # (skipped at the root so top-level issues stay on separate lines).
        while not root and len(issue.issues) == 1:
            collapsed_messages.append(issue.message)
            issue = issue.issues[0]
        if collapsed_messages:
            pre_message = margin_str + ", ".join(collapsed_messages) + "\n"
            depth += 1
            margin_str = " " * depth * indent
        else:
            pre_message = ""

        children_messages = [
            _get_messages(iss, max_depth=max_depth, indent=indent, depth=depth + 1) for iss in issue.issues
        ]

        # Propagate depth-limit failures upward.
        if None in children_messages:
            return None

        children = "\n".join(children_messages)

    return (
        pre_message
        + margin_str
        + issue.message
        + "\n"
        + margin_str
        + "severity level: "
        + str(issue.severity)
        + "\n"
        + margin_str
        + "issue code: "
        + str(issue.issue_code)
        + "\n"
        + children
    )
diff --git a/contrib/python/ydb/py3/ydb/default_pem.py b/contrib/python/ydb/py3/ydb/default_pem.py
new file mode 100644
index 0000000000..b8272efd29
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/default_pem.py
@@ -0,0 +1,4686 @@
+data = """
+# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Label: "GlobalSign Root CA"
+# Serial: 4835703278459707669005204
+# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a
+# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
+# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99
+-----BEGIN CERTIFICATE-----
+MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
+A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
+b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
+MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
+YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
+aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
+jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
+xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
+1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
+snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
+U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
+9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
+AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
+yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
+38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
+AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
+DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
+HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Label: "GlobalSign Root CA - R2"
+# Serial: 4835703278459682885658125
+# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30
+# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe
+# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e
+-----BEGIN CERTIFICATE-----
+MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1
+MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL
+v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8
+eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq
+tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd
+C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa
+zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB
+mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH
+V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n
+bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG
+3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs
+J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO
+291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS
+ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd
+AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7
+TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Label: "Verisign Class 3 Public Primary Certification Authority - G3"
+# Serial: 206684696279472310254277870180966723415
+# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09
+# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6
+# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44
+-----BEGIN CERTIFICATE-----
+MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
+cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
+LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
+aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
+dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
+VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
+aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
+bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
+IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b
+N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t
+KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu
+kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm
+CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ
+Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu
+imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te
+2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe
+DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC
+/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p
+F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt
+TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Label: "Entrust.net Premium 2048 Secure Server CA"
+# Serial: 946069240
+# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90
+# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31
+# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
+RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
+bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
+IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3
+MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3
+LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp
+YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG
+A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq
+K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe
+sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX
+MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT
+XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/
+HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH
+4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub
+j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo
+U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf
+zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b
+u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+
+bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er
+fF6adulZkMV8gzURZVE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Label: "Baltimore CyberTrust Root"
+# Serial: 33554617
+# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4
+# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74
+# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ
+RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD
+VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX
+DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y
+ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy
+VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr
+mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr
+IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK
+mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu
+XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy
+dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye
+jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1
+BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3
+DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92
+9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx
+jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0
+Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
+R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Label: "AddTrust External Root"
+# Serial: 1
+# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f
+# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68
+# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2
+-----BEGIN CERTIFICATE-----
+MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs
+IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290
+MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux
+FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h
+bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v
+dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt
+H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9
+uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX
+mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX
+a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN
+E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0
+WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD
+VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0
+Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU
+cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx
+IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN
+AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH
+YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5
+6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC
+Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX
+c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a
+mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Label: "Entrust Root Certification Authority"
+# Serial: 1164660820
+# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4
+# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9
+# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c
+-----BEGIN CERTIFICATE-----
+MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
+Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW
+KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl
+cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw
+NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw
+NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy
+ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV
+BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ
+KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo
+Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4
+4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9
+KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI
+rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi
+94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB
+sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi
+gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo
+kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE
+vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA
+A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t
+O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua
+AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP
+9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/
+eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
+0vdXcDazv/wor3ElhVsT/h5/WrQ8
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Label: "GeoTrust Global CA"
+# Serial: 144470
+# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5
+# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12
+# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a
+-----BEGIN CERTIFICATE-----
+MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
+MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
+YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG
+EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg
+R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9
+9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq
+fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv
+iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU
+1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+
+bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW
+MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA
+ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l
+uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn
+Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS
+tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF
+PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un
+hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
+5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA"
+# Serial: 1
+# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48
+# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79
+# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12
+-----BEGIN CERTIFICATE-----
+MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE
+BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0
+IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV
+VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8
+cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT
+QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh
+F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v
+c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w
+mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd
+VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX
+teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ
+f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe
+Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+
+nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB
+/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY
+MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG
+9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc
+aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX
+IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn
+ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z
+uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN
+Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja
+QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW
+koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9
+ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt
+DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm
+bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA 2"
+# Serial: 1
+# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7
+# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79
+# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b
+-----BEGIN CERTIFICATE-----
+MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD
+VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1
+c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81
+WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG
+FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq
+XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL
+se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb
+KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd
+IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73
+y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt
+hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc
+QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4
+Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV
+HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ
+KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z
+dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ
+L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr
+Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo
+ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY
+T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz
+GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m
+1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV
+OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH
+6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX
+QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS
+-----END CERTIFICATE-----
+
+# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
+# Subject: CN=AAA Certificate Services O=Comodo CA Limited
+# Label: "Comodo AAA Services root"
+# Serial: 1
+# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0
+# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49
+# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4
+-----BEGIN CERTIFICATE-----
+MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj
+YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM
+GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua
+BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe
+3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4
+YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR
+rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm
+ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU
+oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF
+MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v
+QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t
+b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF
+AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q
+GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz
+Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2
+G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi
+l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3
+smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
+# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
+# Label: "QuoVadis Root CA"
+# Serial: 985026699
+# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24
+# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9
+# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73
+-----BEGIN CERTIFICATE-----
+MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz
+MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw
+IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR
+dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp
+li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D
+rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ
+WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug
+F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU
+xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC
+Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv
+dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw
+ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl
+IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh
+c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy
+ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh
+Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI
+KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T
+KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq
+y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p
+dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD
+VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL
+MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk
+fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8
+7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R
+cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y
+mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW
+xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK
+SnQ2+Q==
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2"
+# Serial: 1289
+# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b
+# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7
+# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86
+-----BEGIN CERTIFICATE-----
+MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa
+GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg
+Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J
+WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB
+rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp
++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1
+ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i
+Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz
+PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og
+/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH
+oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI
+yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud
+EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2
+A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL
+MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT
+ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f
+BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn
+g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl
+fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K
+WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha
+B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc
+hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR
+TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD
+mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z
+ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y
+4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza
+8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3"
+# Serial: 1478
+# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf
+# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85
+# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35
+-----BEGIN CERTIFICATE-----
+MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM
+V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB
+4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr
+H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd
+8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv
+vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT
+mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe
+btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc
+T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt
+WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ
+c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A
+4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD
+VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG
+CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0
+aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0
+aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu
+dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw
+czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G
+A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg
+Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0
+7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem
+d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd
++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B
+4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN
+t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x
+DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57
+k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s
+zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j
+Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT
+mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK
+4SVhM7JZG+Ju1zdXtg2pEto=
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1
+# Subject: O=SECOM Trust.net OU=Security Communication RootCA1
+# Label: "Security Communication Root CA"
+# Serial: 0
+# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a
+# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7
+# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c
+-----BEGIN CERTIFICATE-----
+MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY
+MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t
+dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5
+WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD
+VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8
+9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ
+DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9
+Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N
+QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ
+xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G
+A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG
+kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr
+Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5
+Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU
+JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot
+RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Sonera Class2 CA O=Sonera
+# Subject: CN=Sonera Class2 CA O=Sonera
+# Label: "Sonera Class 2 Root CA"
+# Serial: 29
+# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb
+# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27
+# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP
+MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx
+MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV
+BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o
+Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt
+5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s
+3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej
+vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu
+8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw
+DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG
+MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil
+zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/
+3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD
+FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6
+Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2
+ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M
+-----END CERTIFICATE-----
+
+# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Label: "XRamp Global CA Root"
+# Serial: 107108908803651509692980124233745014957
+# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
+# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
+# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
+gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
+MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
+UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
+dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
+dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
+dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
+38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
+KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
+DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
+qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
+JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
+PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
+BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
+jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
+eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
+ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
+vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
+qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
+IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
+i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
+O+7ETPTsJ3xCwnR8gooJybQDJbw=
+-----END CERTIFICATE-----
+
+# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Label: "Go Daddy Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
+# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
+# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
+MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
+YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
+MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
+ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
+MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
+ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
+PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
+wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
+EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
+avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
+YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
+sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
+/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
+IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
+ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
+OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
+TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
+HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
+dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
+ReYNnyicsbkqWletNw+vHX/bvZ8=
+-----END CERTIFICATE-----
+
+# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Label: "Starfield Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
+# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
+# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
+-----BEGIN CERTIFICATE-----
+MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
+MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
+U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
+NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
+ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
+ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
+DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
+8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
+X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
+K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
+1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
+A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
+zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
+YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
+bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
+DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
+L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
+eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
+xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
+VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
+WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
+-----END CERTIFICATE-----
+
+# Issuer: O=Government Root Certification Authority
+# Subject: O=Government Root Certification Authority
+# Label: "Taiwan GRCA"
+# Serial: 42023070807708724159991140556527066870
+# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e
+# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9
+# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3
+-----BEGIN CERTIFICATE-----
+MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/
+MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow
+PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
+AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR
+IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q
+gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy
+yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts
+F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2
+jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx
+ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC
+VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK
+YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH
+EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN
+Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud
+DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE
+MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK
+UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ
+TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf
+qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK
+ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE
+JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7
+hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1
+EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm
+nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX
+udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz
+ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe
+LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl
+pYYsfPQS
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root CA"
+# Serial: 17154717934120587862167794914071425081
+# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
+# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
+# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
+-----BEGIN CERTIFICATE-----
+MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
+JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
+mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
+VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
+AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
+AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
+pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
+dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
+fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
+H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root CA"
+# Serial: 10944719598952040374951832963794454346
+# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e
+# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36
+# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61
+-----BEGIN CERTIFICATE-----
+MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
+QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
+CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
+nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
+43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
+T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
+gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
+TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
+DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
+hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
+06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
+PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
+YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
+CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert High Assurance EV Root CA"
+# Serial: 3553400076410547919724730734378100087
+# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a
+# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25
+# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
+ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
+LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
+RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
+PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
+xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
+Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
+hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
+EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
+FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
+nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
+eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
+hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
+Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
+vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
++OkuE6N36B9K
+-----END CERTIFICATE-----
+
+# Issuer: CN=Class 2 Primary CA O=Certplus
+# Subject: CN=Class 2 Primary CA O=Certplus
+# Label: "Certplus Class 2 Primary CA"
+# Serial: 177770208045934040241468760488327595043
+# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b
+# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb
+# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb
+-----BEGIN CERTIFICATE-----
+MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw
+PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz
+cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9
+MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz
+IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ
+ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR
+VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL
+kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd
+EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas
+H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0
+HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud
+DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4
+QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu
+Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/
+AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8
+yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR
+FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA
+ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB
+kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7
+l7+ijrRU
+-----END CERTIFICATE-----
+
+# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co.
+# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co.
+# Label: "DST Root CA X3"
+# Serial: 91299735575339953335919266965803778155
+# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5
+# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13
+# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39
+-----BEGIN CERTIFICATE-----
+MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/
+MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT
+DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow
+PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD
+Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
+AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O
+rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq
+OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b
+xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw
+7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD
+aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV
+HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG
+SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69
+ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr
+AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz
+R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5
+JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo
+Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Label: "SwissSign Gold CA - G2"
+# Serial: 13492815561806991280
+# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93
+# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61
+# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95
+-----BEGIN CERTIFICATE-----
+MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV
+BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln
+biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF
+MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT
+d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
+CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8
+76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+
+bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c
+6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE
+emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd
+MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt
+MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y
+MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y
+FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi
+aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM
+gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB
+qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7
+lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn
+8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov
+L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6
+45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO
+UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5
+O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC
+bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv
+GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a
+77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC
+hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3
+92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp
+Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w
+ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt
+Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Label: "SwissSign Silver CA - G2"
+# Serial: 5700383053117599563
+# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13
+# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb
+# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5
+-----BEGIN CERTIFICATE-----
+MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE
+BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu
+IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow
+RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY
+U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A
+MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv
+Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br
+YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF
+nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH
+6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt
+eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/
+c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ
+MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH
+HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf
+jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6
+5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB
+rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
+F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c
+wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0
+cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB
+AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp
+WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9
+xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ
+2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ
+IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8
+aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X
+em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR
+dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/
+OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+
+hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy
+tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Label: "GeoTrust Primary Certification Authority"
+# Serial: 32798226551256963324313806436981982369
+# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf
+# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96
+# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c
+-----BEGIN CERTIFICATE-----
+MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY
+MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo
+R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx
+MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK
+Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9
+AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA
+ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0
+7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W
+kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI
+mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ
+KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1
+6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl
+4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K
+oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj
+UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU
+AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA"
+# Serial: 69529181992039203566298953787712940909
+# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12
+# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81
+# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f
+-----BEGIN CERTIFICATE-----
+MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB
+qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV
+BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw
+NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j
+LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG
+A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl
+IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs
+W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta
+3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk
+6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6
+Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J
+NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA
+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP
+r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU
+DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz
+YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX
+xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2
+/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/
+LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7
+jVaMaA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G5"
+# Serial: 33037644167568058970164719475676101450
+# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c
+# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5
+# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df
+-----BEGIN CERTIFICATE-----
+MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB
+yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW
+ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1
+nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex
+t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz
+SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG
+BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+
+rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/
+NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E
+BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH
+BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy
+aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv
+MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE
+p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y
+5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK
+WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ
+4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N
+hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureTrust CA O=SecureTrust Corporation
+# Subject: CN=SecureTrust CA O=SecureTrust Corporation
+# Label: "SecureTrust CA"
+# Serial: 17199774589125277788362757014266862032
+# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1
+# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11
+# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73
+-----BEGIN CERTIFICATE-----
+MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz
+MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv
+cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN
+AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz
+Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO
+0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao
+wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj
+7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS
+8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT
+BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg
+JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC
+NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/
+3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm
+D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS
+CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR
+3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Secure Global CA O=SecureTrust Corporation
+# Subject: CN=Secure Global CA O=SecureTrust Corporation
+# Label: "Secure Global CA"
+# Serial: 9751836167731051554232119481456978597
+# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de
+# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b
+# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69
+-----BEGIN CERTIFICATE-----
+MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx
+MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg
+Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ
+iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa
+/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ
+jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI
+HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7
+sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w
+gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw
+KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG
+AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L
+URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO
+H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm
+I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY
+iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc
+f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO Certification Authority O=COMODO CA Limited
+# Label: "COMODO Certification Authority"
+# Serial: 104350513648249232941998508985834464573
+# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75
+# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b
+# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66
+-----BEGIN CERTIFICATE-----
+MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
+gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV
+BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw
+MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl
+YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P
+RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3
+UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI
+2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp
++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O
+nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW
+/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g
+PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u
+QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY
+SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv
+IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/
+RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4
+zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd
+BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
+ZQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Label: "Network Solutions Certificate Authority"
+# Serial: 116697915152937497490437556386812487904
+# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e
+# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce
+# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c
+-----BEGIN CERTIFICATE-----
+MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi
+MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu
+MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp
+dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV
+UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO
+ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz
+c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP
+OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl
+mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF
+BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4
+qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw
+gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB
+BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu
+bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp
+dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8
+6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/
+h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH
+/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv
+wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN
+pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Label: "COMODO ECC Certification Authority"
+# Serial: 41578283867086692638256921589707938090
+# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23
+# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11
+# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7
+-----BEGIN CERTIFICATE-----
+MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw
+MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy
+ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N
+T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv
+biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR
+FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J
+cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW
+BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm
+fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
+GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GA CA"
+# Serial: 86718877871133159090080555911823548314
+# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93
+# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9
+# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5
+-----BEGIN CERTIFICATE-----
+MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB
+ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly
+aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl
+ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w
+NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G
+A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD
+VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX
+SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR
+VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2
+w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF
+mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg
+4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9
+4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw
+EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx
+SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2
+ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8
+vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa
+hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi
+Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ
+/L7fCg0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna O=Dhimyotis
+# Subject: CN=Certigna O=Dhimyotis
+# Label: "Certigna"
+# Serial: 18364802974209362175
+# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff
+# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97
+# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d
+-----BEGIN CERTIFICATE-----
+MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV
+BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X
+DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ
+BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4
+QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny
+gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw
+zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q
+130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2
+JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw
+ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT
+AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj
+AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG
+9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h
+bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc
+fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu
+HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w
+t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw
+WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
+# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
+# Label: "Deutsche Telekom Root CA 2"
+# Serial: 38
+# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08
+# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf
+# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3
+-----BEGIN CERTIFICATE-----
+MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc
+MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj
+IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB
+IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE
+RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl
+U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290
+IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU
+ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC
+QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr
+rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S
+NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc
+QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH
+txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP
+BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC
+AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp
+tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa
+IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl
+6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+
+xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU
+Cm26OWMohpLzGITY+9HPBVZkVw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Label: "Cybertrust Global Root"
+# Serial: 4835703278459682877484360
+# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1
+# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6
+# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3
+-----BEGIN CERTIFICATE-----
+MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG
+A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh
+bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE
+ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS
+b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5
+7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS
+J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y
+HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP
+t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz
+FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY
+XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/
+MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw
+hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js
+MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA
+A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj
+Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx
+XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o
+omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc
+A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW
+WL1WMRJOEcgh4LMRkWXbtKaIOM5V
+-----END CERTIFICATE-----
+
+# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Label: "ePKI Root Certification Authority"
+# Serial: 28956088682735189655030529057352760477
+# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3
+# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0
+# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5
+-----BEGIN CERTIFICATE-----
+MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe
+MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
+ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw
+IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL
+SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH
+SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh
+ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X
+DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1
+TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ
+fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA
+sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU
+WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS
+nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH
+dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip
+NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC
+AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF
+MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH
+ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB
+uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl
+PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP
+JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/
+gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2
+j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6
+5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB
+o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS
+/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z
+Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE
+W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D
+hNQ+IIX3Sj0rnP0qCglN6oH4EZw=
+-----END CERTIFICATE-----
+
+# Issuer: O=certSIGN OU=certSIGN ROOT CA
+# Subject: O=certSIGN OU=certSIGN ROOT CA
+# Label: "certSIGN ROOT CA"
+# Serial: 35210227249154
+# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17
+# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b
+# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb
+-----BEGIN CERTIFICATE-----
+MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT
+AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD
+QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP
+MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC
+ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do
+0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ
+UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d
+RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ
+OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv
+JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C
+AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O
+BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ
+LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY
+MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ
+44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I
+Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw
+i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN
+9u6wWk5JRFRYX0KD
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G3"
+# Serial: 28809105769928564313984085209975885599
+# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05
+# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd
+# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4
+-----BEGIN CERTIFICATE-----
+MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB
+mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT
+MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s
+eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv
+cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ
+BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg
+MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0
+BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz
++uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm
+hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn
+5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W
+JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL
+DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC
+huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw
+HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB
+AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB
+zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN
+kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD
+AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH
+SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G
+spki4cErx5z481+oghLrGREt
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G2"
+# Serial: 71758320672825410020661621085256472406
+# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f
+# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12
+# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57
+-----BEGIN CERTIFICATE-----
+MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp
+IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi
+BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw
+MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh
+d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig
+YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v
+dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/
+BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6
+papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K
+DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3
+KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox
+XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G3"
+# Serial: 127614157056681299805556476275995414779
+# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31
+# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2
+# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB
+rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV
+BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa
+Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl
+LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u
+MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl
+ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm
+gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8
+YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf
+b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9
+9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S
+zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk
+OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV
+HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA
+2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW
+oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu
+t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c
+KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM
+m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu
+MdRAGmI0Nj81Aa6sY6A=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G2"
+# Serial: 80682863203381065782177908751794619243
+# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a
+# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0
+# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL
+MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj
+KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2
+MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0
+eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV
+BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw
+NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV
+BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH
+MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL
+So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal
+tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG
+CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT
+qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz
+rD6ogRLQy7rQkgu2npaqBA+K
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Universal Root Certification Authority"
+# Serial: 85209574734084581917763752644031726877
+# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19
+# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54
+# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c
+-----BEGIN CERTIFICATE-----
+MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB
+vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W
+ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX
+MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0
+IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y
+IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh
+bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF
+AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF
+9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH
+H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H
+LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN
+/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT
+rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud
+EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw
+WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs
+exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud
+DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4
+sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+
+seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz
+4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+
+BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR
+lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3
+7M2CYfE45k+XmCpajQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G4"
+# Serial: 63143484348153506665311985501458640051
+# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41
+# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a
+# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79
+-----BEGIN CERTIFICATE-----
+MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG
+A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp
+U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg
+SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln
+biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5
+IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm
+GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve
+fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ
+aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj
+aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW
+kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC
+4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga
+FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=NetLock Arany (Class Gold) Főtanúsítvány O=NetLock Kft. OU=Tanúsítványkiadók (Certification Services)
+# Subject: CN=NetLock Arany (Class Gold) Főtanúsítvány O=NetLock Kft. OU=Tanúsítványkiadók (Certification Services)
+# Label: "NetLock Arany (Class Gold) Főtanúsítvány"
+# Serial: 80544274841616
+# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88
+# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91
+# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98
+-----BEGIN CERTIFICATE-----
+MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG
+EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3
+MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl
+cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR
+dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB
+pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM
+b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm
+aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz
+IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT
+lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz
+AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5
+VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG
+ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2
+BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG
+AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M
+U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh
+bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C
++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC
+bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F
+uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
+XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
+# Label: "Staat der Nederlanden Root CA - G2"
+# Serial: 10000012
+# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a
+# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16
+# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f
+-----BEGIN CERTIFICATE-----
+MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX
+DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
+ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
+b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291
+qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp
+uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU
+Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE
+pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp
+5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M
+UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN
+GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy
+5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv
+6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK
+eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6
+B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/
+BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov
+L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV
+HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG
+SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS
+CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen
+5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897
+IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK
+gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL
++63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL
+vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm
+bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk
+N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC
+Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z
+ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post
+# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post
+# Label: "Hongkong Post Root CA 1"
+# Serial: 1000
+# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca
+# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58
+# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2
+-----BEGIN CERTIFICATE-----
+MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx
+FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg
+Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG
+A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr
+b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ
+jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn
+PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh
+ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9
+nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h
+q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED
+MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC
+mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3
+7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB
+oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs
+EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO
+fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi
+AmvZWg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Label: "SecureSign RootCA11"
+# Serial: 1
+# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26
+# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3
+# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12
+-----BEGIN CERTIFICATE-----
+MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr
+MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG
+A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0
+MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp
+Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD
+QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz
+i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8
+h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV
+MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9
+UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni
+8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC
+h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD
+VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
+AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm
+KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ
+X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr
+QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5
+pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN
+QSdJQO7e5iNEOdyhIta6A/I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Label: "Microsec e-Szigno Root CA 2009"
+# Serial: 14014712776195784473
+# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1
+# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e
+# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78
+-----BEGIN CERTIFICATE-----
+MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD
+VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0
+ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G
+CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y
+OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx
+FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp
+Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o
+dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP
+kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc
+cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U
+fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7
+N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC
+xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1
++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
+A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM
+Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG
+SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h
+mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk
+ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775
+tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c
+2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t
+HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Label: "GlobalSign Root CA - R3"
+# Serial: 4835703278459759426209954
+# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28
+# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad
+# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b
+-----BEGIN CERTIFICATE-----
+MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
+MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
+RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
+gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
+KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
+QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
+XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
+LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
+RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
+jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
+6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
+mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
+Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
+WD9f
+-----END CERTIFICATE-----
+
+# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"
+# Serial: 6047274297262753887
+# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3
+# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa
+# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef
+-----BEGIN CERTIFICATE-----
+MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE
+BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h
+cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy
+MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg
+Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9
+thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM
+cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG
+L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i
+NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h
+X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b
+m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy
+Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja
+EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T
+KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF
+6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh
+OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD
+VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD
+VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp
+cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv
+ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl
+AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF
+661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9
+am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1
+ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481
+PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS
+3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k
+SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF
+3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM
+ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g
+StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz
+Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB
+jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V
+-----END CERTIFICATE-----
+
+# Issuer: CN=Izenpe.com O=IZENPE S.A.
+# Subject: CN=Izenpe.com O=IZENPE S.A.
+# Label: "Izenpe.com"
+# Serial: 917563065490389241595536686991402621
+# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73
+# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19
+# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f
+-----BEGIN CERTIFICATE-----
+MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4
+MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6
+ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD
+VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j
+b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq
+scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO
+xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H
+LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX
+uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD
+yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+
+JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q
+rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN
+BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L
+hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB
+QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+
+HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu
+Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg
+QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB
+BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx
+MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA
+A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb
+laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56
+awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo
+JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw
+LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT
+VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk
+LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb
+UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/
+QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+
+naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls
+QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
+# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
+# Label: "Chambers of Commerce Root - 2008"
+# Serial: 11806822484801597146
+# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7
+# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c
+# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0
+-----BEGIN CERTIFICATE-----
+MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD
+VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
+IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
+MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz
+IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz
+MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj
+dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw
+EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp
+MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G
+CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9
+28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq
+VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q
+DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR
+5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL
+ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a
+Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl
+UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s
++12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5
+Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj
+ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx
+hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV
+HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1
++HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN
+YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t
+L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy
+ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt
+IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV
+HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w
+DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW
+PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF
+5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1
+glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH
+FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2
+pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD
+xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG
+tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq
+jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De
+fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg
+OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ
+d0jQ
+-----END CERTIFICATE-----
+
+# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
+# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
+# Label: "Global Chambersign Root - 2008"
+# Serial: 14541511773111788494
+# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3
+# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c
+# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca
+-----BEGIN CERTIFICATE-----
+MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD
+VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
+IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
+MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD
+aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx
+MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy
+cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG
+A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl
+BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI
+hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed
+KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7
+G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2
+zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4
+ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG
+HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2
+Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V
+yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e
+beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r
+6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh
+wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog
+zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW
+BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr
+ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp
+ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk
+cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt
+YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC
+CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow
+KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI
+hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ
+UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz
+X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x
+fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz
+a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd
+Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd
+SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O
+AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso
+M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge
+v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z
+09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B
+-----END CERTIFICATE-----
+
+# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Label: "Go Daddy Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01
+# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b
+# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
+EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
+ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
+NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
+EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
+AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
+DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
+E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
+/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
+DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
+GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
+tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
+AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
+WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
+9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
+gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
+2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
+LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
+4uJEvlz36hz1
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96
+# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e
+# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5
+-----BEGIN CERTIFICATE-----
+MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs
+ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw
+MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6
+b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj
+aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp
+Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg
+nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1
+HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N
+Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN
+dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0
+HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G
+CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU
+sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3
+4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg
+8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K
+pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1
+mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Services Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2
+# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f
+# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5
+-----BEGIN CERTIFICATE-----
+MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
+ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
+MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
+VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
+ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
+dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
+OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
+8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
+Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
+hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
+6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
+AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
+bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
+ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
+qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
+iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
+0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
+sSi6
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Commercial O=AffirmTrust
+# Subject: CN=AffirmTrust Commercial O=AffirmTrust
+# Label: "AffirmTrust Commercial"
+# Serial: 8608355977964138876
+# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7
+# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7
+# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP
+Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr
+ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL
+MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1
+yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr
+VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/
+nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG
+XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj
+vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt
+Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g
+N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC
+nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Networking O=AffirmTrust
+# Subject: CN=AffirmTrust Networking O=AffirmTrust
+# Label: "AffirmTrust Networking"
+# Serial: 8957382827206547757
+# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f
+# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f
+# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y
+YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua
+kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL
+QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp
+6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG
+yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i
+QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO
+tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu
+QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ
+Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u
+olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48
+x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium O=AffirmTrust
+# Subject: CN=AffirmTrust Premium O=AffirmTrust
+# Label: "AffirmTrust Premium"
+# Serial: 7893706540734352110
+# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57
+# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27
+# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz
+dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG
+A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U
+cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf
+qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ
+JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ
++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS
+s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5
+HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7
+70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG
+V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S
+qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S
+5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia
+C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX
+OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE
+FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2
+KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg
+Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B
+8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ
+MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc
+0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF
+u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH
+YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8
+GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO
+RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e
+KeC2uAloGRwYQw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Label: "AffirmTrust Premium ECC"
+# Serial: 8401224907861490260
+# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d
+# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb
+# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23
+-----BEGIN CERTIFICATE-----
+MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
+VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
+cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ
+BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt
+VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D
+0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9
+ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G
+A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs
+aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
+flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA"
+# Serial: 279744
+# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78
+# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e
+# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e
+-----BEGIN CERTIFICATE-----
+MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM
+MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D
+ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU
+cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3
+WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg
+Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw
+IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH
+UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM
+TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU
+BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM
+kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x
+AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV
+HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y
+sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL
+I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8
+J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY
+VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI
+03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Root Certification Authority"
+# Serial: 1
+# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79
+# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48
+# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44
+-----BEGIN CERTIFICATE-----
+MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES
+MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU
+V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz
+WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO
+LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
+AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE
+AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH
+K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX
+RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z
+rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx
+3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq
+hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC
+MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls
+XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D
+lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn
+aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ
+YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw==
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Label: "Security Communication RootCA2"
+# Serial: 0
+# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43
+# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74
+# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl
+MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe
+U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX
+DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy
+dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj
+YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV
+OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr
+zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM
+VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ
+hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO
+ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw
+awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs
+OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3
+DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF
+coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc
+okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8
+t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy
+1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/
+SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions RootCA 2011"
+# Serial: 0
+# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9
+# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d
+# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71
+-----BEGIN CERTIFICATE-----
+MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix
+RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
+dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p
+YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw
+NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK
+EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl
+cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz
+dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ
+fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns
+bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD
+75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP
+FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV
+HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp
+5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu
+b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA
+A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p
+6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8
+TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7
+dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys
+Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI
+l7WdmplNsDz4SgCbZN2fOUvRJ9e4
+-----END CERTIFICATE-----
+
+# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Label: "Actalis Authentication Root CA"
+# Serial: 6271844772424770508
+# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6
+# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac
+# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66
+-----BEGIN CERTIFICATE-----
+MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE
+BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w
+MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290
+IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC
+SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1
+ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv
+UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX
+4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9
+KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/
+gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb
+rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ
+51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F
+be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe
+KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F
+v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn
+fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7
+jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz
+ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt
+ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL
+e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz
+WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V
+SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j
+pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX
+X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok
+fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R
+K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU
+ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU
+LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT
+LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg==
+-----END CERTIFICATE-----
+
+# Issuer: O=Trustis Limited OU=Trustis FPS Root CA
+# Subject: O=Trustis Limited OU=Trustis FPS Root CA
+# Label: "Trustis FPS Root CA"
+# Serial: 36053640375399034304724988975563710553
+# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d
+# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04
+# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d
+-----BEGIN CERTIFICATE-----
+MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF
+MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL
+ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx
+MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc
+MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD
+ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+
+AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH
+iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj
+vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA
+0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB
+OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/
+BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E
+FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01
+GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW
+zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4
+1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE
+f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F
+jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN
+ZetX2fNXlrtIzYE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 2 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29
+# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99
+# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr
+6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV
+L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91
+1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx
+MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ
+QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB
+arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr
+Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi
+FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS
+P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN
+9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz
+uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h
+9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s
+A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t
+OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo
++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7
+KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2
+DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us
+H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ
+I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7
+5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h
+3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz
+Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 3 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec
+# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57
+# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y
+ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E
+N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9
+tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX
+0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c
+/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X
+KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY
+zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS
+O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D
+34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP
+K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv
+Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj
+QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV
+cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS
+IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2
+HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa
+O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv
+033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u
+dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE
+kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41
+3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD
+u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq
+4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 3"
+# Serial: 1
+# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef
+# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1
+# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN
+8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/
+RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4
+hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5
+ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM
+EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1
+A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy
+WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30
+6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT
+91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml
+e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p
+TpPDpFQUWw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
+# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
+# Label: "EE Certification Centre Root CA"
+# Serial: 112324828676200291871926431888494945866
+# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f
+# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7
+# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76
+-----BEGIN CERTIFICATE-----
+MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1
+MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1
+czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG
+CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy
+MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl
+ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS
+b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB
+AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy
+euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO
+bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw
+WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d
+MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE
+1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD
+VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/
+zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB
+BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF
+BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV
+v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG
+E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u
+uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW
+iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v
+GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 2009"
+# Serial: 623603
+# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f
+# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0
+# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1
+-----BEGIN CERTIFICATE-----
+MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha
+ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM
+HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03
+UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42
+tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R
+ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM
+lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp
+/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G
+A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G
+A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj
+dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy
+MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl
+cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js
+L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL
+BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni
+acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0
+o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K
+zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8
+PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y
+Johw1+qRzT65ysCQblrGXnRl11z+o+I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 EV 2009"
+# Serial: 623604
+# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6
+# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83
+# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81
+-----BEGIN CERTIFICATE-----
+MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw
+NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV
+BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn
+ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0
+3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z
+qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR
+p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8
+HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea
+HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw
+Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh
+c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E
+RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt
+dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku
+Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp
+3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05
+nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF
+CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na
+xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX
+KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1
+-----END CERTIFICATE-----
+
+# Issuer: CN=CA Disig Root R2 O=Disig a.s.
+# Subject: CN=CA Disig Root R2 O=Disig a.s.
+# Label: "CA Disig Root R2"
+# Serial: 10572350602393338211
+# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03
+# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71
+# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03
+-----BEGIN CERTIFICATE-----
+MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV
+BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
+MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy
+MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
+EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw
+ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe
+NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH
+PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I
+x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe
+QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR
+yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO
+QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912
+H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ
+QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD
+i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs
+nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1
+rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
+DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI
+hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM
+tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf
+GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb
+lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka
++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal
+TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i
+nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3
+gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr
+G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os
+zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x
+L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL
+-----END CERTIFICATE-----
+
+# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Label: "ACCVRAIZ1"
+# Serial: 6828503384748696800
+# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02
+# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17
+# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13
+-----BEGIN CERTIFICATE-----
+MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE
+AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw
+CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ
+BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND
+VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb
+qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY
+HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo
+G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA
+lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr
+IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/
+0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH
+k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47
+4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO
+m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa
+cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl
+uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI
+KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls
+ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG
+AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2
+VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT
+VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG
+CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA
+cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA
+QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA
+7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA
+cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA
+QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA
+czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu
+aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt
+aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud
+DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF
+BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp
+D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU
+JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m
+AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD
+vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms
+tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH
+7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h
+I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA
+h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF
+d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H
+pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Global Root CA"
+# Serial: 3262
+# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96
+# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65
+# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx
+EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5
+NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT
+B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF
+10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz
+0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh
+MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH
+zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc
+46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2
+yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi
+laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP
+oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA
+BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE
+qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm
+4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL
+1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn
+LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF
+H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo
+RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+
+nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh
+15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW
+6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW
+nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j
+wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz
+aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy
+KwbQBM0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Label: "TeliaSonera Root CA v1"
+# Serial: 199041966741090107964904287217786801558
+# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c
+# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37
+# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89
+-----BEGIN CERTIFICATE-----
+MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw
+NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv
+b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD
+VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F
+VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1
+7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X
+Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+
+/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs
+81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm
+dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe
+Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu
+sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4
+pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs
+slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ
+arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD
+VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG
+9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl
+dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj
+TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed
+Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7
+Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI
+OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7
+vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW
+t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn
+HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx
+SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. OU=E-Tugra Sertifikasyon Merkezi
+# Subject: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. OU=E-Tugra Sertifikasyon Merkezi
+# Label: "E-Tugra Certification Authority"
+# Serial: 7667447206703254355
+# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49
+# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39
+# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c
+-----BEGIN CERTIFICATE-----
+MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV
+BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC
+aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV
+BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1
+Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz
+MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+
+BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp
+em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN
+ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY
+B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH
+D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF
+Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo
+q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D
+k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH
+fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut
+dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM
+ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8
+zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn
+rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX
+U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6
+Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5
+XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF
+Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR
+HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY
+GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c
+77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3
++GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK
+vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6
+FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl
+yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P
+AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD
+y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d
+NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 2"
+# Serial: 1
+# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a
+# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9
+# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd
+AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC
+FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi
+1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq
+jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ
+wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/
+WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy
+NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC
+uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw
+IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6
+g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN
+9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP
+BSeOE6Fuwg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot 2011 O=Atos
+# Subject: CN=Atos TrustedRoot 2011 O=Atos
+# Label: "Atos TrustedRoot 2011"
+# Serial: 6643877497813316402
+# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56
+# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21
+# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE
+AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG
+EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM
+FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC
+REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp
+Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM
+VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+
+SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L
+cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi
+eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG
+A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3
+DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j
+vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP
+DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc
+maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D
+lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv
+KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 1 G3"
+# Serial: 687049649626669250736271037606554624078720034195
+# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab
+# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67
+# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00
+MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV
+wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe
+rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341
+68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh
+4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp
+UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o
+abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc
+3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G
+KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt
+hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO
+Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt
+zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD
+ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC
+MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2
+cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN
+qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5
+YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv
+b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2
+8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k
+NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj
+ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp
+q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt
+nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2 G3"
+# Serial: 390156079458959257446133169266079962026824725800
+# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06
+# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36
+# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00
+MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf
+qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW
+n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym
+c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+
+O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1
+o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j
+IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq
+IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz
+8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh
+vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l
+7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG
+cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD
+ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66
+AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC
+roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga
+W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n
+lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE
++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV
+csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd
+dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg
+KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM
+HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4
+WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3 G3"
+# Serial: 268090761170461462463995952157327242137089239581
+# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7
+# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d
+# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00
+MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR
+/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu
+FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR
+U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c
+ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR
+FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k
+A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw
+eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl
+sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp
+VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+
+ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD
+ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px
+KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI
+FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv
+oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg
+u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP
+0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf
+3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl
+8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+
+DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN
+PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/
+ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G2"
+# Serial: 15385348160840213938643033620894905419
+# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d
+# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f
+# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85
+-----BEGIN CERTIFICATE-----
+MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA
+n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc
+biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp
+EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA
+bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu
+YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB
+AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW
+BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI
+QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I
+0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni
+lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9
+B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv
+ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo
+IhNzbM8m9Yop5w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G3"
+# Serial: 15459312981008553731928384953135426796
+# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb
+# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89
+# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2
+-----BEGIN CERTIFICATE-----
+MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq
+hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf
+Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q
+RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD
+AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY
+JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv
+6pZjamVFkpUBtA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G2"
+# Serial: 4293743540046975378534879503202253541
+# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44
+# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4
+# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f
+-----BEGIN CERTIFICATE-----
+MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
+MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI
+2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx
+1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ
+q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz
+tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ
+vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV
+5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY
+1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4
+NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG
+Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91
+8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe
+pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl
+MrY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G3"
+# Serial: 7089244469030293291760083333884364146
+# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca
+# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e
+# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0
+-----BEGIN CERTIFICATE-----
+MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe
+Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw
+EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x
+IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF
+K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG
+fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd
+BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx
+AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/
+oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8
+sycX
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Trusted Root G4"
+# Serial: 7451500558977370777930084869016614236
+# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49
+# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4
+# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88
+-----BEGIN CERTIFICATE-----
+MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg
+RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y
+ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If
+xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV
+ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO
+DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ
+jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/
+CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi
+EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM
+fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY
+uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK
+chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t
+9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD
+ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2
+SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd
++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc
+fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa
+sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N
+cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N
+0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie
+4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI
+r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1
+/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm
+gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Label: "COMODO RSA Certification Authority"
+# Serial: 101909084537582093308941363524873193117
+# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18
+# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4
+# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34
+-----BEGIN CERTIFICATE-----
+MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB
+hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV
+BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT
+EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR
+Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR
+6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X
+pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC
+9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV
+/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf
+Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z
++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w
+qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah
+SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC
+u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf
+Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq
+crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E
+FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB
+/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl
+wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM
+4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV
+2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna
+FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ
+CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK
+boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke
+jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL
+S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb
+QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl
+0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB
+NVOFBkpdn627G190
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Label: "USERTrust RSA Certification Authority"
+# Serial: 2645093764781058787591871645665788717
+# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5
+# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e
+# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2
+-----BEGIN CERTIFICATE-----
+MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB
+iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl
+cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV
+BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw
+MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV
+BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU
+aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B
+3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY
+tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/
+Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2
+VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT
+79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6
+c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l
+c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee
+UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE
+Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd
+BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G
+A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF
+Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO
+VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3
+ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs
+8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR
+iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze
+Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ
+XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/
+qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB
+VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB
+L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG
+jjxDah2nGN59PRbxYvnKkKj9
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Label: "USERTrust ECC Certification Authority"
+# Serial: 123013823720199481456569720443997572134
+# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1
+# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0
+# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a
+-----BEGIN CERTIFICATE-----
+MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL
+MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl
+eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT
+JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT
+Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg
+VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo
+I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng
+o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G
+A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB
+zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW
+RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Label: "GlobalSign ECC Root CA - R4"
+# Serial: 14367148294922964480859022125800977897474
+# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e
+# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb
+# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c
+-----BEGIN CERTIFICATE-----
+MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ
+FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F
+uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX
+kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs
+ewv4n4Q=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Label: "GlobalSign ECC Root CA - R5"
+# Serial: 32785792099990507226680698011560947931244
+# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08
+# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa
+# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24
+-----BEGIN CERTIFICATE-----
+MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke
+hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI
+KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg
+515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO
+xwy8p2Fp8fc74SrL+SvzZpA3
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
+# Label: "Staat der Nederlanden Root CA - G3"
+# Serial: 10003001
+# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37
+# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc
+# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28
+-----BEGIN CERTIFICATE-----
+MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX
+DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
+ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
+b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP
+cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW
+IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX
+xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy
+KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR
+9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az
+5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8
+6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7
+Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP
+bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt
+BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt
+XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF
+MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd
+INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD
+U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp
+LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8
+Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp
+gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh
+/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw
+0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A
+fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq
+4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR
+1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/
+QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM
+94B7IWcnMFk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
+# Label: "Staat der Nederlanden EV Root CA"
+# Serial: 10000013
+# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba
+# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb
+# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a
+-----BEGIN CERTIFICATE-----
+MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y
+MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg
+TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS
+b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS
+M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC
+UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d
+Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p
+rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l
+pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb
+j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC
+KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS
+/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X
+cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH
+1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP
+px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7
+MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI
+eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u
+2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS
+v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC
+wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy
+CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e
+vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6
+Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa
+Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL
+eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8
+FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc
+7uzXLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Label: "IdenTrust Commercial Root CA 1"
+# Serial: 13298821034946342390520003877796839426
+# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7
+# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25
+# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu
+VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw
+MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw
+JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT
+3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU
++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp
+S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1
+bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi
+T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL
+vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK
+Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK
+dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT
+c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv
+l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N
+iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD
+ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH
+6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt
+LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93
+nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3
++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK
+W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT
+AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq
+l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG
+4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ
+mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A
+7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Label: "IdenTrust Public Sector Root CA 1"
+# Serial: 13298821034946342390521976156843933698
+# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba
+# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd
+# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f
+-----BEGIN CERTIFICATE-----
+MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu
+VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0
+MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7
+ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy
+RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS
+bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF
+/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R
+3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw
+EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy
+9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V
+GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ
+2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV
+WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD
+W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN
+AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj
+t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV
+DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9
+TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G
+lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW
+mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df
+WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5
++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ
+tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA
+GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv
+8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - G2"
+# Serial: 1246989352
+# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2
+# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4
+# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39
+-----BEGIN CERTIFICATE-----
+MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50
+cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs
+IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz
+dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy
+NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu
+dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt
+dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0
+aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T
+RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN
+cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW
+wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1
+U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0
+jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP
+BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN
+BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/
+jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ
+Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v
+1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R
+nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH
+VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - EC1"
+# Serial: 51543124481930649114116133369
+# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc
+# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47
+# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5
+-----BEGIN CERTIFICATE-----
+MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG
+A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3
+d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu
+dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq
+RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy
+MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD
+VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0
+L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g
+Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD
+ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi
+A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt
+ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH
+Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
+BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC
+R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX
+hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G
+-----END CERTIFICATE-----
+
+# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Label: "CFCA EV ROOT"
+# Serial: 407555286
+# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30
+# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83
+# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd
+-----BEGIN CERTIFICATE-----
+MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD
+TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx
+MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j
+aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP
+T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03
+sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL
+TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5
+/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp
+7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz
+EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt
+hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP
+a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot
+aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg
+TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV
+PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv
+cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL
+tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd
+BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB
+ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT
+ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL
+jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS
+ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19
+xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d
+Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN
+5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe
+/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z
+AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ
+5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GB CA"
+# Serial: 157768595616588414422159278966750757568
+# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d
+# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed
+# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6
+-----BEGIN CERTIFICATE-----
+MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt
+MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg
+Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i
+YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x
+CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG
+b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh
+bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3
+HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx
+WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX
+1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk
+u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P
+99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r
+M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB
+BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh
+cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5
+gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO
+ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf
+aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic
+Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Label: "SZAFIR ROOT CA2"
+# Serial: 357043034767186914217277344587386743377558296292
+# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99
+# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de
+# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe
+-----BEGIN CERTIFICATE-----
+MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL
+BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6
+ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw
+NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L
+cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg
+Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN
+QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT
+3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw
+3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6
+3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5
+BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN
+XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF
+AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw
+8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG
+nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP
+oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy
+d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg
+LvWpCz/UXeHPhJ/iGcJfitYgHuNztw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA 2"
+# Serial: 44979900017204383099463764357512596969
+# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2
+# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92
+# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04
+-----BEGIN CERTIFICATE-----
+MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB
+gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu
+QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG
+A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz
+OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ
+VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3
+b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA
+DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn
+0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB
+OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE
+fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E
+Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m
+o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i
+sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW
+OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez
+Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS
+adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n
+3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC
+AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ
+F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf
+CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29
+XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm
+djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/
+WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb
+AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq
+P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko
+b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj
+XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P
+5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi
+DrW5viSP
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce
+# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6
+# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36
+-----BEGIN CERTIFICATE-----
+MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix
+DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k
+IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT
+N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v
+dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG
+A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh
+ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx
+QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
+dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA
+4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0
+AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10
+4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C
+ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV
+9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD
+gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6
+Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq
+NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko
+LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc
+Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd
+ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I
+XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI
+M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot
+9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V
+Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea
+j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh
+X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ
+l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf
+bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4
+pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK
+e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0
+vm9qp/UsQu0yrbYhnr68
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef
+# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66
+# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33
+-----BEGIN CERTIFICATE-----
+MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN
+BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl
+bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv
+b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ
+BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj
+YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5
+MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0
+dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg
+QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa
+jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC
+MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi
+C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep
+lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof
+TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR
+-----END CERTIFICATE-----
+
+# Issuer: CN=ISRG Root X1 O=Internet Security Research Group
+# Subject: CN=ISRG Root X1 O=Internet Security Research Group
+# Label: "ISRG Root X1"
+# Serial: 172886928669790476064670243504169061120
+# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e
+# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8
+# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6
+-----BEGIN CERTIFICATE-----
+MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
+TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
+cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
+WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
+ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
+h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
+0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
+A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
+T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
+B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
+B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
+KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
+OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
+jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
+qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
+rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
+hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
+ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
+3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
+NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
+ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
+TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
+jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
+4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
+mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
+emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
+-----END CERTIFICATE-----
+
+# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Label: "AC RAIZ FNMT-RCM"
+# Serial: 485876308206448804701554682760554759
+# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d
+# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20
+# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx
+CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ
+WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ
+BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG
+Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/
+yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf
+BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz
+WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF
+tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z
+374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC
+IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL
+mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7
+wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS
+MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2
+ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet
+UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H
+YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3
+LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD
+nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1
+RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM
+LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf
+77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N
+JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm
+fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp
+6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp
+1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B
+9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok
+RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv
+uu8wd+RU4riEmViAqhOLUTpPSPaLtrM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 1 O=Amazon
+# Subject: CN=Amazon Root CA 1 O=Amazon
+# Label: "Amazon Root CA 1"
+# Serial: 143266978916655856878034712317230054538369994
+# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6
+# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16
+# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e
+-----BEGIN CERTIFICATE-----
+MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM
+9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw
+IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6
+VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L
+93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm
+jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA
+A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI
+U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs
+N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv
+o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU
+5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy
+rqXRfboQnoZsG4q5WTP468SQvvG5
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 2 O=Amazon
+# Subject: CN=Amazon Root CA 2 O=Amazon
+# Label: "Amazon Root CA 2"
+# Serial: 143266982885963551818349160658925006970653239
+# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66
+# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a
+# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK
+gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ
+W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg
+1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K
+8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r
+2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me
+z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR
+8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj
+mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz
+7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6
++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI
+0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm
+UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2
+LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY
++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS
+k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl
+7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm
+btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl
+urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+
+fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63
+n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE
+76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H
+9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT
+4PsJYGw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 3 O=Amazon
+# Subject: CN=Amazon Root CA 3 O=Amazon
+# Label: "Amazon Root CA 3"
+# Serial: 143266986699090766294700635381230934788665930
+# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87
+# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e
+# SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4
+-----BEGIN CERTIFICATE-----
+MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl
+ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr
+ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr
+BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM
+YyRIHN8wfdVoOw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 4 O=Amazon
+# Subject: CN=Amazon Root CA 4 O=Amazon
+# Label: "Amazon Root CA 4"
+# Serial: 143266989758080763974105200630763877849284878
+# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd
+# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be
+# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92
+-----BEGIN CERTIFICATE-----
+MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi
+9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk
+M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB
+MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw
+CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW
+1KyLa2tJElMzrdfkviT8tQp21KW8EA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A.
+# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A.
+# Label: "LuxTrust Global Root 2"
+# Serial: 59914338225734147123941058376788110305822489521
+# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c
+# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f
+# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5
+-----BEGIN CERTIFICATE-----
+MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL
+BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV
+BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw
+MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B
+LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F
+ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem
+hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1
+EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn
+Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4
+zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ
+96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m
+j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g
+DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+
+8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j
+X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH
+hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB
+KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0
+Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT
++Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL
+BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9
+BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO
+jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9
+loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c
+qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+
+2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/
+JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre
+zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf
+LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+
+x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6
+oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr
+-----END CERTIFICATE-----
+
+# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1"
+# Serial: 1
+# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49
+# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca
+# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16
+-----BEGIN CERTIFICATE-----
+MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx
+GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp
+bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w
+KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0
+BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy
+dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG
+EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll
+IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU
+QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT
+TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg
+LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7
+a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr
+LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X
+YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/
+iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f
+AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH
+V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh
+AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf
+IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4
+lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c
+8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf
+lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Label: "GDCA TrustAUTH R5 ROOT"
+# Serial: 9009899650740120186
+# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4
+# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4
+# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93
+-----BEGIN CERTIFICATE-----
+MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE
+BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ
+IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0
+MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV
+BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w
+HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj
+Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj
+TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u
+KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj
+qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm
+MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12
+ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP
+zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk
+L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC
+jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA
+HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC
+AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB
+/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg
+p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm
+DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5
+COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry
+L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf
+JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg
+IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io
+2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV
+09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ
+XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq
+T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe
+MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor RootCert CA-1"
+# Serial: 15752444095811006489
+# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45
+# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a
+# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD
+VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk
+MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U
+cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y
+IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB
+pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h
+IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG
+A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU
+cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
+CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid
+RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V
+seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme
+9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV
+EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW
+hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/
+DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD
+ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I
+/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf
+ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ
+yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts
+L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN
+zl/HHk484IkzlQsPpTLWPFp5LBk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor RootCert CA-2"
+# Serial: 2711694510199101698
+# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64
+# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0
+# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65
+-----BEGIN CERTIFICATE-----
+MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV
+BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw
+IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy
+dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig
+Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk
+MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg
+Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD
+VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy
+dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+
+QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq
+1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp
+2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK
+DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape
+az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF
+3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88
+oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM
+g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3
+mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh
+8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd
+BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U
+nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw
+DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX
+dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+
+MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL
+/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX
+CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa
+ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW
+2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7
+N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3
+Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB
+As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp
+5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu
+1uwJ
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor ECA-1"
+# Serial: 9548242946988625984
+# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c
+# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd
+# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c
+-----BEGIN CERTIFICATE-----
+MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD
+VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk
+MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U
+cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y
+IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV
+BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw
+IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy
+dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig
+RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb
+3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA
+BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5
+3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou
+owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/
+wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF
+ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf
+BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/
+MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv
+civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2
+AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F
+hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50
+soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI
+WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi
+tJ/X5g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Label: "SSL.com Root Certification Authority RSA"
+# Serial: 8875640296558310041
+# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29
+# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb
+# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69
+-----BEGIN CERTIFICATE-----
+MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE
+BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK
+DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz
+OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv
+bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R
+xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX
+qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC
+C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3
+6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh
+/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF
+YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E
+JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc
+US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8
+ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm
++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi
+M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G
+A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV
+cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc
+Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs
+PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/
+q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0
+cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr
+a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I
+H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y
+K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu
+nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf
+oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY
+Ic2wBlX7Jz9TkHCpBB5XJ7k=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com Root Certification Authority ECC"
+# Serial: 8495723813297216424
+# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e
+# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a
+# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65
+-----BEGIN CERTIFICATE-----
+MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0
+aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz
+WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0
+b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS
+b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI
+7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg
+CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD
+VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T
+kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+
+gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority RSA R2"
+# Serial: 6248227494352943350
+# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95
+# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a
+# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c
+-----BEGIN CERTIFICATE-----
+MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV
+BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE
+CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy
+MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G
+A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD
+DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq
+M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf
+OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa
+4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9
+HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR
+aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA
+b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ
+Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV
+PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO
+pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu
+UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY
+MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV
+HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4
+9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW
+s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5
+Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg
+cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM
+79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz
+/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt
+ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm
+Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK
+QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ
+w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi
+S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07
+mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority ECC"
+# Serial: 3182246526754555285
+# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90
+# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d
+# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8
+-----BEGIN CERTIFICATE-----
+MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx
+NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv
+bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49
+AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA
+VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku
+WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP
+MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX
+5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ
+ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg
+h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Label: "GlobalSign Root CA - R6"
+# Serial: 1417766617973444989252670301619537
+# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae
+# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1
+# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg
+MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh
+bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx
+MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET
+MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ
+KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI
+xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k
+ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD
+aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw
+LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw
+1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX
+k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2
+SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h
+bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n
+WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY
+rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce
+MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu
+bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN
+nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt
+Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61
+55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj
+vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf
+cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz
+oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp
+nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs
+pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v
+JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R
+8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4
+5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GC CA"
+# Serial: 44084345621038548146064804565436152554
+# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23
+# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31
+# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d
+-----BEGIN CERTIFICATE-----
+MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw
+CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91
+bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg
+Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ
+BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu
+ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS
+b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni
+eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W
+p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T
+rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV
+57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg
+Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R1 O=Google Trust Services LLC
+# Subject: CN=GTS Root R1 O=Google Trust Services LLC
+# Label: "GTS Root R1"
+# Serial: 146587175971765017618439757810265552097
+# MD5 Fingerprint: 82:1a:ef:d4:d2:4a:f2:9f:e2:3d:97:06:14:70:72:85
+# SHA1 Fingerprint: e1:c9:50:e6:ef:22:f8:4c:56:45:72:8b:92:20:60:d7:d5:a7:a3:e8
+# SHA256 Fingerprint: 2a:57:54:71:e3:13:40:bc:21:58:1c:bd:2c:f1:3e:15:84:63:20:3e:ce:94:bc:f9:d3:cc:19:6b:f0:9a:54:72
+-----BEGIN CERTIFICATE-----
+MIIFWjCCA0KgAwIBAgIQbkepxUtHDA3sM9CJuRz04TANBgkqhkiG9w0BAQwFADBH
+MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM
+QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy
+MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl
+cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaM
+f/vo27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vX
+mX7wCl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7
+zUjwTcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0P
+fyblqAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtc
+vfaHszVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4
+Zor8Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUsp
+zBmkMiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOO
+Rc92wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYW
+k70paDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+
+DVrNVjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgF
+lQIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBADiW
+Cu49tJYeX++dnAsznyvgyv3SjgofQXSlfKqE1OXyHuY3UjKcC9FhHb8owbZEKTV1
+d5iyfNm9dKyKaOOpMQkpAWBz40d8U6iQSifvS9efk+eCNs6aaAyC58/UEBZvXw6Z
+XPYfcX3v73svfuo21pdwCxXu11xWajOl40k4DLh9+42FpLFZXvRq4d2h9mREruZR
+gyFmxhE+885H7pwoHyXa/6xmld01D1zvICxi/ZG6qcz8WpyTgYMpl0p8WnK0OdC3
+d8t5/Wk6kjftbjhlRn7pYL15iJdfOBL07q9bgsiG1eGZbYwE8na6SfZu6W0eX6Dv
+J4J2QPim01hcDyxC2kLGe4g0x8HYRZvBPsVhHdljUEn2NIVq4BjFbkerQUIpm/Zg
+DdIx02OYI5NaAIFItO/Nis3Jz5nu2Z6qNuFoS3FJFDYoOj0dzpqPJeaAcWErtXvM
++SUWgeExX6GjfhaknBZqlxi9dnKlC54dNuYvoS++cJEPqOba+MSSQGwlfnuzCdyy
+F62ARPBopY+Udf90WuioAnwMCeKpSwughQtiue+hMZL77/ZRBIls6Kl0obsXs7X9
+SQ98POyDGCBDTtWTurQ0sR8WNh8M5mQ5Fkzc4P4dyKliPUDqysU0ArSuiYgzNdws
+E3PYJ/HQcu51OyLemGhmW/HGY0dVHLqlCFF1pkgl
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R2 O=Google Trust Services LLC
+# Subject: CN=GTS Root R2 O=Google Trust Services LLC
+# Label: "GTS Root R2"
+# Serial: 146587176055767053814479386953112547951
+# MD5 Fingerprint: 44:ed:9a:0e:a4:09:3b:00:f2:ae:4c:a3:c6:61:b0:8b
+# SHA1 Fingerprint: d2:73:96:2a:2a:5e:39:9f:73:3f:e1:c7:1e:64:3f:03:38:34:fc:4d
+# SHA256 Fingerprint: c4:5d:7b:b0:8e:6d:67:e6:2e:42:35:11:0b:56:4e:5f:78:fd:92:ef:05:8c:84:0a:ea:4e:64:55:d7:58:5c:60
+-----BEGIN CERTIFICATE-----
+MIIFWjCCA0KgAwIBAgIQbkepxlqz5yDFMJo/aFLybzANBgkqhkiG9w0BAQwFADBH
+MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM
+QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy
+MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl
+cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3Lv
+CvptnfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3Kg
+GjSY6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9Bu
+XvAuMC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOd
+re7kRXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXu
+PuWgf9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1
+mKPV+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K
+8YzodDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqj
+x5RWIr9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsR
+nTKaG73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0
+kzCqgc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9Ok
+twIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBALZp
+8KZ3/p7uC4Gt4cCpx/k1HUCCq+YEtN/L9x0Pg/B+E02NjO7jMyLDOfxA325BS0JT
+vhaI8dI4XsRomRyYUpOM52jtG2pzegVATX9lO9ZY8c6DR2Dj/5epnGB3GFW1fgiT
+z9D2PGcDFWEJ+YF59exTpJ/JjwGLc8R3dtyDovUMSRqodt6Sm2T4syzFJ9MHwAiA
+pJiS4wGWAqoC7o87xdFtCjMwc3i5T1QWvwsHoaRc5svJXISPD+AVdyx+Jn7axEvb
+pxZ3B7DNdehyQtaVhJ2Gg/LkkM0JR9SLA3DaWsYDQvTtN6LwG1BUSw7YhN4ZKJmB
+R64JGz9I0cNv4rBgF/XuIwKl2gBbbZCr7qLpGzvpx0QnRY5rn/WkhLx3+WuXrD5R
+RaIRpsyF7gpo8j5QOHokYh4XIDdtak23CZvJ/KRY9bb7nE4Yu5UC56GtmwfuNmsk
+0jmGwZODUNKBRqhfYlcsu2xkiAhu7xNUX90txGdj08+JN7+dIPT7eoOboB6BAFDC
+5AwiWVIQ7UNWhwD4FFKnHYuTjKJNRn8nxnGbJN7k2oaLDX5rIMHAnuFl2GqjpuiF
+izoHCBy69Y9Vmhh1fuXsgWbRIXOhNUQLgD1bnF5vKheW0YMjiGZt5obicDIvUiLn
+yOd/xCxgXS/Dr55FBcOEArf9LAhST4Ldo/DUhgkC
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R3 O=Google Trust Services LLC
+# Subject: CN=GTS Root R3 O=Google Trust Services LLC
+# Label: "GTS Root R3"
+# Serial: 146587176140553309517047991083707763997
+# MD5 Fingerprint: 1a:79:5b:6b:04:52:9c:5d:c7:74:33:1b:25:9a:f9:25
+# SHA1 Fingerprint: 30:d4:24:6f:07:ff:db:91:89:8a:0b:e9:49:66:11:eb:8c:5e:46:e5
+# SHA256 Fingerprint: 15:d5:b8:77:46:19:ea:7d:54:ce:1c:a6:d0:b0:c4:03:e0:37:a9:17:f1:31:e8:a0:4e:1e:6b:7a:71:ba:bc:e5
+-----BEGIN CERTIFICATE-----
+MIICDDCCAZGgAwIBAgIQbkepx2ypcyRAiQ8DVd2NHTAKBggqhkjOPQQDAzBHMQsw
+CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
+MBIGA1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
+MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
+Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQA
+IgNiAAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout
+736GjOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2A
+DDL24CejQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
+DgQWBBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEAgFuk
+fCPAlaUs3L6JbyO5o91lAFJekazInXJ0glMLfalAvWhgxeG4VDvBNhcl2MG9AjEA
+njWSdIUlUfUk7GRSJFClH9voy8l27OyCbvWFGFPouOOaKaqW04MjyaR7YbPMAuhd
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R4 O=Google Trust Services LLC
+# Subject: CN=GTS Root R4 O=Google Trust Services LLC
+# Label: "GTS Root R4"
+# Serial: 146587176229350439916519468929765261721
+# MD5 Fingerprint: 5d:b6:6a:c4:60:17:24:6a:1a:99:a8:4b:ee:5e:b4:26
+# SHA1 Fingerprint: 2a:1d:60:27:d9:4a:b1:0a:1c:4d:91:5c:cd:33:a0:cb:3e:2d:54:cb
+# SHA256 Fingerprint: 71:cc:a5:39:1f:9e:79:4b:04:80:25:30:b3:63:e1:21:da:8a:30:43:bb:26:66:2f:ea:4d:ca:7f:c9:51:a4:bd
+-----BEGIN CERTIFICATE-----
+MIICCjCCAZGgAwIBAgIQbkepyIuUtui7OyrYorLBmTAKBggqhkjOPQQDAzBHMQsw
+CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
+MBIGA1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
+MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
+Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQA
+IgNiAATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzu
+hXyiQHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/l
+xKvRHYqjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
+DgQWBBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNnADBkAjBqUFJ0
+CMRw3J5QdCHojXohw0+WbhXRIjVhLfoIN+4Zba3bssx9BzT1YBkstTTZbyACMANx
+sbqjYAuG7ZoIapVon+Kz4ZNkfF6Tpt95LY2F45TPI11xzPKwTdb+mciUqXWi4w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=UCA Global G2 Root O=UniTrust
+# Subject: CN=UCA Global G2 Root O=UniTrust
+# Label: "UCA Global G2 Root"
+# Serial: 124779693093741543919145257850076631279
+# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8
+# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a
+# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9
+MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH
+bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x
+CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds
+b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr
+b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9
+kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm
+VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R
+VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc
+C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj
+tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY
+D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv
+j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl
+NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6
+iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP
+O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/
+BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV
+ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj
+L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5
+1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl
+1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU
+b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV
+PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj
+y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb
+EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg
+DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI
++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy
+YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX
+UB+K+wb1whnw0A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=UCA Extended Validation Root O=UniTrust
+# Subject: CN=UCA Extended Validation Root O=UniTrust
+# Label: "UCA Extended Validation Root"
+# Serial: 106100277556486529736699587978573607008
+# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2
+# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a
+# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24
+-----BEGIN CERTIFICATE-----
+MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH
+MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF
+eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx
+MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV
+BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog
+D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS
+sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop
+O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk
+sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi
+c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj
+VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz
+KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/
+TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G
+sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs
+1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD
+fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T
+AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN
+l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR
+ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ
+VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5
+c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp
+4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s
+t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj
+2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO
+vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C
+xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx
+cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM
+fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
+# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
+# Label: "Certigna Root CA"
+# Serial: 269714418870597844693661054334862075617
+# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77
+# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43
+# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68
+-----BEGIN CERTIFICATE-----
+MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw
+WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw
+MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x
+MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD
+VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX
+BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw
+ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO
+ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M
+CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu
+I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm
+TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh
+C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf
+ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz
+IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT
+Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k
+JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5
+hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB
+GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of
+1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov
+L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo
+dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr
+aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq
+hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L
+6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG
+HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6
+0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB
+lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi
+o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1
+gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v
+faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63
+Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh
+jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw
+3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI
+# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI
+# Label: "emSign Root CA - G1"
+# Serial: 235931866688319308814040
+# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac
+# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c
+# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67
+-----BEGIN CERTIFICATE-----
+MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD
+VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU
+ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH
+MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO
+MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv
+Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz
+f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO
+8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq
+d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM
+tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt
+Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB
+o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD
+AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x
+PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM
+wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d
+GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH
+6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby
+RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx
+iN66zB+Afko=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI
+# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI
+# Label: "emSign ECC Root CA - G3"
+# Serial: 287880440101571086945156
+# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40
+# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1
+# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b
+-----BEGIN CERTIFICATE-----
+MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG
+EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo
+bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g
+RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ
+TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s
+b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw
+djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0
+WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS
+fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB
+zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq
+hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB
+CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD
++JbNR6iC8hZVdyR+EhCVBCyj
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI
+# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI
+# Label: "emSign Root CA - C1"
+# Serial: 825510296613316004955058
+# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68
+# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01
+# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f
+-----BEGIN CERTIFICATE-----
+MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG
+A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg
+SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw
+MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln
+biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v
+dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ
+BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ
+HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH
+3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH
+GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c
+xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1
+aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq
+TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87
+/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4
+kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG
+YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT
++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo
+WXzhriKi4gp6D/piq1JM4fHfyr6DDUI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI
+# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI
+# Label: "emSign ECC Root CA - C3"
+# Serial: 582948710642506000014504
+# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5
+# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66
+# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3
+-----BEGIN CERTIFICATE-----
+MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG
+EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx
+IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw
+MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln
+biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND
+IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci
+MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti
+sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O
+BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB
+Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c
+3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J
+0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post
+# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post
+# Label: "Hongkong Post Root CA 3"
+# Serial: 46170865288971385588281144162979347873371282084
+# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0
+# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02
+# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6
+-----BEGIN CERTIFICATE-----
+MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL
+BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ
+SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n
+a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5
+NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT
+CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u
+Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO
+dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI
+VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV
+9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY
+2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY
+vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt
+bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb
+x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+
+l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK
+TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj
+Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e
+i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw
+DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG
+7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk
+MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr
+gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk
+GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS
+3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm
+Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+
+l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c
+JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP
+L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa
+LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG
+mpv0
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIFZTCCA02gAwIBAgIKUlD06gAAAAAAGDANBgkqhkiG9w0BAQ0FADAfMR0wGwYD
+VQQDExRZYW5kZXhJbnRlcm5hbFJvb3RDQTAeFw0xODA2MjgxMTE0NTdaFw0zMjA2
+MjgxMTI0NTdaMFsxEjAQBgoJkiaJk/IsZAEZFgJydTEWMBQGCgmSJomT8ixkARkW
+BnlhbmRleDESMBAGCgmSJomT8ixkARkWAmxkMRkwFwYDVQQDExBZYW5kZXhJbnRl
+cm5hbENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAy6Sab1PCbISk
+GSAUpr6JJKLXlf4O+cBhjALfQn2QpPL/cDjZ2+MPXuAUgE8KT+/mbAGA2rJID0KY
+RjDSkByxnhoX8jwWsmPYXoAmOMPkgKRG9/ZefnMrK4oVhGgLmxnpbEkNbGh88cJ1
+OVzgD5LVHSpDqm7iEuoUPOJCWXQ51+rZ0Lw9zBEU8v3yXXI345iWpLj92pOQDH0G
+Tqr7BnQywxcgb5BYdywayacIT7UTJZk7832m5k7Oa3qMIKKXHsx26rNVUVBfpzph
+OFvqkLetOKHk7827NDKr3I3OFXzQk4gy6tagv8PZNp+XGOBWfYkbLfI4xbTnjHIW
+n5q1gfKPOQIDAQABo4IBZTCCAWEwEAYJKwYBBAGCNxUBBAMCAQIwIwYJKwYBBAGC
+NxUCBBYEFNgaef9LcdQKs6qfsfiuWF5p/yqRMB0GA1UdDgQWBBSP3TKDCRNT3ZEa
+Zumz1DzFtPJnSDBZBgNVHSAEUjBQME4GBFUdIAAwRjBEBggrBgEFBQcCARY4aHR0
+cDovL2NybHMueWFuZGV4LnJ1L2Nwcy9ZYW5kZXhJbnRlcm5hbENBL3BvbGljaWVz
+Lmh0bWwwGQYJKwYBBAGCNxQCBAweCgBTAHUAYgBDAEEwCwYDVR0PBAQDAgGGMA8G
+A1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUq7nF/6Hv5lMdMzkihNF21DdOLWow
+VAYDVR0fBE0wSzBJoEegRYZDaHR0cDovL2NybHMueWFuZGV4LnJ1L1lhbmRleElu
+dGVybmFsUm9vdENBL1lhbmRleEludGVybmFsUm9vdENBLmNybDANBgkqhkiG9w0B
+AQ0FAAOCAgEAQnOiyykjwtSuCBV6rSiM8Q1rQIcfyqn1JBxSGeBMABc64loWSPaQ
+DtYPIW5rwNX7TQ94bjyYgCxhwHqUED/fcBOmXCQ2iBsdy5LOcNEZaC2kBHQuZ7dL
+0fSvpE98a41y9yY6CJGFXg8E/4GrQwgQEqT5Qbe9GHPadpRu+ptVvI6uLZG3ks2o
+oodjOm5C0SIo1pY4OtPAYE/AzTaYkTFbAqYcPfEfXHEOigBJBeXnQs7cANxX/RaF
+PnHEjZbGY57EtBP6p5ckndkfEmqp3PLXbsQteNOVpsUw5eVqEzinSisBmLc28nnr
+5QEojRontAaZd7ZzB5zaGkVuE+0laUUWSNBhfGE1R3LrTJEK9L7FEsBBprOxIWww
+CvLmAfglouwuNRc2TjRdfnZaEfPLD7NYIF4ahXPAMcfTii23Tlr2uB7LetNykSlX
+Z9S5/yf61VFEKnxuipFPNgtKqPcFgFUxlEb+wOeOfYZ7ex8VlpMBWbadj3Go025b
+KZUwKwHDQvgJ5pz9g3t+t5Xieu2pwyddWGu+1SItRohRhlyTiep7oW6yTps7Qt0e
+8pdLuLG7ZF19h1Pxi+dVbeaeNcsGEAOdRuCk+RTZHNe+J4yC8tNJOepnfYDul6SB
+RjFWthiFK45+TZRHAcsG9JuV8JNvgoKaL75v/GUsKaeJ3Cps3rBStfc=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIFGTCCAwGgAwIBAgIQJMM7ZIy2SYxCBgK7WcFwnjANBgkqhkiG9w0BAQ0FADAf
+MR0wGwYDVQQDExRZYW5kZXhJbnRlcm5hbFJvb3RDQTAeFw0xMzAyMTExMzQxNDNa
+Fw0zMzAyMTExMzUxNDJaMB8xHTAbBgNVBAMTFFlhbmRleEludGVybmFsUm9vdENB
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAgb4xoQjBQ7oEFk8EHVGy
+1pDEmPWw0Wgw5nX9RM7LL2xQWyUuEq+Lf9Dgh+O725aZ9+SO2oEs47DHHt81/fne
+5N6xOftRrCpy8hGtUR/A3bvjnQgjs+zdXvcO9cTuuzzPTFSts/iZATZsAruiepMx
+SGj9S1fGwvYws/yiXWNoNBz4Tu1Tlp0g+5fp/ADjnxc6DqNk6w01mJRDbx+6rlBO
+aIH2tQmJXDVoFdrhmBK9qOfjxWlIYGy83TnrvdXwi5mKTMtpEREMgyNLX75UjpvO
+NkZgBvEXPQq+g91wBGsWIE2sYlguXiBniQgAJOyRuSdTxcJoG8tZkLDPRi5RouWY
+gxXr13edn1TRDGco2hkdtSUBlajBMSvAq+H0hkslzWD/R+BXkn9dh0/DFnxVt4XU
+5JbFyd/sKV/rF4Vygfw9ssh1ZIWdqkfZ2QXOZ2gH4AEeoN/9vEfUPwqPVzL0XEZK
+r4s2WjU9mE5tHrVsQOZ80wnvYHYi2JHbl0hr5ghs4RIyJwx6LEEnj2tzMFec4f7o
+dQeSsZpgRJmpvpAfRTxhIRjZBrKxnMytedAkUPguBQwjVCn7+EaKiJfpu42JG8Mm
++/dHi+Q9Tc+0tX5pKOIpQMlMxMHw8MfPmUjC3AAd9lsmCtuybYoeN2IRdbzzchJ8
+l1ZuoI3gH7pcIeElfVSqSBkCAwEAAaNRME8wCwYDVR0PBAQDAgGGMA8GA1UdEwEB
+/wQFMAMBAf8wHQYDVR0OBBYEFKu5xf+h7+ZTHTM5IoTRdtQ3Ti1qMBAGCSsGAQQB
+gjcVAQQDAgEAMA0GCSqGSIb3DQEBDQUAA4ICAQAVpyJ1qLjqRLC34F1UXkC3vxpO
+nV6WgzpzA+DUNog4Y6RhTnh0Bsir+I+FTl0zFCm7JpT/3NP9VjfEitMkHehmHhQK
+c7cIBZSF62K477OTvLz+9ku2O/bGTtYv9fAvR4BmzFfyPDoAKOjJSghD1p/7El+1
+eSjvcUBzLnBUtxO/iYXRNo7B3+1qo4F5Hz7rPRLI0UWW/0UAfVCO2fFtyF6C1iEY
+/q0Ldbf3YIaMkf2WgGhnX9yH/8OiIij2r0LVNHS811apyycjep8y/NkG4q1Z9jEi
+VEX3P6NEL8dWtXQlvlNGMcfDT3lmB+tS32CPEUwce/Ble646rukbERRwFfxXojpf
+C6ium+LtJc7qnK6ygnYF4D6mz4H+3WaxJd1S1hGQxOb/3WVw63tZFnN62F6/nc5g
+6T44Yb7ND6y3nVcygLpbQsws6HsjX65CoSjrrPn0YhKxNBscF7M7tLTW/5LK9uhk
+yjRCkJ0YagpeLxfV1l1ZJZaTPZvY9+ylHnWHhzlq0FzcrooSSsp4i44DB2K7O2ID
+87leymZkKUY6PMDa4GkDJx0dG4UXDhRETMf+NkYgtLJ+UIzMNskwVDcxO4kVL+Hi
+Pj78bnC5yCw8P5YylR45LdxLzLO68unoXOyFz1etGXzszw8lJI9LNubYxk77mK8H
+LpuQKbSbIERsmR+QqQ==
+-----END CERTIFICATE-----
+"""
+
+
+def load_default_pem():
+ global data
+
+ return data.encode("utf-8")
diff --git a/contrib/python/ydb/py3/ydb/driver.py b/contrib/python/ydb/py3/ydb/driver.py
new file mode 100644
index 0000000000..89109b9b57
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/driver.py
@@ -0,0 +1,254 @@
+# -*- coding: utf-8 -*-
+from . import credentials as credentials_impl, table, scheme, pool
+from . import tracing
+import os
+import grpc
+from . import iam
+from . import _utilities
+
+from typing import Any # noqa
+
+
+class RPCCompression:
+ """Indicates the compression method to be used for an RPC."""
+
+ NoCompression = grpc.Compression.NoCompression
+ Deflate = grpc.Compression.Deflate
+ Gzip = grpc.Compression.Gzip
+
+
+def default_credentials(credentials=None, tracer=None):
+ tracer = tracer if tracer is not None else tracing.Tracer(None)
+ with tracer.trace("Driver.default_credentials") as ctx:
+ if credentials is None:
+ ctx.trace({"credentials.anonymous": True})
+ return credentials_impl.AnonymousCredentials()
+ else:
+ ctx.trace({"credentials.prepared": True})
+ return credentials
+
+
+def credentials_from_env_variables(tracer=None):
+ tracer = tracer if tracer is not None else tracing.Tracer(None)
+ with tracer.trace("Driver.credentials_from_env_variables") as ctx:
+ service_account_key_file = os.getenv("YDB_SERVICE_ACCOUNT_KEY_FILE_CREDENTIALS")
+ if service_account_key_file is not None:
+ ctx.trace({"credentials.service_account_key_file": True})
+ import ydb.iam
+
+ return ydb.iam.ServiceAccountCredentials.from_file(service_account_key_file)
+
+ anonymous_credetials = os.getenv("YDB_ANONYMOUS_CREDENTIALS", "0") == "1"
+ if anonymous_credetials:
+ ctx.trace({"credentials.anonymous": True})
+ return credentials_impl.AnonymousCredentials()
+
+ metadata_credentials = os.getenv("YDB_METADATA_CREDENTIALS", "0") == "1"
+ if metadata_credentials:
+ ctx.trace({"credentials.metadata": True})
+
+ return iam.MetadataUrlCredentials(tracer=tracer)
+
+ access_token = os.getenv("YDB_ACCESS_TOKEN_CREDENTIALS")
+ if access_token is not None:
+ ctx.trace({"credentials.access_token": True})
+ return credentials_impl.AuthTokenCredentials(access_token)
+
+ ctx.trace(
+ {
+ "credentials.env_default": True,
+ "credentials.metadata": True,
+ }
+ )
+ return iam.MetadataUrlCredentials(tracer=tracer)
+
+
+class DriverConfig(object):
+ __slots__ = (
+ "endpoint",
+ "database",
+ "ca_cert",
+ "channel_options",
+ "credentials",
+ "use_all_nodes",
+ "root_certificates",
+ "certificate_chain",
+ "private_key",
+ "grpc_keep_alive_timeout",
+ "secure_channel",
+ "table_client_settings",
+ "topic_client_settings",
+ "endpoints",
+ "primary_user_agent",
+ "tracer",
+ "grpc_lb_policy_name",
+ "discovery_request_timeout",
+ "compression",
+ )
+
+ def __init__(
+ self,
+ endpoint,
+ database=None,
+ ca_cert=None,
+ auth_token=None,
+ channel_options=None,
+ credentials=None,
+ use_all_nodes=False,
+ root_certificates=None,
+ certificate_chain=None,
+ private_key=None,
+ grpc_keep_alive_timeout=None,
+ table_client_settings=None,
+ topic_client_settings=None,
+ endpoints=None,
+ primary_user_agent="python-library",
+ tracer=None,
+ grpc_lb_policy_name="round_robin",
+ discovery_request_timeout=10,
+ compression=None,
+ ):
+ """
+ A driver config to initialize a driver instance
+
+ :param endpoint: An endpoint specified in pattern host:port to be used for initial channel initialization and for YDB endpoint discovery mechanism
+ :param database: A name of the database
+ :param ca_cert: A CA certificate when SSL should be used
+ :param auth_token: An authentication token
+ :param credentials: An instance of AbstractCredentials
+ :param use_all_nodes: A balancing policy that forces to use all available nodes.
+ :param root_certificates: The PEM-encoded root certificates as a byte string.
+ :param private_key: The PEM-encoded private key as a byte string, or None if no\
+ private key should be used.
+ :param certificate_chain: The PEM-encoded certificate chain as a byte string\
+ to use, or None if no certificate chain should be used.
+ :param grpc_keep_alive_timeout: GRpc KeepAlive timeout, ms
+ :param ydb.Tracer tracer: ydb.Tracer instance to trace requests in driver.\
+ If tracing aio ScopeManager must be ContextVarsScopeManager
+ :param grpc_lb_policy_name: A load balancing policy to be used for discovery channel construction. Default value is `round_robin`
+ :param discovery_request_timeout: A default timeout to complete the discovery. The default value is 10 seconds.
+
+ """
+ self.endpoint = endpoint
+ self.database = database
+ self.ca_cert = ca_cert
+ self.channel_options = channel_options
+ self.secure_channel = _utilities.is_secure_protocol(endpoint)
+ self.endpoint = _utilities.wrap_endpoint(self.endpoint)
+ self.endpoints = []
+ if endpoints is not None:
+ self.endpoints = [_utilities.wrap_endpoint(endp) for endp in endpoints]
+ if auth_token is not None:
+ credentials = credentials_impl.AuthTokenCredentials(auth_token)
+ self.credentials = credentials
+ self.use_all_nodes = use_all_nodes
+ self.root_certificates = root_certificates
+ self.certificate_chain = certificate_chain
+ self.private_key = private_key
+ self.grpc_keep_alive_timeout = grpc_keep_alive_timeout
+ self.table_client_settings = table_client_settings
+ self.topic_client_settings = topic_client_settings
+ self.primary_user_agent = primary_user_agent
+ self.tracer = tracer if tracer is not None else tracing.Tracer(None)
+ self.grpc_lb_policy_name = grpc_lb_policy_name
+ self.discovery_request_timeout = discovery_request_timeout
+ self.compression = compression
+
+ def set_database(self, database):
+ self.database = database
+ return self
+
+ @classmethod
+ def default_from_endpoint_and_database(cls, endpoint, database, root_certificates=None, credentials=None, **kwargs):
+ return cls(
+ endpoint,
+ database,
+ credentials=default_credentials(credentials),
+ root_certificates=root_certificates,
+ **kwargs
+ )
+
+ @classmethod
+ def default_from_connection_string(cls, connection_string, root_certificates=None, credentials=None, **kwargs):
+ endpoint, database = _utilities.parse_connection_string(connection_string)
+ return cls(
+ endpoint,
+ database,
+ credentials=default_credentials(credentials),
+ root_certificates=root_certificates,
+ **kwargs
+ )
+
+ def set_grpc_keep_alive_timeout(self, timeout):
+ self.grpc_keep_alive_timeout = timeout
+ return self
+
+
+ConnectionParams = DriverConfig
+
+
+def get_config(
+ driver_config=None,
+ connection_string=None,
+ endpoint=None,
+ database=None,
+ root_certificates=None,
+ credentials=None,
+ config_class=DriverConfig,
+ **kwargs
+):
+ if driver_config is None:
+ if connection_string is not None:
+ driver_config = config_class.default_from_connection_string(
+ connection_string, root_certificates, credentials, **kwargs
+ )
+ else:
+ driver_config = config_class.default_from_endpoint_and_database(
+ endpoint, database, root_certificates, credentials, **kwargs
+ )
+ return driver_config
+ return driver_config
+
+
+class Driver(pool.ConnectionPool):
+ __slots__ = ("scheme_client", "table_client")
+
+ def __init__(
+ self,
+ driver_config=None,
+ connection_string=None,
+ endpoint=None,
+ database=None,
+ root_certificates=None,
+ credentials=None,
+ **kwargs
+ ):
+ """
+ Constructs a driver instance to be used in table and scheme clients.
+ It encapsulates endpoints discovery mechanism and provides ability to execute RPCs
+ on discovered endpoints
+
+ :param driver_config: A driver config
+ :param connection_string: A string in the following format: <protocol>://<hostname>:<port>/?database=/path/to/the/database
+ :param endpoint: An endpoint specified in the following format: <protocol>://<hostname>:<port>
+ :param database: A database path
+ :param credentials: Credentials. If not specified, credentials are constructed by default.
+ """
+ from . import topic # local import for prevent cycle import error
+
+ driver_config = get_config(
+ driver_config,
+ connection_string,
+ endpoint,
+ database,
+ root_certificates,
+ credentials,
+ )
+
+ super(Driver, self).__init__(driver_config)
+
+ self._credentials = driver_config.credentials
+
+ self.scheme_client = scheme.SchemeClient(self)
+ self.table_client = table.TableClient(self, driver_config.table_client_settings)
+ self.topic_client = topic.TopicClient(self, driver_config.topic_client_settings)
diff --git a/contrib/python/ydb/py3/ydb/export.py b/contrib/python/ydb/py3/ydb/export.py
new file mode 100644
index 0000000000..827925c7bc
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/export.py
@@ -0,0 +1,286 @@
+import enum
+import typing
+
+from . import _apis
+
+from . import settings_impl as s_impl
+
+# Workaround for good IDE and universal for runtime
+if typing.TYPE_CHECKING:
+ from ._grpc.v4.protos import ydb_export_pb2
+ from ._grpc.v4 import ydb_export_v1_pb2_grpc
+else:
+ from ._grpc.common.protos import ydb_export_pb2
+ from ._grpc.common import ydb_export_v1_pb2_grpc
+
+from . import operation
+
+_ExportToYt = "ExportToYt"
+_ExportToS3 = "ExportToS3"
+_progresses = {}
+
+
+@enum.unique
+class ExportProgress(enum.IntEnum):
+ UNSPECIFIED = 0
+ PREPARING = 1
+ TRANSFER_DATA = 2
+ DONE = 3
+ CANCELLATION = 4
+ CANCELLED = 5
+
+
+def _initialize_progresses():
+ for key, value in ydb_export_pb2.ExportProgress.Progress.items():
+ _progresses[value] = getattr(ExportProgress, key[len("PROGRESS_") :])
+
+
+_initialize_progresses()
+
+
+class ExportToYTOperation(operation.Operation):
+ def __init__(self, rpc_state, response, driver):
+ super(ExportToYTOperation, self).__init__(rpc_state, response, driver)
+ metadata = ydb_export_pb2.ExportToYtMetadata()
+ response.operation.metadata.Unpack(metadata)
+ self.progress = _progresses.get(metadata.progress)
+ self.items_progress = metadata.items_progress
+
+ def __str__(self):
+ return "ExportToYTOperation<id: %s, progress: %s>" % (
+ self.id,
+ self.progress.name,
+ )
+
+ def __repr__(self):
+ return self.__str__()
+
+
+class ExportToS3Operation(operation.Operation):
+ def __init__(self, rpc_state, response, driver):
+ super(ExportToS3Operation, self).__init__(rpc_state, response, driver)
+ metadata = ydb_export_pb2.ExportToS3Metadata()
+ response.operation.metadata.Unpack(metadata)
+ self.progress = _progresses.get(metadata.progress)
+ self.items_progress = metadata.items_progress
+
+ def __str__(self):
+ return "ExportToS3Operation<id: %s, progress: %s>" % (
+ self.id,
+ self.progress.name,
+ )
+
+ def __repr__(self):
+ return self.__str__()
+
+
+class ExportToYTSettings(s_impl.BaseRequestSettings):
+ def __init__(self):
+ super(ExportToYTSettings, self).__init__()
+ self.items = []
+ self.number_of_retries = 0
+ self.token = None
+ self.host = None
+ self.port = None
+ self.uid = None
+ self.use_type_v3 = False
+
+ def with_port(self, port):
+ self.port = port
+ return self
+
+ def with_host(self, host):
+ self.host = host
+ return self
+
+ def with_uid(self, uid):
+ self.uid = uid
+ return self
+
+ def with_token(self, token):
+ self.token = token
+ return self
+
+ def with_item(self, item):
+ """
+ :param item: A source & destination tuple to export.
+ """
+ self.items.append(item)
+ return self
+
+ def with_source_and_destination(self, source_path, destination_path):
+ return self.with_item((source_path, destination_path))
+
+ def with_number_of_retries(self, number_of_retries):
+ self.number_of_retries = number_of_retries
+ return self
+
+ def with_items(self, *items):
+ self.items.extend(items)
+ return self
+
+ def with_use_type_v3(self, use_type_v3):
+ self.use_type_v3 = use_type_v3
+ return self
+
+
+class ExportToS3Settings(s_impl.BaseRequestSettings):
+ def __init__(self):
+ super(ExportToS3Settings, self).__init__()
+ self.items = []
+ self.bucket = None
+ self.endpoint = None
+ self.scheme = 2
+ self.uid = None
+ self.access_key = None
+ self.secret_key = None
+ self.number_of_retries = 0
+ self.storage_class = None
+ self.export_compression = None
+
+ def with_scheme(self, scheme):
+ self.scheme = scheme
+ return self
+
+ def with_storage_class(self, storage_class):
+ self.storage_class = storage_class
+ return self
+
+ def with_export_compression(self, compression):
+ self.export_compression = compression
+ return self
+
+ def with_bucket(self, bucket):
+ self.bucket = bucket
+ return self
+
+ def with_endpoint(self, endpoint):
+ self.endpoint = endpoint
+ return self
+
+ def with_access_key(self, access_key):
+ self.access_key = access_key
+ return self
+
+ def with_uid(self, uid):
+ self.uid = uid
+ return self
+
+ def with_secret_key(self, secret_key):
+ self.secret_key = secret_key
+ return self
+
+ def with_number_of_retries(self, number_of_retries):
+ self.number_of_retries = number_of_retries
+ return self
+
+ def with_source_and_destination(self, source_path, destination_prefix):
+ return self.with_item((source_path, destination_prefix))
+
+ def with_item(self, item):
+ self.items.append(item)
+ return self
+
+ def with_items(self, *items):
+ self.items.extend(items)
+ return self
+
+
+def _export_to_yt_request_factory(settings):
+ request = ydb_export_pb2.ExportToYtRequest(
+ settings=ydb_export_pb2.ExportToYtSettings(host=settings.host, token=settings.token)
+ )
+
+ if settings.number_of_retries > 0:
+ request.settings.number_of_retries = settings.number_of_retries
+
+ if settings.port:
+ request.settings.port = settings.port
+
+ if settings.use_type_v3:
+ request.settings.use_type_v3 = settings.use_type_v3
+
+ for source_path, destination_path in settings.items:
+ request.settings.items.add(source_path=source_path, destination_path=destination_path)
+
+ return request
+
+
+def _get_operation_request(operation_id):
+ request = _apis.ydb_operation.GetOperationRequest(id=operation_id)
+ return request
+
+
+def _export_to_s3_request_factory(settings):
+ request = ydb_export_pb2.ExportToS3Request(
+ settings=ydb_export_pb2.ExportToS3Settings(
+ endpoint=settings.endpoint,
+ bucket=settings.bucket,
+ access_key=settings.access_key,
+ secret_key=settings.secret_key,
+ scheme=settings.scheme,
+ storage_class=settings.storage_class,
+ )
+ )
+
+ if settings.uid is not None:
+ request.operation_params.labels["uid"] = settings.uid
+
+ if settings.number_of_retries > 0:
+ request.settings.number_of_retries = settings.number_of_retries
+
+ if settings.export_compression is not None:
+ request.settings.compression = settings.export_compression
+
+ for source_path, destination_prefix in settings.items:
+ request.settings.items.add(
+ source_path=source_path,
+ destination_prefix=destination_prefix,
+ )
+
+ return request
+
+
+class ExportClient(object):
+ def __init__(self, driver):
+ self._driver = driver
+
+ def get_export_to_s3_operation(self, operation_id, settings=None):
+ return self._driver(
+ _get_operation_request(operation_id),
+ _apis.OperationService.Stub,
+ _apis.OperationService.GetOperation,
+ ExportToS3Operation,
+ settings,
+ (self._driver,),
+ )
+
+ def export_to_s3(self, settings):
+ return self._driver(
+ _export_to_s3_request_factory(settings),
+ ydb_export_v1_pb2_grpc.ExportServiceStub,
+ _ExportToS3,
+ ExportToS3Operation,
+ settings,
+ (self._driver,),
+ )
+
+ def export_to_yt(self, settings):
+ return self._driver(
+ _export_to_yt_request_factory(settings),
+ ydb_export_v1_pb2_grpc.ExportServiceStub,
+ _ExportToYt,
+ ExportToYTOperation,
+ settings,
+ (self._driver,),
+ )
+
+ def async_export_to_yt(self, settings):
+ return self._driver.future(
+ _export_to_yt_request_factory(settings),
+ ydb_export_v1_pb2_grpc.ExportServiceStub,
+ _ExportToYt,
+ ExportToYTOperation,
+ settings,
+ (self._driver,),
+ )
diff --git a/contrib/python/ydb/py3/ydb/global_settings.py b/contrib/python/ydb/py3/ydb/global_settings.py
new file mode 100644
index 0000000000..8edac3f4b4
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/global_settings.py
@@ -0,0 +1,24 @@
+import warnings
+
+from . import convert
+from . import table
+
+
+def global_allow_truncated_result(enabled: bool = True):
+ if convert._default_allow_truncated_result == enabled:
+ return
+
+ if enabled:
+ warnings.warn("Global allow truncated response is deprecated behaviour.")
+
+ convert._default_allow_truncated_result = enabled
+
+
+def global_allow_split_transactions(enabled: bool):
+ if table._default_allow_split_transaction == enabled:
+ return
+
+ if enabled:
+ warnings.warn("Global allow split transaction is deprecated behaviour.")
+
+ table._default_allow_split_transaction = enabled
diff --git a/contrib/python/ydb/py3/ydb/iam/__init__.py b/contrib/python/ydb/py3/ydb/iam/__init__.py
new file mode 100644
index 0000000000..7167efe13e
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/iam/__init__.py
@@ -0,0 +1,3 @@
+# -*- coding: utf-8 -*-
+from .auth import ServiceAccountCredentials # noqa
+from .auth import MetadataUrlCredentials # noqa
diff --git a/contrib/python/ydb/py3/ydb/iam/auth.py b/contrib/python/ydb/py3/ydb/iam/auth.py
new file mode 100644
index 0000000000..82e7c9f6c8
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/iam/auth.py
@@ -0,0 +1,178 @@
+# -*- coding: utf-8 -*-
+from ydb import credentials, tracing
+import grpc
+import time
+import abc
+from datetime import datetime
+import json
+import os
+
+try:
+ from yandex.cloud.iam.v1 import iam_token_service_pb2_grpc
+ from yandex.cloud.iam.v1 import iam_token_service_pb2
+ import jwt
+except ImportError:
+ jwt = None
+ iam_token_service_pb2_grpc = None
+ iam_token_service_pb2 = None
+
+try:
+ import requests
+except ImportError:
+ requests = None
+
+
+DEFAULT_METADATA_URL = "http://169.254.169.254/computeMetadata/v1/instance/service-accounts/default/token"
+
+
+def get_jwt(account_id, access_key_id, private_key, jwt_expiration_timeout):
+ now = time.time()
+ now_utc = datetime.utcfromtimestamp(now)
+ exp_utc = datetime.utcfromtimestamp(now + jwt_expiration_timeout)
+ return jwt.encode(
+ key=private_key,
+ algorithm="PS256",
+ headers={"typ": "JWT", "alg": "PS256", "kid": access_key_id},
+ payload={
+ "iss": account_id,
+ "aud": "https://iam.api.cloud.yandex.net/iam/v1/tokens",
+ "iat": now_utc,
+ "exp": exp_utc,
+ },
+ )
+
+
+class TokenServiceCredentials(credentials.AbstractExpiringTokenCredentials):
+ def __init__(self, iam_endpoint=None, iam_channel_credentials=None, tracer=None):
+ super(TokenServiceCredentials, self).__init__(tracer)
+ assert iam_token_service_pb2_grpc is not None, 'run pip install "ydb[yc]" to use service account credentials'
+ self._get_token_request_timeout = 10
+ self._iam_token_service_pb2 = iam_token_service_pb2
+ self._iam_token_service_pb2_grpc = iam_token_service_pb2_grpc
+ self._iam_endpoint = "iam.api.cloud.yandex.net:443" if iam_endpoint is None else iam_endpoint
+ self._iam_channel_credentials = {} if iam_channel_credentials is None else iam_channel_credentials
+
+ def _channel_factory(self):
+ return grpc.secure_channel(
+ self._iam_endpoint,
+ grpc.ssl_channel_credentials(**self._iam_channel_credentials),
+ )
+
+ @abc.abstractmethod
+ def _get_token_request(self):
+ pass
+
+ @tracing.with_trace()
+ def _make_token_request(self):
+ with self._channel_factory() as channel:
+ tracing.trace(self.tracer, {"iam_token.from_service": True})
+ stub = self._iam_token_service_pb2_grpc.IamTokenServiceStub(channel)
+ response = stub.Create(self._get_token_request(), timeout=self._get_token_request_timeout)
+ expires_in = max(0, response.expires_at.seconds - int(time.time()))
+ return {"access_token": response.iam_token, "expires_in": expires_in}
+
+
+class BaseJWTCredentials(abc.ABC):
+ def __init__(self, account_id, access_key_id, private_key):
+ self._account_id = account_id
+ self._jwt_expiration_timeout = 60.0 * 60
+ self._token_expiration_timeout = 120
+ self._access_key_id = access_key_id
+ self._private_key = private_key
+
+ def set_token_expiration_timeout(self, value):
+ self._token_expiration_timeout = value
+ return self
+
+ @classmethod
+ def from_file(cls, key_file, iam_endpoint=None, iam_channel_credentials=None):
+ with open(os.path.expanduser(key_file), "r") as r:
+ output = json.loads(r.read())
+ account_id = output.get("service_account_id", None)
+ if account_id is None:
+ account_id = output.get("user_account_id", None)
+ return cls(
+ account_id,
+ output["id"],
+ output["private_key"],
+ iam_endpoint=iam_endpoint,
+ iam_channel_credentials=iam_channel_credentials,
+ )
+
+
+class JWTIamCredentials(TokenServiceCredentials, BaseJWTCredentials):
+ def __init__(
+ self,
+ account_id,
+ access_key_id,
+ private_key,
+ iam_endpoint=None,
+ iam_channel_credentials=None,
+ ):
+ TokenServiceCredentials.__init__(self, iam_endpoint, iam_channel_credentials)
+ BaseJWTCredentials.__init__(self, account_id, access_key_id, private_key)
+
+ def _get_token_request(self):
+ return self._iam_token_service_pb2.CreateIamTokenRequest(
+ jwt=get_jwt(
+ self._account_id,
+ self._access_key_id,
+ self._private_key,
+ self._jwt_expiration_timeout,
+ )
+ )
+
+
+class YandexPassportOAuthIamCredentials(TokenServiceCredentials):
+ def __init__(
+ self,
+ yandex_passport_oauth_token,
+ iam_endpoint=None,
+ iam_channel_credentials=None,
+ ):
+ self._yandex_passport_oauth_token = yandex_passport_oauth_token
+ super(YandexPassportOAuthIamCredentials, self).__init__(iam_endpoint, iam_channel_credentials)
+
+ def _get_token_request(self):
+ return iam_token_service_pb2.CreateIamTokenRequest(
+ yandex_passport_oauth_token=self._yandex_passport_oauth_token
+ )
+
+
+class MetadataUrlCredentials(credentials.AbstractExpiringTokenCredentials):
+ def __init__(self, metadata_url=None, tracer=None):
+ """
+ :param metadata_url: Metadata url
+ :param ydb.Tracer tracer: ydb tracer
+ """
+ super(MetadataUrlCredentials, self).__init__(tracer)
+ assert requests is not None, "Install requests library to use metadata credentials provider"
+ self.extra_error_message = (
+ "Check that metadata service configured properly since we failed to fetch it from metadata_url."
+ )
+ self._metadata_url = DEFAULT_METADATA_URL if metadata_url is None else metadata_url
+ self._tp.submit(self._refresh)
+
+ @tracing.with_trace()
+ def _make_token_request(self):
+ response = requests.get(self._metadata_url, headers={"Metadata-Flavor": "Google"}, timeout=3)
+ response.raise_for_status()
+ return json.loads(response.text)
+
+
+class ServiceAccountCredentials(JWTIamCredentials):
+ def __init__(
+ self,
+ service_account_id,
+ access_key_id,
+ private_key,
+ iam_endpoint=None,
+ iam_channel_credentials=None,
+ ):
+ super(ServiceAccountCredentials, self).__init__(
+ service_account_id,
+ access_key_id,
+ private_key,
+ iam_endpoint,
+ iam_channel_credentials,
+ )
diff --git a/contrib/python/ydb/py3/ydb/import_client.py b/contrib/python/ydb/py3/ydb/import_client.py
new file mode 100644
index 0000000000..d94294ca7c
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/import_client.py
@@ -0,0 +1,164 @@
+import enum
+import typing
+
+from . import _apis
+
+from . import settings_impl as s_impl
+
+# Workaround for good IDE and universal for runtime
+if typing.TYPE_CHECKING:
+ from ._grpc.v4.protos import ydb_import_pb2
+ from ._grpc.v4 import ydb_import_v1_pb2_grpc
+else:
+ from ._grpc.common.protos import ydb_import_pb2
+ from ._grpc.common import ydb_import_v1_pb2_grpc
+
+
+from . import operation
+
+_ImportFromS3 = "ImportFromS3"
+_progresses = {}
+
+
+@enum.unique
+class ImportProgress(enum.IntEnum):
+ UNSPECIFIED = 0
+ PREPARING = 1
+ TRANSFER_DATA = 2
+ BUILD_INDEXES = 3
+ DONE = 4
+ CANCELLATION = 5
+ CANCELLED = 6
+
+
+def _initialize_progresses():
+ for key, value in ydb_import_pb2.ImportProgress.Progress.items():
+ _progresses[value] = getattr(ImportProgress, key[len("PROGRESS_") :])
+
+
+_initialize_progresses()
+
+
+class ImportFromS3Operation(operation.Operation):
+ def __init__(self, rpc_state, response, driver):
+ super(ImportFromS3Operation, self).__init__(rpc_state, response, driver)
+ metadata = ydb_import_pb2.ImportFromS3Metadata()
+ response.operation.metadata.Unpack(metadata)
+ self.progress = _progresses.get(metadata.progress)
+
+ def __str__(self):
+ return "ImportFromS3Operation<id: %s, progress: %s>" % (
+ self.id,
+ self.progress.name,
+ )
+
+ def __repr__(self):
+ return self.__str__()
+
+
+class ImportFromS3Settings(s_impl.BaseRequestSettings):
+ def __init__(self):
+ super(ImportFromS3Settings, self).__init__()
+ self.items = []
+ self.bucket = None
+ self.endpoint = None
+ self.scheme = 2
+ self.uid = None
+ self.access_key = None
+ self.secret_key = None
+ self.number_of_retries = 0
+
+ def with_scheme(self, scheme):
+ self.scheme = scheme
+ return self
+
+ def with_bucket(self, bucket):
+ self.bucket = bucket
+ return self
+
+ def with_endpoint(self, endpoint):
+ self.endpoint = endpoint
+ return self
+
+ def with_access_key(self, access_key):
+ self.access_key = access_key
+ return self
+
+ def with_uid(self, uid):
+ self.uid = uid
+ return self
+
+ def with_secret_key(self, secret_key):
+ self.secret_key = secret_key
+ return self
+
+ def with_number_of_retries(self, number_of_retries):
+ self.number_of_retries = number_of_retries
+ return self
+
+ def with_source_and_destination(self, source_path, destination_prefix):
+ return self.with_item((source_path, destination_prefix))
+
+ def with_item(self, item):
+ self.items.append(item)
+ return self
+
+ def with_items(self, *items):
+ self.items.extend(items)
+ return self
+
+
+def _get_operation_request(operation_id):
+ request = _apis.ydb_operation.GetOperationRequest(id=operation_id)
+ return request
+
+
+def _import_from_s3_request_factory(settings):
+ request = ydb_import_pb2.ImportFromS3Request(
+ settings=ydb_import_pb2.ImportFromS3Settings(
+ endpoint=settings.endpoint,
+ bucket=settings.bucket,
+ access_key=settings.access_key,
+ secret_key=settings.secret_key,
+ scheme=settings.scheme,
+ )
+ )
+
+ if settings.uid is not None:
+ request.operation_params.labels["uid"] = settings.uid
+
+ if settings.number_of_retries > 0:
+ request.settings.number_of_retries = settings.number_of_retries
+
+ for source, destination in settings.items:
+ request.settings.items.add(
+ source_prefix=source,
+ destination_path=destination,
+ )
+
+ return request
+
+
+class ImportClient(object):
+ def __init__(self, driver):
+ self._driver = driver
+
+ def get_import_from_s3_operation(self, operation_id, settings=None):
+ return self._driver(
+ _get_operation_request(operation_id),
+ _apis.OperationService.Stub,
+ _apis.OperationService.GetOperation,
+ ImportFromS3Operation,
+ settings,
+ (self._driver,),
+ )
+
+ def import_from_s3(self, settings):
+ return self._driver(
+ _import_from_s3_request_factory(settings),
+ ydb_import_v1_pb2_grpc.ImportServiceStub,
+ _ImportFromS3,
+ ImportFromS3Operation,
+ settings,
+ (self._driver,),
+ )
diff --git a/contrib/python/ydb/py3/ydb/interceptor.py b/contrib/python/ydb/py3/ydb/interceptor.py
new file mode 100644
index 0000000000..5405177a8e
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/interceptor.py
@@ -0,0 +1,68 @@
+# -*- coding: utf-8 -*-
+import grpc
+from concurrent import futures
+from grpc._cython import cygrpc
+from grpc._channel import _handle_event, _EMPTY_FLAGS
+
+
+def _event_handler(state, response_deserializer):
+ def handle_event(event):
+ with state.condition:
+ callbacks = _handle_event(event, state, response_deserializer)
+ state.condition.notify_all()
+ done = not state.due
+ for callback in callbacks:
+ callback()
+
+ if getattr(state, "on_event_handler_callback", None) is not None:
+ state.on_event_handler_callback(state)
+
+ return done and state.fork_epoch >= cygrpc.get_fork_epoch()
+
+ return handle_event
+
+
+def on_event_callback(future, it, response_wrapper):
+ def _callback(state):
+ with state.condition:
+ if state.response is not None:
+ response = state.response
+ state.response = None
+ if not future.done():
+ try:
+ future.set_result(response_wrapper(response))
+ except Exception as e:
+ future.set_exception(e)
+ elif cygrpc.OperationType.receive_message not in state.due:
+ if state.code is grpc.StatusCode.OK:
+ if not future.done():
+ future.set_exception(StopIteration())
+ elif state.code is not None:
+ if not future.done():
+ future.set_exception(it)
+
+ return _callback
+
+
+def operate_async_stream_call(it, wrapper):
+ future = futures.Future()
+ callback = on_event_callback(future, it, wrapper)
+
+ with it._state.condition:
+ if it._state.code is None:
+ it._state.on_event_handler_callback = callback
+ operating = it._call.operate(
+ (cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),),
+ _event_handler(it._state, it._response_deserializer),
+ )
+ if operating:
+ it._state.due.add(cygrpc.OperationType.receive_message)
+ elif it._state.code is grpc.StatusCode.OK:
+ future.set_exception(StopIteration())
+ else:
+ future.set_exception(it)
+ return future
+
+
+def monkey_patch_event_handler():
+ grpc._channel._event_handler = _event_handler
diff --git a/contrib/python/ydb/py3/ydb/issues.py b/contrib/python/ydb/py3/ydb/issues.py
new file mode 100644
index 0000000000..065dcbc80c
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/issues.py
@@ -0,0 +1,226 @@
+# -*- coding: utf-8 -*-
+from __future__ import annotations
+
+from google.protobuf import text_format
+import enum
+import queue
+import typing
+
+from . import _apis
+
+# Workaround for good IDE and universal for runtime
+if typing.TYPE_CHECKING:
+ from _grpc.v4.protos import ydb_issue_message_pb2, ydb_operation_pb2
+else:
+ from ._grpc.common.protos import ydb_issue_message_pb2, ydb_operation_pb2
+
+
+_TRANSPORT_STATUSES_FIRST = 401000
+_CLIENT_STATUSES_FIRST = 402000
+
+
+@enum.unique
+class StatusCode(enum.IntEnum):
+ STATUS_CODE_UNSPECIFIED = _apis.StatusIds.STATUS_CODE_UNSPECIFIED
+ SUCCESS = _apis.StatusIds.SUCCESS
+ BAD_REQUEST = _apis.StatusIds.BAD_REQUEST
+ UNAUTHORIZED = _apis.StatusIds.UNAUTHORIZED
+ INTERNAL_ERROR = _apis.StatusIds.INTERNAL_ERROR
+ ABORTED = _apis.StatusIds.ABORTED
+ UNAVAILABLE = _apis.StatusIds.UNAVAILABLE
+ OVERLOADED = _apis.StatusIds.OVERLOADED
+ SCHEME_ERROR = _apis.StatusIds.SCHEME_ERROR
+ GENERIC_ERROR = _apis.StatusIds.GENERIC_ERROR
+ TIMEOUT = _apis.StatusIds.TIMEOUT
+ BAD_SESSION = _apis.StatusIds.BAD_SESSION
+ PRECONDITION_FAILED = _apis.StatusIds.PRECONDITION_FAILED
+ ALREADY_EXISTS = _apis.StatusIds.ALREADY_EXISTS
+ NOT_FOUND = _apis.StatusIds.NOT_FOUND
+ SESSION_EXPIRED = _apis.StatusIds.SESSION_EXPIRED
+ CANCELLED = _apis.StatusIds.CANCELLED
+ UNDETERMINED = _apis.StatusIds.UNDETERMINED
+ UNSUPPORTED = _apis.StatusIds.UNSUPPORTED
+ SESSION_BUSY = _apis.StatusIds.SESSION_BUSY
+ EXTERNAL_ERROR = _apis.StatusIds.EXTERNAL_ERROR
+
+ CONNECTION_LOST = _TRANSPORT_STATUSES_FIRST + 10
+ CONNECTION_FAILURE = _TRANSPORT_STATUSES_FIRST + 20
+ DEADLINE_EXCEEDED = _TRANSPORT_STATUSES_FIRST + 30
+ CLIENT_INTERNAL_ERROR = _TRANSPORT_STATUSES_FIRST + 40
+ UNIMPLEMENTED = _TRANSPORT_STATUSES_FIRST + 50
+
+ UNAUTHENTICATED = _CLIENT_STATUSES_FIRST + 30
+ SESSION_POOL_EMPTY = _CLIENT_STATUSES_FIRST + 40
+
+
+# TODO: convert from proto IssueMessage
+class _IssueMessage:
+ def __init__(self, message: str, issue_code: int, severity: int, issues) -> None:
+ self.message = message
+ self.issue_code = issue_code
+ self.severity = severity
+ self.issues = issues
+
+
+class Error(Exception):
+ status = None
+
+ def __init__(self, message: str, issues: typing.Optional[typing.Iterable[_IssueMessage]] = None):
+ super(Error, self).__init__(message)
+ self.issues = issues
+ self.message = message
+
+
+class TruncatedResponseError(Error):
+ status = None
+
+
+class ConnectionError(Error):
+ status = None
+
+
+class ConnectionFailure(ConnectionError):
+ status = StatusCode.CONNECTION_FAILURE
+
+
+class ConnectionLost(ConnectionError):
+ status = StatusCode.CONNECTION_LOST
+
+
+class DeadlineExceed(ConnectionError):
+ status = StatusCode.DEADLINE_EXCEEDED
+
+
+class Unimplemented(ConnectionError):
+ status = StatusCode.UNIMPLEMENTED
+
+
+class Unauthenticated(Error):
+ status = StatusCode.UNAUTHENTICATED
+
+
+class BadRequest(Error):
+ status = StatusCode.BAD_REQUEST
+
+
+class Unauthorized(Error):
+ status = StatusCode.UNAUTHORIZED
+
+
+class InternalError(Error):
+ status = StatusCode.INTERNAL_ERROR
+
+
+class Aborted(Error):
+ status = StatusCode.ABORTED
+
+
+class Unavailable(Error):
+ status = StatusCode.UNAVAILABLE
+
+
+class Overloaded(Error):
+ status = StatusCode.OVERLOADED
+
+
+class SchemeError(Error):
+ status = StatusCode.SCHEME_ERROR
+
+
+class GenericError(Error):
+ status = StatusCode.GENERIC_ERROR
+
+
+class BadSession(Error):
+ status = StatusCode.BAD_SESSION
+
+
+class Timeout(Error):
+ status = StatusCode.TIMEOUT
+
+
+class PreconditionFailed(Error):
+ status = StatusCode.PRECONDITION_FAILED
+
+
+class NotFound(Error):
+ status = StatusCode.NOT_FOUND
+
+
+class AlreadyExists(Error):
+ status = StatusCode.ALREADY_EXISTS
+
+
+class SessionExpired(Error):
+ status = StatusCode.SESSION_EXPIRED
+
+
+class Cancelled(Error):
+ status = StatusCode.CANCELLED
+
+
+class Undetermined(Error):
+ status = StatusCode.UNDETERMINED
+
+
+class Unsupported(Error):
+ status = StatusCode.UNSUPPORTED
+
+
+class SessionBusy(Error):
+ status = StatusCode.SESSION_BUSY
+
+
+class ExternalError(Error):
+ status = StatusCode.EXTERNAL_ERROR
+
+
+class SessionPoolEmpty(Error, queue.Empty):
+ status = StatusCode.SESSION_POOL_EMPTY
+
+
+class UnexpectedGrpcMessage(Error):
+ def __init__(self, message: str):
+ super().__init__(message)
+
+
+def _format_issues(issues: typing.Iterable[ydb_issue_message_pb2.IssueMessage]) -> str:
+ if not issues:
+ return ""
+
+ return " ,".join(text_format.MessageToString(issue, as_utf8=False, as_one_line=True) for issue in issues)
+
+
+def _format_response(response: ydb_operation_pb2.Operation) -> str:
+ fmt_issues = _format_issues(response.issues)
+ return f"{fmt_issues} (server_code: {response.status})"
+
+
+_success_status_codes = {StatusCode.STATUS_CODE_UNSPECIFIED, StatusCode.SUCCESS}
+_server_side_error_map = {
+ StatusCode.BAD_REQUEST: BadRequest,
+ StatusCode.UNAUTHORIZED: Unauthorized,
+ StatusCode.INTERNAL_ERROR: InternalError,
+ StatusCode.ABORTED: Aborted,
+ StatusCode.UNAVAILABLE: Unavailable,
+ StatusCode.OVERLOADED: Overloaded,
+ StatusCode.SCHEME_ERROR: SchemeError,
+ StatusCode.GENERIC_ERROR: GenericError,
+ StatusCode.TIMEOUT: Timeout,
+ StatusCode.BAD_SESSION: BadSession,
+ StatusCode.PRECONDITION_FAILED: PreconditionFailed,
+ StatusCode.ALREADY_EXISTS: AlreadyExists,
+ StatusCode.NOT_FOUND: NotFound,
+ StatusCode.SESSION_EXPIRED: SessionExpired,
+ StatusCode.CANCELLED: Cancelled,
+ StatusCode.UNDETERMINED: Undetermined,
+ StatusCode.UNSUPPORTED: Unsupported,
+ StatusCode.SESSION_BUSY: SessionBusy,
+ StatusCode.EXTERNAL_ERROR: ExternalError,
+}
+
+
+def _process_response(response_proto: ydb_operation_pb2.Operation) -> None:
+ if response_proto.status not in _success_status_codes:
+ exc_obj = _server_side_error_map.get(response_proto.status)
+ raise exc_obj(_format_response(response_proto), response_proto.issues)
diff --git a/contrib/python/ydb/py3/ydb/operation.py b/contrib/python/ydb/py3/ydb/operation.py
new file mode 100644
index 0000000000..6084ef0f18
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/operation.py
@@ -0,0 +1,101 @@
+# -*- coding: utf-8 -*-
+from . import issues
+from . import _apis
+
+
def _forget_operation_request(operation_id):
    """Build a ForgetOperationRequest for *operation_id*."""
    return _apis.ydb_operation.ForgetOperationRequest(id=operation_id)
+
+
def _forget_operation_response(rpc_state, response):  # pylint: disable=W0613
    # Response handler for ForgetOperation: raises a ydb error on failure status.
    issues._process_response(response)
+
+
def _cancel_operation_request(operation_id):
    """Build a CancelOperationRequest for *operation_id*."""
    return _apis.ydb_operation.CancelOperationRequest(id=operation_id)
+
+
def _cancel_operation_response(rpc_state, response):  # pylint: disable=W0613
    # Response handler for CancelOperation: raises a ydb error on failure status.
    issues._process_response(response)
+
+
def _get_operation_request(self):
    """Build a GetOperationRequest for an Operation-like object exposing ``id``."""
    return _apis.ydb_operation.GetOperationRequest(id=self.id)
+
+
class OperationClient(object):
    """Thin client over the operation service (cancel / forget)."""

    def __init__(self, driver):
        """:param driver: A driver instance used to execute the requests."""
        self._driver = driver

    def cancel(self, operation_id, settings=None):
        """Ask the server to cancel the operation with *operation_id*."""
        request = _cancel_operation_request(operation_id)
        return self._driver(
            request,
            _apis.OperationService.Stub,
            _apis.OperationService.CancelOperation,
            _cancel_operation_response,
            settings,
        )

    def forget(self, operation_id, settings=None):
        """Ask the server to forget the operation with *operation_id*."""
        request = _forget_operation_request(operation_id)
        return self._driver(
            request,
            _apis.OperationService.Stub,
            _apis.OperationService.ForgetOperation,
            _forget_operation_response,
            settings,
        )
+
+
class Operation(object):
    """Handle of a long-running server operation."""

    __slots__ = ("id", "_driver", "self_cls")

    def __init__(self, rpc_state, response, driver=None):  # pylint: disable=W0613
        """Validate the response status and remember the operation id."""
        # implement proper interface a bit later
        issues._process_response(response.operation)
        self.id = response.operation.id
        self._driver = driver
        # self.ready = operation.ready

    def __str__(self):
        return "<Operation %s>" % (self.id,)

    def __repr__(self):
        return str(self)

    def _ensure_implements(self):
        # Operations constructed without a driver cannot issue follow-up RPCs.
        if self._driver is None:
            raise ValueError("Operation doesn't implement request!")

    def cancel(self, settings=None):
        """Request server-side cancellation of this operation."""
        self._ensure_implements()
        request = _cancel_operation_request(self.id)
        return self._driver(
            request,
            _apis.OperationService.Stub,
            _apis.OperationService.CancelOperation,
            _cancel_operation_response,
            settings,
        )

    def forget(self, settings=None):
        """Request the server to forget this operation."""
        self._ensure_implements()
        request = _forget_operation_request(self.id)
        return self._driver(
            request,
            _apis.OperationService.Stub,
            _apis.OperationService.ForgetOperation,
            _forget_operation_response,
            settings,
        )

    def get(self, settings=None):
        """Fetch the current state of this operation as a new instance."""
        self._ensure_implements()
        return self._driver(
            _get_operation_request(self),
            _apis.OperationService.Stub,
            _apis.OperationService.GetOperation,
            self.__class__,
            settings,
            (self._driver,),
        )
diff --git a/contrib/python/ydb/py3/ydb/pool.py b/contrib/python/ydb/py3/ydb/pool.py
new file mode 100644
index 0000000000..1e75950ea8
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/pool.py
@@ -0,0 +1,504 @@
+# -*- coding: utf-8 -*-
+import abc
+import threading
+import logging
+from concurrent import futures
+import collections
+import random
+import typing
+
+from . import connection as connection_impl, issues, resolver, _utilities, tracing
+from abc import abstractmethod
+
+from .connection import Connection, EndpointKey
+
+logger = logging.getLogger(__name__)
+
+
class ConnectionsCache(object):
    """Thread-safe cache of ready connections, keyed by endpoint and node id.

    Keeps an optional set of "preferred" (same-location) connections that
    get() consults first, tracks outdated connections scheduled for closing,
    and notifies waiters (subscriptions) once a first connection appears.
    """

    def __init__(self, use_all_nodes=False, tracer=tracing.Tracer(None)):
        # NOTE(review): the default tracer is a single instance created at
        # import time and shared by every cache constructed without an
        # explicit tracer — confirm this sharing is intended.
        self.tracer = tracer
        self.lock = threading.RLock()
        self.connections = collections.OrderedDict()
        self.connections_by_node_id = collections.OrderedDict()
        self.outdated = collections.OrderedDict()
        self.subscriptions = set()
        self.preferred = collections.OrderedDict()
        self.logger = logging.getLogger(__name__)
        self.use_all_nodes = use_all_nodes
        # get() scans these dicts in order: all connections when use_all_nodes
        # is set, otherwise preferred (same-location) ones first.
        self.conn_lst_order = (self.connections,) if self.use_all_nodes else (self.preferred, self.connections)
        self.fast_fail_subscriptions = set()

    def add(self, connection, preferred=False):
        # Register a ready connection and wake up both kinds of waiters.
        if connection is None:
            return False

        # Let the connection remove itself from the cache on teardown.
        connection.add_cleanup_callback(self.remove)
        with self.lock:
            if preferred:
                self.preferred[connection.endpoint] = connection

            self.connections_by_node_id[connection.node_id] = connection
            self.connections[connection.endpoint] = connection
            subscriptions = list(self.subscriptions)
            self.subscriptions.clear()

        if len(self.connections) > 0:
            self.complete_discovery(None)

        for subscription in subscriptions:
            subscription.set_result(None)
        return True

    def _on_done_callback(self, subscription):
        """
        A done callback for the subscription future
        :param subscription: A subscription
        :return: None
        """
        with self.lock:
            try:
                self.subscriptions.remove(subscription)
            except KeyError:
                # Already removed by add(); nothing left to clean up.
                return subscription

    @property
    def size(self):
        # Number of usable (not yet outdated) connections.
        with self.lock:
            return len(self.connections) - len(self.outdated)

    def already_exists(self, endpoint):
        # True when a connection for this endpoint is already cached.
        with self.lock:
            return endpoint in self.connections

    def values(self):
        # Snapshot list of all cached connections.
        with self.lock:
            return list(self.connections.values())

    def make_outdated(self, connection):
        # Mark a connection for closing on the next cleanup_outdated() pass.
        with self.lock:
            self.outdated[connection.endpoint] = connection
            return self

    def cleanup_outdated(self):
        # Close every connection previously marked as outdated.
        with self.lock:
            outdated_connections = list(self.outdated.values())
            for outdated_connection in outdated_connections:
                outdated_connection.close()
            return self

    def cleanup(self):
        # Close every cached connection (used on pool shutdown).
        with self.lock:
            actual_connections = list(self.connections.values())
            for connection in actual_connections:
                connection.close()

    def complete_discovery(self, error):
        # Resolve fast-fail waiters: success when error is None, otherwise
        # propagate the discovery error to them.
        with self.lock:
            for subscription in self.fast_fail_subscriptions:
                if error is None:
                    subscription.set_result(None)
                else:
                    subscription.set_exception(error)

            self.fast_fail_subscriptions.clear()

    def add_fast_fail(self):
        # Future resolved by the next discovery round: succeeds immediately
        # when a connection is already cached, fails when discovery fails.
        with self.lock:
            subscription = futures.Future()
            if len(self.connections) > 0:
                subscription.set_result(None)
                return subscription

            self.fast_fail_subscriptions.add(subscription)
            return subscription

    def subscribe(self):
        # Future resolved once at least one connection becomes available.
        with self.lock:
            subscription = futures.Future()
            if len(self.connections) > 0:
                subscription.set_result(None)
                return subscription
            self.subscriptions.add(subscription)
            subscription.add_done_callback(self._on_done_callback)
            return subscription

    @tracing.with_trace()
    def get(self, preferred_endpoint: typing.Optional[EndpointKey] = None) -> Connection:
        # Pick a connection: exact node-id/endpoint match when requested,
        # otherwise rotate through the configured dict order.
        with self.lock:
            if preferred_endpoint is not None and preferred_endpoint.node_id in self.connections_by_node_id:
                return self.connections_by_node_id[preferred_endpoint.node_id]

            if preferred_endpoint is not None and preferred_endpoint.endpoint in self.connections:
                return self.connections[preferred_endpoint.endpoint]

            for conn_lst in self.conn_lst_order:
                try:
                    # popitem(last=False) + reinsert rotates the OrderedDict,
                    # yielding round-robin selection across calls.
                    endpoint, connection = conn_lst.popitem(last=False)
                    conn_lst[endpoint] = connection
                    tracing.trace(self.tracer, {"found_in_lists": True})
                    return connection
                except KeyError:
                    continue

            raise issues.ConnectionLost("Couldn't find valid connection")

    def remove(self, connection):
        # Drop the connection from every index; safe to call repeatedly.
        with self.lock:
            self.connections_by_node_id.pop(connection.node_id, None)
            self.preferred.pop(connection.endpoint, None)
            self.connections.pop(connection.endpoint, None)
            self.outdated.pop(connection.endpoint, None)
+
+
class Discovery(threading.Thread):
    def __init__(self, store, driver_config):
        """
        A timer thread that implements endpoints discovery logic

        :param store: A store with endpoints
        :param driver_config: An instance of DriverConfig
        """
        super(Discovery, self).__init__()
        self.logger = logger.getChild(self.__class__.__name__)
        self.condition = threading.Condition()
        self.daemon = True
        self._cache = store
        self._driver_config = driver_config
        self._resolver = resolver.DiscoveryEndpointsResolver(self._driver_config)
        self._base_discovery_interval = 60  # seconds; jittered in _discovery_interval
        self._ready_timeout = 4
        self._discovery_request_timeout = 2
        self._should_stop = threading.Event()
        self._max_size = 9  # cap on the number of cached connections
        self._base_emergency_retry_interval = 1
        # TLS endpoints are used iff the driver itself is configured for TLS.
        self._ssl_required = False
        if driver_config.root_certificates is not None or driver_config.secure_channel:
            self._ssl_required = True

    def discovery_debug_details(self):
        # Human-readable history of recent resolver errors.
        return self._resolver.debug_details()

    def _emergency_retry_interval(self):
        # Retry interval after a failed discovery round, with up to 2x jitter.
        return (1 + random.random()) * self._base_emergency_retry_interval

    def _discovery_interval(self):
        # Regular rediscovery interval, with up to 2x jitter.
        return (1 + random.random()) * self._base_discovery_interval

    def notify_disconnected(self):
        # Ask the discovery loop to re-run as soon as possible.
        self._send_wake_up()

    def _send_wake_up(self):
        # Best effort: when the discovery loop currently holds the condition
        # (i.e. is busy), skip the notification instead of blocking the caller.
        acquired = self.condition.acquire(blocking=False)

        if not acquired:
            return

        self.condition.notify_all()
        self.condition.release()

    def _handle_empty_database(self):
        # Without a configured database only a single direct connection to
        # the configured endpoint is maintained.
        if self._cache.size > 0:
            return True

        return self._cache.add(
            connection_impl.Connection.ready_factory(
                self._driver_config.endpoint, self._driver_config, self._ready_timeout
            )
        )

    def execute_discovery(self):
        # One discovery round; returns True when at least one usable
        # connection remains in the cache afterwards.
        if self._driver_config.database is None:
            return self._handle_empty_database()

        with self._resolver.context_resolve() as resolve_details:
            if resolve_details is None:
                return False

            resolved_endpoints = set(
                endpoint
                for resolved_endpoint in resolve_details.endpoints
                for endpoint, endpoint_options in resolved_endpoint.endpoints_with_options()
            )
            # Cached connections whose endpoint disappeared from the
            # discovery results are marked outdated and closed below.
            for cached_endpoint in self._cache.values():
                if cached_endpoint.endpoint not in resolved_endpoints:
                    self._cache.make_outdated(cached_endpoint)

            for resolved_endpoint in resolve_details.endpoints:
                # Keep only endpoints whose TLS mode matches the driver's.
                if self._ssl_required and not resolved_endpoint.ssl:
                    continue

                if not self._ssl_required and resolved_endpoint.ssl:
                    continue

                preferred = resolve_details.self_location == resolved_endpoint.location

                for (
                    endpoint,
                    endpoint_options,
                ) in resolved_endpoint.endpoints_with_options():
                    if self._cache.size >= self._max_size or self._cache.already_exists(endpoint):
                        continue

                    ready_connection = connection_impl.Connection.ready_factory(
                        endpoint,
                        self._driver_config,
                        self._ready_timeout,
                        endpoint_options=endpoint_options,
                    )
                    self._cache.add(ready_connection, preferred)

            self._cache.cleanup_outdated()

            return self._cache.size > 0

    def stop(self):
        # Signal the loop to exit and wake it up immediately.
        self._should_stop.set()
        self._send_wake_up()

    def run(self):
        with self.condition:
            while True:
                if self._should_stop.is_set():
                    break

                successful = self.execute_discovery()
                if successful:
                    self._cache.complete_discovery(None)
                else:
                    self._cache.complete_discovery(issues.ConnectionFailure(str(self.discovery_debug_details())))

                if self._should_stop.is_set():
                    break

                interval = self._discovery_interval() if successful else self._emergency_retry_interval()
                self.condition.wait(interval)

            # Loop exited: close every cached connection before terminating.
            self._cache.cleanup()
            self.logger.info("Successfully terminated discovery process")
+
+
class IConnectionPool(abc.ABC):
    @abstractmethod
    def __init__(self, driver_config):
        """
        An object that encapsulates discovery logic and provides ability to execute user requests
        on discovered endpoints.

        :param driver_config: An instance of DriverConfig
        """
        pass

    @abstractmethod
    def stop(self, timeout=10):
        """
        Stops underlying discovery process and cleanups

        :param timeout: A timeout to wait for stop completion
        :return: None
        """
        pass

    @abstractmethod
    def wait(self, timeout=None, fail_fast=False):
        """
        Waits until endpoints are available to serve user requests

        :param timeout: A timeout to wait in seconds
        :param fail_fast: Should wait fail fast?
        :return: None
        """

    @abstractmethod
    def discovery_debug_details(self):
        """
        Returns a debug string describing the last discovery errors

        :return: A string
        """
        pass

    @abstractmethod
    def __call__(
        self,
        request,
        stub,
        rpc_name,
        wrap_result=None,
        settings=None,
        wrap_args=(),
        preferred_endpoint=None,
    ):
        """
        Sends request constructed by client library

        :param request: A request constructed by client
        :param stub: A stub instance to wrap channel
        :param rpc_name: A name of RPC to be executed
        :param wrap_result: A callable that intercepts call and wraps received response
        :param settings: An instance of BaseRequestSettings that can be used
            for RPC metadata construction
        :param wrap_args: Any arguments to be passed into wrap_result callable
        :param preferred_endpoint: A preferred endpoint to execute the RPC on
        :return: A result of computation
        """
        pass
+
+
class ConnectionPool(IConnectionPool):
    def __init__(self, driver_config):
        """
        An object that encapsulates discovery logic and provides ability to execute user requests
        on discovered endpoints.

        :param driver_config: An instance of DriverConfig
        """
        self._driver_config = driver_config
        self._store = ConnectionsCache(driver_config.use_all_nodes, driver_config.tracer)
        self.tracer = driver_config.tracer
        # A direct (pre-discovery) channel to the configured endpoint.
        self._grpc_init = connection_impl.Connection(self._driver_config.endpoint, self._driver_config)
        self._discovery_thread = Discovery(self._store, self._driver_config)
        self._discovery_thread.start()
        self._stopped = False
        self._stop_guard = threading.Lock()

    def stop(self, timeout=10):
        """
        Stops underlying discovery process and cleanups; idempotent.

        :param timeout: A timeout to wait for stop completion
        :return: None
        """
        with self._stop_guard:
            if self._stopped:
                return

            self._stopped = True
            self._discovery_thread.stop()
            self._grpc_init.close()
            self._discovery_thread.join(timeout)

    def async_wait(self, fail_fast=False):
        """
        Returns a future to subscribe on endpoints availability.

        :param fail_fast: When True the future also fails as soon as a
            discovery attempt fails, instead of waiting for a success
        :return: A concurrent.futures.Future instance.
        """
        if fail_fast:
            return self._store.add_fast_fail()
        return self._store.subscribe()

    def wait(self, timeout=None, fail_fast=False):
        """
        Waits until endpoints are available to serve user requests

        :param timeout: A timeout to wait in seconds
        :param fail_fast: Raise the discovery error as soon as an attempt fails
        :return: None
        """
        if fail_fast:
            self._store.add_fast_fail().result(timeout)
        else:
            self._store.subscribe().result(timeout)

    def _on_disconnected(self, connection):
        """
        Removes bad discovered endpoint and triggers discovery process

        :param connection: A disconnected connection
        :return: None
        """
        connection.close()
        self._discovery_thread.notify_disconnected()

    def discovery_debug_details(self):
        """Returns debug details about the last discovery attempts."""
        return self._discovery_thread.discovery_debug_details()

    @tracing.with_trace()
    def __call__(
        self,
        request,
        stub,
        rpc_name,
        wrap_result=None,
        settings=None,
        wrap_args=(),
        preferred_endpoint=None,
    ):
        """
        Synchronously sends request constructed by client library

        :param request: A request constructed by client
        :param stub: A stub instance to wrap channel
        :param rpc_name: A name of RPC to be executed
        :param wrap_result: A callable that intercepts call and wraps received response
        :param settings: An instance of BaseRequestSettings that can be used
            for RPC metadata construction
        :param wrap_args: Any arguments to be passed into wrap_result callable
        :param preferred_endpoint: A preferred endpoint to execute the RPC on

        :return: A result of computation
        """
        if self._stopped:
            raise issues.Error("Driver was stopped")

        tracing.trace(self.tracer, {"request": request, "stub": stub, "rpc_name": rpc_name})
        try:
            connection = self._store.get(preferred_endpoint)
        except Exception:
            # No connection available: kick discovery, then let caller retry.
            self._discovery_thread.notify_disconnected()
            raise

        res = connection(
            request,
            stub,
            rpc_name,
            wrap_result,
            settings,
            wrap_args,
            lambda: self._on_disconnected(connection),
        )
        tracing.trace(self.tracer, {"response": res}, trace_level=tracing.TraceLevel.DEBUG)
        return res

    @_utilities.wrap_async_call_exceptions
    def future(
        self,
        request,
        stub,
        rpc_name,
        wrap_result=None,
        settings=None,
        wrap_args=(),
        preferred_endpoint=None,
    ):
        """
        Sends request constructed by client

        :param request: A request constructed by client
        :param stub: A stub instance to wrap channel
        :param rpc_name: A name of RPC to be executed
        :param wrap_result: A callable that intercepts call and wraps received response
        :param settings: An instance of BaseRequestSettings that can be used
            for RPC metadata construction
        :param wrap_args: Any arguments to be passed into wrap_result callable
        :param preferred_endpoint: A preferred endpoint to execute the RPC on

        :return: A future of computation
        """
        try:
            connection = self._store.get(preferred_endpoint)
        except Exception:
            self._discovery_thread.notify_disconnected()
            raise

        return connection.future(
            request,
            stub,
            rpc_name,
            wrap_result,
            settings,
            wrap_args,
            lambda: self._on_disconnected(connection),
        )

    def __enter__(self):
        """
        In some cases (scripts, for example) this context manager can be used.

        :return: self
        """
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Ensure the discovery thread and channels are shut down on exit.
        self.stop()
diff --git a/contrib/python/ydb/py3/ydb/resolver.py b/contrib/python/ydb/py3/ydb/resolver.py
new file mode 100644
index 0000000000..b795af928c
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/resolver.py
@@ -0,0 +1,216 @@
+# -*- coding: utf-8 -*-
+from __future__ import annotations
+
+import contextlib
+import logging
+import threading
+import random
+import itertools
+import typing
+from . import connection as conn_impl, driver, issues, settings as settings_impl, _apis
+
+
+# Workaround for good IDE and universal for runtime
+if typing.TYPE_CHECKING:
+ from ._grpc.v4.protos import ydb_discovery_pb2
+else:
+ from ._grpc.common.protos import ydb_discovery_pb2
+
+
+logger = logging.getLogger(__name__)
+
+
class EndpointInfo(object):
    """Native view of a discovered endpoint protobuf message."""

    __slots__ = (
        "address",
        "endpoint",
        "location",
        "port",
        "ssl",
        "ipv4_addrs",
        "ipv6_addrs",
        "ssl_target_name_override",
        "node_id",
    )

    def __init__(self, endpoint_info: ydb_discovery_pb2.EndpointInfo):
        """Copy the relevant fields out of the protobuf message."""
        self.address = endpoint_info.address
        self.port = endpoint_info.port
        self.endpoint = "%s:%s" % (endpoint_info.address, endpoint_info.port)
        self.location = endpoint_info.location
        self.ssl = endpoint_info.ssl
        self.ipv4_addrs = tuple(endpoint_info.ip_v4)
        self.ipv6_addrs = tuple(endpoint_info.ip_v6)
        self.ssl_target_name_override = endpoint_info.ssl_target_name_override
        self.node_id = endpoint_info.node_id

    def endpoints_with_options(self) -> typing.Generator[typing.Tuple[str, conn_impl.EndpointOptions], None, None]:
        """Yield (address, EndpointOptions) pairs to dial for this endpoint.

        Prefers the resolved IPv6/IPv4 addresses when the server supplied
        them; falls back to the plain host:port endpoint otherwise.
        """
        override = None
        if self.ssl:
            if self.ssl_target_name_override:
                override = self.ssl_target_name_override
            elif self.ipv6_addrs or self.ipv4_addrs:
                # Dialing a raw IP: keep certificate checks bound to the hostname.
                override = self.address

        options = conn_impl.EndpointOptions(ssl_target_name_override=override, node_id=self.node_id)

        if not (self.ipv6_addrs or self.ipv4_addrs):
            yield (self.endpoint, options)
            return

        for addr in self.ipv6_addrs:
            yield ("ipv6:[%s]:%s" % (addr, self.port), options)
        for addr in self.ipv4_addrs:
            yield ("ipv4:%s:%s" % (addr, self.port), options)

    def __str__(self):
        return "<Endpoint %s, location %s, ssl: %s>" % (
            self.endpoint,
            self.location,
            self.ssl,
        )

    def __repr__(self):
        return str(self)

    def __hash__(self):
        # Identity is the host:port endpoint string.
        return hash(self.endpoint)

    def __eq__(self, other):
        other_endpoint = getattr(other, "endpoint", None)
        if other_endpoint is None:
            return False
        return self.endpoint == other_endpoint
+
+
def _list_endpoints_request_factory(connection_params: driver.DriverConfig) -> _apis.ydb_discovery.ListEndpointsRequest:
    """Build a ListEndpointsRequest for the configured database."""
    return _apis.ydb_discovery.ListEndpointsRequest(database=connection_params.database)
+
+
class DiscoveryResult(object):
    def __init__(self, self_location: str, endpoints: "list[EndpointInfo]"):
        """
        :param self_location: Location of the node that served the request
        :param endpoints: Endpoints ordered for connection attempts
        """
        self.self_location = self_location
        self.endpoints = endpoints

    def __str__(self):
        return "DiscoveryResult <self_location: %s, endpoints %s>" % (
            self.self_location,
            self.endpoints,
        )

    def __repr__(self):
        return self.__str__()

    @classmethod
    def from_response(
        cls,
        rpc_state: conn_impl._RpcState,
        response: ydb_discovery_pb2.ListEndpointsResponse,
        use_all_nodes: bool = False,
    ) -> DiscoveryResult:
        """
        Build a DiscoveryResult from a ListEndpoints response.

        Endpoints are de-duplicated and shuffled; unless use_all_nodes is
        set, endpoints in the caller's own location are ordered first.
        """
        issues._process_response(response.operation)
        message = _apis.ydb_discovery.ListEndpointsResult()
        response.operation.result.Unpack(message)
        unique_local_endpoints = set()
        unique_different_endpoints = set()
        # EndpointInfo hashes by its endpoint string, so the sets
        # de-duplicate endpoints reported multiple times.
        for info in message.endpoints:
            if info.location == message.self_location:
                unique_local_endpoints.add(EndpointInfo(info))
            else:
                unique_different_endpoints.add(EndpointInfo(info))

        result = []
        unique_local_endpoints = list(unique_local_endpoints)
        unique_different_endpoints = list(unique_different_endpoints)
        if use_all_nodes:
            # No locality preference: mix all endpoints together.
            result.extend(unique_local_endpoints)
            result.extend(unique_different_endpoints)
            random.shuffle(result)
        else:
            # Prefer local endpoints: shuffle within each group, local first.
            random.shuffle(unique_local_endpoints)
            random.shuffle(unique_different_endpoints)
            result.extend(unique_local_endpoints)
            result.extend(unique_different_endpoints)

        return cls(message.self_location, result)
+
+
class DiscoveryEndpointsResolver(object):
    """Resolves the set of database endpoints via the discovery service.

    Cycles through the configured bootstrap endpoints, asks each for the
    endpoint list and keeps a bounded history of errors for debugging.
    """

    def __init__(self, driver_config: driver.DriverConfig):
        """
        :param driver_config: An instance of DriverConfig
        """
        self.logger = logger.getChild(self.__class__.__name__)
        self._driver_config = driver_config
        self._ready_timeout = getattr(self._driver_config, "discovery_request_timeout", 10)
        self._lock = threading.Lock()
        self._debug_details_history_size = 20
        self._debug_details_items = []
        self._endpoints = []
        self._endpoints.append(driver_config.endpoint)
        self._endpoints.extend(driver_config.endpoints)
        random.shuffle(self._endpoints)
        # Round-robin over bootstrap endpoints across resolve attempts.
        self._endpoints_iter = itertools.cycle(self._endpoints)

    def _add_debug_details(self, message: str, *args):
        """Log *message* and append it to the bounded error history."""
        self.logger.debug(message, *args)
        message = message % args
        with self._lock:
            self._debug_details_items.append(message)
            if len(self._debug_details_items) > self._debug_details_history_size:
                # Fix: drop the OLDEST entry. The previous pop() removed the
                # entry just appended, freezing the history at its first
                # `_debug_details_history_size` messages forever.
                self._debug_details_items.pop(0)

    def debug_details(self) -> str:
        """
        Returns last resolver errors as a debug string.
        """
        with self._lock:
            return "\n".join(self._debug_details_items)

    def resolve(self) -> typing.ContextManager[typing.Optional[DiscoveryResult]]:
        # Convenience wrapper around context_resolve(); the underlying
        # connection is already closed by the time the result is returned,
        # which is fine because DiscoveryResult is plain data.
        with self.context_resolve() as result:
            return result

    @contextlib.contextmanager
    def context_resolve(self) -> typing.ContextManager[typing.Optional[DiscoveryResult]]:
        """Yield a DiscoveryResult, or None when resolution failed.

        The discovery connection is always closed when the context exits.
        """
        self.logger.debug("Preparing initial endpoint to resolve endpoints")
        endpoint = next(self._endpoints_iter)
        initial = conn_impl.Connection.ready_factory(endpoint, self._driver_config, ready_timeout=self._ready_timeout)
        if initial is None:
            self._add_debug_details(
                'Failed to establish connection to YDB discovery endpoint: "%s". Check endpoint correctness.' % endpoint
            )
            yield
            return

        self.logger.debug("Resolving endpoints for database %s", self._driver_config.database)
        try:
            resolved = initial(
                _list_endpoints_request_factory(self._driver_config),
                _apis.DiscoveryService.Stub,
                _apis.DiscoveryService.ListEndpoints,
                DiscoveryResult.from_response,
                settings=settings_impl.BaseRequestSettings().with_timeout(self._ready_timeout),
                wrap_args=(self._driver_config.use_all_nodes,),
            )

            self._add_debug_details(
                "Resolved endpoints for database %s: %s",
                self._driver_config.database,
                resolved,
            )

            yield resolved
        except Exception as e:
            self._add_debug_details(
                'Failed to resolve endpoints for database %s. Endpoint: "%s". Error details:\n %s',
                self._driver_config.database,
                endpoint,
                e,
            )

            yield
        finally:
            initial.close()
diff --git a/contrib/python/ydb/py3/ydb/scheme.py b/contrib/python/ydb/py3/ydb/scheme.py
new file mode 100644
index 0000000000..6019c763e5
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/scheme.py
@@ -0,0 +1,510 @@
+# -*- coding: utf-8 -*-
+import abc
+import enum
+from abc import abstractmethod
+from . import issues, operation, settings as settings_impl, _apis
+
+
@enum.unique
class SchemeEntryType(enum.IntEnum):
    """
    Enumerates all available entry types.
    """

    TYPE_UNSPECIFIED = 0
    DIRECTORY = 1
    TABLE = 2
    PERS_QUEUE_GROUP = 3
    DATABASE = 4
    RTMR_VOLUME = 5
    BLOCK_STORE_VOLUME = 6
    COORDINATION_NODE = 7
    COLUMN_TABLE = 13
    SEQUENCE = 15
    REPLICATION = 16
    TOPIC = 17

    @classmethod
    def _missing_(cls, value):
        # Unknown server-side values degrade to TYPE_UNSPECIFIED instead of
        # raising, so newer servers do not break older clients.
        return cls.TYPE_UNSPECIFIED

    @staticmethod
    def is_table(entry):
        """Deprecated alias of is_row_table: True for a row table entry."""
        return entry == SchemeEntryType.TABLE

    @staticmethod
    def is_any_table(entry):
        """True when *entry* is a table of any kind (row or column)."""
        return entry in (SchemeEntryType.TABLE, SchemeEntryType.COLUMN_TABLE)

    @staticmethod
    def is_column_table(entry):
        """True when *entry* is a column table."""
        return entry == SchemeEntryType.COLUMN_TABLE

    @staticmethod
    def is_row_table(entry):
        """True when *entry* is a row table (same as is_table)."""
        return entry == SchemeEntryType.TABLE

    @staticmethod
    def is_directory(entry):
        """True when *entry* is a directory."""
        return entry == SchemeEntryType.DIRECTORY

    @staticmethod
    def is_database(entry):
        """True when *entry* is a database."""
        return entry == SchemeEntryType.DATABASE

    @staticmethod
    def is_coordination_node(entry):
        """True when *entry* is a coordination node."""
        return entry == SchemeEntryType.COORDINATION_NODE

    @staticmethod
    def is_directory_or_database(entry):
        """True when *entry* is either a directory or a database."""
        return entry in (SchemeEntryType.DATABASE, SchemeEntryType.DIRECTORY)
+
+
class SchemeEntry(object):
    """A single entry of the database scheme (table, directory, etc.)."""

    __slots__ = (
        "name",
        "owner",
        "type",
        "effective_permissions",
        "permissions",
        "size_bytes",
    )

    def __init__(self, name, owner, type, effective_permissions, permissions, size_bytes, *args, **kwargs):
        """
        :param name: Name of the entry
        :param owner: Owner of the entry
        :param type: A SchemeEntryType value
        :param effective_permissions: Effective permissions applied to the entry
        :param permissions: Permissions applied to the entry
        :param size_bytes: Size of the entry in bytes
        """
        self.name = name
        self.owner = owner
        self.type = type
        self.effective_permissions = effective_permissions
        self.permissions = permissions
        self.size_bytes = size_bytes

    def is_directory(self):
        """Return True when this entry is a directory."""
        return SchemeEntryType.is_directory(self.type)

    def is_table(self):
        """Return True when this entry is a row table (alias of is_row_table)."""
        return SchemeEntryType.is_table(self.type)

    def is_column_table(self):
        """Return True when this entry is a column table."""
        return SchemeEntryType.is_column_table(self.type)

    def is_row_table(self):
        """Return True when this entry is a row table (alias of is_table)."""
        return SchemeEntryType.is_table(self.type)

    def is_any_table(self):
        """Return True when this entry is a table of any kind."""
        return SchemeEntryType.is_any_table(self.type)

    def is_database(self):
        """Return True when this entry is a database."""
        return SchemeEntryType.is_database(self.type)

    def is_directory_or_database(self):
        """Return True when this entry is a directory or a database."""
        return SchemeEntryType.is_directory_or_database(self.type)

    def is_coordination_node(self):
        """Return True when this entry is a coordination node."""
        return SchemeEntryType.is_coordination_node(self.type)
+
+
class Directory(SchemeEntry):
    """A directory scheme entry carrying its child entries.

    The size_bytes of a directory is reported as 0.
    """

    __slots__ = ("children",)

    def __init__(self, name, owner, type, effective_permissions, permissions, children, *args, **kwargs):
        """
        :param children: A sequence of child SchemeEntry objects
        """
        super(Directory, self).__init__(name, owner, type, effective_permissions, permissions, 0)
        self.children = children
+
+
def _describe_path_request_factory(path):
    """Build a DescribePathRequest for *path*."""
    return _apis.ydb_scheme.DescribePathRequest(path=path)
+
+
def _list_directory_request_factory(path):
    """Build a ListDirectoryRequest for *path*."""
    return _apis.ydb_scheme.ListDirectoryRequest(path=path)
+
+
def _remove_directory_request_factory(path):
    """Build a RemoveDirectoryRequest for *path*."""
    return _apis.ydb_scheme.RemoveDirectoryRequest(path=path)
+
+
def _make_directory_request_factory(path):
    """Build a MakeDirectoryRequest for *path*."""
    return _apis.ydb_scheme.MakeDirectoryRequest(path=path)
+
+
class MakeDirectorySettings(settings_impl.BaseRequestSettings):
    # Request settings for make_directory; adds nothing to the base settings.
    pass
+
+
class RemoveDirectorySettings(settings_impl.BaseRequestSettings):
    # Request settings for remove_directory; adds nothing to the base settings.
    pass
+
+
class ListDirectorySettings(settings_impl.BaseRequestSettings):
    # Request settings for list_directory; adds nothing to the base settings.
    pass
+
+
class DescribePathSettings(settings_impl.BaseRequestSettings):
    # Request settings for describe_path; adds nothing to the base settings.
    pass
+
+
class ModifyPermissionsSettings(settings_impl.BaseRequestSettings):
    """Builder-style settings accumulating a list of permission actions."""

    def __init__(self):
        super(ModifyPermissionsSettings, self).__init__()
        self._pb = _apis.ydb_scheme.ModifyPermissionsRequest()

    def grant_permissions(self, subject, permission_names):
        """Append a grant action for *subject*; returns self for chaining."""
        action = self._pb.actions.add()
        action.grant.MergeFrom(Permissions(subject, permission_names).to_pb())
        return self

    def revoke_permissions(self, subject, permission_names):
        """Append a revoke action for *subject*; returns self for chaining."""
        action = self._pb.actions.add()
        action.revoke.MergeFrom(Permissions(subject, permission_names).to_pb())
        return self

    def set_permissions(self, subject, permission_names):
        """Append an action replacing *subject*'s permissions; returns self."""
        action = self._pb.actions.add()
        action.set.MergeFrom(Permissions(subject, permission_names).to_pb())
        return self

    def change_owner(self, owner):
        """Append an owner-change action; returns self for chaining."""
        action = self._pb.actions.add()
        action.change_owner = owner
        return self

    def clear_permissions(self):
        """Request clearing of all existing permissions; returns self."""
        self._pb.clear_permissions = True
        return self

    def to_pb(self):
        """Return the accumulated ModifyPermissionsRequest protobuf."""
        return self._pb
+
+
class Permissions(object):
    """A subject together with the permission names granted to it."""

    __slots__ = ("subject", "permission_names")

    def __init__(self, subject, permission_names):
        """
        :param subject: The subject the permissions apply to
        :param permission_names: An iterable of permission names
        """
        self.subject = subject
        self.permission_names = permission_names

    def to_pb(self):
        """Serialize to the ydb_scheme.Permissions protobuf."""
        message = _apis.ydb_scheme.Permissions()
        message.subject = self.subject
        message.permission_names.extend(self.permission_names)
        return message
+
+
+def _modify_permissions_request_factory(path, settings):
+ """
+ Constructs modify permissions request
+ :param path: A path to apply permissions
+ :param settings: An instance of ModifyPermissionsSettings
+ :return: A constructed request
+ """
+ modify_permissions_request = settings.to_pb()
+ modify_permissions_request.path = path
+ return modify_permissions_request
+
+
+def _wrap_permissions(permissions):
+ """
+ Wraps permissions protocol buffers into native Python objects
+ :param permissions: A protocol buffer representation of permissions
+ :return: A iterable of permissions
+ """
+ return tuple(Permissions(permission.subject, permission.permission_names) for permission in permissions)
+
+
+def _wrap_scheme_entry(entry_pb, scheme_entry_cls=None, *args, **kwargs):
+ """
+ Wraps scheme entry into native Python objects.
+ :param entry_pb: A protocol buffer representation of a scheme entry
+ :param scheme_entry_cls: A native Python class that represents scheme entry (
+ by default that is generic SchemeEntry)
+ :param args: A list of optional arguments
+    :param kwargs: A dictionary of optional keyword arguments
+    :return: A native Python representation of the scheme entry
+ """
+ scheme_entry_cls = SchemeEntry if scheme_entry_cls is None else scheme_entry_cls
+ return scheme_entry_cls(
+ entry_pb.name,
+ entry_pb.owner,
+ SchemeEntryType(entry_pb.type),
+ _wrap_permissions(entry_pb.effective_permissions),
+ _wrap_permissions(entry_pb.permissions),
+ entry_pb.size_bytes,
+ *args,
+ **kwargs
+ )
+
+
+def _wrap_list_directory_response(rpc_state, response):
+ """
+ Wraps list directory response
+ :param response: A list directory response
+ :return: A directory
+ """
+ issues._process_response(response.operation)
+ message = _apis.ydb_scheme.ListDirectoryResult()
+ response.operation.result.Unpack(message)
+ children = []
+ supported_items = set(i.value for i in SchemeEntryType)
+ for children_item in message.children:
+ if children_item.type not in supported_items:
+ continue
+
+ children.append(_wrap_scheme_entry(children_item))
+
+ return Directory(
+ message.self.name,
+ message.self.owner,
+ SchemeEntryType(message.self.type),
+ _wrap_permissions(message.self.effective_permissions),
+ _wrap_permissions(message.self.permissions),
+ tuple(children),
+ )
+
+
+def _wrap_describe_path_response(rpc_state, response):
+ issues._process_response(response.operation)
+ message = _apis.ydb_scheme.DescribePathResult()
+ response.operation.result.Unpack(message)
+ return _wrap_scheme_entry(message.self)
+
+
+class ISchemeClient(abc.ABC):
+ @abstractmethod
+ def __init__(self, driver):
+ pass
+
+ @abstractmethod
+ def make_directory(self, path, settings):
+ pass
+
+ @abstractmethod
+ def remove_directory(self, path, settings):
+ pass
+
+ @abstractmethod
+ def list_directory(self, path, settings):
+ pass
+
+ @abstractmethod
+ def describe_path(self, path, settings):
+ pass
+
+ @abstractmethod
+ def modify_permissions(self, path, settings):
+ """
+ Modifies permissions for provided scheme entry
+
+ :param path: A path of scheme entry
+ :param settings: An instance of ModifyPermissionsSettings
+
+        :return: An operation on success, or an exception in case of failure
+ """
+ pass
+
+
+class BaseSchemeClient(ISchemeClient):
+ __slots__ = ("_driver",)
+
+ def __init__(self, driver):
+ self._driver = driver
+
+ def make_directory(self, path, settings=None):
+ return self._driver(
+ _make_directory_request_factory(path),
+ _apis.SchemeService.Stub,
+ _apis.SchemeService.MakeDirectory,
+ operation.Operation,
+ settings,
+ )
+
+ def remove_directory(self, path, settings=None):
+ return self._driver(
+ _remove_directory_request_factory(path),
+ _apis.SchemeService.Stub,
+ _apis.SchemeService.RemoveDirectory,
+ operation.Operation,
+ settings,
+ )
+
+ def list_directory(self, path, settings=None):
+ return self._driver(
+ _list_directory_request_factory(path),
+ _apis.SchemeService.Stub,
+ _apis.SchemeService.ListDirectory,
+ _wrap_list_directory_response,
+ settings,
+ )
+
+ def describe_path(self, path, settings=None):
+ return self._driver(
+ _describe_path_request_factory(path),
+ _apis.SchemeService.Stub,
+ _apis.SchemeService.DescribePath,
+ _wrap_describe_path_response,
+ settings,
+ )
+
+ def modify_permissions(self, path, settings):
+ """
+ Modifies permissions for provided scheme entry
+
+ :param path: A path of scheme entry
+ :param settings: An instance of ModifyPermissionsSettings
+
+        :return: An operation on success, or an exception in case of failure
+ """
+ return self._driver(
+ _modify_permissions_request_factory(path, settings),
+ _apis.SchemeService.Stub,
+ _apis.SchemeService.ModifyPermissions,
+ operation.Operation,
+ settings,
+ )
+
+
+class SchemeClient(BaseSchemeClient):
+ def async_make_directory(self, path, settings=None):
+ return self._driver.future(
+ _make_directory_request_factory(path),
+ _apis.SchemeService.Stub,
+ _apis.SchemeService.MakeDirectory,
+ operation.Operation,
+ settings,
+ )
+
+ def async_remove_directory(self, path, settings=None):
+ return self._driver.future(
+ _remove_directory_request_factory(path),
+ _apis.SchemeService.Stub,
+ _apis.SchemeService.RemoveDirectory,
+ operation.Operation,
+ settings,
+ )
+
+ def async_list_directory(self, path, settings=None):
+ return self._driver.future(
+ _list_directory_request_factory(path),
+ _apis.SchemeService.Stub,
+ _apis.SchemeService.ListDirectory,
+ _wrap_list_directory_response,
+ settings,
+ )
+
+ def async_describe_path(self, path, settings=None):
+ return self._driver.future(
+ _describe_path_request_factory(path),
+ _apis.SchemeService.Stub,
+ _apis.SchemeService.DescribePath,
+ _wrap_describe_path_response,
+ settings,
+ )
+
+ def async_modify_permissions(self, path, settings):
+ """
+ Modifies permissions for provided scheme entry
+
+ :param path: A path of scheme entry
+ :param settings: An instance of ModifyPermissionsSettings
+
+        :return: A future of the computation
+ """
+ return self._driver.future(
+ _modify_permissions_request_factory(path, settings),
+ _apis.SchemeService.Stub,
+ _apis.SchemeService.ModifyPermissions,
+ operation.Operation,
+ settings,
+ )
diff --git a/contrib/python/ydb/py3/ydb/scripting.py b/contrib/python/ydb/py3/ydb/scripting.py
new file mode 100644
index 0000000000..595f4e1695
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/scripting.py
@@ -0,0 +1,104 @@
+import typing
+
+# Workaround: import versioned protobuf stubs for IDE type checking, common stubs at runtime
+if typing.TYPE_CHECKING:
+ from ._grpc.v4.protos import ydb_scripting_pb2
+ from ._grpc.v4 import ydb_scripting_v1_pb2_grpc
+else:
+ from ._grpc.common.protos import ydb_scripting_pb2
+ from ._grpc.common import ydb_scripting_v1_pb2_grpc
+
+
+from . import issues, convert, settings
+
+
+class TypedParameters(object):
+ def __init__(self, parameters_types, parameters_values):
+ self.parameters_types = parameters_types
+ self.parameters_values = parameters_values
+
+
+class ScriptingClientSettings(object):
+ def __init__(self):
+ self._native_date_in_result_sets = False
+ self._native_datetime_in_result_sets = False
+
+ def with_native_date_in_result_sets(self, enabled):
+ self._native_date_in_result_sets = enabled
+ return self
+
+ def with_native_datetime_in_result_sets(self, enabled):
+ self._native_datetime_in_result_sets = enabled
+ return self
+
+
+class ExplainYqlScriptSettings(settings.BaseRequestSettings):
+ MODE_UNSPECIFIED = 0
+ MODE_PARSE = 1
+ MODE_VALIDATE = 2
+ MODE_EXPLAIN = 3
+
+ def __init__(self):
+ super(ExplainYqlScriptSettings, self).__init__()
+ self.mode = False
+
+ def with_mode(self, val):
+ self.mode = val
+ return self
+
+
+def _execute_yql_query_request_factory(script, tp=None, settings=None):
+ params = None if tp is None else convert.parameters_to_pb(tp.parameters_types, tp.parameters_values)
+ return ydb_scripting_pb2.ExecuteYqlRequest(script=script, parameters=params)
+
+
+class YqlQueryResult(object):
+ def __init__(self, result, scripting_client_settings=None):
+ self.result_sets = convert.ResultSets(result.result_sets, scripting_client_settings)
+
+
+class YqlExplainResult(object):
+ def __init__(self, result):
+ self.plan = result.plan
+
+
+def _wrap_response(rpc_state, response, scripting_client_settings):
+ issues._process_response(response.operation)
+ message = ydb_scripting_pb2.ExecuteYqlResult()
+ response.operation.result.Unpack(message)
+ return YqlQueryResult(message)
+
+
+def _wrap_explain_response(rpc_state, response):
+ issues._process_response(response.operation)
+ message = ydb_scripting_pb2.ExplainYqlResult()
+ response.operation.result.Unpack(message)
+ return YqlExplainResult(message)
+
+
+class ScriptingClient(object):
+ def __init__(self, driver, scripting_client_settings=None):
+ self.driver = driver
+ self.scripting_client_settings = (
+ scripting_client_settings if scripting_client_settings is not None else ScriptingClientSettings()
+ )
+
+ def execute_yql(self, script, typed_parameters=None, settings=None):
+ request = _execute_yql_query_request_factory(script, typed_parameters, settings)
+ return self.driver(
+ request,
+ ydb_scripting_v1_pb2_grpc.ScriptingServiceStub,
+ "ExecuteYql",
+ _wrap_response,
+ settings=settings,
+ wrap_args=(self.scripting_client_settings,),
+ )
+
+ def explain_yql(self, script, settings=None):
+ return self.driver(
+ ydb_scripting_pb2.ExplainYqlRequest(script=script, mode=settings.mode),
+ ydb_scripting_v1_pb2_grpc.ScriptingServiceStub,
+ "ExplainYql",
+ _wrap_explain_response,
+ settings=settings,
+ )
diff --git a/contrib/python/ydb/py3/ydb/settings.py b/contrib/python/ydb/py3/ydb/settings.py
new file mode 100644
index 0000000000..6739a46fab
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/settings.py
@@ -0,0 +1,119 @@
+# -*- coding: utf-8 -*-
+
+
+class BaseRequestSettings(object):
+ __slots__ = (
+ "trace_id",
+ "request_type",
+ "timeout",
+ "cancel_after",
+ "operation_timeout",
+ "tracer",
+ "compression",
+ "headers",
+ "need_rpc_auth",
+ )
+
+ def __init__(self):
+ """
+ Request settings to be used for RPC execution
+ """
+ self.trace_id = None
+ self.request_type = None
+ self.timeout = None
+ self.cancel_after = None
+ self.operation_timeout = None
+ self.compression = None
+ self.need_rpc_auth = True
+ self.headers = []
+
+ def make_copy(self):
+ return (
+ BaseRequestSettings()
+ .with_trace_id(self.trace_id)
+ .with_request_type(self.request_type)
+ .with_timeout(self.timeout)
+ .with_cancel_after(self.cancel_after)
+ .with_operation_timeout(self.operation_timeout)
+ .with_compression(self.compression)
+ .with_need_rpc_auth(self.need_rpc_auth)
+ )
+
+ def with_compression(self, compression):
+ """
+ Enables compression for the specific RPC
+ :param compression: An RPCCompression enum value.
+        :return: The self instance.
+ """
+ self.compression = compression
+ return self
+
+ def with_need_rpc_auth(self, need_rpc_auth):
+ self.need_rpc_auth = need_rpc_auth
+ return self
+
+ def with_header(self, key, value):
+ """
+ Adds a key-value pair to the request headers.
+ :param key: A string with a header key.
+ :param value: A string with a header value.
+        :return: The self instance.
+ """
+ self.headers.append((key, value))
+ return self
+
+ def with_trace_id(self, trace_id):
+ """
+ Includes trace id for RPC headers
+ :param trace_id: A trace id string
+ :return: The self instance
+ """
+ self.trace_id = trace_id
+ return self
+
+ def with_request_type(self, request_type):
+ """
+ Includes request type for RPC headers
+ :param request_type: A request type string
+ :return: The self instance
+ """
+ self.request_type = request_type
+ return self
+
+ def with_operation_timeout(self, timeout):
+ """
+ Indicates that client is no longer interested in the result of operation after the specified duration
+ starting from the time operation arrives at the server.
+ Server will try to stop the execution of operation and if no result is currently available the operation
+ will receive TIMEOUT status code, which will be sent back to client if it was waiting for the operation result.
+ Timeout of operation does not tell anything about its result, it might be completed successfully
+ or cancelled on server.
+ :param timeout:
+ :return:
+ """
+ self.operation_timeout = timeout
+ return self
+
+ def with_cancel_after(self, timeout):
+ """
+ Server will try to cancel the operation after the specified duration starting from the time
+ the operation arrives at server.
+ In case of successful cancellation operation will receive CANCELLED status code, which will be
+ sent back to client if it was waiting for the operation result.
+ In case when cancellation isn't possible, no action will be performed.
+ :param timeout:
+ :return:
+ """
+ self.cancel_after = timeout
+ return self
+
+ def with_timeout(self, timeout):
+ """
+ Client-side timeout to complete request.
+ Since YDB doesn't support request cancellation at this moment, this feature should be
+ used properly to avoid server overload.
+ :param timeout: timeout value in seconds
+ :return: The self instance
+ """
+ self.timeout = timeout
+ return self
diff --git a/contrib/python/ydb/py3/ydb/sqlalchemy/__init__.py b/contrib/python/ydb/py3/ydb/sqlalchemy/__init__.py
new file mode 100644
index 0000000000..d1fc4c90de
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/sqlalchemy/__init__.py
@@ -0,0 +1,293 @@
+"""
+Experimental
+Work in progress, breaking changes are possible.
+"""
+
+from __future__ import absolute_import, unicode_literals
+
+
+try:
+ import ydb
+ from ydb.dbapi.errors import NotSupportedError
+ from ydb.sqlalchemy.types import UInt32, UInt64
+
+ from sqlalchemy.engine.default import DefaultDialect
+ from sqlalchemy.sql.compiler import (
+ IdentifierPreparer,
+ GenericTypeCompiler,
+ SQLCompiler,
+ )
+ from sqlalchemy import Table
+ from sqlalchemy.sql.elements import ClauseList
+ from sqlalchemy.sql import functions
+ import sqlalchemy as sa
+ from sqlalchemy import exc
+ from sqlalchemy.util.compat import inspect_getfullargspec
+ from sqlalchemy.sql import literal_column
+
+ SQLALCHEMY_VERSION = tuple(sa.__version__.split("."))
+ SA_14 = SQLALCHEMY_VERSION >= ("1", "4")
+
+ class YqlIdentifierPreparer(IdentifierPreparer):
+ def __init__(self, dialect):
+ super(YqlIdentifierPreparer, self).__init__(
+ dialect,
+ initial_quote="`",
+ final_quote="`",
+ )
+
+ def _requires_quotes(self, value):
+ # Force all identifiers to get quoted unless already quoted.
+ return not (value.startswith(self.initial_quote) and value.endswith(self.final_quote))
+
+ class YqlTypeCompiler(GenericTypeCompiler):
+ def visit_VARCHAR(self, type_, **kw):
+ return "STRING"
+
+ def visit_unicode(self, type_, **kw):
+ return "UTF8"
+
+ def visit_NVARCHAR(self, type_, **kw):
+ return "UTF8"
+
+ def visit_TEXT(self, type_, **kw):
+ return "UTF8"
+
+ def visit_FLOAT(self, type_, **kw):
+ return "DOUBLE"
+
+ def visit_BOOLEAN(self, type_, **kw):
+ return "BOOL"
+
+ def visit_uint32(self, type_, **kw):
+ return "UInt32"
+
+ def visit_uint64(self, type_, **kw):
+ return "UInt64"
+
+ def visit_uint8(self, type_, **kw):
+ return "UInt8"
+
+ class ParametrizedFunction(functions.Function):
+ __visit_name__ = "parametrized_function"
+
+ def __init__(self, name, params, *args, **kwargs):
+ super(ParametrizedFunction, self).__init__(name, *args, **kwargs)
+ self._func_name = name
+ self._func_params = params
+ self.params_expr = ClauseList(
+ operator=functions.operators.comma_op, group_contents=True, *params
+ ).self_group()
+
+ class YqlCompiler(SQLCompiler):
+ def group_by_clause(self, select, **kw):
+ # Hack to ensure it is possible to define labels in groupby.
+ kw.update(within_columns_clause=True)
+ return super(YqlCompiler, self).group_by_clause(select, **kw)
+
+ def visit_lambda(self, lambda_, **kw):
+ func = lambda_.func
+ spec = inspect_getfullargspec(func)
+
+ if spec.varargs:
+ raise exc.CompileError("Lambdas with *args are not supported")
+
+ try:
+ keywords = spec.keywords
+ except AttributeError:
+ keywords = spec.varkw
+
+ if keywords:
+ raise exc.CompileError("Lambdas with **kwargs are not supported")
+
+ text = "(" + ", ".join("$" + arg for arg in spec.args) + ")" + " -> "
+
+ args = [literal_column("$" + arg) for arg in spec.args]
+ text += "{ RETURN " + self.process(func(*args), **kw) + " ;}"
+
+ return text
+
+ def visit_parametrized_function(self, func, **kwargs):
+ name = func.name
+ name_parts = []
+ for name in name.split("::"):
+ fname = (
+ self.preparer.quote(name)
+ if self.preparer._requires_quotes_illegal_chars(name)
+ or isinstance(name, sa.sql.elements.quoted_name)
+ else name
+ )
+
+ name_parts.append(fname)
+
+ name = "::".join(name_parts)
+ params = func.params_expr._compiler_dispatch(self, **kwargs)
+ args = self.function_argspec(func, **kwargs)
+ return "%(name)s%(params)s%(args)s" % dict(name=name, params=params, args=args)
+
+ def visit_function(self, func, add_to_result_map=None, **kwargs):
+ # Copypaste of `sa.sql.compiler.SQLCompiler.visit_function` with
+ # `::` as namespace separator instead of `.`
+ if add_to_result_map is not None:
+ add_to_result_map(func.name, func.name, (), func.type)
+
+ disp = getattr(self, "visit_%s_func" % func.name.lower(), None)
+ if disp:
+ return disp(func, **kwargs)
+ else:
+ name = sa.sql.compiler.FUNCTIONS.get(func.__class__, None)
+ if name:
+ if func._has_args:
+ name += "%(expr)s"
+ else:
+ name = func.name
+ name = (
+ self.preparer.quote(name)
+ if self.preparer._requires_quotes_illegal_chars(name)
+ or isinstance(name, sa.sql.elements.quoted_name)
+ else name
+ )
+ name = name + "%(expr)s"
+ return "::".join(
+ [
+ (
+ self.preparer.quote(tok)
+ if self.preparer._requires_quotes_illegal_chars(tok)
+ or isinstance(name, sa.sql.elements.quoted_name)
+ else tok
+ )
+ for tok in func.packagenames
+ ]
+ + [name]
+ ) % {"expr": self.function_argspec(func, **kwargs)}
+
+ COLUMN_TYPES = {
+ ydb.PrimitiveType.Int8: sa.INTEGER,
+ ydb.PrimitiveType.Int16: sa.INTEGER,
+ ydb.PrimitiveType.Int32: sa.INTEGER,
+ ydb.PrimitiveType.Int64: sa.INTEGER,
+ ydb.PrimitiveType.Uint8: sa.INTEGER,
+ ydb.PrimitiveType.Uint16: sa.INTEGER,
+ ydb.PrimitiveType.Uint32: UInt32,
+ ydb.PrimitiveType.Uint64: UInt64,
+ ydb.PrimitiveType.Float: sa.FLOAT,
+ ydb.PrimitiveType.Double: sa.FLOAT,
+ ydb.PrimitiveType.String: sa.TEXT,
+ ydb.PrimitiveType.Utf8: sa.TEXT,
+ ydb.PrimitiveType.Json: sa.JSON,
+ ydb.PrimitiveType.JsonDocument: sa.JSON,
+ ydb.DecimalType: sa.DECIMAL,
+ ydb.PrimitiveType.Yson: sa.TEXT,
+ ydb.PrimitiveType.Date: sa.DATE,
+ ydb.PrimitiveType.Datetime: sa.DATETIME,
+ ydb.PrimitiveType.Timestamp: sa.DATETIME,
+ ydb.PrimitiveType.Interval: sa.INTEGER,
+ ydb.PrimitiveType.Bool: sa.BOOLEAN,
+ ydb.PrimitiveType.DyNumber: sa.TEXT,
+ }
+
+ def _get_column_info(t):
+ nullable = False
+ if isinstance(t, ydb.OptionalType):
+ nullable = True
+ t = t.item
+
+ if isinstance(t, ydb.DecimalType):
+ return sa.DECIMAL(precision=t.precision, scale=t.scale), nullable
+
+ return COLUMN_TYPES[t], nullable
+
+ class YqlDialect(DefaultDialect):
+ name = "yql"
+ supports_alter = False
+ max_identifier_length = 63
+ supports_sane_rowcount = False
+ supports_statement_cache = False
+
+ supports_native_enum = False
+ supports_native_boolean = True
+ supports_smallserial = False
+
+ supports_sequences = False
+ sequences_optional = True
+ preexecute_autoincrement_sequences = True
+ postfetch_lastrowid = False
+
+ supports_default_values = False
+ supports_empty_insert = False
+ supports_multivalues_insert = True
+ default_paramstyle = "qmark"
+
+ isolation_level = None
+
+ preparer = YqlIdentifierPreparer
+ statement_compiler = YqlCompiler
+ type_compiler = YqlTypeCompiler
+
+ @staticmethod
+ def dbapi():
+ import ydb.dbapi
+
+ return ydb.dbapi
+
+ def _check_unicode_returns(self, *args, **kwargs):
+ # Normally, this would do 2 SQL queries, which isn't quite necessary.
+ return "conditional"
+
+ def get_columns(self, connection, table_name, schema=None, **kw):
+ if schema is not None:
+ raise NotSupportedError
+
+ if isinstance(table_name, Table):
+ qt = table_name.name
+ else:
+ qt = table_name
+
+ if SA_14:
+ raw_conn = connection.connection
+ else:
+ raw_conn = connection.raw_connection()
+ columns = raw_conn.describe(qt)
+ as_compatible = []
+ for column in columns:
+ col_type, nullable = _get_column_info(column.type)
+ as_compatible.append(
+ {
+ "name": column.name,
+ "type": col_type,
+ "nullable": nullable,
+ }
+ )
+
+ return as_compatible
+
+ def has_table(self, connection, table_name, schema=None):
+ if schema is not None:
+ raise NotSupportedError
+
+ quote = self.identifier_preparer.quote_identifier
+ qtable = quote(table_name)
+
+ # TODO: use `get_columns` instead.
+ statement = "SELECT * FROM " + qtable
+ try:
+ connection.execute(statement)
+ return True
+ except Exception:
+ return False
+
+except ImportError:
+
+ class YqlDialect(object):
+ def __init__(self):
+ raise RuntimeError("could not import sqlalchemy")
+
+
+def register_dialect(
+ name="yql",
+ module=__name__,
+ cls="YqlDialect",
+):
+ import sqlalchemy as sa
+
+ return sa.dialects.registry.register(name, module, cls)
diff --git a/contrib/python/ydb/py3/ydb/sqlalchemy/types.py b/contrib/python/ydb/py3/ydb/sqlalchemy/types.py
new file mode 100644
index 0000000000..f6e10ccd6f
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/sqlalchemy/types.py
@@ -0,0 +1,32 @@
+try:
+ from sqlalchemy.types import Integer
+ from sqlalchemy.sql import type_api
+ from sqlalchemy.sql.elements import ColumnElement
+ from sqlalchemy import util, exc
+except ImportError:
+ Integer = object
+ ColumnElement = object
+
+
+class UInt32(Integer):
+ __visit_name__ = "uint32"
+
+
+class UInt64(Integer):
+ __visit_name__ = "uint64"
+
+
+class UInt8(Integer):
+ __visit_name__ = "uint8"
+
+
+class Lambda(ColumnElement):
+
+ __visit_name__ = "lambda"
+
+ def __init__(self, func):
+ if not util.callable(func):
+ raise exc.ArgumentError("func must be callable")
+
+ self.type = type_api.NULLTYPE
+ self.func = func
diff --git a/contrib/python/ydb/py3/ydb/table.py b/contrib/python/ydb/py3/ydb/table.py
new file mode 100644
index 0000000000..c21392bb4c
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/table.py
@@ -0,0 +1,2594 @@
+# -*- coding: utf-8 -*-
+import abc
+import ydb
+from abc import abstractmethod
+import logging
+import time
+import random
+import enum
+
+from . import (
+ issues,
+ convert,
+ settings as settings_impl,
+ scheme,
+ types,
+ _utilities,
+ _apis,
+ _sp_impl,
+ _session_impl,
+ _tx_ctx_impl,
+ tracing,
+)
+from ._errors import check_retriable_error
+
+try:
+ from . import interceptor
+except ImportError:
+ interceptor = None
+
+_default_allow_split_transaction = False
+
+logger = logging.getLogger(__name__)
+
+##################################################################
+# A deprecated aliases in case when direct import has been used #
+##################################################################
+SessionPoolEmpty = issues.SessionPoolEmpty
+DataQuery = types.DataQuery
+
+
+class DescribeTableSettings(settings_impl.BaseRequestSettings):
+ def __init__(self):
+ super(DescribeTableSettings, self).__init__()
+ self.include_shard_key_bounds = False
+ self.include_table_stats = False
+
+ def with_include_shard_key_bounds(self, value):
+ self.include_shard_key_bounds = value
+ return self
+
+ def with_include_table_stats(self, value):
+ self.include_table_stats = value
+ return self
+
+
+class ExecDataQuerySettings(settings_impl.BaseRequestSettings):
+ def __init__(self):
+ super(ExecDataQuerySettings, self).__init__()
+ self.keep_in_cache = True
+
+ def with_keep_in_cache(self, value):
+ self.keep_in_cache = value
+ return self
+
+
+class KeyBound(object):
+ __slots__ = ("_equal", "value", "type")
+
+ def __init__(self, key_value, key_type=None, inclusive=False):
+ """
+ Represents key bound.
+ :param key_value: An iterable with key values
+ :param key_type: A type of key
+        :param inclusive: A flag that indicates whether the bound includes the key provided in the value.
+ """
+
+ try:
+ iter(key_value)
+ except TypeError:
+ assert False, "value must be iterable!"
+
+ if isinstance(key_type, types.TupleType):
+ key_type = key_type.proto
+
+ self._equal = inclusive
+ self.value = key_value
+ self.type = key_type
+
+ def is_inclusive(self):
+ return self._equal
+
+ def is_exclusive(self):
+ return not self._equal
+
+ def __str__(self):
+ if self._equal:
+ return "InclusiveKeyBound(Tuple%s)" % str(self.value)
+ return "ExclusiveKeyBound(Tuple%s)" % str(self.value)
+
+ @classmethod
+ def inclusive(cls, key_value, key_type):
+ return cls(key_value, key_type, True)
+
+ @classmethod
+ def exclusive(cls, key_value, key_type):
+ return cls(key_value, key_type, False)
+
+
+class KeyRange(object):
+ __slots__ = ("from_bound", "to_bound")
+
+ def __init__(self, from_bound, to_bound):
+ self.from_bound = from_bound
+ self.to_bound = to_bound
+
+ def __repr__(self):
+ return self.__str__()
+
+ def __str__(self):
+ return "KeyRange(%s, %s)" % (str(self.from_bound), str(self.to_bound))
+
+
+class Column(object):
+ def __init__(self, name, type, family=None):
+ self._name = name
+ self._type = type
+ self.family = family
+
+ def __eq__(self, other):
+ return self.name == other.name and self._type.item == other.type.item
+
+ @property
+ def name(self):
+ return self._name
+
+ @property
+ def type(self):
+ return self._type
+
+ def with_family(self, family):
+ self.family = family
+ return self
+
+ @property
+ def type_pb(self):
+ try:
+ return self._type.proto
+ except Exception:
+ return self._type
+
+
+@enum.unique
+class FeatureFlag(enum.IntEnum):
+ UNSPECIFIED = 0
+ ENABLED = 1
+ DISABLED = 2
+
+
+@enum.unique
+class AutoPartitioningPolicy(enum.IntEnum):
+ AUTO_PARTITIONING_POLICY_UNSPECIFIED = 0
+ DISABLED = 1
+ AUTO_SPLIT = 2
+ AUTO_SPLIT_MERGE = 3
+
+
+@enum.unique
+class IndexStatus(enum.IntEnum):
+ INDEX_STATUS_UNSPECIFIED = 0
+ READY = 1
+ BUILDING = 2
+
+
+class CachingPolicy(object):
+ def __init__(self):
+ self._pb = _apis.ydb_table.CachingPolicy()
+ self.preset_name = None
+
+ def with_preset_name(self, preset_name):
+ self._pb.preset_name = preset_name
+ self.preset_name = preset_name
+ return self
+
+ def to_pb(self):
+ return self._pb
+
+
+class ExecutionPolicy(object):
+ def __init__(self):
+ self._pb = _apis.ydb_table.ExecutionPolicy()
+ self.preset_name = None
+
+ def with_preset_name(self, preset_name):
+ self._pb.preset_name = preset_name
+ self.preset_name = preset_name
+ return self
+
+ def to_pb(self):
+ return self._pb
+
+
+class CompactionPolicy(object):
+ def __init__(self):
+ self._pb = _apis.ydb_table.CompactionPolicy()
+ self.preset_name = None
+
+ def with_preset_name(self, preset_name):
+ self._pb.preset_name = preset_name
+ self.preset_name = preset_name
+ return self
+
+ def to_pb(self):
+ return self._pb
+
+
+class SplitPoint(object):
+ def __init__(self, *args):
+ self._value = tuple(args)
+
+ @property
+ def value(self):
+ return self._value
+
+
+class ExplicitPartitions(object):
+ def __init__(self, split_points):
+ self.split_points = split_points
+
+
+class PartitioningPolicy(object):
+ def __init__(self):
+ self._pb = _apis.ydb_table.PartitioningPolicy()
+ self.preset_name = None
+ self.uniform_partitions = None
+ self.auto_partitioning = None
+ self.explicit_partitions = None
+
+ def with_preset_name(self, preset_name):
+ self._pb.preset_name = preset_name
+ self.preset_name = preset_name
+ return self
+
+ def with_uniform_partitions(self, uniform_partitions):
+ self._pb.uniform_partitions = uniform_partitions
+ self.uniform_partitions = uniform_partitions
+ return self
+
+ def with_explicit_partitions(self, explicit_partitions):
+ self.explicit_partitions = explicit_partitions
+ return self
+
+ def with_auto_partitioning(self, auto_partitioning):
+ self._pb.auto_partitioning = auto_partitioning
+ self.auto_partitioning = auto_partitioning
+ return self
+
+ def to_pb(self, table_description):
+ if self.explicit_partitions is not None:
+ column_types = {}
+ pk = set(table_description.primary_key)
+ for column in table_description.columns:
+ if column.name in pk:
+ column_types[column.name] = column.type
+
+ for split_point in self.explicit_partitions.split_points:
+ typed_value = self._pb.explicit_partitions.split_points.add()
+ split_point_type = types.TupleType()
+ prefix_size = len(split_point.value)
+ for pl_el_id, pk_name in enumerate(table_description.primary_key):
+ if pl_el_id >= prefix_size:
+ break
+
+ split_point_type.add_element(column_types[pk_name])
+
+ typed_value.type.MergeFrom(split_point_type.proto)
+ typed_value.value.MergeFrom(convert.from_native_value(split_point_type.proto, split_point.value))
+
+ return self._pb
+
+
+class TableIndex(object):
+ def __init__(self, name):
+ self._pb = _apis.ydb_table.TableIndex()
+ self._pb.name = name
+ self.name = name
+ self.index_columns = []
+ # output only.
+ self.status = None
+
+ def with_global_index(self):
+ self._pb.global_index.SetInParent()
+ return self
+
+ def with_index_columns(self, *columns):
+ for column in columns:
+ self._pb.index_columns.append(column)
+ self.index_columns.append(column)
+ return self
+
+ def to_pb(self):
+ return self._pb
+
+
+class ReplicationPolicy(object):
+ def __init__(self):
+ self._pb = _apis.ydb_table.ReplicationPolicy()
+ self.preset_name = None
+ self.replicas_count = None
+ self.allow_promotion = None
+ self.create_per_availability_zone = None
+
+ def with_preset_name(self, preset_name):
+ self._pb.preset_name = preset_name
+ self.preset_name = preset_name
+ return self
+
+ def with_replicas_count(self, replicas_count):
+ self._pb.replicas_count = replicas_count
+ self.replicas_count = replicas_count
+ return self
+
+ def with_create_per_availability_zone(self, create_per_availability_zone):
+ self._pb.create_per_availability_zone = create_per_availability_zone
+ self.create_per_availability_zone = create_per_availability_zone
+ return self
+
+ def with_allow_promotion(self, allow_promotion):
+ self._pb.allow_promotion = allow_promotion
+ self.allow_promotion = allow_promotion
+ return self
+
+ def to_pb(self):
+ return self._pb
+
+
+class StoragePool(object):
+ def __init__(self, media):
+ self.media = media
+
+ def to_pb(self):
+ return _apis.ydb_table.StoragePool(media=self.media)
+
+
+class StoragePolicy(object):
+ def __init__(self):
+ self._pb = _apis.ydb_table.StoragePolicy()
+ self.preset_name = None
+ self.syslog = None
+ self.log = None
+ self.data = None
+ self.keep_in_memory = None
+ self.external = None
+
+ def with_preset_name(self, preset_name):
+ self._pb.preset_name = preset_name
+ self.preset_name = preset_name
+ return self
+
+ def with_syslog_storage_settings(self, syslog_settings):
+ self._pb.syslog.MergeFrom(syslog_settings.to_pb())
+ self.syslog = syslog_settings
+ return self
+
+ def with_log_storage_settings(self, log_settings):
+ self._pb.log.MergeFrom(log_settings.to_pb())
+ self.log = log_settings
+ return self
+
+ def with_data_storage_settings(self, data_settings):
+ self._pb.data.MergeFrom(data_settings.to_pb())
+ self.data = data_settings
+ return self
+
+ def with_external_storage_settings(self, external_settings):
+ self._pb.external.MergeFrom(external_settings.to_pb())
+ self.external = external_settings
+ return self
+
+ def with_keep_in_memory(self, keep_in_memory):
+ self._pb.keep_in_memory = keep_in_memory
+ self.keep_in_memory = keep_in_memory
+ return self
+
+ def to_pb(self):
+ return self._pb
+
+
class TableProfile(object):
    """Fluent builder for a Ydb table profile.

    Each ``with_*`` call stores the corresponding policy builder and returns
    ``self`` so calls can be chained; :meth:`to_pb` serializes everything that
    was configured.
    """

    def __init__(self):
        self.preset_name = None
        self.compaction_policy = None
        self.partitioning_policy = None
        self.storage_policy = None
        self.execution_policy = None
        self.replication_policy = None
        self.caching_policy = None

    def with_preset_name(self, preset_name):
        self.preset_name = preset_name
        return self

    def with_storage_policy(self, storage_policy):
        self.storage_policy = storage_policy
        return self

    def with_compaction_policy(self, compaction_policy):
        self.compaction_policy = compaction_policy
        return self

    def with_partitioning_policy(self, partitioning_policy):
        self.partitioning_policy = partitioning_policy
        return self

    def with_execution_policy(self, execution_policy):
        self.execution_policy = execution_policy
        return self

    def with_replication_policy(self, replication_policy):
        self.replication_policy = replication_policy
        return self

    def with_caching_policy(self, caching_policy):
        self.caching_policy = caching_policy
        return self

    def to_pb(self, table_description):
        """Serialize the configured policies into a TableProfile protobuf.

        :param table_description: passed through to the partitioning policy,
            which needs the table layout to serialize itself.
        """
        pb = _apis.ydb_table.TableProfile()

        if self.preset_name is not None:
            pb.preset_name = self.preset_name

        # Policies that serialize without extra context; the proto field name
        # matches the attribute name for each of them.
        for attr in (
            "compaction_policy",
            "execution_policy",
            "storage_policy",
            "replication_policy",
            "caching_policy",
        ):
            policy = getattr(self, attr)
            if policy is not None:
                getattr(pb, attr).MergeFrom(policy.to_pb())

        # Partitioning is special: it needs the table description.
        if self.partitioning_policy is not None:
            pb.partitioning_policy.MergeFrom(self.partitioning_policy.to_pb(table_description))

        return pb
+
+
class DateTypeColumnModeSettings(object):
    """TTL mode driven by a date/datetime column.

    :param column_name: name of the column holding the row's expiry base time
    :param expire_after_seconds: TTL offset in seconds (0 means expire at the
        column value itself)
    """

    def __init__(self, column_name, expire_after_seconds=0):
        self.column_name = column_name
        self.expire_after_seconds = expire_after_seconds

    def to_pb(self):
        """Serialize into a DateTypeColumnModeSettings protobuf message."""
        message = _apis.ydb_table.DateTypeColumnModeSettings()
        message.column_name = self.column_name
        message.expire_after_seconds = self.expire_after_seconds
        return message
+
+
@enum.unique
class ColumnUnit(enum.IntEnum):
    # Unit of the numeric column used by ValueSinceUnixEpochModeSettings —
    # presumably mirrors the server-side column-unit enum; confirm against
    # the Ydb.Table proto definitions.
    UNIT_UNSPECIFIED = 0
    UNIT_SECONDS = 1
    UNIT_MILLISECONDS = 2
    UNIT_MICROSECONDS = 3
    UNIT_NANOSECONDS = 4
+
+
class ValueSinceUnixEpochModeSettings(object):
    """TTL mode driven by a numeric column holding time since the Unix epoch.

    :param column_name: name of the numeric column
    :param column_unit: unit of the column's value (a ColumnUnit value)
    :param expire_after_seconds: TTL offset in seconds
    """

    def __init__(self, column_name, column_unit, expire_after_seconds=0):
        self.column_name = column_name
        self.column_unit = column_unit
        self.expire_after_seconds = expire_after_seconds

    def to_pb(self):
        """Serialize into a ValueSinceUnixEpochModeSettings protobuf message."""
        message = _apis.ydb_table.ValueSinceUnixEpochModeSettings()
        message.column_name = self.column_name
        message.column_unit = self.column_unit
        message.expire_after_seconds = self.expire_after_seconds
        return message
+
+
class TtlSettings(object):
    """Builder for a table's TTL configuration.

    Exactly one of the two modes must be configured before calling
    :meth:`to_pb`: a date-type column or a value-since-unix-epoch column.
    """

    def __init__(self):
        self.date_type_column = None
        self.value_since_unix_epoch = None

    def with_date_type_column(self, column_name, expire_after_seconds=0):
        """Use a date/datetime column as the TTL source."""
        self.date_type_column = DateTypeColumnModeSettings(column_name, expire_after_seconds)
        return self

    def with_value_since_unix_epoch(self, column_name, column_unit, expire_after_seconds=0):
        """Use a numeric since-epoch column as the TTL source."""
        self.value_since_unix_epoch = ValueSinceUnixEpochModeSettings(column_name, column_unit, expire_after_seconds)
        return self

    def to_pb(self):
        """Serialize the configured mode; raises if no mode was chosen."""
        if self.date_type_column is None and self.value_since_unix_epoch is None:
            raise RuntimeError("Unspecified ttl settings mode")

        pb = _apis.ydb_table.TtlSettings()
        if self.date_type_column is not None:
            pb.date_type_column.MergeFrom(self.date_type_column.to_pb())
        else:
            pb.value_since_unix_epoch.MergeFrom(self.value_since_unix_epoch.to_pb())
        return pb
+
+
class TableStats(object):
    """Subset of table statistics (partition count and store size)."""

    def __init__(self):
        self.partitions = None
        self.store_size = 0

    def with_partitions(self, partitions):
        self.partitions = partitions
        return self

    def with_store_size(self, store_size):
        self.store_size = store_size
        return self
+
+
class ReadReplicasSettings(object):
    """Builder for read-replica counts (per availability zone or any zone)."""

    def __init__(self):
        self.per_az_read_replicas_count = 0
        self.any_az_read_replicas_count = 0

    def with_per_az_read_replicas_count(self, per_az_read_replicas_count):
        self.per_az_read_replicas_count = per_az_read_replicas_count
        return self

    def with_any_az_read_replicas_count(self, any_az_read_replicas_count):
        self.any_az_read_replicas_count = any_az_read_replicas_count
        return self

    def to_pb(self):
        """Serialize to protobuf; per-AZ takes precedence when both are set."""
        pb = _apis.ydb_table.ReadReplicasSettings()
        if self.per_az_read_replicas_count > 0:
            pb.per_az_read_replicas_count = self.per_az_read_replicas_count
        elif self.any_az_read_replicas_count > 0:
            pb.any_az_read_replicas_count = self.any_az_read_replicas_count
        return pb
+
+
class PartitioningSettings(object):
    """Builder for automatic-partitioning settings of a table."""

    # Names double as both attribute and protobuf field names (see to_pb).
    _FIELDS = (
        "partitioning_by_size",
        "partition_size_mb",
        "partitioning_by_load",
        "min_partitions_count",
        "max_partitions_count",
    )

    def __init__(self):
        self.partitioning_by_size = 0
        self.partition_size_mb = 0
        self.partitioning_by_load = 0
        self.min_partitions_count = 0
        self.max_partitions_count = 0

    def with_partitioning_by_size(self, partitioning_by_size):
        self.partitioning_by_size = partitioning_by_size
        return self

    def with_partition_size_mb(self, partition_size_mb):
        self.partition_size_mb = partition_size_mb
        return self

    def with_partitioning_by_load(self, partitioning_by_load):
        self.partitioning_by_load = partitioning_by_load
        return self

    def with_min_partitions_count(self, min_partitions_count):
        self.min_partitions_count = min_partitions_count
        return self

    def with_max_partitions_count(self, max_partitions_count):
        self.max_partitions_count = max_partitions_count
        return self

    def to_pb(self):
        """Serialize all fields into a PartitioningSettings protobuf."""
        pb = _apis.ydb_table.PartitioningSettings()
        for field in self._FIELDS:
            setattr(pb, field, getattr(self, field))
        return pb
+
+
class StorageSettings(object):
    """Builder for table storage configuration: commit-log pools, the pool
    for externalized blobs, and the external-blobs flag."""

    def __init__(self):
        self.tablet_commit_log0 = None
        self.tablet_commit_log1 = None
        self.external = None
        self.store_external_blobs = 0

    def with_tablet_commit_log0(self, tablet_commit_log0):
        self.tablet_commit_log0 = tablet_commit_log0
        return self

    def with_tablet_commit_log1(self, tablet_commit_log1):
        self.tablet_commit_log1 = tablet_commit_log1
        return self

    def with_external(self, external):
        self.external = external
        return self

    def with_store_external_blobs(self, store_external_blobs):
        self.store_external_blobs = store_external_blobs
        return self

    def to_pb(self):
        """Serialize into a StorageSettings protobuf message."""
        pb = _apis.ydb_table.StorageSettings()
        pb.store_external_blobs = self.store_external_blobs
        # Merge only the pools that were actually configured (truthy).
        for field in ("external", "tablet_commit_log0", "tablet_commit_log1"):
            pool = getattr(self, field)
            if pool:
                getattr(pb, field).MergeFrom(pool.to_pb())
        return pb
+
+
@enum.unique
class Compression(enum.IntEnum):
    # Column-family compression codec — presumably mirrors the server-side
    # ColumnFamily compression enum; confirm against the Ydb.Table protos.
    UNSPECIFIED = 0
    NONE = 1
    LZ4 = 2
+
+
class ColumnFamily(object):
    """Builder for a single column-family description."""

    def __init__(self):
        self.compression = 0
        self.name = None
        self.data = None
        self.keep_in_memory = 0

    def with_name(self, name):
        self.name = name
        return self

    def with_data(self, data):
        self.data = data
        return self

    def with_compression(self, compression):
        self.compression = compression
        return self

    def with_keep_in_memory(self, keep_in_memory):
        self.keep_in_memory = keep_in_memory
        return self

    def to_pb(self):
        """Serialize into a ColumnFamily protobuf message."""
        message = _apis.ydb_table.ColumnFamily()
        message.keep_in_memory = self.keep_in_memory
        message.compression = self.compression
        # name and data are optional and set only when configured.
        if self.name is not None:
            message.name = self.name
        if self.data is not None:
            message.data.MergeFrom(self.data.to_pb())
        return message
+
+
class TableDescription(object):
    """Mutable builder describing a table to be created.

    Every ``with_*`` method mutates the description in place and returns
    ``self`` so calls can be chained.
    """

    def __init__(self):
        self.columns = []
        self.primary_key = []
        self.profile = None
        self.indexes = []
        self.column_families = []
        self.ttl_settings = None
        self.attributes = {}
        self.uniform_partitions = 0
        self.partition_at_keys = None
        self.compaction_policy = None
        self.key_bloom_filter = 0
        self.read_replicas_settings = None
        self.partitioning_settings = None
        self.storage_settings = None

    def with_column(self, column):
        """Append a single column description."""
        self.columns.append(column)
        return self

    def with_columns(self, *columns):
        """Append several column descriptions at once."""
        self.columns.extend(columns)
        return self

    def with_primary_key(self, key):
        """Append a single primary-key column name."""
        self.primary_key.append(key)
        return self

    def with_primary_keys(self, *keys):
        """Append several primary-key column names at once."""
        self.primary_key.extend(keys)
        return self

    def with_column_family(self, column_family):
        """Append a single column family."""
        self.column_families.append(column_family)
        return self

    def with_column_families(self, *column_families):
        """Append several column families at once."""
        self.column_families.extend(column_families)
        return self

    def with_index(self, index):
        """Append a single secondary index."""
        self.indexes.append(index)
        return self

    def with_indexes(self, *indexes):
        """Append several secondary indexes at once."""
        self.indexes.extend(indexes)
        return self

    def with_profile(self, profile):
        self.profile = profile
        return self

    def with_ttl(self, ttl_settings):
        self.ttl_settings = ttl_settings
        return self

    def with_attributes(self, attributes):
        self.attributes = attributes
        return self

    def with_uniform_partitions(self, uniform_partitions):
        self.uniform_partitions = uniform_partitions
        return self

    def with_partition_at_keys(self, partition_at_keys):
        self.partition_at_keys = partition_at_keys
        return self

    def with_key_bloom_filter(self, key_bloom_filter):
        self.key_bloom_filter = key_bloom_filter
        return self

    def with_partitioning_settings(self, partitioning_settings):
        self.partitioning_settings = partitioning_settings
        return self

    def with_read_replicas_settings(self, read_replicas_settings):
        self.read_replicas_settings = read_replicas_settings
        return self

    def with_compaction_policy(self, compaction_policy):
        self.compaction_policy = compaction_policy
        return self

    def with_storage_settings(self, storage_settings):
        self.storage_settings = storage_settings
        return self
+
+
class AbstractTransactionModeBuilder(abc.ABC):
    """Common interface of transaction-mode builders: a wire name plus a
    mode-specific protobuf settings message."""

    @property
    @abc.abstractmethod
    def name(self):
        # Wire name of the mode, e.g. "serializable_read_write".
        pass

    @property
    @abc.abstractmethod
    def settings(self):
        # Mode-specific protobuf settings message.
        pass
+
+
class SnapshotReadOnly(AbstractTransactionModeBuilder):
    """Snapshot read-only transaction mode."""

    __slots__ = ("_pb", "_name")

    def __init__(self):
        self._pb = _apis.ydb_table.SnapshotModeSettings()
        self._name = "snapshot_read_only"

    @property
    def settings(self):
        # Protobuf settings message for this mode.
        return self._pb

    @property
    def name(self):
        # Wire name of the mode.
        return self._name
+
+
class SerializableReadWrite(AbstractTransactionModeBuilder):
    """Serializable read-write transaction mode."""

    __slots__ = ("_pb", "_name")

    def __init__(self):
        self._name = "serializable_read_write"
        self._pb = _apis.ydb_table.SerializableModeSettings()

    @property
    def settings(self):
        # Protobuf settings message for this mode.
        return self._pb

    @property
    def name(self):
        # Wire name of the mode.
        return self._name
+
+
class OnlineReadOnly(AbstractTransactionModeBuilder):
    """Online read-only transaction mode; consistent reads by default,
    with an opt-in for inconsistent reads."""

    __slots__ = ("_pb", "_name")

    def __init__(self):
        self._pb = _apis.ydb_table.OnlineModeSettings()
        self._pb.allow_inconsistent_reads = False
        self._name = "online_read_only"

    def with_allow_inconsistent_reads(self):
        # Mutates this builder in place and returns it for chaining.
        self._pb.allow_inconsistent_reads = True
        return self

    @property
    def settings(self):
        # Protobuf settings message for this mode.
        return self._pb

    @property
    def name(self):
        # Wire name of the mode.
        return self._name
+
+
class StaleReadOnly(AbstractTransactionModeBuilder):
    """Stale read-only transaction mode."""

    __slots__ = ("_pb", "_name")

    def __init__(self):
        self._pb = _apis.ydb_table.StaleModeSettings()
        self._name = "stale_read_only"

    @property
    def settings(self):
        # Protobuf settings message for this mode.
        return self._pb

    @property
    def name(self):
        # Wire name of the mode.
        return self._name
+
+
class BackoffSettings(object):
    """Exponential backoff with jitter.

    The base delay doubles with every retry up to ``2**ceiling`` slots of
    ``slot_duration`` seconds; the result is then scaled by a random factor
    drawn from ``[1 - uncertain_ratio, 1)``.
    """

    def __init__(self, ceiling=6, slot_duration=0.001, uncertain_ratio=0.5):
        self.ceiling = ceiling
        self.slot_duration = slot_duration
        self.uncertain_ratio = uncertain_ratio

    def calc_timeout(self, retry_number):
        """Return the sleep time in seconds for the given 0-based attempt."""
        exponent = min(retry_number, self.ceiling)
        max_duration_ms = (1 << exponent) * self.slot_duration * 1000.0
        # Scale by 1 - uncertain_ratio*(1 - r), i.e. keep at least the
        # deterministic (1 - uncertain_ratio) share of the maximum.
        jitter = 1.0 - self.uncertain_ratio * (1.0 - random.random())
        return max_duration_ms * jitter / 1000.0
+
+
class RetrySettings(object):
    """Tunables for the retry_operation_* helpers: attempt count, backoff
    schedules, timeouts and error callbacks."""

    def __init__(
        self,
        max_retries=10,
        max_session_acquire_timeout=None,
        on_ydb_error_callback=None,
        backoff_ceiling=6,
        backoff_slot_duration=1,
        get_session_client_timeout=5,
        fast_backoff_settings=None,
        slow_backoff_settings=None,
        idempotent=False,
    ):
        self.max_retries = max_retries
        self.max_session_acquire_timeout = max_session_acquire_timeout
        # Invoked on every YDB error before the retry decision is made.
        self.on_ydb_error_callback = (lambda e: None) if on_ydb_error_callback is None else on_ydb_error_callback
        self.fast_backoff = BackoffSettings(10, 0.005) if fast_backoff_settings is None else fast_backoff_settings
        self.slow_backoff = (
            BackoffSettings(backoff_ceiling, backoff_slot_duration)
            if slow_backoff_settings is None
            else slow_backoff_settings
        )
        self.retry_not_found = True
        self.idempotent = idempotent
        self.retry_internal_error = True
        # Called for non-YDB exceptions before they are re-raised.
        self.unknown_error_handler = lambda e: None
        self.get_session_client_timeout = get_session_client_timeout
        # The overall session-acquire timeout also caps the per-call wait.
        if max_session_acquire_timeout is not None:
            self.get_session_client_timeout = min(self.max_session_acquire_timeout, self.get_session_client_timeout)

    def with_fast_backoff(self, backoff_settings):
        self.fast_backoff = backoff_settings
        return self

    def with_slow_backoff(self, backoff_settings):
        self.slow_backoff = backoff_settings
        return self
+
+
class YdbRetryOperationSleepOpt(object):
    """Instruction yielded by retry_operation_impl asking the driving loop
    to sleep for ``timeout`` seconds before the next attempt."""

    def __init__(self, timeout):
        self.timeout = timeout

    def __eq__(self, other):
        return type(self) is type(other) and self.timeout == other.timeout

    def __repr__(self):
        return f"YdbRetryOperationSleepOpt({self.timeout})"
+
+
class YdbRetryOperationFinalResult(object):
    """Terminal value yielded by retry_operation_impl: the callee's result,
    plus an optional exception injected by the consumer via set_exception."""

    def __init__(self, result):
        self.result = result
        self.exc = None

    def set_exception(self, exc):
        """Attach an exception for the retry loop to re-raise."""
        self.exc = exc

    def __eq__(self, other):
        return type(self) is type(other) and self.result == other.result and self.exc == other.exc

    def __repr__(self):
        return f"YdbRetryOperationFinalResult({self.result}, exc={self.exc})"
+
+
def retry_operation_impl(callee, retry_settings=None, *args, **kwargs):
    """Generator driving the retry loop around ``callee``.

    Yields YdbRetryOperationFinalResult on success and
    YdbRetryOperationSleepOpt when the consumer should sleep before the next
    attempt; the consumer decides how to sleep (sync vs. async), which is why
    this is a generator. Non-retriable errors and unknown exceptions are
    re-raised immediately.
    """
    retry_settings = RetrySettings() if retry_settings is None else retry_settings
    status = None

    for attempt in range(retry_settings.max_retries + 1):
        try:
            result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
            yield result

            # The consumer may attach an exception to the final result
            # (set_exception); honor it by raising here.
            if result.exc is not None:
                raise result.exc

        except issues.Error as e:
            status = e
            retry_settings.on_ydb_error_callback(e)

            retriable_info = check_retriable_error(e, retry_settings, attempt)
            if not retriable_info.is_retriable:
                raise

            # These error types are retried immediately, without asking the
            # consumer to sleep first.
            skip_yield_error_types = [
                issues.Aborted,
                issues.BadSession,
                issues.NotFound,
                issues.InternalError,
            ]

            yield_sleep = True
            for t in skip_yield_error_types:
                if isinstance(e, t):
                    yield_sleep = False

            if yield_sleep:
                yield YdbRetryOperationSleepOpt(retriable_info.sleep_timeout_seconds)

        except Exception as e:
            # you should provide your own handler you want
            retry_settings.unknown_error_handler(e)
            raise

    # All attempts exhausted: re-raise the last YDB error observed.
    raise status
+
+
def retry_operation_sync(callee, retry_settings=None, *args, **kwargs):
    """Run ``callee`` with retries, sleeping synchronously between attempts.

    Thin synchronous driver over retry_operation_impl: sleep instructions are
    executed with time.sleep, and the first final result is returned.
    """
    for opt in retry_operation_impl(callee, retry_settings, *args, **kwargs):
        if not isinstance(opt, YdbRetryOperationSleepOpt):
            return opt.result
        time.sleep(opt.timeout)
+
+
class TableClientSettings(object):
    """Client-side switches controlling how result sets are decoded
    (native python types vs. raw values, laziness, truncation policy)."""

    def __init__(self):
        self._client_query_cache_enabled = False
        self._native_datetime_in_result_sets = False
        self._native_date_in_result_sets = False
        self._make_result_sets_lazy = False
        self._native_json_in_result_sets = False
        self._native_interval_in_result_sets = False
        self._native_timestamp_in_result_sets = False
        self._allow_truncated_result = convert._default_allow_truncated_result

    def with_client_query_cache(self, enabled):
        # type:(bool) -> ydb.TableClientSettings
        self._client_query_cache_enabled = enabled
        return self

    def with_lazy_result_sets(self, enabled):
        # type:(bool) -> ydb.TableClientSettings
        self._make_result_sets_lazy = enabled
        return self

    def with_allow_truncated_result(self, enabled):
        # type:(bool) -> ydb.TableClientSettings
        self._allow_truncated_result = enabled
        return self

    def with_native_date_in_result_sets(self, enabled):
        # type:(bool) -> ydb.TableClientSettings
        self._native_date_in_result_sets = enabled
        return self

    def with_native_datetime_in_result_sets(self, enabled):
        # type:(bool) -> ydb.TableClientSettings
        self._native_datetime_in_result_sets = enabled
        return self

    def with_native_timestamp_in_result_sets(self, enabled):
        # type:(bool) -> ydb.TableClientSettings
        self._native_timestamp_in_result_sets = enabled
        return self

    def with_native_interval_in_result_sets(self, enabled):
        # type:(bool) -> ydb.TableClientSettings
        self._native_interval_in_result_sets = enabled
        return self

    def with_native_json_in_result_sets(self, enabled):
        # type:(bool) -> ydb.TableClientSettings
        self._native_json_in_result_sets = enabled
        return self
+
+
class ScanQueryResult(object):
    """One decoded part of a streamed scan-query response."""

    def __init__(self, result, table_client_settings):
        # Keep the raw protobuf part; expose stats and the decoded result set.
        self._result = result
        self.query_stats = result.query_stats
        self.result_set = convert.ResultSet.from_message(self._result.result_set, table_client_settings)
+
+
@enum.unique
class QueryStatsCollectionMode(enum.IntEnum):
    """Level of query statistics the server is asked to collect."""

    NONE = _apis.ydb_table.QueryStatsCollection.Mode.STATS_COLLECTION_NONE
    BASIC = _apis.ydb_table.QueryStatsCollection.Mode.STATS_COLLECTION_BASIC
    FULL = _apis.ydb_table.QueryStatsCollection.Mode.STATS_COLLECTION_FULL
+
+
class ScanQuerySettings(settings_impl.BaseRequestSettings):
    """Request settings for scan queries; adds a stats-collection mode."""

    def __init__(self):
        super(ScanQuerySettings, self).__init__()
        # Expected to be a QueryStatsCollectionMode value; forwarded verbatim
        # to the ExecuteScanQueryRequest by _scan_query_request_factory.
        self.collect_stats = None

    def with_collect_stats(self, collect_stats_mode):
        self.collect_stats = collect_stats_mode
        return self
+
+
class ScanQuery(object):
    """A scan query: YQL text plus its declared parameter types."""

    def __init__(self, yql_text, parameters_types):
        self.yql_text = yql_text
        self.parameters_types = parameters_types
+
+
def _wrap_scan_query_response(response, table_client_settings):
    # Raise on server-reported issues, then decode the successful part.
    issues._process_response(response)
    return ScanQueryResult(response.result, table_client_settings)
+
+
def _scan_query_request_factory(query, parameters=None, settings=None):
    # Accept either a ScanQuery or a plain YQL string; a plain string implies
    # no declared parameter types.
    if not isinstance(query, ScanQuery):
        query = ScanQuery(query, {})
    parameters = {} if parameters is None else parameters
    # The getattr default only kicks in when settings is None (or lacks the
    # attribute); a ScanQuerySettings with collect_stats left as None is
    # forwarded as-is.
    collect_stats = getattr(
        settings,
        "collect_stats",
        _apis.ydb_table.QueryStatsCollection.Mode.STATS_COLLECTION_NONE,
    )
    return _apis.ydb_table.ExecuteScanQueryRequest(
        mode=_apis.ydb_table.ExecuteScanQueryRequest.Mode.MODE_EXEC,
        query=_apis.ydb_table.Query(yql_text=query.yql_text),
        parameters=convert.parameters_to_pb(query.parameters_types, parameters),
        collect_stats=collect_stats,
    )
+
+
class ISession(abc.ABC):
    """Abstract interface of a YDB table-service session."""

    @abstractmethod
    def __init__(self, driver, table_client_settings):
        pass

    @abstractmethod
    def __lt__(self, other):
        pass

    @abstractmethod
    def __eq__(self, other):
        pass

    @property
    @abstractmethod
    def session_id(self):
        pass

    @abstractmethod
    def initialized(self):
        """
        Return True if session is successfully initialized with a session_id and False otherwise.
        """
        pass

    @abstractmethod
    def pending_query(self):
        pass

    @abstractmethod
    def reset(self):
        """
        Perform session state reset (that includes cleanup of the session_id, query cache, and etc.)
        """
        pass

    @abstractmethod
    def read_table(
        self,
        path,
        key_range=None,
        columns=(),
        ordered=False,
        row_limit=None,
        settings=None,
        use_snapshot=None,
    ):
        """
        Perform an read table request.

        :param path: A path to the table
        :param key_range: (optional) A KeyRange instance that describes a range to read. The KeyRange instance\
        should include from_bound and/or to_bound. Each of the bounds (if provided) should specify a value of the\
        key bound, and type of the key prefix. See an example above.
        :param columns: (optional) An iterable with table columns to read.
        :param ordered: (optional) A flag that indicates that result should be ordered.
        :param row_limit: (optional) A number of rows to read.
        :param settings: Request settings

        :return: SyncResponseIterator instance
        """
        pass

    @abstractmethod
    def keep_alive(self, settings=None):
        pass

    @abstractmethod
    def create(self, settings=None):
        pass

    @abstractmethod
    def delete(self, settings=None):
        pass

    @abstractmethod
    def execute_scheme(self, yql_text, settings=None):
        pass

    @abstractmethod
    def transaction(self, tx_mode=None, allow_split_transactions=None):
        pass

    @abstractmethod
    def has_prepared(self, query):
        pass

    @abstractmethod
    def prepare(self, query, settings=None):
        pass

    @abstractmethod
    def explain(self, yql_text, settings=None):
        """
        Experimental API.

        :param yql_text: YQL query text to explain
        :param settings: Request settings

        :return:
        """
        pass

    @abstractmethod
    def create_table(self, path, table_description, settings=None):
        """
        Create a YDB table.

        :param path: A table path
        :param table_description: A description of table to create. An instance TableDescription
        :param settings: An instance of BaseRequestSettings that describes how rpc should invoked.

        :return: A description of created scheme entry or error otherwise.
        """
        pass

    @abstractmethod
    def drop_table(self, path, settings=None):
        pass

    @abstractmethod
    def alter_table(
        self,
        path,
        add_columns=None,
        drop_columns=None,
        settings=None,
        alter_attributes=None,
        add_indexes=None,
        drop_indexes=None,
        set_ttl_settings=None,
        drop_ttl_settings=None,
        add_column_families=None,
        alter_column_families=None,
        alter_storage_settings=None,
        set_compaction_policy=None,
        alter_partitioning_settings=None,
        set_key_bloom_filter=None,
        set_read_replicas_settings=None,
    ):
        pass

    @abstractmethod
    def copy_table(self, source_path, destination_path, settings=None):
        pass

    @abstractmethod
    def copy_tables(self, source_destination_pairs, settings=None):
        pass

    # NOTE(review): unlike every other operation on this interface this method
    # is NOT marked @abstractmethod — presumably an oversight; confirm before
    # relying on subclasses being forced to implement it.
    def describe_table(self, path, settings=None):
        """
        Returns a description of the table by provided path

        :param path: A table path
        :param settings: A request settings

        :return: Description of a table
        """
        pass
+
+
class ITableClient(abc.ABC):
    """Abstract interface of the table client."""

    # NOTE(review): __init__ is intentionally(?) not abstract — confirm.
    def __init__(self, driver, table_client_settings=None):
        pass

    @abstractmethod
    def session(self):
        # Construct a new (not yet server-initialized) session.
        pass

    @abstractmethod
    def scan_query(self, query, parameters=None, settings=None):
        pass

    @abstractmethod
    def bulk_upsert(self, table_path, rows, column_types, settings=None):
        """
        Bulk upsert data

        :param table_path: A table path.
        :param rows: A list of structures.
        :param column_types: Bulk upsert column types.

        """
        pass
+
+
class BaseTableClient(ITableClient):
    """Synchronous table client built on top of the driver."""

    def __init__(self, driver, table_client_settings=None):
        # type:(ydb.Driver, ydb.TableClientSettings) -> None
        self._driver = driver
        self._table_client_settings = TableClientSettings() if table_client_settings is None else table_client_settings

    def session(self):
        # type: () -> ydb.Session
        # The returned session is not attached to a server-side session until
        # .create() is called on it.
        return Session(self._driver, self._table_client_settings)

    def scan_query(self, query, parameters=None, settings=None):
        # type: (ydb.ScanQuery, tuple, ydb.BaseRequestSettings) -> ydb.SyncResponseIterator
        # Streaming RPC: results arrive as an iterator of decoded parts.
        request = _scan_query_request_factory(query, parameters, settings)
        stream_it = self._driver(
            request,
            _apis.TableService.Stub,
            _apis.TableService.StreamExecuteScanQuery,
            settings=settings,
        )
        return _utilities.SyncResponseIterator(
            stream_it,
            lambda resp: _wrap_scan_query_response(resp, self._table_client_settings),
        )

    def bulk_upsert(self, table_path, rows, column_types, settings=None):
        # type: (str, list, ydb.AbstractTypeBuilder | ydb.PrimitiveType, ydb.BaseRequestSettings) -> None
        """
        Bulk upsert data

        :param table_path: A table path.
        :param rows: A list of structures.
        :param column_types: Bulk upsert column types.

        """
        return self._driver(
            _session_impl.bulk_upsert_request_factory(table_path, rows, column_types),
            _apis.TableService.Stub,
            _apis.TableService.BulkUpsert,
            _session_impl.wrap_operation_bulk_upsert,
            settings,
            (),
        )
+
+
class TableClient(BaseTableClient):
    """Table client adding asynchronous (future/iterator-based) variants."""

    def async_scan_query(self, query, parameters=None, settings=None):
        # type: (ydb.ScanQuery, tuple, ydb.BaseRequestSettings) -> ydb.AsyncResponseIterator
        # Same request shape as the sync scan_query, but results are consumed
        # through an asynchronous iterator.
        request = _scan_query_request_factory(query, parameters, settings)
        stream_it = self._driver(
            request,
            _apis.TableService.Stub,
            _apis.TableService.StreamExecuteScanQuery,
            settings=settings,
        )
        return _utilities.AsyncResponseIterator(
            stream_it,
            lambda resp: _wrap_scan_query_response(resp, self._table_client_settings),
        )

    @_utilities.wrap_async_call_exceptions
    def async_bulk_upsert(self, table_path, rows, column_types, settings=None):
        # type: (str, list, ydb.AbstractTypeBuilder | ydb.PrimitiveType, ydb.BaseRequestSettings) -> None
        """Bulk upsert data; returns a future instead of blocking."""
        return self._driver.future(
            _session_impl.bulk_upsert_request_factory(table_path, rows, column_types),
            _apis.TableService.Stub,
            _apis.TableService.BulkUpsert,
            _session_impl.wrap_operation_bulk_upsert,
            settings,
            (),
        )
+
+
def _make_index_description(index):
    """Convert a protobuf table-index description into a TableIndex."""
    description = TableIndex(index.name).with_index_columns(*index.index_columns)
    description.status = IndexStatus(index.status)
    return description
+
+
class TableSchemeEntry(scheme.SchemeEntry):
    """Table description returned by describe_table.

    Extends the generic scheme entry with table metadata: columns, primary
    key, shard key ranges, secondary indexes, column families, TTL,
    partitioning/replication/storage settings, statistics and attributes.
    """

    def __init__(
        self,
        name,
        owner,
        type,
        effective_permissions,
        permissions,
        size_bytes,
        columns,
        primary_key,
        shard_key_bounds,
        indexes,
        table_stats,
        ttl_settings,
        attributes,
        partitioning_settings,
        column_families,
        key_bloom_filter,
        read_replicas_settings,
        storage_settings,
        *args,
        **kwargs
    ):

        super(TableSchemeEntry, self).__init__(
            name, owner, type, effective_permissions, permissions, size_bytes, *args, **kwargs
        )
        self.primary_key = [pk for pk in primary_key]
        self.columns = [Column(column.name, convert.type_to_native(column.type), column.family) for column in columns]
        self.indexes = [_make_index_description(index) for index in indexes]
        self.shard_key_ranges = []
        self.column_families = []
        self.key_bloom_filter = FeatureFlag(key_bloom_filter)
        left_key_bound = None
        for column_family in column_families:
            self.column_families.append(
                ColumnFamily()
                .with_name(column_family.name)
                .with_keep_in_memory(FeatureFlag(column_family.keep_in_memory))
                .with_compression(Compression(column_family.compression))
            )

            if column_family.HasField("data"):
                self.column_families[-1].with_data(StoragePool(column_family.data.media))

        # Rebuild shard key ranges from the ordered split points: each bound
        # closes the previous range (exclusive) and opens the next (inclusive).
        for shard_key_bound in shard_key_bounds:
            # for next key range
            key_bound_type = shard_key_bound.type
            current_bound = convert.to_native_value(shard_key_bound)
            self.shard_key_ranges.append(
                KeyRange(
                    None if left_key_bound is None else KeyBound.inclusive(left_key_bound, key_bound_type),
                    KeyBound.exclusive(current_bound, key_bound_type),
                )
            )
            left_key_bound = current_bound

            assert isinstance(left_key_bound, tuple)

        if len(shard_key_bounds) > 0:
            # The last range is unbounded on the right.
            self.shard_key_ranges.append(
                KeyRange(
                    KeyBound.inclusive(left_key_bound, shard_key_bounds[-1].type),
                    None,
                )
            )

        else:
            # Single-shard table: one all-covering range.
            self.shard_key_ranges.append(KeyRange(None, None))

        self.read_replicas_settings = None
        if read_replicas_settings is not None:
            self.read_replicas_settings = ReadReplicasSettings()
            # Copy whichever oneof arm the server populated.
            for field in ("per_az_read_replicas_count", "any_az_read_replicas_count"):
                if read_replicas_settings.WhichOneof("settings") == field:
                    setattr(
                        self.read_replicas_settings,
                        field,
                        getattr(read_replicas_settings, field),
                    )

        self.storage_settings = None
        if storage_settings is not None:
            self.storage_settings = StorageSettings()
            # BUG FIX: previously this wrapped the freshly-constructed default
            # (self.storage_settings.store_external_blobs, always 0) instead of
            # the value reported by the server, discarding the actual flag.
            self.storage_settings.store_external_blobs = FeatureFlag(storage_settings.store_external_blobs)
            if storage_settings.HasField("tablet_commit_log0"):
                self.storage_settings.with_tablet_commit_log0(StoragePool(storage_settings.tablet_commit_log0.media))

            if storage_settings.HasField("tablet_commit_log1"):
                self.storage_settings.with_tablet_commit_log1(StoragePool(storage_settings.tablet_commit_log1.media))

            if storage_settings.HasField("external"):
                self.storage_settings.with_external(StoragePool(storage_settings.external.media))

        self.partitioning_settings = None
        if partitioning_settings is not None:
            self.partitioning_settings = PartitioningSettings()
            for field in (
                "partitioning_by_size",
                "partitioning_by_load",
                "partition_size_mb",
                "min_partitions_count",
                "max_partitions_count",
            ):
                setattr(
                    self.partitioning_settings,
                    field,
                    getattr(partitioning_settings, field),
                )

        self.ttl_settings = None
        if ttl_settings is not None:
            if ttl_settings.HasField("date_type_column"):
                self.ttl_settings = TtlSettings().with_date_type_column(
                    ttl_settings.date_type_column.column_name,
                    ttl_settings.date_type_column.expire_after_seconds,
                )
            elif ttl_settings.HasField("value_since_unix_epoch"):
                self.ttl_settings = TtlSettings().with_value_since_unix_epoch(
                    ttl_settings.value_since_unix_epoch.column_name,
                    ColumnUnit(ttl_settings.value_since_unix_epoch.column_unit),
                    ttl_settings.value_since_unix_epoch.expire_after_seconds,
                )

        self.table_stats = None
        if table_stats is not None:
            self.table_stats = TableStats()
            if table_stats.partitions != 0:
                self.table_stats = self.table_stats.with_partitions(table_stats.partitions)

            if table_stats.store_size != 0:
                self.table_stats = self.table_stats.with_store_size(table_stats.store_size)

        self.attributes = attributes
+
+
class RenameItem:
    """A single source-to-destination pair for a rename-tables operation."""

    def __init__(self, source_path, destination_path, replace_destination=False):
        self._source_path = source_path
        self._destination_path = destination_path
        self._replace_destination = replace_destination

    @property
    def replace_destination(self):
        """Whether an existing destination should be overwritten."""
        return self._replace_destination

    @property
    def source_path(self):
        """Path of the table to rename."""
        return self._source_path

    @property
    def destination_path(self):
        """New path of the table."""
        return self._destination_path
+
+
+class BaseSession(ISession):
    def __init__(self, driver, table_client_settings):
        self._driver = driver
        self._state = _session_impl.SessionState(table_client_settings)

    def __lt__(self, other):
        # Sessions are ordered by id so they can live in sorted containers.
        return self.session_id < other.session_id

    def __eq__(self, other):
        # Two session objects are equal when they wrap the same server session.
        return self.session_id == other.session_id

    @property
    def session_id(self):
        """
        Return session_id.
        """
        return self._state.session_id

    def initialized(self):
        """
        Return True if session is successfully initialized with a session_id and False otherwise.
        """
        return self._state.session_id is not None

    def pending_query(self):
        # Delegates to the session state.
        return self._state.pending_query()

    def closing(self):
        """Returns True if session is closing."""
        return self._state.closing()

    def reset(self):
        """
        Perform session state reset (that includes cleanup of the session_id, query cache, and etc.)
        """
        return self._state.reset()
+
    def read_table(
        self,
        path,
        key_range=None,
        columns=(),
        ordered=False,
        row_limit=None,
        settings=None,
        use_snapshot=None,
    ):
        """
        Perform an read table request.

        :param path: A path to the table
        :param key_range: (optional) A KeyRange instance that describes a range to read. The KeyRange instance\
        should include from_bound and/or to_bound. Each of the bounds (if provided) should specify a value of the\
        key bound, and type of the key prefix. See an example above.
        :param columns: (optional) An iterable with table columns to read.
        :param ordered: (optional) A flag that indicates that result should be ordered.
        :param row_limit: (optional) A number of rows to read.
        :param settings: (optional) Request settings
        :param use_snapshot: (optional) forwarded to the request factory; presumably toggles snapshot reads - confirm

        :return: SyncResponseIterator instance
        """
        request = _session_impl.read_table_request_factory(
            self._state,
            path,
            key_range,
            columns,
            ordered,
            row_limit,
            use_snapshot=use_snapshot,
        )
        # Streaming call: rows arrive in chunks through the iterator.
        stream_it = self._driver(
            request,
            _apis.TableService.Stub,
            _apis.TableService.StreamReadTable,
            settings=settings,
        )
        return _utilities.SyncResponseIterator(stream_it, _session_impl.wrap_read_table_response)
+
    def keep_alive(self, settings=None):
        # Ping the server-side session so it is not garbage-collected.
        return self._driver(
            _session_impl.keep_alive_request_factory(self._state),
            _apis.TableService.Stub,
            _apis.TableService.KeepAlive,
            _session_impl.wrap_keep_alive_response,
            settings,
            (self._state, self),
            self._state.endpoint,
        )

    def create(self, settings=None):
        # Idempotent: returns immediately when the session already has an id.
        if self._state.session_id is not None:
            return self
        create_settings = settings_impl.BaseRequestSettings()
        if settings is not None:
            create_settings = settings.make_copy()
        # Advertise session-balancer capability via the request header.
        create_settings = create_settings.with_header("x-ydb-client-capabilities", "session-balancer")
        return self._driver(
            _apis.ydb_table.CreateSessionRequest(),
            _apis.TableService.Stub,
            _apis.TableService.CreateSession,
            _session_impl.initialize_session,
            create_settings,
            (self._state, self),
            self._state.endpoint,
        )

    def delete(self, settings=None):
        # Destroy the server-side session and clean up local state.
        return self._driver(
            self._state.attach_request(_apis.ydb_table.DeleteSessionRequest()),
            _apis.TableService.Stub,
            _apis.TableService.DeleteSession,
            _session_impl.cleanup_session,
            settings,
            (self._state, self),
            self._state.endpoint,
        )

    def execute_scheme(self, yql_text, settings=None):
        # Run a scheme (DDL) query on this session.
        return self._driver(
            _session_impl.execute_scheme_request_factory(self._state, yql_text),
            _apis.TableService.Stub,
            _apis.TableService.ExecuteSchemeQuery,
            _session_impl.wrap_execute_scheme_result,
            settings,
            (self._state,),
            self._state.endpoint,
        )
+
+ def transaction(self, tx_mode=None, allow_split_transactions=None):
+ return TxContext(
+ self._driver,
+ self._state,
+ self,
+ tx_mode,
+ allow_split_transactions=allow_split_transactions,
+ )
+
+ def has_prepared(self, query):
+ return query in self._state
+
+ def prepare(self, query, settings=None):
+ data_query, _ = self._state.lookup(query)
+ if data_query is not None:
+ return data_query
+ return self._driver(
+ _session_impl.prepare_request_factory(self._state, query),
+ _apis.TableService.Stub,
+ _apis.TableService.PrepareDataQuery,
+ _session_impl.wrap_prepare_query_response,
+ settings,
+ (self._state, query),
+ self._state.endpoint,
+ )
+
+ def explain(self, yql_text, settings=None):
+ """
+ Expiremental API.
+
+ :param yql_text:
+ :param settings:
+
+ :return:
+ """
+ return self._driver(
+ _session_impl.explain_data_query_request_factory(self._state, yql_text),
+ _apis.TableService.Stub,
+ _apis.TableService.ExplainDataQuery,
+ _session_impl.wrap_explain_response,
+ settings,
+ (self._state,),
+ self._state.endpoint,
+ )
+
+ def create_table(self, path, table_description, settings=None):
+ """
+ Create a YDB table.
+
+ :param path: A table path
+ :param table_description: A description of table to create. An instance TableDescription
+ :param settings: An instance of BaseRequestSettings that describes how rpc should invoked.
+
+ :return: A description of created scheme entry or error otherwise.
+ """
+ return self._driver(
+ _session_impl.create_table_request_factory(self._state, path, table_description),
+ _apis.TableService.Stub,
+ _apis.TableService.CreateTable,
+ _session_impl.wrap_operation,
+ settings,
+ (self._driver,),
+ self._state.endpoint,
+ )
+
+ def drop_table(self, path, settings=None):
+ return self._driver(
+ self._state.attach_request(_apis.ydb_table.DropTableRequest(path=path)),
+ _apis.TableService.Stub,
+ _apis.TableService.DropTable,
+ _session_impl.wrap_operation,
+ settings,
+ (self._state,),
+ self._state.endpoint,
+ )
+
    def alter_table(
        self,
        path,
        add_columns=None,
        drop_columns=None,
        settings=None,
        alter_attributes=None,
        add_indexes=None,
        drop_indexes=None,
        set_ttl_settings=None,
        drop_ttl_settings=None,
        add_column_families=None,
        alter_column_families=None,
        alter_storage_settings=None,
        set_compaction_policy=None,
        alter_partitioning_settings=None,
        set_key_bloom_filter=None,
        set_read_replicas_settings=None,
    ):
        """Alter an existing table: columns, indexes, TTL, families, storage
        and partitioning settings.

        Every parameter except ``path`` is optional; only the provided
        alterations are included in the request.

        :param path: A table path.
        :param settings: (optional) request settings.

        :return: the alter operation (AlterTableOperation).
        """
        # NOTE: the factory takes the alteration arguments positionally, in
        # exactly this order — keep the two lists in sync.
        return self._driver(
            _session_impl.alter_table_request_factory(
                self._state,
                path,
                add_columns,
                drop_columns,
                alter_attributes,
                add_indexes,
                drop_indexes,
                set_ttl_settings,
                drop_ttl_settings,
                add_column_families,
                alter_column_families,
                alter_storage_settings,
                set_compaction_policy,
                alter_partitioning_settings,
                set_key_bloom_filter,
                set_read_replicas_settings,
            ),
            _apis.TableService.Stub,
            _apis.TableService.AlterTable,
            _session_impl.AlterTableOperation,
            settings,
            (self._driver,),
            self._state.endpoint,
        )
+
+ def describe_table(self, path, settings=None):
+ """
+ Returns a description of the table by provided path
+
+ :param path: A table path
+ :param settings: A request settings
+
+ :return: Description of a table
+ """
+ return self._driver(
+ _session_impl.describe_table_request_factory(self._state, path, settings),
+ _apis.TableService.Stub,
+ _apis.TableService.DescribeTable,
+ _session_impl.wrap_describe_table_response,
+ settings,
+ (self._state, TableSchemeEntry),
+ self._state.endpoint,
+ )
+
+ def copy_table(self, source_path, destination_path, settings=None):
+ return self.copy_tables([(source_path, destination_path)], settings=settings)
+
+ def copy_tables(self, source_destination_pairs, settings=None):
+ return self._driver(
+ _session_impl.copy_tables_request_factory(self._state, source_destination_pairs),
+ _apis.TableService.Stub,
+ _apis.TableService.CopyTables,
+ _session_impl.wrap_operation,
+ settings,
+ (self._state,),
+ self._state.endpoint,
+ )
+
+ def rename_tables(self, rename_items, settings=None):
+ return self._driver(
+ _session_impl.rename_tables_request_factory(self._state, rename_items),
+ _apis.TableService.Stub,
+ _apis.TableService.RenameTables,
+ _session_impl.wrap_operation,
+ settings,
+ (self._state,),
+ self._state.endpoint,
+ )
+
+
class Session(BaseSession):
    """Synchronous session extended with async_* counterparts.

    Each async_* method mirrors the corresponding BaseSession call but is
    dispatched through the driver's future-based interface and returns a
    future instead of blocking.
    """

    def async_read_table(
        self,
        path,
        key_range=None,
        columns=(),
        ordered=False,
        row_limit=None,
        settings=None,
        use_snapshot=None,
    ):
        """
        Perform a read table request.

        :param path: A path to the table
        :param key_range: (optional) A KeyRange instance that describes a range to read. The KeyRange instance\
        should include from_bound and/or to_bound. Each of the bounds (if provided) should specify a value of the\
        key bound, and type of the key prefix. See an example above.
        :param columns: (optional) An iterable with table columns to read.
        :param ordered: (optional) A flag that indicates that result should be ordered.
        :param row_limit: (optional) A number of rows to read.

        :return: AsyncResponseIterator instance
        """
        # The async stream path needs the optional interceptor machinery;
        # fail fast when its import failed at module load time.
        if interceptor is None:
            raise RuntimeError("Async read table is not available due to import issues")
        request = _session_impl.read_table_request_factory(
            self._state,
            path,
            key_range,
            columns,
            ordered,
            row_limit,
            use_snapshot=use_snapshot,
        )
        stream_it = self._driver(
            request,
            _apis.TableService.Stub,
            _apis.TableService.StreamReadTable,
            settings=settings,
        )
        return _utilities.AsyncResponseIterator(stream_it, _session_impl.wrap_read_table_response)

    @_utilities.wrap_async_call_exceptions
    def async_keep_alive(self, settings=None):
        """Asynchronous variant of keep_alive(); returns a future."""
        return self._driver.future(
            _session_impl.keep_alive_request_factory(self._state),
            _apis.TableService.Stub,
            _apis.TableService.KeepAlive,
            _session_impl.wrap_keep_alive_response,
            settings,
            (self._state, self),
            self._state.endpoint,
        )

    @_utilities.wrap_async_call_exceptions
    def async_create(self, settings=None):
        """Asynchronous variant of create(); returns a future of the session."""
        if self._state.session_id is not None:
            # Already established: resolve immediately.
            return _utilities.wrap_result_in_future(self)
        create_settings = settings_impl.BaseRequestSettings()
        if settings is not None:
            create_settings = settings.make_copy()
        # Advertise the session-balancer capability to the server.
        create_settings = create_settings.with_header("x-ydb-client-capabilities", "session-balancer")
        return self._driver.future(
            _apis.ydb_table.CreateSessionRequest(),
            _apis.TableService.Stub,
            _apis.TableService.CreateSession,
            _session_impl.initialize_session,
            create_settings,
            (self._state, self),
            self._state.endpoint,
        )

    @_utilities.wrap_async_call_exceptions
    def async_delete(self, settings=None):
        """Asynchronous variant of delete(); returns a future."""
        return self._driver.future(
            self._state.attach_request(_apis.ydb_table.DeleteSessionRequest()),
            _apis.TableService.Stub,
            _apis.TableService.DeleteSession,
            _session_impl.cleanup_session,
            settings,
            (self._state, self),
            self._state.endpoint,
        )

    @_utilities.wrap_async_call_exceptions
    def async_execute_scheme(self, yql_text, settings=None):
        """Asynchronous variant of execute_scheme(); returns a future."""
        return self._driver.future(
            _session_impl.execute_scheme_request_factory(self._state, yql_text),
            _apis.TableService.Stub,
            _apis.TableService.ExecuteSchemeQuery,
            _session_impl.wrap_execute_scheme_result,
            settings,
            (self._state,),
            self._state.endpoint,
        )

    @_utilities.wrap_async_call_exceptions
    def async_prepare(self, query, settings=None):
        """Asynchronous variant of prepare(); resolves from the session cache
        when possible, otherwise returns a future of the prepared query."""
        data_query, _ = self._state.lookup(query)
        if data_query is not None:
            return _utilities.wrap_result_in_future(data_query)
        return self._driver.future(
            _session_impl.prepare_request_factory(self._state, query),
            _apis.TableService.Stub,
            _apis.TableService.PrepareDataQuery,
            _session_impl.wrap_prepare_query_response,
            settings,
            (
                self._state,
                query,
            ),
            self._state.endpoint,
        )

    @_utilities.wrap_async_call_exceptions
    def async_create_table(self, path, table_description, settings=None):
        """Asynchronous variant of create_table(); returns a future."""
        return self._driver.future(
            _session_impl.create_table_request_factory(self._state, path, table_description),
            _apis.TableService.Stub,
            _apis.TableService.CreateTable,
            _session_impl.wrap_operation,
            settings,
            (self._driver,),
            self._state.endpoint,
        )

    @_utilities.wrap_async_call_exceptions
    def async_drop_table(self, path, settings=None):
        """Asynchronous variant of drop_table(); returns a future."""
        return self._driver.future(
            self._state.attach_request(_apis.ydb_table.DropTableRequest(path=path)),
            _apis.TableService.Stub,
            _apis.TableService.DropTable,
            _session_impl.wrap_operation,
            settings,
            (self._state,),
            self._state.endpoint,
        )

    @_utilities.wrap_async_call_exceptions
    def async_alter_table(
        self,
        path,
        add_columns=None,
        drop_columns=None,
        settings=None,
        alter_attributes=None,
        add_indexes=None,
        drop_indexes=None,
        set_ttl_settings=None,
        drop_ttl_settings=None,
        add_column_families=None,
        alter_column_families=None,
        alter_storage_settings=None,
        set_compaction_policy=None,
        alter_partitioning_settings=None,
        set_key_bloom_filter=None,
        set_read_replicas_settings=None,
    ):
        """Asynchronous variant of alter_table(); returns a future.

        The factory takes the alteration arguments positionally, in exactly
        this order — keep both lists in sync.
        """
        return self._driver.future(
            _session_impl.alter_table_request_factory(
                self._state,
                path,
                add_columns,
                drop_columns,
                alter_attributes,
                add_indexes,
                drop_indexes,
                set_ttl_settings,
                drop_ttl_settings,
                add_column_families,
                alter_column_families,
                alter_storage_settings,
                set_compaction_policy,
                alter_partitioning_settings,
                set_key_bloom_filter,
                set_read_replicas_settings,
            ),
            _apis.TableService.Stub,
            _apis.TableService.AlterTable,
            _session_impl.AlterTableOperation,
            settings,
            (self._driver,),
            self._state.endpoint,
        )

    def async_copy_table(self, source_path, destination_path, settings=None):
        """Asynchronous variant of copy_table(); delegates to async_copy_tables()."""
        return self.async_copy_tables([(source_path, destination_path)], settings=settings)

    @_utilities.wrap_async_call_exceptions
    def async_copy_tables(self, source_destination_pairs, settings=None):
        """Asynchronous variant of copy_tables(); returns a future."""
        return self._driver.future(
            _session_impl.copy_tables_request_factory(self._state, source_destination_pairs),
            _apis.TableService.Stub,
            _apis.TableService.CopyTables,
            _session_impl.wrap_operation,
            settings,
            (self._state,),
            self._state.endpoint,
        )

    @_utilities.wrap_async_call_exceptions
    def async_rename_tables(self, rename_tables, settings=None):
        """Asynchronous variant of rename_tables(); returns a future."""
        return self._driver.future(
            _session_impl.rename_tables_request_factory(self._state, rename_tables),
            _apis.TableService.Stub,
            _apis.TableService.RenameTables,
            _session_impl.wrap_operation,
            settings,
            (self._state,),
            self._state.endpoint,
        )

    @_utilities.wrap_async_call_exceptions
    def async_describe_table(self, path, settings=None):
        """Asynchronous variant of describe_table(); returns a future."""
        return self._driver.future(
            _session_impl.describe_table_request_factory(self._state, path, settings),
            _apis.TableService.Stub,
            _apis.TableService.DescribeTable,
            _session_impl.wrap_describe_table_response,
            settings,
            (self._state, TableSchemeEntry),
            self._state.endpoint,
        )
+
+
class ITxContext(abc.ABC):
    """Abstract interface of a transaction context manager.

    Concrete implementations (see BaseTxContext) wrap a session and drive
    begin/execute/commit/rollback against the table service.
    """

    @abstractmethod
    def __init__(self, driver, session_state, session, tx_mode=None):
        """
        An object that provides a simple transaction context manager that allows statements execution
        in a transaction. You don't have to open transaction explicitly, because context manager encapsulates
        transaction control logic, and opens new transaction if:
        1) By explicit .begin();
        2) On execution of a first statement, which is strictly recommended method, because that avoids
        useless round trip

        This context manager is not thread-safe, so you should not manipulate on it concurrently.

        :param driver: A driver instance
        :param session_state: A state of session
        :param session: A session instance the transaction belongs to
        :param tx_mode: A transaction mode, which is a one from the following choices:
         1) SerializableReadWrite() which is default mode;
         2) OnlineReadOnly();
         3) StaleReadOnly().
        """
        pass

    @abstractmethod
    def __enter__(self):
        """
        Enters a context manager and returns a session

        :return: A session instance
        """
        pass

    @abstractmethod
    def __exit__(self, *args, **kwargs):
        """
        Closes a transaction context manager and rollbacks transaction if
        it is not rolled back explicitly
        """
        pass

    @property
    @abstractmethod
    def session_id(self):
        """
        A transaction's session id

        :return: A transaction's session id
        """
        pass

    @property
    @abstractmethod
    def tx_id(self):
        """
        Returns a id of open transaction or None otherwise

        :return: A id of open transaction or None otherwise
        """
        pass

    @abstractmethod
    def execute(self, query, parameters=None, commit_tx=False, settings=None):
        """
        Sends a query (yql text or an instance of DataQuery) to be executed with parameters.
        Execution with parameters supported only for DataQuery instances and is not supported yql text queries.

        :param query: A query, yql text or DataQuery instance.
        :param parameters: A dictionary with parameters values.
        :param commit_tx: A special flag that allows transaction commit
        :param settings: An additional request settings

        :return: A result sets or exception in case of execution errors
        """
        pass

    @abstractmethod
    def commit(self, settings=None):
        """
        Calls commit on a transaction if it is open otherwise is no-op. If transaction execution
        failed then this method raises PreconditionFailed.

        :param settings: A request settings

        :return: A committed transaction or exception if commit is failed
        """
        pass

    @abstractmethod
    def rollback(self, settings=None):
        """
        Calls rollback on a transaction if it is open otherwise is no-op. If transaction execution
        failed then this method raises PreconditionFailed.

        :param settings: A request settings

        :return: A rolled back transaction or exception if rollback is failed
        """
        pass

    @abstractmethod
    def begin(self, settings=None):
        """
        Explicitly begins a transaction

        :param settings: A request settings

        :return: An open transaction
        """
        pass
+
+
class BaseTxContext(ITxContext):
    """Synchronous transaction context manager implementation.

    Tracks transaction state in a TxState and denies further operations
    after commit/rollback unless split transactions are allowed (see
    _check_split).
    """

    __slots__ = (
        "_tx_state",
        "_session_state",
        "_driver",
        "session",
        "_finished",
        "_allow_split_transactions",
    )

    # Markers recorded in self._finished by _set_finish().
    _COMMIT = "commit"
    _ROLLBACK = "rollback"

    def __init__(self, driver, session_state, session, tx_mode=None, *, allow_split_transactions=None):
        """
        An object that provides a simple transaction context manager that allows statements execution
        in a transaction. You don't have to open transaction explicitly, because context manager encapsulates
        transaction control logic, and opens new transaction if:

        1) By explicit .begin() and .async_begin() methods;
        2) On execution of a first statement, which is strictly recommended method, because that avoids useless round trip

        This context manager is not thread-safe, so you should not manipulate on it concurrently.

        :param driver: A driver instance
        :param session_state: A state of session
        :param session: A session instance the transaction belongs to
        :param tx_mode: A transaction mode, which is a one from the following choices:
         1) SerializableReadWrite() which is default mode;
         2) OnlineReadOnly();
         3) StaleReadOnly().
        :param allow_split_transactions: (optional) per-transaction override of
         the module-level split-transactions policy; None means use the default.
        """
        self._driver = driver
        tx_mode = SerializableReadWrite() if tx_mode is None else tx_mode
        self._tx_state = _tx_ctx_impl.TxState(tx_mode)
        self._session_state = session_state
        self.session = session
        # Empty string means "not finished"; otherwise _COMMIT or _ROLLBACK.
        self._finished = ""
        self._allow_split_transactions = allow_split_transactions

    def __enter__(self):
        """
        Enters a context manager and returns a session

        :return: A session instance
        """
        return self

    def __exit__(self, *args, **kwargs):
        """
        Closes a transaction context manager and rollbacks transaction if
        it is not rolled back explicitly
        """
        if self._tx_state.tx_id is not None:
            # It's strictly recommended to close transactions directly
            # by using commit_tx=True flag while executing statement or by
            # .commit() or .rollback() methods, but here we trying to do best
            # effort to avoid useless open transactions
            logger.warning("Potentially leaked tx: %s", self._tx_state.tx_id)
            try:
                self.rollback()
            except issues.Error:
                logger.warning("Failed to rollback leaked tx: %s", self._tx_state.tx_id)

            self._tx_state.tx_id = None

    @property
    def session_id(self):
        """
        A transaction's session id

        :return: A transaction's session id
        """
        return self._session_state.session_id

    @property
    def tx_id(self):
        """
        Returns a id of open transaction or None otherwise

        :return: A id of open transaction or None otherwise
        """
        return self._tx_state.tx_id

    def execute(self, query, parameters=None, commit_tx=False, settings=None):
        """
        Sends a query (yql text or an instance of DataQuery) to be executed with parameters.
        Execution with parameters supported only for DataQuery instances and is not supported yql text queries.

        :param query: A query, yql text or DataQuery instance.
        :param parameters: A dictionary with parameters values.
        :param commit_tx: A special flag that allows transaction commit
        :param settings: An additional request settings

        :return: A result sets or exception in case of execution errors
        """

        self._check_split()
        if commit_tx:
            # Mark the transaction finished before the call so that later
            # operations on it are denied (unless split is allowed).
            self._set_finish(self._COMMIT)

        return self._driver(
            _tx_ctx_impl.execute_request_factory(
                self._session_state,
                self._tx_state,
                query,
                parameters,
                commit_tx,
                settings,
            ),
            _apis.TableService.Stub,
            _apis.TableService.ExecuteDataQuery,
            _tx_ctx_impl.wrap_result_and_tx_id,
            settings,
            (self._session_state, self._tx_state, query),
            self._session_state.endpoint,
        )

    def commit(self, settings=None):
        """
        Calls commit on a transaction if it is open otherwise is no-op. If transaction execution
        failed then this method raises PreconditionFailed.

        :param settings: A request settings

        :return: A committed transaction or exception if commit is failed
        """

        self._set_finish(self._COMMIT)

        # Nothing to commit when the transaction never opened and is healthy.
        if self._tx_state.tx_id is None and not self._tx_state.dead:
            return self

        return self._driver(
            _tx_ctx_impl.commit_request_factory(self._session_state, self._tx_state),
            _apis.TableService.Stub,
            _apis.TableService.CommitTransaction,
            _tx_ctx_impl.wrap_result_on_rollback_or_commit_tx,
            settings,
            (self._session_state, self._tx_state, self),
            self._session_state.endpoint,
        )

    def rollback(self, settings=None):
        """
        Calls rollback on a transaction if it is open otherwise is no-op. If transaction execution
        failed then this method raises PreconditionFailed.

        :param settings: A request settings

        :return: A rolled back transaction or exception if rollback is failed
        """

        self._set_finish(self._ROLLBACK)

        # Nothing to roll back when the transaction never opened and is healthy.
        if self._tx_state.tx_id is None and not self._tx_state.dead:
            return self

        return self._driver(
            _tx_ctx_impl.rollback_request_factory(self._session_state, self._tx_state),
            _apis.TableService.Stub,
            _apis.TableService.RollbackTransaction,
            _tx_ctx_impl.wrap_result_on_rollback_or_commit_tx,
            settings,
            (self._session_state, self._tx_state, self),
            self._session_state.endpoint,
        )

    def begin(self, settings=None):
        """
        Explicitly begins a transaction

        :param settings: A request settings

        :return: An open transaction
        """
        if self._tx_state.tx_id is not None:
            return self

        self._check_split()

        return self._driver(
            _tx_ctx_impl.begin_request_factory(self._session_state, self._tx_state),
            _apis.TableService.Stub,
            _apis.TableService.BeginTransaction,
            _tx_ctx_impl.wrap_tx_begin_response,
            settings,
            (self._session_state, self._tx_state, self),
            self._session_state.endpoint,
        )

    def _set_finish(self, val):
        # Record how the transaction finished; raises if this transition is
        # not allowed (see _check_split).
        self._check_split(val)
        self._finished = val

    def _check_split(self, allow=""):
        """
        Deny all operations with transaction after commit/rollback.
        Exception: double commit and double rollbacks, because it is safe
        """
        allow_split_transaction = (
            self._allow_split_transactions
            if self._allow_split_transactions is not None
            else _default_allow_split_transaction
        )

        if allow_split_transaction:
            return

        if self._finished != "" and self._finished != allow:
            raise RuntimeError("Any operation with finished transaction is denied")
+
+
class TxContext(BaseTxContext):
    """Transaction context extending BaseTxContext with async_* methods that
    return futures instead of blocking."""

    @_utilities.wrap_async_call_exceptions
    def async_execute(self, query, parameters=None, commit_tx=False, settings=None):
        """
        Sends a query (yql text or an instance of DataQuery) to be executed with parameters.
        Execution with parameters supported only for DataQuery instances and not supported for YQL text.

        :param query: A query: YQL text or DataQuery instance.
        :param parameters: A dictionary with parameters values.
        :param commit_tx: A special flag that allows transaction commit
        :param settings: A request settings (an instance of ExecDataQuerySettings)

        :return: A future of query execution
        """

        self._check_split()

        return self._driver.future(
            _tx_ctx_impl.execute_request_factory(
                self._session_state,
                self._tx_state,
                query,
                parameters,
                commit_tx,
                settings,
            ),
            _apis.TableService.Stub,
            _apis.TableService.ExecuteDataQuery,
            _tx_ctx_impl.wrap_result_and_tx_id,
            settings,
            (
                self._session_state,
                self._tx_state,
                query,
            ),
            self._session_state.endpoint,
        )

    @_utilities.wrap_async_call_exceptions
    def async_commit(self, settings=None):
        """
        Calls commit on a transaction if it is open otherwise is no-op. If transaction execution
        failed then this method raises PreconditionFailed.

        :param settings: A request settings (an instance of BaseRequestSettings)

        :return: A future of commit call
        """
        self._set_finish(self._COMMIT)

        if self._tx_state.tx_id is None and not self._tx_state.dead:
            return _utilities.wrap_result_in_future(self)

        return self._driver.future(
            _tx_ctx_impl.commit_request_factory(self._session_state, self._tx_state),
            _apis.TableService.Stub,
            _apis.TableService.CommitTransaction,
            _tx_ctx_impl.wrap_result_on_rollback_or_commit_tx,
            settings,
            (self._session_state, self._tx_state, self),
            self._session_state.endpoint,
        )

    @_utilities.wrap_async_call_exceptions
    def async_rollback(self, settings=None):
        """
        Calls rollback on a transaction if it is open otherwise is no-op. If transaction execution
        failed then this method raises PreconditionFailed.

        :param settings: A request settings

        :return: A future of rollback call
        """
        self._set_finish(self._ROLLBACK)

        if self._tx_state.tx_id is None and not self._tx_state.dead:
            return _utilities.wrap_result_in_future(self)

        return self._driver.future(
            _tx_ctx_impl.rollback_request_factory(self._session_state, self._tx_state),
            _apis.TableService.Stub,
            _apis.TableService.RollbackTransaction,
            _tx_ctx_impl.wrap_result_on_rollback_or_commit_tx,
            settings,
            (self._session_state, self._tx_state, self),
            self._session_state.endpoint,
        )

    @_utilities.wrap_async_call_exceptions
    def async_begin(self, settings=None):
        """
        Explicitly begins a transaction

        :param settings: A request settings

        :return: A future of begin call
        """
        if self._tx_state.tx_id is not None:
            return _utilities.wrap_result_in_future(self)

        self._check_split()

        return self._driver.future(
            _tx_ctx_impl.begin_request_factory(self._session_state, self._tx_state),
            _apis.TableService.Stub,
            _apis.TableService.BeginTransaction,
            _tx_ctx_impl.wrap_tx_begin_response,
            settings,
            (self._session_state, self._tx_state, self),
            self._session_state.endpoint,
        )
+
+
class SessionPool(object):
    def __init__(
        self,
        driver,
        size=100,
        workers_threads_count=4,
        initializer=None,
        min_pool_size=0,
    ):
        """
        An object that encapsulates session creation, deletion and etc. and maintains
        a pool of active sessions of specified size

        :param driver: A Driver instance
        :param size: A maximum number of sessions to maintain in the pool
        :param workers_threads_count: A number of worker threads serving the pool
        :param initializer: (optional) A callable invoked for each new session
        :param min_pool_size: A minimum number of sessions to keep pre-created
        """
        self._logger = logger.getChild(self.__class__.__name__)
        self._pool_impl = _sp_impl.SessionPoolImpl(
            self._logger,
            driver,
            size,
            workers_threads_count,
            initializer,
            min_pool_size,
        )
        # Reuse the driver's tracer when available; otherwise fall back to a
        # no-op tracer.
        if hasattr(driver, "_driver_config"):
            self.tracer = driver._driver_config.tracer
        else:
            self.tracer = ydb.Tracer(None)

    def retry_operation_sync(self, callee, retry_settings=None, *args, **kwargs):
        """Run *callee* with a checked-out session, retrying it according to
        retry_settings via the module-level retry_operation_sync helper."""

        retry_settings = RetrySettings() if retry_settings is None else retry_settings

        def wrapped_callee():
            with self.checkout(timeout=retry_settings.get_session_client_timeout) as session:
                return callee(session, *args, **kwargs)

        return retry_operation_sync(wrapped_callee, retry_settings)

    @property
    def active_size(self):
        # Number of sessions currently created (busy + free).
        return self._pool_impl.active_size

    @property
    def free_size(self):
        # Number of idle sessions available for checkout.
        return self._pool_impl.free_size

    @property
    def busy_size(self):
        # Number of sessions currently checked out.
        return self._pool_impl.busy_size

    @property
    def max_size(self):
        # Maximum number of sessions the pool may create.
        return self._pool_impl.max_size

    @property
    def waiters_count(self):
        # Number of callers currently waiting for a session.
        return self._pool_impl.waiters_count

    @tracing.with_trace()
    def subscribe(self):
        """Subscribe for a session; returns a waiter (future-like) object."""
        return self._pool_impl.subscribe()

    @tracing.with_trace()
    def unsubscribe(self, waiter):
        """Cancel a subscription previously returned by subscribe()."""
        return self._pool_impl.unsubscribe(waiter)

    @tracing.with_trace()
    def acquire(self, blocking=True, timeout=None):
        """Acquire a session from the pool; see SessionCheckout for the
        context-manager form."""
        return self._pool_impl.acquire(blocking, timeout)

    @tracing.with_trace()
    def release(self, session):
        """Return a previously acquired session to the pool."""
        return self._pool_impl.put(session)

    def async_checkout(self):
        """
        Returns a context manager that asynchronously checkouts a session from the pool.

        """
        return AsyncSessionCheckout(self)

    def checkout(self, blocking=True, timeout=None):
        """Return a context manager that checks out a session from the pool."""
        return SessionCheckout(self, blocking, timeout)

    def stop(self, timeout=None):
        """Stop the pool and release its resources."""
        self._pool_impl.stop(timeout)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Stopping on exit makes the pool usable as a context manager.
        self.stop()
+
+
class AsyncSessionCheckout(object):
    """Context manager that subscribes for a pool session on entry and
    cancels the subscription on exit.

    :param pool: A SessionPool instance.
    """

    __slots__ = ("subscription", "pool")

    def __init__(self, pool):
        self.pool = pool
        self.subscription = None

    def __enter__(self):
        # Enter yields the subscription (a waiter/future), not a session.
        self.subscription = self.pool.subscribe()
        return self.subscription

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.pool.unsubscribe(self.subscription)
+
+
class SessionCheckout(object):
    """Context manager that acquires a session from a pool on entry and
    releases it on exit.

    :param pool: A SessionPool instance
    :param blocking: A flag that specifies whether session acquisition should block
    :param timeout: A timeout in seconds for session acquisition
    """

    __slots__ = ("_acquired", "_pool", "_blocking", "_timeout")

    def __init__(self, pool, blocking, timeout):
        self._pool = pool
        self._acquired = None
        self._blocking = blocking
        self._timeout = timeout

    def __enter__(self):
        self._acquired = self._pool.acquire(self._blocking, self._timeout)
        return self._acquired

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Only release when acquire actually produced a session.
        if self._acquired is not None:
            self._pool.release(self._acquired)
diff --git a/contrib/python/ydb/py3/ydb/topic.py b/contrib/python/ydb/py3/ydb/topic.py
new file mode 100644
index 0000000000..2175af47f7
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/topic.py
@@ -0,0 +1,398 @@
+from __future__ import annotations
+
+__all__ = [
+ "TopicClient",
+ "TopicClientAsyncIO",
+ "TopicClientSettings",
+ "TopicCodec",
+ "TopicConsumer",
+ "TopicDescription",
+ "TopicError",
+ "TopicMeteringMode",
+ "TopicReader",
+ "TopicReaderAsyncIO",
+ "TopicReaderBatch",
+ "TopicReaderMessage",
+ "TopicReaderSelector",
+ "TopicReaderSettings",
+ "TopicReaderPartitionExpiredError",
+ "TopicStatWindow",
+ "TopicWriteResult",
+ "TopicWriter",
+ "TopicWriterAsyncIO",
+ "TopicWriterInitInfo",
+ "TopicWriterMessage",
+ "TopicWriterSettings",
+]
+
+import concurrent.futures
+import datetime
+from dataclasses import dataclass
+from typing import List, Union, Mapping, Optional, Dict, Callable
+
+from . import aio, Credentials, _apis, issues
+
+from . import driver
+
+from ._topic_reader.datatypes import (
+ PublicBatch as TopicReaderBatch,
+ PublicMessage as TopicReaderMessage,
+)
+
+from ._topic_reader.topic_reader import (
+ PublicReaderSettings as TopicReaderSettings,
+ PublicTopicSelector as TopicReaderSelector,
+)
+
+from ._topic_reader.topic_reader_sync import TopicReaderSync as TopicReader
+
+from ._topic_reader.topic_reader_asyncio import (
+ PublicAsyncIOReader as TopicReaderAsyncIO,
+ PublicTopicReaderPartitionExpiredError as TopicReaderPartitionExpiredError,
+)
+
+from ._topic_writer.topic_writer import ( # noqa: F401
+ PublicWriterSettings as TopicWriterSettings,
+ PublicMessage as TopicWriterMessage,
+ RetryPolicy as TopicWriterRetryPolicy,
+ PublicWriterInitInfo as TopicWriterInitInfo,
+ PublicWriteResult as TopicWriteResult,
+)
+
+from ydb._topic_writer.topic_writer_asyncio import WriterAsyncIO as TopicWriterAsyncIO
+from ._topic_writer.topic_writer_sync import WriterSync as TopicWriter
+
+from ._topic_common.common import (
+ wrap_operation as _wrap_operation,
+ create_result_wrapper as _create_result_wrapper,
+)
+
+from ._grpc.grpcwrapper import ydb_topic as _ydb_topic
+from ._grpc.grpcwrapper import ydb_topic_public_types as _ydb_topic_public_types
+from ._grpc.grpcwrapper.ydb_topic_public_types import ( # noqa: F401
+ PublicDescribeTopicResult as TopicDescription,
+ PublicMultipleWindowsStat as TopicStatWindow,
+ PublicPartitionStats as TopicPartitionStats,
+ PublicCodec as TopicCodec,
+ PublicConsumer as TopicConsumer,
+ PublicMeteringMode as TopicMeteringMode,
+)
+
+
class TopicClientAsyncIO:
    """asyncio client for the YDB topic service.

    Provides topic management calls (create/describe/drop) and factories for
    topic readers and writers. Owns a shared thread pool that readers and
    writers use for message encoding/decoding by default.

    NOTE(review): unlike the synchronous TopicClient, the request methods here
    never call _check_closed() before issuing a request - confirm whether that
    is intentional.
    """

    _closed: bool
    _driver: aio.Driver
    _credentials: Union[Credentials, None]  # NOTE(review): declared but never assigned in this class
    _settings: TopicClientSettings
    _executor: concurrent.futures.Executor

    def __init__(self, driver: aio.Driver, settings: Optional[TopicClientSettings] = None):
        """
        :param driver: Active asyncio YDB driver used for all topic requests.
        :param settings: Optional client settings; defaults to TopicClientSettings().
        """
        if not settings:
            settings = TopicClientSettings()
        self._closed = False
        self._driver = driver
        self._settings = settings
        # Shared encode/decode executor, sized by the client settings.
        self._executor = concurrent.futures.ThreadPoolExecutor(
            max_workers=settings.encode_decode_threads_count,
            thread_name_prefix="topic_asyncio_executor",
        )

    def __del__(self):
        # Best-effort cleanup; close() is idempotent.
        self.close()

    async def create_topic(
        self,
        path: str,
        min_active_partitions: Optional[int] = None,
        partition_count_limit: Optional[int] = None,
        retention_period: Optional[datetime.timedelta] = None,
        retention_storage_mb: Optional[int] = None,
        supported_codecs: Optional[List[Union[TopicCodec, int]]] = None,
        partition_write_speed_bytes_per_second: Optional[int] = None,
        partition_write_burst_bytes: Optional[int] = None,
        attributes: Optional[Dict[str, str]] = None,
        consumers: Optional[List[Union[TopicConsumer, str]]] = None,
        metering_mode: Optional[TopicMeteringMode] = None,
    ):
        """
        create topic command

        :param path: full path to topic
        :param min_active_partitions: Minimum partition count auto merge would stop working at.
        :param partition_count_limit: Limit for total partition count, including active (open for write)
            and read-only partitions.
        :param retention_period: How long data in partition should be stored
        :param retention_storage_mb: How much data in partition should be stored
        :param supported_codecs: List of allowed codecs for writers. Writes with codec not from this list are forbidden.
            Empty list mean disable codec compatibility checks for the topic.
        :param partition_write_speed_bytes_per_second: Partition write speed in bytes per second
        :param partition_write_burst_bytes: Burst size for write in partition, in bytes
        :param attributes: User and server attributes of topic.
            Server attributes starts from "_" and will be validated by server.
        :param consumers: List of consumers for this topic
        :param metering_mode: Metering mode for the topic in a serverless database
        """
        # Snapshot the call arguments by name; must run before any new local
        # variables are created, because everything in locals() is forwarded.
        args = locals().copy()
        del args["self"]
        req = _ydb_topic_public_types.CreateTopicRequestParams(**args)
        req = _ydb_topic.CreateTopicRequest.from_public(req)
        await self._driver(
            req.to_proto(),
            _apis.TopicService.Stub,
            _apis.TopicService.CreateTopic,
            _wrap_operation,
        )

    async def describe_topic(self, path: str, include_stats: bool = False) -> TopicDescription:
        """Fetch the topic description (optionally with partition statistics).

        :param path: full path to topic
        :param include_stats: whether to request per-partition statistics
        :return: public TopicDescription built from the service response
        """
        # locals() snapshot must precede any additional locals (see create_topic).
        args = locals().copy()
        del args["self"]
        req = _ydb_topic_public_types.DescribeTopicRequestParams(**args)
        res = await self._driver(
            req.to_proto(),
            _apis.TopicService.Stub,
            _apis.TopicService.DescribeTopic,
            _create_result_wrapper(_ydb_topic.DescribeTopicResult),
        )  # type: _ydb_topic.DescribeTopicResult
        return res.to_public()

    async def drop_topic(self, path: str):
        """Delete the topic at *path*."""
        req = _ydb_topic_public_types.DropTopicRequestParams(path=path)
        await self._driver(
            req.to_proto(),
            _apis.TopicService.Stub,
            _apis.TopicService.DropTopic,
            _wrap_operation,
        )

    def reader(
        self,
        topic: Union[str, TopicReaderSelector, List[Union[str, TopicReaderSelector]]],
        consumer: str,
        buffer_size_bytes: int = 50 * 1024 * 1024,
        # decoders: map[codec_code] func(encoded_bytes)->decoded_bytes
        # the func will be called from multiply threads in parallel
        decoders: Union[Mapping[int, Callable[[bytes], bytes]], None] = None,
        # custom decoder executor for call builtin and custom decoders. If None - use shared executor pool.
        # if max_worker in the executor is 1 - then decoders will be called from the thread without parallel
        decoder_executor: Optional[concurrent.futures.Executor] = None,
    ) -> TopicReaderAsyncIO:
        """Create an asyncio topic reader bound to this client's driver.

        The decoder executor defaults to the client's shared thread pool.
        """
        if not decoder_executor:
            decoder_executor = self._executor

        # Snapshot arguments after the decoder_executor default is resolved;
        # locals() must contain only the parameter names at this point.
        args = locals().copy()
        del args["self"]

        settings = TopicReaderSettings(**args)

        return TopicReaderAsyncIO(self._driver, settings, _parent=self)

    def writer(
        self,
        topic,
        *,
        producer_id: Optional[str] = None,  # default - random
        session_metadata: Optional[Mapping[str, str]] = None,
        partition_id: Union[int, None] = None,
        auto_seqno: bool = True,
        auto_created_at: bool = True,
        codec: Optional[TopicCodec] = None,  # default mean auto-select
        # encoders: map[codec_code] func(encoded_bytes)->decoded_bytes
        # the func will be called from multiply threads in parallel.
        encoders: Optional[Mapping[_ydb_topic_public_types.PublicCodec, Callable[[bytes], bytes]]] = None,
        # custom encoder executor for call builtin and custom decoders. If None - use shared executor pool.
        # If max_worker in the executor is 1 - then encoders will be called from the thread without parallel.
        encoder_executor: Optional[concurrent.futures.Executor] = None,
    ) -> TopicWriterAsyncIO:
        """Create an asyncio topic writer bound to this client's driver.

        The encoder executor defaults to the client's shared thread pool.
        """
        # Snapshot arguments before any extra locals appear.
        args = locals().copy()
        del args["self"]

        settings = TopicWriterSettings(**args)

        if not settings.encoder_executor:
            settings.encoder_executor = self._executor

        return TopicWriterAsyncIO(self._driver, settings, _client=self)

    def close(self):
        """Mark the client closed and shut down the shared executor.

        Idempotent; shutdown(wait=False) does not wait for in-flight tasks.
        """
        if self._closed:
            return

        self._closed = True
        self._executor.shutdown(wait=False)

    def _check_closed(self):
        """Raise RuntimeError if the client has already been closed."""
        if not self._closed:
            return

        raise RuntimeError("Topic client closed")
+
+
class TopicClient:
    """Synchronous client for the YDB topic service.

    Provides topic management calls (create/describe/drop) and factories for
    topic readers and writers. Owns a shared thread pool that readers and
    writers use for message encoding/decoding by default.
    """

    _closed: bool
    _driver: driver.Driver
    _credentials: Union[Credentials, None]  # NOTE(review): declared but never assigned in this class
    _settings: TopicClientSettings
    _executor: concurrent.futures.Executor

    def __init__(self, driver: driver.Driver, settings: Optional[TopicClientSettings] = None):
        """
        :param driver: Active YDB driver used for all topic requests.
        :param settings: Optional client settings; defaults to TopicClientSettings().
            The default value was added for consistency with TopicClientAsyncIO,
            which already allowed omitting the argument.
        """
        if not settings:
            settings = TopicClientSettings()

        self._closed = False
        self._driver = driver
        self._settings = settings
        # Shared encode/decode executor, sized by the client settings.
        # NOTE(review): the thread name prefix is copied from the asyncio
        # client - presumably a copy-paste; kept as-is to preserve behavior.
        self._executor = concurrent.futures.ThreadPoolExecutor(
            max_workers=settings.encode_decode_threads_count,
            thread_name_prefix="topic_asyncio_executor",
        )

    def __del__(self):
        # Best-effort cleanup; close() is idempotent.
        self.close()

    def create_topic(
        self,
        path: str,
        min_active_partitions: Optional[int] = None,
        partition_count_limit: Optional[int] = None,
        retention_period: Optional[datetime.timedelta] = None,
        retention_storage_mb: Optional[int] = None,
        supported_codecs: Optional[List[Union[TopicCodec, int]]] = None,
        partition_write_speed_bytes_per_second: Optional[int] = None,
        partition_write_burst_bytes: Optional[int] = None,
        attributes: Optional[Dict[str, str]] = None,
        consumers: Optional[List[Union[TopicConsumer, str]]] = None,
        metering_mode: Optional[TopicMeteringMode] = None,
    ):
        """
        create topic command

        :param path: full path to topic
        :param min_active_partitions: Minimum partition count auto merge would stop working at.
        :param partition_count_limit: Limit for total partition count, including active (open for write)
            and read-only partitions.
        :param retention_period: How long data in partition should be stored
        :param retention_storage_mb: How much data in partition should be stored
        :param supported_codecs: List of allowed codecs for writers. Writes with codec not from this list are forbidden.
            Empty list mean disable codec compatibility checks for the topic.
        :param partition_write_speed_bytes_per_second: Partition write speed in bytes per second
        :param partition_write_burst_bytes: Burst size for write in partition, in bytes
        :param attributes: User and server attributes of topic.
            Server attributes starts from "_" and will be validated by server.
        :param consumers: List of consumers for this topic
        :param metering_mode: Metering mode for the topic in a serverless database
        """
        # Snapshot the call arguments by name; must run before any new local
        # variables are created, because everything in locals() is forwarded.
        args = locals().copy()
        del args["self"]
        self._check_closed()

        req = _ydb_topic_public_types.CreateTopicRequestParams(**args)
        req = _ydb_topic.CreateTopicRequest.from_public(req)
        self._driver(
            req.to_proto(),
            _apis.TopicService.Stub,
            _apis.TopicService.CreateTopic,
            _wrap_operation,
        )

    def describe_topic(self, path: str, include_stats: bool = False) -> TopicDescription:
        """Fetch the topic description (optionally with partition statistics).

        :param path: full path to topic
        :param include_stats: whether to request per-partition statistics
        :return: public TopicDescription built from the service response
        """
        # locals() snapshot must precede any additional locals (see create_topic).
        args = locals().copy()
        del args["self"]
        self._check_closed()

        req = _ydb_topic_public_types.DescribeTopicRequestParams(**args)
        res = self._driver(
            req.to_proto(),
            _apis.TopicService.Stub,
            _apis.TopicService.DescribeTopic,
            _create_result_wrapper(_ydb_topic.DescribeTopicResult),
        )  # type: _ydb_topic.DescribeTopicResult
        return res.to_public()

    def drop_topic(self, path: str):
        """Delete the topic at *path*."""
        self._check_closed()

        req = _ydb_topic_public_types.DropTopicRequestParams(path=path)
        self._driver(
            req.to_proto(),
            _apis.TopicService.Stub,
            _apis.TopicService.DropTopic,
            _wrap_operation,
        )

    def reader(
        self,
        topic: Union[str, TopicReaderSelector, List[Union[str, TopicReaderSelector]]],
        consumer: str,
        buffer_size_bytes: int = 50 * 1024 * 1024,
        # decoders: map[codec_code] func(encoded_bytes)->decoded_bytes
        # the func will be called from multiply threads in parallel
        decoders: Union[Mapping[int, Callable[[bytes], bytes]], None] = None,
        # custom decoder executor for call builtin and custom decoders. If None - use shared executor pool.
        # if max_worker in the executor is 1 - then decoders will be called from the thread without parallel
        decoder_executor: Optional[concurrent.futures.Executor] = None,  # default shared client executor pool
    ) -> TopicReader:
        """Create a synchronous topic reader bound to this client's driver.

        The decoder executor defaults to the client's shared thread pool.
        """
        if not decoder_executor:
            decoder_executor = self._executor

        # Snapshot arguments after the decoder_executor default is resolved;
        # locals() must contain only the parameter names at this point.
        args = locals().copy()
        del args["self"]
        self._check_closed()

        settings = TopicReaderSettings(**args)

        return TopicReader(self._driver, settings, _parent=self)

    def writer(
        self,
        topic,
        *,
        producer_id: Optional[str] = None,  # default - random
        session_metadata: Optional[Mapping[str, str]] = None,
        partition_id: Union[int, None] = None,
        auto_seqno: bool = True,
        auto_created_at: bool = True,
        codec: Optional[TopicCodec] = None,  # default mean auto-select
        # encoders: map[codec_code] func(encoded_bytes)->decoded_bytes
        # the func will be called from multiply threads in parallel.
        encoders: Optional[Mapping[_ydb_topic_public_types.PublicCodec, Callable[[bytes], bytes]]] = None,
        # custom encoder executor for call builtin and custom decoders. If None - use shared executor pool.
        # If max_worker in the executor is 1 - then encoders will be called from the thread without parallel.
        encoder_executor: Optional[concurrent.futures.Executor] = None,  # default shared client executor pool
    ) -> TopicWriter:
        """Create a synchronous topic writer bound to this client's driver.

        The encoder executor defaults to the client's shared thread pool.
        """
        # Snapshot arguments before any extra locals appear.
        args = locals().copy()
        del args["self"]
        self._check_closed()

        settings = TopicWriterSettings(**args)

        if not settings.encoder_executor:
            settings.encoder_executor = self._executor

        return TopicWriter(self._driver, settings, _parent=self)

    def close(self):
        """Mark the client closed and shut down the shared executor.

        Idempotent; shutdown(wait=False) does not wait for in-flight tasks.
        """
        if self._closed:
            return

        self._closed = True
        self._executor.shutdown(wait=False)

    def _check_closed(self):
        """Raise RuntimeError if the client has already been closed."""
        if not self._closed:
            return

        raise RuntimeError("Topic client closed")
+
+
@dataclass
class TopicClientSettings:
    """Settings shared by TopicClient and TopicClientAsyncIO.

    ATTENTION: when encode_decode_threads_count is set, every custom
    encoder/decoder used by topic readers/writers MUST be thread-safe,
    because they will be invoked from parallel threads.
    """

    # Size of the shared thread pool used for message encode/decode work.
    encode_decode_threads_count: int = 1
+
+
class TopicError(issues.Error):
    """Base error type raised by topic-service operations."""
diff --git a/contrib/python/ydb/py3/ydb/tracing.py b/contrib/python/ydb/py3/ydb/tracing.py
new file mode 100644
index 0000000000..fcc780d6b5
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/tracing.py
@@ -0,0 +1,184 @@
+from enum import IntEnum
+import functools
+
+
class TraceLevel(IntEnum):
    """Integer trace levels used to filter which span tags get recorded."""

    DEBUG = 0
    INFO = 1
    ERROR = 2
    NONE = 3  # highest value; used as the default verbosity threshold
+
+
class _TracingCtx:
    """Span context manager produced by Tracer.trace()."""

    def __init__(self, tracer, span_name):
        self._tracer = tracer
        self._span_name = span_name
        self._scope = None
        self._enabled = tracer._open_tracer is not None

    def __enter__(self):
        """
        Creates new span
        :return: self
        """
        if not self._enabled:
            return self
        scope = self._tracer._open_tracer.start_active_span(self._span_name)
        self._scope = scope
        # Stash this ctx on the span so the module-level trace() can find it.
        scope.span.set_baggage_item("ctx", self)
        self.trace(self._tracer._pre_tags)
        return self

    @property
    def enabled(self):
        """
        :return: Is tracing enabled
        """
        return self._enabled

    def trace(self, tags, trace_level=TraceLevel.INFO):
        """
        Add tags to current span

        :param ydb.TraceLevel trace_level: level of tracing
        :param dict tags: Dict of tags
        """
        # Verbosity filter first, then the enabled/open-scope check,
        # matching the original evaluation order.
        if self._tracer._verbose_level < trace_level:
            return
        if not self.enabled or self._scope is None:
            return
        for tag_name, tag_value in tags.items():
            self._scope.span.set_tag(tag_name, tag_value)

    def __exit__(self, exc_type, exc_val, exc_tb):
        if not self.enabled:
            return
        if exc_val:
            self.trace(self._tracer._post_tags_err, trace_level=TraceLevel.ERROR)
            self._tracer._on_err(self, exc_type, exc_val, exc_tb)
        else:
            self.trace(self._tracer._post_tags_ok)
        self._scope.close()
        self._scope = None
+
+
def with_trace(span_name=None):
    """Decorator that runs the wrapped method inside a tracing span.

    :param span_name: Explicit span name; when None, defaults to
        "ClassName.method_name" of the decorated method.
    """

    def decorator(func):
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            if span_name is None:
                name = "%s.%s" % (self.__class__.__name__, func.__name__)
            else:
                name = span_name
            with self.tracer.trace(name):
                return func(self, *args, **kwargs)

        return wrapper

    return decorator
+
+
def trace(tracer, tags, trace_level=TraceLevel.INFO):
    """Attach *tags* to the currently active span of *tracer*, if any.

    :param tracer: A Tracer instance
    :param dict tags: tags to attach
    :param trace_level: level of the tags
    :return: False when tracing is enabled but no traced context is active;
        None otherwise (matching the original implicit returns)
    """
    if not tracer.enabled:
        return None
    active_scope = tracer._open_tracer.scope_manager.active
    if not active_scope:
        return False
    ctx = active_scope.span.get_baggage_item("ctx")
    if ctx is None:
        return False
    return ctx.trace(tags, trace_level)
+
+
class Tracer:
    """Request tracer built on top of an opentracing-compatible tracer."""

    def __init__(self, tracer):
        """
        :param opentracing.Tracer tracer: opentracing.Tracer implementation;
            when None, tracing is disabled
        """
        self._open_tracer = tracer
        self._pre_tags = {}
        self._post_tags_ok = {}
        self._post_tags_err = {}
        # Default error callback is a no-op.
        self._on_err = lambda *args, **kwargs: None
        self._verbose_level = TraceLevel.NONE

    @property
    def enabled(self):
        """Whether an underlying opentracing tracer is configured."""
        return self._open_tracer is not None

    def trace(self, span_name):
        """Create a tracing context for a new span.

        :param str span_name: name of the span

        :return: A tracing context
        :rtype: _TracingCtx
        """
        return _TracingCtx(self, span_name)

    def with_pre_tags(self, tags):
        """Set tags added to every span immediately after creation.

        :param dict tags: tags dict
        :return: self
        """
        self._pre_tags = tags
        return self

    def with_post_tags(self, ok_tags, err_tags):
        """Set tags added right before span close.

        :param ok_tags: tags added when no exception was raised
        :param err_tags: tags added when an exception escaped
        :return: self
        """
        self._post_tags_ok = ok_tags
        self._post_tags_err = err_tags
        return self

    def with_on_error_callback(self, callee):
        """Set a callback invoked when a span exits with an exception.

        :param callable[_TracingCtx, exc_type, exc_val, exc_tb] callee:
        :return: self
        """
        self._on_err = callee
        return self

    def with_verbose_level(self, level):
        """Set the verbosity threshold used to filter tags; returns self."""
        self._verbose_level = level
        return self

    @classmethod
    def default(cls, tracer):
        """Build a tracer with the library's default tag/callback setup.

        :param tracer: opentracing.Tracer implementation or None
        :return: new tracer
        """
        result = cls(tracer)
        result.with_pre_tags({"started": True})
        result.with_post_tags({"ok": True}, {"ok": False})
        result.with_on_error_callback(_default_on_error_callback)
        result.with_verbose_level(TraceLevel.INFO)
        return result
+
+
def _default_on_error_callback(ctx, exc_type, exc_val, exc_tb):
    """Default Tracer error hook: record exception details as span tags."""
    error_tags = {
        "error.type": exc_type.__name__,
        "error.value": exc_val,
        "error.traceback": exc_tb,
    }
    ctx.trace(error_tags, trace_level=TraceLevel.ERROR)
diff --git a/contrib/python/ydb/py3/ydb/types.py b/contrib/python/ydb/py3/ydb/types.py
new file mode 100644
index 0000000000..cf13aac0a2
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/types.py
@@ -0,0 +1,438 @@
+# -*- coding: utf-8 -*-
+from __future__ import annotations
+
+import abc
+import enum
+import json
+from . import _utilities, _apis
+from datetime import date, datetime, timedelta
+import typing
+import uuid
+import struct
+from google.protobuf import struct_pb2
+
+from . import table
+
+
+# Workaround for good IDE and universal for runtime
+if typing.TYPE_CHECKING:
+ from ._grpc.v4.protos import ydb_value_pb2
+else:
+ from ._grpc.common.protos import ydb_value_pb2
+
+
# Number of seconds in one day; used by the timedelta -> microseconds conversion.
_SECONDS_IN_DAY = 60 * 60 * 24
# Unix epoch as a naive datetime; Date/Timestamp wire values are offsets from it.
_EPOCH = datetime(1970, 1, 1)
+
+
+def _from_date(x: ydb_value_pb2.Value, table_client_settings: table.TableClientSettings) -> typing.Union[date, int]:
+ if table_client_settings is not None and table_client_settings._native_date_in_result_sets:
+ return _EPOCH.date() + timedelta(days=x.uint32_value)
+ return x.uint32_value
+
+
+def _to_date(pb: ydb_value_pb2.Value, value: typing.Union[date, int]) -> None:
+ if isinstance(value, date):
+ pb.uint32_value = (value - _EPOCH.date()).days
+ else:
+ pb.uint32_value = value
+
+
+def _from_datetime_number(
+ x: typing.Union[float, datetime], table_client_settings: table.TableClientSettings
+) -> datetime:
+ if table_client_settings is not None and table_client_settings._native_datetime_in_result_sets:
+ return datetime.utcfromtimestamp(x)
+ return x
+
+
+def _from_json(x: typing.Union[str, bytearray, bytes], table_client_settings: table.TableClientSettings):
+ if table_client_settings is not None and table_client_settings._native_json_in_result_sets:
+ return json.loads(x)
+ return x
+
+
+def _to_uuid(value_pb: ydb_value_pb2.Value, table_client_settings: table.TableClientSettings) -> uuid.UUID:
+ return uuid.UUID(bytes_le=struct.pack("QQ", value_pb.low_128, value_pb.high_128))
+
+
+def _from_uuid(pb: ydb_value_pb2.Value, value: uuid.UUID):
+ pb.low_128 = struct.unpack("Q", value.bytes_le[0:8])[0]
+ pb.high_128 = struct.unpack("Q", value.bytes_le[8:16])[0]
+
+
+def _from_interval(
+ value_pb: ydb_value_pb2.Value, table_client_settings: table.TableClientSettings
+) -> typing.Union[timedelta, int]:
+ if table_client_settings is not None and table_client_settings._native_interval_in_result_sets:
+ return timedelta(microseconds=value_pb.int64_value)
+ return value_pb.int64_value
+
+
+def _timedelta_to_microseconds(value: timedelta) -> int:
+ return (value.days * _SECONDS_IN_DAY + value.seconds) * 1000000 + value.microseconds
+
+
+def _to_interval(pb: ydb_value_pb2.Value, value: typing.Union[timedelta, int]):
+ if isinstance(value, timedelta):
+ pb.int64_value = _timedelta_to_microseconds(value)
+ else:
+ pb.int64_value = value
+
+
+def _from_timestamp(
+ value_pb: ydb_value_pb2.Value, table_client_settings: table.TableClientSettings
+) -> typing.Union[datetime, int]:
+ if table_client_settings is not None and table_client_settings._native_timestamp_in_result_sets:
+ return _EPOCH + timedelta(microseconds=value_pb.uint64_value)
+ return value_pb.uint64_value
+
+
+def _to_timestamp(pb: ydb_value_pb2.Value, value: typing.Union[datetime, int]):
+ if isinstance(value, datetime):
+ pb.uint64_value = _timedelta_to_microseconds(value - _EPOCH)
+ else:
+ pb.uint64_value = value
+
+
@enum.unique
class PrimitiveType(enum.Enum):
    """
    Enumerates all available primitive types that can be used
    in computations.

    Each member's value is a tuple of up to four items:
      * the wire type id from _apis.primitive_types;
      * the name of the field on ydb_value_pb2.Value holding the data, or
        None when the converters operate on the whole Value message;
      * optionally a proto -> Python converter (to_obj);
      * optionally a Python -> proto converter (from_obj).

    NOTE(review): the converter helpers' names do not match their direction
    consistently (_to_uuid converts proto -> object, _to_date converts
    object -> proto); the tuple position, not the name, is authoritative.
    """

    Int32 = _apis.primitive_types.INT32, "int32_value"
    Uint32 = _apis.primitive_types.UINT32, "uint32_value"
    Int64 = _apis.primitive_types.INT64, "int64_value"
    Uint64 = _apis.primitive_types.UINT64, "uint64_value"
    Int8 = _apis.primitive_types.INT8, "int32_value"
    Uint8 = _apis.primitive_types.UINT8, "uint32_value"
    Int16 = _apis.primitive_types.INT16, "int32_value"
    Uint16 = _apis.primitive_types.UINT16, "uint32_value"
    Bool = _apis.primitive_types.BOOL, "bool_value"
    Double = _apis.primitive_types.DOUBLE, "double_value"
    Float = _apis.primitive_types.FLOAT, "float_value"

    String = _apis.primitive_types.STRING, "bytes_value"
    Utf8 = _apis.primitive_types.UTF8, "text_value"

    Yson = _apis.primitive_types.YSON, "bytes_value"
    Json = _apis.primitive_types.JSON, "text_value", _from_json
    JsonDocument = _apis.primitive_types.JSON_DOCUMENT, "text_value", _from_json
    UUID = (_apis.primitive_types.UUID, None, _to_uuid, _from_uuid)
    Date = (
        _apis.primitive_types.DATE,
        None,
        _from_date,
        _to_date,
    )
    Datetime = (
        _apis.primitive_types.DATETIME,
        "uint32_value",
        _from_datetime_number,
    )
    Timestamp = (
        _apis.primitive_types.TIMESTAMP,
        None,
        _from_timestamp,
        _to_timestamp,
    )
    Interval = (
        _apis.primitive_types.INTERVAL,
        None,
        _from_interval,
        _to_interval,
    )

    DyNumber = _apis.primitive_types.DYNUMBER, "text_value"

    def __init__(
        self, idn: ydb_value_pb2.Type.PrimitiveTypeId, proto_field: typing.Optional[str], to_obj=None, from_obj=None
    ):
        # Called once per member with the member's value tuple unpacked.
        self._idn_ = idn
        self._to_obj = to_obj
        self._from_obj = from_obj
        self._proto_field = proto_field

    def get_value(self, value_pb: ydb_value_pb2.Value, table_client_settings: table.TableClientSettings):
        """
        Extracts value from protocol buffer
        :param value_pb: A protocol buffer
        :return: A valid value of primitive type
        """
        # Converter over a single proto field (e.g. Json reads text_value).
        if self._to_obj is not None and self._proto_field:
            return self._to_obj(getattr(value_pb, self._proto_field), table_client_settings)

        # Converter over the whole Value message (e.g. UUID, Timestamp).
        if self._to_obj is not None:
            return self._to_obj(value_pb, table_client_settings)

        # Plain field read, no conversion.
        return getattr(value_pb, self._proto_field)

    def set_value(self, pb: ydb_value_pb2.Value, value):
        """
        Sets value in a protocol buffer
        :param pb: A protocol buffer
        :param value: A valid value to set
        :return: None
        """
        # from_obj converters always receive the whole Value message.
        if self._from_obj:
            self._from_obj(pb, value)
        else:
            setattr(pb, self._proto_field, value)

    def __str__(self):
        return self._name_

    @property
    def proto(self):
        """
        Returns protocol buffer representation of a primitive type
        :return: A protocol buffer representation
        """
        return _apis.ydb_value.Type(type_id=self._idn_)
+
+
class DataQuery(object):
    """A prepared data query: YQL text plus its declared parameter types."""

    __slots__ = ("yql_text", "parameters_types", "name")

    def __init__(
        self, query_id: str, parameters_types: "dict[str, ydb_value_pb2.Type]", name: typing.Optional[str] = None
    ):
        """
        :param query_id: YQL text of the query
        :param parameters_types: mapping of parameter name to its proto type
        :param name: optional query name; defaults to a hash of the YQL text
        """
        self.yql_text = query_id
        self.parameters_types = parameters_types
        if name is None:
            # Derive a stable name from the query text itself.
            self.name = _utilities.get_query_hash(self.yql_text)
        else:
            self.name = name
+
+
#######################
# A deprecated alias  #
#######################
# Kept for backward compatibility with older code that imported DataType.
DataType = PrimitiveType
+
+
class AbstractTypeBuilder(object, metaclass=abc.ABCMeta):
    """Common interface implemented by the composite YDB type builders.

    The original code set ``__metaclass__ = abc.ABCMeta``, which is Python 2
    syntax and is silently ignored by Python 3, so ``proto`` was never
    actually enforced as abstract. Declaring the metaclass properly restores
    the intended contract; every concrete subclass already implements
    ``proto``. Deliberately NOT derived from ``abc.ABC`` (which declares
    ``__slots__ = ()``): subclasses rely on instances having a ``__dict__``.
    """

    @property
    @abc.abstractmethod
    def proto(self):
        """
        Returns protocol buffer representation of a type
        :return: A protocol buffer representation
        """
+
+
class DecimalType(AbstractTypeBuilder):
    """Builder for the YDB Decimal(precision, scale) type."""

    __slots__ = ("_proto", "_precision", "_scale")

    def __init__(self, precision=22, scale=9):
        """
        :param precision: A precision value (total number of decimal digits)
        :param scale: A scale value (digits after the decimal point)
        """
        self._precision = precision
        self._scale = scale
        decimal_pb = _apis.ydb_value.DecimalType(precision=precision, scale=scale)
        self._proto = _apis.ydb_value.Type()
        self._proto.decimal_type.MergeFrom(decimal_pb)

    @property
    def precision(self):
        return self._precision

    @property
    def scale(self):
        return self._scale

    @property
    def proto(self):
        """Protocol buffer representation of the type."""
        return self._proto

    def __eq__(self, other):
        # Compares via the public properties, so any object exposing
        # ``precision`` and ``scale`` can participate in the comparison.
        return self._precision == other.precision and self._scale == other.scale

    def __str__(self):
        return "Decimal(%d,%d)" % (self._precision, self._scale)
+
+
class NullType(AbstractTypeBuilder):
    """Builder for the YDB Null type."""

    # NOTE(review): "_repr" is declared but never assigned by this class.
    __slots__ = ("_repr", "_proto")

    def __init__(self):
        self._proto = _apis.ydb_value.Type(null_type=struct_pb2.NULL_VALUE)

    @property
    def proto(self):
        """Protocol buffer representation of the type."""
        return self._proto

    def __str__(self):
        return "NullType"
+
+
class OptionalType(AbstractTypeBuilder):
    """Builder for an optional (nullable) wrapper around an inner type."""

    __slots__ = ("_repr", "_proto", "_item")

    def __init__(self, optional_type: typing.Union[AbstractTypeBuilder, PrimitiveType]):
        """
        :param optional_type: An instance of the inner (wrapped) type
        """
        self._item = optional_type
        self._repr = "%s?" % str(optional_type)
        self._proto = _apis.ydb_value.Type()
        self._proto.optional_type.MergeFrom(_apis.ydb_value.OptionalType(item=optional_type.proto))

    @property
    def item(self):
        """The wrapped item type builder."""
        return self._item

    @property
    def proto(self):
        """Protocol buffer representation of the type."""
        return self._proto

    def __eq__(self, other):
        return self._item == other.item

    def __str__(self):
        return self._repr
+
+
class ListType(AbstractTypeBuilder):
    """Builder for a YDB List<item> type."""

    __slots__ = ("_repr", "_proto")

    def __init__(self, list_type: typing.Union[AbstractTypeBuilder, PrimitiveType]):
        """
        :param list_type: List item type builder
        """
        self._repr = "List<%s>" % str(list_type)
        self._proto = _apis.ydb_value.Type(list_type=_apis.ydb_value.ListType(item=list_type.proto))

    @property
    def proto(self):
        """Protocol buffer representation of the type."""
        return self._proto

    def __str__(self):
        return self._repr
+
+
class DictType(AbstractTypeBuilder):
    """Builder for the YDB Dict<key, payload> type."""

    # Fixed: the original declared __slots__ = ("__repr", "__proto"); those
    # names are mangled to _DictType__repr/_DictType__proto and never matched
    # the "_repr"/"_proto" attributes assigned in __init__ (which silently
    # landed in the inherited __dict__, defeating the slots). The slot names
    # now match the attributes actually used.
    __slots__ = ("_repr", "_proto")

    def __init__(
        self,
        key_type: typing.Union[AbstractTypeBuilder, PrimitiveType],
        payload_type: typing.Union[AbstractTypeBuilder, PrimitiveType],
    ):
        """
        :param key_type: Key type builder
        :param payload_type: Payload type builder
        """
        self._repr = "Dict<%s,%s>" % (str(key_type), str(payload_type))
        self._proto = _apis.ydb_value.Type(
            dict_type=_apis.ydb_value.DictType(
                key=key_type.proto,
                payload=payload_type.proto,
            )
        )

    @property
    def proto(self):
        """Protocol buffer representation of the type."""
        return self._proto

    def __str__(self):
        return self._repr
+
+
class TupleType(AbstractTypeBuilder):
    """Builder for a YDB Tuple type; elements are appended one at a time."""

    __slots__ = ("__elements_repr", "__proto")

    def __init__(self):
        self.__elements_repr = []
        self.__proto = _apis.ydb_value.Type(tuple_type=_apis.ydb_value.TupleType())

    def add_element(self, element_type: typing.Union[AbstractTypeBuilder, PrimitiveType]):
        """Append one element type to the tuple.

        :param element_type: Builder/type of the additional element
        :return: self, to allow chaining
        """
        self.__elements_repr.append(str(element_type))
        new_element = self.__proto.tuple_type.elements.add()
        new_element.MergeFrom(element_type.proto)
        return self

    @property
    def proto(self):
        """Protocol buffer representation of the type."""
        return self.__proto

    def __str__(self):
        return "Tuple<%s>" % ",".join(self.__elements_repr)
+
+
class StructType(AbstractTypeBuilder):
    """Builder for a YDB Struct type; members are added one at a time."""

    __slots__ = ("__members_repr", "__proto")

    def __init__(self):
        self.__members_repr = []
        self.__proto = _apis.ydb_value.Type(struct_type=_apis.ydb_value.StructType())

    def add_member(self, name: str, member_type: typing.Union[AbstractTypeBuilder, PrimitiveType]):
        """Add one named member to the struct.

        :param name: Member name
        :param member_type: Builder/type of the member
        :return: self, to allow chaining
        """
        self.__members_repr.append("%s:%s" % (name, str(member_type)))
        new_member = self.__proto.struct_type.members.add()
        new_member.name = name
        new_member.type.MergeFrom(member_type.proto)
        return self

    @property
    def proto(self):
        """Protocol buffer representation of the type."""
        return self.__proto

    def __str__(self):
        return "Struct<%s>" % ",".join(self.__members_repr)
+
+
class BulkUpsertColumns(AbstractTypeBuilder):
    """Builder for the column-struct type used by bulk upsert requests."""

    __slots__ = ("__columns_repr", "__proto")

    def __init__(self):
        self.__columns_repr = []
        self.__proto = _apis.ydb_value.Type(struct_type=_apis.ydb_value.StructType())

    def add_column(self, name: str, column_type: typing.Union[AbstractTypeBuilder, PrimitiveType]):
        """Register one column.

        :param name: A column name
        :param column_type: A column type
        :return: self, to allow chaining
        """
        self.__columns_repr.append("%s:%s" % (name, column_type))
        new_column = self.__proto.struct_type.members.add()
        new_column.name = name
        new_column.type.MergeFrom(column_type.proto)
        return self

    @property
    def proto(self):
        """Protocol buffer representation of the type."""
        return self.__proto

    def __str__(self):
        return "BulkUpsertColumns<%s>" % ",".join(self.__columns_repr)
diff --git a/contrib/python/ydb/py3/ydb/ydb_version.py b/contrib/python/ydb/py3/ydb/ydb_version.py
new file mode 100644
index 0000000000..709cb0a9e5
--- /dev/null
+++ b/contrib/python/ydb/py3/ydb/ydb_version.py
@@ -0,0 +1 @@
# Version string of the bundled ydb Python SDK.
VERSION = "3.7.0"