author     Aleksandr <ivansduck@gmail.com>  2022-02-10 16:47:52 +0300
committer  Daniil Cherednik <dcherednik@yandex-team.ru>  2022-02-10 16:47:52 +0300
commit     b05913d1c3c02a773578bceb7285084d2933ae86 (patch)
tree       c0748b5dcbade83af788c0abfa89c0383d6b779c
parent     ea6c5b7f172becca389cacaff7d5f45f6adccbe6 (diff)
download   ydb-b05913d1c3c02a773578bceb7285084d2933ae86.tar.gz
Restoring authorship annotation for Aleksandr <ivansduck@gmail.com>. Commit 2 of 2.
-rw-r--r--  build/config/tests/clang_tidy/config.yaml  2
-rw-r--r--  build/config/tests/clang_tidy/ya.make  8
-rw-r--r--  build/config/tests/flake8/flake8.conf  18
-rw-r--r--  build/config/tests/flake8/ya.make  8
-rw-r--r--  build/config/tests/ya.make  6
-rw-r--r--  build/config/ya.make  6
-rw-r--r--  build/platform/python/python27/ya.make  30
-rw-r--r--  build/platform/python/python34/ya.make  30
-rw-r--r--  build/platform/python/python35/ya.make  30
-rw-r--r--  build/platform/python/python36/ya.make  30
-rw-r--r--  build/platform/python/python37/ya.make  30
-rw-r--r--  build/platform/python/python38/ya.make  30
-rw-r--r--  build/platform/python/resources.inc  4
-rw-r--r--  build/platform/python/tests/test_common.py  36
-rw-r--r--  build/platform/python/tests/testlib.py  36
-rw-r--r--  build/platform/python/tests/ya.make  68
-rw-r--r--  build/platform/python/ya.make  20
-rw-r--r--  build/platform/test_tool/a.yaml  28
-rw-r--r--  build/platform/test_tool/ya.make  8
-rw-r--r--  build/plugins/_common.py  14
-rw-r--r--  build/plugins/_requirements.py  24
-rw-r--r--  build/plugins/_test_const.py  282
-rw-r--r--  build/plugins/code_generator.py  2
-rw-r--r--  build/plugins/macros_with_error.py  34
-rw-r--r--  build/plugins/pybuild.py  282
-rw-r--r--  build/plugins/res.py  16
-rw-r--r--  build/plugins/suppressions.py  38
-rw-r--r--  build/plugins/tests/test_requirements.py  2
-rw-r--r--  build/plugins/ytest.py  648
-rw-r--r--  build/rules/contrib_deps.policy  2
-rw-r--r--  build/rules/flake8/migrations.yaml  4
-rw-r--r--  build/scripts/append_file.py  4
-rw-r--r--  build/scripts/clang_tidy.py  136
-rw-r--r--  build/scripts/compile_cuda.py  18
-rwxr-xr-x  build/scripts/configure_file.py  2
-rw-r--r--  build/scripts/copy_to_dir.py  2
-rw-r--r--  build/scripts/coverage-info.py  18
-rw-r--r--  build/scripts/create_jcoverage_report.py  70
-rw-r--r--  build/scripts/error.py  58
-rwxr-xr-x  build/scripts/fetch_from.py  114
-rw-r--r--  build/scripts/fetch_from_archive.py  6
-rw-r--r--  build/scripts/fetch_from_external.py  6
-rw-r--r--  build/scripts/fetch_from_mds.py  6
-rwxr-xr-x  build/scripts/fetch_from_sandbox.py  24
-rw-r--r--  build/scripts/go_tool.py  2
-rw-r--r--  build/scripts/java_pack_to_file.py  56
-rw-r--r--  build/scripts/link_dyn_lib.py  4
-rw-r--r--  build/scripts/link_exe.py  54
-rw-r--r--  build/scripts/link_fat_obj.py  8
-rw-r--r--  build/scripts/retry.py  58
-rw-r--r--  build/scripts/run_junit.py  250
-rw-r--r--  build/scripts/unpacking_jtest_runner.py  82
-rw-r--r--  build/scripts/with_coverage.py  4
-rw-r--r--  build/scripts/with_crash_on_timeout.py  4
-rw-r--r--  build/scripts/ya.make  22
-rw-r--r--  build/ya.conf.json  144
-rw-r--r--  build/ya.make  18
-rw-r--r--  build/ymake.core.conf  180
-rwxr-xr-x  build/ymake_conf.py  62
-rw-r--r--  contrib/libs/cxxsupp/libcxx/ya.make  10
-rw-r--r--  contrib/libs/cxxsupp/libcxxrt/ya.make  4
-rw-r--r--  contrib/libs/cxxsupp/libsan/ya.make  2
-rw-r--r--  contrib/libs/protobuf/ya.make  2
-rw-r--r--  contrib/python/botocore/botocore/loaders.py  2
-rw-r--r--  contrib/python/cffi/lsan.supp  4
-rw-r--r--  contrib/python/cffi/ya.make  4
-rw-r--r--  contrib/python/ipdb/ya.make  18
-rw-r--r--  contrib/python/py/py/_vendored_packages/iniconfig/__init__.py  16
-rw-r--r--  contrib/python/pytest/py2/_pytest/python.py  50
-rw-r--r--  contrib/python/pytest/py3/_pytest/python.py  50
-rw-r--r--  contrib/python/six/ya.make  2
-rw-r--r--  contrib/python/toml/LICENSE  42
-rw-r--r--  contrib/python/toml/ya.make  18
-rw-r--r--  contrib/python/traitlets/py2/tests/ya.make  2
-rw-r--r--  contrib/python/ya.make  84
-rw-r--r--  contrib/tools/cython/Cython/Build/BuildExecutable.py  2
-rw-r--r--  contrib/tools/cython/Cython/Build/Dependencies.py  336
-rw-r--r--  contrib/tools/cython/Cython/Build/IpythonMagic.py  480
-rw-r--r--  contrib/tools/cython/Cython/Build/Tests/TestIpythonMagic.py  190
-rw-r--r--  contrib/tools/cython/Cython/CodeWriter.py  596
-rw-r--r--  contrib/tools/cython/Cython/Compiler/Annotate.py  22
-rw-r--r--  contrib/tools/cython/Cython/Compiler/AutoDocTransforms.py  94
-rw-r--r--  contrib/tools/cython/Cython/Compiler/Buffer.py  56
-rw-r--r--  contrib/tools/cython/Cython/Compiler/Builtin.py  82
-rw-r--r--  contrib/tools/cython/Cython/Compiler/CmdLine.py  8
-rw-r--r--  contrib/tools/cython/Cython/Compiler/Code.pxd  40
-rw-r--r--  contrib/tools/cython/Cython/Compiler/Code.py  408
-rw-r--r--  contrib/tools/cython/Cython/Compiler/CodeGeneration.py  2
-rw-r--r--  contrib/tools/cython/Cython/Compiler/CythonScope.py  8
-rw-r--r--  contrib/tools/cython/Cython/Compiler/Errors.py  36
-rw-r--r--  contrib/tools/cython/Cython/Compiler/ExprNodes.py  1764
-rw-r--r--  contrib/tools/cython/Cython/Compiler/FusedNode.py  218
-rw-r--r--  contrib/tools/cython/Cython/Compiler/Main.py  82
-rw-r--r--  contrib/tools/cython/Cython/Compiler/MemoryView.py  30
-rw-r--r--  contrib/tools/cython/Cython/Compiler/ModuleNode.py  860
-rw-r--r--  contrib/tools/cython/Cython/Compiler/Naming.py  10
-rw-r--r--  contrib/tools/cython/Cython/Compiler/Nodes.py  1554
-rw-r--r--  contrib/tools/cython/Cython/Compiler/Optimize.py  952
-rw-r--r--  contrib/tools/cython/Cython/Compiler/Options.py  38
-rw-r--r--  contrib/tools/cython/Cython/Compiler/ParseTreeTransforms.pxd  24
-rw-r--r--  contrib/tools/cython/Cython/Compiler/ParseTreeTransforms.py  666
-rw-r--r--  contrib/tools/cython/Cython/Compiler/Parsing.pxd  6
-rw-r--r--  contrib/tools/cython/Cython/Compiler/Parsing.py  438
-rw-r--r--  contrib/tools/cython/Cython/Compiler/Pipeline.py  40
-rw-r--r--  contrib/tools/cython/Cython/Compiler/PyrexTypes.py  896
-rw-r--r--  contrib/tools/cython/Cython/Compiler/Pythran.py  328
-rw-r--r--  contrib/tools/cython/Cython/Compiler/Scanning.py  40
-rw-r--r--  contrib/tools/cython/Cython/Compiler/StringEncoding.py  18
-rw-r--r--  contrib/tools/cython/Cython/Compiler/Symtab.py  518
-rw-r--r--  contrib/tools/cython/Cython/Compiler/Tests/TestMemView.py  2
-rw-r--r--  contrib/tools/cython/Cython/Compiler/Tests/TestTreeFragment.py  2
-rw-r--r--  contrib/tools/cython/Cython/Compiler/Tests/TestTreePath.py  72
-rw-r--r--  contrib/tools/cython/Cython/Compiler/Tests/TestTypes.py  38
-rw-r--r--  contrib/tools/cython/Cython/Compiler/Tests/TestUtilityLoad.py  20
-rw-r--r--  contrib/tools/cython/Cython/Compiler/TreeFragment.py  8
-rw-r--r--  contrib/tools/cython/Cython/Compiler/TreePath.py  4
-rw-r--r--  contrib/tools/cython/Cython/Compiler/TypeInference.py  66
-rw-r--r--  contrib/tools/cython/Cython/Compiler/TypeSlots.py  82
-rw-r--r--  contrib/tools/cython/Cython/Compiler/UtilNodes.py  8
-rw-r--r--  contrib/tools/cython/Cython/Compiler/UtilityCode.py  48
-rw-r--r--  contrib/tools/cython/Cython/Compiler/Visitor.py  22
-rw-r--r--  contrib/tools/cython/Cython/Coverage.py  288
-rw-r--r--  contrib/tools/cython/Cython/Debugger/Cygdb.py  30
-rw-r--r--  contrib/tools/cython/Cython/Debugger/Tests/TestLibCython.py  4
-rw-r--r--  contrib/tools/cython/Cython/Debugger/Tests/cfuncs.h  2
-rw-r--r--  contrib/tools/cython/Cython/Debugger/Tests/codefile  12
-rw-r--r--  contrib/tools/cython/Cython/Debugger/Tests/test_libcython_in_gdb.py  6
-rw-r--r--  contrib/tools/cython/Cython/Debugger/Tests/test_libpython_in_gdb.py  18
-rw-r--r--  contrib/tools/cython/Cython/Debugger/libcython.py  28
-rw-r--r--  contrib/tools/cython/Cython/Debugger/libpython.py  1052
-rw-r--r--  contrib/tools/cython/Cython/Distutils/build_ext.py  8
-rw-r--r--  contrib/tools/cython/Cython/Distutils/old_build_ext.py  6
-rw-r--r--  contrib/tools/cython/Cython/Includes/cpython/__init__.pxd  4
-rw-r--r--  contrib/tools/cython/Cython/Includes/cpython/array.pxd  46
-rw-r--r--  contrib/tools/cython/Cython/Includes/cpython/datetime.pxd  26
-rw-r--r--  contrib/tools/cython/Cython/Includes/cpython/long.pxd  70
-rw-r--r--  contrib/tools/cython/Cython/Includes/cpython/module.pxd  26
-rw-r--r--  contrib/tools/cython/Cython/Includes/cpython/object.pxd  32
-rw-r--r--  contrib/tools/cython/Cython/Includes/cpython/pythread.pxd  32
-rw-r--r--  contrib/tools/cython/Cython/Includes/cpython/set.pxd  12
-rw-r--r--  contrib/tools/cython/Cython/Includes/cpython/tuple.pxd  2
-rw-r--r--  contrib/tools/cython/Cython/Includes/cpython/weakref.pxd  2
-rw-r--r--  contrib/tools/cython/Cython/Includes/libc/limits.pxd  38
-rw-r--r--  contrib/tools/cython/Cython/Includes/libc/signal.pxd  98
-rw-r--r--  contrib/tools/cython/Cython/Includes/libcpp/algorithm.pxd  24
-rw-r--r--  contrib/tools/cython/Cython/Includes/libcpp/deque.pxd  52
-rw-r--r--  contrib/tools/cython/Cython/Includes/libcpp/functional.pxd  26
-rw-r--r--  contrib/tools/cython/Cython/Includes/libcpp/iterator.pxd  4
-rw-r--r--  contrib/tools/cython/Cython/Includes/libcpp/limits.pxd  4
-rw-r--r--  contrib/tools/cython/Cython/Includes/libcpp/memory.pxd  4
-rw-r--r--  contrib/tools/cython/Cython/Includes/libcpp/string.pxd  24
-rw-r--r--  contrib/tools/cython/Cython/Includes/libcpp/unordered_map.pxd  4
-rw-r--r--  contrib/tools/cython/Cython/Includes/libcpp/vector.pxd  58
-rw-r--r--  contrib/tools/cython/Cython/Includes/numpy/__init__.pxd  90
-rw-r--r--  contrib/tools/cython/Cython/Includes/posix/signal.pxd  14
-rw-r--r--  contrib/tools/cython/Cython/Includes/posix/stat.pxd  10
-rw-r--r--  contrib/tools/cython/Cython/Includes/posix/types.pxd  4
-rw-r--r--  contrib/tools/cython/Cython/Parser/Grammar  2
-rw-r--r--  contrib/tools/cython/Cython/Plex/Scanners.pxd  8
-rw-r--r--  contrib/tools/cython/Cython/Runtime/refnanny.pyx  2
-rw-r--r--  contrib/tools/cython/Cython/Shadow.py  26
-rw-r--r--  contrib/tools/cython/Cython/StringIOTree.pxd  34
-rw-r--r--  contrib/tools/cython/Cython/StringIOTree.py  72
-rw-r--r--  contrib/tools/cython/Cython/Tempita/_tempita.py  2
-rw-r--r--  contrib/tools/cython/Cython/Tests/TestCodeWriter.py  2
-rw-r--r--  contrib/tools/cython/Cython/Tests/TestJediTyper.py  8
-rw-r--r--  contrib/tools/cython/Cython/Utility/AsyncGen.c  2118
-rw-r--r--  contrib/tools/cython/Cython/Utility/Buffer.c  208
-rw-r--r--  contrib/tools/cython/Cython/Utility/Builtins.c  100
-rw-r--r--  contrib/tools/cython/Cython/Utility/CConvert.pyx  8
-rw-r--r--  contrib/tools/cython/Cython/Utility/CMath.c  4
-rw-r--r--  contrib/tools/cython/Cython/Utility/CommonStructures.c  172
-rw-r--r--  contrib/tools/cython/Cython/Utility/Complex.c  8
-rw-r--r--  contrib/tools/cython/Cython/Utility/Coroutine.c  1356
-rw-r--r--  contrib/tools/cython/Cython/Utility/CppConvert.pyx  38
-rw-r--r--  contrib/tools/cython/Cython/Utility/CppSupport.cpp  20
-rw-r--r--  contrib/tools/cython/Cython/Utility/CythonFunction.c  66
-rw-r--r--  contrib/tools/cython/Cython/Utility/Embed.c  28
-rw-r--r--  contrib/tools/cython/Cython/Utility/Exceptions.c  212
-rw-r--r--  contrib/tools/cython/Cython/Utility/ExtensionTypes.c  284
-rw-r--r--  contrib/tools/cython/Cython/Utility/FunctionArguments.c  34
-rw-r--r--  contrib/tools/cython/Cython/Utility/ImportExport.c  214
-rw-r--r--  contrib/tools/cython/Cython/Utility/MemoryView.pyx  32
-rw-r--r--  contrib/tools/cython/Cython/Utility/MemoryView_C.c  44
-rw-r--r--  contrib/tools/cython/Cython/Utility/ModuleSetupCode.c  1312
-rw-r--r--  contrib/tools/cython/Cython/Utility/ObjectHandling.c  932
-rw-r--r--  contrib/tools/cython/Cython/Utility/Optimize.c  374
-rw-r--r--  contrib/tools/cython/Cython/Utility/Overflow.c  6
-rw-r--r--  contrib/tools/cython/Cython/Utility/Profile.c  68
-rw-r--r--  contrib/tools/cython/Cython/Utility/StringTools.c  284
-rw-r--r--  contrib/tools/cython/Cython/Utility/TypeConversion.c  234
-rw-r--r--  contrib/tools/cython/Cython/Utility/arrayarray.h  12
-rw-r--r--  contrib/tools/cython/Cython/Utils.py  32
-rw-r--r--  contrib/tools/cython/Cython/ya.make  32
-rw-r--r--  contrib/tools/python/ya.make  8
-rw-r--r--  contrib/tools/python3/lib/lsan.supp  4
-rw-r--r--  contrib/tools/python3/lib/ya.make  4
-rw-r--r--  contrib/tools/python3/src/tsan.supp  2
-rw-r--r--  contrib/tools/yasm/libyasm/errwarn.c  16
-rw-r--r--  contrib/tools/yasm/libyasm/errwarn.h  6
-rw-r--r--  contrib/tools/yasm/modules/preprocs/gas/gas-preproc.c  2
-rw-r--r--  contrib/tools/yasm/modules/preprocs/nasm/nasm-preproc.c  2
-rw-r--r--  contrib/tools/yasm/modules/preprocs/raw/raw-preproc.c  2
-rw-r--r--  library/cpp/accurate_accumulate/benchmark/metrics/ya.make  4
-rw-r--r--  library/cpp/actors/interconnect/ut_fat/ya.make  2
-rw-r--r--  library/cpp/digest/argonish/ut_fat/ya.make  2
-rw-r--r--  library/cpp/http/io/fuzz/ya.make  4
-rw-r--r--  library/cpp/http/io/stream_ut.cpp  12
-rw-r--r--  library/cpp/http/server/http_ut.cpp  8
-rw-r--r--  library/cpp/json/yson/ut/ya.make  6
-rw-r--r--  library/cpp/scheme/tests/fuzz_json/ya.make  4
-rw-r--r--  library/cpp/string_utils/base64/bench/metrics/ya.make  4
-rw-r--r--  library/cpp/testing/benchmark/examples/metrics/ya.make  4
-rw-r--r--  library/cpp/testing/common/env.cpp  198
-rw-r--r--  library/cpp/testing/common/env.h  54
-rw-r--r--  library/cpp/testing/common/ut/env_ut.cpp  128
-rw-r--r--  library/cpp/testing/unittest/fat/test_port_manager.cpp  66
-rw-r--r--  library/cpp/testing/unittest/fat/ya.make  28
-rw-r--r--  library/cpp/testing/unittest/registar.cpp  30
-rw-r--r--  library/cpp/testing/unittest/tests_data.cpp  76
-rw-r--r--  library/cpp/testing/unittest/tests_data.h  20
-rw-r--r--  library/cpp/testing/unittest/ut/main.cpp  12
-rw-r--r--  library/cpp/testing/unittest/utmain.cpp  148
-rw-r--r--  library/cpp/testing/ya.make  2
-rw-r--r--  library/cpp/ya.make  20
-rw-r--r--  library/python/cores/__init__.py  2
-rw-r--r--  library/python/cores/ya.make  2
-rw-r--r--  library/python/filelock/__init__.py  56
-rw-r--r--  library/python/filelock/ut/lib/test_filelock.py  56
-rw-r--r--  library/python/fs/__init__.py  96
-rw-r--r--  library/python/fs/test/test_fs.py  62
-rw-r--r--  library/python/func/__init__.py  194
-rw-r--r--  library/python/func/ut/test_func.py  162
-rw-r--r--  library/python/func/ut/ya.make  22
-rw-r--r--  library/python/func/ya.make  8
-rw-r--r--  library/python/pytest/main.py  96
-rw-r--r--  library/python/pytest/plugins/collection.py  2
-rw-r--r--  library/python/pytest/plugins/fakeid_py2.py  4
-rw-r--r--  library/python/pytest/plugins/fakeid_py3.py  4
-rw-r--r--  library/python/pytest/plugins/ya.make  32
-rw-r--r--  library/python/pytest/plugins/ya.py  478
-rw-r--r--  library/python/pytest/ya.make  22
-rw-r--r--  library/python/pytest/yatest_tools.py  196
-rw-r--r--  library/python/reservoir_sampling/README.md  22
-rw-r--r--  library/python/resource/__init__.py  16
-rw-r--r--  library/python/resource/ut/lib/test_simple.py  22
-rw-r--r--  library/python/runtime_py3/entry_points.py  16
-rw-r--r--  library/python/runtime_py3/main/main.c  30
-rw-r--r--  library/python/strings/__init__.py  34
-rw-r--r--  library/python/strings/strings.py  48
-rw-r--r--  library/python/strings/ut/test_strings.py  14
-rw-r--r--  library/python/strings/ya.make  10
-rw-r--r--  library/python/testing/filter/ya.make  2
-rw-r--r--  library/python/testing/import_test/import_test.py  30
-rw-r--r--  library/python/testing/import_test/ya.make  2
-rw-r--r--  library/python/testing/recipe/__init__.py  8
-rw-r--r--  library/python/testing/recipe/ya.make  10
-rw-r--r--  library/python/testing/ya.make  8
-rw-r--r--  library/python/testing/yatest_common/ya.make  14
-rw-r--r--  library/python/testing/yatest_common/yatest/common/canonical.py  10
-rw-r--r--  library/python/testing/yatest_common/yatest/common/environment.py  8
-rw-r--r--  library/python/testing/yatest_common/yatest/common/errors.py  6
-rw-r--r--  library/python/testing/yatest_common/yatest/common/misc.py  38
-rw-r--r--  library/python/testing/yatest_common/yatest/common/network.py  436
-rw-r--r--  library/python/testing/yatest_common/yatest/common/path.py  56
-rw-r--r--  library/python/testing/yatest_common/yatest/common/process.py  634
-rw-r--r--  library/python/testing/yatest_common/yatest/common/runtime.py  196
-rw-r--r--  library/python/testing/yatest_common/yatest/common/runtime_java.py  36
-rw-r--r--  library/python/testing/yatest_common/yatest/common/ya.make  2
-rw-r--r--  library/python/testing/yatest_lib/external.py  36
-rw-r--r--  library/python/testing/yatest_lib/test_splitter.py  8
-rw-r--r--  library/python/testing/yatest_lib/tests/test_external.py  40
-rw-r--r--  library/python/testing/yatest_lib/tests/ya.make  6
-rw-r--r--  library/python/testing/yatest_lib/ya.make  14
-rw-r--r--  library/python/windows/__init__.py  78
-rw-r--r--  library/python/ya.make  2
-rw-r--r--  tools/archiver/tests/test.py  2
-rw-r--r--  util/charset/benchmark/to_lower/metrics/ya.make  4
-rw-r--r--  util/charset/benchmark/utf8_to_wide/metrics/ya.make  4
-rw-r--r--  util/charset/ut/ya.make  4
-rw-r--r--  util/datetime/ut/ya.make  4
-rw-r--r--  util/digest/ut/ya.make  4
-rw-r--r--  util/draft/ut/ya.make  4
-rw-r--r--  util/folder/ut/ya.make  4
-rw-r--r--  util/generic/benchmark/fastclp2/metrics/ya.make  4
-rw-r--r--  util/generic/benchmark/log2/metrics/ya.make  4
-rw-r--r--  util/generic/benchmark/rotate_bits/metrics/ya.make  4
-rw-r--r--  util/generic/benchmark/vector_count_ctor/metrics/ya.make  4
-rw-r--r--  util/generic/string.pxd  4
-rw-r--r--  util/generic/ut/ya.make  8
-rw-r--r--  util/memory/benchmark/pool/metrics/ya.make  4
-rw-r--r--  util/memory/ut/ya.make  4
-rw-r--r--  util/network/ut/ya.make  4
-rw-r--r--  util/random/benchmark/prng/metrics/ya.make  4
-rw-r--r--  util/random/ut/ya.make  4
-rw-r--r--  util/stream/ut/ya.make  4
-rw-r--r--  util/string/benchmark/float_to_string/metrics/ya.make  4
-rw-r--r--  util/string/benchmark/join/metrics/ya.make  4
-rw-r--r--  util/string/benchmark/subst_global/metrics/ya.make  4
-rw-r--r--  util/string/ut/ya.make  4
-rw-r--r--  util/system/atomic_ut.cpp  18
-rw-r--r--  util/system/benchmark/cpu_id/metrics/ya.make  4
-rw-r--r--  util/system/benchmark/create_destroy_thread/metrics/ya.make  4
-rw-r--r--  util/system/shellcommand.cpp  202
-rw-r--r--  util/system/shellcommand.h  6
-rw-r--r--  util/system/shellcommand_ut.cpp  98
-rw-r--r--  util/system/ut/stdin_osfhandle/main.cpp  30
-rw-r--r--  util/system/ut/stdin_osfhandle/ya.make  26
-rw-r--r--  util/system/ut/ya.make  24
-rw-r--r--  util/system/yassert.cpp  22
-rw-r--r--  util/tests/sym_versions/test_glibc.py  2
-rw-r--r--  util/tests/ya_util_tests.inc  8
-rw-r--r--  ydb/core/blobstorage/backpressure/ut/ya.make  2
-rw-r--r--  ydb/core/blobstorage/base/ut/ya.make  4
-rw-r--r--  ydb/core/blobstorage/dsproxy/ut/ya.make  4
-rw-r--r--  ydb/core/blobstorage/groupinfo/ut/ya.make  4
-rw-r--r--  ydb/core/blobstorage/incrhuge/incrhuge_keeper_write.cpp  2
-rw-r--r--  ydb/core/blobstorage/ut_vdisk/lib/test_gc.cpp  8
-rw-r--r--  ydb/core/blobstorage/vdisk/hulldb/blobstorage_hullgcmap.h  2
-rw-r--r--  ydb/core/client/ut/ya.make  4
-rw-r--r--  ydb/core/driver_lib/cli_base/cli_cmds_db.cpp  6
-rw-r--r--  ydb/core/driver_lib/cli_utils/cli_cmds_config.cpp  4
-rw-r--r--  ydb/core/driver_lib/cli_utils/cli_cmds_get.cpp  2
-rw-r--r--  ydb/core/driver_lib/cli_utils/cli_cmds_node.cpp  2
-rw-r--r--  ydb/core/engine/mkql_engine_flat.cpp  8
-rw-r--r--  ydb/core/erasure/ut/ya.make  4
-rw-r--r--  ydb/core/keyvalue/ut/ya.make  4
-rw-r--r--  ydb/core/kqp/provider/ut/ya.make  4
-rw-r--r--  ydb/core/mind/ut/ya.make  4
-rw-r--r--  ydb/core/tablet_flat/test/libs/rows/layout.h  2
-rw-r--r--  ydb/core/testlib/test_client.cpp  6
-rw-r--r--  ydb/core/tx/schemeshard/ut_bsvolume/ya.make  2
-rw-r--r--  ydb/core/tx/schemeshard/ut_pq/ya.make  2
-rw-r--r--  ydb/core/tx/schemeshard/ut_rtmr/ya.make  2
-rw-r--r--  ydb/core/tx/schemeshard/ut_subdomain/ya.make  2
-rw-r--r--  ydb/core/tx/tx_proxy/ut_base_tenant/ya.make  2
-rw-r--r--  ydb/core/tx/tx_proxy/ut_storage_tenant/ya.make  4
-rw-r--r--  ydb/library/yql/core/yql_graph_transformer.cpp  2
-rw-r--r--  ydb/library/yql/minikql/benchmark/pack_num/metrics/ya.make  4
-rw-r--r--  ydb/library/yql/minikql/comp_nodes/ut/ya.make  4
-rw-r--r--  ydb/library/yql/minikql/computation/ut/ya.make  4
-rw-r--r--  ydb/library/yql/minikql/invoke_builtins/ut/ya.make  4
-rw-r--r--  ydb/library/yql/minikql/ut/ya.make  4
-rw-r--r--  ydb/public/sdk/cpp/client/ydb_params/ut/ya.make  2
-rw-r--r--  ydb/services/persqueue_v1/ut/ya.make  2
-rw-r--r--  ydb/tests/functional/blobstorage/ya.make  2
346 files changed, 15092 insertions, 15092 deletions
diff --git a/build/config/tests/clang_tidy/config.yaml b/build/config/tests/clang_tidy/config.yaml
index fba2fc718b..d55707592c 100644
--- a/build/config/tests/clang_tidy/config.yaml
+++ b/build/config/tests/clang_tidy/config.yaml
@@ -1,6 +1,6 @@
Checks: >
-*,
- arcadia-typeid-name-restriction,
+ arcadia-typeid-name-restriction,
bugprone-use-after-move,
readability-identifier-naming,
CheckOptions:
diff --git a/build/config/tests/clang_tidy/ya.make b/build/config/tests/clang_tidy/ya.make
index 2c63a7978d..fc6544f38f 100644
--- a/build/config/tests/clang_tidy/ya.make
+++ b/build/config/tests/clang_tidy/ya.make
@@ -1,4 +1,4 @@
-OWNER(
- g:cpp-committee
- g:yatest
-)
+OWNER(
+ g:cpp-committee
+ g:yatest
+)
diff --git a/build/config/tests/flake8/flake8.conf b/build/config/tests/flake8/flake8.conf
index 2e5516a02f..8af22b5d4d 100644
--- a/build/config/tests/flake8/flake8.conf
+++ b/build/config/tests/flake8/flake8.conf
@@ -11,17 +11,17 @@ select =
#Q, # quotes
ignore =
- E122, # continuation line missing indentation or outdented
+ E122, # continuation line missing indentation or outdented
E123, # closing bracket does not match indentation of opening bracket's line
E127, # continuation line over-indented for visual indent
- E131, # continuation line unaligned for hanging
+ E131, # continuation line unaligned for hanging
E203, # whitespace before ':'
- E225, # missing whitespace around operator
+ E225, # missing whitespace around operator
E226, # missing whitespace around arithmetic operator
E24, # multiple spaces after ',' or tab after ','
- E275, # missing whitespace after keyword
- E305, # expected 2 blank lines after end of function or class
- E306, # expected 1 blank line before a nested definition
+ E275, # missing whitespace after keyword
+ E305, # expected 2 blank lines after end of function or class
+ E306, # expected 1 blank line before a nested definition
E402, # module level import not at top of file
E722, # do not use bare except, specify exception instead
E731, # do not assign a lambda expression, use a def
@@ -29,7 +29,7 @@ ignore =
F722, # syntax error in forward annotation
- W503, # line break before binary operator
- W504, # line break after binary operator
-
+ W503, # line break before binary operator
+ W504, # line break after binary operator
+
max-line-length = 200
diff --git a/build/config/tests/flake8/ya.make b/build/config/tests/flake8/ya.make
index 6aff16ef0b..c4ba4105e0 100644
--- a/build/config/tests/flake8/ya.make
+++ b/build/config/tests/flake8/ya.make
@@ -1,4 +1,4 @@
-OWNER(
- g:python-committee
- g:yatest
-)
+OWNER(
+ g:python-committee
+ g:yatest
+)
diff --git a/build/config/tests/ya.make b/build/config/tests/ya.make
index fad817f908..146c3f63ad 100644
--- a/build/config/tests/ya.make
+++ b/build/config/tests/ya.make
@@ -1,4 +1,4 @@
-OWNER(
+OWNER(
shadchin
- g:yatest
-)
+ g:yatest
+)
diff --git a/build/config/ya.make b/build/config/ya.make
index 8c485fc68e..ff93704d5b 100644
--- a/build/config/ya.make
+++ b/build/config/ya.make
@@ -1,3 +1,3 @@
-RECURSE(
- tests
-)
+RECURSE(
+ tests
+)
diff --git a/build/platform/python/python27/ya.make b/build/platform/python/python27/ya.make
index f9811d2998..35d2ef8405 100644
--- a/build/platform/python/python27/ya.make
+++ b/build/platform/python/python27/ya.make
@@ -1,22 +1,22 @@
-RESOURCES_LIBRARY()
-
-OWNER(
- g:contrib
- g:yatool
-)
-
-INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
-
-IF (OS_LINUX)
+RESOURCES_LIBRARY()
+
+OWNER(
+ g:contrib
+ g:yatool
+)
+
+INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
+
+IF (OS_LINUX)
IF (ARCH_ARM64 OR ARCH_AARCH64)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON27 ${PYTHON27_LINUX_ARM64})
ELSE()
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON27 ${PYTHON27_LINUX})
ENDIF()
-ELSEIF (OS_DARWIN)
+ELSEIF (OS_DARWIN)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON27 ${PYTHON27_DARWIN})
-ELSEIF (OS_WINDOWS)
+ELSEIF (OS_WINDOWS)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON27 ${PYTHON27_WINDOWS})
-ENDIF()
-
-END()
+ENDIF()
+
+END()
diff --git a/build/platform/python/python34/ya.make b/build/platform/python/python34/ya.make
index 2d9bfa38ae..61be788bec 100644
--- a/build/platform/python/python34/ya.make
+++ b/build/platform/python/python34/ya.make
@@ -1,18 +1,18 @@
-RESOURCES_LIBRARY()
-
-OWNER(
- g:contrib
- g:yatool
-)
-
-INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
-
-IF (OS_LINUX)
+RESOURCES_LIBRARY()
+
+OWNER(
+ g:contrib
+ g:yatool
+)
+
+INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
+
+IF (OS_LINUX)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON34 ${PYTHON34_LINUX})
-ELSEIF (OS_DARWIN)
+ELSEIF (OS_DARWIN)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON34 ${PYTHON34_DARWIN})
-ELSEIF (OS_WINDOWS)
+ELSEIF (OS_WINDOWS)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON34 ${PYTHON34_WINDOWS})
-ENDIF()
-
-END()
+ENDIF()
+
+END()
diff --git a/build/platform/python/python35/ya.make b/build/platform/python/python35/ya.make
index 9baedcd926..7a6292efa0 100644
--- a/build/platform/python/python35/ya.make
+++ b/build/platform/python/python35/ya.make
@@ -1,18 +1,18 @@
-RESOURCES_LIBRARY()
-
-OWNER(
- g:contrib
- g:yatool
-)
-
-INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
-
-IF (OS_LINUX)
+RESOURCES_LIBRARY()
+
+OWNER(
+ g:contrib
+ g:yatool
+)
+
+INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
+
+IF (OS_LINUX)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON35 ${PYTHON35_LINUX})
-ELSEIF (OS_DARWIN)
+ELSEIF (OS_DARWIN)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON35 ${PYTHON35_DARWIN})
-ELSEIF (OS_WINDOWS)
+ELSEIF (OS_WINDOWS)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON35 ${PYTHON35_WINDOWS})
-ENDIF()
-
-END()
+ENDIF()
+
+END()
diff --git a/build/platform/python/python36/ya.make b/build/platform/python/python36/ya.make
index 2debf2a37d..c9dcf5d88b 100644
--- a/build/platform/python/python36/ya.make
+++ b/build/platform/python/python36/ya.make
@@ -1,18 +1,18 @@
-RESOURCES_LIBRARY()
-
-OWNER(
- g:contrib
- g:yatool
-)
-
-INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
-
-IF (OS_LINUX)
+RESOURCES_LIBRARY()
+
+OWNER(
+ g:contrib
+ g:yatool
+)
+
+INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
+
+IF (OS_LINUX)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON36 ${PYTHON36_LINUX})
-ELSEIF (OS_DARWIN)
+ELSEIF (OS_DARWIN)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON36 ${PYTHON36_DARWIN})
-ELSEIF (OS_WINDOWS)
+ELSEIF (OS_WINDOWS)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON36 ${PYTHON36_WINDOWS})
-ENDIF()
-
-END()
+ENDIF()
+
+END()
diff --git a/build/platform/python/python37/ya.make b/build/platform/python/python37/ya.make
index ed50e55bb9..724c3f5b76 100644
--- a/build/platform/python/python37/ya.make
+++ b/build/platform/python/python37/ya.make
@@ -1,18 +1,18 @@
-RESOURCES_LIBRARY()
-
-OWNER(
- g:contrib
- g:yatool
-)
-
-INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
-
-IF (OS_LINUX)
+RESOURCES_LIBRARY()
+
+OWNER(
+ g:contrib
+ g:yatool
+)
+
+INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
+
+IF (OS_LINUX)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON37 ${PYTHON37_LINUX})
-ELSEIF (OS_DARWIN)
+ELSEIF (OS_DARWIN)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON37 ${PYTHON37_DARWIN})
-ELSEIF (OS_WINDOWS)
+ELSEIF (OS_WINDOWS)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON37 ${PYTHON37_WINDOWS})
-ENDIF()
-
-END()
+ENDIF()
+
+END()
diff --git a/build/platform/python/python38/ya.make b/build/platform/python/python38/ya.make
index e9d3ba7db7..b6820ca6ca 100644
--- a/build/platform/python/python38/ya.make
+++ b/build/platform/python/python38/ya.make
@@ -1,23 +1,23 @@
-RESOURCES_LIBRARY()
-
-OWNER(
- g:contrib
- g:yatool
-)
-
-INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
-
-IF (OS_LINUX)
+RESOURCES_LIBRARY()
+
+OWNER(
+ g:contrib
+ g:yatool
+)
+
+INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
+
+IF (OS_LINUX)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON38 ${PYTHON38_LINUX})
-ELSEIF (OS_DARWIN)
+ELSEIF (OS_DARWIN)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON38 ${PYTHON38_DARWIN})
IF (ARCH_ARM64)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON38 ${PYTHON38_DARWIN_ARM64})
ELSEIF(ARCH_X86_64)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON38 ${PYTHON38_DARWIN})
ENDIF()
-ELSEIF (OS_WINDOWS)
+ELSEIF (OS_WINDOWS)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON38 ${PYTHON38_WINDOWS})
-ENDIF()
-
-END()
+ENDIF()
+
+END()
diff --git a/build/platform/python/resources.inc b/build/platform/python/resources.inc
index 6bace07e85..a730a5039b 100644
--- a/build/platform/python/resources.inc
+++ b/build/platform/python/resources.inc
@@ -6,7 +6,7 @@ SET(PYTHON37_LINUX sbr:616675620)
SET(PYTHON38_LINUX sbr:1211259884)
SET(PYTHON39_LINUX sbr:2019057022)
SET(PYTHON310_LINUX sbr:2505611617)
-
+
SET(PYTHON27_LINUX_ARM64 sbr:2145006545)
SET(PYTHON27_DARWIN sbr:426406952)
@@ -17,7 +17,7 @@ SET(PYTHON37_DARWIN sbr:616740054)
SET(PYTHON38_DARWIN sbr:1211286783)
SET(PYTHON39_DARWIN sbr:2046345566)
SET(PYTHON310_DARWIN sbr:2505677448)
-
+
SET(PYTHON38_DARWIN_ARM64 sbr:2577173323)
SET(PYTHON39_DARWIN_ARM64 sbr:2533263197)
SET(PYTHON310_DARWIN_ARM64 sbr:2577250782)
diff --git a/build/platform/python/tests/test_common.py b/build/platform/python/tests/test_common.py
index f4689ff0a4..7a685330ea 100644
--- a/build/platform/python/tests/test_common.py
+++ b/build/platform/python/tests/test_common.py
@@ -1,22 +1,22 @@
-import subprocess
-
-import pytest
-
-from build.platform.python.tests import testlib
-
+import subprocess
+
+import pytest
+
+from build.platform.python.tests import testlib
+
PYTHON_VERSIONS = ["2.7", "3.4", "3.5", "3.6"] # 3.7, 3.8 are not runnable
-
-
-@pytest.mark.parametrize("pyver", PYTHON_VERSIONS)
-def test_version_matched(pyver):
- testlib.check_python_version(pyver)
-
-
-@pytest.mark.parametrize("pyver", PYTHON_VERSIONS)
-def test_python_max_unicode_bytes(pyver):
- cmd = [testlib.get_python_bin(pyver), '-c', 'import sys; print(sys.maxunicode)']
- maxunicode = subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode('utf-8')
- assert int(maxunicode) > 65535, "Found UCS2 build"
+
+
+@pytest.mark.parametrize("pyver", PYTHON_VERSIONS)
+def test_version_matched(pyver):
+ testlib.check_python_version(pyver)
+
+
+@pytest.mark.parametrize("pyver", PYTHON_VERSIONS)
+def test_python_max_unicode_bytes(pyver):
+ cmd = [testlib.get_python_bin(pyver), '-c', 'import sys; print(sys.maxunicode)']
+ maxunicode = subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode('utf-8')
+ assert int(maxunicode) > 65535, "Found UCS2 build"
@pytest.mark.parametrize("pyver", PYTHON_VERSIONS)
diff --git a/build/platform/python/tests/testlib.py b/build/platform/python/tests/testlib.py
index 92fc571778..d12f2815d4 100644
--- a/build/platform/python/tests/testlib.py
+++ b/build/platform/python/tests/testlib.py
@@ -1,21 +1,21 @@
-import os
-import subprocess
-
-import yatest.common
-
-
-def get_python_bin(ver):
+import os
+import subprocess
+
+import yatest.common
+
+
+def get_python_bin(ver):
res_name = 'EXTERNAL_PYTHON{}_RESOURCE_GLOBAL'.format(ver.replace('.', ''))
- gr = yatest.common.global_resources()
- if res_name in gr:
- bindir = os.path.join(gr[res_name], 'python', 'bin')
+ gr = yatest.common.global_resources()
+ if res_name in gr:
+ bindir = os.path.join(gr[res_name], 'python', 'bin')
if ('python' + ver) in os.listdir(bindir):
return os.path.join(bindir, 'python' + ver)
- return os.path.join(bindir, 'python')
-
- raise AssertionError("Resource '{}' is not available: {}".format(res_name, gr))
-
-
-def check_python_version(version):
- ver = subprocess.check_output([get_python_bin(version), '-V'], stderr=subprocess.STDOUT).decode('utf-8')
- assert version in ver
+ return os.path.join(bindir, 'python')
+
+ raise AssertionError("Resource '{}' is not available: {}".format(res_name, gr))
+
+
+def check_python_version(version):
+ ver = subprocess.check_output([get_python_bin(version), '-V'], stderr=subprocess.STDOUT).decode('utf-8')
+ assert version in ver
diff --git a/build/platform/python/tests/ya.make b/build/platform/python/tests/ya.make
index 3915de8969..0d8965240e 100644
--- a/build/platform/python/tests/ya.make
+++ b/build/platform/python/tests/ya.make
@@ -1,36 +1,36 @@
-PY3TEST()
-
-OWNER(
- g:contrib
- g:yatool
-)
-
-IF (OS_DARWIN)
- SIZE(LARGE)
-
- TAG(
- ya:fat
- ya:force_sandbox ya:exotic_platform
- )
-ENDIF()
-
-PY_SRCS(
- testlib.py
-)
-
-TEST_SRCS(
- test_common.py
-)
-
-PEERDIR(
- build/platform/python/python27
- build/platform/python/python34
- build/platform/python/python35
- build/platform/python/python36
- build/platform/python/python37
- build/platform/python/python38
+PY3TEST()
+
+OWNER(
+ g:contrib
+ g:yatool
+)
+
+IF (OS_DARWIN)
+ SIZE(LARGE)
+
+ TAG(
+ ya:fat
+ ya:force_sandbox ya:exotic_platform
+ )
+ENDIF()
+
+PY_SRCS(
+ testlib.py
+)
+
+TEST_SRCS(
+ test_common.py
+)
+
+PEERDIR(
+ build/platform/python/python27
+ build/platform/python/python34
+ build/platform/python/python35
+ build/platform/python/python36
+ build/platform/python/python37
+ build/platform/python/python38
build/platform/python/python39
build/platform/python/python310
-)
-
-END()
+)
+
+END()
diff --git a/build/platform/python/ya.make b/build/platform/python/ya.make
index 67be319056..247e65f4c9 100644
--- a/build/platform/python/ya.make
+++ b/build/platform/python/ya.make
@@ -1,12 +1,12 @@
RESOURCES_LIBRARY()
-OWNER(
- g:contrib
- g:yatool
-)
-INCLUDE(resources.inc)
-
+OWNER(
+ g:contrib
+ g:yatool
+)
+
+INCLUDE(resources.inc)
IF (USE_SYSTEM_PYTHON)
IF (OS_LINUX)
IF (ARCH_ARM64 OR ARCH_AARCH64)
@@ -130,7 +130,7 @@ ELSEIF (NOT USE_ARCADIA_PYTHON)
ENDIF()
END()
-
-RECURSE_FOR_TESTS(
- tests
-)
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/build/platform/test_tool/a.yaml b/build/platform/test_tool/a.yaml
index e7540dfb74..298148e450 100644
--- a/build/platform/test_tool/a.yaml
+++ b/build/platform/test_tool/a.yaml
@@ -1,16 +1,16 @@
service: devtools
-title: test_tool acceptance
-ci:
+title: test_tool acceptance
+ci:
release-title-source: flow
- secret: sec-01ekd5wc1dmdd544yp1xt3s9b8
- runtime:
- sandbox-owner: DEVTOOLS-LARGE
- autocheck:
- large-autostart:
- - target: devtools/ya/build/tests/cross_compilation/mac_builds_linux
- toolchains:
- - default-darwin-x86_64-release
- - target: devtools/ya/test/tests/port_manager/fat
- toolchains:
- - default-linux-x86_64-release-musl
- - default-darwin-x86_64-release
+ secret: sec-01ekd5wc1dmdd544yp1xt3s9b8
+ runtime:
+ sandbox-owner: DEVTOOLS-LARGE
+ autocheck:
+ large-autostart:
+ - target: devtools/ya/build/tests/cross_compilation/mac_builds_linux
+ toolchains:
+ - default-darwin-x86_64-release
+ - target: devtools/ya/test/tests/port_manager/fat
+ toolchains:
+ - default-linux-x86_64-release-musl
+ - default-darwin-x86_64-release
diff --git a/build/platform/test_tool/ya.make b/build/platform/test_tool/ya.make
index ffc48022fa..fd97d51ed8 100644
--- a/build/platform/test_tool/ya.make
+++ b/build/platform/test_tool/ya.make
@@ -1,8 +1,8 @@
RESOURCES_LIBRARY()
-OWNER(
- g:yatest
- heretic
-)
+OWNER(
+ g:yatest
+ heretic
+)
IF (TEST_TOOL_HOST_LOCAL)
MESSAGE(WARNING Host test tool $TEST_TOOL_HOST_LOCAL will be used)
diff --git a/build/plugins/_common.py b/build/plugins/_common.py
index 7bf29b4d6f..2f831a94db 100644
--- a/build/plugins/_common.py
+++ b/build/plugins/_common.py
@@ -188,13 +188,13 @@ def filter_out_by_keyword(test_data, keyword):
def generate_chunks(lst, chunk_size):
for i in xrange(0, len(lst), chunk_size):
yield lst[i:(i + chunk_size)]
-
-
-def strip_roots(path):
- for prefix in ["$B/", "$S/"]:
- if path.startswith(prefix):
- return path[len(prefix):]
- return path
+
+
+def strip_roots(path):
+ for prefix in ["$B/", "$S/"]:
+ if path.startswith(prefix):
+ return path[len(prefix):]
+ return path
def to_yesno(x):
diff --git a/build/plugins/_requirements.py b/build/plugins/_requirements.py
index 11cb92ebe7..c27635e852 100644
--- a/build/plugins/_requirements.py
+++ b/build/plugins/_requirements.py
@@ -6,22 +6,22 @@ def check_cpu(suite_cpu_requirements, test_size, is_kvm=False):
max_cpu_requirements = consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.Cpu)
if isinstance(suite_cpu_requirements, str):
if all(consts.TestRequirementsConstants.is_all_cpu(req) for req in (max_cpu_requirements, suite_cpu_requirements)):
- return None
- return "Wrong 'cpu' requirements: {}, should be in [{}..{}] for {}-size tests".format(suite_cpu_requirements, min_cpu_requirements, max_cpu_requirements, test_size)
+ return None
+ return "Wrong 'cpu' requirements: {}, should be in [{}..{}] for {}-size tests".format(suite_cpu_requirements, min_cpu_requirements, max_cpu_requirements, test_size)
if not isinstance(suite_cpu_requirements, int):
- return "Wrong 'cpu' requirements: {}, should be integer".format(suite_cpu_requirements)
+ return "Wrong 'cpu' requirements: {}, should be integer".format(suite_cpu_requirements)
if suite_cpu_requirements < min_cpu_requirements or suite_cpu_requirements > consts.TestRequirementsConstants.get_cpu_value(max_cpu_requirements):
- return "Wrong 'cpu' requirement: {}, should be in [{}..{}] for {}-size tests".format(suite_cpu_requirements, min_cpu_requirements, max_cpu_requirements, test_size)
+ return "Wrong 'cpu' requirement: {}, should be in [{}..{}] for {}-size tests".format(suite_cpu_requirements, min_cpu_requirements, max_cpu_requirements, test_size)
- return None
+ return None
# TODO: Remove is_kvm param when there will be guarantees on RAM
def check_ram(suite_ram_requirements, test_size, is_kvm=False):
if not isinstance(suite_ram_requirements, int):
- return "Wrong 'ram' requirements: {}, should be integer".format(suite_ram_requirements)
+ return "Wrong 'ram' requirements: {}, should be integer".format(suite_ram_requirements)
min_ram_requirements = consts.TestRequirementsConstants.MinRam
max_ram_requirements = consts.MAX_RAM_REQUIREMENTS_FOR_KVM if is_kvm else consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.Ram)
if suite_ram_requirements < min_ram_requirements or suite_ram_requirements > max_ram_requirements:
@@ -29,7 +29,7 @@ def check_ram(suite_ram_requirements, test_size, is_kvm=False):
if is_kvm:
err_msg += ' with kvm requirements'
return err_msg
- return None
+ return None
def check_ram_disk(suite_ram_disk, test_size, is_kvm=False):
@@ -37,13 +37,13 @@ def check_ram_disk(suite_ram_disk, test_size, is_kvm=False):
max_ram_disk = consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.RamDisk)
if isinstance(suite_ram_disk, str):
if all(consts.TestRequirementsConstants.is_all_ram_disk(req) for req in (max_ram_disk, suite_ram_disk)):
- return None
- return "Wrong 'ram_disk' requirements: {}, should be in [{}..{}] for {}-size tests".format(suite_ram_disk, 0, max_ram_disk, test_size)
+ return None
+ return "Wrong 'ram_disk' requirements: {}, should be in [{}..{}] for {}-size tests".format(suite_ram_disk, 0, max_ram_disk, test_size)
if not isinstance(suite_ram_disk, int):
- return "Wrong 'ram_disk' requirements: {}, should be integer".format(suite_ram_disk)
+ return "Wrong 'ram_disk' requirements: {}, should be integer".format(suite_ram_disk)
if suite_ram_disk < min_ram_disk or suite_ram_disk > consts.TestRequirementsConstants.get_ram_disk_value(max_ram_disk):
- return "Wrong 'ram_disk' requirement: {}, should be in [{}..{}] for {}-size tests".format(suite_ram_disk, min_ram_disk, max_ram_disk, test_size)
+ return "Wrong 'ram_disk' requirement: {}, should be in [{}..{}] for {}-size tests".format(suite_ram_disk, min_ram_disk, max_ram_disk, test_size)
- return None
+ return None
diff --git a/build/plugins/_test_const.py b/build/plugins/_test_const.py
index 33e9b989c3..0d03cc3d17 100644
--- a/build/plugins/_test_const.py
+++ b/build/plugins/_test_const.py
@@ -1,34 +1,34 @@
-# coding: utf-8
+# coding: utf-8
import re
import sys
-
-RESTART_TEST_INDICATOR = '##restart-test##'
-INFRASTRUCTURE_ERROR_INDICATOR = '##infrastructure-error##'
-
-RESTART_TEST_INDICATORS = [
- RESTART_TEST_INDICATOR,
- "network error",
-]
-
-# testing
-BIN_DIRECTORY = 'bin'
-CANONIZATION_RESULT_FILE_NAME = "canonization_res.json"
-CONSOLE_SNIPPET_LIMIT = 5000
-LIST_NODE_LOG_FILE = "test_list.log"
-LIST_NODE_RESULT_FILE = "test_list.json"
-LIST_RESULT_NODE_LOG_FILE = "list_result.log"
-MAX_FILE_SIZE = 1024 * 1024 * 2 # 2 MB
-MAX_TEST_RESTART_COUNT = 3
-REPORT_SNIPPET_LIMIT = 10000
-SANITIZER_ERROR_RC = 100
-TEST_SUBTEST_SEPARATOR = '::'
-TESTING_OUT_DIR_NAME = "testing_out_stuff"
+
+RESTART_TEST_INDICATOR = '##restart-test##'
+INFRASTRUCTURE_ERROR_INDICATOR = '##infrastructure-error##'
+
+RESTART_TEST_INDICATORS = [
+ RESTART_TEST_INDICATOR,
+ "network error",
+]
+
+# testing
+BIN_DIRECTORY = 'bin'
+CANONIZATION_RESULT_FILE_NAME = "canonization_res.json"
+CONSOLE_SNIPPET_LIMIT = 5000
+LIST_NODE_LOG_FILE = "test_list.log"
+LIST_NODE_RESULT_FILE = "test_list.json"
+LIST_RESULT_NODE_LOG_FILE = "list_result.log"
+MAX_FILE_SIZE = 1024 * 1024 * 2 # 2 MB
+MAX_TEST_RESTART_COUNT = 3
+REPORT_SNIPPET_LIMIT = 10000
+SANITIZER_ERROR_RC = 100
+TEST_SUBTEST_SEPARATOR = '::'
+TESTING_OUT_DIR_NAME = "testing_out_stuff"
TESTING_OUT_TAR_NAME = TESTING_OUT_DIR_NAME + ".tar"
-TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S.%f"
-TRACE_FILE_NAME = "ytest.report.trace"
-TRUNCATING_IGNORE_FILE_LIST = {TRACE_FILE_NAME, "run_test.log"}
-
+TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S.%f"
+TRACE_FILE_NAME = "ytest.report.trace"
+TRUNCATING_IGNORE_FILE_LIST = {TRACE_FILE_NAME, "run_test.log"}
+
# kvm
DEFAULT_RAM_REQUIREMENTS_FOR_KVM = 4
MAX_RAM_REQUIREMENTS_FOR_KVM = 16
@@ -37,29 +37,29 @@ MAX_RAM_REQUIREMENTS_FOR_KVM = 16
TEST_NODE_FINISHING_TIME = 5 * 60
DEFAULT_TEST_NODE_TIMEOUT = 15 * 60
-# coverage
+# coverage
COVERAGE_TESTS_TIMEOUT_FACTOR = 1.5
COVERAGE_RESOLVED_FILE_NAME_PATTERN = "coverage_resolved.{}.json"
CPP_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("cpp")
JAVA_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("java")
PYTHON_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("python")
CLANG_COVERAGE_TEST_TYPES = ("unittest", "coverage_extractor", "pytest", "py3test", "gtest", "boost_test", "exectest")
-COVERAGE_TABLE_CHUNKS = 20
-COVERAGE_YT_PROXY = "hahn.yt.yandex.net"
-COVERAGE_YT_ROOT_PATH = "//home/codecoverage"
-COVERAGE_YT_TABLE_PREFIX = "datatable"
-
-# fuzzing
-CORPUS_DATA_FILE_NAME = 'corpus.json'
-CORPUS_DATA_ROOT_DIR = 'fuzzing'
-CORPUS_DIR_NAME = 'corpus'
-FUZZING_COMPRESSION_COEF = 1.1
-FUZZING_DEFAULT_TIMEOUT = 3600
+COVERAGE_TABLE_CHUNKS = 20
+COVERAGE_YT_PROXY = "hahn.yt.yandex.net"
+COVERAGE_YT_ROOT_PATH = "//home/codecoverage"
+COVERAGE_YT_TABLE_PREFIX = "datatable"
+
+# fuzzing
+CORPUS_DATA_FILE_NAME = 'corpus.json'
+CORPUS_DATA_ROOT_DIR = 'fuzzing'
+CORPUS_DIR_NAME = 'corpus'
+FUZZING_COMPRESSION_COEF = 1.1
+FUZZING_DEFAULT_TIMEOUT = 3600
FUZZING_FINISHING_TIME = 600
-FUZZING_TIMEOUT_RE = re.compile(r'(^|\s)-max_total_time=(?P<max_time>\d+)')
-GENERATED_CORPUS_DIR_NAME = 'mined_corpus'
-MAX_CORPUS_RESOURCES_ALLOWED = 5
-
+FUZZING_TIMEOUT_RE = re.compile(r'(^|\s)-max_total_time=(?P<max_time>\d+)')
+GENERATED_CORPUS_DIR_NAME = 'mined_corpus'
+MAX_CORPUS_RESOURCES_ALLOWED = 5
+
TEST_TOOL_HOST = 'TEST_TOOL_HOST_RESOURCE_GLOBAL'
TEST_TOOL_TARGET = 'TEST_TOOL_TARGET_RESOURCE_GLOBAL'
TEST_TOOL_HOST_LOCAL = 'TEST_TOOL_HOST_LOCAL'
@@ -73,15 +73,15 @@ FLAKES_PY2_RESOURCE = 'FLAKES_PY2_RESOURCE_GLOBAL'
FLAKES_PY3_RESOURCE = 'FLAKES_PY3_RESOURCE_GLOBAL'
FLAKE8_PY2_RESOURCE = 'FLAKE8_PY2_RESOURCE_GLOBAL'
FLAKE8_PY3_RESOURCE = 'FLAKE8_PY3_RESOURCE_GLOBAL'
-
-
-class Enum(object):
-
- @classmethod
- def enumerate(cls):
- return [v for k, v in cls.__dict__.items() if not k.startswith("_")]
-
-
+
+
+class Enum(object):
+
+ @classmethod
+ def enumerate(cls):
+ return [v for k, v in cls.__dict__.items() if not k.startswith("_")]
+
+
class TestRequirements(Enum):
Container = 'container'
Cpu = 'cpu'
@@ -119,7 +119,7 @@ class TestRequirementsConstants(Enum):
return cls.AllRamDiskValue if cls.is_all_ram_disk(value) else value
-class TestSize(Enum):
+class TestSize(Enum):
Small = 'small'
Medium = 'medium'
Large = 'large'
@@ -172,7 +172,7 @@ class TestSize(Enum):
TestRequirements.RamDisk: 4,
},
Large: {
- TestRequirements.Cpu: 4,
+ TestRequirements.Cpu: 4,
TestRequirements.Ram: 32,
TestRequirements.RamDisk: 4,
},
@@ -207,14 +207,14 @@ class TestSize(Enum):
raise Exception("Unknown test size '{}'".format(size))
-class TestRunExitCode(Enum):
+class TestRunExitCode(Enum):
Skipped = 2
Failed = 3
TimeOut = 10
- InfrastructureError = 12
+ InfrastructureError = 12
-class YaTestTags(Enum):
+class YaTestTags(Enum):
Manual = "ya:manual"
Notags = "ya:notags"
Norestart = "ya:norestart"
@@ -228,100 +228,100 @@ class YaTestTags(Enum):
class Status(object):
- GOOD, XFAIL, FAIL, XPASS, MISSING, CRASHED, TIMEOUT = range(1, 8)
+ GOOD, XFAIL, FAIL, XPASS, MISSING, CRASHED, TIMEOUT = range(1, 8)
SKIPPED = -100
NOT_LAUNCHED = -200
CANON_DIFF = -300
DESELECTED = -400
INTERNAL = -sys.maxint
- FLAKY = -50
+ FLAKY = -50
BY_NAME = {'good': GOOD, 'fail': FAIL, 'xfail': XFAIL, 'xpass': XPASS, 'missing': MISSING, 'crashed': CRASHED,
'skipped': SKIPPED, 'flaky': FLAKY, 'not_launched': NOT_LAUNCHED, 'timeout': TIMEOUT, 'diff': CANON_DIFF,
'internal': INTERNAL, 'deselected': DESELECTED}
TO_STR = {GOOD: 'good', FAIL: 'fail', XFAIL: 'xfail', XPASS: 'xpass', MISSING: 'missing', CRASHED: 'crashed',
SKIPPED: 'skipped', FLAKY: 'flaky', NOT_LAUNCHED: 'not_launched', TIMEOUT: 'timeout', CANON_DIFF: 'diff',
INTERNAL: 'internal', DESELECTED: 'deselected'}
-
-
-class _Colors(object):
-
- _NAMES = [
- "blue",
- "cyan",
- "default",
- "green",
- "grey",
- "magenta",
- "red",
- "white",
- "yellow",
- ]
+
+
+class _Colors(object):
+
+ _NAMES = [
+ "blue",
+ "cyan",
+ "default",
+ "green",
+ "grey",
+ "magenta",
+ "red",
+ "white",
+ "yellow",
+ ]
_PREFIXES = ["", "light", "dark"]
-
- def __init__(self):
- self._table = {}
- for prefix in self._PREFIXES:
- for value in self._NAMES:
- name = value
- if prefix:
- name = "{}_{}".format(prefix, value)
- value = "{}-{}".format(prefix, value)
- self.__add_color(name.upper(), value)
-
- def __add_color(self, name, value):
- self._table[name] = value
- self.__setattr__(name, value)
-
-
-Colors = _Colors()
-
-
-class _Highlight(object):
-
- _MARKERS = {
- # special
- "RESET": "rst",
-
- "IMPORTANT": "imp",
- "UNIMPORTANT": "unimp",
- "BAD": "bad",
- "WARNING": "warn",
- "GOOD": "good",
- "PATH": "path",
- "ALTERNATIVE1": "alt1",
- "ALTERNATIVE2": "alt2",
- "ALTERNATIVE3": "alt3",
- }
-
- def __init__(self):
- # setting attributes because __getattr__ is much slower
- for attr, value in self._MARKERS.items():
- self.__setattr__(attr, value)
-
-
-Highlight = _Highlight()
-
-
-class _StatusColorMap(object):
-
- _MAP = {
- 'good': Highlight.GOOD,
- 'fail': Highlight.BAD,
- 'missing': Highlight.ALTERNATIVE1,
- 'crashed': Highlight.WARNING,
- 'skipped': Highlight.UNIMPORTANT,
- 'not_launched': Highlight.BAD,
- 'timeout': Highlight.BAD,
- 'flaky': Highlight.ALTERNATIVE3,
- 'xfail': Highlight.WARNING,
+
+ def __init__(self):
+ self._table = {}
+ for prefix in self._PREFIXES:
+ for value in self._NAMES:
+ name = value
+ if prefix:
+ name = "{}_{}".format(prefix, value)
+ value = "{}-{}".format(prefix, value)
+ self.__add_color(name.upper(), value)
+
+ def __add_color(self, name, value):
+ self._table[name] = value
+ self.__setattr__(name, value)
+
+
+Colors = _Colors()
+
+
+class _Highlight(object):
+
+ _MARKERS = {
+ # special
+ "RESET": "rst",
+
+ "IMPORTANT": "imp",
+ "UNIMPORTANT": "unimp",
+ "BAD": "bad",
+ "WARNING": "warn",
+ "GOOD": "good",
+ "PATH": "path",
+ "ALTERNATIVE1": "alt1",
+ "ALTERNATIVE2": "alt2",
+ "ALTERNATIVE3": "alt3",
+ }
+
+ def __init__(self):
+ # setting attributes because __getattr__ is much slower
+ for attr, value in self._MARKERS.items():
+ self.__setattr__(attr, value)
+
+
+Highlight = _Highlight()
+
+
+class _StatusColorMap(object):
+
+ _MAP = {
+ 'good': Highlight.GOOD,
+ 'fail': Highlight.BAD,
+ 'missing': Highlight.ALTERNATIVE1,
+ 'crashed': Highlight.WARNING,
+ 'skipped': Highlight.UNIMPORTANT,
+ 'not_launched': Highlight.BAD,
+ 'timeout': Highlight.BAD,
+ 'flaky': Highlight.ALTERNATIVE3,
+ 'xfail': Highlight.WARNING,
'xpass': Highlight.WARNING,
- 'diff': Highlight.BAD,
- 'internal': Highlight.BAD,
- 'deselected': Highlight.UNIMPORTANT,
- }
-
- def __getitem__(self, item):
- return self._MAP[item]
-
-
-StatusColorMap = _StatusColorMap()
+ 'diff': Highlight.BAD,
+ 'internal': Highlight.BAD,
+ 'deselected': Highlight.UNIMPORTANT,
+ }
+
+ def __getitem__(self, item):
+ return self._MAP[item]
+
+
+StatusColorMap = _StatusColorMap()
diff --git a/build/plugins/code_generator.py b/build/plugins/code_generator.py
index e527c2b8bb..ca8bb18c15 100644
--- a/build/plugins/code_generator.py
+++ b/build/plugins/code_generator.py
@@ -3,7 +3,7 @@ import os
import _import_wrapper as iw
-pattern = re.compile(r"#include\s*[<\"](?P<INDUCED>[^>\"]+)[>\"]|(?:@|{@)\s*(?:import|include|from)\s*[\"'](?P<INCLUDE>[^\"']+)[\"']")
+pattern = re.compile(r"#include\s*[<\"](?P<INDUCED>[^>\"]+)[>\"]|(?:@|{@)\s*(?:import|include|from)\s*[\"'](?P<INCLUDE>[^\"']+)[\"']")
class CodeGeneratorTemplateParser(object):
diff --git a/build/plugins/macros_with_error.py b/build/plugins/macros_with_error.py
index 4cd0544d18..e82fb56d2c 100644
--- a/build/plugins/macros_with_error.py
+++ b/build/plugins/macros_with_error.py
@@ -1,26 +1,26 @@
import sys
-import _common
-import ymake
-
-
+import _common
+
+import ymake
+
def onmacros_with_error(unit, *args):
print >> sys.stderr, 'This macros will fail'
raise Exception('Expected fail in MACROS_WITH_ERROR')
-
-
-def onrestrict_path(unit, *args):
- if args:
- if 'MSG' in args:
- pos = args.index('MSG')
- paths, msg = args[:pos], args[pos + 1:]
- msg = ' '.join(msg)
- else:
- paths, msg = args, 'forbidden'
- if not _common.strip_roots(unit.path()).startswith(paths):
- error_msg = "Path '[[imp]]{}[[rst]]' is restricted - [[bad]]{}[[rst]]. Valid path prefixes are: [[unimp]]{}[[rst]]".format(unit.path(), msg, ', '.join(paths))
- ymake.report_configure_error(error_msg)
+
+
+def onrestrict_path(unit, *args):
+ if args:
+ if 'MSG' in args:
+ pos = args.index('MSG')
+ paths, msg = args[:pos], args[pos + 1:]
+ msg = ' '.join(msg)
+ else:
+ paths, msg = args, 'forbidden'
+ if not _common.strip_roots(unit.path()).startswith(paths):
+ error_msg = "Path '[[imp]]{}[[rst]]' is restricted - [[bad]]{}[[rst]]. Valid path prefixes are: [[unimp]]{}[[rst]]".format(unit.path(), msg, ', '.join(paths))
+ ymake.report_configure_error(error_msg)
def onassert(unit, *args):
val = unit.get(args[0])
diff --git a/build/plugins/pybuild.py b/build/plugins/pybuild.py
index 1e18f14051..f32a2d39a0 100644
--- a/build/plugins/pybuild.py
+++ b/build/plugins/pybuild.py
@@ -1,7 +1,7 @@
-import os
-import collections
+import os
+import collections
from hashlib import md5
-
+
import ymake
from _common import stripext, rootrel_arc_src, tobuilddir, listid, resolve_to_ymake_path, generate_chunks, pathid
@@ -61,55 +61,55 @@ def mangle(name):
return ''.join('{}{}'.format(len(s), s) for s in name.split('.'))
-def parse_pyx_includes(filename, path, source_root, seen=None):
- normpath = lambda *x: os.path.normpath(os.path.join(*x))
-
- abs_path = normpath(source_root, filename)
- seen = seen or set()
- if abs_path in seen:
- return
- seen.add(abs_path)
-
- if not os.path.exists(abs_path):
- # File might be missing, because it might be generated
- return
-
- with open(abs_path, 'rb') as f:
- # Don't parse cimports and etc - irrelevant for cython, it's linker work
+def parse_pyx_includes(filename, path, source_root, seen=None):
+ normpath = lambda *x: os.path.normpath(os.path.join(*x))
+
+ abs_path = normpath(source_root, filename)
+ seen = seen or set()
+ if abs_path in seen:
+ return
+ seen.add(abs_path)
+
+ if not os.path.exists(abs_path):
+ # File might be missing, because it might be generated
+ return
+
+ with open(abs_path, 'rb') as f:
+ # Don't parse cimports and etc - irrelevant for cython, it's linker work
includes = ymake.parse_cython_includes(f.read())
-
- abs_dirname = os.path.dirname(abs_path)
- # All includes are relative to the file which include
- path_dirname = os.path.dirname(path)
- file_dirname = os.path.dirname(filename)
-
- for incfile in includes:
- abs_path = normpath(abs_dirname, incfile)
- if os.path.exists(abs_path):
- incname, incpath = normpath(file_dirname, incfile), normpath(path_dirname, incfile)
- yield (incname, incpath)
- # search for includes in the included files
- for e in parse_pyx_includes(incname, incpath, source_root, seen):
- yield e
- else:
- # There might be arcadia root or cython relative include.
- # Don't treat such file as missing, because there must be PEERDIR on py_library
- # which contains it.
- for path in [
- source_root,
- source_root + "/contrib/tools/cython/Cython/Includes",
- ]:
- if os.path.exists(normpath(path, incfile)):
- break
- else:
- ymake.report_configure_error("'{}' includes missing file: {} ({})".format(path, incfile, abs_path))
-
+
+ abs_dirname = os.path.dirname(abs_path)
+ # All includes are relative to the file which include
+ path_dirname = os.path.dirname(path)
+ file_dirname = os.path.dirname(filename)
+
+ for incfile in includes:
+ abs_path = normpath(abs_dirname, incfile)
+ if os.path.exists(abs_path):
+ incname, incpath = normpath(file_dirname, incfile), normpath(path_dirname, incfile)
+ yield (incname, incpath)
+ # search for includes in the included files
+ for e in parse_pyx_includes(incname, incpath, source_root, seen):
+ yield e
+ else:
+ # There might be arcadia root or cython relative include.
+ # Don't treat such file as missing, because there must be PEERDIR on py_library
+ # which contains it.
+ for path in [
+ source_root,
+ source_root + "/contrib/tools/cython/Cython/Includes",
+ ]:
+ if os.path.exists(normpath(path, incfile)):
+ break
+ else:
+ ymake.report_configure_error("'{}' includes missing file: {} ({})".format(path, incfile, abs_path))
+
def has_pyx(args):
return any(arg.endswith('.pyx') for arg in args)
-
-def get_srcdir(path, unit):
- return rootrel_arc_src(path, unit)[:-len(path)].rstrip('/')
-
+
+def get_srcdir(path, unit):
+ return rootrel_arc_src(path, unit)[:-len(path)].rstrip('/')
+
def add_python_lint_checks(unit, py_ver, files):
def get_resolved_files():
resolved_files = []
@@ -119,27 +119,27 @@ def add_python_lint_checks(unit, py_ver, files):
resolved_files.append(resolved)
return resolved_files
- if unit.get('LINT_LEVEL_VALUE') == "none":
-
- no_lint_allowed_paths = (
- "contrib/",
- "devtools/",
- "junk/",
- # temporary allowed, TODO: remove
- "taxi/uservices/",
- "travel/",
+ if unit.get('LINT_LEVEL_VALUE') == "none":
+
+ no_lint_allowed_paths = (
+ "contrib/",
+ "devtools/",
+ "junk/",
+ # temporary allowed, TODO: remove
+ "taxi/uservices/",
+ "travel/",
"market/report/lite/", # MARKETOUT-38662, deadline: 2021-08-12
"passport/backend/oauth/", # PASSP-35982
- )
-
- upath = unit.path()[3:]
-
- if not upath.startswith(no_lint_allowed_paths):
- ymake.report_configure_error("NO_LINT() is allowed only in " + ", ".join(no_lint_allowed_paths))
-
- if files and unit.get('LINT_LEVEL_VALUE') not in ("none", "none_internal"):
+ )
+
+ upath = unit.path()[3:]
+
+ if not upath.startswith(no_lint_allowed_paths):
+ ymake.report_configure_error("NO_LINT() is allowed only in " + ", ".join(no_lint_allowed_paths))
+
+ if files and unit.get('LINT_LEVEL_VALUE') not in ("none", "none_internal"):
resolved_files = get_resolved_files()
- flake8_cfg = 'build/config/tests/flake8/flake8.conf'
+ flake8_cfg = 'build/config/tests/flake8/flake8.conf'
unit.onadd_check(["flake8.py{}".format(py_ver), flake8_cfg] + resolved_files)
@@ -214,14 +214,14 @@ def onpy_srcs(unit, *args):
ns = ""
else:
ns = (unit.get('PY_NAMESPACE_VALUE') or upath.replace('/', '.')) + '.'
-
- cython_coverage = unit.get('CYTHON_COVERAGE') == 'yes'
+
+ cython_coverage = unit.get('CYTHON_COVERAGE') == 'yes'
cythonize_py = False
optimize_proto = unit.get('OPTIMIZE_PY_PROTOS_FLAG') == 'yes'
-
+
cython_directives = []
- if cython_coverage:
- cython_directives += ['-X', 'linetrace=True']
+ if cython_coverage:
+ cython_directives += ['-X', 'linetrace=True']
pyxs_c = []
pyxs_c_h = []
@@ -353,41 +353,41 @@ def onpy_srcs(unit, *args):
dump_output.close()
if pyxs:
- files2res = set()
- # Include map stores files which were included in the processing pyx file,
- # to be able to find source code of the included file inside generated file
- # for currently processing pyx file.
- include_map = collections.defaultdict(set)
-
- if cython_coverage:
- def process_pyx(filename, path, out_suffix, noext):
- # skip generated files
- if not is_arc_src(path, unit):
- return
- # source file
- files2res.add((filename, path))
- # generated
- if noext:
- files2res.add((os.path.splitext(filename)[0] + out_suffix, os.path.splitext(path)[0] + out_suffix))
- else:
- files2res.add((filename + out_suffix, path + out_suffix))
- # used includes
- for entry in parse_pyx_includes(filename, path, unit.resolve('$S')):
- files2res.add(entry)
- include_arc_rel = entry[0]
- include_map[filename].add(include_arc_rel)
- else:
- def process_pyx(filename, path, out_suffix, noext):
- pass
-
- for pyxs, cython, out_suffix, noext in [
+ files2res = set()
+ # Include map stores files which were included in the processing pyx file,
+ # to be able to find source code of the included file inside generated file
+ # for currently processing pyx file.
+ include_map = collections.defaultdict(set)
+
+ if cython_coverage:
+ def process_pyx(filename, path, out_suffix, noext):
+ # skip generated files
+ if not is_arc_src(path, unit):
+ return
+ # source file
+ files2res.add((filename, path))
+ # generated
+ if noext:
+ files2res.add((os.path.splitext(filename)[0] + out_suffix, os.path.splitext(path)[0] + out_suffix))
+ else:
+ files2res.add((filename + out_suffix, path + out_suffix))
+ # used includes
+ for entry in parse_pyx_includes(filename, path, unit.resolve('$S')):
+ files2res.add(entry)
+ include_arc_rel = entry[0]
+ include_map[filename].add(include_arc_rel)
+ else:
+ def process_pyx(filename, path, out_suffix, noext):
+ pass
+
+ for pyxs, cython, out_suffix, noext in [
(pyxs_c, unit.on_buildwith_cython_c_dep, ".c", False),
(pyxs_c_h, unit.on_buildwith_cython_c_h, ".c", True),
(pyxs_c_api_h, unit.on_buildwith_cython_c_api_h, ".c", True),
(pyxs_cpp, unit.on_buildwith_cython_cpp_dep, ".cpp", False),
]:
for path, mod in pyxs:
- filename = rootrel_arc_src(path, unit)
+ filename = rootrel_arc_src(path, unit)
cython_args = [path]
dep = path
@@ -400,26 +400,26 @@ def onpy_srcs(unit, *args):
cython_args += [
'--module-name', mod,
'--init-suffix', mangle(mod),
- '--source-root', '${ARCADIA_ROOT}',
- # set arcadia root relative __file__ for generated modules
- '-X', 'set_initial_path={}'.format(filename),
+ '--source-root', '${ARCADIA_ROOT}',
+ # set arcadia root relative __file__ for generated modules
+ '-X', 'set_initial_path={}'.format(filename),
] + cython_directives
cython(cython_args)
py_register(unit, mod, py3)
- process_pyx(filename, path, out_suffix, noext)
-
- if files2res:
- # Compile original and generated sources into target for proper cython coverage calculation
- unit.onresource_files([x for name, path in files2res for x in ('DEST', name, path)])
-
- if include_map:
- data = []
- prefix = 'resfs/cython/include'
- for line in sorted('{}/{}={}'.format(prefix, filename, ':'.join(sorted(files))) for filename, files in include_map.iteritems()):
- data += ['-', line]
- unit.onresource(data)
-
+ process_pyx(filename, path, out_suffix, noext)
+
+ if files2res:
+ # Compile original and generated sources into target for proper cython coverage calculation
+ unit.onresource_files([x for name, path in files2res for x in ('DEST', name, path)])
+
+ if include_map:
+ data = []
+ prefix = 'resfs/cython/include'
+ for line in sorted('{}/{}={}'.format(prefix, filename, ':'.join(sorted(files))) for filename, files in include_map.iteritems()):
+ data += ['-', line]
+ unit.onresource(data)
+
for swigs, on_swig_python in [
(swigs_c, unit.on_swig_python_c),
(swigs_cpp, unit.on_swig_python_cpp),
@@ -433,11 +433,11 @@ def onpy_srcs(unit, *args):
onpy_srcs(unit, swg_py + '=' + mod)
if pys:
- pys_seen = set()
- pys_dups = {m for _, m in pys if (m in pys_seen or pys_seen.add(m))}
- if pys_dups:
-            ymake.report_configure_error('Duplicate(s) found in the PY_SRCS macro: {}'.format(pys_dups))
-
+ pys_seen = set()
+ pys_dups = {m for _, m in pys if (m in pys_seen or pys_seen.add(m))}
+ if pys_dups:
+            ymake.report_configure_error('Duplicate(s) found in the PY_SRCS macro: {}'.format(pys_dups))
+
res = []
if py3:
@@ -523,10 +523,10 @@ def onpy_srcs(unit, *args):
def _check_test_srcs(*args):
- used = set(args) & {"NAMESPACE", "TOP_LEVEL", "__main__.py"}
- if used:
- param = list(used)[0]
-        ymake.report_configure_error('in TEST_SRCS: you cannot use {} here - it would break the testing machinery'.format(param))
+ used = set(args) & {"NAMESPACE", "TOP_LEVEL", "__main__.py"}
+ if used:
+ param = list(used)[0]
+        ymake.report_configure_error('in TEST_SRCS: you cannot use {} here - it would break the testing machinery'.format(param))
def ontest_srcs(unit, *args):
@@ -606,21 +606,21 @@ def onpy_main(unit, arg):
arg += ':main'
py_main(unit, arg)
-
-
-def onpy_constructor(unit, arg):
- """
- @usage: PY_CONSTRUCTOR(package.module[:func])
-
- Specifies the module or function which will be started before python's main()
- init() is expected in the target module if no function is specified
- Can be considered as __attribute__((constructor)) for python
- """
- if ':' not in arg:
- arg = arg + '=init'
- else:
-        arg = arg.replace(':', '=', 1)
- unit.onresource(['-', 'py/constructors/{}'.format(arg)])
+
+
+def onpy_constructor(unit, arg):
+ """
+ @usage: PY_CONSTRUCTOR(package.module[:func])
+
+ Specifies the module or function which will be started before python's main()
+ init() is expected in the target module if no function is specified
+ Can be considered as __attribute__((constructor)) for python
+ """
+ if ':' not in arg:
+ arg = arg + '=init'
+ else:
+        arg = arg.replace(':', '=', 1)
+ unit.onresource(['-', 'py/constructors/{}'.format(arg)])
def onpy_enums_serialization(unit, *args):
ns = ''
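
A sketch of the PY_CONSTRUCTOR() argument normalization from the hunk above, using the replace-based form; the module names are illustrative:

def normalize_constructor_arg(arg):
    if ':' not in arg:
        return arg + '=init'          # default to init() in the target module
    return arg.replace(':', '=', 1)   # package.module:func -> package.module=func

assert normalize_constructor_arg('pkg.mod') == 'pkg.mod=init'
assert normalize_constructor_arg('pkg.mod:setup') == 'pkg.mod=setup'
# the result is stored as a resource under 'py/constructors/<normalized arg>'
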
diff --git a/build/plugins/res.py b/build/plugins/res.py
index fccfb51eb5..a937caba81 100644
--- a/build/plugins/res.py
+++ b/build/plugins/res.py
@@ -9,7 +9,7 @@ def split(lst, limit):
filepath = None
lenght = 0
bucket = []
-
+
for item in lst:
if filepath:
lenght += root_lenght + len(filepath) + len(item)
@@ -17,17 +17,17 @@ def split(lst, limit):
yield bucket
bucket = []
lenght = 0
-
+
bucket.append(filepath)
bucket.append(item)
filepath = None
else:
filepath = item
-
+
if bucket:
yield bucket
-
-
+
+
def remove_prefix(text, prefix):
if text.startswith(prefix):
return text[len(prefix):]
@@ -38,8 +38,8 @@ def onfat_resource(unit, *args):
unit.onpeerdir(['library/cpp/resource'])
    # Since the maximum length of the lpCommandLine string for CreateProcess on Windows is 8k characters,
-    # we make several calls to rescompiler
-    # https://msdn.microsoft.com/ru-ru/library/windows/desktop/ms682425.aspx
+    # we make several calls to rescompiler
+    # https://msdn.microsoft.com/ru-ru/library/windows/desktop/ms682425.aspx
for part_args in split(args, 8000):
output = listid(part_args) + '.cpp'
inputs = [x for x, y in iterpair(part_args) if x != '-']
@@ -47,7 +47,7 @@ def onfat_resource(unit, *args):
inputs = ['IN'] + inputs
unit.onrun_program(['tools/rescompiler', output] + part_args + inputs + ['OUT_NOAUTO', output])
- unit.onsrcs(['GLOBAL', output])
+ unit.onsrcs(['GLOBAL', output])
def onresource_files(unit, *args):
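
A self-contained sketch of the split() batching used by onfat_resource() above: (filepath, item) pairs are grouped so each rescompiler invocation stays under the command-line limit. root_length stands in for the root_lenght value computed elsewhere in res.py:

def split(lst, limit, root_length=0):
    filepath = None
    length = 0
    bucket = []
    for item in lst:
        if filepath:
            length += root_length + len(filepath) + len(item)
            if length > limit:
                yield bucket  # flush a full bucket before adding the next pair
                bucket = []
                length = 0
            bucket.append(filepath)
            bucket.append(item)
            filepath = None
        else:
            filepath = item
    if bucket:
        yield bucket

pairs = ['data/a.txt', '-', 'data/b.txt', '-']
assert list(split(pairs, limit=8000)) == [pairs]  # everything fits in one call
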
diff --git a/build/plugins/suppressions.py b/build/plugins/suppressions.py
index a5e6bd2188..6f4a1b4f03 100644
--- a/build/plugins/suppressions.py
+++ b/build/plugins/suppressions.py
@@ -1,19 +1,19 @@
-def onsuppressions(unit, *args):
- """
-    SUPPRESSIONS() - allows specifying files with suppressions that will be used by the
-    address, leak or thread sanitizer runtime by default.
-    Use the asan.supp filename for address sanitizer, lsan.supp for leak sanitizer
-    and tsan.supp for thread sanitizer suppressions, respectively.
- See https://clang.llvm.org/docs/AddressSanitizer.html#suppressing-memory-leaks
- for details.
- """
- import os
-
- valid = ("asan.supp", "tsan.supp", "lsan.supp")
-
- if unit.get("SANITIZER_TYPE") in ("leak", "address", "thread"):
- for x in args:
- if os.path.basename(x) not in valid:
- unit.message(['error', "Invalid suppression filename: {} (any of the following is expected: {})".format(x, valid)])
- return
- unit.onsrcs(["GLOBAL"] + list(args))
+def onsuppressions(unit, *args):
+ """
+    SUPPRESSIONS() - allows specifying files with suppressions that will be used by the
+    address, leak or thread sanitizer runtime by default.
+    Use the asan.supp filename for address sanitizer, lsan.supp for leak sanitizer
+    and tsan.supp for thread sanitizer suppressions, respectively.
+ See https://clang.llvm.org/docs/AddressSanitizer.html#suppressing-memory-leaks
+ for details.
+ """
+ import os
+
+ valid = ("asan.supp", "tsan.supp", "lsan.supp")
+
+ if unit.get("SANITIZER_TYPE") in ("leak", "address", "thread"):
+ for x in args:
+ if os.path.basename(x) not in valid:
+ unit.message(['error', "Invalid suppression filename: {} (any of the following is expected: {})".format(x, valid)])
+ return
+ unit.onsrcs(["GLOBAL"] + list(args))
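
A minimal sketch of the filename validation performed by onsuppressions() above; the paths are illustrative:

import os

VALID = ("asan.supp", "tsan.supp", "lsan.supp")

def bad_suppression_files(paths):
    # a non-empty result means the SUPPRESSIONS() call would be rejected
    return [p for p in paths if os.path.basename(p) not in VALID]

assert bad_suppression_files(["contrib/python/cffi/lsan.supp"]) == []
assert bad_suppression_files(["proj/leaks.supp"]) == ["proj/leaks.supp"]
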
diff --git a/build/plugins/tests/test_requirements.py b/build/plugins/tests/test_requirements.py
index 7d1a9b98b1..24d57ac901 100644
--- a/build/plugins/tests/test_requirements.py
+++ b/build/plugins/tests/test_requirements.py
@@ -32,7 +32,7 @@ class TestRequirements(object):
assert not requirements.check_ram(1, test_size)
assert not requirements.check_ram(4, test_size)
assert not requirements.check_ram(5, test_size)
- assert not requirements.check_ram(32, consts.TestSize.Large)
+ assert not requirements.check_ram(32, consts.TestSize.Large)
assert requirements.check_ram(48, consts.TestSize.Large)
assert not requirements.check_ram(1, test_size, is_kvm=True)
diff --git a/build/plugins/ytest.py b/build/plugins/ytest.py
index f58d00c99c..8970837f0f 100644
--- a/build/plugins/ytest.py
+++ b/build/plugins/ytest.py
@@ -16,20 +16,20 @@ import collections
import ymake
-MDS_URI_PREFIX = 'https://storage.yandex-team.ru/get-devtools/'
-MDS_SHEME = 'mds'
-CANON_DATA_DIR_NAME = 'canondata'
-CANON_OUTPUT_STORAGE = 'canondata_storage'
-CANON_RESULT_FILE_NAME = 'result.json'
-CANON_MDS_RESOURCE_REGEX = re.compile(re.escape(MDS_URI_PREFIX) + r'(.*?)($|#)')
-CANON_SB_VAULT_REGEX = re.compile(r"\w+=(value|file):[-\w]+:\w+")
-CANON_SBR_RESOURCE_REGEX = re.compile(r'(sbr:/?/?(\d+))')
-
+MDS_URI_PREFIX = 'https://storage.yandex-team.ru/get-devtools/'
+MDS_SHEME = 'mds'
+CANON_DATA_DIR_NAME = 'canondata'
+CANON_OUTPUT_STORAGE = 'canondata_storage'
+CANON_RESULT_FILE_NAME = 'result.json'
+CANON_MDS_RESOURCE_REGEX = re.compile(re.escape(MDS_URI_PREFIX) + r'(.*?)($|#)')
+CANON_SB_VAULT_REGEX = re.compile(r"\w+=(value|file):[-\w]+:\w+")
+CANON_SBR_RESOURCE_REGEX = re.compile(r'(sbr:/?/?(\d+))')
+
VALID_NETWORK_REQUIREMENTS = ("full", "restricted")
VALID_DNS_REQUIREMENTS = ("default", "local", "dns64")
BLOCK_SEPARATOR = '============================================================='
-SPLIT_FACTOR_MAX_VALUE = 1000
-SPLIT_FACTOR_TEST_FILES_MAX_VALUE = 4250
+SPLIT_FACTOR_MAX_VALUE = 1000
+SPLIT_FACTOR_TEST_FILES_MAX_VALUE = 4250
PARTITION_MODS = ('SEQUENTIAL', 'MODULO')
DEFAULT_TIDY_CONFIG = "build/config/tests/clang_tidy/config.yaml"
DEFAULT_TIDY_CONFIG_MAP_PATH = "build/yandex_specific/config/clang_tidy/tidy_default_map.json"
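
How the canonical-resource regexes above classify URIs; the sample URIs are illustrative:

import re

MDS_URI_PREFIX = 'https://storage.yandex-team.ru/get-devtools/'
CANON_MDS_RESOURCE_REGEX = re.compile(re.escape(MDS_URI_PREFIX) + r'(.*?)($|#)')
CANON_SBR_RESOURCE_REGEX = re.compile(r'(sbr:/?/?(\d+))')

# MDS URI: group 1 captures the resource id up to the end or a '#' fragment
m = CANON_MDS_RESOURCE_REGEX.match(MDS_URI_PREFIX + '123/456#hash')
assert m.group(1) == '123/456'

# Sandbox URI: group 1 is the whole 'sbr://...' token, group 2 the numeric id
m = CANON_SBR_RESOURCE_REGEX.match('sbr://987654')
assert (m.group(1), m.group(2)) == ('sbr://987654', '987654')
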
@@ -69,65 +69,65 @@ def is_yt_spec_contain_pool_info(filename): # XXX switch to yson in ymake + per
return pool_re.search(yt_spec) and cypress_root_re.search(yt_spec)
-def validate_sb_vault(name, value):
- if not CANON_SB_VAULT_REGEX.match(value):
- return "sb_vault value '{}' should follow pattern <ENV_NAME>=:<value|file>:<owner>:<vault key>".format(value)
-
-
-def validate_numerical_requirement(name, value):
- if mr.resolve_value(value) is None:
- return "Cannot convert [[imp]]{}[[rst]] to the proper [[imp]]{}[[rst]] requirement value".format(value, name)
-
-
-def validate_choice_requirement(name, val, valid):
- if val not in valid:
- return "Unknown [[imp]]{}[[rst]] requirement: [[imp]]{}[[rst]], choose from [[imp]]{}[[rst]]".format(name, val, ", ".join(valid))
-
-
+def validate_sb_vault(name, value):
+ if not CANON_SB_VAULT_REGEX.match(value):
+ return "sb_vault value '{}' should follow pattern <ENV_NAME>=:<value|file>:<owner>:<vault key>".format(value)
+
+
+def validate_numerical_requirement(name, value):
+ if mr.resolve_value(value) is None:
+ return "Cannot convert [[imp]]{}[[rst]] to the proper [[imp]]{}[[rst]] requirement value".format(value, name)
+
+
+def validate_choice_requirement(name, val, valid):
+ if val not in valid:
+ return "Unknown [[imp]]{}[[rst]] requirement: [[imp]]{}[[rst]], choose from [[imp]]{}[[rst]]".format(name, val, ", ".join(valid))
+
+
def validate_force_sandbox_requirement(name, value, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, check_func):
if is_force_sandbox or not in_autocheck or is_fuzzing or is_ytexec_run:
- if value == 'all':
- return
- return validate_numerical_requirement(name, value)
- error_msg = validate_numerical_requirement(name, value)
- if error_msg:
- return error_msg
+ if value == 'all':
+ return
+ return validate_numerical_requirement(name, value)
+ error_msg = validate_numerical_requirement(name, value)
+ if error_msg:
+ return error_msg
return check_func(mr.resolve_value(value), test_size, is_kvm)
-
-
+
+
# TODO: Remove is_kvm param when there will be guarantees on RAM
def validate_requirement(req_name, value, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run):
- req_checks = {
- 'container': validate_numerical_requirement,
+ req_checks = {
+ 'container': validate_numerical_requirement,
'cpu': lambda n, v: validate_force_sandbox_requirement(n, v, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, reqs.check_cpu),
- 'disk_usage': validate_numerical_requirement,
- 'dns': lambda n, v: validate_choice_requirement(n, v, VALID_DNS_REQUIREMENTS),
- 'kvm': None,
- 'network': lambda n, v: validate_choice_requirement(n, v, VALID_NETWORK_REQUIREMENTS),
+ 'disk_usage': validate_numerical_requirement,
+ 'dns': lambda n, v: validate_choice_requirement(n, v, VALID_DNS_REQUIREMENTS),
+ 'kvm': None,
+ 'network': lambda n, v: validate_choice_requirement(n, v, VALID_NETWORK_REQUIREMENTS),
'ram': lambda n, v: validate_force_sandbox_requirement(n, v, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, reqs.check_ram),
'ram_disk': lambda n, v: validate_force_sandbox_requirement(n, v, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, reqs.check_ram_disk),
- 'sb': None,
- 'sb_vault': validate_sb_vault,
- }
-
- if req_name not in req_checks:
- return "Unknown requirement: [[imp]]{}[[rst]], choose from [[imp]]{}[[rst]]".format(req_name, ", ".join(sorted(req_checks)))
-
- if req_name in ('container', 'disk') and not is_force_sandbox:
- return "Only [[imp]]LARGE[[rst]] tests without [[imp]]ya:force_distbuild[[rst]] tag can have [[imp]]{}[[rst]] requirement".format(req_name)
-
- check_func = req_checks[req_name]
- if check_func:
- return check_func(req_name, value)
-
-
-def validate_test(unit, kw):
+ 'sb': None,
+ 'sb_vault': validate_sb_vault,
+ }
+
+ if req_name not in req_checks:
+ return "Unknown requirement: [[imp]]{}[[rst]], choose from [[imp]]{}[[rst]]".format(req_name, ", ".join(sorted(req_checks)))
+
+ if req_name in ('container', 'disk') and not is_force_sandbox:
+ return "Only [[imp]]LARGE[[rst]] tests without [[imp]]ya:force_distbuild[[rst]] tag can have [[imp]]{}[[rst]] requirement".format(req_name)
+
+ check_func = req_checks[req_name]
+ if check_func:
+ return check_func(req_name, value)
+
+
+def validate_test(unit, kw):
def get_list(key):
return deserialize_list(kw.get(key, ""))
valid_kw = copy.deepcopy(kw)
errors = []
- warnings = []
+ warnings = []
if valid_kw.get('SCRIPT-REL-PATH') == 'boost.test':
project_path = valid_kw.get('BUILD-FOLDER-PATH', "")
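
The validators above share one convention: return an error string on failure and fall through to None on success, so callers can simply collect truthy results. A minimal sketch using the choice validator:

VALID_DNS_REQUIREMENTS = ("default", "local", "dns64")

def validate_choice_requirement(name, val, valid):
    if val not in valid:
        return "Unknown {} requirement: {}, choose from {}".format(name, val, ", ".join(valid))

assert validate_choice_requirement("dns", "dns64", VALID_DNS_REQUIREMENTS) is None
err = validate_choice_requirement("dns", "bogus", VALID_DNS_REQUIREMENTS)
assert "Unknown dns requirement" in err
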
@@ -143,66 +143,66 @@ def validate_test(unit, kw):
size = valid_kw.get('SIZE', consts.TestSize.Small).lower()
    # TODO: use set instead of list
tags = get_list("TAG")
- requirements_orig = get_list("REQUIREMENTS")
+ requirements_orig = get_list("REQUIREMENTS")
in_autocheck = "ya:not_autocheck" not in tags and 'ya:manual' not in tags
- is_fat = 'ya:fat' in tags
- is_force_sandbox = 'ya:force_distbuild' not in tags and is_fat
+ is_fat = 'ya:fat' in tags
+ is_force_sandbox = 'ya:force_distbuild' not in tags and is_fat
is_ytexec_run = 'ya:yt' in tags
- is_fuzzing = valid_kw.get("FUZZING", False)
- is_kvm = 'kvm' in requirements_orig
+ is_fuzzing = valid_kw.get("FUZZING", False)
+ is_kvm = 'kvm' in requirements_orig
requirements = {}
-    list_requirements = ('sb_vault',)
- for req in requirements_orig:
+    list_requirements = ('sb_vault',)
+ for req in requirements_orig:
if req in ('kvm', ):
requirements[req] = str(True)
continue
if ":" in req:
req_name, req_value = req.split(":", 1)
- if req_name in list_requirements:
- requirements[req_name] = ",".join(filter(None, [requirements.get(req_name), req_value]))
- else:
- if req_name in requirements:
- if req_value in ["0"]:
- warnings.append("Requirement [[imp]]{}[[rst]] is dropped [[imp]]{}[[rst]] -> [[imp]]{}[[rst]]".format(req_name, requirements[req_name], req_value))
- del requirements[req_name]
- elif requirements[req_name] != req_value:
- warnings.append("Requirement [[imp]]{}[[rst]] is redefined [[imp]]{}[[rst]] -> [[imp]]{}[[rst]]".format(req_name, requirements[req_name], req_value))
- requirements[req_name] = req_value
+ if req_name in list_requirements:
+ requirements[req_name] = ",".join(filter(None, [requirements.get(req_name), req_value]))
+ else:
+ if req_name in requirements:
+ if req_value in ["0"]:
+ warnings.append("Requirement [[imp]]{}[[rst]] is dropped [[imp]]{}[[rst]] -> [[imp]]{}[[rst]]".format(req_name, requirements[req_name], req_value))
+ del requirements[req_name]
+ elif requirements[req_name] != req_value:
+ warnings.append("Requirement [[imp]]{}[[rst]] is redefined [[imp]]{}[[rst]] -> [[imp]]{}[[rst]]".format(req_name, requirements[req_name], req_value))
+ requirements[req_name] = req_value
else:
- requirements[req_name] = req_value
+ requirements[req_name] = req_value
else:
errors.append("Invalid requirement syntax [[imp]]{}[[rst]]: expect <requirement>:<value>".format(req))
- if not errors:
- for req_name, req_value in requirements.items():
+ if not errors:
+ for req_name, req_value in requirements.items():
error_msg = validate_requirement(req_name, req_value, size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run)
- if error_msg:
- errors += [error_msg]
-
+ if error_msg:
+ errors += [error_msg]
+
invalid_requirements_for_distbuild = [requirement for requirement in requirements.keys() if requirement not in ('ram', 'ram_disk', 'cpu', 'network')]
- sb_tags = [tag for tag in tags if tag.startswith('sb:')]
+ sb_tags = [tag for tag in tags if tag.startswith('sb:')]
if is_fat:
- if size != consts.TestSize.Large:
- errors.append("Only LARGE test may have ya:fat tag")
-
+ if size != consts.TestSize.Large:
+ errors.append("Only LARGE test may have ya:fat tag")
+
if in_autocheck and not is_force_sandbox:
- if invalid_requirements_for_distbuild:
- errors.append("'{}' REQUIREMENTS options can be used only for FAT tests without ya:force_distbuild tag. Remove TAG(ya:force_distbuild) or an option.".format(invalid_requirements_for_distbuild))
- if sb_tags:
- errors.append("You can set sandbox tags '{}' only for FAT tests without ya:force_distbuild. Remove TAG(ya:force_sandbox) or sandbox tags.".format(sb_tags))
- if 'ya:sandbox_coverage' in tags:
- errors.append("You can set 'ya:sandbox_coverage' tag only for FAT tests without ya:force_distbuild.")
- else:
+ if invalid_requirements_for_distbuild:
+ errors.append("'{}' REQUIREMENTS options can be used only for FAT tests without ya:force_distbuild tag. Remove TAG(ya:force_distbuild) or an option.".format(invalid_requirements_for_distbuild))
+ if sb_tags:
+ errors.append("You can set sandbox tags '{}' only for FAT tests without ya:force_distbuild. Remove TAG(ya:force_sandbox) or sandbox tags.".format(sb_tags))
+ if 'ya:sandbox_coverage' in tags:
+ errors.append("You can set 'ya:sandbox_coverage' tag only for FAT tests without ya:force_distbuild.")
+ else:
if is_force_sandbox:
- errors.append('ya:force_sandbox can be used with LARGE tests only')
- if 'ya:nofuse' in tags:
- errors.append('ya:nofuse can be used with LARGE tests only')
- if 'ya:privileged' in tags:
- errors.append("ya:privileged can be used with LARGE tests only")
- if in_autocheck and size == consts.TestSize.Large:
- errors.append("LARGE test must have ya:fat tag")
+ errors.append('ya:force_sandbox can be used with LARGE tests only')
+ if 'ya:nofuse' in tags:
+ errors.append('ya:nofuse can be used with LARGE tests only')
+ if 'ya:privileged' in tags:
+ errors.append("ya:privileged can be used with LARGE tests only")
+ if in_autocheck and size == consts.TestSize.Large:
+ errors.append("LARGE test must have ya:fat tag")
if 'ya:privileged' in tags and 'container' not in requirements:
errors.append("Only tests with 'container' requirement can have 'ya:privileged' tag")
@@ -236,34 +236,34 @@ def validate_test(unit, kw):
valid_kw['REQUIREMENTS'] = serialize_list(requiremtens_list)
if valid_kw.get("FUZZ-OPTS"):
- for option in get_list("FUZZ-OPTS"):
- if not option.startswith("-"):
- errors.append("Unrecognized fuzzer option '[[imp]]{}[[rst]]'. All fuzzer options should start with '-'".format(option))
- break
- eqpos = option.find("=")
- if eqpos == -1 or len(option) == eqpos + 1:
- errors.append("Unrecognized fuzzer option '[[imp]]{}[[rst]]'. All fuzzer options should obtain value specified after '='".format(option))
- break
- if option[eqpos - 1] == " " or option[eqpos + 1] == " ":
- errors.append("Spaces are not allowed: '[[imp]]{}[[rst]]'".format(option))
- break
- if option[:eqpos] in ("-runs", "-dict", "-jobs", "-workers", "-artifact_prefix", "-print_final_stats"):
- errors.append("You can't use '[[imp]]{}[[rst]]' - it will be automatically calculated or configured during run".format(option))
- break
-
- if valid_kw.get("YT-SPEC"):
+ for option in get_list("FUZZ-OPTS"):
+ if not option.startswith("-"):
+ errors.append("Unrecognized fuzzer option '[[imp]]{}[[rst]]'. All fuzzer options should start with '-'".format(option))
+ break
+ eqpos = option.find("=")
+ if eqpos == -1 or len(option) == eqpos + 1:
+ errors.append("Unrecognized fuzzer option '[[imp]]{}[[rst]]'. All fuzzer options should obtain value specified after '='".format(option))
+ break
+ if option[eqpos - 1] == " " or option[eqpos + 1] == " ":
+ errors.append("Spaces are not allowed: '[[imp]]{}[[rst]]'".format(option))
+ break
+ if option[:eqpos] in ("-runs", "-dict", "-jobs", "-workers", "-artifact_prefix", "-print_final_stats"):
+ errors.append("You can't use '[[imp]]{}[[rst]]' - it will be automatically calculated or configured during run".format(option))
+ break
+
+ if valid_kw.get("YT-SPEC"):
if not is_ytexec_run:
- errors.append("You can use YT_SPEC macro only tests marked with ya:yt tag")
- else:
- for filename in get_list("YT-SPEC"):
- filename = unit.resolve('$S/' + filename)
- if not os.path.exists(filename):
- errors.append("File '{}' specified in the YT_SPEC macro doesn't exist".format(filename))
- continue
+ errors.append("You can use YT_SPEC macro only tests marked with ya:yt tag")
+ else:
+ for filename in get_list("YT-SPEC"):
+ filename = unit.resolve('$S/' + filename)
+ if not os.path.exists(filename):
+ errors.append("File '{}' specified in the YT_SPEC macro doesn't exist".format(filename))
+ continue
if is_yt_spec_contain_pool_info(filename) and "ya:external" not in tags:
tags.append("ya:external")
tags.append("ya:yt_research_pool")
-
+
if valid_kw.get("USE_ARCADIA_PYTHON") == "yes" and valid_kw.get("SCRIPT-REL-PATH") == "py.test":
errors.append("PYTEST_SCRIPT is deprecated")
@@ -274,23 +274,23 @@ def validate_test(unit, kw):
if valid_kw.get('SPLIT-FACTOR'):
if valid_kw.get('FORK-MODE') == 'none':
            errors.append('SPLIT_FACTOR must be used with the FORK_TESTS() or FORK_SUBTESTS() macro')
-
- value = 1
+
+ value = 1
try:
value = int(valid_kw.get('SPLIT-FACTOR'))
if value <= 0:
raise ValueError("must be > 0")
- if value > SPLIT_FACTOR_MAX_VALUE:
- raise ValueError("the maximum allowed value is {}".format(SPLIT_FACTOR_MAX_VALUE))
+ if value > SPLIT_FACTOR_MAX_VALUE:
+ raise ValueError("the maximum allowed value is {}".format(SPLIT_FACTOR_MAX_VALUE))
except ValueError as e:
errors.append('Incorrect SPLIT_FACTOR value: {}'.format(e))
- if valid_kw.get('FORK-TEST-FILES') and size != consts.TestSize.Large:
- nfiles = count_entries(valid_kw.get('TEST-FILES'))
- if nfiles * value > SPLIT_FACTOR_TEST_FILES_MAX_VALUE:
-                errors.append('Too many chunks generated: {} (limit: {}). Remove FORK_TEST_FILES() macro or reduce SPLIT_FACTOR({}).'.format(
- nfiles * value, SPLIT_FACTOR_TEST_FILES_MAX_VALUE, value))
-
+ if valid_kw.get('FORK-TEST-FILES') and size != consts.TestSize.Large:
+ nfiles = count_entries(valid_kw.get('TEST-FILES'))
+ if nfiles * value > SPLIT_FACTOR_TEST_FILES_MAX_VALUE:
+                errors.append('Too many chunks generated: {} (limit: {}). Remove FORK_TEST_FILES() macro or reduce SPLIT_FACTOR({}).'.format(
+ nfiles * value, SPLIT_FACTOR_TEST_FILES_MAX_VALUE, value))
+
unit_path = get_norm_unit_path(unit)
if not is_fat and "ya:noretries" in tags and not is_ytexec_run \
and not unit_path.startswith("devtools/") \
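
The SPLIT_FACTOR cap above in numbers: with FORK_TEST_FILES, the chunk count is nfiles * SPLIT_FACTOR and must not exceed SPLIT_FACTOR_TEST_FILES_MAX_VALUE:

SPLIT_FACTOR_TEST_FILES_MAX_VALUE = 4250

def chunks_ok(nfiles, split_factor):
    return nfiles * split_factor <= SPLIT_FACTOR_TEST_FILES_MAX_VALUE

assert chunks_ok(42, 100)        # 4200 chunks - allowed
assert not chunks_ok(43, 100)    # 4300 chunks - rejected
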
@@ -300,25 +300,25 @@ def validate_test(unit, kw):
and not unit_path.startswith("yp/tests"):
errors.append("Only LARGE tests can have 'ya:noretries' tag")
- if errors:
- return None, warnings, errors
-
- return valid_kw, warnings, errors
-
-
-def get_norm_unit_path(unit, extra=None):
- path = _common.strip_roots(unit.path())
- if extra:
- return '{}/{}'.format(path, extra)
- return path
-
-
-def dump_test(unit, kw):
- valid_kw, warnings, errors = validate_test(unit, kw)
- for w in warnings:
- unit.message(['warn', w])
- for e in errors:
- ymake.report_configure_error(e)
+ if errors:
+ return None, warnings, errors
+
+ return valid_kw, warnings, errors
+
+
+def get_norm_unit_path(unit, extra=None):
+ path = _common.strip_roots(unit.path())
+ if extra:
+ return '{}/{}'.format(path, extra)
+ return path
+
+
+def dump_test(unit, kw):
+ valid_kw, warnings, errors = validate_test(unit, kw)
+ for w in warnings:
+ unit.message(['warn', w])
+ for e in errors:
+ ymake.report_configure_error(e)
if valid_kw is None:
return None
string_handler = StringIO.StringIO()
@@ -339,14 +339,14 @@ def deserialize_list(val):
return filter(None, val.replace('"', "").split(";"))
-def count_entries(x):
- # see (de)serialize_list
- assert x is None or isinstance(x, str), type(x)
- if not x:
- return 0
- return x.count(";") + 1
-
-
+def count_entries(x):
+ # see (de)serialize_list
+ assert x is None or isinstance(x, str), type(x)
+ if not x:
+ return 0
+ return x.count(";") + 1
+
+
def get_values_list(unit, key):
res = map(str.strip, (unit.get(key) or '').replace('$' + key, '').strip().split())
return [r for r in res if r and r not in ['""', "''"]]
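
A quick demo of the ';'-separated list convention used by the dart machinery above: deserialize_list strips quotes and splits, while count_entries counts separators without deserializing:

def deserialize_list(val):
    return filter(None, val.replace('"', "").split(";"))

def count_entries(x):
    # see (de)serialize_list
    assert x is None or isinstance(x, str), type(x)
    if not x:
        return 0
    return x.count(";") + 1

assert list(deserialize_list('"a.py;b.py"')) == ['a.py', 'b.py']
assert count_entries('a.py;b.py') == 2
assert count_entries(None) == 0
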
@@ -357,31 +357,31 @@ def get_norm_paths(unit, key):
return [x.rstrip('\\/') for x in get_values_list(unit, key)]
-def get_unit_list_variable(unit, name):
- items = unit.get(name)
- if items:
- items = items.split(' ')
- assert items[0] == "${}".format(name), (items, name)
- return items[1:]
- return []
-
-
-def implies(a, b):
- return bool((not a) or b)
-
-
-def match_coverage_extractor_requirements(unit):
-    # add the coverage extractor test only if all of the following hold:
-    return all([
-        # tests were requested
-        unit.get("TESTS_REQUESTED") == "yes",
-        # the build implies clang coverage, which supports segment extraction from the binaries
-        unit.get("CLANG_COVERAGE") == "yes",
-        # for contrib, contrib coverage was explicitly enabled
-        implies(get_norm_unit_path(unit).startswith("contrib/"), unit.get("ENABLE_CONTRIB_COVERAGE") == "yes"),
-    ])
-
-
+def get_unit_list_variable(unit, name):
+ items = unit.get(name)
+ if items:
+ items = items.split(' ')
+ assert items[0] == "${}".format(name), (items, name)
+ return items[1:]
+ return []
+
+
+def implies(a, b):
+ return bool((not a) or b)
+
+
+def match_coverage_extractor_requirements(unit):
+    # add the coverage extractor test only if all of the following hold:
+    return all([
+        # tests were requested
+        unit.get("TESTS_REQUESTED") == "yes",
+        # the build implies clang coverage, which supports segment extraction from the binaries
+        unit.get("CLANG_COVERAGE") == "yes",
+        # for contrib, contrib coverage was explicitly enabled
+        implies(get_norm_unit_path(unit).startswith("contrib/"), unit.get("ENABLE_CONTRIB_COVERAGE") == "yes"),
+    ])
+
+
def get_tidy_config_map(unit):
global tidy_config_map
if tidy_config_map is None:
@@ -420,16 +420,16 @@ def onadd_ytest(unit, *args):
test_data = sorted(_common.filter_out_by_keyword(spec_args.get('DATA', []) + get_norm_paths(unit, 'TEST_DATA_VALUE'), 'AUTOUPDATED'))
- if flat_args[1] == "fuzz.test":
- unit.ondata("arcadia/fuzzing/{}/corpus.json".format(get_norm_unit_path(unit)))
+ if flat_args[1] == "fuzz.test":
+ unit.ondata("arcadia/fuzzing/{}/corpus.json".format(get_norm_unit_path(unit)))
elif flat_args[1] == "go.test":
data, _ = get_canonical_test_resources(unit)
test_data += data
- elif flat_args[1] == "coverage.extractor" and not match_coverage_extractor_requirements(unit):
- # XXX
-        # The current ymake implementation doesn't allow calling a macro inside the 'when' body;
- # that's why we add ADD_YTEST(coverage.extractor) to every PROGRAM entry and check requirements later
- return
+ elif flat_args[1] == "coverage.extractor" and not match_coverage_extractor_requirements(unit):
+ # XXX
+        # The current ymake implementation doesn't allow calling a macro inside the 'when' body;
+ # that's why we add ADD_YTEST(coverage.extractor) to every PROGRAM entry and check requirements later
+ return
elif flat_args[1] == "clang_tidy" and unit.get("TIDY") != "yes":
# Graph is not prepared
return
@@ -439,7 +439,7 @@ def onadd_ytest(unit, *args):
test_tags = serialize_list(_get_test_tags(unit, spec_args))
test_timeout = ''.join(spec_args.get('TIMEOUT', [])) or unit.get('TEST_TIMEOUT') or ''
test_requirements = spec_args.get('REQUIREMENTS', []) + get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')
-
+
if flat_args[1] != "clang_tidy" and unit.get("TIDY") == "yes":
# graph changed for clang_tidy tests
if flat_args[1] in ("unittest.py", "gunittest", "g_benchmark"):
@@ -471,17 +471,17 @@ def onadd_ytest(unit, *args):
fork_mode = fork_mode or spec_args.get('FORK_MODE', []) or unit.get('TEST_FORK_MODE').split()
fork_mode = ' '.join(fork_mode) if fork_mode else ''
- unit_path = get_norm_unit_path(unit)
-
+ unit_path = get_norm_unit_path(unit)
+
test_record = {
'TEST-NAME': flat_args[0],
'SCRIPT-REL-PATH': flat_args[1],
'TESTED-PROJECT-NAME': unit.name(),
'TESTED-PROJECT-FILENAME': unit.filename(),
- 'SOURCE-FOLDER-PATH': unit_path,
- # TODO get rid of BUILD-FOLDER-PATH
- 'BUILD-FOLDER-PATH': unit_path,
- 'BINARY-PATH': "{}/{}".format(unit_path, unit.filename()),
+ 'SOURCE-FOLDER-PATH': unit_path,
+ # TODO get rid of BUILD-FOLDER-PATH
+ 'BUILD-FOLDER-PATH': unit_path,
+ 'BINARY-PATH': "{}/{}".format(unit_path, unit.filename()),
'GLOBAL-LIBRARY-PATH': unit.global_filename(),
'CUSTOM-DEPENDENCIES': ' '.join(spec_args.get('DEPENDS', []) + get_values_list(unit, 'TEST_DEPENDS_VALUE')),
'TEST-RECIPES': prepare_recipes(unit.get("TEST_RECIPES_VALUE")),
@@ -495,9 +495,9 @@ def onadd_ytest(unit, *args):
'TAG': test_tags,
'REQUIREMENTS': serialize_list(test_requirements),
'TEST-CWD': unit.get('TEST_CWD_VALUE') or '',
- 'FUZZ-DICTS': serialize_list(spec_args.get('FUZZ_DICTS', []) + get_unit_list_variable(unit, 'FUZZ_DICTS_VALUE')),
- 'FUZZ-OPTS': serialize_list(spec_args.get('FUZZ_OPTS', []) + get_unit_list_variable(unit, 'FUZZ_OPTS_VALUE')),
- 'YT-SPEC': serialize_list(spec_args.get('YT_SPEC', []) + get_unit_list_variable(unit, 'TEST_YT_SPEC_VALUE')),
+ 'FUZZ-DICTS': serialize_list(spec_args.get('FUZZ_DICTS', []) + get_unit_list_variable(unit, 'FUZZ_DICTS_VALUE')),
+ 'FUZZ-OPTS': serialize_list(spec_args.get('FUZZ_OPTS', []) + get_unit_list_variable(unit, 'FUZZ_OPTS_VALUE')),
+ 'YT-SPEC': serialize_list(spec_args.get('YT_SPEC', []) + get_unit_list_variable(unit, 'TEST_YT_SPEC_VALUE')),
'BLOB': unit.get('TEST_BLOB_DATA') or '',
'SKIP_TEST': unit.get('SKIP_TEST_VALUE') or '',
'TEST_IOS_DEVICE_TYPE': unit.get('TEST_IOS_DEVICE_TYPE_VALUE') or '',
@@ -513,12 +513,12 @@ def onadd_ytest(unit, *args):
else:
test_record["TEST-NAME"] += "_bench"
- if flat_args[1] == 'fuzz.test' and unit.get('FUZZING') == 'yes':
- test_record['FUZZING'] = '1'
+ if flat_args[1] == 'fuzz.test' and unit.get('FUZZING') == 'yes':
+ test_record['FUZZING'] = '1'
# use all cores if fuzzing requested
test_record['REQUIREMENTS'] = serialize_list(filter(None, deserialize_list(test_record['REQUIREMENTS']) + ["cpu:all", "ram:all"]))
-
- data = dump_test(unit, test_record)
+
+ data = dump_test(unit, test_record)
if data:
unit.set_property(["DART_DATA", data])
save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
@@ -550,7 +550,7 @@ def onadd_check(unit, *args):
flat_args, spec_args = _common.sort_by_keywords({"DEPENDS": -1, "TIMEOUT": 1, "DATA": -1, "TAG": -1, "REQUIREMENTS": -1, "FORK_MODE": 1,
"SPLIT_FACTOR": 1, "FORK_SUBTESTS": 0, "FORK_TESTS": 0, "SIZE": 1}, args)
check_type = flat_args[0]
- test_dir = get_norm_unit_path(unit)
+ test_dir = get_norm_unit_path(unit)
test_timeout = ''
fork_mode = ''
@@ -558,7 +558,7 @@ def onadd_check(unit, *args):
extra_test_dart_data = {}
ymake_java_test = unit.get('YMAKE_JAVA_TEST') == 'yes'
- if check_type in ["flake8.py2", "flake8.py3"]:
+ if check_type in ["flake8.py2", "flake8.py3"]:
script_rel_path = check_type
fork_mode = unit.get('TEST_FORK_MODE') or ''
elif check_type == "JAVA_STYLE":
@@ -628,18 +628,18 @@ def onadd_check(unit, *args):
'FORK-TEST-FILES': '',
'SIZE': 'SMALL',
'TAG': '',
- 'REQUIREMENTS': '',
+ 'REQUIREMENTS': '',
'USE_ARCADIA_PYTHON': use_arcadia_python or '',
'OLD_PYTEST': 'no',
'PYTHON-PATHS': '',
- # TODO remove FILES, see DEVTOOLS-7052
- 'FILES': test_files,
- 'TEST-FILES': test_files,
+ # TODO remove FILES, see DEVTOOLS-7052
+ 'FILES': test_files,
+ 'TEST-FILES': test_files,
'NO_JBUILD': 'yes' if ymake_java_test else 'no',
}
test_record.update(extra_test_dart_data)
- data = dump_test(unit, test_record)
+ data = dump_test(unit, test_record)
if data:
unit.set_property(["DART_DATA", data])
save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
@@ -659,10 +659,10 @@ def onadd_check_py_imports(unit, *args):
return
unit.onpeerdir(['library/python/testing/import_test'])
check_type = "py.imports"
- test_dir = get_norm_unit_path(unit)
+ test_dir = get_norm_unit_path(unit)
use_arcadia_python = unit.get('USE_ARCADIA_PYTHON')
- test_files = serialize_list([get_norm_unit_path(unit, unit.filename())])
+ test_files = serialize_list([get_norm_unit_path(unit, unit.filename())])
test_record = {
'TEST-NAME': "pyimports",
'TEST-TIMEOUT': '',
@@ -681,15 +681,15 @@ def onadd_check_py_imports(unit, *args):
'USE_ARCADIA_PYTHON': use_arcadia_python or '',
'OLD_PYTEST': 'no',
'PYTHON-PATHS': '',
- # TODO remove FILES, see DEVTOOLS-7052
- 'FILES': test_files,
- 'TEST-FILES': test_files,
+ # TODO remove FILES, see DEVTOOLS-7052
+ 'FILES': test_files,
+ 'TEST-FILES': test_files,
}
if unit.get('NO_CHECK_IMPORTS_FOR_VALUE') != "None":
test_record["NO-CHECK"] = serialize_list(get_values_list(unit, 'NO_CHECK_IMPORTS_FOR_VALUE') or ["*"])
else:
test_record["NO-CHECK"] = ''
- data = dump_test(unit, test_record)
+ data = dump_test(unit, test_record)
if data:
unit.set_property(["DART_DATA", data])
save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
@@ -714,14 +714,14 @@ def onadd_pytest_script(unit, *args):
test_files = get_values_list(unit, 'TEST_SRCS_VALUE')
tags = _get_test_tags(unit)
- requirements = get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')
+ requirements = get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')
test_data = get_norm_paths(unit, 'TEST_DATA_VALUE')
- data, data_files = get_canonical_test_resources(unit)
+ data, data_files = get_canonical_test_resources(unit)
test_data += data
python_paths = get_values_list(unit, 'TEST_PYTHON_PATH_VALUE')
binary_path = None
test_cwd = unit.get('TEST_CWD_VALUE') or ''
- _dump_test(unit, test_type, test_files, timeout, get_norm_unit_path(unit), custom_deps, test_data, python_paths, split_factor, fork_mode, test_size, tags, requirements, binary_path, test_cwd=test_cwd, data_files=data_files)
+ _dump_test(unit, test_type, test_files, timeout, get_norm_unit_path(unit), custom_deps, test_data, python_paths, split_factor, fork_mode, test_size, tags, requirements, binary_path, test_cwd=test_cwd, data_files=data_files)
def onadd_pytest_bin(unit, *args):
@@ -756,40 +756,40 @@ def add_test_to_dart(unit, test_type, binary_path=None, runner_bin=None):
test_size = unit.get('TEST_SIZE_NAME') or ''
test_cwd = unit.get('TEST_CWD_VALUE') or ''
- unit_path = unit.path()
+ unit_path = unit.path()
test_files = get_values_list(unit, 'TEST_SRCS_VALUE')
tags = _get_test_tags(unit)
- requirements = get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')
+ requirements = get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')
test_data = get_norm_paths(unit, 'TEST_DATA_VALUE')
- data, data_files = get_canonical_test_resources(unit)
+ data, data_files = get_canonical_test_resources(unit)
test_data += data
python_paths = get_values_list(unit, 'TEST_PYTHON_PATH_VALUE')
- yt_spec = get_values_list(unit, 'TEST_YT_SPEC_VALUE')
+ yt_spec = get_values_list(unit, 'TEST_YT_SPEC_VALUE')
if not binary_path:
- binary_path = os.path.join(unit_path, unit.filename())
- _dump_test(unit, test_type, test_files, timeout, get_norm_unit_path(unit), custom_deps, test_data, python_paths, split_factor, fork_mode, test_size, tags, requirements, binary_path, test_cwd=test_cwd, runner_bin=runner_bin, yt_spec=yt_spec, data_files=data_files)
+ binary_path = os.path.join(unit_path, unit.filename())
+ _dump_test(unit, test_type, test_files, timeout, get_norm_unit_path(unit), custom_deps, test_data, python_paths, split_factor, fork_mode, test_size, tags, requirements, binary_path, test_cwd=test_cwd, runner_bin=runner_bin, yt_spec=yt_spec, data_files=data_files)
def extract_java_system_properties(unit, args):
if len(args) % 2:
- return [], 'Wrong use of SYSTEM_PROPERTIES in {}: odd number of arguments'.format(unit.path())
+ return [], 'Wrong use of SYSTEM_PROPERTIES in {}: odd number of arguments'.format(unit.path())
- props = []
+ props = []
for x, y in zip(args[::2], args[1::2]):
if x == 'FILE':
if y.startswith('${BINDIR}') or y.startswith('${ARCADIA_BUILD_ROOT}') or y.startswith('/'):
- return [], 'Wrong use of SYSTEM_PROPERTIES in {}: absolute/build file path {}'.format(unit.path(), y)
+ return [], 'Wrong use of SYSTEM_PROPERTIES in {}: absolute/build file path {}'.format(unit.path(), y)
y = _common.rootrel_arc_src(y, unit)
if not os.path.exists(unit.resolve('$S/' + y)):
- return [], 'Wrong use of SYSTEM_PROPERTIES in {}: can\'t resolve {}'.format(unit.path(), y)
+ return [], 'Wrong use of SYSTEM_PROPERTIES in {}: can\'t resolve {}'.format(unit.path(), y)
y = '${ARCADIA_ROOT}/' + y
props.append({'type': 'file', 'path': y})
else:
props.append({'type': 'inline', 'key': x, 'value': y})
- return props, None
+ return props, None
def onjava_test(unit, *args):
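
A simplified sketch of extract_java_system_properties() above: arguments come in pairs, and a FILE marker turns the pair into a file property (path resolution and error cases omitted; values illustrative):

def extract_props(args):
    if len(args) % 2:
        return None, 'odd number of arguments'
    props = []
    for x, y in zip(args[::2], args[1::2]):
        if x == 'FILE':
            props.append({'type': 'file', 'path': y})
        else:
            props.append({'type': 'inline', 'key': x, 'value': y})
    return props, None

props, err = extract_props(['log.level', 'debug', 'FILE', 'props/test.properties'])
assert err is None and props[1]['type'] == 'file'
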
@@ -801,28 +801,28 @@ def onjava_test(unit, *args):
if unit.get('MODULE_TYPE') == 'JTEST_FOR':
if not unit.get('UNITTEST_DIR'):
- ymake.report_configure_error('skip JTEST_FOR in {}: no args provided'.format(unit.path()))
- return
+ ymake.report_configure_error('skip JTEST_FOR in {}: no args provided'.format(unit.path()))
+ return
java_cp_arg_type = unit.get('JAVA_CLASSPATH_CMD_TYPE_VALUE') or 'MANIFEST'
if java_cp_arg_type not in ('MANIFEST', 'COMMAND_FILE', 'LIST'):
        ymake.report_configure_error('{}: TEST_JAVA_CLASSPATH_CMD_TYPE({}) is invalid. Choose an argument from MANIFEST, COMMAND_FILE or LIST'.format(unit.path(), java_cp_arg_type))
return
- unit_path = unit.path()
- path = _common.strip_roots(unit_path)
+ unit_path = unit.path()
+ path = _common.strip_roots(unit_path)
test_data = get_norm_paths(unit, 'TEST_DATA_VALUE')
- test_data.append('arcadia/build/scripts/run_junit.py')
+ test_data.append('arcadia/build/scripts/run_junit.py')
test_data.append('arcadia/build/scripts/unpacking_jtest_runner.py')
- data, data_files = get_canonical_test_resources(unit)
- test_data += data
-
-    props, error_msg = extract_java_system_properties(unit, get_values_list(unit, 'SYSTEM_PROPERTIES_VALUE'))
-    if error_msg:
-        ymake.report_configure_error(error_msg)
- return
+ data, data_files = get_canonical_test_resources(unit)
+ test_data += data
+
+    props, error_msg = extract_java_system_properties(unit, get_values_list(unit, 'SYSTEM_PROPERTIES_VALUE'))
+    if error_msg:
+        ymake.report_configure_error(error_msg)
+ return
for prop in props:
if prop['type'] == 'file':
test_data.append(prop['path'].replace('${ARCADIA_ROOT}', 'arcadia'))
@@ -831,7 +831,7 @@ def onjava_test(unit, *args):
test_cwd = unit.get('TEST_CWD_VALUE') or '' # TODO: validate test_cwd value
- if unit.get('MODULE_TYPE') == 'JUNIT5':
+ if unit.get('MODULE_TYPE') == 'JUNIT5':
script_rel_path = 'junit5.test'
else:
script_rel_path = 'junit.test'
@@ -842,7 +842,7 @@ def onjava_test(unit, *args):
'TEST-NAME': '-'.join([os.path.basename(os.path.dirname(path)), os.path.basename(path)]),
'SCRIPT-REL-PATH': script_rel_path,
'TEST-TIMEOUT': unit.get('TEST_TIMEOUT') or '',
- 'TESTED-PROJECT-NAME': path,
+ 'TESTED-PROJECT-NAME': path,
'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")),
# 'TEST-PRESERVE-ENV': 'da',
'TEST-DATA': serialize_list(sorted(_common.filter_out_by_keyword(test_data, 'AUTOUPDATED'))),
@@ -878,7 +878,7 @@ def onjava_test(unit, *args):
else:
test_record['TEST_JAR'] = '{}/{}.jar'.format(unit.get('MODDIR'), unit.get('REALPRJNAME'))
- data = dump_test(unit, test_record)
+ data = dump_test(unit, test_record)
if data:
unit.set_property(['DART_DATA', data])
@@ -892,7 +892,7 @@ def onjava_test_deps(unit, *args):
assert len(args) == 1
mode = args[0]
- path = get_norm_unit_path(unit)
+ path = get_norm_unit_path(unit)
ymake_java_test = unit.get('YMAKE_JAVA_TEST') == 'yes'
test_record = {
@@ -923,7 +923,7 @@ def onjava_test_deps(unit, *args):
if ymake_java_test:
test_record['CLASSPATH'] = '$B/{}/{}.jar ${{DART_CLASSPATH}}'.format(unit.get('MODDIR'), unit.get('REALPRJNAME'))
- data = dump_test(unit, test_record)
+ data = dump_test(unit, test_record)
unit.set_property(['DART_DATA', data])
@@ -951,12 +951,12 @@ def _dump_test(
fork_mode,
test_size,
tags,
- requirements,
+ requirements,
binary_path='',
old_pytest=False,
test_cwd=None,
- runner_bin=None,
- yt_spec=None,
+ runner_bin=None,
+ yt_spec=None,
data_files=None
):
@@ -965,7 +965,7 @@ def _dump_test(
else:
script_rel_path = test_type
- unit_path = unit.path()
+ unit_path = unit.path()
fork_test_files = unit.get('FORK_TEST_FILES_MODE')
fork_mode = ' '.join(fork_mode) if fork_mode else ''
use_arcadia_python = unit.get('USE_ARCADIA_PYTHON')
@@ -1029,11 +1029,11 @@ def onrun(unit, *args):
def onsetup_exectest(unit, *args):
- command = unit.get(["EXECTEST_COMMAND_VALUE"])
- if command is None:
- ymake.report_configure_error("EXECTEST must have at least one RUN macro")
- return
- command = command.replace("$EXECTEST_COMMAND_VALUE", "")
+ command = unit.get(["EXECTEST_COMMAND_VALUE"])
+ if command is None:
+ ymake.report_configure_error("EXECTEST must have at least one RUN macro")
+ return
+ command = command.replace("$EXECTEST_COMMAND_VALUE", "")
if "PYTHON_BIN" in command:
unit.ondepends('contrib/tools/python')
unit.set(["TEST_BLOB_DATA", base64.b64encode(command)])
@@ -1043,71 +1043,71 @@ def onsetup_exectest(unit, *args):
def onsetup_run_python(unit):
if unit.get("USE_ARCADIA_PYTHON") == "yes":
unit.ondepends('contrib/tools/python')
-
-
-def get_canonical_test_resources(unit):
- unit_path = unit.path()
+
+
+def get_canonical_test_resources(unit):
+ unit_path = unit.path()
canon_data_dir = os.path.join(unit.resolve(unit_path), CANON_DATA_DIR_NAME, unit.get('CANONIZE_SUB_PATH') or '')
-
- try:
- _, dirs, files = next(os.walk(canon_data_dir))
- except StopIteration:
- # path doesn't exist
- return [], []
-
- if CANON_RESULT_FILE_NAME in files:
- return _get_canonical_data_resources_v2(os.path.join(canon_data_dir, CANON_RESULT_FILE_NAME), unit_path)
- return [], []
-
-
-def _load_canonical_file(filename, unit_path):
- try:
- with open(filename) as results_file:
- return json.load(results_file)
- except Exception as e:
- print>>sys.stderr, "malformed canonical data in {}: {} ({})".format(unit_path, e, filename)
- return {}
-
-
-def _get_resource_from_uri(uri):
- m = CANON_MDS_RESOURCE_REGEX.match(uri)
- if m:
- res_id = m.group(1)
- return "{}:{}".format(MDS_SHEME, res_id)
-
- m = CANON_SBR_RESOURCE_REGEX.match(uri)
- if m:
-        # There might be a conflict between resources, because all sandbox resources are named 'resource.tar.gz'
-        # That's why we use the '=' notation to specify a distinct path for each resource
- uri = m.group(1)
- res_id = m.group(2)
- return "{}={}".format(uri, '/'.join([CANON_OUTPUT_STORAGE, res_id]))
-
-
-def _get_external_resources_from_canon_data(data):
- # Method should work with both canonization versions:
- # result.json: {'uri':X 'checksum':Y}
- # result.json: {'testname': {'uri':X 'checksum':Y}}
- # result.json: {'testname': [{'uri':X 'checksum':Y}]}
- # Also there is a bug - if user returns {'uri': 1} from test - machinery will fail
- # That's why we check 'uri' and 'checksum' fields presence
- # (it's still a bug - user can return {'uri':X, 'checksum': Y}, we need to unify canonization format)
- res = set()
-
- if isinstance(data, dict):
- if 'uri' in data and 'checksum' in data:
- resource = _get_resource_from_uri(data['uri'])
- if resource:
- res.add(resource)
- else:
- for k, v in data.iteritems():
- res.update(_get_external_resources_from_canon_data(v))
- elif isinstance(data, list):
- for e in data:
- res.update(_get_external_resources_from_canon_data(e))
-
- return res
-
-
-def _get_canonical_data_resources_v2(filename, unit_path):
+
+ try:
+ _, dirs, files = next(os.walk(canon_data_dir))
+ except StopIteration:
+ # path doesn't exist
+ return [], []
+
+ if CANON_RESULT_FILE_NAME in files:
+ return _get_canonical_data_resources_v2(os.path.join(canon_data_dir, CANON_RESULT_FILE_NAME), unit_path)
+ return [], []
+
+
+def _load_canonical_file(filename, unit_path):
+ try:
+ with open(filename) as results_file:
+ return json.load(results_file)
+ except Exception as e:
+ print>>sys.stderr, "malformed canonical data in {}: {} ({})".format(unit_path, e, filename)
+ return {}
+
+
+def _get_resource_from_uri(uri):
+ m = CANON_MDS_RESOURCE_REGEX.match(uri)
+ if m:
+ res_id = m.group(1)
+ return "{}:{}".format(MDS_SHEME, res_id)
+
+ m = CANON_SBR_RESOURCE_REGEX.match(uri)
+ if m:
+        # There might be a conflict between resources, because all sandbox resources are named 'resource.tar.gz'
+        # That's why we use the '=' notation to specify a distinct path for each resource
+ uri = m.group(1)
+ res_id = m.group(2)
+ return "{}={}".format(uri, '/'.join([CANON_OUTPUT_STORAGE, res_id]))
+
+
+def _get_external_resources_from_canon_data(data):
+    # The method should work with all canonization formats:
+ # result.json: {'uri':X 'checksum':Y}
+ # result.json: {'testname': {'uri':X 'checksum':Y}}
+ # result.json: {'testname': [{'uri':X 'checksum':Y}]}
+    # There is also a bug: if a user returns {'uri': 1} from a test, the machinery will fail.
+    # That's why we check for the presence of both the 'uri' and 'checksum' fields
+    # (it's still a bug - a user can return {'uri': X, 'checksum': Y}; we need to unify the canonization format)
+ res = set()
+
+ if isinstance(data, dict):
+ if 'uri' in data and 'checksum' in data:
+ resource = _get_resource_from_uri(data['uri'])
+ if resource:
+ res.add(resource)
+ else:
+ for k, v in data.iteritems():
+ res.update(_get_external_resources_from_canon_data(v))
+ elif isinstance(data, list):
+ for e in data:
+ res.update(_get_external_resources_from_canon_data(e))
+
+ return res
+
+
+def _get_canonical_data_resources_v2(filename, unit_path):
return (_get_external_resources_from_canon_data(_load_canonical_file(filename, unit_path)), [filename])
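
The three canonization layouts handled above, run through a simplified traversal that keeps only the dict/list recursion and the uri+checksum check (URIs illustrative):

def collect_uris(data):
    res = set()
    if isinstance(data, dict):
        if 'uri' in data and 'checksum' in data:
            res.add(data['uri'])            # a leaf canonical entry
        else:
            for v in data.values():         # {'testname': ...} wrapper
                res.update(collect_uris(v))
    elif isinstance(data, list):
        for e in data:
            res.update(collect_uris(e))
    return res

flat = {'uri': 'sbr://1', 'checksum': 'x'}
by_test = {'test_a': {'uri': 'sbr://2', 'checksum': 'y'}}
by_test_list = {'test_b': [{'uri': 'sbr://3', 'checksum': 'z'}]}
assert collect_uris(flat) == {'sbr://1'}
assert collect_uris(by_test) == {'sbr://2'}
assert collect_uris(by_test_list) == {'sbr://3'}
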
diff --git a/build/rules/contrib_deps.policy b/build/rules/contrib_deps.policy
index cd07358c77..9af4b85cc2 100644
--- a/build/rules/contrib_deps.policy
+++ b/build/rules/contrib_deps.policy
@@ -36,7 +36,7 @@ ALLOW contrib/(deprecated/)?python/django -> library/python/gunicorn
ALLOW contrib/python/python-magic -> library/python/symbols/libmagic
ALLOW contrib/python/typecode -> library/python/symbols/libmagic
ALLOW contrib/tools/jdk/test -> devtools/ya/yalibrary/tools
-ALLOW contrib/libs/clang12/tools/extra/clang-tidy/tool -> library/cpp/clang_tidy
+ALLOW contrib/libs/clang12/tools/extra/clang-tidy/tool -> library/cpp/clang_tidy
ALLOW contrib/libs/inja -> library/cpp/scheme
ALLOW contrib/libs/geos/capi/ctypes -> library/python/ctypes
ALLOW contrib/libs/leveldb -> library/cpp/deprecated/mapped_file
diff --git a/build/rules/flake8/migrations.yaml b/build/rules/flake8/migrations.yaml
index be6071572f..6e54bf2e62 100644
--- a/build/rules/flake8/migrations.yaml
+++ b/build/rules/flake8/migrations.yaml
@@ -3662,5 +3662,5 @@ migrations:
- ads/libs/py_lmcompute/ft
- ads/libs/py_autobudget/mt/test_attribution_preprocessor
- ads/bigkv/tensor_transport/tests
- - scarab/api/python3
- - mssngr/botplatform/src/bots/core/migrations
+ - scarab/api/python3
+ - mssngr/botplatform/src/bots/core/migrations
diff --git a/build/scripts/append_file.py b/build/scripts/append_file.py
index d7fd7b30fd..6b5d53bc71 100644
--- a/build/scripts/append_file.py
+++ b/build/scripts/append_file.py
@@ -5,5 +5,5 @@ if __name__ == "__main__":
file_path = sys.argv[1]
with open(file_path, "a") as f:
- for text in sys.argv[2:]:
- print >>f, text
+ for text in sys.argv[2:]:
+ print >>f, text
diff --git a/build/scripts/clang_tidy.py b/build/scripts/clang_tidy.py
index cc8f88e70c..eb1b690ee9 100644
--- a/build/scripts/clang_tidy.py
+++ b/build/scripts/clang_tidy.py
@@ -1,11 +1,11 @@
import argparse
-import contextlib
+import contextlib
import json
-import os
-import re
-import shutil
+import os
+import re
+import shutil
import sys
-import tempfile
+import tempfile
import subprocess
@@ -20,12 +20,12 @@ def setup_script(args):
def parse_args():
parser = argparse.ArgumentParser()
- parser.add_argument("--testing-src", required=True)
- parser.add_argument("--clang-tidy-bin", required=True)
+ parser.add_argument("--testing-src", required=True)
+ parser.add_argument("--clang-tidy-bin", required=True)
parser.add_argument("--config-validation-script", required=True)
parser.add_argument("--ymake-python", required=True)
- parser.add_argument("--tidy-json", required=True)
- parser.add_argument("--source-root", required=True)
+ parser.add_argument("--tidy-json", required=True)
+ parser.add_argument("--source-root", required=True)
parser.add_argument("--build-root", required=True)
parser.add_argument("--default-config-file", required=True)
parser.add_argument("--project-config-file", required=True)
@@ -35,27 +35,27 @@ def parse_args():
return parser.parse_known_args()
-def generate_compilation_database(clang_cmd, source_root, filename, path):
- compile_database = [
- {
- "file": filename,
+def generate_compilation_database(clang_cmd, source_root, filename, path):
+ compile_database = [
+ {
+ "file": filename,
"command": subprocess.list2cmdline(clang_cmd),
- "directory": source_root,
- }
- ]
- compilation_database_json = os.path.join(path, "compile_commands.json")
- with open(compilation_database_json, "w") as afile:
+ "directory": source_root,
+ }
+ ]
+ compilation_database_json = os.path.join(path, "compile_commands.json")
+ with open(compilation_database_json, "w") as afile:
json.dump(compile_database, afile)
return compilation_database_json
-@contextlib.contextmanager
-def gen_tmpdir():
- path = tempfile.mkdtemp()
- yield path
- shutil.rmtree(path)
-
-
+@contextlib.contextmanager
+def gen_tmpdir():
+ path = tempfile.mkdtemp()
+ yield path
+ shutil.rmtree(path)
+
+
@contextlib.contextmanager
def gen_tmpfile():
_, path = tempfile.mkstemp()
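
What generate_compilation_database() above emits: a single-entry compile_commands.json in the standard clang tooling format (values illustrative):

import json
import subprocess

clang_cmd = ["clang++", "-c", "foo.cpp", "-o", "foo.o"]
entry = {
    "file": "foo.cpp",
    "command": subprocess.list2cmdline(clang_cmd),  # flatten argv into one command string
    "directory": "/path/to/source/root",
}
print(json.dumps([entry], indent=2))
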
@@ -63,21 +63,21 @@ def gen_tmpfile():
os.remove(path)
-def load_profile(path):
- if os.path.exists(path):
- files = os.listdir(path)
- if len(files) == 1:
- with open(os.path.join(path, files[0])) as afile:
- return json.load(afile)["profile"]
- elif len(files) > 1:
- return {
- "error": "found several profile files: {}".format(files),
- }
- return {
- "error": "profile file is missing",
- }
-
-
+def load_profile(path):
+ if os.path.exists(path):
+ files = os.listdir(path)
+ if len(files) == 1:
+ with open(os.path.join(path, files[0])) as afile:
+ return json.load(afile)["profile"]
+ elif len(files) > 1:
+ return {
+ "error": "found several profile files: {}".format(files),
+ }
+ return {
+ "error": "profile file is missing",
+ }
+
+
def load_fixes(path):
if os.path.exists(path):
with open(path, 'r') as afile:
@@ -125,46 +125,46 @@ def main():
filter_configs(args.project_config_file, filtered_config)
result_config_file = tidy_config_validation.merge_tidy_configs(base_config_path=args.default_config_file, additional_config_path=filtered_config, result_config_path=result_config)
compile_command_path = generate_compilation_database(clang_cmd, args.source_root, args.testing_src, db_tmpdir)
- cmd = [
- clang_tidy_bin,
- args.testing_src,
- "-p",
- compile_command_path,
- "--warnings-as-errors",
- "*",
- "--config-file",
+ cmd = [
+ clang_tidy_bin,
+ args.testing_src,
+ "-p",
+ compile_command_path,
+ "--warnings-as-errors",
+ "*",
+ "--config-file",
result_config_file,
- "--header-filter",
- header_filter,
- "--use-color",
- "--enable-check-profile",
+ "--header-filter",
+ header_filter,
+ "--use-color",
+ "--enable-check-profile",
"--store-check-profile={}".format(profile_tmpdir),
- ]
+ ]
if args.export_fixes == "yes":
cmd += ["--export-fixes", fixes_file]
if args.checks:
cmd += ["--checks", args.checks]
- res = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- out, err = res.communicate()
- exit_code = res.returncode
+ res = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ out, err = res.communicate()
+ exit_code = res.returncode
profile = load_profile(profile_tmpdir)
testing_src = os.path.relpath(args.testing_src, args.source_root)
tidy_fixes = load_fixes(fixes_file)
-
- with open(output_json, "wb") as afile:
- json.dump(
- {
+
+ with open(output_json, "wb") as afile:
+ json.dump(
+ {
"file": testing_src,
- "exit_code": exit_code,
- "profile": profile,
- "stderr": err,
- "stdout": out,
+ "exit_code": exit_code,
+ "profile": profile,
+ "stderr": err,
+ "stdout": out,
"fixes": tidy_fixes,
- },
- afile,
- )
-
+ },
+ afile,
+ )
+
if __name__ == "__main__":
main()
diff --git a/build/scripts/compile_cuda.py b/build/scripts/compile_cuda.py
index bf85ae053c..c0bec50b2a 100644
--- a/build/scripts/compile_cuda.py
+++ b/build/scripts/compile_cuda.py
@@ -45,27 +45,27 @@ def main():
cflags.append('-fopenmp')
cflags.remove('-fopenmp=libomp')
- skip_list = [
- '-gline-tables-only',
- # clang coverage
- '-fprofile-instr-generate',
- '-fcoverage-mapping',
+ skip_list = [
+ '-gline-tables-only',
+ # clang coverage
+ '-fprofile-instr-generate',
+ '-fcoverage-mapping',
        '/Zc:inline',  # skipped so that unreferenced functions (kernel registrators) are not removed
'-Wno-c++17-extensions',
'-flto',
'-faligned-allocation',
- ]
-
+ ]
+
if skip_nocxxinc:
skip_list.append('-nostdinc++')
- for flag in skip_list:
+ for flag in skip_list:
if flag in cflags:
cflags.remove(flag)
skip_prefix_list = [
'-fsanitize=',
- '-fsanitize-coverage=',
+ '-fsanitize-coverage=',
'-fsanitize-blacklist=',
'--system-header-prefix',
]
diff --git a/build/scripts/configure_file.py b/build/scripts/configure_file.py
index 6d434c3e8c..1873ed70eb 100755
--- a/build/scripts/configure_file.py
+++ b/build/scripts/configure_file.py
@@ -53,7 +53,7 @@ if __name__ == "__main__":
usage()
varDict = {}
for x in sys.argv[3:]:
- key, value = str(x).split('=', 1)
+ key, value = str(x).split('=', 1)
varDict[key] = value
main(sys.argv[1], sys.argv[2], varDict)
diff --git a/build/scripts/copy_to_dir.py b/build/scripts/copy_to_dir.py
index 53f3207bb7..9baeb5ffac 100644
--- a/build/scripts/copy_to_dir.py
+++ b/build/scripts/copy_to_dir.py
@@ -34,7 +34,7 @@ def hardlink_or_copy(src, dst):
if e.errno == errno.EEXIST:
return
elif e.errno == errno.EXDEV:
- sys.stderr.write("Can't make cross-device hardlink - fallback to copy: {} -> {}\n".format(src, dst))
+ sys.stderr.write("Can't make cross-device hardlink - fallback to copy: {} -> {}\n".format(src, dst))
shutil.copy(src, dst)
else:
raise
diff --git a/build/scripts/coverage-info.py b/build/scripts/coverage-info.py
index d3bf13c4e7..94491d9256 100644
--- a/build/scripts/coverage-info.py
+++ b/build/scripts/coverage-info.py
@@ -149,7 +149,7 @@ def gen_info_global(cmd, cov_info, probe_path, update_stat, lcov_args):
lcov_args.append(cov_info)
-def init_all_coverage_files(gcno_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info, prefix_filter, exclude_files):
+def init_all_coverage_files(gcno_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info, prefix_filter, exclude_files):
with tarfile.open(gcno_archive) as gcno_tf:
for gcno_item in gcno_tf:
if gcno_item.isfile() and gcno_item.name.endswith(GCNO_EXT):
@@ -157,13 +157,13 @@ def init_all_coverage_files(gcno_archive, fname2gcno, fname2info, geninfo_execut
gcno_name = gcno_item.name
source_fname = gcno_name[:-len(GCNO_EXT)]
- if prefix_filter and not source_fname.startswith(prefix_filter):
- sys.stderr.write("Skipping {} (doesn't match prefix '{}')\n".format(source_fname, prefix_filter))
- continue
- if exclude_files and exclude_files.search(source_fname):
- sys.stderr.write("Skipping {} (matched exclude pattern '{}')\n".format(source_fname, exclude_files.pattern))
- continue
-
+ if prefix_filter and not source_fname.startswith(prefix_filter):
+ sys.stderr.write("Skipping {} (doesn't match prefix '{}')\n".format(source_fname, prefix_filter))
+ continue
+ if exclude_files and exclude_files.search(source_fname):
+ sys.stderr.write("Skipping {} (matched exclude pattern '{}')\n".format(source_fname, exclude_files.pattern))
+ continue
+
fname2gcno[source_fname] = gcno_name
if os.path.getsize(gcno_name) > 0:
@@ -234,7 +234,7 @@ def main(source_root, output, gcno_archive, gcda_archive, gcov_tool, prefix_filt
def gen_info(cmd, cov_info):
gen_info_global(cmd, cov_info, probe_path, update_stat, lcov_args)
- init_all_coverage_files(gcno_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info, prefix_filter, exclude_files)
+ init_all_coverage_files(gcno_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info, prefix_filter, exclude_files)
process_all_coverage_files(gcda_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info)
if coverage_report_path:
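
For reference, the skip logic restored in init_all_coverage_files boils down to a prefix allow-filter plus a compiled exclude regex; a self-contained sketch with invented file names and pattern:

    import re
    import sys

    prefix_filter = 'library/'                # invented for the example
    exclude_files = re.compile(r'_ut\.cpp$')  # invented for the example

    for source_fname in ['library/foo.cpp', 'util/bar.cpp', 'library/baz_ut.cpp']:
        if prefix_filter and not source_fname.startswith(prefix_filter):
            sys.stderr.write("Skipping {} (doesn't match prefix '{}')\n".format(source_fname, prefix_filter))
            continue
        if exclude_files and exclude_files.search(source_fname):
            sys.stderr.write("Skipping {} (matched exclude pattern '{}')\n".format(source_fname, exclude_files.pattern))
            continue
        print(source_fname)  # only library/foo.cpp survives both filters
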
diff --git a/build/scripts/create_jcoverage_report.py b/build/scripts/create_jcoverage_report.py
index cb7918ff04..45083ff4f7 100644
--- a/build/scripts/create_jcoverage_report.py
+++ b/build/scripts/create_jcoverage_report.py
@@ -3,7 +3,7 @@ import tarfile
import zipfile
import os
import sys
-import time
+import time
import subprocess
@@ -14,23 +14,23 @@ def mkdir_p(path):
pass
-class Timer(object):
-
- def __init__(self):
- self.start = time.time()
-
- def step(self, msg):
- sys.stderr.write("{} ({}s)\n".format(msg, int(time.time() - self.start)))
- self.start = time.time()
-
-
+class Timer(object):
+
+ def __init__(self):
+ self.start = time.time()
+
+ def step(self, msg):
+ sys.stderr.write("{} ({}s)\n".format(msg, int(time.time() - self.start)))
+ self.start = time.time()
+
+
def main(source, output, java, prefix_filter, exclude_filter, jars_list, output_format, tar_output, agent_disposition, runners_paths):
- timer = Timer()
+ timer = Timer()
reports_dir = 'jacoco_reports_dir'
mkdir_p(reports_dir)
with tarfile.open(source) as tf:
tf.extractall(reports_dir)
- timer.step("Coverage data extracted")
+ timer.step("Coverage data extracted")
reports = [os.path.join(reports_dir, fname) for fname in os.listdir(reports_dir)]
with open(jars_list) as f:
@@ -52,10 +52,10 @@ def main(source, output, java, prefix_filter, exclude_filter, jars_list, output_
if jar.endswith('devtools-jacoco-agent.jar'):
agent_disposition = jar
- # Skip java contrib - it's irrelevant coverage
- if jar.startswith('contrib/java'):
- continue
-
+ # Skip java contrib - it's irrelevant coverage
+ if jar.startswith('contrib/java'):
+ continue
+
with zipfile.ZipFile(jar) as jf:
for entry in jf.infolist():
if entry.filename.endswith('.java'):
@@ -67,35 +67,35 @@ def main(source, output, java, prefix_filter, exclude_filter, jars_list, output_
else:
continue
- entry.filename = entry.filename.encode('utf-8')
+ entry.filename = entry.filename.encode('utf-8')
jf.extract(entry, dest)
- timer.step("Jar files extracted")
+ timer.step("Jar files extracted")
if not agent_disposition:
print>>sys.stderr, 'Can\'t find jacoco agent. Will not generate html report for java coverage.'
- if tar_output:
- report_dir = 'java.report.temp'
- else:
- report_dir = output
+ if tar_output:
+ report_dir = 'java.report.temp'
+ else:
+ report_dir = output
mkdir_p(report_dir)
if agent_disposition:
- agent_cmd = [java, '-jar', agent_disposition, src_dir, cls_dir, prefix_filter or '.', exclude_filter or '__no_exclude__', report_dir, output_format]
+ agent_cmd = [java, '-jar', agent_disposition, src_dir, cls_dir, prefix_filter or '.', exclude_filter or '__no_exclude__', report_dir, output_format]
agent_cmd += reports
subprocess.check_call(agent_cmd)
- timer.step("Jacoco finished")
+ timer.step("Jacoco finished")
- if tar_output:
- with tarfile.open(output, 'w') as outf:
- outf.add(report_dir, arcname='.')
+ if tar_output:
+ with tarfile.open(output, 'w') as outf:
+ outf.add(report_dir, arcname='.')
if __name__ == '__main__':
- if 'LC_ALL' in os.environ:
- if os.environ['LC_ALL'] == 'C':
- os.environ['LC_ALL'] = 'en_GB.UTF-8'
-
+ if 'LC_ALL' in os.environ:
+ if os.environ['LC_ALL'] == 'C':
+ os.environ['LC_ALL'] = 'en_GB.UTF-8'
+
parser = argparse.ArgumentParser()
parser.add_argument('--source', action='store')
@@ -104,9 +104,9 @@ if __name__ == '__main__':
parser.add_argument('--prefix-filter', action='store')
parser.add_argument('--exclude-filter', action='store')
parser.add_argument('--jars-list', action='store')
- parser.add_argument('--output-format', action='store', default="html")
- parser.add_argument('--raw-output', dest='tar_output', action='store_false', default=True)
- parser.add_argument('--agent-disposition', action='store')
+ parser.add_argument('--output-format', action='store', default="html")
+ parser.add_argument('--raw-output', dest='tar_output', action='store_false', default=True)
+ parser.add_argument('--agent-disposition', action='store')
parser.add_argument('--runner-path', dest='runners_paths', action='append', default=[])
args = parser.parse_args()
main(**vars(args))
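
The Timer helper restored above is self-contained; a usage sketch (timings are illustrative):

    import sys
    import time

    class Timer(object):
        def __init__(self):
            self.start = time.time()

        def step(self, msg):
            sys.stderr.write("{} ({}s)\n".format(msg, int(time.time() - self.start)))
            self.start = time.time()  # each step measures from the previous one

    timer = Timer()
    time.sleep(1)
    timer.step("Coverage data extracted")  # -> "Coverage data extracted (1s)"
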
diff --git a/build/scripts/error.py b/build/scripts/error.py
index 6e4256e5c2..f7d8ecb2cc 100644
--- a/build/scripts/error.py
+++ b/build/scripts/error.py
@@ -1,19 +1,19 @@
-# Sync content of this file with devtools/ya/core/error/__init__.py
-
+# Sync content of this file with devtools/ya/core/error/__init__.py
+
TEMPORARY_ERROR_MESSAGES = [
- 'Connection reset by peer',
- 'Connection timed out',
- 'Function not implemented',
- 'I/O operation on closed file',
- 'Internal Server Error',
- 'Network connection closed unexpectedly',
+ 'Connection reset by peer',
+ 'Connection timed out',
+ 'Function not implemented',
+ 'I/O operation on closed file',
+ 'Internal Server Error',
+ 'Network connection closed unexpectedly',
'Network is unreachable',
'No route to host',
- 'No space left on device',
- 'Not enough space',
- 'Temporary failure in name resolution',
+ 'No space left on device',
+ 'Not enough space',
+ 'Temporary failure in name resolution',
'The read operation timed out',
- 'timeout: timed out',
+ 'timeout: timed out',
]
@@ -23,55 +23,55 @@ class ExitCodes(object):
COMPILATION_FAILED = 11
INFRASTRUCTURE_ERROR = 12
NOT_RETRIABLE_ERROR = 13
- YT_STORE_FETCH_ERROR = 14
+ YT_STORE_FETCH_ERROR = 14
def merge_exit_codes(exit_codes):
- return max(e if e >= 0 else 1 for e in exit_codes) if exit_codes else 0
+ return max(e if e >= 0 else 1 for e in exit_codes) if exit_codes else 0
def is_temporary_error(exc):
- import logging
- logger = logging.getLogger(__name__)
-
+ import logging
+ logger = logging.getLogger(__name__)
+
if getattr(exc, 'temporary', False):
- logger.debug("Exception has temporary attribute: %s", exc)
+ logger.debug("Exception has temporary attribute: %s", exc)
return True
import errno
err = getattr(exc, 'errno', None)
if err == errno.ECONNREFUSED or err == errno.ENETUNREACH:
- logger.debug("Exception has errno attribute: %s (errno=%s)", exc, err)
+ logger.debug("Exception has errno attribute: %s (errno=%s)", exc, err)
return True
import socket
if isinstance(exc, socket.timeout) or isinstance(getattr(exc, 'reason', None), socket.timeout):
- logger.debug("Socket timeout exception: %s", exc)
+ logger.debug("Socket timeout exception: %s", exc)
return True
if isinstance(exc, socket.gaierror):
- logger.debug("Getaddrinfo exception: %s", exc)
+ logger.debug("Getaddrinfo exception: %s", exc)
+ return True
+
+ import urllib2
+
+ if isinstance(exc, urllib2.HTTPError) and exc.code in (429, ):
+ logger.debug("urllib2.HTTPError: %s", exc)
return True
- import urllib2
-
- if isinstance(exc, urllib2.HTTPError) and exc.code in (429, ):
- logger.debug("urllib2.HTTPError: %s", exc)
- return True
-
import httplib
if isinstance(exc, httplib.IncompleteRead):
- logger.debug("IncompleteRead exception: %s", exc)
+ logger.debug("IncompleteRead exception: %s", exc)
return True
exc_str = str(exc)
for message in TEMPORARY_ERROR_MESSAGES:
if message in exc_str:
- logger.debug("Found temporary error pattern (%s): %s", message, exc_str)
+ logger.debug("Found temporary error pattern (%s): %s", message, exc_str)
return True
return False
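
For reference, the two helpers restored above compose like this (the sample exception is invented):

    TEMPORARY_ERROR_MESSAGES = ['Connection reset by peer', 'No space left on device']

    def merge_exit_codes(exit_codes):
        return max(e if e >= 0 else 1 for e in exit_codes) if exit_codes else 0

    exc_str = str(OSError('Connection reset by peer'))
    print(any(m in exc_str for m in TEMPORARY_ERROR_MESSAGES))  # True -> treated as retriable
    print(merge_exit_codes([0, 0, 12]))  # 12: the worst code wins
    print(merge_exit_codes([-9, 0]))     # 1: negative (signal) codes collapse to 1
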
diff --git a/build/scripts/fetch_from.py b/build/scripts/fetch_from.py
index bbca65219f..db4fea50bf 100755
--- a/build/scripts/fetch_from.py
+++ b/build/scripts/fetch_from.py
@@ -1,19 +1,19 @@
-import datetime as dt
-import errno
+import datetime as dt
+import errno
import hashlib
-import json
-import logging
-import os
+import json
+import logging
+import os
import platform
import random
-import shutil
-import socket
+import shutil
+import socket
import string
import sys
-import tarfile
-import urllib2
+import tarfile
+import urllib2
-import retry
+import retry
def make_user_agent():
@@ -29,7 +29,7 @@ def add_common_arguments(parser):
parser.add_argument('--executable', action='store_true', help='make outputs executable')
parser.add_argument('--log-path')
parser.add_argument('-v', '--verbose', action='store_true', default=os.environ.get('YA_VERBOSE_FETCHER'), help='increase stderr verbosity')
- parser.add_argument('outputs', nargs='*', default=[])
+ parser.add_argument('outputs', nargs='*', default=[])
def ensure_dir(path):
@@ -37,7 +37,7 @@ def ensure_dir(path):
os.makedirs(path)
-# Reference code: library/python/fs/__init__.py
+# Reference code: library/python/fs/__init__.py
def hardlink_or_copy(src, dst):
ensure_dir(os.path.dirname(dst))
@@ -49,23 +49,23 @@ def hardlink_or_copy(src, dst):
except OSError as e:
if e.errno == errno.EEXIST:
return
- elif e.errno in (errno.EXDEV, errno.EMLINK, errno.EINVAL, errno.EACCES):
- sys.stderr.write("Can't make hardlink (errno={}) - fallback to copy: {} -> {}\n".format(e.errno, src, dst))
+ elif e.errno in (errno.EXDEV, errno.EMLINK, errno.EINVAL, errno.EACCES):
+ sys.stderr.write("Can't make hardlink (errno={}) - fallback to copy: {} -> {}\n".format(e.errno, src, dst))
shutil.copy(src, dst)
else:
raise
-def rename_or_copy_and_remove(src, dst):
+def rename_or_copy_and_remove(src, dst):
ensure_dir(os.path.dirname(dst))
-
- try:
- os.rename(src, dst)
- except OSError:
+
+ try:
+ os.rename(src, dst)
+ except OSError:
shutil.copy(src, dst)
- os.remove(src)
-
-
+ os.remove(src)
+
+
class BadChecksumFetchError(Exception):
pass
@@ -114,17 +114,17 @@ def is_temporary(e):
def is_broken(e):
return isinstance(e, urllib2.HTTPError) and e.code in (410, 404)
- if is_broken(e):
- return False
+ if is_broken(e):
+ return False
+
+ if isinstance(e, (BadChecksumFetchError, IncompleteFetchError, urllib2.URLError, socket.error)):
+ return True
+
+ import error
+
+ return error.is_temporary_error(e)
- if isinstance(e, (BadChecksumFetchError, IncompleteFetchError, urllib2.URLError, socket.error)):
- return True
- import error
-
- return error.is_temporary_error(e)
-
-
def uniq_string_generator(size=6, chars=string.ascii_lowercase + string.digits):
return ''.join(random.choice(chars) for _ in range(size))
@@ -292,28 +292,28 @@ def fetch_url(url, unpack, resource_file_name, expected_md5=None, expected_sha1=
return tmp_file_name
-def chmod(filename, mode):
+def chmod(filename, mode):
if platform.system().lower() == 'windows':
# https://docs.microsoft.com/en-us/windows/win32/fileio/hard-links-and-junctions:
# hard to reset read-only attribute for removal if there are multiple hardlinks
return
- stat = os.stat(filename)
- if stat.st_mode & 0o777 != mode:
- try:
- os.chmod(filename, mode)
- except OSError:
+ stat = os.stat(filename)
+ if stat.st_mode & 0o777 != mode:
+ try:
+ os.chmod(filename, mode)
+ except OSError:
import pwd
- sys.stderr.write("{} st_mode: {} pwuid: {}\n".format(filename, stat.st_mode, pwd.getpwuid(os.stat(filename).st_uid)))
- raise
-
-
+ sys.stderr.write("{} st_mode: {} pwuid: {}\n".format(filename, stat.st_mode, pwd.getpwuid(os.stat(filename).st_uid)))
+ raise
+
+
def process(fetched_file, file_name, args, remove=True):
assert len(args.rename) <= len(args.outputs), (
'too few outputs to rename', args.rename, 'into', args.outputs)
- # Forbid changes to the loaded resource
- chmod(fetched_file, 0o444)
-
+ # Forbid changes to the loaded resource
+ chmod(fetched_file, 0o444)
+
if not os.path.isfile(fetched_file):
raise ResourceIsDirectoryError('Resource must be a file, not a directory: %s' % fetched_file)
@@ -332,16 +332,16 @@ def process(fetched_file, file_name, args, remove=True):
if args.untar_to:
ensure_dir(args.untar_to)
- # Extract only requested files
+ # Extract only requested files
try:
with tarfile.open(fetched_file, mode='r:*') as tar:
- inputs = set(map(os.path.normpath, args.rename + args.outputs[len(args.rename):]))
- members = [entry for entry in tar if os.path.normpath(os.path.join(args.untar_to, entry.name)) in inputs]
- tar.extractall(args.untar_to, members=members)
- # Forbid changes to the loaded resource data
- for root, _, files in os.walk(args.untar_to):
- for filename in files:
- chmod(os.path.join(root, filename), 0o444)
+ inputs = set(map(os.path.normpath, args.rename + args.outputs[len(args.rename):]))
+ members = [entry for entry in tar if os.path.normpath(os.path.join(args.untar_to, entry.name)) in inputs]
+ tar.extractall(args.untar_to, members=members)
+ # Forbid changes to the loaded resource data
+ for root, _, files in os.walk(args.untar_to):
+ for filename in files:
+ chmod(os.path.join(root, filename), 0o444)
except tarfile.ReadError as e:
logging.exception(e)
raise ResourceUnpackingError('File {} cannot be untared'.format(fetched_file))
@@ -354,12 +354,12 @@ def process(fetched_file, file_name, args, remove=True):
hardlink_or_copy(src, dst)
else:
logging.info('Renaming %s to %s', src, dst)
- if os.path.exists(dst):
- raise ResourceUnpackingError("Target file already exists ({} -> {})".format(src, dst))
+ if os.path.exists(dst):
+ raise ResourceUnpackingError("Target file already exists ({} -> {})".format(src, dst))
if remove:
rename_or_copy_and_remove(src, dst)
else:
- hardlink_or_copy(src, dst)
+ hardlink_or_copy(src, dst)
for path in args.outputs:
if not os.path.exists(path):
@@ -367,9 +367,9 @@ def process(fetched_file, file_name, args, remove=True):
if not os.path.isfile(path):
raise OutputIsDirectoryError('Output must be a file, not a directory: %s' % os.path.abspath(path))
if args.executable:
- chmod(path, os.stat(path).st_mode | 0o111)
+ chmod(path, os.stat(path).st_mode | 0o111)
if os.path.abspath(path) == os.path.abspath(fetched_file):
remove = False
-
+
if remove:
os.remove(fetched_file)
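
A condensed sketch of the hardlink-with-copy-fallback pattern restored in fetch_from.py (directory creation omitted; the errno set is taken from the hunk above):

    import errno
    import os
    import shutil
    import sys

    def hardlink_or_copy(src, dst):
        try:
            os.link(src, dst)
        except OSError as e:
            if e.errno == errno.EEXIST:
                return  # destination already present
            elif e.errno in (errno.EXDEV, errno.EMLINK, errno.EINVAL, errno.EACCES):
                sys.stderr.write("Can't make hardlink (errno={}) - fallback to copy: {} -> {}\n".format(e.errno, src, dst))
                shutil.copy(src, dst)
            else:
                raise
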
diff --git a/build/scripts/fetch_from_archive.py b/build/scripts/fetch_from_archive.py
index 765a3004f6..57aff91b5e 100644
--- a/build/scripts/fetch_from_archive.py
+++ b/build/scripts/fetch_from_archive.py
@@ -31,6 +31,6 @@ if __name__ == '__main__':
logging.exception(e)
print >>sys.stderr, open(args.abs_log_path).read()
sys.stderr.flush()
-
- import error
- sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
+
+ import error
+ sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
diff --git a/build/scripts/fetch_from_external.py b/build/scripts/fetch_from_external.py
index cf3c967a49..d4ed6f4221 100644
--- a/build/scripts/fetch_from_external.py
+++ b/build/scripts/fetch_from_external.py
@@ -55,6 +55,6 @@ if __name__ == '__main__':
logging.exception(e)
print >>sys.stderr, open(args.abs_log_path).read()
sys.stderr.flush()
-
- import error
- sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
+
+ import error
+ sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
diff --git a/build/scripts/fetch_from_mds.py b/build/scripts/fetch_from_mds.py
index 7ee05b7c2e..5e4e656394 100644
--- a/build/scripts/fetch_from_mds.py
+++ b/build/scripts/fetch_from_mds.py
@@ -45,6 +45,6 @@ if __name__ == '__main__':
logging.exception(e)
print >>sys.stderr, open(args.abs_log_path).read()
sys.stderr.flush()
-
- import error
- sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
+
+ import error
+ sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
diff --git a/build/scripts/fetch_from_sandbox.py b/build/scripts/fetch_from_sandbox.py
index 511d7bf9dd..a99542e174 100755
--- a/build/scripts/fetch_from_sandbox.py
+++ b/build/scripts/fetch_from_sandbox.py
@@ -106,13 +106,13 @@ def _urlopen(url, data=None, headers=None):
time.sleep(retry_after)
-def _query(url):
- return json.loads(_urlopen(url))
-
-
+def _query(url):
+ return json.loads(_urlopen(url))
+
+
_SANDBOX_BASE_URL = 'https://sandbox.yandex-team.ru/api/v1.0'
-
-
+
+
def get_resource_info(resource_id, touch=False, no_links=False):
url = ''.join((_SANDBOX_BASE_URL, '/resource/', str(resource_id)))
headers = {}
@@ -136,10 +136,10 @@ def fetch(resource_id, custom_fetcher):
try:
resource_info = get_resource_info(resource_id, touch=True, no_links=True)
except Exception as e:
- sys.stderr.write(
- "Failed to fetch resource {}: {}\n".format(resource_id, str(e))
+ sys.stderr.write(
+ "Failed to fetch resource {}: {}\n".format(resource_id, str(e))
)
- raise
+ raise
if resource_info.get('state', 'DELETED') != 'READY':
raise ResourceInfoError("Resource {} is not READY".format(resource_id))
@@ -264,6 +264,6 @@ if __name__ == '__main__':
logging.exception(e)
print >>sys.stderr, open(args.abs_log_path).read()
sys.stderr.flush()
-
- import error
- sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
+
+ import error
+ sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
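
The same tail is restored in fetch_from_archive, fetch_from_external, fetch_from_mds and fetch_from_sandbox: temporary failures map to ExitCodes.INFRASTRUCTURE_ERROR (12, per error.py above), presumably so the calling layer can distinguish retriable fetches from real build errors. A rough sketch of the shared pattern:

    import sys

    INFRASTRUCTURE_ERROR = 12  # error.ExitCodes.INFRASTRUCTURE_ERROR

    def exit_on_fetch_error(e, is_temporary):
        # is_temporary is fetch_from.is_temporary in the real scripts
        sys.exit(INFRASTRUCTURE_ERROR if is_temporary(e) else 1)
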
diff --git a/build/scripts/go_tool.py b/build/scripts/go_tool.py
index 5e5ba8c024..c1e98b20c0 100644
--- a/build/scripts/go_tool.py
+++ b/build/scripts/go_tool.py
@@ -781,7 +781,7 @@ def do_link_test(args):
if __name__ == '__main__':
args = pcf.get_args(sys.argv[1:])
-
+
parser = argparse.ArgumentParser(prefix_chars='+')
parser.add_argument('++mode', choices=['dll', 'exe', 'lib', 'test'], required=True)
parser.add_argument('++srcs', nargs='*', required=True)
diff --git a/build/scripts/java_pack_to_file.py b/build/scripts/java_pack_to_file.py
index f6911c7796..8d2aeb93fd 100644
--- a/build/scripts/java_pack_to_file.py
+++ b/build/scripts/java_pack_to_file.py
@@ -1,25 +1,25 @@
-import os
-import re
-import optparse
-
-PACKAGE_REGEX = re.compile(r'^\s*package\s+(.*?);', flags=re.MULTILINE | re.DOTALL)
-
-
-def parse_args():
- parser = optparse.OptionParser()
- parser.add_option('-o', '--output')
- parser.add_option('-a', '--source-root', dest='source_root')
- return parser.parse_args()
-
-
-def get_package_name(filename):
- with open(filename) as afile:
- match = PACKAGE_REGEX.search(afile.read())
- if match:
- return match.group(1).replace('\n\t ', '').replace('.', '/')
- return ''
-
-
+import os
+import re
+import optparse
+
+PACKAGE_REGEX = re.compile(r'^\s*package\s+(.*?);', flags=re.MULTILINE | re.DOTALL)
+
+
+def parse_args():
+ parser = optparse.OptionParser()
+ parser.add_option('-o', '--output')
+ parser.add_option('-a', '--source-root', dest='source_root')
+ return parser.parse_args()
+
+
+def get_package_name(filename):
+ with open(filename) as afile:
+ match = PACKAGE_REGEX.search(afile.read())
+ if match:
+ return match.group(1).replace('\n\t ', '').replace('.', '/')
+ return ''
+
+
def write_coverage_sources(output, srcroot, files):
with open(output, 'w') as afile:
for filename in files:
@@ -27,10 +27,10 @@ def write_coverage_sources(output, srcroot, files):
afile.write(os.path.join(pname, os.path.basename(filename)) + ':' + filename + '\n')
-def main():
- opts, files = parse_args()
+def main():
+ opts, files = parse_args()
write_coverage_sources(opts.output, opts.source_root, files)
-
-
-if __name__ == '__main__':
- exit(main())
+
+
+if __name__ == '__main__':
+ exit(main())
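
get_package_name from the hunk above is easy to exercise in isolation (the sample source is invented):

    import re

    PACKAGE_REGEX = re.compile(r'^\s*package\s+(.*?);', flags=re.MULTILINE | re.DOTALL)

    source = 'package ru.yandex.devtools;\n\nclass Foo {}\n'
    match = PACKAGE_REGEX.search(source)
    if match:
        # the package declaration becomes a directory-style prefix
        print(match.group(1).replace('\n\t ', '').replace('.', '/'))  # ru/yandex/devtools
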
diff --git a/build/scripts/link_dyn_lib.py b/build/scripts/link_dyn_lib.py
index 58faf37bbe..23487f5c1e 100644
--- a/build/scripts/link_dyn_lib.py
+++ b/build/scripts/link_dyn_lib.py
@@ -148,8 +148,8 @@ def fix_cmd(arch, musl, c):
return list(f(list(parse_export_file(fname))))
if p.endswith('.supp'):
- return []
-
+ return []
+
if p.endswith('.pkg.fake'):
return []
diff --git a/build/scripts/link_exe.py b/build/scripts/link_exe.py
index 1c3cc4e516..f469e3b442 100644
--- a/build/scripts/link_exe.py
+++ b/build/scripts/link_exe.py
@@ -5,16 +5,16 @@ import optparse
from process_whole_archive_option import ProcessWholeArchiveOption
-def get_leaks_suppressions(cmd):
- supp, newcmd = [], []
- for arg in cmd:
+def get_leaks_suppressions(cmd):
+ supp, newcmd = [], []
+ for arg in cmd:
if arg.endswith(".supp"):
- supp.append(arg)
- else:
- newcmd.append(arg)
- return supp, newcmd
-
-
+ supp.append(arg)
+ else:
+ newcmd.append(arg)
+ return supp, newcmd
+
+
musl_libs = '-lc', '-lcrypt', '-ldl', '-lm', '-lpthread', '-lrt', '-lutil'
@@ -23,26 +23,26 @@ def fix_cmd(musl, c):
def gen_default_suppressions(inputs, output, source_root):
- import collections
- import os
-
- supp_map = collections.defaultdict(set)
+ import collections
+ import os
+
+ supp_map = collections.defaultdict(set)
for filename in inputs:
- sanitizer = os.path.basename(filename).split('.', 1)[0]
+ sanitizer = os.path.basename(filename).split('.', 1)[0]
with open(os.path.join(source_root, filename)) as src:
- for line in src:
- line = line.strip()
- if not line or line.startswith('#'):
- continue
- supp_map[sanitizer].add(line)
-
+ for line in src:
+ line = line.strip()
+ if not line or line.startswith('#'):
+ continue
+ supp_map[sanitizer].add(line)
+
with open(output, "wb") as dst:
- for supp_type, supps in supp_map.items():
- dst.write('extern "C" const char *__%s_default_suppressions() {\n' % supp_type)
- dst.write(' return "{}";\n'.format('\\n'.join(sorted(supps))))
- dst.write('}\n')
-
-
+ for supp_type, supps in supp_map.items():
+ dst.write('extern "C" const char *__%s_default_suppressions() {\n' % supp_type)
+ dst.write(' return "{}";\n'.format('\\n'.join(sorted(supps))))
+ dst.write('}\n')
+
+
def parse_args():
parser = optparse.OptionParser()
parser.disable_interspersed_args()
@@ -69,7 +69,7 @@ if __name__ == '__main__':
supp, cmd = get_leaks_suppressions(cmd)
if supp:
- src_file = "default_suppressions.cpp"
+ src_file = "default_suppressions.cpp"
gen_default_suppressions(supp, src_file, opts.source_root)
cmd += [src_file]
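
gen_default_suppressions above groups *.supp inputs by the sanitizer name encoded in the file name and emits C stubs; a file-free sketch with invented suppression contents:

    import collections

    inputs = {'lsan.supp': ['leak:b_init_cffi_1_0_external_module', '# a comment', '']}

    supp_map = collections.defaultdict(set)
    for filename, lines in inputs.items():
        sanitizer = filename.split('.', 1)[0]    # 'lsan'
        for line in lines:
            line = line.strip()
            if not line or line.startswith('#'):  # blanks and comments are dropped
                continue
            supp_map[sanitizer].add(line)

    for supp_type, supps in supp_map.items():
        print('extern "C" const char *__%s_default_suppressions() {' % supp_type)
        print('    return "{}";'.format('\\n'.join(sorted(supps))))
        print('}')
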
diff --git a/build/scripts/link_fat_obj.py b/build/scripts/link_fat_obj.py
index 9458c0ebfb..c189668b9e 100644
--- a/build/scripts/link_fat_obj.py
+++ b/build/scripts/link_fat_obj.py
@@ -35,10 +35,10 @@ def get_args():
return parser.parse_args(groups['default']), groups
-def strip_suppression_files(srcs):
+def strip_suppression_files(srcs):
return [s for s in srcs if not s.endswith('.supp')]
-
-
+
+
def main():
args, groups = get_args()
@@ -51,7 +51,7 @@ def main():
# Dependencies
global_srcs = groups['global_srcs']
- global_srcs = strip_suppression_files(global_srcs)
+ global_srcs = strip_suppression_files(global_srcs)
global_srcs = ProcessWholeArchiveOption(args.arch).construct_cmd(global_srcs)
peers = groups['peers']
diff --git a/build/scripts/retry.py b/build/scripts/retry.py
index ac417f7c5f..d14170bfec 100644
--- a/build/scripts/retry.py
+++ b/build/scripts/retry.py
@@ -1,29 +1,29 @@
-import time
-import functools
-
-
-# Partly copy-pasted from contrib/python/retry
-def retry_func(f, exceptions=Exception, tries=-1, delay=1, max_delay=None, backoff=1):
- _tries, _delay = tries, delay
- while _tries:
- try:
- return f()
- except exceptions as e:
- _tries -= 1
- if not _tries:
- raise
-
- time.sleep(_delay)
- _delay *= backoff
-
- if max_delay is not None:
- _delay = min(_delay, max_delay)
-
-
-def retry(**retry_kwargs):
- def decorator(func):
- @functools.wraps(func)
- def wrapper(*args, **kwargs):
- return retry_func(lambda: func(*args, **kwargs), **retry_kwargs)
- return wrapper
- return decorator
+import time
+import functools
+
+
+# Partly copy-pasted from contrib/python/retry
+def retry_func(f, exceptions=Exception, tries=-1, delay=1, max_delay=None, backoff=1):
+ _tries, _delay = tries, delay
+ while _tries:
+ try:
+ return f()
+ except exceptions as e:
+ _tries -= 1
+ if not _tries:
+ raise
+
+ time.sleep(_delay)
+ _delay *= backoff
+
+ if max_delay is not None:
+ _delay = min(_delay, max_delay)
+
+
+def retry(**retry_kwargs):
+ def decorator(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ return retry_func(lambda: func(*args, **kwargs), **retry_kwargs)
+ return wrapper
+ return decorator
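
A usage sketch for the restored decorator (assumes build/scripts is importable; delays shortened for the demo):

    from retry import retry

    attempts = []

    @retry(exceptions=IOError, tries=3, delay=0.1, backoff=2, max_delay=2)
    def flaky():
        attempts.append(1)
        if len(attempts) < 3:
            raise IOError('transient')
        return 'ok'

    print(flaky())  # 'ok' after two retried failures
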
diff --git a/build/scripts/run_junit.py b/build/scripts/run_junit.py
index 6944144fa7..5f56403bed 100644
--- a/build/scripts/run_junit.py
+++ b/build/scripts/run_junit.py
@@ -1,125 +1,125 @@
-import collections
-import json
-import time
-import os
-import sys
-
-SHUTDOWN_SIGNAL = 'SIGUSR1'
-
-PROVIDES = {
- "devtools/junit-runner/devtools-junit-runner.jar": "junit-runner",
- "devtools/junit5-runner/devtools-junit5-runner.jar": "junit-runner",
-}
-
-
-class SignalInterruptionError(Exception):
- pass
-
-
-def on_shutdown(s, f):
- raise SignalInterruptionError()
-
-
-def get_tracefile_path(args):
- return args[args.index('--output') + 1]
-
-
-def dump_chunk_error(tracefile, name, imps):
- with open(tracefile, 'a') as afile:
- msg = {
- "timestamp": time.time(),
- "name": "chunk-event",
- "value": {
- "errors": [
- [
- "fail",
- "[[bad]]Test contains conflicting dependencies for [[imp]]{}[[bad]]: {}[[rst]]".format(
- name, ', '.join(imps)
- ),
- ],
- ],
- },
- }
- json.dump(msg, afile)
- afile.write("\n")
-
-
-def verify_classpath(args):
- cpfile = args[args.index('-classpath') + 1]
- assert cpfile.startswith('@'), cpfile
-
- cpfile = cpfile[1:]
- assert os.path.exists(cpfile)
-
- with open(cpfile) as afile:
- data = afile.read().splitlines()
-
- collisions = collections.defaultdict(set)
- for cp in data:
- if cp in PROVIDES:
- collisions[PROVIDES[cp]].add(cp)
-
- for name, imps in collisions.items():
- if len(imps) > 1:
- tracefile = get_tracefile_path(args)
- dump_chunk_error(tracefile, name, imps)
- return False
- return True
-
-
-def main():
- args = sys.argv[1:]
-
- # Emulates PROVIDES(X) for junit-runner and junit5-runner.
- # For more info see DEVTOOLSSUPPORT-7454
- if not verify_classpath(args):
- return 1
-
- def execve():
- os.execve(args[0], args, os.environ)
-
- jar_binary = args[args.index('--jar-binary') + 1]
- java_bin_dir = os.path.dirname(jar_binary)
- jstack_binary = os.path.join(java_bin_dir, 'jstack')
-
- if not os.path.exists(jstack_binary):
- sys.stderr.write("jstack is missing: {}\n".format(jstack_binary))
- execve()
-
- import signal
-
- signum = getattr(signal, SHUTDOWN_SIGNAL, None)
-
- if signum is None:
- execve()
-
- import subprocess
-
- proc = subprocess.Popen(args)
- signal.signal(signum, on_shutdown)
- timeout = False
-
- try:
- proc.wait()
- except SignalInterruptionError:
- sys.stderr.write("\nGot {} signal: going to shutdown junit\n".format(signum))
- # Dump stack traces
- subprocess.call([jstack_binary, str(proc.pid)], stdout=sys.stderr)
- # Kill junit - for more info see DEVTOOLS-7636
- os.kill(proc.pid, signal.SIGKILL)
- proc.wait()
- timeout = True
-
- if proc.returncode:
- sys.stderr.write('java exit code: {}\n'.format(proc.returncode))
- if timeout:
- # In case of timeout return specific exit code
- # https://a.yandex-team.ru/arc/trunk/arcadia/devtools/ya/test/const/__init__.py?rev=r8578188#L301
- proc.returncode = 10
- sys.stderr.write('java exit code changed to {}\n'.format(proc.returncode))
-
- return proc.returncode
-
-
-if __name__ == '__main__':
- exit(main())
+import collections
+import json
+import time
+import os
+import sys
+
+SHUTDOWN_SIGNAL = 'SIGUSR1'
+
+PROVIDES = {
+ "devtools/junit-runner/devtools-junit-runner.jar": "junit-runner",
+ "devtools/junit5-runner/devtools-junit5-runner.jar": "junit-runner",
+}
+
+
+class SignalInterruptionError(Exception):
+ pass
+
+
+def on_shutdown(s, f):
+ raise SignalInterruptionError()
+
+
+def get_tracefile_path(args):
+ return args[args.index('--output') + 1]
+
+
+def dump_chunk_error(tracefile, name, imps):
+ with open(tracefile, 'a') as afile:
+ msg = {
+ "timestamp": time.time(),
+ "name": "chunk-event",
+ "value": {
+ "errors": [
+ [
+ "fail",
+ "[[bad]]Test contains conflicting dependencies for [[imp]]{}[[bad]]: {}[[rst]]".format(
+ name, ', '.join(imps)
+ ),
+ ],
+ ],
+ },
+ }
+ json.dump(msg, afile)
+ afile.write("\n")
+
+
+def verify_classpath(args):
+ cpfile = args[args.index('-classpath') + 1]
+ assert cpfile.startswith('@'), cpfile
+
+ cpfile = cpfile[1:]
+ assert os.path.exists(cpfile)
+
+ with open(cpfile) as afile:
+ data = afile.read().splitlines()
+
+ collisions = collections.defaultdict(set)
+ for cp in data:
+ if cp in PROVIDES:
+ collisions[PROVIDES[cp]].add(cp)
+
+ for name, imps in collisions.items():
+ if len(imps) > 1:
+ tracefile = get_tracefile_path(args)
+ dump_chunk_error(tracefile, name, imps)
+ return False
+ return True
+
+
+def main():
+ args = sys.argv[1:]
+
+ # Emulates PROVIDES(X) for junit-runner and junit5-runner.
+ # For more info see DEVTOOLSSUPPORT-7454
+ if not verify_classpath(args):
+ return 1
+
+ def execve():
+ os.execve(args[0], args, os.environ)
+
+ jar_binary = args[args.index('--jar-binary') + 1]
+ java_bin_dir = os.path.dirname(jar_binary)
+ jstack_binary = os.path.join(java_bin_dir, 'jstack')
+
+ if not os.path.exists(jstack_binary):
+ sys.stderr.write("jstack is missing: {}\n".format(jstack_binary))
+ execve()
+
+ import signal
+
+ signum = getattr(signal, SHUTDOWN_SIGNAL, None)
+
+ if signum is None:
+ execve()
+
+ import subprocess
+
+ proc = subprocess.Popen(args)
+ signal.signal(signum, on_shutdown)
+ timeout = False
+
+ try:
+ proc.wait()
+ except SignalInterruptionError:
+ sys.stderr.write("\nGot {} signal: going to shutdown junit\n".format(signum))
+ # Dump stack traces
+ subprocess.call([jstack_binary, str(proc.pid)], stdout=sys.stderr)
+ # Kill junit - for more info see DEVTOOLS-7636
+ os.kill(proc.pid, signal.SIGKILL)
+ proc.wait()
+ timeout = True
+
+ if proc.returncode:
+ sys.stderr.write('java exit code: {}\n'.format(proc.returncode))
+ if timeout:
+ # In case of timeout return specific exit code
+ # https://a.yandex-team.ru/arc/trunk/arcadia/devtools/ya/test/const/__init__.py?rev=r8578188#L301
+ proc.returncode = 10
+ sys.stderr.write('java exit code changed to {}\n'.format(proc.returncode))
+
+ return proc.returncode
+
+
+if __name__ == '__main__':
+ exit(main())
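
The PROVIDES emulation restored above can be exercised without a real classpath file; the entries below are invented except for the two runner jars:

    import collections

    PROVIDES = {
        "devtools/junit-runner/devtools-junit-runner.jar": "junit-runner",
        "devtools/junit5-runner/devtools-junit5-runner.jar": "junit-runner",
    }

    classpath = [
        "devtools/junit-runner/devtools-junit-runner.jar",
        "devtools/junit5-runner/devtools-junit5-runner.jar",
        "contrib/java/junit/junit.jar",
    ]

    collisions = collections.defaultdict(set)
    for cp in classpath:
        if cp in PROVIDES:
            collisions[PROVIDES[cp]].add(cp)

    for name, imps in collisions.items():
        if len(imps) > 1:  # both runners provide 'junit-runner' -> conflict
            print("conflicting dependencies for {}: {}".format(name, ', '.join(sorted(imps))))
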
diff --git a/build/scripts/unpacking_jtest_runner.py b/build/scripts/unpacking_jtest_runner.py
index 9bb314a98a..9730dcd711 100644
--- a/build/scripts/unpacking_jtest_runner.py
+++ b/build/scripts/unpacking_jtest_runner.py
@@ -1,10 +1,10 @@
-import io
-import json
-import optparse
-import os
+import io
+import json
+import optparse
+import os
import sys
import subprocess
-import time
+import time
import zipfile
import platform
@@ -15,7 +15,7 @@ import platform
def parse_args():
parser = optparse.OptionParser()
parser.disable_interspersed_args()
- parser.add_option('--trace-file')
+ parser.add_option('--trace-file')
parser.add_option('--jar-binary')
parser.add_option('--tests-jar-path')
parser.add_option('--classpath-option-type', choices=('manifest', 'command_file', 'list'), default='manifest')
@@ -49,27 +49,27 @@ def fix_cmd(cmd):
return cmd
-def dump_event(etype, data, filename):
- event = {
- 'timestamp': time.time(),
- 'value': data,
- 'name': etype,
- }
-
- with io.open(filename, 'a', encoding='utf8') as afile:
- afile.write(unicode(json.dumps(event) + '\n'))
-
-
-def dump_chunk_event(data, filename):
- return dump_event('chunk-event', data, filename)
-
-
-def extract_jars(dest, archive):
- os.makedirs(dest)
- with zipfile.ZipFile(archive) as zf:
- zf.extractall(dest)
-
-
+def dump_event(etype, data, filename):
+ event = {
+ 'timestamp': time.time(),
+ 'value': data,
+ 'name': etype,
+ }
+
+ with io.open(filename, 'a', encoding='utf8') as afile:
+ afile.write(unicode(json.dumps(event) + '\n'))
+
+
+def dump_chunk_event(data, filename):
+ return dump_event('chunk-event', data, filename)
+
+
+def extract_jars(dest, archive):
+ os.makedirs(dest)
+ with zipfile.ZipFile(archive) as zf:
+ zf.extractall(dest)
+
+
def make_bfg_from_cp(class_path, out):
class_path = ' '.join(
map(lambda path: ('file:/' + path.lstrip('/')) if os.path.isabs(path) else path, class_path)
@@ -89,7 +89,7 @@ def make_command_file_from_cp(class_path, out):
def main():
- s = time.time()
+ s = time.time()
opts, args = parse_args()
# unpack tests jar
@@ -100,13 +100,13 @@ def main():
build_root = ''
dest = os.path.abspath('test-classes')
- extract_jars(dest, opts.tests_jar_path)
-
- metrics = {
- 'suite_jtest_extract_jars_(seconds)': time.time() - s,
- }
-
- s = time.time()
+ extract_jars(dest, opts.tests_jar_path)
+
+ metrics = {
+ 'suite_jtest_extract_jars_(seconds)': time.time() - s,
+ }
+
+ s = time.time()
# fix java classpath
cp_idx = args.index('-classpath')
if args[cp_idx + 1].startswith('@'):
@@ -131,12 +131,12 @@ def main():
else:
args[cp_idx + 1] = args[cp_idx + 1].replace(opts.tests_jar_path, dest)
args = fix_cmd(args[:cp_idx]) + args[cp_idx:]
-
- metrics['suite_jtest_fix_classpath_(seconds)'] = time.time() - s
-
- if opts.trace_file:
- dump_chunk_event({'metrics': metrics}, opts.trace_file)
-
+
+ metrics['suite_jtest_fix_classpath_(seconds)'] = time.time() - s
+
+ if opts.trace_file:
+ dump_chunk_event({'metrics': metrics}, opts.trace_file)
+
# run java cmd
if platform.system() == 'Windows':
sys.exit(subprocess.Popen(args).wait())
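
dump_event above writes one JSON object per line to the trace file; a py3-friendly sketch (io.open/unicode replaced by plain open/str, which changes nothing on py3):

    import json
    import time

    def dump_event(etype, data, filename):
        event = {
            'timestamp': time.time(),
            'value': data,
            'name': etype,
        }
        with open(filename, 'a') as afile:
            afile.write(json.dumps(event) + '\n')

    dump_event('chunk-event',
               {'metrics': {'suite_jtest_extract_jars_(seconds)': 1.5}},
               'trace.txt')
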
diff --git a/build/scripts/with_coverage.py b/build/scripts/with_coverage.py
index 52937490bc..d62435c3b8 100644
--- a/build/scripts/with_coverage.py
+++ b/build/scripts/with_coverage.py
@@ -1,5 +1,5 @@
-# TODO prettyboy remove after ya-bin release
-
+# TODO prettyboy remove after ya-bin release
+
import os
import sys
import subprocess
diff --git a/build/scripts/with_crash_on_timeout.py b/build/scripts/with_crash_on_timeout.py
index 775347f9c1..bde864ed29 100644
--- a/build/scripts/with_crash_on_timeout.py
+++ b/build/scripts/with_crash_on_timeout.py
@@ -1,5 +1,5 @@
-# TODO prettyboy remove after ya-bin release
-
+# TODO prettyboy remove after ya-bin release
+
import os
import sys
import subprocess
diff --git a/build/scripts/ya.make b/build/scripts/ya.make
index 105f8dfc7b..710165e40d 100644
--- a/build/scripts/ya.make
+++ b/build/scripts/ya.make
@@ -8,12 +8,12 @@ TEST_SRCS(
build_java_codenav_index.py
build_java_with_error_prone.py
build_java_with_error_prone2.py
- build_mn.py
- build_pln_header.py
+ build_mn.py
+ build_pln_header.py
cat.py
- cgo1_wrapper.py
+ cgo1_wrapper.py
check_config_h.py
- collect_java_srcs.py
+ collect_java_srcs.py
compile_cuda.py
compile_java.py
compile_jsrc.py
@@ -29,7 +29,7 @@ TEST_SRCS(
extract_jacoco_report.py
f2c.py
fail_module_cmd.py
- fetch_from.py
+ fetch_from.py
fetch_from_external.py
fetch_from_mds.py
fetch_from_npm.py
@@ -40,10 +40,10 @@ TEST_SRCS(
fix_msvc_output.py
fs_tools.py
gen_aar_gradle_script.py
- gen_java_codenav_entry.py
+ gen_java_codenav_entry.py
gen_java_codenav_protobuf.py
gen_mx_table.py
- gen_py3_reg.py
+ gen_py3_reg.py
gen_py_reg.py
gen_test_apk_gradle_script.py
gen_ub.py
@@ -51,7 +51,7 @@ TEST_SRCS(
go_proto_wrapper.py
go_tool.py
ios_wrapper.py
- java_pack_to_file.py
+ java_pack_to_file.py
link_asrc.py
link_dyn_lib.py
link_exe.py
@@ -71,14 +71,14 @@ TEST_SRCS(
py_compile.py
run_ios_simulator.py
run_javac.py
- run_junit.py
+ run_junit.py
run_llvm_dsymutil.py
run_msvc_wine.py
run_tool.py
sky.py
stdout2stderr.py
symlink.py
- tar_directory.py
+ tar_directory.py
tar_sources.py
tared_protoc.py
touch.py
@@ -87,7 +87,7 @@ TEST_SRCS(
with_coverage.py
with_crash_on_timeout.py
with_pathsep_resolve.py
- wrap_groovyc.py
+ wrap_groovyc.py
wrapper.py
writer.py
xargs.py
diff --git a/build/ya.conf.json b/build/ya.conf.json
index 6757c68936..5f7cc875d6 100644
--- a/build/ya.conf.json
+++ b/build/ya.conf.json
@@ -323,10 +323,10 @@
"dlv": {
"description": "Debugger for the Go programming language"
},
- "node-search": {
- "description": "Autocheck data inspector tool",
- "visible": false
- },
+ "node-search": {
+ "description": "Autocheck data inspector tool",
+ "visible": false
+ },
"releaser": {
"description": "Release tool"
},
@@ -426,10 +426,10 @@
"rdtset": {
"description": "Task CPU affinity and Intel(R) Resource Director Technology control tool"
},
- "optrace": {
- "description": "optrace records output files written by each process",
- "visible": false
- },
+ "optrace": {
+ "description": "optrace records output files written by each process",
+ "visible": false
+ },
"yoimports": {
"description": "Go imports formatting tool"
},
@@ -3809,44 +3809,44 @@
}
]
},
- "optrace": {
- "tools": {
- "optrace": {
- "bottle": "optrace",
- "executable": "optrace"
- }
- },
- "platforms": [
- {
- "host": {
- "os": "LINUX"
- },
- "default": true
- }
- ]
- },
- "node-search": {
- "tools": {
- "node-search": {
- "bottle": "node-search",
- "executable": "node-search"
- }
- },
- "platforms": [
- {
- "host": {
- "os": "LINUX"
- },
- "default": true
- },
- {
- "host": {
- "os": "DARWIN"
- },
- "default": true
- }
- ]
- },
+ "optrace": {
+ "tools": {
+ "optrace": {
+ "bottle": "optrace",
+ "executable": "optrace"
+ }
+ },
+ "platforms": [
+ {
+ "host": {
+ "os": "LINUX"
+ },
+ "default": true
+ }
+ ]
+ },
+ "node-search": {
+ "tools": {
+ "node-search": {
+ "bottle": "node-search",
+ "executable": "node-search"
+ }
+ },
+ "platforms": [
+ {
+ "host": {
+ "os": "LINUX"
+ },
+ "default": true
+ },
+ {
+ "host": {
+ "os": "DARWIN"
+ },
+ "default": true
+ }
+ ]
+ },
"gpt": {
"tools": {
"gpt_perf": {
@@ -6923,32 +6923,32 @@
]
}
},
- "optrace": {
- "formula": {
- "sandbox_id": [
- 894130496
- ],
- "match": "optrace"
- },
- "executable": {
- "optrace": [
- "optrace"
- ]
- }
- },
- "node-search": {
- "formula": {
- "sandbox_id": [
- 1157378401
- ],
- "match": "node_search"
- },
- "executable": {
- "node-search": [
- "node_search"
- ]
- }
- },
+ "optrace": {
+ "formula": {
+ "sandbox_id": [
+ 894130496
+ ],
+ "match": "optrace"
+ },
+ "executable": {
+ "optrace": [
+ "optrace"
+ ]
+ }
+ },
+ "node-search": {
+ "formula": {
+ "sandbox_id": [
+ 1157378401
+ ],
+ "match": "node_search"
+ },
+ "executable": {
+ "node-search": [
+ "node_search"
+ ]
+ }
+ },
"rsync": {
"formula": {
"sandbox_id": [
diff --git a/build/ya.make b/build/ya.make
index 760f63ae22..407b8f13e0 100644
--- a/build/ya.make
+++ b/build/ya.make
@@ -7,10 +7,10 @@ NEED_CHECK()
PY2_LIBRARY()
-PY_SRCS(
- ymake_conf.py
-)
-
+PY_SRCS(
+ ymake_conf.py
+)
+
PEERDIR(
library/cpp/deprecated/enum_codegen
library/cpp/deprecated/split
@@ -21,14 +21,14 @@ END()
RECURSE(
conf_fatal_error
- config
+ config
docs/empty
external_resources
- platform/java
+ platform/java
platform/local_so
- platform/perl
- platform/python
- platform/python/ldflags
+ platform/perl
+ platform/python
+ platform/python/ldflags
plugins
prebuilt
scripts
diff --git a/build/ymake.core.conf b/build/ymake.core.conf
index 3101432ffe..081833998b 100644
--- a/build/ymake.core.conf
+++ b/build/ymake.core.conf
@@ -302,14 +302,14 @@ ENUM_PARSER_TOOL=${tool:"tools/enum_parser/enum_parser"}
# tag:python-specific tag:cython-specific
CYTHON_SCRIPT=${input:"${ARCADIA_ROOT}/contrib/tools/cython/cython.py"}
RUN_CYTHON_SCRIPT=$YMAKE_PYTHON $CYTHON_SCRIPT
-CYTHON_OUTPUT_INCLUDES=\
+CYTHON_OUTPUT_INCLUDES=\
${output_include;hide:"contrib/libs/python/Include/compile.h"} \
${output_include;hide:"contrib/libs/python/Include/frameobject.h"} \
-${output_include;hide:"contrib/libs/python/Include/longintrepr.h"} \
-${output_include;hide:"contrib/libs/python/Include/pyconfig.h"} \
-${output_include;hide:"contrib/libs/python/Include/Python.h"} \
-${output_include;hide:"contrib/libs/python/Include/pythread.h"} \
-${output_include;hide:"contrib/libs/python/Include/structmember.h"} \
+${output_include;hide:"contrib/libs/python/Include/longintrepr.h"} \
+${output_include;hide:"contrib/libs/python/Include/pyconfig.h"} \
+${output_include;hide:"contrib/libs/python/Include/Python.h"} \
+${output_include;hide:"contrib/libs/python/Include/pythread.h"} \
+${output_include;hide:"contrib/libs/python/Include/structmember.h"} \
${output_include;hide:"contrib/libs/python/Include/traceback.h"} \
${output_include;hide:"contrib/tools/cython/generated_c_headers.h"} \
${output_include;hide:"omp.h"}
@@ -1190,12 +1190,12 @@ module _BASE_UNIT: _BARE_UNIT {
CFLAGS+=-fsanitize-coverage=$SANITIZE_COVERAGE
LDFLAGS+=-fsanitize-coverage=$SANITIZE_COVERAGE
}
-
- when ($CLANG_COVERAGE && $CLANG_COVERAGE != "no") {
- CFLAGS+=-fprofile-instr-generate -fcoverage-mapping -DCLANG_COVERAGE
- LDFLAGS+=-fprofile-instr-generate -fcoverage-mapping
- }
-
+
+ when ($CLANG_COVERAGE && $CLANG_COVERAGE != "no") {
+ CFLAGS+=-fprofile-instr-generate -fcoverage-mapping -DCLANG_COVERAGE
+ LDFLAGS+=-fprofile-instr-generate -fcoverage-mapping
+ }
+
when ($NLG_COVERAGE && $NLG_COVERAGE != "no") {
CFLAGS+=-DNLG_COVERAGE
}
@@ -1308,10 +1308,10 @@ module _BASE_UNIT: _BARE_UNIT {
PEERDIR += build/external_resources/codenavigation
}
- when ($CYTHON_COVERAGE && $CYTHON_COVERAGE == "yes") {
- CFLAGS+=-DCYTHON_TRACE=1 -DCYTHON_TRACE_NOGIL=1
- }
-
+ when ($CYTHON_COVERAGE && $CYTHON_COVERAGE == "yes") {
+ CFLAGS+=-DCYTHON_TRACE=1 -DCYTHON_TRACE_NOGIL=1
+ }
+
DEFAULT(USE_SSE4 yes)
when ($NOSSE != "yes") {
@@ -1652,10 +1652,10 @@ module _BASE_PROGRAM: _LINK_UNIT {
when ($SANITIZER_DEFINED == "yes") {
PEERDIR += contrib/libs/cxxsupp/libsan
}
-
- when ($CLANG_COVERAGE && $CLANG_COVERAGE != "no") {
- PEERDIR+=library/cpp/testing/dump_clang_coverage
- }
+
+ when ($CLANG_COVERAGE && $CLANG_COVERAGE != "no") {
+ PEERDIR+=library/cpp/testing/dump_clang_coverage
+ }
when ($IDE_MSVS == "yes") {
PEERDIR+=build/scripts/c_templates
@@ -1664,8 +1664,8 @@ module _BASE_PROGRAM: _LINK_UNIT {
when ($_CUSTOM_LINK_STEP_SCRIPT) {
LINK_SCRIPT_EXE_FLAGS+=--python=$YMAKE_PYTHON --custom-step=${input:_CUSTOM_LINK_STEP_SCRIPT}
}
-}
-
+}
+
CPP_PROGRAM_SEM=add_executable $MODDIR $REALPRJNAME ${hide:TARGET} ${hide:AUTO_INPUT} && vcs_info && target_link_flags PUBLIC $OBJADDE_LIB $OBJADDE
### @usage: PROGRAM([progname])
###
@@ -1707,8 +1707,8 @@ module PY2_PROGRAM: _PY2_PROGRAM {
PEERDIR+=build/rules/py2_deprecation
}
ASSERT(_OK You are using deprecated Python2-only code (PY2_PROGRAM). Please consider rewriting to Python 3.)
-}
-
+}
+
# tag:python-specific
### @usage: NO_EXTENDED_SOURCE_SEARCH()
###
@@ -1837,7 +1837,7 @@ macro CUSTOM_LINK_STEP_SCRIPT(Name) {
module _BASE_UNITTEST: _BASE_PROGRAM {
.FINAL_TARGET=no
.NODE_TYPE=Program
- .ALLOWED=YT_SPEC
+ .ALLOWED=YT_SPEC
when ($UT_SKIP_EXCEPTIONS == "yes") {
C_DEFINES+=-DUT_SKIP_EXCEPTIONS
}
@@ -1970,7 +1970,7 @@ module BOOSTTEST_WITH_MAIN: BOOSTTEST {
PEERDIR(library/cpp/testing/boost_test_main)
}
-FUZZ_DICTS_VALUE=
+FUZZ_DICTS_VALUE=
### @usage: FUZZ_DICTS(path1 [path2...])
###
### Allows you to specify dictionaries, relative to the root of Arcadia, which will be used in Fuzzing.
@@ -1980,9 +1980,9 @@ FUZZ_DICTS_VALUE=
### Documentation: https://wiki.yandex-team.ru/yatool/fuzzing/
macro FUZZ_DICTS(Data...) {
SET_APPEND(FUZZ_DICTS_VALUE $Data)
-}
-
-FUZZ_OPTS_VALUE=
+}
+
+FUZZ_OPTS_VALUE=
### @usage: FUZZ_OPTS(opt1 [Opt2...])
###
### Overrides or adds options to the corpus mining and fuzzer run.
@@ -1999,22 +1999,22 @@ FUZZ_OPTS_VALUE=
### Documentation: https://wiki.yandex-team.ru/yatool/fuzzing/
macro FUZZ_OPTS(Data...) {
SET_APPEND(FUZZ_OPTS_VALUE $Data)
-}
-
+}
+
# tag:yt-specific tag:test
-TEST_YT_SPEC_VALUE=
-### @usage: YT_SPEC(path1 [path2...])
-###
-### Allows you to specify json-files with YT task and operation specs,
-### which will be used to run the test node in YT.
-### The test must be marked with the ya:yt tag.
-### Files must be relative to the root of Arcadia.
-###
-### Documentation: https://wiki.yandex-team.ru/yatool/test/
-macro YT_SPEC(Data...) {
- SET_APPEND(TEST_YT_SPEC_VALUE $Data)
-}
-
+TEST_YT_SPEC_VALUE=
+### @usage: YT_SPEC(path1 [path2...])
+###
+### Allows you to specify json-files with YT task and operation specs,
+### which will be used to run the test node in YT.
+### The test must be marked with the ya:yt tag.
+### Files must be relative to the root of Arcadia.
+###
+### Documentation: https://wiki.yandex-team.ru/yatool/test/
+macro YT_SPEC(Data...) {
+ SET_APPEND(TEST_YT_SPEC_VALUE $Data)
+}
+
# tag:test
TEST_SRCS_VALUE=
### @usage: TEST_SRCS(Files...)
@@ -2067,8 +2067,8 @@ TEST_REQUIREMENTS_VALUE=
### Documentation about the Arcadia test system: https://wiki.yandex-team.ru/yatool/test/
macro REQUIREMENTS(Tags...) {
SET_APPEND(TEST_REQUIREMENTS_VALUE $Tags)
-}
-
+}
+
# tag:test
TEST_ENV_VALUE=
### @usage: ENV(key[=value])
@@ -2289,7 +2289,7 @@ module GTEST_UGLY: _BASE_PROGRAM {
module EXECTEST: _BARE_UNIT {
.NODE_TYPE=Program
.FINAL_TARGET=no
- .ALLOWED=YT_SPEC
+ .ALLOWED=YT_SPEC
.RESTRICTED=FORK_TEST_FILES
SET(MODULE_SUFFIX .pkg.fake)
SETUP_EXECTEST()
@@ -2635,14 +2635,14 @@ module DLL_UNIT: _LINK_UNIT {
LINK_DYN_LIB_FLAGS+=--fix-elf ${tool:"tools/fix_elf"}
}
}
-
+
when ($DARWIN == "yes") {
LDFLAGS += -undefined dynamic_lookup
}
- when ($CLANG_COVERAGE && $CLANG_COVERAGE != "no") {
- PEERDIR+=library/cpp/testing/dump_clang_coverage
- }
+ when ($CLANG_COVERAGE && $CLANG_COVERAGE != "no") {
+ PEERDIR+=library/cpp/testing/dump_clang_coverage
+ }
when ($IDE_MSVS == "yes") {
PEERDIR+=build/scripts/c_templates
@@ -3941,7 +3941,7 @@ module _JAR_RUNABLE: _COMPILABLE_JAR_BASE {
otherwise {
_SCRIPTGEN_FLAGS=-D IS_UBERJAR=no
}
- CHECK_PROVIDES()
+ CHECK_PROVIDES()
}
# tag:java-specific
@@ -3972,7 +3972,7 @@ module _JAR_TEST: _COMPILABLE_JAR_BASE {
SET(MODULE_SUFFIX .test.cp.jar)
ENABLE(YMAKE_JAVA_TEST)
JAVA_TEST()
- CHECK_PROVIDES()
+ CHECK_PROVIDES()
}
# tag:java-specific
@@ -4126,9 +4126,9 @@ module _BASE_PY_PROGRAM: _BASE_PROGRAM {
PEERDIR += contrib/tools/python/src/Modules/_sqlite
}
}
- when ($PYTHON_COVERAGE == "yes") {
- PEERDIR+=library/python/coverage
- }
+ when ($PYTHON_COVERAGE == "yes") {
+ PEERDIR+=library/python/coverage
+ }
when ($ARCH_PPC64LE == "yes") {
_MY_ALLOCATOR=SYSTEM
@@ -4199,9 +4199,9 @@ module _BASE_PY3_PROGRAM: _BASE_PROGRAM {
when ($SANITIZER_TYPE && $SANITIZER_TYPE != "no") {
NO_STRIP=yes
}
- when ($PYTHON_COVERAGE == "yes") {
- PEERDIR+=library/python/coverage
- }
+ when ($PYTHON_COVERAGE == "yes") {
+ PEERDIR+=library/python/coverage
+ }
when ($CODENAVIGATION && $NOCODENAVIGATION != "yes") {
PEERDIR += contrib/python/six
}
@@ -5546,7 +5546,7 @@ macro SPLIT_FACTOR(Factor) {
}
# tag:test
-FORK_TEST_FILES_MODE=
+FORK_TEST_FILES_MODE=
### @usage: FORK_TEST_FILES()
###
### Only for PY2TEST and PY3TEST: splits the test executable into chunks based on the files listed in TEST_SRCS
@@ -5567,8 +5567,8 @@ TEST_SIZE_NAME=SMALL
### Documentation about the system test: https://wiki.yandex-team.ru/yatool/test/
macro SIZE(Type) {
SET(TEST_SIZE_NAME $Type)
-}
-
+}
+
### @usage: JOIN_SRCS(Out Src...)
###
### Join set of sources into single file named Out and send it for further processing.
@@ -6746,32 +6746,32 @@ macro NO_SANITIZE_COVERAGE() {
### @usage: NO_CLANG_COVERAGE()
###
### Disable heavyweight clang coverage for the module
-macro NO_CLANG_COVERAGE() {
- DISABLE(CLANG_COVERAGE)
-}
-
+macro NO_CLANG_COVERAGE() {
+ DISABLE(CLANG_COVERAGE)
+}
+
macro NO_CLANG_TIDY() {
DISABLE(TIDY)
}
# tag:python-specific tag:coverage
-### @usage: NO_PYTHON_COVERAGE()
-###
-### Disable python coverage for module
-macro NO_PYTHON_COVERAGE() {
- DISABLE(PYTHON_COVERAGE)
-}
-
+### @usage: NO_PYTHON_COVERAGE()
+###
+### Disable python coverage for module
+macro NO_PYTHON_COVERAGE() {
+ DISABLE(PYTHON_COVERAGE)
+}
+
# tag:python-specific tag:coverage tag:cython
-### @usage: NO_CYTHON_COVERAGE()
-###
-### Disable cython and cythonized python coverage (CYTHONIZE_PY)
-### Implies NO_CLANG_COVERAGE() - right now, we can't disable instrumentation for .py.cpp files, but enable for .cpp
-macro NO_CYTHON_COVERAGE() {
- DISABLE(CYTHON_COVERAGE)
- NO_CLANG_COVERAGE()
-}
-
+### @usage: NO_CYTHON_COVERAGE()
+###
+### Disable cython and cythonized python coverage (CYTHONIZE_PY)
+### Implies NO_CLANG_COVERAGE() - right now, we can't disable instrumentation for .py.cpp files, but enable for .cpp
+macro NO_CYTHON_COVERAGE() {
+ DISABLE(CYTHON_COVERAGE)
+ NO_CLANG_COVERAGE()
+}
+
# tag:lua-specific
LUAJIT_PATH=${ARCADIA_ROOT}/contrib/libs/luajit
macro _LUAJIT_OBJDUMP(Src, OUT="") {
@@ -7525,7 +7525,7 @@ multimodule PROTO_LIBRARY {
OPTIMIZE_PY_PROTOS()
OBJ_SUF=.py2
# Can not use NO_LINT(), because is not allowed outside of contrib directory
- SET(LINT_LEVEL_VALUE none_internal)
+ SET(LINT_LEVEL_VALUE none_internal)
when ($_COMMON_GOOGLE_APIS != "None") {
PEERDIR += contrib/libs/googleapis-common-protos
@@ -7555,7 +7555,7 @@ multimodule PROTO_LIBRARY {
}
OBJ_SUF=.py3
# Can not use NO_LINT(), because is not allowed outside of contrib directory
- SET(LINT_LEVEL_VALUE none_internal)
+ SET(LINT_LEVEL_VALUE none_internal)
when ($_COMMON_GOOGLE_APIS != "None") {
PEERDIR += contrib/libs/googleapis-common-protos
@@ -9015,26 +9015,26 @@ when ($OPENGL_REQUIRED) {
}
# tag:python-specific
-multimodule PY23_TEST {
+multimodule PY23_TEST {
module PY2 : PYTEST_BIN {
MODULE_PREFIX=py2_
- OBJ_SUF=.py2
+ OBJ_SUF=.py2
CANONIZE_SUB_PATH=py2test
RUN_CYTHON_SCRIPT=$YMAKE_PYTHON $CYTHON_SCRIPT
- }
+ }
module PY3TEST_PROGRAM: PY3TEST_BIN {
.FINAL_TARGET=yes
- OBJ_SUF=.py3
+ OBJ_SUF=.py3
CANONIZE_SUB_PATH=py3test
RUN_CYTHON_SCRIPT=$YMAKE_PYTHON $CYTHON_SCRIPT
- }
+ }
module PY3TEST_LIBRARY: _PY3_LIBRARY {
PEERDIR+=library/python/pytest
_REQUIRE_EXPLICIT_LICENSE()
RUN_CYTHON_SCRIPT=$YMAKE_PYTHON $CYTHON_SCRIPT
}
-}
+}
# tag:windows-specific
WINDOWS_MANIFEST=
diff --git a/build/ymake_conf.py b/build/ymake_conf.py
index 327c639568..30219eb85e 100755
--- a/build/ymake_conf.py
+++ b/build/ymake_conf.py
@@ -1665,44 +1665,44 @@ class GnuCompiler(Compiler):
append('EXTRA_OUTPUT')
style = ['${requirements;hide:CC_REQUIREMENTS} ${hide;kv:"p CC"} ${hide;kv:"pc green"}']
- cxx_args = [
+ cxx_args = [
'$CLANG_TIDY_ARGS',
- '$YNDEXER_ARGS',
- '$CXX_COMPILER',
- '$C_FLAGS_PLATFORM',
- '$GCC_COMPILE_FLAGS',
- '$CXXFLAGS',
- '$CL_MACRO_INFO',
- '$CL_MACRO_INFO_DISABLE_CACHE__NO_UID__',
+ '$YNDEXER_ARGS',
+ '$CXX_COMPILER',
+ '$C_FLAGS_PLATFORM',
+ '$GCC_COMPILE_FLAGS',
+ '$CXXFLAGS',
+ '$CL_MACRO_INFO',
+ '$CL_MACRO_INFO_DISABLE_CACHE__NO_UID__',
'$COMPILER_TIME_TRACE_FLAGS',
- '$EXTRA_OUTPUT',
- '$SRCFLAGS',
+ '$EXTRA_OUTPUT',
+ '$SRCFLAGS',
'$_LANG_CFLAGS_VALUE',
'${input:SRC}',
- '$TOOLCHAIN_ENV',
+ '$TOOLCHAIN_ENV',
'$YNDEXER_OUTPUT',
'&& $COMPILER_TIME_TRACE_POSTPROCESS',
- ] + style
+ ] + style
- c_args = [
+ c_args = [
'$CLANG_TIDY_ARGS',
- '$YNDEXER_ARGS',
- '$C_COMPILER',
- '$C_FLAGS_PLATFORM',
- '$GCC_COMPILE_FLAGS',
- '$CFLAGS',
- '$CL_MACRO_INFO',
- '$CL_MACRO_INFO_DISABLE_CACHE__NO_UID__',
- '$CONLYFLAGS',
+ '$YNDEXER_ARGS',
+ '$C_COMPILER',
+ '$C_FLAGS_PLATFORM',
+ '$GCC_COMPILE_FLAGS',
+ '$CFLAGS',
+ '$CL_MACRO_INFO',
+ '$CL_MACRO_INFO_DISABLE_CACHE__NO_UID__',
+ '$CONLYFLAGS',
'$COMPILER_TIME_TRACE_FLAGS',
- '$EXTRA_OUTPUT',
- '$SRCFLAGS',
+ '$EXTRA_OUTPUT',
+ '$SRCFLAGS',
'${input:SRC}',
- '$TOOLCHAIN_ENV',
+ '$TOOLCHAIN_ENV',
'$YNDEXER_OUTPUT',
'&& $COMPILER_TIME_TRACE_POSTPROCESS',
- ] + style
-
+ ] + style
+
ignore_c_args_no_deps = [
'${input:SRC}',
'$SRCFLAGS',
@@ -2269,11 +2269,11 @@ class MSVCToolchainOptions(ToolchainOptions):
def prefix(_type, _path):
if not self.under_wine:
return _path
- return '{wine} {type} $WINE_ENV ${{ARCADIA_ROOT}} ${{ARCADIA_BUILD_ROOT}} {path}'.format(
- wine='${YMAKE_PYTHON} ${input:\"build/scripts/run_msvc_wine.py\"} $(WINE_TOOL-sbr:1093314933)/bin/wine64 -v140',
- type=_type,
- path=_path
- )
+ return '{wine} {type} $WINE_ENV ${{ARCADIA_ROOT}} ${{ARCADIA_BUILD_ROOT}} {path}'.format(
+ wine='${YMAKE_PYTHON} ${input:\"build/scripts/run_msvc_wine.py\"} $(WINE_TOOL-sbr:1093314933)/bin/wine64 -v140',
+ type=_type,
+ path=_path
+ )
self.masm_compiler = prefix('masm', os.path.join(bindir, tools_name, asm_name))
self.link = prefix('link', os.path.join(bindir, tools_name, 'link.exe'))
diff --git a/contrib/libs/cxxsupp/libcxx/ya.make b/contrib/libs/cxxsupp/libcxx/ya.make
index 54c624207d..15403fe6d5 100644
--- a/contrib/libs/cxxsupp/libcxx/ya.make
+++ b/contrib/libs/cxxsupp/libcxx/ya.make
@@ -183,11 +183,11 @@ NO_RUNTIME()
NO_COMPILER_WARNINGS()
-IF (FUZZING)
- NO_SANITIZE()
- NO_SANITIZE_COVERAGE()
-ENDIF()
-
+IF (FUZZING)
+ NO_SANITIZE()
+ NO_SANITIZE_COVERAGE()
+ENDIF()
+
SRCS(
src/algorithm.cpp
src/any.cpp
diff --git a/contrib/libs/cxxsupp/libcxxrt/ya.make b/contrib/libs/cxxsupp/libcxxrt/ya.make
index df8589b4e4..12dccbd505 100644
--- a/contrib/libs/cxxsupp/libcxxrt/ya.make
+++ b/contrib/libs/cxxsupp/libcxxrt/ya.make
@@ -41,9 +41,9 @@ ELSE()
)
ENDIF()
-IF (SANITIZER_TYPE == undefined OR FUZZING)
+IF (SANITIZER_TYPE == undefined OR FUZZING)
NO_SANITIZE()
- NO_SANITIZE_COVERAGE()
+ NO_SANITIZE_COVERAGE()
ENDIF()
SRCS(
diff --git a/contrib/libs/cxxsupp/libsan/ya.make b/contrib/libs/cxxsupp/libsan/ya.make
index bbb967cfd3..2fb16630be 100644
--- a/contrib/libs/cxxsupp/libsan/ya.make
+++ b/contrib/libs/cxxsupp/libsan/ya.make
@@ -8,7 +8,7 @@ NO_PLATFORM()
NO_SANITIZE()
-NO_SANITIZE_COVERAGE()
+NO_SANITIZE_COVERAGE()
OWNER(somov)
diff --git a/contrib/libs/protobuf/ya.make b/contrib/libs/protobuf/ya.make
index fcebade12a..044e24badd 100644
--- a/contrib/libs/protobuf/ya.make
+++ b/contrib/libs/protobuf/ya.make
@@ -23,7 +23,7 @@ LICENSE_TEXTS(.yandex_meta/licenses.list.txt)
PEERDIR(
contrib/libs/zlib
)
-
+
ADDINCL(
GLOBAL contrib/libs/protobuf/src
GLOBAL FOR
diff --git a/contrib/python/botocore/botocore/loaders.py b/contrib/python/botocore/botocore/loaders.py
index 59f99ce830..8eaf58aab7 100644
--- a/contrib/python/botocore/botocore/loaders.py
+++ b/contrib/python/botocore/botocore/loaders.py
@@ -207,7 +207,7 @@ class HybridJsonLoader(JSONFileLoader):
@classmethod
def path_in_arcadia_resources(cls, file_path):
for prefix in cls.arcadia_resources_path:
- path = '{}{}.json'.format(prefix, file_path)
+ path = '{}{}.json'.format(prefix, file_path)
if path in resource.resfs_files():
return path
return
diff --git a/contrib/python/cffi/lsan.supp b/contrib/python/cffi/lsan.supp
index a96d7728fd..8533ad0d29 100644
--- a/contrib/python/cffi/lsan.supp
+++ b/contrib/python/cffi/lsan.supp
@@ -1,2 +1,2 @@
-leak:b_init_cffi_1_0_external_module
-leak:lib_build_and_cache_attr
+leak:b_init_cffi_1_0_external_module
+leak:lib_build_and_cache_attr
diff --git a/contrib/python/cffi/ya.make b/contrib/python/cffi/ya.make
index b3043b5981..7bff9477f9 100644
--- a/contrib/python/cffi/ya.make
+++ b/contrib/python/cffi/ya.make
@@ -18,8 +18,8 @@ ADDINCL(
NO_COMPILER_WARNINGS()
NO_LINT()
-SUPPRESSIONS(lsan.supp)
-
+SUPPRESSIONS(lsan.supp)
+
SRCS(
c/_cffi_backend.c
)
diff --git a/contrib/python/ipdb/ya.make b/contrib/python/ipdb/ya.make
index fe303e0bdf..c1c769c05c 100644
--- a/contrib/python/ipdb/ya.make
+++ b/contrib/python/ipdb/ya.make
@@ -20,15 +20,15 @@ PY_SRCS(
ipdb/stdout.py
)
-NO_CHECK_IMPORTS(
-    # The modules listed below lead to initialization of pdb,
-    # which tries to create ~/.ipython/profile_default/history.sqlite-journal,
-    # due to which import tests may crash
- ipdb.__init__
- ipdb.__main__
- ipdb.stdout
-)
-
+NO_CHECK_IMPORTS(
+    # The modules listed below lead to initialization of pdb,
+    # which tries to create ~/.ipython/profile_default/history.sqlite-journal,
+    # due to which import tests may crash
+ ipdb.__init__
+ ipdb.__main__
+ ipdb.stdout
+)
+
RESOURCE_FILES(
PREFIX contrib/python/ipdb/
.dist-info/METADATA
diff --git a/contrib/python/py/py/_vendored_packages/iniconfig/__init__.py b/contrib/python/py/py/_vendored_packages/iniconfig/__init__.py
index fc89ba6e50..ebef1fd720 100644
--- a/contrib/python/py/py/_vendored_packages/iniconfig/__init__.py
+++ b/contrib/python/py/py/_vendored_packages/iniconfig/__init__.py
@@ -2,7 +2,7 @@
(C) Ronny Pfannschmidt, Holger Krekel -- MIT licensed
"""
import io
-
+
__all__ = ['IniConfig', 'ParseError']
COMMENTCHARS = "#;"
@@ -51,14 +51,14 @@ class IniConfig(object):
def __init__(self, path, data=None):
self.path = str(path) # convenience
if data is None:
- if self.path.startswith('pkg:'):
- import pkgutil
-
- _, package, resource = self.path.split(':')
- content = pkgutil.get_data(package, resource)
+ if self.path.startswith('pkg:'):
+ import pkgutil
+
+ _, package, resource = self.path.split(':')
+ content = pkgutil.get_data(package, resource)
f = io.StringIO(content.decode('utf-8'))
- else:
- f = open(self.path)
+ else:
+ f = open(self.path)
try:
tokens = self._parse(iter(f))
finally:
diff --git a/contrib/python/pytest/py2/_pytest/python.py b/contrib/python/pytest/py2/_pytest/python.py
index 7af7571bff..f7c368b0c4 100644
--- a/contrib/python/pytest/py2/_pytest/python.py
+++ b/contrib/python/pytest/py2/_pytest/python.py
@@ -1204,33 +1204,33 @@ def _idval(val, argname, idx, idfn, item, config):
return str(argname) + str(idx)
-def limit_idval(limit):
- import functools
-
- names = {}
- limit -= 6
- assert limit > 0
-
- def decorator(func):
- @functools.wraps(func)
- def wrapper(*args, **kw):
- idval = func(*args, **kw)
- if len(idval) > limit:
- prefix = idval[:limit]
-            # There might be the same prefix for different test cases - take the item into account
+def limit_idval(limit):
+ import functools
+
+ names = {}
+ limit -= 6
+ assert limit > 0
+
+ def decorator(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kw):
+ idval = func(*args, **kw)
+ if len(idval) > limit:
+ prefix = idval[:limit]
+ # There might be the same prefix for different test cases - take item into account
name = "{}-{}".format(kw.get('item', ''), safe_str(prefix))
- idx = names.setdefault(name, -1) + 1
- names[name] = idx
+ idx = names.setdefault(name, -1) + 1
+ names[name] = idx
idval = "{}-{}".format(safe_str(prefix), idx)
- return idval
-
- return wrapper
-
- return decorator
-
-
-# XXX limit testnames in the name of sanity and readability
-@limit_idval(limit=500)
+ return idval
+
+ return wrapper
+
+ return decorator
+
+
+# XXX limit testnames in the name of sanity and readability
+@limit_idval(limit=500)
def _idvalset(idx, parameterset, argnames, idfn, ids, item, config):
if parameterset.id is not None:
return parameterset.id
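
The block above restores a patched-in decorator that caps the length of pytest parametrize ids: any id longer than the budget is truncated, and a per-(item, prefix) counter keeps the shortened ids unique. A self-contained recreation with a small demo (safe_str from the original is dropped here; limit=16 is an arbitrary budget chosen for illustration):

    import functools

    def limit_idval(limit):
        names = {}
        limit -= 6  # reserve room for the "-<idx>" suffix
        assert limit > 0

        def decorator(func):
            @functools.wraps(func)
            def wrapper(*args, **kw):
                idval = func(*args, **kw)
                if len(idval) > limit:
                    prefix = idval[:limit]
                    # The same prefix may occur for different test cases,
                    # so the counter is keyed by (item, prefix).
                    name = "{}-{}".format(kw.get('item', ''), prefix)
                    idx = names.setdefault(name, -1) + 1
                    names[name] = idx
                    idval = "{}-{}".format(prefix, idx)
                return idval
            return wrapper
        return decorator

    @limit_idval(limit=16)
    def make_id(value, item=None):
        return value

    print(make_id('x' * 40, item='test_a'))  # xxxxxxxxxx-0
    print(make_id('x' * 40, item='test_a'))  # xxxxxxxxxx-1
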
diff --git a/contrib/python/pytest/py3/_pytest/python.py b/contrib/python/pytest/py3/_pytest/python.py
index 01011cbc15..f1a47d7d33 100644
--- a/contrib/python/pytest/py3/_pytest/python.py
+++ b/contrib/python/pytest/py3/_pytest/python.py
@@ -1339,33 +1339,33 @@ def _idval(
return str(argname) + str(idx)
-def limit_idval(limit):
- import functools
-
- names = {}
- limit -= 6
- assert limit > 0
-
- def decorator(func):
- @functools.wraps(func)
- def wrapper(*args, **kw):
- idval = func(*args, **kw)
- if len(idval) > limit:
- prefix = idval[:limit]
- # There might be the same prefix for different test cases - take item into account
+def limit_idval(limit):
+ import functools
+
+ names = {}
+ limit -= 6
+ assert limit > 0
+
+ def decorator(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kw):
+ idval = func(*args, **kw)
+ if len(idval) > limit:
+ prefix = idval[:limit]
+ # There might be the same prefix for different test cases - take item into account
name = "{}-{}".format(kw.get('item', ''), prefix)
- idx = names.setdefault(name, -1) + 1
- names[name] = idx
+ idx = names.setdefault(name, -1) + 1
+ names[name] = idx
idval = "{}-{}".format(prefix, idx)
- return idval
-
- return wrapper
-
- return decorator
-
-
-# XXX limit testnames in the name of sanity and readability
-@limit_idval(limit=500)
+ return idval
+
+ return wrapper
+
+ return decorator
+
+
+# XXX limit testnames in the name of sanity and readability
+@limit_idval(limit=500)
def _idvalset(
idx: int,
parameterset: ParameterSet,
diff --git a/contrib/python/six/ya.make b/contrib/python/six/ya.make
index e49fe7a30d..e0c7849214 100644
--- a/contrib/python/six/ya.make
+++ b/contrib/python/six/ya.make
@@ -1,5 +1,5 @@
OWNER(g:python-contrib)
-
+
PY23_LIBRARY()
LICENSE(MIT)
diff --git a/contrib/python/toml/LICENSE b/contrib/python/toml/LICENSE
index ef2d096e59..5010e3075e 100644
--- a/contrib/python/toml/LICENSE
+++ b/contrib/python/toml/LICENSE
@@ -1,27 +1,27 @@
-The MIT License
-
+The MIT License
+
Copyright 2013-2019 William Pearson
Copyright 2015-2016 Julien Enselme
-Copyright 2016 Google Inc.
+Copyright 2016 Google Inc.
Copyright 2017 Samuel Vasko
Copyright 2017 Nate Prewitt
Copyright 2017 Jack Evans
Copyright 2019 Filippo Broggini
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE. \ No newline at end of file
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE. \ No newline at end of file
diff --git a/contrib/python/toml/ya.make b/contrib/python/toml/ya.make
index bda683c55a..104e501e8e 100644
--- a/contrib/python/toml/ya.make
+++ b/contrib/python/toml/ya.make
@@ -1,20 +1,20 @@
OWNER(g:python-contrib)
-
+
PY23_LIBRARY()
-
+
LICENSE(MIT)
VERSION(0.10.2)
-PY_SRCS(
- TOP_LEVEL
+PY_SRCS(
+ TOP_LEVEL
toml/__init__.py
toml/decoder.py
toml/encoder.py
toml/ordered.py
toml/tz.py
-)
-
+)
+
RESOURCE_FILES(
PREFIX contrib/python/toml/
.dist-info/METADATA
@@ -26,6 +26,6 @@ RESOURCE_FILES(
toml/tz.pyi
)
-NO_LINT()
-
-END()
+NO_LINT()
+
+END()
diff --git a/contrib/python/traitlets/py2/tests/ya.make b/contrib/python/traitlets/py2/tests/ya.make
index 105ca6c7ef..d2d3e3b9bf 100644
--- a/contrib/python/traitlets/py2/tests/ya.make
+++ b/contrib/python/traitlets/py2/tests/ya.make
@@ -3,7 +3,7 @@ PY2TEST()
OWNER(g:python-contrib borman nslus)
PEERDIR(
- contrib/python/traitlets
+ contrib/python/traitlets
)
ENV(
diff --git a/contrib/python/ya.make b/contrib/python/ya.make
index 8b0de6b929..d01ced9f3a 100644
--- a/contrib/python/ya.make
+++ b/contrib/python/ya.make
@@ -4,7 +4,7 @@ RECURSE(
absl-py
adblockparser
aenum
- ago
+ ago
aio-pika
aioboto3
aiobotocore
@@ -45,8 +45,8 @@ RECURSE(
alembic
allpairspy
amqp
- aniso8601
- annoy
+ aniso8601
+ annoy
antlr4
ansiwrap
anyconfig
@@ -55,17 +55,17 @@ RECURSE(
apipkg
apispec
apispec-flask-restful
- appdirs
+ appdirs
APScheduler
apsw
aresponses
- argcomplete
+ argcomplete
argon2-cffi
argon2-cffi-bindings
argparse-addons
arq
arrow
- asciitree
+ asciitree
asgiref
asn1crypto
astroid
@@ -75,7 +75,7 @@ RECURSE(
async-lru
async-timeout
asyncio-pool
- asyncmc
+ asyncmc
asyncpg
asyncssh
asynctest
@@ -101,18 +101,18 @@ RECURSE(
banal
bandit
bcrypt
- beautifulsoup4
+ beautifulsoup4
behave
betamax
betamax-serializers
billiard
binaryornot
bincopy
- biplist
+ biplist
bitarray
black
- bleach
- blinker
+ bleach
+ blinker
blis
bokeh
boltons
@@ -125,7 +125,7 @@ RECURSE(
bravado
bravado-core
bsddb3
- bson
+ bson
bz2file
cached-property
cachelib
@@ -165,7 +165,7 @@ RECURSE(
colorhash
colorlog
commoncode
- commonmark
+ commonmark
ConfigArgParse
configobj
configparser
@@ -176,7 +176,7 @@ RECURSE(
convertdate
cookies
cov-core
- coverage
+ coverage
coverage/bin
cpu-cores
crcmod
@@ -322,13 +322,13 @@ RECURSE(
dominate
dotmap
dparse
- dpath
+ dpath
dpkt
drf-extensions
drf_ujson
drf-yasg
- easywebdav
- ecdsa
+ easywebdav
+ ecdsa
edera
editdistance
elasticsearch
@@ -353,7 +353,7 @@ RECURSE(
facebook-business
factory-boy
Faker
- fakeredis
+ fakeredis
falcon
falcon-cors
falcon-multipart
@@ -365,7 +365,7 @@ RECURSE(
fasteners
fastjsonschema
fastsnmp
- faulthandler
+ faulthandler
fbprophet
feedparser
ffmpeg-python
@@ -424,7 +424,7 @@ RECURSE(
ftfy
funcparserlib
funcsigs
- functools32
+ functools32
furl
future
futures
@@ -461,10 +461,10 @@ RECURSE(
graphene-sqlalchemy
graphql-core
graphql-relay
- graphviz
+ graphviz
greenify
greenlet
- grequests
+ grequests
grpcio-opentracing
gspread
gunicorn
@@ -481,11 +481,11 @@ RECURSE(
horovod
hpack
hstspreload
- html2text
+ html2text
html5lib
httmock
http-parser
- httpagentparser
+ httpagentparser
httpcore
httplib2
httpretty
@@ -542,10 +542,10 @@ RECURSE(
janus
jaraco.functools
javaproperties
- jdcal
+ jdcal
jedi
Jinja2
- jinja2-time
+ jinja2-time
jmespath
joblib
jmespath
@@ -598,7 +598,7 @@ RECURSE(
lunardate
lunr
lxml
- lz4
+ lz4
M2Crypto
m3u8
Mako
@@ -664,7 +664,7 @@ RECURSE(
nested-diff
netaddr
netifaces
- networkx
+ networkx
nltk
normality
nose
@@ -673,7 +673,7 @@ RECURSE(
numpy
oauth2client
oauthlib
- objgraph
+ objgraph
observable
odfpy
Office365-REST-Python-Client
@@ -751,7 +751,7 @@ RECURSE(
plumbum
ply
plyvel
- polib
+ polib
portalocker
portpicker
ppdeep
@@ -760,7 +760,7 @@ RECURSE(
prance
premailer
preshed
- pretend
+ pretend
prettytable
priority
progressbar2
@@ -792,7 +792,7 @@ RECURSE(
pyaml
pyasn1
pyasn1-modules
- pybreaker
+ pybreaker
pycares
pycbrf
pycodestyle
@@ -806,7 +806,7 @@ RECURSE(
pydantic
pydash
PyDispatcher
- pyDOE
+ pyDOE
pydocstyle
pydot
pydub
@@ -858,7 +858,7 @@ RECURSE(
pysyncobj
pyTelegramBotAPI
pytest
- pytest-allure-adaptor
+ pytest-allure-adaptor
pytest-asyncio
pytest-bdd
pytest-datadir
@@ -943,7 +943,7 @@ RECURSE(
requests
requests-file
requests-mock
- requests-oauthlib
+ requests-oauthlib
requests-toolbelt
requests-unixsocket
responses
@@ -955,7 +955,7 @@ RECURSE(
RPi.GPIO
RPI-ST7789
rsa
- rstr
+ rstr
ruamel.std.pathlib
ruamel.yaml
Rx
@@ -1007,7 +1007,7 @@ RECURSE(
smmap
snappy
sniffio
- snowballstemmer
+ snowballstemmer
sobol-seq
sockjs
soft-webauthn
@@ -1029,7 +1029,7 @@ RECURSE(
sqltap
srptools
srsly
- sshpubkeys
+ sshpubkeys
sshtunnel
stack-data
starlette
@@ -1070,7 +1070,7 @@ RECURSE(
tinycss2
tinyrpc
tldextract
- toml
+ toml
tomli
toolz
toposort
@@ -1084,7 +1084,7 @@ RECURSE(
traitlets
transfer_manager_client
transitions
- transliterate
+ transliterate
trollius
trollsift
Twiggy
@@ -1142,14 +1142,14 @@ RECURSE(
webauthn
webcolors
webencodings
- WebOb
+ WebOb
websocket-client
websockets
webstruct
WebTest
webvtt-py
weighted-levenshtein
- Werkzeug
+ Werkzeug
wheel
whitenoise
whodap
diff --git a/contrib/tools/cython/Cython/Build/BuildExecutable.py b/contrib/tools/cython/Cython/Build/BuildExecutable.py
index 8a6ca57362..2db9e5d745 100644
--- a/contrib/tools/cython/Cython/Build/BuildExecutable.py
+++ b/contrib/tools/cython/Cython/Build/BuildExecutable.py
@@ -71,7 +71,7 @@ def runcmd(cmd, shell=True):
returncode = os.system(cmd)
else:
returncode = subprocess.call(cmd, shell=shell)
-
+
if returncode:
sys.exit(returncode)
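
For context, runcmd() above runs a command through os.system or subprocess.call and propagates any nonzero exit status to the caller. The same pattern in miniature ('true' is just a placeholder POSIX command):

    import subprocess
    import sys

    returncode = subprocess.call(['true'])  # run the child command
    if returncode:
        sys.exit(returncode)  # mirror the child's failure status
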
diff --git a/contrib/tools/cython/Cython/Build/Dependencies.py b/contrib/tools/cython/Cython/Build/Dependencies.py
index c9396344c5..7eb55e2607 100644
--- a/contrib/tools/cython/Cython/Build/Dependencies.py
+++ b/contrib/tools/cython/Cython/Build/Dependencies.py
@@ -6,16 +6,16 @@ from .. import __version__
import collections
import contextlib
import hashlib
-import os
-import shutil
-import subprocess
-import re, sys, time
+import os
+import shutil
+import subprocess
+import re, sys, time
import warnings
from glob import iglob
-from io import open as io_open
-from os.path import relpath as _relpath
-from distutils.extension import Extension
-from distutils.util import strtobool
+from io import open as io_open
+from os.path import relpath as _relpath
+from distutils.extension import Extension
+from distutils.util import strtobool
import zipfile
try:
@@ -38,8 +38,8 @@ except ImportError:
zipfile_compression_mode = zipfile.ZIP_STORED
try:
- import pythran
-except:
+ import pythran
+except:
pythran = None
from .. import Utils
@@ -66,15 +66,15 @@ else:
basestring = str
-def _make_relative(file_paths, base=None):
- if not base:
- base = os.getcwd()
- if base[-1] != os.path.sep:
- base += os.path.sep
- return [_relpath(path, base) if path.startswith(base) else path
- for path in file_paths]
-
-
+def _make_relative(file_paths, base=None):
+ if not base:
+ base = os.getcwd()
+ if base[-1] != os.path.sep:
+ base += os.path.sep
+ return [_relpath(path, base) if path.startswith(base) else path
+ for path in file_paths]
+
+
def extended_iglob(pattern):
if '{' in pattern:
m = re.match('(.*){([^}]+)}(.*)', pattern)
@@ -118,7 +118,7 @@ def nonempty(it, error_msg="expected non-empty iterator"):
def file_hash(filename):
path = os.path.normpath(filename)
prefix = ('%d:%s' % (len(path), path)).encode("UTF-8")
- m = hashlib.md5(prefix)
+ m = hashlib.md5(prefix)
with open(path, 'rb') as f:
data = f.read(65000)
while data:
@@ -184,7 +184,7 @@ def parse_list(s):
transitive_str = object()
transitive_list = object()
-bool_or = object()
+bool_or = object()
distutils_settings = {
'name': str,
@@ -201,7 +201,7 @@ distutils_settings = {
'export_symbols': list,
'depends': transitive_list,
'language': transitive_str,
- 'np_pythran': bool_or
+ 'np_pythran': bool_or
}
@@ -233,23 +233,23 @@ class DistutilsInfo(object):
if line[0] != '#':
break
line = line[1:].lstrip()
- kind = next((k for k in ("distutils:","cython:") if line.startswith(k)), None)
+ kind = next((k for k in ("distutils:","cython:") if line.startswith(k)), None)
if kind is not None:
- key, _, value = [s.strip() for s in line[len(kind):].partition('=')]
- type = distutils_settings.get(key, None)
- if line.startswith("cython:") and type is None: continue
+ key, _, value = [s.strip() for s in line[len(kind):].partition('=')]
+ type = distutils_settings.get(key, None)
+ if line.startswith("cython:") and type is None: continue
if type in (list, transitive_list):
value = parse_list(value)
if key == 'define_macros':
value = [tuple(macro.split('=', 1))
if '=' in macro else (macro, None)
for macro in value]
- if type is bool_or:
- value = strtobool(value)
+ if type is bool_or:
+ value = strtobool(value)
self.values[key] = value
elif exn is not None:
for key in distutils_settings:
- if key in ('name', 'sources','np_pythran'):
+ if key in ('name', 'sources','np_pythran'):
continue
value = getattr(exn, key, None)
if value:
@@ -271,8 +271,8 @@ class DistutilsInfo(object):
all.append(v)
value = all
self.values[key] = value
- elif type is bool_or:
- self.values[key] = self.values.get(key, False) | value
+ elif type is bool_or:
+ self.values[key] = self.values.get(key, False) | value
return self
def subs(self, aliases):
@@ -415,30 +415,30 @@ def normalize_existing(base_path, rel_paths):
@cached_function
def normalize_existing0(base_dir, rel_paths):
- """
- Given some base directory ``base_dir`` and a list of path names
- ``rel_paths``, normalize each relative path name ``rel`` by
- replacing it by ``os.path.join(base, rel)`` if that file exists.
-
- Return a couple ``(normalized, needed_base)`` where ``normalized``
- is the list of normalized file names and ``needed_base`` is
- ``base_dir`` if we actually needed ``base_dir``. If no paths were
- changed (for example, if all paths were already absolute), then
- ``needed_base`` is ``None``.
- """
+ """
+ Given some base directory ``base_dir`` and a list of path names
+ ``rel_paths``, normalize each relative path name ``rel`` by
+ replacing it by ``os.path.join(base, rel)`` if that file exists.
+
+ Return a couple ``(normalized, needed_base)`` where ``normalized``
+ is the list of normalized file names and ``needed_base`` is
+ ``base_dir`` if we actually needed ``base_dir``. If no paths were
+ changed (for example, if all paths were already absolute), then
+ ``needed_base`` is ``None``.
+ """
normalized = []
- needed_base = None
+ needed_base = None
for rel in rel_paths:
- if os.path.isabs(rel):
- normalized.append(rel)
- continue
+ if os.path.isabs(rel):
+ normalized.append(rel)
+ continue
path = join_path(base_dir, rel)
if path_exists(path):
normalized.append(os.path.normpath(path))
- needed_base = base_dir
+ needed_base = base_dir
else:
normalized.append(rel)
- return (normalized, needed_base)
+ return (normalized, needed_base)
def resolve_depends(depends, include_dirs):
@@ -543,25 +543,25 @@ class DependencyTree(object):
return all
@cached_method
- def cimports_externs_incdirs(self, filename):
+ def cimports_externs_incdirs(self, filename):
# This is really ugly. Nested cimports are resolved with respect to the
# includer, but includes are resolved with respect to the includee.
cimports, includes, externs = self.parse_dependencies(filename)[:3]
cimports = set(cimports)
externs = set(externs)
- incdirs = set()
+ incdirs = set()
for include in self.included_files(filename):
- included_cimports, included_externs, included_incdirs = self.cimports_externs_incdirs(include)
+ included_cimports, included_externs, included_incdirs = self.cimports_externs_incdirs(include)
cimports.update(included_cimports)
externs.update(included_externs)
- incdirs.update(included_incdirs)
- externs, incdir = normalize_existing(filename, externs)
- if incdir:
- incdirs.add(incdir)
- return tuple(cimports), externs, incdirs
+ incdirs.update(included_incdirs)
+ externs, incdir = normalize_existing(filename, externs)
+ if incdir:
+ incdirs.add(incdir)
+ return tuple(cimports), externs, incdirs
def cimports(self, filename):
- return self.cimports_externs_incdirs(filename)[0]
+ return self.cimports_externs_incdirs(filename)[0]
def package(self, filename):
return package(filename)
@@ -638,11 +638,11 @@ class DependencyTree(object):
incorporate everything that has an influence on the generated code.
"""
try:
- m = hashlib.md5(__version__.encode('UTF-8'))
- m.update(file_hash(filename).encode('UTF-8'))
+ m = hashlib.md5(__version__.encode('UTF-8'))
+ m.update(file_hash(filename).encode('UTF-8'))
for x in sorted(self.all_dependencies(filename)):
if os.path.splitext(x)[1] not in ('.c', '.cpp', '.h'):
- m.update(file_hash(x).encode('UTF-8'))
+ m.update(file_hash(x).encode('UTF-8'))
# Include the module attributes that change the compilation result
# in the fingerprint. We do not iterate over module.__dict__ and
# include almost everything here as users might extend Extension
@@ -661,24 +661,24 @@ class DependencyTree(object):
def distutils_info0(self, filename):
info = self.parse_dependencies(filename)[3]
- kwds = info.values
- cimports, externs, incdirs = self.cimports_externs_incdirs(filename)
- basedir = os.getcwd()
- # Add dependencies on "cdef extern from ..." files
+ kwds = info.values
+ cimports, externs, incdirs = self.cimports_externs_incdirs(filename)
+ basedir = os.getcwd()
+ # Add dependencies on "cdef extern from ..." files
if externs:
- externs = _make_relative(externs, basedir)
- if 'depends' in kwds:
- kwds['depends'] = list(set(kwds['depends']).union(externs))
+ externs = _make_relative(externs, basedir)
+ if 'depends' in kwds:
+ kwds['depends'] = list(set(kwds['depends']).union(externs))
else:
- kwds['depends'] = list(externs)
- # Add include_dirs to ensure that the C compiler will find the
- # "cdef extern from ..." files
- if incdirs:
- include_dirs = list(kwds.get('include_dirs', []))
- for inc in _make_relative(incdirs, basedir):
- if inc not in include_dirs:
- include_dirs.append(inc)
- kwds['include_dirs'] = include_dirs
+ kwds['depends'] = list(externs)
+ # Add include_dirs to ensure that the C compiler will find the
+ # "cdef extern from ..." files
+ if incdirs:
+ include_dirs = list(kwds.get('include_dirs', []))
+ for inc in _make_relative(incdirs, basedir):
+ if inc not in include_dirs:
+ include_dirs.append(inc)
+ kwds['include_dirs'] = include_dirs
return info
def distutils_info(self, filename, aliases=None, base=None):
@@ -731,20 +731,20 @@ def create_dependency_tree(ctx=None, quiet=False):
return _dep_tree
-# If this changes, change also docs/src/reference/compilation.rst
-# which mentions this function
-def default_create_extension(template, kwds):
- if 'depends' in kwds:
- include_dirs = kwds.get('include_dirs', []) + ["."]
- depends = resolve_depends(kwds['depends'], include_dirs)
- kwds['depends'] = sorted(set(depends + template.depends))
-
- t = template.__class__
- ext = t(**kwds)
- metadata = dict(distutils=kwds, module_name=kwds['name'])
- return (ext, metadata)
-
-
+# If this changes, change also docs/src/reference/compilation.rst
+# which mentions this function
+def default_create_extension(template, kwds):
+ if 'depends' in kwds:
+ include_dirs = kwds.get('include_dirs', []) + ["."]
+ depends = resolve_depends(kwds['depends'], include_dirs)
+ kwds['depends'] = sorted(set(depends + template.depends))
+
+ t = template.__class__
+ ext = t(**kwds)
+ metadata = dict(distutils=kwds, module_name=kwds['name'])
+ return (ext, metadata)
+
+
# This may be useful for advanced users?
def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=False, language=None,
exclude_failures=False):
@@ -778,14 +778,14 @@ def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=
Extension_distutils = Extension
class Extension_setuptools(Extension): pass
- # if no create_extension() function is defined, use a simple
- # default function.
- create_extension = ctx.options.create_extension or default_create_extension
-
+ # if no create_extension() function is defined, use a simple
+ # default function.
+ create_extension = ctx.options.create_extension or default_create_extension
+
for pattern in patterns:
if isinstance(pattern, str):
filepattern = pattern
- template = Extension(pattern, []) # Fake Extension without sources
+ template = Extension(pattern, []) # Fake Extension without sources
name = '*'
base = None
ext_language = language
@@ -793,11 +793,11 @@ def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=
cython_sources = [s for s in pattern.sources
if os.path.splitext(s)[1] in ('.py', '.pyx')]
if cython_sources:
- filepattern = cython_sources[0]
- if len(cython_sources) > 1:
- print("Warning: Multiple cython sources found for extension '%s': %s\n"
- "See http://cython.readthedocs.io/en/latest/src/userguide/sharing_declarations.html "
- "for sharing declarations among Cython files." % (pattern.name, cython_sources))
+ filepattern = cython_sources[0]
+ if len(cython_sources) > 1:
+ print("Warning: Multiple cython sources found for extension '%s': %s\n"
+ "See http://cython.readthedocs.io/en/latest/src/userguide/sharing_declarations.html "
+ "for sharing declarations among Cython files." % (pattern.name, cython_sources))
else:
# ignore non-cython modules
module_list.append(pattern)
@@ -820,11 +820,11 @@ def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=
if '*' in name:
if module_name in explicit_modules:
continue
- elif name:
+ elif name:
module_name = name
Utils.raise_error_if_module_name_forbidden(module_name)
-
+
if module_name not in seen:
try:
kwds = deps.distutils_info(file, aliases, base).values
@@ -837,41 +837,41 @@ def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=
if key not in kwds:
kwds[key] = value
- kwds['name'] = module_name
-
- sources = [file] + [m for m in template.sources if m != filepattern]
+ kwds['name'] = module_name
+
+ sources = [file] + [m for m in template.sources if m != filepattern]
if 'sources' in kwds:
# allow users to add .c files etc.
for source in kwds['sources']:
source = encode_filename_in_py2(source)
if source not in sources:
sources.append(source)
- kwds['sources'] = sources
+ kwds['sources'] = sources
if ext_language and 'language' not in kwds:
kwds['language'] = ext_language
- np_pythran = kwds.pop('np_pythran', False)
-
- # Create the new extension
- m, metadata = create_extension(template, kwds)
- m.np_pythran = np_pythran or getattr(m, 'np_pythran', False)
- if m.np_pythran:
- update_pythran_extension(m)
- module_list.append(m)
-
- # Store metadata (this will be written as JSON in the
- # generated C file but otherwise has no purpose)
- module_metadata[module_name] = metadata
-
+ np_pythran = kwds.pop('np_pythran', False)
+
+ # Create the new extension
+ m, metadata = create_extension(template, kwds)
+ m.np_pythran = np_pythran or getattr(m, 'np_pythran', False)
+ if m.np_pythran:
+ update_pythran_extension(m)
+ module_list.append(m)
+
+ # Store metadata (this will be written as JSON in the
+ # generated C file but otherwise has no purpose)
+ module_metadata[module_name] = metadata
+
if file not in m.sources:
- # Old setuptools unconditionally replaces .pyx with .c/.cpp
- target_file = os.path.splitext(file)[0] + ('.cpp' if m.language == 'c++' else '.c')
- try:
- m.sources.remove(target_file)
- except ValueError:
- # never seen this in the wild, but probably better to warn about this unexpected case
- print("Warning: Cython source file not found in sources list, adding %s" % file)
+ # Old setuptools unconditionally replaces .pyx with .c/.cpp
+ target_file = os.path.splitext(file)[0] + ('.cpp' if m.language == 'c++' else '.c')
+ try:
+ m.sources.remove(target_file)
+ except ValueError:
+ # never seen this in the wild, but probably better to warn about this unexpected case
+ print("Warning: Cython source file not found in sources list, adding %s" % file)
m.sources.insert(0, file)
seen.add(name)
return module_list, module_metadata
@@ -953,16 +953,16 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
options['include_path'] = ['.']
if 'common_utility_include_dir' in options:
safe_makedirs(options['common_utility_include_dir'])
-
+
depfile = options.pop('depfile', None)
if pythran is None:
pythran_options = None
else:
- pythran_options = CompilationOptions(**options)
- pythran_options.cplus = True
- pythran_options.np_pythran = True
-
+ pythran_options = CompilationOptions(**options)
+ pythran_options.cplus = True
+ pythran_options.np_pythran = True
+
c_options = CompilationOptions(**options)
cpp_options = CompilationOptions(**options); cpp_options.cplus = True
ctx = c_options.create_context()
@@ -978,42 +978,42 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
deps = create_dependency_tree(ctx, quiet=quiet)
build_dir = getattr(options, 'build_dir', None)
- def copy_to_build_dir(filepath, root=os.getcwd()):
- filepath_abs = os.path.abspath(filepath)
- if os.path.isabs(filepath):
- filepath = filepath_abs
- if filepath_abs.startswith(root):
- # distutils extension depends are relative to cwd
- mod_dir = join_path(build_dir,
- os.path.dirname(_relpath(filepath, root)))
- copy_once_if_newer(filepath_abs, mod_dir)
-
- modules_by_cfile = collections.defaultdict(list)
+ def copy_to_build_dir(filepath, root=os.getcwd()):
+ filepath_abs = os.path.abspath(filepath)
+ if os.path.isabs(filepath):
+ filepath = filepath_abs
+ if filepath_abs.startswith(root):
+ # distutils extension depends are relative to cwd
+ mod_dir = join_path(build_dir,
+ os.path.dirname(_relpath(filepath, root)))
+ copy_once_if_newer(filepath_abs, mod_dir)
+
+ modules_by_cfile = collections.defaultdict(list)
to_compile = []
for m in module_list:
if build_dir:
for dep in m.depends:
copy_to_build_dir(dep)
- cy_sources = [
- source for source in m.sources
- if os.path.splitext(source)[1] in ('.pyx', '.py')]
- if len(cy_sources) == 1:
- # normal "special" case: believe the Extension module name to allow user overrides
- full_module_name = m.name
- else:
- # infer FQMN from source files
- full_module_name = None
-
+ cy_sources = [
+ source for source in m.sources
+ if os.path.splitext(source)[1] in ('.pyx', '.py')]
+ if len(cy_sources) == 1:
+ # normal "special" case: believe the Extension module name to allow user overrides
+ full_module_name = m.name
+ else:
+ # infer FQMN from source files
+ full_module_name = None
+
new_sources = []
for source in m.sources:
base, ext = os.path.splitext(source)
if ext in ('.pyx', '.py'):
- if m.np_pythran:
+ if m.np_pythran:
+ c_file = base + '.cpp'
+ options = pythran_options
+ elif m.language == 'c++':
c_file = base + '.cpp'
- options = pythran_options
- elif m.language == 'c++':
- c_file = base + '.cpp'
options = cpp_options
else:
c_file = base + '.c'
@@ -1061,7 +1061,7 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
dep_timestamp, dep = deps.newest_dependency(source)
priority = 2 - (dep in deps.immediate_dependencies(source))
if force or c_timestamp < dep_timestamp:
- if not quiet and not force:
+ if not quiet and not force:
if source == dep:
print("Compiling %s because it changed." % source)
else:
@@ -1070,12 +1070,12 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
fingerprint = deps.transitive_fingerprint(source, m, options)
else:
fingerprint = None
- to_compile.append((
- priority, source, c_file, fingerprint, quiet,
- options, not exclude_failures, module_metadata.get(m.name),
- full_module_name))
+ to_compile.append((
+ priority, source, c_file, fingerprint, quiet,
+ options, not exclude_failures, module_metadata.get(m.name),
+ full_module_name))
new_sources.append(c_file)
- modules_by_cfile[c_file].append(m)
+ modules_by_cfile[c_file].append(m)
else:
new_sources.append(source)
if build_dir:
@@ -1191,15 +1191,15 @@ else:
# TODO: Share context? Issue: pyx processing leaks into pxd module
@record_results
-def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None,
- raise_on_failure=True, embedded_metadata=None, full_module_name=None,
- progress=""):
- from ..Compiler.Main import compile_single, default_options
+def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None,
+ raise_on_failure=True, embedded_metadata=None, full_module_name=None,
+ progress=""):
+ from ..Compiler.Main import compile_single, default_options
from ..Compiler.Errors import CompileError, PyrexError
if fingerprint:
if not os.path.exists(options.cache):
- safe_makedirs(options.cache)
+ safe_makedirs(options.cache)
# Cython-generated c files are highly compressible.
# (E.g. a compression ratio of about 10 for Sage).
fingerprint_file_base = join_path(
@@ -1230,7 +1230,7 @@ def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None,
any_failures = 0
try:
- result = compile_single(pyx_file, options, full_module_name=full_module_name)
+ result = compile_single(pyx_file, options, full_module_name=full_module_name)
if result.num_errors > 0:
any_failures = 1
except (EnvironmentError, PyrexError) as e:
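
Among the restored lines above, file_hash() mixes a length-prefixed copy of the path into an MD5 digest before the file's bytes, so identical files at different locations still get distinct fingerprints. A condensed restatement of that helper:

    import hashlib
    import os

    def file_hash(filename):
        path = os.path.normpath(filename)
        # Seed the digest with "<len>:<path>" so the location matters too.
        m = hashlib.md5(('%d:%s' % (len(path), path)).encode('UTF-8'))
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(65000), b''):
                m.update(chunk)
        return m.hexdigest()

    print(file_hash(__file__))
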
diff --git a/contrib/tools/cython/Cython/Build/IpythonMagic.py b/contrib/tools/cython/Cython/Build/IpythonMagic.py
index 3b56be5525..7abb97ec70 100644
--- a/contrib/tools/cython/Cython/Build/IpythonMagic.py
+++ b/contrib/tools/cython/Cython/Build/IpythonMagic.py
@@ -52,13 +52,13 @@ import os
import re
import sys
import time
-import copy
-import distutils.log
-import textwrap
+import copy
+import distutils.log
+import textwrap
IO_ENCODING = sys.getfilesystemencoding()
IS_PY2 = sys.version_info[0] < 3
-
+
try:
reload
except NameError: # Python 3
@@ -88,20 +88,20 @@ from .Inline import cython_inline
from .Dependencies import cythonize
-PGO_CONFIG = {
- 'gcc': {
- 'gen': ['-fprofile-generate', '-fprofile-dir={TEMPDIR}'],
- 'use': ['-fprofile-use', '-fprofile-correction', '-fprofile-dir={TEMPDIR}'],
- },
- # blind copy from 'configure' script in CPython 3.7
- 'icc': {
- 'gen': ['-prof-gen'],
- 'use': ['-prof-use'],
- }
-}
-PGO_CONFIG['mingw32'] = PGO_CONFIG['gcc']
-
-
+PGO_CONFIG = {
+ 'gcc': {
+ 'gen': ['-fprofile-generate', '-fprofile-dir={TEMPDIR}'],
+ 'use': ['-fprofile-use', '-fprofile-correction', '-fprofile-dir={TEMPDIR}'],
+ },
+ # blind copy from 'configure' script in CPython 3.7
+ 'icc': {
+ 'gen': ['-prof-gen'],
+ 'use': ['-prof-use'],
+ }
+}
+PGO_CONFIG['mingw32'] = PGO_CONFIG['gcc']
+
+
if IS_PY2:
def encode_fs(name):
return name if isinstance(name, bytes) else name.encode(IO_ENCODING)
@@ -114,25 +114,25 @@ else:
class CythonMagics(Magics):
def __init__(self, shell):
- super(CythonMagics, self).__init__(shell)
+ super(CythonMagics, self).__init__(shell)
self._reloads = {}
self._code_cache = {}
self._pyximport_installed = False
def _import_all(self, module):
- mdict = module.__dict__
- if '__all__' in mdict:
- keys = mdict['__all__']
- else:
- keys = [k for k in mdict if not k.startswith('_')]
-
- for k in keys:
- try:
- self.shell.push({k: mdict[k]})
- except KeyError:
- msg = "'module' object has no attribute '%s'" % k
- raise AttributeError(msg)
-
+ mdict = module.__dict__
+ if '__all__' in mdict:
+ keys = mdict['__all__']
+ else:
+ keys = [k for k in mdict if not k.startswith('_')]
+
+ for k in keys:
+ try:
+ self.shell.push({k: mdict[k]})
+ except KeyError:
+ msg = "'module' object has no attribute '%s'" % k
+ raise AttributeError(msg)
+
@cell_magic
def cython_inline(self, line, cell):
"""Compile and run a Cython code cell using Cython.inline.
@@ -192,14 +192,14 @@ class CythonMagics(Magics):
@magic_arguments.magic_arguments()
@magic_arguments.argument(
- '-a', '--annotate', action='store_true', default=False,
- help="Produce a colorized HTML version of the source."
- )
- @magic_arguments.argument(
- '-+', '--cplus', action='store_true', default=False,
- help="Output a C++ rather than C file."
- )
- @magic_arguments.argument(
+ '-a', '--annotate', action='store_true', default=False,
+ help="Produce a colorized HTML version of the source."
+ )
+ @magic_arguments.argument(
+ '-+', '--cplus', action='store_true', default=False,
+ help="Output a C++ rather than C file."
+ )
+ @magic_arguments.argument(
'-3', dest='language_level', action='store_const', const=3, default=None,
help="Select Python 3 syntax."
)
@@ -208,11 +208,11 @@ class CythonMagics(Magics):
help="Select Python 2 syntax."
)
@magic_arguments.argument(
- '-f', '--force', action='store_true', default=False,
- help="Force the compilation of a new module, even if the source has been "
- "previously compiled."
- )
- @magic_arguments.argument(
+ '-f', '--force', action='store_true', default=False,
+ help="Force the compilation of a new module, even if the source has been "
+ "previously compiled."
+ )
+ @magic_arguments.argument(
'-c', '--compile-args', action='append', default=[],
help="Extra flags to pass to compiler via the `extra_compile_args` "
"Extension flag (can be specified multiple times)."
@@ -242,19 +242,19 @@ class CythonMagics(Magics):
"multiple times)."
)
@magic_arguments.argument(
- '-S', '--src', action='append', default=[],
- help="Add a path to the list of src files (can be specified "
- "multiple times)."
+ '-S', '--src', action='append', default=[],
+ help="Add a path to the list of src files (can be specified "
+ "multiple times)."
)
@magic_arguments.argument(
- '--pgo', dest='pgo', action='store_true', default=False,
- help=("Enable profile guided optimisation in the C compiler. "
- "Compiles the cell twice and executes it in between to generate a runtime profile.")
+ '--pgo', dest='pgo', action='store_true', default=False,
+ help=("Enable profile guided optimisation in the C compiler. "
+ "Compiles the cell twice and executes it in between to generate a runtime profile.")
)
@magic_arguments.argument(
- '--verbose', dest='quiet', action='store_false', default=True,
- help=("Print debug information like generated .c/.cpp file location "
- "and exact gcc/g++ command invoked.")
+ '--verbose', dest='quiet', action='store_false', default=True,
+ help=("Print debug information like generated .c/.cpp file location "
+ "and exact gcc/g++ command invoked.")
)
@cell_magic
def cython(self, line, cell):
@@ -276,78 +276,78 @@ class CythonMagics(Magics):
%%cython --compile-args=-fopenmp --link-args=-fopenmp
...
-
- To enable profile guided optimisation, pass the ``--pgo`` option.
- Note that the cell itself needs to take care of establishing a suitable
- profile when executed. This can be done by implementing the functions to
- optimise, and then calling them directly in the same cell on some realistic
- training data like this::
-
- %%cython --pgo
- def critical_function(data):
- for item in data:
- ...
-
- # execute function several times to build profile
- from somewhere import some_typical_data
- for _ in range(100):
- critical_function(some_typical_data)
-
- In Python 3.5 and later, you can distinguish between the profile and
- non-profile runs as follows::
-
- if "_pgo_" in __name__:
- ... # execute critical code here
+
+ To enable profile guided optimisation, pass the ``--pgo`` option.
+ Note that the cell itself needs to take care of establishing a suitable
+ profile when executed. This can be done by implementing the functions to
+ optimise, and then calling them directly in the same cell on some realistic
+ training data like this::
+
+ %%cython --pgo
+ def critical_function(data):
+ for item in data:
+ ...
+
+ # execute function several times to build profile
+ from somewhere import some_typical_data
+ for _ in range(100):
+ critical_function(some_typical_data)
+
+ In Python 3.5 and later, you can distinguish between the profile and
+ non-profile runs as follows::
+
+ if "_pgo_" in __name__:
+ ... # execute critical code here
"""
args = magic_arguments.parse_argstring(self.cython, line)
- code = cell if cell.endswith('\n') else cell + '\n'
+ code = cell if cell.endswith('\n') else cell + '\n'
lib_dir = os.path.join(get_ipython_cache_dir(), 'cython')
- key = (code, line, sys.version_info, sys.executable, cython_version)
+ key = (code, line, sys.version_info, sys.executable, cython_version)
if not os.path.exists(lib_dir):
os.makedirs(lib_dir)
- if args.pgo:
- key += ('pgo',)
+ if args.pgo:
+ key += ('pgo',)
if args.force:
# Force a new module name by adding the current time to the
# key which is hashed to determine the module name.
- key += (time.time(),)
+ key += (time.time(),)
if args.name:
module_name = str(args.name) # no-op in Py3
else:
module_name = "_cython_magic_" + hashlib.md5(str(key).encode('utf-8')).hexdigest()
- html_file = os.path.join(lib_dir, module_name + '.html')
+ html_file = os.path.join(lib_dir, module_name + '.html')
module_path = os.path.join(lib_dir, module_name + self.so_ext)
have_module = os.path.isfile(module_path)
- need_cythonize = args.pgo or not have_module
+ need_cythonize = args.pgo or not have_module
if args.annotate:
if not os.path.isfile(html_file):
need_cythonize = True
- extension = None
+ extension = None
if need_cythonize:
- extensions = self._cythonize(module_name, code, lib_dir, args, quiet=args.quiet)
+ extensions = self._cythonize(module_name, code, lib_dir, args, quiet=args.quiet)
if extensions is None:
# Compilation failed and printed error message
return None
- assert len(extensions) == 1
- extension = extensions[0]
+ assert len(extensions) == 1
+ extension = extensions[0]
self._code_cache[key] = module_name
- if args.pgo:
- self._profile_pgo_wrapper(extension, lib_dir)
-
+ if args.pgo:
+ self._profile_pgo_wrapper(extension, lib_dir)
+
try:
self._build_extension(extension, lib_dir, pgo_step_name='use' if args.pgo else None,
quiet=args.quiet)
except distutils.errors.CompileError:
# Build failed and printed error message
return None
-
+
module = imp.load_dynamic(module_name, module_path)
self._import_all(module)
@@ -366,129 +366,129 @@ class CythonMagics(Magics):
else:
return display.HTML(self.clean_annotated_html(annotated_html))
- def _profile_pgo_wrapper(self, extension, lib_dir):
- """
- Generate a .c file for a separate extension module that calls the
- module init function of the original module. This makes sure that the
- PGO profiler sees the correct .o file of the final module, but it still
- allows us to import the module under a different name for profiling,
- before recompiling it into the PGO optimised module. Overwriting and
- reimporting the same shared library is not portable.
- """
- extension = copy.copy(extension) # shallow copy, do not modify sources in place!
- module_name = extension.name
- pgo_module_name = '_pgo_' + module_name
- pgo_wrapper_c_file = os.path.join(lib_dir, pgo_module_name + '.c')
- with io.open(pgo_wrapper_c_file, 'w', encoding='utf-8') as f:
- f.write(textwrap.dedent(u"""
- #include "Python.h"
- #if PY_MAJOR_VERSION < 3
- extern PyMODINIT_FUNC init%(module_name)s(void);
- PyMODINIT_FUNC init%(pgo_module_name)s(void); /*proto*/
- PyMODINIT_FUNC init%(pgo_module_name)s(void) {
- PyObject *sys_modules;
- init%(module_name)s(); if (PyErr_Occurred()) return;
- sys_modules = PyImport_GetModuleDict(); /* borrowed, no exception, "never" fails */
- if (sys_modules) {
- PyObject *module = PyDict_GetItemString(sys_modules, "%(module_name)s"); if (!module) return;
- PyDict_SetItemString(sys_modules, "%(pgo_module_name)s", module);
- Py_DECREF(module);
- }
- }
- #else
- extern PyMODINIT_FUNC PyInit_%(module_name)s(void);
- PyMODINIT_FUNC PyInit_%(pgo_module_name)s(void); /*proto*/
- PyMODINIT_FUNC PyInit_%(pgo_module_name)s(void) {
- return PyInit_%(module_name)s();
- }
- #endif
- """ % {'module_name': module_name, 'pgo_module_name': pgo_module_name}))
-
- extension.sources = extension.sources + [pgo_wrapper_c_file] # do not modify in place!
- extension.name = pgo_module_name
-
- self._build_extension(extension, lib_dir, pgo_step_name='gen')
-
- # import and execute module code to generate profile
- so_module_path = os.path.join(lib_dir, pgo_module_name + self.so_ext)
- imp.load_dynamic(pgo_module_name, so_module_path)
-
- def _cythonize(self, module_name, code, lib_dir, args, quiet=True):
- pyx_file = os.path.join(lib_dir, module_name + '.pyx')
+ def _profile_pgo_wrapper(self, extension, lib_dir):
+ """
+ Generate a .c file for a separate extension module that calls the
+ module init function of the original module. This makes sure that the
+ PGO profiler sees the correct .o file of the final module, but it still
+ allows us to import the module under a different name for profiling,
+ before recompiling it into the PGO optimised module. Overwriting and
+ reimporting the same shared library is not portable.
+ """
+ extension = copy.copy(extension) # shallow copy, do not modify sources in place!
+ module_name = extension.name
+ pgo_module_name = '_pgo_' + module_name
+ pgo_wrapper_c_file = os.path.join(lib_dir, pgo_module_name + '.c')
+ with io.open(pgo_wrapper_c_file, 'w', encoding='utf-8') as f:
+ f.write(textwrap.dedent(u"""
+ #include "Python.h"
+ #if PY_MAJOR_VERSION < 3
+ extern PyMODINIT_FUNC init%(module_name)s(void);
+ PyMODINIT_FUNC init%(pgo_module_name)s(void); /*proto*/
+ PyMODINIT_FUNC init%(pgo_module_name)s(void) {
+ PyObject *sys_modules;
+ init%(module_name)s(); if (PyErr_Occurred()) return;
+ sys_modules = PyImport_GetModuleDict(); /* borrowed, no exception, "never" fails */
+ if (sys_modules) {
+ PyObject *module = PyDict_GetItemString(sys_modules, "%(module_name)s"); if (!module) return;
+ PyDict_SetItemString(sys_modules, "%(pgo_module_name)s", module);
+ Py_DECREF(module);
+ }
+ }
+ #else
+ extern PyMODINIT_FUNC PyInit_%(module_name)s(void);
+ PyMODINIT_FUNC PyInit_%(pgo_module_name)s(void); /*proto*/
+ PyMODINIT_FUNC PyInit_%(pgo_module_name)s(void) {
+ return PyInit_%(module_name)s();
+ }
+ #endif
+ """ % {'module_name': module_name, 'pgo_module_name': pgo_module_name}))
+
+ extension.sources = extension.sources + [pgo_wrapper_c_file] # do not modify in place!
+ extension.name = pgo_module_name
+
+ self._build_extension(extension, lib_dir, pgo_step_name='gen')
+
+ # import and execute module code to generate profile
+ so_module_path = os.path.join(lib_dir, pgo_module_name + self.so_ext)
+ imp.load_dynamic(pgo_module_name, so_module_path)
+
+ def _cythonize(self, module_name, code, lib_dir, args, quiet=True):
+ pyx_file = os.path.join(lib_dir, module_name + '.pyx')
pyx_file = encode_fs(pyx_file)
-
- c_include_dirs = args.include
- c_src_files = list(map(str, args.src))
- if 'numpy' in code:
- import numpy
- c_include_dirs.append(numpy.get_include())
- with io.open(pyx_file, 'w', encoding='utf-8') as f:
- f.write(code)
- extension = Extension(
- name=module_name,
- sources=[pyx_file] + c_src_files,
- include_dirs=c_include_dirs,
- library_dirs=args.library_dirs,
- extra_compile_args=args.compile_args,
- extra_link_args=args.link_args,
- libraries=args.lib,
- language='c++' if args.cplus else 'c',
- )
- try:
- opts = dict(
- quiet=quiet,
- annotate=args.annotate,
- force=True,
- )
- if args.language_level is not None:
- assert args.language_level in (2, 3)
- opts['language_level'] = args.language_level
- elif sys.version_info[0] >= 3:
- opts['language_level'] = 3
- return cythonize([extension], **opts)
- except CompileError:
- return None
-
- def _build_extension(self, extension, lib_dir, temp_dir=None, pgo_step_name=None, quiet=True):
- build_extension = self._get_build_extension(
- extension, lib_dir=lib_dir, temp_dir=temp_dir, pgo_step_name=pgo_step_name)
- old_threshold = None
- try:
- if not quiet:
- old_threshold = distutils.log.set_threshold(distutils.log.DEBUG)
- build_extension.run()
- finally:
- if not quiet and old_threshold is not None:
- distutils.log.set_threshold(old_threshold)
-
- def _add_pgo_flags(self, build_extension, step_name, temp_dir):
- compiler_type = build_extension.compiler.compiler_type
- if compiler_type == 'unix':
- compiler_cmd = build_extension.compiler.compiler_so
- # TODO: we could try to call "[cmd] --version" for better insights
- if not compiler_cmd:
- pass
- elif 'clang' in compiler_cmd or 'clang' in compiler_cmd[0]:
- compiler_type = 'clang'
- elif 'icc' in compiler_cmd or 'icc' in compiler_cmd[0]:
- compiler_type = 'icc'
- elif 'gcc' in compiler_cmd or 'gcc' in compiler_cmd[0]:
- compiler_type = 'gcc'
- elif 'g++' in compiler_cmd or 'g++' in compiler_cmd[0]:
- compiler_type = 'gcc'
- config = PGO_CONFIG.get(compiler_type)
- orig_flags = []
- if config and step_name in config:
- flags = [f.format(TEMPDIR=temp_dir) for f in config[step_name]]
- for extension in build_extension.extensions:
- orig_flags.append((extension.extra_compile_args, extension.extra_link_args))
- extension.extra_compile_args = extension.extra_compile_args + flags
- extension.extra_link_args = extension.extra_link_args + flags
- else:
- print("No PGO %s configuration known for C compiler type '%s'" % (step_name, compiler_type),
- file=sys.stderr)
- return orig_flags
-
+
+ c_include_dirs = args.include
+ c_src_files = list(map(str, args.src))
+ if 'numpy' in code:
+ import numpy
+ c_include_dirs.append(numpy.get_include())
+ with io.open(pyx_file, 'w', encoding='utf-8') as f:
+ f.write(code)
+ extension = Extension(
+ name=module_name,
+ sources=[pyx_file] + c_src_files,
+ include_dirs=c_include_dirs,
+ library_dirs=args.library_dirs,
+ extra_compile_args=args.compile_args,
+ extra_link_args=args.link_args,
+ libraries=args.lib,
+ language='c++' if args.cplus else 'c',
+ )
+ try:
+ opts = dict(
+ quiet=quiet,
+ annotate=args.annotate,
+ force=True,
+ )
+ if args.language_level is not None:
+ assert args.language_level in (2, 3)
+ opts['language_level'] = args.language_level
+ elif sys.version_info[0] >= 3:
+ opts['language_level'] = 3
+ return cythonize([extension], **opts)
+ except CompileError:
+ return None
+
+ def _build_extension(self, extension, lib_dir, temp_dir=None, pgo_step_name=None, quiet=True):
+ build_extension = self._get_build_extension(
+ extension, lib_dir=lib_dir, temp_dir=temp_dir, pgo_step_name=pgo_step_name)
+ old_threshold = None
+ try:
+ if not quiet:
+ old_threshold = distutils.log.set_threshold(distutils.log.DEBUG)
+ build_extension.run()
+ finally:
+ if not quiet and old_threshold is not None:
+ distutils.log.set_threshold(old_threshold)
+
+ def _add_pgo_flags(self, build_extension, step_name, temp_dir):
+ compiler_type = build_extension.compiler.compiler_type
+ if compiler_type == 'unix':
+ compiler_cmd = build_extension.compiler.compiler_so
+ # TODO: we could try to call "[cmd] --version" for better insights
+ if not compiler_cmd:
+ pass
+ elif 'clang' in compiler_cmd or 'clang' in compiler_cmd[0]:
+ compiler_type = 'clang'
+ elif 'icc' in compiler_cmd or 'icc' in compiler_cmd[0]:
+ compiler_type = 'icc'
+ elif 'gcc' in compiler_cmd or 'gcc' in compiler_cmd[0]:
+ compiler_type = 'gcc'
+ elif 'g++' in compiler_cmd or 'g++' in compiler_cmd[0]:
+ compiler_type = 'gcc'
+ config = PGO_CONFIG.get(compiler_type)
+ orig_flags = []
+ if config and step_name in config:
+ flags = [f.format(TEMPDIR=temp_dir) for f in config[step_name]]
+ for extension in build_extension.extensions:
+ orig_flags.append((extension.extra_compile_args, extension.extra_link_args))
+ extension.extra_compile_args = extension.extra_compile_args + flags
+ extension.extra_link_args = extension.extra_link_args + flags
+ else:
+ print("No PGO %s configuration known for C compiler type '%s'" % (step_name, compiler_type),
+ file=sys.stderr)
+ return orig_flags
+
@property
def so_ext(self):
"""The extension suffix for compiled modules."""
@@ -510,8 +510,8 @@ class CythonMagics(Magics):
else:
_path_created.clear()
- def _get_build_extension(self, extension=None, lib_dir=None, temp_dir=None,
- pgo_step_name=None, _build_ext=build_ext):
+ def _get_build_extension(self, extension=None, lib_dir=None, temp_dir=None,
+ pgo_step_name=None, _build_ext=build_ext):
self._clear_distutils_mkpath_cache()
dist = Distribution()
config_files = dist.find_config_files()
@@ -520,28 +520,28 @@ class CythonMagics(Magics):
except ValueError:
pass
dist.parse_config_files(config_files)
-
- if not temp_dir:
- temp_dir = lib_dir
- add_pgo_flags = self._add_pgo_flags
-
- if pgo_step_name:
- base_build_ext = _build_ext
- class _build_ext(_build_ext):
- def build_extensions(self):
- add_pgo_flags(self, pgo_step_name, temp_dir)
- base_build_ext.build_extensions(self)
-
- build_extension = _build_ext(dist)
+
+ if not temp_dir:
+ temp_dir = lib_dir
+ add_pgo_flags = self._add_pgo_flags
+
+ if pgo_step_name:
+ base_build_ext = _build_ext
+ class _build_ext(_build_ext):
+ def build_extensions(self):
+ add_pgo_flags(self, pgo_step_name, temp_dir)
+ base_build_ext.build_extensions(self)
+
+ build_extension = _build_ext(dist)
build_extension.finalize_options()
- if temp_dir:
+ if temp_dir:
temp_dir = encode_fs(temp_dir)
- build_extension.build_temp = temp_dir
- if lib_dir:
+ build_extension.build_temp = temp_dir
+ if lib_dir:
lib_dir = encode_fs(lib_dir)
- build_extension.build_lib = lib_dir
- if extension is not None:
- build_extension.extensions = [extension]
+ build_extension.build_lib = lib_dir
+ if extension is not None:
+ build_extension.extensions = [extension]
return build_extension
@staticmethod
@@ -556,10 +556,10 @@ class CythonMagics(Magics):
return html
__doc__ = __doc__.format(
- # rST doesn't see the -+ flag as part of an option list, so we
- # hide it from the module-level docstring.
- CYTHON_DOC=dedent(CythonMagics.cython.__doc__\
- .replace('-+, --cplus', '--cplus ')),
- CYTHON_INLINE_DOC=dedent(CythonMagics.cython_inline.__doc__),
- CYTHON_PYXIMPORT_DOC=dedent(CythonMagics.cython_pyximport.__doc__),
+ # rST doesn't see the -+ flag as part of an option list, so we
+ # hide it from the module-level docstring.
+ CYTHON_DOC=dedent(CythonMagics.cython.__doc__\
+ .replace('-+, --cplus', '--cplus ')),
+ CYTHON_INLINE_DOC=dedent(CythonMagics.cython_inline.__doc__),
+ CYTHON_PYXIMPORT_DOC=dedent(CythonMagics.cython_pyximport.__doc__),
)
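
The restored PGO_CONFIG above holds per-compiler flag pairs: 'gen' flags for an instrumented build and 'use' flags for the optimised rebuild, with the cell executed in between to collect a profile. A sketch of that two-step flow using the gcc entries from the diff ({TEMPDIR} replaced by an illustrative /tmp/pgo; build() is a stand-in for the real compile step):

    GEN = ['-fprofile-generate', '-fprofile-dir=/tmp/pgo']
    USE = ['-fprofile-use', '-fprofile-correction', '-fprofile-dir=/tmp/pgo']

    def build(extra_cflags):
        # Placeholder for the real compile step.
        print('cc %s module.c' % ' '.join(extra_cflags))

    build(GEN)  # step 1: instrumented build
    # ... run the instrumented module on realistic training data here ...
    build(USE)  # step 2: optimised rebuild that consumes the profile
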
diff --git a/contrib/tools/cython/Cython/Build/Tests/TestIpythonMagic.py b/contrib/tools/cython/Cython/Build/Tests/TestIpythonMagic.py
index 148d320e54..24213091b2 100644
--- a/contrib/tools/cython/Cython/Build/Tests/TestIpythonMagic.py
+++ b/contrib/tools/cython/Cython/Build/Tests/TestIpythonMagic.py
@@ -3,26 +3,26 @@
"""Tests for the Cython magics extension."""
-from __future__ import absolute_import
-
+from __future__ import absolute_import
+
import os
import sys
-from contextlib import contextmanager
-from Cython.Build import IpythonMagic
-from Cython.TestUtils import CythonTest
+from contextlib import contextmanager
+from Cython.Build import IpythonMagic
+from Cython.TestUtils import CythonTest
try:
- import IPython.testing.globalipapp
-except ImportError:
- # Disable tests and fake helpers for initialisation below.
- def skip_if_not_installed(_):
- return None
-else:
- def skip_if_not_installed(c):
- return c
-
+ import IPython.testing.globalipapp
+except ImportError:
+ # Disable tests and fake helpers for initialisation below.
+ def skip_if_not_installed(_):
+ return None
+else:
+ def skip_if_not_installed(c):
+ return c
+
try:
- # disable IPython history thread before it gets started to avoid having to clean it up
+ # disable IPython history thread before it gets started to avoid having to clean it up
from IPython.core.history import HistoryManager
HistoryManager.enabled = False
except ImportError:
@@ -42,52 +42,52 @@ def call(x):
"""
pgo_cython3_code = cython3_code + u"""\
-def main():
- for _ in range(100): call(5)
-main()
+def main():
+ for _ in range(100): call(5)
+main()
"""
-
+
if sys.platform == 'win32':
# not using IPython's decorators here because they depend on "nose"
try:
from unittest import skip as skip_win32
except ImportError:
# poor dev's silent @unittest.skip()
- def skip_win32(dummy):
- def _skip_win32(func):
- return None
- return _skip_win32
+ def skip_win32(dummy):
+ def _skip_win32(func):
+ return None
+ return _skip_win32
else:
- def skip_win32(dummy):
- def _skip_win32(func):
- def wrapper(*args, **kwargs):
- func(*args, **kwargs)
- return wrapper
- return _skip_win32
+ def skip_win32(dummy):
+ def _skip_win32(func):
+ def wrapper(*args, **kwargs):
+ func(*args, **kwargs)
+ return wrapper
+ return _skip_win32
-@skip_if_not_installed
+@skip_if_not_installed
class TestIPythonMagic(CythonTest):
- @classmethod
- def setUpClass(cls):
- CythonTest.setUpClass()
- cls._ip = IPython.testing.globalipapp.get_ipython()
-
+ @classmethod
+ def setUpClass(cls):
+ CythonTest.setUpClass()
+ cls._ip = IPython.testing.globalipapp.get_ipython()
+
def setUp(self):
CythonTest.setUp(self)
- self._ip.extension_manager.load_extension('cython')
+ self._ip.extension_manager.load_extension('cython')
def test_cython_inline(self):
- ip = self._ip
+ ip = self._ip
ip.ex('a=10; b=20')
result = ip.run_cell_magic('cython_inline', '', 'return a+b')
self.assertEqual(result, 30)
- @skip_win32('Skip on Windows')
+ @skip_win32('Skip on Windows')
def test_cython_pyximport(self):
- ip = self._ip
+ ip = self._ip
module_name = '_test_cython_pyximport'
ip.run_cell_magic('cython_pyximport', module_name, code)
ip.ex('g = f(10)')
@@ -101,14 +101,14 @@ class TestIPythonMagic(CythonTest):
pass
def test_cython(self):
- ip = self._ip
+ ip = self._ip
ip.run_cell_magic('cython', '', code)
ip.ex('g = f(10)')
self.assertEqual(ip.user_ns['g'], 20.0)
def test_cython_name(self):
# The Cython module named 'mymodule' defines the function f.
- ip = self._ip
+ ip = self._ip
ip.run_cell_magic('cython', '--name=mymodule', code)
# This module can now be imported in the interactive namespace.
ip.ex('import mymodule; g = mymodule.f(10)')
@@ -116,7 +116,7 @@ class TestIPythonMagic(CythonTest):
def test_cython_language_level(self):
# The Cython cell defines the functions f() and call().
- ip = self._ip
+ ip = self._ip
ip.run_cell_magic('cython', '', cython3_code)
ip.ex('g = f(10); h = call(10)')
if sys.version_info[0] < 3:
@@ -128,7 +128,7 @@ class TestIPythonMagic(CythonTest):
def test_cython3(self):
# The Cython cell defines the functions f() and call().
- ip = self._ip
+ ip = self._ip
ip.run_cell_magic('cython', '-3', cython3_code)
ip.ex('g = f(10); h = call(10)')
self.assertEqual(ip.user_ns['g'], 2.0 / 10.0)
@@ -136,24 +136,24 @@ class TestIPythonMagic(CythonTest):
def test_cython2(self):
# The Cython cell defines the functions f() and call().
- ip = self._ip
+ ip = self._ip
ip.run_cell_magic('cython', '-2', cython3_code)
ip.ex('g = f(10); h = call(10)')
self.assertEqual(ip.user_ns['g'], 2 // 10)
self.assertEqual(ip.user_ns['h'], 2 // 10)
- @skip_win32('Skip on Windows')
- def test_cython3_pgo(self):
- # The Cython cell defines the functions f() and call().
- ip = self._ip
- ip.run_cell_magic('cython', '-3 --pgo', pgo_cython3_code)
- ip.ex('g = f(10); h = call(10); main()')
- self.assertEqual(ip.user_ns['g'], 2.0 / 10.0)
- self.assertEqual(ip.user_ns['h'], 2.0 / 10.0)
-
- @skip_win32('Skip on Windows')
+ @skip_win32('Skip on Windows')
+ def test_cython3_pgo(self):
+ # The Cython cell defines the functions f() and call().
+ ip = self._ip
+ ip.run_cell_magic('cython', '-3 --pgo', pgo_cython3_code)
+ ip.ex('g = f(10); h = call(10); main()')
+ self.assertEqual(ip.user_ns['g'], 2.0 / 10.0)
+ self.assertEqual(ip.user_ns['h'], 2.0 / 10.0)
+
+ @skip_win32('Skip on Windows')
def test_extlibs(self):
- ip = self._ip
+ ip = self._ip
code = u"""
from libc.math cimport sin
x = sin(0.0)
@@ -161,45 +161,45 @@ x = sin(0.0)
ip.user_ns['x'] = 1
ip.run_cell_magic('cython', '-l m', code)
self.assertEqual(ip.user_ns['x'], 0)
-
-
- def test_cython_verbose(self):
- ip = self._ip
- ip.run_cell_magic('cython', '--verbose', code)
- ip.ex('g = f(10)')
- self.assertEqual(ip.user_ns['g'], 20.0)
-
- def test_cython_verbose_thresholds(self):
- @contextmanager
- def mock_distutils():
- class MockLog:
- DEBUG = 1
- INFO = 2
- thresholds = [INFO]
-
- def set_threshold(self, val):
- self.thresholds.append(val)
- return self.thresholds[-2]
-
-
- new_log = MockLog()
- old_log = IpythonMagic.distutils.log
- try:
- IpythonMagic.distutils.log = new_log
- yield new_log
- finally:
- IpythonMagic.distutils.log = old_log
-
- ip = self._ip
- with mock_distutils() as verbose_log:
- ip.run_cell_magic('cython', '--verbose', code)
- ip.ex('g = f(10)')
- self.assertEqual(ip.user_ns['g'], 20.0)
+
+
+ def test_cython_verbose(self):
+ ip = self._ip
+ ip.run_cell_magic('cython', '--verbose', code)
+ ip.ex('g = f(10)')
+ self.assertEqual(ip.user_ns['g'], 20.0)
+
+ def test_cython_verbose_thresholds(self):
+ @contextmanager
+ def mock_distutils():
+ class MockLog:
+ DEBUG = 1
+ INFO = 2
+ thresholds = [INFO]
+
+ def set_threshold(self, val):
+ self.thresholds.append(val)
+ return self.thresholds[-2]
+
+
+ new_log = MockLog()
+ old_log = IpythonMagic.distutils.log
+ try:
+ IpythonMagic.distutils.log = new_log
+ yield new_log
+ finally:
+ IpythonMagic.distutils.log = old_log
+
+ ip = self._ip
+ with mock_distutils() as verbose_log:
+ ip.run_cell_magic('cython', '--verbose', code)
+ ip.ex('g = f(10)')
+ self.assertEqual(ip.user_ns['g'], 20.0)
self.assertEqual([verbose_log.INFO, verbose_log.DEBUG, verbose_log.INFO],
- verbose_log.thresholds)
-
- with mock_distutils() as normal_log:
- ip.run_cell_magic('cython', '', code)
- ip.ex('g = f(10)')
- self.assertEqual(ip.user_ns['g'], 20.0)
+ verbose_log.thresholds)
+
+ with mock_distutils() as normal_log:
+ ip.run_cell_magic('cython', '', code)
+ ip.ex('g = f(10)')
+ self.assertEqual(ip.user_ns['g'], 20.0)
self.assertEqual([normal_log.INFO], normal_log.thresholds)
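
The test_cython_verbose_thresholds test above relies on a small monkeypatching idiom: swap a module-level attribute for a stub inside a contextmanager and restore it in a finally block so the patch cannot leak. A minimal, self-contained sketch of that pattern (the target object below is a hypothetical stand-in, not an IPython or distutils API):

from contextlib import contextmanager
import types

class _StubLog:
    # records every threshold set on it, like the MockLog in the test above
    thresholds = []

    def set_threshold(self, val):
        self.thresholds.append(val)

@contextmanager
def swap_attr(obj, name, replacement):
    # Save the original attribute, install the stub, and always restore it,
    # even if the body raises -- the same try/finally shape as mock_distutils().
    original = getattr(obj, name)
    setattr(obj, name, replacement)
    try:
        yield replacement
    finally:
        setattr(obj, name, original)

target = types.SimpleNamespace(log=None)  # hypothetical module-like object
with swap_attr(target, 'log', _StubLog()) as stub:
    target.log.set_threshold(2)
assert stub.thresholds == [2]
assert target.log is None  # original value restored on exit
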
diff --git a/contrib/tools/cython/Cython/CodeWriter.py b/contrib/tools/cython/Cython/CodeWriter.py
index 68e3137980..2e4646a654 100644
--- a/contrib/tools/cython/Cython/CodeWriter.py
+++ b/contrib/tools/cython/Cython/CodeWriter.py
@@ -85,7 +85,7 @@ class DeclarationWriter(TreeVisitor):
def visit_StatListNode(self, node):
self.visitchildren(node)
-
+
def visit_CDefExternNode(self, node):
if node.include_file is None:
file = u'*'
@@ -363,7 +363,7 @@ class CodeWriter(DeclarationWriter):
self.dedent()
def visit_IfStatNode(self, node):
- # The IfClauseNode is handled directly without a separate match
+ # The IfClauseNode is handled directly without a separate match
        # for clarity.
self.startline(u"if ")
self.visit(node.if_clauses[0].condition)
@@ -516,301 +516,301 @@ class PxdWriter(DeclarationWriter):
if node.api:
self.put(u'api ')
self.visit(node.declarator)
-
+
def visit_StatNode(self, node):
pass
-
-
-class ExpressionWriter(TreeVisitor):
-
- def __init__(self, result=None):
- super(ExpressionWriter, self).__init__()
- if result is None:
- result = u""
- self.result = result
- self.precedence = [0]
-
- def write(self, tree):
- self.visit(tree)
- return self.result
-
- def put(self, s):
- self.result += s
-
- def remove(self, s):
- if self.result.endswith(s):
- self.result = self.result[:-len(s)]
-
- def comma_separated_list(self, items):
- if len(items) > 0:
- for item in items[:-1]:
- self.visit(item)
- self.put(u", ")
- self.visit(items[-1])
-
- def visit_Node(self, node):
- raise AssertionError("Node not handled by serializer: %r" % node)
-
- def visit_NameNode(self, node):
- self.put(node.name)
-
- def visit_NoneNode(self, node):
- self.put(u"None")
-
- def visit_EllipsisNode(self, node):
- self.put(u"...")
-
- def visit_BoolNode(self, node):
- self.put(str(node.value))
-
- def visit_ConstNode(self, node):
- self.put(str(node.value))
-
- def visit_ImagNode(self, node):
- self.put(node.value)
- self.put(u"j")
-
- def emit_string(self, node, prefix=u""):
- repr_val = repr(node.value)
- if repr_val[0] in 'ub':
- repr_val = repr_val[1:]
- self.put(u"%s%s" % (prefix, repr_val))
-
- def visit_BytesNode(self, node):
- self.emit_string(node, u"b")
-
- def visit_StringNode(self, node):
- self.emit_string(node)
-
- def visit_UnicodeNode(self, node):
- self.emit_string(node, u"u")
-
- def emit_sequence(self, node, parens=(u"", u"")):
- open_paren, close_paren = parens
- items = node.subexpr_nodes()
- self.put(open_paren)
- self.comma_separated_list(items)
- self.put(close_paren)
-
- def visit_ListNode(self, node):
- self.emit_sequence(node, u"[]")
-
- def visit_TupleNode(self, node):
- self.emit_sequence(node, u"()")
-
- def visit_SetNode(self, node):
- if len(node.subexpr_nodes()) > 0:
- self.emit_sequence(node, u"{}")
- else:
- self.put(u"set()")
-
- def visit_DictNode(self, node):
- self.emit_sequence(node, u"{}")
-
- def visit_DictItemNode(self, node):
- self.visit(node.key)
- self.put(u": ")
- self.visit(node.value)
-
- unop_precedence = {
- 'not': 3, '!': 3,
- '+': 11, '-': 11, '~': 11,
- }
- binop_precedence = {
- 'or': 1,
- 'and': 2,
- # unary: 'not': 3, '!': 3,
- 'in': 4, 'not_in': 4, 'is': 4, 'is_not': 4, '<': 4, '<=': 4, '>': 4, '>=': 4, '!=': 4, '==': 4,
- '|': 5,
- '^': 6,
- '&': 7,
- '<<': 8, '>>': 8,
- '+': 9, '-': 9,
- '*': 10, '@': 10, '/': 10, '//': 10, '%': 10,
- # unary: '+': 11, '-': 11, '~': 11
- '**': 12,
- }
-
- def operator_enter(self, new_prec):
- old_prec = self.precedence[-1]
- if old_prec > new_prec:
- self.put(u"(")
- self.precedence.append(new_prec)
-
- def operator_exit(self):
- old_prec, new_prec = self.precedence[-2:]
- if old_prec > new_prec:
- self.put(u")")
- self.precedence.pop()
-
- def visit_NotNode(self, node):
- op = 'not'
- prec = self.unop_precedence[op]
- self.operator_enter(prec)
- self.put(u"not ")
- self.visit(node.operand)
- self.operator_exit()
-
- def visit_UnopNode(self, node):
- op = node.operator
- prec = self.unop_precedence[op]
- self.operator_enter(prec)
- self.put(u"%s" % node.operator)
- self.visit(node.operand)
- self.operator_exit()
-
- def visit_BinopNode(self, node):
- op = node.operator
- prec = self.binop_precedence.get(op, 0)
- self.operator_enter(prec)
- self.visit(node.operand1)
- self.put(u" %s " % op.replace('_', ' '))
- self.visit(node.operand2)
- self.operator_exit()
-
- def visit_BoolBinopNode(self, node):
- self.visit_BinopNode(node)
-
- def visit_PrimaryCmpNode(self, node):
- self.visit_BinopNode(node)
-
- def visit_IndexNode(self, node):
- self.visit(node.base)
- self.put(u"[")
- if isinstance(node.index, TupleNode):
- self.emit_sequence(node.index)
- else:
- self.visit(node.index)
- self.put(u"]")
-
- def visit_SliceIndexNode(self, node):
- self.visit(node.base)
- self.put(u"[")
- if node.start:
- self.visit(node.start)
- self.put(u":")
- if node.stop:
- self.visit(node.stop)
- if node.slice:
- self.put(u":")
- self.visit(node.slice)
- self.put(u"]")
-
- def visit_SliceNode(self, node):
- if not node.start.is_none:
- self.visit(node.start)
- self.put(u":")
- if not node.stop.is_none:
- self.visit(node.stop)
- if not node.step.is_none:
- self.put(u":")
- self.visit(node.step)
-
- def visit_CondExprNode(self, node):
- self.visit(node.true_val)
- self.put(u" if ")
- self.visit(node.test)
- self.put(u" else ")
- self.visit(node.false_val)
-
- def visit_AttributeNode(self, node):
- self.visit(node.obj)
- self.put(u".%s" % node.attribute)
-
- def visit_SimpleCallNode(self, node):
- self.visit(node.function)
- self.put(u"(")
- self.comma_separated_list(node.args)
- self.put(")")
-
- def emit_pos_args(self, node):
- if node is None:
- return
- if isinstance(node, AddNode):
- self.emit_pos_args(node.operand1)
- self.emit_pos_args(node.operand2)
- elif isinstance(node, TupleNode):
- for expr in node.subexpr_nodes():
- self.visit(expr)
- self.put(u", ")
- elif isinstance(node, AsTupleNode):
- self.put("*")
- self.visit(node.arg)
- self.put(u", ")
- else:
- self.visit(node)
- self.put(u", ")
-
- def emit_kwd_args(self, node):
- if node is None:
- return
- if isinstance(node, MergedDictNode):
- for expr in node.subexpr_nodes():
- self.emit_kwd_args(expr)
- elif isinstance(node, DictNode):
- for expr in node.subexpr_nodes():
- self.put(u"%s=" % expr.key.value)
- self.visit(expr.value)
- self.put(u", ")
- else:
- self.put(u"**")
- self.visit(node)
- self.put(u", ")
-
- def visit_GeneralCallNode(self, node):
- self.visit(node.function)
- self.put(u"(")
- self.emit_pos_args(node.positional_args)
- self.emit_kwd_args(node.keyword_args)
- self.remove(u", ")
- self.put(")")
-
- def emit_comprehension(self, body, target,
- sequence, condition,
- parens=(u"", u"")):
- open_paren, close_paren = parens
- self.put(open_paren)
- self.visit(body)
- self.put(u" for ")
- self.visit(target)
- self.put(u" in ")
- self.visit(sequence)
- if condition:
- self.put(u" if ")
- self.visit(condition)
- self.put(close_paren)
-
- def visit_ComprehensionAppendNode(self, node):
- self.visit(node.expr)
-
- def visit_DictComprehensionAppendNode(self, node):
- self.visit(node.key_expr)
- self.put(u": ")
- self.visit(node.value_expr)
-
- def visit_ComprehensionNode(self, node):
- tpmap = {'list': u"[]", 'dict': u"{}", 'set': u"{}"}
- parens = tpmap[node.type.py_type_name()]
- body = node.loop.body
- target = node.loop.target
- sequence = node.loop.iterator.sequence
- condition = None
- if hasattr(body, 'if_clauses'):
- # type(body) is Nodes.IfStatNode
- condition = body.if_clauses[0].condition
- body = body.if_clauses[0].body
- self.emit_comprehension(body, target, sequence, condition, parens)
-
- def visit_GeneratorExpressionNode(self, node):
- body = node.loop.body
- target = node.loop.target
- sequence = node.loop.iterator.sequence
- condition = None
- if hasattr(body, 'if_clauses'):
- # type(body) is Nodes.IfStatNode
- condition = body.if_clauses[0].condition
- body = body.if_clauses[0].body.expr.arg
- elif hasattr(body, 'expr'):
- # type(body) is Nodes.ExprStatNode
- body = body.expr.arg
- self.emit_comprehension(body, target, sequence, condition, u"()")
+
+
+class ExpressionWriter(TreeVisitor):
+
+ def __init__(self, result=None):
+ super(ExpressionWriter, self).__init__()
+ if result is None:
+ result = u""
+ self.result = result
+ self.precedence = [0]
+
+ def write(self, tree):
+ self.visit(tree)
+ return self.result
+
+ def put(self, s):
+ self.result += s
+
+ def remove(self, s):
+ if self.result.endswith(s):
+ self.result = self.result[:-len(s)]
+
+ def comma_separated_list(self, items):
+ if len(items) > 0:
+ for item in items[:-1]:
+ self.visit(item)
+ self.put(u", ")
+ self.visit(items[-1])
+
+ def visit_Node(self, node):
+ raise AssertionError("Node not handled by serializer: %r" % node)
+
+ def visit_NameNode(self, node):
+ self.put(node.name)
+
+ def visit_NoneNode(self, node):
+ self.put(u"None")
+
+ def visit_EllipsisNode(self, node):
+ self.put(u"...")
+
+ def visit_BoolNode(self, node):
+ self.put(str(node.value))
+
+ def visit_ConstNode(self, node):
+ self.put(str(node.value))
+
+ def visit_ImagNode(self, node):
+ self.put(node.value)
+ self.put(u"j")
+
+ def emit_string(self, node, prefix=u""):
+ repr_val = repr(node.value)
+ if repr_val[0] in 'ub':
+ repr_val = repr_val[1:]
+ self.put(u"%s%s" % (prefix, repr_val))
+
+ def visit_BytesNode(self, node):
+ self.emit_string(node, u"b")
+
+ def visit_StringNode(self, node):
+ self.emit_string(node)
+
+ def visit_UnicodeNode(self, node):
+ self.emit_string(node, u"u")
+
+ def emit_sequence(self, node, parens=(u"", u"")):
+ open_paren, close_paren = parens
+ items = node.subexpr_nodes()
+ self.put(open_paren)
+ self.comma_separated_list(items)
+ self.put(close_paren)
+
+ def visit_ListNode(self, node):
+ self.emit_sequence(node, u"[]")
+
+ def visit_TupleNode(self, node):
+ self.emit_sequence(node, u"()")
+
+ def visit_SetNode(self, node):
+ if len(node.subexpr_nodes()) > 0:
+ self.emit_sequence(node, u"{}")
+ else:
+ self.put(u"set()")
+
+ def visit_DictNode(self, node):
+ self.emit_sequence(node, u"{}")
+
+ def visit_DictItemNode(self, node):
+ self.visit(node.key)
+ self.put(u": ")
+ self.visit(node.value)
+
+ unop_precedence = {
+ 'not': 3, '!': 3,
+ '+': 11, '-': 11, '~': 11,
+ }
+ binop_precedence = {
+ 'or': 1,
+ 'and': 2,
+ # unary: 'not': 3, '!': 3,
+ 'in': 4, 'not_in': 4, 'is': 4, 'is_not': 4, '<': 4, '<=': 4, '>': 4, '>=': 4, '!=': 4, '==': 4,
+ '|': 5,
+ '^': 6,
+ '&': 7,
+ '<<': 8, '>>': 8,
+ '+': 9, '-': 9,
+ '*': 10, '@': 10, '/': 10, '//': 10, '%': 10,
+ # unary: '+': 11, '-': 11, '~': 11
+ '**': 12,
+ }
+
+ def operator_enter(self, new_prec):
+ old_prec = self.precedence[-1]
+ if old_prec > new_prec:
+ self.put(u"(")
+ self.precedence.append(new_prec)
+
+ def operator_exit(self):
+ old_prec, new_prec = self.precedence[-2:]
+ if old_prec > new_prec:
+ self.put(u")")
+ self.precedence.pop()
+
+ def visit_NotNode(self, node):
+ op = 'not'
+ prec = self.unop_precedence[op]
+ self.operator_enter(prec)
+ self.put(u"not ")
+ self.visit(node.operand)
+ self.operator_exit()
+
+ def visit_UnopNode(self, node):
+ op = node.operator
+ prec = self.unop_precedence[op]
+ self.operator_enter(prec)
+ self.put(u"%s" % node.operator)
+ self.visit(node.operand)
+ self.operator_exit()
+
+ def visit_BinopNode(self, node):
+ op = node.operator
+ prec = self.binop_precedence.get(op, 0)
+ self.operator_enter(prec)
+ self.visit(node.operand1)
+ self.put(u" %s " % op.replace('_', ' '))
+ self.visit(node.operand2)
+ self.operator_exit()
+
+ def visit_BoolBinopNode(self, node):
+ self.visit_BinopNode(node)
+
+ def visit_PrimaryCmpNode(self, node):
+ self.visit_BinopNode(node)
+
+ def visit_IndexNode(self, node):
+ self.visit(node.base)
+ self.put(u"[")
+ if isinstance(node.index, TupleNode):
+ self.emit_sequence(node.index)
+ else:
+ self.visit(node.index)
+ self.put(u"]")
+
+ def visit_SliceIndexNode(self, node):
+ self.visit(node.base)
+ self.put(u"[")
+ if node.start:
+ self.visit(node.start)
+ self.put(u":")
+ if node.stop:
+ self.visit(node.stop)
+ if node.slice:
+ self.put(u":")
+ self.visit(node.slice)
+ self.put(u"]")
+
+ def visit_SliceNode(self, node):
+ if not node.start.is_none:
+ self.visit(node.start)
+ self.put(u":")
+ if not node.stop.is_none:
+ self.visit(node.stop)
+ if not node.step.is_none:
+ self.put(u":")
+ self.visit(node.step)
+
+ def visit_CondExprNode(self, node):
+ self.visit(node.true_val)
+ self.put(u" if ")
+ self.visit(node.test)
+ self.put(u" else ")
+ self.visit(node.false_val)
+
+ def visit_AttributeNode(self, node):
+ self.visit(node.obj)
+ self.put(u".%s" % node.attribute)
+
+ def visit_SimpleCallNode(self, node):
+ self.visit(node.function)
+ self.put(u"(")
+ self.comma_separated_list(node.args)
+ self.put(")")
+
+ def emit_pos_args(self, node):
+ if node is None:
+ return
+ if isinstance(node, AddNode):
+ self.emit_pos_args(node.operand1)
+ self.emit_pos_args(node.operand2)
+ elif isinstance(node, TupleNode):
+ for expr in node.subexpr_nodes():
+ self.visit(expr)
+ self.put(u", ")
+ elif isinstance(node, AsTupleNode):
+ self.put("*")
+ self.visit(node.arg)
+ self.put(u", ")
+ else:
+ self.visit(node)
+ self.put(u", ")
+
+ def emit_kwd_args(self, node):
+ if node is None:
+ return
+ if isinstance(node, MergedDictNode):
+ for expr in node.subexpr_nodes():
+ self.emit_kwd_args(expr)
+ elif isinstance(node, DictNode):
+ for expr in node.subexpr_nodes():
+ self.put(u"%s=" % expr.key.value)
+ self.visit(expr.value)
+ self.put(u", ")
+ else:
+ self.put(u"**")
+ self.visit(node)
+ self.put(u", ")
+
+ def visit_GeneralCallNode(self, node):
+ self.visit(node.function)
+ self.put(u"(")
+ self.emit_pos_args(node.positional_args)
+ self.emit_kwd_args(node.keyword_args)
+ self.remove(u", ")
+ self.put(")")
+
+ def emit_comprehension(self, body, target,
+ sequence, condition,
+ parens=(u"", u"")):
+ open_paren, close_paren = parens
+ self.put(open_paren)
+ self.visit(body)
+ self.put(u" for ")
+ self.visit(target)
+ self.put(u" in ")
+ self.visit(sequence)
+ if condition:
+ self.put(u" if ")
+ self.visit(condition)
+ self.put(close_paren)
+
+ def visit_ComprehensionAppendNode(self, node):
+ self.visit(node.expr)
+
+ def visit_DictComprehensionAppendNode(self, node):
+ self.visit(node.key_expr)
+ self.put(u": ")
+ self.visit(node.value_expr)
+
+ def visit_ComprehensionNode(self, node):
+ tpmap = {'list': u"[]", 'dict': u"{}", 'set': u"{}"}
+ parens = tpmap[node.type.py_type_name()]
+ body = node.loop.body
+ target = node.loop.target
+ sequence = node.loop.iterator.sequence
+ condition = None
+ if hasattr(body, 'if_clauses'):
+ # type(body) is Nodes.IfStatNode
+ condition = body.if_clauses[0].condition
+ body = body.if_clauses[0].body
+ self.emit_comprehension(body, target, sequence, condition, parens)
+
+ def visit_GeneratorExpressionNode(self, node):
+ body = node.loop.body
+ target = node.loop.target
+ sequence = node.loop.iterator.sequence
+ condition = None
+ if hasattr(body, 'if_clauses'):
+ # type(body) is Nodes.IfStatNode
+ condition = body.if_clauses[0].condition
+ body = body.if_clauses[0].body.expr.arg
+ elif hasattr(body, 'expr'):
+ # type(body) is Nodes.ExprStatNode
+ body = body.expr.arg
+ self.emit_comprehension(body, target, sequence, condition, u"()")
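
The ExpressionWriter restored above serializes expression trees with a precedence stack: operator_enter() emits '(' only when the enclosing operator binds tighter than the new one, and operator_exit() closes it on the way out. A stripped-down sketch of the same technique on a toy expression tree (the Bin node class is illustrative, not Cython's AST):

PREC = {'or': 1, 'and': 2, '+': 9, '*': 10}

class Bin:
    def __init__(self, op, left, right):
        self.op, self.left, self.right = op, left, right

def write(node, out, prec_stack=None):
    if prec_stack is None:
        prec_stack = [0]
    if isinstance(node, str):            # leaf: a bare name
        out.append(node)
        return
    new_prec = PREC[node.op]
    old_prec = prec_stack[-1]
    if old_prec > new_prec:              # enclosing operator binds tighter,
        out.append('(')                  # so parenthesize, like operator_enter()
    prec_stack.append(new_prec)
    write(node.left, out, prec_stack)
    out.append(' %s ' % node.op)
    write(node.right, out, prec_stack)
    prec_stack.pop()                     # operator_exit(): close if we opened
    if old_prec > new_prec:
        out.append(')')

out = []
write(Bin('*', 'a', Bin('+', 'b', 'c')), out)
assert ''.join(out) == 'a * (b + c)'     # inner '+' wrapped, outer '*' not
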
diff --git a/contrib/tools/cython/Cython/Compiler/Annotate.py b/contrib/tools/cython/Cython/Compiler/Annotate.py
index 51617cee03..2ea38c00c7 100644
--- a/contrib/tools/cython/Cython/Compiler/Annotate.py
+++ b/contrib/tools/cython/Cython/Compiler/Annotate.py
@@ -109,14 +109,14 @@ class AnnotationCCodeWriter(CCodeWriter):
.cython.code .c_call { color: #0000FF; }
""")
- # on-click toggle function to show/hide C source code
- _onclick_attr = ' onclick="{0}"'.format((
- "(function(s){"
- " s.display = s.display === 'block' ? 'none' : 'block'"
- "})(this.nextElementSibling.style)"
- ).replace(' ', '') # poor dev's JS minification
- )
-
+ # on-click toggle function to show/hide C source code
+ _onclick_attr = ' onclick="{0}"'.format((
+ "(function(s){"
+ " s.display = s.display === 'block' ? 'none' : 'block'"
+ "})(this.nextElementSibling.style)"
+ ).replace(' ', '') # poor dev's JS minification
+ )
+
def save_annotation(self, source_filename, target_filename, coverage_xml=None):
with Utils.open_source_file(source_filename) as f:
code = f.read()
@@ -151,7 +151,7 @@ class AnnotationCCodeWriter(CCodeWriter):
<span style="background-color: #FFFF00">Yellow lines</span> hint at Python interaction.<br />
Click on a line that starts with a "<code>+</code>" to see the C code that Cython generated for it.
</p>
- ''').format(css=self._css(), watermark=Version.watermark,
+ ''').format(css=self._css(), watermark=Version.watermark,
filename=os.path.basename(source_filename) if source_filename else '',
more_info=coverage_info)
]
@@ -253,7 +253,7 @@ class AnnotationCCodeWriter(CCodeWriter):
calls['py_macro_api'] + calls['pyx_macro_api'])
if c_code:
- onclick = self._onclick_attr
+ onclick = self._onclick_attr
expandsymbol = '+'
else:
onclick = ''
@@ -294,7 +294,7 @@ _parse_code = re.compile((
br'(?P<py_macro_api>Py[A-Z][a-z]+_[A-Z][A-Z_]+)|'
br'(?P<py_c_api>Py[A-Z][a-z]+_[A-Z][a-z][A-Za-z_]*)'
br')(?=\()|' # look-ahead to exclude subsequent '(' from replacement
- br'(?P<error_goto>(?:(?<=;) *if [^;]* +)?__PYX_ERR\([^)]+\))'
+ br'(?P<error_goto>(?:(?<=;) *if [^;]* +)?__PYX_ERR\([^)]+\))'
).decode('ascii')).sub
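
The _onclick_attr block above builds the inline JavaScript readably across several string literals, then strips every space at class-definition time. What that replace(' ', '') minification produces, sketched with the same strings:

handler = (
    "(function(s){"
    " s.display = s.display === 'block' ? 'none' : 'block'"
    "})(this.nextElementSibling.style)"
).replace(' ', '')  # strip every space at class-definition time

onclick_attr = ' onclick="{0}"'.format(handler)
print(onclick_attr)
# onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)"
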
diff --git a/contrib/tools/cython/Cython/Compiler/AutoDocTransforms.py b/contrib/tools/cython/Cython/Compiler/AutoDocTransforms.py
index b18d42030a..d3c0a1d0da 100644
--- a/contrib/tools/cython/Cython/Compiler/AutoDocTransforms.py
+++ b/contrib/tools/cython/Cython/Compiler/AutoDocTransforms.py
@@ -1,22 +1,22 @@
-from __future__ import absolute_import, print_function
+from __future__ import absolute_import, print_function
from .Visitor import CythonTransform
from .StringEncoding import EncodedString
from . import Options
from . import PyrexTypes, ExprNodes
-from ..CodeWriter import ExpressionWriter
-
-
-class AnnotationWriter(ExpressionWriter):
-
- def visit_Node(self, node):
- self.put(u"<???>")
-
- def visit_LambdaNode(self, node):
- # XXX Should we do better?
- self.put("<lambda>")
-
-
+from ..CodeWriter import ExpressionWriter
+
+
+class AnnotationWriter(ExpressionWriter):
+
+ def visit_Node(self, node):
+ self.put(u"<???>")
+
+ def visit_LambdaNode(self, node):
+ # XXX Should we do better?
+ self.put("<lambda>")
+
+
class EmbedSignature(CythonTransform):
def __init__(self, context):
@@ -24,10 +24,10 @@ class EmbedSignature(CythonTransform):
self.class_name = None
self.class_node = None
- def _fmt_expr(self, node):
- writer = AnnotationWriter()
- result = writer.write(node)
- # print(type(node).__name__, '-->', result)
+ def _fmt_expr(self, node):
+ writer = AnnotationWriter()
+ result = writer.write(node)
+ # print(type(node).__name__, '-->', result)
return result
def _fmt_arg(self, arg):
@@ -35,25 +35,25 @@ class EmbedSignature(CythonTransform):
doc = arg.name
else:
doc = arg.type.declaration_code(arg.name, for_display=1)
-
- if arg.annotation:
- annotation = self._fmt_expr(arg.annotation)
- doc = doc + (': %s' % annotation)
- if arg.default:
- default = self._fmt_expr(arg.default)
- doc = doc + (' = %s' % default)
- elif arg.default:
- default = self._fmt_expr(arg.default)
- doc = doc + ('=%s' % default)
+
+ if arg.annotation:
+ annotation = self._fmt_expr(arg.annotation)
+ doc = doc + (': %s' % annotation)
+ if arg.default:
+ default = self._fmt_expr(arg.default)
+ doc = doc + (' = %s' % default)
+ elif arg.default:
+ default = self._fmt_expr(arg.default)
+ doc = doc + ('=%s' % default)
return doc
- def _fmt_star_arg(self, arg):
- arg_doc = arg.name
- if arg.annotation:
- annotation = self._fmt_expr(arg.annotation)
- arg_doc = arg_doc + (': %s' % annotation)
- return arg_doc
-
+ def _fmt_star_arg(self, arg):
+ arg_doc = arg.name
+ if arg.annotation:
+ annotation = self._fmt_expr(arg.annotation)
+ arg_doc = arg_doc + (': %s' % annotation)
+ return arg_doc
+
def _fmt_arglist(self, args,
npargs=0, pargs=None,
nkargs=0, kargs=None,
@@ -64,13 +64,13 @@ class EmbedSignature(CythonTransform):
arg_doc = self._fmt_arg(arg)
arglist.append(arg_doc)
if pargs:
- arg_doc = self._fmt_star_arg(pargs)
- arglist.insert(npargs, '*%s' % arg_doc)
+ arg_doc = self._fmt_star_arg(pargs)
+ arglist.insert(npargs, '*%s' % arg_doc)
elif nkargs:
arglist.insert(npargs, '*')
if kargs:
- arg_doc = self._fmt_star_arg(kargs)
- arglist.append('**%s' % arg_doc)
+ arg_doc = self._fmt_star_arg(kargs)
+ arglist.append('**%s' % arg_doc)
return arglist
def _fmt_ret_type(self, ret):
@@ -82,7 +82,7 @@ class EmbedSignature(CythonTransform):
def _fmt_signature(self, cls_name, func_name, args,
npargs=0, pargs=None,
nkargs=0, kargs=None,
- return_expr=None,
+ return_expr=None,
return_type=None, hide_self=False):
arglist = self._fmt_arglist(args,
npargs, pargs,
@@ -92,13 +92,13 @@ class EmbedSignature(CythonTransform):
func_doc = '%s(%s)' % (func_name, arglist_doc)
if cls_name:
func_doc = '%s.%s' % (cls_name, func_doc)
- ret_doc = None
- if return_expr:
- ret_doc = self._fmt_expr(return_expr)
- elif return_type:
+ ret_doc = None
+ if return_expr:
+ ret_doc = self._fmt_expr(return_expr)
+ elif return_type:
ret_doc = self._fmt_ret_type(return_type)
- if ret_doc:
- func_doc = '%s -> %s' % (func_doc, ret_doc)
+ if ret_doc:
+ func_doc = '%s -> %s' % (func_doc, ret_doc)
return func_doc
def _embed_signature(self, signature, node_doc):
@@ -153,7 +153,7 @@ class EmbedSignature(CythonTransform):
class_name, func_name, node.args,
npargs, node.star_arg,
nkargs, node.starstar_arg,
- return_expr=node.return_type_annotation,
+ return_expr=node.return_type_annotation,
return_type=None, hide_self=hide_self)
if signature:
if is_constructor:
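
The _fmt_arg and _fmt_signature changes above assemble a PEP-484-style signature string: an annotation joins with ': ', a default joins with ' = ' when the argument is annotated (and a compact '=' when it is not), and a return annotation is appended with ' -> '. A small sketch of that string assembly, with plain tuples standing in for Cython's arg nodes:

def fmt_arg(name, annotation=None, default=None):
    doc = name
    if annotation:
        doc += ': %s' % annotation
        if default is not None:
            doc += ' = %s' % default     # spaced form when annotated
    elif default is not None:
        doc += '=%s' % default           # compact form otherwise
    return doc

def fmt_signature(func_name, args, return_annotation=None):
    sig = '%s(%s)' % (func_name, ', '.join(fmt_arg(*a) for a in args))
    if return_annotation:
        sig = '%s -> %s' % (sig, return_annotation)
    return sig

print(fmt_signature('clamp',
                    [('x', 'float', None), ('lo', 'float', '0.0'), ('hi', None, '1.0')],
                    'float'))
# clamp(x: float, lo: float = 0.0, hi=1.0) -> float
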
diff --git a/contrib/tools/cython/Cython/Compiler/Buffer.py b/contrib/tools/cython/Cython/Compiler/Buffer.py
index f8c70b156b..c62a24f568 100644
--- a/contrib/tools/cython/Cython/Compiler/Buffer.py
+++ b/contrib/tools/cython/Cython/Compiler/Buffer.py
@@ -316,7 +316,7 @@ def put_init_vars(entry, code):
code.putln("%s.data = NULL;" % pybuffernd_struct)
code.putln("%s.rcbuffer = &%s;" % (pybuffernd_struct, pybuffer_struct))
-
+
def put_acquire_arg_buffer(entry, code, pos):
buffer_aux = entry.buffer_aux
getbuffer = get_getbuffer_call(code, entry.cname, buffer_aux, entry.type)
@@ -326,16 +326,16 @@ def put_acquire_arg_buffer(entry, code, pos):
code.putln("__Pyx_BufFmt_StackElem __pyx_stack[%d];" % entry.type.dtype.struct_nesting_depth())
code.putln(code.error_goto_if("%s == -1" % getbuffer, pos))
code.putln("}")
- # An exception raised in arg parsing cannot be caught, so no
+ # An exception raised in arg parsing cannot be caught, so no
# need to care about the buffer then.
put_unpack_buffer_aux_into_scope(entry, code)
-
+
def put_release_buffer_code(code, entry):
code.globalstate.use_utility_code(acquire_utility_code)
code.putln("__Pyx_SafeReleaseBuffer(&%s.rcbuffer->pybuffer);" % entry.buffer_aux.buflocal_nd_var.cname)
-
+
def get_getbuffer_call(code, obj_cname, buffer_aux, buffer_type):
ndim = buffer_type.ndim
cast = int(buffer_type.cast)
@@ -344,12 +344,12 @@ def get_getbuffer_call(code, obj_cname, buffer_aux, buffer_type):
dtype_typeinfo = get_type_information_cname(code, buffer_type.dtype)
- code.globalstate.use_utility_code(acquire_utility_code)
+ code.globalstate.use_utility_code(acquire_utility_code)
return ("__Pyx_GetBufferAndValidate(&%(pybuffernd_struct)s.rcbuffer->pybuffer, "
"(PyObject*)%(obj_cname)s, &%(dtype_typeinfo)s, %(flags)s, %(ndim)d, "
"%(cast)d, __pyx_stack)" % locals())
-
+
def put_assign_to_buffer(lhs_cname, rhs_cname, buf_entry,
is_initialized, pos, code):
"""
@@ -370,7 +370,7 @@ def put_assign_to_buffer(lhs_cname, rhs_cname, buf_entry,
pybuffernd_struct = buffer_aux.buflocal_nd_var.cname
flags = get_flags(buffer_aux, buffer_type)
- code.putln("{") # Set up necessary stack for getbuffer
+ code.putln("{") # Set up necessary stack for getbuffer
code.putln("__Pyx_BufFmt_StackElem __pyx_stack[%d];" % buffer_type.dtype.struct_nesting_depth())
getbuffer = get_getbuffer_call(code, "%s", buffer_aux, buffer_type) # fill in object below
@@ -386,18 +386,18 @@ def put_assign_to_buffer(lhs_cname, rhs_cname, buf_entry,
# before raising the exception. A failure of reacquisition
# will cause the reacquisition exception to be reported, one
# can consider working around this later.
- exc_temps = tuple(code.funcstate.allocate_temp(PyrexTypes.py_object_type, manage_ref=False)
- for _ in range(3))
- code.putln('PyErr_Fetch(&%s, &%s, &%s);' % exc_temps)
+ exc_temps = tuple(code.funcstate.allocate_temp(PyrexTypes.py_object_type, manage_ref=False)
+ for _ in range(3))
+ code.putln('PyErr_Fetch(&%s, &%s, &%s);' % exc_temps)
code.putln('if (%s) {' % code.unlikely("%s == -1" % (getbuffer % lhs_cname)))
- code.putln('Py_XDECREF(%s); Py_XDECREF(%s); Py_XDECREF(%s);' % exc_temps) # Do not refnanny these!
+ code.putln('Py_XDECREF(%s); Py_XDECREF(%s); Py_XDECREF(%s);' % exc_temps) # Do not refnanny these!
code.globalstate.use_utility_code(raise_buffer_fallback_code)
code.putln('__Pyx_RaiseBufferFallbackError();')
code.putln('} else {')
- code.putln('PyErr_Restore(%s, %s, %s);' % exc_temps)
- code.putln('}')
- code.putln('%s = %s = %s = 0;' % exc_temps)
- for t in exc_temps:
+ code.putln('PyErr_Restore(%s, %s, %s);' % exc_temps)
+ code.putln('}')
+ code.putln('%s = %s = %s = 0;' % exc_temps)
+ for t in exc_temps:
code.funcstate.release_temp(t)
code.putln('}')
# Unpack indices
@@ -512,7 +512,7 @@ def buf_lookup_full_code(proto, defin, name, nd):
""") % (i, i, i, i) for i in range(nd)]
) + "\nreturn ptr;\n}")
-
+
def buf_lookup_strided_code(proto, defin, name, nd):
"""
Generates a buffer lookup function for the right number
@@ -523,7 +523,7 @@ def buf_lookup_strided_code(proto, defin, name, nd):
offset = " + ".join(["i%d * s%d" % (i, i) for i in range(nd)])
proto.putln("#define %s(type, buf, %s) (type)((char*)buf + %s)" % (name, args, offset))
-
+
def buf_lookup_c_code(proto, defin, name, nd):
"""
Similar to strided lookup, but can assume that the last dimension
@@ -537,7 +537,7 @@ def buf_lookup_c_code(proto, defin, name, nd):
offset = " + ".join(["i%d * s%d" % (i, i) for i in range(nd - 1)])
proto.putln("#define %s(type, buf, %s) ((type)((char*)buf + %s) + i%d)" % (name, args, offset, nd - 1))
-
+
def buf_lookup_fortran_code(proto, defin, name, nd):
"""
Like C lookup, but the first index is optimized instead.
@@ -553,7 +553,7 @@ def buf_lookup_fortran_code(proto, defin, name, nd):
def use_py2_buffer_functions(env):
env.use_utility_code(GetAndReleaseBufferUtilityCode())
-
+
class GetAndReleaseBufferUtilityCode(object):
# Emulation of PyObject_GetBuffer and PyBuffer_Release for Python 2.
# For >= 2.6 we do double mode -- use the new buffer interface on objects
@@ -617,7 +617,7 @@ class GetAndReleaseBufferUtilityCode(object):
def mangle_dtype_name(dtype):
-    # Use prefixes to separate user-defined types from builtins
+    # Use prefixes to separate user-defined types from builtins
# (consider "typedef float unsigned_int")
if dtype.is_pyobject:
return "object"
@@ -636,7 +636,7 @@ def get_type_information_cname(code, dtype, maxdepth=None):
and return the name of the type info struct.
Structs with two floats of the same size are encoded as complex numbers.
-    One can distinguish between complex numbers declared as struct or with native
+    One can distinguish between complex numbers declared as struct or with native
encoding by inspecting to see if the fields field of the type is
filled in.
"""
@@ -723,9 +723,9 @@ def load_buffer_utility(util_code_name, context=None, **kwargs):
else:
return TempitaUtilityCode.load(util_code_name, "Buffer.c", context=context, **kwargs)
-context = dict(max_dims=Options.buffer_max_dims)
-buffer_struct_declare_code = load_buffer_utility("BufferStructDeclare", context=context)
-buffer_formats_declare_code = load_buffer_utility("BufferFormatStructs")
+context = dict(max_dims=Options.buffer_max_dims)
+buffer_struct_declare_code = load_buffer_utility("BufferStructDeclare", context=context)
+buffer_formats_declare_code = load_buffer_utility("BufferFormatStructs")
# Utility function to set the right exception
# The caller should immediately goto_error
@@ -733,8 +733,8 @@ raise_indexerror_code = load_buffer_utility("BufferIndexError")
raise_indexerror_nogil = load_buffer_utility("BufferIndexErrorNogil")
raise_buffer_fallback_code = load_buffer_utility("BufferFallbackError")
-acquire_utility_code = load_buffer_utility("BufferGetAndValidate", context=context)
-buffer_format_check_code = load_buffer_utility("BufferFormatCheck", context=context)
-
+acquire_utility_code = load_buffer_utility("BufferGetAndValidate", context=context)
+buffer_format_check_code = load_buffer_utility("BufferFormatCheck", context=context)
+
# See utility code BufferFormatFromTypeInfo
-_typeinfo_to_format_code = load_buffer_utility("TypeInfoToFormat")
+_typeinfo_to_format_code = load_buffer_utility("TypeInfoToFormat")
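
buf_lookup_strided_code above emits a C macro whose byte offset is simply the dot product of indices and per-dimension byte strides; the C and Fortran variants then exploit a contiguous last (or first) dimension to drop one multiplication. The arithmetic itself, sketched in Python:

def strided_offset(indices, strides):
    # byte offset of element (i0, i1, ...) given per-dimension byte strides
    return sum(i * s for i, s in zip(indices, strides))

# A 3x4 C-contiguous array of 8-byte items has strides (32, 8) bytes.
assert strided_offset((2, 3), (32, 8)) == 88   # row 2, col 3
# The C-contiguous lookup can treat the last stride as the item size and
# index with plain pointer arithmetic, saving one multiply per access.
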
diff --git a/contrib/tools/cython/Cython/Compiler/Builtin.py b/contrib/tools/cython/Cython/Compiler/Builtin.py
index a337246dda..5fa717507d 100644
--- a/contrib/tools/cython/Cython/Compiler/Builtin.py
+++ b/contrib/tools/cython/Cython/Compiler/Builtin.py
@@ -21,7 +21,7 @@ pyexec_globals_utility_code = UtilityCode.load("PyExecGlobals", "Builtins.c")
globals_utility_code = UtilityCode.load("Globals", "Builtins.c")
builtin_utility_code = {
- 'StopAsyncIteration': UtilityCode.load_cached("StopAsyncIteration", "Coroutine.c"),
+ 'StopAsyncIteration': UtilityCode.load_cached("StopAsyncIteration", "Coroutine.c"),
}
@@ -95,35 +95,35 @@ builtin_function_table = [
is_strict_signature = True),
BuiltinFunction('abs', "f", "f", "fabsf",
is_strict_signature = True),
- BuiltinFunction('abs', "i", "i", "abs",
- is_strict_signature = True),
- BuiltinFunction('abs', "l", "l", "labs",
- is_strict_signature = True),
- BuiltinFunction('abs', None, None, "__Pyx_abs_longlong",
- utility_code = UtilityCode.load("abs_longlong", "Builtins.c"),
- func_type = PyrexTypes.CFuncType(
- PyrexTypes.c_longlong_type, [
- PyrexTypes.CFuncTypeArg("arg", PyrexTypes.c_longlong_type, None)
- ],
- is_strict_signature = True, nogil=True)),
- ] + list(
- BuiltinFunction('abs', None, None, "/*abs_{0}*/".format(t.specialization_name()),
+ BuiltinFunction('abs', "i", "i", "abs",
+ is_strict_signature = True),
+ BuiltinFunction('abs', "l", "l", "labs",
+ is_strict_signature = True),
+ BuiltinFunction('abs', None, None, "__Pyx_abs_longlong",
+ utility_code = UtilityCode.load("abs_longlong", "Builtins.c"),
+ func_type = PyrexTypes.CFuncType(
+ PyrexTypes.c_longlong_type, [
+ PyrexTypes.CFuncTypeArg("arg", PyrexTypes.c_longlong_type, None)
+ ],
+ is_strict_signature = True, nogil=True)),
+ ] + list(
+ BuiltinFunction('abs', None, None, "/*abs_{0}*/".format(t.specialization_name()),
func_type = PyrexTypes.CFuncType(
- t,
- [PyrexTypes.CFuncTypeArg("arg", t, None)],
- is_strict_signature = True, nogil=True))
- for t in (PyrexTypes.c_uint_type, PyrexTypes.c_ulong_type, PyrexTypes.c_ulonglong_type)
- ) + list(
- BuiltinFunction('abs', None, None, "__Pyx_c_abs{0}".format(t.funcsuffix),
+ t,
+ [PyrexTypes.CFuncTypeArg("arg", t, None)],
+ is_strict_signature = True, nogil=True))
+ for t in (PyrexTypes.c_uint_type, PyrexTypes.c_ulong_type, PyrexTypes.c_ulonglong_type)
+ ) + list(
+ BuiltinFunction('abs', None, None, "__Pyx_c_abs{0}".format(t.funcsuffix),
func_type = PyrexTypes.CFuncType(
- t.real_type, [
- PyrexTypes.CFuncTypeArg("arg", t, None)
+ t.real_type, [
+ PyrexTypes.CFuncTypeArg("arg", t, None)
],
- is_strict_signature = True, nogil=True))
- for t in (PyrexTypes.c_float_complex_type,
- PyrexTypes.c_double_complex_type,
- PyrexTypes.c_longdouble_complex_type)
- ) + [
+ is_strict_signature = True, nogil=True))
+ for t in (PyrexTypes.c_float_complex_type,
+ PyrexTypes.c_double_complex_type,
+ PyrexTypes.c_longdouble_complex_type)
+ ) + [
BuiltinFunction('abs', "O", "O", "__Pyx_PyNumber_Absolute",
utility_code=UtilityCode.load("py_abs", "Builtins.c")),
#('all', "", "", ""),
@@ -153,8 +153,8 @@ builtin_function_table = [
utility_code=getattr3_utility_code),
BuiltinFunction('getattr', "OO", "O", "__Pyx_GetAttr",
utility_code=getattr_utility_code),
- BuiltinFunction('hasattr', "OO", "b", "__Pyx_HasAttr",
- utility_code = UtilityCode.load("HasAttr", "Builtins.c")),
+ BuiltinFunction('hasattr', "OO", "b", "__Pyx_HasAttr",
+ utility_code = UtilityCode.load("HasAttr", "Builtins.c")),
BuiltinFunction('hash', "O", "h", "PyObject_Hash"),
#('hex', "", "", ""),
#('id', "", "", ""),
@@ -329,18 +329,18 @@ builtin_types_table = [
("set", "PySet_Type", [BuiltinMethod("__contains__", "TO", "b", "PySequence_Contains"),
BuiltinMethod("clear", "T", "r", "PySet_Clear"),
# discard() and remove() have a special treatment for unhashable values
- BuiltinMethod("discard", "TO", "r", "__Pyx_PySet_Discard",
- utility_code=UtilityCode.load("py_set_discard", "Optimize.c")),
- BuiltinMethod("remove", "TO", "r", "__Pyx_PySet_Remove",
- utility_code=UtilityCode.load("py_set_remove", "Optimize.c")),
- # update is actually variadic (see Github issue #1645)
-# BuiltinMethod("update", "TO", "r", "__Pyx_PySet_Update",
-# utility_code=UtilityCode.load_cached("PySet_Update", "Builtins.c")),
+ BuiltinMethod("discard", "TO", "r", "__Pyx_PySet_Discard",
+ utility_code=UtilityCode.load("py_set_discard", "Optimize.c")),
+ BuiltinMethod("remove", "TO", "r", "__Pyx_PySet_Remove",
+ utility_code=UtilityCode.load("py_set_remove", "Optimize.c")),
+ # update is actually variadic (see Github issue #1645)
+# BuiltinMethod("update", "TO", "r", "__Pyx_PySet_Update",
+# utility_code=UtilityCode.load_cached("PySet_Update", "Builtins.c")),
BuiltinMethod("add", "TO", "r", "PySet_Add"),
BuiltinMethod("pop", "T", "O", "PySet_Pop")]),
("frozenset", "PyFrozenSet_Type", []),
("Exception", "((PyTypeObject*)PyExc_Exception)[0]", []),
- ("StopAsyncIteration", "((PyTypeObject*)__Pyx_PyExc_StopAsyncIteration)[0]", []),
+ ("StopAsyncIteration", "((PyTypeObject*)__Pyx_PyExc_StopAsyncIteration)[0]", []),
]
@@ -392,14 +392,14 @@ def init_builtin_types():
utility = builtin_utility_code.get(name)
if name == 'frozenset':
objstruct_cname = 'PySetObject'
- elif name == 'bytearray':
- objstruct_cname = 'PyByteArrayObject'
+ elif name == 'bytearray':
+ objstruct_cname = 'PyByteArrayObject'
elif name == 'bool':
objstruct_cname = None
elif name == 'Exception':
objstruct_cname = "PyBaseExceptionObject"
- elif name == 'StopAsyncIteration':
- objstruct_cname = "PyBaseExceptionObject"
+ elif name == 'StopAsyncIteration':
+ objstruct_cname = "PyBaseExceptionObject"
else:
objstruct_cname = 'Py%sObject' % name.capitalize()
the_type = builtin_scope.declare_builtin_type(name, cname, utility, objstruct_cname)
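
The restored block above registers several 'abs' entries with progressively wider C signatures ('i', 'l', long long, unsigned and complex specializations) ahead of the generic object fallback. A hedged sketch of the idea, not Cython's actual overload resolution, with a type-code table standing in for the signature strings:

import math

# signature code -> implementation, most specific first (illustrative only)
ABS_OVERLOADS = [
    ('i', abs),                                   # C int  -> abs()
    ('l', abs),                                   # C long -> labs() at C level
    ('C', lambda z: math.hypot(z.real, z.imag)),  # complex -> __Pyx_c_abs*
    ('O', abs),                                   # generic PyObject fallback
]

def dispatch_abs(value):
    code = {int: 'i', complex: 'C'}.get(type(value), 'O')
    for sig, impl in ABS_OVERLOADS:
        if sig in (code, 'O'):        # exact match wins; 'O' catches the rest
            return impl(value)

assert dispatch_abs(-3) == 3
assert dispatch_abs(3 + 4j) == 5.0
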
diff --git a/contrib/tools/cython/Cython/Compiler/CmdLine.py b/contrib/tools/cython/Cython/Compiler/CmdLine.py
index 9c4da0c92d..a20ab38dc2 100644
--- a/contrib/tools/cython/Cython/Compiler/CmdLine.py
+++ b/contrib/tools/cython/Cython/Compiler/CmdLine.py
@@ -153,8 +153,8 @@ def parse_command_line(args):
options.module_name = pop_arg()
elif option == '--init-suffix':
options.init_suffix = pop_arg()
- elif option == '--source-root':
- Options.source_root = pop_arg()
+ elif option == '--source-root':
+ Options.source_root = pop_arg()
elif option == '-2':
options.language_level = 2
elif option == '-3':
@@ -165,8 +165,8 @@ def parse_command_line(args):
options.capi_reexport_cincludes = True
elif option == "--fast-fail":
Options.fast_fail = True
- elif option == "--cimport-from-pyx":
- Options.cimport_from_pyx = True
+ elif option == "--cimport-from-pyx":
+ Options.cimport_from_pyx = True
elif option in ('-Werror', '--warning-errors'):
Options.warning_errors = True
elif option in ('-Wextra', '--warning-extra'):
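
parse_command_line above consumes arguments with a pop_arg() helper: a flag such as --cimport-from-pyx just sets a boolean, while a value option such as --source-root pops the next token. A minimal sketch of that hand-rolled loop (the options dict is illustrative; the real code writes into the Options module):

def parse(args):
    args = list(args)
    opts = {'source_root': None, 'cimport_from_pyx': False}

    def pop_arg():
        if not args:
            raise SystemExit('missing argument')
        return args.pop(0)

    while args:
        option = pop_arg()
        if option == '--source-root':
            opts['source_root'] = pop_arg()   # value option: consume next token
        elif option == '--cimport-from-pyx':
            opts['cimport_from_pyx'] = True   # flag option: no extra token
        else:
            raise SystemExit('unknown option: %s' % option)
    return opts

assert parse(['--source-root', '/src', '--cimport-from-pyx']) == \
    {'source_root': '/src', 'cimport_from_pyx': True}
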
diff --git a/contrib/tools/cython/Cython/Compiler/Code.pxd b/contrib/tools/cython/Cython/Compiler/Code.pxd
index 4fb342dba9..acad0c1cf4 100644
--- a/contrib/tools/cython/Cython/Compiler/Code.pxd
+++ b/contrib/tools/cython/Cython/Compiler/Code.pxd
@@ -5,25 +5,25 @@ cimport cython
from ..StringIOTree cimport StringIOTree
-cdef class UtilityCodeBase(object):
- cpdef format_code(self, code_string, replace_empty_lines=*)
-
-
-cdef class UtilityCode(UtilityCodeBase):
- cdef public object name
- cdef public object proto
- cdef public object impl
- cdef public object init
- cdef public object cleanup
- cdef public object proto_block
- cdef public object requires
- cdef public dict _cache
- cdef public list specialize_list
- cdef public object file
-
- cpdef none_or_sub(self, s, context)
-
-
+cdef class UtilityCodeBase(object):
+ cpdef format_code(self, code_string, replace_empty_lines=*)
+
+
+cdef class UtilityCode(UtilityCodeBase):
+ cdef public object name
+ cdef public object proto
+ cdef public object impl
+ cdef public object init
+ cdef public object cleanup
+ cdef public object proto_block
+ cdef public object requires
+ cdef public dict _cache
+ cdef public list specialize_list
+ cdef public object file
+
+ cpdef none_or_sub(self, s, context)
+
+
cdef class FunctionState:
cdef public set names_taken
cdef public object owner
@@ -40,7 +40,7 @@ cdef class FunctionState:
cdef public object return_from_error_cleanup_label # not used in __init__ ?
cdef public object exc_vars
- cdef public object current_except
+ cdef public object current_except
cdef public bint in_try_finally
cdef public bint can_trace
cdef public bint gil_owned
diff --git a/contrib/tools/cython/Cython/Compiler/Code.py b/contrib/tools/cython/Cython/Compiler/Code.py
index 2d09bfae53..f43c4b2b8e 100644
--- a/contrib/tools/cython/Cython/Compiler/Code.py
+++ b/contrib/tools/cython/Cython/Compiler/Code.py
@@ -7,15 +7,15 @@
from __future__ import absolute_import
import cython
-cython.declare(os=object, re=object, operator=object, textwrap=object,
- Template=object, Naming=object, Options=object, StringEncoding=object,
+cython.declare(os=object, re=object, operator=object, textwrap=object,
+ Template=object, Naming=object, Options=object, StringEncoding=object,
Utils=object, SourceDescriptor=object, StringIOTree=object,
- DebugFlags=object, basestring=object, defaultdict=object,
- closing=object, partial=object)
+ DebugFlags=object, basestring=object, defaultdict=object,
+ closing=object, partial=object)
import os
import re
-import shutil
+import shutil
import sys
import operator
import textwrap
@@ -71,42 +71,42 @@ basicsize_builtins_map = {
}
uncachable_builtins = [
- # Global/builtin names that cannot be cached because they may or may not
- # be available at import time, for various reasons:
- ## - Py3.7+
- 'breakpoint', # might deserve an implementation in Cython
- ## - Py3.4+
- '__loader__',
- '__spec__',
- ## - Py3+
- 'BlockingIOError',
- 'BrokenPipeError',
- 'ChildProcessError',
- 'ConnectionAbortedError',
- 'ConnectionError',
- 'ConnectionRefusedError',
- 'ConnectionResetError',
- 'FileExistsError',
- 'FileNotFoundError',
- 'InterruptedError',
- 'IsADirectoryError',
- 'ModuleNotFoundError',
- 'NotADirectoryError',
- 'PermissionError',
- 'ProcessLookupError',
- 'RecursionError',
- 'ResourceWarning',
- #'StopAsyncIteration', # backported
- 'TimeoutError',
- '__build_class__',
- 'ascii', # might deserve an implementation in Cython
- #'exec', # implemented in Cython
- ## - Py2.7+
- 'memoryview',
- ## - platform specific
+ # Global/builtin names that cannot be cached because they may or may not
+ # be available at import time, for various reasons:
+ ## - Py3.7+
+ 'breakpoint', # might deserve an implementation in Cython
+ ## - Py3.4+
+ '__loader__',
+ '__spec__',
+ ## - Py3+
+ 'BlockingIOError',
+ 'BrokenPipeError',
+ 'ChildProcessError',
+ 'ConnectionAbortedError',
+ 'ConnectionError',
+ 'ConnectionRefusedError',
+ 'ConnectionResetError',
+ 'FileExistsError',
+ 'FileNotFoundError',
+ 'InterruptedError',
+ 'IsADirectoryError',
+ 'ModuleNotFoundError',
+ 'NotADirectoryError',
+ 'PermissionError',
+ 'ProcessLookupError',
+ 'RecursionError',
+ 'ResourceWarning',
+ #'StopAsyncIteration', # backported
+ 'TimeoutError',
+ '__build_class__',
+ 'ascii', # might deserve an implementation in Cython
+ #'exec', # implemented in Cython
+ ## - Py2.7+
+ 'memoryview',
+ ## - platform specific
'WindowsError',
- ## - others
- '_', # e.g. used by gettext
+ ## - others
+ '_', # e.g. used by gettext
]
special_py_methods = set([
@@ -121,82 +121,82 @@ modifier_output_mapper = {
}.get
-class IncludeCode(object):
- """
- An include file and/or verbatim C code to be included in the
- generated sources.
- """
- # attributes:
- #
- # pieces {order: unicode}: pieces of C code to be generated.
- # For the included file, the key "order" is zero.
- # For verbatim include code, the "order" is the "order"
- # attribute of the original IncludeCode where this piece
- # of C code was first added. This is needed to prevent
- # duplication if the same include code is found through
- # multiple cimports.
- # location int: where to put this include in the C sources, one
- # of the constants INITIAL, EARLY, LATE
- # order int: sorting order (automatically set by increasing counter)
-
- # Constants for location. If the same include occurs with different
-    # locations, the earliest one takes precedence.
- INITIAL = 0
- EARLY = 1
- LATE = 2
-
- counter = 1 # Counter for "order"
-
- def __init__(self, include=None, verbatim=None, late=True, initial=False):
- self.order = self.counter
- type(self).counter += 1
- self.pieces = {}
-
- if include:
- if include[0] == '<' and include[-1] == '>':
- self.pieces[0] = u'#include {0}'.format(include)
- late = False # system include is never late
- else:
- self.pieces[0] = u'#include "{0}"'.format(include)
-
- if verbatim:
- self.pieces[self.order] = verbatim
-
- if initial:
- self.location = self.INITIAL
- elif late:
- self.location = self.LATE
- else:
- self.location = self.EARLY
-
- def dict_update(self, d, key):
- """
- Insert `self` in dict `d` with key `key`. If that key already
- exists, update the attributes of the existing value with `self`.
- """
- if key in d:
- other = d[key]
- other.location = min(self.location, other.location)
- other.pieces.update(self.pieces)
- else:
- d[key] = self
-
- def sortkey(self):
- return self.order
-
- def mainpiece(self):
- """
- Return the main piece of C code, corresponding to the include
- file. If there was no include file, return None.
- """
- return self.pieces.get(0)
-
- def write(self, code):
- # Write values of self.pieces dict, sorted by the keys
- for k in sorted(self.pieces):
- code.putln(self.pieces[k])
-
-
+class IncludeCode(object):
+ """
+ An include file and/or verbatim C code to be included in the
+ generated sources.
+ """
+ # attributes:
+ #
+ # pieces {order: unicode}: pieces of C code to be generated.
+ # For the included file, the key "order" is zero.
+ # For verbatim include code, the "order" is the "order"
+ # attribute of the original IncludeCode where this piece
+ # of C code was first added. This is needed to prevent
+ # duplication if the same include code is found through
+ # multiple cimports.
+ # location int: where to put this include in the C sources, one
+ # of the constants INITIAL, EARLY, LATE
+ # order int: sorting order (automatically set by increasing counter)
+
+ # Constants for location. If the same include occurs with different
+    # locations, the earliest one takes precedence.
+ INITIAL = 0
+ EARLY = 1
+ LATE = 2
+
+ counter = 1 # Counter for "order"
+
+ def __init__(self, include=None, verbatim=None, late=True, initial=False):
+ self.order = self.counter
+ type(self).counter += 1
+ self.pieces = {}
+
+ if include:
+ if include[0] == '<' and include[-1] == '>':
+ self.pieces[0] = u'#include {0}'.format(include)
+ late = False # system include is never late
+ else:
+ self.pieces[0] = u'#include "{0}"'.format(include)
+
+ if verbatim:
+ self.pieces[self.order] = verbatim
+
+ if initial:
+ self.location = self.INITIAL
+ elif late:
+ self.location = self.LATE
+ else:
+ self.location = self.EARLY
+
+ def dict_update(self, d, key):
+ """
+ Insert `self` in dict `d` with key `key`. If that key already
+ exists, update the attributes of the existing value with `self`.
+ """
+ if key in d:
+ other = d[key]
+ other.location = min(self.location, other.location)
+ other.pieces.update(self.pieces)
+ else:
+ d[key] = self
+
+ def sortkey(self):
+ return self.order
+
+ def mainpiece(self):
+ """
+ Return the main piece of C code, corresponding to the include
+ file. If there was no include file, return None.
+ """
+ return self.pieces.get(0)
+
+ def write(self, code):
+ # Write values of self.pieces dict, sorted by the keys
+ for k in sorted(self.pieces):
+ code.putln(self.pieces[k])
+
+
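
IncludeCode.dict_update above merges duplicate includes by taking the minimum of the location constants (INITIAL < EARLY < LATE, so the earliest requested placement wins) and unioning the verbatim pieces keyed by order. A tiny standalone sketch of that merge rule:

INITIAL, EARLY, LATE = 0, 1, 2

class Inc:
    def __init__(self, location, pieces):
        self.location, self.pieces = location, pieces

    def dict_update(self, d, key):
        if key in d:
            other = d[key]
            other.location = min(self.location, other.location)  # earliest wins
            other.pieces.update(self.pieces)                     # keep all pieces
        else:
            d[key] = self

includes = {}
Inc(LATE, {0: '#include "a.h"'}).dict_update(includes, 'a.h')
Inc(EARLY, {5: '/* verbatim */'}).dict_update(includes, 'a.h')
merged = includes['a.h']
assert merged.location == EARLY           # promoted from LATE
assert sorted(merged.pieces) == [0, 5]    # pieces from both cimports kept
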
def get_utility_dir():
# make this a function and not global variables:
# http://trac.cython.org/cython_trac/ticket/475
@@ -256,15 +256,15 @@ class UtilityCodeBase(object):
if type == 'proto':
utility[0] = code
elif type == 'impl':
- utility[1] = code
+ utility[1] = code
else:
- all_tags = utility[2]
+ all_tags = utility[2]
if KEYWORDS_MUST_BE_BYTES:
type = type.encode('ASCII')
all_tags[type] = code
if tags:
- all_tags = utility[2]
+ all_tags = utility[2]
for name, values in tags.items():
if KEYWORDS_MUST_BE_BYTES:
name = name.encode('ASCII')
@@ -295,7 +295,7 @@ class UtilityCodeBase(object):
with closing(Utils.open_source_file(filename, encoding='UTF-8')) as f:
all_lines = f.readlines()
- utilities = defaultdict(lambda: [None, None, {}])
+ utilities = defaultdict(lambda: [None, None, {}])
lines = []
tags = defaultdict(set)
utility = type = None
@@ -369,7 +369,7 @@ class UtilityCodeBase(object):
from_file = files[0]
utilities = cls.load_utilities_from_file(from_file)
- proto, impl, tags = utilities[util_code_name]
+ proto, impl, tags = utilities[util_code_name]
if tags:
orig_kwargs = kwargs.copy()
@@ -388,7 +388,7 @@ class UtilityCodeBase(object):
elif not values:
values = None
elif len(values) == 1:
- values = list(values)[0]
+ values = list(values)[0]
kwargs[name] = values
if proto is not None:
@@ -453,7 +453,7 @@ class UtilityCode(UtilityCodeBase):
hashes/equals by instance
proto C prototypes
- impl implementation code
+ impl implementation code
init code to call on module initialization
requires utility code dependencies
proto_block the place in the resulting file where the prototype should
@@ -531,22 +531,22 @@ class UtilityCode(UtilityCodeBase):
def inject_string_constants(self, impl, output):
"""Replace 'PYIDENT("xyz")' by a constant Python identifier cname.
"""
- if 'PYIDENT(' not in impl and 'PYUNICODE(' not in impl:
+ if 'PYIDENT(' not in impl and 'PYUNICODE(' not in impl:
return False, impl
replacements = {}
def externalise(matchobj):
- key = matchobj.groups()
+ key = matchobj.groups()
try:
- cname = replacements[key]
+ cname = replacements[key]
except KeyError:
- str_type, name = key
- cname = replacements[key] = output.get_py_string_const(
- StringEncoding.EncodedString(name), identifier=str_type == 'IDENT').cname
+ str_type, name = key
+ cname = replacements[key] = output.get_py_string_const(
+ StringEncoding.EncodedString(name), identifier=str_type == 'IDENT').cname
return cname
- impl = re.sub(r'PY(IDENT|UNICODE)\("([^"]+)"\)', externalise, impl)
- assert 'PYIDENT(' not in impl and 'PYUNICODE(' not in impl
+ impl = re.sub(r'PY(IDENT|UNICODE)\("([^"]+)"\)', externalise, impl)
+ assert 'PYIDENT(' not in impl and 'PYUNICODE(' not in impl
return True, impl
def inject_unbound_methods(self, impl, output):
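
Both inject_string_constants above and inject_unbound_methods below lean on the same trick: re.sub() with a callable replacement whose closure memoizes each distinct match, so repeated PYIDENT(...) or CALL_UNBOUND_METHOD(...) occurrences share one generated cname. A self-contained sketch of the memoizing-callback pattern (the minted cnames are illustrative):

import re

def externalise_idents(impl):
    replacements = {}

    def externalise(matchobj):
        name = matchobj.group(1)
        try:
            return replacements[name]
        except KeyError:
            # first sighting: mint a stable cname and reuse it afterwards
            cname = replacements[name] = '__pyx_n_s_%d' % len(replacements)
            return cname

    return re.sub(r'PYIDENT\("([^"]+)"\)', externalise, impl)

src = 'f(PYIDENT("spam")); g(PYIDENT("spam")); h(PYIDENT("eggs"));'
print(externalise_idents(src))
# f(__pyx_n_s_0); g(__pyx_n_s_0); h(__pyx_n_s_1);
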
@@ -556,18 +556,18 @@ class UtilityCode(UtilityCodeBase):
return False, impl
def externalise(matchobj):
- type_cname, method_name, obj_cname, args = matchobj.groups()
- args = [arg.strip() for arg in args[1:].split(',')] if args else []
- assert len(args) < 3, "CALL_UNBOUND_METHOD() does not support %d call arguments" % len(args)
- return output.cached_unbound_method_call_code(obj_cname, type_cname, method_name, args)
-
- impl = re.sub(
- r'CALL_UNBOUND_METHOD\('
- r'([a-zA-Z_]+),' # type cname
- r'\s*"([^"]+)",' # method name
- r'\s*([^),]+)' # object cname
- r'((?:,\s*[^),]+)*)' # args*
- r'\)', externalise, impl)
+ type_cname, method_name, obj_cname, args = matchobj.groups()
+ args = [arg.strip() for arg in args[1:].split(',')] if args else []
+ assert len(args) < 3, "CALL_UNBOUND_METHOD() does not support %d call arguments" % len(args)
+ return output.cached_unbound_method_call_code(obj_cname, type_cname, method_name, args)
+
+ impl = re.sub(
+ r'CALL_UNBOUND_METHOD\('
+ r'([a-zA-Z_]+),' # type cname
+ r'\s*"([^"]+)",' # method name
+ r'\s*([^),]+)' # object cname
+ r'((?:,\s*[^),]+)*)' # args*
+ r'\)', externalise, impl)
assert 'CALL_UNBOUND_METHOD(' not in impl
return True, impl
@@ -679,7 +679,7 @@ class LazyUtilityCode(UtilityCodeBase):
available. Useful when you only have 'env' but not 'code'.
"""
__name__ = '<lazy>'
- requires = None
+ requires = None
def __init__(self, callback):
self.callback = callback
@@ -718,7 +718,7 @@ class FunctionState(object):
self.in_try_finally = 0
self.exc_vars = None
- self.current_except = None
+ self.current_except = None
self.can_trace = False
self.gil_owned = True
@@ -764,8 +764,8 @@ class FunctionState(object):
label += '_' + name
return label
- def new_yield_label(self, expr_type='yield'):
- label = self.new_label('resume_from_%s' % expr_type)
+ def new_yield_label(self, expr_type='yield'):
+ label = self.new_label('resume_from_%s' % expr_type)
num_and_label = (len(self.yield_labels) + 1, label)
self.yield_labels.append(num_and_label)
return num_and_label
@@ -1131,7 +1131,7 @@ class GlobalState(object):
'global_var',
'string_decls',
'decls',
- 'late_includes',
+ 'late_includes',
'all_the_rest',
'pystring_table',
'cached_builtins',
@@ -1399,8 +1399,8 @@ class GlobalState(object):
prefix = Naming.const_prefix
return "%s%s" % (prefix, name_suffix)
- def get_cached_unbound_method(self, type_cname, method_name):
- key = (type_cname, method_name)
+ def get_cached_unbound_method(self, type_cname, method_name):
+ key = (type_cname, method_name)
try:
cname = self.cached_cmethods[key]
except KeyError:
@@ -1408,18 +1408,18 @@ class GlobalState(object):
'umethod', '%s_%s' % (type_cname, method_name))
return cname
- def cached_unbound_method_call_code(self, obj_cname, type_cname, method_name, arg_cnames):
- # admittedly, not the best place to put this method, but it is reused by UtilityCode and ExprNodes ...
- utility_code_name = "CallUnboundCMethod%d" % len(arg_cnames)
- self.use_utility_code(UtilityCode.load_cached(utility_code_name, "ObjectHandling.c"))
- cache_cname = self.get_cached_unbound_method(type_cname, method_name)
- args = [obj_cname] + arg_cnames
- return "__Pyx_%s(&%s, %s)" % (
- utility_code_name,
- cache_cname,
- ', '.join(args),
- )
-
+ def cached_unbound_method_call_code(self, obj_cname, type_cname, method_name, arg_cnames):
+ # admittedly, not the best place to put this method, but it is reused by UtilityCode and ExprNodes ...
+ utility_code_name = "CallUnboundCMethod%d" % len(arg_cnames)
+ self.use_utility_code(UtilityCode.load_cached(utility_code_name, "ObjectHandling.c"))
+ cache_cname = self.get_cached_unbound_method(type_cname, method_name)
+ args = [obj_cname] + arg_cnames
+ return "__Pyx_%s(&%s, %s)" % (
+ utility_code_name,
+ cache_cname,
+ ', '.join(args),
+ )
+
def add_cached_builtin_decl(self, entry):
if entry.is_builtin and entry.is_const:
if self.should_declare(entry.cname, entry):
@@ -1472,7 +1472,7 @@ class GlobalState(object):
decl = self.parts['decls']
init = self.parts['init_globals']
cnames = []
- for (type_cname, method_name), cname in sorted(self.cached_cmethods.items()):
+ for (type_cname, method_name), cname in sorted(self.cached_cmethods.items()):
cnames.append(cname)
method_name_cname = self.get_interned_identifier(StringEncoding.EncodedString(method_name)).cname
decl.putln('static __Pyx_CachedCFunction %s = {0, &%s, 0, 0, 0};' % (
@@ -1606,13 +1606,13 @@ class GlobalState(object):
#
def lookup_filename(self, source_desc):
- entry = source_desc.get_filenametable_entry()
+ entry = source_desc.get_filenametable_entry()
try:
- index = self.filename_table[entry]
+ index = self.filename_table[entry]
except KeyError:
index = len(self.filename_list)
self.filename_list.append(source_desc)
- self.filename_table[entry] = index
+ self.filename_table[entry] = index
return index
def commented_file_contents(self, source_desc):
@@ -1693,7 +1693,7 @@ class CCodeWriter(object):
as well
- labels, temps, exc_vars: One must construct a scope in which these can
exist by calling enter_cfunc_scope/exit_cfunc_scope (these are for
- sanity checking and forward compatibility). Created insertion points
+ sanity checking and forward compatibility). Created insertion points
      loses this scope and cannot access it.
- marker: Not copied to insertion point
- filename_table, filename_list, input_file_contents: All codewriters
@@ -1807,7 +1807,7 @@ class CCodeWriter(object):
# Functions delegated to function scope
def new_label(self, name=None): return self.funcstate.new_label(name)
def new_error_label(self): return self.funcstate.new_error_label()
- def new_yield_label(self, *args): return self.funcstate.new_yield_label(*args)
+ def new_yield_label(self, *args): return self.funcstate.new_yield_label(*args)
def get_loop_labels(self): return self.funcstate.get_loop_labels()
def set_loop_labels(self, labels): return self.funcstate.set_loop_labels(labels)
def new_loop_labels(self): return self.funcstate.new_loop_labels()
@@ -1918,7 +1918,7 @@ class CCodeWriter(object):
tmp_path = '%s.tmp%s' % (path, os.getpid())
with closing(Utils.open_new_file(tmp_path)) as f:
f.write(code)
- shutil.move(tmp_path, path)
+ shutil.move(tmp_path, path)
code = '#include "%s"\n' % path
self.put(code)
@@ -2093,12 +2093,12 @@ class CCodeWriter(object):
if entry.type.is_pyobject:
self.putln("__Pyx_XGIVEREF(%s);" % self.entry_as_pyobject(entry))
- def put_var_incref(self, entry, nanny=True):
+ def put_var_incref(self, entry, nanny=True):
if entry.type.is_pyobject:
- if nanny:
- self.putln("__Pyx_INCREF(%s);" % self.entry_as_pyobject(entry))
- else:
- self.putln("Py_INCREF(%s);" % self.entry_as_pyobject(entry))
+ if nanny:
+ self.putln("__Pyx_INCREF(%s);" % self.entry_as_pyobject(entry))
+ else:
+ self.putln("Py_INCREF(%s);" % self.entry_as_pyobject(entry))
def put_var_xincref(self, entry):
if entry.type.is_pyobject:
@@ -2122,8 +2122,8 @@ class CCodeWriter(object):
self.put_xdecref_memoryviewslice(cname, have_gil=have_gil)
return
- prefix = '__Pyx' if nanny else 'Py'
- X = 'X' if null_check else ''
+ prefix = '__Pyx' if nanny else 'Py'
+ X = 'X' if null_check else ''
if clear:
if clear_before_decref:
@@ -2147,12 +2147,12 @@ class CCodeWriter(object):
if entry.type.is_pyobject:
self.putln("__Pyx_XDECREF(%s);" % self.entry_as_pyobject(entry))
- def put_var_xdecref(self, entry, nanny=True):
+ def put_var_xdecref(self, entry, nanny=True):
if entry.type.is_pyobject:
- if nanny:
- self.putln("__Pyx_XDECREF(%s);" % self.entry_as_pyobject(entry))
- else:
- self.putln("Py_XDECREF(%s);" % self.entry_as_pyobject(entry))
+ if nanny:
+ self.putln("__Pyx_XDECREF(%s);" % self.entry_as_pyobject(entry))
+ else:
+ self.putln("Py_XDECREF(%s);" % self.entry_as_pyobject(entry))
def put_var_decref_clear(self, entry):
self._put_var_decref_clear(entry, null_check=False)
@@ -2273,30 +2273,30 @@ class CCodeWriter(object):
"""
self.globalstate.use_utility_code(
UtilityCode.load_cached("ForceInitThreads", "ModuleSetupCode.c"))
- if self.globalstate.directives['fast_gil']:
- self.globalstate.use_utility_code(UtilityCode.load_cached("FastGil", "ModuleSetupCode.c"))
- else:
- self.globalstate.use_utility_code(UtilityCode.load_cached("NoFastGil", "ModuleSetupCode.c"))
+ if self.globalstate.directives['fast_gil']:
+ self.globalstate.use_utility_code(UtilityCode.load_cached("FastGil", "ModuleSetupCode.c"))
+ else:
+ self.globalstate.use_utility_code(UtilityCode.load_cached("NoFastGil", "ModuleSetupCode.c"))
self.putln("#ifdef WITH_THREAD")
if not variable:
variable = '__pyx_gilstate_save'
if declare_gilstate:
self.put("PyGILState_STATE ")
- self.putln("%s = __Pyx_PyGILState_Ensure();" % variable)
+ self.putln("%s = __Pyx_PyGILState_Ensure();" % variable)
self.putln("#endif")
def put_release_ensured_gil(self, variable=None):
"""
Releases the GIL, corresponds to `put_ensure_gil`.
"""
- if self.globalstate.directives['fast_gil']:
- self.globalstate.use_utility_code(UtilityCode.load_cached("FastGil", "ModuleSetupCode.c"))
- else:
- self.globalstate.use_utility_code(UtilityCode.load_cached("NoFastGil", "ModuleSetupCode.c"))
+ if self.globalstate.directives['fast_gil']:
+ self.globalstate.use_utility_code(UtilityCode.load_cached("FastGil", "ModuleSetupCode.c"))
+ else:
+ self.globalstate.use_utility_code(UtilityCode.load_cached("NoFastGil", "ModuleSetupCode.c"))
if not variable:
variable = '__pyx_gilstate_save'
self.putln("#ifdef WITH_THREAD")
- self.putln("__Pyx_PyGILState_Release(%s);" % variable)
+ self.putln("__Pyx_PyGILState_Release(%s);" % variable)
self.putln("#endif")
def put_acquire_gil(self, variable=None):
@@ -2304,12 +2304,12 @@ class CCodeWriter(object):
Acquire the GIL. The thread's thread state must have been initialized
by a previous `put_release_gil`
"""
- if self.globalstate.directives['fast_gil']:
- self.globalstate.use_utility_code(UtilityCode.load_cached("FastGil", "ModuleSetupCode.c"))
- else:
- self.globalstate.use_utility_code(UtilityCode.load_cached("NoFastGil", "ModuleSetupCode.c"))
+ if self.globalstate.directives['fast_gil']:
+ self.globalstate.use_utility_code(UtilityCode.load_cached("FastGil", "ModuleSetupCode.c"))
+ else:
+ self.globalstate.use_utility_code(UtilityCode.load_cached("NoFastGil", "ModuleSetupCode.c"))
self.putln("#ifdef WITH_THREAD")
- self.putln("__Pyx_FastGIL_Forget();")
+ self.putln("__Pyx_FastGIL_Forget();")
if variable:
self.putln('_save = %s;' % variable)
self.putln("Py_BLOCK_THREADS")
@@ -2317,16 +2317,16 @@ class CCodeWriter(object):
def put_release_gil(self, variable=None):
"Release the GIL, corresponds to `put_acquire_gil`."
- if self.globalstate.directives['fast_gil']:
- self.globalstate.use_utility_code(UtilityCode.load_cached("FastGil", "ModuleSetupCode.c"))
- else:
- self.globalstate.use_utility_code(UtilityCode.load_cached("NoFastGil", "ModuleSetupCode.c"))
+ if self.globalstate.directives['fast_gil']:
+ self.globalstate.use_utility_code(UtilityCode.load_cached("FastGil", "ModuleSetupCode.c"))
+ else:
+ self.globalstate.use_utility_code(UtilityCode.load_cached("NoFastGil", "ModuleSetupCode.c"))
self.putln("#ifdef WITH_THREAD")
self.putln("PyThreadState *_save;")
self.putln("Py_UNBLOCK_THREADS")
if variable:
self.putln('%s = _save;' % variable)
- self.putln("__Pyx_FastGIL_Remember();")
+ self.putln("__Pyx_FastGIL_Remember();")
self.putln("#endif")
def declare_gilstate(self):
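
All four GIL helpers above open with the same directive check, registering either the FastGil or the NoFastGil utility code before emitting their #ifdef WITH_THREAD block. A condensed sketch of that shared prologue, with plain callables standing in for the globalstate machinery:

# Sketch of the directive-driven utility-code choice repeated in
# put_ensure_gil / put_release_ensured_gil / put_acquire_gil / put_release_gil.
def use_gil_utility_code(directives, use_utility_code, load_cached):
    name = "FastGil" if directives['fast_gil'] else "NoFastGil"
    use_utility_code(load_cached(name, "ModuleSetupCode.c"))

loaded = []
use_gil_utility_code({'fast_gil': False}, loaded.append,
                     lambda name, file: (name, file))
assert loaded == [("NoFastGil", "ModuleSetupCode.c")]
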
@@ -2410,7 +2410,7 @@ class CCodeWriter(object):
def put_finish_refcount_context(self):
self.putln("__Pyx_RefNannyFinishContext();")
- def put_add_traceback(self, qualified_name, include_cline=True):
+ def put_add_traceback(self, qualified_name, include_cline=True):
"""
Build a Python traceback for propagating exceptions.
@@ -2418,7 +2418,7 @@ class CCodeWriter(object):
"""
format_tuple = (
qualified_name,
- Naming.clineno_cname if include_cline else 0,
+ Naming.clineno_cname if include_cline else 0,
Naming.lineno_cname,
Naming.filename_cname,
)
@@ -2486,7 +2486,7 @@ class CCodeWriter(object):
self.putln(" #define unlikely(x) __builtin_expect(!!(x), 0)")
self.putln("#endif")
-
+
class PyrexCodeWriter(object):
# f file output file
# level int indentation level
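
put_add_traceback above builds a fixed four-slot format tuple; when include_cline is False, the C line number is simply reported as 0. A sketch of how the slots combine into the emitted call — the __pyx_* cnames are illustrative placeholders for the Naming constants, not guaranteed to match them exactly:

# Sketch: argument assembly for the traceback call in put_add_traceback.
CLINENO, LINENO, FILENAME = "__pyx_clineno", "__pyx_lineno", "__pyx_filename"

def add_traceback_call(qualified_name, include_cline=True):
    fmt = (qualified_name,
           CLINENO if include_cline else 0,  # optionally drop the C line
           LINENO,
           FILENAME)
    return '__Pyx_AddTraceback("%s", %s, %s, %s);' % fmt

assert add_traceback_call("mod.f", include_cline=False) == \
    '__Pyx_AddTraceback("mod.f", 0, __pyx_lineno, __pyx_filename);'
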
diff --git a/contrib/tools/cython/Cython/Compiler/CodeGeneration.py b/contrib/tools/cython/Cython/Compiler/CodeGeneration.py
index 15d445cb07..e64049c7f5 100644
--- a/contrib/tools/cython/Cython/Compiler/CodeGeneration.py
+++ b/contrib/tools/cython/Cython/Compiler/CodeGeneration.py
@@ -12,7 +12,7 @@ class ExtractPxdCode(VisitorTransform):
The result is a tuple (StatListNode, ModuleScope), i.e.
everything that is needed from the pxd after it is processed.
- A purer approach would be to separately compile the pxd code,
+ A purer approach would be to separately compile the pxd code,
but the result would have to be slightly more sophisticated
than pure strings (functions + wanted interned strings +
wanted utility code + wanted cached objects) so for now this
diff --git a/contrib/tools/cython/Cython/Compiler/CythonScope.py b/contrib/tools/cython/Cython/Compiler/CythonScope.py
index 09f2bb3cfe..1c25d1a6b4 100644
--- a/contrib/tools/cython/Cython/Compiler/CythonScope.py
+++ b/contrib/tools/cython/Cython/Compiler/CythonScope.py
@@ -26,10 +26,10 @@ class CythonScope(ModuleScope):
cname='<error>')
entry.in_cinclude = True
- def is_cpp(self):
- # Allow C++ utility code in C++ contexts.
- return self.context.cpp
-
+ def is_cpp(self):
+ # Allow C++ utility code in C++ contexts.
+ return self.context.cpp
+
def lookup_type(self, name):
# This function should go away when types are all first-level objects.
type = parse_basic_type(name)
diff --git a/contrib/tools/cython/Cython/Compiler/Errors.py b/contrib/tools/cython/Cython/Compiler/Errors.py
index 66fe05487c..9761b52c32 100644
--- a/contrib/tools/cython/Cython/Compiler/Errors.py
+++ b/contrib/tools/cython/Cython/Compiler/Errors.py
@@ -10,7 +10,7 @@ except ImportError:
any_string_type = (bytes, str)
import sys
-from contextlib import contextmanager
+from contextlib import contextmanager
from ..Utils import open_new_file
from . import DebugFlags
@@ -146,8 +146,8 @@ def close_listing_file():
listing_file.close()
listing_file = None
-def report_error(err, use_stack=True):
- if error_stack and use_stack:
+def report_error(err, use_stack=True):
+ if error_stack and use_stack:
error_stack[-1].append(err)
else:
global num_errors
@@ -229,34 +229,34 @@ def warn_once(position, message, level=0):
error_stack = []
-
+
def hold_errors():
error_stack.append([])
-
+
def release_errors(ignore=False):
held_errors = error_stack.pop()
if not ignore:
for err in held_errors:
report_error(err)
-
+
def held_errors():
return error_stack[-1]
-# same as context manager:
-
-@contextmanager
-def local_errors(ignore=False):
- errors = []
- error_stack.append(errors)
- try:
- yield errors
- finally:
- release_errors(ignore=ignore)
-
-
+# same as context manager:
+
+@contextmanager
+def local_errors(ignore=False):
+ errors = []
+ error_stack.append(errors)
+ try:
+ yield errors
+ finally:
+ release_errors(ignore=ignore)
+
+
# this module needs a redesign to support parallel cythonisation, but
# for now, the following works at least in sequential compiler runs
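
local_errors above packages the hold_errors/release_errors pair as a context manager: it pushes a fresh list onto error_stack, collects whatever report_error sees inside the block, and on exit pops the list and either re-reports or drops the held errors. A self-contained sketch of the same stack discipline, with global reporting reduced to a raise:

# Sketch of the error-stack pattern behind hold_errors/local_errors above.
from contextlib import contextmanager

error_stack = []

def report_error(err, use_stack=True):
    if error_stack and use_stack:
        error_stack[-1].append(err)      # held for the innermost scope
    else:
        raise RuntimeError(err)          # stand-in for immediate reporting

@contextmanager
def local_errors(ignore=False):
    errors = []
    error_stack.append(errors)
    try:
        yield errors
    finally:
        error_stack.pop()
        if not ignore:
            for err in errors:
                report_error(err)        # re-report once the scope is gone

with local_errors(ignore=True) as errs:
    report_error("not a type")           # swallowed; inspectable via errs
assert errs == ["not a type"]
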
diff --git a/contrib/tools/cython/Cython/Compiler/ExprNodes.py b/contrib/tools/cython/Cython/Compiler/ExprNodes.py
index 4feeb2a037..4a402f8126 100644
--- a/contrib/tools/cython/Cython/Compiler/ExprNodes.py
+++ b/contrib/tools/cython/Cython/Compiler/ExprNodes.py
@@ -7,7 +7,7 @@ from __future__ import absolute_import
import cython
cython.declare(error=object, warning=object, warn_once=object, InternalError=object,
CompileError=object, UtilityCode=object, TempitaUtilityCode=object,
- StringEncoding=object, operator=object, local_errors=object, report_error=object,
+ StringEncoding=object, operator=object, local_errors=object, report_error=object,
Naming=object, Nodes=object, PyrexTypes=object, py_object_type=object,
list_type=object, tuple_type=object, set_type=object, dict_type=object,
unicode_type=object, str_type=object, bytes_type=object, type_type=object,
@@ -16,19 +16,19 @@ cython.declare(error=object, warning=object, warn_once=object, InternalError=obj
bytearray_type=object, slice_type=object, _py_int_types=object,
IS_PYTHON3=cython.bint)
-import re
+import re
import sys
import copy
import os.path
import operator
-from .Errors import (
- error, warning, InternalError, CompileError, report_error, local_errors)
+from .Errors import (
+ error, warning, InternalError, CompileError, report_error, local_errors)
from .Code import UtilityCode, TempitaUtilityCode
from . import StringEncoding
from . import Naming
from . import Nodes
-from .Nodes import Node, utility_code_for_imports, analyse_type_annotation
+from .Nodes import Node, utility_code_for_imports, analyse_type_annotation
from . import PyrexTypes
from .PyrexTypes import py_object_type, c_long_type, typecast, error_type, \
unspecified_type
@@ -47,7 +47,7 @@ from .Pythran import (to_pythran, is_pythran_supported_type, is_pythran_supporte
is_pythran_expr, pythran_func_type, pythran_binop_type, pythran_unaryop_type, has_np_pythran,
pythran_indexing_code, pythran_indexing_type, is_pythran_supported_node_or_none, pythran_type,
pythran_is_numpy_func_supported, pythran_get_func_include_file, pythran_functor)
-from .PyrexTypes import PythranExpr
+from .PyrexTypes import PythranExpr
try:
from __builtin__ import basestring
@@ -306,18 +306,18 @@ class ExprNode(Node):
# Cached result of subexpr_nodes()
# use_managed_ref boolean use ref-counted temps/assignments/etc.
# result_is_used boolean indicates that the result will be dropped and the
- # is_numpy_attribute boolean Is a Numpy module attribute
+ # is_numpy_attribute boolean Is a Numpy module attribute
# result_code/temp_result can safely be set to None
- # annotation ExprNode or None PEP526 annotation for names or expressions
+ # annotation ExprNode or None PEP526 annotation for names or expressions
result_ctype = None
type = None
- annotation = None
+ annotation = None
temp_code = None
old_temp = None # error checker for multiple frees etc.
use_managed_ref = True # can be set by optimisation transforms
result_is_used = True
- is_numpy_attribute = False
+ is_numpy_attribute = False
# The Analyse Expressions phase for expressions is split
# into two sub-phases:
@@ -498,13 +498,13 @@ class ExprNode(Node):
else:
return self.calculate_result_code()
- def pythran_result(self, type_=None):
- if is_pythran_supported_node_or_none(self):
- return to_pythran(self)
-
-        assert type_ is not None
- return to_pythran(self, type_)
-
+ def pythran_result(self, type_=None):
+ if is_pythran_supported_node_or_none(self):
+ return to_pythran(self)
+
+        assert type_ is not None
+ return to_pythran(self, type_)
+
def is_c_result_required(self):
"""
Subtypes may return False here if result temp allocation can be skipped.
@@ -927,19 +927,19 @@ class ExprNode(Node):
elif not src_type.is_error:
error(self.pos,
"Cannot convert '%s' to memoryviewslice" % (src_type,))
- else:
- if src.type.writable_needed:
- dst_type.writable_needed = True
- if not src.type.conforms_to(dst_type, broadcast=self.is_memview_broadcast,
- copying=self.is_memview_copy_assignment):
- if src.type.dtype.same_as(dst_type.dtype):
- msg = "Memoryview '%s' not conformable to memoryview '%s'."
- tup = src.type, dst_type
- else:
- msg = "Different base types for memoryviews (%s, %s)"
- tup = src.type.dtype, dst_type.dtype
-
- error(self.pos, msg % tup)
+ else:
+ if src.type.writable_needed:
+ dst_type.writable_needed = True
+ if not src.type.conforms_to(dst_type, broadcast=self.is_memview_broadcast,
+ copying=self.is_memview_copy_assignment):
+ if src.type.dtype.same_as(dst_type.dtype):
+ msg = "Memoryview '%s' not conformable to memoryview '%s'."
+ tup = src.type, dst_type
+ else:
+ msg = "Different base types for memoryviews (%s, %s)"
+ tup = src.type.dtype, dst_type.dtype
+
+ error(self.pos, msg % tup)
elif dst_type.is_pyobject:
if not src.type.is_pyobject:
@@ -950,16 +950,16 @@ class ExprNode(Node):
if not src.type.subtype_of(dst_type):
if src.constant_result is not None:
src = PyTypeTestNode(src, dst_type, env)
- elif is_pythran_expr(dst_type) and is_pythran_supported_type(src.type):
- # We let the compiler decide whether this is valid
- return src
- elif is_pythran_expr(src.type):
- if is_pythran_supported_type(dst_type):
-                    # Match the case where a pythran expr is assigned to a value, or vice versa.
- # We let the C++ compiler decide whether this is valid or not!
- return src
- # Else, we need to convert the Pythran expression to a Python object
- src = CoerceToPyTypeNode(src, env, type=dst_type)
+ elif is_pythran_expr(dst_type) and is_pythran_supported_type(src.type):
+ # We let the compiler decide whether this is valid
+ return src
+ elif is_pythran_expr(src.type):
+ if is_pythran_supported_type(dst_type):
+                    # Match the case where a pythran expr is assigned to a value, or vice versa.
+ # We let the C++ compiler decide whether this is valid or not!
+ return src
+ # Else, we need to convert the Pythran expression to a Python object
+ src = CoerceToPyTypeNode(src, env, type=dst_type)
elif src.type.is_pyobject:
if used_as_reference and dst_type.is_cpp_class:
warning(
@@ -1141,13 +1141,13 @@ class NoneNode(PyConstNode):
def may_be_none(self):
return True
- def coerce_to(self, dst_type, env):
- if not (dst_type.is_pyobject or dst_type.is_memoryviewslice or dst_type.is_error):
- # Catch this error early and loudly.
- error(self.pos, "Cannot assign None to %s" % dst_type)
- return super(NoneNode, self).coerce_to(dst_type, env)
+ def coerce_to(self, dst_type, env):
+ if not (dst_type.is_pyobject or dst_type.is_memoryviewslice or dst_type.is_error):
+ # Catch this error early and loudly.
+ error(self.pos, "Cannot assign None to %s" % dst_type)
+ return super(NoneNode, self).coerce_to(dst_type, env)
+
-
class EllipsisNode(PyConstNode):
# '...' in a subscript list.
@@ -1432,28 +1432,28 @@ def _analyse_name_as_type(name, pos, env):
type = PyrexTypes.parse_basic_type(name)
if type is not None:
return type
-
+
global_entry = env.global_scope().lookup(name)
if global_entry and global_entry.type and (
global_entry.type.is_extension_type
or global_entry.type.is_struct_or_union
or global_entry.type.is_builtin_type
or global_entry.type.is_cpp_class):
- return global_entry.type
-
+ return global_entry.type
+
from .TreeFragment import TreeFragment
- with local_errors(ignore=True):
- pos = (pos[0], pos[1], pos[2]-7)
- try:
- declaration = TreeFragment(u"sizeof(%s)" % name, name=pos[0].filename, initial_pos=pos)
- except CompileError:
- pass
- else:
- sizeof_node = declaration.root.stats[0].expr
- if isinstance(sizeof_node, SizeofTypeNode):
- sizeof_node = sizeof_node.analyse_types(env)
- if isinstance(sizeof_node, SizeofTypeNode):
- return sizeof_node.arg_type
+ with local_errors(ignore=True):
+ pos = (pos[0], pos[1], pos[2]-7)
+ try:
+ declaration = TreeFragment(u"sizeof(%s)" % name, name=pos[0].filename, initial_pos=pos)
+ except CompileError:
+ pass
+ else:
+ sizeof_node = declaration.root.stats[0].expr
+ if isinstance(sizeof_node, SizeofTypeNode):
+ sizeof_node = sizeof_node.analyse_types(env)
+ if isinstance(sizeof_node, SizeofTypeNode):
+ return sizeof_node.arg_type
return None
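
_analyse_name_as_type above tries three strategies in order: a basic-type parse, a global-scope lookup for extension/struct/builtin/C++ types, and finally a speculative parse of "sizeof(<name>)" under local_errors(ignore=True) so failed attempts stay silent. A schematic of that fallback chain, with the parser callables as assumed stand-ins:

# Schematic of the fallback chain in _analyse_name_as_type above.
# Each strategy returns a "type" or None; errors in the last attempt are muted.
def analyse_name_as_type(name, parse_basic, lookup_global, parse_sizeof):
    t = parse_basic(name)            # 1) builtin/basic C type name?
    if t is not None:
        return t
    t = lookup_global(name)          # 2) known extension/struct/cpp type?
    if t is not None:
        return t
    try:
        return parse_sizeof(name)    # 3) parse "sizeof(name)" speculatively
    except Exception:
        return None                  # stand-in for local_errors(ignore=True)

assert analyse_name_as_type(
    "double", lambda n: "c_double" if n == "double" else None,
    lambda n: None, lambda n: None) == "c_double"
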
@@ -1507,7 +1507,7 @@ class BytesNode(ConstNode):
node.type = Builtin.bytes_type
else:
self.check_for_coercion_error(dst_type, env, fail=True)
- return node
+ return node
elif dst_type in (PyrexTypes.c_char_ptr_type, PyrexTypes.c_const_char_ptr_type):
node.type = dst_type
return node
@@ -1516,10 +1516,10 @@ class BytesNode(ConstNode):
else PyrexTypes.c_char_ptr_type)
return CastNode(node, dst_type)
elif dst_type.assignable_from(PyrexTypes.c_char_ptr_type):
- # Exclude the case of passing a C string literal into a non-const C++ string.
- if not dst_type.is_cpp_class or dst_type.is_const:
- node.type = dst_type
- return node
+ # Exclude the case of passing a C string literal into a non-const C++ string.
+ if not dst_type.is_cpp_class or dst_type.is_const:
+ node.type = dst_type
+ return node
# We still need to perform normal coerce_to processing on the
# result, because we might be coercing to an extension type,
@@ -1740,15 +1740,15 @@ class IdentifierStringNode(StringNode):
class ImagNode(AtomicExprNode):
# Imaginary number literal
#
- # value string imaginary part (float value)
+ # value string imaginary part (float value)
type = PyrexTypes.c_double_complex_type
def calculate_constant_result(self):
- self.constant_result = complex(0.0, float(self.value))
+ self.constant_result = complex(0.0, float(self.value))
def compile_time_value(self, denv):
- return complex(0.0, float(self.value))
+ return complex(0.0, float(self.value))
def analyse_types(self, env):
self.type.create_declaration_utility_code(env)
@@ -1763,7 +1763,7 @@ class ImagNode(AtomicExprNode):
node = ImagNode(self.pos, value=self.value)
if dst_type.is_pyobject:
node.is_temp = 1
- node.type = Builtin.complex_type
+ node.type = Builtin.complex_type
# We still need to perform normal coerce_to processing on the
# result, because we might be coercing to an extension type,
# in which case a type test node will be needed.
@@ -1802,7 +1802,7 @@ class NewExprNode(AtomicExprNode):
self.type = error_type
return
self.cpp_check(env)
- constructor = type.get_constructor(self.pos)
+ constructor = type.get_constructor(self.pos)
self.class_type = type
self.entry = constructor
self.type = constructor.type
@@ -1916,34 +1916,34 @@ class NameNode(AtomicExprNode):
return super(NameNode, self).coerce_to(dst_type, env)
- def declare_from_annotation(self, env, as_target=False):
- """Implements PEP 526 annotation typing in a fairly relaxed way.
-
- Annotations are ignored for global variables, Python class attributes and already declared variables.
- String literals are allowed and ignored.
- The ambiguous Python types 'int' and 'long' are ignored and the 'cython.int' form must be used instead.
- """
- if not env.directives['annotation_typing']:
- return
- if env.is_module_scope or env.is_py_class_scope:
- # annotations never create global cdef names and Python classes don't support them anyway
- return
- name = self.name
- if self.entry or env.lookup_here(name) is not None:
- # already declared => ignore annotation
- return
-
- annotation = self.annotation
- if annotation.is_string_literal:
- # name: "description" => not a type, but still a declared variable or attribute
- atype = None
- else:
- _, atype = analyse_type_annotation(annotation, env)
- if atype is None:
- atype = unspecified_type if as_target and env.directives['infer_types'] != False else py_object_type
- self.entry = env.declare_var(name, atype, self.pos, is_cdef=not as_target)
- self.entry.annotation = annotation
-
+ def declare_from_annotation(self, env, as_target=False):
+ """Implements PEP 526 annotation typing in a fairly relaxed way.
+
+ Annotations are ignored for global variables, Python class attributes and already declared variables.
+ String literals are allowed and ignored.
+ The ambiguous Python types 'int' and 'long' are ignored and the 'cython.int' form must be used instead.
+ """
+ if not env.directives['annotation_typing']:
+ return
+ if env.is_module_scope or env.is_py_class_scope:
+ # annotations never create global cdef names and Python classes don't support them anyway
+ return
+ name = self.name
+ if self.entry or env.lookup_here(name) is not None:
+ # already declared => ignore annotation
+ return
+
+ annotation = self.annotation
+ if annotation.is_string_literal:
+ # name: "description" => not a type, but still a declared variable or attribute
+ atype = None
+ else:
+ _, atype = analyse_type_annotation(annotation, env)
+ if atype is None:
+ atype = unspecified_type if as_target and env.directives['infer_types'] != False else py_object_type
+ self.entry = env.declare_var(name, atype, self.pos, is_cdef=not as_target)
+ self.entry.annotation = annotation
+
def analyse_as_module(self, env):
# Try to interpret this as a reference to a cimported module.
# Returns the module scope, or None.
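
declare_from_annotation above applies PEP 526 in a deliberately relaxed way: it bails out when the annotation_typing directive is off, in module or Python class scope, or for already-declared names, and string annotations declare the variable without a type. A condensed sketch of the final type choice, with the directive plumbing reduced to booleans:

# Condensed type choice from declare_from_annotation above.
def annotated_type(annotation_is_string, analysed_type, as_target, infer_types):
    atype = None if annotation_is_string else analysed_type
    if atype is None:
        # untyped: leave it to inference for assignment targets, else object
        atype = "unspecified" if (as_target and infer_types) else "object"
    return atype

assert annotated_type(True, None, as_target=True, infer_types=True) == "unspecified"
assert annotated_type(False, "c_int", as_target=True, infer_types=True) == "c_int"
assert annotated_type(False, None, as_target=False, infer_types=True) == "object"
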
@@ -1983,9 +1983,9 @@ class NameNode(AtomicExprNode):
def analyse_target_declaration(self, env):
if not self.entry:
self.entry = env.lookup_here(self.name)
- if not self.entry and self.annotation is not None:
- # name : type = ...
- self.declare_from_annotation(env, as_target=True)
+ if not self.entry and self.annotation is not None:
+ # name : type = ...
+ self.declare_from_annotation(env, as_target=True)
if not self.entry:
if env.directives['warn.undeclared']:
warning(self.pos, "implicit declaration of '%s'" % self.name, 1)
@@ -1996,27 +1996,27 @@ class NameNode(AtomicExprNode):
self.entry = env.declare_var(self.name, type, self.pos)
if self.entry.is_declared_generic:
self.result_ctype = py_object_type
- if self.entry.as_module:
- # cimported modules namespace can shadow actual variables
- self.entry.is_variable = 1
+ if self.entry.as_module:
+ # cimported modules namespace can shadow actual variables
+ self.entry.is_variable = 1
def analyse_types(self, env):
self.initialized_check = env.directives['initializedcheck']
entry = self.entry
- if entry is None:
- entry = env.lookup(self.name)
- if not entry:
- entry = env.declare_builtin(self.name, self.pos)
- if entry and entry.is_builtin and entry.is_const:
- self.is_literal = True
- if not entry:
- self.type = PyrexTypes.error_type
- return self
- self.entry = entry
- entry.used = 1
- if entry.type.is_buffer:
- from . import Buffer
- Buffer.used_buffer_aux_vars(entry)
+ if entry is None:
+ entry = env.lookup(self.name)
+ if not entry:
+ entry = env.declare_builtin(self.name, self.pos)
+ if entry and entry.is_builtin and entry.is_const:
+ self.is_literal = True
+ if not entry:
+ self.type = PyrexTypes.error_type
+ return self
+ self.entry = entry
+ entry.used = 1
+ if entry.type.is_buffer:
+ from . import Buffer
+ Buffer.used_buffer_aux_vars(entry)
self.analyse_rvalue_entry(env)
return self
@@ -2101,20 +2101,20 @@ class NameNode(AtomicExprNode):
py_entry.is_pyglobal = True
py_entry.scope = self.entry.scope
self.entry = py_entry
- elif not (entry.is_const or entry.is_variable or
- entry.is_builtin or entry.is_cfunction or
- entry.is_cpp_class):
- if self.entry.as_variable:
- self.entry = self.entry.as_variable
- elif not self.is_cython_module:
- error(self.pos, "'%s' is not a constant, variable or function identifier" % self.name)
-
- def is_cimported_module_without_shadow(self, env):
- if self.is_cython_module or self.cython_attribute:
- return False
- entry = self.entry or env.lookup(self.name)
- return entry.as_module and not entry.is_variable
-
+ elif not (entry.is_const or entry.is_variable or
+ entry.is_builtin or entry.is_cfunction or
+ entry.is_cpp_class):
+ if self.entry.as_variable:
+ self.entry = self.entry.as_variable
+ elif not self.is_cython_module:
+ error(self.pos, "'%s' is not a constant, variable or function identifier" % self.name)
+
+ def is_cimported_module_without_shadow(self, env):
+ if self.is_cython_module or self.cython_attribute:
+ return False
+ entry = self.entry or env.lookup(self.name)
+ return entry.as_module and not entry.is_variable
+
def is_simple(self):
# If it's not a C variable, it'll be in a temp.
return 1
@@ -2153,11 +2153,11 @@ class NameNode(AtomicExprNode):
def check_const(self):
entry = self.entry
- if entry is not None and not (
- entry.is_const or
- entry.is_cfunction or
- entry.is_builtin or
- entry.type.is_const):
+ if entry is not None and not (
+ entry.is_const or
+ entry.is_cfunction or
+ entry.is_builtin or
+ entry.type.is_const):
self.not_const()
return False
return True
@@ -2301,8 +2301,8 @@ class NameNode(AtomicExprNode):
setter = 'PyDict_SetItem'
namespace = Naming.moddict_cname
elif entry.is_pyclass_attr:
- code.globalstate.use_utility_code(UtilityCode.load_cached("SetNameInClass", "ObjectHandling.c"))
- setter = '__Pyx_SetNameInClass'
+ code.globalstate.use_utility_code(UtilityCode.load_cached("SetNameInClass", "ObjectHandling.c"))
+ setter = '__Pyx_SetNameInClass'
else:
assert False, repr(entry)
code.put_error_if_neg(
@@ -2379,11 +2379,11 @@ class NameNode(AtomicExprNode):
code.putln('%s = %s;' % (self.result(), result))
else:
result = rhs.result_as(self.ctype())
-
- if is_pythran_expr(self.type):
- code.putln('new (&%s) decltype(%s){%s};' % (self.result(), self.result(), result))
+
+ if is_pythran_expr(self.type):
+ code.putln('new (&%s) decltype(%s){%s};' % (self.result(), self.result(), result))
elif result != self.result():
- code.putln('%s = %s;' % (self.result(), result))
+ code.putln('%s = %s;' % (self.result(), result))
if debug_disposal_code:
print("NameNode.generate_assignment_code:")
print("...generating post-assignment code for %s" % rhs)
@@ -2833,7 +2833,7 @@ class IteratorNode(ExprNode):
code.putln("if (unlikely(!%s)) {" % result_name)
code.putln("PyObject* exc_type = PyErr_Occurred();")
code.putln("if (exc_type) {")
- code.putln("if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();")
+ code.putln("if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();")
code.putln("else %s" % code.error_goto(self.pos))
code.putln("}")
code.putln("break;")
@@ -2967,18 +2967,18 @@ class WithExitCallNode(ExprNode):
# The __exit__() call of a 'with' statement. Used in both the
# except and finally clauses.
- # with_stat WithStatNode the surrounding 'with' statement
- # args TupleNode or ResultStatNode the exception info tuple
- # await_expr AwaitExprNode the await expression of an 'async with' statement
+ # with_stat WithStatNode the surrounding 'with' statement
+ # args TupleNode or ResultStatNode the exception info tuple
+ # await_expr AwaitExprNode the await expression of an 'async with' statement
- subexprs = ['args', 'await_expr']
+ subexprs = ['args', 'await_expr']
test_if_run = True
- await_expr = None
+ await_expr = None
def analyse_types(self, env):
self.args = self.args.analyse_types(env)
- if self.await_expr:
- self.await_expr = self.await_expr.analyse_types(env)
+ if self.await_expr:
+ self.await_expr = self.await_expr.analyse_types(env)
self.type = PyrexTypes.c_bint_type
self.is_temp = True
return self
@@ -3005,12 +3005,12 @@ class WithExitCallNode(ExprNode):
code.putln(code.error_goto_if_null(result_var, self.pos))
code.put_gotref(result_var)
- if self.await_expr:
+ if self.await_expr:
# FIXME: result_var temp currently leaks into the closure
- self.await_expr.generate_evaluation_code(code, source_cname=result_var, decref_source=True)
- code.putln("%s = %s;" % (result_var, self.await_expr.py_result()))
- self.await_expr.generate_post_assignment_code(code)
- self.await_expr.free_temps(code)
+ self.await_expr.generate_evaluation_code(code, source_cname=result_var, decref_source=True)
+ code.putln("%s = %s;" % (result_var, self.await_expr.py_result()))
+ self.await_expr.generate_post_assignment_code(code)
+ self.await_expr.free_temps(code)
if self.result_is_used:
self.allocate_temp_result(code)
@@ -3170,27 +3170,27 @@ class JoinedStrNode(ExprNode):
is_ascii = False
if isinstance(node, UnicodeNode):
try:
- # most strings will be ASCII or at least Latin-1
+ # most strings will be ASCII or at least Latin-1
node.value.encode('iso8859-1')
max_char_value = '255'
node.value.encode('us-ascii')
is_ascii = True
except UnicodeEncodeError:
- if max_char_value != '255':
- # not ISO8859-1 => check BMP limit
- max_char = max(map(ord, node.value))
- if max_char < 0xD800:
- # BMP-only, no surrogate pairs used
- max_char_value = '65535'
- ulength = str(len(node.value))
- elif max_char >= 65536:
-                        # clearly outside of BMP, and not on a 16-bit Unicode system
- max_char_value = '1114111'
- ulength = str(len(node.value))
- else:
- # not really worth implementing a check for surrogate pairs here
- # drawback: C code can differ when generating on Py2 with 2-byte Unicode
- pass
+ if max_char_value != '255':
+ # not ISO8859-1 => check BMP limit
+ max_char = max(map(ord, node.value))
+ if max_char < 0xD800:
+ # BMP-only, no surrogate pairs used
+ max_char_value = '65535'
+ ulength = str(len(node.value))
+ elif max_char >= 65536:
+                        # clearly outside of BMP, and not on a 16-bit Unicode system
+ max_char_value = '1114111'
+ ulength = str(len(node.value))
+ else:
+ # not really worth implementing a check for surrogate pairs here
+ # drawback: C code can differ when generating on Py2 with 2-byte Unicode
+ pass
else:
ulength = str(len(node.value))
elif isinstance(node, FormattedValueNode) and node.value.type.is_numeric:
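
The encode-and-bucket logic above estimates the widest character an f-string fragment can contribute: a cheap Latin-1 encode first, then a split on the actual maximum code point. A standalone sketch of the bucketing (the original also tracks string lengths, omitted here):

# Sketch of the per-literal max-character bucketing used above.
def max_char_bucket(s):
    try:
        s.encode('iso8859-1')
        return 255                 # ASCII/Latin-1: one byte per character
    except UnicodeEncodeError:
        max_char = max(map(ord, s))
        if max_char < 0xD800:
            return 65535           # BMP only, no surrogate pairs used
        if max_char >= 65536:
            return 1114111         # clearly outside the BMP
        return None                # surrogate range: left undecided above

assert max_char_bucket("abc") == 255
assert max_char_bucket("\u20ac") == 65535        # U+20AC, inside the BMP
assert max_char_bucket("\U0001F600") == 1114111  # astral code point
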
@@ -3260,7 +3260,7 @@ class FormattedValueNode(ExprNode):
self.format_spec = self.format_spec.analyse_types(env).coerce_to_pyobject(env)
if self.c_format_spec is None:
self.value = self.value.coerce_to_pyobject(env)
- if not self.format_spec and (not self.conversion_char or self.conversion_char == 's'):
+ if not self.format_spec and (not self.conversion_char or self.conversion_char == 's'):
if self.value.type is unicode_type and not self.value.may_be_none():
# value is definitely a unicode string and we don't format it any special
return self.value
@@ -3390,7 +3390,7 @@ class _IndexingBaseNode(ExprNode):
# in most cases, indexing will return a safe reference to an object in a container,
# so we consider the result safe if the base object is
return self.base.is_ephemeral() or self.base.type in (
- basestring_type, str_type, bytes_type, bytearray_type, unicode_type)
+ basestring_type, str_type, bytes_type, bytearray_type, unicode_type)
def check_const_addr(self):
return self.base.check_const_addr() and self.index.check_const()
@@ -3450,7 +3450,7 @@ class IndexNode(_IndexingBaseNode):
return False
if isinstance(self.index, SliceNode):
# slicing!
- if base_type in (bytes_type, bytearray_type, str_type, unicode_type,
+ if base_type in (bytes_type, bytearray_type, str_type, unicode_type,
basestring_type, list_type, tuple_type):
return False
return ExprNode.may_be_none(self)
@@ -3562,10 +3562,10 @@ class IndexNode(_IndexingBaseNode):
if index_func is not None:
return index_func.type.return_type
- if is_pythran_expr(base_type) and is_pythran_expr(index_type):
- index_with_type = (self.index, index_type)
- return PythranExpr(pythran_indexing_type(base_type, [index_with_type]))
-
+ if is_pythran_expr(base_type) and is_pythran_expr(index_type):
+ index_with_type = (self.index, index_type)
+ return PythranExpr(pythran_indexing_type(base_type, [index_with_type]))
+
# may be slicing or indexing, we don't know
if base_type in (unicode_type, str_type):
# these types always returns their own type on Python indexing/slicing
@@ -3657,14 +3657,14 @@ class IndexNode(_IndexingBaseNode):
def analyse_as_pyobject(self, env, is_slice, getting, setting):
base_type = self.base.type
- if self.index.type.is_unicode_char and base_type is not dict_type:
- # TODO: eventually fold into case below and remove warning, once people have adapted their code
- warning(self.pos,
- "Item lookup of unicode character codes now always converts to a Unicode string. "
- "Use an explicit C integer cast to get back the previous integer lookup behaviour.", level=1)
- self.index = self.index.coerce_to_pyobject(env)
- self.is_temp = 1
- elif self.index.type.is_int and base_type is not dict_type:
+ if self.index.type.is_unicode_char and base_type is not dict_type:
+ # TODO: eventually fold into case below and remove warning, once people have adapted their code
+ warning(self.pos,
+ "Item lookup of unicode character codes now always converts to a Unicode string. "
+ "Use an explicit C integer cast to get back the previous integer lookup behaviour.", level=1)
+ self.index = self.index.coerce_to_pyobject(env)
+ self.is_temp = 1
+ elif self.index.type.is_int and base_type is not dict_type:
if (getting
and (base_type in (list_type, tuple_type, bytearray_type))
and (not self.index.type.signed
@@ -3691,7 +3691,7 @@ class IndexNode(_IndexingBaseNode):
else:
# not using 'uchar' to enable fast and safe error reporting as '-1'
self.type = PyrexTypes.c_int_type
- elif is_slice and base_type in (bytes_type, bytearray_type, str_type, unicode_type, list_type, tuple_type):
+ elif is_slice and base_type in (bytes_type, bytearray_type, str_type, unicode_type, list_type, tuple_type):
self.type = base_type
else:
item_type = None
@@ -3753,9 +3753,9 @@ class IndexNode(_IndexingBaseNode):
if base_type.templates is None:
error(self.pos, "Can only parameterize template functions.")
self.type = error_type
- elif self.type_indices is None:
- # Error recorded earlier.
- self.type = error_type
+ elif self.type_indices is None:
+ # Error recorded earlier.
+ self.type = error_type
elif len(base_type.templates) != len(self.type_indices):
error(self.pos, "Wrong number of template arguments: expected %s, got %s" % (
(len(base_type.templates), len(self.type_indices))))
@@ -3792,45 +3792,45 @@ class IndexNode(_IndexingBaseNode):
else:
indices = [self.index]
- base = self.base
- base_type = base.type
+ base = self.base
+ base_type = base.type
replacement_node = None
if base_type.is_memoryviewslice:
# memoryviewslice indexing or slicing
from . import MemoryView
- if base.is_memview_slice:
- # For memory views, "view[i][j]" is the same as "view[i, j]" => use the latter for speed.
- merged_indices = base.merged_indices(indices)
- if merged_indices is not None:
- base = base.base
- base_type = base.type
- indices = merged_indices
+ if base.is_memview_slice:
+ # For memory views, "view[i][j]" is the same as "view[i, j]" => use the latter for speed.
+ merged_indices = base.merged_indices(indices)
+ if merged_indices is not None:
+ base = base.base
+ base_type = base.type
+ indices = merged_indices
have_slices, indices, newaxes = MemoryView.unellipsify(indices, base_type.ndim)
if have_slices:
- replacement_node = MemoryViewSliceNode(self.pos, indices=indices, base=base)
+ replacement_node = MemoryViewSliceNode(self.pos, indices=indices, base=base)
else:
- replacement_node = MemoryViewIndexNode(self.pos, indices=indices, base=base)
- elif base_type.is_buffer or base_type.is_pythran_expr:
- if base_type.is_pythran_expr or len(indices) == base_type.ndim:
- # Buffer indexing
- is_buffer_access = True
- indices = [index.analyse_types(env) for index in indices]
- if base_type.is_pythran_expr:
- do_replacement = all(
- index.type.is_int or index.is_slice or index.type.is_pythran_expr
- for index in indices)
- if do_replacement:
-                        for i, index in enumerate(indices):
- if index.is_slice:
- index = SliceIntNode(index.pos, start=index.start, stop=index.stop, step=index.step)
- index = index.analyse_types(env)
- indices[i] = index
- else:
- do_replacement = all(index.type.is_int for index in indices)
- if do_replacement:
- replacement_node = BufferIndexNode(self.pos, indices=indices, base=base)
- # On cloning, indices is cloned. Otherwise, unpack index into indices.
- assert not isinstance(self.index, CloneNode)
+ replacement_node = MemoryViewIndexNode(self.pos, indices=indices, base=base)
+ elif base_type.is_buffer or base_type.is_pythran_expr:
+ if base_type.is_pythran_expr or len(indices) == base_type.ndim:
+ # Buffer indexing
+ is_buffer_access = True
+ indices = [index.analyse_types(env) for index in indices]
+ if base_type.is_pythran_expr:
+ do_replacement = all(
+ index.type.is_int or index.is_slice or index.type.is_pythran_expr
+ for index in indices)
+ if do_replacement:
+                        for i, index in enumerate(indices):
+ if index.is_slice:
+ index = SliceIntNode(index.pos, start=index.start, stop=index.stop, step=index.step)
+ index = index.analyse_types(env)
+ indices[i] = index
+ else:
+ do_replacement = all(index.type.is_int for index in indices)
+ if do_replacement:
+ replacement_node = BufferIndexNode(self.pos, indices=indices, base=base)
+ # On cloning, indices is cloned. Otherwise, unpack index into indices.
+ assert not isinstance(self.index, CloneNode)
if replacement_node is not None:
replacement_node = replacement_node.analyse_types(env, getting)
@@ -3995,8 +3995,8 @@ class IndexNode(_IndexingBaseNode):
if not self.is_temp:
# all handled in self.calculate_result_code()
return
-
- utility_code = None
+
+ utility_code = None
if self.type.is_pyobject:
error_value = 'NULL'
if self.index.type.is_int:
@@ -4006,38 +4006,38 @@ class IndexNode(_IndexingBaseNode):
function = "__Pyx_GetItemInt_Tuple"
else:
function = "__Pyx_GetItemInt"
- utility_code = TempitaUtilityCode.load_cached("GetItemInt", "ObjectHandling.c")
+ utility_code = TempitaUtilityCode.load_cached("GetItemInt", "ObjectHandling.c")
else:
if self.base.type is dict_type:
function = "__Pyx_PyDict_GetItem"
- utility_code = UtilityCode.load_cached("DictGetItem", "ObjectHandling.c")
- elif self.base.type is py_object_type and self.index.type in (str_type, unicode_type):
- # obj[str] is probably doing a dict lookup
- function = "__Pyx_PyObject_Dict_GetItem"
- utility_code = UtilityCode.load_cached("DictGetItem", "ObjectHandling.c")
- else:
- function = "__Pyx_PyObject_GetItem"
+ utility_code = UtilityCode.load_cached("DictGetItem", "ObjectHandling.c")
+ elif self.base.type is py_object_type and self.index.type in (str_type, unicode_type):
+ # obj[str] is probably doing a dict lookup
+ function = "__Pyx_PyObject_Dict_GetItem"
+ utility_code = UtilityCode.load_cached("DictGetItem", "ObjectHandling.c")
+ else:
+ function = "__Pyx_PyObject_GetItem"
code.globalstate.use_utility_code(
- TempitaUtilityCode.load_cached("GetItemInt", "ObjectHandling.c"))
- utility_code = UtilityCode.load_cached("ObjectGetItem", "ObjectHandling.c")
+ TempitaUtilityCode.load_cached("GetItemInt", "ObjectHandling.c"))
+ utility_code = UtilityCode.load_cached("ObjectGetItem", "ObjectHandling.c")
elif self.type.is_unicode_char and self.base.type is unicode_type:
assert self.index.type.is_int
function = "__Pyx_GetItemInt_Unicode"
error_value = '(Py_UCS4)-1'
- utility_code = UtilityCode.load_cached("GetItemIntUnicode", "StringTools.c")
+ utility_code = UtilityCode.load_cached("GetItemIntUnicode", "StringTools.c")
elif self.base.type is bytearray_type:
assert self.index.type.is_int
assert self.type.is_int
function = "__Pyx_GetItemInt_ByteArray"
error_value = '-1'
- utility_code = UtilityCode.load_cached("GetItemIntByteArray", "StringTools.c")
+ utility_code = UtilityCode.load_cached("GetItemIntByteArray", "StringTools.c")
elif not (self.base.type.is_cpp_class and self.exception_check):
assert False, "unexpected type %s and base type %s for indexing" % (
self.type, self.base.type)
- if utility_code is not None:
- code.globalstate.use_utility_code(utility_code)
-
+ if utility_code is not None:
+ code.globalstate.use_utility_code(utility_code)
+
if self.index.type.is_int:
index_code = self.index.result()
else:
@@ -4219,7 +4219,7 @@ class BufferIndexNode(_IndexingBaseNode):
indexing and slicing subclasses
"""
# self.indices are already analyzed
- if not self.base.is_name and not is_pythran_expr(self.base.type):
+ if not self.base.is_name and not is_pythran_expr(self.base.type):
error(self.pos, "Can only index buffer variables")
self.type = error_type
return self
@@ -4238,15 +4238,15 @@ class BufferIndexNode(_IndexingBaseNode):
return self
def analyse_buffer_index(self, env, getting):
- if is_pythran_expr(self.base.type):
- index_with_type_list = [(idx, idx.type) for idx in self.indices]
- self.type = PythranExpr(pythran_indexing_type(self.base.type, index_with_type_list))
- else:
- self.base = self.base.coerce_to_simple(env)
- self.type = self.base.type.dtype
+ if is_pythran_expr(self.base.type):
+ index_with_type_list = [(idx, idx.type) for idx in self.indices]
+ self.type = PythranExpr(pythran_indexing_type(self.base.type, index_with_type_list))
+ else:
+ self.base = self.base.coerce_to_simple(env)
+ self.type = self.base.type.dtype
self.buffer_type = self.base.type
- if getting and (self.type.is_pyobject or self.type.is_pythran_expr):
+ if getting and (self.type.is_pyobject or self.type.is_pythran_expr):
self.is_temp = True
def analyse_assignment(self, rhs):
@@ -4275,24 +4275,24 @@ class BufferIndexNode(_IndexingBaseNode):
base = base.arg
return base.type.get_entry(base)
- def get_index_in_temp(self, code, ivar):
- ret = code.funcstate.allocate_temp(
- PyrexTypes.widest_numeric_type(
- ivar.type,
- PyrexTypes.c_ssize_t_type if ivar.type.signed else PyrexTypes.c_size_t_type),
- manage_ref=False)
- code.putln("%s = %s;" % (ret, ivar.result()))
- return ret
-
+ def get_index_in_temp(self, code, ivar):
+ ret = code.funcstate.allocate_temp(
+ PyrexTypes.widest_numeric_type(
+ ivar.type,
+ PyrexTypes.c_ssize_t_type if ivar.type.signed else PyrexTypes.c_size_t_type),
+ manage_ref=False)
+ code.putln("%s = %s;" % (ret, ivar.result()))
+ return ret
+
def buffer_lookup_code(self, code):
"""
ndarray[1, 2, 3] and memslice[1, 2, 3]
"""
- if self.in_nogil_context:
- if self.is_buffer_access or self.is_memview_index:
- if code.globalstate.directives['boundscheck']:
- warning(self.pos, "Use boundscheck(False) for faster access", level=1)
-
+ if self.in_nogil_context:
+ if self.is_buffer_access or self.is_memview_index:
+ if code.globalstate.directives['boundscheck']:
+ warning(self.pos, "Use boundscheck(False) for faster access", level=1)
+
# Assign indices to temps of at least (s)size_t to allow further index calculations.
        self.index_temps = index_temps = [self.get_index_in_temp(code, ivar) for ivar in self.indices]
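
get_index_in_temp above copies every index into a temp of at least (s)size_t so that later offset arithmetic happens at pointer width regardless of the index's declared C type. A sketch of the width rule, with PyrexTypes.widest_numeric_type reduced to an illustrative rank table:

# Sketch: widening a buffer index to at least (s)size_t, as in get_index_in_temp.
RANK = {"int": 1, "unsigned int": 1, "ssize_t": 2, "size_t": 2, "long long": 3}

def index_temp_type(index_type, signed):
    floor = "ssize_t" if signed else "size_t"   # pointer-sized lower bound
    return index_type if RANK[index_type] >= RANK[floor] else floor

assert index_temp_type("int", signed=True) == "ssize_t"
assert index_temp_type("long long", signed=True) == "long long"
assert index_temp_type("unsigned int", signed=False) == "size_t"
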
@@ -4322,27 +4322,27 @@ class BufferIndexNode(_IndexingBaseNode):
rhs.free_temps(code)
def generate_buffer_setitem_code(self, rhs, code, op=""):
- base_type = self.base.type
- if is_pythran_expr(base_type) and is_pythran_supported_type(rhs.type):
- obj = code.funcstate.allocate_temp(PythranExpr(pythran_type(self.base.type)), manage_ref=False)
-        # We have to do this because Pythran objects must be declared
-        # at the beginning of the function.
-        # Indeed, Cython uses "goto" statements for error management, and
-        # RAII doesn't work with that kind of construction.
-        # Moreover, the way Pythran expressions are made, they don't
-        # support move-assignment easily.
-        # Thus, we explicitly destroy and then placement-new the objects
-        # in this case.
- code.putln("__Pyx_call_destructor(%s);" % obj)
- code.putln("new (&%s) decltype(%s){%s};" % (obj, obj, self.base.pythran_result()))
- code.putln("%s%s %s= %s;" % (
- obj,
- pythran_indexing_code(self.indices),
- op,
- rhs.pythran_result()))
+ base_type = self.base.type
+ if is_pythran_expr(base_type) and is_pythran_supported_type(rhs.type):
+ obj = code.funcstate.allocate_temp(PythranExpr(pythran_type(self.base.type)), manage_ref=False)
+        # We have to do this because Pythran objects must be declared
+        # at the beginning of the function.
+        # Indeed, Cython uses "goto" statements for error management, and
+        # RAII doesn't work with that kind of construction.
+        # Moreover, the way Pythran expressions are made, they don't
+        # support move-assignment easily.
+        # Thus, we explicitly destroy and then placement-new the objects
+        # in this case.
+ code.putln("__Pyx_call_destructor(%s);" % obj)
+ code.putln("new (&%s) decltype(%s){%s};" % (obj, obj, self.base.pythran_result()))
+ code.putln("%s%s %s= %s;" % (
+ obj,
+ pythran_indexing_code(self.indices),
+ op,
+ rhs.pythran_result()))
code.funcstate.release_temp(obj)
- return
-
+ return
+
# Used from generate_assignment_code and InPlaceAssignmentNode
buffer_entry, ptrexpr = self.buffer_lookup_code(code)
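
Because Cython's goto-based error handling defeats C++ RAII, the Pythran branch above ends the temp's lifetime explicitly and placement-news it before the indexed (possibly in-place) store. A sketch of the emitted line sequence, where every name and expression is a placeholder:

# Sketch of the C lines emitted by the Pythran branch of
# generate_buffer_setitem_code above; all names below are placeholders.
def pythran_setitem_lines(obj, base_expr, index_code, rhs_expr, op=""):
    return [
        "__Pyx_call_destructor(%s);" % obj,                     # end old lifetime
        "new (&%s) decltype(%s){%s};" % (obj, obj, base_expr),  # rebuild in place
        "%s%s %s= %s;" % (obj, index_code, op, rhs_expr),       # indexed store
    ]

assert pythran_setitem_lines("t", "base", "(i,j)", "rhs", op="+")[2] == "t(i,j) += rhs;"
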
@@ -4364,15 +4364,15 @@ class BufferIndexNode(_IndexingBaseNode):
code.putln("*%s %s= %s;" % (ptrexpr, op, rhs.result()))
def generate_result_code(self, code):
- if is_pythran_expr(self.base.type):
- res = self.result()
- code.putln("__Pyx_call_destructor(%s);" % res)
- code.putln("new (&%s) decltype(%s){%s%s};" % (
- res,
- res,
- self.base.pythran_result(),
- pythran_indexing_code(self.indices)))
- return
+ if is_pythran_expr(self.base.type):
+ res = self.result()
+ code.putln("__Pyx_call_destructor(%s);" % res)
+ code.putln("new (&%s) decltype(%s){%s%s};" % (
+ res,
+ res,
+ self.base.pythran_result(),
+ pythran_indexing_code(self.indices)))
+ return
buffer_entry, self.buffer_ptr_code = self.buffer_lookup_code(code)
if self.type.is_pyobject:
# is_temp is True, so must pull out value and incref it.
@@ -4398,15 +4398,15 @@ class MemoryViewIndexNode(BufferIndexNode):
# memoryviewslice indexing or slicing
from . import MemoryView
- self.is_pythran_mode = has_np_pythran(env)
+ self.is_pythran_mode = has_np_pythran(env)
indices = self.indices
have_slices, indices, newaxes = MemoryView.unellipsify(indices, self.base.type.ndim)
- if not getting:
- self.writable_needed = True
- if self.base.is_name or self.base.is_attribute:
- self.base.entry.type.writable_needed = True
-
+ if not getting:
+ self.writable_needed = True
+ if self.base.is_name or self.base.is_attribute:
+ self.base.entry.type.writable_needed = True
+
self.memslice_index = (not newaxes and len(indices) == self.base.type.ndim)
axes = []
@@ -4554,37 +4554,37 @@ class MemoryViewSliceNode(MemoryViewIndexNode):
else:
return MemoryCopySlice(self.pos, self)
- def merged_indices(self, indices):
- """Return a new list of indices/slices with 'indices' merged into the current ones
- according to slicing rules.
-        This is used to implement "view[i][j]" => "view[i, j]".
- Return None if the indices cannot (easily) be merged at compile time.
- """
- if not indices:
- return None
- # NOTE: Need to evaluate "self.original_indices" here as they might differ from "self.indices".
- new_indices = self.original_indices[:]
- indices = indices[:]
- for i, s in enumerate(self.original_indices):
- if s.is_slice:
- if s.start.is_none and s.stop.is_none and s.step.is_none:
- # Full slice found, replace by index.
- new_indices[i] = indices[0]
- indices.pop(0)
- if not indices:
- return new_indices
- else:
- # Found something non-trivial, e.g. a partial slice.
- return None
- elif not s.type.is_int:
- # Not a slice, not an integer index => could be anything...
- return None
- if indices:
- if len(new_indices) + len(indices) > self.base.type.ndim:
- return None
- new_indices += indices
- return new_indices
-
+ def merged_indices(self, indices):
+ """Return a new list of indices/slices with 'indices' merged into the current ones
+ according to slicing rules.
+        This is used to implement "view[i][j]" => "view[i, j]".
+ Return None if the indices cannot (easily) be merged at compile time.
+ """
+ if not indices:
+ return None
+ # NOTE: Need to evaluate "self.original_indices" here as they might differ from "self.indices".
+ new_indices = self.original_indices[:]
+ indices = indices[:]
+ for i, s in enumerate(self.original_indices):
+ if s.is_slice:
+ if s.start.is_none and s.stop.is_none and s.step.is_none:
+ # Full slice found, replace by index.
+ new_indices[i] = indices[0]
+ indices.pop(0)
+ if not indices:
+ return new_indices
+ else:
+ # Found something non-trivial, e.g. a partial slice.
+ return None
+ elif not s.type.is_int:
+ # Not a slice, not an integer index => could be anything...
+ return None
+ if indices:
+ if len(new_indices) + len(indices) > self.base.type.ndim:
+ return None
+ new_indices += indices
+ return new_indices
+
def is_simple(self):
if self.is_ellipsis_noop:
# TODO: fix SimpleCallNode.is_simple()
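
merged_indices above rewrites chained subscripts like view[i][j] into a single multi-dimensional view[i, j] whenever every slice already present is the trivial full slice. A simplified, list-based version of the same merge rule, with ':' standing for a full slice and ints for indices:

# Simplified merge rule behind merged_indices above: replace full ':' slices
# with the new indices left-to-right; bail out on anything non-trivial.
def merge_indices(original, new, ndim):
    if not new:
        return None
    merged, new = list(original), list(new)
    for i, s in enumerate(original):
        if s == ':':                      # full slice: consume one new index
            merged[i] = new.pop(0)
            if not new:
                return merged
        elif not isinstance(s, int):
            return None                   # partial slice etc.: give up
    if len(merged) + len(new) > ndim:
        return None
    return merged + new

assert merge_indices([':', ':'], [1], ndim=2) == [1, ':']
assert merge_indices([0], [1], ndim=2) == [0, 1]
assert merge_indices(['0:5'], [1], ndim=2) is None
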
@@ -4757,7 +4757,7 @@ class SliceIndexNode(ExprNode):
return bytes_type
elif base_type.is_pyunicode_ptr:
return unicode_type
- elif base_type in (bytes_type, bytearray_type, str_type, unicode_type,
+ elif base_type in (bytes_type, bytearray_type, str_type, unicode_type,
basestring_type, list_type, tuple_type):
return base_type
elif base_type.is_ptr or base_type.is_array:
@@ -4822,13 +4822,13 @@ class SliceIndexNode(ExprNode):
def analyse_types(self, env, getting=True):
self.base = self.base.analyse_types(env)
- if self.base.type.is_buffer or self.base.type.is_pythran_expr or self.base.type.is_memoryviewslice:
+ if self.base.type.is_buffer or self.base.type.is_pythran_expr or self.base.type.is_memoryviewslice:
none_node = NoneNode(self.pos)
index = SliceNode(self.pos,
start=self.start or none_node,
stop=self.stop or none_node,
step=none_node)
- index_node = IndexNode(self.pos, index=index, base=self.base)
+ index_node = IndexNode(self.pos, index=index, base=self.base)
return index_node.analyse_base_and_index_types(
env, getting=getting, setting=not getting,
analyse_base=False)
@@ -5296,61 +5296,61 @@ class SliceNode(ExprNode):
if self.is_literal:
code.put_giveref(self.py_result())
-class SliceIntNode(SliceNode):
- # start:stop:step in subscript list
-    # This is just a node to hold start, stop and step nodes that can be
-    # converted to integers. It does not generate a Python slice object.
- #
- # start ExprNode
- # stop ExprNode
- # step ExprNode
-
- is_temp = 0
-
- def calculate_constant_result(self):
- self.constant_result = slice(
- self.start.constant_result,
- self.stop.constant_result,
- self.step.constant_result)
-
- def compile_time_value(self, denv):
- start = self.start.compile_time_value(denv)
- stop = self.stop.compile_time_value(denv)
- step = self.step.compile_time_value(denv)
- try:
- return slice(start, stop, step)
- except Exception as e:
- self.compile_time_value_error(e)
-
- def may_be_none(self):
- return False
-
- def analyse_types(self, env):
- self.start = self.start.analyse_types(env)
- self.stop = self.stop.analyse_types(env)
- self.step = self.step.analyse_types(env)
-
- if not self.start.is_none:
- self.start = self.start.coerce_to_integer(env)
- if not self.stop.is_none:
- self.stop = self.stop.coerce_to_integer(env)
- if not self.step.is_none:
- self.step = self.step.coerce_to_integer(env)
-
- if self.start.is_literal and self.stop.is_literal and self.step.is_literal:
- self.is_literal = True
- self.is_temp = False
- return self
-
- def calculate_result_code(self):
- pass
-
- def generate_result_code(self, code):
-        for a in self.start, self.stop, self.step:
- if isinstance(a, CloneNode):
- a.arg.result()
-
-
+class SliceIntNode(SliceNode):
+ # start:stop:step in subscript list
+    # This is just a node to hold start, stop and step nodes that can be
+    # converted to integers. It does not generate a Python slice object.
+ #
+ # start ExprNode
+ # stop ExprNode
+ # step ExprNode
+
+ is_temp = 0
+
+ def calculate_constant_result(self):
+ self.constant_result = slice(
+ self.start.constant_result,
+ self.stop.constant_result,
+ self.step.constant_result)
+
+ def compile_time_value(self, denv):
+ start = self.start.compile_time_value(denv)
+ stop = self.stop.compile_time_value(denv)
+ step = self.step.compile_time_value(denv)
+ try:
+ return slice(start, stop, step)
+ except Exception as e:
+ self.compile_time_value_error(e)
+
+ def may_be_none(self):
+ return False
+
+ def analyse_types(self, env):
+ self.start = self.start.analyse_types(env)
+ self.stop = self.stop.analyse_types(env)
+ self.step = self.step.analyse_types(env)
+
+ if not self.start.is_none:
+ self.start = self.start.coerce_to_integer(env)
+ if not self.stop.is_none:
+ self.stop = self.stop.coerce_to_integer(env)
+ if not self.step.is_none:
+ self.step = self.step.coerce_to_integer(env)
+
+ if self.start.is_literal and self.stop.is_literal and self.step.is_literal:
+ self.is_literal = True
+ self.is_temp = False
+ return self
+
+ def calculate_result_code(self):
+ pass
+
+ def generate_result_code(self, code):
+        for a in self.start, self.stop, self.step:
+ if isinstance(a, CloneNode):
+ a.arg.result()
+
+
class CallNode(ExprNode):
# allow overriding the default 'may_be_none' behaviour
@@ -5418,32 +5418,32 @@ class CallNode(ExprNode):
return False
return ExprNode.may_be_none(self)
- def set_py_result_type(self, function, func_type=None):
- if func_type is None:
- func_type = function.type
- if func_type is Builtin.type_type and (
- function.is_name and
- function.entry and
- function.entry.is_builtin and
- function.entry.name in Builtin.types_that_construct_their_instance):
- # calling a builtin type that returns a specific object type
- if function.entry.name == 'float':
- # the following will come true later on in a transform
- self.type = PyrexTypes.c_double_type
- self.result_ctype = PyrexTypes.c_double_type
- else:
- self.type = Builtin.builtin_types[function.entry.name]
- self.result_ctype = py_object_type
- self.may_return_none = False
- elif function.is_name and function.type_entry:
- # We are calling an extension type constructor. As long as we do not
- # support __new__(), the result type is clear
- self.type = function.type_entry.type
- self.result_ctype = py_object_type
- self.may_return_none = False
- else:
- self.type = py_object_type
-
+ def set_py_result_type(self, function, func_type=None):
+ if func_type is None:
+ func_type = function.type
+ if func_type is Builtin.type_type and (
+ function.is_name and
+ function.entry and
+ function.entry.is_builtin and
+ function.entry.name in Builtin.types_that_construct_their_instance):
+ # calling a builtin type that returns a specific object type
+ if function.entry.name == 'float':
+ # the following will come true later on in a transform
+ self.type = PyrexTypes.c_double_type
+ self.result_ctype = PyrexTypes.c_double_type
+ else:
+ self.type = Builtin.builtin_types[function.entry.name]
+ self.result_ctype = py_object_type
+ self.may_return_none = False
+ elif function.is_name and function.type_entry:
+ # We are calling an extension type constructor. As long as we do not
+ # support __new__(), the result type is clear
+ self.type = function.type_entry.type
+ self.result_ctype = py_object_type
+ self.may_return_none = False
+ else:
+ self.type = py_object_type
+
def analyse_as_type_constructor(self, env):
type = self.function.analyse_as_type(env)
if type and type.is_struct_or_union:
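
set_py_result_type above hard-codes the known result types of builtin constructors: float() gets a C double result (made true later in a transform), other instance-constructing builtins get their concrete builtin type, extension-type constructors get the extension type, and everything else stays a generic object. A table-style sketch of the classification, with an illustrative subset of Builtin.types_that_construct_their_instance:

# Sketch of set_py_result_type's result classification above.
BUILTIN_CONSTRUCTORS = {"float": "c_double", "list": "list", "dict": "dict"}

def call_result_type(func_name=None, is_builtin_type=False, ext_type=None):
    if is_builtin_type and func_name in BUILTIN_CONSTRUCTORS:
        return BUILTIN_CONSTRUCTORS[func_name]  # result may never be None
    if ext_type is not None:
        return ext_type                         # extension-type constructor
    return "object"                             # generic Python call

assert call_result_type("float", is_builtin_type=True) == "c_double"
assert call_result_type(ext_type="MyExt") == "MyExt"
assert call_result_type("helper") == "object"
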
@@ -5461,10 +5461,10 @@ class CallNode(ExprNode):
elif type and type.is_cpp_class:
self.args = [ arg.analyse_types(env) for arg in self.args ]
constructor = type.scope.lookup("<init>")
- if not constructor:
- error(self.function.pos, "no constructor found for C++ type '%s'" % self.function.name)
- self.type = error_type
- return self
+ if not constructor:
+ error(self.function.pos, "no constructor found for C++ type '%s'" % self.function.name)
+ self.type = error_type
+ return self
self.function = RawCNameExprNode(self.function.pos, constructor.type)
self.function.entry = constructor
self.function.set_cname(type.empty_declaration_code())
@@ -5506,7 +5506,7 @@ class SimpleCallNode(CallNode):
has_optional_args = False
nogil = False
analysed = False
- overflowcheck = False
+ overflowcheck = False
def compile_time_value(self, denv):
function = self.function.compile_time_value(denv)
@@ -5527,11 +5527,11 @@ class SimpleCallNode(CallNode):
error(self.args[0].pos, "Unknown type")
else:
return PyrexTypes.CPtrType(type)
- elif attr == 'typeof':
- if len(self.args) != 1:
-                error(self.pos, "only one type allowed.")
- operand = self.args[0].analyse_types(env)
- return operand.type
+ elif attr == 'typeof':
+ if len(self.args) != 1:
+                error(self.pos, "only one type allowed.")
+ operand = self.args[0].analyse_types(env)
+ return operand.type
def explicit_args_kwds(self):
return self.args, None
@@ -5553,28 +5553,28 @@ class SimpleCallNode(CallNode):
function.obj = CloneNode(self.self)
func_type = self.function_type()
- self.is_numpy_call_with_exprs = False
+ self.is_numpy_call_with_exprs = False
if (has_np_pythran(env) and function.is_numpy_attribute and
pythran_is_numpy_func_supported(function)):
- has_pythran_args = True
+ has_pythran_args = True
self.arg_tuple = TupleNode(self.pos, args = self.args)
- self.arg_tuple = self.arg_tuple.analyse_types(env)
- for arg in self.arg_tuple.args:
- has_pythran_args &= is_pythran_supported_node_or_none(arg)
- self.is_numpy_call_with_exprs = bool(has_pythran_args)
- if self.is_numpy_call_with_exprs:
+ self.arg_tuple = self.arg_tuple.analyse_types(env)
+ for arg in self.arg_tuple.args:
+ has_pythran_args &= is_pythran_supported_node_or_none(arg)
+ self.is_numpy_call_with_exprs = bool(has_pythran_args)
+ if self.is_numpy_call_with_exprs:
env.add_include_file(pythran_get_func_include_file(function))
- return NumPyMethodCallNode.from_node(
- self,
+ return NumPyMethodCallNode.from_node(
+ self,
function_cname=pythran_functor(function),
- arg_tuple=self.arg_tuple,
+ arg_tuple=self.arg_tuple,
type=PythranExpr(pythran_func_type(function, self.arg_tuple.args)),
- )
- elif func_type.is_pyobject:
- self.arg_tuple = TupleNode(self.pos, args = self.args)
+ )
+ elif func_type.is_pyobject:
+ self.arg_tuple = TupleNode(self.pos, args = self.args)
self.arg_tuple = self.arg_tuple.analyse_types(env).coerce_to_pyobject(env)
self.args = None
- self.set_py_result_type(function, func_type)
+ self.set_py_result_type(function, func_type)
self.is_temp = 1
else:
self.args = [ arg.analyse_types(env) for arg in self.args ]
@@ -5669,7 +5669,7 @@ class SimpleCallNode(CallNode):
if formal_arg.not_none:
if self.self:
self.self = self.self.as_none_safe_node(
- "'NoneType' object has no attribute '%{0}s'".format('.30' if len(entry.name) <= 30 else ''),
+ "'NoneType' object has no attribute '%{0}s'".format('.30' if len(entry.name) <= 30 else ''),
error='PyExc_AttributeError',
format_args=[entry.name])
else:
@@ -5801,8 +5801,8 @@ class SimpleCallNode(CallNode):
if func_type.exception_value is None:
env.use_utility_code(UtilityCode.load_cached("CppExceptionConversion", "CppSupport.cpp"))
- self.overflowcheck = env.directives['overflowcheck']
-
+ self.overflowcheck = env.directives['overflowcheck']
+
def calculate_result_code(self):
return self.c_call_code()
@@ -5842,11 +5842,11 @@ class SimpleCallNode(CallNode):
return False # skip allocation of unused result temp
return True
- def generate_evaluation_code(self, code):
- function = self.function
- if function.is_name or function.is_attribute:
- code.globalstate.use_entry_utility_code(function.entry)
-
+ def generate_evaluation_code(self, code):
+ function = self.function
+ if function.is_name or function.is_attribute:
+ code.globalstate.use_entry_utility_code(function.entry)
+
abs_function_cnames = ('abs', 'labs', '__Pyx_abs_longlong')
is_signed_int = self.type.is_int and self.type.signed
if self.overflowcheck and is_signed_int and function.result() in abs_function_cnames:
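
The hunk above wires `overflowcheck` into calls to the C-level abs variants: for a signed integral result, `abs()` of the most negative value is not representable, so an explicit check is emitted. A minimal sketch of the failure mode, emulating 32-bit two's-complement arithmetic in plain Python (names are illustrative, not from the patch):

    def int32_abs(x):
        # emulate C int abs() on a 32-bit two's-complement machine
        r = -x if x < 0 else x
        r &= 0xFFFFFFFF                                 # truncate to 32 bits
        return r - (1 << 32) if r & 0x80000000 else r

    print(int32_abs(-5))        # 5
    print(int32_abs(-2**31))    # -2147483648: |INT_MIN| overflows, stays negative
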
@@ -5858,59 +5858,59 @@ class SimpleCallNode(CallNode):
self.args[0].type.empty_declaration_code(),
code.error_goto(self.pos)))
- if not function.type.is_pyobject or len(self.arg_tuple.args) > 1 or (
- self.arg_tuple.args and self.arg_tuple.is_literal):
- super(SimpleCallNode, self).generate_evaluation_code(code)
- return
-
- # Special case 0-args and try to avoid explicit tuple creation for Python calls with 1 arg.
- arg = self.arg_tuple.args[0] if self.arg_tuple.args else None
- subexprs = (self.self, self.coerced_self, function, arg)
- for subexpr in subexprs:
- if subexpr is not None:
- subexpr.generate_evaluation_code(code)
-
- code.mark_pos(self.pos)
- assert self.is_temp
- self.allocate_temp_result(code)
-
- if arg is None:
- code.globalstate.use_utility_code(UtilityCode.load_cached(
- "PyObjectCallNoArg", "ObjectHandling.c"))
- code.putln(
- "%s = __Pyx_PyObject_CallNoArg(%s); %s" % (
- self.result(),
- function.py_result(),
- code.error_goto_if_null(self.result(), self.pos)))
- else:
- code.globalstate.use_utility_code(UtilityCode.load_cached(
- "PyObjectCallOneArg", "ObjectHandling.c"))
- code.putln(
- "%s = __Pyx_PyObject_CallOneArg(%s, %s); %s" % (
- self.result(),
- function.py_result(),
- arg.py_result(),
- code.error_goto_if_null(self.result(), self.pos)))
-
- code.put_gotref(self.py_result())
-
- for subexpr in subexprs:
- if subexpr is not None:
- subexpr.generate_disposal_code(code)
- subexpr.free_temps(code)
-
+ if not function.type.is_pyobject or len(self.arg_tuple.args) > 1 or (
+ self.arg_tuple.args and self.arg_tuple.is_literal):
+ super(SimpleCallNode, self).generate_evaluation_code(code)
+ return
+
+ # Special case 0-args and try to avoid explicit tuple creation for Python calls with 1 arg.
+ arg = self.arg_tuple.args[0] if self.arg_tuple.args else None
+ subexprs = (self.self, self.coerced_self, function, arg)
+ for subexpr in subexprs:
+ if subexpr is not None:
+ subexpr.generate_evaluation_code(code)
+
+ code.mark_pos(self.pos)
+ assert self.is_temp
+ self.allocate_temp_result(code)
+
+ if arg is None:
+ code.globalstate.use_utility_code(UtilityCode.load_cached(
+ "PyObjectCallNoArg", "ObjectHandling.c"))
+ code.putln(
+ "%s = __Pyx_PyObject_CallNoArg(%s); %s" % (
+ self.result(),
+ function.py_result(),
+ code.error_goto_if_null(self.result(), self.pos)))
+ else:
+ code.globalstate.use_utility_code(UtilityCode.load_cached(
+ "PyObjectCallOneArg", "ObjectHandling.c"))
+ code.putln(
+ "%s = __Pyx_PyObject_CallOneArg(%s, %s); %s" % (
+ self.result(),
+ function.py_result(),
+ arg.py_result(),
+ code.error_goto_if_null(self.result(), self.pos)))
+
+ code.put_gotref(self.py_result())
+
+ for subexpr in subexprs:
+ if subexpr is not None:
+ subexpr.generate_disposal_code(code)
+ subexpr.free_temps(code)
+
def generate_result_code(self, code):
func_type = self.function_type()
if func_type.is_pyobject:
- arg_code = self.arg_tuple.py_result()
- code.globalstate.use_utility_code(UtilityCode.load_cached(
- "PyObjectCall", "ObjectHandling.c"))
- code.putln(
- "%s = __Pyx_PyObject_Call(%s, %s, NULL); %s" % (
- self.result(),
- self.function.py_result(),
- arg_code,
- code.error_goto_if_null(self.result(), self.pos)))
+ arg_code = self.arg_tuple.py_result()
+ code.globalstate.use_utility_code(UtilityCode.load_cached(
+ "PyObjectCall", "ObjectHandling.c"))
+ code.putln(
+ "%s = __Pyx_PyObject_Call(%s, %s, NULL); %s" % (
+ self.result(),
+ self.function.py_result(),
+ arg_code,
+ code.error_goto_if_null(self.result(), self.pos)))
code.put_gotref(self.py_result())
elif func_type.is_cfunction:
if self.has_optional_args:
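
The relocated `generate_evaluation_code` block above special-cases Python calls with zero or one argument so no intermediate args tuple is built; only the general case falls back to packing a tuple. A rough Python-level model of the dispatch (a simplified sketch; the real code also checks that the callee is a Python object and the argument tuple is not a literal):

    def call_specialized(func, args):
        if len(args) == 0:
            return func()                # __Pyx_PyObject_CallNoArg path
        elif len(args) == 1:
            return func(args[0])         # __Pyx_PyObject_CallOneArg path
        else:
            return func(*tuple(args))    # generic __Pyx_PyObject_Call with an args tuple
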
@@ -5938,7 +5938,7 @@ class SimpleCallNode(CallNode):
exc_val = func_type.exception_value
exc_check = func_type.exception_check
if exc_val is not None:
- exc_checks.append("%s == %s" % (self.result(), func_type.return_type.cast_code(exc_val)))
+ exc_checks.append("%s == %s" % (self.result(), func_type.return_type.cast_code(exc_val)))
if exc_check:
if self.nogil:
exc_checks.append("__Pyx_ErrOccurredWithGIL()")
@@ -5972,33 +5972,33 @@ class SimpleCallNode(CallNode):
class NumPyMethodCallNode(ExprNode):
- # Pythran call to a NumPy function or method.
- #
+ # Pythran call to a NumPy function or method.
+ #
# function_cname string the function/method to call
# arg_tuple TupleNode the arguments as an args tuple
-
+
subexprs = ['arg_tuple']
- is_temp = True
- may_return_none = True
-
- def generate_evaluation_code(self, code):
- code.mark_pos(self.pos)
- self.allocate_temp_result(code)
-
- assert self.arg_tuple.mult_factor is None
- args = self.arg_tuple.args
- for arg in args:
- arg.generate_evaluation_code(code)
-
- code.putln("// function evaluation code for numpy function")
- code.putln("__Pyx_call_destructor(%s);" % self.result())
+ is_temp = True
+ may_return_none = True
+
+ def generate_evaluation_code(self, code):
+ code.mark_pos(self.pos)
+ self.allocate_temp_result(code)
+
+ assert self.arg_tuple.mult_factor is None
+ args = self.arg_tuple.args
+ for arg in args:
+ arg.generate_evaluation_code(code)
+
+ code.putln("// function evaluation code for numpy function")
+ code.putln("__Pyx_call_destructor(%s);" % self.result())
code.putln("new (&%s) decltype(%s){%s{}(%s)};" % (
- self.result(),
- self.result(),
+ self.result(),
+ self.result(),
self.function_cname,
- ", ".join(a.pythran_result() for a in args)))
-
-
+ ", ".join(a.pythran_result() for a in args)))
+
+
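
`NumPyMethodCallNode.generate_evaluation_code` re-initializes the Pythran expression temp in place: it first runs the destructor, then uses C++ placement new with `decltype` to construct the functor-call result. Re-running the format strings with concrete stand-ins (the temp and functor names below are hypothetical) shows the emitted shape:

    result, functor, args_code = "__pyx_t_1", "pythonic::numpy::functor::sum", "a0"
    print("__Pyx_call_destructor(%s);" % result)
    print("new (&%s) decltype(%s){%s{}(%s)};" % (result, result, functor, args_code))
    # __Pyx_call_destructor(__pyx_t_1);
    # new (&__pyx_t_1) decltype(__pyx_t_1){pythonic::numpy::functor::sum{}(a0)};
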
class PyMethodCallNode(SimpleCallNode):
# Specialised call to a (potential) PyMethodObject with non-constant argument tuple.
# Allows the self argument to be injected directly instead of repacking a tuple for it.
@@ -6323,37 +6323,37 @@ class PythonCapiCallNode(SimpleCallNode):
SimpleCallNode.__init__(self, pos, **kwargs)
-class CachedBuiltinMethodCallNode(CallNode):
- # Python call to a method of a known Python builtin (only created in transforms)
-
- subexprs = ['obj', 'args']
- is_temp = True
-
- def __init__(self, call_node, obj, method_name, args):
- super(CachedBuiltinMethodCallNode, self).__init__(
- call_node.pos,
- obj=obj, method_name=method_name, args=args,
- may_return_none=call_node.may_return_none,
- type=call_node.type)
-
- def may_be_none(self):
- if self.may_return_none is not None:
- return self.may_return_none
- return ExprNode.may_be_none(self)
-
- def generate_result_code(self, code):
- type_cname = self.obj.type.cname
- obj_cname = self.obj.py_result()
- args = [arg.py_result() for arg in self.args]
- call_code = code.globalstate.cached_unbound_method_call_code(
- obj_cname, type_cname, self.method_name, args)
- code.putln("%s = %s; %s" % (
- self.result(), call_code,
- code.error_goto_if_null(self.result(), self.pos)
- ))
- code.put_gotref(self.result())
-
-
+class CachedBuiltinMethodCallNode(CallNode):
+ # Python call to a method of a known Python builtin (only created in transforms)
+
+ subexprs = ['obj', 'args']
+ is_temp = True
+
+ def __init__(self, call_node, obj, method_name, args):
+ super(CachedBuiltinMethodCallNode, self).__init__(
+ call_node.pos,
+ obj=obj, method_name=method_name, args=args,
+ may_return_none=call_node.may_return_none,
+ type=call_node.type)
+
+ def may_be_none(self):
+ if self.may_return_none is not None:
+ return self.may_return_none
+ return ExprNode.may_be_none(self)
+
+ def generate_result_code(self, code):
+ type_cname = self.obj.type.cname
+ obj_cname = self.obj.py_result()
+ args = [arg.py_result() for arg in self.args]
+ call_code = code.globalstate.cached_unbound_method_call_code(
+ obj_cname, type_cname, self.method_name, args)
+ code.putln("%s = %s; %s" % (
+ self.result(), call_code,
+ code.error_goto_if_null(self.result(), self.pos)
+ ))
+ code.put_gotref(self.result())
+
+
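
`CachedBuiltinMethodCallNode` (restored above) relies on `cached_unbound_method_call_code`, which looks the method up on the builtin type once and reuses it at every call. The effect, sketched in plain Python with illustrative names:

    _cached_append = list.append        # unbound method fetched a single time

    def fast_append(obj, item):
        return _cached_append(obj, item)   # no per-call attribute lookup on obj

    xs = []
    fast_append(xs, 1)
    assert xs == [1]
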
class GeneralCallNode(CallNode):
# General Python function call, including keyword,
# * and ** arguments.
@@ -6412,7 +6412,7 @@ class GeneralCallNode(CallNode):
self.positional_args = self.positional_args.analyse_types(env)
self.positional_args = \
self.positional_args.coerce_to_pyobject(env)
- self.set_py_result_type(self.function)
+ self.set_py_result_type(self.function)
self.is_temp = 1
return self
@@ -6579,7 +6579,7 @@ class AsTupleNode(ExprNode):
# arg ExprNode
subexprs = ['arg']
- is_temp = 1
+ is_temp = 1
def calculate_constant_result(self):
self.constant_result = tuple(self.arg.constant_result)
@@ -6605,11 +6605,11 @@ class AsTupleNode(ExprNode):
gil_message = "Constructing Python tuple"
def generate_result_code(self, code):
- cfunc = "__Pyx_PySequence_Tuple" if self.arg.type in (py_object_type, tuple_type) else "PySequence_Tuple"
+ cfunc = "__Pyx_PySequence_Tuple" if self.arg.type in (py_object_type, tuple_type) else "PySequence_Tuple"
code.putln(
- "%s = %s(%s); %s" % (
+ "%s = %s(%s); %s" % (
self.result(),
- cfunc, self.arg.py_result(),
+ cfunc, self.arg.py_result(),
code.error_goto_if_null(self.result(), self.pos)))
code.put_gotref(self.py_result())
@@ -6900,9 +6900,9 @@ class AttributeNode(ExprNode):
entry.is_cglobal or entry.is_cfunction
or entry.is_type or entry.is_const):
return self.as_name_node(env, entry, target)
- if self.is_cimported_module_without_shadow(env):
- error(self.pos, "cimported module has no attribute '%s'" % self.attribute)
- return self
+ if self.is_cimported_module_without_shadow(env):
+ error(self.pos, "cimported module has no attribute '%s'" % self.attribute)
+ return self
return None
def analyse_as_type_attribute(self, env):
@@ -7094,14 +7094,14 @@ class AttributeNode(ExprNode):
self.member = self.attribute
self.type = py_object_type
self.is_py_attr = 1
-
+
if not obj_type.is_pyobject and not obj_type.is_error:
- # Expose python methods for immutable objects.
- if (obj_type.is_string or obj_type.is_cpp_string
- or obj_type.is_buffer or obj_type.is_memoryviewslice
- or obj_type.is_numeric
- or (obj_type.is_ctuple and obj_type.can_coerce_to_pyobject(env))
- or (obj_type.is_struct and obj_type.can_coerce_to_pyobject(env))):
+ # Expose python methods for immutable objects.
+ if (obj_type.is_string or obj_type.is_cpp_string
+ or obj_type.is_buffer or obj_type.is_memoryviewslice
+ or obj_type.is_numeric
+ or (obj_type.is_ctuple and obj_type.can_coerce_to_pyobject(env))
+ or (obj_type.is_struct and obj_type.can_coerce_to_pyobject(env))):
if not immutable_obj:
self.obj = self.obj.coerce_to_pyobject(env)
elif (obj_type.is_cfunction and (self.obj.is_name or self.obj.is_attribute)
@@ -7123,7 +7123,7 @@ class AttributeNode(ExprNode):
format_args = ()
if (self.obj.type.is_extension_type and self.needs_none_check and not
self.is_py_attr):
- msg = "'NoneType' object has no attribute '%{0}s'".format('.30' if len(self.attribute) <= 30 else '')
+ msg = "'NoneType' object has no attribute '%{0}s'".format('.30' if len(self.attribute) <= 30 else '')
format_args = (self.attribute,)
elif self.obj.type.is_memoryviewslice:
if self.is_memslice_transpose:
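
The `msg` line above builds a C format string in two stages: `str.format` decides whether to insert a `.30` precision, and the resulting `%s`/`%.30s` is later filled in by `PyErr_Format` with the attribute name. Worked through for a short name:

    attribute = "foo"
    msg = "'NoneType' object has no attribute '%{0}s'".format(
        '.30' if len(attribute) <= 30 else '')
    assert msg == "'NoneType' object has no attribute '%.30s'"
    # PyErr_Format(PyExc_AttributeError, msg, name) then caps the name at 30 chars
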
@@ -7145,9 +7145,9 @@ class AttributeNode(ExprNode):
gil_message = "Accessing Python attribute"
- def is_cimported_module_without_shadow(self, env):
- return self.obj.is_cimported_module_without_shadow(env)
-
+ def is_cimported_module_without_shadow(self, env):
+ return self.obj.is_cimported_module_without_shadow(env)
+
def is_simple(self):
if self.obj:
return self.result_in_temp() or self.obj.is_simple()
@@ -7674,14 +7674,14 @@ class SequenceNode(ExprNode):
code.putln("PyObject* sequence = %s;" % rhs.py_result())
# list/tuple => check size
- code.putln("Py_ssize_t size = __Pyx_PySequence_SIZE(sequence);")
+ code.putln("Py_ssize_t size = __Pyx_PySequence_SIZE(sequence);")
code.putln("if (unlikely(size != %d)) {" % len(self.args))
code.globalstate.use_utility_code(raise_too_many_values_to_unpack)
code.putln("if (size > %d) __Pyx_RaiseTooManyValuesError(%d);" % (
len(self.args), len(self.args)))
code.globalstate.use_utility_code(raise_need_more_values_to_unpack)
code.putln("else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size);")
- # < 0 => exception
+ # < 0 => exception
code.putln(code.error_goto(self.pos))
code.putln("}")
@@ -7912,10 +7912,10 @@ class TupleNode(SequenceNode):
if self.mult_factor or not self.args:
return tuple_type
arg_types = [arg.infer_type(env) for arg in self.args]
- if any(type.is_pyobject or type.is_memoryviewslice or type.is_unspecified or type.is_fused
- for type in arg_types):
+ if any(type.is_pyobject or type.is_memoryviewslice or type.is_unspecified or type.is_fused
+ for type in arg_types):
return tuple_type
- return env.declare_tuple_type(self.pos, arg_types).type
+ return env.declare_tuple_type(self.pos, arg_types).type
def analyse_types(self, env, skip_children=False):
if len(self.args) == 0:
@@ -7929,8 +7929,8 @@ class TupleNode(SequenceNode):
arg.starred_expr_allowed_here = True
self.args[i] = arg.analyse_types(env)
if (not self.mult_factor and
- not any((arg.is_starred or arg.type.is_pyobject or arg.type.is_memoryviewslice or arg.type.is_fused)
- for arg in self.args)):
+ not any((arg.is_starred or arg.type.is_pyobject or arg.type.is_memoryviewslice or arg.type.is_fused)
+ for arg in self.args)):
self.type = env.declare_tuple_type(self.pos, (arg.type for arg in self.args)).type
self.is_temp = 1
return self
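
The re-indented conditions above gate the C-tuple optimization: a tuple literal only becomes a ctuple when no element is starred, Python-typed, a memoryview slice, or fused. A standalone restatement of that predicate with string stand-ins for the type objects (illustrative only):

    def can_use_ctuple(arg_types):
        blocked = ('pyobject', 'memoryviewslice', 'fused', 'unspecified')
        return not any(t in blocked for t in arg_types)

    assert can_use_ctuple(['int', 'double'])        # -> C tuple (int, double)
    assert not can_use_ctuple(['int', 'pyobject'])  # -> plain Python tuple
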
@@ -8013,8 +8013,8 @@ class TupleNode(SequenceNode):
if len(self.args) == 0:
# result_code is Naming.empty_tuple
return
-
- if self.is_literal or self.is_partly_literal:
+
+ if self.is_literal or self.is_partly_literal:
# The "mult_factor" is part of the deduplication if it is also constant, i.e. when
# we deduplicate the multiplied result. Otherwise, only deduplicate the constant part.
dedup_key = make_dedup_key(self.type, [self.mult_factor if self.is_literal else None] + self.args)
@@ -8025,14 +8025,14 @@ class TupleNode(SequenceNode):
const_code.mark_pos(self.pos)
self.generate_sequence_packing_code(const_code, tuple_target, plain=not self.is_literal)
const_code.put_giveref(tuple_target)
- if self.is_literal:
- self.result_code = tuple_target
- else:
- code.putln('%s = PyNumber_Multiply(%s, %s); %s' % (
- self.result(), tuple_target, self.mult_factor.py_result(),
- code.error_goto_if_null(self.result(), self.pos)
+ if self.is_literal:
+ self.result_code = tuple_target
+ else:
+ code.putln('%s = PyNumber_Multiply(%s, %s); %s' % (
+ self.result(), tuple_target, self.mult_factor.py_result(),
+ code.error_goto_if_null(self.result(), self.pos)
))
- code.put_gotref(self.py_result())
+ code.put_gotref(self.py_result())
else:
self.type.entry.used = True
self.generate_sequence_packing_code(code)
@@ -8065,10 +8065,10 @@ class ListNode(SequenceNode):
return node.coerce_to_pyobject(env)
def analyse_types(self, env):
- with local_errors(ignore=True) as errors:
- self.original_args = list(self.args)
- node = SequenceNode.analyse_types(self, env)
- node.obj_conversion_errors = errors
+ with local_errors(ignore=True) as errors:
+ self.original_args = list(self.args)
+ node = SequenceNode.analyse_types(self, env)
+ node.obj_conversion_errors = errors
if env.is_module_scope:
self.in_module_scope = True
node = node._create_merge_node_if_necessary(env)
@@ -8244,7 +8244,7 @@ class ScopedExprNode(ExprNode):
code.putln('{ /* enter inner scope */')
py_entries = []
- for _, entry in sorted(item for item in self.expr_scope.entries.items() if item[0]):
+ for _, entry in sorted(item for item in self.expr_scope.entries.items() if item[0]):
if not entry.in_closure:
if entry.type.is_pyobject and entry.used:
py_entries.append(entry)
@@ -8255,13 +8255,13 @@ class ScopedExprNode(ExprNode):
return
# must free all local Python references at each exit point
- old_loop_labels = code.new_loop_labels()
+ old_loop_labels = code.new_loop_labels()
old_error_label = code.new_error_label()
generate_inner_evaluation_code(code)
# normal (non-error) exit
- self._generate_vars_cleanup(code, py_entries)
+ self._generate_vars_cleanup(code, py_entries)
# error/loop body exit points
exit_scope = code.new_label('exit_scope')
@@ -8270,7 +8270,7 @@ class ScopedExprNode(ExprNode):
list(zip(code.get_loop_labels(), old_loop_labels))):
if code.label_used(label):
code.put_label(label)
- self._generate_vars_cleanup(code, py_entries)
+ self._generate_vars_cleanup(code, py_entries)
code.put_goto(old_label)
code.put_label(exit_scope)
code.putln('} /* exit inner scope */')
@@ -8278,22 +8278,22 @@ class ScopedExprNode(ExprNode):
code.set_loop_labels(old_loop_labels)
code.error_label = old_error_label
- def _generate_vars_cleanup(self, code, py_entries):
- for entry in py_entries:
- if entry.is_cglobal:
- code.put_var_gotref(entry)
- code.put_decref_set(entry.cname, "Py_None")
- else:
- code.put_var_xdecref_clear(entry)
+ def _generate_vars_cleanup(self, code, py_entries):
+ for entry in py_entries:
+ if entry.is_cglobal:
+ code.put_var_gotref(entry)
+ code.put_decref_set(entry.cname, "Py_None")
+ else:
+ code.put_var_xdecref_clear(entry)
+
-
class ComprehensionNode(ScopedExprNode):
# A list/set/dict comprehension
child_attrs = ["loop"]
is_temp = True
- constant_result = not_a_constant
+ constant_result = not_a_constant
def infer_type(self, env):
return self.type
@@ -8721,12 +8721,12 @@ class DictNode(ExprNode):
return dict_type
def analyse_types(self, env):
- with local_errors(ignore=True) as errors:
- self.key_value_pairs = [
- item.analyse_types(env)
- for item in self.key_value_pairs
- ]
- self.obj_conversion_errors = errors
+ with local_errors(ignore=True) as errors:
+ self.key_value_pairs = [
+ item.analyse_types(env)
+ for item in self.key_value_pairs
+ ]
+ self.obj_conversion_errors = errors
return self
def may_be_none(self):
@@ -8788,9 +8788,9 @@ class DictNode(ExprNode):
if is_dict:
self.release_errors()
code.putln(
- "%s = __Pyx_PyDict_NewPresized(%d); %s" % (
+ "%s = __Pyx_PyDict_NewPresized(%d); %s" % (
self.result(),
- len(self.key_value_pairs),
+ len(self.key_value_pairs),
code.error_goto_if_null(self.result(), self.pos)))
code.put_gotref(self.py_result())
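
For a dict literal, the constructor call emitted above presizes the hash table to the number of key/value pairs before the individual inserts, avoiding rehashing while filling. Re-running the format string with stand-in values (the temp name and error-check text are placeholders):

    key_value_pairs = [("k1", "v1"), ("k2", "v2"), ("k3", "v3")]
    error_check = "<error_goto_if_null>"   # placeholder for code.error_goto_if_null(...)
    print("%s = __Pyx_PyDict_NewPresized(%d); %s"
          % ("__pyx_t_1", len(key_value_pairs), error_check))
    # __pyx_t_1 = __Pyx_PyDict_NewPresized(3); <error_goto_if_null>
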
@@ -9262,19 +9262,19 @@ class PyCFunctionNode(ExprNode, ModuleNameMixin):
else:
default_args.append(arg)
if arg.annotation:
- arg.annotation = self.analyse_annotation(env, arg.annotation)
+ arg.annotation = self.analyse_annotation(env, arg.annotation)
annotations.append((arg.pos, arg.name, arg.annotation))
for arg in (self.def_node.star_arg, self.def_node.starstar_arg):
if arg and arg.annotation:
- arg.annotation = self.analyse_annotation(env, arg.annotation)
+ arg.annotation = self.analyse_annotation(env, arg.annotation)
annotations.append((arg.pos, arg.name, arg.annotation))
- annotation = self.def_node.return_type_annotation
- if annotation:
- annotation = self.analyse_annotation(env, annotation)
- self.def_node.return_type_annotation = annotation
- annotations.append((annotation.pos, StringEncoding.EncodedString("return"), annotation))
+ annotation = self.def_node.return_type_annotation
+ if annotation:
+ annotation = self.analyse_annotation(env, annotation)
+ self.def_node.return_type_annotation = annotation
+ annotations.append((annotation.pos, StringEncoding.EncodedString("return"), annotation))
if nonliteral_objects or nonliteral_other:
module_scope = env.global_scope()
@@ -9289,7 +9289,7 @@ class PyCFunctionNode(ExprNode, ModuleNameMixin):
for arg in nonliteral_other:
entry = scope.declare_var(arg.name, arg.type, None,
Naming.arg_prefix + arg.name,
- allow_pyobject=False, allow_memoryview=True)
+ allow_pyobject=False, allow_memoryview=True)
self.defaults.append((arg, entry))
entry = module_scope.declare_struct_or_union(
None, 'struct', scope, 1, None, cname=cname)
@@ -9351,20 +9351,20 @@ class PyCFunctionNode(ExprNode, ModuleNameMixin):
for pos, name, value in annotations])
self.annotations_dict = annotations_dict.analyse_types(env)
- def analyse_annotation(self, env, annotation):
- if annotation is None:
- return None
- atype = annotation.analyse_as_type(env)
- if atype is not None:
- # Keep parsed types as strings as they might not be Python representable.
- annotation = UnicodeNode(
- annotation.pos,
- value=StringEncoding.EncodedString(atype.declaration_code('', for_display=True)))
- annotation = annotation.analyse_types(env)
- if not annotation.type.is_pyobject:
- annotation = annotation.coerce_to_pyobject(env)
- return annotation
-
+ def analyse_annotation(self, env, annotation):
+ if annotation is None:
+ return None
+ atype = annotation.analyse_as_type(env)
+ if atype is not None:
+ # Keep parsed types as strings as they might not be Python representable.
+ annotation = UnicodeNode(
+ annotation.pos,
+ value=StringEncoding.EncodedString(atype.declaration_code('', for_display=True)))
+ annotation = annotation.analyse_types(env)
+ if not annotation.type.is_pyobject:
+ annotation = annotation.coerce_to_pyobject(env)
+ return annotation
+
def may_be_none(self):
return False
@@ -9536,13 +9536,13 @@ class CodeObjectNode(ExprNode):
func.name, identifier=True, is_str=False, unicode_value=func.name)
# FIXME: better way to get the module file path at module init time? Encoding to use?
file_path = StringEncoding.bytes_literal(func.pos[0].get_filenametable_entry().encode('utf8'), 'utf8')
- # XXX Use get_description() to set arcadia root relative filename
- file_path = StringEncoding.bytes_literal(func.pos[0].get_description().encode('utf8'), 'utf8')
+ # XXX Use get_description() to set arcadia root relative filename
+ file_path = StringEncoding.bytes_literal(func.pos[0].get_description().encode('utf8'), 'utf8')
file_path_const = code.get_py_string_const(file_path, identifier=False, is_str=True)
- # This combination makes CPython create a new dict for "frame.f_locals" (see GH #1836).
- flags = ['CO_OPTIMIZED', 'CO_NEWLOCALS']
-
+ # This combination makes CPython create a new dict for "frame.f_locals" (see GH #1836).
+ flags = ['CO_OPTIMIZED', 'CO_NEWLOCALS']
+
if self.def_node.star_arg:
flags.append('CO_VARARGS')
if self.def_node.starstar_arg:
@@ -9729,11 +9729,11 @@ class YieldExprNode(ExprNode):
label_num = 0
is_yield_from = False
is_await = False
- in_async_gen = False
+ in_async_gen = False
expr_keyword = 'yield'
def analyse_types(self, env):
- if not self.label_num or (self.is_yield_from and self.in_async_gen):
+ if not self.label_num or (self.is_yield_from and self.in_async_gen):
error(self.pos, "'%s' not supported here" % self.expr_keyword)
self.is_temp = 1
if self.arg is not None:
@@ -9764,8 +9764,8 @@ class YieldExprNode(ExprNode):
Generate the code to return the argument in 'Naming.retval_cname'
and to continue at the yield label.
"""
- label_num, label_name = code.new_yield_label(
- self.expr_keyword.replace(' ', '_'))
+ label_num, label_name = code.new_yield_label(
+ self.expr_keyword.replace(' ', '_'))
code.use_label(label_name)
saved = []
@@ -9778,30 +9778,30 @@ class YieldExprNode(ExprNode):
code.putln('%s->%s = %s;' % (Naming.cur_scope_cname, save_cname, cname))
code.put_xgiveref(Naming.retval_cname)
- profile = code.globalstate.directives['profile']
- linetrace = code.globalstate.directives['linetrace']
- if profile or linetrace:
- code.put_trace_return(Naming.retval_cname,
- nogil=not code.funcstate.gil_owned)
+ profile = code.globalstate.directives['profile']
+ linetrace = code.globalstate.directives['linetrace']
+ if profile or linetrace:
+ code.put_trace_return(Naming.retval_cname,
+ nogil=not code.funcstate.gil_owned)
code.put_finish_refcount_context()
-
- if code.funcstate.current_except is not None:
- # inside of an except block => save away currently handled exception
- code.putln("__Pyx_Coroutine_SwapException(%s);" % Naming.generator_cname)
- else:
- # no exceptions being handled => restore exception state of caller
- code.putln("__Pyx_Coroutine_ResetAndClearException(%s);" % Naming.generator_cname)
-
- code.putln("/* return from %sgenerator, %sing value */" % (
- 'async ' if self.in_async_gen else '',
- 'await' if self.is_await else 'yield'))
+
+ if code.funcstate.current_except is not None:
+ # inside of an except block => save away currently handled exception
+ code.putln("__Pyx_Coroutine_SwapException(%s);" % Naming.generator_cname)
+ else:
+ # no exceptions being handled => restore exception state of caller
+ code.putln("__Pyx_Coroutine_ResetAndClearException(%s);" % Naming.generator_cname)
+
+ code.putln("/* return from %sgenerator, %sing value */" % (
+ 'async ' if self.in_async_gen else '',
+ 'await' if self.is_await else 'yield'))
code.putln("%s->resume_label = %d;" % (
Naming.generator_cname, label_num))
- if self.in_async_gen and not self.is_await:
- # __Pyx__PyAsyncGenValueWrapperNew() steals a reference to the return value
- code.putln("return __Pyx__PyAsyncGenValueWrapperNew(%s);" % Naming.retval_cname)
- else:
- code.putln("return %s;" % Naming.retval_cname)
+ if self.in_async_gen and not self.is_await:
+ # __Pyx__PyAsyncGenValueWrapperNew() steals a reference to the return value
+ code.putln("return __Pyx__PyAsyncGenValueWrapperNew(%s);" % Naming.retval_cname)
+ else:
+ code.putln("return %s;" % Naming.retval_cname)
code.put_label(label_name)
for cname, save_cname, type in saved:
@@ -9809,19 +9809,19 @@ class YieldExprNode(ExprNode):
if type.is_pyobject:
code.putln('%s->%s = 0;' % (Naming.cur_scope_cname, save_cname))
code.put_xgotref(cname)
- self.generate_sent_value_handling_code(code, Naming.sent_value_cname)
+ self.generate_sent_value_handling_code(code, Naming.sent_value_cname)
if self.result_is_used:
self.allocate_temp_result(code)
code.put('%s = %s; ' % (self.result(), Naming.sent_value_cname))
code.put_incref(self.result(), py_object_type)
- def generate_sent_value_handling_code(self, code, value_cname):
- code.putln(code.error_goto_if_null(value_cname, self.pos))
+ def generate_sent_value_handling_code(self, code, value_cname):
+ code.putln(code.error_goto_if_null(value_cname, self.pos))
-class _YieldDelegationExprNode(YieldExprNode):
+class _YieldDelegationExprNode(YieldExprNode):
def yield_from_func(self, code):
- raise NotImplementedError()
+ raise NotImplementedError()
def generate_evaluation_code(self, code, source_cname=None, decref_source=False):
if source_cname is None:
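
The yield machinery above saves live temps into the closure, records a `resume_label`, returns the value, and on the next `send()` the C code jumps back to the matching label to pick up the sent value. A conceptual Python model of that state machine (the real code uses C labels and `Naming.*` cnames; this is a sketch, not the generated code):

    class ResumeSketch:
        def __init__(self):
            self.resume_label = 0              # 0 = not started yet

        def send(self, sent_value):
            if self.resume_label == 0:         # first activation: run up to the yield
                self.resume_label = 1          # "%s->resume_label = %d;"
                return "yielded value"         # "return %s;" % Naming.retval_cname
            if self.resume_label == 1:         # resumed at the yield label
                raise StopIteration(sent_value)  # sent value becomes the yield result

    g = ResumeSketch()
    assert g.send(None) == "yielded value"
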
@@ -9855,31 +9855,31 @@ class _YieldDelegationExprNode(YieldExprNode):
code.put_gotref(self.result())
def handle_iteration_exception(self, code):
- code.putln("PyObject* exc_type = __Pyx_PyErr_Occurred();")
+ code.putln("PyObject* exc_type = __Pyx_PyErr_Occurred();")
code.putln("if (exc_type) {")
- code.putln("if (likely(exc_type == PyExc_StopIteration || (exc_type != PyExc_GeneratorExit &&"
- " __Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)))) PyErr_Clear();")
+ code.putln("if (likely(exc_type == PyExc_StopIteration || (exc_type != PyExc_GeneratorExit &&"
+ " __Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)))) PyErr_Clear();")
code.putln("else %s" % code.error_goto(self.pos))
code.putln("}")
-class YieldFromExprNode(_YieldDelegationExprNode):
- # "yield from GEN" expression
- is_yield_from = True
- expr_keyword = 'yield from'
-
- def coerce_yield_argument(self, env):
- if not self.arg.type.is_string:
- # FIXME: support C arrays and C++ iterators?
- error(self.pos, "yielding from non-Python object not supported")
- self.arg = self.arg.coerce_to_pyobject(env)
-
- def yield_from_func(self, code):
- code.globalstate.use_utility_code(UtilityCode.load_cached("GeneratorYieldFrom", "Coroutine.c"))
- return "__Pyx_Generator_Yield_From"
-
-
-class AwaitExprNode(_YieldDelegationExprNode):
+class YieldFromExprNode(_YieldDelegationExprNode):
+ # "yield from GEN" expression
+ is_yield_from = True
+ expr_keyword = 'yield from'
+
+ def coerce_yield_argument(self, env):
+ if not self.arg.type.is_string:
+ # FIXME: support C arrays and C++ iterators?
+ error(self.pos, "yielding from non-Python object not supported")
+ self.arg = self.arg.coerce_to_pyobject(env)
+
+ def yield_from_func(self, code):
+ code.globalstate.use_utility_code(UtilityCode.load_cached("GeneratorYieldFrom", "Coroutine.c"))
+ return "__Pyx_Generator_Yield_From"
+
+
+class AwaitExprNode(_YieldDelegationExprNode):
# 'await' expression node
#
# arg ExprNode the Awaitable value to await
@@ -9903,30 +9903,30 @@ class AwaitIterNextExprNode(AwaitExprNode):
#
# Breaks out of loop on StopAsyncIteration exception.
- def _generate_break(self, code):
+ def _generate_break(self, code):
code.globalstate.use_utility_code(UtilityCode.load_cached("StopAsyncIteration", "Coroutine.c"))
- code.putln("PyObject* exc_type = __Pyx_PyErr_Occurred();")
- code.putln("if (unlikely(exc_type && (exc_type == __Pyx_PyExc_StopAsyncIteration || ("
- " exc_type != PyExc_StopIteration && exc_type != PyExc_GeneratorExit &&"
- " __Pyx_PyErr_GivenExceptionMatches(exc_type, __Pyx_PyExc_StopAsyncIteration))))) {")
+ code.putln("PyObject* exc_type = __Pyx_PyErr_Occurred();")
+ code.putln("if (unlikely(exc_type && (exc_type == __Pyx_PyExc_StopAsyncIteration || ("
+ " exc_type != PyExc_StopIteration && exc_type != PyExc_GeneratorExit &&"
+ " __Pyx_PyErr_GivenExceptionMatches(exc_type, __Pyx_PyExc_StopAsyncIteration))))) {")
code.putln("PyErr_Clear();")
code.putln("break;")
code.putln("}")
-
- def fetch_iteration_result(self, code):
- assert code.break_label, "AwaitIterNextExprNode outside of 'async for' loop"
- self._generate_break(code)
+
+ def fetch_iteration_result(self, code):
+ assert code.break_label, "AwaitIterNextExprNode outside of 'async for' loop"
+ self._generate_break(code)
super(AwaitIterNextExprNode, self).fetch_iteration_result(code)
- def generate_sent_value_handling_code(self, code, value_cname):
- assert code.break_label, "AwaitIterNextExprNode outside of 'async for' loop"
- code.putln("if (unlikely(!%s)) {" % value_cname)
- self._generate_break(code)
- # all non-break exceptions are errors, as in parent class
- code.putln(code.error_goto(self.pos))
- code.putln("}")
+ def generate_sent_value_handling_code(self, code, value_cname):
+ assert code.break_label, "AwaitIterNextExprNode outside of 'async for' loop"
+ code.putln("if (unlikely(!%s)) {" % value_cname)
+ self._generate_break(code)
+ # all non-break exceptions are errors, as in parent class
+ code.putln(code.error_goto(self.pos))
+ code.putln("}")
+
-
class GlobalsExprNode(AtomicExprNode):
type = dict_type
is_temp = 1
@@ -10071,10 +10071,10 @@ class UnopNode(ExprNode):
def analyse_types(self, env):
self.operand = self.operand.analyse_types(env)
- if self.is_pythran_operation(env):
- self.type = PythranExpr(pythran_unaryop_type(self.operator, self.operand.type))
- self.is_temp = 1
- elif self.is_py_operation():
+ if self.is_pythran_operation(env):
+ self.type = PythranExpr(pythran_unaryop_type(self.operator, self.operand.type))
+ self.is_temp = 1
+ elif self.is_py_operation():
self.coerce_operand_to_pyobject(env)
self.type = py_object_type
self.is_temp = 1
@@ -10090,11 +10090,11 @@ class UnopNode(ExprNode):
def is_py_operation(self):
return self.operand.type.is_pyobject or self.operand.type.is_ctuple
- def is_pythran_operation(self, env):
- np_pythran = has_np_pythran(env)
- op_type = self.operand.type
- return np_pythran and (op_type.is_buffer or op_type.is_pythran_expr)
-
+ def is_pythran_operation(self, env):
+ np_pythran = has_np_pythran(env)
+ op_type = self.operand.type
+ return np_pythran and (op_type.is_buffer or op_type.is_pythran_expr)
+
def nogil_check(self, env):
if self.is_py_operation():
self.gil_error()
@@ -10107,15 +10107,15 @@ class UnopNode(ExprNode):
self.operand = self.operand.coerce_to_pyobject(env)
def generate_result_code(self, code):
- if self.type.is_pythran_expr:
- code.putln("// Pythran unaryop")
- code.putln("__Pyx_call_destructor(%s);" % self.result())
- code.putln("new (&%s) decltype(%s){%s%s};" % (
- self.result(),
- self.result(),
- self.operator,
- self.operand.pythran_result()))
- elif self.operand.type.is_pyobject:
+ if self.type.is_pythran_expr:
+ code.putln("// Pythran unaryop")
+ code.putln("__Pyx_call_destructor(%s);" % self.result())
+ code.putln("new (&%s) decltype(%s){%s%s};" % (
+ self.result(),
+ self.result(),
+ self.operator,
+ self.operand.pythran_result()))
+ elif self.operand.type.is_pyobject:
self.generate_py_operation_code(code)
elif self.is_temp:
if self.is_cpp_operation() and self.exception_check == '+':
@@ -10337,10 +10337,10 @@ class AmpersandNode(CUnopNode):
self.error("Taking address of non-lvalue (type %s)" % argtype)
return self
if argtype.is_pyobject:
- self.error("Cannot take address of Python %s" % (
- "variable '%s'" % self.operand.name if self.operand.is_name else
- "object attribute '%s'" % self.operand.attribute if self.operand.is_attribute else
- "object"))
+ self.error("Cannot take address of Python %s" % (
+ "variable '%s'" % self.operand.name if self.operand.is_name else
+ "object attribute '%s'" % self.operand.attribute if self.operand.is_attribute else
+ "object"))
return self
if not argtype.is_cpp_class or not self.type:
self.type = PyrexTypes.c_ptr_type(argtype)
@@ -10675,7 +10675,7 @@ class CythonArrayNode(ExprNode):
def allocate_temp_result(self, code):
if self.temp_code:
- raise RuntimeError("temp allocated multiple times")
+ raise RuntimeError("temp allocated multiple times")
self.temp_code = code.funcstate.allocate_temp(self.type, True)
@@ -10683,9 +10683,9 @@ class CythonArrayNode(ExprNode):
return self.get_cython_array_type(env)
def get_cython_array_type(self, env):
- cython_scope = env.global_scope().context.cython_scope
- cython_scope.load_cythonscope()
- return cython_scope.viewscope.lookup("array").type
+ cython_scope = env.global_scope().context.cython_scope
+ cython_scope.load_cythonscope()
+ return cython_scope.viewscope.lookup("array").type
def generate_result_code(self, code):
from . import Buffer
@@ -10794,7 +10794,7 @@ class SizeofTypeNode(SizeofNode):
for attr in path[1:]:
operand = AttributeNode(pos=self.pos, obj=operand, attribute=attr)
operand = AttributeNode(pos=self.pos, obj=operand, attribute=self.base_type.name)
- node = SizeofVarNode(self.pos, operand=operand).analyse_types(env)
+ node = SizeofVarNode(self.pos, operand=operand).analyse_types(env)
return node
if self.arg_type is None:
base_type = self.base_type.analyse(env)
@@ -10942,10 +10942,10 @@ class TypeofNode(ExprNode):
self.literal = literal.coerce_to_pyobject(env)
return self
- def analyse_as_type(self, env):
- self.operand = self.operand.analyse_types(env)
- return self.operand.type
-
+ def analyse_as_type(self, env):
+ self.operand = self.operand.analyse_types(env)
+ return self.operand.type
+
def may_be_none(self):
return False
@@ -11039,7 +11039,7 @@ class BinopNode(ExprNode):
def infer_type(self, env):
return self.result_type(self.operand1.infer_type(env),
- self.operand2.infer_type(env), env)
+ self.operand2.infer_type(env), env)
def analyse_types(self, env):
self.operand1 = self.operand1.analyse_types(env)
@@ -11048,15 +11048,15 @@ class BinopNode(ExprNode):
return self
def analyse_operation(self, env):
- if self.is_pythran_operation(env):
- self.type = self.result_type(self.operand1.type,
- self.operand2.type, env)
- assert self.type.is_pythran_expr
- self.is_temp = 1
- elif self.is_py_operation():
+ if self.is_pythran_operation(env):
+ self.type = self.result_type(self.operand1.type,
+ self.operand2.type, env)
+ assert self.type.is_pythran_expr
+ self.is_temp = 1
+ elif self.is_py_operation():
self.coerce_operands_to_pyobjects(env)
self.type = self.result_type(self.operand1.type,
- self.operand2.type, env)
+ self.operand2.type, env)
assert self.type.is_pyobject
self.is_temp = 1
elif self.is_cpp_operation():
@@ -11070,15 +11070,15 @@ class BinopNode(ExprNode):
def is_py_operation_types(self, type1, type2):
return type1.is_pyobject or type2.is_pyobject or type1.is_ctuple or type2.is_ctuple
- def is_pythran_operation(self, env):
- return self.is_pythran_operation_types(self.operand1.type, self.operand2.type, env)
-
- def is_pythran_operation_types(self, type1, type2, env):
- # Support only expr op supported_type, or supported_type op expr
- return has_np_pythran(env) and \
- (is_pythran_supported_operation_type(type1) and is_pythran_supported_operation_type(type2)) and \
- (is_pythran_expr(type1) or is_pythran_expr(type2))
-
+ def is_pythran_operation(self, env):
+ return self.is_pythran_operation_types(self.operand1.type, self.operand2.type, env)
+
+ def is_pythran_operation_types(self, type1, type2, env):
+ # Support only expr op supported_type, or supported_type op expr
+ return has_np_pythran(env) and \
+ (is_pythran_supported_operation_type(type1) and is_pythran_supported_operation_type(type2)) and \
+ (is_pythran_expr(type1) or is_pythran_expr(type2))
+
def is_cpp_operation(self):
return (self.operand1.type.is_cpp_class
or self.operand2.type.is_cpp_class)
@@ -11106,9 +11106,9 @@ class BinopNode(ExprNode):
self.operand2 = self.operand2.coerce_to(func_type.args[1].type, env)
self.type = func_type.return_type
- def result_type(self, type1, type2, env):
- if self.is_pythran_operation_types(type1, type2, env):
- return PythranExpr(pythran_binop_type(self.operator, type1, type2))
+ def result_type(self, type1, type2, env):
+ if self.is_pythran_operation_types(type1, type2, env):
+ return PythranExpr(pythran_binop_type(self.operator, type1, type2))
if self.is_py_operation_types(type1, type2):
if type2.is_string:
type2 = Builtin.bytes_type
@@ -11126,8 +11126,8 @@ class BinopNode(ExprNode):
if result_type is not None:
return result_type
return py_object_type
- elif type1.is_error or type2.is_error:
- return PyrexTypes.error_type
+ elif type1.is_error or type2.is_error:
+ return PyrexTypes.error_type
else:
return self.compute_c_result_type(type1, type2)
@@ -11150,9 +11150,9 @@ class BinopNode(ExprNode):
self.operand1.is_ephemeral() or self.operand2.is_ephemeral())
def generate_result_code(self, code):
- if self.type.is_pythran_expr:
- code.putln("// Pythran binop")
- code.putln("__Pyx_call_destructor(%s);" % self.result())
+ if self.type.is_pythran_expr:
+ code.putln("// Pythran binop")
+ code.putln("__Pyx_call_destructor(%s);" % self.result())
if self.operator == '**':
code.putln("new (&%s) decltype(%s){pythonic::numpy::functor::power{}(%s, %s)};" % (
self.result(),
@@ -11166,7 +11166,7 @@ class BinopNode(ExprNode):
self.operand1.pythran_result(),
self.operator,
self.operand2.pythran_result()))
- elif self.operand1.type.is_pyobject:
+ elif self.operand1.type.is_pyobject:
function = self.py_operation_function(code)
if self.operator == '**':
extra_args = ", Py_None"
@@ -11328,11 +11328,11 @@ class NumBinopNode(BinopNode):
self.operand2.result(),
self.overflow_bit_node.overflow_bit)
elif self.type.is_cpp_class or self.infix:
- if is_pythran_expr(self.type):
- result1, result2 = self.operand1.pythran_result(), self.operand2.pythran_result()
- else:
- result1, result2 = self.operand1.result(), self.operand2.result()
- return "(%s %s %s)" % (result1, self.operator, result2)
+ if is_pythran_expr(self.type):
+ result1, result2 = self.operand1.pythran_result(), self.operand2.pythran_result()
+ else:
+ result1, result2 = self.operand1.result(), self.operand2.result()
+ return "(%s %s %s)" % (result1, self.operator, result2)
else:
func = self.type.binary_op(self.operator)
if func is None:
@@ -11398,7 +11398,7 @@ class AddNode(NumBinopNode):
def infer_builtin_types_operation(self, type1, type2):
# b'abc' + 'abc' raises an exception in Py3,
# so we can safely infer the Py2 type for bytes here
- string_types = (bytes_type, bytearray_type, str_type, basestring_type, unicode_type)
+ string_types = (bytes_type, bytearray_type, str_type, basestring_type, unicode_type)
if type1 in string_types and type2 in string_types:
return string_types[max(string_types.index(type1),
string_types.index(type2))]
@@ -11462,7 +11462,7 @@ class MulNode(NumBinopNode):
def infer_builtin_types_operation(self, type1, type2):
# let's assume that whatever builtin type you multiply a string with
# will either return a string of the same type or fail with an exception
- string_types = (bytes_type, bytearray_type, str_type, basestring_type, unicode_type)
+ string_types = (bytes_type, bytearray_type, str_type, basestring_type, unicode_type)
if type1 in string_types and type2.is_builtin_type:
return type1
if type2 in string_types and type1.is_builtin_type:
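
`AddNode` infers the result of string concatenation from the ordered `string_types` tuple: the operand whose type sits further right wins, via `max` over tuple indices. `MulNode` reuses the same tuple to decide whether repetition preserves the operand type. The AddNode widening rule, rerun standalone with string stand-ins for the type objects:

    string_types = ('bytes', 'bytearray', 'str', 'basestring', 'unicode')

    def widest_string_type(t1, t2):
        return string_types[max(string_types.index(t1), string_types.index(t2))]

    assert widest_string_type('bytes', 'unicode') == 'unicode'
    assert widest_string_type('str', 'bytearray') == 'str'
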
@@ -11532,7 +11532,7 @@ class DivNode(NumBinopNode):
self._check_truedivision(env)
return self.result_type(
self.operand1.infer_type(env),
- self.operand2.infer_type(env), env)
+ self.operand2.infer_type(env), env)
def analyse_operation(self, env):
self._check_truedivision(env)
@@ -11663,20 +11663,20 @@ class DivNode(NumBinopNode):
self.operand2.result())
-_find_formatting_types = re.compile(
- br"%"
- br"(?:%|" # %%
- br"(?:\([^)]+\))?" # %(name)
- br"[-+#,0-9 ]*([a-z])" # %.2f etc.
- br")").findall
-
-# These format conversion types can never trigger a Unicode string conversion in Py2.
-_safe_bytes_formats = set([
- # Excludes 's' and 'r', which can generate non-bytes strings.
- b'd', b'i', b'o', b'u', b'x', b'X', b'e', b'E', b'f', b'F', b'g', b'G', b'c', b'b', b'a',
-])
-
-
+_find_formatting_types = re.compile(
+ br"%"
+ br"(?:%|" # %%
+ br"(?:\([^)]+\))?" # %(name)
+ br"[-+#,0-9 ]*([a-z])" # %.2f etc.
+ br")").findall
+
+# These format conversion types can never trigger a Unicode string conversion in Py2.
+_safe_bytes_formats = set([
+ # Excludes 's' and 'r', which can generate non-bytes strings.
+ b'd', b'i', b'o', b'u', b'x', b'X', b'e', b'E', b'f', b'F', b'g', b'G', b'c', b'b', b'a',
+])
+
+
class ModNode(DivNode):
# '%' operator.
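
The `_find_formatting_types` / `_safe_bytes_formats` pair restored above feeds `ModNode.infer_builtin_types_operation` (next hunk): if every conversion type in a `bytes`/`str` literal is "safe", the `%` result keeps the literal's own type; otherwise inference falls back to `basestring`. The check can be exercised standalone:

    import re

    _find_formatting_types = re.compile(
        br"%"
        br"(?:%|"                 # %%
        br"(?:\([^)]+\))?"        # %(name)
        br"[-+#,0-9 ]*([a-z])"    # %.2f etc.
        br")").findall

    _safe_bytes_formats = {
        b'd', b'i', b'o', b'u', b'x', b'X',
        b'e', b'E', b'f', b'F', b'g', b'G', b'c', b'b', b'a',
    }

    assert set(_find_formatting_types(b"%(count)d: %x")) <= _safe_bytes_formats  # keep bytes
    assert not set(_find_formatting_types(b"%s %d")) <= _safe_bytes_formats      # 's' is unsafe
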
@@ -11686,7 +11686,7 @@ class ModNode(DivNode):
or NumBinopNode.is_py_operation_types(self, type1, type2))
def infer_builtin_types_operation(self, type1, type2):
- # b'%s' % xyz raises an exception in Py3<3.5, so it's safe to infer the type for Py2 and later Py3's.
+ # b'%s' % xyz raises an exception in Py3<3.5, so it's safe to infer the type for Py2 and later Py3's.
if type1 is unicode_type:
# None + xyz may be implemented by RHS
if type2.is_builtin_type or not self.operand1.may_be_none():
@@ -11696,11 +11696,11 @@ class ModNode(DivNode):
return type2
elif type2.is_numeric:
return type1
- elif self.operand1.is_string_literal:
- if type1 is str_type or type1 is bytes_type:
- if set(_find_formatting_types(self.operand1.value)) <= _safe_bytes_formats:
- return type1
- return basestring_type
+ elif self.operand1.is_string_literal:
+ if type1 is str_type or type1 is bytes_type:
+ if set(_find_formatting_types(self.operand1.value)) <= _safe_bytes_formats:
+ return type1
+ return basestring_type
elif type1 is bytes_type and not type2.is_builtin_type:
return None # RHS might implement '%' operator differently in Py3
else:
@@ -11905,7 +11905,7 @@ class BoolBinopNode(ExprNode):
operator=self.operator,
operand1=operand1, operand2=operand2)
- def generate_bool_evaluation_code(self, code, final_result_temp, final_result_type, and_label, or_label, end_label, fall_through):
+ def generate_bool_evaluation_code(self, code, final_result_temp, final_result_type, and_label, or_label, end_label, fall_through):
code.mark_pos(self.pos)
outer_labels = (and_label, or_label)
@@ -11914,20 +11914,20 @@ class BoolBinopNode(ExprNode):
else:
my_label = or_label = code.new_label('next_or')
self.operand1.generate_bool_evaluation_code(
- code, final_result_temp, final_result_type, and_label, or_label, end_label, my_label)
+ code, final_result_temp, final_result_type, and_label, or_label, end_label, my_label)
and_label, or_label = outer_labels
code.put_label(my_label)
self.operand2.generate_bool_evaluation_code(
- code, final_result_temp, final_result_type, and_label, or_label, end_label, fall_through)
+ code, final_result_temp, final_result_type, and_label, or_label, end_label, fall_through)
def generate_evaluation_code(self, code):
self.allocate_temp_result(code)
- result_type = PyrexTypes.py_object_type if self.type.is_pyobject else self.type
+ result_type = PyrexTypes.py_object_type if self.type.is_pyobject else self.type
or_label = and_label = None
end_label = code.new_label('bool_binop_done')
- self.generate_bool_evaluation_code(code, self.result(), result_type, and_label, or_label, end_label, end_label)
+ self.generate_bool_evaluation_code(code, self.result(), result_type, and_label, or_label, end_label, end_label)
code.put_label(end_label)
gil_message = "Truth-testing Python object"
@@ -12012,7 +12012,7 @@ class BoolBinopResultNode(ExprNode):
test_result = self.arg.result()
return (test_result, self.arg.type.is_pyobject)
- def generate_bool_evaluation_code(self, code, final_result_temp, final_result_type, and_label, or_label, end_label, fall_through):
+ def generate_bool_evaluation_code(self, code, final_result_temp, final_result_type, and_label, or_label, end_label, fall_through):
code.mark_pos(self.pos)
# x => x
@@ -12055,7 +12055,7 @@ class BoolBinopResultNode(ExprNode):
code.putln("} else {")
self.value.generate_evaluation_code(code)
self.value.make_owned_reference(code)
- code.putln("%s = %s;" % (final_result_temp, self.value.result_as(final_result_type)))
+ code.putln("%s = %s;" % (final_result_temp, self.value.result_as(final_result_type)))
self.value.generate_post_assignment_code(code)
# disposal: {not (and_label and or_label) [else]}
self.arg.generate_disposal_code(code)
@@ -12275,22 +12275,22 @@ class CmpNode(object):
new_common_type = None
# catch general errors
- if (type1 == str_type and (type2.is_string or type2 in (bytes_type, unicode_type)) or
- type2 == str_type and (type1.is_string or type1 in (bytes_type, unicode_type))):
+ if (type1 == str_type and (type2.is_string or type2 in (bytes_type, unicode_type)) or
+ type2 == str_type and (type1.is_string or type1 in (bytes_type, unicode_type))):
error(self.pos, "Comparisons between bytes/unicode and str are not portable to Python 3")
new_common_type = error_type
# try to use numeric comparisons where possible
elif type1.is_complex or type2.is_complex:
- if (op not in ('==', '!=')
- and (type1.is_complex or type1.is_numeric)
- and (type2.is_complex or type2.is_numeric)):
+ if (op not in ('==', '!=')
+ and (type1.is_complex or type1.is_numeric)
+ and (type2.is_complex or type2.is_numeric)):
error(self.pos, "complex types are unordered")
new_common_type = error_type
elif type1.is_pyobject:
- new_common_type = Builtin.complex_type if type1.subtype_of(Builtin.complex_type) else py_object_type
+ new_common_type = Builtin.complex_type if type1.subtype_of(Builtin.complex_type) else py_object_type
elif type2.is_pyobject:
- new_common_type = Builtin.complex_type if type2.subtype_of(Builtin.complex_type) else py_object_type
+ new_common_type = Builtin.complex_type if type2.subtype_of(Builtin.complex_type) else py_object_type
else:
new_common_type = PyrexTypes.widest_numeric_type(type1, type2)
elif type1.is_numeric and type2.is_numeric:
@@ -12416,11 +12416,11 @@ class CmpNode(object):
self.special_bool_cmp_utility_code = UtilityCode.load_cached("PyDictContains", "ObjectHandling.c")
self.special_bool_cmp_function = "__Pyx_PyDict_ContainsTF"
return True
- elif self.operand2.type is Builtin.set_type:
- self.operand2 = self.operand2.as_none_safe_node("'NoneType' object is not iterable")
- self.special_bool_cmp_utility_code = UtilityCode.load_cached("PySetContains", "ObjectHandling.c")
- self.special_bool_cmp_function = "__Pyx_PySet_ContainsTF"
- return True
+ elif self.operand2.type is Builtin.set_type:
+ self.operand2 = self.operand2.as_none_safe_node("'NoneType' object is not iterable")
+ self.special_bool_cmp_utility_code = UtilityCode.load_cached("PySetContains", "ObjectHandling.c")
+ self.special_bool_cmp_function = "__Pyx_PySet_ContainsTF"
+ return True
elif self.operand2.type is Builtin.unicode_type:
self.operand2 = self.operand2.as_none_safe_node("'NoneType' object is not iterable")
self.special_bool_cmp_utility_code = UtilityCode.load_cached("PyUnicodeContains", "StringTools.c")
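
The new set branch above routes `x in s` for a builtin `set` through `__Pyx_PySet_ContainsTF`, after first guarding against `s` being None with the same message CPython raises. The guarded behaviour at the Python level:

    s = {1, 2}
    assert (1 in s) is True          # fast path: __Pyx_PySet_ContainsTF in the generated C

    try:
        1 in None                    # the as_none_safe_node check mirrors this failure
    except TypeError as exc:
        assert "not iterable" in str(exc)
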
@@ -12547,14 +12547,14 @@ class PrimaryCmpNode(ExprNode, CmpNode):
is_memslice_nonecheck = False
def infer_type(self, env):
- type1 = self.operand1.infer_type(env)
- type2 = self.operand2.infer_type(env)
-
- if is_pythran_expr(type1) or is_pythran_expr(type2):
- if is_pythran_supported_type(type1) and is_pythran_supported_type(type2):
- return PythranExpr(pythran_binop_type(self.operator, type1, type2))
-
- # TODO: implement this for other types.
+ type1 = self.operand1.infer_type(env)
+ type2 = self.operand2.infer_type(env)
+
+ if is_pythran_expr(type1) or is_pythran_expr(type2):
+ if is_pythran_supported_type(type1) and is_pythran_supported_type(type2):
+ return PythranExpr(pythran_binop_type(self.operator, type1, type2))
+
+ # TODO: implement this for other types.
return py_object_type
def type_dependencies(self, env):
@@ -12577,14 +12577,14 @@ class PrimaryCmpNode(ExprNode, CmpNode):
error(self.pos, "Cascading comparison not yet supported for cpp types.")
return self
- type1 = self.operand1.type
- type2 = self.operand2.type
- if is_pythran_expr(type1) or is_pythran_expr(type2):
- if is_pythran_supported_type(type1) and is_pythran_supported_type(type2):
- self.type = PythranExpr(pythran_binop_type(self.operator, type1, type2))
- self.is_pycmp = False
- return self
-
+ type1 = self.operand1.type
+ type2 = self.operand2.type
+ if is_pythran_expr(type1) or is_pythran_expr(type2):
+ if is_pythran_supported_type(type1) and is_pythran_supported_type(type2):
+ self.type = PythranExpr(pythran_binop_type(self.operator, type1, type2))
+ self.is_pycmp = False
+ return self
+
if self.analyse_memoryviewslice_comparison(env):
return self
@@ -12724,19 +12724,19 @@ class PrimaryCmpNode(ExprNode, CmpNode):
return self.operand1.check_const() and self.operand2.check_const()
def calculate_result_code(self):
- operand1, operand2 = self.operand1, self.operand2
- if operand1.type.is_complex:
+ operand1, operand2 = self.operand1, self.operand2
+ if operand1.type.is_complex:
if self.operator == "!=":
negation = "!"
else:
negation = ""
return "(%s%s(%s, %s))" % (
negation,
- operand1.type.binary_op('=='),
- operand1.result(),
- operand2.result())
+ operand1.type.binary_op('=='),
+ operand1.result(),
+ operand2.result())
elif self.is_c_string_contains():
- if operand2.type is unicode_type:
+ if operand2.type is unicode_type:
method = "__Pyx_UnicodeContainsUCS4"
else:
method = "__Pyx_BytesContains"
@@ -12747,18 +12747,18 @@ class PrimaryCmpNode(ExprNode, CmpNode):
return "(%s%s(%s, %s))" % (
negation,
method,
- operand2.result(),
- operand1.result())
- else:
- if is_pythran_expr(self.type):
- result1, result2 = operand1.pythran_result(), operand2.pythran_result()
- else:
- result1, result2 = operand1.result(), operand2.result()
- if self.is_memslice_nonecheck:
- if operand1.type.is_memoryviewslice:
- result1 = "((PyObject *) %s.memview)" % result1
- else:
- result2 = "((PyObject *) %s.memview)" % result2
+ operand2.result(),
+ operand1.result())
+ else:
+ if is_pythran_expr(self.type):
+ result1, result2 = operand1.pythran_result(), operand2.pythran_result()
+ else:
+ result1, result2 = operand1.result(), operand2.result()
+ if self.is_memslice_nonecheck:
+ if operand1.type.is_memoryviewslice:
+ result1 = "((PyObject *) %s.memview)" % result1
+ else:
+ result2 = "((PyObject *) %s.memview)" % result2
return "(%s %s %s)" % (
result1,
@@ -12979,12 +12979,12 @@ class CoerceToMemViewSliceNode(CoercionNode):
self.type.create_from_py_utility_code(env)
def generate_result_code(self, code):
- code.putln(self.type.from_py_call_code(
- self.arg.py_result(),
- self.result(),
- self.pos,
- code
- ))
+ code.putln(self.type.from_py_call_code(
+ self.arg.py_result(),
+ self.result(),
+ self.pos,
+ code
+ ))
class CastNode(CoercionNode):
@@ -13043,15 +13043,15 @@ class PyTypeTestNode(CoercionNode):
def nonlocally_immutable(self):
return self.arg.nonlocally_immutable()
- def reanalyse(self):
- if self.type != self.arg.type or not self.arg.is_temp:
- return self
- if not self.type.typeobj_is_available():
- return self
- if self.arg.may_be_none() and self.notnone:
- return self.arg.as_none_safe_node("Cannot convert NoneType to %.200s" % self.type.name)
- return self.arg
-
+ def reanalyse(self):
+ if self.type != self.arg.type or not self.arg.is_temp:
+ return self
+ if not self.type.typeobj_is_available():
+ return self
+ if self.arg.may_be_none() and self.notnone:
+ return self.arg.as_none_safe_node("Cannot convert NoneType to %.200s" % self.type.name)
+ return self.arg
+
def calculate_constant_result(self):
# FIXME
pass
@@ -13100,7 +13100,7 @@ class NoneCheckNode(CoercionNode):
is_nonecheck = True
def __init__(self, arg, exception_type_cname, exception_message,
- exception_format_args=()):
+ exception_format_args=()):
CoercionNode.__init__(self, arg)
self.type = arg.type
self.result_ctype = arg.ctype()
@@ -13136,19 +13136,19 @@ class NoneCheckNode(CoercionNode):
else:
raise Exception("unsupported type")
- @classmethod
- def generate(cls, arg, code, exception_message,
- exception_type_cname="PyExc_TypeError", exception_format_args=(), in_nogil_context=False):
- node = cls(arg, exception_type_cname, exception_message, exception_format_args)
- node.in_nogil_context = in_nogil_context
- node.put_nonecheck(code)
-
- @classmethod
- def generate_if_needed(cls, arg, code, exception_message,
- exception_type_cname="PyExc_TypeError", exception_format_args=(), in_nogil_context=False):
- if arg.may_be_none():
- cls.generate(arg, code, exception_message, exception_type_cname, exception_format_args, in_nogil_context)
-
+ @classmethod
+ def generate(cls, arg, code, exception_message,
+ exception_type_cname="PyExc_TypeError", exception_format_args=(), in_nogil_context=False):
+ node = cls(arg, exception_type_cname, exception_message, exception_format_args)
+ node.in_nogil_context = in_nogil_context
+ node.put_nonecheck(code)
+
+ @classmethod
+ def generate_if_needed(cls, arg, code, exception_message,
+ exception_type_cname="PyExc_TypeError", exception_format_args=(), in_nogil_context=False):
+ if arg.may_be_none():
+ cls.generate(arg, code, exception_message, exception_type_cname, exception_format_args, in_nogil_context)
+
def put_nonecheck(self, code):
code.putln(
"if (unlikely(%s == Py_None)) {" % self.condition())
@@ -13323,15 +13323,15 @@ class CoerceFromPyTypeNode(CoercionNode):
return (self.type.is_ptr and not self.type.is_array) and self.arg.is_ephemeral()
def generate_result_code(self, code):
- from_py_function = None
- # for certain source types, we can do better than the generic coercion
- if self.type.is_string and self.arg.type is bytes_type:
- if self.type.from_py_function.startswith('__Pyx_PyObject_As'):
- from_py_function = '__Pyx_PyBytes' + self.type.from_py_function[len('__Pyx_PyObject'):]
- NoneCheckNode.generate_if_needed(self.arg, code, "expected bytes, NoneType found")
-
+ from_py_function = None
+ # for certain source types, we can do better than the generic coercion
+ if self.type.is_string and self.arg.type is bytes_type:
+ if self.type.from_py_function.startswith('__Pyx_PyObject_As'):
+ from_py_function = '__Pyx_PyBytes' + self.type.from_py_function[len('__Pyx_PyObject'):]
+ NoneCheckNode.generate_if_needed(self.arg, code, "expected bytes, NoneType found")
+
code.putln(self.type.from_py_call_code(
- self.arg.py_result(), self.result(), self.pos, code, from_py_function=from_py_function))
+ self.arg.py_result(), self.result(), self.pos, code, from_py_function=from_py_function))
if self.type.is_pyobject:
code.put_gotref(self.py_result())
@@ -13351,7 +13351,7 @@ class CoerceToBooleanNode(CoercionNode):
Builtin.set_type: 'PySet_GET_SIZE',
Builtin.frozenset_type: 'PySet_GET_SIZE',
Builtin.bytes_type: 'PyBytes_GET_SIZE',
- Builtin.bytearray_type: 'PyByteArray_GET_SIZE',
+ Builtin.bytearray_type: 'PyByteArray_GET_SIZE',
Builtin.unicode_type: '__Pyx_PyUnicode_IS_TRUE',
}
@@ -13380,9 +13380,9 @@ class CoerceToBooleanNode(CoercionNode):
return
test_func = self._special_builtins.get(self.arg.type)
if test_func is not None:
- checks = ["(%s != Py_None)" % self.arg.py_result()] if self.arg.may_be_none() else []
- checks.append("(%s(%s) != 0)" % (test_func, self.arg.py_result()))
- code.putln("%s = %s;" % (self.result(), '&&'.join(checks)))
+ checks = ["(%s != Py_None)" % self.arg.py_result()] if self.arg.may_be_none() else []
+ checks.append("(%s(%s) != 0)" % (test_func, self.arg.py_result()))
+ code.putln("%s = %s;" % (self.result(), '&&'.join(checks)))
else:
code.putln(
"%s = __Pyx_PyObject_IsTrue(%s); %s" % (
diff --git a/contrib/tools/cython/Cython/Compiler/FusedNode.py b/contrib/tools/cython/Cython/Compiler/FusedNode.py
index e86c0d30a7..26d6ffd3d6 100644
--- a/contrib/tools/cython/Cython/Compiler/FusedNode.py
+++ b/contrib/tools/cython/Cython/Compiler/FusedNode.py
@@ -6,7 +6,7 @@ from . import (ExprNodes, PyrexTypes, MemoryView,
ParseTreeTransforms, StringEncoding, Errors)
from .ExprNodes import CloneNode, ProxyNode, TupleNode
from .Nodes import FuncDefNode, CFuncDefNode, StatListNode, DefNode
-from ..Utils import OrderedSet
+from ..Utils import OrderedSet
class FusedCFuncDefNode(StatListNode):
@@ -136,27 +136,27 @@ class FusedCFuncDefNode(StatListNode):
fused_types = self.node.type.get_fused_types()
self.fused_compound_types = fused_types
- new_cfunc_entries = []
+ new_cfunc_entries = []
for cname, fused_to_specific in permutations:
copied_node = copy.deepcopy(self.node)
- # Make the types in our CFuncType specific.
+ # Make the types in our CFuncType specific.
type = copied_node.type.specialize(fused_to_specific)
entry = copied_node.entry
- type.specialize_entry(entry, cname)
-
- # Reuse existing Entries (e.g. from .pxd files).
- for i, orig_entry in enumerate(env.cfunc_entries):
- if entry.cname == orig_entry.cname and type.same_as_resolved_type(orig_entry.type):
- copied_node.entry = env.cfunc_entries[i]
- if not copied_node.entry.func_cname:
- copied_node.entry.func_cname = entry.func_cname
- entry = copied_node.entry
- type = entry.type
- break
- else:
- new_cfunc_entries.append(entry)
-
+ type.specialize_entry(entry, cname)
+
+ # Reuse existing Entries (e.g. from .pxd files).
+ for i, orig_entry in enumerate(env.cfunc_entries):
+ if entry.cname == orig_entry.cname and type.same_as_resolved_type(orig_entry.type):
+ copied_node.entry = env.cfunc_entries[i]
+ if not copied_node.entry.func_cname:
+ copied_node.entry.func_cname = entry.func_cname
+ entry = copied_node.entry
+ type = entry.type
+ break
+ else:
+ new_cfunc_entries.append(entry)
+
copied_node.type = type
entry.type, type.entry = type, entry
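The entry-reuse loop restored here leans on Python's for/else: the else branch runs only when the loop completed without break, i.e. when no existing .pxd entry matched and the freshly specialised one must be registered. Isolated into a toy helper (names hypothetical):

    def reuse_or_collect(existing_entries, entry, matches):
        # Returns the entry to use plus a list of genuinely new entries.
        new_entries = []
        for orig in existing_entries:
            if matches(orig, entry):
                entry = orig           # reuse the declaration from the .pxd
                break
        else:
            new_entries.append(entry)  # no match: register the specialisation
        return entry, new_entries

    used, new = reuse_or_collect(['f_int'], 'f_int', lambda a, b: a == b)
    assert used == 'f_int' and new == []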
@@ -189,14 +189,14 @@ class FusedCFuncDefNode(StatListNode):
if not self.replace_fused_typechecks(copied_node):
break
- # replace old entry with new entries
- try:
- cindex = env.cfunc_entries.index(self.node.entry)
- except ValueError:
- env.cfunc_entries.extend(new_cfunc_entries)
- else:
- env.cfunc_entries[cindex:cindex+1] = new_cfunc_entries
-
+ # replace old entry with new entries
+ try:
+ cindex = env.cfunc_entries.index(self.node.entry)
+ except ValueError:
+ env.cfunc_entries.extend(new_cfunc_entries)
+ else:
+ env.cfunc_entries[cindex:cindex+1] = new_cfunc_entries
+
if orig_py_func:
self.py_func = self.make_fused_cpdef(orig_py_func, env,
is_def=False)
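Replacing the fused stub with its specialisations uses try/except/else around list.index: if the original entry was registered, a slice assignment splices the new entries in at the same position; otherwise they are simply appended. For instance (hypothetical helper):

    def replace_entry(entries, old, new_entries):
        try:
            i = entries.index(old)
        except ValueError:
            entries.extend(new_entries)     # original never registered
        else:
            entries[i:i + 1] = new_entries  # splice in place, preserving order
        return entries

    assert replace_entry(['a', 'fused', 'z'], 'fused', ['f1', 'f2']) == ['a', 'f1', 'f2', 'z']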
@@ -225,7 +225,7 @@ class FusedCFuncDefNode(StatListNode):
"""
Create a new local scope for the copied node and append it to
self.nodes. A new local scope is needed because the arguments with the
- fused types are already in the local scope, and we need the specialized
+ fused types are already in the local scope, and we need the specialized
entries created after analyse_declarations on each specialized version
of the (CFunc)DefNode.
f2s is a dict mapping each fused type to its specialized version
@@ -276,7 +276,7 @@ class FusedCFuncDefNode(StatListNode):
def _fused_instance_checks(self, normal_types, pyx_code, env):
"""
- Generate Cython code for instance checks, matching an object to
+ Generate Cython code for instance checks, matching an object to
specialized types.
"""
for specialized_type in normal_types:
@@ -331,22 +331,22 @@ class FusedCFuncDefNode(StatListNode):
match = "dest_sig[{{dest_sig_idx}}] = '{{specialized_type_name}}'"
no_match = "dest_sig[{{dest_sig_idx}}] = None"
- def _buffer_check_numpy_dtype(self, pyx_code, specialized_buffer_types, pythran_types):
+ def _buffer_check_numpy_dtype(self, pyx_code, specialized_buffer_types, pythran_types):
"""
Match a numpy dtype object to the individual specializations.
"""
self._buffer_check_numpy_dtype_setup_cases(pyx_code)
- for specialized_type in pythran_types+specialized_buffer_types:
- final_type = specialized_type
- if specialized_type.is_pythran_expr:
- specialized_type = specialized_type.org_buffer
+ for specialized_type in pythran_types+specialized_buffer_types:
+ final_type = specialized_type
+ if specialized_type.is_pythran_expr:
+ specialized_type = specialized_type.org_buffer
dtype = specialized_type.dtype
pyx_code.context.update(
itemsize_match=self._sizeof_dtype(dtype) + " == itemsize",
signed_match="not (%s_is_signed ^ dtype_signed)" % self._dtype_name(dtype),
dtype=dtype,
- specialized_type_name=final_type.specialization_string)
+ specialized_type_name=final_type.specialization_string)
dtypes = [
(dtype.is_int, pyx_code.dtype_int),
@@ -361,11 +361,11 @@ class FusedCFuncDefNode(StatListNode):
if dtype.is_int:
cond += ' and {{signed_match}}'
- if final_type.is_pythran_expr:
- cond += ' and arg_is_pythran_compatible'
-
+ if final_type.is_pythran_expr:
+ cond += ' and arg_is_pythran_compatible'
+
if codewriter.indenter("if %s:" % cond):
- #codewriter.putln("print 'buffer match found based on numpy dtype'")
+ #codewriter.putln("print 'buffer match found based on numpy dtype'")
codewriter.putln(self.match)
codewriter.putln("break")
codewriter.dedent()
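Each dtype specialisation gets a runtime condition assembled incrementally over the template placeholders: itemsize must agree, integer dtypes additionally compare signedness, and Pythran expression types require layout compatibility. A sketch of that assembly (function name hypothetical):

    def build_match_condition(base_cond, dtype_is_int, is_pythran_expr):
        cond = base_cond
        if dtype_is_int:
            # signedness only matters for integer dtypes
            cond += ' and {{signed_match}}'
        if is_pythran_expr:
            cond += ' and arg_is_pythran_compatible'
        return cond

    assert build_match_condition('{{itemsize_match}}', True, True) == \
        '{{itemsize_match}} and {{signed_match}} and arg_is_pythran_compatible'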
@@ -390,7 +390,7 @@ class FusedCFuncDefNode(StatListNode):
coerce_from_py_func=memslice_type.from_py_function,
dtype=dtype)
decl_code.putln(
- "{{memviewslice_cname}} {{coerce_from_py_func}}(object, int)")
+ "{{memviewslice_cname}} {{coerce_from_py_func}}(object, int)")
pyx_code.context.update(
specialized_type_name=specialized_type.specialization_string,
@@ -400,7 +400,7 @@ class FusedCFuncDefNode(StatListNode):
u"""
# try {{dtype}}
if itemsize == -1 or itemsize == {{sizeof_dtype}}:
- memslice = {{coerce_from_py_func}}(arg, 0)
+ memslice = {{coerce_from_py_func}}(arg, 0)
if memslice.memview:
__PYX_XDEC_MEMVIEW(&memslice, 1)
# print 'found a match for the buffer through format parsing'
@@ -410,7 +410,7 @@ class FusedCFuncDefNode(StatListNode):
__pyx_PyErr_Clear()
""" % self.match)
- def _buffer_checks(self, buffer_types, pythran_types, pyx_code, decl_code, env):
+ def _buffer_checks(self, buffer_types, pythran_types, pyx_code, decl_code, env):
"""
Generate Cython code to match objects to buffer specializations.
First try to get a numpy dtype object and match it against the individual
@@ -421,11 +421,11 @@ class FusedCFuncDefNode(StatListNode):
# The first thing to find a match in this loop breaks out of the loop
pyx_code.put_chunk(
u"""
- """ + (u"arg_is_pythran_compatible = False" if pythran_types else u"") + u"""
+ """ + (u"arg_is_pythran_compatible = False" if pythran_types else u"") + u"""
if ndarray is not None:
if isinstance(arg, ndarray):
dtype = arg.dtype
- """ + (u"arg_is_pythran_compatible = True" if pythran_types else u"") + u"""
+ """ + (u"arg_is_pythran_compatible = True" if pythran_types else u"") + u"""
elif __pyx_memoryview_check(arg):
arg_base = arg.base
if isinstance(arg_base, ndarray):
@@ -439,39 +439,39 @@ class FusedCFuncDefNode(StatListNode):
if dtype is not None:
itemsize = dtype.itemsize
kind = ord(dtype.kind)
- dtype_signed = kind == 'i'
- """)
- pyx_code.indent(2)
- if pythran_types:
- pyx_code.put_chunk(
- u"""
- # Pythran only supports the endianness of the current compiler
- byteorder = dtype.byteorder
- if byteorder == "<" and not __Pyx_Is_Little_Endian():
- arg_is_pythran_compatible = False
- elif byteorder == ">" and __Pyx_Is_Little_Endian():
- arg_is_pythran_compatible = False
- if arg_is_pythran_compatible:
- cur_stride = itemsize
- shape = arg.shape
- strides = arg.strides
- for i in range(arg.ndim-1, -1, -1):
- if (<Py_ssize_t>strides[i]) != cur_stride:
- arg_is_pythran_compatible = False
- break
- cur_stride *= <Py_ssize_t> shape[i]
- else:
- arg_is_pythran_compatible = not (arg.flags.f_contiguous and (<Py_ssize_t>arg.ndim) > 1)
- """)
+ dtype_signed = kind == 'i'
+ """)
+ pyx_code.indent(2)
+ if pythran_types:
+ pyx_code.put_chunk(
+ u"""
+ # Pythran only supports the endianness of the current compiler
+ byteorder = dtype.byteorder
+ if byteorder == "<" and not __Pyx_Is_Little_Endian():
+ arg_is_pythran_compatible = False
+ elif byteorder == ">" and __Pyx_Is_Little_Endian():
+ arg_is_pythran_compatible = False
+ if arg_is_pythran_compatible:
+ cur_stride = itemsize
+ shape = arg.shape
+ strides = arg.strides
+ for i in range(arg.ndim-1, -1, -1):
+ if (<Py_ssize_t>strides[i]) != cur_stride:
+ arg_is_pythran_compatible = False
+ break
+ cur_stride *= <Py_ssize_t> shape[i]
+ else:
+ arg_is_pythran_compatible = not (arg.flags.f_contiguous and (<Py_ssize_t>arg.ndim) > 1)
+ """)
pyx_code.named_insertion_point("numpy_dtype_checks")
- self._buffer_check_numpy_dtype(pyx_code, buffer_types, pythran_types)
+ self._buffer_check_numpy_dtype(pyx_code, buffer_types, pythran_types)
pyx_code.dedent(2)
for specialized_type in buffer_types:
self._buffer_parse_format_string_check(
pyx_code, decl_code, specialized_type, env)
- def _buffer_declarations(self, pyx_code, decl_code, all_buffer_types, pythran_types):
+ def _buffer_declarations(self, pyx_code, decl_code, all_buffer_types, pythran_types):
"""
If we have any buffer specializations, write out some variable
declarations and imports.
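The Pythran compatibility block above walks strides from the innermost dimension outwards, multiplying up the expected C-contiguous stride and bailing out on the first mismatch. The same test as a standalone function (hypothetical name):

    def is_c_contiguous(shape, strides, itemsize):
        # Each stride must equal the running product of itemsize and the
        # extents of all inner dimensions.
        cur_stride = itemsize
        for extent, stride in zip(reversed(shape), reversed(strides)):
            if stride != cur_stride:
                return False
            cur_stride *= extent
        return True

    assert is_c_contiguous((2, 3), (24, 8), 8)      # float64, C layout
    assert not is_c_contiguous((2, 3), (8, 16), 8)  # Fortran layout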
@@ -495,16 +495,16 @@ class FusedCFuncDefNode(StatListNode):
itemsize = -1
""")
- if pythran_types:
- pyx_code.local_variable_declarations.put_chunk(u"""
- cdef bint arg_is_pythran_compatible
- cdef Py_ssize_t cur_stride
- """)
-
+ if pythran_types:
+ pyx_code.local_variable_declarations.put_chunk(u"""
+ cdef bint arg_is_pythran_compatible
+ cdef Py_ssize_t cur_stride
+ """)
+
pyx_code.imports.put_chunk(
u"""
cdef type ndarray
- ndarray = __Pyx_ImportNumPyArrayTypeIfAvailable()
+ ndarray = __Pyx_ImportNumPyArrayTypeIfAvailable()
""")
seen_typedefs = set()
@@ -527,7 +527,7 @@ class FusedCFuncDefNode(StatListNode):
pyx_code.local_variable_declarations.put_chunk(
u"""
cdef bint {{dtype_name}}_is_signed
- {{dtype_name}}_is_signed = not (<{{dtype_type}}> -1 > 0)
+ {{dtype_name}}_is_signed = not (<{{dtype_type}}> -1 > 0)
""")
def _split_fused_types(self, arg):
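The generated '{{dtype_name}}_is_signed = not (<{{dtype_type}}> -1 > 0)' line above detects signedness at the C level: casting -1 into an unsigned type wraps around to its maximum value, which is greater than zero. The ctypes equivalent makes the trick visible from Python:

    import ctypes

    def is_signed(ctype):
        # Unsigned types wrap -1 to their maximum, so the comparison fails.
        return not (ctype(-1).value > 0)

    assert is_signed(ctypes.c_int)
    assert not is_signed(ctypes.c_uint)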
@@ -540,7 +540,7 @@ class FusedCFuncDefNode(StatListNode):
specialized_types.sort()
seen_py_type_names = set()
- normal_types, buffer_types, pythran_types = [], [], []
+ normal_types, buffer_types, pythran_types = [], [], []
has_object_fallback = False
for specialized_type in specialized_types:
py_type_name = specialized_type.py_type_name()
@@ -552,12 +552,12 @@ class FusedCFuncDefNode(StatListNode):
has_object_fallback = True
else:
normal_types.append(specialized_type)
- elif specialized_type.is_pythran_expr:
- pythran_types.append(specialized_type)
+ elif specialized_type.is_pythran_expr:
+ pythran_types.append(specialized_type)
elif specialized_type.is_buffer or specialized_type.is_memoryviewslice:
buffer_types.append(specialized_type)
- return normal_types, buffer_types, pythran_types, has_object_fallback
+ return normal_types, buffer_types, pythran_types, has_object_fallback
def _unpack_argument(self, pyx_code):
pyx_code.put_chunk(
@@ -565,18 +565,18 @@ class FusedCFuncDefNode(StatListNode):
# PROCESSING ARGUMENT {{arg_tuple_idx}}
if {{arg_tuple_idx}} < len(<tuple>args):
arg = (<tuple>args)[{{arg_tuple_idx}}]
- elif kwargs is not None and '{{arg.name}}' in <dict>kwargs:
+ elif kwargs is not None and '{{arg.name}}' in <dict>kwargs:
arg = (<dict>kwargs)['{{arg.name}}']
else:
{{if arg.default}}
arg = (<tuple>defaults)[{{default_idx}}]
{{else}}
- {{if arg_tuple_idx < min_positional_args}}
- raise TypeError("Expected at least %d argument%s, got %d" % (
- {{min_positional_args}}, {{'"s"' if min_positional_args != 1 else '""'}}, len(<tuple>args)))
- {{else}}
- raise TypeError("Missing keyword-only argument: '%s'" % "{{arg.default}}")
- {{endif}}
+ {{if arg_tuple_idx < min_positional_args}}
+ raise TypeError("Expected at least %d argument%s, got %d" % (
+ {{min_positional_args}}, {{'"s"' if min_positional_args != 1 else '""'}}, len(<tuple>args)))
+ {{else}}
+ raise TypeError("Missing keyword-only argument: '%s'" % "{{arg.default}}")
+ {{endif}}
{{endif}}
""")
@@ -596,10 +596,10 @@ class FusedCFuncDefNode(StatListNode):
'memviewslice_cname': MemoryView.memviewslice_cname,
'func_args': self.node.args,
'n_fused': len(fused_types),
- 'min_positional_args':
- self.node.num_required_args - self.node.num_required_kw_args
- if is_def else
- sum(1 for arg in self.node.args if arg.default is None),
+ 'min_positional_args':
+ self.node.num_required_args - self.node.num_required_kw_args
+ if is_def else
+ sum(1 for arg in self.node.args if arg.default is None),
'name': orig_py_func.entry.name,
}
@@ -609,8 +609,8 @@ class FusedCFuncDefNode(StatListNode):
u"""
cdef extern from *:
void __pyx_PyErr_Clear "PyErr_Clear" ()
- type __Pyx_ImportNumPyArrayTypeIfAvailable()
- int __Pyx_Is_Little_Endian()
+ type __Pyx_ImportNumPyArrayTypeIfAvailable()
+ int __Pyx_Is_Little_Endian()
""")
decl_code.indent()
@@ -622,22 +622,22 @@ class FusedCFuncDefNode(StatListNode):
dest_sig = [None] * {{n_fused}}
- if kwargs is not None and not kwargs:
- kwargs = None
+ if kwargs is not None and not kwargs:
+ kwargs = None
cdef Py_ssize_t i
# instance check body
""")
-
+
pyx_code.indent() # indent following code to function body
pyx_code.named_insertion_point("imports")
- pyx_code.named_insertion_point("func_defs")
+ pyx_code.named_insertion_point("func_defs")
pyx_code.named_insertion_point("local_variable_declarations")
fused_index = 0
default_idx = 0
- all_buffer_types = OrderedSet()
+ all_buffer_types = OrderedSet()
seen_fused_types = set()
for i, arg in enumerate(self.node.args):
if arg.type.is_fused:
@@ -657,16 +657,16 @@ class FusedCFuncDefNode(StatListNode):
default_idx=default_idx,
)
- normal_types, buffer_types, pythran_types, has_object_fallback = self._split_fused_types(arg)
+ normal_types, buffer_types, pythran_types, has_object_fallback = self._split_fused_types(arg)
self._unpack_argument(pyx_code)
# 'unrolled' loop, first match breaks out of it
if pyx_code.indenter("while 1:"):
if normal_types:
self._fused_instance_checks(normal_types, pyx_code, env)
- if buffer_types or pythran_types:
- env.use_utility_code(Code.UtilityCode.load_cached("IsLittleEndian", "ModuleSetupCode.c"))
- self._buffer_checks(buffer_types, pythran_types, pyx_code, decl_code, env)
+ if buffer_types or pythran_types:
+ env.use_utility_code(Code.UtilityCode.load_cached("IsLittleEndian", "ModuleSetupCode.c"))
+ self._buffer_checks(buffer_types, pythran_types, pyx_code, decl_code, env)
if has_object_fallback:
pyx_code.context.update(specialized_type_name='object')
pyx_code.putln(self.match)
@@ -677,26 +677,26 @@ class FusedCFuncDefNode(StatListNode):
fused_index += 1
all_buffer_types.update(buffer_types)
- all_buffer_types.update(ty.org_buffer for ty in pythran_types)
+ all_buffer_types.update(ty.org_buffer for ty in pythran_types)
if arg.default:
default_idx += 1
if all_buffer_types:
- self._buffer_declarations(pyx_code, decl_code, all_buffer_types, pythran_types)
+ self._buffer_declarations(pyx_code, decl_code, all_buffer_types, pythran_types)
env.use_utility_code(Code.UtilityCode.load_cached("Import", "ImportExport.c"))
- env.use_utility_code(Code.UtilityCode.load_cached("ImportNumPyArray", "ImportExport.c"))
+ env.use_utility_code(Code.UtilityCode.load_cached("ImportNumPyArray", "ImportExport.c"))
pyx_code.put_chunk(
u"""
candidates = []
for sig in <dict>signatures:
match_found = False
- src_sig = sig.strip('()').split('|')
- for i in range(len(dest_sig)):
- dst_type = dest_sig[i]
+ src_sig = sig.strip('()').split('|')
+ for i in range(len(dest_sig)):
+ dst_type = dest_sig[i]
if dst_type is not None:
- if src_sig[i] == dst_type:
+ if src_sig[i] == dst_type:
match_found = True
else:
match_found = False
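Signature selection splits each registered key of the form '(T1|T2|...)' and compares it slot by slot against dest_sig, where None marks a parameter whose fused type could not be resolved and therefore matches anything. Roughly (hypothetical function, condensing the loop above):

    def match_signatures(signatures, dest_sig):
        candidates = []
        for sig in signatures:
            src_sig = sig.strip('()').split('|')
            match_found = False
            for src, dst in zip(src_sig, dest_sig):
                if dst is not None:          # None slots match anything
                    if src == dst:
                        match_found = True
                    else:
                        match_found = False
                        break
            if match_found:
                candidates.append(sig)
        return candidates

    assert match_signatures(['(int|float)', '(double|float)'], ['int', None]) == ['(int|float)']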
diff --git a/contrib/tools/cython/Cython/Compiler/Main.py b/contrib/tools/cython/Cython/Compiler/Main.py
index df53e9ba01..af873843b5 100644
--- a/contrib/tools/cython/Cython/Compiler/Main.py
+++ b/contrib/tools/cython/Cython/Compiler/Main.py
@@ -9,8 +9,8 @@ import re
import sys
import io
-if sys.version_info[:2] < (2, 6) or (3, 0) <= sys.version_info[:2] < (3, 3):
- sys.stderr.write("Sorry, Cython requires Python 2.6+ or 3.3+, found %d.%d\n" % tuple(sys.version_info[:2]))
+if sys.version_info[:2] < (2, 6) or (3, 0) <= sys.version_info[:2] < (3, 3):
+ sys.stderr.write("Sorry, Cython requires Python 2.6+ or 3.3+, found %d.%d\n" % tuple(sys.version_info[:2]))
sys.exit(1)
try:
@@ -23,7 +23,7 @@ except ImportError:
# conditional metaclass. These options are processed by CmdLine called from
# main() in this file.
# import Parsing
-from . import Errors
+from . import Errors
from .StringEncoding import EncodedString
from .Scanning import PyrexScanner, FileSourceDescriptor
from .Errors import PyrexError, CompileError, error, warning
@@ -40,7 +40,7 @@ verbose = 0
standard_include_path = os.path.abspath(os.path.join(os.path.dirname(__file__),
os.path.pardir, 'Includes'))
-
+
class CompilationData(object):
# Bundles the information that is passed from transform to transform.
# (For now, this is only)
@@ -55,7 +55,7 @@ class CompilationData(object):
# result CompilationResult
pass
-
+
class Context(object):
# This class encapsulates the context needed for compiling
# one or more Cython implementation files along with their
@@ -226,8 +226,8 @@ class Context(object):
rel_path = module_name.replace('.', os.sep) + os.path.splitext(pxd_pathname)[1]
if not pxd_pathname.endswith(rel_path):
rel_path = pxd_pathname # safety measure to prevent printing incorrect paths
- if Options.source_root:
- rel_path = os.path.relpath(pxd_pathname, Options.source_root)
+ if Options.source_root:
+ rel_path = os.path.relpath(pxd_pathname, Options.source_root)
source_desc = FileSourceDescriptor(pxd_pathname, rel_path)
err, result = self.process_pxd(source_desc, scope, qualified_name)
if err:
@@ -250,7 +250,7 @@ class Context(object):
pxd = self.search_include_directories(qualified_name, ".pxd", pos, sys_path=sys_path)
if pxd is None: # XXX Keep this until Includes/Deprecated is removed
if (qualified_name.startswith('python') or
- qualified_name in ('stdlib', 'stdio', 'stl')):
+ qualified_name in ('stdlib', 'stdio', 'stl')):
standard_include_path = os.path.abspath(os.path.normpath(
os.path.join(os.path.dirname(__file__), os.path.pardir, 'Includes')))
deprecated_include_path = os.path.join(standard_include_path, 'Deprecated')
@@ -372,7 +372,7 @@ class Context(object):
from ..Parser import ConcreteSyntaxTree
except ImportError:
raise RuntimeError(
- "Formal grammar can only be used with compiled Cython with an available pgen.")
+ "Formal grammar can only be used with compiled Cython with an available pgen.")
ConcreteSyntaxTree.p_module(source_filename)
except UnicodeDecodeError as e:
#import traceback
@@ -442,7 +442,7 @@ class Context(object):
pass
result.c_file = None
-
+
def get_output_filename(source_filename, cwd, options):
if options.cplus:
c_suffix = ".cpp"
@@ -458,7 +458,7 @@ def get_output_filename(source_filename, cwd, options):
else:
return suggested_file_name
-
+
def create_default_resultobj(compilation_source, options):
result = CompilationResult()
result.main_source_file = compilation_source.source_desc.filename
@@ -469,7 +469,7 @@ def create_default_resultobj(compilation_source, options):
result.embedded_metadata = options.embedded_metadata
return result
-
+
def run_pipeline(source, options, full_module_name=None, context=None):
from . import Pipeline
@@ -491,8 +491,8 @@ def run_pipeline(source, options, full_module_name=None, context=None):
rel_path = source # safety measure to prevent printing incorrect paths
else:
rel_path = abs_path
- if Options.source_root:
- rel_path = os.path.relpath(abs_path, Options.source_root)
+ if Options.source_root:
+ rel_path = os.path.relpath(abs_path, Options.source_root)
source_desc = FileSourceDescriptor(abs_path, rel_path)
source = CompilationSource(source_desc, full_module_name, cwd)
@@ -519,15 +519,15 @@ def run_pipeline(source, options, full_module_name=None, context=None):
return result
-# ------------------------------------------------------------------------
+# ------------------------------------------------------------------------
#
# Main Python entry points
#
-# ------------------------------------------------------------------------
+# ------------------------------------------------------------------------
class CompilationSource(object):
"""
- Contains the data necessary to start up a compilation pipeline for
+ Contains the data necessary to start up a compilation pipeline for
a single compilation unit.
"""
def __init__(self, source_desc, full_module_name, cwd):
@@ -535,7 +535,7 @@ class CompilationSource(object):
self.full_module_name = full_module_name
self.cwd = cwd
-
+
class CompilationOptions(object):
r"""
See default_options at the end of this module for a list of all possible
@@ -562,22 +562,22 @@ class CompilationOptions(object):
message = "got unknown compilation option%s, please remove: %s" % (
's' if len(unknown_options) > 1 else '',
', '.join(unknown_options))
- raise ValueError(message)
+ raise ValueError(message)
directive_defaults = Options.get_directive_defaults()
directives = dict(options['compiler_directives']) # copy mutable field
- # check for invalid directives
+ # check for invalid directives
unknown_directives = set(directives) - set(directive_defaults)
- if unknown_directives:
- message = "got unknown compiler directive%s: %s" % (
- 's' if len(unknown_directives) > 1 else '',
- ', '.join(unknown_directives))
- raise ValueError(message)
+ if unknown_directives:
+ message = "got unknown compiler directive%s: %s" % (
+ 's' if len(unknown_directives) > 1 else '',
+ ', '.join(unknown_directives))
+ raise ValueError(message)
options['compiler_directives'] = directives
- if directives.get('np_pythran', False) and not options['cplus']:
- import warnings
- warnings.warn("C++ mode forced when in Pythran mode!")
- options['cplus'] = True
+ if directives.get('np_pythran', False) and not options['cplus']:
+ import warnings
+ warnings.warn("C++ mode forced when in Pythran mode!")
+ options['cplus'] = True
if 'language_level' in directives and 'language_level' not in kw:
options['language_level'] = directives['language_level']
elif not options.get('language_level'):
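The option validation restored above fails fast on anything unrecognised, pluralising the message, and separately forces C++ mode with a warning when the np_pythran directive is set. The two set-difference checks, condensed into a hypothetical helper:

    def validate_options(options, known_options, directives, known_directives):
        unknown = set(options) - set(known_options)
        if unknown:
            raise ValueError("got unknown compilation option%s, please remove: %s" % (
                's' if len(unknown) > 1 else '', ', '.join(sorted(unknown))))
        bad = set(directives) - set(known_directives)
        if bad:
            raise ValueError("got unknown compiler directive%s: %s" % (
                's' if len(bad) > 1 else '', ', '.join(sorted(bad))))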
@@ -764,14 +764,14 @@ def compile_multiple(sources, options):
processed.add(source)
return results
-
+
def compile(source, options = None, full_module_name = None, **kwds):
"""
compile(source [, options], [, <option> = <value>]...)
Compile one or more Pyrex implementation files, with optional timestamp
- checking and recursing on dependencies. The source argument may be a string
- or a sequence of strings. If it is a string and no recursion or timestamp
+ checking and recursing on dependencies. The source argument may be a string
+ or a sequence of strings. If it is a string and no recursion or timestamp
checking is requested, a CompilationResult is returned, otherwise a
CompilationResultSet is returned.
"""
@@ -781,7 +781,7 @@ def compile(source, options = None, full_module_name = None, **kwds):
else:
return compile_multiple(source, options)
-
+
@Utils.cached_function
def search_include_directories(dirs, qualified_name, suffix, pos, include=False):
"""
@@ -847,16 +847,16 @@ def search_include_directories(dirs, qualified_name, suffix, pos, include=False)
return None
-# ------------------------------------------------------------------------
+# ------------------------------------------------------------------------
#
# Main command-line entry point
#
-# ------------------------------------------------------------------------
-
+# ------------------------------------------------------------------------
+
def setuptools_main():
return main(command_line = 1)
-
+
def main(command_line = 0):
args = sys.argv[1:]
any_failures = 0
@@ -882,11 +882,11 @@ def main(command_line = 0):
sys.exit(1)
-# ------------------------------------------------------------------------
+# ------------------------------------------------------------------------
#
# Set the default options depending on the platform
#
-# ------------------------------------------------------------------------
+# ------------------------------------------------------------------------
default_options = dict(
show_version = 0,
@@ -918,6 +918,6 @@ default_options = dict(
output_dir=None,
build_dir=None,
cache=None,
- create_extension=None,
- np_pythran=False
+ create_extension=None,
+ np_pythran=False
)
diff --git a/contrib/tools/cython/Cython/Compiler/MemoryView.py b/contrib/tools/cython/Cython/Compiler/MemoryView.py
index 29d27432ea..0406d6c716 100644
--- a/contrib/tools/cython/Cython/Compiler/MemoryView.py
+++ b/contrib/tools/cython/Cython/Compiler/MemoryView.py
@@ -28,12 +28,12 @@ def concat_flags(*flags):
format_flag = "PyBUF_FORMAT"
-memview_c_contiguous = "(PyBUF_C_CONTIGUOUS | PyBUF_FORMAT)"
-memview_f_contiguous = "(PyBUF_F_CONTIGUOUS | PyBUF_FORMAT)"
-memview_any_contiguous = "(PyBUF_ANY_CONTIGUOUS | PyBUF_FORMAT)"
-memview_full_access = "PyBUF_FULL_RO"
-#memview_strided_access = "PyBUF_STRIDED_RO"
-memview_strided_access = "PyBUF_RECORDS_RO"
+memview_c_contiguous = "(PyBUF_C_CONTIGUOUS | PyBUF_FORMAT)"
+memview_f_contiguous = "(PyBUF_F_CONTIGUOUS | PyBUF_FORMAT)"
+memview_any_contiguous = "(PyBUF_ANY_CONTIGUOUS | PyBUF_FORMAT)"
+memview_full_access = "PyBUF_FULL_RO"
+#memview_strided_access = "PyBUF_STRIDED_RO"
+memview_strided_access = "PyBUF_RECORDS_RO"
MEMVIEW_DIRECT = '__Pyx_MEMVIEW_DIRECT'
MEMVIEW_PTR = '__Pyx_MEMVIEW_PTR'
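These PyBUF_* constants are bitmask requests passed to PyObject_GetBuffer(): each contiguity request is ORed with PyBUF_FORMAT so the exporter also fills in the element format string. Pure Python cannot pass raw flags, but the resulting layout is observable on a memoryview; a small stdlib-only check:

    from array import array

    mv = memoryview(array('d', range(6)))
    # A C-contiguous exporter satisfies a PyBUF_C_CONTIGUOUS | PyBUF_FORMAT
    # request; from Python the same facts show up as attributes.
    assert mv.c_contiguous and mv.contiguous
    assert mv.format == 'd'
    print(mv.itemsize, mv.nbytes)  # element size and total buffer size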
@@ -392,15 +392,15 @@ def get_memoryview_flag(access, packing):
return 'contiguous'
-def get_is_contig_func_name(contig_type, ndim):
- assert contig_type in ('C', 'F')
- return "__pyx_memviewslice_is_contig_%s%d" % (contig_type, ndim)
+def get_is_contig_func_name(contig_type, ndim):
+ assert contig_type in ('C', 'F')
+ return "__pyx_memviewslice_is_contig_%s%d" % (contig_type, ndim)
-def get_is_contig_utility(contig_type, ndim):
- assert contig_type in ('C', 'F')
- C = dict(context, ndim=ndim, contig_type=contig_type)
- utility = load_memview_c_utility("MemviewSliceCheckContig", C, requires=[is_contig_utility])
+def get_is_contig_utility(contig_type, ndim):
+ assert contig_type in ('C', 'F')
+ C = dict(context, ndim=ndim, contig_type=contig_type)
+ utility = load_memview_c_utility("MemviewSliceCheckContig", C, requires=[is_contig_utility])
return utility
@@ -816,7 +816,7 @@ memviewslice_declare_code = load_memview_c_utility(
context=context,
requires=[])
-atomic_utility = load_memview_c_utility("Atomics", context)
+atomic_utility = load_memview_c_utility("Atomics", context)
memviewslice_init_code = load_memview_c_utility(
"MemviewSliceInit",
@@ -843,7 +843,7 @@ view_utility_code = load_memview_cy_utility(
context=context,
requires=[Buffer.GetAndReleaseBufferUtilityCode(),
Buffer.buffer_struct_declare_code,
- Buffer.buffer_formats_declare_code,
+ Buffer.buffer_formats_declare_code,
memviewslice_init_code,
is_contig_utility,
overlapping_utility,
diff --git a/contrib/tools/cython/Cython/Compiler/ModuleNode.py b/contrib/tools/cython/Cython/Compiler/ModuleNode.py
index 304f6f9a85..cd7166408e 100644
--- a/contrib/tools/cython/Cython/Compiler/ModuleNode.py
+++ b/contrib/tools/cython/Cython/Compiler/ModuleNode.py
@@ -7,13 +7,13 @@ from __future__ import absolute_import
import cython
cython.declare(Naming=object, Options=object, PyrexTypes=object, TypeSlots=object,
error=object, warning=object, py_object_type=object, UtilityCode=object,
- EncodedString=object, re=object)
+ EncodedString=object, re=object)
from collections import defaultdict
import json
import operator
import os
-import re
+import re
from .PyrexTypes import CPtrType
from . import Future
@@ -24,14 +24,14 @@ from . import Nodes
from . import Options
from . import TypeSlots
from . import PyrexTypes
-from . import Pythran
+from . import Pythran
from .Errors import error, warning
from .PyrexTypes import py_object_type
from ..Utils import open_new_file, replace_suffix, decode_filename, build_hex_version
-from .Code import UtilityCode, IncludeCode
+from .Code import UtilityCode, IncludeCode
from .StringEncoding import EncodedString
-from .Pythran import has_np_pythran
+from .Pythran import has_np_pythran
def check_c_declarations_pxd(module_node):
module_node.scope.check_c_classes_pxd()
@@ -87,9 +87,9 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
self.scope.utility_code_list.extend(scope.utility_code_list)
- for inc in scope.c_includes.values():
- self.scope.process_include(inc)
-
+ for inc in scope.c_includes.values():
+ self.scope.process_include(inc)
+
def extend_if_not_in(L1, L2):
for x in L2:
if x not in L1:
@@ -106,8 +106,8 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
self.scope.merge_in(scope)
def analyse_declarations(self, env):
- if has_np_pythran(env):
- Pythran.include_pythran_generic(env)
+ if has_np_pythran(env):
+ Pythran.include_pythran_generic(env)
if self.directives:
env.old_style_globals = self.directives['old_style_globals']
if not Options.docstrings:
@@ -120,7 +120,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
else:
env.doc = self.doc
env.directives = self.directives
-
+
self.body.analyse_declarations(env)
def prepare_utility_code(self):
@@ -204,14 +204,14 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
h_code.putln("")
h_code.putln("#endif /* !%s */" % api_guard)
h_code.putln("")
- h_code.putln("/* WARNING: the interface of the module init function changed in CPython 3.5. */")
- h_code.putln("/* It now returns a PyModuleDef instance instead of a PyModule instance. */")
- h_code.putln("")
+ h_code.putln("/* WARNING: the interface of the module init function changed in CPython 3.5. */")
+ h_code.putln("/* It now returns a PyModuleDef instance instead of a PyModule instance. */")
+ h_code.putln("")
h_code.putln("#if PY_MAJOR_VERSION < 3")
init_name = 'init' + (options.init_suffix or env.module_name)
h_code.putln("PyMODINIT_FUNC %s(void);" % init_name)
h_code.putln("#else")
- h_code.putln("PyMODINIT_FUNC %s(void);" % self.mod_init_func_cname('PyInit', env, options))
+ h_code.putln("PyMODINIT_FUNC %s(void);" % self.mod_init_func_cname('PyInit', env, options))
h_code.putln("#endif")
h_code.putln("")
h_code.putln("#endif /* !%s */" % h_guard)
@@ -225,7 +225,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
def generate_public_declaration(self, entry, h_code, i_code):
h_code.putln("%s %s;" % (
Naming.extern_c_macro,
- entry.type.declaration_code(entry.cname)))
+ entry.type.declaration_code(entry.cname)))
if i_code:
i_code.putln("cdef extern %s" % (
entry.type.declaration_code(entry.cname, pyrex=1)))
@@ -366,16 +366,16 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code = globalstate['before_global_var']
code.putln('#define __Pyx_MODULE_NAME "%s"' % self.full_module_name)
- module_is_main = "%s%s" % (Naming.module_is_main, self.full_module_name.replace('.', '__'))
- code.putln("extern int %s;" % module_is_main)
- code.putln("int %s = 0;" % module_is_main)
+ module_is_main = "%s%s" % (Naming.module_is_main, self.full_module_name.replace('.', '__'))
+ code.putln("extern int %s;" % module_is_main)
+ code.putln("int %s = 0;" % module_is_main)
code.putln("")
code.putln("/* Implementation of '%s' */" % env.qualified_name)
- code = globalstate['late_includes']
- code.putln("/* Late includes */")
- self.generate_includes(env, modules, code, early=False)
-
+ code = globalstate['late_includes']
+ code.putln("/* Late includes */")
+ self.generate_includes(env, modules, code, early=False)
+
code = globalstate['all_the_rest']
self.generate_cached_builtins_decls(env, code)
@@ -640,30 +640,30 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.putln("#define PY_SSIZE_T_CLEAN")
code.putln("#endif /* PY_SSIZE_T_CLEAN */")
- for inc in sorted(env.c_includes.values(), key=IncludeCode.sortkey):
- if inc.location == inc.INITIAL:
- inc.write(code)
+ for inc in sorted(env.c_includes.values(), key=IncludeCode.sortkey):
+ if inc.location == inc.INITIAL:
+ inc.write(code)
code.putln("#ifndef Py_PYTHON_H")
code.putln(" #error Python headers needed to compile C extensions, "
"please install development version of Python.")
code.putln("#elif PY_VERSION_HEX < 0x02060000 || "
- "(0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)")
- code.putln(" #error Cython requires Python 2.6+ or Python 3.3+.")
+ "(0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)")
+ code.putln(" #error Cython requires Python 2.6+ or Python 3.3+.")
code.putln("#else")
code.globalstate["end"].putln("#endif /* Py_PYTHON_H */")
from .. import __version__
code.putln('#define CYTHON_ABI "%s"' % __version__.replace('.', '_'))
code.putln('#define CYTHON_HEX_VERSION %s' % build_hex_version(__version__))
- code.putln("#define CYTHON_FUTURE_DIVISION %d" % (
- Future.division in env.context.future_directives))
+ code.putln("#define CYTHON_FUTURE_DIVISION %d" % (
+ Future.division in env.context.future_directives))
self._put_setup_code(code, "CModulePreamble")
if env.context.options.cplus:
self._put_setup_code(code, "CppInitCode")
else:
self._put_setup_code(code, "CInitCode")
- self._put_setup_code(code, "PythonCompatibility")
+ self._put_setup_code(code, "PythonCompatibility")
self._put_setup_code(code, "MathInitCode")
# Using "(void)cname" to prevent "unused" warnings.
@@ -686,10 +686,10 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.putln("#define %s" % Naming.h_guard_prefix + self.api_name(env))
code.putln("#define %s" % Naming.api_guard_prefix + self.api_name(env))
- code.putln("/* Early includes */")
- self.generate_includes(env, cimported_modules, code, late=False)
+ code.putln("/* Early includes */")
+ self.generate_includes(env, cimported_modules, code, late=False)
code.putln("")
- code.putln("#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS)")
+ code.putln("#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS)")
code.putln("#define CYTHON_WITHOUT_ASSERTIONS")
code.putln("#endif")
code.putln("")
@@ -729,9 +729,9 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
PyrexTypes.c_int_type.create_from_py_utility_code(env)
code.put(Nodes.branch_prediction_macros)
- code.putln('static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; }')
+ code.putln('static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; }')
code.putln('')
- code.putln('static PyObject *%s = NULL;' % env.module_cname)
+ code.putln('static PyObject *%s = NULL;' % env.module_cname)
code.putln('static PyObject *%s;' % env.module_dict_cname)
code.putln('static PyObject *%s;' % Naming.builtins_cname)
code.putln('static PyObject *%s = NULL;' % Naming.cython_runtime_cname)
@@ -745,10 +745,10 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.putln('static const char * %s= %s;' % (Naming.cfilenm_cname, Naming.file_c_macro))
code.putln('static const char *%s;' % Naming.filename_cname)
- env.use_utility_code(UtilityCode.load_cached("FastTypeChecks", "ModuleSetupCode.c"))
- if has_np_pythran(env):
- env.use_utility_code(UtilityCode.load_cached("PythranConversion", "CppSupport.cpp"))
-
+ env.use_utility_code(UtilityCode.load_cached("FastTypeChecks", "ModuleSetupCode.c"))
+ if has_np_pythran(env):
+ env.use_utility_code(UtilityCode.load_cached("PythranConversion", "CppSupport.cpp"))
+
def generate_extern_c_macro_definition(self, code):
name = Naming.extern_c_macro
code.putln("#ifndef %s" % name)
@@ -764,31 +764,31 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.putln(" #define DL_IMPORT(_T) _T")
code.putln("#endif")
- def generate_includes(self, env, cimported_modules, code, early=True, late=True):
+ def generate_includes(self, env, cimported_modules, code, early=True, late=True):
includes = []
- for inc in sorted(env.c_includes.values(), key=IncludeCode.sortkey):
- if inc.location == inc.EARLY:
- if early:
- inc.write(code)
- elif inc.location == inc.LATE:
- if late:
- inc.write(code)
- if early:
- code.putln_openmp("#include <omp.h>")
+ for inc in sorted(env.c_includes.values(), key=IncludeCode.sortkey):
+ if inc.location == inc.EARLY:
+ if early:
+ inc.write(code)
+ elif inc.location == inc.LATE:
+ if late:
+ inc.write(code)
+ if early:
+ code.putln_openmp("#include <omp.h>")
def generate_filename_table(self, code):
- from os.path import isabs, basename
+ from os.path import isabs, basename
code.putln("")
code.putln("static const char *%s[] = {" % Naming.filetable_cname)
if code.globalstate.filename_list:
for source_desc in code.globalstate.filename_list:
- file_path = source_desc.get_filenametable_entry()
- if Options.source_root:
- # If source root specified, dump description - it's source root relative filename
- file_path = source_desc.get_description()
- if isabs(file_path):
- file_path = basename(file_path) # never include absolute paths
- escaped_filename = file_path.replace("\\", "\\\\").replace('"', r'\"')
+ file_path = source_desc.get_filenametable_entry()
+ if Options.source_root:
+ # If source root specified, dump description - it's source root relative filename
+ file_path = source_desc.get_description()
+ if isabs(file_path):
+ file_path = basename(file_path) # never include absolute paths
+ escaped_filename = file_path.replace("\\", "\\\\").replace('"', r'\"')
code.putln('"%s",' % escaped_filename)
else:
# Some C compilers don't like an empty array
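The filename-table hunk keeps generated C sources relocatable and syntactically valid: absolute paths are reduced to their basename, then backslashes and double quotes are escaped so each name survives as a C string literal. As a function (hypothetical name):

    import os.path

    def escape_filename(file_path):
        if os.path.isabs(file_path):
            file_path = os.path.basename(file_path)  # never embed absolute paths
        # Escape for use inside a C string literal.
        return file_path.replace("\\", "\\\\").replace('"', r'\"')

    assert escape_filename(r'pkg\mod "x".pyx') == r'pkg\\mod \"x\".pyx'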
@@ -926,96 +926,96 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
[base_class.empty_declaration_code() for base_class in type.base_classes])
code.put(" : public %s" % base_class_decl)
code.putln(" {")
- py_attrs = [e for e in scope.entries.values()
- if e.type.is_pyobject and not e.is_inherited]
+ py_attrs = [e for e in scope.entries.values()
+ if e.type.is_pyobject and not e.is_inherited]
has_virtual_methods = False
- constructor = None
- destructor = None
+ constructor = None
+ destructor = None
for attr in scope.var_entries:
if attr.type.is_cfunction:
code.put("inline ")
if attr.type.is_cfunction and attr.type.is_static_method:
code.put("static ")
- elif attr.name == "<init>":
- constructor = attr
- elif attr.name == "<del>":
- destructor = attr
- elif attr.type.is_cfunction:
+ elif attr.name == "<init>":
+ constructor = attr
+ elif attr.name == "<del>":
+ destructor = attr
+ elif attr.type.is_cfunction:
code.put("virtual ")
has_virtual_methods = True
code.putln("%s;" % attr.type.declaration_code(attr.cname))
- is_implementing = 'init_module' in code.globalstate.parts
- if constructor or py_attrs:
- if constructor:
- arg_decls = []
- arg_names = []
- for arg in constructor.type.original_args[
- :len(constructor.type.args)-constructor.type.optional_arg_count]:
- arg_decls.append(arg.declaration_code())
- arg_names.append(arg.cname)
- if constructor.type.optional_arg_count:
- arg_decls.append(constructor.type.op_arg_struct.declaration_code(Naming.optional_args_cname))
- arg_names.append(Naming.optional_args_cname)
- if not arg_decls:
- arg_decls = ["void"]
- else:
- arg_decls = ["void"]
- arg_names = []
- if is_implementing:
- code.putln("%s(%s) {" % (type.cname, ", ".join(arg_decls)))
- if py_attrs:
- code.put_ensure_gil()
- for attr in py_attrs:
- code.put_init_var_to_py_none(attr, nanny=False);
- if constructor:
- code.putln("%s(%s);" % (constructor.cname, ", ".join(arg_names)))
- if py_attrs:
- code.put_release_ensured_gil()
- code.putln("}")
- else:
- code.putln("%s(%s);" % (type.cname, ", ".join(arg_decls)))
- if destructor or py_attrs or has_virtual_methods:
- if has_virtual_methods:
- code.put("virtual ")
- if is_implementing:
- code.putln("~%s() {" % type.cname)
- if py_attrs:
- code.put_ensure_gil()
- if destructor:
- code.putln("%s();" % destructor.cname)
- if py_attrs:
- for attr in py_attrs:
- code.put_var_xdecref(attr, nanny=False);
- code.put_release_ensured_gil()
- code.putln("}")
- else:
- code.putln("~%s();" % type.cname)
- if py_attrs:
- # Also need copy constructor and assignment operators.
- if is_implementing:
- code.putln("%s(const %s& __Pyx_other) {" % (type.cname, type.cname))
- code.put_ensure_gil()
- for attr in scope.var_entries:
- if not attr.type.is_cfunction:
- code.putln("%s = __Pyx_other.%s;" % (attr.cname, attr.cname))
- code.put_var_incref(attr, nanny=False)
- code.put_release_ensured_gil()
- code.putln("}")
- code.putln("%s& operator=(const %s& __Pyx_other) {" % (type.cname, type.cname))
- code.putln("if (this != &__Pyx_other) {")
- code.put_ensure_gil()
- for attr in scope.var_entries:
- if not attr.type.is_cfunction:
- code.put_var_xdecref(attr, nanny=False);
- code.putln("%s = __Pyx_other.%s;" % (attr.cname, attr.cname))
- code.put_var_incref(attr, nanny=False)
- code.put_release_ensured_gil()
- code.putln("}")
- code.putln("return *this;")
- code.putln("}")
- else:
- code.putln("%s(const %s& __Pyx_other);" % (type.cname, type.cname))
- code.putln("%s& operator=(const %s& __Pyx_other);" % (type.cname, type.cname))
+ is_implementing = 'init_module' in code.globalstate.parts
+ if constructor or py_attrs:
+ if constructor:
+ arg_decls = []
+ arg_names = []
+ for arg in constructor.type.original_args[
+ :len(constructor.type.args)-constructor.type.optional_arg_count]:
+ arg_decls.append(arg.declaration_code())
+ arg_names.append(arg.cname)
+ if constructor.type.optional_arg_count:
+ arg_decls.append(constructor.type.op_arg_struct.declaration_code(Naming.optional_args_cname))
+ arg_names.append(Naming.optional_args_cname)
+ if not arg_decls:
+ arg_decls = ["void"]
+ else:
+ arg_decls = ["void"]
+ arg_names = []
+ if is_implementing:
+ code.putln("%s(%s) {" % (type.cname, ", ".join(arg_decls)))
+ if py_attrs:
+ code.put_ensure_gil()
+ for attr in py_attrs:
+ code.put_init_var_to_py_none(attr, nanny=False);
+ if constructor:
+ code.putln("%s(%s);" % (constructor.cname, ", ".join(arg_names)))
+ if py_attrs:
+ code.put_release_ensured_gil()
+ code.putln("}")
+ else:
+ code.putln("%s(%s);" % (type.cname, ", ".join(arg_decls)))
+ if destructor or py_attrs or has_virtual_methods:
+ if has_virtual_methods:
+ code.put("virtual ")
+ if is_implementing:
+ code.putln("~%s() {" % type.cname)
+ if py_attrs:
+ code.put_ensure_gil()
+ if destructor:
+ code.putln("%s();" % destructor.cname)
+ if py_attrs:
+ for attr in py_attrs:
+ code.put_var_xdecref(attr, nanny=False);
+ code.put_release_ensured_gil()
+ code.putln("}")
+ else:
+ code.putln("~%s();" % type.cname)
+ if py_attrs:
+ # Also need copy constructor and assignment operators.
+ if is_implementing:
+ code.putln("%s(const %s& __Pyx_other) {" % (type.cname, type.cname))
+ code.put_ensure_gil()
+ for attr in scope.var_entries:
+ if not attr.type.is_cfunction:
+ code.putln("%s = __Pyx_other.%s;" % (attr.cname, attr.cname))
+ code.put_var_incref(attr, nanny=False)
+ code.put_release_ensured_gil()
+ code.putln("}")
+ code.putln("%s& operator=(const %s& __Pyx_other) {" % (type.cname, type.cname))
+ code.putln("if (this != &__Pyx_other) {")
+ code.put_ensure_gil()
+ for attr in scope.var_entries:
+ if not attr.type.is_cfunction:
+ code.put_var_xdecref(attr, nanny=False);
+ code.putln("%s = __Pyx_other.%s;" % (attr.cname, attr.cname))
+ code.put_var_incref(attr, nanny=False)
+ code.put_release_ensured_gil()
+ code.putln("}")
+ code.putln("return *this;")
+ code.putln("}")
+ else:
+ code.putln("%s(const %s& __Pyx_other);" % (type.cname, type.cname))
+ code.putln("%s& operator=(const %s& __Pyx_other);" % (type.cname, type.cname))
code.putln("};")
def generate_enum_definition(self, entry, code):
@@ -1247,31 +1247,31 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
self.generate_traverse_function(scope, code, entry)
if scope.needs_tp_clear():
self.generate_clear_function(scope, code, entry)
- if scope.defines_any_special(["__getitem__"]):
+ if scope.defines_any_special(["__getitem__"]):
self.generate_getitem_int_function(scope, code)
- if scope.defines_any_special(["__setitem__", "__delitem__"]):
+ if scope.defines_any_special(["__setitem__", "__delitem__"]):
self.generate_ass_subscript_function(scope, code)
- if scope.defines_any_special(["__getslice__", "__setslice__", "__delslice__"]):
+ if scope.defines_any_special(["__getslice__", "__setslice__", "__delslice__"]):
warning(self.pos,
"__getslice__, __setslice__, and __delslice__ are not supported by Python 3, "
"use __getitem__, __setitem__, and __delitem__ instead", 1)
code.putln("#if PY_MAJOR_VERSION >= 3")
code.putln("#error __getslice__, __setslice__, and __delslice__ not supported in Python 3.")
code.putln("#endif")
- if scope.defines_any_special(["__setslice__", "__delslice__"]):
+ if scope.defines_any_special(["__setslice__", "__delslice__"]):
self.generate_ass_slice_function(scope, code)
- if scope.defines_any_special(["__getattr__", "__getattribute__"]):
+ if scope.defines_any_special(["__getattr__", "__getattribute__"]):
self.generate_getattro_function(scope, code)
- if scope.defines_any_special(["__setattr__", "__delattr__"]):
+ if scope.defines_any_special(["__setattr__", "__delattr__"]):
self.generate_setattro_function(scope, code)
- if scope.defines_any_special(["__get__"]):
+ if scope.defines_any_special(["__get__"]):
self.generate_descr_get_function(scope, code)
- if scope.defines_any_special(["__set__", "__delete__"]):
+ if scope.defines_any_special(["__set__", "__delete__"]):
self.generate_descr_set_function(scope, code)
- if not scope.is_closure_class_scope and scope.defines_any(["__dict__"]):
+ if not scope.is_closure_class_scope and scope.defines_any(["__dict__"]):
self.generate_dict_getter_function(scope, code)
- if scope.defines_any_special(TypeSlots.richcmp_special_methods):
- self.generate_richcmp_function(scope, code)
+ if scope.defines_any_special(TypeSlots.richcmp_special_methods):
+ self.generate_richcmp_function(scope, code)
self.generate_property_accessors(scope, code)
self.generate_method_table(scope, code)
self.generate_getset_table(scope, code)
@@ -1450,11 +1450,11 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
is_final_type = scope.parent_type.is_final_type
needs_gc = scope.needs_gc()
- weakref_slot = scope.lookup_here("__weakref__") if not scope.is_closure_class_scope else None
+ weakref_slot = scope.lookup_here("__weakref__") if not scope.is_closure_class_scope else None
if weakref_slot not in scope.var_entries:
weakref_slot = None
- dict_slot = scope.lookup_here("__dict__") if not scope.is_closure_class_scope else None
+ dict_slot = scope.lookup_here("__dict__") if not scope.is_closure_class_scope else None
if dict_slot not in scope.var_entries:
dict_slot = None
@@ -1467,15 +1467,15 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
if not is_final_type:
# in Py3.4+, call tp_finalize() as early as possible
- code.putln("#if CYTHON_USE_TP_FINALIZE")
+ code.putln("#if CYTHON_USE_TP_FINALIZE")
if needs_gc:
finalised_check = '!_PyGC_FINALIZED(o)'
else:
finalised_check = (
'(!PyType_IS_GC(Py_TYPE(o)) || !_PyGC_FINALIZED(o))')
- code.putln(
- "if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE)"
- " && Py_TYPE(o)->tp_finalize) && %s) {" % finalised_check)
+ code.putln(
+ "if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE)"
+ " && Py_TYPE(o)->tp_finalize) && %s) {" % finalised_check)
# if instance was resurrected by finaliser, return
code.putln("if (PyObject_CallFinalizerFromDealloc(o)) return;")
code.putln("}")
@@ -1622,9 +1622,9 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
for entry in py_attrs:
var_code = "p->%s" % entry.cname
- var_as_pyobject = PyrexTypes.typecast(py_object_type, entry.type, var_code)
+ var_as_pyobject = PyrexTypes.typecast(py_object_type, entry.type, var_code)
code.putln("if (%s) {" % var_code)
- code.putln("e = (*v)(%s, a); if (e) return e;" % var_as_pyobject)
+ code.putln("e = (*v)(%s, a); if (e) return e;" % var_as_pyobject)
code.putln("}")
# Traverse buffer exporting objects.
@@ -1841,76 +1841,76 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.putln(
"}")
- def generate_richcmp_function(self, scope, code):
- if scope.lookup_here("__richcmp__"):
- # user implemented, nothing to do
- return
- # otherwise, we have to generate it from the Python special methods
- richcmp_cfunc = scope.mangle_internal("tp_richcompare")
- code.putln("")
- code.putln("static PyObject *%s(PyObject *o1, PyObject *o2, int op) {" % richcmp_cfunc)
- code.putln("switch (op) {")
-
- class_scopes = []
- cls = scope.parent_type
- while cls is not None and not cls.entry.visibility == 'extern':
- class_scopes.append(cls.scope)
- cls = cls.scope.parent_type.base_type
- assert scope in class_scopes
-
- extern_parent = None
- if cls and cls.entry.visibility == 'extern':
- # need to call up into base classes as we may not know all implemented comparison methods
- extern_parent = cls if cls.typeptr_cname else scope.parent_type.base_type
-
- eq_entry = None
- has_ne = False
- for cmp_method in TypeSlots.richcmp_special_methods:
- for class_scope in class_scopes:
- entry = class_scope.lookup_here(cmp_method)
- if entry is not None:
- break
- else:
- continue
-
- cmp_type = cmp_method.strip('_').upper() # e.g. "__eq__" -> EQ
- code.putln("case Py_%s: {" % cmp_type)
- if cmp_method == '__eq__':
- eq_entry = entry
- # Python itself does not do this optimisation, it seems...
- #code.putln("if (o1 == o2) return __Pyx_NewRef(Py_True);")
- elif cmp_method == '__ne__':
- has_ne = True
- # Python itself does not do this optimisation, it seems...
- #code.putln("if (o1 == o2) return __Pyx_NewRef(Py_False);")
- code.putln("return %s(o1, o2);" % entry.func_cname)
- code.putln("}")
-
- if eq_entry and not has_ne and not extern_parent:
- code.putln("case Py_NE: {")
- code.putln("PyObject *ret;")
- # Python itself does not do this optimisation, it seems...
- #code.putln("if (o1 == o2) return __Pyx_NewRef(Py_False);")
- code.putln("ret = %s(o1, o2);" % eq_entry.func_cname)
- code.putln("if (likely(ret && ret != Py_NotImplemented)) {")
- code.putln("int b = __Pyx_PyObject_IsTrue(ret); Py_DECREF(ret);")
- code.putln("if (unlikely(b < 0)) return NULL;")
- code.putln("ret = (b) ? Py_False : Py_True;")
- code.putln("Py_INCREF(ret);")
- code.putln("}")
- code.putln("return ret;")
- code.putln("}")
-
- code.putln("default: {")
- if extern_parent and extern_parent.typeptr_cname:
- code.putln("if (likely(%s->tp_richcompare)) return %s->tp_richcompare(o1, o2, op);" % (
- extern_parent.typeptr_cname, extern_parent.typeptr_cname))
- code.putln("return __Pyx_NewRef(Py_NotImplemented);")
- code.putln("}")
-
- code.putln("}") # switch
- code.putln("}")
-
+ def generate_richcmp_function(self, scope, code):
+ if scope.lookup_here("__richcmp__"):
+ # user implemented, nothing to do
+ return
+ # otherwise, we have to generate it from the Python special methods
+ richcmp_cfunc = scope.mangle_internal("tp_richcompare")
+ code.putln("")
+ code.putln("static PyObject *%s(PyObject *o1, PyObject *o2, int op) {" % richcmp_cfunc)
+ code.putln("switch (op) {")
+
+ class_scopes = []
+ cls = scope.parent_type
+ while cls is not None and not cls.entry.visibility == 'extern':
+ class_scopes.append(cls.scope)
+ cls = cls.scope.parent_type.base_type
+ assert scope in class_scopes
+
+ extern_parent = None
+ if cls and cls.entry.visibility == 'extern':
+ # need to call up into base classes as we may not know all implemented comparison methods
+ extern_parent = cls if cls.typeptr_cname else scope.parent_type.base_type
+
+ eq_entry = None
+ has_ne = False
+ for cmp_method in TypeSlots.richcmp_special_methods:
+ for class_scope in class_scopes:
+ entry = class_scope.lookup_here(cmp_method)
+ if entry is not None:
+ break
+ else:
+ continue
+
+ cmp_type = cmp_method.strip('_').upper() # e.g. "__eq__" -> EQ
+ code.putln("case Py_%s: {" % cmp_type)
+ if cmp_method == '__eq__':
+ eq_entry = entry
+ # Python itself does not do this optimisation, it seems...
+ #code.putln("if (o1 == o2) return __Pyx_NewRef(Py_True);")
+ elif cmp_method == '__ne__':
+ has_ne = True
+ # Python itself does not do this optimisation, it seems...
+ #code.putln("if (o1 == o2) return __Pyx_NewRef(Py_False);")
+ code.putln("return %s(o1, o2);" % entry.func_cname)
+ code.putln("}")
+
+ if eq_entry and not has_ne and not extern_parent:
+ code.putln("case Py_NE: {")
+ code.putln("PyObject *ret;")
+ # Python itself does not do this optimisation, it seems...
+ #code.putln("if (o1 == o2) return __Pyx_NewRef(Py_False);")
+ code.putln("ret = %s(o1, o2);" % eq_entry.func_cname)
+ code.putln("if (likely(ret && ret != Py_NotImplemented)) {")
+ code.putln("int b = __Pyx_PyObject_IsTrue(ret); Py_DECREF(ret);")
+ code.putln("if (unlikely(b < 0)) return NULL;")
+ code.putln("ret = (b) ? Py_False : Py_True;")
+ code.putln("Py_INCREF(ret);")
+ code.putln("}")
+ code.putln("return ret;")
+ code.putln("}")
+
+ code.putln("default: {")
+ if extern_parent and extern_parent.typeptr_cname:
+ code.putln("if (likely(%s->tp_richcompare)) return %s->tp_richcompare(o1, o2, op);" % (
+ extern_parent.typeptr_cname, extern_parent.typeptr_cname))
+ code.putln("return __Pyx_NewRef(Py_NotImplemented);")
+ code.putln("}")
+
+ code.putln("}") # switch
+ code.putln("}")
+
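When a class defines __eq__ but no __ne__ (and has no extern base that might), the generated tp_richcompare synthesises Py_NE by calling the __eq__ slot and negating its truth value, while letting NotImplemented propagate untouched. The Python-level equivalent of that fallback (hypothetical helper):

    def ne_from_eq(eq, o1, o2):
        ret = eq(o1, o2)
        if ret is NotImplemented:
            return ret       # let the other operand try
        return not ret       # invert the truth value, as the generated C does

    assert ne_from_eq(lambda a, b: a == b, 1, 2) is True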
def generate_getattro_function(self, scope, code):
# First try to get the attribute using __getattribute__, if defined, or
# PyObject_GenericGetAttr.
@@ -1918,19 +1918,19 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
# If that raises an AttributeError, call the __getattr__ if defined.
#
# In both cases, defined can be in this class, or any base class.
- def lookup_here_or_base(n, tp=None, extern_return=None):
+ def lookup_here_or_base(n, tp=None, extern_return=None):
# Recursive lookup
- if tp is None:
- tp = scope.parent_type
- r = tp.scope.lookup_here(n)
- if r is None:
- if tp.is_external and extern_return is not None:
- return extern_return
- if tp.base_type is not None:
- return lookup_here_or_base(n, tp.base_type)
- return r
-
- has_instance_dict = lookup_here_or_base("__dict__", extern_return="extern")
+ if tp is None:
+ tp = scope.parent_type
+ r = tp.scope.lookup_here(n)
+ if r is None:
+ if tp.is_external and extern_return is not None:
+ return extern_return
+ if tp.base_type is not None:
+ return lookup_here_or_base(n, tp.base_type)
+ return r
+
+ has_instance_dict = lookup_here_or_base("__dict__", extern_return="extern")
getattr_entry = lookup_here_or_base("__getattr__")
getattribute_entry = lookup_here_or_base("__getattribute__")
code.putln("")
@@ -1942,20 +1942,20 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
"PyObject *v = %s(o, n);" % (
getattribute_entry.func_cname))
else:
- if not has_instance_dict and scope.parent_type.is_final_type:
- # Final with no dict => use faster type attribute lookup.
- code.globalstate.use_utility_code(
- UtilityCode.load_cached("PyObject_GenericGetAttrNoDict", "ObjectHandling.c"))
- generic_getattr_cfunc = "__Pyx_PyObject_GenericGetAttrNoDict"
- elif not has_instance_dict or has_instance_dict == "extern":
- # No dict in the known ancestors, but don't know about extern ancestors or subtypes.
- code.globalstate.use_utility_code(
- UtilityCode.load_cached("PyObject_GenericGetAttr", "ObjectHandling.c"))
- generic_getattr_cfunc = "__Pyx_PyObject_GenericGetAttr"
- else:
- generic_getattr_cfunc = "PyObject_GenericGetAttr"
+ if not has_instance_dict and scope.parent_type.is_final_type:
+ # Final with no dict => use faster type attribute lookup.
+ code.globalstate.use_utility_code(
+ UtilityCode.load_cached("PyObject_GenericGetAttrNoDict", "ObjectHandling.c"))
+ generic_getattr_cfunc = "__Pyx_PyObject_GenericGetAttrNoDict"
+ elif not has_instance_dict or has_instance_dict == "extern":
+ # No dict in the known ancestors, but don't know about extern ancestors or subtypes.
+ code.globalstate.use_utility_code(
+ UtilityCode.load_cached("PyObject_GenericGetAttr", "ObjectHandling.c"))
+ generic_getattr_cfunc = "__Pyx_PyObject_GenericGetAttr"
+ else:
+ generic_getattr_cfunc = "PyObject_GenericGetAttr"
code.putln(
- "PyObject *v = %s(o, n);" % generic_getattr_cfunc)
+ "PyObject *v = %s(o, n);" % generic_getattr_cfunc)
if getattr_entry is not None:
code.putln(
"if (!v && PyErr_ExceptionMatches(PyExc_AttributeError)) {")
@@ -2311,47 +2311,47 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.putln("return -1;")
code.putln("}")
code.putln("")
- code.putln(UtilityCode.load_as_string("ImportStar", "ImportExport.c")[1])
+ code.putln(UtilityCode.load_as_string("ImportStar", "ImportExport.c")[1])
code.exit_cfunc_scope() # done with labels
def generate_module_init_func(self, imported_modules, env, options, code):
subfunction = self.mod_init_subfunction(self.pos, self.scope, code)
-
+
code.enter_cfunc_scope(self.scope)
code.putln("")
- code.putln(UtilityCode.load_as_string("PyModInitFuncType", "ModuleSetupCode.c")[0])
+ code.putln(UtilityCode.load_as_string("PyModInitFuncType", "ModuleSetupCode.c")[0])
init_name = 'init' + (options.init_suffix or env.module_name)
- header2 = "__Pyx_PyMODINIT_FUNC %s(void)" % init_name
- header3 = "__Pyx_PyMODINIT_FUNC %s(void)" % self.mod_init_func_cname('PyInit', env, options)
+ header2 = "__Pyx_PyMODINIT_FUNC %s(void)" % init_name
+ header3 = "__Pyx_PyMODINIT_FUNC %s(void)" % self.mod_init_func_cname('PyInit', env, options)
code.putln("#if PY_MAJOR_VERSION < 3")
- # Optimise for small code size as the module init function is only executed once.
- code.putln("%s CYTHON_SMALL_CODE; /*proto*/" % header2)
+ # Optimise for small code size as the module init function is only executed once.
+ code.putln("%s CYTHON_SMALL_CODE; /*proto*/" % header2)
code.putln(header2)
code.putln("#else")
- code.putln("%s CYTHON_SMALL_CODE; /*proto*/" % header3)
+ code.putln("%s CYTHON_SMALL_CODE; /*proto*/" % header3)
code.putln(header3)
-
- # CPython 3.5+ supports multi-phase module initialisation (gives access to __spec__, __file__, etc.)
- code.putln("#if CYTHON_PEP489_MULTI_PHASE_INIT")
+
+ # CPython 3.5+ supports multi-phase module initialisation (gives access to __spec__, __file__, etc.)
+ code.putln("#if CYTHON_PEP489_MULTI_PHASE_INIT")
code.putln("{")
- code.putln("return PyModuleDef_Init(&%s);" % Naming.pymoduledef_cname)
- code.putln("}")
-
- mod_create_func = UtilityCode.load_as_string("ModuleCreationPEP489", "ModuleSetupCode.c")[1]
- code.put(mod_create_func)
-
- code.putln("")
- # main module init code lives in Py_mod_exec function, not in PyInit function
+ code.putln("return PyModuleDef_Init(&%s);" % Naming.pymoduledef_cname)
+ code.putln("}")
+
+ mod_create_func = UtilityCode.load_as_string("ModuleCreationPEP489", "ModuleSetupCode.c")[1]
+ code.put(mod_create_func)
+
+ code.putln("")
+ # main module init code lives in Py_mod_exec function, not in PyInit function
code.putln("static CYTHON_SMALL_CODE int %s(PyObject *%s)" % (
- self.mod_init_func_cname(Naming.pymodule_exec_func_cname, env),
- Naming.pymodinit_module_arg))
- code.putln("#endif") # PEP489
-
- code.putln("#endif") # Py3
-
- # start of module init/exec function (pre/post PEP 489)
- code.putln("{")
-
+ self.mod_init_func_cname(Naming.pymodule_exec_func_cname, env),
+ Naming.pymodinit_module_arg))
+ code.putln("#endif") # PEP489
+
+ code.putln("#endif") # Py3
+
+ # start of module init/exec function (pre/post PEP 489)
+ code.putln("{")
+
tempdecl_code = code.insertion_point()
profile = code.globalstate.directives['profile']
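The CYTHON_PEP489_MULTI_PHASE_INIT branch above makes PyInit_<name> return a PyModuleDef with create/exec slots instead of a finished module. The pure-Python analogue of that two-phase protocol, using the stdlib json module purely as an example:

import importlib.util

spec = importlib.util.find_spec("json")          # metadata known before execution
module = importlib.util.module_from_spec(spec)   # phase 1 ~ Py_mod_create
assert module.__spec__ is spec                   # __spec__/__file__ already set
spec.loader.exec_module(module)                  # phase 2 ~ Py_mod_exec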
@@ -2360,34 +2360,34 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.globalstate.use_utility_code(UtilityCode.load_cached("Profile", "Profile.c"))
code.put_declare_refcount_context()
- code.putln("#if CYTHON_PEP489_MULTI_PHASE_INIT")
+ code.putln("#if CYTHON_PEP489_MULTI_PHASE_INIT")
# Most extension modules simply can't deal with it, and Cython isn't ready either.
# See issues listed here: https://docs.python.org/3/c-api/init.html#sub-interpreter-support
code.putln("if (%s) {" % Naming.module_cname)
- # Hack: enforce single initialisation.
+ # Hack: enforce single initialisation.
code.putln("if (%s == %s) return 0;" % (
- Naming.module_cname,
- Naming.pymodinit_module_arg,
- ))
+ Naming.module_cname,
+ Naming.pymodinit_module_arg,
+ ))
code.putln('PyErr_SetString(PyExc_RuntimeError,'
' "Module \'%s\' has already been imported. Re-initialisation is not supported.");' %
env.module_name)
code.putln("return -1;")
code.putln("}")
- code.putln("#elif PY_MAJOR_VERSION >= 3")
- # Hack: enforce single initialisation also on reimports under different names on Python 3 (with PEP 3121/489).
- code.putln("if (%s) return __Pyx_NewRef(%s);" % (
- Naming.module_cname,
- Naming.module_cname,
- ))
- code.putln("#endif")
-
+ code.putln("#elif PY_MAJOR_VERSION >= 3")
+ # Hack: enforce single initialisation also on reimports under different names on Python 3 (with PEP 3121/489).
+ code.putln("if (%s) return __Pyx_NewRef(%s);" % (
+ Naming.module_cname,
+ Naming.module_cname,
+ ))
+ code.putln("#endif")
+
if profile or linetrace:
tempdecl_code.put_trace_declarations()
code.put_trace_frame_init()
- refnanny_import_code = UtilityCode.load_as_string("ImportRefnannyAPI", "ModuleSetupCode.c")[1]
- code.putln(refnanny_import_code.rstrip())
+ refnanny_import_code = UtilityCode.load_as_string("ImportRefnannyAPI", "ModuleSetupCode.c")[1]
+ code.putln(refnanny_import_code.rstrip())
code.put_setup_refcount_context(header3)
env.use_utility_code(UtilityCode.load("CheckBinaryVersion", "ModuleSetupCode.c"))
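Both guards above serve the same goal: module init must be idempotent. Under multi-phase init a second exec on the same module returns early; under classic Python 3 init, a reimport hands back a new reference to the already-built module. A rough Python rendering (a module-level global standing in for the static module pointer):

import types

_module = None  # stand-in for the static module pointer the C code checks

def pyinit_module():
    global _module
    if _module is not None:
        return _module                     # ~ "if (module) return __Pyx_NewRef(module);"
    _module = types.ModuleType("example")  # hypothetical module name
    return _module

assert pyinit_module() is pyinit_module()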
@@ -2403,14 +2403,14 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.putln("%s = PyUnicode_FromStringAndSize(\"\", 0); %s" % (
Naming.empty_unicode, code.error_goto_if_null(Naming.empty_unicode, self.pos)))
- for ext_type in ('CyFunction', 'FusedFunction', 'Coroutine', 'Generator', 'AsyncGen', 'StopAsyncIteration'):
+ for ext_type in ('CyFunction', 'FusedFunction', 'Coroutine', 'Generator', 'AsyncGen', 'StopAsyncIteration'):
code.putln("#ifdef __Pyx_%s_USED" % ext_type)
code.put_error_if_neg(self.pos, "__pyx_%s_init()" % ext_type)
code.putln("#endif")
code.putln("/*--- Library function declarations ---*/")
- if env.directives['np_pythran']:
- code.put_error_if_neg(self.pos, "_import_array()")
+ if env.directives['np_pythran']:
+ code.put_error_if_neg(self.pos, "_import_array()")
code.putln("/*--- Threads initialization code ---*/")
code.putln("#if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 "
@@ -2446,33 +2446,33 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.putln("/*--- Constants init code ---*/")
code.put_error_if_neg(self.pos, "__Pyx_InitCachedConstants()")
- code.putln("/*--- Global type/function init code ---*/")
+ code.putln("/*--- Global type/function init code ---*/")
+
+ with subfunction("Global init code") as inner_code:
+ self.generate_global_init_code(env, inner_code)
- with subfunction("Global init code") as inner_code:
- self.generate_global_init_code(env, inner_code)
+ with subfunction("Variable export code") as inner_code:
+ self.generate_c_variable_export_code(env, inner_code)
- with subfunction("Variable export code") as inner_code:
- self.generate_c_variable_export_code(env, inner_code)
+ with subfunction("Function export code") as inner_code:
+ self.generate_c_function_export_code(env, inner_code)
- with subfunction("Function export code") as inner_code:
- self.generate_c_function_export_code(env, inner_code)
+ with subfunction("Type init code") as inner_code:
+ self.generate_type_init_code(env, inner_code)
- with subfunction("Type init code") as inner_code:
- self.generate_type_init_code(env, inner_code)
+ with subfunction("Type import code") as inner_code:
+ for module in imported_modules:
+ self.generate_type_import_code_for_module(module, env, inner_code)
- with subfunction("Type import code") as inner_code:
- for module in imported_modules:
- self.generate_type_import_code_for_module(module, env, inner_code)
+ with subfunction("Variable import code") as inner_code:
+ for module in imported_modules:
+ self.generate_c_variable_import_code_for_module(module, env, inner_code)
- with subfunction("Variable import code") as inner_code:
- for module in imported_modules:
- self.generate_c_variable_import_code_for_module(module, env, inner_code)
+ with subfunction("Function import code") as inner_code:
+ for module in imported_modules:
+ self.specialize_fused_types(module)
+ self.generate_c_function_import_code_for_module(module, env, inner_code)
- with subfunction("Function import code") as inner_code:
- for module in imported_modules:
- self.specialize_fused_types(module)
- self.generate_c_function_import_code_for_module(module, env, inner_code)
-
code.putln("/*--- Execution code ---*/")
code.mark_pos(None)
@@ -2522,11 +2522,11 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.put_finish_refcount_context()
- code.putln("#if CYTHON_PEP489_MULTI_PHASE_INIT")
- code.putln("return (%s != NULL) ? 0 : -1;" % env.module_cname)
- code.putln("#elif PY_MAJOR_VERSION >= 3")
- code.putln("return %s;" % env.module_cname)
- code.putln("#else")
+ code.putln("#if CYTHON_PEP489_MULTI_PHASE_INIT")
+ code.putln("return (%s != NULL) ? 0 : -1;" % env.module_cname)
+ code.putln("#elif PY_MAJOR_VERSION >= 3")
+ code.putln("return %s;" % env.module_cname)
+ code.putln("#else")
code.putln("return;")
code.putln("#endif")
code.putln('}')
@@ -2536,86 +2536,86 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.exit_cfunc_scope()
def mod_init_subfunction(self, pos, scope, orig_code):
- """
- Return a context manager that allows deviating the module init code generation
- into a separate function and instead inserts a call to it.
-
- Can be reused sequentially to create multiple functions.
- The functions get inserted at the point where the context manager was created.
- The call gets inserted where the context manager is used (on entry).
- """
- prototypes = orig_code.insertion_point()
- prototypes.putln("")
- function_code = orig_code.insertion_point()
- function_code.putln("")
-
- class ModInitSubfunction(object):
- def __init__(self, code_type):
- cname = '_'.join(code_type.lower().split())
- assert re.match("^[a-z0-9_]+$", cname)
- self.cfunc_name = "__Pyx_modinit_%s" % cname
- self.description = code_type
- self.tempdecl_code = None
- self.call_code = None
-
- def __enter__(self):
- self.call_code = orig_code.insertion_point()
- code = function_code
- code.enter_cfunc_scope(scope)
+ """
+ Return a context manager that allows deviating the module init code generation
+ into a separate function and instead inserts a call to it.
+
+ Can be reused sequentially to create multiple functions.
+ The functions get inserted at the point where the context manager was created.
+ The call gets inserted where the context manager is used (on entry).
+ """
+ prototypes = orig_code.insertion_point()
+ prototypes.putln("")
+ function_code = orig_code.insertion_point()
+ function_code.putln("")
+
+ class ModInitSubfunction(object):
+ def __init__(self, code_type):
+ cname = '_'.join(code_type.lower().split())
+ assert re.match("^[a-z0-9_]+$", cname)
+ self.cfunc_name = "__Pyx_modinit_%s" % cname
+ self.description = code_type
+ self.tempdecl_code = None
+ self.call_code = None
+
+ def __enter__(self):
+ self.call_code = orig_code.insertion_point()
+ code = function_code
+ code.enter_cfunc_scope(scope)
prototypes.putln("static CYTHON_SMALL_CODE int %s(void); /*proto*/" % self.cfunc_name)
- code.putln("static int %s(void) {" % self.cfunc_name)
- code.put_declare_refcount_context()
- self.tempdecl_code = code.insertion_point()
- code.put_setup_refcount_context(self.cfunc_name)
- # Leave a grepable marker that makes it easy to find the generator source.
- code.putln("/*--- %s ---*/" % self.description)
- return code
-
- def __exit__(self, *args):
- code = function_code
- code.put_finish_refcount_context()
- code.putln("return 0;")
-
- self.tempdecl_code.put_temp_declarations(code.funcstate)
- self.tempdecl_code = None
-
- needs_error_handling = code.label_used(code.error_label)
- if needs_error_handling:
- code.put_label(code.error_label)
- for cname, type in code.funcstate.all_managed_temps():
- code.put_xdecref(cname, type)
- code.put_finish_refcount_context()
- code.putln("return -1;")
- code.putln("}")
- code.exit_cfunc_scope()
- code.putln("")
-
- if needs_error_handling:
+ code.putln("static int %s(void) {" % self.cfunc_name)
+ code.put_declare_refcount_context()
+ self.tempdecl_code = code.insertion_point()
+ code.put_setup_refcount_context(self.cfunc_name)
+ # Leave a grepable marker that makes it easy to find the generator source.
+ code.putln("/*--- %s ---*/" % self.description)
+ return code
+
+ def __exit__(self, *args):
+ code = function_code
+ code.put_finish_refcount_context()
+ code.putln("return 0;")
+
+ self.tempdecl_code.put_temp_declarations(code.funcstate)
+ self.tempdecl_code = None
+
+ needs_error_handling = code.label_used(code.error_label)
+ if needs_error_handling:
+ code.put_label(code.error_label)
+ for cname, type in code.funcstate.all_managed_temps():
+ code.put_xdecref(cname, type)
+ code.put_finish_refcount_context()
+ code.putln("return -1;")
+ code.putln("}")
+ code.exit_cfunc_scope()
+ code.putln("")
+
+ if needs_error_handling:
self.call_code.putln(
self.call_code.error_goto_if_neg("%s()" % self.cfunc_name, pos))
- else:
- self.call_code.putln("(void)%s();" % self.cfunc_name)
- self.call_code = None
-
- return ModInitSubfunction
-
+ else:
+ self.call_code.putln("(void)%s();" % self.cfunc_name)
+ self.call_code = None
+
+ return ModInitSubfunction
+
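The context manager returned here is easiest to see with toy insertion points: prototypes and function bodies accumulate in buffers created up front, while the call is appended wherever the "with" block is entered. A condensed, runnable sketch (plain lists instead of Cython's CCodeWriter insertion points):

class Subfunction(object):
    def __init__(self, functions, calls, description):
        cname = "__Pyx_modinit_" + "_".join(description.lower().split())
        self.functions, self.calls, self.cfunc_name = functions, calls, cname

    def __enter__(self):
        self.calls.append("(void)%s();" % self.cfunc_name)   # call at point of use
        self.functions.append("static int %s(void) {" % self.cfunc_name)
        return self.functions

    def __exit__(self, *exc):
        self.functions.append("return 0;\n}")

functions, calls = [], []
with Subfunction(functions, calls, "Global init code") as code:
    code.append("/*--- Global init code ---*/")
print("\n".join(functions + calls))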
def generate_module_import_setup(self, env, code):
module_path = env.directives['set_initial_path']
if module_path == 'SOURCEFILE':
module_path = self.pos[0].filename
if module_path:
- code.putln('if (!CYTHON_PEP489_MULTI_PHASE_INIT) {')
+ code.putln('if (!CYTHON_PEP489_MULTI_PHASE_INIT) {')
code.putln('if (PyObject_SetAttrString(%s, "__file__", %s) < 0) %s;' % (
env.module_cname,
code.globalstate.get_py_string_const(
EncodedString(decode_filename(module_path))).cname,
code.error_goto(self.pos)))
- code.putln("}")
+ code.putln("}")
if env.is_package:
# set __path__ to mark the module as package
- code.putln('if (!CYTHON_PEP489_MULTI_PHASE_INIT) {')
+ code.putln('if (!CYTHON_PEP489_MULTI_PHASE_INIT) {')
temp = code.funcstate.allocate_temp(py_object_type, True)
code.putln('%s = Py_BuildValue("[O]", %s); %s' % (
temp,
@@ -2629,12 +2629,12 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
env.module_cname, temp, code.error_goto(self.pos)))
code.put_decref_clear(temp, py_object_type)
code.funcstate.release_temp(temp)
- code.putln("}")
+ code.putln("}")
elif env.is_package:
# packages require __path__, so all we can do is try to figure
# out the module path at runtime by rerunning the import lookup
- code.putln("if (!CYTHON_PEP489_MULTI_PHASE_INIT) {")
+ code.putln("if (!CYTHON_PEP489_MULTI_PHASE_INIT) {")
package_name, _ = self.full_module_name.rsplit('.', 1)
if '.' in package_name:
parent_name = '"%s"' % (package_name.rsplit('.', 1)[0],)
@@ -2648,7 +2648,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.globalstate.get_py_string_const(
EncodedString(env.module_name)).cname),
self.pos))
- code.putln("}")
+ code.putln("}")
# CPython may not have put us into sys.modules yet, but relative imports and reimports require it
fq_module_name = self.full_module_name
@@ -2750,9 +2750,9 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
main_method=Options.embed,
wmain_method=wmain))
- def mod_init_func_cname(self, prefix, env, options=None):
+ def mod_init_func_cname(self, prefix, env, options=None):
return '%s_%s' % (prefix, options and options.init_suffix or env.module_name)
-
+
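mod_init_func_cname simply glues the prefix to the init suffix or the module name; this is how the exported PyInit_<module> symbol gets its name. For a hypothetical module "spam":

def mod_init_func_cname(prefix, module_name, init_suffix=None):
    # Same composition as above: the suffix wins when options provide one.
    return '%s_%s' % (prefix, init_suffix or module_name)

assert mod_init_func_cname('PyInit', 'spam') == 'PyInit_spam'
assert mod_init_func_cname('PyInit', 'spam', init_suffix='v2') == 'PyInit_v2'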
def generate_pymoduledef_struct(self, env, options, code):
if env.doc:
doc = "%s" % code.get_string_const(env.doc)
@@ -2765,35 +2765,35 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.putln("")
code.putln("#if PY_MAJOR_VERSION >= 3")
- code.putln("#if CYTHON_PEP489_MULTI_PHASE_INIT")
- exec_func_cname = self.mod_init_func_cname(Naming.pymodule_exec_func_cname, env)
- code.putln("static PyObject* %s(PyObject *spec, PyModuleDef *def); /*proto*/" %
- Naming.pymodule_create_func_cname)
- code.putln("static int %s(PyObject* module); /*proto*/" % exec_func_cname)
-
- code.putln("static PyModuleDef_Slot %s[] = {" % Naming.pymoduledef_slots_cname)
- code.putln("{Py_mod_create, (void*)%s}," % Naming.pymodule_create_func_cname)
- code.putln("{Py_mod_exec, (void*)%s}," % exec_func_cname)
- code.putln("{0, NULL}")
- code.putln("};")
- code.putln("#endif")
-
- code.putln("")
+ code.putln("#if CYTHON_PEP489_MULTI_PHASE_INIT")
+ exec_func_cname = self.mod_init_func_cname(Naming.pymodule_exec_func_cname, env)
+ code.putln("static PyObject* %s(PyObject *spec, PyModuleDef *def); /*proto*/" %
+ Naming.pymodule_create_func_cname)
+ code.putln("static int %s(PyObject* module); /*proto*/" % exec_func_cname)
+
+ code.putln("static PyModuleDef_Slot %s[] = {" % Naming.pymoduledef_slots_cname)
+ code.putln("{Py_mod_create, (void*)%s}," % Naming.pymodule_create_func_cname)
+ code.putln("{Py_mod_exec, (void*)%s}," % exec_func_cname)
+ code.putln("{0, NULL}")
+ code.putln("};")
+ code.putln("#endif")
+
+ code.putln("")
code.putln("static struct PyModuleDef %s = {" % Naming.pymoduledef_cname)
code.putln(" PyModuleDef_HEAD_INIT,")
code.putln(' "%s",' % (options.module_name or env.module_name))
code.putln(" %s, /* m_doc */" % doc)
- code.putln("#if CYTHON_PEP489_MULTI_PHASE_INIT")
- code.putln(" 0, /* m_size */")
- code.putln("#else")
+ code.putln("#if CYTHON_PEP489_MULTI_PHASE_INIT")
+ code.putln(" 0, /* m_size */")
+ code.putln("#else")
code.putln(" -1, /* m_size */")
- code.putln("#endif")
+ code.putln("#endif")
code.putln(" %s /* m_methods */," % env.method_table_cname)
- code.putln("#if CYTHON_PEP489_MULTI_PHASE_INIT")
- code.putln(" %s, /* m_slots */" % Naming.pymoduledef_slots_cname)
- code.putln("#else")
+ code.putln("#if CYTHON_PEP489_MULTI_PHASE_INIT")
+ code.putln(" %s, /* m_slots */" % Naming.pymoduledef_slots_cname)
+ code.putln("#else")
code.putln(" NULL, /* m_reload */")
- code.putln("#endif")
+ code.putln("#endif")
code.putln(" NULL, /* m_traverse */")
code.putln(" NULL, /* m_clear */")
code.putln(" %s /* m_free */" % cleanup_func)
@@ -2807,13 +2807,13 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
doc = "%s" % code.get_string_const(env.doc)
else:
doc = "0"
-
- code.putln("#if CYTHON_PEP489_MULTI_PHASE_INIT")
- code.putln("%s = %s;" % (
- env.module_cname,
- Naming.pymodinit_module_arg))
- code.put_incref(env.module_cname, py_object_type, nanny=False)
- code.putln("#else")
+
+ code.putln("#if CYTHON_PEP489_MULTI_PHASE_INIT")
+ code.putln("%s = %s;" % (
+ env.module_cname,
+ Naming.pymodinit_module_arg))
+ code.put_incref(env.module_cname, py_object_type, nanny=False)
+ code.putln("#else")
code.putln("#if PY_MAJOR_VERSION < 3")
code.putln(
'%s = Py_InitModule4("%s", %s, %s, 0, PYTHON_API_VERSION); Py_XINCREF(%s);' % (
@@ -2829,8 +2829,8 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
Naming.pymoduledef_cname))
code.putln("#endif")
code.putln(code.error_goto_if_null(env.module_cname, self.pos))
- code.putln("#endif") # CYTHON_PEP489_MULTI_PHASE_INIT
-
+ code.putln("#endif") # CYTHON_PEP489_MULTI_PHASE_INIT
+
code.putln(
"%s = PyModule_GetDict(%s); %s" % (
env.module_dict_cname, env.module_cname,
@@ -2842,10 +2842,10 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
Naming.builtins_cname,
code.error_goto_if_null(Naming.builtins_cname, self.pos)))
code.put_incref(Naming.builtins_cname, py_object_type, nanny=False)
- code.putln(
- '%s = PyImport_AddModule((char *) "cython_runtime"); %s' % (
- Naming.cython_runtime_cname,
- code.error_goto_if_null(Naming.cython_runtime_cname, self.pos)))
+ code.putln(
+ '%s = PyImport_AddModule((char *) "cython_runtime"); %s' % (
+ Naming.cython_runtime_cname,
+ code.error_goto_if_null(Naming.cython_runtime_cname, self.pos)))
code.put_incref(Naming.cython_runtime_cname, py_object_type, nanny=False)
code.putln(
'if (PyObject_SetAttrString(%s, "__builtins__", %s) < 0) %s;' % (
@@ -3110,8 +3110,8 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.putln(' if (!%s) %s' % (type.typeptr_cname, error_code))
- def generate_type_ready_code(self, entry, code):
- Nodes.CClassDefNode.generate_type_ready_code(entry, code)
+ def generate_type_ready_code(self, entry, code):
+ Nodes.CClassDefNode.generate_type_ready_code(entry, code)
def generate_exttype_vtable_init_code(self, entry, code):
# Generate code to initialise the C method table of an
@@ -3188,7 +3188,7 @@ def generate_cfunction_declaration(entry, env, code, definition):
dll_linkage = "DL_IMPORT"
elif entry.visibility == 'public':
storage_class = Naming.extern_c_macro
- dll_linkage = None
+ dll_linkage = None
elif entry.visibility == 'private':
storage_class = "static"
dll_linkage = None
diff --git a/contrib/tools/cython/Cython/Compiler/Naming.py b/contrib/tools/cython/Cython/Compiler/Naming.py
index be555eb7d2..2c9b620788 100644
--- a/contrib/tools/cython/Cython/Compiler/Naming.py
+++ b/contrib/tools/cython/Cython/Compiler/Naming.py
@@ -102,10 +102,10 @@ print_function = pyrex_prefix + "print"
print_function_kwargs = pyrex_prefix + "print_kwargs"
cleanup_cname = pyrex_prefix + "module_cleanup"
pymoduledef_cname = pyrex_prefix + "moduledef"
-pymoduledef_slots_cname = pyrex_prefix + "moduledef_slots"
-pymodinit_module_arg = pyrex_prefix + "pyinit_module"
-pymodule_create_func_cname = pyrex_prefix + "pymod_create"
-pymodule_exec_func_cname = pyrex_prefix + "pymod_exec"
+pymoduledef_slots_cname = pyrex_prefix + "moduledef_slots"
+pymodinit_module_arg = pyrex_prefix + "pyinit_module"
+pymodule_create_func_cname = pyrex_prefix + "pymod_create"
+pymodule_exec_func_cname = pyrex_prefix + "pymod_exec"
optional_args_cname = pyrex_prefix + "optional_args"
import_star = pyrex_prefix + "import_star"
import_star_set = pyrex_prefix + "import_star_set"
@@ -120,7 +120,7 @@ quick_temp_cname = pyrex_prefix + "temp" # temp variable for quick'n'dirty tempi
tp_dict_version_temp = pyrex_prefix + "tp_dict_version"
obj_dict_version_temp = pyrex_prefix + "obj_dict_version"
type_dict_guard_temp = pyrex_prefix + "type_dict_guard"
-cython_runtime_cname = pyrex_prefix + "cython_runtime"
+cython_runtime_cname = pyrex_prefix + "cython_runtime"
global_code_object_cache_find = pyrex_prefix + 'find_code_object'
global_code_object_cache_insert = pyrex_prefix + 'insert_code_object'
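All of these cnames share the pyrex_prefix defined at the top of Naming.py ("__pyx_"), so the restored lines expand to names like the following:

pyrex_prefix = "__pyx_"   # as defined at the top of Naming.py

pymoduledef_slots_cname = pyrex_prefix + "moduledef_slots"
pymodule_exec_func_cname = pyrex_prefix + "pymod_exec"
cython_runtime_cname = pyrex_prefix + "cython_runtime"

assert pymodule_exec_func_cname == "__pyx_pymod_exec"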
diff --git a/contrib/tools/cython/Cython/Compiler/Nodes.py b/contrib/tools/cython/Cython/Compiler/Nodes.py
index 0796f40c0f..6436c5002d 100644
--- a/contrib/tools/cython/Cython/Compiler/Nodes.py
+++ b/contrib/tools/cython/Cython/Compiler/Nodes.py
@@ -10,7 +10,7 @@ cython.declare(sys=object, os=object, copy=object,
py_object_type=object, ModuleScope=object, LocalScope=object, ClosureScope=object,
StructOrUnionScope=object, PyClassScope=object,
CppClassScope=object, UtilityCode=object, EncodedString=object,
- error_type=object, _py_int_types=object)
+ error_type=object, _py_int_types=object)
import sys, os, copy
from itertools import chain
@@ -28,7 +28,7 @@ from .StringEncoding import EncodedString
from . import Future
from . import Options
from . import DebugFlags
-from .Pythran import has_np_pythran, pythran_type, is_pythran_buffer
+from .Pythran import has_np_pythran, pythran_type, is_pythran_buffer
from ..Utils import add_metaclass
@@ -39,7 +39,7 @@ else:
def relative_position(pos):
- return (pos[0].get_filenametable_entry(), pos[1])
+ return (pos[0].get_filenametable_entry(), pos[1])
def embed_position(pos, docstring):
@@ -68,13 +68,13 @@ def embed_position(pos, docstring):
return doc
-def analyse_type_annotation(annotation, env, assigned_value=None):
+def analyse_type_annotation(annotation, env, assigned_value=None):
base_type = None
- is_ambiguous = False
+ is_ambiguous = False
explicit_pytype = explicit_ctype = False
if annotation.is_dict_literal:
- warning(annotation.pos,
- "Dicts should no longer be used as type annotations. Use 'cython.int' etc. directly.")
+ warning(annotation.pos,
+ "Dicts should no longer be used as type annotations. Use 'cython.int' etc. directly.")
for name, value in annotation.key_value_pairs:
if not name.is_string_literal:
continue
@@ -88,30 +88,30 @@ def analyse_type_annotation(annotation, env, assigned_value=None):
if explicit_pytype and explicit_ctype:
warning(annotation.pos, "Duplicate type declarations found in signature annotation")
arg_type = annotation.analyse_as_type(env)
- if annotation.is_name and not annotation.cython_attribute and annotation.name in ('int', 'long', 'float'):
- # Map builtin numeric Python types to C types in safe cases.
- if assigned_value is not None and arg_type is not None and not arg_type.is_pyobject:
- assigned_type = assigned_value.infer_type(env)
- if assigned_type and assigned_type.is_pyobject:
- # C type seems unsafe, e.g. due to 'None' default value => ignore annotation type
- is_ambiguous = True
- arg_type = None
- # ignore 'int' and require 'cython.int' to avoid unsafe integer declarations
- if arg_type in (PyrexTypes.c_long_type, PyrexTypes.c_int_type, PyrexTypes.c_float_type):
- arg_type = PyrexTypes.c_double_type if annotation.name == 'float' else py_object_type
- elif arg_type is not None and annotation.is_string_literal:
- warning(annotation.pos,
- "Strings should no longer be used for type declarations. Use 'cython.int' etc. directly.")
+ if annotation.is_name and not annotation.cython_attribute and annotation.name in ('int', 'long', 'float'):
+ # Map builtin numeric Python types to C types in safe cases.
+ if assigned_value is not None and arg_type is not None and not arg_type.is_pyobject:
+ assigned_type = assigned_value.infer_type(env)
+ if assigned_type and assigned_type.is_pyobject:
+ # C type seems unsafe, e.g. due to 'None' default value => ignore annotation type
+ is_ambiguous = True
+ arg_type = None
+ # ignore 'int' and require 'cython.int' to avoid unsafe integer declarations
+ if arg_type in (PyrexTypes.c_long_type, PyrexTypes.c_int_type, PyrexTypes.c_float_type):
+ arg_type = PyrexTypes.c_double_type if annotation.name == 'float' else py_object_type
+ elif arg_type is not None and annotation.is_string_literal:
+ warning(annotation.pos,
+ "Strings should no longer be used for type declarations. Use 'cython.int' etc. directly.")
if arg_type is not None:
if explicit_pytype and not explicit_ctype and not arg_type.is_pyobject:
warning(annotation.pos,
"Python type declaration in signature annotation does not refer to a Python type")
base_type = CAnalysedBaseTypeNode(
annotation.pos, type=arg_type, is_arg=True)
- elif is_ambiguous:
- warning(annotation.pos, "Ambiguous types in annotation, ignoring")
+ elif is_ambiguous:
+ warning(annotation.pos, "Ambiguous types in annotation, ignoring")
else:
- warning(annotation.pos, "Unknown type declaration in annotation, ignoring")
+ warning(annotation.pos, "Unknown type declaration in annotation, ignoring")
return base_type, arg_type
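The net effect of the numeric-annotation handling above, shown on two hypothetical signatures: a plain int annotation is ignored whenever the default value forces a Python object, while float safely maps to a C double.

# Both defs are valid Python; under the rules above, only the second would be
# narrowed to a C type (double), because None cannot live in a C int.
def f(x: int = None):   # ambiguous: annotation dropped, x stays a PyObject
    return x

def g(x: float = 1.0):  # safe: 'float' maps to c_double_type
    return x * 2

assert f(None) is None and g(2.0) == 4.0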
@@ -474,9 +474,9 @@ class StatNode(Node):
class CDefExternNode(StatNode):
- # include_file string or None
- # verbatim_include string or None
- # body StatListNode
+ # include_file string or None
+ # verbatim_include string or None
+ # body StatListNode
child_attrs = ["body"]
@@ -486,18 +486,18 @@ class CDefExternNode(StatNode):
self.body.analyse_declarations(env)
env.in_cinclude = old_cinclude_flag
- if self.include_file or self.verbatim_include:
- # Determine whether include should be late
- stats = self.body.stats
- if not env.directives['preliminary_late_includes_cy28']:
- late = False
- elif not stats:
- # Special case: empty 'cdef extern' blocks are early
- late = False
- else:
- late = all(isinstance(node, CVarDefNode) for node in stats)
- env.add_include_file(self.include_file, self.verbatim_include, late)
-
+ if self.include_file or self.verbatim_include:
+ # Determine whether include should be late
+ stats = self.body.stats
+ if not env.directives['preliminary_late_includes_cy28']:
+ late = False
+ elif not stats:
+ # Special case: empty 'cdef extern' blocks are early
+ late = False
+ else:
+ late = all(isinstance(node, CVarDefNode) for node in stats)
+ env.add_include_file(self.include_file, self.verbatim_include, late)
+
def analyse_expressions(self, env):
return self
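The late-include decision above reduces to three checks, restated here with plain values (a stub predicate instead of isinstance(node, CVarDefNode)):

def include_is_late(directive_on, stats, is_var_def):
    if not directive_on:
        return False           # feature gated by preliminary_late_includes_cy28
    if not stats:
        return False           # empty 'cdef extern' blocks stay early
    return all(is_var_def(node) for node in stats)

assert include_is_late(True, ["a", "b"], lambda n: True) is True
assert include_is_late(True, [], lambda n: True) is False
assert include_is_late(False, ["a"], lambda n: True) is False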
@@ -539,7 +539,7 @@ class CNameDeclaratorNode(CDeclaratorNode):
default = None
- def analyse(self, base_type, env, nonempty=0, visibility=None, in_pxd=False):
+ def analyse(self, base_type, env, nonempty=0, visibility=None, in_pxd=False):
if nonempty and self.name == '':
# May have mistaken the name for the type.
if base_type.is_ptr or base_type.is_array or base_type.is_buffer:
@@ -565,11 +565,11 @@ class CPtrDeclaratorNode(CDeclaratorNode):
def analyse_templates(self):
return self.base.analyse_templates()
- def analyse(self, base_type, env, nonempty=0, visibility=None, in_pxd=False):
+ def analyse(self, base_type, env, nonempty=0, visibility=None, in_pxd=False):
if base_type.is_pyobject:
error(self.pos, "Pointer base type cannot be a Python object")
ptr_type = PyrexTypes.c_ptr_type(base_type)
- return self.base.analyse(ptr_type, env, nonempty=nonempty, visibility=visibility, in_pxd=in_pxd)
+ return self.base.analyse(ptr_type, env, nonempty=nonempty, visibility=visibility, in_pxd=in_pxd)
class CReferenceDeclaratorNode(CDeclaratorNode):
@@ -580,11 +580,11 @@ class CReferenceDeclaratorNode(CDeclaratorNode):
def analyse_templates(self):
return self.base.analyse_templates()
- def analyse(self, base_type, env, nonempty=0, visibility=None, in_pxd=False):
+ def analyse(self, base_type, env, nonempty=0, visibility=None, in_pxd=False):
if base_type.is_pyobject:
error(self.pos, "Reference base type cannot be a Python object")
ref_type = PyrexTypes.c_ref_type(base_type)
- return self.base.analyse(ref_type, env, nonempty=nonempty, visibility=visibility, in_pxd=in_pxd)
+ return self.base.analyse(ref_type, env, nonempty=nonempty, visibility=visibility, in_pxd=in_pxd)
class CArrayDeclaratorNode(CDeclaratorNode):
@@ -593,7 +593,7 @@ class CArrayDeclaratorNode(CDeclaratorNode):
child_attrs = ["base", "dimension"]
- def analyse(self, base_type, env, nonempty=0, visibility=None, in_pxd=False):
+ def analyse(self, base_type, env, nonempty=0, visibility=None, in_pxd=False):
if (base_type.is_cpp_class and base_type.is_template_type()) or base_type.is_cfunction:
from .ExprNodes import TupleNode
if isinstance(self.dimension, TupleNode):
@@ -607,7 +607,7 @@ class CArrayDeclaratorNode(CDeclaratorNode):
base_type = error_type
else:
base_type = base_type.specialize_here(self.pos, values)
- return self.base.analyse(base_type, env, nonempty=nonempty, visibility=visibility, in_pxd=in_pxd)
+ return self.base.analyse(base_type, env, nonempty=nonempty, visibility=visibility, in_pxd=in_pxd)
if self.dimension:
self.dimension = self.dimension.analyse_const_expression(env)
if not self.dimension.type.is_int:
@@ -628,7 +628,7 @@ class CArrayDeclaratorNode(CDeclaratorNode):
if base_type.is_cfunction:
error(self.pos, "Array element cannot be a function")
array_type = PyrexTypes.c_array_type(base_type, size)
- return self.base.analyse(array_type, env, nonempty=nonempty, visibility=visibility, in_pxd=in_pxd)
+ return self.base.analyse(array_type, env, nonempty=nonempty, visibility=visibility, in_pxd=in_pxd)
class CFuncDeclaratorNode(CDeclaratorNode):
@@ -671,7 +671,7 @@ class CFuncDeclaratorNode(CDeclaratorNode):
else:
return None
- def analyse(self, return_type, env, nonempty=0, directive_locals=None, visibility=None, in_pxd=False):
+ def analyse(self, return_type, env, nonempty=0, directive_locals=None, visibility=None, in_pxd=False):
if directive_locals is None:
directive_locals = {}
if nonempty:
@@ -723,16 +723,16 @@ class CFuncDeclaratorNode(CDeclaratorNode):
and self.exception_check != '+'):
error(self.pos, "Exception clause not allowed for function returning Python object")
else:
- if self.exception_value is None and self.exception_check and self.exception_check != '+':
- # Use an explicit exception return value to speed up exception checks.
- # Even if it is not declared, we can use the default exception value of the return type,
- # unless the function is some kind of external function that we do not control.
- if return_type.exception_value is not None and (visibility != 'extern' and not in_pxd):
- # Extension types are more difficult because the signature must match the base type signature.
- if not env.is_c_class_scope:
- from .ExprNodes import ConstNode
- self.exception_value = ConstNode(
- self.pos, value=return_type.exception_value, type=return_type)
+ if self.exception_value is None and self.exception_check and self.exception_check != '+':
+ # Use an explicit exception return value to speed up exception checks.
+ # Even if it is not declared, we can use the default exception value of the return type,
+ # unless the function is some kind of external function that we do not control.
+ if return_type.exception_value is not None and (visibility != 'extern' and not in_pxd):
+ # Extension types are more difficult because the signature must match the base type signature.
+ if not env.is_c_class_scope:
+ from .ExprNodes import ConstNode
+ self.exception_value = ConstNode(
+ self.pos, value=return_type.exception_value, type=return_type)
if self.exception_value:
self.exception_value = self.exception_value.analyse_const_expression(env)
if self.exception_check == '+':
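Condensed decision table for the exception-value defaulting above (a toy function; the visibility and pxd checks follow the source):

def default_exception_value(declared, check, type_default, visibility, in_pxd,
                            in_c_class_scope):
    if declared is not None or not check or check == '+':
        return declared
    if type_default is None or visibility == 'extern' or in_pxd:
        return declared        # signatures we don't control keep implicit checks
    if in_c_class_scope:
        return declared        # must keep matching the base type signature
    return type_default        # e.g. -1 for int returns: faster error tests

assert default_exception_value(None, True, -1, 'private', False, False) == -1
assert default_exception_value(None, True, -1, 'extern', False, False) is None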
@@ -789,7 +789,7 @@ class CFuncDeclaratorNode(CDeclaratorNode):
error(self.pos, "cannot have both '%s' and '%s' "
"calling conventions" % (current, callspec))
func_type.calling_convention = callspec
- return self.base.analyse(func_type, env, visibility=visibility, in_pxd=in_pxd)
+ return self.base.analyse(func_type, env, visibility=visibility, in_pxd=in_pxd)
def declare_optional_arg_struct(self, func_type, env, fused_cname=None):
"""
@@ -803,7 +803,7 @@ class CFuncDeclaratorNode(CDeclaratorNode):
scope.declare_var(arg_count_member, PyrexTypes.c_int_type, self.pos)
for arg in func_type.args[len(func_type.args) - self.optional_arg_count:]:
- scope.declare_var(arg.name, arg.type, arg.pos, allow_pyobject=True, allow_memoryview=True)
+ scope.declare_var(arg.name, arg.type, arg.pos, allow_pyobject=True, allow_memoryview=True)
struct_cname = env.mangle(Naming.opt_arg_prefix, self.base.name)
@@ -829,12 +829,12 @@ class CConstDeclaratorNode(CDeclaratorNode):
child_attrs = ["base"]
- def analyse(self, base_type, env, nonempty=0, visibility=None, in_pxd=False):
+ def analyse(self, base_type, env, nonempty=0, visibility=None, in_pxd=False):
if base_type.is_pyobject:
error(self.pos,
"Const base type cannot be a Python object")
const = PyrexTypes.c_const_type(base_type)
- return self.base.analyse(const, env, nonempty=nonempty, visibility=visibility, in_pxd=in_pxd)
+ return self.base.analyse(const, env, nonempty=nonempty, visibility=visibility, in_pxd=in_pxd)
class CArgDeclNode(Node):
@@ -905,8 +905,8 @@ class CArgDeclNode(Node):
base_type = base_type.base_type
# inject type declaration from annotations
- # this is called without 'env' by AdjustDefByDirectives transform before declaration analysis
- if self.annotation and env and env.directives['annotation_typing'] and self.base_type.name is None:
+ # this is called without 'env' by AdjustDefByDirectives transform before declaration analysis
+ if self.annotation and env and env.directives['annotation_typing'] and self.base_type.name is None:
arg_type = self.inject_type_from_annotations(env)
if arg_type is not None:
base_type = arg_type
@@ -918,7 +918,7 @@ class CArgDeclNode(Node):
annotation = self.annotation
if not annotation:
return None
- base_type, arg_type = analyse_type_annotation(annotation, env, assigned_value=self.default)
+ base_type, arg_type = analyse_type_annotation(annotation, env, assigned_value=self.default)
if base_type is not None:
self.base_type = base_type
return arg_type
@@ -1155,7 +1155,7 @@ class TemplatedTypeNode(CBaseTypeNode):
type = template_node.analyse_as_type(env)
if type is None:
error(template_node.pos, "unknown type in template argument")
- type = error_type
+ type = error_type
template_types.append(type)
self.type = base_type.specialize_here(self.pos, template_types)
@@ -1176,8 +1176,8 @@ class TemplatedTypeNode(CBaseTypeNode):
for name, value in options.items()])
self.type = PyrexTypes.BufferType(base_type, **options)
- if has_np_pythran(env) and is_pythran_buffer(self.type):
- self.type = PyrexTypes.PythranExpr(pythran_type(self.type), self.type)
+ if has_np_pythran(env) and is_pythran_buffer(self.type):
+ self.type = PyrexTypes.PythranExpr(pythran_type(self.type), self.type)
else:
# Array
@@ -1352,11 +1352,11 @@ class CVarDefNode(StatNode):
if create_extern_wrapper:
declarator.overridable = False
if isinstance(declarator, CFuncDeclaratorNode):
- name_declarator, type = declarator.analyse(
- base_type, env, directive_locals=self.directive_locals, visibility=visibility, in_pxd=self.in_pxd)
+ name_declarator, type = declarator.analyse(
+ base_type, env, directive_locals=self.directive_locals, visibility=visibility, in_pxd=self.in_pxd)
else:
- name_declarator, type = declarator.analyse(
- base_type, env, visibility=visibility, in_pxd=self.in_pxd)
+ name_declarator, type = declarator.analyse(
+ base_type, env, visibility=visibility, in_pxd=self.in_pxd)
if not type.is_complete():
if not (self.visibility == 'extern' and type.is_array or type.is_memoryviewslice):
error(declarator.pos, "Variable type '%s' is incomplete" % type)
@@ -1367,8 +1367,8 @@ class CVarDefNode(StatNode):
if name == '':
error(declarator.pos, "Missing name in declaration.")
return
- if type.is_reference and self.visibility != 'extern':
- error(declarator.pos, "C++ references cannot be declared; use a pointer instead")
+ if type.is_reference and self.visibility != 'extern':
+ error(declarator.pos, "C++ references cannot be declared; use a pointer instead")
if type.is_cfunction:
if 'staticmethod' in env.directives:
type.is_static_method = True
@@ -1611,8 +1611,8 @@ class CTypeDefNode(StatNode):
def analyse_declarations(self, env):
base = self.base_type.analyse(env)
- name_declarator, type = self.declarator.analyse(
- base, env, visibility=self.visibility, in_pxd=self.in_pxd)
+ name_declarator, type = self.declarator.analyse(
+ base, env, visibility=self.visibility, in_pxd=self.in_pxd)
name = name_declarator.name
cname = name_declarator.cname
@@ -1684,18 +1684,18 @@ class FuncDefNode(StatNode, BlockNode):
elif default_seen:
error(arg.pos, "Non-default argument following default argument")
- def analyse_annotation(self, env, annotation):
- # Annotations can not only contain valid Python expressions but arbitrary type references.
- if annotation is None:
- return None
- if not env.directives['annotation_typing'] or annotation.analyse_as_type(env) is None:
- annotation = annotation.analyse_types(env)
- return annotation
-
+ def analyse_annotation(self, env, annotation):
+        # Annotations can contain not only valid Python expressions but also arbitrary type references.
+ if annotation is None:
+ return None
+ if not env.directives['annotation_typing'] or annotation.analyse_as_type(env) is None:
+ annotation = annotation.analyse_types(env)
+ return annotation
+
def analyse_annotations(self, env):
for arg in self.args:
if arg.annotation:
- arg.annotation = self.analyse_annotation(env, arg.annotation)
+ arg.annotation = self.analyse_annotation(env, arg.annotation)
def align_argument_type(self, env, arg):
# @cython.locals()
@@ -1869,16 +1869,16 @@ class FuncDefNode(StatNode, BlockNode):
code.declare_gilstate()
if profile or linetrace:
- if not self.is_generator:
- # generators are traced when iterated, not at creation
- tempvardecl_code.put_trace_declarations()
- code_object = self.code_object.calculate_result_code(code) if self.code_object else None
- code.put_trace_frame_init(code_object)
-
- # ----- Special check for getbuffer
- if is_getbuffer_slot:
- self.getbuffer_check(code)
-
+ if not self.is_generator:
+ # generators are traced when iterated, not at creation
+ tempvardecl_code.put_trace_declarations()
+ code_object = self.code_object.calculate_result_code(code) if self.code_object else None
+ code.put_trace_frame_init(code_object)
+
+ # ----- Special check for getbuffer
+ if is_getbuffer_slot:
+ self.getbuffer_check(code)
+
# ----- set up refnanny
if use_refnanny:
tempvardecl_code.put_declare_refcount_context()
@@ -1904,8 +1904,8 @@ class FuncDefNode(StatNode, BlockNode):
# Scope unconditionally DECREFed on return.
code.putln("%s = %s;" % (
Naming.cur_scope_cname,
- lenv.scope_class.type.cast_code("Py_None")))
- code.put_incref("Py_None", py_object_type)
+ lenv.scope_class.type.cast_code("Py_None")))
+ code.put_incref("Py_None", py_object_type)
code.putln(code.error_goto(self.pos))
code.putln("} else {")
code.put_gotref(Naming.cur_scope_cname)
@@ -1932,14 +1932,14 @@ class FuncDefNode(StatNode, BlockNode):
if profile or linetrace:
# this looks a bit late, but if we don't get here due to a
# fatal error beforehand, it's not really worth tracing
- if not self.is_generator:
- # generators are traced when iterated, not at creation
- if self.is_wrapper:
- trace_name = self.entry.name + " (wrapper)"
- else:
- trace_name = self.entry.name
- code.put_trace_call(
- trace_name, self.pos, nogil=not code.funcstate.gil_owned)
+ if not self.is_generator:
+ # generators are traced when iterated, not at creation
+ if self.is_wrapper:
+ trace_name = self.entry.name + " (wrapper)"
+ else:
+ trace_name = self.entry.name
+ code.put_trace_call(
+ trace_name, self.pos, nogil=not code.funcstate.gil_owned)
code.funcstate.can_trace = True
# ----- Fetch arguments
self.generate_argument_parsing_code(env, code)
@@ -1952,7 +1952,7 @@ class FuncDefNode(StatNode, BlockNode):
code.put_var_incref(entry)
# Note: defaults are always incref-ed. For def functions, we
- # we acquire arguments from object conversion, so we have
+            # acquire arguments from object conversion, so we have
# new references. If we are a cdef function, we need to
# incref our arguments
elif is_cdef and entry.type.is_memoryviewslice and len(entry.cf_assignments) > 1:
@@ -2001,8 +2001,8 @@ class FuncDefNode(StatNode, BlockNode):
val = self.return_type.default_value
if val:
code.putln("%s = %s;" % (Naming.retval_cname, val))
- elif not self.return_type.is_void:
- code.putln("__Pyx_pretend_to_initialize(&%s);" % Naming.retval_cname)
+ elif not self.return_type.is_void:
+ code.putln("__Pyx_pretend_to_initialize(&%s);" % Naming.retval_cname)
# ----- Error cleanup
if code.error_label in code.labels_used:
if not self.body.is_terminator:
@@ -2058,8 +2058,8 @@ class FuncDefNode(StatNode, BlockNode):
if err_val is not None:
if err_val != Naming.retval_cname:
code.putln("%s = %s;" % (Naming.retval_cname, err_val))
- elif not self.return_type.is_void:
- code.putln("__Pyx_pretend_to_initialize(&%s);" % Naming.retval_cname)
+ elif not self.return_type.is_void:
+ code.putln("__Pyx_pretend_to_initialize(&%s);" % Naming.retval_cname)
if is_getbuffer_slot:
self.getbuffer_error_cleanup(code)
@@ -2141,14 +2141,14 @@ class FuncDefNode(StatNode, BlockNode):
if profile or linetrace:
code.funcstate.can_trace = False
- if not self.is_generator:
- # generators are traced when iterated, not at creation
- if self.return_type.is_pyobject:
- code.put_trace_return(
- Naming.retval_cname, nogil=not code.funcstate.gil_owned)
- else:
- code.put_trace_return(
- "Py_None", nogil=not code.funcstate.gil_owned)
+ if not self.is_generator:
+ # generators are traced when iterated, not at creation
+ if self.return_type.is_pyobject:
+ code.put_trace_return(
+ Naming.retval_cname, nogil=not code.funcstate.gil_owned)
+ else:
+ code.put_trace_return(
+ "Py_None", nogil=not code.funcstate.gil_owned)
if not lenv.nogil:
# GIL holding function
@@ -2181,10 +2181,10 @@ class FuncDefNode(StatNode, BlockNode):
error(arg.pos, "Invalid use of 'void'")
elif not arg.type.is_complete() and not (arg.type.is_array or arg.type.is_memoryviewslice):
error(arg.pos, "Argument type '%s' is incomplete" % arg.type)
- entry = env.declare_arg(arg.name, arg.type, arg.pos)
- if arg.annotation:
- entry.annotation = arg.annotation
- return entry
+ entry = env.declare_arg(arg.name, arg.type, arg.pos)
+ if arg.annotation:
+ entry.annotation = arg.annotation
+ return entry
def generate_arg_type_test(self, arg, code):
# Generate type test for one argument.
@@ -2230,59 +2230,59 @@ class FuncDefNode(StatNode, BlockNode):
#
# Special code for the __getbuffer__ function
#
- def _get_py_buffer_info(self):
- py_buffer = self.local_scope.arg_entries[1]
- try:
- # Check builtin definition of struct Py_buffer
- obj_type = py_buffer.type.base_type.scope.entries['obj'].type
- except (AttributeError, KeyError):
- # User code redeclared struct Py_buffer
- obj_type = None
- return py_buffer, obj_type
-
- # Old Python 3 used to support write-locks on buffer-like objects by
- # calling PyObject_GetBuffer() with a view==NULL parameter. This obscure
- # feature is obsolete, it was almost never used (only one instance in
- # `Modules/posixmodule.c` in Python 3.1) and it is now officially removed
- # (see bpo-14203). We add an extra check here to prevent legacy code from
- # from trying to use the feature and prevent segmentation faults.
- def getbuffer_check(self, code):
- py_buffer, _ = self._get_py_buffer_info()
- view = py_buffer.cname
- code.putln("if (%s == NULL) {" % view)
- code.putln("PyErr_SetString(PyExc_BufferError, "
- "\"PyObject_GetBuffer: view==NULL argument is obsolete\");")
- code.putln("return -1;")
+ def _get_py_buffer_info(self):
+ py_buffer = self.local_scope.arg_entries[1]
+ try:
+ # Check builtin definition of struct Py_buffer
+ obj_type = py_buffer.type.base_type.scope.entries['obj'].type
+ except (AttributeError, KeyError):
+ # User code redeclared struct Py_buffer
+ obj_type = None
+ return py_buffer, obj_type
+
+    # Older Python 3 versions supported write-locks on buffer-like objects by
+    # calling PyObject_GetBuffer() with a view==NULL parameter. This obscure
+    # feature is obsolete, was almost never used (only one instance in
+    # `Modules/posixmodule.c` in Python 3.1) and is now officially removed
+    # (see bpo-14203). We add an extra check here to prevent legacy code
+    # from trying to use the feature and to prevent segmentation faults.
+ def getbuffer_check(self, code):
+ py_buffer, _ = self._get_py_buffer_info()
+ view = py_buffer.cname
+ code.putln("if (%s == NULL) {" % view)
+ code.putln("PyErr_SetString(PyExc_BufferError, "
+ "\"PyObject_GetBuffer: view==NULL argument is obsolete\");")
+ code.putln("return -1;")
code.putln("}")
- def getbuffer_init(self, code):
- py_buffer, obj_type = self._get_py_buffer_info()
- view = py_buffer.cname
- if obj_type and obj_type.is_pyobject:
- code.put_init_to_py_none("%s->obj" % view, obj_type)
- code.put_giveref("%s->obj" % view) # Do not refnanny object within structs
- else:
- code.putln("%s->obj = NULL;" % view)
-
+ def getbuffer_init(self, code):
+ py_buffer, obj_type = self._get_py_buffer_info()
+ view = py_buffer.cname
+ if obj_type and obj_type.is_pyobject:
+ code.put_init_to_py_none("%s->obj" % view, obj_type)
+ code.put_giveref("%s->obj" % view) # Do not refnanny object within structs
+ else:
+ code.putln("%s->obj = NULL;" % view)
+
def getbuffer_error_cleanup(self, code):
- py_buffer, obj_type = self._get_py_buffer_info()
- view = py_buffer.cname
- if obj_type and obj_type.is_pyobject:
- code.putln("if (%s->obj != NULL) {" % view)
- code.put_gotref("%s->obj" % view)
- code.put_decref_clear("%s->obj" % view, obj_type)
- code.putln("}")
- else:
- code.putln("Py_CLEAR(%s->obj);" % view)
+ py_buffer, obj_type = self._get_py_buffer_info()
+ view = py_buffer.cname
+ if obj_type and obj_type.is_pyobject:
+ code.putln("if (%s->obj != NULL) {" % view)
+ code.put_gotref("%s->obj" % view)
+ code.put_decref_clear("%s->obj" % view, obj_type)
+ code.putln("}")
+ else:
+ code.putln("Py_CLEAR(%s->obj);" % view)
def getbuffer_normal_cleanup(self, code):
- py_buffer, obj_type = self._get_py_buffer_info()
- view = py_buffer.cname
- if obj_type and obj_type.is_pyobject:
- code.putln("if (%s->obj == Py_None) {" % view)
- code.put_gotref("%s->obj" % view)
- code.put_decref_clear("%s->obj" % view, obj_type)
- code.putln("}")
+ py_buffer, obj_type = self._get_py_buffer_info()
+ view = py_buffer.cname
+ if obj_type and obj_type.is_pyobject:
+ code.putln("if (%s->obj == Py_None) {" % view)
+ code.put_gotref("%s->obj" % view)
+ code.put_decref_clear("%s->obj" % view, obj_type)
+ code.putln("}")
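The restored getbuffer_check emits exactly this guard at the top of every __getbuffer__ slot; in Python terms (a toy function, with a dict standing in for the Py_buffer pointer):

def getbuffer(view):
    if view is None:   # ~ "if (view == NULL)" in the generated C
        raise BufferError("PyObject_GetBuffer: view==NULL argument is obsolete")
    view["obj"] = None          # getbuffer_init: no owner reference yet
    return 0

try:
    getbuffer(None)
except BufferError as exc:
    assert "obsolete" in str(exc)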
def get_preprocessor_guard(self):
if not self.entry.is_special:
@@ -2358,10 +2358,10 @@ class CFuncDefNode(FuncDefNode):
if isinstance(self.declarator, CFuncDeclaratorNode):
name_declarator, type = self.declarator.analyse(
base_type, env, nonempty=2 * (self.body is not None),
- directive_locals=self.directive_locals, visibility=self.visibility)
+ directive_locals=self.directive_locals, visibility=self.visibility)
else:
name_declarator, type = self.declarator.analyse(
- base_type, env, nonempty=2 * (self.body is not None), visibility=self.visibility)
+ base_type, env, nonempty=2 * (self.body is not None), visibility=self.visibility)
if not type.is_cfunction:
error(self.pos, "Suite attached to non-function declaration")
# Remember the actual type according to the function header
@@ -2400,7 +2400,7 @@ class CFuncDefNode(FuncDefNode):
if type_arg.type.is_buffer and 'inline' in self.modifiers:
warning(formal_arg.pos, "Buffer unpacking not optimized away.", 1)
- if type_arg.type.is_buffer or type_arg.type.is_pythran_expr:
+ if type_arg.type.is_buffer or type_arg.type.is_pythran_expr:
if self.type.nogil:
error(formal_arg.pos,
"Buffer may not be acquired without the GIL. Consider using memoryview slices instead.")
@@ -2752,9 +2752,9 @@ class DefNode(FuncDefNode):
child_attrs = ["args", "star_arg", "starstar_arg", "body", "decorators", "return_type_annotation"]
outer_attrs = ["decorators", "return_type_annotation"]
- is_staticmethod = False
- is_classmethod = False
-
+ is_staticmethod = False
+ is_classmethod = False
+
lambda_name = None
reqd_kw_flags_cname = "0"
is_wrapper = 0
@@ -2797,22 +2797,22 @@ class DefNode(FuncDefNode):
error(self.star_arg.pos, "cdef function cannot have star argument")
if self.starstar_arg:
error(self.starstar_arg.pos, "cdef function cannot have starstar argument")
- exception_value, exception_check = except_val or (None, False)
-
+ exception_value, exception_check = except_val or (None, False)
+
if cfunc is None:
cfunc_args = []
for formal_arg in self.args:
name_declarator, type = formal_arg.analyse(scope, nonempty=1)
cfunc_args.append(PyrexTypes.CFuncTypeArg(name=name_declarator.name,
cname=None,
- annotation=formal_arg.annotation,
+ annotation=formal_arg.annotation,
type=py_object_type,
pos=formal_arg.pos))
cfunc_type = PyrexTypes.CFuncType(return_type=py_object_type,
args=cfunc_args,
has_varargs=False,
exception_value=None,
- exception_check=exception_check,
+ exception_check=exception_check,
nogil=nogil,
with_gil=with_gil,
is_overridable=overridable)
@@ -2830,10 +2830,10 @@ class DefNode(FuncDefNode):
if type is None or type is PyrexTypes.py_object_type:
formal_arg.type = type_arg.type
formal_arg.name_declarator = name_declarator
-
- if exception_value is None and cfunc_type.exception_value is not None:
- from .ExprNodes import ConstNode
- exception_value = ConstNode(
+
+ if exception_value is None and cfunc_type.exception_value is not None:
+ from .ExprNodes import ConstNode
+ exception_value = ConstNode(
self.pos, value=cfunc_type.exception_value, type=cfunc_type.return_type)
declarator = CFuncDeclaratorNode(self.pos,
base=CNameDeclaratorNode(self.pos, name=self.name, cname=None),
@@ -2898,7 +2898,7 @@ class DefNode(FuncDefNode):
# if a signature annotation provides a more specific return object type, use it
if self.return_type is py_object_type and self.return_type_annotation:
if env.directives['annotation_typing'] and not self.entry.is_special:
- _, return_type = analyse_type_annotation(self.return_type_annotation, env)
+ _, return_type = analyse_type_annotation(self.return_type_annotation, env)
if return_type and return_type.is_pyobject:
self.return_type = return_type
@@ -2926,13 +2926,13 @@ class DefNode(FuncDefNode):
name_declarator = None
else:
base_type = arg.base_type.analyse(env)
- # If we hare in pythran mode and we got a buffer supported by
- # Pythran, we change this node to a fused type
- if has_np_pythran(env) and base_type.is_pythran_expr:
- base_type = PyrexTypes.FusedType([
- base_type,
- #PyrexTypes.PythranExpr(pythran_type(self.type, "numpy_texpr")),
- base_type.org_buffer])
+            # If we are in Pythran mode and we got a buffer supported by
+            # Pythran, we change this node to a fused type.
+ if has_np_pythran(env) and base_type.is_pythran_expr:
+ base_type = PyrexTypes.FusedType([
+ base_type,
+ #PyrexTypes.PythranExpr(pythran_type(self.type, "numpy_texpr")),
+ base_type.org_buffer])
name_declarator, type = \
arg.declarator.analyse(base_type, env)
arg.name = name_declarator.name
@@ -2973,11 +2973,11 @@ class DefNode(FuncDefNode):
error(arg.pos, "Only Python type arguments can have 'or None'")
env.fused_to_specific = f2s
- if has_np_pythran(env):
- self.np_args_idx = [i for i,a in enumerate(self.args) if a.type.is_numpy_buffer]
- else:
- self.np_args_idx = []
-
+ if has_np_pythran(env):
+ self.np_args_idx = [i for i,a in enumerate(self.args) if a.type.is_numpy_buffer]
+ else:
+ self.np_args_idx = []
+
def analyse_signature(self, env):
if self.entry.is_special:
if self.decorators:
@@ -3133,7 +3133,7 @@ class DefNode(FuncDefNode):
self.analyse_default_values(env)
self.analyse_annotations(env)
if self.return_type_annotation:
- self.return_type_annotation = self.analyse_annotation(env, self.return_type_annotation)
+ self.return_type_annotation = self.analyse_annotation(env, self.return_type_annotation)
if not self.needs_assignment_synthesis(env) and self.decorators:
for decorator in self.decorators[::-1]:
@@ -3208,10 +3208,10 @@ class DefNode(FuncDefNode):
arg_code_list.append(arg_decl_code(self.star_arg))
if self.starstar_arg:
arg_code_list.append(arg_decl_code(self.starstar_arg))
- if arg_code_list:
- arg_code = ', '.join(arg_code_list)
- else:
- arg_code = 'void' # No arguments
+ if arg_code_list:
+ arg_code = ', '.join(arg_code_list)
+ else:
+ arg_code = 'void' # No arguments
dc = self.return_type.declaration_code(self.entry.pyfunc_cname)
decls_code = code.globalstate['decls']
@@ -3276,8 +3276,8 @@ class DefNodeWrapper(FuncDefNode):
self.signature = target_entry.signature
- self.np_args_idx = self.target.np_args_idx
-
+ self.np_args_idx = self.target.np_args_idx
+
def prepare_argument_coercion(self, env):
# This is only really required for Cython utility code at this time,
# everything else can be done during code generation. But we expand
@@ -3705,8 +3705,8 @@ class DefNodeWrapper(FuncDefNode):
if not arg.default:
pystring_cname = code.intern_identifier(arg.name)
# required keyword-only argument missing
- code.globalstate.use_utility_code(
- UtilityCode.load_cached("RaiseKeywordRequired", "FunctionArguments.c"))
+ code.globalstate.use_utility_code(
+ UtilityCode.load_cached("RaiseKeywordRequired", "FunctionArguments.c"))
code.put('__Pyx_RaiseKeywordRequired("%s", %s); ' % (
self.name,
pystring_cname))
@@ -3730,12 +3730,12 @@ class DefNodeWrapper(FuncDefNode):
reversed_args = list(enumerate(positional_args))[::-1]
for i, arg in reversed_args:
if i >= min_positional_args-1:
- if i != reversed_args[0][0]:
- code.putln('CYTHON_FALLTHROUGH;')
+ if i != reversed_args[0][0]:
+ code.putln('CYTHON_FALLTHROUGH;')
code.put('case %2d: ' % (i+1))
code.putln("values[%d] = PyTuple_GET_ITEM(%s, %d);" % (i, Naming.args_cname, i))
if min_positional_args == 0:
- code.putln('CYTHON_FALLTHROUGH;')
+ code.putln('CYTHON_FALLTHROUGH;')
code.put('case 0: ')
code.putln('break;')
if self.star_arg:
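The loop above writes the positional-unpacking switch from the highest arity downwards; every case falls through to fill the next slot, and CYTHON_FALLTHROUGH keeps -Wimplicit-fallthrough quiet. A small emitter reproducing the shape of the output (string building only, not Cython's code writer):

def emit_unpack_switch(nargs):
    lines = ["switch (PyTuple_GET_SIZE(args)) {"]
    for i in reversed(range(nargs)):
        if i != nargs - 1:
            lines.append("  CYTHON_FALLTHROUGH;")
        lines.append("  case %d: values[%d] = PyTuple_GET_ITEM(args, %d);" % (i + 1, i, i))
    lines.append("  CYTHON_FALLTHROUGH;")
    lines.append("  case 0: break;")
    lines.append("}")
    return "\n".join(lines)

print(emit_unpack_switch(3))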
@@ -3777,12 +3777,12 @@ class DefNodeWrapper(FuncDefNode):
entry = arg.entry
code.putln("%s = %s;" % (entry.cname, item))
else:
- if arg.type.from_py_function:
+ if arg.type.from_py_function:
if arg.default:
# C-typed default arguments must be handled here
code.putln('if (%s) {' % item)
- code.putln(arg.type.from_py_call_code(
- item, arg.entry.cname, arg.pos, code))
+ code.putln(arg.type.from_py_call_code(
+ item, arg.entry.cname, arg.pos, code))
if arg.default:
code.putln('} else {')
code.putln("%s = %s;" % (
@@ -3855,7 +3855,7 @@ class DefNodeWrapper(FuncDefNode):
code.put('case %2d: ' % (i+1))
code.putln("values[%d] = PyTuple_GET_ITEM(%s, %d);" % (
i, Naming.args_cname, i))
- code.putln('CYTHON_FALLTHROUGH;')
+ code.putln('CYTHON_FALLTHROUGH;')
code.putln('case 0: break;')
if not self.star_arg:
code.put('default: ') # more arguments than allowed
@@ -3883,8 +3883,8 @@ class DefNodeWrapper(FuncDefNode):
code.putln('switch (pos_args) {')
for i, arg in enumerate(all_args[:last_required_arg+1]):
if max_positional_args > 0 and i <= max_positional_args:
- if i != 0:
- code.putln('CYTHON_FALLTHROUGH;')
+ if i != 0:
+ code.putln('CYTHON_FALLTHROUGH;')
if self.star_arg and i == max_positional_args:
code.putln('default:')
else:
@@ -3896,12 +3896,12 @@ class DefNodeWrapper(FuncDefNode):
continue
code.putln('if (kw_args > 0) {')
# don't overwrite default argument
- code.putln('PyObject* value = __Pyx_PyDict_GetItemStr(%s, %s);' % (
+ code.putln('PyObject* value = __Pyx_PyDict_GetItemStr(%s, %s);' % (
Naming.kwds_cname, pystring_cname))
code.putln('if (value) { values[%d] = value; kw_args--; }' % i)
code.putln('}')
else:
- code.putln('if (likely((values[%d] = __Pyx_PyDict_GetItemStr(%s, %s)) != 0)) kw_args--;' % (
+ code.putln('if (likely((values[%d] = __Pyx_PyDict_GetItemStr(%s, %s)) != 0)) kw_args--;' % (
i, Naming.kwds_cname, pystring_cname))
if i < min_positional_args:
if i == 0:
@@ -3922,8 +3922,8 @@ class DefNodeWrapper(FuncDefNode):
code.putln('}')
elif arg.kw_only:
code.putln('else {')
- code.globalstate.use_utility_code(
- UtilityCode.load_cached("RaiseKeywordRequired", "FunctionArguments.c"))
+ code.globalstate.use_utility_code(
+ UtilityCode.load_cached("RaiseKeywordRequired", "FunctionArguments.c"))
code.put('__Pyx_RaiseKeywordRequired("%s", %s); ' % (
self.name, pystring_cname))
code.putln(code.error_goto(self.pos))
@@ -3987,7 +3987,7 @@ class DefNodeWrapper(FuncDefNode):
else:
code.putln('if (kw_args == 1) {')
code.putln('const Py_ssize_t index = %d;' % first_optional_arg)
- code.putln('PyObject* value = __Pyx_PyDict_GetItemStr(%s, *%s[index]);' % (
+ code.putln('PyObject* value = __Pyx_PyDict_GetItemStr(%s, *%s[index]);' % (
Naming.kwds_cname, Naming.pykwdlist_cname))
code.putln('if (value) { values[index] = value; kw_args--; }')
if len(optional_args) > 1:
@@ -4024,13 +4024,13 @@ class DefNodeWrapper(FuncDefNode):
def generate_arg_conversion_from_pyobject(self, arg, code):
new_type = arg.type
# copied from CoerceFromPyTypeNode
- if new_type.from_py_function:
- code.putln(new_type.from_py_call_code(
- arg.hdr_cname,
- arg.entry.cname,
- arg.pos,
- code,
- ))
+ if new_type.from_py_function:
+ code.putln(new_type.from_py_call_code(
+ arg.hdr_cname,
+ arg.entry.cname,
+ arg.pos,
+ code,
+ ))
else:
error(arg.pos, "Cannot convert Python object argument to type '%s'" % new_type)
@@ -4071,9 +4071,9 @@ class GeneratorDefNode(DefNode):
is_generator = True
is_coroutine = False
- is_iterable_coroutine = False
- is_asyncgen = False
- gen_type_name = 'Generator'
+ is_iterable_coroutine = False
+ is_asyncgen = False
+ gen_type_name = 'Generator'
needs_closure = True
child_attrs = DefNode.child_attrs + ["gbody"]
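The gen_type_name attribute picks which runtime object from Coroutine.c backs the function body. Three of the four node classes map directly onto familiar constructs; the 'IterableCoroutine' case is assumed (from the class name here) to back async-def functions compiled under the iterable_coroutine directive:

    def gen():            # GeneratorDefNode -> 'Generator'
        yield 1

    async def coro():     # AsyncDefNode -> 'Coroutine'
        return 1

    async def agen():     # AsyncGenNode -> 'AsyncGen'
        yield 1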
@@ -4096,10 +4096,10 @@ class GeneratorDefNode(DefNode):
code.putln('{')
code.putln('__pyx_CoroutineObject *gen = __Pyx_%s_New('
- '(__pyx_coroutine_body_t) %s, %s, (PyObject *) %s, %s, %s, %s); %s' % (
- self.gen_type_name,
- body_cname, self.code_object.calculate_result_code(code) if self.code_object else 'NULL',
- Naming.cur_scope_cname, name, qualname, module_name,
+ '(__pyx_coroutine_body_t) %s, %s, (PyObject *) %s, %s, %s, %s); %s' % (
+ self.gen_type_name,
+ body_cname, self.code_object.calculate_result_code(code) if self.code_object else 'NULL',
+ Naming.cur_scope_cname, name, qualname, module_name,
code.error_goto_if_null('gen', self.pos)))
code.put_decref(Naming.cur_scope_cname, py_object_type)
if self.requires_classobj:
@@ -4113,40 +4113,40 @@ class GeneratorDefNode(DefNode):
code.putln('}')
def generate_function_definitions(self, env, code):
- env.use_utility_code(UtilityCode.load_cached(self.gen_type_name, "Coroutine.c"))
+ env.use_utility_code(UtilityCode.load_cached(self.gen_type_name, "Coroutine.c"))
self.gbody.generate_function_header(code, proto=True)
super(GeneratorDefNode, self).generate_function_definitions(env, code)
self.gbody.generate_function_definitions(env, code)
class AsyncDefNode(GeneratorDefNode):
- gen_type_name = 'Coroutine'
+ gen_type_name = 'Coroutine'
is_coroutine = True
-class IterableAsyncDefNode(AsyncDefNode):
- gen_type_name = 'IterableCoroutine'
- is_iterable_coroutine = True
-
-
-class AsyncGenNode(AsyncDefNode):
- gen_type_name = 'AsyncGen'
- is_asyncgen = True
-
-
+class IterableAsyncDefNode(AsyncDefNode):
+ gen_type_name = 'IterableCoroutine'
+ is_iterable_coroutine = True
+
+
+class AsyncGenNode(AsyncDefNode):
+ gen_type_name = 'AsyncGen'
+ is_asyncgen = True
+
+
class GeneratorBodyDefNode(DefNode):
# Main code body of a generator implemented as a DefNode.
#
is_generator_body = True
is_inlined = False
- is_async_gen_body = False
+ is_async_gen_body = False
inlined_comprehension_type = None # container type for inlined comprehensions
- def __init__(self, pos=None, name=None, body=None, is_async_gen_body=False):
+ def __init__(self, pos=None, name=None, body=None, is_async_gen_body=False):
super(GeneratorBodyDefNode, self).__init__(
- pos=pos, body=body, name=name, is_async_gen_body=is_async_gen_body,
- doc=None, args=[], star_arg=None, starstar_arg=None)
+ pos=pos, body=body, name=name, is_async_gen_body=is_async_gen_body,
+ doc=None, args=[], star_arg=None, starstar_arg=None)
def declare_generator_body(self, env):
prefix = env.next_id(env.scope_prefix)
@@ -4167,10 +4167,10 @@ class GeneratorBodyDefNode(DefNode):
self.declare_generator_body(env)
def generate_function_header(self, code, proto=False):
- header = "static PyObject *%s(PyObject *%s_obj, CYTHON_UNUSED PyThreadState *%s, PyObject *%s)" % (
+ header = "static PyObject *%s(PyObject *%s_obj, CYTHON_UNUSED PyThreadState *%s, PyObject *%s)" % (
self.entry.func_cname,
Naming.generator_cname,
- Naming.local_tstate_cname,
+ Naming.local_tstate_cname,
Naming.sent_value_cname)
if proto:
code.putln('%s; /* proto */' % header)
@@ -4199,14 +4199,14 @@ class GeneratorBodyDefNode(DefNode):
code.putln("PyObject *%s = NULL;" % Naming.retval_cname)
tempvardecl_code = code.insertion_point()
code.put_declare_refcount_context()
- code.put_setup_refcount_context(self.entry.name or self.entry.qualified_name)
- profile = code.globalstate.directives['profile']
- linetrace = code.globalstate.directives['linetrace']
- if profile or linetrace:
- tempvardecl_code.put_trace_declarations()
- code.funcstate.can_trace = True
- code_object = self.code_object.calculate_result_code(code) if self.code_object else None
- code.put_trace_frame_init(code_object)
+ code.put_setup_refcount_context(self.entry.name or self.entry.qualified_name)
+ profile = code.globalstate.directives['profile']
+ linetrace = code.globalstate.directives['linetrace']
+ if profile or linetrace:
+ tempvardecl_code.put_trace_declarations()
+ code.funcstate.can_trace = True
+ code_object = self.code_object.calculate_result_code(code) if self.code_object else None
+ code.put_trace_frame_init(code_object)
# ----- Resume switch point.
code.funcstate.init_closure_temps(lenv.scope_class.type.scope)
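The profile and linetrace values read from code.globalstate.directives are the user-facing module directives; the trace declarations above are emitted only when one of them is set. A typical module header enabling them (linetrace additionally needs the C macro CYTHON_TRACE defined when compiling):

    # cython: profile=True
    # cython: linetrace=True

    def traced(n):
        total = 0
        for i in range(n):   # with linetrace, each executed line is traceable
            total += i
        return total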
@@ -4237,7 +4237,7 @@ class GeneratorBodyDefNode(DefNode):
# ----- Function body
self.generate_function_body(env, code)
# ----- Closure initialization
- if lenv.scope_class.type.scope.var_entries:
+ if lenv.scope_class.type.scope.var_entries:
closure_init_code.putln('%s = %s;' % (
lenv.scope_class.type.declaration_code(Naming.cur_scope_cname),
lenv.scope_class.type.cast_code('%s->closure' %
@@ -4245,9 +4245,9 @@ class GeneratorBodyDefNode(DefNode):
# FIXME: this silences a potential "unused" warning => try to avoid unused closures in more cases
code.putln("CYTHON_MAYBE_UNUSED_VAR(%s);" % Naming.cur_scope_cname)
- if profile or linetrace:
- code.funcstate.can_trace = False
-
+ if profile or linetrace:
+ code.funcstate.can_trace = False
+
code.mark_pos(self.pos)
code.putln("")
code.putln("/* function exit code */")
@@ -4255,13 +4255,13 @@ class GeneratorBodyDefNode(DefNode):
# on normal generator termination, we do not take the exception propagation
# path: no traceback info is required and not creating it is much faster
if not self.is_inlined and not self.body.is_terminator:
- if self.is_async_gen_body:
- code.globalstate.use_utility_code(
- UtilityCode.load_cached("StopAsyncIteration", "Coroutine.c"))
- code.putln('PyErr_SetNone(%s);' % (
- '__Pyx_PyExc_StopAsyncIteration' if self.is_async_gen_body else 'PyExc_StopIteration'))
+ if self.is_async_gen_body:
+ code.globalstate.use_utility_code(
+ UtilityCode.load_cached("StopAsyncIteration", "Coroutine.c"))
+ code.putln('PyErr_SetNone(%s);' % (
+ '__Pyx_PyExc_StopAsyncIteration' if self.is_async_gen_body else 'PyExc_StopIteration'))
# ----- Error cleanup
- if code.label_used(code.error_label):
+ if code.label_used(code.error_label):
if not self.body.is_terminator:
code.put_goto(code.return_label)
code.put_label(code.error_label)
@@ -4270,7 +4270,7 @@ class GeneratorBodyDefNode(DefNode):
if Future.generator_stop in env.global_scope().context.future_directives:
# PEP 479: turn accidental StopIteration exceptions into a RuntimeError
code.globalstate.use_utility_code(UtilityCode.load_cached("pep479", "Coroutine.c"))
- code.putln("__Pyx_Generator_Replace_StopIteration(%d);" % bool(self.is_async_gen_body))
+ code.putln("__Pyx_Generator_Replace_StopIteration(%d);" % bool(self.is_async_gen_body))
for cname, type in code.funcstate.all_managed_temps():
code.put_xdecref(cname, type)
code.put_add_traceback(self.entry.qualified_name)
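__Pyx_Generator_Replace_StopIteration implements PEP 479: when the generator_stop future is active, a StopIteration escaping the generator body is replaced by a RuntimeError. Plain Python (3.7+, where this is the default) shows the same behaviour:

    def g():
        yield 1
        raise StopIteration        # PEP 479: must not leak out of a generator

    it = g()
    print(next(it))                # 1
    try:
        next(it)
    except RuntimeError as exc:
        print(type(exc.__cause__).__name__)   # StopIteration, kept as the cause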
@@ -4283,14 +4283,14 @@ class GeneratorBodyDefNode(DefNode):
code.put_xdecref_clear(Naming.retval_cname, py_object_type)
# For Py3.7, clearing is already done below.
code.putln("#if !CYTHON_USE_EXC_INFO_STACK")
- code.putln("__Pyx_Coroutine_ResetAndClearException(%s);" % Naming.generator_cname)
+ code.putln("__Pyx_Coroutine_ResetAndClearException(%s);" % Naming.generator_cname)
code.putln("#endif")
code.putln('%s->resume_label = -1;' % Naming.generator_cname)
# clean up as early as possible to help breaking any reference cycles
code.putln('__Pyx_Coroutine_clear((PyObject*)%s);' % Naming.generator_cname)
- if profile or linetrace:
- code.put_trace_return(Naming.retval_cname,
- nogil=not code.funcstate.gil_owned)
+ if profile or linetrace:
+ code.put_trace_return(Naming.retval_cname,
+ nogil=not code.funcstate.gil_owned)
code.put_finish_refcount_context()
code.putln("return %s;" % Naming.retval_cname)
code.putln("}")
@@ -4298,20 +4298,20 @@ class GeneratorBodyDefNode(DefNode):
# ----- Go back and insert temp variable declarations
tempvardecl_code.put_temp_declarations(code.funcstate)
# ----- Generator resume code
- if profile or linetrace:
- resume_code.put_trace_call(self.entry.qualified_name, self.pos,
- nogil=not code.funcstate.gil_owned)
+ if profile or linetrace:
+ resume_code.put_trace_call(self.entry.qualified_name, self.pos,
+ nogil=not code.funcstate.gil_owned)
resume_code.putln("switch (%s->resume_label) {" % (
Naming.generator_cname))
-
+
resume_code.putln("case 0: goto %s;" % first_run_label)
for i, label in code.yield_labels:
resume_code.putln("case %d: goto %s;" % (i, label))
resume_code.putln("default: /* CPython raises the right error here */")
- if profile or linetrace:
- resume_code.put_trace_return("Py_None",
- nogil=not code.funcstate.gil_owned)
+ if profile or linetrace:
+ resume_code.put_trace_return("Py_None",
+ nogil=not code.funcstate.gil_owned)
resume_code.put_finish_refcount_context()
resume_code.putln("return NULL;")
resume_code.putln("}")
@@ -4321,7 +4321,7 @@ class GeneratorBodyDefNode(DefNode):
class OverrideCheckNode(StatNode):
# A Node for dispatching to the def method if it
- # is overridden.
+ # is overridden.
#
# py_func
#
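OverrideCheckNode guards calls to cpdef methods: the fast C implementation runs unless the method has been overridden from Python, in which case the def wrapper is dispatched instead. A minimal Cython sketch of the case it handles:

    cdef class Base:
        cpdef greet(self):
            return "base"

    class Child(Base):        # Python subclass overriding the cpdef method
        def greet(self):
            return "child"

    # Even C-level callers of Base.greet must see the override:
    # Child().greet() == "child"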
@@ -4539,7 +4539,7 @@ class PyClassDefNode(ClassDefNode):
error(self.classobj.pos, "Python3 style class could not be represented as C class")
return
- from . import ExprNodes
+ from . import ExprNodes
return CClassDefNode(self.pos,
visibility='private',
module_name=None,
@@ -4651,7 +4651,7 @@ class CClassDefNode(ClassDefNode):
# module_name string or None For import of extern type objects
# class_name string Unqualified name of class
# as_name string or None Name to declare as in this scope
- # bases TupleNode Base class(es)
+ # bases TupleNode Base class(es)
# objstruct_name string or None Specified C name of object struct
# typeobj_name string or None Specified C name of type object
# check_size 'warn', 'error', 'ignore' What to do if tp_basicsize does not match
@@ -4734,34 +4734,34 @@ class CClassDefNode(ClassDefNode):
self.module.has_extern_class = 1
env.add_imported_module(self.module)
- if self.bases.args:
- base = self.bases.args[0]
- base_type = base.analyse_as_type(env)
- if base_type in (PyrexTypes.c_int_type, PyrexTypes.c_long_type, PyrexTypes.c_float_type):
- # Use the Python rather than C variant of these types.
- base_type = env.lookup(base_type.sign_and_name()).type
- if base_type is None:
- error(base.pos, "First base of '%s' is not an extension type" % self.class_name)
- elif base_type == PyrexTypes.py_object_type:
- base_class_scope = None
- elif not base_type.is_extension_type and \
- not (base_type.is_builtin_type and base_type.objstruct_cname):
- error(base.pos, "'%s' is not an extension type" % base_type)
- elif not base_type.is_complete():
- error(base.pos, "Base class '%s' of type '%s' is incomplete" % (
- base_type.name, self.class_name))
- elif base_type.scope and base_type.scope.directives and \
- base_type.is_final_type:
- error(base.pos, "Base class '%s' of type '%s' is final" % (
- base_type, self.class_name))
- elif base_type.is_builtin_type and \
- base_type.name in ('tuple', 'str', 'bytes'):
- error(base.pos, "inheritance from PyVarObject types like '%s' is not currently supported"
- % base_type.name)
+ if self.bases.args:
+ base = self.bases.args[0]
+ base_type = base.analyse_as_type(env)
+ if base_type in (PyrexTypes.c_int_type, PyrexTypes.c_long_type, PyrexTypes.c_float_type):
+ # Use the Python rather than C variant of these types.
+ base_type = env.lookup(base_type.sign_and_name()).type
+ if base_type is None:
+ error(base.pos, "First base of '%s' is not an extension type" % self.class_name)
+ elif base_type == PyrexTypes.py_object_type:
+ base_class_scope = None
+ elif not base_type.is_extension_type and \
+ not (base_type.is_builtin_type and base_type.objstruct_cname):
+ error(base.pos, "'%s' is not an extension type" % base_type)
+ elif not base_type.is_complete():
+ error(base.pos, "Base class '%s' of type '%s' is incomplete" % (
+ base_type.name, self.class_name))
+ elif base_type.scope and base_type.scope.directives and \
+ base_type.is_final_type:
+ error(base.pos, "Base class '%s' of type '%s' is final" % (
+ base_type, self.class_name))
+ elif base_type.is_builtin_type and \
+ base_type.name in ('tuple', 'str', 'bytes'):
+ error(base.pos, "inheritance from PyVarObject types like '%s' is not currently supported"
+ % base_type.name)
else:
- self.base_type = base_type
- if env.directives.get('freelist', 0) > 0 and base_type != PyrexTypes.py_object_type:
- warning(self.pos, "freelists cannot be used on subtypes, only the base class can manage them", 1)
+ self.base_type = base_type
+ if env.directives.get('freelist', 0) > 0 and base_type != PyrexTypes.py_object_type:
+ warning(self.pos, "freelists cannot be used on subtypes, only the base class can manage them", 1)
has_body = self.body is not None
if has_body and self.base_type and not self.base_type.scope:
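The restored block validates the first base of a cdef class: it must be a complete, non-final extension type or a builtin with an object struct, and PyVarObject builtins are rejected outright. For example:

    cdef class FromList(list):    # ok: builtin with a fixed-size object struct
        pass

    cdef class FromTuple(tuple):  # error: inheritance from PyVarObject types
        pass                      # like 'tuple' is not currently supported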
@@ -4822,28 +4822,28 @@ class CClassDefNode(ClassDefNode):
else:
scope.implemented = 1
- if len(self.bases.args) > 1:
- if not has_body or self.in_pxd:
- error(self.bases.args[1].pos, "Only declare first base in declaration.")
- # At runtime, we check that the other bases are heap types
- # and that a __dict__ is added if required.
- for other_base in self.bases.args[1:]:
- if other_base.analyse_as_type(env):
- error(other_base.pos, "Only one extension type base class allowed.")
- self.entry.type.early_init = 0
- from . import ExprNodes
- self.type_init_args = ExprNodes.TupleNode(
- self.pos,
- args=[ExprNodes.IdentifierStringNode(self.pos, value=self.class_name),
- self.bases,
- ExprNodes.DictNode(self.pos, key_value_pairs=[])])
- elif self.base_type:
- self.entry.type.early_init = self.base_type.is_external or self.base_type.early_init
- self.type_init_args = None
- else:
- self.entry.type.early_init = 1
- self.type_init_args = None
-
+ if len(self.bases.args) > 1:
+ if not has_body or self.in_pxd:
+ error(self.bases.args[1].pos, "Only declare first base in declaration.")
+ # At runtime, we check that the other bases are heap types
+ # and that a __dict__ is added if required.
+ for other_base in self.bases.args[1:]:
+ if other_base.analyse_as_type(env):
+ error(other_base.pos, "Only one extension type base class allowed.")
+ self.entry.type.early_init = 0
+ from . import ExprNodes
+ self.type_init_args = ExprNodes.TupleNode(
+ self.pos,
+ args=[ExprNodes.IdentifierStringNode(self.pos, value=self.class_name),
+ self.bases,
+ ExprNodes.DictNode(self.pos, key_value_pairs=[])])
+ elif self.base_type:
+ self.entry.type.early_init = self.base_type.is_external or self.base_type.early_init
+ self.type_init_args = None
+ else:
+ self.entry.type.early_init = 1
+ self.type_init_args = None
+
env.allocate_vtable_names(self.entry)
for thunk in self.entry.type.defered_declarations:
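With more than one base, only the first may be an extension type; the remaining bases are checked at runtime, and the type loses early_init because its bases tuple has to be built dynamically (the TupleNode constructed above is later fed to PyType_Type.tp_new). A sketch:

    cdef class CBase:                 # extension type: must come first
        pass

    class Mixin:                      # ordinary Python heap type
        def tag(self):
            return "mixin"

    cdef class Multi(CBase, Mixin):   # extra bases validated at module load
        pass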
@@ -4853,8 +4853,8 @@ class CClassDefNode(ClassDefNode):
if self.body:
scope = self.entry.type.scope
self.body = self.body.analyse_expressions(scope)
- if self.type_init_args:
- self.type_init_args.analyse_expressions(env)
+ if self.type_init_args:
+ self.type_init_args.analyse_expressions(env)
return self
def generate_function_definitions(self, env, code):
@@ -4868,175 +4868,175 @@ class CClassDefNode(ClassDefNode):
code.mark_pos(self.pos)
if self.body:
self.body.generate_execution_code(code)
- if not self.entry.type.early_init:
- if self.type_init_args:
- self.type_init_args.generate_evaluation_code(code)
- bases = "PyTuple_GET_ITEM(%s, 1)" % self.type_init_args.result()
- first_base = "((PyTypeObject*)PyTuple_GET_ITEM(%s, 0))" % bases
- # Let Python do the base types compatibility checking.
- trial_type = code.funcstate.allocate_temp(PyrexTypes.py_object_type, True)
- code.putln("%s = PyType_Type.tp_new(&PyType_Type, %s, NULL);" % (
- trial_type, self.type_init_args.result()))
- code.putln(code.error_goto_if_null(trial_type, self.pos))
- code.put_gotref(trial_type)
- code.putln("if (((PyTypeObject*) %s)->tp_base != %s) {" % (
- trial_type, first_base))
- code.putln("PyErr_Format(PyExc_TypeError, \"best base '%s' must be equal to first base '%s'\",")
- code.putln(" ((PyTypeObject*) %s)->tp_base->tp_name, %s->tp_name);" % (
- trial_type, first_base))
- code.putln(code.error_goto(self.pos))
- code.putln("}")
- code.funcstate.release_temp(trial_type)
- code.put_incref(bases, PyrexTypes.py_object_type)
- code.put_giveref(bases)
- code.putln("%s.tp_bases = %s;" % (self.entry.type.typeobj_cname, bases))
- code.put_decref_clear(trial_type, PyrexTypes.py_object_type)
- self.type_init_args.generate_disposal_code(code)
- self.type_init_args.free_temps(code)
-
- self.generate_type_ready_code(self.entry, code, True)
-
- # Also called from ModuleNode for early init types.
- @staticmethod
- def generate_type_ready_code(entry, code, heap_type_bases=False):
- # Generate a call to PyType_Ready for an extension
- # type defined in this module.
- type = entry.type
- typeobj_cname = type.typeobj_cname
- scope = type.scope
- if not scope: # could be None if there was an error
- return
- if entry.visibility != 'extern':
- for slot in TypeSlots.slot_table:
- slot.generate_dynamic_init_code(scope, code)
- if heap_type_bases:
- code.globalstate.use_utility_code(
- UtilityCode.load_cached('PyType_Ready', 'ExtensionTypes.c'))
- readyfunc = "__Pyx_PyType_Ready"
- else:
- readyfunc = "PyType_Ready"
- code.putln(
- "if (%s(&%s) < 0) %s" % (
- readyfunc,
- typeobj_cname,
- code.error_goto(entry.pos)))
- # Don't inherit tp_print from builtin types, restoring the
- # behavior of using tp_repr or tp_str instead.
+ if not self.entry.type.early_init:
+ if self.type_init_args:
+ self.type_init_args.generate_evaluation_code(code)
+ bases = "PyTuple_GET_ITEM(%s, 1)" % self.type_init_args.result()
+ first_base = "((PyTypeObject*)PyTuple_GET_ITEM(%s, 0))" % bases
+ # Let Python do the base types compatibility checking.
+ trial_type = code.funcstate.allocate_temp(PyrexTypes.py_object_type, True)
+ code.putln("%s = PyType_Type.tp_new(&PyType_Type, %s, NULL);" % (
+ trial_type, self.type_init_args.result()))
+ code.putln(code.error_goto_if_null(trial_type, self.pos))
+ code.put_gotref(trial_type)
+ code.putln("if (((PyTypeObject*) %s)->tp_base != %s) {" % (
+ trial_type, first_base))
+ code.putln("PyErr_Format(PyExc_TypeError, \"best base '%s' must be equal to first base '%s'\",")
+ code.putln(" ((PyTypeObject*) %s)->tp_base->tp_name, %s->tp_name);" % (
+ trial_type, first_base))
+ code.putln(code.error_goto(self.pos))
+ code.putln("}")
+ code.funcstate.release_temp(trial_type)
+ code.put_incref(bases, PyrexTypes.py_object_type)
+ code.put_giveref(bases)
+ code.putln("%s.tp_bases = %s;" % (self.entry.type.typeobj_cname, bases))
+ code.put_decref_clear(trial_type, PyrexTypes.py_object_type)
+ self.type_init_args.generate_disposal_code(code)
+ self.type_init_args.free_temps(code)
+
+ self.generate_type_ready_code(self.entry, code, True)
+
+ # Also called from ModuleNode for early init types.
+ @staticmethod
+ def generate_type_ready_code(entry, code, heap_type_bases=False):
+ # Generate a call to PyType_Ready for an extension
+ # type defined in this module.
+ type = entry.type
+ typeobj_cname = type.typeobj_cname
+ scope = type.scope
+ if not scope: # could be None if there was an error
+ return
+ if entry.visibility != 'extern':
+ for slot in TypeSlots.slot_table:
+ slot.generate_dynamic_init_code(scope, code)
+ if heap_type_bases:
+ code.globalstate.use_utility_code(
+ UtilityCode.load_cached('PyType_Ready', 'ExtensionTypes.c'))
+ readyfunc = "__Pyx_PyType_Ready"
+ else:
+ readyfunc = "PyType_Ready"
+ code.putln(
+ "if (%s(&%s) < 0) %s" % (
+ readyfunc,
+ typeobj_cname,
+ code.error_goto(entry.pos)))
+ # Don't inherit tp_print from builtin types, restoring the
+ # behavior of using tp_repr or tp_str instead.
# ("tp_print" was renamed to "tp_vectorcall_offset" in Py3.8b1)
code.putln("#if PY_VERSION_HEX < 0x030800B1")
- code.putln("%s.tp_print = 0;" % typeobj_cname)
+ code.putln("%s.tp_print = 0;" % typeobj_cname)
code.putln("#endif")
-
- # Use specialised attribute lookup for types with generic lookup but no instance dict.
- getattr_slot_func = TypeSlots.get_slot_code_by_name(scope, 'tp_getattro')
- dictoffset_slot_func = TypeSlots.get_slot_code_by_name(scope, 'tp_dictoffset')
- if getattr_slot_func == '0' and dictoffset_slot_func == '0':
- if type.is_final_type:
- py_cfunc = "__Pyx_PyObject_GenericGetAttrNoDict" # grepable
- utility_func = "PyObject_GenericGetAttrNoDict"
- else:
- py_cfunc = "__Pyx_PyObject_GenericGetAttr"
- utility_func = "PyObject_GenericGetAttr"
- code.globalstate.use_utility_code(UtilityCode.load_cached(utility_func, "ObjectHandling.c"))
-
- code.putln("if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) &&"
- " likely(!%s.tp_dictoffset && %s.tp_getattro == PyObject_GenericGetAttr)) {" % (
- typeobj_cname, typeobj_cname))
- code.putln("%s.tp_getattro = %s;" % (
- typeobj_cname, py_cfunc))
- code.putln("}")
-
- # Fix special method docstrings. This is a bit of a hack, but
- # unless we let PyType_Ready create the slot wrappers we have
- # a significant performance hit. (See trac #561.)
- for func in entry.type.scope.pyfunc_entries:
- is_buffer = func.name in ('__getbuffer__', '__releasebuffer__')
- if (func.is_special and Options.docstrings and
- func.wrapperbase_cname and not is_buffer):
- slot = TypeSlots.method_name_to_slot.get(func.name)
- preprocessor_guard = slot.preprocessor_guard_code() if slot else None
- if preprocessor_guard:
- code.putln(preprocessor_guard)
- code.putln('#if CYTHON_COMPILING_IN_CPYTHON')
- code.putln("{")
- code.putln(
- 'PyObject *wrapper = PyObject_GetAttrString((PyObject *)&%s, "%s"); %s' % (
- typeobj_cname,
- func.name,
- code.error_goto_if_null('wrapper', entry.pos)))
- code.putln(
- "if (Py_TYPE(wrapper) == &PyWrapperDescr_Type) {")
- code.putln(
- "%s = *((PyWrapperDescrObject *)wrapper)->d_base;" % (
- func.wrapperbase_cname))
- code.putln(
- "%s.doc = %s;" % (func.wrapperbase_cname, func.doc_cname))
- code.putln(
- "((PyWrapperDescrObject *)wrapper)->d_base = &%s;" % (
- func.wrapperbase_cname))
- code.putln("}")
- code.putln("}")
- code.putln('#endif')
- if preprocessor_guard:
- code.putln('#endif')
- if type.vtable_cname:
- code.globalstate.use_utility_code(
- UtilityCode.load_cached('SetVTable', 'ImportExport.c'))
- code.putln(
- "if (__Pyx_SetVtable(%s.tp_dict, %s) < 0) %s" % (
- typeobj_cname,
- type.vtabptr_cname,
- code.error_goto(entry.pos)))
- if heap_type_bases:
- code.globalstate.use_utility_code(
- UtilityCode.load_cached('MergeVTables', 'ImportExport.c'))
- code.putln("if (__Pyx_MergeVtables(&%s) < 0) %s" % (
- typeobj_cname,
- code.error_goto(entry.pos)))
+
+ # Use specialised attribute lookup for types with generic lookup but no instance dict.
+ getattr_slot_func = TypeSlots.get_slot_code_by_name(scope, 'tp_getattro')
+ dictoffset_slot_func = TypeSlots.get_slot_code_by_name(scope, 'tp_dictoffset')
+ if getattr_slot_func == '0' and dictoffset_slot_func == '0':
+ if type.is_final_type:
+ py_cfunc = "__Pyx_PyObject_GenericGetAttrNoDict" # grepable
+ utility_func = "PyObject_GenericGetAttrNoDict"
+ else:
+ py_cfunc = "__Pyx_PyObject_GenericGetAttr"
+ utility_func = "PyObject_GenericGetAttr"
+ code.globalstate.use_utility_code(UtilityCode.load_cached(utility_func, "ObjectHandling.c"))
+
+ code.putln("if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) &&"
+ " likely(!%s.tp_dictoffset && %s.tp_getattro == PyObject_GenericGetAttr)) {" % (
+ typeobj_cname, typeobj_cname))
+ code.putln("%s.tp_getattro = %s;" % (
+ typeobj_cname, py_cfunc))
+ code.putln("}")
+
+ # Fix special method docstrings. This is a bit of a hack, but
+ # unless we let PyType_Ready create the slot wrappers we have
+ # a significant performance hit. (See trac #561.)
+ for func in entry.type.scope.pyfunc_entries:
+ is_buffer = func.name in ('__getbuffer__', '__releasebuffer__')
+ if (func.is_special and Options.docstrings and
+ func.wrapperbase_cname and not is_buffer):
+ slot = TypeSlots.method_name_to_slot.get(func.name)
+ preprocessor_guard = slot.preprocessor_guard_code() if slot else None
+ if preprocessor_guard:
+ code.putln(preprocessor_guard)
+ code.putln('#if CYTHON_COMPILING_IN_CPYTHON')
+ code.putln("{")
+ code.putln(
+ 'PyObject *wrapper = PyObject_GetAttrString((PyObject *)&%s, "%s"); %s' % (
+ typeobj_cname,
+ func.name,
+ code.error_goto_if_null('wrapper', entry.pos)))
+ code.putln(
+ "if (Py_TYPE(wrapper) == &PyWrapperDescr_Type) {")
+ code.putln(
+ "%s = *((PyWrapperDescrObject *)wrapper)->d_base;" % (
+ func.wrapperbase_cname))
+ code.putln(
+ "%s.doc = %s;" % (func.wrapperbase_cname, func.doc_cname))
+ code.putln(
+ "((PyWrapperDescrObject *)wrapper)->d_base = &%s;" % (
+ func.wrapperbase_cname))
+ code.putln("}")
+ code.putln("}")
+ code.putln('#endif')
+ if preprocessor_guard:
+ code.putln('#endif')
+ if type.vtable_cname:
+ code.globalstate.use_utility_code(
+ UtilityCode.load_cached('SetVTable', 'ImportExport.c'))
+ code.putln(
+ "if (__Pyx_SetVtable(%s.tp_dict, %s) < 0) %s" % (
+ typeobj_cname,
+ type.vtabptr_cname,
+ code.error_goto(entry.pos)))
+ if heap_type_bases:
+ code.globalstate.use_utility_code(
+ UtilityCode.load_cached('MergeVTables', 'ImportExport.c'))
+ code.putln("if (__Pyx_MergeVtables(&%s) < 0) %s" % (
+ typeobj_cname,
+ code.error_goto(entry.pos)))
if not type.scope.is_internal and not type.scope.directives.get('internal'):
- # scope.is_internal is set for types defined by
- # Cython (such as closures), the 'internal'
- # directive is set by users
- code.putln(
+ # scope.is_internal is set for types defined by
+ # Cython (such as closures), the 'internal'
+ # directive is set by users
+ code.putln(
'if (PyObject_SetAttr(%s, %s, (PyObject *)&%s) < 0) %s' % (
- Naming.module_cname,
+ Naming.module_cname,
code.intern_identifier(scope.class_name),
- typeobj_cname,
- code.error_goto(entry.pos)))
- weakref_entry = scope.lookup_here("__weakref__") if not scope.is_closure_class_scope else None
- if weakref_entry:
- if weakref_entry.type is py_object_type:
- tp_weaklistoffset = "%s.tp_weaklistoffset" % typeobj_cname
- if type.typedef_flag:
- objstruct = type.objstruct_cname
- else:
- objstruct = "struct %s" % type.objstruct_cname
- code.putln("if (%s == 0) %s = offsetof(%s, %s);" % (
- tp_weaklistoffset,
- tp_weaklistoffset,
- objstruct,
- weakref_entry.cname))
- else:
- error(weakref_entry.pos, "__weakref__ slot must be of type 'object'")
- if scope.lookup_here("__reduce_cython__") if not scope.is_closure_class_scope else None:
- # Unfortunately, we cannot reliably detect whether a
- # superclass defined __reduce__ at compile time, so we must
- # do so at runtime.
- code.globalstate.use_utility_code(
- UtilityCode.load_cached('SetupReduce', 'ExtensionTypes.c'))
- code.putln('if (__Pyx_setup_reduce((PyObject*)&%s) < 0) %s' % (
- typeobj_cname,
- code.error_goto(entry.pos)))
- # Generate code to initialise the typeptr of an extension
- # type defined in this module to point to its type object.
- if type.typeobj_cname:
- code.putln(
- "%s = &%s;" % (
- type.typeptr_cname, type.typeobj_cname))
-
+ typeobj_cname,
+ code.error_goto(entry.pos)))
+ weakref_entry = scope.lookup_here("__weakref__") if not scope.is_closure_class_scope else None
+ if weakref_entry:
+ if weakref_entry.type is py_object_type:
+ tp_weaklistoffset = "%s.tp_weaklistoffset" % typeobj_cname
+ if type.typedef_flag:
+ objstruct = type.objstruct_cname
+ else:
+ objstruct = "struct %s" % type.objstruct_cname
+ code.putln("if (%s == 0) %s = offsetof(%s, %s);" % (
+ tp_weaklistoffset,
+ tp_weaklistoffset,
+ objstruct,
+ weakref_entry.cname))
+ else:
+ error(weakref_entry.pos, "__weakref__ slot must be of type 'object'")
+ if scope.lookup_here("__reduce_cython__") if not scope.is_closure_class_scope else None:
+ # Unfortunately, we cannot reliably detect whether a
+ # superclass defined __reduce__ at compile time, so we must
+ # do so at runtime.
+ code.globalstate.use_utility_code(
+ UtilityCode.load_cached('SetupReduce', 'ExtensionTypes.c'))
+ code.putln('if (__Pyx_setup_reduce((PyObject*)&%s) < 0) %s' % (
+ typeobj_cname,
+ code.error_goto(entry.pos)))
+ # Generate code to initialise the typeptr of an extension
+ # type defined in this module to point to its type object.
+ if type.typeobj_cname:
+ code.putln(
+ "%s = &%s;" % (
+ type.typeptr_cname, type.typeobj_cname))
+
def annotate(self, code):
- if self.type_init_args:
- self.type_init_args.annotate(code)
+ if self.type_init_args:
+ self.type_init_args.annotate(code)
if self.body:
self.body.annotate(code)
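Two of the runtime fixups in the block above are directly user-visible: a __weakref__ slot of type object gets tp_weaklistoffset patched in, and __Pyx_setup_reduce wires up pickling via __reduce_cython__. Declaring the weakref slot looks like this:

    import weakref

    cdef class Referent:
        cdef object __weakref__    # must be 'object'; enables weak references

    r = weakref.ref(Referent())    # valid once tp_weaklistoffset is set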
@@ -5115,13 +5115,13 @@ class ExprStatNode(StatNode):
def analyse_declarations(self, env):
from . import ExprNodes
- expr = self.expr
- if isinstance(expr, ExprNodes.GeneralCallNode):
- func = expr.function.as_cython_attribute()
+ expr = self.expr
+ if isinstance(expr, ExprNodes.GeneralCallNode):
+ func = expr.function.as_cython_attribute()
if func == u'declare':
- args, kwds = expr.explicit_args_kwds()
+ args, kwds = expr.explicit_args_kwds()
if len(args):
- error(expr.pos, "Variable names must be specified.")
+ error(expr.pos, "Variable names must be specified.")
for var, type_node in kwds.key_value_pairs:
type = type_node.analyse_as_type(env)
if type is None:
@@ -5129,20 +5129,20 @@ class ExprStatNode(StatNode):
else:
env.declare_var(var.value, type, var.pos, is_cdef=True)
self.__class__ = PassStatNode
- elif getattr(expr, 'annotation', None) is not None:
- if expr.is_name:
- # non-code variable annotation, e.g. "name: type"
- expr.declare_from_annotation(env)
- self.__class__ = PassStatNode
- elif expr.is_attribute or expr.is_subscript:
- # unused expression with annotation, e.g. "a[0]: type" or "a.xyz : type"
- self.__class__ = PassStatNode
+ elif getattr(expr, 'annotation', None) is not None:
+ if expr.is_name:
+ # non-code variable annotation, e.g. "name: type"
+ expr.declare_from_annotation(env)
+ self.__class__ = PassStatNode
+ elif expr.is_attribute or expr.is_subscript:
+ # unused expression with annotation, e.g. "a[0]: type" or "a.xyz : type"
+ self.__class__ = PassStatNode
def analyse_expressions(self, env):
self.expr.result_is_used = False # hint that .result() may safely be left empty
self.expr = self.expr.analyse_expressions(env)
- # Repeat in case of node replacement.
- self.expr.result_is_used = False # hint that .result() may safely be left empty
+ # Repeat in case of node replacement.
+ self.expr.result_is_used = False # hint that .result() may safely be left empty
return self
def nogil_check(self, env):
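ExprStatNode special-cases two declaration forms that parse as plain expression statements: cython.declare(...) calls with keyword arguments, and bare variable annotations. Both are compiled away into declarations (the node replaces itself with PassStatNode):

    import cython

    cython.declare(n=cython.int, x=cython.double)  # names must be keywords;
                                                   # positional args are an error
    total: cython.int                              # bare annotation: declares only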
@@ -5153,13 +5153,13 @@ class ExprStatNode(StatNode):
def generate_execution_code(self, code):
code.mark_pos(self.pos)
- self.expr.result_is_used = False # hint that .result() may safely be left empty
+ self.expr.result_is_used = False # hint that .result() may safely be left empty
self.expr.generate_evaluation_code(code)
if not self.expr.is_temp and self.expr.result():
- result = self.expr.result()
- if not self.expr.type.is_void:
- result = "(void)(%s)" % result
- code.putln("%s;" % result)
+ result = self.expr.result()
+ if not self.expr.type.is_void:
+ result = "(void)(%s)" % result
+ code.putln("%s;" % result)
self.expr.generate_disposal_code(code)
self.expr.free_temps(code)
@@ -5873,9 +5873,9 @@ class DelStatNode(StatNode):
arg.generate_deletion_code(
code, ignore_nonexisting=self.ignore_nonexisting)
elif arg.type.is_ptr and arg.type.base_type.is_cpp_class:
- arg.generate_evaluation_code(code)
+ arg.generate_evaluation_code(code)
code.putln("delete %s;" % arg.result())
- arg.generate_disposal_code(code)
+ arg.generate_disposal_code(code)
arg.free_temps(code)
# else error reported earlier
@@ -5905,7 +5905,7 @@ class IndirectionNode(StatListNode):
def __init__(self, stats):
super(IndirectionNode, self).__init__(stats[0].pos, stats=stats)
-
+
class BreakStatNode(StatNode):
child_attrs = []
@@ -5944,12 +5944,12 @@ class ReturnStatNode(StatNode):
# value ExprNode or None
# return_type PyrexType
# in_generator return inside of generator => raise StopIteration
- # in_async_gen return inside of async generator
+ # in_async_gen return inside of async generator
child_attrs = ["value"]
is_terminator = True
in_generator = False
- in_async_gen = False
+ in_async_gen = False
# Whether we are in a parallel section
in_parallel = False
@@ -5961,8 +5961,8 @@ class ReturnStatNode(StatNode):
error(self.pos, "Return not inside a function body")
return self
if self.value:
- if self.in_async_gen:
- error(self.pos, "Return with value in async generator")
+ if self.in_async_gen:
+ error(self.pos, "Return with value in async generator")
self.value = self.value.analyse_types(env)
if return_type.is_void or return_type.is_returncode:
error(self.value.pos, "Return with value in void function")
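The in_async_gen flag enforces the same rule as the CPython compiler: an async generator may only use a bare 'return'. A runnable check against CPython:

    src = "async def ag():\n    yield 1\n    return 42\n"
    try:
        compile(src, "<test>", "exec")
    except SyntaxError as exc:
        print(exc.msg)   # 'return' with value in async generator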
@@ -5986,23 +5986,23 @@ class ReturnStatNode(StatNode):
if not self.return_type:
# error reported earlier
return
-
- value = self.value
+
+ value = self.value
if self.return_type.is_pyobject:
- code.put_xdecref(Naming.retval_cname, self.return_type)
- if value and value.is_none:
- # Use specialised default handling for "return None".
- value = None
+ code.put_xdecref(Naming.retval_cname, self.return_type)
+ if value and value.is_none:
+ # Use specialised default handling for "return None".
+ value = None
- if value:
- value.generate_evaluation_code(code)
+ if value:
+ value.generate_evaluation_code(code)
if self.return_type.is_memoryviewslice:
from . import MemoryView
MemoryView.put_acquire_memoryviewslice(
lhs_cname=Naming.retval_cname,
lhs_type=self.return_type,
- lhs_pos=value.pos,
- rhs=value,
+ lhs_pos=value.pos,
+ rhs=value,
code=code,
have_gil=self.in_nogil_context)
value.generate_post_assignment_code(code)
@@ -6012,22 +6012,22 @@ class ReturnStatNode(StatNode):
UtilityCode.load_cached("ReturnWithStopIteration", "Coroutine.c"))
code.putln("%s = NULL; __Pyx_ReturnWithStopIteration(%s);" % (
Naming.retval_cname,
- value.py_result()))
- value.generate_disposal_code(code)
+ value.py_result()))
+ value.generate_disposal_code(code)
else:
- value.make_owned_reference(code)
+ value.make_owned_reference(code)
code.putln("%s = %s;" % (
Naming.retval_cname,
- value.result_as(self.return_type)))
+ value.result_as(self.return_type)))
value.generate_post_assignment_code(code)
- value.free_temps(code)
+ value.free_temps(code)
else:
if self.return_type.is_pyobject:
if self.in_generator:
- if self.in_async_gen:
- code.globalstate.use_utility_code(
- UtilityCode.load_cached("StopAsyncIteration", "Coroutine.c"))
- code.put("PyErr_SetNone(__Pyx_PyExc_StopAsyncIteration); ")
+ if self.in_async_gen:
+ code.globalstate.use_utility_code(
+ UtilityCode.load_cached("StopAsyncIteration", "Coroutine.c"))
+ code.put("PyErr_SetNone(__Pyx_PyExc_StopAsyncIteration); ")
code.putln("%s = NULL;" % Naming.retval_cname)
else:
code.put_init_to_py_none(Naming.retval_cname, self.return_type)
@@ -6103,8 +6103,8 @@ class RaiseStatNode(StatNode):
if self.exc_type:
self.exc_type.generate_evaluation_code(code)
type_code = self.exc_type.py_result()
- if self.exc_type.is_name:
- code.globalstate.use_entry_utility_code(self.exc_type.entry)
+ if self.exc_type.is_name:
+ code.globalstate.use_entry_utility_code(self.exc_type.entry)
else:
type_code = "0"
if self.exc_value:
@@ -6272,13 +6272,13 @@ class IfStatNode(StatNode):
code.mark_pos(self.pos)
end_label = code.new_label()
last = len(self.if_clauses)
- if self.else_clause:
- # If the 'else' clause is 'unlikely', then set the preceding 'if' clause to 'likely' to reflect that.
- self._set_branch_hint(self.if_clauses[-1], self.else_clause, inverse=True)
- else:
+ if self.else_clause:
+ # If the 'else' clause is 'unlikely', then set the preceding 'if' clause to 'likely' to reflect that.
+ self._set_branch_hint(self.if_clauses[-1], self.else_clause, inverse=True)
+ else:
last -= 1 # avoid redundant goto at end of last if-clause
for i, if_clause in enumerate(self.if_clauses):
- self._set_branch_hint(if_clause, if_clause.body)
+ self._set_branch_hint(if_clause, if_clause.body)
if_clause.generate_execution_code(code, end_label, is_last=i == last)
if self.else_clause:
code.mark_pos(self.else_clause.pos)
@@ -6287,21 +6287,21 @@ class IfStatNode(StatNode):
code.putln("}")
code.put_label(end_label)
- def _set_branch_hint(self, clause, statements_node, inverse=False):
- if not statements_node.is_terminator:
- return
- if not isinstance(statements_node, StatListNode) or not statements_node.stats:
- return
- # Anything that unconditionally raises exceptions should be considered unlikely.
- if isinstance(statements_node.stats[-1], (RaiseStatNode, ReraiseStatNode)):
- if len(statements_node.stats) > 1:
- # Allow simple statements before the 'raise', but no conditions, loops, etc.
- non_branch_nodes = (ExprStatNode, AssignmentNode, DelStatNode, GlobalNode, NonlocalNode)
- for node in statements_node.stats[:-1]:
- if not isinstance(node, non_branch_nodes):
- return
- clause.branch_hint = 'likely' if inverse else 'unlikely'
-
+ def _set_branch_hint(self, clause, statements_node, inverse=False):
+ if not statements_node.is_terminator:
+ return
+ if not isinstance(statements_node, StatListNode) or not statements_node.stats:
+ return
+ # Anything that unconditionally raises exceptions should be considered unlikely.
+ if isinstance(statements_node.stats[-1], (RaiseStatNode, ReraiseStatNode)):
+ if len(statements_node.stats) > 1:
+ # Allow simple statements before the 'raise', but no conditions, loops, etc.
+ non_branch_nodes = (ExprStatNode, AssignmentNode, DelStatNode, GlobalNode, NonlocalNode)
+ for node in statements_node.stats[:-1]:
+ if not isinstance(node, non_branch_nodes):
+ return
+ clause.branch_hint = 'likely' if inverse else 'unlikely'
+
def generate_function_definitions(self, env, code):
for clause in self.if_clauses:
clause.generate_function_definitions(env, code)
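_set_branch_hint marks an if-clause 'unlikely' when its body unconditionally raises (allowing only simple statements before the raise), so the emitted condition is wrapped in unlikely(...); with an else clause the rule is applied inversely to the preceding clause. The shape it recognizes:

    def checked_sqrt(x):
        if x < 0:                         # body ends in a raise, so this is
            raise ValueError("negative")  # compiled as if (unlikely(x < 0))
        return x ** 0.5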
@@ -6322,7 +6322,7 @@ class IfClauseNode(Node):
# body StatNode
child_attrs = ["condition", "body"]
- branch_hint = None
+ branch_hint = None
def analyse_declarations(self, env):
self.body.analyse_declarations(env)
@@ -6335,10 +6335,10 @@ class IfClauseNode(Node):
def generate_execution_code(self, code, end_label, is_last):
self.condition.generate_evaluation_code(code)
code.mark_pos(self.pos)
- condition = self.condition.result()
- if self.branch_hint:
- condition = '%s(%s)' % (self.branch_hint, condition)
- code.putln("if (%s) {" % condition)
+ condition = self.condition.result()
+ if self.branch_hint:
+ condition = '%s(%s)' % (self.branch_hint, condition)
+ code.putln("if (%s) {" % condition)
self.condition.generate_disposal_code(code)
self.condition.free_temps(code)
self.body.generate_execution_code(code)
@@ -6595,66 +6595,66 @@ class DictIterationNextNode(Node):
var.release(code)
-class SetIterationNextNode(Node):
- # Helper node for calling _PySet_NextEntry() inside of a WhileStatNode
- # and checking the set size for changes. Created in Optimize.py.
- child_attrs = ['set_obj', 'expected_size', 'pos_index_var',
- 'coerced_value_var', 'value_target', 'is_set_flag']
-
- coerced_value_var = value_ref = None
-
- def __init__(self, set_obj, expected_size, pos_index_var, value_target, is_set_flag):
- Node.__init__(
- self, set_obj.pos,
- set_obj=set_obj,
- expected_size=expected_size,
- pos_index_var=pos_index_var,
- value_target=value_target,
- is_set_flag=is_set_flag,
- is_temp=True,
- type=PyrexTypes.c_bint_type)
-
- def analyse_expressions(self, env):
- from . import ExprNodes
- self.set_obj = self.set_obj.analyse_types(env)
- self.expected_size = self.expected_size.analyse_types(env)
- self.pos_index_var = self.pos_index_var.analyse_types(env)
- self.value_target = self.value_target.analyse_target_types(env)
- self.value_ref = ExprNodes.TempNode(self.value_target.pos, type=PyrexTypes.py_object_type)
- self.coerced_value_var = self.value_ref.coerce_to(self.value_target.type, env)
- self.is_set_flag = self.is_set_flag.analyse_types(env)
- return self
-
- def generate_function_definitions(self, env, code):
- self.set_obj.generate_function_definitions(env, code)
-
- def generate_execution_code(self, code):
- code.globalstate.use_utility_code(UtilityCode.load_cached("set_iter", "Optimize.c"))
- self.set_obj.generate_evaluation_code(code)
-
- value_ref = self.value_ref
- value_ref.allocate(code)
-
- result_temp = code.funcstate.allocate_temp(PyrexTypes.c_int_type, False)
- code.putln("%s = __Pyx_set_iter_next(%s, %s, &%s, &%s, %s);" % (
- result_temp,
- self.set_obj.py_result(),
- self.expected_size.result(),
- self.pos_index_var.result(),
- value_ref.result(),
- self.is_set_flag.result()
- ))
- code.putln("if (unlikely(%s == 0)) break;" % result_temp)
- code.putln(code.error_goto_if("%s == -1" % result_temp, self.pos))
- code.funcstate.release_temp(result_temp)
-
- # evaluate all coercions before the assignments
- code.put_gotref(value_ref.result())
- self.coerced_value_var.generate_evaluation_code(code)
- self.value_target.generate_assignment_code(self.coerced_value_var, code)
- value_ref.release(code)
-
-
+class SetIterationNextNode(Node):
+ # Helper node for calling _PySet_NextEntry() inside of a WhileStatNode
+ # and checking the set size for changes. Created in Optimize.py.
+ child_attrs = ['set_obj', 'expected_size', 'pos_index_var',
+ 'coerced_value_var', 'value_target', 'is_set_flag']
+
+ coerced_value_var = value_ref = None
+
+ def __init__(self, set_obj, expected_size, pos_index_var, value_target, is_set_flag):
+ Node.__init__(
+ self, set_obj.pos,
+ set_obj=set_obj,
+ expected_size=expected_size,
+ pos_index_var=pos_index_var,
+ value_target=value_target,
+ is_set_flag=is_set_flag,
+ is_temp=True,
+ type=PyrexTypes.c_bint_type)
+
+ def analyse_expressions(self, env):
+ from . import ExprNodes
+ self.set_obj = self.set_obj.analyse_types(env)
+ self.expected_size = self.expected_size.analyse_types(env)
+ self.pos_index_var = self.pos_index_var.analyse_types(env)
+ self.value_target = self.value_target.analyse_target_types(env)
+ self.value_ref = ExprNodes.TempNode(self.value_target.pos, type=PyrexTypes.py_object_type)
+ self.coerced_value_var = self.value_ref.coerce_to(self.value_target.type, env)
+ self.is_set_flag = self.is_set_flag.analyse_types(env)
+ return self
+
+ def generate_function_definitions(self, env, code):
+ self.set_obj.generate_function_definitions(env, code)
+
+ def generate_execution_code(self, code):
+ code.globalstate.use_utility_code(UtilityCode.load_cached("set_iter", "Optimize.c"))
+ self.set_obj.generate_evaluation_code(code)
+
+ value_ref = self.value_ref
+ value_ref.allocate(code)
+
+ result_temp = code.funcstate.allocate_temp(PyrexTypes.c_int_type, False)
+ code.putln("%s = __Pyx_set_iter_next(%s, %s, &%s, &%s, %s);" % (
+ result_temp,
+ self.set_obj.py_result(),
+ self.expected_size.result(),
+ self.pos_index_var.result(),
+ value_ref.result(),
+ self.is_set_flag.result()
+ ))
+ code.putln("if (unlikely(%s == 0)) break;" % result_temp)
+ code.putln(code.error_goto_if("%s == -1" % result_temp, self.pos))
+ code.funcstate.release_temp(result_temp)
+
+ # evaluate all coercions before the assignments
+ code.put_gotref(value_ref.result())
+ self.coerced_value_var.generate_evaluation_code(code)
+ self.value_target.generate_assignment_code(self.coerced_value_var, code)
+ value_ref.release(code)
+
+
def ForStatNode(pos, **kw):
if 'iterator' in kw:
if kw['iterator'].is_async:
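SetIterationNextNode is created by Optimize.py when a for-loop iterates a set directly; besides stepping the iteration, __Pyx_set_iter_next re-checks the expected size on every call, matching CPython's mutation guard:

    s = {1, 2, 3}
    try:
        for v in s:
            s.add(v + 10)    # size change is detected on the next step
    except RuntimeError as exc:
        print(exc)           # Set changed size during iteration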
@@ -6780,11 +6780,11 @@ class AsyncForStatNode(_ForInStatNode):
is_async = True
- def __init__(self, pos, **kw):
+ def __init__(self, pos, **kw):
assert 'item' not in kw
from . import ExprNodes
# AwaitExprNodes must appear before running MarkClosureVisitor
- kw['item'] = ExprNodes.AwaitIterNextExprNode(kw['iterator'].pos, arg=None)
+ kw['item'] = ExprNodes.AwaitIterNextExprNode(kw['iterator'].pos, arg=None)
_ForInStatNode.__init__(self, pos, **kw)
def _create_item_node(self):
@@ -6841,27 +6841,27 @@ class ForFromStatNode(LoopNode, StatNode):
"Consider switching the directions of the relations.", 2)
self.step = self.step.analyse_types(env)
- self.set_up_loop(env)
- target_type = self.target.type
- if not (target_type.is_pyobject or target_type.is_numeric):
- error(self.target.pos, "for-from loop variable must be c numeric type or Python object")
-
- self.body = self.body.analyse_expressions(env)
- if self.else_clause:
- self.else_clause = self.else_clause.analyse_expressions(env)
- return self
-
- def set_up_loop(self, env):
- from . import ExprNodes
-
- target_type = self.target.type
- if target_type.is_numeric:
- loop_type = target_type
+ self.set_up_loop(env)
+ target_type = self.target.type
+ if not (target_type.is_pyobject or target_type.is_numeric):
+ error(self.target.pos, "for-from loop variable must be c numeric type or Python object")
+
+ self.body = self.body.analyse_expressions(env)
+ if self.else_clause:
+ self.else_clause = self.else_clause.analyse_expressions(env)
+ return self
+
+ def set_up_loop(self, env):
+ from . import ExprNodes
+
+ target_type = self.target.type
+ if target_type.is_numeric:
+ loop_type = target_type
else:
- if target_type.is_enum:
- warning(self.target.pos,
- "Integer loops over enum values are fragile. Please cast to a safe integer type instead.")
- loop_type = PyrexTypes.c_long_type if target_type.is_pyobject else PyrexTypes.c_int_type
+ if target_type.is_enum:
+ warning(self.target.pos,
+ "Integer loops over enum values are fragile. Please cast to a safe integer type instead.")
+ loop_type = PyrexTypes.c_long_type if target_type.is_pyobject else PyrexTypes.c_int_type
if not self.bound1.type.is_pyobject:
loop_type = PyrexTypes.widest_numeric_type(loop_type, self.bound1.type)
if not self.bound2.type.is_pyobject:
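ForFromStatNode implements the legacy bounded integer loop; the restored lines require the loop variable to be numeric or a Python object, warn about enum targets, and widen the loop type to cover both bounds. Classic usage:

    def triangle(int n):
        cdef int i
        cdef int total = 0
        for i from 0 <= i < n:   # legacy 'for ... from' bounds syntax
            total += i
        return total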
@@ -6877,7 +6877,7 @@ class ForFromStatNode(LoopNode, StatNode):
if not self.step.is_literal:
self.step = self.step.coerce_to_temp(env)
- if target_type.is_numeric or target_type.is_enum:
+ if target_type.is_numeric or target_type.is_enum:
self.is_py_target = False
if isinstance(self.target, ExprNodes.BufferIndexNode):
raise error(self.pos, "Buffer or memoryview slicing/indexing not allowed as for-loop target.")
@@ -6887,7 +6887,7 @@ class ForFromStatNode(LoopNode, StatNode):
self.is_py_target = True
c_loopvar_node = ExprNodes.TempNode(self.pos, loop_type, env)
self.loopvar_node = c_loopvar_node
- self.py_loopvar_node = ExprNodes.CloneNode(c_loopvar_node).coerce_to_pyobject(env)
+ self.py_loopvar_node = ExprNodes.CloneNode(c_loopvar_node).coerce_to_pyobject(env)
def generate_execution_code(self, code):
code.mark_pos(self.pos)
@@ -6899,23 +6899,23 @@ class ForFromStatNode(LoopNode, StatNode):
if self.step is not None:
self.step.generate_evaluation_code(code)
step = self.step.result()
- incop = "%s=%s" % (incop[0], step) # e.g. '++' => '+= STEP'
- else:
- step = '1'
-
+ incop = "%s=%s" % (incop[0], step) # e.g. '++' => '+= STEP'
+ else:
+ step = '1'
+
from . import ExprNodes
if isinstance(self.loopvar_node, ExprNodes.TempNode):
self.loopvar_node.allocate(code)
if isinstance(self.py_loopvar_node, ExprNodes.TempNode):
self.py_loopvar_node.allocate(code)
-
- loopvar_type = PyrexTypes.c_long_type if self.target.type.is_enum else self.target.type
-
- if from_range and not self.is_py_target:
- loopvar_name = code.funcstate.allocate_temp(loopvar_type, False)
+
+ loopvar_type = PyrexTypes.c_long_type if self.target.type.is_enum else self.target.type
+
+ if from_range and not self.is_py_target:
+ loopvar_name = code.funcstate.allocate_temp(loopvar_type, False)
else:
loopvar_name = self.loopvar_node.result()
- if loopvar_type.is_int and not loopvar_type.signed and self.relation2[0] == '>':
+ if loopvar_type.is_int and not loopvar_type.signed and self.relation2[0] == '>':
# Handle the case where the endpoint of an unsigned int iteration
# is within step of 0.
code.putln("for (%s = %s%s + %s; %s %s %s + %s; ) { %s%s;" % (
@@ -6929,18 +6929,18 @@ class ForFromStatNode(LoopNode, StatNode):
self.bound1.result(), offset,
loopvar_name, self.relation2, self.bound2.result(),
loopvar_name, incop))
-
- coerced_loopvar_node = self.py_loopvar_node
- if coerced_loopvar_node is None and from_range:
- coerced_loopvar_node = ExprNodes.RawCNameExprNode(self.target.pos, loopvar_type, loopvar_name)
- if coerced_loopvar_node is not None:
- coerced_loopvar_node.generate_evaluation_code(code)
- self.target.generate_assignment_code(coerced_loopvar_node, code)
-
+
+ coerced_loopvar_node = self.py_loopvar_node
+ if coerced_loopvar_node is None and from_range:
+ coerced_loopvar_node = ExprNodes.RawCNameExprNode(self.target.pos, loopvar_type, loopvar_name)
+ if coerced_loopvar_node is not None:
+ coerced_loopvar_node.generate_evaluation_code(code)
+ self.target.generate_assignment_code(coerced_loopvar_node, code)
+
self.body.generate_execution_code(code)
code.put_label(code.continue_label)
-
- if not from_range and self.py_loopvar_node:
+
+ if not from_range and self.py_loopvar_node:
# This mess is to make for..from loops with python targets behave
# exactly like those with C targets with regards to re-assignment
# of the loop variable.
@@ -6972,17 +6972,17 @@ class ForFromStatNode(LoopNode, StatNode):
if self.target.entry.is_pyglobal:
code.put_decref(target_node.result(), target_node.type)
target_node.release(code)
-
+
code.putln("}")
-
- if not from_range and self.py_loopvar_node:
+
+ if not from_range and self.py_loopvar_node:
# This is potentially wasteful, but we don't want the semantics to
# depend on whether or not the loop is a python type.
self.py_loopvar_node.generate_evaluation_code(code)
self.target.generate_assignment_code(self.py_loopvar_node, code)
- if from_range and not self.is_py_target:
+ if from_range and not self.is_py_target:
code.funcstate.release_temp(loopvar_name)
-
+
break_label = code.break_label
code.set_loop_labels(old_loop_labels)
if self.else_clause:
@@ -7175,7 +7175,7 @@ class TryExceptStatNode(StatNode):
# else_clause StatNode or None
child_attrs = ["body", "except_clauses", "else_clause"]
- in_generator = False
+ in_generator = False
def analyse_declarations(self, env):
self.body.analyse_declarations(env)
@@ -7214,8 +7214,8 @@ class TryExceptStatNode(StatNode):
except_error_label = code.new_label('except_error')
except_return_label = code.new_label('except_return')
try_return_label = code.new_label('try_return')
- try_break_label = code.new_label('try_break') if old_break_label else None
- try_continue_label = code.new_label('try_continue') if old_continue_label else None
+ try_break_label = code.new_label('try_break') if old_break_label else None
+ try_continue_label = code.new_label('try_continue') if old_continue_label else None
try_end_label = code.new_label('try_end')
exc_save_vars = [code.funcstate.allocate_temp(py_object_type, False)
@@ -7236,9 +7236,9 @@ class TryExceptStatNode(StatNode):
if can_raise:
# inject code before the try block to save away the exception state
code.globalstate.use_utility_code(reset_exception_utility_code)
- if not self.in_generator:
- save_exc.putln("__Pyx_PyThreadState_declare")
- save_exc.putln("__Pyx_PyThreadState_assign")
+ if not self.in_generator:
+ save_exc.putln("__Pyx_PyThreadState_declare")
+ save_exc.putln("__Pyx_PyThreadState_assign")
save_exc.putln("__Pyx_ExceptionSave(%s);" % (
', '.join(['&%s' % var for var in exc_save_vars])))
for var in exc_save_vars:
@@ -7252,8 +7252,8 @@ class TryExceptStatNode(StatNode):
else:
# try block cannot raise exceptions, but we had to allocate the temps above,
# so just keep the C compiler from complaining about them being unused
- mark_vars_used = ["(void)%s;" % var for var in exc_save_vars]
- save_exc.putln("%s /* mark used */" % ' '.join(mark_vars_used))
+ mark_vars_used = ["(void)%s;" % var for var in exc_save_vars]
+ save_exc.putln("%s /* mark used */" % ' '.join(mark_vars_used))
def restore_saved_exception():
pass
@@ -7279,14 +7279,14 @@ class TryExceptStatNode(StatNode):
code.put_label(our_error_label)
for temp_name, temp_type in temps_to_clean_up:
code.put_xdecref_clear(temp_name, temp_type)
-
- outer_except = code.funcstate.current_except
- # Currently points to self, but the ExceptClauseNode would also be ok. Change if needed.
- code.funcstate.current_except = self
+
+ outer_except = code.funcstate.current_except
+ # Currently points to self, but the ExceptClauseNode would also be ok. Change if needed.
+ code.funcstate.current_except = self
for except_clause in self.except_clauses:
except_clause.generate_handling_code(code, except_end_label)
- code.funcstate.current_except = outer_except
-
+ code.funcstate.current_except = outer_except
+
if not self.has_default_clause:
code.put_goto(except_error_label)
@@ -7383,42 +7383,42 @@ class ExceptClauseNode(Node):
def generate_handling_code(self, code, end_label):
code.mark_pos(self.pos)
-
+
if self.pattern:
- has_non_literals = not all(
- pattern.is_literal or pattern.is_simple() and not pattern.is_temp
- for pattern in self.pattern)
-
- if has_non_literals:
- # For non-trivial exception check expressions, hide the live exception from C-API calls.
- exc_vars = [code.funcstate.allocate_temp(py_object_type, manage_ref=True)
- for _ in range(3)]
- code.globalstate.use_utility_code(UtilityCode.load_cached("PyErrFetchRestore", "Exceptions.c"))
- code.putln("__Pyx_ErrFetch(&%s, &%s, &%s);" % tuple(exc_vars))
- code.globalstate.use_utility_code(UtilityCode.load_cached("FastTypeChecks", "ModuleSetupCode.c"))
- exc_test_func = "__Pyx_PyErr_GivenExceptionMatches(%s, %%s)" % exc_vars[0]
- else:
- exc_vars = ()
- code.globalstate.use_utility_code(UtilityCode.load_cached("PyErrExceptionMatches", "Exceptions.c"))
- exc_test_func = "__Pyx_PyErr_ExceptionMatches(%s)"
-
+ has_non_literals = not all(
+ pattern.is_literal or pattern.is_simple() and not pattern.is_temp
+ for pattern in self.pattern)
+
+ if has_non_literals:
+ # For non-trivial exception check expressions, hide the live exception from C-API calls.
+ exc_vars = [code.funcstate.allocate_temp(py_object_type, manage_ref=True)
+ for _ in range(3)]
+ code.globalstate.use_utility_code(UtilityCode.load_cached("PyErrFetchRestore", "Exceptions.c"))
+ code.putln("__Pyx_ErrFetch(&%s, &%s, &%s);" % tuple(exc_vars))
+ code.globalstate.use_utility_code(UtilityCode.load_cached("FastTypeChecks", "ModuleSetupCode.c"))
+ exc_test_func = "__Pyx_PyErr_GivenExceptionMatches(%s, %%s)" % exc_vars[0]
+ else:
+ exc_vars = ()
+ code.globalstate.use_utility_code(UtilityCode.load_cached("PyErrExceptionMatches", "Exceptions.c"))
+ exc_test_func = "__Pyx_PyErr_ExceptionMatches(%s)"
+
exc_tests = []
for pattern in self.pattern:
pattern.generate_evaluation_code(code)
- exc_tests.append(exc_test_func % pattern.py_result())
+ exc_tests.append(exc_test_func % pattern.py_result())
- match_flag = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False)
- code.putln("%s = %s;" % (match_flag, ' || '.join(exc_tests)))
+ match_flag = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False)
+ code.putln("%s = %s;" % (match_flag, ' || '.join(exc_tests)))
for pattern in self.pattern:
pattern.generate_disposal_code(code)
pattern.free_temps(code)
-
- if has_non_literals:
- code.putln("__Pyx_ErrRestore(%s, %s, %s);" % tuple(exc_vars))
- code.putln(' '.join(["%s = 0;" % var for var in exc_vars]))
- for temp in exc_vars:
- code.funcstate.release_temp(temp)
-
+
+ if has_non_literals:
+ code.putln("__Pyx_ErrRestore(%s, %s, %s);" % tuple(exc_vars))
+ code.putln(' '.join(["%s = 0;" % var for var in exc_vars]))
+ for temp in exc_vars:
+ code.funcstate.release_temp(temp)
+
code.putln(
"if (%s) {" %
match_flag)
@@ -7437,7 +7437,7 @@ class ExceptClauseNode(Node):
code.putln("}")
return
- exc_vars = [code.funcstate.allocate_temp(py_object_type, manage_ref=True)
+ exc_vars = [code.funcstate.allocate_temp(py_object_type, manage_ref=True)
for _ in range(3)]
code.put_add_traceback(self.function_name)
# We always have to fetch the exception value even if
@@ -7447,8 +7447,8 @@ class ExceptClauseNode(Node):
exc_args = "&%s, &%s, &%s" % tuple(exc_vars)
code.putln("if (__Pyx_GetException(%s) < 0) %s" % (
exc_args, code.error_goto(self.pos)))
- for var in exc_vars:
- code.put_gotref(var)
+ for var in exc_vars:
+ code.put_gotref(var)
if self.target:
self.exc_value.set_var(exc_vars[1])
self.exc_value.generate_evaluation_code(code)
@@ -7465,7 +7465,7 @@ class ExceptClauseNode(Node):
code.funcstate.exc_vars = exc_vars
self.body.generate_execution_code(code)
code.funcstate.exc_vars = old_exc_vars
-
+
if not self.body.is_terminator:
for var in exc_vars:
# FIXME: XDECREF() is needed to allow re-raising (which clears the exc_vars),
@@ -7509,7 +7509,7 @@ class TryFinallyStatNode(StatNode):
# body StatNode
# finally_clause StatNode
# finally_except_clause deep-copy of finally_clause for exception case
- # in_generator inside of generator => must store away current exception also in return case
+ # in_generator inside of generator => must store away current exception also in return case
#
# Each of the continue, break, return and error gotos runs
# into its own deep-copy of the finally block code.
@@ -7527,7 +7527,7 @@ class TryFinallyStatNode(StatNode):
finally_except_clause = None
is_try_finally_in_nogil = False
- in_generator = False
+ in_generator = False
@staticmethod
def create_analysed(pos, env, body, finally_clause):
@@ -7599,10 +7599,10 @@ class TryFinallyStatNode(StatNode):
code.putln('}')
if preserve_error:
- code.put_label(new_error_label)
+ code.put_label(new_error_label)
code.putln('/*exception exit:*/{')
- if not self.in_generator:
- code.putln("__Pyx_PyThreadState_declare")
+ if not self.in_generator:
+ code.putln("__Pyx_PyThreadState_declare")
if self.is_try_finally_in_nogil:
code.declare_gilstate()
if needs_success_cleanup:
@@ -7650,47 +7650,47 @@ class TryFinallyStatNode(StatNode):
code.set_all_labels(old_labels)
return_label = code.return_label
- exc_vars = ()
-
+ exc_vars = ()
+
for i, (new_label, old_label) in enumerate(zip(new_labels, old_labels)):
if not code.label_used(new_label):
continue
if new_label == new_error_label and preserve_error:
continue # handled above
- code.putln('%s: {' % new_label)
+ code.putln('%s: {' % new_label)
ret_temp = None
- if old_label == return_label:
- # return actually raises an (uncatchable) exception in generators that we must preserve
- if self.in_generator:
- exc_vars = tuple([
- code.funcstate.allocate_temp(py_object_type, manage_ref=False)
- for _ in range(6)])
- self.put_error_catcher(code, [], exc_vars)
- if not self.finally_clause.is_terminator:
- # store away return value for later reuse
- if (self.func_return_type and
- not self.is_try_finally_in_nogil and
- not isinstance(self.finally_clause, GILExitNode)):
- ret_temp = code.funcstate.allocate_temp(
- self.func_return_type, manage_ref=False)
- code.putln("%s = %s;" % (ret_temp, Naming.retval_cname))
- if self.func_return_type.is_pyobject:
- code.putln("%s = 0;" % Naming.retval_cname)
-
- fresh_finally_clause().generate_execution_code(code)
-
- if old_label == return_label:
- if ret_temp:
- code.putln("%s = %s;" % (Naming.retval_cname, ret_temp))
+ if old_label == return_label:
+ # return actually raises an (uncatchable) exception in generators that we must preserve
+ if self.in_generator:
+ exc_vars = tuple([
+ code.funcstate.allocate_temp(py_object_type, manage_ref=False)
+ for _ in range(6)])
+ self.put_error_catcher(code, [], exc_vars)
+ if not self.finally_clause.is_terminator:
+ # store away return value for later reuse
+ if (self.func_return_type and
+ not self.is_try_finally_in_nogil and
+ not isinstance(self.finally_clause, GILExitNode)):
+ ret_temp = code.funcstate.allocate_temp(
+ self.func_return_type, manage_ref=False)
+ code.putln("%s = %s;" % (ret_temp, Naming.retval_cname))
+ if self.func_return_type.is_pyobject:
+ code.putln("%s = 0;" % Naming.retval_cname)
+
+ fresh_finally_clause().generate_execution_code(code)
+
+ if old_label == return_label:
+ if ret_temp:
+ code.putln("%s = %s;" % (Naming.retval_cname, ret_temp))
if self.func_return_type.is_pyobject:
- code.putln("%s = 0;" % ret_temp)
- code.funcstate.release_temp(ret_temp)
- if self.in_generator:
- self.put_error_uncatcher(code, exc_vars)
+ code.putln("%s = 0;" % ret_temp)
+ code.funcstate.release_temp(ret_temp)
+ if self.in_generator:
+ self.put_error_uncatcher(code, exc_vars)
for cname in exc_vars:
code.funcstate.release_temp(cname)
-
+
if not self.finally_clause.is_terminator:
code.put_goto(old_label)
code.putln('}')
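The return_label handling above reflects generator semantics: in a generator, return surfaces as a StopIteration that is already in flight while the finally clause runs, so six temporaries are allocated to stash the full exception state around the finally code. The behaviour being preserved is observable from plain Python:

def gen():
    try:
        yield 1
        return "done"      # internally raises StopIteration("done")
    finally:
        print("cleanup")   # runs while that exception is in flight

g = gen()
next(g)
try:
    next(g)
except StopIteration as exc:
    assert exc.value == "done"   # the return value survived the finally block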
@@ -7705,7 +7705,7 @@ class TryFinallyStatNode(StatNode):
self.finally_clause.generate_function_definitions(env, code)
def put_error_catcher(self, code, temps_to_clean_up, exc_vars,
- exc_lineno_cnames=None, exc_filename_cname=None):
+ exc_lineno_cnames=None, exc_filename_cname=None):
code.globalstate.use_utility_code(restore_exception_utility_code)
code.globalstate.use_utility_code(get_exception_utility_code)
code.globalstate.use_utility_code(swap_exception_utility_code)
@@ -7714,7 +7714,7 @@ class TryFinallyStatNode(StatNode):
code.put_ensure_gil(declare_gilstate=False)
code.putln("__Pyx_PyThreadState_assign")
- code.putln(' '.join(["%s = 0;" % var for var in exc_vars]))
+ code.putln(' '.join(["%s = 0;" % var for var in exc_vars]))
for temp_name, type in temps_to_clean_up:
code.put_xdecref_clear(temp_name, type)
@@ -7738,7 +7738,7 @@ class TryFinallyStatNode(StatNode):
if self.is_try_finally_in_nogil:
code.put_release_ensured_gil()
- def put_error_uncatcher(self, code, exc_vars, exc_lineno_cnames=None, exc_filename_cname=None):
+ def put_error_uncatcher(self, code, exc_vars, exc_lineno_cnames=None, exc_filename_cname=None):
code.globalstate.use_utility_code(restore_exception_utility_code)
code.globalstate.use_utility_code(reset_exception_utility_code)
@@ -7759,7 +7759,7 @@ class TryFinallyStatNode(StatNode):
if self.is_try_finally_in_nogil:
code.put_release_ensured_gil()
- code.putln(' '.join(["%s = 0;" % var for var in exc_vars]))
+ code.putln(' '.join(["%s = 0;" % var for var in exc_vars]))
if exc_lineno_cnames:
code.putln("%s = %s; %s = %s; %s = %s;" % (
Naming.lineno_cname, exc_lineno_cnames[0],
@@ -7818,7 +7818,7 @@ class GILStatNode(NogilTryFinallyStatNode):
from .ParseTreeTransforms import YieldNodeCollector
collector = YieldNodeCollector()
collector.visitchildren(body)
- if not collector.yields:
+ if not collector.yields:
return
if state == 'gil':
@@ -8235,17 +8235,17 @@ class ParallelStatNode(StatNode, ParallelNode):
if self.kwargs:
# Try to find num_threads and chunksize keyword arguments
pairs = []
- seen = set()
+ seen = set()
for dictitem in self.kwargs.key_value_pairs:
- if dictitem.key.value in seen:
- error(self.pos, "Duplicate keyword argument found: %s" % dictitem.key.value)
- seen.add(dictitem.key.value)
+ if dictitem.key.value in seen:
+ error(self.pos, "Duplicate keyword argument found: %s" % dictitem.key.value)
+ seen.add(dictitem.key.value)
if dictitem.key.value == 'num_threads':
- if not dictitem.value.is_none:
- self.num_threads = dictitem.value
+ if not dictitem.value.is_none:
+ self.num_threads = dictitem.value
elif self.is_prange and dictitem.key.value == 'chunksize':
- if not dictitem.value.is_none:
- self.chunksize = dictitem.value
+ if not dictitem.value.is_none:
+ self.chunksize = dictitem.value
else:
pairs.append(dictitem)
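The loop above pulls num_threads (and, for prange, chunksize) out of the keyword arguments while rejecting duplicates; everything else is kept in pairs for later validation. A standalone sketch of the same filtering (split_parallel_kwargs is a hypothetical helper; the real logic operates inline on parse-tree nodes):

def split_parallel_kwargs(pairs, is_prange):
    seen, num_threads, chunksize, rest = set(), None, None, []
    for key, value in pairs:
        if key in seen:
            raise ValueError("Duplicate keyword argument found: %s" % key)
        seen.add(key)
        if key == 'num_threads':
            if value is not None:      # mirrors the is_none check on the node
                num_threads = value
        elif is_prange and key == 'chunksize':
            if value is not None:
                chunksize = value
        else:
            rest.append((key, value))
    return num_threads, chunksize, rest

assert split_parallel_kwargs([('num_threads', 4), ('schedule', 'static')], True) == \
    (4, None, [('schedule', 'static')])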
@@ -8285,7 +8285,7 @@ class ParallelStatNode(StatNode, ParallelNode):
self.num_threads.compile_time_value(env) <= 0):
error(self.pos, "argument to num_threads must be greater than 0")
- if not self.num_threads.is_simple() or self.num_threads.type.is_pyobject:
+ if not self.num_threads.is_simple() or self.num_threads.type.is_pyobject:
self.num_threads = self.num_threads.coerce_to(
PyrexTypes.c_int_type, env).coerce_to_temp(env)
return self
@@ -8687,7 +8687,7 @@ class ParallelStatNode(StatNode, ParallelNode):
invalid_value = entry.type.invalid_value()
if invalid_value:
- init = ' = ' + entry.type.cast_code(invalid_value)
+ init = ' = ' + entry.type.cast_code(invalid_value)
else:
init = ''
# Declare the parallel private in the outer block
diff --git a/contrib/tools/cython/Cython/Compiler/Optimize.py b/contrib/tools/cython/Cython/Compiler/Optimize.py
index a6fdfaee9f..3cb77efe2c 100644
--- a/contrib/tools/cython/Cython/Compiler/Optimize.py
+++ b/contrib/tools/cython/Cython/Compiler/Optimize.py
@@ -1,6 +1,6 @@
from __future__ import absolute_import
-import re
+import re
import sys
import copy
import codecs
@@ -9,16 +9,16 @@ import itertools
from . import TypeSlots
from .ExprNodes import not_a_constant
import cython
-cython.declare(UtilityCode=object, EncodedString=object, bytes_literal=object, encoded_string=object,
+cython.declare(UtilityCode=object, EncodedString=object, bytes_literal=object, encoded_string=object,
Nodes=object, ExprNodes=object, PyrexTypes=object, Builtin=object,
UtilNodes=object, _py_int_types=object)
if sys.version_info[0] >= 3:
_py_int_types = int
- _py_string_types = (bytes, str)
+ _py_string_types = (bytes, str)
else:
_py_int_types = (int, long)
- _py_string_types = (bytes, unicode)
+ _py_string_types = (bytes, unicode)
from . import Nodes
from . import ExprNodes
@@ -29,8 +29,8 @@ from . import UtilNodes
from . import Options
from .Code import UtilityCode, TempitaUtilityCode
-from .StringEncoding import EncodedString, bytes_literal, encoded_string
-from .Errors import error, warning
+from .StringEncoding import EncodedString, bytes_literal, encoded_string
+from .Errors import error, warning
from .ParseTreeTransforms import SkipDeclarations
try:
@@ -189,61 +189,61 @@ class IterationTransform(Visitor.EnvTransform):
self.visitchildren(node)
return self._optimise_for_loop(node, node.iterator.sequence)
- def _optimise_for_loop(self, node, iterable, reversed=False):
- annotation_type = None
- if (iterable.is_name or iterable.is_attribute) and iterable.entry and iterable.entry.annotation:
- annotation = iterable.entry.annotation
- if annotation.is_subscript:
- annotation = annotation.base # container base type
- # FIXME: generalise annotation evaluation => maybe provide a "qualified name" also for imported names?
- if annotation.is_name:
- if annotation.entry and annotation.entry.qualified_name == 'typing.Dict':
- annotation_type = Builtin.dict_type
- elif annotation.name == 'Dict':
- annotation_type = Builtin.dict_type
- if annotation.entry and annotation.entry.qualified_name in ('typing.Set', 'typing.FrozenSet'):
- annotation_type = Builtin.set_type
- elif annotation.name in ('Set', 'FrozenSet'):
- annotation_type = Builtin.set_type
-
- if Builtin.dict_type in (iterable.type, annotation_type):
+ def _optimise_for_loop(self, node, iterable, reversed=False):
+ annotation_type = None
+ if (iterable.is_name or iterable.is_attribute) and iterable.entry and iterable.entry.annotation:
+ annotation = iterable.entry.annotation
+ if annotation.is_subscript:
+ annotation = annotation.base # container base type
+ # FIXME: generalise annotation evaluation => maybe provide a "qualified name" also for imported names?
+ if annotation.is_name:
+ if annotation.entry and annotation.entry.qualified_name == 'typing.Dict':
+ annotation_type = Builtin.dict_type
+ elif annotation.name == 'Dict':
+ annotation_type = Builtin.dict_type
+ if annotation.entry and annotation.entry.qualified_name in ('typing.Set', 'typing.FrozenSet'):
+ annotation_type = Builtin.set_type
+ elif annotation.name in ('Set', 'FrozenSet'):
+ annotation_type = Builtin.set_type
+
+ if Builtin.dict_type in (iterable.type, annotation_type):
# like iterating over dict.keys()
if reversed:
# CPython raises an error here: not a sequence
return node
return self._transform_dict_iteration(
- node, dict_obj=iterable, method=None, keys=True, values=False)
-
- if (Builtin.set_type in (iterable.type, annotation_type) or
- Builtin.frozenset_type in (iterable.type, annotation_type)):
- if reversed:
- # CPython raises an error here: not a sequence
- return node
- return self._transform_set_iteration(node, iterable)
-
+ node, dict_obj=iterable, method=None, keys=True, values=False)
+
+ if (Builtin.set_type in (iterable.type, annotation_type) or
+ Builtin.frozenset_type in (iterable.type, annotation_type)):
+ if reversed:
+ # CPython raises an error here: not a sequence
+ return node
+ return self._transform_set_iteration(node, iterable)
+
# C array (slice) iteration?
- if iterable.type.is_ptr or iterable.type.is_array:
- return self._transform_carray_iteration(node, iterable, reversed=reversed)
- if iterable.type is Builtin.bytes_type:
- return self._transform_bytes_iteration(node, iterable, reversed=reversed)
- if iterable.type is Builtin.unicode_type:
- return self._transform_unicode_iteration(node, iterable, reversed=reversed)
+ if iterable.type.is_ptr or iterable.type.is_array:
+ return self._transform_carray_iteration(node, iterable, reversed=reversed)
+ if iterable.type is Builtin.bytes_type:
+ return self._transform_bytes_iteration(node, iterable, reversed=reversed)
+ if iterable.type is Builtin.unicode_type:
+ return self._transform_unicode_iteration(node, iterable, reversed=reversed)
# the rest is based on function calls
- if not isinstance(iterable, ExprNodes.SimpleCallNode):
+ if not isinstance(iterable, ExprNodes.SimpleCallNode):
return node
- if iterable.args is None:
- arg_count = iterable.arg_tuple and len(iterable.arg_tuple.args) or 0
+ if iterable.args is None:
+ arg_count = iterable.arg_tuple and len(iterable.arg_tuple.args) or 0
else:
- arg_count = len(iterable.args)
- if arg_count and iterable.self is not None:
+ arg_count = len(iterable.args)
+ if arg_count and iterable.self is not None:
arg_count -= 1
- function = iterable.function
+ function = iterable.function
# dict iteration?
if function.is_attribute and not reversed and not arg_count:
- base_obj = iterable.self or function.obj
+ base_obj = iterable.self or function.obj
method = function.attribute
# in Py3, items() is equivalent to Py2's iteritems()
is_safe_iter = self.global_scope().context.language_level >= 3
@@ -271,35 +271,35 @@ class IterationTransform(Visitor.EnvTransform):
node, base_obj, method, keys, values)
# enumerate/reversed ?
- if iterable.self is None and function.is_name and \
+ if iterable.self is None and function.is_name and \
function.entry and function.entry.is_builtin:
if function.name == 'enumerate':
if reversed:
# CPython raises an error here: not a sequence
return node
- return self._transform_enumerate_iteration(node, iterable)
+ return self._transform_enumerate_iteration(node, iterable)
elif function.name == 'reversed':
if reversed:
# CPython raises an error here: not a sequence
return node
- return self._transform_reversed_iteration(node, iterable)
+ return self._transform_reversed_iteration(node, iterable)
# range() iteration?
if Options.convert_range and 1 <= arg_count <= 3 and (
- iterable.self is None and
- function.is_name and function.name in ('range', 'xrange') and
- function.entry and function.entry.is_builtin):
- if node.target.type.is_int or node.target.type.is_enum:
- return self._transform_range_iteration(node, iterable, reversed=reversed)
- if node.target.type.is_pyobject:
- # Assume that small integer ranges (C long >= 32bit) are best handled in C as well.
- for arg in (iterable.arg_tuple.args if iterable.args is None else iterable.args):
- if isinstance(arg, ExprNodes.IntNode):
- if arg.has_constant_result() and -2**30 <= arg.constant_result < 2**30:
- continue
- break
- else:
- return self._transform_range_iteration(node, iterable, reversed=reversed)
+ iterable.self is None and
+ function.is_name and function.name in ('range', 'xrange') and
+ function.entry and function.entry.is_builtin):
+ if node.target.type.is_int or node.target.type.is_enum:
+ return self._transform_range_iteration(node, iterable, reversed=reversed)
+ if node.target.type.is_pyobject:
+ # Assume that small integer ranges (C long >= 32bit) are best handled in C as well.
+ for arg in (iterable.arg_tuple.args if iterable.args is None else iterable.args):
+ if isinstance(arg, ExprNodes.IntNode):
+ if arg.has_constant_result() and -2**30 <= arg.constant_result < 2**30:
+ continue
+ break
+ else:
+ return self._transform_range_iteration(node, iterable, reversed=reversed)
return node
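Taken together, _optimise_for_loop is a dispatcher: it first consults declared types and typing annotations (Dict, Set, FrozenSet), then C pointers/arrays, bytes and unicode, and finally call patterns such as dict methods, enumerate(), reversed() and range(). The new annotation branch means even an otherwise untyped argument can take the fast path; a hedged illustration of the source it enables:

from typing import Dict

def count_keys(d: Dict) -> int:
    n = 0
    for _key in d:      # annotation-driven: compiled via the dict fast path
        n += 1
    return n

assert count_keys({'a': 1, 'b': 2}) == 2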
@@ -804,7 +804,7 @@ class IterationTransform(Visitor.EnvTransform):
step=step, body=node.body,
else_clause=node.else_clause,
from_range=True)
- for_node.set_up_loop(self.current_env())
+ for_node.set_up_loop(self.current_env())
if bound2_is_temp:
for_node = UtilNodes.LetNode(bound2, for_node)
@@ -929,7 +929,7 @@ class IterationTransform(Visitor.EnvTransform):
method_node = ExprNodes.StringNode(
dict_obj.pos, is_identifier=True, value=method)
dict_obj = dict_obj.as_none_safe_node(
- "'NoneType' object has no attribute '%{0}s'".format('.30' if len(method) <= 30 else ''),
+ "'NoneType' object has no attribute '%{0}s'".format('.30' if len(method) <= 30 else ''),
error = "PyExc_AttributeError",
format_args = [method])
else:
@@ -983,86 +983,86 @@ class IterationTransform(Visitor.EnvTransform):
PyrexTypes.CFuncTypeArg("p_is_dict", PyrexTypes.c_int_ptr_type, None),
])
- PySet_Iterator_func_type = PyrexTypes.CFuncType(
- PyrexTypes.py_object_type, [
- PyrexTypes.CFuncTypeArg("set", PyrexTypes.py_object_type, None),
- PyrexTypes.CFuncTypeArg("is_set", PyrexTypes.c_int_type, None),
- PyrexTypes.CFuncTypeArg("p_orig_length", PyrexTypes.c_py_ssize_t_ptr_type, None),
- PyrexTypes.CFuncTypeArg("p_is_set", PyrexTypes.c_int_ptr_type, None),
- ])
-
- def _transform_set_iteration(self, node, set_obj):
- temps = []
- temp = UtilNodes.TempHandle(PyrexTypes.py_object_type)
- temps.append(temp)
- set_temp = temp.ref(set_obj.pos)
- temp = UtilNodes.TempHandle(PyrexTypes.c_py_ssize_t_type)
- temps.append(temp)
- pos_temp = temp.ref(node.pos)
-
- if isinstance(node.body, Nodes.StatListNode):
- body = node.body
- else:
- body = Nodes.StatListNode(pos = node.body.pos,
- stats = [node.body])
-
- # keep original length to guard against set modification
- set_len_temp = UtilNodes.TempHandle(PyrexTypes.c_py_ssize_t_type)
- temps.append(set_len_temp)
- set_len_temp_addr = ExprNodes.AmpersandNode(
- node.pos, operand=set_len_temp.ref(set_obj.pos),
- type=PyrexTypes.c_ptr_type(set_len_temp.type))
- temp = UtilNodes.TempHandle(PyrexTypes.c_int_type)
- temps.append(temp)
- is_set_temp = temp.ref(node.pos)
- is_set_temp_addr = ExprNodes.AmpersandNode(
- node.pos, operand=is_set_temp,
- type=PyrexTypes.c_ptr_type(temp.type))
-
- value_target = node.target
- iter_next_node = Nodes.SetIterationNextNode(
- set_temp, set_len_temp.ref(set_obj.pos), pos_temp, value_target, is_set_temp)
- iter_next_node = iter_next_node.analyse_expressions(self.current_env())
- body.stats[0:0] = [iter_next_node]
-
- def flag_node(value):
- value = value and 1 or 0
- return ExprNodes.IntNode(node.pos, value=str(value), constant_result=value)
-
- result_code = [
- Nodes.SingleAssignmentNode(
- node.pos,
- lhs=pos_temp,
- rhs=ExprNodes.IntNode(node.pos, value='0', constant_result=0)),
- Nodes.SingleAssignmentNode(
- set_obj.pos,
- lhs=set_temp,
- rhs=ExprNodes.PythonCapiCallNode(
- set_obj.pos,
- "__Pyx_set_iterator",
- self.PySet_Iterator_func_type,
- utility_code=UtilityCode.load_cached("set_iter", "Optimize.c"),
- args=[set_obj, flag_node(set_obj.type is Builtin.set_type),
- set_len_temp_addr, is_set_temp_addr,
- ],
- is_temp=True,
- )),
- Nodes.WhileStatNode(
- node.pos,
- condition=None,
- body=body,
- else_clause=node.else_clause,
- )
- ]
-
- return UtilNodes.TempsBlockNode(
- node.pos, temps=temps,
- body=Nodes.StatListNode(
- node.pos,
- stats = result_code
- ))
-
-
+ PySet_Iterator_func_type = PyrexTypes.CFuncType(
+ PyrexTypes.py_object_type, [
+ PyrexTypes.CFuncTypeArg("set", PyrexTypes.py_object_type, None),
+ PyrexTypes.CFuncTypeArg("is_set", PyrexTypes.c_int_type, None),
+ PyrexTypes.CFuncTypeArg("p_orig_length", PyrexTypes.c_py_ssize_t_ptr_type, None),
+ PyrexTypes.CFuncTypeArg("p_is_set", PyrexTypes.c_int_ptr_type, None),
+ ])
+
+ def _transform_set_iteration(self, node, set_obj):
+ temps = []
+ temp = UtilNodes.TempHandle(PyrexTypes.py_object_type)
+ temps.append(temp)
+ set_temp = temp.ref(set_obj.pos)
+ temp = UtilNodes.TempHandle(PyrexTypes.c_py_ssize_t_type)
+ temps.append(temp)
+ pos_temp = temp.ref(node.pos)
+
+ if isinstance(node.body, Nodes.StatListNode):
+ body = node.body
+ else:
+ body = Nodes.StatListNode(pos = node.body.pos,
+ stats = [node.body])
+
+ # keep original length to guard against set modification
+ set_len_temp = UtilNodes.TempHandle(PyrexTypes.c_py_ssize_t_type)
+ temps.append(set_len_temp)
+ set_len_temp_addr = ExprNodes.AmpersandNode(
+ node.pos, operand=set_len_temp.ref(set_obj.pos),
+ type=PyrexTypes.c_ptr_type(set_len_temp.type))
+ temp = UtilNodes.TempHandle(PyrexTypes.c_int_type)
+ temps.append(temp)
+ is_set_temp = temp.ref(node.pos)
+ is_set_temp_addr = ExprNodes.AmpersandNode(
+ node.pos, operand=is_set_temp,
+ type=PyrexTypes.c_ptr_type(temp.type))
+
+ value_target = node.target
+ iter_next_node = Nodes.SetIterationNextNode(
+ set_temp, set_len_temp.ref(set_obj.pos), pos_temp, value_target, is_set_temp)
+ iter_next_node = iter_next_node.analyse_expressions(self.current_env())
+ body.stats[0:0] = [iter_next_node]
+
+ def flag_node(value):
+ value = value and 1 or 0
+ return ExprNodes.IntNode(node.pos, value=str(value), constant_result=value)
+
+ result_code = [
+ Nodes.SingleAssignmentNode(
+ node.pos,
+ lhs=pos_temp,
+ rhs=ExprNodes.IntNode(node.pos, value='0', constant_result=0)),
+ Nodes.SingleAssignmentNode(
+ set_obj.pos,
+ lhs=set_temp,
+ rhs=ExprNodes.PythonCapiCallNode(
+ set_obj.pos,
+ "__Pyx_set_iterator",
+ self.PySet_Iterator_func_type,
+ utility_code=UtilityCode.load_cached("set_iter", "Optimize.c"),
+ args=[set_obj, flag_node(set_obj.type is Builtin.set_type),
+ set_len_temp_addr, is_set_temp_addr,
+ ],
+ is_temp=True,
+ )),
+ Nodes.WhileStatNode(
+ node.pos,
+ condition=None,
+ body=body,
+ else_clause=node.else_clause,
+ )
+ ]
+
+ return UtilNodes.TempsBlockNode(
+ node.pos, temps=temps,
+ body=Nodes.StatListNode(
+ node.pos,
+ stats = result_code
+ ))
+
+
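_transform_set_iteration rewrites "for x in s" over a set into a while loop driven by __Pyx_set_iterator, keeping the original length in a temporary so concurrent modification can be detected. Roughly the following shape in Python terms (the cursor below is illustrative; the real loop walks the set's hash table in C):

def iterate_set(s):
    orig_len = len(s)          # guard against modification during iteration
    cursor = iter(s)           # stands in for the C-level position index
    while True:
        if len(s) != orig_len:
            raise RuntimeError("set changed size during iteration")
        try:
            value = next(cursor)
        except StopIteration:
            break
        yield value

assert sorted(iterate_set({3, 1, 2})) == [1, 2, 3]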
class SwitchTransform(Visitor.EnvTransform):
"""
This transformation tries to turn long if statements into C switch statements.
@@ -2035,11 +2035,11 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
"""
### cleanup to avoid redundant coercions to/from Python types
- def visit_PyTypeTestNode(self, node):
+ def visit_PyTypeTestNode(self, node):
"""Flatten redundant type checks after tree changes.
"""
self.visitchildren(node)
- return node.reanalyse()
+ return node.reanalyse()
def _visit_TypecastNode(self, node):
# disabled - the user may have had a reason to put a type
@@ -2054,18 +2054,18 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
def visit_ExprStatNode(self, node):
"""
- Drop dead code and useless coercions.
+ Drop dead code and useless coercions.
"""
self.visitchildren(node)
if isinstance(node.expr, ExprNodes.CoerceToPyTypeNode):
node.expr = node.expr.arg
- expr = node.expr
- if expr is None or expr.is_none or expr.is_literal:
- # Expression was removed or is dead code => remove ExprStatNode as well.
- return None
- if expr.is_name and expr.entry and (expr.entry.is_local or expr.entry.is_arg):
- # Ignore dead references to local variables etc.
- return None
+ expr = node.expr
+ if expr is None or expr.is_none or expr.is_literal:
+ # Expression was removed or is dead code => remove ExprStatNode as well.
+ return None
+ if expr.is_name and expr.entry and (expr.entry.is_local or expr.entry.is_arg):
+ # Ignore dead references to local variables etc.
+ return None
return node
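The added checks in visit_ExprStatNode drop whole statements whose expression can have no effect: removed coercions that left behind a bare literal or None, and dead reads of local variables or arguments. A small example of what is removed versus kept:

def f(x):
    42        # bare literal: the whole statement is removed
    x         # dead read of a local: removed as well
    x + 0     # kept: the addition may have side effects via __add__
    return x

assert f(1) == 1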
def visit_CoerceToBooleanNode(self, node):
@@ -2283,8 +2283,8 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
attribute=attr_name,
is_called=True).analyse_as_type_attribute(self.current_env())
if method is None:
- return self._optimise_generic_builtin_method_call(
- node, attr_name, function, arg_list, is_unbound_method)
+ return self._optimise_generic_builtin_method_call(
+ node, attr_name, function, arg_list, is_unbound_method)
args = node.args
if args is None and node.arg_tuple:
args = node.arg_tuple.args
@@ -2300,62 +2300,62 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
### builtin types
- def _optimise_generic_builtin_method_call(self, node, attr_name, function, arg_list, is_unbound_method):
- """
- Try to inject an unbound method call for a call to a method of a known builtin type.
- This enables caching the underlying C function of the method at runtime.
- """
- arg_count = len(arg_list)
- if is_unbound_method or arg_count >= 3 or not (function.is_attribute and function.is_py_attr):
- return node
- if not function.obj.type.is_builtin_type:
- return node
- if function.obj.type.name in ('basestring', 'type'):
- # these allow different actual types => unsafe
- return node
- return ExprNodes.CachedBuiltinMethodCallNode(
- node, function.obj, attr_name, arg_list)
-
- PyObject_Unicode_func_type = PyrexTypes.CFuncType(
- Builtin.unicode_type, [
- PyrexTypes.CFuncTypeArg("obj", PyrexTypes.py_object_type, None)
- ])
-
- def _handle_simple_function_unicode(self, node, function, pos_args):
- """Optimise single argument calls to unicode().
- """
- if len(pos_args) != 1:
- if len(pos_args) == 0:
- return ExprNodes.UnicodeNode(node.pos, value=EncodedString(), constant_result=u'')
- return node
- arg = pos_args[0]
- if arg.type is Builtin.unicode_type:
- if not arg.may_be_none():
- return arg
- cname = "__Pyx_PyUnicode_Unicode"
- utility_code = UtilityCode.load_cached('PyUnicode_Unicode', 'StringTools.c')
- else:
- cname = "__Pyx_PyObject_Unicode"
- utility_code = UtilityCode.load_cached('PyObject_Unicode', 'StringTools.c')
- return ExprNodes.PythonCapiCallNode(
- node.pos, cname, self.PyObject_Unicode_func_type,
- args=pos_args,
- is_temp=node.is_temp,
- utility_code=utility_code,
- py_name="unicode")
-
- def visit_FormattedValueNode(self, node):
- """Simplify or avoid plain string formatting of a unicode value.
- This seems misplaced here, but plain unicode formatting is essentially
- a call to the unicode() builtin, which is optimised right above.
- """
- self.visitchildren(node)
- if node.value.type is Builtin.unicode_type and not node.c_format_spec and not node.format_spec:
- if not node.conversion_char or node.conversion_char == 's':
- # value is definitely a unicode string and we don't format it in any special way
- return self._handle_simple_function_unicode(node, None, [node.value])
- return node
-
+ def _optimise_generic_builtin_method_call(self, node, attr_name, function, arg_list, is_unbound_method):
+ """
+ Try to inject an unbound method call for a call to a method of a known builtin type.
+ This enables caching the underlying C function of the method at runtime.
+ """
+ arg_count = len(arg_list)
+ if is_unbound_method or arg_count >= 3 or not (function.is_attribute and function.is_py_attr):
+ return node
+ if not function.obj.type.is_builtin_type:
+ return node
+ if function.obj.type.name in ('basestring', 'type'):
+ # these allow different actual types => unsafe
+ return node
+ return ExprNodes.CachedBuiltinMethodCallNode(
+ node, function.obj, attr_name, arg_list)
+
+ PyObject_Unicode_func_type = PyrexTypes.CFuncType(
+ Builtin.unicode_type, [
+ PyrexTypes.CFuncTypeArg("obj", PyrexTypes.py_object_type, None)
+ ])
+
+ def _handle_simple_function_unicode(self, node, function, pos_args):
+ """Optimise single argument calls to unicode().
+ """
+ if len(pos_args) != 1:
+ if len(pos_args) == 0:
+ return ExprNodes.UnicodeNode(node.pos, value=EncodedString(), constant_result=u'')
+ return node
+ arg = pos_args[0]
+ if arg.type is Builtin.unicode_type:
+ if not arg.may_be_none():
+ return arg
+ cname = "__Pyx_PyUnicode_Unicode"
+ utility_code = UtilityCode.load_cached('PyUnicode_Unicode', 'StringTools.c')
+ else:
+ cname = "__Pyx_PyObject_Unicode"
+ utility_code = UtilityCode.load_cached('PyObject_Unicode', 'StringTools.c')
+ return ExprNodes.PythonCapiCallNode(
+ node.pos, cname, self.PyObject_Unicode_func_type,
+ args=pos_args,
+ is_temp=node.is_temp,
+ utility_code=utility_code,
+ py_name="unicode")
+
+ def visit_FormattedValueNode(self, node):
+ """Simplify or avoid plain string formatting of a unicode value.
+ This seems misplaced here, but plain unicode formatting is essentially
+ a call to the unicode() builtin, which is optimised right above.
+ """
+ self.visitchildren(node)
+ if node.value.type is Builtin.unicode_type and not node.c_format_spec and not node.format_spec:
+ if not node.conversion_char or node.conversion_char == 's':
+ # value is definitely a unicode string and we don't format it in any special way
+ return self._handle_simple_function_unicode(node, None, [node.value])
+ return node
+
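_handle_simple_function_unicode and visit_FormattedValueNode cooperate: unicode(x) becomes a single C call (a pass-through when the argument is already a known non-None unicode string, the empty-string constant for zero arguments), and a plain f-string interpolation with no format spec or conversion is folded into the same helper. A hedged, runnable illustration of the f-string side:

name = u"abc"
s = f"{name}"   # spec-free, conversion-free, value is known unicode:
                # folded into the unicode() fast path (returned as-is)
assert s == u"abc"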
PyDict_Copy_func_type = PyrexTypes.CFuncType(
Builtin.dict_type, [
PyrexTypes.CFuncTypeArg("dict", Builtin.dict_type, None)
@@ -2398,7 +2398,7 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
def _handle_simple_function_tuple(self, node, function, pos_args):
"""Replace tuple([...]) by PyList_AsTuple or PySequence_Tuple.
"""
- if len(pos_args) != 1 or not node.is_temp:
+ if len(pos_args) != 1 or not node.is_temp:
return node
arg = pos_args[0]
if arg.type is Builtin.tuple_type and not arg.may_be_none():
@@ -2411,7 +2411,7 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
node.pos, "PyList_AsTuple", self.PyList_AsTuple_func_type,
args=pos_args, is_temp=node.is_temp)
else:
- return ExprNodes.AsTupleNode(node.pos, arg=arg, type=Builtin.tuple_type)
+ return ExprNodes.AsTupleNode(node.pos, arg=arg, type=Builtin.tuple_type)
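The tuple() handling just above picks between three outcomes: a known non-None tuple argument is returned as-is, a known list goes through PyList_AsTuple, and anything else becomes an AsTupleNode (generic sequence conversion). For instance:

def as_tuple(xs: list) -> tuple:
    return tuple(xs)       # known list -> PyList_AsTuple(xs) in generated C

assert as_tuple([1, 2]) == (1, 2)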
PySet_New_func_type = PyrexTypes.CFuncType(
Builtin.set_type, [
@@ -2577,7 +2577,7 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
_map_to_capi_len_function = {
Builtin.unicode_type: "__Pyx_PyUnicode_GET_LENGTH",
Builtin.bytes_type: "PyBytes_GET_SIZE",
- Builtin.bytearray_type: 'PyByteArray_GET_SIZE',
+ Builtin.bytearray_type: 'PyByteArray_GET_SIZE',
Builtin.list_type: "PyList_GET_SIZE",
Builtin.tuple_type: "PyTuple_GET_SIZE",
Builtin.set_type: "PySet_GET_SIZE",
@@ -2609,14 +2609,14 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
node.pos, "__Pyx_Py_UNICODE_strlen", self.Pyx_Py_UNICODE_strlen_func_type,
args = [arg],
is_temp = node.is_temp)
- elif arg.type.is_memoryviewslice:
- func_type = PyrexTypes.CFuncType(
- PyrexTypes.c_size_t_type, [
- PyrexTypes.CFuncTypeArg("memoryviewslice", arg.type, None)
- ], nogil=True)
- new_node = ExprNodes.PythonCapiCallNode(
- node.pos, "__Pyx_MemoryView_Len", func_type,
- args=[arg], is_temp=node.is_temp)
+ elif arg.type.is_memoryviewslice:
+ func_type = PyrexTypes.CFuncType(
+ PyrexTypes.c_size_t_type, [
+ PyrexTypes.CFuncTypeArg("memoryviewslice", arg.type, None)
+ ], nogil=True)
+ new_node = ExprNodes.PythonCapiCallNode(
+ node.pos, "__Pyx_MemoryView_Len", func_type,
+ args=[arg], is_temp=node.is_temp)
elif arg.type.is_pyobject:
cfunc_name = self._map_to_capi_len_function(arg.type)
if cfunc_name is None:
@@ -2630,7 +2630,7 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
"object of type 'NoneType' has no len()")
new_node = ExprNodes.PythonCapiCallNode(
node.pos, cfunc_name, self.PyObject_Size_func_type,
- args=[arg], is_temp=node.is_temp)
+ args=[arg], is_temp=node.is_temp)
elif arg.type.is_unicode_char:
return ExprNodes.IntNode(node.pos, value='1', constant_result=1,
type=node.type)
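The len() specialisation maps known argument types straight to C: _map_to_capi_len_function supplies the size macro for builtin containers, memoryview slices now get __Pyx_MemoryView_Len, and a single unicode character folds to the constant 1. For example:

def total(b: bytes, l: list, t: tuple) -> int:
    # each len() compiles to the matching macro:
    # PyBytes_GET_SIZE, PyList_GET_SIZE, PyTuple_GET_SIZE
    return len(b) + len(l) + len(t)

assert total(b"ab", [1], (2, 3)) == 5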
@@ -2811,7 +2811,7 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
PyTypeObjectPtr = PyrexTypes.CPtrType(
cython_scope.lookup('PyTypeObject').type)
pyx_tp_new_kwargs_func_type = PyrexTypes.CFuncType(
- ext_type, [
+ ext_type, [
PyrexTypes.CFuncTypeArg("type", PyTypeObjectPtr, None),
PyrexTypes.CFuncTypeArg("args", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("kwargs", PyrexTypes.py_object_type, None),
@@ -2824,7 +2824,7 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
node.pos, slot_func_cname,
pyx_tp_new_kwargs_func_type,
args=[type_arg, args_tuple, kwargs],
- may_return_none=False,
+ may_return_none=False,
is_temp=True)
else:
# arbitrary variable, needs a None check for safety
@@ -2872,69 +2872,69 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
utility_code=load_c_utility('append')
)
- def _handle_simple_method_list_extend(self, node, function, args, is_unbound_method):
- """Replace list.extend([...]) for short sequence literals values by sequential appends
- to avoid creating an intermediate sequence argument.
- """
- if len(args) != 2:
- return node
- obj, value = args
- if not value.is_sequence_constructor:
- return node
- items = list(value.args)
- if value.mult_factor is not None or len(items) > 8:
- # Appending wins for short sequences but slows down when multiple resize operations are needed.
- # This seems to be a good enough limit that avoids repeated resizing.
- if False and isinstance(value, ExprNodes.ListNode):
- # One would expect that tuples are more efficient here, but benchmarking with
- # Py3.5 and Py3.7 suggests that they are not. Probably worth revisiting at some point.
- # Might be related to the usage of PySequence_FAST() in CPython's list.extend(),
- # which is probably tuned more towards lists than tuples (and rightly so).
- tuple_node = args[1].as_tuple().analyse_types(self.current_env(), skip_children=True)
- Visitor.recursively_replace_node(node, args[1], tuple_node)
- return node
- wrapped_obj = self._wrap_self_arg(obj, function, is_unbound_method, 'extend')
- if not items:
- # Empty sequences are not likely to occur, but why waste a call to list.extend() for them?
- wrapped_obj.result_is_used = node.result_is_used
- return wrapped_obj
- cloned_obj = obj = wrapped_obj
- if len(items) > 1 and not obj.is_simple():
- cloned_obj = UtilNodes.LetRefNode(obj)
- # Use ListComp_Append() for all but the last item and finish with PyList_Append()
- # to shrink the list storage size at the very end if necessary.
- temps = []
- arg = items[-1]
- if not arg.is_simple():
- arg = UtilNodes.LetRefNode(arg)
- temps.append(arg)
- new_node = ExprNodes.PythonCapiCallNode(
- node.pos, "__Pyx_PyList_Append", self.PyObject_Append_func_type,
- args=[cloned_obj, arg],
- is_temp=True,
- utility_code=load_c_utility("ListAppend"))
- for arg in items[-2::-1]:
- if not arg.is_simple():
- arg = UtilNodes.LetRefNode(arg)
- temps.append(arg)
- new_node = ExprNodes.binop_node(
- node.pos, '|',
- ExprNodes.PythonCapiCallNode(
- node.pos, "__Pyx_ListComp_Append", self.PyObject_Append_func_type,
- args=[cloned_obj, arg], py_name="extend",
- is_temp=True,
- utility_code=load_c_utility("ListCompAppend")),
- new_node,
- type=PyrexTypes.c_returncode_type,
- )
- new_node.result_is_used = node.result_is_used
- if cloned_obj is not obj:
- temps.append(cloned_obj)
- for temp in temps:
- new_node = UtilNodes.EvalWithTempExprNode(temp, new_node)
- new_node.result_is_used = node.result_is_used
- return new_node
-
+ def _handle_simple_method_list_extend(self, node, function, args, is_unbound_method):
+ """Replace list.extend([...]) for short sequence literals values by sequential appends
+ to avoid creating an intermediate sequence argument.
+ """
+ if len(args) != 2:
+ return node
+ obj, value = args
+ if not value.is_sequence_constructor:
+ return node
+ items = list(value.args)
+ if value.mult_factor is not None or len(items) > 8:
+ # Appending wins for short sequences but slows down when multiple resize operations are needed.
+ # This seems to be a good enough limit that avoids repeated resizing.
+ if False and isinstance(value, ExprNodes.ListNode):
+ # One would expect that tuples are more efficient here, but benchmarking with
+ # Py3.5 and Py3.7 suggests that they are not. Probably worth revisiting at some point.
+ # Might be related to the usage of PySequence_FAST() in CPython's list.extend(),
+ # which is probably tuned more towards lists than tuples (and rightly so).
+ tuple_node = args[1].as_tuple().analyse_types(self.current_env(), skip_children=True)
+ Visitor.recursively_replace_node(node, args[1], tuple_node)
+ return node
+ wrapped_obj = self._wrap_self_arg(obj, function, is_unbound_method, 'extend')
+ if not items:
+ # Empty sequences are not likely to occur, but why waste a call to list.extend() for them?
+ wrapped_obj.result_is_used = node.result_is_used
+ return wrapped_obj
+ cloned_obj = obj = wrapped_obj
+ if len(items) > 1 and not obj.is_simple():
+ cloned_obj = UtilNodes.LetRefNode(obj)
+ # Use ListComp_Append() for all but the last item and finish with PyList_Append()
+ # to shrink the list storage size at the very end if necessary.
+ temps = []
+ arg = items[-1]
+ if not arg.is_simple():
+ arg = UtilNodes.LetRefNode(arg)
+ temps.append(arg)
+ new_node = ExprNodes.PythonCapiCallNode(
+ node.pos, "__Pyx_PyList_Append", self.PyObject_Append_func_type,
+ args=[cloned_obj, arg],
+ is_temp=True,
+ utility_code=load_c_utility("ListAppend"))
+ for arg in items[-2::-1]:
+ if not arg.is_simple():
+ arg = UtilNodes.LetRefNode(arg)
+ temps.append(arg)
+ new_node = ExprNodes.binop_node(
+ node.pos, '|',
+ ExprNodes.PythonCapiCallNode(
+ node.pos, "__Pyx_ListComp_Append", self.PyObject_Append_func_type,
+ args=[cloned_obj, arg], py_name="extend",
+ is_temp=True,
+ utility_code=load_c_utility("ListCompAppend")),
+ new_node,
+ type=PyrexTypes.c_returncode_type,
+ )
+ new_node.result_is_used = node.result_is_used
+ if cloned_obj is not obj:
+ temps.append(cloned_obj)
+ for temp in temps:
+ new_node = UtilNodes.EvalWithTempExprNode(temp, new_node)
+ new_node.result_is_used = node.result_is_used
+ return new_node
+
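The method above unrolls a short sequence-literal argument to list.extend() (at most 8 items, no multiplier) into chained append calls: __Pyx_ListComp_Append for all but the last item and __Pyx_PyList_Append for the final one, so the list storage can be trimmed at the end. An empty literal collapses to evaluating the object alone. For example:

items, a, b, c = [], 1, 2, 3
items.extend([a, b, c])   # unrolled into three C-level append calls;
                          # no temporary [a, b, c] list is materialised
items.extend([])          # folded away entirely (no call is emitted)
assert items == [1, 2, 3]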
PyByteArray_Append_func_type = PyrexTypes.CFuncType(
PyrexTypes.c_returncode_type, [
PyrexTypes.CFuncTypeArg("bytearray", PyrexTypes.py_object_type, None),
@@ -3010,7 +3010,7 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
if is_list:
type_name = 'List'
obj = obj.as_none_safe_node(
- "'NoneType' object has no attribute '%.30s'",
+ "'NoneType' object has no attribute '%.30s'",
error="PyExc_AttributeError",
format_args=['pop'])
else:
@@ -3140,29 +3140,29 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
may_return_none=True,
utility_code=load_c_utility('dict_setdefault'))
- PyDict_Pop_func_type = PyrexTypes.CFuncType(
- PyrexTypes.py_object_type, [
- PyrexTypes.CFuncTypeArg("dict", PyrexTypes.py_object_type, None),
- PyrexTypes.CFuncTypeArg("key", PyrexTypes.py_object_type, None),
- PyrexTypes.CFuncTypeArg("default", PyrexTypes.py_object_type, None),
- ])
-
- def _handle_simple_method_dict_pop(self, node, function, args, is_unbound_method):
- """Replace dict.pop() by a call to _PyDict_Pop().
- """
- if len(args) == 2:
- args.append(ExprNodes.NullNode(node.pos))
- elif len(args) != 3:
- self._error_wrong_arg_count('dict.pop', node, args, "2 or 3")
- return node
-
- return self._substitute_method_call(
- node, function,
- "__Pyx_PyDict_Pop", self.PyDict_Pop_func_type,
- 'pop', is_unbound_method, args,
- may_return_none=True,
- utility_code=load_c_utility('py_dict_pop'))
-
+ PyDict_Pop_func_type = PyrexTypes.CFuncType(
+ PyrexTypes.py_object_type, [
+ PyrexTypes.CFuncTypeArg("dict", PyrexTypes.py_object_type, None),
+ PyrexTypes.CFuncTypeArg("key", PyrexTypes.py_object_type, None),
+ PyrexTypes.CFuncTypeArg("default", PyrexTypes.py_object_type, None),
+ ])
+
+ def _handle_simple_method_dict_pop(self, node, function, args, is_unbound_method):
+ """Replace dict.pop() by a call to _PyDict_Pop().
+ """
+ if len(args) == 2:
+ args.append(ExprNodes.NullNode(node.pos))
+ elif len(args) != 3:
+ self._error_wrong_arg_count('dict.pop', node, args, "2 or 3")
+ return node
+
+ return self._substitute_method_call(
+ node, function,
+ "__Pyx_PyDict_Pop", self.PyDict_Pop_func_type,
+ 'pop', is_unbound_method, args,
+ may_return_none=True,
+ utility_code=load_c_utility('py_dict_pop'))
+
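Both arities of dict.pop() funnel into one helper: the two-argument compiler form (self plus key) passes NULL as the default, which makes __Pyx_PyDict_Pop raise KeyError on a missing key, while an explicit default is returned instead of raising. The behaviour it reproduces:

d = {"k": 1}
assert d.pop("k", None) == 1      # -> __Pyx_PyDict_Pop(d, "k", Py_None)
assert d.pop("k", None) is None   # default returned, no exception
try:
    d.pop("missing")              # default slot is NULL -> KeyError
except KeyError:
    pass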
Pyx_BinopInt_func_types = dict(
((ctype, ret_type), PyrexTypes.CFuncType(
ret_type, [
@@ -3635,7 +3635,7 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
PyrexTypes.CFuncTypeArg("obj", Builtin.unicode_type, None),
])
- _special_encodings = ['UTF8', 'UTF16', 'UTF-16LE', 'UTF-16BE', 'Latin1', 'ASCII',
+ _special_encodings = ['UTF8', 'UTF16', 'UTF-16LE', 'UTF-16BE', 'Latin1', 'ASCII',
'unicode_escape', 'raw_unicode_escape']
_special_codecs = [ (name, codecs.getencoder(name))
@@ -3677,7 +3677,7 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
if encoding and error_handling == 'strict':
# try to find a specific encoder function
codec_name = self._find_special_codec_name(encoding)
- if codec_name is not None and '-' not in codec_name:
+ if codec_name is not None and '-' not in codec_name:
encode_function = "PyUnicode_As%sString" % codec_name
return self._substitute_method_call(
node, function, encode_function,
@@ -3747,7 +3747,7 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
format_args=['decode', string_type.name])
else:
string_node = string_node.as_none_safe_node(
- "'NoneType' object has no attribute '%.30s'",
+ "'NoneType' object has no attribute '%.30s'",
error="PyExc_AttributeError",
format_args=['decode'])
elif not string_type.is_string and not string_type.is_cpp_string:
@@ -3771,12 +3771,12 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
if encoding is not None:
codec_name = self._find_special_codec_name(encoding)
if codec_name is not None:
- if codec_name in ('UTF16', 'UTF-16LE', 'UTF-16BE'):
- codec_cname = "__Pyx_PyUnicode_Decode%s" % codec_name.replace('-', '')
- else:
- codec_cname = "PyUnicode_Decode%s" % codec_name
+ if codec_name in ('UTF16', 'UTF-16LE', 'UTF-16BE'):
+ codec_cname = "__Pyx_PyUnicode_Decode%s" % codec_name.replace('-', '')
+ else:
+ codec_cname = "PyUnicode_Decode%s" % codec_name
decode_function = ExprNodes.RawCNameExprNode(
- node.pos, type=self.PyUnicode_DecodeXyz_func_ptr_type, cname=codec_cname)
+ node.pos, type=self.PyUnicode_DecodeXyz_func_ptr_type, cname=codec_cname)
encoding_node = ExprNodes.NullNode(node.pos)
else:
decode_function = ExprNodes.NullNode(node.pos)
@@ -3936,8 +3936,8 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
may_return_none=ExprNodes.PythonCapiCallNode.may_return_none,
with_none_check=True):
args = list(args)
- if with_none_check and args:
- args[0] = self._wrap_self_arg(args[0], function, is_unbound_method, attr_name)
+ if with_none_check and args:
+ args[0] = self._wrap_self_arg(args[0], function, is_unbound_method, attr_name)
if is_temp is None:
is_temp = node.is_temp
return ExprNodes.PythonCapiCallNode(
@@ -3949,20 +3949,20 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
result_is_used = node.result_is_used,
)
- def _wrap_self_arg(self, self_arg, function, is_unbound_method, attr_name):
- if self_arg.is_literal:
- return self_arg
- if is_unbound_method:
- self_arg = self_arg.as_none_safe_node(
- "descriptor '%s' requires a '%s' object but received a 'NoneType'",
- format_args=[attr_name, self_arg.type.name])
- else:
- self_arg = self_arg.as_none_safe_node(
- "'NoneType' object has no attribute '%{0}s'".format('.30' if len(attr_name) <= 30 else ''),
- error="PyExc_AttributeError",
- format_args=[attr_name])
- return self_arg
-
+ def _wrap_self_arg(self, self_arg, function, is_unbound_method, attr_name):
+ if self_arg.is_literal:
+ return self_arg
+ if is_unbound_method:
+ self_arg = self_arg.as_none_safe_node(
+ "descriptor '%s' requires a '%s' object but received a 'NoneType'",
+ format_args=[attr_name, self_arg.type.name])
+ else:
+ self_arg = self_arg.as_none_safe_node(
+ "'NoneType' object has no attribute '%{0}s'".format('.30' if len(attr_name) <= 30 else ''),
+ error="PyExc_AttributeError",
+ format_args=[attr_name])
+ return self_arg
+
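_substitute_method_call now routes the self argument through _wrap_self_arg, which skips literals and otherwise injects a None check mimicking CPython's error messages: a descriptor-style message for unbound calls and an AttributeError for bound ones, with the attribute name truncated to 30 characters when needed. The bound-call behaviour it reproduces:

s = None
try:
    s.upper()      # the injected check raises exactly this:
except AttributeError as exc:
    assert "'NoneType' object has no attribute" in str(exc)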
def _inject_int_default_argument(self, node, args, arg_index, type, default_value):
assert len(args) >= arg_index
if len(args) == arg_index:
@@ -4231,48 +4231,48 @@ class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations):
if isinstance(node.operand1, ExprNodes.IntNode) and \
node.operand2.is_sequence_constructor:
return self._calculate_constant_seq(node, node.operand2, node.operand1)
- if node.operand1.is_string_literal:
- return self._multiply_string(node, node.operand1, node.operand2)
- elif node.operand2.is_string_literal:
- return self._multiply_string(node, node.operand2, node.operand1)
+ if node.operand1.is_string_literal:
+ return self._multiply_string(node, node.operand1, node.operand2)
+ elif node.operand2.is_string_literal:
+ return self._multiply_string(node, node.operand2, node.operand1)
return self.visit_BinopNode(node)
- def _multiply_string(self, node, string_node, multiplier_node):
- multiplier = multiplier_node.constant_result
- if not isinstance(multiplier, _py_int_types):
- return node
- if not (node.has_constant_result() and isinstance(node.constant_result, _py_string_types)):
- return node
- if len(node.constant_result) > 256:
- # Too long for static creation, leave it to runtime. (-> arbitrary limit)
- return node
-
- build_string = encoded_string
- if isinstance(string_node, ExprNodes.BytesNode):
- build_string = bytes_literal
- elif isinstance(string_node, ExprNodes.StringNode):
- if string_node.unicode_value is not None:
- string_node.unicode_value = encoded_string(
- string_node.unicode_value * multiplier,
- string_node.unicode_value.encoding)
+ def _multiply_string(self, node, string_node, multiplier_node):
+ multiplier = multiplier_node.constant_result
+ if not isinstance(multiplier, _py_int_types):
+ return node
+ if not (node.has_constant_result() and isinstance(node.constant_result, _py_string_types)):
+ return node
+ if len(node.constant_result) > 256:
+ # Too long for static creation, leave it to runtime. (-> arbitrary limit)
+ return node
+
+ build_string = encoded_string
+ if isinstance(string_node, ExprNodes.BytesNode):
+ build_string = bytes_literal
+ elif isinstance(string_node, ExprNodes.StringNode):
+ if string_node.unicode_value is not None:
+ string_node.unicode_value = encoded_string(
+ string_node.unicode_value * multiplier,
+ string_node.unicode_value.encoding)
build_string = encoded_string if string_node.value.is_unicode else bytes_literal
- elif isinstance(string_node, ExprNodes.UnicodeNode):
- if string_node.bytes_value is not None:
- string_node.bytes_value = bytes_literal(
- string_node.bytes_value * multiplier,
- string_node.bytes_value.encoding)
- else:
- assert False, "unknown string node type: %s" % type(string_node)
+ elif isinstance(string_node, ExprNodes.UnicodeNode):
+ if string_node.bytes_value is not None:
+ string_node.bytes_value = bytes_literal(
+ string_node.bytes_value * multiplier,
+ string_node.bytes_value.encoding)
+ else:
+ assert False, "unknown string node type: %s" % type(string_node)
string_node.value = build_string(
- string_node.value * multiplier,
- string_node.value.encoding)
+ string_node.value * multiplier,
+ string_node.value.encoding)
# follow constant-folding and use unicode_value in preference
if isinstance(string_node, ExprNodes.StringNode) and string_node.unicode_value is not None:
string_node.constant_result = string_node.unicode_value
else:
string_node.constant_result = string_node.value
- return string_node
-
+ return string_node
+
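_multiply_string performs the repetition at compile time when both operands are constants, keeping bytes and unicode variants (and a StringNode's parallel unicode_value) in sync, and bails out beyond 256 characters so large results are built at runtime. E.g.:

ruler = u"-" * 40          # folded to a single 40-character literal constant
header = u"=" * 3 + u"hi"  # the folded result feeds further constant folding
big = u"x" * 100000        # > 256 chars: left for the runtime to build
assert len(ruler) == 40 and header == u"===hi"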
def _calculate_constant_seq(self, node, sequence_node, factor):
if factor.constant_result != 1 and sequence_node.args:
if isinstance(factor.constant_result, _py_int_types) and factor.constant_result <= 0:
@@ -4292,57 +4292,57 @@ class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations):
sequence_node.mult_factor = factor
return sequence_node
- def visit_ModNode(self, node):
- self.visitchildren(node)
- if isinstance(node.operand1, ExprNodes.UnicodeNode) and isinstance(node.operand2, ExprNodes.TupleNode):
- if not node.operand2.mult_factor:
- fstring = self._build_fstring(node.operand1.pos, node.operand1.value, node.operand2.args)
- if fstring is not None:
- return fstring
- return self.visit_BinopNode(node)
-
- _parse_string_format_regex = (
+ def visit_ModNode(self, node):
+ self.visitchildren(node)
+ if isinstance(node.operand1, ExprNodes.UnicodeNode) and isinstance(node.operand2, ExprNodes.TupleNode):
+ if not node.operand2.mult_factor:
+ fstring = self._build_fstring(node.operand1.pos, node.operand1.value, node.operand2.args)
+ if fstring is not None:
+ return fstring
+ return self.visit_BinopNode(node)
+
+ _parse_string_format_regex = (
u'(%(?:' # %...
u'(?:[-0-9]+|[ ])?' # width (optional) or space prefix fill character (optional)
u'(?:[.][0-9]+)?' # precision (optional)
u')?.)' # format type (or something different for unsupported formats)
- )
-
- def _build_fstring(self, pos, ustring, format_args):
- # Issues formatting warnings instead of errors since we really only catch a few errors by accident.
- args = iter(format_args)
- substrings = []
- can_be_optimised = True
- for s in re.split(self._parse_string_format_regex, ustring):
- if not s:
- continue
- if s == u'%%':
- substrings.append(ExprNodes.UnicodeNode(pos, value=EncodedString(u'%'), constant_result=u'%'))
- continue
- if s[0] != u'%':
- if s[-1] == u'%':
- warning(pos, "Incomplete format: '...%s'" % s[-3:], level=1)
- can_be_optimised = False
- substrings.append(ExprNodes.UnicodeNode(pos, value=EncodedString(s), constant_result=s))
- continue
- format_type = s[-1]
- try:
- arg = next(args)
- except StopIteration:
- warning(pos, "Too few arguments for format placeholders", level=1)
- can_be_optimised = False
- break
+ )
+
+ def _build_fstring(self, pos, ustring, format_args):
+ # Issues formatting warnings instead of errors since we really only catch a few errors by accident.
+ args = iter(format_args)
+ substrings = []
+ can_be_optimised = True
+ for s in re.split(self._parse_string_format_regex, ustring):
+ if not s:
+ continue
+ if s == u'%%':
+ substrings.append(ExprNodes.UnicodeNode(pos, value=EncodedString(u'%'), constant_result=u'%'))
+ continue
+ if s[0] != u'%':
+ if s[-1] == u'%':
+ warning(pos, "Incomplete format: '...%s'" % s[-3:], level=1)
+ can_be_optimised = False
+ substrings.append(ExprNodes.UnicodeNode(pos, value=EncodedString(s), constant_result=s))
+ continue
+ format_type = s[-1]
+ try:
+ arg = next(args)
+ except StopIteration:
+ warning(pos, "Too few arguments for format placeholders", level=1)
+ can_be_optimised = False
+ break
if arg.is_starred:
can_be_optimised = False
break
if format_type in u'asrfdoxX':
- format_spec = s[1:]
+ format_spec = s[1:]
conversion_char = None
- if format_type in u'doxX' and u'.' in format_spec:
- # Precision is not allowed for integers in format(), but ok in %-formatting.
- can_be_optimised = False
+ if format_type in u'doxX' and u'.' in format_spec:
+ # Precision is not allowed for integers in format(), but ok in %-formatting.
+ can_be_optimised = False
elif format_type in u'ars':
- format_spec = format_spec[:-1]
+ format_spec = format_spec[:-1]
conversion_char = format_type
if format_spec.startswith('0'):
format_spec = '>' + format_spec[1:] # right-alignment '%05s' spells '{:>5}'
@@ -4353,49 +4353,49 @@ class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations):
if format_spec.startswith('-'):
format_spec = '<' + format_spec[1:] # left-alignment '%-5s' spells '{:<5}'
- substrings.append(ExprNodes.FormattedValueNode(
- arg.pos, value=arg,
+ substrings.append(ExprNodes.FormattedValueNode(
+ arg.pos, value=arg,
conversion_char=conversion_char,
- format_spec=ExprNodes.UnicodeNode(
- pos, value=EncodedString(format_spec), constant_result=format_spec)
- if format_spec else None,
- ))
- else:
- # keep it simple for now ...
- can_be_optimised = False
+ format_spec=ExprNodes.UnicodeNode(
+ pos, value=EncodedString(format_spec), constant_result=format_spec)
+ if format_spec else None,
+ ))
+ else:
+ # keep it simple for now ...
+ can_be_optimised = False
break
-
- if not can_be_optimised:
- # Print all warnings we can find before finally giving up here.
- return None
-
- try:
- next(args)
- except StopIteration: pass
- else:
- warning(pos, "Too many arguments for format placeholders", level=1)
- return None
-
- node = ExprNodes.JoinedStrNode(pos, values=substrings)
- return self.visit_JoinedStrNode(node)
-
+
+ if not can_be_optimised:
+ # Print all warnings we can find before finally giving up here.
+ return None
+
+ try:
+ next(args)
+ except StopIteration: pass
+ else:
+ warning(pos, "Too many arguments for format placeholders", level=1)
+ return None
+
+ node = ExprNodes.JoinedStrNode(pos, values=substrings)
+ return self.visit_JoinedStrNode(node)
+
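_build_fstring re-parses a constant %-format string against a literal argument tuple and, when every placeholder is one of the supported types ('asrfdoxX' plus '%%'), rebuilds the whole expression as a JoinedStrNode of FormattedValueNodes; zero-padding and left-alignment flags are translated to '>' and '<', and mismatched argument counts only produce warnings before falling back to runtime formatting. The effect:

name, n = u"world", 5
msg = u"%s: %3d" % (name, n)    # folded as if written f"{name}: {n:3d}"
pct = u"100%% done" % ()        # '%%' folds to a literal percent sign
assert msg == u"world:   5" and pct == u"100% done"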
def visit_FormattedValueNode(self, node):
self.visitchildren(node)
- conversion_char = node.conversion_char or 's'
+ conversion_char = node.conversion_char or 's'
if isinstance(node.format_spec, ExprNodes.UnicodeNode) and not node.format_spec.value:
node.format_spec = None
- if node.format_spec is None and isinstance(node.value, ExprNodes.IntNode):
- value = EncodedString(node.value.value)
- if value.isdigit():
- return ExprNodes.UnicodeNode(node.value.pos, value=value, constant_result=value)
- if node.format_spec is None and conversion_char == 's':
- value = None
- if isinstance(node.value, ExprNodes.UnicodeNode):
- value = node.value.value
- elif isinstance(node.value, ExprNodes.StringNode):
- value = node.value.unicode_value
- if value is not None:
- return ExprNodes.UnicodeNode(node.value.pos, value=value, constant_result=value)
+ if node.format_spec is None and isinstance(node.value, ExprNodes.IntNode):
+ value = EncodedString(node.value.value)
+ if value.isdigit():
+ return ExprNodes.UnicodeNode(node.value.pos, value=value, constant_result=value)
+ if node.format_spec is None and conversion_char == 's':
+ value = None
+ if isinstance(node.value, ExprNodes.UnicodeNode):
+ value = node.value.value
+ elif isinstance(node.value, ExprNodes.StringNode):
+ value = node.value.unicode_value
+ if value is not None:
+ return ExprNodes.UnicodeNode(node.value.pos, value=value, constant_result=value)
return node
def visit_JoinedStrNode(self, node):
@@ -4413,8 +4413,8 @@ class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations):
substrings = list(substrings)
unode = substrings[0]
if len(substrings) > 1:
- value = EncodedString(u''.join(value.value for value in substrings))
- unode = ExprNodes.UnicodeNode(unode.pos, value=value, constant_result=value)
+ value = EncodedString(u''.join(value.value for value in substrings))
+ unode = ExprNodes.UnicodeNode(unode.pos, value=value, constant_result=value)
# ignore empty Unicode strings
if unode.value:
values.append(unode)
@@ -4422,8 +4422,8 @@ class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations):
values.extend(substrings)
if not values:
- value = EncodedString('')
- node = ExprNodes.UnicodeNode(node.pos, value=value, constant_result=value)
+ value = EncodedString('')
+ node = ExprNodes.UnicodeNode(node.pos, value=value, constant_result=value)
elif len(values) == 1:
node = values[0]
elif len(values) == 2:
@@ -4713,7 +4713,7 @@ class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations):
visit_Node = Visitor.VisitorTransform.recurse_to_children
-class FinalOptimizePhase(Visitor.EnvTransform, Visitor.NodeRefCleanupMixin):
+class FinalOptimizePhase(Visitor.EnvTransform, Visitor.NodeRefCleanupMixin):
"""
This visitor handles several commuting optimizations, and is run
just before the C code generation phase.
@@ -4722,11 +4722,11 @@ class FinalOptimizePhase(Visitor.EnvTransform, Visitor.NodeRefCleanupMixin):
- eliminate None assignment and refcounting for first assignment.
- isinstance -> typecheck for cdef types
- eliminate checks for None and/or types that became redundant after tree changes
- - eliminate useless string formatting steps
+ - eliminate useless string formatting steps
- replace Python function calls that look like method calls by a faster PyMethodCallNode
"""
- in_loop = False
-
+ in_loop = False
+
def visit_SingleAssignmentNode(self, node):
"""Avoid redundant initialisation of local variables before their
first assignment.
@@ -4753,10 +4753,10 @@ class FinalOptimizePhase(Visitor.EnvTransform, Visitor.NodeRefCleanupMixin):
function.type = function.entry.type
PyTypeObjectPtr = PyrexTypes.CPtrType(cython_scope.lookup('PyTypeObject').type)
node.args[1] = ExprNodes.CastNode(node.args[1], PyTypeObjectPtr)
- elif (node.is_temp and function.type.is_pyobject and self.current_directives.get(
- "optimize.unpack_method_calls_in_pyinit"
- if not self.in_loop and self.current_env().is_module_scope
- else "optimize.unpack_method_calls")):
+ elif (node.is_temp and function.type.is_pyobject and self.current_directives.get(
+ "optimize.unpack_method_calls_in_pyinit"
+ if not self.in_loop and self.current_env().is_module_scope
+ else "optimize.unpack_method_calls")):
# optimise simple Python methods calls
if isinstance(node.arg_tuple, ExprNodes.TupleNode) and not (
node.arg_tuple.mult_factor or (node.arg_tuple.is_literal and len(node.arg_tuple.args) > 1)):
@@ -4787,11 +4787,11 @@ class FinalOptimizePhase(Visitor.EnvTransform, Visitor.NodeRefCleanupMixin):
node, function=function, arg_tuple=node.arg_tuple, type=node.type))
return node
- def visit_NumPyMethodCallNode(self, node):
- # Exclude from replacement above.
- self.visitchildren(node)
- return node
-
+ def visit_NumPyMethodCallNode(self, node):
+ # Exclude from replacement above.
+ self.visitchildren(node)
+ return node
+
def visit_PyTypeTestNode(self, node):
"""Remove tests for alternatively allowed None values from
type tests when we know that the argument cannot be None
@@ -4812,16 +4812,16 @@ class FinalOptimizePhase(Visitor.EnvTransform, Visitor.NodeRefCleanupMixin):
return node.arg
return node
- def visit_LoopNode(self, node):
- """Remember when we enter a loop as some expensive optimisations might still be worth it there.
- """
- old_val = self.in_loop
- self.in_loop = True
- self.visitchildren(node)
- self.in_loop = old_val
- return node
-
-
+ def visit_LoopNode(self, node):
+ """Remember when we enter a loop as some expensive optimisations might still be worth it there.
+ """
+ old_val = self.in_loop
+ self.in_loop = True
+ self.visitchildren(node)
+ self.in_loop = old_val
+ return node
+
+
class ConsolidateOverflowCheck(Visitor.CythonTransform):
"""
This class facilitates the sharing of overflow checking among all nodes
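
The joined-string folding restored in the hunks above merges adjacent constant substrings of an f-string and drops empty ones, so a literal like f"abc{'def'}ghi" collapses to a single constant. A minimal sketch in plain Python (list/str stand-ins, not the compiler's node types):

    def fold_joined(parts):
        values, run = [], []
        for part in parts:
            if isinstance(part, str):          # already-constant substring
                run.append(part)
            else:                              # non-constant formatted value
                merged = ''.join(run)
                if merged:                     # ignore empty unicode strings
                    values.append(merged)
                run = []
                values.append(part)
        merged = ''.join(run)
        if merged:
            values.append(merged)
        return values if values else ['']      # empty f-string -> one empty literal

    print(fold_joined(['abc', 'def', 'ghi']))  # ['abcdefghi']
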
diff --git a/contrib/tools/cython/Cython/Compiler/Options.py b/contrib/tools/cython/Cython/Compiler/Options.py
index d29fd6b6c8..b3ffbcd927 100644
--- a/contrib/tools/cython/Cython/Compiler/Options.py
+++ b/contrib/tools/cython/Cython/Compiler/Options.py
@@ -9,10 +9,10 @@ class ShouldBeFromDirective(object):
known_directives = []
- def __init__(self, options_name, directive_name=None, disallow=False):
+ def __init__(self, options_name, directive_name=None, disallow=False):
self.options_name = options_name
self.directive_name = directive_name or options_name
- self.disallow = disallow
+ self.disallow = disallow
self.known_directives.append(self)
def __nonzero__(self):
@@ -150,10 +150,10 @@ buffer_max_dims = 8
#: Number of function closure instances to keep in a freelist (0: no freelists)
closure_freelist_size = 8
-# Arcadia specific
-source_root = None
+# Arcadia specific
+source_root = None
+
-
def get_directive_defaults():
# To add an item to this list, all accesses should be changed to use the new
# directive, and the global option itself should be set to an instance of
@@ -178,7 +178,7 @@ _directive_defaults = {
'initializedcheck' : True,
'embedsignature' : False,
'auto_cpdef': False,
- 'auto_pickle': None,
+ 'auto_pickle': None,
'cdivision': False, # was True before 0.12
'cdivision_warnings': False,
'c_api_binop_methods': True,
@@ -202,15 +202,15 @@ _directive_defaults = {
'language_level': None,
'fast_getattr': False, # Undocumented until we come up with a better way to handle this everywhere.
'py2_import': False, # For backward compatibility of Cython's source code in Py3 source mode
- 'preliminary_late_includes_cy28': False, # Temporary directive in 0.28, to be removed in a later version (see GH#2079).
- 'iterable_coroutine': False, # Make async coroutines backwards compatible with the old asyncio yield-from syntax.
+ 'preliminary_late_includes_cy28': False, # Temporary directive in 0.28, to be removed in a later version (see GH#2079).
+ 'iterable_coroutine': False, # Make async coroutines backwards compatible with the old asyncio yield-from syntax.
'c_string_type': 'bytes',
'c_string_encoding': '',
'type_version_tag': True, # enables Py_TPFLAGS_HAVE_VERSION_TAG on extension types
- 'unraisable_tracebacks': True,
+ 'unraisable_tracebacks': True,
'old_style_globals': False,
- 'np_pythran': False,
- 'fast_gil': False,
+ 'np_pythran': False,
+ 'fast_gil': False,
# set __file__ and/or __path__ to known source/target path at import time (instead of not having them available)
'set_initial_path' : None, # SOURCEFILE or "/full/path/to/module"
@@ -301,7 +301,7 @@ def normalise_encoding_name(option_name, encoding):
# Override types possibilities above, if needed
directive_types = {
'language_level': str, # values can be None/2/3/'3str', where None == 2+warning
- 'auto_pickle': bool,
+ 'auto_pickle': bool,
'locals': dict,
'final' : bool, # final cdef classes and methods
'nogil' : bool,
@@ -329,15 +329,15 @@ for key, val in _directive_defaults.items():
directive_scopes = { # defaults to available everywhere
# 'module', 'function', 'class', 'with statement'
- 'auto_pickle': ('module', 'cclass'),
+ 'auto_pickle': ('module', 'cclass'),
'final' : ('cclass', 'function'),
'nogil' : ('function', 'with statement'),
'inline' : ('function',),
'cfunc' : ('function', 'with statement'),
'ccall' : ('function', 'with statement'),
- 'returns' : ('function',),
- 'exceptval' : ('function',),
- 'locals' : ('function',),
+ 'returns' : ('function',),
+ 'exceptval' : ('function',),
+ 'locals' : ('function',),
'staticmethod' : ('function',), # FIXME: analysis currently lacks more specific function scope
'no_gc_clear' : ('cclass',),
'no_gc' : ('cclass',),
@@ -360,9 +360,9 @@ directive_scopes = { # defaults to available everywhere
# globals() could conceivably be controlled at a finer granularity,
# but that would complicate the implementation
'old_style_globals': ('module',),
- 'np_pythran': ('module',),
- 'fast_gil': ('module',),
- 'iterable_coroutine': ('module', 'function'),
+ 'np_pythran': ('module',),
+ 'fast_gil': ('module',),
+ 'iterable_coroutine': ('module', 'function'),
}
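
To make the directive tables above concrete, here is a hypothetical .pyx fragment (a sketch, not part of the patch) that places one module-scope and one cclass-scope directive where directive_scopes allows them:

    # cython: fast_gil=True
    # ^ module-scope directive, given in the header comment
    import cython

    @cython.auto_pickle(True)   # 'cclass'-scope directive per directive_scopes
    cdef class Point:
        cdef public double x, y
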
diff --git a/contrib/tools/cython/Cython/Compiler/ParseTreeTransforms.pxd b/contrib/tools/cython/Cython/Compiler/ParseTreeTransforms.pxd
index 0f40c75160..2c17901fa4 100644
--- a/contrib/tools/cython/Cython/Compiler/ParseTreeTransforms.pxd
+++ b/contrib/tools/cython/Cython/Compiler/ParseTreeTransforms.pxd
@@ -43,33 +43,33 @@ cdef class ExpandInplaceOperators(EnvTransform):
cdef class AlignFunctionDefinitions(CythonTransform):
cdef dict directives
- cdef set imported_names
- cdef object scope
+ cdef set imported_names
+ cdef object scope
-@cython.final
+@cython.final
cdef class YieldNodeCollector(TreeVisitor):
cdef public list yields
cdef public list returns
- cdef public list finallys
- cdef public list excepts
+ cdef public list finallys
+ cdef public list excepts
cdef public bint has_return_value
- cdef public bint has_yield
- cdef public bint has_await
+ cdef public bint has_yield
+ cdef public bint has_await
-@cython.final
+@cython.final
cdef class MarkClosureVisitor(CythonTransform):
cdef bint needs_closure
-@cython.final
+@cython.final
cdef class CreateClosureClasses(CythonTransform):
cdef list path
cdef bint in_lambda
cdef module_scope
cdef generator_class
- cdef create_class_from_scope(self, node, target_module_scope, inner_node=*)
- cdef find_entries_used_in_closures(self, node)
-
+ cdef create_class_from_scope(self, node, target_module_scope, inner_node=*)
+ cdef find_entries_used_in_closures(self, node)
+
#cdef class InjectGilHandling(VisitorTransform, SkipDeclarations):
# cdef bint nogil
diff --git a/contrib/tools/cython/Cython/Compiler/ParseTreeTransforms.py b/contrib/tools/cython/Cython/Compiler/ParseTreeTransforms.py
index 88f028aa0c..0da3670cae 100644
--- a/contrib/tools/cython/Cython/Compiler/ParseTreeTransforms.py
+++ b/contrib/tools/cython/Cython/Compiler/ParseTreeTransforms.py
@@ -7,7 +7,7 @@ cython.declare(PyrexTypes=object, Naming=object, ExprNodes=object, Nodes=object,
error=object, warning=object, copy=object, _unicode=object)
import copy
-import hashlib
+import hashlib
from . import PyrexTypes
from . import Naming
@@ -15,7 +15,7 @@ from . import ExprNodes
from . import Nodes
from . import Options
from . import Builtin
-from . import Errors
+from . import Errors
from .Visitor import VisitorTransform, TreeVisitor
from .Visitor import CythonTransform, EnvTransform, ScopeTrackingTransform
@@ -180,7 +180,7 @@ class PostParse(ScopeTrackingTransform):
# unpack a lambda expression into the corresponding DefNode
collector = YieldNodeCollector()
collector.visitchildren(node.result_expr)
- if collector.has_yield or collector.has_await or isinstance(node.result_expr, ExprNodes.YieldExprNode):
+ if collector.has_yield or collector.has_await or isinstance(node.result_expr, ExprNodes.YieldExprNode):
body = Nodes.ExprStatNode(
node.result_expr.pos, expr=node.result_expr)
else:
@@ -196,25 +196,25 @@ class PostParse(ScopeTrackingTransform):
def visit_GeneratorExpressionNode(self, node):
# unpack a generator expression into the corresponding DefNode
- collector = YieldNodeCollector()
- collector.visitchildren(node.loop)
- node.def_node = Nodes.DefNode(
- node.pos, name=node.name, doc=None,
- args=[], star_arg=None, starstar_arg=None,
- body=node.loop, is_async_def=collector.has_await)
- self.visitchildren(node)
- return node
-
- def visit_ComprehensionNode(self, node):
- # enforce local scope also in Py2 for async generators (seriously, that's a Py3.6 feature...)
- if not node.has_local_scope:
- collector = YieldNodeCollector()
- collector.visitchildren(node.loop)
- if collector.has_await:
- node.has_local_scope = True
- self.visitchildren(node)
- return node
-
+ collector = YieldNodeCollector()
+ collector.visitchildren(node.loop)
+ node.def_node = Nodes.DefNode(
+ node.pos, name=node.name, doc=None,
+ args=[], star_arg=None, starstar_arg=None,
+ body=node.loop, is_async_def=collector.has_await)
+ self.visitchildren(node)
+ return node
+
+ def visit_ComprehensionNode(self, node):
+ # enforce local scope also in Py2 for async generators (seriously, that's a Py3.6 feature...)
+ if not node.has_local_scope:
+ collector = YieldNodeCollector()
+ collector.visitchildren(node.loop)
+ if collector.has_await:
+ node.has_local_scope = True
+ self.visitchildren(node)
+ return node
+
# cdef variables
def handle_bufferdefaults(self, decl):
if not isinstance(decl.default, ExprNodes.DictNode):
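
The YieldNodeCollector driving the lambda, generator-expression and comprehension handling above can be mimicked with the stdlib ast module; a rough analogue (a sketch, not the Cython TreeVisitor) that records the has_yield/has_await facts these transforms branch on:

    import ast

    class Collector(ast.NodeVisitor):
        def __init__(self):
            self.has_yield = self.has_await = False
        def visit_Yield(self, node):
            self.has_yield = True
        def visit_YieldFrom(self, node):
            self.has_yield = True
        def visit_Await(self, node):
            self.has_await = True

    c = Collector()
    c.visit(ast.parse("async def g():\n    await h()").body[0])
    print(c.has_yield, c.has_await)   # False True
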
@@ -599,29 +599,29 @@ class PxdPostParse(CythonTransform, SkipDeclarations):
else:
return node
-
-class TrackNumpyAttributes(VisitorTransform, SkipDeclarations):
- # TODO: Make name handling as good as in InterpretCompilerDirectives() below - probably best to merge the two.
- def __init__(self):
- super(TrackNumpyAttributes, self).__init__()
- self.numpy_module_names = set()
-
- def visit_CImportStatNode(self, node):
- if node.module_name == u"numpy":
- self.numpy_module_names.add(node.as_name or u"numpy")
- return node
-
- def visit_AttributeNode(self, node):
- self.visitchildren(node)
+
+class TrackNumpyAttributes(VisitorTransform, SkipDeclarations):
+ # TODO: Make name handling as good as in InterpretCompilerDirectives() below - probably best to merge the two.
+ def __init__(self):
+ super(TrackNumpyAttributes, self).__init__()
+ self.numpy_module_names = set()
+
+ def visit_CImportStatNode(self, node):
+ if node.module_name == u"numpy":
+ self.numpy_module_names.add(node.as_name or u"numpy")
+ return node
+
+ def visit_AttributeNode(self, node):
+ self.visitchildren(node)
obj = node.obj
if (obj.is_name and obj.name in self.numpy_module_names) or obj.is_numpy_attribute:
- node.is_numpy_attribute = True
- return node
-
- visit_Node = VisitorTransform.recurse_to_children
-
-
-class InterpretCompilerDirectives(CythonTransform):
+ node.is_numpy_attribute = True
+ return node
+
+ visit_Node = VisitorTransform.recurse_to_children
+
+
+class InterpretCompilerDirectives(CythonTransform):
"""
After parsing, directives can be stored in a number of places:
- #cython-comments at the top of the file (stored in ModuleNode)
@@ -841,16 +841,16 @@ class InterpretCompilerDirectives(CythonTransform):
if node.name in self.cython_module_names:
node.is_cython_module = True
else:
- directive = self.directive_names.get(node.name)
- if directive is not None:
- node.cython_attribute = directive
+ directive = self.directive_names.get(node.name)
+ if directive is not None:
+ node.cython_attribute = directive
+ return node
+
+ def visit_NewExprNode(self, node):
+ self.visit(node.cppclass)
+ self.visitchildren(node)
return node
- def visit_NewExprNode(self, node):
- self.visit(node.cppclass)
- self.visitchildren(node)
- return node
-
def try_to_parse_directives(self, node):
# If node is the contents of an directive (in a with statement or
# decorator), returns a list of (directivename, value) pairs.
@@ -886,8 +886,8 @@ class InterpretCompilerDirectives(CythonTransform):
if optname:
directivetype = Options.directive_types.get(optname)
if directivetype is bool:
- arg = ExprNodes.BoolNode(node.pos, value=True)
- return [self.try_to_parse_directive(optname, [arg], None, node.pos)]
+ arg = ExprNodes.BoolNode(node.pos, value=True)
+ return [self.try_to_parse_directive(optname, [arg], None, node.pos)]
elif directivetype is None:
return [(optname, None)]
else:
@@ -896,25 +896,25 @@ class InterpretCompilerDirectives(CythonTransform):
return None
def try_to_parse_directive(self, optname, args, kwds, pos):
- if optname == 'np_pythran' and not self.context.cpp:
- raise PostParseError(pos, 'The %s directive can only be used in C++ mode.' % optname)
- elif optname == 'exceptval':
- # default: exceptval(None, check=True)
- arg_error = len(args) > 1
- check = True
- if kwds and kwds.key_value_pairs:
- kw = kwds.key_value_pairs[0]
- if (len(kwds.key_value_pairs) == 1 and
- kw.key.is_string_literal and kw.key.value == 'check' and
- isinstance(kw.value, ExprNodes.BoolNode)):
- check = kw.value.value
- else:
- arg_error = True
- if arg_error:
- raise PostParseError(
- pos, 'The exceptval directive takes 0 or 1 positional arguments and the boolean keyword "check"')
- return ('exceptval', (args[0] if args else None, check))
-
+ if optname == 'np_pythran' and not self.context.cpp:
+ raise PostParseError(pos, 'The %s directive can only be used in C++ mode.' % optname)
+ elif optname == 'exceptval':
+ # default: exceptval(None, check=True)
+ arg_error = len(args) > 1
+ check = True
+ if kwds and kwds.key_value_pairs:
+ kw = kwds.key_value_pairs[0]
+ if (len(kwds.key_value_pairs) == 1 and
+ kw.key.is_string_literal and kw.key.value == 'check' and
+ isinstance(kw.value, ExprNodes.BoolNode)):
+ check = kw.value.value
+ else:
+ arg_error = True
+ if arg_error:
+ raise PostParseError(
+ pos, 'The exceptval directive takes 0 or 1 positional arguments and the boolean keyword "check"')
+ return ('exceptval', (args[0] if args else None, check))
+
directivetype = Options.directive_types.get(optname)
if len(args) == 1 and isinstance(args[0], ExprNodes.NoneNode):
return optname, Options.get_directive_defaults()[optname]
@@ -945,7 +945,7 @@ class InterpretCompilerDirectives(CythonTransform):
'The %s directive takes no prepositional arguments' % optname)
return optname, dict([(key.value, value) for key, value in kwds.key_value_pairs])
elif directivetype is list:
- if kwds and len(kwds.key_value_pairs) != 0:
+ if kwds and len(kwds.key_value_pairs) != 0:
raise PostParseError(pos,
'The %s directive takes no keyword arguments' % optname)
return optname, [str(arg.value) for arg in args]
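
The exceptval branch above always produces a (value, check) pair; a tiny stand-in (hypothetical helper, not the compiler's API) shows the mapping:

    def parse_exceptval(args=(), check=True):
        # mirrors the branch above: 0 or 1 positional args plus the 'check' keyword
        return 'exceptval', (args[0] if args else None, check)

    print(parse_exceptval())                    # ('exceptval', (None, True)) -> "except *"
    print(parse_exceptval((-1,), check=False))  # ('exceptval', (-1, False))
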
@@ -1014,8 +1014,8 @@ class InterpretCompilerDirectives(CythonTransform):
directives = []
realdecs = []
both = []
- # Decorators coming first take precedence.
- for dec in node.decorators[::-1]:
+ # Decorators coming first take precedence.
+ for dec in node.decorators[::-1]:
new_directives = self.try_to_parse_directives(dec.decorator)
if new_directives is not None:
for directive in new_directives:
@@ -1025,15 +1025,15 @@ class InterpretCompilerDirectives(CythonTransform):
directives.append(directive)
if directive[0] == 'staticmethod':
both.append(dec)
- # Adapt scope type based on decorators that change it.
- if directive[0] == 'cclass' and scope_name == 'class':
- scope_name = 'cclass'
+ # Adapt scope type based on decorators that change it.
+ if directive[0] == 'cclass' and scope_name == 'class':
+ scope_name = 'cclass'
else:
realdecs.append(dec)
- if realdecs and (scope_name == 'cclass' or
- isinstance(node, (Nodes.CFuncDefNode, Nodes.CClassDefNode, Nodes.CVarDefNode))):
+ if realdecs and (scope_name == 'cclass' or
+ isinstance(node, (Nodes.CFuncDefNode, Nodes.CClassDefNode, Nodes.CVarDefNode))):
raise PostParseError(realdecs[0].pos, "Cdef functions/classes cannot take arbitrary decorators.")
- node.decorators = realdecs[::-1] + both[::-1]
+ node.decorators = realdecs[::-1] + both[::-1]
# merge or override repeated directives
optdict = {}
for directive in directives:
@@ -1283,7 +1283,7 @@ class WithTransform(CythonTransform, SkipDeclarations):
pos, with_stat=node,
test_if_run=False,
args=excinfo_target,
- await_expr=ExprNodes.AwaitExprNode(pos, arg=None) if is_async else None)),
+ await_expr=ExprNodes.AwaitExprNode(pos, arg=None) if is_async else None)),
body=Nodes.ReraiseStatNode(pos),
),
],
@@ -1305,7 +1305,7 @@ class WithTransform(CythonTransform, SkipDeclarations):
test_if_run=True,
args=ExprNodes.TupleNode(
pos, args=[ExprNodes.NoneNode(pos) for _ in range(3)]),
- await_expr=ExprNodes.AwaitExprNode(pos, arg=None) if is_async else None)),
+ await_expr=ExprNodes.AwaitExprNode(pos, arg=None) if is_async else None)),
handle_error_case=False,
)
return node
@@ -1376,28 +1376,28 @@ class DecoratorTransform(ScopeTrackingTransform, SkipDeclarations):
elif decorator.is_attribute and decorator.obj.name in properties:
handler_name = self._map_property_attribute(decorator.attribute)
if handler_name:
- if decorator.obj.name != node.name:
- # CPython does not generate an error or warning, but not something useful either.
- error(decorator_node.pos,
- "Mismatching property names, expected '%s', got '%s'" % (
- decorator.obj.name, node.name))
- elif len(node.decorators) > 1:
+ if decorator.obj.name != node.name:
+ # CPython does not generate an error or warning here, but does nothing useful either.
+ error(decorator_node.pos,
+ "Mismatching property names, expected '%s', got '%s'" % (
+ decorator.obj.name, node.name))
+ elif len(node.decorators) > 1:
return self._reject_decorated_property(node, decorator_node)
- else:
- return self._add_to_property(properties, node, handler_name, decorator_node)
-
- # we clear node.decorators, so we need to set the
- # is_staticmethod/is_classmethod attributes now
- for decorator in node.decorators:
- func = decorator.decorator
- if func.is_name:
- node.is_classmethod |= func.name == 'classmethod'
- node.is_staticmethod |= func.name == 'staticmethod'
-
+ else:
+ return self._add_to_property(properties, node, handler_name, decorator_node)
+
+ # we clear node.decorators, so we need to set the
+ # is_staticmethod/is_classmethod attributes now
+ for decorator in node.decorators:
+ func = decorator.decorator
+ if func.is_name:
+ node.is_classmethod |= func.name == 'classmethod'
+ node.is_staticmethod |= func.name == 'staticmethod'
+
# transform normal decorators
- decs = node.decorators
- node.decorators = None
- return self.chain_decorators(node, decs, node.name)
+ decs = node.decorators
+ node.decorators = None
+ return self.chain_decorators(node, decs, node.name)
@staticmethod
def _reject_decorated_property(node, decorator_node):
@@ -1531,13 +1531,13 @@ class ForwardDeclareTypes(CythonTransform):
def visit_CClassDefNode(self, node):
if node.class_name not in self.module_scope.entries:
node.declare(self.module_scope)
- # Expand fused methods of .pxd declared types to construct the final vtable order.
- type = self.module_scope.entries[node.class_name].type
- if type is not None and type.is_extension_type and not type.is_builtin_type and type.scope:
- scope = type.scope
- for entry in scope.cfunc_entries:
- if entry.type and entry.type.is_fused:
- entry.type.get_all_specialized_function_types()
+ # Expand fused methods of .pxd declared types to construct the final vtable order.
+ type = self.module_scope.entries[node.class_name].type
+ if type is not None and type.is_extension_type and not type.is_builtin_type and type.scope:
+ scope = type.scope
+ for entry in scope.cfunc_entries:
+ if entry.type and entry.type.is_fused:
+ entry.type.get_all_specialized_function_types()
return node
@@ -1602,13 +1602,13 @@ if VALUE is not None:
return node
def visit_ModuleNode(self, node):
- # Pickling support requires injecting module-level nodes.
- self.extra_module_declarations = []
+ # Pickling support requires injecting module-level nodes.
+ self.extra_module_declarations = []
self.seen_vars_stack.append(set())
node.analyse_declarations(self.current_env())
self.visitchildren(node)
self.seen_vars_stack.pop()
- node.body.stats.extend(self.extra_module_declarations)
+ node.body.stats.extend(self.extra_module_declarations)
return node
def visit_LambdaNode(self, node):
@@ -1630,145 +1630,145 @@ if VALUE is not None:
stats.append(property)
if stats:
node.body.stats += stats
- if (node.visibility != 'extern'
- and not node.scope.lookup('__reduce__')
- and not node.scope.lookup('__reduce_ex__')):
- self._inject_pickle_methods(node)
- return node
-
- def _inject_pickle_methods(self, node):
- env = self.current_env()
- if node.scope.directives['auto_pickle'] is False: # None means attempt it.
- # Old behavior of not doing anything.
- return
- auto_pickle_forced = node.scope.directives['auto_pickle'] is True
-
- all_members = []
- cls = node.entry.type
- cinit = None
- inherited_reduce = None
- while cls is not None:
- all_members.extend(e for e in cls.scope.var_entries if e.name not in ('__weakref__', '__dict__'))
- cinit = cinit or cls.scope.lookup('__cinit__')
- inherited_reduce = inherited_reduce or cls.scope.lookup('__reduce__') or cls.scope.lookup('__reduce_ex__')
- cls = cls.base_type
- all_members.sort(key=lambda e: e.name)
-
- if inherited_reduce:
- # This is not failsafe, as we may not know whether a cimported class defines a __reduce__.
- # This is why we define __reduce_cython__ and only replace __reduce__
- # (via ExtensionTypes.SetupReduce utility code) at runtime on class creation.
- return
-
- non_py = [
- e for e in all_members
- if not e.type.is_pyobject and (not e.type.can_coerce_to_pyobject(env)
- or not e.type.can_coerce_from_pyobject(env))
- ]
-
- structs = [e for e in all_members if e.type.is_struct_or_union]
-
- if cinit or non_py or (structs and not auto_pickle_forced):
- if cinit:
- # TODO(robertwb): We could allow this if __cinit__ has no require arguments.
- msg = 'no default __reduce__ due to non-trivial __cinit__'
- elif non_py:
- msg = "%s cannot be converted to a Python object for pickling" % ','.join("self.%s" % e.name for e in non_py)
- else:
- # Extern structs may be only partially defined.
- # TODO(robertwb): Limit the restriction to extern
- # (and recursively extern-containing) structs.
- msg = ("Pickling of struct members such as %s must be explicitly requested "
- "with @auto_pickle(True)" % ','.join("self.%s" % e.name for e in structs))
-
- if auto_pickle_forced:
- error(node.pos, msg)
-
- pickle_func = TreeFragment(u"""
- def __reduce_cython__(self):
- raise TypeError("%(msg)s")
- def __setstate_cython__(self, __pyx_state):
- raise TypeError("%(msg)s")
- """ % {'msg': msg},
- level='c_class', pipeline=[NormalizeTree(None)]).substitute({})
- pickle_func.analyse_declarations(node.scope)
- self.visit(pickle_func)
- node.body.stats.append(pickle_func)
-
- else:
- for e in all_members:
- if not e.type.is_pyobject:
- e.type.create_to_py_utility_code(env)
- e.type.create_from_py_utility_code(env)
- all_members_names = sorted([e.name for e in all_members])
- checksum = '0x%s' % hashlib.md5(' '.join(all_members_names).encode('utf-8')).hexdigest()[:7]
- unpickle_func_name = '__pyx_unpickle_%s' % node.class_name
-
- # TODO(robertwb): Move the state into the third argument
- # so it can be pickled *after* self is memoized.
- unpickle_func = TreeFragment(u"""
- def %(unpickle_func_name)s(__pyx_type, long __pyx_checksum, __pyx_state):
+ if (node.visibility != 'extern'
+ and not node.scope.lookup('__reduce__')
+ and not node.scope.lookup('__reduce_ex__')):
+ self._inject_pickle_methods(node)
+ return node
+
+ def _inject_pickle_methods(self, node):
+ env = self.current_env()
+ if node.scope.directives['auto_pickle'] is False: # None means attempt it.
+ # Old behavior of not doing anything.
+ return
+ auto_pickle_forced = node.scope.directives['auto_pickle'] is True
+
+ all_members = []
+ cls = node.entry.type
+ cinit = None
+ inherited_reduce = None
+ while cls is not None:
+ all_members.extend(e for e in cls.scope.var_entries if e.name not in ('__weakref__', '__dict__'))
+ cinit = cinit or cls.scope.lookup('__cinit__')
+ inherited_reduce = inherited_reduce or cls.scope.lookup('__reduce__') or cls.scope.lookup('__reduce_ex__')
+ cls = cls.base_type
+ all_members.sort(key=lambda e: e.name)
+
+ if inherited_reduce:
+ # This is not failsafe, as we may not know whether a cimported class defines a __reduce__.
+ # This is why we define __reduce_cython__ and only replace __reduce__
+ # (via ExtensionTypes.SetupReduce utility code) at runtime on class creation.
+ return
+
+ non_py = [
+ e for e in all_members
+ if not e.type.is_pyobject and (not e.type.can_coerce_to_pyobject(env)
+ or not e.type.can_coerce_from_pyobject(env))
+ ]
+
+ structs = [e for e in all_members if e.type.is_struct_or_union]
+
+ if cinit or non_py or (structs and not auto_pickle_forced):
+ if cinit:
+ # TODO(robertwb): We could allow this if __cinit__ has no required arguments.
+ msg = 'no default __reduce__ due to non-trivial __cinit__'
+ elif non_py:
+ msg = "%s cannot be converted to a Python object for pickling" % ','.join("self.%s" % e.name for e in non_py)
+ else:
+ # Extern structs may be only partially defined.
+ # TODO(robertwb): Limit the restriction to extern
+ # (and recursively extern-containing) structs.
+ msg = ("Pickling of struct members such as %s must be explicitly requested "
+ "with @auto_pickle(True)" % ','.join("self.%s" % e.name for e in structs))
+
+ if auto_pickle_forced:
+ error(node.pos, msg)
+
+ pickle_func = TreeFragment(u"""
+ def __reduce_cython__(self):
+ raise TypeError("%(msg)s")
+ def __setstate_cython__(self, __pyx_state):
+ raise TypeError("%(msg)s")
+ """ % {'msg': msg},
+ level='c_class', pipeline=[NormalizeTree(None)]).substitute({})
+ pickle_func.analyse_declarations(node.scope)
+ self.visit(pickle_func)
+ node.body.stats.append(pickle_func)
+
+ else:
+ for e in all_members:
+ if not e.type.is_pyobject:
+ e.type.create_to_py_utility_code(env)
+ e.type.create_from_py_utility_code(env)
+ all_members_names = sorted([e.name for e in all_members])
+ checksum = '0x%s' % hashlib.md5(' '.join(all_members_names).encode('utf-8')).hexdigest()[:7]
+ unpickle_func_name = '__pyx_unpickle_%s' % node.class_name
+
+ # TODO(robertwb): Move the state into the third argument
+ # so it can be pickled *after* self is memoized.
+ unpickle_func = TreeFragment(u"""
+ def %(unpickle_func_name)s(__pyx_type, long __pyx_checksum, __pyx_state):
cdef object __pyx_PickleError
cdef object __pyx_result
- if __pyx_checksum != %(checksum)s:
- from pickle import PickleError as __pyx_PickleError
- raise __pyx_PickleError("Incompatible checksums (%%s vs %(checksum)s = (%(members)s))" %% __pyx_checksum)
- __pyx_result = %(class_name)s.__new__(__pyx_type)
- if __pyx_state is not None:
- %(unpickle_func_name)s__set_state(<%(class_name)s> __pyx_result, __pyx_state)
- return __pyx_result
-
- cdef %(unpickle_func_name)s__set_state(%(class_name)s __pyx_result, tuple __pyx_state):
- %(assignments)s
- if len(__pyx_state) > %(num_members)d and hasattr(__pyx_result, '__dict__'):
- __pyx_result.__dict__.update(__pyx_state[%(num_members)d])
- """ % {
- 'unpickle_func_name': unpickle_func_name,
- 'checksum': checksum,
- 'members': ', '.join(all_members_names),
- 'class_name': node.class_name,
- 'assignments': '; '.join(
- '__pyx_result.%s = __pyx_state[%s]' % (v, ix)
- for ix, v in enumerate(all_members_names)),
- 'num_members': len(all_members_names),
- }, level='module', pipeline=[NormalizeTree(None)]).substitute({})
- unpickle_func.analyse_declarations(node.entry.scope)
- self.visit(unpickle_func)
- self.extra_module_declarations.append(unpickle_func)
-
- pickle_func = TreeFragment(u"""
- def __reduce_cython__(self):
+ if __pyx_checksum != %(checksum)s:
+ from pickle import PickleError as __pyx_PickleError
+ raise __pyx_PickleError("Incompatible checksums (%%s vs %(checksum)s = (%(members)s))" %% __pyx_checksum)
+ __pyx_result = %(class_name)s.__new__(__pyx_type)
+ if __pyx_state is not None:
+ %(unpickle_func_name)s__set_state(<%(class_name)s> __pyx_result, __pyx_state)
+ return __pyx_result
+
+ cdef %(unpickle_func_name)s__set_state(%(class_name)s __pyx_result, tuple __pyx_state):
+ %(assignments)s
+ if len(__pyx_state) > %(num_members)d and hasattr(__pyx_result, '__dict__'):
+ __pyx_result.__dict__.update(__pyx_state[%(num_members)d])
+ """ % {
+ 'unpickle_func_name': unpickle_func_name,
+ 'checksum': checksum,
+ 'members': ', '.join(all_members_names),
+ 'class_name': node.class_name,
+ 'assignments': '; '.join(
+ '__pyx_result.%s = __pyx_state[%s]' % (v, ix)
+ for ix, v in enumerate(all_members_names)),
+ 'num_members': len(all_members_names),
+ }, level='module', pipeline=[NormalizeTree(None)]).substitute({})
+ unpickle_func.analyse_declarations(node.entry.scope)
+ self.visit(unpickle_func)
+ self.extra_module_declarations.append(unpickle_func)
+
+ pickle_func = TreeFragment(u"""
+ def __reduce_cython__(self):
cdef tuple state
cdef object _dict
- cdef bint use_setstate
- state = (%(members)s)
- _dict = getattr(self, '__dict__', None)
- if _dict is not None:
- state += (_dict,)
- use_setstate = True
- else:
- use_setstate = %(any_notnone_members)s
- if use_setstate:
- return %(unpickle_func_name)s, (type(self), %(checksum)s, None), state
- else:
- return %(unpickle_func_name)s, (type(self), %(checksum)s, state)
-
- def __setstate_cython__(self, __pyx_state):
- %(unpickle_func_name)s__set_state(self, __pyx_state)
- """ % {
- 'unpickle_func_name': unpickle_func_name,
- 'checksum': checksum,
- 'members': ', '.join('self.%s' % v for v in all_members_names) + (',' if len(all_members_names) == 1 else ''),
- # Even better, we could check PyType_IS_GC.
- 'any_notnone_members' : ' or '.join(['self.%s is not None' % e.name for e in all_members if e.type.is_pyobject] or ['False']),
- },
- level='c_class', pipeline=[NormalizeTree(None)]).substitute({})
- pickle_func.analyse_declarations(node.scope)
+ cdef bint use_setstate
+ state = (%(members)s)
+ _dict = getattr(self, '__dict__', None)
+ if _dict is not None:
+ state += (_dict,)
+ use_setstate = True
+ else:
+ use_setstate = %(any_notnone_members)s
+ if use_setstate:
+ return %(unpickle_func_name)s, (type(self), %(checksum)s, None), state
+ else:
+ return %(unpickle_func_name)s, (type(self), %(checksum)s, state)
+
+ def __setstate_cython__(self, __pyx_state):
+ %(unpickle_func_name)s__set_state(self, __pyx_state)
+ """ % {
+ 'unpickle_func_name': unpickle_func_name,
+ 'checksum': checksum,
+ 'members': ', '.join('self.%s' % v for v in all_members_names) + (',' if len(all_members_names) == 1 else ''),
+ # Even better, we could check PyType_IS_GC.
+ 'any_notnone_members' : ' or '.join(['self.%s is not None' % e.name for e in all_members if e.type.is_pyobject] or ['False']),
+ },
+ level='c_class', pipeline=[NormalizeTree(None)]).substitute({})
+ pickle_func.analyse_declarations(node.scope)
self.enter_scope(node, node.scope) # functions should be visited in the class scope
- self.visit(pickle_func)
+ self.visit(pickle_func)
self.exit_scope()
- node.body.stats.append(pickle_func)
-
+ node.body.stats.append(pickle_func)
+
def _handle_fused_def_decorators(self, old_decorators, env, node):
"""
Create function calls to the decorators and reassignments to
@@ -1868,7 +1868,7 @@ if VALUE is not None:
def visit_FuncDefNode(self, node):
"""
- Analyse a function and its body, as that hasn't happened yet. Also
+ Analyse a function and its body, as that hasn't happened yet. Also
analyse the directive_locals set by @cython.locals().
Then, if we are a function with fused arguments, replace the function
@@ -1931,8 +1931,8 @@ if VALUE is not None:
binding = self.current_directives.get('binding')
rhs = ExprNodes.PyCFunctionNode.from_defnode(node, binding)
node.code_object = rhs.code_object
- if node.is_generator:
- node.gbody.code_object = node.code_object
+ if node.is_generator:
+ node.gbody.code_object = node.code_object
if env.is_py_class_scope:
rhs.binding = True
@@ -2059,7 +2059,7 @@ if VALUE is not None:
# Some nodes are no longer needed after declaration
# analysis and can be dropped. The analysis was performed
- # on these nodes in a separate recursive process from the
+ # on these nodes in a separate recursive process from the
# enclosing function or module, so we can simply drop them.
def visit_CDeclaratorNode(self, node):
# necessary to ensure that all CNameDeclaratorNodes are visited.
@@ -2354,20 +2354,20 @@ class AdjustDefByDirectives(CythonTransform, SkipDeclarations):
if 'inline' in self.directives:
modifiers.append('inline')
nogil = self.directives.get('nogil')
- except_val = self.directives.get('exceptval')
- return_type_node = self.directives.get('returns')
- if return_type_node is None and self.directives['annotation_typing']:
- return_type_node = node.return_type_annotation
- # for Python anntations, prefer safe exception handling by default
- if return_type_node is not None and except_val is None:
- except_val = (None, True) # except *
- elif except_val is None:
- # backward compatible default: no exception check
- except_val = (None, False)
+ except_val = self.directives.get('exceptval')
+ return_type_node = self.directives.get('returns')
+ if return_type_node is None and self.directives['annotation_typing']:
+ return_type_node = node.return_type_annotation
+ # for Python annotations, prefer safe exception handling by default
+ if return_type_node is not None and except_val is None:
+ except_val = (None, True) # except *
+ elif except_val is None:
+ # backward compatible default: no exception check
+ except_val = (None, False)
if 'ccall' in self.directives:
node = node.as_cfunction(
overridable=True, modifiers=modifiers, nogil=nogil,
- returns=return_type_node, except_val=except_val)
+ returns=return_type_node, except_val=except_val)
return self.visit(node)
if 'cfunc' in self.directives:
if self.in_py_class:
@@ -2375,7 +2375,7 @@ class AdjustDefByDirectives(CythonTransform, SkipDeclarations):
else:
node = node.as_cfunction(
overridable=False, modifiers=modifiers, nogil=nogil,
- returns=return_type_node, except_val=except_val)
+ returns=return_type_node, except_val=except_val)
return self.visit(node)
if 'inline' in modifiers:
error(node.pos, "Python functions cannot be declared 'inline'")
@@ -2531,23 +2531,23 @@ class YieldNodeCollector(TreeVisitor):
super(YieldNodeCollector, self).__init__()
self.yields = []
self.returns = []
- self.finallys = []
- self.excepts = []
+ self.finallys = []
+ self.excepts = []
self.has_return_value = False
- self.has_yield = False
- self.has_await = False
+ self.has_yield = False
+ self.has_await = False
def visit_Node(self, node):
self.visitchildren(node)
def visit_YieldExprNode(self, node):
self.yields.append(node)
- self.has_yield = True
+ self.has_yield = True
self.visitchildren(node)
def visit_AwaitExprNode(self, node):
- self.yields.append(node)
- self.has_await = True
+ self.yields.append(node)
+ self.has_await = True
self.visitchildren(node)
def visit_ReturnStatNode(self, node):
@@ -2556,14 +2556,14 @@ class YieldNodeCollector(TreeVisitor):
self.has_return_value = True
self.returns.append(node)
- def visit_TryFinallyStatNode(self, node):
- self.visitchildren(node)
- self.finallys.append(node)
-
- def visit_TryExceptStatNode(self, node):
- self.visitchildren(node)
- self.excepts.append(node)
-
+ def visit_TryFinallyStatNode(self, node):
+ self.visitchildren(node)
+ self.finallys.append(node)
+
+ def visit_TryExceptStatNode(self, node):
+ self.visitchildren(node)
+ self.excepts.append(node)
+
def visit_ClassDefNode(self, node):
pass
@@ -2599,31 +2599,31 @@ class MarkClosureVisitor(CythonTransform):
collector.visitchildren(node)
if node.is_async_def:
- coroutine_type = Nodes.AsyncDefNode
- if collector.has_yield:
- coroutine_type = Nodes.AsyncGenNode
- for yield_expr in collector.yields + collector.returns:
- yield_expr.in_async_gen = True
- elif self.current_directives['iterable_coroutine']:
- coroutine_type = Nodes.IterableAsyncDefNode
- elif collector.has_await:
- found = next(y for y in collector.yields if y.is_await)
- error(found.pos, "'await' not allowed in generators (use 'yield')")
- return node
- elif collector.has_yield:
- coroutine_type = Nodes.GeneratorDefNode
+ coroutine_type = Nodes.AsyncDefNode
+ if collector.has_yield:
+ coroutine_type = Nodes.AsyncGenNode
+ for yield_expr in collector.yields + collector.returns:
+ yield_expr.in_async_gen = True
+ elif self.current_directives['iterable_coroutine']:
+ coroutine_type = Nodes.IterableAsyncDefNode
+ elif collector.has_await:
+ found = next(y for y in collector.yields if y.is_await)
+ error(found.pos, "'await' not allowed in generators (use 'yield')")
+ return node
+ elif collector.has_yield:
+ coroutine_type = Nodes.GeneratorDefNode
else:
return node
- for i, yield_expr in enumerate(collector.yields, 1):
+ for i, yield_expr in enumerate(collector.yields, 1):
yield_expr.label_num = i
- for retnode in collector.returns + collector.finallys + collector.excepts:
+ for retnode in collector.returns + collector.finallys + collector.excepts:
retnode.in_generator = True
gbody = Nodes.GeneratorBodyDefNode(
- pos=node.pos, name=node.name, body=node.body,
- is_async_gen_body=node.is_async_def and collector.has_yield)
- coroutine = coroutine_type(
+ pos=node.pos, name=node.name, body=node.body,
+ is_async_gen_body=node.is_async_def and collector.has_yield)
+ coroutine = coroutine_type(
pos=node.pos, name=node.name, args=node.args,
star_arg=node.star_arg, starstar_arg=node.starstar_arg,
doc=node.doc, decorators=node.decorators,
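
The coroutine-type selection restored above, restated as a standalone sketch (illustrative names, not the compiler's API):

    def pick_coroutine_kind(is_async_def, has_yield, has_await, iterable_coroutine=False):
        if is_async_def:
            if has_yield:
                return 'AsyncGenNode'
            if iterable_coroutine:
                return 'IterableAsyncDefNode'
            return 'AsyncDefNode'
        if has_await:
            raise SyntaxError("'await' not allowed in generators (use 'yield')")
        if has_yield:
            return 'GeneratorDefNode'
        return None   # plain function: the node is returned unchanged

    print(pick_coroutine_kind(True, True, False))   # AsyncGenNode
    print(pick_coroutine_kind(False, True, False))  # GeneratorDefNode
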
@@ -2670,28 +2670,28 @@ class CreateClosureClasses(CythonTransform):
def find_entries_used_in_closures(self, node):
from_closure = []
in_closure = []
- for scope in node.local_scope.iter_local_scopes():
- for name, entry in scope.entries.items():
- if not name:
- continue
- if entry.from_closure:
- from_closure.append((name, entry))
- elif entry.in_closure:
- in_closure.append((name, entry))
+ for scope in node.local_scope.iter_local_scopes():
+ for name, entry in scope.entries.items():
+ if not name:
+ continue
+ if entry.from_closure:
+ from_closure.append((name, entry))
+ elif entry.in_closure:
+ in_closure.append((name, entry))
return from_closure, in_closure
def create_class_from_scope(self, node, target_module_scope, inner_node=None):
# move local variables into closure
if node.is_generator:
- for scope in node.local_scope.iter_local_scopes():
- for entry in scope.entries.values():
+ for scope in node.local_scope.iter_local_scopes():
+ for entry in scope.entries.values():
if not (entry.from_closure or entry.is_pyglobal or entry.is_cglobal):
- entry.in_closure = True
+ entry.in_closure = True
from_closure, in_closure = self.find_entries_used_in_closures(node)
in_closure.sort()
- # Now from the beginning
+ # Now from the beginning
node.needs_closure = False
node.needs_outer_scope = False
@@ -2733,10 +2733,10 @@ class CreateClosureClasses(CythonTransform):
func_scope.scope_class = entry
class_scope = entry.type.scope
class_scope.is_internal = True
- class_scope.is_closure_class_scope = True
- if node.is_async_def or node.is_generator:
- # Generators need their closure intact during cleanup as they resume to handle GeneratorExit
- class_scope.directives['no_gc_clear'] = True
+ class_scope.is_closure_class_scope = True
+ if node.is_async_def or node.is_generator:
+ # Generators need their closure intact during cleanup as they resume to handle GeneratorExit
+ class_scope.directives['no_gc_clear'] = True
if Options.closure_freelist_size:
class_scope.directives['freelist'] = Options.closure_freelist_size
@@ -2749,12 +2749,12 @@ class CreateClosureClasses(CythonTransform):
is_cdef=True)
node.needs_outer_scope = True
for name, entry in in_closure:
- closure_entry = class_scope.declare_var(
- pos=entry.pos,
- name=entry.name if not entry.in_subscope else None,
- cname=entry.cname,
- type=entry.type,
- is_cdef=True)
+ closure_entry = class_scope.declare_var(
+ pos=entry.pos,
+ name=entry.name if not entry.in_subscope else None,
+ cname=entry.cname,
+ type=entry.type,
+ is_cdef=True)
if entry.is_declared_generic:
closure_entry.is_declared_generic = 1
node.needs_closure = True
@@ -3191,22 +3191,22 @@ class TransformBuiltinMethods(EnvTransform):
def visit_GeneralCallNode(self, node):
function = node.function.as_cython_attribute()
- if function == u'cast':
- # NOTE: assuming simple tuple/dict nodes for positional_args and keyword_args
+ if function == u'cast':
+ # NOTE: assuming simple tuple/dict nodes for positional_args and keyword_args
args = node.positional_args.args
kwargs = node.keyword_args.compile_time_value(None)
- if (len(args) != 2 or len(kwargs) > 1 or
- (len(kwargs) == 1 and 'typecheck' not in kwargs)):
- error(node.function.pos,
- u"cast() takes exactly two arguments and an optional typecheck keyword")
- else:
- type = args[0].analyse_as_type(self.current_env())
- if type:
- typecheck = kwargs.get('typecheck', False)
- node = ExprNodes.TypecastNode(
- node.function.pos, type=type, operand=args[1], typecheck=typecheck)
+ if (len(args) != 2 or len(kwargs) > 1 or
+ (len(kwargs) == 1 and 'typecheck' not in kwargs)):
+ error(node.function.pos,
+ u"cast() takes exactly two arguments and an optional typecheck keyword")
+ else:
+ type = args[0].analyse_as_type(self.current_env())
+ if type:
+ typecheck = kwargs.get('typecheck', False)
+ node = ExprNodes.TypecastNode(
+ node.function.pos, type=type, operand=args[1], typecheck=typecheck)
else:
- error(args[0].pos, "Not a type")
+ error(args[0].pos, "Not a type")
self.visitchildren(node)
return node
@@ -3239,9 +3239,9 @@ class ReplaceFusedTypeChecks(VisitorTransform):
return self.transform(node)
def visit_PrimaryCmpNode(self, node):
- with Errors.local_errors(ignore=True):
- type1 = node.operand1.analyse_as_type(self.local_scope)
- type2 = node.operand2.analyse_as_type(self.local_scope)
+ with Errors.local_errors(ignore=True):
+ type1 = node.operand1.analyse_as_type(self.local_scope)
+ type2 = node.operand2.analyse_as_type(self.local_scope)
if type1 and type2:
false_node = ExprNodes.BoolNode(node.pos, value=False)
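
A hypothetical usage note for the pickle support injected above: with auto_pickle active, a simple cdef class (an assumed Point with public attributes and an argument-less __cinit__) round-trips through pickle via the generated __reduce_cython__ and the module-level __pyx_unpickle_* helper:

    import pickle

    p = Point()                 # assumed auto-pickled cdef class
    p.x, p.y = 1.0, 2.0
    q = pickle.loads(pickle.dumps(p))
    assert (q.x, q.y) == (1.0, 2.0)   # state restored by __pyx_unpickle_Point
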
diff --git a/contrib/tools/cython/Cython/Compiler/Parsing.pxd b/contrib/tools/cython/Cython/Compiler/Parsing.pxd
index e80ffc2dd6..25453b39ab 100644
--- a/contrib/tools/cython/Cython/Compiler/Parsing.pxd
+++ b/contrib/tools/cython/Cython/Compiler/Parsing.pxd
@@ -68,12 +68,12 @@ cdef p_opt_string_literal(PyrexScanner s, required_type=*)
cdef bint check_for_non_ascii_characters(unicode string)
@cython.locals(systr=unicode, is_python3_source=bint, is_raw=bint)
cdef p_string_literal(PyrexScanner s, kind_override=*)
-cdef _append_escape_sequence(kind, builder, unicode escape_sequence, PyrexScanner s)
+cdef _append_escape_sequence(kind, builder, unicode escape_sequence, PyrexScanner s)
cdef tuple _f_string_error_pos(pos, string, Py_ssize_t i)
@cython.locals(i=Py_ssize_t, size=Py_ssize_t, c=Py_UCS4, next_start=Py_ssize_t)
-cdef list p_f_string(PyrexScanner s, unicode_value, pos, bint is_raw)
+cdef list p_f_string(PyrexScanner s, unicode_value, pos, bint is_raw)
@cython.locals(i=Py_ssize_t, size=Py_ssize_t, c=Py_UCS4, quote_char=Py_UCS4, NO_CHAR=Py_UCS4)
-cdef tuple p_f_string_expr(PyrexScanner s, unicode_value, pos, Py_ssize_t starting_index, bint is_raw)
+cdef tuple p_f_string_expr(PyrexScanner s, unicode_value, pos, Py_ssize_t starting_index, bint is_raw)
cdef p_list_maker(PyrexScanner s)
cdef p_comp_iter(PyrexScanner s, body)
cdef p_comp_for(PyrexScanner s, body)
diff --git a/contrib/tools/cython/Cython/Compiler/Parsing.py b/contrib/tools/cython/Cython/Compiler/Parsing.py
index 5bf2fad6fd..4d2f12a24a 100644
--- a/contrib/tools/cython/Cython/Compiler/Parsing.py
+++ b/contrib/tools/cython/Cython/Compiler/Parsing.py
@@ -9,17 +9,17 @@ from __future__ import absolute_import
import cython
cython.declare(Nodes=object, ExprNodes=object, EncodedString=object,
bytes_literal=object, StringEncoding=object,
- FileSourceDescriptor=object, lookup_unicodechar=object, unicode_category=object,
+ FileSourceDescriptor=object, lookup_unicodechar=object, unicode_category=object,
Future=object, Options=object, error=object, warning=object,
- Builtin=object, ModuleNode=object, Utils=object, _unicode=object, _bytes=object,
- re=object, sys=object, _parse_escape_sequences=object, _parse_escape_sequences_raw=object,
+ Builtin=object, ModuleNode=object, Utils=object, _unicode=object, _bytes=object,
+ re=object, sys=object, _parse_escape_sequences=object, _parse_escape_sequences_raw=object,
partial=object, reduce=object, _IS_PY3=cython.bint, _IS_2BYTE_UNICODE=cython.bint,
_CDEF_MODIFIERS=tuple)
from io import StringIO
import re
import sys
-from unicodedata import lookup as lookup_unicodechar, category as unicode_category
+from unicodedata import lookup as lookup_unicodechar, category as unicode_category
from functools import partial, reduce
from .Scanning import PyrexScanner, FileSourceDescriptor, StringSourceDescriptor
@@ -35,7 +35,7 @@ from . import Future
from . import Options
_IS_PY3 = sys.version_info[0] >= 3
-_IS_2BYTE_UNICODE = sys.maxunicode == 0xffff
+_IS_2BYTE_UNICODE = sys.maxunicode == 0xffff
_CDEF_MODIFIERS = ('inline', 'nogil', 'api')
@@ -503,7 +503,7 @@ def p_call_parse_args(s, allow_genexp=True):
break
s.next()
- if s.sy in ('for', 'async'):
+ if s.sy in ('for', 'async'):
if not keyword_args and not last_was_tuple_unpack:
if len(positional_args) == 1 and len(positional_args[0]) == 1:
positional_args = [[p_genexp(s, positional_args[0][0])]]
@@ -706,17 +706,17 @@ def p_atom(s):
elif sy == 'IDENT':
name = s.systring
if name == "None":
- result = ExprNodes.NoneNode(pos)
+ result = ExprNodes.NoneNode(pos)
elif name == "True":
- result = ExprNodes.BoolNode(pos, value=True)
+ result = ExprNodes.BoolNode(pos, value=True)
elif name == "False":
- result = ExprNodes.BoolNode(pos, value=False)
+ result = ExprNodes.BoolNode(pos, value=False)
elif name == "NULL" and not s.in_python_file:
- result = ExprNodes.NullNode(pos)
+ result = ExprNodes.NullNode(pos)
else:
- result = p_name(s, name)
- s.next()
- return result
+ result = p_name(s, name)
+ s.next()
+ return result
else:
s.error("Expected an identifier or literal")
@@ -774,15 +774,15 @@ def wrap_compile_time_constant(pos, value):
return ExprNodes.IntNode(pos, value=rep, constant_result=value)
elif isinstance(value, float):
return ExprNodes.FloatNode(pos, value=rep, constant_result=value)
- elif isinstance(value, complex):
- node = ExprNodes.ImagNode(pos, value=repr(value.imag), constant_result=complex(0.0, value.imag))
- if value.real:
- # FIXME: should we care about -0.0 ?
- # probably not worth using the '-' operator for negative imag values
- node = ExprNodes.binop_node(
- pos, '+', ExprNodes.FloatNode(pos, value=repr(value.real), constant_result=value.real), node,
- constant_result=value)
- return node
+ elif isinstance(value, complex):
+ node = ExprNodes.ImagNode(pos, value=repr(value.imag), constant_result=complex(0.0, value.imag))
+ if value.real:
+ # FIXME: should we care about -0.0 ?
+ # probably not worth using the '-' operator for negative imag values
+ node = ExprNodes.binop_node(
+ pos, '+', ExprNodes.FloatNode(pos, value=repr(value.real), constant_result=value.real), node,
+ constant_result=value)
+ return node
elif isinstance(value, _unicode):
return ExprNodes.UnicodeNode(pos, value=EncodedString(value))
elif isinstance(value, _bytes):
@@ -824,8 +824,8 @@ def p_cat_string_literal(s):
if set([kind, next_kind]) in (set(['f', 'u']), set(['f', ''])):
kind = 'f'
else:
- error(pos, "Cannot mix string literals of different types, expected %s'', got %s''" % (
- kind, next_kind))
+ error(pos, "Cannot mix string literals of different types, expected %s'', got %s''" % (
+ kind, next_kind))
continue
bstrings.append(next_bytes_value)
ustrings.append(next_unicode_value)
@@ -884,15 +884,15 @@ def p_string_literal(s, kind_override=None):
has_non_ascii_literal_characters = False
string_start_pos = (pos[0], pos[1], pos[2] + len(s.systring))
kind_string = s.systring.rstrip('"\'').lower()
- if len(kind_string) > 1:
- if len(set(kind_string)) != len(kind_string):
- error(pos, 'Duplicate string prefix character')
- if 'b' in kind_string and 'u' in kind_string:
- error(pos, 'String prefixes b and u cannot be combined')
- if 'b' in kind_string and 'f' in kind_string:
- error(pos, 'String prefixes b and f cannot be combined')
- if 'u' in kind_string and 'f' in kind_string:
- error(pos, 'String prefixes u and f cannot be combined')
+ if len(kind_string) > 1:
+ if len(set(kind_string)) != len(kind_string):
+ error(pos, 'Duplicate string prefix character')
+ if 'b' in kind_string and 'u' in kind_string:
+ error(pos, 'String prefixes b and u cannot be combined')
+ if 'b' in kind_string and 'f' in kind_string:
+ error(pos, 'String prefixes b and f cannot be combined')
+ if 'u' in kind_string and 'f' in kind_string:
+ error(pos, 'String prefixes u and f cannot be combined')
is_raw = 'r' in kind_string
@@ -900,11 +900,11 @@ def p_string_literal(s, kind_override=None):
# this should never happen, since the lexer does not allow combining c
# with other prefix characters
if len(kind_string) != 1:
- error(pos, 'Invalid string prefix for character literal')
+ error(pos, 'Invalid string prefix for character literal')
kind = 'c'
elif 'f' in kind_string:
- kind = 'f' # u is ignored
- is_raw = True # postpone the escape resolution
+ kind = 'f' # u is ignored
+ is_raw = True # postpone the escape resolution
elif 'b' in kind_string:
kind = 'b'
elif 'u' in kind_string:
@@ -935,13 +935,13 @@ def p_string_literal(s, kind_override=None):
if is_python3_source and not has_non_ascii_literal_characters and check_for_non_ascii_characters(systr):
has_non_ascii_literal_characters = True
elif sy == 'ESCAPE':
- # in Py2, 'ur' raw unicode strings resolve unicode escapes but nothing else
- if is_raw and (is_python3_source or kind != 'u' or systr[1] not in u'Uu'):
+ # in Py2, 'ur' raw unicode strings resolve unicode escapes but nothing else
+ if is_raw and (is_python3_source or kind != 'u' or systr[1] not in u'Uu'):
chars.append(systr)
- if is_python3_source and not has_non_ascii_literal_characters and check_for_non_ascii_characters(systr):
+ if is_python3_source and not has_non_ascii_literal_characters and check_for_non_ascii_characters(systr):
has_non_ascii_literal_characters = True
else:
- _append_escape_sequence(kind, chars, systr, s)
+ _append_escape_sequence(kind, chars, systr, s)
elif sy == 'NEWLINE':
chars.append(u'\n')
elif sy == 'END_STRING':
@@ -949,8 +949,8 @@ def p_string_literal(s, kind_override=None):
elif sy == 'EOF':
s.error("Unclosed string literal", pos=pos)
else:
- s.error("Unexpected token %r:%r in string literal" % (
- sy, s.systring))
+ s.error("Unexpected token %r:%r in string literal" % (
+ sy, s.systring))
if kind == 'c':
unicode_value = None
@@ -963,7 +963,7 @@ def p_string_literal(s, kind_override=None):
and is_python3_source and Future.unicode_literals in s.context.future_directives):
# Python 3 forbids literal non-ASCII characters in byte strings
if kind == 'b':
- s.error("bytes can only contain ASCII literal characters.", pos=pos)
+ s.error("bytes can only contain ASCII literal characters.", pos=pos)
bytes_value = None
if kind == 'f':
unicode_value = p_f_string(s, unicode_value, string_start_pos, is_raw='r' in kind_string)
@@ -971,125 +971,125 @@ def p_string_literal(s, kind_override=None):
return (kind, bytes_value, unicode_value)
-def _append_escape_sequence(kind, builder, escape_sequence, s):
- c = escape_sequence[1]
- if c in u"01234567":
- builder.append_charval(int(escape_sequence[1:], 8))
- elif c in u"'\"\\":
- builder.append(c)
- elif c in u"abfnrtv":
- builder.append(StringEncoding.char_from_escape_sequence(escape_sequence))
- elif c == u'\n':
- pass # line continuation
- elif c == u'x': # \xXX
- if len(escape_sequence) == 4:
- builder.append_charval(int(escape_sequence[2:], 16))
- else:
- s.error("Invalid hex escape '%s'" % escape_sequence, fatal=False)
- elif c in u'NUu' and kind in ('u', 'f', ''): # \uxxxx, \Uxxxxxxxx, \N{...}
- chrval = -1
- if c == u'N':
- uchar = None
- try:
- uchar = lookup_unicodechar(escape_sequence[3:-1])
- chrval = ord(uchar)
- except KeyError:
- s.error("Unknown Unicode character name %s" %
- repr(escape_sequence[3:-1]).lstrip('u'), fatal=False)
- except TypeError:
- # 2-byte unicode build of CPython?
- if (uchar is not None and _IS_2BYTE_UNICODE and len(uchar) == 2 and
- unicode_category(uchar[0]) == 'Cs' and unicode_category(uchar[1]) == 'Cs'):
- # surrogate pair instead of single character
- chrval = 0x10000 + (ord(uchar[0]) - 0xd800) >> 10 + (ord(uchar[1]) - 0xdc00)
- else:
- raise
- elif len(escape_sequence) in (6, 10):
- chrval = int(escape_sequence[2:], 16)
- if chrval > 1114111: # sys.maxunicode:
- s.error("Invalid unicode escape '%s'" % escape_sequence)
- chrval = -1
- else:
- s.error("Invalid unicode escape '%s'" % escape_sequence, fatal=False)
- if chrval >= 0:
- builder.append_uescape(chrval, escape_sequence)
- else:
- builder.append(escape_sequence)
-
-
-_parse_escape_sequences_raw, _parse_escape_sequences = [re.compile((
- # escape sequences:
- br'(\\(?:' +
- (br'\\?' if is_raw else (
- br'[\\abfnrtv"\'{]|'
- br'[0-7]{2,3}|'
- br'N\{[^}]*\}|'
- br'x[0-9a-fA-F]{2}|'
- br'u[0-9a-fA-F]{4}|'
- br'U[0-9a-fA-F]{8}|'
- br'[NxuU]|' # detect invalid escape sequences that do not match above
- )) +
- br')?|'
- # non-escape sequences:
- br'\{\{?|'
- br'\}\}?|'
- br'[^\\{}]+)'
- ).decode('us-ascii')).match
- for is_raw in (True, False)]
-
-
+def _append_escape_sequence(kind, builder, escape_sequence, s):
+ c = escape_sequence[1]
+ if c in u"01234567":
+ builder.append_charval(int(escape_sequence[1:], 8))
+ elif c in u"'\"\\":
+ builder.append(c)
+ elif c in u"abfnrtv":
+ builder.append(StringEncoding.char_from_escape_sequence(escape_sequence))
+ elif c == u'\n':
+ pass # line continuation
+ elif c == u'x': # \xXX
+ if len(escape_sequence) == 4:
+ builder.append_charval(int(escape_sequence[2:], 16))
+ else:
+ s.error("Invalid hex escape '%s'" % escape_sequence, fatal=False)
+ elif c in u'NUu' and kind in ('u', 'f', ''): # \uxxxx, \Uxxxxxxxx, \N{...}
+ chrval = -1
+ if c == u'N':
+ uchar = None
+ try:
+ uchar = lookup_unicodechar(escape_sequence[3:-1])
+ chrval = ord(uchar)
+ except KeyError:
+ s.error("Unknown Unicode character name %s" %
+ repr(escape_sequence[3:-1]).lstrip('u'), fatal=False)
+ except TypeError:
+ # 2-byte unicode build of CPython?
+ if (uchar is not None and _IS_2BYTE_UNICODE and len(uchar) == 2 and
+ unicode_category(uchar[0]) == 'Cs' and unicode_category(uchar[1]) == 'Cs'):
+ # surrogate pair instead of single character
+ chrval = 0x10000 + (ord(uchar[0]) - 0xd800) >> 10 + (ord(uchar[1]) - 0xdc00)
+ else:
+ raise
+ elif len(escape_sequence) in (6, 10):
+ chrval = int(escape_sequence[2:], 16)
+ if chrval > 1114111: # sys.maxunicode:
+ s.error("Invalid unicode escape '%s'" % escape_sequence)
+ chrval = -1
+ else:
+ s.error("Invalid unicode escape '%s'" % escape_sequence, fatal=False)
+ if chrval >= 0:
+ builder.append_uescape(chrval, escape_sequence)
+ else:
+ builder.append(escape_sequence)
+
+
+_parse_escape_sequences_raw, _parse_escape_sequences = [re.compile((
+ # escape sequences:
+ br'(\\(?:' +
+ (br'\\?' if is_raw else (
+ br'[\\abfnrtv"\'{]|'
+ br'[0-7]{2,3}|'
+ br'N\{[^}]*\}|'
+ br'x[0-9a-fA-F]{2}|'
+ br'u[0-9a-fA-F]{4}|'
+ br'U[0-9a-fA-F]{8}|'
+ br'[NxuU]|' # detect invalid escape sequences that do not match above
+ )) +
+ br')?|'
+ # non-escape sequences:
+ br'\{\{?|'
+ br'\}\}?|'
+ br'[^\\{}]+)'
+ ).decode('us-ascii')).match
+ for is_raw in (True, False)]
+
+
def _f_string_error_pos(pos, string, i):
return (pos[0], pos[1], pos[2] + i + 1) # FIXME: handle newlines in string
-def p_f_string(s, unicode_value, pos, is_raw):
+def p_f_string(s, unicode_value, pos, is_raw):
# Parses a PEP 498 f-string literal into a list of nodes. Nodes are either UnicodeNodes
# or FormattedValueNodes.
values = []
- next_start = 0
+ next_start = 0
size = len(unicode_value)
- builder = StringEncoding.UnicodeLiteralBuilder()
- _parse_seq = _parse_escape_sequences_raw if is_raw else _parse_escape_sequences
-
- while next_start < size:
- end = next_start
- match = _parse_seq(unicode_value, next_start)
- if match is None:
+ builder = StringEncoding.UnicodeLiteralBuilder()
+ _parse_seq = _parse_escape_sequences_raw if is_raw else _parse_escape_sequences
+
+ while next_start < size:
+ end = next_start
+ match = _parse_seq(unicode_value, next_start)
+ if match is None:
error(_f_string_error_pos(pos, unicode_value, next_start), "Invalid escape sequence")
-
- next_start = match.end()
- part = match.group()
- c = part[0]
- if c == '\\':
- if not is_raw and len(part) > 1:
- _append_escape_sequence('f', builder, part, s)
+
+ next_start = match.end()
+ part = match.group()
+ c = part[0]
+ if c == '\\':
+ if not is_raw and len(part) > 1:
+ _append_escape_sequence('f', builder, part, s)
+ else:
+ builder.append(part)
+ elif c == '{':
+ if part == '{{':
+ builder.append('{')
else:
- builder.append(part)
- elif c == '{':
- if part == '{{':
- builder.append('{')
- else:
- # start of an expression
- if builder.chars:
- values.append(ExprNodes.UnicodeNode(pos, value=builder.getstring()))
- builder = StringEncoding.UnicodeLiteralBuilder()
- next_start, expr_node = p_f_string_expr(s, unicode_value, pos, next_start, is_raw)
+ # start of an expression
+ if builder.chars:
+ values.append(ExprNodes.UnicodeNode(pos, value=builder.getstring()))
+ builder = StringEncoding.UnicodeLiteralBuilder()
+ next_start, expr_node = p_f_string_expr(s, unicode_value, pos, next_start, is_raw)
values.append(expr_node)
- elif c == '}':
- if part == '}}':
- builder.append('}')
- else:
+ elif c == '}':
+ if part == '}}':
+ builder.append('}')
+ else:
error(_f_string_error_pos(pos, unicode_value, end),
"f-string: single '}' is not allowed")
else:
- builder.append(part)
+ builder.append(part)
- if builder.chars:
- values.append(ExprNodes.UnicodeNode(pos, value=builder.getstring()))
+ if builder.chars:
+ values.append(ExprNodes.UnicodeNode(pos, value=builder.getstring()))
return values
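To illustrate the loop above: once escapes are handled, '{{' and '}}' collapse to literal braces and a lone '{' opens an expression part. A simplified standalone sketch of that splitting (split_fstring and its regex are illustrative, not the compiler's own machinery):

import re

_TOKEN = re.compile(r'\{\{|\}\}|\{[^{}]*\}|[^{}]+')

def split_fstring(body):
    # Produce ('text', ...) and ('expr', ...) parts, mirroring the
    # UnicodeNode / FormattedValueNode split, minus error handling.
    parts = []
    for tok in _TOKEN.findall(body):
        if tok == '{{':
            parts.append(('text', '{'))
        elif tok == '}}':
            parts.append(('text', '}'))
        elif tok.startswith('{'):
            parts.append(('expr', tok[1:-1]))
        else:
            parts.append(('text', tok))
    return parts

assert split_fstring('a{x!r}b{{c}}') == [
    ('text', 'a'), ('expr', 'x!r'), ('text', 'b'),
    ('text', '{'), ('text', 'c'), ('text', '}')]
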
-def p_f_string_expr(s, unicode_value, pos, starting_index, is_raw):
+def p_f_string_expr(s, unicode_value, pos, starting_index, is_raw):
# Parses a {}-delimited expression inside an f-string. Returns a FormattedValueNode
# and the index in the string that follows the expression.
i = starting_index
@@ -1157,10 +1157,10 @@ def p_f_string_expr(s, unicode_value, pos, starting_index, is_raw):
i += 1
if i + 2 > size:
pass # error will be reported below
- else:
- conversion_char = unicode_value[i]
- i += 1
- terminal_char = unicode_value[i]
+ else:
+ conversion_char = unicode_value[i]
+ i += 1
+ terminal_char = unicode_value[i]
if terminal_char == ':':
in_triple_quotes = False
@@ -1206,17 +1206,17 @@ def p_f_string_expr(s, unicode_value, pos, starting_index, is_raw):
# the format spec is itself treated like an f-string
if format_spec_str:
- format_spec = ExprNodes.JoinedStrNode(pos, values=p_f_string(s, format_spec_str, pos, is_raw))
+ format_spec = ExprNodes.JoinedStrNode(pos, values=p_f_string(s, format_spec_str, pos, is_raw))
return i + 1, ExprNodes.FormattedValueNode(
- pos, value=expr, conversion_char=conversion_char, format_spec=format_spec)
+ pos, value=expr, conversion_char=conversion_char, format_spec=format_spec)
# since PEP 448:
# list_display ::= "[" [listmaker] "]"
# listmaker ::= (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
# comp_iter ::= comp_for | comp_if
-# comp_for ::= ["async"] "for" expression_list "in" testlist [comp_iter]
+# comp_for ::= ["async"] "for" expression_list "in" testlist [comp_iter]
# comp_if ::= "if" test [comp_iter]
def p_list_maker(s):
@@ -1228,7 +1228,7 @@ def p_list_maker(s):
return ExprNodes.ListNode(pos, args=[])
expr = p_test_or_starred_expr(s)
- if s.sy in ('for', 'async'):
+ if s.sy in ('for', 'async'):
if expr.is_starred:
s.error("iterable unpacking cannot be used in comprehension")
append = ExprNodes.ComprehensionAppendNode(pos, expr=expr)
@@ -1250,7 +1250,7 @@ def p_list_maker(s):
def p_comp_iter(s, body):
- if s.sy in ('for', 'async'):
+ if s.sy in ('for', 'async'):
return p_comp_for(s, body)
elif s.sy == 'if':
return p_comp_if(s, body)
@@ -1259,17 +1259,17 @@ def p_comp_iter(s, body):
return body
def p_comp_for(s, body):
- pos = s.position()
- # [async] for ...
- is_async = False
- if s.sy == 'async':
- is_async = True
- s.next()
-
+ pos = s.position()
+ # [async] for ...
+ is_async = False
+ if s.sy == 'async':
+ is_async = True
+ s.next()
+
# s.sy == 'for'
- s.expect('for')
- kw = p_for_bounds(s, allow_testlist=False, is_async=is_async)
- kw.update(else_clause=None, body=p_comp_iter(s, body), is_async=is_async)
+ s.expect('for')
+ kw = p_for_bounds(s, allow_testlist=False, is_async=is_async)
+ kw.update(else_clause=None, body=p_comp_iter(s, body), is_async=is_async)
return Nodes.ForStatNode(pos, **kw)
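The optional 'async' consumed above implements PEP 530 async comprehensions; both plain-Python forms below go through the same comp_for path, differing only in is_async:

def squares(n):
    return [x * x for x in range(n)]     # comp_for with is_async=False

async def collect(aiter):
    return [x async for x in aiter]      # comp_for with is_async=True
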
def p_comp_if(s, body):
@@ -1337,7 +1337,7 @@ def p_dict_or_set_maker(s):
else:
break
- if s.sy in ('for', 'async'):
+ if s.sy in ('for', 'async'):
# dict/set comprehension
if len(parts) == 1 and isinstance(parts[0], list) and len(parts[0]) == 1:
item = parts[0][0]
@@ -1467,13 +1467,13 @@ def p_testlist_comp(s):
s.next()
exprs = p_test_or_starred_expr_list(s, expr)
return ExprNodes.TupleNode(pos, args = exprs)
- elif s.sy in ('for', 'async'):
+ elif s.sy in ('for', 'async'):
return p_genexp(s, expr)
else:
return expr
def p_genexp(s, expr):
- # s.sy == 'async' | 'for'
+ # s.sy == 'async' | 'for'
loop = p_comp_for(s, Nodes.ExprStatNode(
expr.pos, expr = ExprNodes.YieldExprNode(expr.pos, arg=expr)))
return ExprNodes.GeneratorExpressionNode(expr.pos, loop=loop)
@@ -1504,17 +1504,17 @@ def p_nonlocal_statement(s):
def p_expression_or_assignment(s):
- expr = p_testlist_star_expr(s)
- if s.sy == ':' and (expr.is_name or expr.is_subscript or expr.is_attribute):
- s.next()
- expr.annotation = p_test(s)
- if s.sy == '=' and expr.is_starred:
+ expr = p_testlist_star_expr(s)
+ if s.sy == ':' and (expr.is_name or expr.is_subscript or expr.is_attribute):
+ s.next()
+ expr.annotation = p_test(s)
+ if s.sy == '=' and expr.is_starred:
# This is a common enough error to make when learning Cython to let
# it fail as early as possible and give a very clear error message.
s.error("a starred assignment target must be in a list or tuple"
" - maybe you meant to use an index assignment: var[0] = ...",
- pos=expr.pos)
- expr_list = [expr]
+ pos=expr.pos)
+ expr_list = [expr]
while s.sy == '=':
s.next()
if s.sy == 'yield':
@@ -2044,12 +2044,12 @@ def p_include_statement(s, ctx):
if include_file_path:
s.included_files.append(include_file_name)
with Utils.open_source_file(include_file_path) as f:
- if Options.source_root:
- import os
- rel_path = os.path.relpath(include_file_path, Options.source_root)
- else:
- rel_path = None
- source_desc = FileSourceDescriptor(include_file_path, rel_path)
+ if Options.source_root:
+ import os
+ rel_path = os.path.relpath(include_file_path, Options.source_root)
+ else:
+ rel_path = None
+ source_desc = FileSourceDescriptor(include_file_path, rel_path)
s2 = PyrexScanner(f, source_desc, s, source_encoding=f.encoding, parse_comments=s.parse_comments)
tree = p_statement_list(s2, ctx)
return tree
@@ -2178,14 +2178,14 @@ def p_simple_statement_list(s, ctx, first_statement = 0):
stat = stats[0]
else:
stat = Nodes.StatListNode(pos, stats = stats)
-
- if s.sy not in ('NEWLINE', 'EOF'):
- # provide a better error message for users who accidentally write Cython code in .py files
- if isinstance(stat, Nodes.ExprStatNode):
- if stat.expr.is_name and stat.expr.name == 'cdef':
- s.error("The 'cdef' keyword is only allowed in Cython files (pyx/pxi/pxd)", pos)
+
+ if s.sy not in ('NEWLINE', 'EOF'):
+ # provide a better error message for users who accidentally write Cython code in .py files
+ if isinstance(stat, Nodes.ExprStatNode):
+ if stat.expr.is_name and stat.expr.name == 'cdef':
+ s.error("The 'cdef' keyword is only allowed in Cython files (pyx/pxi/pxd)", pos)
s.expect_newline("Syntax error in simple statement list")
-
+
return stat
def p_compile_time_expr(s):
@@ -2202,10 +2202,10 @@ def p_DEF_statement(s):
name = p_ident(s)
s.expect('=')
expr = p_compile_time_expr(s)
- if s.compile_time_eval:
- value = expr.compile_time_value(denv)
- #print "p_DEF_statement: %s = %r" % (name, value) ###
- denv.declare(name, value)
+ if s.compile_time_eval:
+ value = expr.compile_time_value(denv)
+ #print "p_DEF_statement: %s = %r" % (name, value) ###
+ denv.declare(name, value)
s.expect_newline("Expected a newline", ignore_semicolon=True)
return Nodes.PassStatNode(pos)
@@ -2501,12 +2501,12 @@ def p_c_simple_base_type(s, self_flag, nonempty, templates = None):
error(pos, "Expected an identifier, found '%s'" % s.sy)
if s.systring == 'const':
s.next()
- base_type = p_c_base_type(s, self_flag=self_flag, nonempty=nonempty, templates=templates)
- if isinstance(base_type, Nodes.MemoryViewSliceTypeNode):
- # reverse order to avoid having to write "(const int)[:]"
- base_type.base_type_node = Nodes.CConstTypeNode(pos, base_type=base_type.base_type_node)
- return base_type
- return Nodes.CConstTypeNode(pos, base_type=base_type)
+ base_type = p_c_base_type(s, self_flag=self_flag, nonempty=nonempty, templates=templates)
+ if isinstance(base_type, Nodes.MemoryViewSliceTypeNode):
+ # reverse order to avoid having to write "(const int)[:]"
+ base_type.base_type_node = Nodes.CConstTypeNode(pos, base_type=base_type.base_type_node)
+ return base_type
+ return Nodes.CConstTypeNode(pos, base_type=base_type)
if looking_at_base_type(s):
#print "p_c_simple_base_type: looking_at_base_type at", s.position()
is_basic = 1
@@ -2733,7 +2733,7 @@ special_basic_c_types = cython.declare(dict, {
"ssize_t" : (2, 0),
"size_t" : (0, 0),
"ptrdiff_t" : (2, 0),
- "Py_tss_t" : (1, 0),
+ "Py_tss_t" : (1, 0),
})
sign_and_longness_words = cython.declare(
@@ -3023,13 +3023,13 @@ def p_c_arg_decl(s, ctx, in_pyfunc, cmethod_flag = 0, nonempty = 0,
if s.sy == '=':
s.next()
if 'pxd' in ctx.level:
- if s.sy in ['*', '?']:
- # TODO(github/1736): Make this an error for inline declarations.
- default = ExprNodes.NoneNode(pos)
- s.next()
- elif 'inline' in ctx.modifiers:
- default = p_test(s)
- else:
+ if s.sy in ['*', '?']:
+ # TODO(github/1736): Make this an error for inline declarations.
+ default = ExprNodes.NoneNode(pos)
+ s.next()
+ elif 'inline' in ctx.modifiers:
+ default = p_test(s)
+ else:
error(pos, "default values cannot be specified in pxd files, use ? or *")
else:
default = p_test(s)
@@ -3108,13 +3108,13 @@ def p_cdef_extern_block(s, pos, ctx):
ctx.namespace = p_string_literal(s, 'u')[2]
if p_nogil(s):
ctx.nogil = 1
-
- # Use "docstring" as verbatim string to include
- verbatim_include, body = p_suite_with_docstring(s, ctx, True)
-
+
+ # Use "docstring" as verbatim string to include
+ verbatim_include, body = p_suite_with_docstring(s, ctx, True)
+
return Nodes.CDefExternNode(pos,
include_file = include_file,
- verbatim_include = verbatim_include,
+ verbatim_include = verbatim_include,
body = body,
namespace = ctx.namespace)
@@ -3270,7 +3270,7 @@ def p_c_func_or_var_declaration(s, pos, ctx):
cmethod_flag = ctx.level in ('c_class', 'c_class_pxd')
modifiers = p_c_modifiers(s)
base_type = p_c_base_type(s, nonempty = 1, templates = ctx.templates)
- declarator = p_c_declarator(s, ctx(modifiers=modifiers), cmethod_flag = cmethod_flag,
+ declarator = p_c_declarator(s, ctx(modifiers=modifiers), cmethod_flag = cmethod_flag,
assignable = 1, nonempty = 1)
declarator.overridable = ctx.overridable
if s.sy == 'IDENT' and s.systring == 'const' and ctx.level == 'cpp_class':
@@ -3488,16 +3488,16 @@ def p_c_class_definition(s, pos, ctx):
as_name = class_name
objstruct_name = None
typeobj_name = None
- bases = None
+ bases = None
check_size = None
if s.sy == '(':
- positional_args, keyword_args = p_call_parse_args(s, allow_genexp=False)
- if keyword_args:
- s.error("C classes cannot take keyword bases.")
- bases, _ = p_call_build_packed_args(pos, positional_args, keyword_args)
- if bases is None:
- bases = ExprNodes.TupleNode(pos, args=[])
-
+ positional_args, keyword_args = p_call_parse_args(s, allow_genexp=False)
+ if keyword_args:
+ s.error("C classes cannot take keyword bases.")
+ bases, _ = p_call_build_packed_args(pos, positional_args, keyword_args)
+ if bases is None:
+ bases = ExprNodes.TupleNode(pos, args=[])
+
if s.sy == '[':
if ctx.visibility not in ('public', 'extern') and not ctx.api:
error(s.position(), "Name options only allowed for 'public', 'api', or 'extern' C class")
@@ -3537,7 +3537,7 @@ def p_c_class_definition(s, pos, ctx):
module_name = ".".join(module_path),
class_name = class_name,
as_name = as_name,
- bases = bases,
+ bases = bases,
objstruct_name = objstruct_name,
typeobj_name = typeobj_name,
check_size = check_size,
diff --git a/contrib/tools/cython/Cython/Compiler/Pipeline.py b/contrib/tools/cython/Cython/Compiler/Pipeline.py
index 2cb8cb34ce..5194c3e49b 100644
--- a/contrib/tools/cython/Cython/Compiler/Pipeline.py
+++ b/contrib/tools/cython/Cython/Compiler/Pipeline.py
@@ -144,7 +144,7 @@ def create_pipeline(context, mode, exclude_classes=()):
from .ParseTreeTransforms import ForwardDeclareTypes, InjectGilHandling, AnalyseDeclarationsTransform
from .ParseTreeTransforms import AnalyseExpressionsTransform, FindInvalidUseOfFusedTypes
from .ParseTreeTransforms import CreateClosureClasses, MarkClosureVisitor, DecoratorTransform
- from .ParseTreeTransforms import TrackNumpyAttributes, InterpretCompilerDirectives, TransformBuiltinMethods
+ from .ParseTreeTransforms import TrackNumpyAttributes, InterpretCompilerDirectives, TransformBuiltinMethods
from .ParseTreeTransforms import ExpandInplaceOperators, ParallelRangeTransform
from .ParseTreeTransforms import CalculateQualifiedNamesTransform
from .TypeInference import MarkParallelAssignments, MarkOverflowingArithmetic
@@ -182,7 +182,7 @@ def create_pipeline(context, mode, exclude_classes=()):
NormalizeTree(context),
PostParse(context),
_specific_post_parse,
- TrackNumpyAttributes(),
+ TrackNumpyAttributes(),
InterpretCompilerDirectives(context, context.compiler_directives),
ParallelRangeTransform(context),
AdjustDefByDirectives(context),
@@ -324,40 +324,40 @@ def insert_into_pipeline(pipeline, transform, before=None, after=None):
# Running a pipeline
#
-_pipeline_entry_points = {}
-
-
+_pipeline_entry_points = {}
+
+
def run_pipeline(pipeline, source, printtree=True):
from .Visitor import PrintTree
- exec_ns = globals().copy() if DebugFlags.debug_verbose_pipeline else None
+ exec_ns = globals().copy() if DebugFlags.debug_verbose_pipeline else None
-    def run(phase, data):
-        return phase(data)
-
+
+    def run(phase, data):
+        return phase(data)
error = None
data = source
try:
try:
for phase in pipeline:
if phase is not None:
- if not printtree and isinstance(phase, PrintTree):
- continue
+ if not printtree and isinstance(phase, PrintTree):
+ continue
if DebugFlags.debug_verbose_pipeline:
t = time()
print("Entering pipeline phase %r" % phase)
- # create a new wrapper for each step to show the name in profiles
- phase_name = getattr(phase, '__name__', type(phase).__name__)
- try:
- run = _pipeline_entry_points[phase_name]
- except KeyError:
- exec("def %s(phase, data): return phase(data)" % phase_name, exec_ns)
- run = _pipeline_entry_points[phase_name] = exec_ns[phase_name]
- data = run(phase, data)
+ # create a new wrapper for each step to show the name in profiles
+ phase_name = getattr(phase, '__name__', type(phase).__name__)
+ try:
+ run = _pipeline_entry_points[phase_name]
+ except KeyError:
+ exec("def %s(phase, data): return phase(data)" % phase_name, exec_ns)
+ run = _pipeline_entry_points[phase_name] = exec_ns[phase_name]
+ data = run(phase, data)
if DebugFlags.debug_verbose_pipeline:
print(" %.3f seconds" % (time() - t))
except CompileError as err:
# err is set
- Errors.report_error(err, use_stack=False)
+ Errors.report_error(err, use_stack=False)
error = err
except InternalError as err:
# Only raise if there was not an earlier error
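The block restored above exec()s a one-line forwarder named after each phase so that profilers attribute time to "PostParse", "AnalyseDeclarations" and so on, rather than to a single generic run(). A minimal standalone sketch of the same trick (make_entry_point is a hypothetical helper):

_entry_points = {}

def make_entry_point(phase_name, ns):
    # Define (once per name) a forwarding function whose __name__ is the
    # phase name, so cProfile output shows the phase rather than run().
    try:
        return _entry_points[phase_name]
    except KeyError:
        exec("def %s(phase, data): return phase(data)" % phase_name, ns)
        _entry_points[phase_name] = ns[phase_name]
        return _entry_points[phase_name]

run = make_entry_point("NormalizeTree", {})
assert run.__name__ == "NormalizeTree"
assert run(str.upper, "ok") == "OK"
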
diff --git a/contrib/tools/cython/Cython/Compiler/PyrexTypes.py b/contrib/tools/cython/Cython/Compiler/PyrexTypes.py
index 1a75d40825..3d4931cea6 100644
--- a/contrib/tools/cython/Cython/Compiler/PyrexTypes.py
+++ b/contrib/tools/cython/Cython/Compiler/PyrexTypes.py
@@ -28,15 +28,15 @@ class BaseType(object):
# List of attribute names of any subtypes
subtypes = []
_empty_declaration = None
- _specialization_name = None
+ _specialization_name = None
default_format_spec = None
def can_coerce_to_pyobject(self, env):
return False
- def can_coerce_from_pyobject(self, env):
- return False
-
+ def can_coerce_from_pyobject(self, env):
+ return False
+
def can_coerce_to_pystring(self, env, format_spec=None):
return False
@@ -52,15 +52,15 @@ class BaseType(object):
return self._empty_declaration
def specialization_name(self):
- if self._specialization_name is None:
- # This is not entirely robust.
- common_subs = (self.empty_declaration_code()
- .replace("unsigned ", "unsigned_")
- .replace("long long", "long_long")
- .replace(" ", "__"))
- self._specialization_name = re.sub(
- '[^a-zA-Z0-9_]', lambda x: '_%x_' % ord(x.group(0)), common_subs)
- return self._specialization_name
+ if self._specialization_name is None:
+ # This is not entirely robust.
+ common_subs = (self.empty_declaration_code()
+ .replace("unsigned ", "unsigned_")
+ .replace("long long", "long_long")
+ .replace(" ", "__"))
+ self._specialization_name = re.sub(
+ '[^a-zA-Z0-9_]', lambda x: '_%x_' % ord(x.group(0)), common_subs)
+ return self._specialization_name
def base_declaration_code(self, base_code, entity_code):
if entity_code:
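The cached property above wraps a simple munge: the C declaration text becomes a valid identifier, and anything outside [a-zA-Z0-9_] is hex-escaped as _<hex>_. A standalone sketch of the substitution (the free function is illustrative):

import re

def specialization_name(decl):
    common_subs = (decl.replace("unsigned ", "unsigned_")
                       .replace("long long", "long_long")
                       .replace(" ", "__"))
    return re.sub('[^a-zA-Z0-9_]', lambda x: '_%x_' % ord(x.group(0)), common_subs)

assert specialization_name("unsigned long long") == "unsigned_long_long"
assert specialization_name("float *") == "float___2a_"   # '*' is 0x2a
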
@@ -189,11 +189,11 @@ class PyrexType(BaseType):
# is_returncode boolean Is used only to signal exceptions
# is_error boolean Is the dummy error type
# is_buffer boolean Is buffer access type
- # is_pythran_expr boolean Is Pythran expr
- # is_numpy_buffer boolean Is Numpy array buffer
+ # is_pythran_expr boolean Is Pythran expr
+ # is_numpy_buffer boolean Is Numpy array buffer
# has_attributes boolean Has C dot-selectable attributes
- # default_value string Initial value that can be assigned before first user assignment.
- # declaration_value string The value statically assigned on declaration (if any).
+ # default_value string Initial value that can be assigned before first user assignment.
+ # declaration_value string The value statically assigned on declaration (if any).
# entry Entry The Entry for this type
#
# declaration_code(entity_code,
@@ -251,11 +251,11 @@ class PyrexType(BaseType):
is_buffer = 0
is_ctuple = 0
is_memoryviewslice = 0
- is_pythran_expr = 0
- is_numpy_buffer = 0
+ is_pythran_expr = 0
+ is_numpy_buffer = 0
has_attributes = 0
default_value = ""
- declaration_value = ""
+ declaration_value = ""
def resolve(self):
# If a typedef, returns the base type.
@@ -316,25 +316,25 @@ class PyrexType(BaseType):
def needs_nonecheck(self):
return 0
- def _assign_from_py_code(self, source_code, result_code, error_pos, code,
- from_py_function=None, error_condition=None, extra_args=None):
- args = ', ' + ', '.join('%s' % arg for arg in extra_args) if extra_args else ''
- convert_call = "%s(%s%s)" % (
- from_py_function or self.from_py_function,
- source_code,
- args,
- )
- if self.is_enum:
- convert_call = typecast(self, c_long_type, convert_call)
- return '%s = %s; %s' % (
- result_code,
- convert_call,
- code.error_goto_if(error_condition or self.error_condition(result_code), error_pos))
-
-
+ def _assign_from_py_code(self, source_code, result_code, error_pos, code,
+ from_py_function=None, error_condition=None, extra_args=None):
+ args = ', ' + ', '.join('%s' % arg for arg in extra_args) if extra_args else ''
+ convert_call = "%s(%s%s)" % (
+ from_py_function or self.from_py_function,
+ source_code,
+ args,
+ )
+ if self.is_enum:
+ convert_call = typecast(self, c_long_type, convert_call)
+ return '%s = %s; %s' % (
+ result_code,
+ convert_call,
+ code.error_goto_if(error_condition or self.error_condition(result_code), error_pos))
+
+
def public_decl(base_code, dll_linkage):
if dll_linkage:
- return "%s(%s)" % (dll_linkage, base_code.replace(',', ' __PYX_COMMA '))
+ return "%s(%s)" % (dll_linkage, base_code.replace(',', ' __PYX_COMMA '))
else:
return base_code
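public_decl above exists so that a function-like dll_linkage macro receives exactly one argument: any comma inside the declaration is masked as __PYX_COMMA. A standalone sketch:

def public_decl(base_code, dll_linkage):
    if dll_linkage:
        # Mask commas so the C preprocessor does not split the declaration
        # across macro arguments.
        return "%s(%s)" % (dll_linkage, base_code.replace(',', ' __PYX_COMMA '))
    return base_code

assert public_decl("int x", "DL_IMPORT") == "DL_IMPORT(int x)"
assert public_decl("void f(int, int)", "DL_IMPORT") == \
    "DL_IMPORT(void f(int __PYX_COMMA  int))"
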
@@ -509,10 +509,10 @@ class CTypedefType(BaseType):
def from_py_call_code(self, source_code, result_code, error_pos, code,
from_py_function=None, error_condition=None):
return self.typedef_base_type.from_py_call_code(
- source_code, result_code, error_pos, code,
- from_py_function or self.from_py_function,
- error_condition or self.error_condition(result_code)
- )
+ source_code, result_code, error_pos, code,
+ from_py_function or self.from_py_function,
+ error_condition or self.error_condition(result_code)
+ )
def overflow_check_binop(self, binop, env, const_rhs=False):
env.use_utility_code(UtilityCode.load("Common", "Overflow.c"))
@@ -554,10 +554,10 @@ class CTypedefType(BaseType):
def can_coerce_to_pyobject(self, env):
return self.typedef_base_type.can_coerce_to_pyobject(env)
- def can_coerce_from_pyobject(self, env):
- return self.typedef_base_type.can_coerce_from_pyobject(env)
+ def can_coerce_from_pyobject(self, env):
+ return self.typedef_base_type.can_coerce_from_pyobject(env)
+
-
class MemoryViewSliceType(PyrexType):
is_memoryviewslice = 1
@@ -635,7 +635,7 @@ class MemoryViewSliceType(PyrexType):
def same_as_resolved_type(self, other_type):
return ((other_type.is_memoryviewslice and
- #self.writable_needed == other_type.writable_needed and # FIXME: should be only uni-directional
+ #self.writable_needed == other_type.writable_needed and # FIXME: should be only uni-directional
self.dtype.same_as(other_type.dtype) and
self.axes == other_type.axes) or
other_type is error_type)
@@ -738,9 +738,9 @@ class MemoryViewSliceType(PyrexType):
elif attribute in ("is_c_contig", "is_f_contig"):
# is_c_contig and is_f_contig functions
-        for (c_or_f, cython_name) in (('C', 'is_c_contig'), ('F', 'is_f_contig')):
-            is_contig_name = MemoryView.get_is_contig_func_name(c_or_f, self.ndim)
+        for (c_or_f, cython_name) in (('C', 'is_c_contig'), ('F', 'is_f_contig')):
+            is_contig_name = MemoryView.get_is_contig_func_name(c_or_f, self.ndim)
cfunctype = CFuncType(
return_type=c_bint_type,
@@ -754,7 +754,7 @@ class MemoryViewSliceType(PyrexType):
defining=1,
cname=is_contig_name)
- entry.utility_code_definition = MemoryView.get_is_contig_utility(c_or_f, self.ndim)
+ entry.utility_code_definition = MemoryView.get_is_contig_utility(c_or_f, self.ndim)
return True
@@ -787,21 +787,21 @@ class MemoryViewSliceType(PyrexType):
src = self
- #if not copying and self.writable_needed and not dst.writable_needed:
- # return False
-
- src_dtype, dst_dtype = src.dtype, dst.dtype
- if dst_dtype.is_const:
- # Requesting read-only views is always ok => consider only the non-const base type.
- dst_dtype = dst_dtype.const_base_type
- if src_dtype.is_const:
- # When assigning between read-only views, compare only the non-const base types.
- src_dtype = src_dtype.const_base_type
- elif copying and src_dtype.is_const:
- # Copying by value => ignore const on source.
- src_dtype = src_dtype.const_base_type
-
- if src_dtype != dst_dtype:
+ #if not copying and self.writable_needed and not dst.writable_needed:
+ # return False
+
+ src_dtype, dst_dtype = src.dtype, dst.dtype
+ if dst_dtype.is_const:
+ # Requesting read-only views is always ok => consider only the non-const base type.
+ dst_dtype = dst_dtype.const_base_type
+ if src_dtype.is_const:
+ # When assigning between read-only views, compare only the non-const base types.
+ src_dtype = src_dtype.const_base_type
+ elif copying and src_dtype.is_const:
+ # Copying by value => ignore const on source.
+ src_dtype = src_dtype.const_base_type
+
+ if src_dtype != dst_dtype:
return False
if src.ndim != dst.ndim:
@@ -875,9 +875,9 @@ class MemoryViewSliceType(PyrexType):
def can_coerce_to_pyobject(self, env):
return True
- def can_coerce_from_pyobject(self, env):
- return True
-
+ def can_coerce_from_pyobject(self, env):
+ return True
+
def check_for_null_code(self, cname):
return cname + '.memview'
@@ -919,12 +919,12 @@ class MemoryViewSliceType(PyrexType):
def from_py_call_code(self, source_code, result_code, error_pos, code,
from_py_function=None, error_condition=None):
- # NOTE: auto-detection of readonly buffers is disabled:
- # writable = self.writable_needed or not self.dtype.is_const
- writable = not self.dtype.is_const
- return self._assign_from_py_code(
- source_code, result_code, error_pos, code, from_py_function, error_condition,
- extra_args=['PyBUF_WRITABLE' if writable else '0'])
+ # NOTE: auto-detection of readonly buffers is disabled:
+ # writable = self.writable_needed or not self.dtype.is_const
+ writable = not self.dtype.is_const
+ return self._assign_from_py_code(
+ source_code, result_code, error_pos, code, from_py_function, error_condition,
+ extra_args=['PyBUF_WRITABLE' if writable else '0'])
def create_to_py_utility_code(self, env):
self._dtype_to_py_func, self._dtype_from_py_func = self.dtype_object_conversion_funcs(env)
@@ -952,29 +952,29 @@ class MemoryViewSliceType(PyrexType):
if self.dtype.is_pyobject:
utility_name = "MemviewObjectToObject"
else:
- self.dtype.create_to_py_utility_code(env)
- to_py_function = self.dtype.to_py_function
-
- from_py_function = None
- if not self.dtype.is_const:
- self.dtype.create_from_py_utility_code(env)
- from_py_function = self.dtype.from_py_function
-
- if not (to_py_function or from_py_function):
+ self.dtype.create_to_py_utility_code(env)
+ to_py_function = self.dtype.to_py_function
+
+ from_py_function = None
+ if not self.dtype.is_const:
+ self.dtype.create_from_py_utility_code(env)
+ from_py_function = self.dtype.from_py_function
+
+ if not (to_py_function or from_py_function):
return "NULL", "NULL"
- if not to_py_function:
+ if not to_py_function:
get_function = "NULL"
- if not from_py_function:
+ if not from_py_function:
set_function = "NULL"
utility_name = "MemviewDtypeToObject"
error_condition = (self.dtype.error_condition('value') or
'PyErr_Occurred()')
context.update(
- to_py_function=to_py_function,
- from_py_function=from_py_function,
- dtype=self.dtype.empty_declaration_code(),
- error_condition=error_condition,
+ to_py_function=to_py_function,
+ from_py_function=from_py_function,
+ dtype=self.dtype.empty_declaration_code(),
+ error_condition=error_condition,
)
utility = TempitaUtilityCode.load_cached(
@@ -1059,14 +1059,14 @@ class BufferType(BaseType):
self.mode = mode
self.negative_indices = negative_indices
self.cast = cast
- self.is_numpy_buffer = self.base.name == "ndarray"
+ self.is_numpy_buffer = self.base.name == "ndarray"
def can_coerce_to_pyobject(self,env):
return True
- def can_coerce_from_pyobject(self,env):
- return True
-
+ def can_coerce_from_pyobject(self,env):
+ return True
+
def as_argument_type(self):
return self
@@ -1124,7 +1124,7 @@ class PyObjectType(PyrexType):
name = "object"
is_pyobject = 1
default_value = "0"
- declaration_value = "0"
+ declaration_value = "0"
buffer_defaults = None
is_extern = False
is_subclassed = False
@@ -1139,9 +1139,9 @@ class PyObjectType(PyrexType):
def can_coerce_to_pyobject(self, env):
return True
- def can_coerce_from_pyobject(self, env):
- return True
-
+ def can_coerce_from_pyobject(self, env):
+ return True
+
def default_coerced_ctype(self):
"""The default C type that this Python type coerces to, or None."""
return None
@@ -1344,13 +1344,13 @@ class PyExtensionType(PyObjectType):
# vtabstruct_cname string Name of C method table struct
# vtabptr_cname string Name of pointer to C method table
# vtable_cname string Name of C method table definition
- # early_init boolean Whether to initialize early (as opposed to during module execution).
+ # early_init boolean Whether to initialize early (as opposed to during module execution).
# defered_declarations [thunk] Used to declare class hierarchies in order
# check_size 'warn', 'error', 'ignore' What to do if tp_basicsize does not match
is_extension_type = 1
has_attributes = 1
- early_init = 1
+ early_init = 1
objtypedef_cname = None
@@ -1478,9 +1478,9 @@ class CType(PyrexType):
def can_coerce_to_pyobject(self, env):
return self.create_to_py_utility_code(env)
- def can_coerce_from_pyobject(self, env):
- return self.create_from_py_utility_code(env)
-
+ def can_coerce_from_pyobject(self, env):
+ return self.create_from_py_utility_code(env)
+
def error_condition(self, result_code):
conds = []
if self.is_string or self.is_pyunicode_ptr:
@@ -1511,53 +1511,53 @@ class CType(PyrexType):
def from_py_call_code(self, source_code, result_code, error_pos, code,
from_py_function=None, error_condition=None):
- return self._assign_from_py_code(
- source_code, result_code, error_pos, code, from_py_function, error_condition)
-
-
-
-class PythranExpr(CType):
- # Pythran object of a given type
-
- to_py_function = "__Pyx_pythran_to_python"
- is_pythran_expr = True
- writable = True
- has_attributes = 1
-
- def __init__(self, pythran_type, org_buffer=None):
- self.org_buffer = org_buffer
- self.pythran_type = pythran_type
- self.name = self.pythran_type
- self.cname = self.pythran_type
- self.from_py_function = "from_python<%s>" % (self.pythran_type)
- self.scope = None
-
- def declaration_code(self, entity_code, for_display=0, dll_linkage=None, pyrex=0):
- assert not pyrex
- return "%s %s" % (self.cname, entity_code)
-
- def attributes_known(self):
- if self.scope is None:
- from . import Symtab
- # FIXME: fake C scope, might be better represented by a struct or C++ class scope
- self.scope = scope = Symtab.CClassScope('', None, visibility="extern")
- scope.parent_type = self
- scope.directives = {}
- scope.declare_var("shape", CPtrType(c_long_type), None, cname="_shape", is_cdef=True)
- scope.declare_var("ndim", c_long_type, None, cname="value", is_cdef=True)
-
- return True
-
- def __eq__(self, other):
- return isinstance(other, PythranExpr) and self.pythran_type == other.pythran_type
-
- def __ne__(self, other):
- return not (isinstance(other, PythranExpr) and self.pythran_type == other.pythran_type)
-
- def __hash__(self):
- return hash(self.pythran_type)
-
-
+ return self._assign_from_py_code(
+ source_code, result_code, error_pos, code, from_py_function, error_condition)
+
+
+
+class PythranExpr(CType):
+ # Pythran object of a given type
+
+ to_py_function = "__Pyx_pythran_to_python"
+ is_pythran_expr = True
+ writable = True
+ has_attributes = 1
+
+ def __init__(self, pythran_type, org_buffer=None):
+ self.org_buffer = org_buffer
+ self.pythran_type = pythran_type
+ self.name = self.pythran_type
+ self.cname = self.pythran_type
+ self.from_py_function = "from_python<%s>" % (self.pythran_type)
+ self.scope = None
+
+ def declaration_code(self, entity_code, for_display=0, dll_linkage=None, pyrex=0):
+ assert not pyrex
+ return "%s %s" % (self.cname, entity_code)
+
+ def attributes_known(self):
+ if self.scope is None:
+ from . import Symtab
+ # FIXME: fake C scope, might be better represented by a struct or C++ class scope
+ self.scope = scope = Symtab.CClassScope('', None, visibility="extern")
+ scope.parent_type = self
+ scope.directives = {}
+ scope.declare_var("shape", CPtrType(c_long_type), None, cname="_shape", is_cdef=True)
+ scope.declare_var("ndim", c_long_type, None, cname="value", is_cdef=True)
+
+ return True
+
+ def __eq__(self, other):
+ return isinstance(other, PythranExpr) and self.pythran_type == other.pythran_type
+
+ def __ne__(self, other):
+ return not (isinstance(other, PythranExpr) and self.pythran_type == other.pythran_type)
+
+ def __hash__(self):
+ return hash(self.pythran_type)
+
+
class CConstType(BaseType):
is_const = 1
@@ -1594,20 +1594,20 @@ class CConstType(BaseType):
def can_coerce_to_pyobject(self, env):
return self.const_base_type.can_coerce_to_pyobject(env)
- def can_coerce_from_pyobject(self, env):
- return self.const_base_type.can_coerce_from_pyobject(env)
-
+ def can_coerce_from_pyobject(self, env):
+ return self.const_base_type.can_coerce_from_pyobject(env)
+
def create_to_py_utility_code(self, env):
if self.const_base_type.create_to_py_utility_code(env):
self.to_py_function = self.const_base_type.to_py_function
return True
- def same_as_resolved_type(self, other_type):
- if other_type.is_const:
- return self.const_base_type.same_as_resolved_type(other_type.const_base_type)
- # Accept const LHS <- non-const RHS.
- return self.const_base_type.same_as_resolved_type(other_type)
-
+ def same_as_resolved_type(self, other_type):
+ if other_type.is_const:
+ return self.const_base_type.same_as_resolved_type(other_type.const_base_type)
+ # Accept const LHS <- non-const RHS.
+ return self.const_base_type.same_as_resolved_type(other_type)
+
def __getattr__(self, name):
return getattr(self.const_base_type, name)
@@ -1778,9 +1778,9 @@ class ForbidUseClass:
ForbidUse = ForbidUseClass()
-class CIntLike(object):
- """Mixin for shared behaviour of C integers and enums.
- """
+class CIntLike(object):
+ """Mixin for shared behaviour of C integers and enums.
+ """
to_py_function = None
from_py_function = None
to_pyunicode_utility = None
@@ -1789,27 +1789,27 @@ class CIntLike(object):
def can_coerce_to_pyobject(self, env):
return True
- def can_coerce_from_pyobject(self, env):
- return True
-
- def create_to_py_utility_code(self, env):
- if type(self).to_py_function is None:
- self.to_py_function = "__Pyx_PyInt_From_" + self.specialization_name()
- env.use_utility_code(TempitaUtilityCode.load_cached(
- "CIntToPy", "TypeConversion.c",
- context={"TYPE": self.empty_declaration_code(),
- "TO_PY_FUNCTION": self.to_py_function}))
- return True
-
- def create_from_py_utility_code(self, env):
- if type(self).from_py_function is None:
- self.from_py_function = "__Pyx_PyInt_As_" + self.specialization_name()
- env.use_utility_code(TempitaUtilityCode.load_cached(
- "CIntFromPy", "TypeConversion.c",
- context={"TYPE": self.empty_declaration_code(),
- "FROM_PY_FUNCTION": self.from_py_function}))
- return True
-
+ def can_coerce_from_pyobject(self, env):
+ return True
+
+ def create_to_py_utility_code(self, env):
+ if type(self).to_py_function is None:
+ self.to_py_function = "__Pyx_PyInt_From_" + self.specialization_name()
+ env.use_utility_code(TempitaUtilityCode.load_cached(
+ "CIntToPy", "TypeConversion.c",
+ context={"TYPE": self.empty_declaration_code(),
+ "TO_PY_FUNCTION": self.to_py_function}))
+ return True
+
+ def create_from_py_utility_code(self, env):
+ if type(self).from_py_function is None:
+ self.from_py_function = "__Pyx_PyInt_As_" + self.specialization_name()
+ env.use_utility_code(TempitaUtilityCode.load_cached(
+ "CIntFromPy", "TypeConversion.c",
+ context={"TYPE": self.empty_declaration_code(),
+ "FROM_PY_FUNCTION": self.from_py_function}))
+ return True
+
@staticmethod
def _parse_format(format_spec):
padding = ' '
@@ -1853,12 +1853,12 @@ class CIntLike(object):
return "%s(%s, %d, '%s', '%s')" % (utility_code_name, cvalue, width, padding_char, format_type)
-class CIntType(CIntLike, CNumericType):
-    is_int = 1
-    typedef_flag = 0
-    exception_value = -1
-
+class CIntType(CIntLike, CNumericType):
+
+    is_int = 1
+    typedef_flag = 0
+    exception_value = -1
def get_to_py_type_conversion(self):
if self.rank < list(rank_to_type_name).index('int'):
# This assumes sizeof(short) < sizeof(int)
@@ -2171,8 +2171,8 @@ class CComplexType(CNumericType):
if (not src_type.is_complex and src_type.is_numeric and src_type.is_typedef
and src_type.typedef_is_external):
return False
- elif src_type.is_pyobject:
- return True
+ elif src_type.is_pyobject:
+ return True
else:
return super(CComplexType, self).assignable_from(src_type)
@@ -2225,9 +2225,9 @@ class CComplexType(CNumericType):
def can_coerce_to_pyobject(self, env):
return True
- def can_coerce_from_pyobject(self, env):
- return True
-
+ def can_coerce_from_pyobject(self, env):
+ return True
+
def create_to_py_utility_code(self, env):
env.use_utility_code(UtilityCode.load_cached('ToPy', 'Complex.c'))
return True
@@ -2274,25 +2274,25 @@ complex_ops = {
}
-class CPyTSSTType(CType):
- #
- # PEP-539 "Py_tss_t" type
- #
-
- declaration_value = "Py_tss_NEEDS_INIT"
-
- def __repr__(self):
- return "<Py_tss_t>"
-
- def declaration_code(self, entity_code,
- for_display=0, dll_linkage=None, pyrex=0):
- if pyrex or for_display:
- base_code = "Py_tss_t"
- else:
- base_code = public_decl("Py_tss_t", dll_linkage)
- return self.base_declaration_code(base_code, entity_code)
-
-
+class CPyTSSTType(CType):
+ #
+ # PEP-539 "Py_tss_t" type
+ #
+
+ declaration_value = "Py_tss_NEEDS_INIT"
+
+ def __repr__(self):
+ return "<Py_tss_t>"
+
+ def declaration_code(self, entity_code,
+ for_display=0, dll_linkage=None, pyrex=0):
+ if pyrex or for_display:
+ base_code = "Py_tss_t"
+ else:
+ base_code = public_decl("Py_tss_t", dll_linkage)
+ return self.base_declaration_code(base_code, entity_code)
+
+
class CPointerBaseType(CType):
# common base type for pointer/array types
#
@@ -2316,17 +2316,17 @@ class CPointerBaseType(CType):
if base_type.signed == 2:
self.to_py_function = "__Pyx_PyObject_FromCString"
if self.is_ptr:
- self.from_py_function = "__Pyx_PyObject_As%sSString"
+ self.from_py_function = "__Pyx_PyObject_As%sSString"
elif base_type.signed:
self.to_py_function = "__Pyx_PyObject_FromString"
if self.is_ptr:
- self.from_py_function = "__Pyx_PyObject_As%sString"
+ self.from_py_function = "__Pyx_PyObject_As%sString"
else:
self.to_py_function = "__Pyx_PyObject_FromCString"
if self.is_ptr:
- self.from_py_function = "__Pyx_PyObject_As%sUString"
- if self.is_ptr:
- self.from_py_function %= '' if self.base_type.is_const else 'Writable'
+ self.from_py_function = "__Pyx_PyObject_As%sUString"
+ if self.is_ptr:
+ self.from_py_function %= '' if self.base_type.is_const else 'Writable'
self.exception_value = "NULL"
elif self.is_pyunicode_ptr and not base_type.is_error:
self.to_py_function = "__Pyx_PyUnicode_FromUnicode"
@@ -2415,21 +2415,21 @@ class CArrayType(CPointerBaseType):
if isinstance(actual, CArrayType):
return self.base_type.deduce_template_params(actual.base_type)
else:
- return {}
+ return {}
def can_coerce_to_pyobject(self, env):
return self.base_type.can_coerce_to_pyobject(env)
- def can_coerce_from_pyobject(self, env):
- return self.base_type.can_coerce_from_pyobject(env)
-
+ def can_coerce_from_pyobject(self, env):
+ return self.base_type.can_coerce_from_pyobject(env)
+
def create_to_py_utility_code(self, env):
if self.to_py_function is not None:
return self.to_py_function
if not self.base_type.create_to_py_utility_code(env):
return False
- safe_typename = self.base_type.specialization_name()
+ safe_typename = self.base_type.specialization_name()
to_py_function = "__Pyx_carray_to_py_%s" % safe_typename
to_tuple_function = "__Pyx_carray_to_tuple_%s" % safe_typename
@@ -2437,7 +2437,7 @@ class CArrayType(CPointerBaseType):
context = {
'cname': to_py_function,
'to_tuple_cname': to_tuple_function,
- 'base_type': self.base_type,
+ 'base_type': self.base_type,
}
env.use_utility_code(CythonUtilityCode.load(
"carray.to_py", "CConvert.pyx",
@@ -2467,12 +2467,12 @@ class CArrayType(CPointerBaseType):
if not self.base_type.create_from_py_utility_code(env):
return False
- from_py_function = "__Pyx_carray_from_py_%s" % self.base_type.specialization_name()
+ from_py_function = "__Pyx_carray_from_py_%s" % self.base_type.specialization_name()
from .UtilityCode import CythonUtilityCode
context = {
'cname': from_py_function,
- 'base_type': self.base_type,
+ 'base_type': self.base_type,
}
env.use_utility_code(CythonUtilityCode.load(
"carray.from_py", "CConvert.pyx",
@@ -2483,7 +2483,7 @@ class CArrayType(CPointerBaseType):
def from_py_call_code(self, source_code, result_code, error_pos, code,
from_py_function=None, error_condition=None):
- assert not error_condition, '%s: %s' % (error_pos, error_condition)
+ assert not error_condition, '%s: %s' % (error_pos, error_condition)
call_code = "%s(%s, %s, %s)" % (
from_py_function or self.from_py_function,
source_code, result_code, self.size)
@@ -2554,7 +2554,7 @@ class CPtrType(CPointerBaseType):
if isinstance(actual, CPtrType):
return self.base_type.deduce_template_params(actual.base_type)
else:
- return {}
+ return {}
def invalid_value(self):
return "1"
@@ -2683,19 +2683,19 @@ class CFuncType(CType):
",".join(arg_reprs),
except_clause)
- def with_with_gil(self, with_gil):
- if with_gil == self.with_gil:
- return self
- else:
- return CFuncType(
- self.return_type, self.args, self.has_varargs,
- self.exception_value, self.exception_check,
- self.calling_convention, self.nogil,
- with_gil,
- self.is_overridable, self.optional_arg_count,
- self.is_const_method, self.is_static_method,
- self.templates, self.is_strict_signature)
-
+ def with_with_gil(self, with_gil):
+ if with_gil == self.with_gil:
+ return self
+ else:
+ return CFuncType(
+ self.return_type, self.args, self.has_varargs,
+ self.exception_value, self.exception_check,
+ self.calling_convention, self.nogil,
+ with_gil,
+ self.is_overridable, self.optional_arg_count,
+ self.is_const_method, self.is_static_method,
+ self.templates, self.is_strict_signature)
+
def calling_convention_prefix(self):
cc = self.calling_convention
if cc:
@@ -2710,11 +2710,11 @@ class CFuncType(CType):
return self.same_c_signature_as_resolved_type(
other_type.resolve(), as_cmethod)
- def same_c_signature_as_resolved_type(self, other_type, as_cmethod=False, as_pxd_definition=False,
- exact_semantics=True):
- # If 'exact_semantics' is false, allow any equivalent C signatures
- # if the Cython semantics are compatible, i.e. the same or wider for 'other_type'.
-
+ def same_c_signature_as_resolved_type(self, other_type, as_cmethod=False, as_pxd_definition=False,
+ exact_semantics=True):
+ # If 'exact_semantics' is false, allow any equivalent C signatures
+ # if the Cython semantics are compatible, i.e. the same or wider for 'other_type'.
+
#print "CFuncType.same_c_signature_as_resolved_type:", \
# self, other_type, "as_cmethod =", as_cmethod ###
if other_type is error_type:
@@ -2736,21 +2736,21 @@ class CFuncType(CType):
return 0
if self.optional_arg_count != other_type.optional_arg_count:
return 0
- if as_pxd_definition:
- # A narrowing of the return type declared in the pxd is allowed.
- if not self.return_type.subtype_of_resolved_type(other_type.return_type):
- return 0
- else:
- if not self.return_type.same_as(other_type.return_type):
- return 0
+ if as_pxd_definition:
+ # A narrowing of the return type declared in the pxd is allowed.
+ if not self.return_type.subtype_of_resolved_type(other_type.return_type):
+ return 0
+ else:
+ if not self.return_type.same_as(other_type.return_type):
+ return 0
if not self.same_calling_convention_as(other_type):
return 0
- if exact_semantics:
- if self.exception_check != other_type.exception_check:
- return 0
- if not self._same_exception_value(other_type.exception_value):
- return 0
- elif not self._is_exception_compatible_with(other_type):
+ if exact_semantics:
+ if self.exception_check != other_type.exception_check:
+ return 0
+ if not self._same_exception_value(other_type.exception_value):
+ return 0
+ elif not self._is_exception_compatible_with(other_type):
return 0
return 1
@@ -2802,25 +2802,25 @@ class CFuncType(CType):
return 0
if self.nogil != other_type.nogil:
return 0
- if not self._is_exception_compatible_with(other_type):
+ if not self._is_exception_compatible_with(other_type):
return 0
self.original_sig = other_type.original_sig or other_type
return 1
- def _is_exception_compatible_with(self, other_type):
- # narrower exception checks are ok, but prevent mismatches
- if self.exception_check == '+' and other_type.exception_check != '+':
- # must catch C++ exceptions if we raise them
- return 0
- if not other_type.exception_check or other_type.exception_value is not None:
- # if other does not *always* check exceptions, self must comply
- if not self._same_exception_value(other_type.exception_value):
- return 0
- if self.exception_check and self.exception_check != other_type.exception_check:
- # a redundant exception check doesn't make functions incompatible, but a missing one does
- return 0
- return 1
-
+ def _is_exception_compatible_with(self, other_type):
+ # narrower exception checks are ok, but prevent mismatches
+ if self.exception_check == '+' and other_type.exception_check != '+':
+ # must catch C++ exceptions if we raise them
+ return 0
+ if not other_type.exception_check or other_type.exception_value is not None:
+ # if other does not *always* check exceptions, self must comply
+ if not self._same_exception_value(other_type.exception_value):
+ return 0
+ if self.exception_check and self.exception_check != other_type.exception_check:
+ # a redundant exception check doesn't make functions incompatible, but a missing one does
+ return 0
+ return 1
+
def narrower_c_signature_than(self, other_type, as_cmethod = 0):
return self.narrower_c_signature_than_resolved_type(other_type.resolve(), as_cmethod)
@@ -2865,18 +2865,18 @@ class CFuncType(CType):
sc2 = other.calling_convention == '__stdcall'
return sc1 == sc2
- def same_as_resolved_type(self, other_type, as_cmethod=False):
- return self.same_c_signature_as_resolved_type(other_type, as_cmethod=as_cmethod) \
+ def same_as_resolved_type(self, other_type, as_cmethod=False):
+ return self.same_c_signature_as_resolved_type(other_type, as_cmethod=as_cmethod) \
and self.nogil == other_type.nogil
- def pointer_assignable_from_resolved_type(self, rhs_type):
- # Accept compatible exception/nogil declarations for the RHS.
- if rhs_type is error_type:
- return 1
- if not rhs_type.is_cfunction:
- return 0
- return rhs_type.same_c_signature_as_resolved_type(self, exact_semantics=False) \
- and not (self.nogil and not rhs_type.nogil)
+ def pointer_assignable_from_resolved_type(self, rhs_type):
+ # Accept compatible exception/nogil declarations for the RHS.
+ if rhs_type is error_type:
+ return 1
+ if not rhs_type.is_cfunction:
+ return 0
+ return rhs_type.same_c_signature_as_resolved_type(self, exact_semantics=False) \
+ and not (self.nogil and not rhs_type.nogil)
def declaration_code(self, entity_code,
for_display = 0, dll_linkage = None, pyrex = 0,
@@ -2991,7 +2991,7 @@ class CFuncType(CType):
result = []
permutations = self.get_all_specialized_permutations()
- new_cfunc_entries = []
+ new_cfunc_entries = []
for cname, fused_to_specific in permutations:
new_func_type = self.entry.type.specialize(fused_to_specific)
@@ -3006,16 +3006,16 @@ class CFuncType(CType):
new_func_type.entry = new_entry
result.append(new_func_type)
-            new_cfunc_entries.append(new_entry)
-        cfunc_entries = self.entry.scope.cfunc_entries
-        try:
-            cindex = cfunc_entries.index(self.entry)
-        except ValueError:
-            cfunc_entries.extend(new_cfunc_entries)
-        else:
-            cfunc_entries[cindex:cindex+1] = new_cfunc_entries
-
+            new_cfunc_entries.append(new_entry)
+
+        cfunc_entries = self.entry.scope.cfunc_entries
+        try:
+            cindex = cfunc_entries.index(self.entry)
+        except ValueError:
+            cfunc_entries.extend(new_cfunc_entries)
+        else:
+            cfunc_entries[cindex:cindex+1] = new_cfunc_entries
self.cached_specialized_types = result
return result
@@ -3228,18 +3228,18 @@ class CFuncTypeArg(BaseType):
or_none = False
accept_none = True
accept_builtin_subtypes = False
- annotation = None
+ annotation = None
subtypes = ['type']
- def __init__(self, name, type, pos, cname=None, annotation=None):
+ def __init__(self, name, type, pos, cname=None, annotation=None):
self.name = name
if cname is not None:
self.cname = cname
else:
self.cname = Naming.var_prefix + name
- if annotation is not None:
- self.annotation = annotation
+ if annotation is not None:
+ self.annotation = annotation
self.type = type
self.pos = pos
self.needs_type_test = False # TODO: should these defaults be set in analyse_types()?
@@ -3253,7 +3253,7 @@ class CFuncTypeArg(BaseType):
def specialize(self, values):
return CFuncTypeArg(self.name, self.type.specialize(values), self.pos, self.cname)
-
+
class ToPyStructUtilityCode(object):
requires = None
@@ -3281,8 +3281,8 @@ class ToPyStructUtilityCode(object):
code.putln("%s {" % self.header)
code.putln("PyObject* res;")
code.putln("PyObject* member;")
- code.putln("res = __Pyx_PyDict_NewPresized(%d); if (unlikely(!res)) return NULL;" %
- len(self.type.scope.var_entries))
+ code.putln("res = __Pyx_PyDict_NewPresized(%d); if (unlikely(!res)) return NULL;" %
+ len(self.type.scope.var_entries))
for member in self.type.scope.var_entries:
nameconst_cname = code.get_py_string_const(member.name, identifier=True)
code.putln("%s; if (unlikely(!member)) goto bad;" % (
@@ -3336,10 +3336,10 @@ class CStructOrUnionType(CType):
self._convert_from_py_code = None
self.packed = packed
- def can_coerce_to_pyobject(self, env):
- if self._convert_to_py_code is False:
- return None # tri-state-ish
-
+ def can_coerce_to_pyobject(self, env):
+ if self._convert_to_py_code is False:
+ return None # tri-state-ish
+
if env.outer_scope is None:
return False
@@ -3349,7 +3349,7 @@ class CStructOrUnionType(CType):
safe_union_types = set()
for member in self.scope.var_entries:
member_type = member.type
- if not member_type.can_coerce_to_pyobject(env):
+ if not member_type.can_coerce_to_pyobject(env):
self.to_py_function = None
self._convert_to_py_code = False
return False
@@ -3365,29 +3365,29 @@ class CStructOrUnionType(CType):
self._convert_from_py_code = False
return False
- return True
-
- def create_to_py_utility_code(self, env):
- if not self.can_coerce_to_pyobject(env):
- return False
-
- if self._convert_to_py_code is None:
- for member in self.scope.var_entries:
- member.type.create_to_py_utility_code(env)
+ return True
+
+ def create_to_py_utility_code(self, env):
+ if not self.can_coerce_to_pyobject(env):
+ return False
+
+ if self._convert_to_py_code is None:
+ for member in self.scope.var_entries:
+ member.type.create_to_py_utility_code(env)
forward_decl = self.entry.visibility != 'extern' and not self.typedef_flag
self._convert_to_py_code = ToPyStructUtilityCode(self, forward_decl, env)
env.use_utility_code(self._convert_to_py_code)
return True
- def can_coerce_from_pyobject(self, env):
- if env.outer_scope is None or self._convert_from_py_code is False:
- return False
- for member in self.scope.var_entries:
- if not member.type.can_coerce_from_pyobject(env):
- return False
- return True
-
+ def can_coerce_from_pyobject(self, env):
+ if env.outer_scope is None or self._convert_from_py_code is False:
+ return False
+ for member in self.scope.var_entries:
+ if not member.type.can_coerce_from_pyobject(env):
+ return False
+ return True
+
def create_from_py_utility_code(self, env):
if env.outer_scope is None:
return False
@@ -3396,11 +3396,11 @@ class CStructOrUnionType(CType):
return None # tri-state-ish
if self._convert_from_py_code is None:
- if not self.scope.var_entries:
- # There are obviously missing fields; don't allow instantiation
- # where absolutely no content is provided.
- return False
-
+ if not self.scope.var_entries:
+ # There are obviously missing fields; don't allow instantiation
+ # where absolutely no content is provided.
+ return False
+
for member in self.scope.var_entries:
if not member.type.create_from_py_utility_code(env):
self.from_py_function = None
@@ -3408,7 +3408,7 @@ class CStructOrUnionType(CType):
return False
context = dict(
- struct_type=self,
+ struct_type=self,
var_entries=self.scope.var_entries,
funcname=self.from_py_function,
)
@@ -3543,34 +3543,34 @@ class CppClassType(CType):
else:
return ''
- def can_coerce_from_pyobject(self, env):
- if self.cname in builtin_cpp_conversions:
- template_count = builtin_cpp_conversions[self.cname]
- for ix, T in enumerate(self.templates or []):
- if ix >= template_count:
- break
- if T.is_pyobject or not T.can_coerce_from_pyobject(env):
- return False
- return True
- elif self.cname in cpp_string_conversions:
- return True
- return False
-
+ def can_coerce_from_pyobject(self, env):
+ if self.cname in builtin_cpp_conversions:
+ template_count = builtin_cpp_conversions[self.cname]
+ for ix, T in enumerate(self.templates or []):
+ if ix >= template_count:
+ break
+ if T.is_pyobject or not T.can_coerce_from_pyobject(env):
+ return False
+ return True
+ elif self.cname in cpp_string_conversions:
+ return True
+ return False
+
def create_from_py_utility_code(self, env):
if self.from_py_function is not None:
return True
if self.cname in builtin_cpp_conversions or self.cname in cpp_string_conversions:
X = "XYZABC"
tags = []
- context = {}
+ context = {}
for ix, T in enumerate(self.templates or []):
if ix >= builtin_cpp_conversions[self.cname]:
break
if T.is_pyobject or not T.create_from_py_utility_code(env):
return False
tags.append(T.specialization_name())
- context[X[ix]] = T
-
+ context[X[ix]] = T
+
if self.cname in cpp_string_conversions:
cls = 'string'
tags = type_identifier(self),
@@ -3579,42 +3579,42 @@ class CppClassType(CType):
else:
cls = 'arcadia_' + self.cname
cname = '__pyx_convert_%s_from_py_%s' % (cls, '__and_'.join(tags))
- context.update({
+ context.update({
'cname': cname,
'maybe_unordered': self.maybe_unordered(),
'type': self.cname,
- })
+ })
from .UtilityCode import CythonUtilityCode
env.use_utility_code(CythonUtilityCode.load(
- cls.replace('unordered_', '') + ".from_py", "CppConvert.pyx",
- context=context, compiler_directives=env.directives))
+ cls.replace('unordered_', '') + ".from_py", "CppConvert.pyx",
+ context=context, compiler_directives=env.directives))
self.from_py_function = cname
return True
- def can_coerce_to_pyobject(self, env):
- if self.cname in builtin_cpp_conversions or self.cname in cpp_string_conversions:
- for ix, T in enumerate(self.templates or []):
- if ix >= builtin_cpp_conversions[self.cname]:
- break
- if T.is_pyobject or not T.can_coerce_to_pyobject(env):
- return False
- return True
-
+ def can_coerce_to_pyobject(self, env):
+ if self.cname in builtin_cpp_conversions or self.cname in cpp_string_conversions:
+ for ix, T in enumerate(self.templates or []):
+ if ix >= builtin_cpp_conversions[self.cname]:
+ break
+ if T.is_pyobject or not T.can_coerce_to_pyobject(env):
+ return False
+ return True
+
def create_to_py_utility_code(self, env):
if self.to_py_function is not None:
return True
if self.cname in builtin_cpp_conversions or self.cname in cpp_string_conversions:
X = "XYZABC"
tags = []
- context = {}
+ context = {}
for ix, T in enumerate(self.templates or []):
if ix >= builtin_cpp_conversions[self.cname]:
break
if not T.create_to_py_utility_code(env):
return False
tags.append(T.specialization_name())
- context[X[ix]] = T
-
+ context[X[ix]] = T
+
if self.cname in cpp_string_conversions:
cls = 'string'
prefix = 'PyObject_' # gets specialised by explicit type casts in CoerceToPyTypeNode
@@ -3626,15 +3626,15 @@ class CppClassType(CType):
cls = 'arcadia_' + self.cname
prefix = ''
cname = "__pyx_convert_%s%s_to_py_%s" % (prefix, cls, "____".join(tags))
- context.update({
+ context.update({
'cname': cname,
'maybe_unordered': self.maybe_unordered(),
'type': self.cname,
- })
+ })
from .UtilityCode import CythonUtilityCode
env.use_utility_code(CythonUtilityCode.load(
- cls.replace('unordered_', '') + ".to_py", "CppConvert.pyx",
- context=context, compiler_directives=env.directives))
+ cls.replace('unordered_', '') + ".to_py", "CppConvert.pyx",
+ context=context, compiler_directives=env.directives))
self.to_py_function = cname
return True
@@ -3718,33 +3718,33 @@ class CppClassType(CType):
return specialized
def deduce_template_params(self, actual):
- if actual.is_const:
- actual = actual.const_base_type
- if actual.is_reference:
- actual = actual.ref_base_type
+ if actual.is_const:
+ actual = actual.const_base_type
+ if actual.is_reference:
+ actual = actual.ref_base_type
if self == actual:
return {}
elif actual.is_cpp_class:
- self_template_type = self
- while getattr(self_template_type, 'template_type', None):
- self_template_type = self_template_type.template_type
+ self_template_type = self
+ while getattr(self_template_type, 'template_type', None):
+ self_template_type = self_template_type.template_type
def all_bases(cls):
yield cls
for parent in cls.base_classes:
for base in all_bases(parent):
yield base
for actual_base in all_bases(actual):
- template_type = actual_base
- while getattr(template_type, 'template_type', None):
- template_type = template_type.template_type
- if (self_template_type.empty_declaration_code()
- == template_type.empty_declaration_code()):
- return reduce(
- merge_template_deductions,
- [formal_param.deduce_template_params(actual_param)
- for (formal_param, actual_param)
- in zip(self.templates, actual_base.templates)],
- {})
+ template_type = actual_base
+ while getattr(template_type, 'template_type', None):
+ template_type = template_type.template_type
+ if (self_template_type.empty_declaration_code()
+ == template_type.empty_declaration_code()):
+ return reduce(
+ merge_template_deductions,
+ [formal_param.deduce_template_params(actual_param)
+ for (formal_param, actual_param)
+ in zip(self.templates, actual_base.templates)],
+ {})
else:
return {}
@@ -3781,28 +3781,28 @@ class CppClassType(CType):
return 1
return 0
- def subclass_dist(self, super_type):
- if self.same_as_resolved_type(super_type):
- return 0
- elif not self.base_classes:
- return float('inf')
- else:
- return 1 + min(b.subclass_dist(super_type) for b in self.base_classes)
-
+ def subclass_dist(self, super_type):
+ if self.same_as_resolved_type(super_type):
+ return 0
+ elif not self.base_classes:
+ return float('inf')
+ else:
+ return 1 + min(b.subclass_dist(super_type) for b in self.base_classes)
+
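subclass_dist above is a plain shortest-path count through base_classes: 0 for the same type, otherwise 1 plus the minimum over direct bases, and inf for unrelated types, so the closest base can be preferred. A toy check (class T is illustrative):

class T:
    def __init__(self, bases=()):
        self.base_classes = list(bases)
    def same_as_resolved_type(self, other):
        return self is other
    def subclass_dist(self, super_type):
        if self.same_as_resolved_type(super_type):
            return 0
        elif not self.base_classes:
            return float('inf')
        return 1 + min(b.subclass_dist(super_type) for b in self.base_classes)

base = T(); mid = T([base]); leaf = T([mid, base])
assert leaf.subclass_dist(base) == 1      # the direct-base path wins
assert mid.subclass_dist(leaf) == float('inf')
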
def same_as_resolved_type(self, other_type):
if other_type.is_cpp_class:
if self == other_type:
return 1
- # This messy logic is needed due to GH Issue #1852.
+ # This messy logic is needed due to GH Issue #1852.
elif (self.cname == other_type.cname and
- (self.template_type and other_type.template_type
- or self.templates
- or other_type.templates)):
+ (self.template_type and other_type.template_type
+ or self.templates
+ or other_type.templates)):
if self.templates == other_type.templates:
return 1
for t1, t2 in zip(self.templates, other_type.templates):
if is_optional_template_param(t1) and is_optional_template_param(t2):
- break
+ break
if not t1.same_as_resolved_type(t2):
return 0
return 1
@@ -3812,10 +3812,10 @@ class CppClassType(CType):
# TODO: handle operator=(...) here?
if other_type is error_type:
return True
- elif other_type.is_cpp_class:
- return other_type.is_subclass(self)
- elif other_type.is_string and self.cname in cpp_string_conversions:
- return True
+ elif other_type.is_cpp_class:
+ return other_type.is_subclass(self)
+ elif other_type.is_string and self.cname in cpp_string_conversions:
+ return True
def attributes_known(self):
return self.scope is not None
@@ -3833,23 +3833,23 @@ class CppClassType(CType):
func_type = func_type.base_type
return func_type.return_type
- def get_constructor(self, pos):
- constructor = self.scope.lookup('<init>')
- if constructor is not None:
- return constructor
-
- # Otherwise: automatically declare no-args default constructor.
- # Make it "nogil" if the base classes allow it.
- nogil = True
- for base in self.base_classes:
- base_constructor = base.scope.lookup('<init>')
- if base_constructor and not base_constructor.type.nogil:
- nogil = False
- break
-
- func_type = CFuncType(self, [], exception_check='+', nogil=nogil)
- return self.scope.declare_cfunction(u'<init>', func_type, pos)
-
+ def get_constructor(self, pos):
+ constructor = self.scope.lookup('<init>')
+ if constructor is not None:
+ return constructor
+
+ # Otherwise: automatically declare no-args default constructor.
+ # Make it "nogil" if the base classes allow it.
+ nogil = True
+ for base in self.base_classes:
+ base_constructor = base.scope.lookup('<init>')
+ if base_constructor and not base_constructor.type.nogil:
+ nogil = False
+ break
+
+ func_type = CFuncType(self, [], exception_check='+', nogil=nogil)
+ return self.scope.declare_cfunction(u'<init>', func_type, pos)
+
def check_nullary_constructor(self, pos, msg="stack allocated"):
constructor = self.scope.lookup(u'<init>')
if constructor is not None and best_match([], constructor.all_alternatives()) is None:
@@ -3903,7 +3903,7 @@ def is_optional_template_param(type):
return isinstance(type, TemplatePlaceholderType) and type.optional
-class CEnumType(CIntLike, CType):
+class CEnumType(CIntLike, CType):
# name string
# cname string or None
# typedef_flag boolean
@@ -3991,12 +3991,12 @@ class CTupleType(CType):
return False
return True
- def can_coerce_from_pyobject(self, env):
- for component in self.components:
- if not component.can_coerce_from_pyobject(env):
- return False
- return True
-
+ def can_coerce_from_pyobject(self, env):
+ for component in self.components:
+ if not component.can_coerce_from_pyobject(env):
+ return False
+ return True
+
def create_to_py_utility_code(self, env):
if self._convert_to_py_code is False:
return None # tri-state-ish
@@ -4175,9 +4175,9 @@ c_gilstate_type = CEnumType("PyGILState_STATE", "PyGILState_STATE", True)
c_threadstate_type = CStructOrUnionType("PyThreadState", "struct", None, 1, "PyThreadState")
c_threadstate_ptr_type = CPtrType(c_threadstate_type)
-# PEP-539 "Py_tss_t" type
-c_pytss_t_type = CPyTSSTType()
-
+# PEP-539 "Py_tss_t" type
+c_pytss_t_type = CPyTSSTType()
+
# the Py_buffer type is defined in Builtin.py
c_py_buffer_type = CStructOrUnionType("Py_buffer", "struct", None, 1, "Py_buffer")
c_py_buffer_ptr_type = CPtrType(c_py_buffer_type)
@@ -4243,7 +4243,7 @@ modifiers_and_name_to_type = {
#
(1, 0, "void"): c_void_type,
- (1, 0, "Py_tss_t"): c_pytss_t_type,
+ (1, 0, "Py_tss_t"): c_pytss_t_type,
(1, 0, "bint"): c_bint_type,
(0, 0, "Py_UNICODE"): c_py_unicode_type,
@@ -4329,7 +4329,7 @@ def best_match(arg_types, functions, pos=None, env=None, args=None):
[pattern.type.deduce_template_params(actual) for (pattern, actual) in zip(func_type.args, arg_types)],
{})
if deductions is None:
- errors.append((func, "Unable to deduce type parameters for %s given (%s)" % (func_type, ', '.join(map(str, arg_types)))))
+ errors.append((func, "Unable to deduce type parameters for %s given (%s)" % (func_type, ', '.join(map(str, arg_types)))))
elif len(deductions) < len(func_type.templates):
errors.append((func, "Unable to deduce type parameter %s" % (
", ".join([param.name for param in set(func_type.templates) - set(deductions.keys())]))))
@@ -4362,7 +4362,7 @@ def best_match(arg_types, functions, pos=None, env=None, args=None):
needed_coercions = {}
for index, (func, func_type) in enumerate(candidates):
- score = [0,0,0,0,0,0,0]
+ score = [0,0,0,0,0,0,0]
for i in range(min(actual_nargs, len(func_type.args))):
src_type = arg_types[i]
dst_type = func_type.args[i].type
@@ -4373,17 +4373,17 @@ def best_match(arg_types, functions, pos=None, env=None, args=None):
# function that takes a char *, the coercion will mean that the
# type will simply become bytes. We need to do this coercion
# manually for overloaded and fused functions
- if not assignable:
- c_src_type = None
- if src_type.is_pyobject:
- if src_type.is_builtin_type and src_type.name == 'str' and dst_type.resolve().is_string:
- c_src_type = dst_type.resolve()
- else:
- c_src_type = src_type.default_coerced_ctype()
- elif src_type.is_pythran_expr:
- c_src_type = src_type.org_buffer
-
- if c_src_type is not None:
+ if not assignable:
+ c_src_type = None
+ if src_type.is_pyobject:
+ if src_type.is_builtin_type and src_type.name == 'str' and dst_type.resolve().is_string:
+ c_src_type = dst_type.resolve()
+ else:
+ c_src_type = src_type.default_coerced_ctype()
+ elif src_type.is_pythran_expr:
+ c_src_type = src_type.org_buffer
+
+ if c_src_type is not None:
assignable = dst_type.assignable_from(c_src_type)
if assignable:
src_type = c_src_type
@@ -4400,13 +4400,13 @@ def best_match(arg_types, functions, pos=None, env=None, args=None):
(src_type.is_float and dst_type.is_float)):
score[2] += abs(dst_type.rank + (not dst_type.signed) -
(src_type.rank + (not src_type.signed))) + 1
- elif dst_type.is_ptr and src_type.is_ptr:
- if dst_type.base_type == c_void_type:
- score[4] += 1
- elif src_type.base_type.is_cpp_class and src_type.base_type.is_subclass(dst_type.base_type):
- score[6] += src_type.base_type.subclass_dist(dst_type.base_type)
- else:
- score[5] += 1
+ elif dst_type.is_ptr and src_type.is_ptr:
+ if dst_type.base_type == c_void_type:
+ score[4] += 1
+ elif src_type.base_type.is_cpp_class and src_type.base_type.is_subclass(dst_type.base_type):
+ score[6] += src_type.base_type.subclass_dist(dst_type.base_type)
+ else:
+ score[5] += 1
elif not src_type.is_pyobject:
score[1] += 1
else:
@@ -4464,10 +4464,10 @@ def widest_numeric_type(type1, type2):
type1 = type1.ref_base_type
if type2.is_reference:
type2 = type2.ref_base_type
- if type1.is_const:
- type1 = type1.const_base_type
- if type2.is_const:
- type2 = type2.const_base_type
+ if type1.is_const:
+ type1 = type1.const_base_type
+ if type2.is_const:
+ type2 = type2.const_base_type
if type1 == type2:
widest_type = type1
elif type1.is_complex or type2.is_complex:
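The overload scoring above accumulates into a seven-slot list, so candidates can be ranked by plain lexicographic list comparison, with earlier slots dominating later ones. A toy illustration of that ordering (hypothetical score vectors, not Cython's actual selection code):

    # Python compares lists lexicographically, so slot 0 outweighs slot 2, etc.
    exact      = [0, 0, 0, 0, 0, 0, 0]
    promotion  = [0, 0, 1, 0, 0, 0, 0]  # needs a numeric promotion (slot 2)
    via_object = [1, 0, 0, 0, 0, 0, 0]  # needs a Python-object conversion (slot 0)
    assert min(exact, promotion, via_object) == exact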
diff --git a/contrib/tools/cython/Cython/Compiler/Pythran.py b/contrib/tools/cython/Cython/Compiler/Pythran.py
index eaf0f72bd7..c02704a918 100644
--- a/contrib/tools/cython/Cython/Compiler/Pythran.py
+++ b/contrib/tools/cython/Cython/Compiler/Pythran.py
@@ -1,11 +1,11 @@
-# cython: language_level=3
-
-from __future__ import absolute_import
-
-from .PyrexTypes import CType, CTypedefType, CStructOrUnionType
-
-import cython
-
+# cython: language_level=3
+
+from __future__ import absolute_import
+
+from .PyrexTypes import CType, CTypedefType, CStructOrUnionType
+
+import cython
+
try:
import pythran
pythran_is_pre_0_9 = tuple(map(int, pythran.__version__.split('.')[0:2])) < (0, 9)
@@ -14,123 +14,123 @@ except ImportError:
pythran = None
pythran_is_pre_0_9 = True
pythran_is_pre_0_9_6 = True
-
+
if pythran_is_pre_0_9_6:
pythran_builtins = '__builtin__'
else:
pythran_builtins = 'builtins'
-# Pythran/Numpy specific operations
-
-def has_np_pythran(env):
+# Pythran/Numpy specific operations
+
+def has_np_pythran(env):
if env is None:
return False
directives = getattr(env, 'directives', None)
return (directives and directives.get('np_pythran', False))
-
-@cython.ccall
-def is_pythran_supported_dtype(type_):
- if isinstance(type_, CTypedefType):
- return is_pythran_supported_type(type_.typedef_base_type)
- return type_.is_numeric
-
-
-def pythran_type(Ty, ptype="ndarray"):
- if Ty.is_buffer:
- ndim,dtype = Ty.ndim, Ty.dtype
- if isinstance(dtype, CStructOrUnionType):
- ctype = dtype.cname
- elif isinstance(dtype, CType):
- ctype = dtype.sign_and_name()
- elif isinstance(dtype, CTypedefType):
- ctype = dtype.typedef_cname
- else:
- raise ValueError("unsupported type %s!" % dtype)
+
+@cython.ccall
+def is_pythran_supported_dtype(type_):
+ if isinstance(type_, CTypedefType):
+ return is_pythran_supported_type(type_.typedef_base_type)
+ return type_.is_numeric
+
+
+def pythran_type(Ty, ptype="ndarray"):
+ if Ty.is_buffer:
+ ndim,dtype = Ty.ndim, Ty.dtype
+ if isinstance(dtype, CStructOrUnionType):
+ ctype = dtype.cname
+ elif isinstance(dtype, CType):
+ ctype = dtype.sign_and_name()
+ elif isinstance(dtype, CTypedefType):
+ ctype = dtype.typedef_cname
+ else:
+ raise ValueError("unsupported type %s!" % dtype)
if pythran_is_pre_0_9:
return "pythonic::types::%s<%s,%d>" % (ptype,ctype, ndim)
else:
return "pythonic::types::%s<%s,pythonic::types::pshape<%s>>" % (ptype,ctype, ",".join(("long",)*ndim))
- if Ty.is_pythran_expr:
- return Ty.pythran_type
- #if Ty.is_none:
+ if Ty.is_pythran_expr:
+ return Ty.pythran_type
+ #if Ty.is_none:
# return "decltype(pythonic::builtins::None)"
- if Ty.is_numeric:
- return Ty.sign_and_name()
- raise ValueError("unsupported pythran type %s (%s)" % (Ty, type(Ty)))
-
-
-@cython.cfunc
-def type_remove_ref(ty):
- return "typename std::remove_reference<%s>::type" % ty
-
-
-def pythran_binop_type(op, tA, tB):
+ if Ty.is_numeric:
+ return Ty.sign_and_name()
+ raise ValueError("unsupported pythran type %s (%s)" % (Ty, type(Ty)))
+
+
+@cython.cfunc
+def type_remove_ref(ty):
+ return "typename std::remove_reference<%s>::type" % ty
+
+
+def pythran_binop_type(op, tA, tB):
if op == '**':
return 'decltype(pythonic::numpy::functor::power{}(std::declval<%s>(), std::declval<%s>()))' % (
pythran_type(tA), pythran_type(tB))
else:
return "decltype(std::declval<%s>() %s std::declval<%s>())" % (
pythran_type(tA), op, pythran_type(tB))
-
-
-def pythran_unaryop_type(op, type_):
- return "decltype(%sstd::declval<%s>())" % (
- op, pythran_type(type_))
-
-
-@cython.cfunc
-def _index_access(index_code, indices):
- indexing = ",".join([index_code(idx) for idx in indices])
- return ('[%s]' if len(indices) == 1 else '(%s)') % indexing
-
-
-def _index_type_code(index_with_type):
- idx, index_type = index_with_type
- if idx.is_slice:
+
+
+def pythran_unaryop_type(op, type_):
+ return "decltype(%sstd::declval<%s>())" % (
+ op, pythran_type(type_))
+
+
+@cython.cfunc
+def _index_access(index_code, indices):
+ indexing = ",".join([index_code(idx) for idx in indices])
+ return ('[%s]' if len(indices) == 1 else '(%s)') % indexing
+
+
+def _index_type_code(index_with_type):
+ idx, index_type = index_with_type
+ if idx.is_slice:
n = 2 + int(not idx.step.is_none)
return "pythonic::%s::functor::slice{}(%s)" % (
pythran_builtins,
",".join(["0"]*n))
- elif index_type.is_int:
- return "std::declval<%s>()" % index_type.sign_and_name()
- elif index_type.is_pythran_expr:
- return "std::declval<%s>()" % index_type.pythran_type
- raise ValueError("unsupported indexing type %s!" % index_type)
-
-
-def _index_code(idx):
- if idx.is_slice:
- values = idx.start, idx.stop, idx.step
- if idx.step.is_none:
- func = "contiguous_slice"
- values = values[:2]
- else:
- func = "slice"
- return "pythonic::types::%s(%s)" % (
- func, ",".join((v.pythran_result() for v in values)))
- elif idx.type.is_int:
- return to_pythran(idx)
- elif idx.type.is_pythran_expr:
- return idx.pythran_result()
- raise ValueError("unsupported indexing type %s" % idx.type)
-
-
-def pythran_indexing_type(type_, indices):
- return type_remove_ref("decltype(std::declval<%s>()%s)" % (
- pythran_type(type_),
- _index_access(_index_type_code, indices),
- ))
-
-
-def pythran_indexing_code(indices):
- return _index_access(_index_code, indices)
-
+ elif index_type.is_int:
+ return "std::declval<%s>()" % index_type.sign_and_name()
+ elif index_type.is_pythran_expr:
+ return "std::declval<%s>()" % index_type.pythran_type
+ raise ValueError("unsupported indexing type %s!" % index_type)
+
+
+def _index_code(idx):
+ if idx.is_slice:
+ values = idx.start, idx.stop, idx.step
+ if idx.step.is_none:
+ func = "contiguous_slice"
+ values = values[:2]
+ else:
+ func = "slice"
+ return "pythonic::types::%s(%s)" % (
+ func, ",".join((v.pythran_result() for v in values)))
+ elif idx.type.is_int:
+ return to_pythran(idx)
+ elif idx.type.is_pythran_expr:
+ return idx.pythran_result()
+ raise ValueError("unsupported indexing type %s" % idx.type)
+
+
+def pythran_indexing_type(type_, indices):
+ return type_remove_ref("decltype(std::declval<%s>()%s)" % (
+ pythran_type(type_),
+ _index_access(_index_type_code, indices),
+ ))
+
+
+def pythran_indexing_code(indices):
+ return _index_access(_index_code, indices)
+
def np_func_to_list(func):
if not func.is_numpy_attribute:
return []
return np_func_to_list(func.obj) + [func.attribute]
-
+
if pythran is None:
def pythran_is_numpy_func_supported(name):
return False
@@ -149,79 +149,79 @@ def pythran_functor(func):
submodules = "::".join(func[:-1] + ["functor"])
return "pythonic::numpy::%s::%s" % (submodules, func[-1])
-def pythran_func_type(func, args):
- args = ",".join(("std::declval<%s>()" % pythran_type(a.type) for a in args))
+def pythran_func_type(func, args):
+ args = ",".join(("std::declval<%s>()" % pythran_type(a.type) for a in args))
return "decltype(%s{}(%s))" % (pythran_functor(func), args)
-
-
-@cython.ccall
-def to_pythran(op, ptype=None):
- op_type = op.type
- if op_type.is_int:
- # Make sure that integer literals always have exactly the type that the templates expect.
- return op_type.cast_code(op.result())
- if is_type(op_type, ["is_pythran_expr", "is_numeric", "is_float", "is_complex"]):
- return op.result()
- if op.is_none:
+
+
+@cython.ccall
+def to_pythran(op, ptype=None):
+ op_type = op.type
+ if op_type.is_int:
+ # Make sure that integer literals always have exactly the type that the templates expect.
+ return op_type.cast_code(op.result())
+ if is_type(op_type, ["is_pythran_expr", "is_numeric", "is_float", "is_complex"]):
+ return op.result()
+ if op.is_none:
return "pythonic::%s::None" % pythran_builtins
- if ptype is None:
- ptype = pythran_type(op_type)
-
- assert op.type.is_pyobject
- return "from_python<%s>(%s)" % (ptype, op.py_result())
-
-
-@cython.cfunc
-def is_type(type_, types):
- for attr in types:
- if getattr(type_, attr, False):
- return True
- return False
-
-
-def is_pythran_supported_node_or_none(node):
- return node.is_none or is_pythran_supported_type(node.type)
-
-
-@cython.ccall
-def is_pythran_supported_type(type_):
- pythran_supported = (
- "is_pythran_expr", "is_int", "is_numeric", "is_float", "is_none", "is_complex")
- return is_type(type_, pythran_supported) or is_pythran_expr(type_)
-
-
-def is_pythran_supported_operation_type(type_):
- pythran_supported = (
- "is_pythran_expr", "is_int", "is_numeric", "is_float", "is_complex")
- return is_type(type_,pythran_supported) or is_pythran_expr(type_)
-
-
-@cython.ccall
-def is_pythran_expr(type_):
- return type_.is_pythran_expr
-
-
-def is_pythran_buffer(type_):
- return (type_.is_numpy_buffer and is_pythran_supported_dtype(type_.dtype) and
- type_.mode in ("c", "strided") and not type_.cast)
-
+ if ptype is None:
+ ptype = pythran_type(op_type)
+
+ assert op.type.is_pyobject
+ return "from_python<%s>(%s)" % (ptype, op.py_result())
+
+
+@cython.cfunc
+def is_type(type_, types):
+ for attr in types:
+ if getattr(type_, attr, False):
+ return True
+ return False
+
+
+def is_pythran_supported_node_or_none(node):
+ return node.is_none or is_pythran_supported_type(node.type)
+
+
+@cython.ccall
+def is_pythran_supported_type(type_):
+ pythran_supported = (
+ "is_pythran_expr", "is_int", "is_numeric", "is_float", "is_none", "is_complex")
+ return is_type(type_, pythran_supported) or is_pythran_expr(type_)
+
+
+def is_pythran_supported_operation_type(type_):
+ pythran_supported = (
+ "is_pythran_expr", "is_int", "is_numeric", "is_float", "is_complex")
+ return is_type(type_,pythran_supported) or is_pythran_expr(type_)
+
+
+@cython.ccall
+def is_pythran_expr(type_):
+ return type_.is_pythran_expr
+
+
+def is_pythran_buffer(type_):
+ return (type_.is_numpy_buffer and is_pythran_supported_dtype(type_.dtype) and
+ type_.mode in ("c", "strided") and not type_.cast)
+
def pythran_get_func_include_file(func):
func = np_func_to_list(func)
return "pythonic/numpy/%s.hpp" % "/".join(func)
-
-def include_pythran_generic(env):
- # Generic files
- env.add_include_file("pythonic/core.hpp")
- env.add_include_file("pythonic/python/core.hpp")
- env.add_include_file("pythonic/types/bool.hpp")
- env.add_include_file("pythonic/types/ndarray.hpp")
+
+def include_pythran_generic(env):
+ # Generic files
+ env.add_include_file("pythonic/core.hpp")
+ env.add_include_file("pythonic/python/core.hpp")
+ env.add_include_file("pythonic/types/bool.hpp")
+ env.add_include_file("pythonic/types/ndarray.hpp")
env.add_include_file("pythonic/numpy/power.hpp")
env.add_include_file("pythonic/%s/slice.hpp" % pythran_builtins)
- env.add_include_file("<new>") # for placement new
-
- for i in (8, 16, 32, 64):
- env.add_include_file("pythonic/types/uint%d.hpp" % i)
- env.add_include_file("pythonic/types/int%d.hpp" % i)
- for t in ("float", "float32", "float64", "set", "slice", "tuple", "int",
+ env.add_include_file("<new>") # for placement new
+
+ for i in (8, 16, 32, 64):
+ env.add_include_file("pythonic/types/uint%d.hpp" % i)
+ env.add_include_file("pythonic/types/int%d.hpp" % i)
+ for t in ("float", "float32", "float64", "set", "slice", "tuple", "int",
"complex", "complex64", "complex128"):
- env.add_include_file("pythonic/types/%s.hpp" % t)
+ env.add_include_file("pythonic/types/%s.hpp" % t)
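Most helpers in Pythran.py only assemble C++ decltype() strings. A self-contained re-creation of the _index_access pattern restored above (illustrative names): a single index renders as operator[], several indices render as a call to operator().

    # Toy version of the single- vs multi-index formatting used by _index_access.
    def index_access(index_codes):
        joined = ",".join(index_codes)
        return ("[%s]" if len(index_codes) == 1 else "(%s)") % joined

    assert index_access(["0"]) == "[0]"
    assert index_access(["i", "j"]) == "(i,j)"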
diff --git a/contrib/tools/cython/Cython/Compiler/Scanning.py b/contrib/tools/cython/Cython/Compiler/Scanning.py
index 421ca64c86..c721bba69b 100644
--- a/contrib/tools/cython/Cython/Compiler/Scanning.py
+++ b/contrib/tools/cython/Cython/Compiler/Scanning.py
@@ -1,4 +1,4 @@
-# cython: infer_types=True, language_level=3, py2_import=True, auto_pickle=False
+# cython: infer_types=True, language_level=3, py2_import=True, auto_pickle=False
#
# Cython Scanner
#
@@ -63,13 +63,13 @@ class Method(object):
# self.kwargs is almost always unused => avoid call overhead
return method(text, **self.kwargs) if self.kwargs is not None else method(text)
- def __copy__(self):
- return self # immutable, no need to copy
- def __deepcopy__(self, memo):
- return self # immutable, no need to copy
-
-
+ def __copy__(self):
+ return self # immutable, no need to copy
+
+ def __deepcopy__(self, memo):
+ return self # immutable, no need to copy
+
#------------------------------------------------------------------
class CompileTimeScope(object):
@@ -170,7 +170,7 @@ class SourceDescriptor(object):
if self._escaped_description is None:
esc_desc = \
self.get_description().encode('ASCII', 'replace').decode("ASCII")
- # Use forward slashes on Windows since these paths
+ # Use forward slashes on Windows since these paths
# will be used in the #line directives in the C/C++ files.
self._escaped_description = esc_desc.replace('\\', '/')
return self._escaped_description
@@ -196,13 +196,13 @@ class SourceDescriptor(object):
except AttributeError:
return False
- def __copy__(self):
- return self # immutable, no need to copy
- def __deepcopy__(self, memo):
- return self # immutable, no need to copy
-
-
+ def __copy__(self):
+ return self # immutable, no need to copy
+
+ def __deepcopy__(self, memo):
+ return self # immutable, no need to copy
+
class FileSourceDescriptor(SourceDescriptor):
"""
Represents a code source. A code source is a more generic abstraction
@@ -215,9 +215,9 @@ class FileSourceDescriptor(SourceDescriptor):
filename = Utils.decode_filename(filename)
self.path_description = path_description or filename
self.filename = filename
- # Prefer relative paths to current directory (which is most likely the project root) over absolute paths.
- workdir = os.path.abspath('.') + os.sep
- self.file_path = filename[len(workdir):] if filename.startswith(workdir) else filename
+ # Prefer relative paths to current directory (which is most likely the project root) over absolute paths.
+ workdir = os.path.abspath('.') + os.sep
+ self.file_path = filename[len(workdir):] if filename.startswith(workdir) else filename
self.set_file_type_from_name(filename)
self._cmp_name = filename
self._lines = {}
@@ -245,8 +245,8 @@ class FileSourceDescriptor(SourceDescriptor):
return lines
def get_description(self):
- # Dump path_description, it's already arcadia root relative (required for proper file matching in coverage)
- return self.path_description
+ # Dump path_description, it's already arcadia root relative (required for proper file matching in coverage)
+ return self.path_description
try:
return os.path.relpath(self.path_description)
except ValueError:
@@ -261,7 +261,7 @@ class FileSourceDescriptor(SourceDescriptor):
return path
def get_filenametable_entry(self):
- return self.file_path
+ return self.file_path
def __eq__(self, other):
return isinstance(other, FileSourceDescriptor) and self.filename == other.filename
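The __copy__/__deepcopy__ overrides restored in this file use the standard idiom for immutable objects: returning self makes copy.copy() and copy.deepcopy() no-ops. A minimal sketch:

    import copy

    class Immutable(object):
        def __copy__(self):
            return self  # immutable, no need to copy
        def __deepcopy__(self, memo):
            return self  # immutable, no need to copy

    obj = Immutable()
    assert copy.copy(obj) is obj
    assert copy.deepcopy(obj) is obj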
diff --git a/contrib/tools/cython/Cython/Compiler/StringEncoding.py b/contrib/tools/cython/Cython/Compiler/StringEncoding.py
index 617d7502de..c37e8aab79 100644
--- a/contrib/tools/cython/Cython/Compiler/StringEncoding.py
+++ b/contrib/tools/cython/Cython/Compiler/StringEncoding.py
@@ -87,7 +87,7 @@ class BytesLiteralBuilder(object):
def getstrings(self):
return (self.getstring(), None)
-
+
class StrLiteralBuilder(object):
"""Assemble both a bytes and a unicode representation of a string.
"""
@@ -219,14 +219,14 @@ def bytes_literal(s, encoding):
return s
-def encoded_string(s, encoding):
- assert isinstance(s, (_unicode, bytes))
- s = EncodedString(s)
- if encoding is not None:
- s.encoding = encoding
- return s
-
-
+def encoded_string(s, encoding):
+ assert isinstance(s, (_unicode, bytes))
+ s = EncodedString(s)
+ if encoding is not None:
+ s.encoding = encoding
+ return s
+
+
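encoded_string() above relies on EncodedString being a str/unicode subclass, so an encoding attribute can be attached after construction. A hedged sketch of the same pattern with a stand-in class (TaggedString is hypothetical, not Cython's EncodedString):

    # Hypothetical stand-in: a str subclass that can carry an encoding tag.
    class TaggedString(str):
        encoding = None  # None is treated as "unicode"

    s = TaggedString(u"abc")
    s.encoding = "utf-8"
    assert s == u"abc" and s.encoding == "utf-8"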
char_from_escape_sequence = {
r'\a' : u'\a',
r'\b' : u'\b',
diff --git a/contrib/tools/cython/Cython/Compiler/Symtab.py b/contrib/tools/cython/Cython/Compiler/Symtab.py
index 868f96ebf7..7361a55aea 100644
--- a/contrib/tools/cython/Cython/Compiler/Symtab.py
+++ b/contrib/tools/cython/Cython/Compiler/Symtab.py
@@ -4,9 +4,9 @@
from __future__ import absolute_import
-import re
+import re
import copy
-import operator
+import operator
try:
import __builtin__ as builtins
@@ -18,9 +18,9 @@ from .StringEncoding import EncodedString
from . import Options, Naming
from . import PyrexTypes
from .PyrexTypes import py_object_type, unspecified_type
-from .TypeSlots import (
- pyfunction_signature, pymethod_signature, richcmp_special_methods,
- get_special_method_signature, get_property_accessor_signature)
+from .TypeSlots import (
+ pyfunction_signature, pymethod_signature, richcmp_special_methods,
+ get_special_method_signature, get_property_accessor_signature)
from . import Future
from . import Code
@@ -36,13 +36,13 @@ iso_c99_keywords = set(
def c_safe_identifier(cname):
# There are some C limitations on struct entry names.
- if ((cname[:2] == '__' and not (cname.startswith(Naming.pyrex_prefix)
- or cname in ('__weakref__', '__dict__')))
- or cname in iso_c99_keywords):
+ if ((cname[:2] == '__' and not (cname.startswith(Naming.pyrex_prefix)
+ or cname in ('__weakref__', '__dict__')))
+ or cname in iso_c99_keywords):
cname = Naming.pyrex_prefix + cname
return cname
-
+
class BufferAux(object):
writable_needed = False
@@ -61,7 +61,7 @@ class Entry(object):
# cname string C name of entity
# type PyrexType Type of entity
# doc string Doc string
- # annotation ExprNode PEP 484/526 annotation
+ # annotation ExprNode PEP 484/526 annotation
# init string Initial value
# visibility 'private' or 'public' or 'extern'
# is_builtin boolean Is an entry in the Python builtins dict
@@ -91,7 +91,7 @@ class Entry(object):
# is_arg boolean Is the arg of a method
# is_local boolean Is a local variable
# in_closure boolean Is referenced in an inner scope
- # in_subscope boolean Belongs to a generator expression scope
+ # in_subscope boolean Belongs to a generator expression scope
# is_readonly boolean Can't be assigned to
# func_cname string C func implementing Python func
# func_modifiers [string] C function modifiers ('inline')
@@ -123,7 +123,7 @@ class Entry(object):
#
# buffer_aux BufferAux or None Extra information needed for buffer variables
# inline_func_in_pxd boolean Hacky special case for inline function in pxd file.
- # Ideally this should not be necessary.
+ # Ideally this should not be necessary.
# might_overflow boolean In an arithmetic expression that could cause
# overflow (used for type inference).
# utility_code_definition For some Cython builtins, the utility code
@@ -140,7 +140,7 @@ class Entry(object):
inline_func_in_pxd = False
borrowed = 0
init = ""
- annotation = None
+ annotation = None
visibility = 'private'
is_builtin = 0
is_cglobal = 0
@@ -168,7 +168,7 @@ class Entry(object):
is_local = 0
in_closure = 0
from_closure = 0
- in_subscope = 0
+ in_subscope = 0
is_declared_generic = 0
is_readonly = 0
pyfunc_cname = None
@@ -219,12 +219,12 @@ class Entry(object):
def __repr__(self):
return "%s(<%x>, name=%s, type=%s)" % (type(self).__name__, id(self), self.name, self.type)
- def already_declared_here(self):
- error(self.pos, "Previous declaration is here")
-
+ def already_declared_here(self):
+ error(self.pos, "Previous declaration is here")
+
def redeclared(self, pos):
error(pos, "'%s' does not match previous declaration" % self.name)
- self.already_declared_here()
+ self.already_declared_here()
def all_alternatives(self):
return [self] + self.overloaded_alternatives
@@ -308,7 +308,7 @@ class Scope(object):
is_py_class_scope = 0
is_c_class_scope = 0
is_closure_scope = 0
- is_genexpr_scope = 0
+ is_genexpr_scope = 0
is_passthrough = 0
is_cpp_class_scope = 0
is_property_scope = 0
@@ -318,7 +318,7 @@ class Scope(object):
in_cinclude = 0
nogil = 0
fused_to_specific = None
- return_type = None
+ return_type = None
def __init__(self, name, outer_scope, parent_scope):
# The outer_scope is the next scope in the lookup chain.
@@ -335,7 +335,7 @@ class Scope(object):
self.qualified_name = EncodedString(name)
self.scope_prefix = mangled_name
self.entries = {}
- self.subscopes = set()
+ self.subscopes = set()
self.const_entries = []
self.type_entries = []
self.sue_entries = []
@@ -430,12 +430,12 @@ class Scope(object):
""" Return the module-level scope containing this scope. """
return self.outer_scope.builtin_scope()
- def iter_local_scopes(self):
- yield self
- if self.subscopes:
- for scope in sorted(self.subscopes, key=operator.attrgetter('scope_prefix')):
- yield scope
-
+ def iter_local_scopes(self):
+ yield self
+ if self.subscopes:
+ for scope in sorted(self.subscopes, key=operator.attrgetter('scope_prefix')):
+ yield scope
+
def declare(self, name, cname, type, pos, visibility, shadow = 0, is_type = 0, create_wrapper = 0):
# Create new entry, and add to dictionary if
# name is not None. Reports a warning if already
@@ -447,33 +447,33 @@ class Scope(object):
warning(pos, "'%s' is a reserved name in C." % cname, -1)
entries = self.entries
if name and name in entries and not shadow:
- old_entry = entries[name]
-
- # Reject redeclared C++ functions only if they have the same type signature.
- cpp_override_allowed = False
- if type.is_cfunction and old_entry.type.is_cfunction and self.is_cpp():
- for alt_entry in old_entry.all_alternatives():
- if type == alt_entry.type:
- if name == '<init>' and not type.args:
- # Cython pre-declares the no-args constructor - allow later user definitions.
- cpp_override_allowed = True
- break
- else:
- cpp_override_allowed = True
-
- if cpp_override_allowed:
- # C++ function/method overrides with different signatures are ok.
+ old_entry = entries[name]
+
+ # Reject redeclared C++ functions only if they have the same type signature.
+ cpp_override_allowed = False
+ if type.is_cfunction and old_entry.type.is_cfunction and self.is_cpp():
+ for alt_entry in old_entry.all_alternatives():
+ if type == alt_entry.type:
+ if name == '<init>' and not type.args:
+ # Cython pre-declares the no-args constructor - allow later user definitions.
+ cpp_override_allowed = True
+ break
+ else:
+ cpp_override_allowed = True
+
+ if cpp_override_allowed:
+ # C++ function/method overrides with different signatures are ok.
+ pass
+ elif self.is_cpp_class_scope and entries[name].is_inherited:
+ # Likewise ignore inherited classes.
pass
- elif self.is_cpp_class_scope and entries[name].is_inherited:
- # Likewise ignore inherited classes.
- pass
elif visibility == 'extern':
- # Silenced outside of "cdef extern" blocks, until we have a safe way to
- # prevent pxd-defined cpdef functions from ending up here.
- warning(pos, "'%s' redeclared " % name, 1 if self.in_cinclude else 0)
+ # Silenced outside of "cdef extern" blocks, until we have a safe way to
+ # prevent pxd-defined cpdef functions from ending up here.
+ warning(pos, "'%s' redeclared " % name, 1 if self.in_cinclude else 0)
elif visibility != 'ignore':
error(pos, "'%s' redeclared " % name)
- entries[name].already_declared_here()
+ entries[name].already_declared_here()
entry = Entry(name, cname, type, pos = pos)
entry.in_cinclude = self.in_cinclude
entry.create_wrapper = create_wrapper
@@ -605,7 +605,7 @@ class Scope(object):
else:
if not (entry.is_type and entry.type.is_cpp_class):
error(pos, "'%s' redeclared " % name)
- entry.already_declared_here()
+ entry.already_declared_here()
return None
elif scope and entry.type.scope:
warning(pos, "'%s' already defined (ignoring second definition)" % name, 0)
@@ -616,13 +616,13 @@ class Scope(object):
if base_classes:
if entry.type.base_classes and entry.type.base_classes != base_classes:
error(pos, "Base type does not match previous declaration")
- entry.already_declared_here()
+ entry.already_declared_here()
else:
entry.type.base_classes = base_classes
if templates or entry.type.templates:
if templates != entry.type.templates:
error(pos, "Template parameters do not match previous declaration")
- entry.already_declared_here()
+ entry.already_declared_here()
def declare_inherited_attributes(entry, base_classes):
for base_class in base_classes:
@@ -632,7 +632,7 @@ class Scope(object):
error(pos, "Cannot inherit from incomplete type")
else:
declare_inherited_attributes(entry, base_class.base_classes)
- entry.type.scope.declare_inherited_cpp_attributes(base_class)
+ entry.type.scope.declare_inherited_cpp_attributes(base_class)
if scope:
declare_inherited_attributes(entry, base_classes)
scope.declare_var(name="this", cname="this", type=PyrexTypes.CPtrType(entry.type), pos=entry.pos)
@@ -773,10 +773,10 @@ class Scope(object):
if overridable != entry.is_overridable:
warning(pos, "Function '%s' previously declared as '%s'" % (
name, 'cpdef' if overridable else 'cdef'), 1)
- if entry.type.same_as(type):
- # Fix with_gil vs nogil.
- entry.type = entry.type.with_with_gil(type.with_gil)
- else:
+ if entry.type.same_as(type):
+ # Fix with_gil vs nogil.
+ entry.type = entry.type.with_with_gil(type.with_gil)
+ else:
if visibility == 'extern' and entry.visibility == 'extern':
can_override = False
if self.is_cpp():
@@ -796,10 +796,10 @@ class Scope(object):
else:
warning(pos, "Function signature does not match previous declaration", 1)
entry.type = type
- elif not in_pxd and entry.defined_in_pxd and type.compatible_signature_with(entry.type):
- # TODO: check that this was done by a signature optimisation and not a user error.
- #warning(pos, "Function signature does not match previous declaration", 1)
- entry.type = type
+ elif not in_pxd and entry.defined_in_pxd and type.compatible_signature_with(entry.type):
+ # TODO: check that this was done by a signature optimisation and not a user error.
+ #warning(pos, "Function signature does not match previous declaration", 1)
+ entry.type = type
else:
error(pos, "Function signature does not match previous declaration")
else:
@@ -830,23 +830,23 @@ class Scope(object):
type.entry = entry
return entry
- def add_cfunction(self, name, type, pos, cname, visibility, modifiers, inherited=False):
+ def add_cfunction(self, name, type, pos, cname, visibility, modifiers, inherited=False):
# Add a C function entry without giving it a func_cname.
entry = self.declare(name, cname, type, pos, visibility)
entry.is_cfunction = 1
if modifiers:
entry.func_modifiers = modifiers
- if inherited or type.is_fused:
- self.cfunc_entries.append(entry)
- else:
- # For backwards compatibility reasons, we must keep all non-fused methods
- # before all fused methods, but separately for each type.
- i = len(self.cfunc_entries)
- for cfunc_entry in reversed(self.cfunc_entries):
- if cfunc_entry.is_inherited or not cfunc_entry.type.is_fused:
- break
- i -= 1
- self.cfunc_entries.insert(i, entry)
+ if inherited or type.is_fused:
+ self.cfunc_entries.append(entry)
+ else:
+ # For backwards compatibility reasons, we must keep all non-fused methods
+ # before all fused methods, but separately for each type.
+ i = len(self.cfunc_entries)
+ for cfunc_entry in reversed(self.cfunc_entries):
+ if cfunc_entry.is_inherited or not cfunc_entry.type.is_fused:
+ break
+ i -= 1
+ self.cfunc_entries.insert(i, entry)
return entry
def find(self, name, pos):
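The add_cfunction() logic above appends inherited and fused entries, but inserts every other method ahead of the trailing block of fused methods. A standalone sketch of that insertion rule (illustrative names only):

    # Keep non-fused entries before the fused tail, preserving relative order.
    def insert_cfunc(entries, name, is_fused):
        if is_fused:
            entries.append((name, True))
            return
        i = len(entries)
        for _, fused in reversed(entries):
            if not fused:
                break
            i -= 1
        entries.insert(i, (name, False))

    entries = [("a", False), ("f1", True)]
    insert_cfunc(entries, "b", False)
    assert entries == [("a", False), ("b", False), ("f1", True)]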
@@ -946,19 +946,19 @@ class Scope(object):
self.global_scope().use_entry_utility_code(entry)
def defines_any(self, names):
- # Test whether any of the given names are defined in this scope.
+ # Test whether any of the given names are defined in this scope.
for name in names:
if name in self.entries:
return 1
return 0
- def defines_any_special(self, names):
- # Test whether any of the given names are defined as special methods in this scope.
- for name in names:
- if name in self.entries and self.entries[name].is_special:
- return 1
- return 0
-
+ def defines_any_special(self, names):
+ # Test whether any of the given names are defined as special methods in this scope.
+ for name in names:
+ if name in self.entries and self.entries[name].is_special:
+ return 1
+ return 0
+
def infer_types(self):
from .TypeInference import get_type_inferer
get_type_inferer().infer_types(self)
@@ -1114,8 +1114,8 @@ class ModuleScope(Scope):
# doc string Module doc string
# doc_cname string C name of module doc string
# utility_code_list [UtilityCode] Queuing utility codes for forwarding to Code.py
- # c_includes {key: IncludeCode} C headers or verbatim code to be generated
- # See process_include() for more documentation
+ # c_includes {key: IncludeCode} C headers or verbatim code to be generated
+ # See process_include() for more documentation
# string_to_entry {string : Entry} Map string const to entry
# identifier_to_entry {string : Entry} Map identifier string const to entry
# context Context
@@ -1158,7 +1158,7 @@ class ModuleScope(Scope):
self.doc_cname = Naming.moddoc_cname
self.utility_code_list = []
self.module_entries = {}
- self.c_includes = {}
+ self.c_includes = {}
self.type_names = dict(outer_scope.type_names)
self.pxd_file_loaded = 0
self.cimported_modules = []
@@ -1169,10 +1169,10 @@ class ModuleScope(Scope):
self.undeclared_cached_builtins = []
self.namespace_cname = self.module_cname
self._cached_tuple_types = {}
- for var_name in ['__builtins__', '__name__', '__file__', '__doc__', '__path__',
- '__spec__', '__loader__', '__package__', '__cached__']:
+ for var_name in ['__builtins__', '__name__', '__file__', '__doc__', '__path__',
+ '__spec__', '__loader__', '__package__', '__cached__']:
self.declare_var(EncodedString(var_name), py_object_type, None)
- self.process_include(Code.IncludeCode("Python.h", initial=True))
+ self.process_include(Code.IncludeCode("Python.h", initial=True))
def qualifying_scope(self):
return self.parent_module
@@ -1299,58 +1299,58 @@ class ModuleScope(Scope):
module = module.lookup_submodule(submodule)
return module
- def add_include_file(self, filename, verbatim_include=None, late=False):
- """
- Add `filename` as include file. Add `verbatim_include` as
- verbatim text in the C file.
- Both `filename` and `verbatim_include` can be `None` or empty.
- """
- inc = Code.IncludeCode(filename, verbatim_include, late=late)
- self.process_include(inc)
-
- def process_include(self, inc):
- """
- Add `inc`, which is an instance of `IncludeCode`, to this
- `ModuleScope`. This either adds a new element to the
- `c_includes` dict or it updates an existing entry.
-
- In detail: the values of the dict `self.c_includes` are
- instances of `IncludeCode` containing the code to be put in the
- generated C file. The keys of the dict are needed to ensure
- uniqueness in two ways: if an include file is specified in
- multiple "cdef extern" blocks, only one `#include` statement is
- generated. Second, the same include might occur multiple times
- if we find it through multiple "cimport" paths. So we use the
- generated code (of the form `#include "header.h"`) as dict key.
-
- If verbatim code does not belong to any include file (i.e. it
- was put in a `cdef extern from *` block), then we use a unique
- dict key: namely, the `sortkey()`.
-
- One `IncludeCode` object can contain multiple pieces of C code:
- one optional "main piece" for the include file and several other
- pieces for the verbatim code. The `IncludeCode.dict_update`
- method merges the pieces of two different `IncludeCode` objects
- if needed.
- """
- key = inc.mainpiece()
- if key is None:
- key = inc.sortkey()
- inc.dict_update(self.c_includes, key)
- inc = self.c_includes[key]
-
+ def add_include_file(self, filename, verbatim_include=None, late=False):
+ """
+ Add `filename` as include file. Add `verbatim_include` as
+ verbatim text in the C file.
+ Both `filename` and `verbatim_include` can be `None` or empty.
+ """
+ inc = Code.IncludeCode(filename, verbatim_include, late=late)
+ self.process_include(inc)
+
+ def process_include(self, inc):
+ """
+ Add `inc`, which is an instance of `IncludeCode`, to this
+ `ModuleScope`. This either adds a new element to the
+ `c_includes` dict or it updates an existing entry.
+
+ In detail: the values of the dict `self.c_includes` are
+ instances of `IncludeCode` containing the code to be put in the
+ generated C file. The keys of the dict are needed to ensure
+ uniqueness in two ways: if an include file is specified in
+ multiple "cdef extern" blocks, only one `#include` statement is
+ generated. Second, the same include might occur multiple times
+ if we find it through multiple "cimport" paths. So we use the
+ generated code (of the form `#include "header.h"`) as dict key.
+
+ If verbatim code does not belong to any include file (i.e. it
+ was put in a `cdef extern from *` block), then we use a unique
+ dict key: namely, the `sortkey()`.
+
+ One `IncludeCode` object can contain multiple pieces of C code:
+ one optional "main piece" for the include file and several other
+ pieces for the verbatim code. The `IncludeCode.dict_update`
+ method merges the pieces of two different `IncludeCode` objects
+ if needed.
+ """
+ key = inc.mainpiece()
+ if key is None:
+ key = inc.sortkey()
+ inc.dict_update(self.c_includes, key)
+ inc = self.c_includes[key]
+
def add_imported_module(self, scope):
if scope not in self.cimported_modules:
- for inc in scope.c_includes.values():
- self.process_include(inc)
+ for inc in scope.c_includes.values():
+ self.process_include(inc)
self.cimported_modules.append(scope)
for m in scope.cimported_modules:
self.add_imported_module(m)
def add_imported_entry(self, name, entry, pos):
- if entry.is_pyglobal:
- # Allow cimports to follow imports.
- entry.is_variable = True
+ if entry.is_pyglobal:
+ # Allow cimports to follow imports.
+ entry.is_variable = True
if entry not in self.entries:
self.entries[name] = entry
else:
@@ -1376,7 +1376,7 @@ class ModuleScope(Scope):
return entry
else:
entry = self.declare_var(name, py_object_type, pos)
- entry.is_variable = 0
+ entry.is_variable = 0
entry.as_module = scope
self.add_imported_module(scope)
return entry
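The process_include() docstring restored above describes deduplication by dict key: the rendered #include line for real headers, a unique sort key for free-standing verbatim code. Under that description the core behaviour reduces to a keyed setdefault; a toy model:

    # Toy model of the c_includes dedup described in process_include().
    c_includes = {}

    def add_include(line):
        c_includes.setdefault(line, line)

    add_include('#include "header.h"')
    add_include('#include "header.h"')  # reached via a second cimport path
    assert list(c_includes) == ['#include "header.h"']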
@@ -1428,8 +1428,8 @@ class ModuleScope(Scope):
api=api, in_pxd=in_pxd, is_cdef=is_cdef)
if is_cdef:
entry.is_cglobal = 1
- if entry.type.declaration_value:
- entry.init = entry.type.declaration_value
+ if entry.type.declaration_value:
+ entry.init = entry.type.declaration_value
self.var_entries.append(entry)
else:
entry.is_pyglobal = 1
@@ -1440,9 +1440,9 @@ class ModuleScope(Scope):
def declare_cfunction(self, name, type, pos,
cname=None, visibility='private', api=0, in_pxd=0,
defining=0, modifiers=(), utility_code=None, overridable=False):
- if not defining and 'inline' in modifiers:
- # TODO(github/1736): Make this an error.
- warning(pos, "Declarations should not be declared inline.", 1)
+ if not defining and 'inline' in modifiers:
+ # TODO(github/1736): Make this an error.
+ warning(pos, "Declarations should not be declared inline.", 1)
# Add an entry for a C function.
if not cname:
if visibility == 'extern' or (visibility == 'public' and defining):
@@ -1763,8 +1763,8 @@ class LocalScope(Scope):
entry = Scope.declare_var(self, name, type, pos,
cname=cname, visibility=visibility,
api=api, in_pxd=in_pxd, is_cdef=is_cdef)
- if entry.type.declaration_value:
- entry.init = entry.type.declaration_value
+ if entry.type.declaration_value:
+ entry.init = entry.type.declaration_value
entry.is_local = 1
entry.in_with_gil_block = self._in_with_gil_block
@@ -1784,7 +1784,7 @@ class LocalScope(Scope):
orig_entry = self.lookup_here(name)
if orig_entry and orig_entry.scope is self and not orig_entry.from_closure:
error(pos, "'%s' redeclared as nonlocal" % name)
- orig_entry.already_declared_here()
+ orig_entry.already_declared_here()
else:
entry = self.lookup(name)
if entry is None or not entry.from_closure:
@@ -1795,10 +1795,10 @@ class LocalScope(Scope):
# Return None if not found.
entry = Scope.lookup(self, name)
if entry is not None:
- entry_scope = entry.scope
- while entry_scope.is_genexpr_scope:
- entry_scope = entry_scope.outer_scope
- if entry_scope is not self and entry_scope.is_closure_scope:
+ entry_scope = entry.scope
+ while entry_scope.is_genexpr_scope:
+ entry_scope = entry_scope.outer_scope
+ if entry_scope is not self and entry_scope.is_closure_scope:
if hasattr(entry.scope, "scope_class"):
raise InternalError("lookup() after scope class created.")
# The actual c fragment for the different scopes differs
@@ -1811,19 +1811,19 @@ class LocalScope(Scope):
return entry
def mangle_closure_cnames(self, outer_scope_cname):
- for scope in self.iter_local_scopes():
- for entry in scope.entries.values():
- if entry.from_closure:
- cname = entry.outer_entry.cname
- if self.is_passthrough:
- entry.cname = cname
- else:
- if cname.startswith(Naming.cur_scope_cname):
- cname = cname[len(Naming.cur_scope_cname)+2:]
- entry.cname = "%s->%s" % (outer_scope_cname, cname)
- elif entry.in_closure:
- entry.original_cname = entry.cname
- entry.cname = "%s->%s" % (Naming.cur_scope_cname, entry.cname)
+ for scope in self.iter_local_scopes():
+ for entry in scope.entries.values():
+ if entry.from_closure:
+ cname = entry.outer_entry.cname
+ if self.is_passthrough:
+ entry.cname = cname
+ else:
+ if cname.startswith(Naming.cur_scope_cname):
+ cname = cname[len(Naming.cur_scope_cname)+2:]
+ entry.cname = "%s->%s" % (outer_scope_cname, cname)
+ elif entry.in_closure:
+ entry.original_cname = entry.cname
+ entry.cname = "%s->%s" % (Naming.cur_scope_cname, entry.cname)
class GeneratorExpressionScope(Scope):
@@ -1831,25 +1831,25 @@ class GeneratorExpressionScope(Scope):
to generators, these can be easily inlined in some cases, so all
we really need is a scope that holds the loop variable(s).
"""
- is_genexpr_scope = True
-
+ is_genexpr_scope = True
+
def __init__(self, outer_scope):
- parent_scope = outer_scope
- # TODO: also ignore class scopes?
- while parent_scope.is_genexpr_scope:
- parent_scope = parent_scope.parent_scope
- name = parent_scope.global_scope().next_id(Naming.genexpr_id_ref)
- Scope.__init__(self, name, outer_scope, parent_scope)
+ parent_scope = outer_scope
+ # TODO: also ignore class scopes?
+ while parent_scope.is_genexpr_scope:
+ parent_scope = parent_scope.parent_scope
+ name = parent_scope.global_scope().next_id(Naming.genexpr_id_ref)
+ Scope.__init__(self, name, outer_scope, parent_scope)
self.directives = outer_scope.directives
self.genexp_prefix = "%s%d%s" % (Naming.pyrex_prefix, len(name), name)
- # Class/ExtType scopes are filled at class creation time, i.e. from the
- # module init function or surrounding function.
- while outer_scope.is_genexpr_scope or outer_scope.is_c_class_scope or outer_scope.is_py_class_scope:
- outer_scope = outer_scope.outer_scope
- self.var_entries = outer_scope.var_entries # keep declarations outside
- outer_scope.subscopes.add(self)
-
+ # Class/ExtType scopes are filled at class creation time, i.e. from the
+ # module init function or surrounding function.
+ while outer_scope.is_genexpr_scope or outer_scope.is_c_class_scope or outer_scope.is_py_class_scope:
+ outer_scope = outer_scope.outer_scope
+ self.var_entries = outer_scope.var_entries # keep declarations outside
+ outer_scope.subscopes.add(self)
+
def mangle(self, prefix, name):
return '%s%s' % (self.genexp_prefix, self.parent_scope.mangle(prefix, name))
@@ -1865,12 +1865,12 @@ class GeneratorExpressionScope(Scope):
# this scope must hold its name exclusively
cname = '%s%s' % (self.genexp_prefix, self.parent_scope.mangle(Naming.var_prefix, name or self.next_id()))
entry = self.declare(name, cname, type, pos, visibility)
- entry.is_variable = True
- if self.parent_scope.is_module_scope:
- entry.is_cglobal = True
- else:
- entry.is_local = True
- entry.in_subscope = True
+ entry.is_variable = True
+ if self.parent_scope.is_module_scope:
+ entry.is_cglobal = True
+ else:
+ entry.is_local = True
+ entry.in_subscope = True
self.var_entries.append(entry)
self.entries[name] = entry
return entry
@@ -1916,7 +1916,7 @@ class StructOrUnionScope(Scope):
def declare_var(self, name, type, pos,
cname = None, visibility = 'private',
api = 0, in_pxd = 0, is_cdef = 0,
- allow_pyobject=False, allow_memoryview=False):
+ allow_pyobject=False, allow_memoryview=False):
# Add an entry for an attribute.
if not cname:
cname = name
@@ -1928,12 +1928,12 @@ class StructOrUnionScope(Scope):
entry.is_variable = 1
self.var_entries.append(entry)
if type.is_pyobject and not allow_pyobject:
- error(pos, "C struct/union member cannot be a Python object")
- elif type.is_memoryviewslice and not allow_memoryview:
- # Memory views wrap their buffer owner as a Python object.
- error(pos, "C struct/union member cannot be a memory view")
+ error(pos, "C struct/union member cannot be a Python object")
+ elif type.is_memoryviewslice and not allow_memoryview:
+ # Memory views wrap their buffer owner as a Python object.
+ error(pos, "C struct/union member cannot be a memory view")
if visibility != 'private':
- error(pos, "C struct/union member cannot be declared %s" % visibility)
+ error(pos, "C struct/union member cannot be declared %s" % visibility)
return entry
def declare_cfunction(self, name, type, pos,
@@ -2018,7 +2018,7 @@ class PyClassScope(ClassScope):
orig_entry = self.lookup_here(name)
if orig_entry and orig_entry.scope is self and not orig_entry.from_closure:
error(pos, "'%s' redeclared as nonlocal" % name)
- orig_entry.already_declared_here()
+ orig_entry.already_declared_here()
else:
entry = self.lookup(name)
if entry is None:
@@ -2058,7 +2058,7 @@ class CClassScope(ClassScope):
# inherited_var_entries [Entry] Adapted var entries from base class
is_c_class_scope = 1
- is_closure_class_scope = False
+ is_closure_class_scope = False
has_pyobject_attrs = False
has_memoryview_attrs = False
@@ -2102,7 +2102,7 @@ class CClassScope(ClassScope):
for entry in self.var_entries:
if entry.type.is_pyobject:
- if include_weakref or (self.is_closure_class_scope or entry.name != "__weakref__"):
+ if include_weakref or (self.is_closure_class_scope or entry.name != "__weakref__"):
if include_gc_simple or not entry.type.is_gc_simple:
py_attrs.append(entry)
elif entry.type == PyrexTypes.c_py_buffer_type:
@@ -2122,7 +2122,7 @@ class CClassScope(ClassScope):
error(pos,
"C attributes cannot be added in implementation part of"
" extension type defined in a pxd")
- if not self.is_closure_class_scope and get_special_method_signature(name):
+ if not self.is_closure_class_scope and get_special_method_signature(name):
error(pos,
"The name '%s' is reserved for a special method."
% name)
@@ -2140,7 +2140,7 @@ class CClassScope(ClassScope):
self.has_memoryview_attrs = True
elif type.is_cpp_class:
self.has_cpp_class_attrs = True
- elif type.is_pyobject and (self.is_closure_class_scope or name != '__weakref__'):
+ elif type.is_pyobject and (self.is_closure_class_scope or name != '__weakref__'):
self.has_pyobject_attrs = True
if (not type.is_builtin_type
or not type.scope or type.scope.needs_gc()):
@@ -2153,7 +2153,7 @@ class CClassScope(ClassScope):
# so do conversion ourself rather than rely on the CPython mechanism (through
# a property; made in AnalyseDeclarationsTransform).
entry.needs_property = True
- if not self.is_closure_class_scope and name == "__weakref__":
+ if not self.is_closure_class_scope and name == "__weakref__":
error(pos, "Special attribute __weakref__ cannot be exposed to Python")
if not (type.is_pyobject or type.can_coerce_to_pyobject(self)):
# we're not testing for coercion *from* Python here - that would fail later
@@ -2177,13 +2177,13 @@ class CClassScope(ClassScope):
def declare_pyfunction(self, name, pos, allow_redefine=False):
# Add an entry for a method.
- if name in richcmp_special_methods:
- if self.lookup_here('__richcmp__'):
- error(pos, "Cannot define both %s and __richcmp__" % name)
- elif name == '__richcmp__':
- for n in richcmp_special_methods:
- if self.lookup_here(n):
- error(pos, "Cannot define both %s and __richcmp__" % n)
+ if name in richcmp_special_methods:
+ if self.lookup_here('__richcmp__'):
+ error(pos, "Cannot define both %s and __richcmp__" % name)
+ elif name == '__richcmp__':
+ for n in richcmp_special_methods:
+ if self.lookup_here(n):
+ error(pos, "Cannot define both %s and __richcmp__" % n)
if name == "__new__":
error(pos, "__new__ method of extension type will change semantics "
"in a future version of Pyrex and Cython. Use __cinit__ instead.")
@@ -2203,7 +2203,7 @@ class CClassScope(ClassScope):
return entry
def lookup_here(self, name):
- if not self.is_closure_class_scope and name == "__new__":
+ if not self.is_closure_class_scope and name == "__new__":
name = EncodedString("__cinit__")
entry = ClassScope.lookup_here(self, name)
if entry and entry.is_builtin_cmethod:
@@ -2242,18 +2242,18 @@ class CClassScope(ClassScope):
if entry.is_final_cmethod and entry.is_inherited:
error(pos, "Overriding final methods is not allowed")
elif type.same_c_signature_as(entry.type, as_cmethod = 1) and type.nogil == entry.type.nogil:
- # Fix with_gil vs nogil.
- entry.type = entry.type.with_with_gil(type.with_gil)
+ # Fix with_gil vs nogil.
+ entry.type = entry.type.with_with_gil(type.with_gil)
elif type.compatible_signature_with(entry.type, as_cmethod = 1) and type.nogil == entry.type.nogil:
- if (self.defined and not in_pxd
- and not type.same_c_signature_as_resolved_type(entry.type, as_cmethod = 1, as_pxd_definition = 1)):
- # TODO(robertwb): Make this an error.
- warning(pos,
- "Compatible but non-identical C method '%s' not redeclared "
+ if (self.defined and not in_pxd
+ and not type.same_c_signature_as_resolved_type(entry.type, as_cmethod = 1, as_pxd_definition = 1)):
+ # TODO(robertwb): Make this an error.
+ warning(pos,
+ "Compatible but non-identical C method '%s' not redeclared "
"in definition part of extension type '%s'. "
"This may cause incorrect vtables to be generated." % (
name, self.class_name), 2)
- warning(entry.pos, "Previous declaration is here", 2)
+ warning(entry.pos, "Previous declaration is here", 2)
entry = self.add_cfunction(name, type, pos, cname, visibility='ignore', modifiers=modifiers)
else:
error(pos, "Signature not compatible with previous declaration")
@@ -2262,7 +2262,7 @@ class CClassScope(ClassScope):
if self.defined:
error(pos,
"C method '%s' not previously declared in definition part of"
- " extension type '%s'" % (name, self.class_name))
+ " extension type '%s'" % (name, self.class_name))
entry = self.add_cfunction(name, type, pos, cname, visibility, modifiers)
if defining:
entry.func_cname = self.mangle(Naming.func_prefix, name)
@@ -2279,11 +2279,11 @@ class CClassScope(ClassScope):
return entry
- def add_cfunction(self, name, type, pos, cname, visibility, modifiers, inherited=False):
+ def add_cfunction(self, name, type, pos, cname, visibility, modifiers, inherited=False):
# Add a cfunction entry without giving it a func_cname.
prev_entry = self.lookup_here(name)
entry = ClassScope.add_cfunction(self, name, type, pos, cname,
- visibility, modifiers, inherited=inherited)
+ visibility, modifiers, inherited=inherited)
entry.is_cmethod = 1
entry.prev_entry = prev_entry
return entry
@@ -2345,7 +2345,7 @@ class CClassScope(ClassScope):
cname = adapt(cname)
entry = self.add_cfunction(base_entry.name, base_entry.type,
base_entry.pos, cname,
- base_entry.visibility, base_entry.func_modifiers, inherited=True)
+ base_entry.visibility, base_entry.func_modifiers, inherited=True)
entry.is_inherited = 1
if base_entry.is_final_cmethod:
entry.is_final_cmethod = True
@@ -2380,18 +2380,18 @@ class CppClassScope(Scope):
def declare_var(self, name, type, pos,
cname = None, visibility = 'extern',
- api = 0, in_pxd = 0, is_cdef = 0, defining = 0):
+ api = 0, in_pxd = 0, is_cdef = 0, defining = 0):
# Add an entry for an attribute.
if not cname:
cname = name
entry = self.lookup_here(name)
if defining and entry is not None:
- if entry.type.same_as(type):
- # Fix with_gil vs nogil.
- entry.type = entry.type.with_with_gil(type.with_gil)
- elif type.is_cfunction and type.compatible_signature_with(entry.type):
- entry.type = type
- else:
+ if entry.type.same_as(type):
+ # Fix with_gil vs nogil.
+ entry.type = entry.type.with_with_gil(type.with_gil)
+ elif type.is_cfunction and type.compatible_signature_with(entry.type):
+ entry.type = type
+ else:
error(pos, "Function signature does not match previous declaration")
else:
entry = self.declare(name, cname, type, pos, visibility)
@@ -2406,31 +2406,31 @@ class CppClassScope(Scope):
def declare_cfunction(self, name, type, pos,
cname=None, visibility='extern', api=0, in_pxd=0,
defining=0, modifiers=(), utility_code=None, overridable=False):
- class_name = self.name.split('::')[-1]
- if name in (class_name, '__init__') and cname is None:
- cname = "%s__init__%s" % (Naming.func_prefix, class_name)
+ class_name = self.name.split('::')[-1]
+ if name in (class_name, '__init__') and cname is None:
+ cname = "%s__init__%s" % (Naming.func_prefix, class_name)
name = '<init>'
- type.return_type = PyrexTypes.CVoidType()
- # This is called by the actual constructor, but needs to support
- # arguments that cannot be passed by value.
- type.original_args = type.args
- def maybe_ref(arg):
- if arg.type.is_cpp_class and not arg.type.is_reference:
- return PyrexTypes.CFuncTypeArg(
- arg.name, PyrexTypes.c_ref_type(arg.type), arg.pos)
- else:
- return arg
- type.args = [maybe_ref(arg) for arg in type.args]
+ type.return_type = PyrexTypes.CVoidType()
+ # This is called by the actual constructor, but needs to support
+ # arguments that cannot be passed by value.
+ type.original_args = type.args
+ def maybe_ref(arg):
+ if arg.type.is_cpp_class and not arg.type.is_reference:
+ return PyrexTypes.CFuncTypeArg(
+ arg.name, PyrexTypes.c_ref_type(arg.type), arg.pos)
+ else:
+ return arg
+ type.args = [maybe_ref(arg) for arg in type.args]
elif name == '__dealloc__' and cname is None:
- cname = "%s__dealloc__%s" % (Naming.func_prefix, class_name)
+ cname = "%s__dealloc__%s" % (Naming.func_prefix, class_name)
name = '<del>'
- type.return_type = PyrexTypes.CVoidType()
- if name in ('<init>', '<del>') and type.nogil:
- for base in self.type.base_classes:
- base_entry = base.scope.lookup(name)
- if base_entry and not base_entry.type.nogil:
- error(pos, "Constructor cannot be called without GIL unless all base constructors can also be called without GIL")
- error(base_entry.pos, "Base constructor defined here.")
+ type.return_type = PyrexTypes.CVoidType()
+ if name in ('<init>', '<del>') and type.nogil:
+ for base in self.type.base_classes:
+ base_entry = base.scope.lookup(name)
+ if base_entry and not base_entry.type.nogil:
+ error(pos, "Constructor cannot be called without GIL unless all base constructors can also be called without GIL")
+ error(base_entry.pos, "Base constructor defined here.")
prev_entry = self.lookup_here(name)
entry = self.declare_var(name, type, pos,
defining=defining,
@@ -2441,22 +2441,22 @@ class CppClassScope(Scope):
type.entry = entry
return entry
- def declare_inherited_cpp_attributes(self, base_class):
- base_scope = base_class.scope
- template_type = base_class
- while getattr(template_type, 'template_type', None):
- template_type = template_type.template_type
- if getattr(template_type, 'templates', None):
- base_templates = [T.name for T in template_type.templates]
- else:
- base_templates = ()
+ def declare_inherited_cpp_attributes(self, base_class):
+ base_scope = base_class.scope
+ template_type = base_class
+ while getattr(template_type, 'template_type', None):
+ template_type = template_type.template_type
+ if getattr(template_type, 'templates', None):
+ base_templates = [T.name for T in template_type.templates]
+ else:
+ base_templates = ()
# Declare entries for all the C++ attributes of an
# inherited type, with cnames modified appropriately
# to work with this type.
for base_entry in \
base_scope.inherited_var_entries + base_scope.var_entries:
- #constructor/destructor is not inherited
- if base_entry.name in ("<init>", "<del>"):
+ #constructor/destructor is not inherited
+ if base_entry.name in ("<init>", "<del>"):
continue
#print base_entry.name, self.entries
if base_entry.name in self.entries:
@@ -2464,7 +2464,7 @@ class CppClassScope(Scope):
entry = self.declare(base_entry.name, base_entry.cname,
base_entry.type, None, 'extern')
entry.is_variable = 1
- entry.is_inherited = 1
+ entry.is_inherited = 1
self.inherited_var_entries.append(entry)
for base_entry in base_scope.cfunc_entries:
entry = self.declare_cfunction(base_entry.name, base_entry.type,
@@ -2473,12 +2473,12 @@ class CppClassScope(Scope):
modifiers=base_entry.func_modifiers,
utility_code=base_entry.utility_code)
entry.is_inherited = 1
- for base_entry in base_scope.type_entries:
- if base_entry.name not in base_templates:
- entry = self.declare_type(base_entry.name, base_entry.type,
- base_entry.pos, base_entry.cname,
- base_entry.visibility)
- entry.is_inherited = 1
+ for base_entry in base_scope.type_entries:
+ if base_entry.name not in base_templates:
+ entry = self.declare_type(base_entry.name, base_entry.type,
+ base_entry.pos, base_entry.cname,
+ base_entry.visibility)
+ entry.is_inherited = 1
def specialize(self, values, type_entry):
scope = CppClassScope(self.name, self.outer_scope)
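
The maybe_ref helper in the hunk above rewrites by-value C++ class parameters of the synthesized <init> wrapper into reference parameters, so that non-copyable argument types can still be forwarded to the real constructor. A minimal standalone sketch of that rewrite, using a hypothetical Arg class in place of PyrexTypes.CFuncTypeArg and a string tag in place of c_ref_type:

    # Sketch only: Arg and the 'ref:' tag are stand-ins, not the Cython API.
    class Arg:
        def __init__(self, name, type_, is_cpp_class=False, is_reference=False):
            self.name = name
            self.type = type_
            self.is_cpp_class = is_cpp_class
            self.is_reference = is_reference

    def maybe_ref(arg):
        # Pass C++ class values by reference so non-copyable types still work.
        if arg.is_cpp_class and not arg.is_reference:
            return Arg(arg.name, 'ref:' + arg.type,
                       is_cpp_class=True, is_reference=True)
        return arg

    args = [Arg('x', 'int'), Arg('v', 'Vec', is_cpp_class=True)]
    args = [maybe_ref(a) for a in args]
    assert args[1].type == 'ref:Vec' and args[1].is_reference
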
diff --git a/contrib/tools/cython/Cython/Compiler/Tests/TestMemView.py b/contrib/tools/cython/Cython/Compiler/Tests/TestMemView.py
index 793672925d..3792f26e99 100644
--- a/contrib/tools/cython/Cython/Compiler/Tests/TestMemView.py
+++ b/contrib/tools/cython/Cython/Compiler/Tests/TestMemView.py
@@ -13,7 +13,7 @@ class TestMemviewParsing(CythonTest):
def not_parseable(self, expected_error, s):
e = self.should_fail(lambda: self.fragment(s), Errors.CompileError)
self.assertEqual(expected_error, e.message_only)
-
+
def test_default_1dim(self):
self.parse(u"cdef int[:] x")
self.parse(u"cdef short int[:] x")
diff --git a/contrib/tools/cython/Cython/Compiler/Tests/TestTreeFragment.py b/contrib/tools/cython/Cython/Compiler/Tests/TestTreeFragment.py
index ef9cd62bb1..9ee8da5478 100644
--- a/contrib/tools/cython/Cython/Compiler/Tests/TestTreeFragment.py
+++ b/contrib/tools/cython/Cython/Compiler/Tests/TestTreeFragment.py
@@ -45,7 +45,7 @@ class TestTreeFragments(CythonTest):
T = F.substitute({"v" : NameNode(pos=None, name="a")})
v = F.root.stats[1].rhs.operand2.operand1
a = T.stats[1].rhs.operand2.operand1
- self.assertEqual(v.pos, a.pos)
+ self.assertEqual(v.pos, a.pos)
def test_temps(self):
TemplateTransform.temp_name_counter = 0
diff --git a/contrib/tools/cython/Cython/Compiler/Tests/TestTreePath.py b/contrib/tools/cython/Cython/Compiler/Tests/TestTreePath.py
index dd14846652..bee53b3d2b 100644
--- a/contrib/tools/cython/Cython/Compiler/Tests/TestTreePath.py
+++ b/contrib/tools/cython/Cython/Compiler/Tests/TestTreePath.py
@@ -20,75 +20,75 @@ class TestTreePath(TransformTest):
def test_node_path(self):
t = self._build_tree()
- self.assertEqual(2, len(find_all(t, "//DefNode")))
- self.assertEqual(2, len(find_all(t, "//NameNode")))
- self.assertEqual(1, len(find_all(t, "//ReturnStatNode")))
- self.assertEqual(1, len(find_all(t, "//DefNode//ReturnStatNode")))
+ self.assertEqual(2, len(find_all(t, "//DefNode")))
+ self.assertEqual(2, len(find_all(t, "//NameNode")))
+ self.assertEqual(1, len(find_all(t, "//ReturnStatNode")))
+ self.assertEqual(1, len(find_all(t, "//DefNode//ReturnStatNode")))
def test_node_path_star(self):
t = self._build_tree()
- self.assertEqual(10, len(find_all(t, "//*")))
- self.assertEqual(8, len(find_all(t, "//DefNode//*")))
- self.assertEqual(0, len(find_all(t, "//NameNode//*")))
+ self.assertEqual(10, len(find_all(t, "//*")))
+ self.assertEqual(8, len(find_all(t, "//DefNode//*")))
+ self.assertEqual(0, len(find_all(t, "//NameNode//*")))
def test_node_path_attribute(self):
t = self._build_tree()
- self.assertEqual(2, len(find_all(t, "//NameNode/@name")))
- self.assertEqual(['fun', 'decorator'], find_all(t, "//NameNode/@name"))
+ self.assertEqual(2, len(find_all(t, "//NameNode/@name")))
+ self.assertEqual(['fun', 'decorator'], find_all(t, "//NameNode/@name"))
def test_node_path_attribute_dotted(self):
t = self._build_tree()
- self.assertEqual(1, len(find_all(t, "//ReturnStatNode/@value.name")))
- self.assertEqual(['fun'], find_all(t, "//ReturnStatNode/@value.name"))
+ self.assertEqual(1, len(find_all(t, "//ReturnStatNode/@value.name")))
+ self.assertEqual(['fun'], find_all(t, "//ReturnStatNode/@value.name"))
def test_node_path_child(self):
t = self._build_tree()
- self.assertEqual(1, len(find_all(t, "//DefNode/ReturnStatNode/NameNode")))
- self.assertEqual(1, len(find_all(t, "//ReturnStatNode/NameNode")))
+ self.assertEqual(1, len(find_all(t, "//DefNode/ReturnStatNode/NameNode")))
+ self.assertEqual(1, len(find_all(t, "//ReturnStatNode/NameNode")))
def test_node_path_node_predicate(self):
t = self._build_tree()
- self.assertEqual(0, len(find_all(t, "//DefNode[.//ForInStatNode]")))
- self.assertEqual(2, len(find_all(t, "//DefNode[.//NameNode]")))
- self.assertEqual(1, len(find_all(t, "//ReturnStatNode[./NameNode]")))
- self.assertEqual(Nodes.ReturnStatNode,
- type(find_first(t, "//ReturnStatNode[./NameNode]")))
+ self.assertEqual(0, len(find_all(t, "//DefNode[.//ForInStatNode]")))
+ self.assertEqual(2, len(find_all(t, "//DefNode[.//NameNode]")))
+ self.assertEqual(1, len(find_all(t, "//ReturnStatNode[./NameNode]")))
+ self.assertEqual(Nodes.ReturnStatNode,
+ type(find_first(t, "//ReturnStatNode[./NameNode]")))
def test_node_path_node_predicate_step(self):
t = self._build_tree()
- self.assertEqual(2, len(find_all(t, "//DefNode[.//NameNode]")))
- self.assertEqual(8, len(find_all(t, "//DefNode[.//NameNode]//*")))
- self.assertEqual(1, len(find_all(t, "//DefNode[.//NameNode]//ReturnStatNode")))
- self.assertEqual(Nodes.ReturnStatNode,
- type(find_first(t, "//DefNode[.//NameNode]//ReturnStatNode")))
+ self.assertEqual(2, len(find_all(t, "//DefNode[.//NameNode]")))
+ self.assertEqual(8, len(find_all(t, "//DefNode[.//NameNode]//*")))
+ self.assertEqual(1, len(find_all(t, "//DefNode[.//NameNode]//ReturnStatNode")))
+ self.assertEqual(Nodes.ReturnStatNode,
+ type(find_first(t, "//DefNode[.//NameNode]//ReturnStatNode")))
def test_node_path_attribute_exists(self):
t = self._build_tree()
- self.assertEqual(2, len(find_all(t, "//NameNode[@name]")))
- self.assertEqual(ExprNodes.NameNode,
- type(find_first(t, "//NameNode[@name]")))
+ self.assertEqual(2, len(find_all(t, "//NameNode[@name]")))
+ self.assertEqual(ExprNodes.NameNode,
+ type(find_first(t, "//NameNode[@name]")))
def test_node_path_attribute_exists_not(self):
t = self._build_tree()
- self.assertEqual(0, len(find_all(t, "//NameNode[not(@name)]")))
- self.assertEqual(2, len(find_all(t, "//NameNode[not(@honking)]")))
+ self.assertEqual(0, len(find_all(t, "//NameNode[not(@name)]")))
+ self.assertEqual(2, len(find_all(t, "//NameNode[not(@honking)]")))
def test_node_path_and(self):
t = self._build_tree()
- self.assertEqual(1, len(find_all(t, "//DefNode[.//ReturnStatNode and .//NameNode]")))
- self.assertEqual(0, len(find_all(t, "//NameNode[@honking and @name]")))
- self.assertEqual(0, len(find_all(t, "//NameNode[@name and @honking]")))
- self.assertEqual(2, len(find_all(t, "//DefNode[.//NameNode[@name] and @name]")))
+ self.assertEqual(1, len(find_all(t, "//DefNode[.//ReturnStatNode and .//NameNode]")))
+ self.assertEqual(0, len(find_all(t, "//NameNode[@honking and @name]")))
+ self.assertEqual(0, len(find_all(t, "//NameNode[@name and @honking]")))
+ self.assertEqual(2, len(find_all(t, "//DefNode[.//NameNode[@name] and @name]")))
def test_node_path_attribute_string_predicate(self):
t = self._build_tree()
- self.assertEqual(1, len(find_all(t, "//NameNode[@name = 'decorator']")))
+ self.assertEqual(1, len(find_all(t, "//NameNode[@name = 'decorator']")))
def test_node_path_recursive_predicate(self):
t = self._build_tree()
- self.assertEqual(2, len(find_all(t, "//DefNode[.//NameNode[@name]]")))
- self.assertEqual(1, len(find_all(t, "//DefNode[.//NameNode[@name = 'decorator']]")))
- self.assertEqual(1, len(find_all(t, "//DefNode[.//ReturnStatNode[./NameNode[@name = 'fun']]/NameNode]")))
+ self.assertEqual(2, len(find_all(t, "//DefNode[.//NameNode[@name]]")))
+ self.assertEqual(1, len(find_all(t, "//DefNode[.//NameNode[@name = 'decorator']]")))
+ self.assertEqual(1, len(find_all(t, "//DefNode[.//ReturnStatNode[./NameNode[@name = 'fun']]/NameNode]")))
if __name__ == '__main__':
unittest.main()
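
The tests above exercise Cython's TreePath mini-language: '//X' matches a node type at any depth, '/X' matches a direct child, and '[...]' adds predicates. A toy model of the descendant axis, using a hypothetical Node class rather than the real Cython AST:

    class Node:
        def __init__(self, tag, children=()):
            self.tag = tag
            self.children = list(children)

    def descendants(node):
        # Depth-first walk over all nodes below `node`.
        for child in node.children:
            yield child
            for sub in descendants(child):
                yield sub

    def find_all(tree, tag):
        # Roughly what the '//Tag' pattern selects.
        return [n for n in descendants(tree) if n.tag == tag]

    tree = Node('ModuleNode', [
        Node('DefNode', [Node('ReturnStatNode', [Node('NameNode')])]),
    ])
    assert len(find_all(tree, 'NameNode')) == 1
    assert len(find_all(tree, 'DefNode')) == 1
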
diff --git a/contrib/tools/cython/Cython/Compiler/Tests/TestTypes.py b/contrib/tools/cython/Cython/Compiler/Tests/TestTypes.py
index 11e07dd75c..f2f6f3773b 100644
--- a/contrib/tools/cython/Cython/Compiler/Tests/TestTypes.py
+++ b/contrib/tools/cython/Cython/Compiler/Tests/TestTypes.py
@@ -1,19 +1,19 @@
-from __future__ import absolute_import
-
-import unittest
-
-import Cython.Compiler.PyrexTypes as PT
-
-
-class TestMethodDispatcherTransform(unittest.TestCase):
-
- def test_widest_numeric_type(self):
- def assert_widest(type1, type2, widest):
- self.assertEqual(widest, PT.widest_numeric_type(type1, type2))
-
- assert_widest(PT.c_int_type, PT.c_long_type, PT.c_long_type)
- assert_widest(PT.c_double_type, PT.c_long_type, PT.c_double_type)
- assert_widest(PT.c_longdouble_type, PT.c_long_type, PT.c_longdouble_type)
-
- cenum = PT.CEnumType("E", "cenum", typedef_flag=False)
- assert_widest(PT.c_int_type, cenum, PT.c_int_type)
+from __future__ import absolute_import
+
+import unittest
+
+import Cython.Compiler.PyrexTypes as PT
+
+
+class TestMethodDispatcherTransform(unittest.TestCase):
+
+ def test_widest_numeric_type(self):
+ def assert_widest(type1, type2, widest):
+ self.assertEqual(widest, PT.widest_numeric_type(type1, type2))
+
+ assert_widest(PT.c_int_type, PT.c_long_type, PT.c_long_type)
+ assert_widest(PT.c_double_type, PT.c_long_type, PT.c_double_type)
+ assert_widest(PT.c_longdouble_type, PT.c_long_type, PT.c_longdouble_type)
+
+ cenum = PT.CEnumType("E", "cenum", typedef_flag=False)
+ assert_widest(PT.c_int_type, cenum, PT.c_int_type)
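
widest_numeric_type picks the type that can represent both operands: integers widen to the longer integer, and any floating type beats any integer. A rank-based toy version of the assertions above (the ordering is an illustration, not Cython's actual promotion table):

    RANK = {'int': 0, 'long': 1, 'double': 2, 'longdouble': 3}

    def widest_numeric_type(t1, t2):
        # Return whichever type has the higher promotion rank.
        return t1 if RANK[t1] >= RANK[t2] else t2

    assert widest_numeric_type('int', 'long') == 'long'
    assert widest_numeric_type('double', 'long') == 'double'
    assert widest_numeric_type('longdouble', 'long') == 'longdouble'
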
diff --git a/contrib/tools/cython/Cython/Compiler/Tests/TestUtilityLoad.py b/contrib/tools/cython/Cython/Compiler/Tests/TestUtilityLoad.py
index fe360ec53a..3d1906ca0b 100644
--- a/contrib/tools/cython/Cython/Compiler/Tests/TestUtilityLoad.py
+++ b/contrib/tools/cython/Cython/Compiler/Tests/TestUtilityLoad.py
@@ -23,27 +23,27 @@ class TestUtilityLoader(unittest.TestCase):
def test_load_as_string(self):
got = strip_2tup(self.cls.load_as_string(self.name))
- self.assertEqual(got, self.expected)
+ self.assertEqual(got, self.expected)
got = strip_2tup(self.cls.load_as_string(self.name, self.filename))
- self.assertEqual(got, self.expected)
+ self.assertEqual(got, self.expected)
def test_load(self):
utility = self.cls.load(self.name)
got = strip_2tup((utility.proto, utility.impl))
- self.assertEqual(got, self.expected)
+ self.assertEqual(got, self.expected)
required, = utility.requires
got = strip_2tup((required.proto, required.impl))
- self.assertEqual(got, self.required)
+ self.assertEqual(got, self.required)
utility = self.cls.load(self.name, from_file=self.filename)
got = strip_2tup((utility.proto, utility.impl))
- self.assertEqual(got, self.expected)
+ self.assertEqual(got, self.expected)
utility = self.cls.load_cached(self.name, from_file=self.filename)
got = strip_2tup((utility.proto, utility.impl))
- self.assertEqual(got, self.expected)
+ self.assertEqual(got, self.expected)
class TestTempitaUtilityLoader(TestUtilityLoader):
@@ -60,20 +60,20 @@ class TestTempitaUtilityLoader(TestUtilityLoader):
def test_load_as_string(self):
got = strip_2tup(self.cls.load_as_string(self.name, context=self.context))
- self.assertEqual(got, self.expected_tempita)
+ self.assertEqual(got, self.expected_tempita)
def test_load(self):
utility = self.cls.load(self.name, context=self.context)
got = strip_2tup((utility.proto, utility.impl))
- self.assertEqual(got, self.expected_tempita)
+ self.assertEqual(got, self.expected_tempita)
required, = utility.requires
got = strip_2tup((required.proto, required.impl))
- self.assertEqual(got, self.required_tempita)
+ self.assertEqual(got, self.required_tempita)
utility = self.cls.load(self.name, from_file=self.filename, context=self.context)
got = strip_2tup((utility.proto, utility.impl))
- self.assertEqual(got, self.expected_tempita)
+ self.assertEqual(got, self.expected_tempita)
class TestCythonUtilityLoader(TestTempitaUtilityLoader):
diff --git a/contrib/tools/cython/Cython/Compiler/TreeFragment.py b/contrib/tools/cython/Cython/Compiler/TreeFragment.py
index ca4c636d99..b85da8191a 100644
--- a/contrib/tools/cython/Cython/Compiler/TreeFragment.py
+++ b/contrib/tools/cython/Cython/Compiler/TreeFragment.py
@@ -24,7 +24,7 @@ from . import UtilNodes
class StringParseContext(Main.Context):
- def __init__(self, name, include_directories=None, compiler_directives=None, cpp=False):
+ def __init__(self, name, include_directories=None, compiler_directives=None, cpp=False):
if include_directories is None:
include_directories = []
if compiler_directives is None:
@@ -209,9 +209,9 @@ def strip_common_indent(lines):
"""Strips empty lines and common indentation from the list of strings given in lines"""
# TODO: Facilitate textwrap.indent instead
lines = [x for x in lines if x.strip() != u""]
- if lines:
- minindent = min([len(_match_indent(x).group(0)) for x in lines])
- lines = [x[minindent:] for x in lines]
+ if lines:
+ minindent = min([len(_match_indent(x).group(0)) for x in lines])
+ lines = [x[minindent:] for x in lines]
return lines
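
The `if lines:` guard above avoids calling min() on an empty sequence once all-blank input has been filtered out. A self-contained re-implementation showing the behaviour (the indent-matching regex is reconstructed here and may differ in detail from Cython's `_match_indent`):

    import re

    _match_indent = re.compile(u"^ *").match

    def strip_common_indent(lines):
        # Drop blank lines, then strip the smallest shared leading-space prefix.
        lines = [x for x in lines if x.strip() != u""]
        if lines:
            minindent = min(len(_match_indent(x).group(0)) for x in lines)
            lines = [x[minindent:] for x in lines]
        return lines

    assert strip_common_indent(["    a", "", "        b"]) == ["a", "    b"]
    assert strip_common_indent(["   "]) == []  # guard: no min() over []
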
diff --git a/contrib/tools/cython/Cython/Compiler/TreePath.py b/contrib/tools/cython/Cython/Compiler/TreePath.py
index da3877dbe8..8585905557 100644
--- a/contrib/tools/cython/Cython/Compiler/TreePath.py
+++ b/contrib/tools/cython/Cython/Compiler/TreePath.py
@@ -191,8 +191,8 @@ def parse_path_value(next):
return int(value)
except ValueError:
pass
- elif token[1].isdigit():
- return int(token[1])
+ elif token[1].isdigit():
+ return int(token[1])
else:
name = token[1].lower()
if name == 'true':
diff --git a/contrib/tools/cython/Cython/Compiler/TypeInference.py b/contrib/tools/cython/Cython/Compiler/TypeInference.py
index 939db6cc94..c7ffee7d24 100644
--- a/contrib/tools/cython/Cython/Compiler/TypeInference.py
+++ b/contrib/tools/cython/Cython/Compiler/TypeInference.py
@@ -250,7 +250,7 @@ class MarkParallelAssignments(EnvTransform):
def visit_YieldExprNode(self, node):
if self.parallel_block_stack:
- error(node.pos, "'%s' not allowed in parallel sections" % node.expr_keyword)
+ error(node.pos, "'%s' not allowed in parallel sections" % node.expr_keyword)
return node
def visit_ReturnStatNode(self, node):
@@ -306,13 +306,13 @@ class MarkOverflowingArithmetic(CythonTransform):
else:
return self.visit_dangerous_node(node)
- def visit_SimpleCallNode(self, node):
- if node.function.is_name and node.function.name == 'abs':
-            # Overflows for the minimum value of fixed-size ints.
- return self.visit_dangerous_node(node)
- else:
- return self.visit_neutral_node(node)
-
+ def visit_SimpleCallNode(self, node):
+ if node.function.is_name and node.function.name == 'abs':
+            # Overflows for the minimum value of fixed-size ints.
+ return self.visit_dangerous_node(node)
+ else:
+ return self.visit_neutral_node(node)
+
visit_UnopNode = visit_neutral_node
visit_UnaryMinusNode = visit_dangerous_node
@@ -378,7 +378,7 @@ class SimpleAssignmentTypeInferer(object):
self.set_entry_type(entry, py_object_type)
return
- # Set of assignments
+ # Set of assignments
assignments = set()
assmts_resolved = set()
dependencies = {}
@@ -415,24 +415,24 @@ class SimpleAssignmentTypeInferer(object):
entry = node.entry
return spanning_type(types, entry.might_overflow, entry.pos, scope)
- def inferred_types(entry):
- has_none = False
- has_pyobjects = False
- types = []
- for assmt in entry.cf_assignments:
- if assmt.rhs.is_none:
- has_none = True
- else:
- rhs_type = assmt.inferred_type
- if rhs_type and rhs_type.is_pyobject:
- has_pyobjects = True
- types.append(rhs_type)
-        # Ignore None assignments as long as there are concrete Python type assignments,
-        # but include them if None is the only assigned Python object.
- if has_none and not has_pyobjects:
- types.append(py_object_type)
- return types
-
+ def inferred_types(entry):
+ has_none = False
+ has_pyobjects = False
+ types = []
+ for assmt in entry.cf_assignments:
+ if assmt.rhs.is_none:
+ has_none = True
+ else:
+ rhs_type = assmt.inferred_type
+ if rhs_type and rhs_type.is_pyobject:
+ has_pyobjects = True
+ types.append(rhs_type)
+        # Ignore None assignments as long as there are concrete Python type assignments,
+        # but include them if None is the only assigned Python object.
+ if has_none and not has_pyobjects:
+ types.append(py_object_type)
+ return types
+
def resolve_assignments(assignments):
resolved = set()
for assmt in assignments:
@@ -485,7 +485,7 @@ class SimpleAssignmentTypeInferer(object):
continue
entry_type = py_object_type
if assmts_resolved.issuperset(entry.cf_assignments):
- types = inferred_types(entry)
+ types = inferred_types(entry)
if types and all(types):
entry_type = spanning_type(
types, entry.might_overflow, entry.pos, scope)
@@ -495,9 +495,9 @@ class SimpleAssignmentTypeInferer(object):
def reinfer():
dirty = False
for entry in inferred:
- for assmt in entry.cf_assignments:
- assmt.infer_type()
- types = inferred_types(entry)
+ for assmt in entry.cf_assignments:
+ assmt.infer_type()
+ types = inferred_types(entry)
new_type = spanning_type(types, entry.might_overflow, entry.pos, scope)
if new_type != entry.type:
self.set_entry_type(entry, new_type)
@@ -563,8 +563,8 @@ def safe_spanning_type(types, might_overflow, pos, scope):
# find_spanning_type() only returns 'bint' for clean boolean
# operations without other int types, so this is safe, too
return result_type
- elif result_type.is_pythran_expr:
- return result_type
+ elif result_type.is_pythran_expr:
+ return result_type
elif result_type.is_ptr:
# Any pointer except (signed|unsigned|) char* can't implicitly
# become a PyObject, and inferring char* is now accepted, too.
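
The inferred_types helper restored above applies one subtle rule: assignments of None are ignored while concrete Python-object assignments exist, but if None is the only Python object ever assigned, the entry must fall back to the generic object type. A hedged sketch of just that rule, with illustrative string type names and None standing for a literal None right-hand side:

    def combine_rhs_types(rhs_types):
        has_none = any(t is None for t in rhs_types)
        concrete = [t for t in rhs_types if t is not None]
        # In this sketch every concrete type is assumed to be a Python object,
        # mirroring the has_pyobjects flag in the real code.
        if has_none and not concrete:
            concrete.append('object')  # py_object_type in Cython
        return concrete

    assert combine_rhs_types([None]) == ['object']
    assert combine_rhs_types([None, 'list']) == ['list']
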
diff --git a/contrib/tools/cython/Cython/Compiler/TypeSlots.py b/contrib/tools/cython/Cython/Compiler/TypeSlots.py
index 3337cee960..0b4ff67042 100644
--- a/contrib/tools/cython/Cython/Compiler/TypeSlots.py
+++ b/contrib/tools/cython/Cython/Compiler/TypeSlots.py
@@ -12,9 +12,9 @@ from .Errors import error
invisible = ['__cinit__', '__dealloc__', '__richcmp__',
'__nonzero__', '__bool__']
-richcmp_special_methods = ['__eq__', '__ne__', '__lt__', '__gt__', '__le__', '__ge__']
+richcmp_special_methods = ['__eq__', '__ne__', '__lt__', '__gt__', '__le__', '__ge__']
+
-
class Signature(object):
# Method slot signature descriptor.
#
@@ -305,11 +305,11 @@ class MethodSlot(SlotDescriptor):
def slot_code(self, scope):
entry = scope.lookup_here(self.method_name)
- if entry and entry.is_special and entry.func_cname:
+ if entry and entry.is_special and entry.func_cname:
return entry.func_cname
for method_name in self.alternatives:
entry = scope.lookup_here(method_name)
- if entry and entry.is_special and entry.func_cname:
+ if entry and entry.is_special and entry.func_cname:
return entry.func_cname
return "0"
@@ -365,13 +365,13 @@ class ConstructorSlot(InternalMethodSlot):
self.method = method
def slot_code(self, scope):
- entry = scope.lookup_here(self.method)
+ entry = scope.lookup_here(self.method)
if (self.slot_name != 'tp_new'
and scope.parent_type.base_type
and not scope.has_pyobject_attrs
and not scope.has_memoryview_attrs
and not scope.has_cpp_class_attrs
- and not (entry and entry.is_special)):
+ and not (entry and entry.is_special)):
# if the type does not have object attributes, it can
# delegate GC methods to its parent - iff the parent
# functions are defined in the same module
@@ -380,8 +380,8 @@ class ConstructorSlot(InternalMethodSlot):
entry = scope.parent_scope.lookup_here(scope.parent_type.base_type.name)
if entry.visibility != 'extern':
return self.slot_code(parent_type_scope)
- if entry and not entry.is_special:
- return "0"
+ if entry and not entry.is_special:
+ return "0"
return InternalMethodSlot.slot_code(self, scope)
@@ -399,23 +399,23 @@ class SyntheticSlot(InternalMethodSlot):
self.default_value = default_value
def slot_code(self, scope):
- if scope.defines_any_special(self.user_methods):
+ if scope.defines_any_special(self.user_methods):
return InternalMethodSlot.slot_code(self, scope)
else:
return self.default_value
-class RichcmpSlot(MethodSlot):
- def slot_code(self, scope):
- entry = scope.lookup_here(self.method_name)
- if entry and entry.is_special and entry.func_cname:
- return entry.func_cname
- elif scope.defines_any_special(richcmp_special_methods):
- return scope.mangle_internal(self.slot_name)
- else:
- return "0"
-
-
+class RichcmpSlot(MethodSlot):
+ def slot_code(self, scope):
+ entry = scope.lookup_here(self.method_name)
+ if entry and entry.is_special and entry.func_cname:
+ return entry.func_cname
+ elif scope.defines_any_special(richcmp_special_methods):
+ return scope.mangle_internal(self.slot_name)
+ else:
+ return "0"
+
+
class TypeFlagsSlot(SlotDescriptor):
# Descriptor for the type flags slot.
@@ -535,7 +535,7 @@ class DictOffsetSlot(SlotDescriptor):
# Slot descriptor for a class' dict offset, for dynamic attributes.
def slot_code(self, scope):
- dict_entry = scope.lookup_here("__dict__") if not scope.is_closure_class_scope else None
+ dict_entry = scope.lookup_here("__dict__") if not scope.is_closure_class_scope else None
if dict_entry and dict_entry.is_variable:
if getattr(dict_entry.type, 'cname', None) != 'PyDict_Type':
error(dict_entry.pos, "__dict__ slot must be of type 'dict'")
@@ -575,8 +575,8 @@ def get_special_method_signature(name):
slot = method_name_to_slot.get(name)
if slot:
return slot.signature
- elif name in richcmp_special_methods:
- return ibinaryfunc
+ elif name in richcmp_special_methods:
+ return ibinaryfunc
else:
return None
@@ -612,20 +612,20 @@ def get_slot_function(scope, slot):
return slot_code
return None
-
-def get_slot_by_name(slot_name):
- # For now, only search the type struct, no referenced sub-structs.
- for slot in slot_table:
- if slot.slot_name == slot_name:
- return slot
- assert False, "Slot not found: %s" % slot_name
-
-
-def get_slot_code_by_name(scope, slot_name):
- slot = get_slot_by_name(slot_name)
- return slot.slot_code(scope)
-
-
+
+def get_slot_by_name(slot_name):
+ # For now, only search the type struct, no referenced sub-structs.
+ for slot in slot_table:
+ if slot.slot_name == slot_name:
+ return slot
+ assert False, "Slot not found: %s" % slot_name
+
+
+def get_slot_code_by_name(scope, slot_name):
+ slot = get_slot_by_name(slot_name)
+ return slot.slot_code(scope)
+
+
#------------------------------------------------------------------------------------------
#
# Signatures for generic Python functions and methods.
@@ -692,7 +692,7 @@ delattrofunc = Signature("TO", 'r')
cmpfunc = Signature("TO", "i") # typedef int (*cmpfunc)(PyObject *, PyObject *);
reprfunc = Signature("T", "O") # typedef PyObject *(*reprfunc)(PyObject *);
hashfunc = Signature("T", "h") # typedef Py_hash_t (*hashfunc)(PyObject *);
-richcmpfunc = Signature("TOi", "O") # typedef PyObject *(*richcmpfunc) (PyObject *, PyObject *, int);
+richcmpfunc = Signature("TOi", "O") # typedef PyObject *(*richcmpfunc) (PyObject *, PyObject *, int);
getiterfunc = Signature("T", "O") # typedef PyObject *(*getiterfunc) (PyObject *);
iternextfunc = Signature("T", "O") # typedef PyObject *(*iternextfunc) (PyObject *);
descrgetfunc = Signature("TOO", "O") # typedef PyObject *(*descrgetfunc) (PyObject *, PyObject *, PyObject *);
@@ -725,7 +725,7 @@ property_accessor_signatures = {
#
#------------------------------------------------------------------------------------------
-PyNumberMethods_Py3_GUARD = "PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000)"
+PyNumberMethods_Py3_GUARD = "PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000)"
PyNumberMethods = (
MethodSlot(binaryfunc, "nb_add", "__add__"),
@@ -856,7 +856,7 @@ slot_table = (
GCDependentSlot("tp_traverse"),
GCClearReferencesSlot("tp_clear"),
- RichcmpSlot(richcmpfunc, "tp_richcompare", "__richcmp__", inherited=False), # Py3 checks for __hash__
+ RichcmpSlot(richcmpfunc, "tp_richcompare", "__richcmp__", inherited=False), # Py3 checks for __hash__
EmptySlot("tp_weaklistoffset"),
@@ -910,7 +910,7 @@ MethodSlot(objargproc, "", "__delitem__")
MethodSlot(ssizessizeobjargproc, "", "__setslice__")
MethodSlot(ssizessizeargproc, "", "__delslice__")
MethodSlot(getattrofunc, "", "__getattr__")
-MethodSlot(getattrofunc, "", "__getattribute__")
+MethodSlot(getattrofunc, "", "__getattribute__")
MethodSlot(setattrofunc, "", "__setattr__")
MethodSlot(delattrofunc, "", "__delattr__")
MethodSlot(descrgetfunc, "", "__get__")
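
get_slot_by_name, added back above, is a plain linear scan of the slot table; get_slot_code_by_name then asks the found descriptor to render its C code for a given scope. A minimal sketch of the lookup with a hypothetical table:

    class Slot:
        def __init__(self, slot_name):
            self.slot_name = slot_name

    SLOT_TABLE = (Slot('tp_repr'), Slot('tp_hash'), Slot('tp_richcompare'))

    def get_slot_by_name(slot_name):
        # Only the top-level type struct is searched, as the comment above notes.
        for slot in SLOT_TABLE:
            if slot.slot_name == slot_name:
                return slot
        raise AssertionError("Slot not found: %s" % slot_name)

    assert get_slot_by_name('tp_richcompare') is SLOT_TABLE[2]
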
diff --git a/contrib/tools/cython/Cython/Compiler/UtilNodes.py b/contrib/tools/cython/Cython/Compiler/UtilNodes.py
index bafbbb37c2..c41748ace0 100644
--- a/contrib/tools/cython/Cython/Compiler/UtilNodes.py
+++ b/contrib/tools/cython/Cython/Compiler/UtilNodes.py
@@ -1,7 +1,7 @@
#
# Nodes used as utilities and support for transforms etc.
# These often make up sets including both Nodes and ExprNodes
-# so it is convenient to have them in a separate module.
+# so it is convenient to have them in a separate module.
#
from __future__ import absolute_import
@@ -267,9 +267,9 @@ class EvalWithTempExprNode(ExprNodes.ExprNode, LetNodeMixin):
def infer_type(self, env):
return self.subexpression.infer_type(env)
- def may_be_none(self):
- return self.subexpression.may_be_none()
-
+ def may_be_none(self):
+ return self.subexpression.may_be_none()
+
def result(self):
return self.subexpression.result()
diff --git a/contrib/tools/cython/Cython/Compiler/UtilityCode.py b/contrib/tools/cython/Cython/Compiler/UtilityCode.py
index 4a90470fd1..98e9ab5bfb 100644
--- a/contrib/tools/cython/Cython/Compiler/UtilityCode.py
+++ b/contrib/tools/cython/Cython/Compiler/UtilityCode.py
@@ -11,7 +11,7 @@ class NonManglingModuleScope(Symtab.ModuleScope):
def __init__(self, prefix, *args, **kw):
self.prefix = prefix
self.cython_scope = None
- self.cpp = kw.pop('cpp', False)
+ self.cpp = kw.pop('cpp', False)
Symtab.ModuleScope.__init__(self, *args, **kw)
def add_imported_entry(self, name, entry, pos):
@@ -43,7 +43,7 @@ class CythonUtilityCodeContext(StringParseContext):
if self.scope is None:
self.scope = NonManglingModuleScope(
- self.prefix, module_name, parent_module=None, context=self, cpp=self.cpp)
+ self.prefix, module_name, parent_module=None, context=self, cpp=self.cpp)
return self.scope
@@ -76,13 +76,13 @@ class CythonUtilityCode(Code.UtilityCodeBase):
# while the generated node trees can be altered in the compilation of a
# single file.
# Hence, delay any processing until later.
- context_types = {}
+ context_types = {}
if context is not None:
- from .PyrexTypes import BaseType
- for key, value in context.items():
- if isinstance(value, BaseType):
- context[key] = key
- context_types[key] = value
+ from .PyrexTypes import BaseType
+ for key, value in context.items():
+ if isinstance(value, BaseType):
+ context[key] = key
+ context_types[key] = value
impl = Code.sub_tempita(impl, context, file, name)
self.impl = impl
self.name = name
@@ -92,7 +92,7 @@ class CythonUtilityCode(Code.UtilityCodeBase):
self.from_scope = from_scope
self.outer_module_scope = outer_module_scope
self.compiler_directives = compiler_directives
- self.context_types = context_types
+ self.context_types = context_types
def __eq__(self, other):
if isinstance(other, CythonUtilityCode):
@@ -118,8 +118,8 @@ class CythonUtilityCode(Code.UtilityCodeBase):
from . import Pipeline, ParseTreeTransforms
context = CythonUtilityCodeContext(
- self.name, compiler_directives=self.compiler_directives,
- cpp=cython_scope.is_cpp() if cython_scope else False)
+ self.name, compiler_directives=self.compiler_directives,
+ cpp=cython_scope.is_cpp() if cython_scope else False)
context.prefix = self.prefix
context.cython_scope = cython_scope
#context = StringParseContext(self.name)
@@ -170,18 +170,18 @@ class CythonUtilityCode(Code.UtilityCodeBase):
pipeline, scope_transform,
before=ParseTreeTransforms.AnalyseDeclarationsTransform)
- if self.context_types:
- # inject types into module scope
- def scope_transform(module_node):
- for name, type in self.context_types.items():
- entry = module_node.scope.declare_type(name, type, None, visibility='extern')
- entry.in_cinclude = True
- return module_node
-
- pipeline = Pipeline.insert_into_pipeline(
- pipeline, scope_transform,
- before=ParseTreeTransforms.AnalyseDeclarationsTransform)
-
+ if self.context_types:
+ # inject types into module scope
+ def scope_transform(module_node):
+ for name, type in self.context_types.items():
+ entry = module_node.scope.declare_type(name, type, None, visibility='extern')
+ entry.in_cinclude = True
+ return module_node
+
+ pipeline = Pipeline.insert_into_pipeline(
+ pipeline, scope_transform,
+ before=ParseTreeTransforms.AnalyseDeclarationsTransform)
+
(err, tree) = Pipeline.run_pipeline(pipeline, tree, printtree=False)
assert not err, err
self.tree = tree
@@ -223,7 +223,7 @@ class CythonUtilityCode(Code.UtilityCodeBase):
for dep in self.requires:
if dep.is_cython_utility:
- dep.declare_in_scope(dest_scope, cython_scope=cython_scope)
+ dep.declare_in_scope(dest_scope, cython_scope=cython_scope)
return original_scope
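
CythonUtilityCode above splits its tempita context in two: BaseType values are replaced by their own key name, so the template substitutes a plain identifier, and the types are stashed in context_types for later injection into the module scope as extern declarations. A sketch of the split, with FakeType standing in for PyrexTypes.BaseType:

    class FakeType(object):
        def __init__(self, cname):
            self.cname = cname

    def split_context(context):
        context_types = {}
        for key, value in list(context.items()):
            if isinstance(value, FakeType):
                context[key] = key          # the template sees the plain name
                context_types[key] = value  # the scope transform declares the type
        return context, context_types

    ctx, types = split_context({'T': FakeType('int'), 'n': 3})
    assert ctx == {'T': 'T', 'n': 3}
    assert types['T'].cname == 'int'
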
diff --git a/contrib/tools/cython/Cython/Compiler/Visitor.py b/contrib/tools/cython/Cython/Compiler/Visitor.py
index c06b764f14..a35d13e1d0 100644
--- a/contrib/tools/cython/Cython/Compiler/Visitor.py
+++ b/contrib/tools/cython/Cython/Compiler/Visitor.py
@@ -598,23 +598,23 @@ class MethodDispatcherTransform(EnvTransform):
# into a C function call (defined in the builtin scope)
if not function.entry:
return node
- entry = function.entry
+ entry = function.entry
is_builtin = (
- entry.is_builtin or
- entry is self.current_env().builtin_scope().lookup_here(function.name))
+ entry.is_builtin or
+ entry is self.current_env().builtin_scope().lookup_here(function.name))
if not is_builtin:
if function.cf_state and function.cf_state.is_single:
# we know the value of the variable
# => see if it's usable instead
return self._delegate_to_assigned_value(
node, function, arg_list, kwargs)
- if arg_list and entry.is_cmethod and entry.scope and entry.scope.parent_type.is_builtin_type:
- if entry.scope.parent_type is arg_list[0].type:
- # Optimised (unbound) method of a builtin type => try to "de-optimise".
- return self._dispatch_to_method_handler(
- entry.name, self_arg=None, is_unbound_method=True,
- type_name=entry.scope.parent_type.name,
- node=node, function=function, arg_list=arg_list, kwargs=kwargs)
+ if arg_list and entry.is_cmethod and entry.scope and entry.scope.parent_type.is_builtin_type:
+ if entry.scope.parent_type is arg_list[0].type:
+ # Optimised (unbound) method of a builtin type => try to "de-optimise".
+ return self._dispatch_to_method_handler(
+ entry.name, self_arg=None, is_unbound_method=True,
+ type_name=entry.scope.parent_type.name,
+ node=node, function=function, arg_list=arg_list, kwargs=kwargs)
return node
function_handler = self._find_handler(
"function_%s" % function.name, kwargs)
@@ -640,7 +640,7 @@ class MethodDispatcherTransform(EnvTransform):
obj_type = self_arg.type
is_unbound_method = False
if obj_type.is_builtin_type:
- if obj_type is Builtin.type_type and self_arg.is_name and arg_list and arg_list[0].type.is_pyobject:
+ if obj_type is Builtin.type_type and self_arg.is_name and arg_list and arg_list[0].type.is_pyobject:
# calling an unbound method like 'list.append(L,x)'
# (ignoring 'type.mro()' here ...)
type_name = self_arg.name
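
The branch restored above turns an optimised unbound builtin-method call such as list.append(L, x) back into the equivalent bound call, so a single method handler covers both spellings. A toy of that "de-optimisation" at the Python level:

    def dispatch_unbound(type_obj, method_name, args):
        # Treat args[0] as the self argument, as in list.append(L, x).
        self_arg, rest = args[0], args[1:]
        if not isinstance(self_arg, type_obj):
            raise TypeError("descriptor '%s' requires a '%s' object"
                            % (method_name, type_obj.__name__))
        return getattr(self_arg, method_name)(*rest)

    L = [1, 2]
    dispatch_unbound(list, 'append', [L, 3])
    assert L == [1, 2, 3]
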
diff --git a/contrib/tools/cython/Cython/Coverage.py b/contrib/tools/cython/Cython/Coverage.py
index b6bb10073f..5aa9df2ce0 100644
--- a/contrib/tools/cython/Cython/Coverage.py
+++ b/contrib/tools/cython/Cython/Coverage.py
@@ -12,7 +12,7 @@ import sys
from collections import defaultdict
from coverage.plugin import CoveragePlugin, FileTracer, FileReporter # requires coverage.py 4.0+
-from coverage.files import canonical_filename
+from coverage.files import canonical_filename
from .Utils import find_root_package_dir, is_package_dir, open_source_file
@@ -46,8 +46,8 @@ def _find_dep_file_path(main_file, file_path, relative_path_search=False):
for sys_path in sys.path:
test_path = os.path.realpath(os.path.join(sys_path, file_path))
if os.path.exists(test_path):
- return canonical_filename(test_path)
- return canonical_filename(abs_path)
+ return canonical_filename(test_path)
+ return canonical_filename(abs_path)
class Plugin(CoveragePlugin):
@@ -65,14 +65,14 @@ class Plugin(CoveragePlugin):
"""
Try to find a C source file for a file path found by the tracer.
"""
-        # TODO: We need to add pxd-files to the include map. For more info see pybuild.py.
-        # For now, skip such files, because they are not supported in Arcadia pybuild with coverage.
- if os.path.splitext(filename)[-1] not in ('.pyx', '.pxi'):
- return None
+        # TODO: We need to add pxd-files to the include map. For more info see pybuild.py.
+        # For now, skip such files, because they are not supported in Arcadia pybuild with coverage.
+ if os.path.splitext(filename)[-1] not in ('.pyx', '.pxi'):
+ return None
if filename.startswith('<') or filename.startswith('memory:'):
return None
c_file = py_file = None
- filename = canonical_filename(filename)
+ filename = canonical_filename(filename)
if self._c_files_map and filename in self._c_files_map:
c_file = self._c_files_map[filename][0]
@@ -102,21 +102,21 @@ class Plugin(CoveragePlugin):
# from coverage.python import PythonFileReporter
# return PythonFileReporter(filename)
- filename = canonical_filename(filename)
+ filename = canonical_filename(filename)
if self._c_files_map and filename in self._c_files_map:
c_file, rel_file_path, code = self._c_files_map[filename]
else:
c_file, _ = self._find_source_files(filename)
if not c_file:
- if standalone():
- raise AssertionError(filename)
+ if standalone():
+ raise AssertionError(filename)
return None # unknown file
rel_file_path, code = self._read_source_lines(c_file, filename)
- if code is None:
- if standalone():
- raise AssertionError(filename)
- return None # no source found
-
+ if code is None:
+ if standalone():
+ raise AssertionError(filename)
+ return None # no source found
+
return CythonModuleReporter(c_file, filename, rel_file_path, code)
def _find_source_files(self, filename):
@@ -124,16 +124,16 @@ class Plugin(CoveragePlugin):
ext = ext.lower()
if ext in MODULE_FILE_EXTENSIONS:
pass
- elif ext == '.pyd':
- # Windows extension module
- platform_suffix = re.search(r'[.]cp[0-9]+-win[_a-z0-9]*$', basename, re.I)
+ elif ext == '.pyd':
+ # Windows extension module
+ platform_suffix = re.search(r'[.]cp[0-9]+-win[_a-z0-9]*$', basename, re.I)
+ if platform_suffix:
+ basename = basename[:platform_suffix.start()]
+ elif ext == '.so':
+ # Linux/Unix/Mac extension module
+ platform_suffix = re.search(r'[.](?:cpython|pypy)-[0-9]+[-_a-z0-9]*$', basename, re.I)
if platform_suffix:
basename = basename[:platform_suffix.start()]
- elif ext == '.so':
- # Linux/Unix/Mac extension module
- platform_suffix = re.search(r'[.](?:cpython|pypy)-[0-9]+[-_a-z0-9]*$', basename, re.I)
- if platform_suffix:
- basename = basename[:platform_suffix.start()]
elif ext == '.pxi':
# if we get here, it means that the first traced line of a Cython module was
# not in the main module but in an include file, so try a little harder to
@@ -141,8 +141,8 @@ class Plugin(CoveragePlugin):
self._find_c_source_files(os.path.dirname(filename), filename)
if filename in self._c_files_map:
return self._c_files_map[filename][0], None
- if standalone():
- raise AssertionError(filename)
+ if standalone():
+ raise AssertionError(filename)
else:
# none of our business
return None, None
@@ -176,20 +176,20 @@ class Plugin(CoveragePlugin):
Desperately parse all C files in the directory or its package parents
(not re-descending) to find the (included) source file in one of them.
"""
- if standalone():
- if os.environ.get('PYTHON_COVERAGE_CYTHON_BUILD_ROOT'):
- broot = os.environ['PYTHON_COVERAGE_CYTHON_BUILD_ROOT']
- iter_files = lambda: (os.path.join(root, filename) for root, _, files in os.walk(broot) for filename in files)
- else:
+ if standalone():
+ if os.environ.get('PYTHON_COVERAGE_CYTHON_BUILD_ROOT'):
+ broot = os.environ['PYTHON_COVERAGE_CYTHON_BUILD_ROOT']
+ iter_files = lambda: (os.path.join(root, filename) for root, _, files in os.walk(broot) for filename in files)
+ else:
import library.python.resource
iter_files = library.python.resource.resfs_files
- for c_file in iter_files():
- if os.path.splitext(c_file)[1] in C_FILE_EXTENSIONS:
- self._read_source_lines(c_file, source_file)
- if source_file in self._c_files_map:
- return
- raise AssertionError((source_file, os.environ.get('PYTHON_COVERAGE_CYTHON_BUILD_ROOT')))
-
+ for c_file in iter_files():
+ if os.path.splitext(c_file)[1] in C_FILE_EXTENSIONS:
+ self._read_source_lines(c_file, source_file)
+ if source_file in self._c_files_map:
+ return
+ raise AssertionError((source_file, os.environ.get('PYTHON_COVERAGE_CYTHON_BUILD_ROOT')))
+
if not os.path.isdir(dir_path):
return
splitext = os.path.splitext
@@ -248,7 +248,7 @@ class Plugin(CoveragePlugin):
executable_lines = defaultdict(set)
current_filename = None
- with OpenFile(c_file) as lines:
+ with OpenFile(c_file) as lines:
lines = iter(lines)
for line in lines:
match = match_source_path_line(line)
@@ -305,10 +305,10 @@ class CythonModuleTracer(FileTracer):
return self._file_path_map[source_file]
except KeyError:
pass
- if standalone():
- abs_path = self.module_file
- else:
- abs_path = _find_dep_file_path(filename, source_file)
+ if standalone():
+ abs_path = self.module_file
+ else:
+ abs_path = _find_dep_file_path(filename, source_file)
if self.py_file and source_file[-3:].lower() == '.py':
# always let coverage.py handle this case itself
@@ -331,7 +331,7 @@ class CythonModuleReporter(FileReporter):
self.name = rel_file_path
self.c_file = c_file
self._code = code
- self._abs_filename = self._find_abs_filename()
+ self._abs_filename = self._find_abs_filename()
def lines(self):
"""
@@ -352,8 +352,8 @@ class CythonModuleReporter(FileReporter):
"""
Return the source code of the file as a string.
"""
- if os.path.exists(self._abs_filename):
- with open_source_file(self._abs_filename) as f:
+ if os.path.exists(self._abs_filename):
+ with open_source_file(self._abs_filename) as f:
return f.read()
else:
return '\n'.join(
@@ -364,114 +364,114 @@ class CythonModuleReporter(FileReporter):
"""
Iterate over the source code tokens.
"""
- if os.path.exists(self._abs_filename):
- with open_source_file(self._abs_filename) as f:
+ if os.path.exists(self._abs_filename):
+ with open_source_file(self._abs_filename) as f:
for line in f:
yield [('txt', line.rstrip('\n'))]
else:
for line in self._iter_source_tokens():
- yield line
-
- def _find_abs_filename(self):
- for root in [
- os.environ.get('PYTHON_COVERAGE_ARCADIA_SOURCE_ROOT'),
- os.environ.get('PYTHON_COVERAGE_CYTHON_BUILD_ROOT'),
- ]:
- if root:
- abs_path = os.path.join(root, self.filename)
- if root and os.path.exists(abs_path):
- return abs_path
- return self.filename
-
-
+ yield line
+
+ def _find_abs_filename(self):
+ for root in [
+ os.environ.get('PYTHON_COVERAGE_ARCADIA_SOURCE_ROOT'),
+ os.environ.get('PYTHON_COVERAGE_CYTHON_BUILD_ROOT'),
+ ]:
+ if root:
+ abs_path = os.path.join(root, self.filename)
+ if root and os.path.exists(abs_path):
+ return abs_path
+ return self.filename
+
+
def coverage_init(reg, options):
reg.add_file_tracer(Plugin())
-
-
-# ========================== Arcadia specific =================================
-
-def standalone():
- return getattr(sys, 'is_standalone_binary', False)
-
-
-class OpenFile(object):
-
- def __init__(self, filename, mode='r'):
- assert 'r' in mode, ('Read-only', mode)
- self.filename = filename
- self.mode = mode
- self.file = None
- self.build_root = os.environ.get('PYTHON_COVERAGE_CYTHON_BUILD_ROOT')
-
- def __enter__(self):
- # See redefined _find_c_source() description for more info
- if self.build_root:
- self.file = open(os.path.join(self.build_root, self.filename), self.mode)
- return self.file
- elif standalone():
+
+
+# ========================== Arcadia specific =================================
+
+def standalone():
+ return getattr(sys, 'is_standalone_binary', False)
+
+
+class OpenFile(object):
+
+ def __init__(self, filename, mode='r'):
+ assert 'r' in mode, ('Read-only', mode)
+ self.filename = filename
+ self.mode = mode
+ self.file = None
+ self.build_root = os.environ.get('PYTHON_COVERAGE_CYTHON_BUILD_ROOT')
+
+ def __enter__(self):
+ # See redefined _find_c_source() description for more info
+ if self.build_root:
+ self.file = open(os.path.join(self.build_root, self.filename), self.mode)
+ return self.file
+ elif standalone():
import library.python.resource
from six import StringIO
-
+
content = library.python.resource.resfs_read(self.filename, builtin=True)
- assert content, (self.filename, os.environ.items())
+ assert content, (self.filename, os.environ.items())
return StringIO(content.decode())
- else:
- self.file = open(self.filename, self.mode)
- return self.file
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- if self.file:
- self.file.close()
-
-# ======================= Redefine some methods ===============================
-
-if standalone():
- import json
-
- CYTHON_INCLUDE_MAP = {'undef': True}
-
-
- def _find_c_source(base_path):
- '''
-        There are two different coverage stages when a C source file might be required:
-        * trace - Python calls c_tracefunc on every line and CythonModuleTracer needs to match
-          pyd and pxi files with source files. This is the test's runtime, and the test's clean
-          environment might not contain the required sources and generated files (c, cpp); that's why we get files from resfs_src.
-        * report - coverage data contains only covered data and CythonModuleReporter needs to
-          parse source files to obtain missing lines and branches. This is test_tool's resolve/build_report step.
-          test_tool doesn't have compiled-in sources; however, it can extract the required files
-          from the binary and set PYTHON_COVERAGE_CYTHON_BUILD_ROOT to guide coverage.
- '''
- if os.environ.get('PYTHON_COVERAGE_CYTHON_BUILD_ROOT'):
- # Report stage (resolve)
- def exists(filename):
- return os.path.exists(os.path.join(os.environ['PYTHON_COVERAGE_CYTHON_BUILD_ROOT'], filename))
- else:
- # Trace stage (test's runtime)
- def exists(filename):
+ else:
+ self.file = open(self.filename, self.mode)
+ return self.file
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ if self.file:
+ self.file.close()
+
+# ======================= Redefine some methods ===============================
+
+if standalone():
+ import json
+
+ CYTHON_INCLUDE_MAP = {'undef': True}
+
+
+ def _find_c_source(base_path):
+ '''
+        There are two different coverage stages when a C source file might be required:
+        * trace - Python calls c_tracefunc on every line and CythonModuleTracer needs to match
+          pyd and pxi files with source files. This is the test's runtime, and the test's clean
+          environment might not contain the required sources and generated files (c, cpp); that's why we get files from resfs_src.
+        * report - coverage data contains only covered data and CythonModuleReporter needs to
+          parse source files to obtain missing lines and branches. This is test_tool's resolve/build_report step.
+          test_tool doesn't have compiled-in sources; however, it can extract the required files
+          from the binary and set PYTHON_COVERAGE_CYTHON_BUILD_ROOT to guide coverage.
+ '''
+ if os.environ.get('PYTHON_COVERAGE_CYTHON_BUILD_ROOT'):
+ # Report stage (resolve)
+ def exists(filename):
+ return os.path.exists(os.path.join(os.environ['PYTHON_COVERAGE_CYTHON_BUILD_ROOT'], filename))
+ else:
+ # Trace stage (test's runtime)
+ def exists(filename):
import library.python.resource
return library.python.resource.resfs_src(filename, resfs_file=True)
-
- if os.environ.get('PYTHON_COVERAGE_CYTHON_INCLUDE_MAP'):
- if CYTHON_INCLUDE_MAP.get('undef'):
- with open(os.environ['PYTHON_COVERAGE_CYTHON_INCLUDE_MAP']) as afile:
- data = json.load(afile)
+
+ if os.environ.get('PYTHON_COVERAGE_CYTHON_INCLUDE_MAP'):
+ if CYTHON_INCLUDE_MAP.get('undef'):
+ with open(os.environ['PYTHON_COVERAGE_CYTHON_INCLUDE_MAP']) as afile:
+ data = json.load(afile)
data = {os.path.splitext(k)[0]: v for k, v in data.items()}
-
- CYTHON_INCLUDE_MAP.clear()
- CYTHON_INCLUDE_MAP.update(data)
-
- if base_path in CYTHON_INCLUDE_MAP:
- # target file was included and should be sought inside another pyx file
- base_path = CYTHON_INCLUDE_MAP[base_path]
-
- for suffix in ['.pyx.c', '.pyx.cpp'] + C_FILE_EXTENSIONS:
- if exists(base_path + suffix):
- return base_path + suffix
-
- return None
-
-
- def _find_dep_file_path(main_file, file_path, relative_path_search=False):
- # file_path is already arcadia root relative
- return canonical_filename(file_path)
+
+ CYTHON_INCLUDE_MAP.clear()
+ CYTHON_INCLUDE_MAP.update(data)
+
+ if base_path in CYTHON_INCLUDE_MAP:
+ # target file was included and should be sought inside another pyx file
+ base_path = CYTHON_INCLUDE_MAP[base_path]
+
+ for suffix in ['.pyx.c', '.pyx.cpp'] + C_FILE_EXTENSIONS:
+ if exists(base_path + suffix):
+ return base_path + suffix
+
+ return None
+
+
+ def _find_dep_file_path(main_file, file_path, relative_path_search=False):
+ # file_path is already arcadia root relative
+ return canonical_filename(file_path)
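
The Arcadia-specific block above resolves Cython sources from three places, in order: a build root named by PYTHON_COVERAGE_CYTHON_BUILD_ROOT, the resource filesystem embedded in a standalone binary, and finally the ordinary filesystem. A condensed sketch of that fallback chain, where read_resource is a stand-in for library.python.resource.resfs_read:

    import os
    from io import StringIO

    def open_source(filename, read_resource=None):
        build_root = os.environ.get('PYTHON_COVERAGE_CYTHON_BUILD_ROOT')
        if build_root:
            # Report stage: sources were extracted under the build root.
            return open(os.path.join(build_root, filename))
        if read_resource is not None:
            # Standalone binary: sources are compiled-in resources.
            return StringIO(read_resource(filename))
        return open(filename)
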
diff --git a/contrib/tools/cython/Cython/Debugger/Cygdb.py b/contrib/tools/cython/Cython/Debugger/Cygdb.py
index b71ba44d8e..45f31ce6f7 100644
--- a/contrib/tools/cython/Cython/Debugger/Cygdb.py
+++ b/contrib/tools/cython/Cython/Debugger/Cygdb.py
@@ -138,21 +138,21 @@ def main(path_to_debug_info=None, gdb_argv=None, no_import=False):
tempfilename = make_command_file(path_to_debug_info, no_import=no_import)
logger.info("Launching %s with command file: %s and gdb_argv: %s",
options.gdb, tempfilename, gdb_argv)
- with open(tempfilename) as tempfile:
- logger.debug('Command file (%s) contains: """\n%s"""', tempfilename, tempfile.read())
- logger.info("Spawning %s...", options.gdb)
- p = subprocess.Popen([options.gdb, '-command', tempfilename] + gdb_argv)
- logger.info("Spawned %s (pid %d)", options.gdb, p.pid)
- while True:
- try:
- logger.debug("Waiting for gdb (pid %d) to exit...", p.pid)
- ret = p.wait()
- logger.debug("Wait for gdb (pid %d) to exit is done. Returned: %r", p.pid, ret)
- except KeyboardInterrupt:
- pass
- else:
- break
- logger.debug("Closing temp command file with fd: %s", tempfile.fileno())
+ with open(tempfilename) as tempfile:
+ logger.debug('Command file (%s) contains: """\n%s"""', tempfilename, tempfile.read())
+ logger.info("Spawning %s...", options.gdb)
+ p = subprocess.Popen([options.gdb, '-command', tempfilename] + gdb_argv)
+ logger.info("Spawned %s (pid %d)", options.gdb, p.pid)
+ while True:
+ try:
+ logger.debug("Waiting for gdb (pid %d) to exit...", p.pid)
+ ret = p.wait()
+ logger.debug("Wait for gdb (pid %d) to exit is done. Returned: %r", p.pid, ret)
+ except KeyboardInterrupt:
+ pass
+ else:
+ break
+ logger.debug("Closing temp command file with fd: %s", tempfile.fileno())
logger.debug("Removing temp command file: %s", tempfilename)
os.remove(tempfilename)
logger.debug("Removed temp command file: %s", tempfilename)
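
The wait loop above exists so that Ctrl-C reaches the program being debugged inside gdb instead of killing the cygdb wrapper: the wrapper swallows KeyboardInterrupt and keeps waiting. The pattern in isolation:

    import subprocess

    def wait_ignoring_sigint(proc):
        while True:
            try:
                return proc.wait()
            except KeyboardInterrupt:
                # SIGINT went to the foreground gdb process; keep waiting.
                pass

    # Usage sketch, mirroring the code above:
    # p = subprocess.Popen([gdb, '-command', tempfilename] + gdb_argv)
    # ret = wait_ignoring_sigint(p)
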
diff --git a/contrib/tools/cython/Cython/Debugger/Tests/TestLibCython.py b/contrib/tools/cython/Cython/Debugger/Tests/TestLibCython.py
index be9eebf81f..13560646ff 100644
--- a/contrib/tools/cython/Cython/Debugger/Tests/TestLibCython.py
+++ b/contrib/tools/cython/Cython/Debugger/Tests/TestLibCython.py
@@ -90,8 +90,8 @@ class DebuggerTestCase(unittest.TestCase):
shutil.copy(codefile, self.destfile)
shutil.copy(cfuncs_file, self.cfuncs_destfile + '.c')
- shutil.copy(cfuncs_file.replace('.c', '.h'),
- self.cfuncs_destfile + '.h')
+ shutil.copy(cfuncs_file.replace('.c', '.h'),
+ self.cfuncs_destfile + '.h')
compiler = ccompiler.new_compiler()
compiler.compile(['cfuncs.c'], debug=True, extra_postargs=['-fPIC'])
diff --git a/contrib/tools/cython/Cython/Debugger/Tests/cfuncs.h b/contrib/tools/cython/Cython/Debugger/Tests/cfuncs.h
index 1fd59e5b02..3b21bc2d5f 100644
--- a/contrib/tools/cython/Cython/Debugger/Tests/cfuncs.h
+++ b/contrib/tools/cython/Cython/Debugger/Tests/cfuncs.h
@@ -1 +1 @@
-void some_c_function(void);
+void some_c_function(void);
diff --git a/contrib/tools/cython/Cython/Debugger/Tests/codefile b/contrib/tools/cython/Cython/Debugger/Tests/codefile
index 0150d68ee3..6b4c6b6add 100644
--- a/contrib/tools/cython/Cython/Debugger/Tests/codefile
+++ b/contrib/tools/cython/Cython/Debugger/Tests/codefile
@@ -1,7 +1,7 @@
cdef extern from "stdio.h":
int puts(char *s)
-cdef extern from "cfuncs.h":
+cdef extern from "cfuncs.h":
void some_c_function()
import os
@@ -12,9 +12,9 @@ python_var = 13
def spam(a=0):
cdef:
int b, c
-
+
b = c = d = 0
-
+
b = 1
c = 2
int(10)
@@ -23,11 +23,11 @@ def spam(a=0):
some_c_function()
cpdef eggs():
- pass
-
+ pass
+
cdef ham():
pass
-
+
cdef class SomeClass(object):
def spam(self):
pass
diff --git a/contrib/tools/cython/Cython/Debugger/Tests/test_libcython_in_gdb.py b/contrib/tools/cython/Cython/Debugger/Tests/test_libcython_in_gdb.py
index e2fc724a01..bd7608d607 100644
--- a/contrib/tools/cython/Cython/Debugger/Tests/test_libcython_in_gdb.py
+++ b/contrib/tools/cython/Cython/Debugger/Tests/test_libcython_in_gdb.py
@@ -19,7 +19,7 @@ import tempfile
import functools
import traceback
import itertools
-#from test import test_support
+#from test import test_support
import gdb
@@ -39,8 +39,8 @@ def print_on_call_decorator(func):
try:
return func(self, *args, **kwargs)
- except Exception:
- _debug("An exception occurred:", traceback.format_exc())
+ except Exception:
+ _debug("An exception occurred:", traceback.format_exc())
raise
return wrapper
diff --git a/contrib/tools/cython/Cython/Debugger/Tests/test_libpython_in_gdb.py b/contrib/tools/cython/Cython/Debugger/Tests/test_libpython_in_gdb.py
index 48e75b03bd..6f34cee47b 100644
--- a/contrib/tools/cython/Cython/Debugger/Tests/test_libpython_in_gdb.py
+++ b/contrib/tools/cython/Cython/Debugger/Tests/test_libpython_in_gdb.py
@@ -56,28 +56,28 @@ class TestPrettyPrinters(test_libcython_in_gdb.DebugTestCase):
else:
funcname = 'PyBytes_FromStringAndSize'
- assert b'"' not in string
+ assert b'"' not in string
# ensure double quotes
- code = '(PyObject *) %s("%s", %d)' % (funcname, string.decode('iso8859-1'), len(string))
+ code = '(PyObject *) %s("%s", %d)' % (funcname, string.decode('iso8859-1'), len(string))
return self.pyobject_fromcode(code, gdbvar=gdbvar)
def alloc_unicodestring(self, string, gdbvar=None):
postfix = libpython.get_inferior_unicode_postfix()
- funcname = 'PyUnicode%s_DecodeUnicodeEscape' % (postfix,)
+ funcname = 'PyUnicode%s_DecodeUnicodeEscape' % (postfix,)
- data = string.encode("unicode_escape").decode('iso8859-1')
+ data = string.encode("unicode_escape").decode('iso8859-1')
return self.pyobject_fromcode(
- '(PyObject *) %s("%s", %d, "strict")' % (
- funcname, data.replace('"', r'\"').replace('\\', r'\\'), len(data)),
+ '(PyObject *) %s("%s", %d, "strict")' % (
+ funcname, data.replace('"', r'\"').replace('\\', r'\\'), len(data)),
gdbvar=gdbvar)
def test_bytestring(self):
- bytestring = self.alloc_bytestring(b"spam")
+ bytestring = self.alloc_bytestring(b"spam")
if inferior_python_version < (3, 0):
bytestring_class = libpython.PyStringObjectPtr
- expected = repr(b"spam")
+ expected = repr(b"spam")
else:
bytestring_class = libpython.PyBytesObjectPtr
expected = "b'spam'"
@@ -88,7 +88,7 @@ class TestPrettyPrinters(test_libcython_in_gdb.DebugTestCase):
def test_unicode(self):
unicode_string = self.alloc_unicodestring(u"spam ἄλφα")
- expected = u"'spam ἄλφα'"
+ expected = u"'spam ἄλφα'"
if inferior_python_version < (3, 0):
expected = 'u' + expected
diff --git a/contrib/tools/cython/Cython/Debugger/libcython.py b/contrib/tools/cython/Cython/Debugger/libcython.py
index b47d63f289..23153789b6 100644
--- a/contrib/tools/cython/Cython/Debugger/libcython.py
+++ b/contrib/tools/cython/Cython/Debugger/libcython.py
@@ -18,13 +18,13 @@ import collections
import gdb
-try: # python 2
- UNICODE = unicode
- BYTES = str
-except NameError: # python 3
- UNICODE = str
- BYTES = bytes
-
+try: # python 2
+ UNICODE = unicode
+ BYTES = str
+except NameError: # python 3
+ UNICODE = str
+ BYTES = bytes
+
try:
from lxml import etree
have_lxml = True
@@ -488,7 +488,7 @@ class SourceFileDescriptor(object):
class CyGDBError(gdb.GdbError):
"""
- Base class for Cython-command related errors
+ Base class for Cython-command related errors
"""
def __init__(self, *args):
@@ -696,8 +696,8 @@ class CyImport(CythonCommand):
completer_class = gdb.COMPLETE_FILENAME
def invoke(self, args, from_tty):
- if isinstance(args, BYTES):
- args = args.decode(_filesystemencoding)
+ if isinstance(args, BYTES):
+ args = args.decode(_filesystemencoding)
for arg in string_to_argv(args):
try:
f = open(arg)
@@ -842,9 +842,9 @@ class CyBreak(CythonCommand):
gdb.execute('break %s' % func.pf_cname)
def invoke(self, function_names, from_tty):
- if isinstance(function_names, BYTES):
- function_names = function_names.decode(_filesystemencoding)
- argv = string_to_argv(function_names)
+ if isinstance(function_names, BYTES):
+ function_names = function_names.decode(_filesystemencoding)
+ argv = string_to_argv(function_names)
if function_names.startswith('-p'):
argv = argv[1:]
python_breakpoints = True
@@ -900,7 +900,7 @@ class CythonInfo(CythonBase, libpython.PythonInfo):
def lineno(self, frame):
# Take care of the Python and Cython levels. We need to care for both
- # as we can't simply dispatch to 'py-step', since that would work for
+ # as we can't simply dispatch to 'py-step', since that would work for
# stepping through Python code, but it would not step back into Cython-
# related code. The C level should be dispatched to the 'step' command.
if self.is_cython_function(frame):
diff --git a/contrib/tools/cython/Cython/Debugger/libpython.py b/contrib/tools/cython/Cython/Debugger/libpython.py
index 2d1d6c648f..fea626dd73 100644
--- a/contrib/tools/cython/Cython/Debugger/libpython.py
+++ b/contrib/tools/cython/Cython/Debugger/libpython.py
@@ -25,10 +25,10 @@ giving file/line information and the state of local variables
In particular, given a gdb.Value corresponding to a PyObject* in the inferior
process, we can generate a "proxy value" within the gdb process. For example,
given a PyObject* in the inferior process that is in fact a PyListObject*
-holding three PyObject* that turn out to be PyBytesObject* instances, we can
-generate a proxy value within the gdb process that is a list of bytes
-instances:
- [b"foo", b"bar", b"baz"]
+holding three PyObject* that turn out to be PyBytesObject* instances, we can
+generate a proxy value within the gdb process that is a list of bytes
+instances:
+ [b"foo", b"bar", b"baz"]
Doing so can be expensive for complicated graphs of objects, and could take
some time, so we also have a "write_repr" method that writes a representation
@@ -47,47 +47,47 @@ the type names are known to the debugger
The module also extends gdb with some python-specific commands.
'''
-# NOTE: some gdbs are linked with Python 3, so this file should be dual-syntax
-# compatible (2.6+ and 3.0+). See #19308.
+# NOTE: some gdbs are linked with Python 3, so this file should be dual-syntax
+# compatible (2.6+ and 3.0+). See #19308.
-from __future__ import print_function
-import gdb
+from __future__ import print_function
+import gdb
import os
-import locale
+import locale
import sys
-if sys.version_info[0] >= 3:
- unichr = chr
+if sys.version_info[0] >= 3:
+ unichr = chr
xrange = range
- long = int
+ long = int
# Look up the gdb.Type for some standard types:
-# Those need to be refreshed as types (pointer sizes) may change when
-# gdb loads different executables
-
-def _type_char_ptr():
- return gdb.lookup_type('char').pointer() # char*
-
-
-def _type_unsigned_char_ptr():
- return gdb.lookup_type('unsigned char').pointer() # unsigned char*
-
-
-def _type_unsigned_short_ptr():
- return gdb.lookup_type('unsigned short').pointer()
-
-
-def _type_unsigned_int_ptr():
- return gdb.lookup_type('unsigned int').pointer()
-
-
-def _sizeof_void_p():
- return gdb.lookup_type('void').pointer().sizeof
-
-
-# value computed later, see PyUnicodeObjectPtr.proxy()
-_is_pep393 = None
-
+# Those need to be refreshed as types (pointer sizes) may change when
+# gdb loads different executables
+
+def _type_char_ptr():
+ return gdb.lookup_type('char').pointer() # char*
+
+
+def _type_unsigned_char_ptr():
+ return gdb.lookup_type('unsigned char').pointer() # unsigned char*
+
+
+def _type_unsigned_short_ptr():
+ return gdb.lookup_type('unsigned short').pointer()
+
+
+def _type_unsigned_int_ptr():
+ return gdb.lookup_type('unsigned int').pointer()
+
+
+def _sizeof_void_p():
+ return gdb.lookup_type('void').pointer().sizeof
+
+
+# value computed later, see PyUnicodeObjectPtr.proxy()
+_is_pep393 = None
+
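
The helper functions above deliberately call gdb.lookup_type on every use instead of caching a gdb.Type at import time: cached types go stale when gdb loads a different executable, where pointer sizes may differ. A minimal sketch of using them, assuming gdb's embedded Python API and a hypothetical `read_c_chars` helper:

    # Runs only inside gdb's embedded Python interpreter.
    import gdb

    def _type_char_ptr():
        # Re-looked-up per call so the type tracks the current
        # executable's ABI (e.g. 32- vs 64-bit pointers).
        return gdb.lookup_type('char').pointer()  # char*

    def read_c_chars(addr, length):
        # Hypothetical helper: view `length` bytes at a gdb.Value
        # address as characters.
        ptr = addr.cast(_type_char_ptr())
        return ''.join(chr(int(ptr[i])) for i in range(length))
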
Py_TPFLAGS_HEAPTYPE = (1 << 9)
Py_TPFLAGS_LONG_SUBCLASS = (1 << 24)
Py_TPFLAGS_LIST_SUBCLASS = (1 << 25)
@@ -99,20 +99,20 @@ Py_TPFLAGS_BASE_EXC_SUBCLASS = (1 << 30)
Py_TPFLAGS_TYPE_SUBCLASS = (1 << 31)
-MAX_OUTPUT_LEN=1024
-
+MAX_OUTPUT_LEN=1024
+
hexdigits = "0123456789abcdef"
ENCODING = locale.getpreferredencoding()
-EVALFRAME = '_PyEval_EvalFrameDefault'
+EVALFRAME = '_PyEval_EvalFrameDefault'
class NullPyObjectPtr(RuntimeError):
pass
def safety_limit(val):
- # Given an integer value from the process being debugged, limit it to some
+ # Given an integer value from the process being debugged, limit it to some
# safety threshold so that arbitrary breakage within said process doesn't
# break the gdb process too much (e.g. sizes of iterations, sizes of lists)
return min(val, 1000)
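
safety_limit (and safe_range below it) exists because every count here is read out of a possibly corrupted debuggee; capping keeps a bogus ob_size from making gdb iterate billions of times. Standalone:

    def safety_limit(val, cap=1000):
        # Never trust sizes read from the inferior process: corruption
        # there must not hang or blow up the debugger.
        return min(val, cap)

    def safe_range(val):
        return range(safety_limit(int(val)))

    assert len(list(safe_range(10))) == 10
    assert len(list(safe_range(2 ** 40))) == 1000   # capped
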
@@ -121,45 +121,45 @@ def safety_limit(val):
def safe_range(val):
# As per range, but don't trust the value too much: cap it to a safety
# threshold in case the data was corrupted
- return xrange(safety_limit(int(val)))
-
-if sys.version_info[0] >= 3:
- def write_unicode(file, text):
- file.write(text)
-else:
- def write_unicode(file, text):
- # Write a byte or unicode string to file. Unicode strings are encoded to
- # ENCODING encoding with 'backslashreplace' error handler to avoid
- # UnicodeEncodeError.
- if isinstance(text, unicode):
- text = text.encode(ENCODING, 'backslashreplace')
- file.write(text)
-
-try:
- os_fsencode = os.fsencode
-except AttributeError:
- def os_fsencode(filename):
- if not isinstance(filename, unicode):
- return filename
- encoding = sys.getfilesystemencoding()
- if encoding == 'mbcs':
- # mbcs doesn't support surrogateescape
- return filename.encode(encoding)
- encoded = []
- for char in filename:
- # surrogateescape error handler
- if 0xDC80 <= ord(char) <= 0xDCFF:
- byte = chr(ord(char) - 0xDC00)
- else:
- byte = char.encode(encoding)
- encoded.append(byte)
- return ''.join(encoded)
+ return xrange(safety_limit(int(val)))
+
+if sys.version_info[0] >= 3:
+ def write_unicode(file, text):
+ file.write(text)
+else:
+ def write_unicode(file, text):
+ # Write a byte or unicode string to file. Unicode strings are encoded to
+ # ENCODING encoding with 'backslashreplace' error handler to avoid
+ # UnicodeEncodeError.
+ if isinstance(text, unicode):
+ text = text.encode(ENCODING, 'backslashreplace')
+ file.write(text)
+
+try:
+ os_fsencode = os.fsencode
+except AttributeError:
+ def os_fsencode(filename):
+ if not isinstance(filename, unicode):
+ return filename
+ encoding = sys.getfilesystemencoding()
+ if encoding == 'mbcs':
+ # mbcs doesn't support surrogateescape
+ return filename.encode(encoding)
+ encoded = []
+ for char in filename:
+ # surrogateescape error handler
+ if 0xDC80 <= ord(char) <= 0xDCFF:
+ byte = chr(ord(char) - 0xDC00)
+ else:
+ byte = char.encode(encoding)
+ encoded.append(byte)
+ return ''.join(encoded)
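
The os_fsencode fallback above reverses PEP 383's surrogateescape handler by hand: code points U+DC80..U+DCFF stand in for bytes that failed to decode and are mapped back to the raw byte value. On Python 3 the builtin behaves the same way (shown here assuming a UTF-8 filesystem encoding, as on typical Linux):

    import os

    # The raw byte 0xFF is invalid UTF-8, so it decodes (with
    # surrogateescape) to the lone surrogate U+DCFF...
    name = b'report-\xff.log'.decode('utf-8', 'surrogateescape')
    assert name == 'report-\udcff.log'
    # ...and fsencode maps it back to the original byte.
    assert os.fsencode(name) == b'report-\xff.log'

    # The per-character rule the fallback implements:
    def byte_for(ch, encoding='utf-8'):
        if 0xDC80 <= ord(ch) <= 0xDCFF:
            return bytes([ord(ch) - 0xDC00])
        return ch.encode(encoding)

    assert byte_for('\udcff') == b'\xff'
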
class StringTruncated(RuntimeError):
pass
class TruncatedStringIO(object):
- '''Similar to io.StringIO, but can truncate the output by raising a
+ '''Similar to io.StringIO, but can truncate the output by raising a
StringTruncated exception'''
def __init__(self, maxlen=None):
self._val = ''
@@ -179,8 +179,8 @@ class TruncatedStringIO(object):
class PyObjectPtr(object):
"""
- Class wrapping a gdb.Value that's either a (PyObject*) within the
- inferior process, or some subclass pointer e.g. (PyBytesObject*)
+ Class wrapping a gdb.Value that's either a (PyObject*) within the
+ inferior process, or some subclass pointer e.g. (PyBytesObject*)
There will be a subclass for every refined PyObject type that we care
about.
@@ -260,7 +260,7 @@ class PyObjectPtr(object):
return PyTypeObjectPtr(self.field('ob_type'))
def is_null(self):
- return 0 == long(self._gdbval)
+ return 0 == long(self._gdbval)
def is_optimized_out(self):
'''
@@ -321,7 +321,7 @@ class PyObjectPtr(object):
return '<%s at remote 0x%x>' % (self.tp_name, self.address)
return FakeRepr(self.safe_tp_name(),
- long(self._gdbval))
+ long(self._gdbval))
def write_repr(self, out, visited):
'''
@@ -360,8 +360,8 @@ class PyObjectPtr(object):
# class
return cls
- #print('tp_flags = 0x%08x' % tp_flags)
- #print('tp_name = %r' % tp_name)
+ #print('tp_flags = 0x%08x' % tp_flags)
+ #print('tp_name = %r' % tp_name)
name_map = {'bool': PyBoolObjectPtr,
'classobj': PyClassObjectPtr,
@@ -370,13 +370,13 @@ class PyObjectPtr(object):
'set' : PySetObjectPtr,
'frozenset' : PySetObjectPtr,
'builtin_function_or_method' : PyCFunctionObjectPtr,
- 'method-wrapper': wrapperobject,
+ 'method-wrapper': wrapperobject,
}
if tp_name in name_map:
return name_map[tp_name]
- if tp_flags & Py_TPFLAGS_HEAPTYPE:
- return HeapTypeObjectPtr
+ if tp_flags & Py_TPFLAGS_HEAPTYPE:
+ return HeapTypeObjectPtr
if tp_flags & Py_TPFLAGS_LONG_SUBCLASS:
return PyLongObjectPtr
@@ -384,16 +384,16 @@ class PyObjectPtr(object):
return PyListObjectPtr
if tp_flags & Py_TPFLAGS_TUPLE_SUBCLASS:
return PyTupleObjectPtr
- if tp_flags & Py_TPFLAGS_BYTES_SUBCLASS:
- return PyBytesObjectPtr
+ if tp_flags & Py_TPFLAGS_BYTES_SUBCLASS:
+ return PyBytesObjectPtr
if tp_flags & Py_TPFLAGS_UNICODE_SUBCLASS:
return PyUnicodeObjectPtr
if tp_flags & Py_TPFLAGS_DICT_SUBCLASS:
return PyDictObjectPtr
if tp_flags & Py_TPFLAGS_BASE_EXC_SUBCLASS:
return PyBaseExceptionObjectPtr
- #if tp_flags & Py_TPFLAGS_TYPE_SUBCLASS:
- # return PyTypeObjectPtr
+ #if tp_flags & Py_TPFLAGS_TYPE_SUBCLASS:
+ # return PyTypeObjectPtr
# Use the base class:
return cls
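
subclass_from_type falls back to the Py_TPFLAGS_*_SUBCLASS bits so that user-defined subclasses of built-ins still get the right proxy class even when their tp_name is unknown. The dispatch is plain bit testing; a standalone sketch using the same flag values as CPython's object.h:

    Py_TPFLAGS_LONG_SUBCLASS = 1 << 24
    Py_TPFLAGS_LIST_SUBCLASS = 1 << 25
    Py_TPFLAGS_DICT_SUBCLASS = 1 << 29

    def pick_proxy(tp_flags):
        # For normal types at most one subclass bit is set.
        if tp_flags & Py_TPFLAGS_LONG_SUBCLASS:
            return 'PyLongObjectPtr'
        if tp_flags & Py_TPFLAGS_LIST_SUBCLASS:
            return 'PyListObjectPtr'
        if tp_flags & Py_TPFLAGS_DICT_SUBCLASS:
            return 'PyDictObjectPtr'
        return 'PyObjectPtr'   # base wrapper

    # A dict subclass sets the dict bit (alongside e.g. HEAPTYPE):
    assert pick_proxy(Py_TPFLAGS_DICT_SUBCLASS | (1 << 9)) == 'PyDictObjectPtr'
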
@@ -408,7 +408,7 @@ class PyObjectPtr(object):
p = PyObjectPtr(gdbval)
cls = cls.subclass_from_type(p.type())
return cls(gdbval, cast_to=cls.get_gdb_type())
- except RuntimeError:
+ except RuntimeError:
# Handle any kind of error e.g. NULL ptrs by simply using the base
# class
pass
@@ -419,7 +419,7 @@ class PyObjectPtr(object):
return gdb.lookup_type(cls._typename).pointer()
def as_address(self):
- return long(self._gdbval)
+ return long(self._gdbval)
class PyVarObjectPtr(PyObjectPtr):
_typename = 'PyVarObject'
@@ -439,7 +439,7 @@ class ProxyAlreadyVisited(object):
def _write_instance_repr(out, visited, name, pyop_attrdict, address):
- '''Shared code for use by all classes:
+ '''Shared code for use by all classes:
write a representation to file-like object "out"'''
out.write('<')
out.write(name)
@@ -448,7 +448,7 @@ def _write_instance_repr(out, visited, name, pyop_attrdict, address):
if isinstance(pyop_attrdict, PyDictObjectPtr):
out.write('(')
first = True
- for pyop_arg, pyop_val in pyop_attrdict.iteritems():
+ for pyop_arg, pyop_val in pyop_attrdict.iteritems():
if not first:
out.write(', ')
first = False
@@ -468,27 +468,27 @@ class InstanceProxy(object):
def __repr__(self):
if isinstance(self.attrdict, dict):
- kwargs = ', '.join(["%s=%r" % (arg, val)
- for arg, val in self.attrdict.iteritems()])
- return '<%s(%s) at remote 0x%x>' % (self.cl_name,
- kwargs, self.address)
+ kwargs = ', '.join(["%s=%r" % (arg, val)
+ for arg, val in self.attrdict.iteritems()])
+ return '<%s(%s) at remote 0x%x>' % (self.cl_name,
+ kwargs, self.address)
else:
- return '<%s at remote 0x%x>' % (self.cl_name,
- self.address)
+ return '<%s at remote 0x%x>' % (self.cl_name,
+ self.address)
-def _PyObject_VAR_SIZE(typeobj, nitems):
- if _PyObject_VAR_SIZE._type_size_t is None:
- _PyObject_VAR_SIZE._type_size_t = gdb.lookup_type('size_t')
+def _PyObject_VAR_SIZE(typeobj, nitems):
+ if _PyObject_VAR_SIZE._type_size_t is None:
+ _PyObject_VAR_SIZE._type_size_t = gdb.lookup_type('size_t')
return ( ( typeobj.field('tp_basicsize') +
nitems * typeobj.field('tp_itemsize') +
- (_sizeof_void_p() - 1)
- ) & ~(_sizeof_void_p() - 1)
- ).cast(_PyObject_VAR_SIZE._type_size_t)
-_PyObject_VAR_SIZE._type_size_t = None
+ (_sizeof_void_p() - 1)
+ ) & ~(_sizeof_void_p() - 1)
+ ).cast(_PyObject_VAR_SIZE._type_size_t)
+_PyObject_VAR_SIZE._type_size_t = None
-class HeapTypeObjectPtr(PyObjectPtr):
- _typename = 'PyObject'
+class HeapTypeObjectPtr(PyObjectPtr):
+ _typename = 'PyObject'
def get_attr_dict(self):
'''
@@ -507,9 +507,9 @@ class HeapTypeObjectPtr(PyObjectPtr):
size = _PyObject_VAR_SIZE(typeobj, tsize)
dictoffset += size
assert dictoffset > 0
- assert dictoffset % _sizeof_void_p() == 0
+ assert dictoffset % _sizeof_void_p() == 0
- dictptr = self._gdbval.cast(_type_char_ptr()) + dictoffset
+ dictptr = self._gdbval.cast(_type_char_ptr()) + dictoffset
PyObjectPtrPtr = PyObjectPtr.get_gdb_type().pointer()
dictptr = dictptr.cast(PyObjectPtrPtr)
return PyObjectPtr.from_pyobject_ptr(dictptr.dereference())
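
_PyObject_VAR_SIZE above mirrors CPython's objimpl.h macro: the variable-size allocation (tp_basicsize + nitems*tp_itemsize) is rounded up to a multiple of sizeof(void*), which is what makes the `dictoffset % _sizeof_void_p() == 0` assertion safe. The rounding is the usual power-of-two mask trick; a standalone worked version:

    def var_size(tp_basicsize, tp_itemsize, nitems, sizeof_void_p=8):
        # Round up to the next multiple of sizeof(void*); valid because
        # sizeof(void*) is a power of two.
        raw = tp_basicsize + nitems * tp_itemsize
        return (raw + sizeof_void_p - 1) & ~(sizeof_void_p - 1)

    assert var_size(16, 1, 10) == 32   # 26 bytes round up to 32
    assert var_size(16, 1, 16) == 32   # already aligned: unchanged
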
@@ -522,7 +522,7 @@ class HeapTypeObjectPtr(PyObjectPtr):
def proxyval(self, visited):
'''
- Support for classes.
+ Support for classes.
Currently we just locate the dictionary using a transliteration to
python of _PyObject_GetDictPtr, ignoring descriptors
@@ -539,8 +539,8 @@ class HeapTypeObjectPtr(PyObjectPtr):
attr_dict = {}
tp_name = self.safe_tp_name()
- # Class:
- return InstanceProxy(tp_name, attr_dict, long(self._gdbval))
+ # Class:
+ return InstanceProxy(tp_name, attr_dict, long(self._gdbval))
def write_repr(self, out, visited):
# Guard against infinite loops:
@@ -549,9 +549,9 @@ class HeapTypeObjectPtr(PyObjectPtr):
return
visited.add(self.as_address())
- pyop_attrdict = self.get_attr_dict()
- _write_instance_repr(out, visited,
- self.safe_tp_name(), pyop_attrdict, self.as_address())
+ pyop_attrdict = self.get_attr_dict()
+ _write_instance_repr(out, visited,
+ self.safe_tp_name(), pyop_attrdict, self.as_address())
class ProxyException(Exception):
def __init__(self, tp_name, args):
@@ -608,11 +608,11 @@ class BuiltInMethodProxy(object):
self.pyop_m_self = pyop_m_self
def __repr__(self):
- return ('<built-in method %s of %s object at remote 0x%x>'
- % (self.ml_name,
- self.pyop_m_self.safe_tp_name(),
- self.pyop_m_self.as_address())
- )
+ return ('<built-in method %s of %s object at remote 0x%x>'
+ % (self.ml_name,
+ self.pyop_m_self.safe_tp_name(),
+ self.pyop_m_self.as_address())
+ )
class PyCFunctionObjectPtr(PyObjectPtr):
"""
@@ -671,17 +671,17 @@ class PyDictObjectPtr(PyObjectPtr):
def iteritems(self):
'''
Yields a sequence of (PyObjectPtr key, PyObjectPtr value) pairs,
- analogous to dict.iteritems()
+ analogous to dict.iteritems()
'''
- keys = self.field('ma_keys')
- values = self.field('ma_values')
- entries, nentries = self._get_entries(keys)
- for i in safe_range(nentries):
- ep = entries[i]
- if long(values):
- pyop_value = PyObjectPtr.from_pyobject_ptr(values[i])
- else:
- pyop_value = PyObjectPtr.from_pyobject_ptr(ep['me_value'])
+ keys = self.field('ma_keys')
+ values = self.field('ma_values')
+ entries, nentries = self._get_entries(keys)
+ for i in safe_range(nentries):
+ ep = entries[i]
+ if long(values):
+ pyop_value = PyObjectPtr.from_pyobject_ptr(values[i])
+ else:
+ pyop_value = PyObjectPtr.from_pyobject_ptr(ep['me_value'])
if not pyop_value.is_null():
pyop_key = PyObjectPtr.from_pyobject_ptr(ep['me_key'])
yield (pyop_key, pyop_value)
@@ -693,7 +693,7 @@ class PyDictObjectPtr(PyObjectPtr):
visited.add(self.as_address())
result = {}
- for pyop_key, pyop_value in self.iteritems():
+ for pyop_key, pyop_value in self.iteritems():
proxy_key = pyop_key.proxyval(visited)
proxy_value = pyop_value.proxyval(visited)
result[proxy_key] = proxy_value
@@ -708,7 +708,7 @@ class PyDictObjectPtr(PyObjectPtr):
out.write('{')
first = True
- for pyop_key, pyop_value in self.iteritems():
+ for pyop_key, pyop_value in self.iteritems():
if not first:
out.write(', ')
first = False
@@ -717,31 +717,31 @@ class PyDictObjectPtr(PyObjectPtr):
pyop_value.write_repr(out, visited)
out.write('}')
- def _get_entries(self, keys):
- dk_nentries = int(keys['dk_nentries'])
- dk_size = int(keys['dk_size'])
- try:
- # <= Python 3.5
- return keys['dk_entries'], dk_size
- except RuntimeError:
- # >= Python 3.6
- pass
-
- if dk_size <= 0xFF:
- offset = dk_size
- elif dk_size <= 0xFFFF:
- offset = 2 * dk_size
- elif dk_size <= 0xFFFFFFFF:
- offset = 4 * dk_size
- else:
- offset = 8 * dk_size
-
- ent_addr = keys['dk_indices']['as_1'].address
- ent_addr = ent_addr.cast(_type_unsigned_char_ptr()) + offset
- ent_ptr_t = gdb.lookup_type('PyDictKeyEntry').pointer()
- ent_addr = ent_addr.cast(ent_ptr_t)
-
- return ent_addr, dk_nentries
+ def _get_entries(self, keys):
+ dk_nentries = int(keys['dk_nentries'])
+ dk_size = int(keys['dk_size'])
+ try:
+ # <= Python 3.5
+ return keys['dk_entries'], dk_size
+ except RuntimeError:
+ # >= Python 3.6
+ pass
+
+ if dk_size <= 0xFF:
+ offset = dk_size
+ elif dk_size <= 0xFFFF:
+ offset = 2 * dk_size
+ elif dk_size <= 0xFFFFFFFF:
+ offset = 4 * dk_size
+ else:
+ offset = 8 * dk_size
+
+ ent_addr = keys['dk_indices']['as_1'].address
+ ent_addr = ent_addr.cast(_type_unsigned_char_ptr()) + offset
+ ent_ptr_t = gdb.lookup_type('PyDictKeyEntry').pointer()
+ ent_addr = ent_addr.cast(ent_ptr_t)
+
+ return ent_addr, dk_nentries
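
_get_entries above handles the compact dict layout introduced in Python 3.6: dk_indices holds dk_size index slots whose width grows with the table, and the PyDictKeyEntry array begins immediately after those slots. The offset computation, standalone:

    def entries_offset(dk_size):
        # Index slots are as narrow as possible for the table size;
        # dk_entries starts right after dk_size such slots.
        if dk_size <= 0xFF:
            width = 1
        elif dk_size <= 0xFFFF:
            width = 2
        elif dk_size <= 0xFFFFFFFF:
            width = 4
        else:
            width = 8
        return dk_size * width

    assert entries_offset(8) == 8        # 1-byte indices
    assert entries_offset(1024) == 2048  # 2-byte indices
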
class PyListObjectPtr(PyObjectPtr):
@@ -798,9 +798,9 @@ class PyLongObjectPtr(PyObjectPtr):
#define PyLong_SHIFT 30
#define PyLong_SHIFT 15
'''
- ob_size = long(self.field('ob_size'))
+ ob_size = long(self.field('ob_size'))
if ob_size == 0:
- return 0
+ return 0
ob_digit = self.field('ob_digit')
@@ -809,7 +809,7 @@ class PyLongObjectPtr(PyObjectPtr):
else:
SHIFT = 30
- digits = [long(ob_digit[i]) * 2**(SHIFT*i)
+ digits = [long(ob_digit[i]) * 2**(SHIFT*i)
for i in safe_range(abs(ob_size))]
result = sum(digits)
if ob_size < 0:
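
PyLongObjectPtr.proxyval reassembles the value from CPython's base-2**SHIFT digit array (SHIFT is 30 when a digit is 4 bytes, else 15), with the sign carried by ob_size. The arithmetic, standalone:

    def long_from_digits(ob_digit, ob_size, shift=30):
        # |value| is stored little-endian in base 2**shift; a negative
        # ob_size means a negative number.
        value = sum(d * 2 ** (shift * i) for i, d in enumerate(ob_digit))
        return -value if ob_size < 0 else value

    assert long_from_digits([5, 3], 2) == 3 * 2 ** 30 + 5
    assert long_from_digits([5, 3], -2) == -(3 * 2 ** 30 + 5)
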
@@ -828,10 +828,10 @@ class PyBoolObjectPtr(PyLongObjectPtr):
<bool> instances (Py_True/Py_False) within the process being debugged.
"""
def proxyval(self, visited):
- if PyLongObjectPtr.proxyval(self, visited):
- return True
- else:
- return False
+ if PyLongObjectPtr.proxyval(self, visited):
+ return True
+ else:
+ return False
class PyNoneStructPtr(PyObjectPtr):
"""
@@ -881,10 +881,10 @@ class PyFrameObjectPtr(PyObjectPtr):
the global variables of this frame
'''
if self.is_optimized_out():
- return ()
+ return ()
pyop_globals = self.pyop_field('f_globals')
- return pyop_globals.iteritems()
+ return pyop_globals.iteritems()
def iter_builtins(self):
'''
@@ -892,10 +892,10 @@ class PyFrameObjectPtr(PyObjectPtr):
the builtin variables
'''
if self.is_optimized_out():
- return ()
+ return ()
pyop_builtins = self.pyop_field('f_builtins')
- return pyop_builtins.iteritems()
+ return pyop_builtins.iteritems()
def get_var_by_name(self, name):
'''
@@ -931,7 +931,7 @@ class PyFrameObjectPtr(PyObjectPtr):
if self.is_optimized_out():
return None
f_trace = self.field('f_trace')
- if long(f_trace) != 0:
+ if long(f_trace) != 0:
# we have a non-NULL f_trace:
return self.f_lineno
else:
@@ -946,11 +946,11 @@ class PyFrameObjectPtr(PyObjectPtr):
if self.is_optimized_out():
return '(frame information optimized out)'
filename = self.filename()
- try:
- f = open(os_fsencode(filename), 'r')
- except IOError:
- return None
- with f:
+ try:
+ f = open(os_fsencode(filename), 'r')
+ except IOError:
+ return None
+ with f:
all_lines = f.readlines()
# Convert from 1-based current_line_num to 0-based list offset:
return all_lines[self.current_line_num()-1]
@@ -976,39 +976,39 @@ class PyFrameObjectPtr(PyObjectPtr):
out.write(')')
- def print_traceback(self):
- if self.is_optimized_out():
- sys.stdout.write(' (frame information optimized out)\n')
- return
- visited = set()
- sys.stdout.write(' File "%s", line %i, in %s\n'
- % (self.co_filename.proxyval(visited),
- self.current_line_num(),
- self.co_name.proxyval(visited)))
+ def print_traceback(self):
+ if self.is_optimized_out():
+ sys.stdout.write(' (frame information optimized out)\n')
+ return
+ visited = set()
+ sys.stdout.write(' File "%s", line %i, in %s\n'
+ % (self.co_filename.proxyval(visited),
+ self.current_line_num(),
+ self.co_name.proxyval(visited)))
class PySetObjectPtr(PyObjectPtr):
_typename = 'PySetObject'
- @classmethod
- def _dummy_key(self):
- return gdb.lookup_global_symbol('_PySet_Dummy').value()
-
- def __iter__(self):
- dummy_ptr = self._dummy_key()
- table = self.field('table')
- for i in safe_range(self.field('mask') + 1):
- setentry = table[i]
- key = setentry['key']
- if key != 0 and key != dummy_ptr:
- yield PyObjectPtr.from_pyobject_ptr(key)
-
+ @classmethod
+ def _dummy_key(self):
+ return gdb.lookup_global_symbol('_PySet_Dummy').value()
+
+ def __iter__(self):
+ dummy_ptr = self._dummy_key()
+ table = self.field('table')
+ for i in safe_range(self.field('mask') + 1):
+ setentry = table[i]
+ key = setentry['key']
+ if key != 0 and key != dummy_ptr:
+ yield PyObjectPtr.from_pyobject_ptr(key)
+
def proxyval(self, visited):
# Guard against infinite loops:
if self.as_address() in visited:
return ProxyAlreadyVisited('%s(...)' % self.safe_tp_name())
visited.add(self.as_address())
- members = (key.proxyval(visited) for key in self)
+ members = (key.proxyval(visited) for key in self)
if self.safe_tp_name() == 'frozenset':
return frozenset(members)
else:
@@ -1037,11 +1037,11 @@ class PySetObjectPtr(PyObjectPtr):
out.write('{')
first = True
- for key in self:
- if not first:
- out.write(', ')
- first = False
- key.write_repr(out, visited)
+ for key in self:
+ if not first:
+ out.write(', ')
+ first = False
+ key.write_repr(out, visited)
out.write('}')
if tp_name != 'set':
@@ -1054,13 +1054,13 @@ class PyBytesObjectPtr(PyObjectPtr):
def __str__(self):
field_ob_size = self.field('ob_size')
field_ob_sval = self.field('ob_sval')
- char_ptr = field_ob_sval.address.cast(_type_unsigned_char_ptr())
- return ''.join([chr(char_ptr[i]) for i in safe_range(field_ob_size)])
+ char_ptr = field_ob_sval.address.cast(_type_unsigned_char_ptr())
+ return ''.join([chr(char_ptr[i]) for i in safe_range(field_ob_size)])
def proxyval(self, visited):
return str(self)
- def write_repr(self, out, visited):
+ def write_repr(self, out, visited):
# Write this out as a Python 3 bytes literal, i.e. with a "b" prefix
# Get a PyStringObject* within the Python 2 gdb process:
@@ -1071,7 +1071,7 @@ class PyBytesObjectPtr(PyObjectPtr):
quote = "'"
if "'" in proxy and not '"' in proxy:
quote = '"'
- out.write('b')
+ out.write('b')
out.write(quote)
for byte in proxy:
if byte == quote or byte == '\\':
@@ -1110,8 +1110,8 @@ class PyTupleObjectPtr(PyObjectPtr):
return ProxyAlreadyVisited('(...)')
visited.add(self.as_address())
- result = tuple(PyObjectPtr.from_pyobject_ptr(self[i]).proxyval(visited)
- for i in safe_range(int_from_int(self.field('ob_size'))))
+ result = tuple(PyObjectPtr.from_pyobject_ptr(self[i]).proxyval(visited)
+ for i in safe_range(int_from_int(self.field('ob_size'))))
return result
def write_repr(self, out, visited):
@@ -1132,10 +1132,10 @@ class PyTupleObjectPtr(PyObjectPtr):
else:
out.write(')')
-class PyTypeObjectPtr(PyObjectPtr):
- _typename = 'PyTypeObject'
+class PyTypeObjectPtr(PyObjectPtr):
+ _typename = 'PyTypeObject'
+
-
def _unichr_is_printable(char):
# Logic adapted from Python 3's Tools/unicode/makeunicodedata.py
if char == u" ":
@@ -1144,7 +1144,7 @@ def _unichr_is_printable(char):
return unicodedata.category(char) not in ("C", "Z")
if sys.maxunicode >= 0x10000:
- _unichr = unichr
+ _unichr = unichr
else:
# Needed for proper surrogate support if sizeof(Py_UNICODE) is 2 in gdb
def _unichr(x):
@@ -1164,46 +1164,46 @@ class PyUnicodeObjectPtr(PyObjectPtr):
return _type_Py_UNICODE.sizeof
def proxyval(self, visited):
- global _is_pep393
- if _is_pep393 is None:
- fields = gdb.lookup_type('PyUnicodeObject').target().fields()
- _is_pep393 = 'data' in [f.name for f in fields]
- if _is_pep393:
- # Python 3.3 and newer
- may_have_surrogates = False
- compact = self.field('_base')
- ascii = compact['_base']
- state = ascii['state']
- is_compact_ascii = (int(state['ascii']) and int(state['compact']))
- if not int(state['ready']):
- # string is not ready
- field_length = long(compact['wstr_length'])
- may_have_surrogates = True
- field_str = ascii['wstr']
- else:
- field_length = long(ascii['length'])
- if is_compact_ascii:
- field_str = ascii.address + 1
- elif int(state['compact']):
- field_str = compact.address + 1
- else:
- field_str = self.field('data')['any']
- repr_kind = int(state['kind'])
- if repr_kind == 1:
- field_str = field_str.cast(_type_unsigned_char_ptr())
- elif repr_kind == 2:
- field_str = field_str.cast(_type_unsigned_short_ptr())
- elif repr_kind == 4:
- field_str = field_str.cast(_type_unsigned_int_ptr())
- else:
- # Python 3.2 and earlier
- field_length = long(self.field('length'))
- field_str = self.field('str')
- may_have_surrogates = self.char_width() == 2
+ global _is_pep393
+ if _is_pep393 is None:
+ fields = gdb.lookup_type('PyUnicodeObject').target().fields()
+ _is_pep393 = 'data' in [f.name for f in fields]
+ if _is_pep393:
+ # Python 3.3 and newer
+ may_have_surrogates = False
+ compact = self.field('_base')
+ ascii = compact['_base']
+ state = ascii['state']
+ is_compact_ascii = (int(state['ascii']) and int(state['compact']))
+ if not int(state['ready']):
+ # string is not ready
+ field_length = long(compact['wstr_length'])
+ may_have_surrogates = True
+ field_str = ascii['wstr']
+ else:
+ field_length = long(ascii['length'])
+ if is_compact_ascii:
+ field_str = ascii.address + 1
+ elif int(state['compact']):
+ field_str = compact.address + 1
+ else:
+ field_str = self.field('data')['any']
+ repr_kind = int(state['kind'])
+ if repr_kind == 1:
+ field_str = field_str.cast(_type_unsigned_char_ptr())
+ elif repr_kind == 2:
+ field_str = field_str.cast(_type_unsigned_short_ptr())
+ elif repr_kind == 4:
+ field_str = field_str.cast(_type_unsigned_int_ptr())
+ else:
+ # Python 3.2 and earlier
+ field_length = long(self.field('length'))
+ field_str = self.field('str')
+ may_have_surrogates = self.char_width() == 2
# Gather a list of ints from the Py_UNICODE array; these are either
- # UCS-1, UCS-2 or UCS-4 code points:
- if not may_have_surrogates:
+ # UCS-1, UCS-2 or UCS-4 code points:
+ if not may_have_surrogates:
Py_UNICODEs = [int(field_str[i]) for i in safe_range(field_length)]
else:
# A more elaborate routine if sizeof(Py_UNICODE) is 2 in the
@@ -1230,14 +1230,14 @@ class PyUnicodeObjectPtr(PyObjectPtr):
# Convert the int code points to unicode characters, and generate a
# local unicode instance.
# This splits surrogate pairs if sizeof(Py_UNICODE) is 2 here (in gdb).
- result = u''.join([
- (_unichr(ucs) if ucs <= 0x10ffff else '\ufffd')
- for ucs in Py_UNICODEs])
+ result = u''.join([
+ (_unichr(ucs) if ucs <= 0x10ffff else '\ufffd')
+ for ucs in Py_UNICODEs])
return result
def write_repr(self, out, visited):
- # Write this out as a Python 3 str literal, i.e. without a "u" prefix
-
+ # Write this out as a Python 3 str literal, i.e. without a "u" prefix
+
# Get a PyUnicodeObject* within the Python 2 gdb process:
proxy = self.proxyval(visited)
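
proxyval above branches on PEP 393 (Python 3.3+), where a ready unicode object stores its code points in 1-, 2- or 4-byte units chosen by state.kind, hence the three pointer casts. The width rule itself is simple; standalone:

    def unit_width(max_code_point):
        # PEP 393 picks the narrowest unit that fits every character.
        if max_code_point <= 0xFF:
            return 1   # Latin-1 range
        if max_code_point <= 0xFFFF:
            return 2   # Basic Multilingual Plane
        return 4       # astral code points

    assert unit_width(ord('A')) == 1
    assert unit_width(ord('\u1f04')) == 2   # the 'ἄ' from the test above
    assert unit_width(0x1F600) == 4         # emoji forces 4-byte units
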
@@ -1344,41 +1344,41 @@ class PyUnicodeObjectPtr(PyObjectPtr):
out.write(quote)
-class wrapperobject(PyObjectPtr):
- _typename = 'wrapperobject'
-
- def safe_name(self):
- try:
- name = self.field('descr')['d_base']['name'].string()
- return repr(name)
- except (NullPyObjectPtr, RuntimeError):
- return '<unknown name>'
-
- def safe_tp_name(self):
- try:
- return self.field('self')['ob_type']['tp_name'].string()
- except (NullPyObjectPtr, RuntimeError):
- return '<unknown tp_name>'
-
- def safe_self_addresss(self):
- try:
- address = long(self.field('self'))
- return '%#x' % address
- except (NullPyObjectPtr, RuntimeError):
- return '<failed to get self address>'
-
- def proxyval(self, visited):
- name = self.safe_name()
- tp_name = self.safe_tp_name()
- self_address = self.safe_self_addresss()
- return ("<method-wrapper %s of %s object at %s>"
- % (name, tp_name, self_address))
-
- def write_repr(self, out, visited):
- proxy = self.proxyval(visited)
- out.write(proxy)
-
-
+class wrapperobject(PyObjectPtr):
+ _typename = 'wrapperobject'
+
+ def safe_name(self):
+ try:
+ name = self.field('descr')['d_base']['name'].string()
+ return repr(name)
+ except (NullPyObjectPtr, RuntimeError):
+ return '<unknown name>'
+
+ def safe_tp_name(self):
+ try:
+ return self.field('self')['ob_type']['tp_name'].string()
+ except (NullPyObjectPtr, RuntimeError):
+ return '<unknown tp_name>'
+
+ def safe_self_addresss(self):
+ try:
+ address = long(self.field('self'))
+ return '%#x' % address
+ except (NullPyObjectPtr, RuntimeError):
+ return '<failed to get self address>'
+
+ def proxyval(self, visited):
+ name = self.safe_name()
+ tp_name = self.safe_tp_name()
+ self_address = self.safe_self_addresss()
+ return ("<method-wrapper %s of %s object at %s>"
+ % (name, tp_name, self_address))
+
+ def write_repr(self, out, visited):
+ proxy = self.proxyval(visited)
+ out.write(proxy)
+
+
def int_from_int(gdbval):
return int(str(gdbval))
@@ -1411,14 +1411,14 @@ class PyObjectPtrPrinter:
def pretty_printer_lookup(gdbval):
type = gdbval.type.unqualified()
- if type.code != gdb.TYPE_CODE_PTR:
- return None
-
- type = type.target().unqualified()
- t = str(type)
- if t in ("PyObject", "PyFrameObject", "PyUnicodeObject", "wrapperobject"):
- return PyObjectPtrPrinter(gdbval)
-
+ if type.code != gdb.TYPE_CODE_PTR:
+ return None
+
+ type = type.target().unqualified()
+ t = str(type)
+ if t in ("PyObject", "PyFrameObject", "PyUnicodeObject", "wrapperobject"):
+ return PyObjectPtrPrinter(gdbval)
+
"""
During development, I've been manually invoking the code in this way:
(gdb) python
@@ -1438,17 +1438,17 @@ that this python file is installed to the same path as the library (or its
/usr/lib/libpython2.6.so.1.0-gdb.py
/usr/lib/debug/usr/lib/libpython2.6.so.1.0.debug-gdb.py
"""
-def register (obj):
+def register (obj):
if obj is None:
obj = gdb
# Wire up the pretty-printer
obj.pretty_printers.append(pretty_printer_lookup)
-register (gdb.current_objfile ())
+register (gdb.current_objfile ())
+
+
-
-
# Unfortunately, the exact API exposed by the gdb module varies somewhat
# from build to build
# See http://bugs.python.org/issue8279?#msg102276
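
register() above wires pretty_printer_lookup into gdb: gdb consults each callable in an objfile's (or the global) pretty_printers list for every value it is about to print and uses the first printer returned. A minimal sketch of that protocol, assuming gdb's Python API (only available inside gdb):

    import gdb

    class HexIntPrinter(object):
        # A printer object only needs a to_string() method.
        def __init__(self, val):
            self.val = val

        def to_string(self):
            return hex(int(self.val))

    def lookup(val):
        if str(val.type.unqualified()) == 'int':
            return HexIntPrinter(val)
        return None   # decline; gdb prints the value normally

    gdb.pretty_printers.append(lookup)
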
@@ -1497,26 +1497,26 @@ class Frame(object):
iter_frame = iter_frame.newer()
return index
- # We divide frames into:
- # - "python frames":
- # - "bytecode frames" i.e. PyEval_EvalFrameEx
- # - "other python frames": things that are of interest from a python
- # POV, but aren't bytecode (e.g. GC, GIL)
- # - everything else
-
- def is_python_frame(self):
- '''Is this a _PyEval_EvalFrameDefault frame, or some other important
- frame? (see is_other_python_frame for what "important" means in this
- context)'''
- if self.is_evalframe():
- return True
- if self.is_other_python_frame():
- return True
- return False
-
- def is_evalframe(self):
- '''Is this a _PyEval_EvalFrameDefault frame?'''
- if self._gdbframe.name() == EVALFRAME:
+ # We divide frames into:
+ # - "python frames":
+ # - "bytecode frames" i.e. PyEval_EvalFrameEx
+ # - "other python frames": things that are of interest from a python
+ # POV, but aren't bytecode (e.g. GC, GIL)
+ # - everything else
+
+ def is_python_frame(self):
+ '''Is this a _PyEval_EvalFrameDefault frame, or some other important
+ frame? (see is_other_python_frame for what "important" means in this
+ context)'''
+ if self.is_evalframe():
+ return True
+ if self.is_other_python_frame():
+ return True
+ return False
+
+ def is_evalframe(self):
+ '''Is this a _PyEval_EvalFrameDefault frame?'''
+ if self._gdbframe.name() == EVALFRAME:
'''
I believe we also need to filter on the inline
struct frame_id.inline_depth, only regarding frames with
@@ -1525,86 +1525,86 @@ class Frame(object):
So we reject those with type gdb.INLINE_FRAME
'''
if self._gdbframe.type() == gdb.NORMAL_FRAME:
- # We have a _PyEval_EvalFrameDefault frame:
+ # We have a _PyEval_EvalFrameDefault frame:
return True
return False
- def is_other_python_frame(self):
- '''Is this frame worth displaying in python backtraces?
- Examples:
- - waiting on the GIL
- - garbage-collecting
- - within a CFunction
- If it is, return a descriptive string
- For other frames, return False
- '''
- if self.is_waiting_for_gil():
- return 'Waiting for the GIL'
-
- if self.is_gc_collect():
- return 'Garbage-collecting'
-
- # Detect invocations of PyCFunction instances:
- frame = self._gdbframe
- caller = frame.name()
- if not caller:
- return False
-
- if caller in ('_PyCFunction_FastCallDict',
- '_PyCFunction_FastCallKeywords'):
- arg_name = 'func'
- # Within that frame:
- # "func" is the local containing the PyObject* of the
- # PyCFunctionObject instance
- # "f" is the same value, but cast to (PyCFunctionObject*)
- # "self" is the (PyObject*) of the 'self'
+ def is_other_python_frame(self):
+ '''Is this frame worth displaying in python backtraces?
+ Examples:
+ - waiting on the GIL
+ - garbage-collecting
+ - within a CFunction
+ If it is, return a descriptive string
+ For other frames, return False
+ '''
+ if self.is_waiting_for_gil():
+ return 'Waiting for the GIL'
+
+ if self.is_gc_collect():
+ return 'Garbage-collecting'
+
+ # Detect invocations of PyCFunction instances:
+ frame = self._gdbframe
+ caller = frame.name()
+ if not caller:
+ return False
+
+ if caller in ('_PyCFunction_FastCallDict',
+ '_PyCFunction_FastCallKeywords'):
+ arg_name = 'func'
+ # Within that frame:
+ # "func" is the local containing the PyObject* of the
+ # PyCFunctionObject instance
+ # "f" is the same value, but cast to (PyCFunctionObject*)
+ # "self" is the (PyObject*) of the 'self'
+ try:
+ # Use the prettyprinter for the func:
+ func = frame.read_var(arg_name)
+ return str(func)
+ except RuntimeError:
+ return 'PyCFunction invocation (unable to read %s)' % arg_name
+
+ if caller == 'wrapper_call':
try:
- # Use the prettyprinter for the func:
- func = frame.read_var(arg_name)
- return str(func)
- except RuntimeError:
- return 'PyCFunction invocation (unable to read %s)' % arg_name
-
- if caller == 'wrapper_call':
- try:
- func = frame.read_var('wp')
- return str(func)
- except RuntimeError:
- return '<wrapper_call invocation>'
-
- # This frame isn't worth reporting:
- return False
-
- def is_waiting_for_gil(self):
- '''Is this frame waiting on the GIL?'''
- # This assumes the _POSIX_THREADS version of Python/ceval_gil.h:
- name = self._gdbframe.name()
- if name:
- return 'pthread_cond_timedwait' in name
-
- def is_gc_collect(self):
- '''Is this frame "collect" within the garbage-collector?'''
- return self._gdbframe.name() == 'collect'
-
+ func = frame.read_var('wp')
+ return str(func)
+ except RuntimeError:
+ return '<wrapper_call invocation>'
+
+ # This frame isn't worth reporting:
+ return False
+
+ def is_waiting_for_gil(self):
+ '''Is this frame waiting on the GIL?'''
+ # This assumes the _POSIX_THREADS version of Python/ceval_gil.h:
+ name = self._gdbframe.name()
+ if name:
+ return 'pthread_cond_timedwait' in name
+
+ def is_gc_collect(self):
+ '''Is this frame "collect" within the garbage-collector?'''
+ return self._gdbframe.name() == 'collect'
+
def get_pyop(self):
try:
- f = self._gdbframe.read_var('f')
- frame = PyFrameObjectPtr.from_pyobject_ptr(f)
- if not frame.is_optimized_out():
- return frame
- # gdb is unable to get the "f" argument of PyEval_EvalFrameEx()
- # because it was "optimized out". Try to get "f" from the frame
- # of the caller, PyEval_EvalCodeEx().
- orig_frame = frame
- caller = self._gdbframe.older()
- if caller:
- f = caller.read_var('f')
- frame = PyFrameObjectPtr.from_pyobject_ptr(f)
- if not frame.is_optimized_out():
- return frame
- return orig_frame
- except ValueError:
+ f = self._gdbframe.read_var('f')
+ frame = PyFrameObjectPtr.from_pyobject_ptr(f)
+ if not frame.is_optimized_out():
+ return frame
+ # gdb is unable to get the "f" argument of PyEval_EvalFrameEx()
+ # because it was "optimized out". Try to get "f" from the frame
+ # of the caller, PyEval_EvalCodeEx().
+ orig_frame = frame
+ caller = self._gdbframe.older()
+ if caller:
+ f = caller.read_var('f')
+ frame = PyFrameObjectPtr.from_pyobject_ptr(f)
+ if not frame.is_optimized_out():
+ return frame
+ return orig_frame
+ except ValueError:
return None
@classmethod
@@ -1616,30 +1616,30 @@ class Frame(object):
@classmethod
def get_selected_python_frame(cls):
- '''Try to obtain the Frame for the python-related code in the selected
- frame, or None'''
- try:
- frame = cls.get_selected_frame()
- except gdb.error:
- # No frame: Python didn't start yet
- return None
-
- while frame:
- if frame.is_python_frame():
- return frame
- frame = frame.older()
-
- # Not found:
- return None
-
- @classmethod
- def get_selected_bytecode_frame(cls):
- '''Try to obtain the Frame for the python bytecode interpreter in the
- selected GDB frame, or None'''
+ '''Try to obtain the Frame for the python-related code in the selected
+ frame, or None'''
+ try:
+ frame = cls.get_selected_frame()
+ except gdb.error:
+ # No frame: Python didn't start yet
+ return None
+
+ while frame:
+ if frame.is_python_frame():
+ return frame
+ frame = frame.older()
+
+ # Not found:
+ return None
+
+ @classmethod
+ def get_selected_bytecode_frame(cls):
+ '''Try to obtain the Frame for the python bytecode interpreter in the
+ selected GDB frame, or None'''
frame = cls.get_selected_frame()
while frame:
- if frame.is_evalframe():
+ if frame.is_evalframe():
return frame
frame = frame.older()
@@ -1647,41 +1647,41 @@ class Frame(object):
return None
def print_summary(self):
- if self.is_evalframe():
+ if self.is_evalframe():
pyop = self.get_pyop()
if pyop:
line = pyop.get_truncated_repr(MAX_OUTPUT_LEN)
write_unicode(sys.stdout, '#%i %s\n' % (self.get_index(), line))
- if not pyop.is_optimized_out():
- line = pyop.current_line()
- if line is not None:
- sys.stdout.write(' %s\n' % line.strip())
+ if not pyop.is_optimized_out():
+ line = pyop.current_line()
+ if line is not None:
+ sys.stdout.write(' %s\n' % line.strip())
else:
sys.stdout.write('#%i (unable to read python frame information)\n' % self.get_index())
else:
- info = self.is_other_python_frame()
- if info:
- sys.stdout.write('#%i %s\n' % (self.get_index(), info))
- else:
- sys.stdout.write('#%i\n' % self.get_index())
-
- def print_traceback(self):
- if self.is_evalframe():
- pyop = self.get_pyop()
- if pyop:
- pyop.print_traceback()
- if not pyop.is_optimized_out():
- line = pyop.current_line()
- if line is not None:
- sys.stdout.write(' %s\n' % line.strip())
- else:
- sys.stdout.write(' (unable to read python frame information)\n')
- else:
- info = self.is_other_python_frame()
- if info:
- sys.stdout.write(' %s\n' % info)
- else:
- sys.stdout.write(' (not a python frame)\n')
+ info = self.is_other_python_frame()
+ if info:
+ sys.stdout.write('#%i %s\n' % (self.get_index(), info))
+ else:
+ sys.stdout.write('#%i\n' % self.get_index())
+
+ def print_traceback(self):
+ if self.is_evalframe():
+ pyop = self.get_pyop()
+ if pyop:
+ pyop.print_traceback()
+ if not pyop.is_optimized_out():
+ line = pyop.current_line()
+ if line is not None:
+ sys.stdout.write(' %s\n' % line.strip())
+ else:
+ sys.stdout.write(' (unable to read python frame information)\n')
+ else:
+ info = self.is_other_python_frame()
+ if info:
+ sys.stdout.write(' %s\n' % info)
+ else:
+ sys.stdout.write(' (not a python frame)\n')
class PyList(gdb.Command):
'''List the current Python source code, if any
@@ -1701,7 +1701,7 @@ class PyList(gdb.Command):
gdb.COMMAND_FILES,
gdb.COMPLETE_NONE)
-
+
def invoke(self, args, from_tty):
import re
@@ -1717,14 +1717,14 @@ class PyList(gdb.Command):
if m:
start, end = map(int, m.groups())
- # py-list requires an actual PyEval_EvalFrameEx frame:
- frame = Frame.get_selected_bytecode_frame()
+ # py-list requires an actual PyEval_EvalFrameEx frame:
+ frame = Frame.get_selected_bytecode_frame()
if not frame:
- print('Unable to locate gdb frame for python bytecode interpreter')
+ print('Unable to locate gdb frame for python bytecode interpreter')
return
pyop = frame.get_pyop()
- if not pyop or pyop.is_optimized_out():
+ if not pyop or pyop.is_optimized_out():
print('Unable to read information on python frame')
return
@@ -1738,13 +1738,13 @@ class PyList(gdb.Command):
if start<1:
start = 1
- try:
- f = open(os_fsencode(filename), 'r')
- except IOError as err:
- sys.stdout.write('Unable to open %s: %s\n'
- % (filename, err))
- return
- with f:
+ try:
+ f = open(os_fsencode(filename), 'r')
+ except IOError as err:
+ sys.stdout.write('Unable to open %s: %s\n'
+ % (filename, err))
+ return
+ with f:
all_lines = f.readlines()
# start and end are 1-based, all_lines is 0-based;
# so [start-1:end] as a python slice gives us [start, end] as a
@@ -1756,17 +1756,17 @@ class PyList(gdb.Command):
linestr = '>' + linestr
sys.stdout.write('%4s %s' % (linestr, line))
-
+
# ...and register the command:
PyList()
def move_in_stack(move_up):
'''Move up or down the stack (for the py-up/py-down command)'''
frame = Frame.get_selected_python_frame()
- if not frame:
- print('Unable to locate python frame')
- return
-
+ if not frame:
+ print('Unable to locate python frame')
+ return
+
while frame:
if move_up:
iter_frame = frame.older()
@@ -1776,7 +1776,7 @@ def move_in_stack(move_up):
if not iter_frame:
break
- if iter_frame.is_python_frame():
+ if iter_frame.is_python_frame():
# Result:
if iter_frame.select():
iter_frame.print_summary()
@@ -1797,7 +1797,7 @@ class PyUp(gdb.Command):
gdb.COMMAND_STACK,
gdb.COMPLETE_NONE)
-
+
def invoke(self, args, from_tty):
move_in_stack(move_up=True)
@@ -1809,7 +1809,7 @@ class PyDown(gdb.Command):
gdb.COMMAND_STACK,
gdb.COMPLETE_NONE)
-
+
def invoke(self, args, from_tty):
move_in_stack(move_up=False)
@@ -1818,28 +1818,28 @@ if hasattr(gdb.Frame, 'select'):
PyUp()
PyDown()
-class PyBacktraceFull(gdb.Command):
- 'Display the current python frame and all the frames within its call stack (if any)'
- def __init__(self):
- gdb.Command.__init__ (self,
- "py-bt-full",
- gdb.COMMAND_STACK,
- gdb.COMPLETE_NONE)
-
-
- def invoke(self, args, from_tty):
- frame = Frame.get_selected_python_frame()
- if not frame:
- print('Unable to locate python frame')
- return
-
- while frame:
- if frame.is_python_frame():
- frame.print_summary()
- frame = frame.older()
-
-PyBacktraceFull()
-
+class PyBacktraceFull(gdb.Command):
+ 'Display the current python frame and all the frames within its call stack (if any)'
+ def __init__(self):
+ gdb.Command.__init__ (self,
+ "py-bt-full",
+ gdb.COMMAND_STACK,
+ gdb.COMPLETE_NONE)
+
+
+ def invoke(self, args, from_tty):
+ frame = Frame.get_selected_python_frame()
+ if not frame:
+ print('Unable to locate python frame')
+ return
+
+ while frame:
+ if frame.is_python_frame():
+ frame.print_summary()
+ frame = frame.older()
+
+PyBacktraceFull()
+
class PyBacktrace(gdb.Command):
'Display the current python frame and all the frames within its call stack (if any)'
def __init__(self):
@@ -1851,14 +1851,14 @@ class PyBacktrace(gdb.Command):
def invoke(self, args, from_tty):
frame = Frame.get_selected_python_frame()
- if not frame:
- print('Unable to locate python frame')
- return
-
- sys.stdout.write('Traceback (most recent call first):\n')
+ if not frame:
+ print('Unable to locate python frame')
+ return
+
+ sys.stdout.write('Traceback (most recent call first):\n')
while frame:
- if frame.is_python_frame():
- frame.print_traceback()
+ if frame.is_python_frame():
+ frame.print_traceback()
frame = frame.older()
PyBacktrace()
@@ -1871,7 +1871,7 @@ class PyPrint(gdb.Command):
gdb.COMMAND_DATA,
gdb.COMPLETE_NONE)
-
+
def invoke(self, args, from_tty):
name = str(args)
@@ -1888,10 +1888,10 @@ class PyPrint(gdb.Command):
pyop_var, scope = pyop_frame.get_var_by_name(name)
if pyop_var:
- print('%s %r = %s'
- % (scope,
- name,
- pyop_var.get_truncated_repr(MAX_OUTPUT_LEN)))
+ print('%s %r = %s'
+ % (scope,
+ name,
+ pyop_var.get_truncated_repr(MAX_OUTPUT_LEN)))
else:
print('%r not found' % name)
@@ -1899,13 +1899,13 @@ PyPrint()
class PyLocals(gdb.Command):
'Look up the given python variable name, and print it'
- def __init__(self, command="py-locals"):
- gdb.Command.__init__ (self,
- command,
- gdb.COMMAND_DATA,
- gdb.COMPLETE_NONE)
+ def __init__(self, command="py-locals"):
+ gdb.Command.__init__ (self,
+ command,
+ gdb.COMMAND_DATA,
+ gdb.COMPLETE_NONE)
+
-
def invoke(self, args, from_tty):
name = str(args)
@@ -1933,19 +1933,19 @@ class PyLocals(gdb.Command):
def get_namespace(self, pyop_frame):
return pyop_frame.iter_locals()
-PyLocals()
-
-
-##################################################################
-## added, not in CPython
-##################################################################
-
-import re
-import warnings
-import tempfile
-import textwrap
-import itertools
-
+PyLocals()
+
+
+##################################################################
+## added, not in CPython
+##################################################################
+
+import re
+import warnings
+import tempfile
+import textwrap
+import itertools
+
class PyGlobals(PyLocals):
'List all the globals in the currently select Python frame'
@@ -1953,7 +1953,7 @@ class PyGlobals(PyLocals):
return pyop_frame.iter_globals()
-PyGlobals("py-globals")
+PyGlobals("py-globals")
class PyNameEquals(gdb.Function):
@@ -2022,10 +2022,10 @@ class _LoggingState(object):
"""
def __init__(self):
- f = tempfile.NamedTemporaryFile('r+')
- self.file = f
- self.filename = f.name
- self.fd = f.fileno()
+ f = tempfile.NamedTemporaryFile('r+')
+ self.file = f
+ self.filename = f.name
+ self.fd = f.fileno()
_execute("set logging file %s" % self.filename)
self.file_position_stack = []
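
_LoggingState captures gdb console output by pointing gdb's logging machinery at a NamedTemporaryFile, so later helpers can read back what a command printed. A minimal sketch of the same trick (classic `set logging on/off` spelling; newer gdb prefers `set logging enabled`):

    import tempfile
    import gdb   # only importable inside gdb

    def capture(command):
        f = tempfile.NamedTemporaryFile('r+')
        gdb.execute('set logging file %s' % f.name)
        gdb.execute('set logging redirect on')   # file only, not tty
        gdb.execute('set logging on')
        try:
            gdb.execute(command)
        finally:
            gdb.execute('set logging off')
        f.seek(0)
        return f.read()
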
@@ -2594,7 +2594,7 @@ class PythonCodeExecutor(object):
inferior.
Of course, executing any code in the inferior may be dangerous and may
- leave the debuggee in an unsafe state or terminate it altogether.
+ leave the debuggee in an unsafe state or terminate it altogether.
"""
if '\0' in code:
raise gdb.GdbError("String contains NUL byte.")
@@ -2670,8 +2670,8 @@ class FixGdbCommand(gdb.Command):
def fix_gdb(self):
"""
- It seems that invoking either 'cy exec' and 'py-exec' work perfectly
- fine, but after this gdb's python API is entirely broken.
+ It seems that invoking either 'cy exec' and 'py-exec' work perfectly
+ fine, but after this gdb's python API is entirely broken.
Maybe some uncleared exception value is still set?
sys.exc_clear() didn't help. A demonstration:
diff --git a/contrib/tools/cython/Cython/Distutils/build_ext.py b/contrib/tools/cython/Cython/Distutils/build_ext.py
index e1ecff9938..598bb4a89b 100644
--- a/contrib/tools/cython/Cython/Distutils/build_ext.py
+++ b/contrib/tools/cython/Cython/Distutils/build_ext.py
@@ -14,12 +14,12 @@ else:
class new_build_ext(_build_ext, object):
def finalize_options(self):
if self.distribution.ext_modules:
- nthreads = getattr(self, 'parallel', None) # -j option in Py3.5+
- nthreads = int(nthreads) if nthreads else None
+ nthreads = getattr(self, 'parallel', None) # -j option in Py3.5+
+ nthreads = int(nthreads) if nthreads else None
from Cython.Build.Dependencies import cythonize
self.distribution.ext_modules[:] = cythonize(
- self.distribution.ext_modules, nthreads=nthreads, force=self.force)
- super(new_build_ext, self).finalize_options()
+ self.distribution.ext_modules, nthreads=nthreads, force=self.force)
+ super(new_build_ext, self).finalize_options()
# This will become new_build_ext in the future.
from .old_build_ext import old_build_ext as build_ext
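
new_build_ext above forwards distutils' -j/--parallel option (available since Python 3.5) into cythonize(nthreads=...), so parallel builds also parallelise the .pyx-to-C step. A sketch of a setup.py that would exercise it; the package and module names are made up, and note the module's own comment that the exported build_ext is still old_build_ext for now:

    # Hypothetical setup.py; the 'demo' names are placeholders.
    from setuptools import setup, Extension
    from Cython.Distutils import build_ext

    setup(
        name='demo',
        ext_modules=[Extension('demo.fast', ['demo/fast.pyx'])],
        cmdclass={'build_ext': build_ext},
    )
    # `python setup.py build_ext --parallel 4` then cythonizes and
    # compiles with up to four parallel jobs.
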
diff --git a/contrib/tools/cython/Cython/Distutils/old_build_ext.py b/contrib/tools/cython/Cython/Distutils/old_build_ext.py
index e6ed3c1013..aa2a1cf229 100644
--- a/contrib/tools/cython/Cython/Distutils/old_build_ext.py
+++ b/contrib/tools/cython/Cython/Distutils/old_build_ext.py
@@ -1,10 +1,10 @@
"""Cython.Distutils.old_build_ext
Implements a version of the Distutils 'build_ext' command, for
-building Cython extension modules.
+building Cython extension modules.
-Note that this module is deprecated. Use cythonize() instead.
-"""
+Note that this module is deprecated. Use cythonize() instead.
+"""
__revision__ = "$Id:$"
diff --git a/contrib/tools/cython/Cython/Includes/cpython/__init__.pxd b/contrib/tools/cython/Cython/Includes/cpython/__init__.pxd
index d038304115..c81f4e6655 100644
--- a/contrib/tools/cython/Cython/Includes/cpython/__init__.pxd
+++ b/contrib/tools/cython/Cython/Includes/cpython/__init__.pxd
@@ -10,13 +10,13 @@
# Read http://docs.python.org/api/refcounts.html which is so
# important I've copied it below.
#
-# For all the declaration below, whenever the Py_ function returns
+# For all the declaration below, whenever the Py_ function returns
# a *new reference* to a PyObject*, the return type is "object".
# When the function returns a borrowed reference, the return
# type is PyObject*. When Cython sees "object" as a return type
# it doesn't increment the reference count. When it sees PyObject*
# in order to use the result you must explicitly cast to <object>,
-# and when you do that Cython increments the reference count whether
+# and when you do that Cython increments the reference count whether
# you want it to or not, forcing you to an explicit DECREF (or leak memory).
# To avoid this we make the above convention. Note, you can
# always locally override this convention by putting something like
diff --git a/contrib/tools/cython/Cython/Includes/cpython/array.pxd b/contrib/tools/cython/Cython/Includes/cpython/array.pxd
index f27d46aea4..19230a0a82 100644
--- a/contrib/tools/cython/Cython/Includes/cpython/array.pxd
+++ b/contrib/tools/cython/Cython/Includes/cpython/array.pxd
@@ -1,46 +1,46 @@
"""
array.pxd
-
+
Cython interface to Python's array.array module.
-
+
* 1D contiguous data view
* tools for fast array creation, maximum C-speed and handiness
* suitable as allround light weight auto-array within Cython code too
-
+
Usage:
-
+
>>> cimport array
- Usage through Cython buffer interface (Py2.3+):
-
+ Usage through Cython buffer interface (Py2.3+):
+
>>> def f(arg1, unsigned i, double dx)
... array.array[double] a = arg1
... a[i] += dx
-
+
Fast C-level new_array(_zeros), resize_array, copy_array, Py_SIZE(obj),
zero_array
-
- cdef array.array[double] k = array.copy(d)
+
+ cdef array.array[double] k = array.copy(d)
cdef array.array[double] n = array.array(d, Py_SIZE(d) * 2 )
cdef array.array[double] m = array.zeros_like(FLOAT_TEMPLATE)
array.resize(f, 200000)
-
- Zero overhead with naked data pointer views by union:
- _f, _d, _i, _c, _u, ...
+
+ Zero overhead with naked data pointer views by union:
+ _f, _d, _i, _c, _u, ...
=> Original C array speed + Python dynamic memory management
cdef array.array a = inarray
- if
+ if
a._d[2] += 0.66 # use as double array without extra casting
-
+
float *subview = vector._f + 10 # starting from 10th element
- unsigned char *subview_buffer = vector._B + 4
-
- Suitable as lightweight arrays intra Cython without speed penalty.
- Replacement for C stack/malloc arrays; no trouble with refcounting,
+ unsigned char *subview_buffer = vector._B + 4
+
+ Suitable as lightweight arrays intra Cython without speed penalty.
+ Replacement for C stack/malloc arrays; no trouble with refcounting,
mem.leaks; seamless Python compatibility, buffer() optional
-
+
last changes: 2009-05-15 rk
: 2009-12-06 bp
: 2012-05-02 andreasvc
@@ -75,8 +75,8 @@ cdef extern from *: # Hard-coded utility code hack.
char *as_chars
unsigned long *as_ulongs
long *as_longs
- unsigned long long *as_ulonglongs
- long long *as_longlongs
+ unsigned long long *as_ulonglongs
+ long long *as_longlongs
short *as_shorts
unsigned short *as_ushorts
Py_UNICODE *as_pyunicodes
@@ -92,7 +92,7 @@ cdef extern from *: # Hard-coded utility code hack.
def __getbuffer__(self, Py_buffer* info, int flags):
# This implementation of getbuffer is geared towards Cython
- # requirements, and does not yet fulfill the PEP.
+ # requirements, and does not yet fulfill the PEP.
# In particular strided access is always provided regardless
# of flags
item_count = Py_SIZE(self)
@@ -143,7 +143,7 @@ cdef inline array copy(array self):
return op
cdef inline int extend_buffer(array self, char* stuff, Py_ssize_t n) except -1:
- """ efficient appending of new stuff of same type
+ """ efficient appending of new stuff of same type
(e.g. of same array type)
n: number of elements (not number of bytes!) """
cdef Py_ssize_t itemsize = self.ob_descr.itemsize
diff --git a/contrib/tools/cython/Cython/Includes/cpython/datetime.pxd b/contrib/tools/cython/Cython/Includes/cpython/datetime.pxd
index bf32617074..cd0f90719b 100644
--- a/contrib/tools/cython/Cython/Includes/cpython/datetime.pxd
+++ b/contrib/tools/cython/Cython/Includes/cpython/datetime.pxd
@@ -5,7 +5,7 @@ cdef extern from "Python.h":
pass
cdef extern from "datetime.h":
-
+
ctypedef extern class datetime.date[object PyDateTime_Date]:
pass
@@ -23,11 +23,11 @@ cdef extern from "datetime.h":
ctypedef struct PyDateTime_Date:
pass
-
+
ctypedef struct PyDateTime_Time:
char hastzinfo
PyObject *tzinfo
-
+
ctypedef struct PyDateTime_DateTime:
char hastzinfo
PyObject *tzinfo
@@ -36,22 +36,22 @@ cdef extern from "datetime.h":
int days
int seconds
int microseconds
-
+
# Define structure for C API.
ctypedef struct PyDateTime_CAPI:
- # type objects
+ # type objects
PyTypeObject *DateType
PyTypeObject *DateTimeType
PyTypeObject *TimeType
PyTypeObject *DeltaType
PyTypeObject *TZInfoType
-
+
# constructors
object (*Date_FromDate)(int, int, int, PyTypeObject*)
object (*DateTime_FromDateAndTime)(int, int, int, int, int, int, int, object, PyTypeObject*)
object (*Time_FromTime)(int, int, int, int, object, PyTypeObject*)
object (*Delta_FromDelta)(int, int, int, int, PyTypeObject*)
-
+
# constructors for the DB API
object (*DateTime_FromTimestamp)(object, object, object)
object (*Date_FromTimestamp)(object, object)
@@ -96,7 +96,7 @@ cdef extern from "datetime.h":
# PyDateTime CAPI object.
PyDateTime_CAPI *PyDateTimeAPI
-
+
void PyDateTime_IMPORT()
# Datetime C API initialization function.
@@ -108,7 +108,7 @@ cdef inline void import_datetime():
# Note, there are no range checks for any of the arguments.
cdef inline object date_new(int year, int month, int day):
return PyDateTimeAPI.Date_FromDate(year, month, day, PyDateTimeAPI.DateType)
-
+
# Create time object using DateTime CAPI factory function
# Note, there are no range checks for any of the arguments.
cdef inline object time_new(int hour, int minute, int second, int microsecond, object tz):
@@ -127,7 +127,7 @@ cdef inline object timedelta_new(int days, int seconds, int useconds):
# More recognizable getters for date/time/datetime/timedelta.
# There are no setters because datetime.h hasn't them.
# This is because of immutable nature of these objects by design.
-# If you would change time/date/datetime/timedelta object you need to recreate.
+# If you would change time/date/datetime/timedelta object you need to recreate.
# Get tzinfo of time
cdef inline object time_tzinfo(object o):
@@ -136,7 +136,7 @@ cdef inline object time_tzinfo(object o):
else:
return None
-# Get tzinfo of datetime
+# Get tzinfo of datetime
cdef inline object datetime_tzinfo(object o):
if (<PyDateTime_DateTime*>o).hastzinfo:
return <object>(<PyDateTime_DateTime*>o).tzinfo
@@ -146,7 +146,7 @@ cdef inline object datetime_tzinfo(object o):
# Get year of date
cdef inline int date_year(object o):
return PyDateTime_GET_YEAR(o)
-
+
# Get month of date
cdef inline int date_month(object o):
return PyDateTime_GET_MONTH(o)
@@ -158,7 +158,7 @@ cdef inline int date_day(object o):
# Get year of datetime
cdef inline int datetime_year(object o):
return PyDateTime_GET_YEAR(o)
-
+
# Get month of datetime
cdef inline int datetime_month(object o):
return PyDateTime_GET_MONTH(o)
diff --git a/contrib/tools/cython/Cython/Includes/cpython/long.pxd b/contrib/tools/cython/Cython/Includes/cpython/long.pxd
index b4754dff6a..eb8140d417 100644
--- a/contrib/tools/cython/Cython/Includes/cpython/long.pxd
+++ b/contrib/tools/cython/Cython/Includes/cpython/long.pxd
@@ -30,14 +30,14 @@ cdef extern from "Python.h":
# Return value: New reference.
# Return a new PyLongObject object from a C unsigned long, or NULL on failure.
- object PyLong_FromSsize_t(Py_ssize_t v)
- # Return value: New reference.
- # Return a new PyLongObject object from a C Py_ssize_t, or NULL on failure.)
-
- object PyLong_FromSize_t(size_t v)
- # Return value: New reference.
- # Return a new PyLongObject object from a C size_t, or NULL on failure.
-
+ object PyLong_FromSsize_t(Py_ssize_t v)
+ # Return value: New reference.
+ # Return a new PyLongObject object from a C Py_ssize_t, or NULL on failure.)
+
+ object PyLong_FromSize_t(size_t v)
+ # Return value: New reference.
+ # Return a new PyLongObject object from a C size_t, or NULL on failure.
+
object PyLong_FromLongLong(PY_LONG_LONG v)
# Return value: New reference.
# Return a new PyLongObject object from a C long long, or NULL on failure.
@@ -72,12 +72,12 @@ cdef extern from "Python.h":
# range [2, 36]; if it is out of range, ValueError will be
# raised.
- # object PyLong_FromUnicodeObject(object u, int base)
- # Convert a sequence of Unicode digits in the string u to a Python integer
- # value. The Unicode string is first encoded to a byte string using
- # PyUnicode_EncodeDecimal() and then converted using PyLong_FromString().
- # New in version 3.3.
-
+ # object PyLong_FromUnicodeObject(object u, int base)
+ # Convert a sequence of Unicode digits in the string u to a Python integer
+ # value. The Unicode string is first encoded to a byte string using
+ # PyUnicode_EncodeDecimal() and then converted using PyLong_FromString().
+ # New in version 3.3.
+
object PyLong_FromVoidPtr(void *p)
# Return value: New reference.
# Create a Python integer or long integer from the pointer p. The
@@ -89,27 +89,27 @@ cdef extern from "Python.h":
# Return a C long representation of the contents of pylong. If
# pylong is greater than LONG_MAX, an OverflowError is raised.
- # long PyLong_AsLongAndOverflow(object pylong, int *overflow) except? -1
- # Return a C long representation of the contents of pylong. If pylong is
- # greater than LONG_MAX or less than LONG_MIN, set *overflow to 1 or -1,
- # respectively, and return -1; otherwise, set *overflow to 0. If any other
- # exception occurs (for example a TypeError or MemoryError), then -1 will
- # be returned and *overflow will be 0.
- # New in version 2.7.
-
- # PY_LONG_LONG PyLong_AsLongLongAndOverflow(object pylong, int *overflow) except? -1
- # Return a C long long representation of the contents of pylong. If pylong
- # is greater than PY_LLONG_MAX or less than PY_LLONG_MIN, set *overflow to
- # 1 or -1, respectively, and return -1; otherwise, set *overflow to 0. If
- # any other exception occurs (for example a TypeError or MemoryError), then
- # -1 will be returned and *overflow will be 0.
- # New in version 2.7.
-
- Py_ssize_t PyLong_AsSsize_t(object pylong) except? -1
- # Return a C Py_ssize_t representation of the contents of pylong. If pylong
- # is greater than PY_SSIZE_T_MAX, an OverflowError is raised and -1 will be
- # returned.
-
+ # long PyLong_AsLongAndOverflow(object pylong, int *overflow) except? -1
+ # Return a C long representation of the contents of pylong. If pylong is
+ # greater than LONG_MAX or less than LONG_MIN, set *overflow to 1 or -1,
+ # respectively, and return -1; otherwise, set *overflow to 0. If any other
+ # exception occurs (for example a TypeError or MemoryError), then -1 will
+ # be returned and *overflow will be 0.
+ # New in version 2.7.
+
+ # PY_LONG_LONG PyLong_AsLongLongAndOverflow(object pylong, int *overflow) except? -1
+ # Return a C long long representation of the contents of pylong. If pylong
+ # is greater than PY_LLONG_MAX or less than PY_LLONG_MIN, set *overflow to
+ # 1 or -1, respectively, and return -1; otherwise, set *overflow to 0. If
+ # any other exception occurs (for example a TypeError or MemoryError), then
+ # -1 will be returned and *overflow will be 0.
+ # New in version 2.7.
+
+ Py_ssize_t PyLong_AsSsize_t(object pylong) except? -1
+ # Return a C Py_ssize_t representation of the contents of pylong. If pylong
+ # is greater than PY_SSIZE_T_MAX, an OverflowError is raised and -1 will be
+ # returned.
+
unsigned long PyLong_AsUnsignedLong(object pylong) except? -1
# Return a C unsigned long representation of the contents of
# pylong. If pylong is greater than ULONG_MAX, an OverflowError is
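
For reference (not part of the patch): a minimal Cython sketch of how the cpython.long declarations above are typically consumed; the file and function names are illustrative.

    # long_demo.pyx (illustrative)
    from cpython.long cimport PyLong_FromSize_t, PyLong_AsSsize_t

    def roundtrip(size_t n):
        # PyLong_FromSize_t returns a new reference; declaring the result
        # as a Python object lets Cython manage the refcount.
        obj = PyLong_FromSize_t(n)
        # Declared `except? -1`: values above PY_SSIZE_T_MAX raise
        # OverflowError instead of silently truncating.
        return PyLong_AsSsize_t(obj)
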
diff --git a/contrib/tools/cython/Cython/Includes/cpython/module.pxd b/contrib/tools/cython/Cython/Includes/cpython/module.pxd
index a21d19c58d..8eb323b010 100644
--- a/contrib/tools/cython/Cython/Includes/cpython/module.pxd
+++ b/contrib/tools/cython/Cython/Includes/cpython/module.pxd
@@ -35,19 +35,19 @@ cdef extern from "Python.h":
# unless a non-empty fromlist was given. Changed in version 2.4:
# failing imports remove incomplete module objects.
- object PyImport_ImportModuleLevel(char *name, object globals, object locals, object fromlist, int level)
- # Return value: New reference.
-
- # Import a module. This is best described by referring to the
- # built-in Python function __import__(), as the standard
- # __import__() function calls this function directly.
-
- # The return value is a new reference to the imported module or
- # top-level package, or NULL with an exception set on failure. Like
- # for __import__(), the return value when a submodule of a package
- # was requested is normally the top-level package, unless a
- # non-empty fromlist was given.
-
+ object PyImport_ImportModuleLevel(char *name, object globals, object locals, object fromlist, int level)
+ # Return value: New reference.
+
+ # Import a module. This is best described by referring to the
+ # built-in Python function __import__(), as the standard
+ # __import__() function calls this function directly.
+
+ # The return value is a new reference to the imported module or
+ # top-level package, or NULL with an exception set on failure. Like
+ # for __import__(), the return value when a submodule of a package
+ # was requested is normally the top-level package, unless a
+ # non-empty fromlist was given.
+
object PyImport_Import(object name)
# Return value: New reference.
# This is a higher-level interface that calls the current ``import
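
For reference (not part of the patch): PyImport_Import is the usual entry point from these declarations; a minimal illustrative sketch follows.

    # import_demo.pyx (illustrative)
    from cpython.module cimport PyImport_Import

    def load(name):
        # Takes the module name as a Python object and returns a new
        # reference; since the declared return type is `object`, the
        # reference is managed automatically and errors raise normally.
        return PyImport_Import(name)
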
diff --git a/contrib/tools/cython/Cython/Includes/cpython/object.pxd b/contrib/tools/cython/Cython/Includes/cpython/object.pxd
index e7183a6a5f..5a81166393 100644
--- a/contrib/tools/cython/Cython/Includes/cpython/object.pxd
+++ b/contrib/tools/cython/Cython/Includes/cpython/object.pxd
@@ -10,27 +10,27 @@ cdef extern from "Python.h":
ctypedef object (*unaryfunc)(object)
ctypedef object (*binaryfunc)(object, object)
ctypedef object (*ternaryfunc)(object, object, object)
- ctypedef int (*inquiry)(object) except -1
- ctypedef Py_ssize_t (*lenfunc)(object) except -1
+ ctypedef int (*inquiry)(object) except -1
+ ctypedef Py_ssize_t (*lenfunc)(object) except -1
ctypedef object (*ssizeargfunc)(object, Py_ssize_t)
ctypedef object (*ssizessizeargfunc)(object, Py_ssize_t, Py_ssize_t)
- ctypedef int (*ssizeobjargproc)(object, Py_ssize_t, object) except -1
- ctypedef int (*ssizessizeobjargproc)(object, Py_ssize_t, Py_ssize_t, object) except -1
- ctypedef int (*objobjargproc)(object, object, object) except -1
- ctypedef int (*objobjproc)(object, object) except -1
+ ctypedef int (*ssizeobjargproc)(object, Py_ssize_t, object) except -1
+ ctypedef int (*ssizessizeobjargproc)(object, Py_ssize_t, Py_ssize_t, object) except -1
+ ctypedef int (*objobjargproc)(object, object, object) except -1
+ ctypedef int (*objobjproc)(object, object) except -1
- ctypedef Py_hash_t (*hashfunc)(object) except -1
+ ctypedef Py_hash_t (*hashfunc)(object) except -1
ctypedef object (*reprfunc)(object)
- ctypedef int (*cmpfunc)(object, object) except -2
+ ctypedef int (*cmpfunc)(object, object) except -2
ctypedef object (*richcmpfunc)(object, object, int)
# The following functions use 'PyObject*' as first argument instead of 'object' to prevent
# accidental reference counting when calling them during a garbage collection run.
ctypedef void (*destructor)(PyObject*)
- ctypedef int (*visitproc)(PyObject*, void *) except -1
- ctypedef int (*traverseproc)(PyObject*, visitproc, void*) except -1
- ctypedef void (*freefunc)(void*)
+ ctypedef int (*visitproc)(PyObject*, void *) except -1
+ ctypedef int (*traverseproc)(PyObject*, visitproc, void*) except -1
+ ctypedef void (*freefunc)(void*)
ctypedef object (*descrgetfunc)(object, object, object)
ctypedef int (*descrsetfunc)(object, object, object) except -1
@@ -47,7 +47,7 @@ cdef extern from "Python.h":
destructor tp_dealloc
traverseproc tp_traverse
inquiry tp_clear
- freefunc tp_free
+ freefunc tp_free
ternaryfunc tp_call
hashfunc tp_hash
@@ -104,8 +104,8 @@ cdef extern from "Python.h":
# or NULL on failure. This is the equivalent of the Python
# expression "o.attr_name".
- object PyObject_GenericGetAttr(object o, object attr_name)
-
+ object PyObject_GenericGetAttr(object o, object attr_name)
+
int PyObject_SetAttrString(object o, const char *attr_name, object v) except -1
# Set the value of the attribute named attr_name, for object o, to
# the value v. Returns -1 on failure. This is the equivalent of
@@ -116,8 +116,8 @@ cdef extern from "Python.h":
# the value v. Returns -1 on failure. This is the equivalent of
# the Python statement "o.attr_name = v".
- int PyObject_GenericSetAttr(object o, object attr_name, object v) except -1
-
+ int PyObject_GenericSetAttr(object o, object attr_name, object v) except -1
+
int PyObject_DelAttrString(object o, const char *attr_name) except -1
# Delete attribute named attr_name, for object o. Returns -1 on
# failure. This is the equivalent of the Python statement: "del
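
For reference (not part of the patch): a small sketch of the attribute helpers declared above; names are illustrative.

    # attr_demo.pyx (illustrative)
    from cpython.object cimport PyObject_SetAttrString, PyObject_DelAttrString

    def set_then_del(o, value):
        # Both are declared `except -1`, so failures raise a Python
        # exception instead of requiring manual return-code checks.
        PyObject_SetAttrString(o, b"tag", value)
        PyObject_DelAttrString(o, b"tag")
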
diff --git a/contrib/tools/cython/Cython/Includes/cpython/pythread.pxd b/contrib/tools/cython/Cython/Includes/cpython/pythread.pxd
index 3978ce25f4..392bef7d64 100644
--- a/contrib/tools/cython/Cython/Includes/cpython/pythread.pxd
+++ b/contrib/tools/cython/Cython/Includes/cpython/pythread.pxd
@@ -6,11 +6,11 @@ cdef extern from "pythread.h":
ctypedef void *PyThread_type_sema
void PyThread_init_thread()
- long PyThread_start_new_thread(void (*)(void *), void *) # FIXME: legacy
- #unsigned long PyThread_start_new_thread(void (*)(void *), void *) # returned 'long' before Py3.7
+ long PyThread_start_new_thread(void (*)(void *), void *) # FIXME: legacy
+ #unsigned long PyThread_start_new_thread(void (*)(void *), void *) # returned 'long' before Py3.7
void PyThread_exit_thread()
- long PyThread_get_thread_ident() # FIXME: legacy
- #unsigned long PyThread_get_thread_ident() # returned 'long' before Py3.7
+ long PyThread_get_thread_ident() # FIXME: legacy
+ #unsigned long PyThread_get_thread_ident() # returned 'long' before Py3.7
PyThread_type_lock PyThread_allocate_lock()
void PyThread_free_lock(PyThread_type_lock)
@@ -31,7 +31,7 @@ cdef extern from "pythread.h":
size_t PyThread_get_stacksize()
int PyThread_set_stacksize(size_t)
- # Thread Local Storage (TLS) API deprecated in CPython 3.7+
+ # Thread Local Storage (TLS) API deprecated in CPython 3.7+
int PyThread_create_key()
void PyThread_delete_key(int)
int PyThread_set_key_value(int, void *)
@@ -40,14 +40,14 @@ cdef extern from "pythread.h":
# Cleanup after a fork
void PyThread_ReInitTLS()
-
- # Thread Specific Storage (TSS) API in CPython 3.7+ (also backported)
- #ctypedef struct Py_tss_t: pass # Cython built-in type
- Py_tss_t Py_tss_NEEDS_INIT # Not normally useful: Cython auto-initialises declared "Py_tss_t" variables.
- Py_tss_t * PyThread_tss_alloc()
- void PyThread_tss_free(Py_tss_t *key)
- int PyThread_tss_is_created(Py_tss_t *key)
- int PyThread_tss_create(Py_tss_t *key)
- void PyThread_tss_delete(Py_tss_t *key)
- int PyThread_tss_set(Py_tss_t *key, void *value)
- void * PyThread_tss_get(Py_tss_t *key)
+
+ # Thread Specific Storage (TSS) API in CPython 3.7+ (also backported)
+ #ctypedef struct Py_tss_t: pass # Cython built-in type
+ Py_tss_t Py_tss_NEEDS_INIT # Not normally useful: Cython auto-initialises declared "Py_tss_t" variables.
+ Py_tss_t * PyThread_tss_alloc()
+ void PyThread_tss_free(Py_tss_t *key)
+ int PyThread_tss_is_created(Py_tss_t *key)
+ int PyThread_tss_create(Py_tss_t *key)
+ void PyThread_tss_delete(Py_tss_t *key)
+ int PyThread_tss_set(Py_tss_t *key, void *value)
+ void * PyThread_tss_get(Py_tss_t *key)
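
For reference (not part of the patch): a sketch of the TSS API re-added above, assuming CPython 3.7+ (or the backport). The integer-through-void* trick is just to keep the sketch free of object-lifetime bookkeeping.

    # tss_demo.pyx (illustrative; needs CPython 3.7+ or the backported TSS API)
    from cpython.pythread cimport (PyThread_tss_create, PyThread_tss_set,
                                   PyThread_tss_get)

    cdef Py_tss_t _key   # built-in Cython type; auto-initialised as declared

    def stash(Py_ssize_t value):
        # Calling tss_create on an already-created key is documented as
        # a cheap no-op, so lazy creation here is safe.
        if PyThread_tss_create(&_key) != 0:
            raise MemoryError()
        # TSS stores a bare void*; smuggle an integer through it.
        PyThread_tss_set(&_key, <void*>value)

    def peek():
        return <Py_ssize_t>PyThread_tss_get(&_key)
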
diff --git a/contrib/tools/cython/Cython/Includes/cpython/set.pxd b/contrib/tools/cython/Cython/Includes/cpython/set.pxd
index 244990fa04..ae31d28ae3 100644
--- a/contrib/tools/cython/Cython/Includes/cpython/set.pxd
+++ b/contrib/tools/cython/Cython/Includes/cpython/set.pxd
@@ -44,15 +44,15 @@ cdef extern from "Python.h":
# Return true if p is a set object or a frozenset object but not
# an instance of a subtype.
- bint PyFrozenSet_Check(object p)
- # Return true if p is a frozenset object or an instance of a subtype.
-
+ bint PyFrozenSet_Check(object p)
+ # Return true if p is a frozenset object or an instance of a subtype.
+
bint PyFrozenSet_CheckExact(object p)
# Return true if p is a frozenset object but not an instance of a subtype.
- bint PySet_Check(object p)
- # Return true if p is a set object or an instance of a subtype.
-
+ bint PySet_Check(object p)
+ # Return true if p is a set object or an instance of a subtype.
+
object PySet_New(object iterable)
# Return value: New reference.
# Return a new set containing objects returned by the
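
For reference (not part of the patch): the restored check macros compose naturally; an illustrative sketch.

    # set_demo.pyx (illustrative)
    from cpython.set cimport PySet_Check, PyFrozenSet_Check, PyFrozenSet_CheckExact

    def classify(obj):
        if PyFrozenSet_CheckExact(obj):
            return "frozenset exactly"
        if PyFrozenSet_Check(obj):
            return "frozenset subtype"
        if PySet_Check(obj):
            return "set (or subtype)"
        return "not a set"
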
diff --git a/contrib/tools/cython/Cython/Includes/cpython/tuple.pxd b/contrib/tools/cython/Cython/Includes/cpython/tuple.pxd
index 6541cb471d..09c46e0b4b 100644
--- a/contrib/tools/cython/Cython/Includes/cpython/tuple.pxd
+++ b/contrib/tools/cython/Cython/Includes/cpython/tuple.pxd
@@ -45,7 +45,7 @@ cdef extern from "Python.h":
# Return value: New reference.
# Take a slice of the tuple pointed to by p from low to high and return it as a new tuple.
- int PyTuple_SetItem(object p, Py_ssize_t pos, object o) except -1
+ int PyTuple_SetItem(object p, Py_ssize_t pos, object o) except -1
# Insert a reference to object o at position pos of the tuple
# pointed to by p. Return 0 on success. Note: This function
# ``steals'' a reference to o.
diff --git a/contrib/tools/cython/Cython/Includes/cpython/weakref.pxd b/contrib/tools/cython/Cython/Includes/cpython/weakref.pxd
index 44deb5ce9a..9c4b50f564 100644
--- a/contrib/tools/cython/Cython/Includes/cpython/weakref.pxd
+++ b/contrib/tools/cython/Cython/Includes/cpython/weakref.pxd
@@ -33,7 +33,7 @@ cdef extern from "Python.h":
# a weakly-referencable object, or if callback is not callable,
# None, or NULL, this will return NULL and raise TypeError.
- PyObject* PyWeakref_GetObject(object ref) except NULL
+ PyObject* PyWeakref_GetObject(object ref) except NULL
# Return the referenced object from a weak reference, ref. If the
# referent is no longer live, returns None.
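
For reference (not part of the patch): the borrowed-reference semantics noted above are the main trap here; a minimal sketch, assuming PyWeakref_NewRef is declared earlier in this pxd as it is upstream.

    # weakref_demo.pyx (illustrative)
    from cpython.weakref cimport PyWeakref_NewRef, PyWeakref_GetObject

    def deref(obj):
        # obj must be weakly referenceable (most user-defined classes are).
        ref = PyWeakref_NewRef(obj, None)
        # GetObject returns a *borrowed* PyObject*; the <object> cast takes
        # its own reference, so returning the result is safe. A dead
        # referent comes back as None.
        return <object>PyWeakref_GetObject(ref)
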
diff --git a/contrib/tools/cython/Cython/Includes/libc/limits.pxd b/contrib/tools/cython/Cython/Includes/libc/limits.pxd
index 21c323ba94..39d10a1ff9 100644
--- a/contrib/tools/cython/Cython/Includes/libc/limits.pxd
+++ b/contrib/tools/cython/Cython/Includes/libc/limits.pxd
@@ -1,28 +1,28 @@
# 5.2.4.2.1 Sizes of integer types <limits.h>
cdef extern from "<limits.h>":
- const int CHAR_BIT
- const int MB_LEN_MAX
+ const int CHAR_BIT
+ const int MB_LEN_MAX
- const char CHAR_MIN
- const char CHAR_MAX
+ const char CHAR_MIN
+ const char CHAR_MAX
- const signed char SCHAR_MIN
- const signed char SCHAR_MAX
- const unsigned char UCHAR_MAX
+ const signed char SCHAR_MIN
+ const signed char SCHAR_MAX
+ const unsigned char UCHAR_MAX
- const short SHRT_MIN
- const short SHRT_MAX
- const unsigned short USHRT_MAX
+ const short SHRT_MIN
+ const short SHRT_MAX
+ const unsigned short USHRT_MAX
- const int INT_MIN
- const int INT_MAX
- const unsigned int UINT_MAX
+ const int INT_MIN
+ const int INT_MAX
+ const unsigned int UINT_MAX
- const long LONG_MIN
- const long LONG_MAX
- const unsigned long ULONG_MAX
+ const long LONG_MIN
+ const long LONG_MAX
+ const unsigned long ULONG_MAX
- const long long LLONG_MIN
- const long long LLONG_MAX
- const unsigned long long ULLONG_MAX
+ const long long LLONG_MIN
+ const long long LLONG_MAX
+ const unsigned long long ULLONG_MAX
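
For reference (not part of the patch): these constants resolve from <limits.h> at C compile time; a trivial illustrative sketch.

    # limits_demo.pyx (illustrative)
    from libc.limits cimport INT_MAX, LONG_MIN, ULLONG_MAX

    def bounds():
        # Each constant converts to a Python int on return.
        return INT_MAX, LONG_MIN, ULLONG_MAX
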
diff --git a/contrib/tools/cython/Cython/Includes/libc/signal.pxd b/contrib/tools/cython/Cython/Includes/libc/signal.pxd
index d110f6e716..5d34935543 100644
--- a/contrib/tools/cython/Cython/Includes/libc/signal.pxd
+++ b/contrib/tools/cython/Cython/Includes/libc/signal.pxd
@@ -13,52 +13,52 @@ cdef extern from "<signal.h>" nogil:
sighandler_t signal (int signum, sighandler_t action)
int raise_"raise" (int signum)
- # Signals
- enum:
- # Program Error
- SIGFPE
- SIGILL
- SIGSEGV
- SIGBUS
- SIGABRT
- SIGIOT
- SIGTRAP
- SIGEMT
- SIGSYS
- SIGSTKFLT
- # Termination
- SIGTERM
- SIGINT
- SIGQUIT
- SIGKILL
- SIGHUP
- # Alarm
- SIGALRM
- SIGVTALRM
- SIGPROF
- # Asynchronous I/O
- SIGIO
- SIGURG
- SIGPOLL
- # Job Control
- SIGCHLD
- SIGCLD
- SIGCONT
- SIGSTOP
- SIGTSTP
- SIGTTIN
- SIGTTOU
- # Operation Error
- SIGPIPE
- SIGLOST
- SIGXCPU
- SIGXFSZ
- SIGPWR
- # Miscellaneous
- SIGUSR1
- SIGUSR2
- SIGWINCH
- SIGINFO
- # Real-time signals
- SIGRTMIN
- SIGRTMAX
+ # Signals
+ enum:
+ # Program Error
+ SIGFPE
+ SIGILL
+ SIGSEGV
+ SIGBUS
+ SIGABRT
+ SIGIOT
+ SIGTRAP
+ SIGEMT
+ SIGSYS
+ SIGSTKFLT
+ # Termination
+ SIGTERM
+ SIGINT
+ SIGQUIT
+ SIGKILL
+ SIGHUP
+ # Alarm
+ SIGALRM
+ SIGVTALRM
+ SIGPROF
+ # Asynchronous I/O
+ SIGIO
+ SIGURG
+ SIGPOLL
+ # Job Control
+ SIGCHLD
+ SIGCLD
+ SIGCONT
+ SIGSTOP
+ SIGTSTP
+ SIGTTIN
+ SIGTTOU
+ # Operation Error
+ SIGPIPE
+ SIGLOST
+ SIGXCPU
+ SIGXFSZ
+ SIGPWR
+ # Miscellaneous
+ SIGUSR1
+ SIGUSR2
+ SIGWINCH
+ SIGINFO
+ # Real-time signals
+ SIGRTMIN
+ SIGRTMAX
diff --git a/contrib/tools/cython/Cython/Includes/libcpp/algorithm.pxd b/contrib/tools/cython/Cython/Includes/libcpp/algorithm.pxd
index 04929eb3b0..ec7c3835b4 100644
--- a/contrib/tools/cython/Cython/Includes/libcpp/algorithm.pxd
+++ b/contrib/tools/cython/Cython/Includes/libcpp/algorithm.pxd
@@ -7,14 +7,14 @@ cdef extern from "<algorithm>" namespace "std" nogil:
bool binary_search[Iter, T, Compare](Iter first, Iter last, const T& value,
Compare comp)
- Iter lower_bound[Iter, T](Iter first, Iter last, const T& value)
- Iter lower_bound[Iter, T, Compare](Iter first, Iter last, const T& value,
- Compare comp)
-
- Iter upper_bound[Iter, T](Iter first, Iter last, const T& value)
- Iter upper_bound[Iter, T, Compare](Iter first, Iter last, const T& value,
- Compare comp)
-
+ Iter lower_bound[Iter, T](Iter first, Iter last, const T& value)
+ Iter lower_bound[Iter, T, Compare](Iter first, Iter last, const T& value,
+ Compare comp)
+
+ Iter upper_bound[Iter, T](Iter first, Iter last, const T& value)
+ Iter upper_bound[Iter, T, Compare](Iter first, Iter last, const T& value,
+ Compare comp)
+
void partial_sort[Iter](Iter first, Iter middle, Iter last)
void partial_sort[Iter, Compare](Iter first, Iter middle, Iter last,
Compare comp)
@@ -22,10 +22,10 @@ cdef extern from "<algorithm>" namespace "std" nogil:
void sort[Iter](Iter first, Iter last)
void sort[Iter, Compare](Iter first, Iter last, Compare comp)
- # Removing duplicates
- Iter unique[Iter](Iter first, Iter last)
- Iter unique[Iter, BinaryPredicate](Iter first, Iter last, BinaryPredicate p)
-
+ # Removing duplicates
+ Iter unique[Iter](Iter first, Iter last)
+ Iter unique[Iter, BinaryPredicate](Iter first, Iter last, BinaryPredicate p)
+
# Binary heaps (priority queues)
void make_heap[Iter](Iter first, Iter last)
void make_heap[Iter, Compare](Iter first, Iter last, Compare comp)
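
For reference (not part of the patch): a sketch combining the restored lower_bound/unique declarations with std::vector; names are illustrative and the list-to-vector coercion is Cython's built-in container conversion.

    # distutils: language = c++
    # algo_demo.pyx (illustrative)
    from libcpp.vector cimport vector
    from libcpp.algorithm cimport sort, unique, lower_bound

    def dedup_and_find(data, int needle):
        cdef vector[int] v = data                 # list -> std::vector copy
        sort(v.begin(), v.end())
        # unique() only compacts adjacent runs, hence the sort first;
        # erase() then drops the leftover tail.
        v.erase(unique(v.begin(), v.end()), v.end())
        cdef size_t idx = lower_bound(v.begin(), v.end(), needle) - v.begin()
        return v, idx                             # vector -> list on return
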
diff --git a/contrib/tools/cython/Cython/Includes/libcpp/deque.pxd b/contrib/tools/cython/Cython/Includes/libcpp/deque.pxd
index baf5514cde..9e2b2291d0 100644
--- a/contrib/tools/cython/Cython/Includes/libcpp/deque.pxd
+++ b/contrib/tools/cython/Cython/Includes/libcpp/deque.pxd
@@ -1,44 +1,44 @@
cdef extern from "<deque>" namespace "std" nogil:
cdef cppclass deque[T,ALLOCATOR=*]:
- ctypedef T value_type
- ctypedef ALLOCATOR allocator_type
-
- # these should really be allocator_type.size_type and
- # allocator_type.difference_type to be true to the C++ definition
+ ctypedef T value_type
+ ctypedef ALLOCATOR allocator_type
+
+ # these should really be allocator_type.size_type and
+ # allocator_type.difference_type to be true to the C++ definition
# but cython doesn't support deferred access on template arguments
- ctypedef size_t size_type
- ctypedef ptrdiff_t difference_type
-
+ ctypedef size_t size_type
+ ctypedef ptrdiff_t difference_type
+
cppclass iterator:
T& operator*()
iterator operator++()
iterator operator--()
- iterator operator+(size_type)
- iterator operator-(size_type)
- difference_type operator-(iterator)
+ iterator operator+(size_type)
+ iterator operator-(size_type)
+ difference_type operator-(iterator)
bint operator==(iterator)
bint operator!=(iterator)
- bint operator<(iterator)
- bint operator>(iterator)
- bint operator<=(iterator)
- bint operator>=(iterator)
+ bint operator<(iterator)
+ bint operator>(iterator)
+ bint operator<=(iterator)
+ bint operator>=(iterator)
cppclass reverse_iterator:
T& operator*()
- reverse_iterator operator++()
- reverse_iterator operator--()
- reverse_iterator operator+(size_type)
- reverse_iterator operator-(size_type)
- difference_type operator-(reverse_iterator)
+ reverse_iterator operator++()
+ reverse_iterator operator--()
+ reverse_iterator operator+(size_type)
+ reverse_iterator operator-(size_type)
+ difference_type operator-(reverse_iterator)
bint operator==(reverse_iterator)
bint operator!=(reverse_iterator)
- bint operator<(reverse_iterator)
- bint operator>(reverse_iterator)
- bint operator<=(reverse_iterator)
- bint operator>=(reverse_iterator)
+ bint operator<(reverse_iterator)
+ bint operator>(reverse_iterator)
+ bint operator<=(reverse_iterator)
+ bint operator>=(reverse_iterator)
cppclass const_iterator(iterator):
pass
- cppclass const_reverse_iterator(reverse_iterator):
- pass
+ cppclass const_reverse_iterator(reverse_iterator):
+ pass
deque() except +
deque(deque&) except +
deque(size_t) except +
diff --git a/contrib/tools/cython/Cython/Includes/libcpp/functional.pxd b/contrib/tools/cython/Cython/Includes/libcpp/functional.pxd
index 413a5a8e80..94cbd9e1dd 100644
--- a/contrib/tools/cython/Cython/Includes/libcpp/functional.pxd
+++ b/contrib/tools/cython/Cython/Includes/libcpp/functional.pxd
@@ -1,13 +1,13 @@
-cdef extern from "<functional>" namespace "std" nogil:
- cdef cppclass function[T]:
- function() except +
- function(T*) except +
- function(function&) except +
- function(void*) except +
-
- function operator=(T*)
- function operator=(function&)
- function operator=(void*)
- function operator=[U](U)
-
- bint operator bool()
+cdef extern from "<functional>" namespace "std" nogil:
+ cdef cppclass function[T]:
+ function() except +
+ function(T*) except +
+ function(function&) except +
+ function(void*) except +
+
+ function operator=(T*)
+ function operator=(function&)
+ function operator=(void*)
+ function operator=[U](U)
+
+ bint operator bool()
diff --git a/contrib/tools/cython/Cython/Includes/libcpp/iterator.pxd b/contrib/tools/cython/Cython/Includes/libcpp/iterator.pxd
index 680ba8f4bd..e0f8bd8d6e 100644
--- a/contrib/tools/cython/Cython/Includes/libcpp/iterator.pxd
+++ b/contrib/tools/cython/Cython/Includes/libcpp/iterator.pxd
@@ -14,7 +14,7 @@ cdef extern from "<iterator>" namespace "std" nogil:
pass
cdef cppclass random_access_iterator_tag(bidirectional_iterator_tag):
pass
-
+
cdef cppclass back_insert_iterator[T](iterator[output_iterator_tag,void,void,void,void]):
pass
cdef cppclass front_insert_iterator[T](iterator[output_iterator_tag,void,void,void,void]):
@@ -29,4 +29,4 @@ cdef extern from "<iterator>" namespace "std" nogil:
##insert_iterator<Container> inserter (Container& x, typename Container::iterator it)
insert_iterator[CONTAINER] inserter[CONTAINER,ITERATOR](CONTAINER &, ITERATOR)
-
+
diff --git a/contrib/tools/cython/Cython/Includes/libcpp/limits.pxd b/contrib/tools/cython/Cython/Includes/libcpp/limits.pxd
index 045fe5efdd..c325263b72 100644
--- a/contrib/tools/cython/Cython/Includes/libcpp/limits.pxd
+++ b/contrib/tools/cython/Cython/Includes/libcpp/limits.pxd
@@ -5,7 +5,7 @@ cdef extern from "<limits>" namespace "std" nogil:
round_to_nearest = 1
round_toward_infinity = 2
round_toward_neg_infinity = 3
-
+
enum float_denorm_style:
denorm_indeterminate = -1
denorm_absent = 0
@@ -37,7 +37,7 @@ cdef extern from "<limits>" namespace "std" nogil:
const int min_exponent10
const int max_exponent
const int max_exponent10
-
+
const bint has_infinity
const bint has_quiet_NaN
const bint has_signaling_NaN
diff --git a/contrib/tools/cython/Cython/Includes/libcpp/memory.pxd b/contrib/tools/cython/Cython/Includes/libcpp/memory.pxd
index a92c115e38..2151c1ec7f 100644
--- a/contrib/tools/cython/Cython/Includes/libcpp/memory.pxd
+++ b/contrib/tools/cython/Cython/Includes/libcpp/memory.pxd
@@ -16,8 +16,8 @@ cdef extern from "<memory>" namespace "std" nogil:
void construct( T *, const T &) #C++98. The C++11 version is variadic AND perfect-forwarding
void destroy(T *) #C++98
void destroy[U](U *) #unique_ptr unit tests fail w/this
-
-
+
+
cdef cppclass unique_ptr[T,DELETER=*]:
unique_ptr()
unique_ptr(nullptr_t)
diff --git a/contrib/tools/cython/Cython/Includes/libcpp/string.pxd b/contrib/tools/cython/Cython/Includes/libcpp/string.pxd
index e72ee9b03a..a894144f1f 100644
--- a/contrib/tools/cython/Cython/Includes/libcpp/string.pxd
+++ b/contrib/tools/cython/Cython/Includes/libcpp/string.pxd
@@ -164,30 +164,30 @@ cdef extern from "<string>" namespace "std" nogil:
string substr(size_t pos) except +
string substr()
- #string& operator= (const string&)
- #string& operator= (const char*)
+ #string& operator= (const string&)
+ #string& operator= (const char*)
#string& operator= (char)
string operator+ (const string&) except +
string operator+ (const char*) except +
- bint operator==(const string&)
- bint operator==(const char*)
+ bint operator==(const string&)
+ bint operator==(const char*)
bint operator!= (const string&)
bint operator!= (const char*)
- bint operator< (const string&)
- bint operator< (const char*)
+ bint operator< (const string&)
+ bint operator< (const char*)
- bint operator> (const string&)
- bint operator> (const char*)
+ bint operator> (const string&)
+ bint operator> (const char*)
- bint operator<= (const string&)
- bint operator<= (const char*)
+ bint operator<= (const string&)
+ bint operator<= (const char*)
- bint operator>= (const string&)
- bint operator>= (const char*)
+ bint operator>= (const string&)
+ bint operator>= (const char*)
string to_string(int val) except +
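
For reference (not part of the patch): the comparison operators restored above map directly onto their C++ counterparts; a minimal sketch with illustrative names.

    # distutils: language = c++
    # string_demo.pyx (illustrative)
    from libcpp.string cimport string

    def exclaim(bytes data):
        cdef string s = data      # bytes -> std::string copy
        s.append(b"!")
        # operator==(const char*) comes straight from this pxd.
        return s, s == b"hi!"     # std::string coerces back to bytes
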
diff --git a/contrib/tools/cython/Cython/Includes/libcpp/unordered_map.pxd b/contrib/tools/cython/Cython/Includes/libcpp/unordered_map.pxd
index 62b05f1a4b..a00fbbed28 100644
--- a/contrib/tools/cython/Cython/Includes/libcpp/unordered_map.pxd
+++ b/contrib/tools/cython/Cython/Includes/libcpp/unordered_map.pxd
@@ -43,8 +43,8 @@ cdef extern from "<unordered_map>" namespace "std" nogil:
const_iterator const_end "end"()
pair[iterator, iterator] equal_range(T&)
pair[const_iterator, const_iterator] const_equal_range "equal_range"(const T&)
- iterator erase(iterator)
- iterator erase(iterator, iterator)
+ iterator erase(iterator)
+ iterator erase(iterator, iterator)
size_t erase(T&)
iterator find(T&)
const_iterator const_find "find"(T&)
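
For reference (not part of the patch): a sketch using operator[] from this pxd; it assumes Cython's automatic unordered_map-to-dict coercion, and the names are illustrative.

    # distutils: language = c++
    # umap_demo.pyx (illustrative)
    from libcpp.unordered_map cimport unordered_map

    def count_bytes(bytes data):
        cdef unordered_map[int, int] counts
        cdef unsigned char c
        for c in data:
            counts[c] = counts[c] + 1   # operator[] default-inserts 0
        return counts                   # coerces to a Python dict
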
diff --git a/contrib/tools/cython/Cython/Includes/libcpp/vector.pxd b/contrib/tools/cython/Cython/Includes/libcpp/vector.pxd
index 8c77e302bd..9b007dd0c7 100644
--- a/contrib/tools/cython/Cython/Includes/libcpp/vector.pxd
+++ b/contrib/tools/cython/Cython/Includes/libcpp/vector.pxd
@@ -2,20 +2,20 @@ cdef extern from "<vector>" namespace "std" nogil:
cdef cppclass vector[T,ALLOCATOR=*]:
ctypedef T value_type
ctypedef ALLOCATOR allocator_type
-
- # these should really be allocator_type.size_type and
- # allocator_type.difference_type to be true to the C++ definition
+
+ # these should really be allocator_type.size_type and
+ # allocator_type.difference_type to be true to the C++ definition
# but cython doesn't support deferred access on template arguments
- ctypedef size_t size_type
- ctypedef ptrdiff_t difference_type
-
+ ctypedef size_t size_type
+ ctypedef ptrdiff_t difference_type
+
cppclass iterator:
T& operator*()
iterator operator++()
iterator operator--()
- iterator operator+(size_type)
- iterator operator-(size_type)
- difference_type operator-(iterator)
+ iterator operator+(size_type)
+ iterator operator-(size_type)
+ difference_type operator-(iterator)
bint operator==(iterator)
bint operator!=(iterator)
bint operator<(iterator)
@@ -24,11 +24,11 @@ cdef extern from "<vector>" namespace "std" nogil:
bint operator>=(iterator)
cppclass reverse_iterator:
T& operator*()
- reverse_iterator operator++()
- reverse_iterator operator--()
- reverse_iterator operator+(size_type)
- reverse_iterator operator-(size_type)
- difference_type operator-(reverse_iterator)
+ reverse_iterator operator++()
+ reverse_iterator operator--()
+ reverse_iterator operator+(size_type)
+ reverse_iterator operator-(size_type)
+ difference_type operator-(reverse_iterator)
bint operator==(reverse_iterator)
bint operator!=(reverse_iterator)
bint operator<(reverse_iterator)
@@ -41,10 +41,10 @@ cdef extern from "<vector>" namespace "std" nogil:
pass
vector() except +
vector(vector&) except +
- vector(size_type) except +
- vector(size_type, T&) except +
+ vector(size_type) except +
+ vector(size_type, T&) except +
#vector[input_iterator](input_iterator, input_iterator)
- T& operator[](size_type)
+ T& operator[](size_type)
#vector& operator=(vector&)
bint operator==(vector&, vector&)
bint operator!=(vector&, vector&)
@@ -52,13 +52,13 @@ cdef extern from "<vector>" namespace "std" nogil:
bint operator>(vector&, vector&)
bint operator<=(vector&, vector&)
bint operator>=(vector&, vector&)
- void assign(size_type, const T&)
+ void assign(size_type, const T&)
void assign[input_iterator](input_iterator, input_iterator) except +
- T& at(size_type) except +
+ T& at(size_type) except +
T& back()
iterator begin()
const_iterator const_begin "begin"()
- size_type capacity()
+ size_type capacity()
void clear()
bint empty()
iterator end()
@@ -67,19 +67,19 @@ cdef extern from "<vector>" namespace "std" nogil:
iterator erase(iterator, iterator)
T& front()
iterator insert(iterator, const T&) except +
- iterator insert(iterator, size_type, const T&) except +
- iterator insert[Iter](iterator, Iter, Iter) except +
- size_type max_size()
+ iterator insert(iterator, size_type, const T&) except +
+ iterator insert[Iter](iterator, Iter, Iter) except +
+ size_type max_size()
void pop_back()
void push_back(T&) except +
reverse_iterator rbegin()
- const_reverse_iterator const_rbegin "crbegin"()
+ const_reverse_iterator const_rbegin "crbegin"()
reverse_iterator rend()
- const_reverse_iterator const_rend "crend"()
- void reserve(size_type)
- void resize(size_type) except +
- void resize(size_type, T&) except +
- size_type size()
+ const_reverse_iterator const_rend "crend"()
+ void reserve(size_type)
+ void resize(size_type) except +
+ void resize(size_type, T&) except +
+ size_type size()
void swap(vector&)
# C++11 methods
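
For reference (not part of the patch): the reserve/at/size declarations restored above in one illustrative sketch.

    # distutils: language = c++
    # vector_demo.pyx (illustrative)
    from libcpp.vector cimport vector

    def squares(size_t n):
        cdef vector[long] v
        v.reserve(n)              # one allocation up front
        cdef size_t i
        for i in range(n):
            v.push_back(<long>(i * i))
        # at() is declared `except +`, so std::out_of_range would surface
        # as a Python exception rather than undefined behaviour.
        return v.at(v.size() - 1) if v.size() else None
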
diff --git a/contrib/tools/cython/Cython/Includes/numpy/__init__.pxd b/contrib/tools/cython/Cython/Includes/numpy/__init__.pxd
index ca49373471..15700c05ef 100644
--- a/contrib/tools/cython/Cython/Includes/numpy/__init__.pxd
+++ b/contrib/tools/cython/Cython/Includes/numpy/__init__.pxd
@@ -18,7 +18,7 @@ DEF _buffer_format_string_len = 255
cimport cpython.buffer as pybuf
from cpython.ref cimport Py_INCREF
-from cpython.mem cimport PyObject_Malloc, PyObject_Free
+from cpython.mem cimport PyObject_Malloc, PyObject_Free
from cpython.object cimport PyObject, PyTypeObject
from cpython.type cimport type
cimport libc.stdio as stdio
@@ -52,8 +52,8 @@ cdef extern from "numpy/arrayobject.h":
NPY_STRING
NPY_UNICODE
NPY_VOID
- NPY_DATETIME
- NPY_TIMEDELTA
+ NPY_DATETIME
+ NPY_TIMEDELTA
NPY_NTYPES
NPY_NOTYPE
@@ -90,7 +90,7 @@ cdef extern from "numpy/arrayobject.h":
NPY_ANYORDER
NPY_CORDER
NPY_FORTRANORDER
- NPY_KEEPORDER
+ NPY_KEEPORDER
ctypedef enum NPY_CASTING:
NPY_NO_CASTING
@@ -194,12 +194,12 @@ cdef extern from "numpy/arrayobject.h":
ctypedef void (*PyArray_VectorUnaryFunc)(void *, void *, npy_intp, void *, void *)
- ctypedef struct PyArray_ArrayDescr:
- # shape is a tuple, but Cython doesn't support "tuple shape"
- # inside a non-PyObject declaration, so we have to declare it
- # as just a PyObject*.
- PyObject* shape
-
+ ctypedef struct PyArray_ArrayDescr:
+ # shape is a tuple, but Cython doesn't support "tuple shape"
+ # inside a non-PyObject declaration, so we have to declare it
+ # as just a PyObject*.
+ PyObject* shape
+
ctypedef struct PyArray_Descr:
pass
@@ -209,11 +209,11 @@ cdef extern from "numpy/arrayobject.h":
cdef PyTypeObject* typeobj
cdef char kind
cdef char type
- # Numpy sometimes mutates this without warning (e.g. it'll
- # sometimes change "|" to "<" in shared dtype objects on
- # little-endian machines). If this matters to you, use
- # PyArray_IsNativeByteOrder(dtype.byteorder) instead of
- # directly accessing this field.
+ # Numpy sometimes mutates this without warning (e.g. it'll
+ # sometimes change "|" to "<" in shared dtype objects on
+ # little-endian machines). If this matters to you, use
+ # PyArray_IsNativeByteOrder(dtype.byteorder) instead of
+ # directly accessing this field.
cdef char byteorder
cdef char flags
cdef int type_num
@@ -221,10 +221,10 @@ cdef extern from "numpy/arrayobject.h":
cdef int alignment
cdef dict fields
cdef tuple names
- # Use PyDataType_HASSUBARRAY to test whether this field is
- # valid (the pointer can be NULL). Most users should access
- # this field via the inline helper method PyDataType_SHAPE.
- cdef PyArray_ArrayDescr* subarray
+ # Use PyDataType_HASSUBARRAY to test whether this field is
+ # valid (the pointer can be NULL). Most users should access
+ # this field via the inline helper method PyDataType_SHAPE.
+ cdef PyArray_ArrayDescr* subarray
ctypedef class numpy.flatiter [object PyArrayIterObject, check_size ignore]:
# Use through macros
@@ -257,11 +257,11 @@ cdef extern from "numpy/arrayobject.h":
# -- the details of this may change.
def __getbuffer__(ndarray self, Py_buffer* info, int flags):
# This implementation of getbuffer is geared towards Cython
- # requirements, and does not yet fulfill the PEP.
+ # requirements, and does not yet fulfill the PEP.
# In particular strided access is always provided regardless
# of flags
- cdef int i, ndim
+ cdef int i, ndim
cdef int endian_detector = 1
cdef bint little_endian = ((<char*>&endian_detector)[0] != 0)
@@ -277,10 +277,10 @@ cdef extern from "numpy/arrayobject.h":
info.buf = PyArray_DATA(self)
info.ndim = ndim
- if sizeof(npy_intp) != sizeof(Py_ssize_t):
+ if sizeof(npy_intp) != sizeof(Py_ssize_t):
# Allocate new buffer for strides and shape info.
# This is allocated as one block, strides first.
- info.strides = <Py_ssize_t*>PyObject_Malloc(sizeof(Py_ssize_t) * 2 * <size_t>ndim)
+ info.strides = <Py_ssize_t*>PyObject_Malloc(sizeof(Py_ssize_t) * 2 * <size_t>ndim)
info.shape = info.strides + ndim
for i in range(ndim):
info.strides[i] = PyArray_STRIDES(self)[i]
@@ -297,9 +297,9 @@ cdef extern from "numpy/arrayobject.h":
cdef dtype descr = <dtype>PyArray_DESCR(self)
cdef int offset
- info.obj = self
+ info.obj = self
- if not PyDataType_HASFIELDS(descr):
+ if not PyDataType_HASFIELDS(descr):
t = descr.type_num
if ((descr.byteorder == c'>' and little_endian) or
(descr.byteorder == c'<' and not little_endian)):
@@ -326,7 +326,7 @@ cdef extern from "numpy/arrayobject.h":
info.format = f
return
else:
- info.format = <char*>PyObject_Malloc(_buffer_format_string_len)
+ info.format = <char*>PyObject_Malloc(_buffer_format_string_len)
info.format[0] = c'^' # Native data types, manual alignment
offset = 0
f = _util_dtypestring(descr, info.format + 1,
@@ -336,9 +336,9 @@ cdef extern from "numpy/arrayobject.h":
def __releasebuffer__(ndarray self, Py_buffer* info):
if PyArray_HASFIELDS(self):
- PyObject_Free(info.format)
+ PyObject_Free(info.format)
if sizeof(npy_intp) != sizeof(Py_ssize_t):
- PyObject_Free(info.strides)
+ PyObject_Free(info.strides)
# info.shape was stored after info.strides in the same block
ctypedef unsigned char npy_bool
@@ -388,28 +388,28 @@ cdef extern from "numpy/arrayobject.h":
double imag
ctypedef struct npy_clongdouble:
- long double real
- long double imag
+ long double real
+ long double imag
ctypedef struct npy_complex64:
- float real
- float imag
+ float real
+ float imag
ctypedef struct npy_complex128:
double real
double imag
ctypedef struct npy_complex160:
- long double real
- long double imag
+ long double real
+ long double imag
ctypedef struct npy_complex192:
- long double real
- long double imag
+ long double real
+ long double imag
ctypedef struct npy_complex256:
- long double real
- long double imag
+ long double real
+ long double imag
ctypedef struct PyArray_Dims:
npy_intp *ptr
@@ -476,7 +476,7 @@ cdef extern from "numpy/arrayobject.h":
bint PyDataType_ISEXTENDED(dtype)
bint PyDataType_ISOBJECT(dtype)
bint PyDataType_HASFIELDS(dtype)
- bint PyDataType_HASSUBARRAY(dtype)
+ bint PyDataType_HASSUBARRAY(dtype)
bint PyArray_ISBOOL(ndarray)
bint PyArray_ISUNSIGNED(ndarray)
@@ -832,12 +832,12 @@ cdef inline object PyArray_MultiIterNew4(a, b, c, d):
cdef inline object PyArray_MultiIterNew5(a, b, c, d, e):
return PyArray_MultiIterNew(5, <void*>a, <void*>b, <void*>c, <void*> d, <void*> e)
-cdef inline tuple PyDataType_SHAPE(dtype d):
- if PyDataType_HASSUBARRAY(d):
- return <tuple>d.subarray.shape
- else:
- return ()
-
+cdef inline tuple PyDataType_SHAPE(dtype d):
+ if PyDataType_HASSUBARRAY(d):
+ return <tuple>d.subarray.shape
+ else:
+ return ()
+
cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL:
# Recursive utility function used in __getbuffer__ to get format
# string. The new location in the format string is returned.
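
For reference (not part of the patch): PyDataType_SHAPE, the inline helper defined above, is cimportable; a sketch with illustrative names. E.g. subarray_shape(np.dtype((np.int32, (2, 3)))) gives (2, 3), while a plain np.dtype(np.int32) gives ().

    # np_demo.pyx (illustrative; needs NumPy headers at build time)
    import numpy as np
    cimport numpy as cnp

    cnp.import_array()

    def subarray_shape(cnp.dtype dt):
        # () for plain dtypes, the subarray shape tuple otherwise; this
        # wraps the PyDataType_HASSUBARRAY check shown in the diff.
        return cnp.PyDataType_SHAPE(dt)
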
diff --git a/contrib/tools/cython/Cython/Includes/posix/signal.pxd b/contrib/tools/cython/Cython/Includes/posix/signal.pxd
index 375799bba1..9fe7d9c36c 100644
--- a/contrib/tools/cython/Cython/Includes/posix/signal.pxd
+++ b/contrib/tools/cython/Cython/Includes/posix/signal.pxd
@@ -31,11 +31,11 @@ cdef extern from "<signal.h>" nogil:
sigset_t sa_mask
int sa_flags
- ctypedef struct stack_t:
- void *ss_sp
- int ss_flags
- size_t ss_size
-
+ ctypedef struct stack_t:
+ void *ss_sp
+ int ss_flags
+ size_t ss_size
+
enum: SA_NOCLDSTOP
enum: SIG_BLOCK
enum: SIG_UNBLOCK
@@ -69,5 +69,5 @@ cdef extern from "<signal.h>" nogil:
int sigemptyset (sigset_t *)
int sigfillset (sigset_t *)
int sigismember (const sigset_t *, int)
-
- int sigaltstack(const stack_t *, stack_t *)
+
+ int sigaltstack(const stack_t *, stack_t *)
diff --git a/contrib/tools/cython/Cython/Includes/posix/stat.pxd b/contrib/tools/cython/Cython/Includes/posix/stat.pxd
index 090ab19f95..69c2eca166 100644
--- a/contrib/tools/cython/Cython/Includes/posix/stat.pxd
+++ b/contrib/tools/cython/Cython/Includes/posix/stat.pxd
@@ -18,11 +18,11 @@ cdef extern from "<sys/stat.h>" nogil:
time_t st_mtime
time_t st_ctime
- # st_birthtime exists on *BSD and OS X.
- # Under Linux, defining it here does not hurt. Compilation under Linux
- # will only (and rightfully) fail when attempting to use the field.
- time_t st_birthtime
-
+ # st_birthtime exists on *BSD and OS X.
+ # Under Linux, defining it here does not hurt. Compilation under Linux
+ # will only (and rightfully) fail when attempting to use the field.
+ time_t st_birthtime
+
# POSIX prescribes including both <sys/stat.h> and <unistd.h> for these
cdef extern from "<unistd.h>" nogil:
int fchmod(int, mode_t)
diff --git a/contrib/tools/cython/Cython/Includes/posix/types.pxd b/contrib/tools/cython/Cython/Includes/posix/types.pxd
index e751a892f0..308f2954ee 100644
--- a/contrib/tools/cython/Cython/Includes/posix/types.pxd
+++ b/contrib/tools/cython/Cython/Includes/posix/types.pxd
@@ -22,8 +22,8 @@ cdef extern from "<sys/types.h>":
ctypedef long nlink_t
ctypedef long off_t
ctypedef long pid_t
- ctypedef struct sigset_t:
- pass
+ ctypedef struct sigset_t:
+ pass
ctypedef long suseconds_t
ctypedef long time_t
ctypedef long timer_t
diff --git a/contrib/tools/cython/Cython/Parser/Grammar b/contrib/tools/cython/Cython/Parser/Grammar
index 6e6333a597..214e36d5a3 100644
--- a/contrib/tools/cython/Cython/Parser/Grammar
+++ b/contrib/tools/cython/Cython/Parser/Grammar
@@ -127,7 +127,7 @@ arglist: argument (',' argument)* [',']
# to our LL(1) parser. Even though 'test' includes '*expr' in star_expr,
# we explicitly match '*' here, too, to give it proper precedence.
# Illegal combinations and orderings are blocked in ast.c:
-# multiple (test comp_for) arguments are blocked; keyword unpackings
+# multiple (test comp_for) arguments are blocked; keyword unpackings
# that precede iterable unpackings are blocked; etc.
argument: ( test [comp_for] |
test '=' test |
diff --git a/contrib/tools/cython/Cython/Plex/Scanners.pxd b/contrib/tools/cython/Cython/Plex/Scanners.pxd
index f6a8dea456..6e75f55e61 100644
--- a/contrib/tools/cython/Cython/Plex/Scanners.pxd
+++ b/contrib/tools/cython/Cython/Plex/Scanners.pxd
@@ -38,10 +38,10 @@ cdef class Scanner:
##cdef tuple position(self) # used frequently by Parsing.py
@cython.final
- @cython.locals(cur_pos=Py_ssize_t, cur_line=Py_ssize_t, cur_line_start=Py_ssize_t,
- input_state=long, next_pos=Py_ssize_t, state=dict,
- buf_start_pos=Py_ssize_t, buf_len=Py_ssize_t, buf_index=Py_ssize_t,
- trace=bint, discard=Py_ssize_t, data=unicode, buffer=unicode)
+ @cython.locals(cur_pos=Py_ssize_t, cur_line=Py_ssize_t, cur_line_start=Py_ssize_t,
+ input_state=long, next_pos=Py_ssize_t, state=dict,
+ buf_start_pos=Py_ssize_t, buf_len=Py_ssize_t, buf_index=Py_ssize_t,
+ trace=bint, discard=Py_ssize_t, data=unicode, buffer=unicode)
cdef run_machine_inlined(self)
@cython.final
diff --git a/contrib/tools/cython/Cython/Runtime/refnanny.pyx b/contrib/tools/cython/Cython/Runtime/refnanny.pyx
index f09e01ede8..d4b873fe97 100644
--- a/contrib/tools/cython/Cython/Runtime/refnanny.pyx
+++ b/contrib/tools/cython/Cython/Runtime/refnanny.pyx
@@ -1,4 +1,4 @@
-# cython: language_level=3, auto_pickle=False
+# cython: language_level=3, auto_pickle=False
from cpython.ref cimport PyObject, Py_INCREF, Py_DECREF, Py_XDECREF, Py_XINCREF
from cpython.exc cimport PyErr_Fetch, PyErr_Restore
diff --git a/contrib/tools/cython/Cython/Shadow.py b/contrib/tools/cython/Cython/Shadow.py
index ecd13504a1..e7b9e4f612 100644
--- a/contrib/tools/cython/Cython/Shadow.py
+++ b/contrib/tools/cython/Cython/Shadow.py
@@ -109,12 +109,12 @@ cclass = ccall = cfunc = _EmptyDecoratorAndManager()
returns = wraparound = boundscheck = initializedcheck = nonecheck = \
embedsignature = cdivision = cdivision_warnings = \
- always_allows_keywords = profile = linetrace = infer_types = \
+ always_allows_keywords = profile = linetrace = infer_types = \
unraisable_tracebacks = freelist = \
- lambda _: _EmptyDecoratorAndManager()
+ lambda _: _EmptyDecoratorAndManager()
+
+exceptval = lambda _=None, check=True: _EmptyDecoratorAndManager()
-exceptval = lambda _=None, check=True: _EmptyDecoratorAndManager()
-
overflowcheck = lambda _: _EmptyDecoratorAndManager()
optimization = _Optimization()
@@ -149,7 +149,7 @@ def cdiv(a, b):
q = a / b
if q < 0:
q += 1
- return q
+ return q
def cmod(a, b):
r = a % b
@@ -396,7 +396,7 @@ py_complex = typedef(complex, "double complex")
int_types = ['char', 'short', 'Py_UNICODE', 'int', 'Py_UCS4', 'long', 'longlong', 'Py_ssize_t', 'size_t']
float_types = ['longdouble', 'double', 'float']
complex_types = ['longdoublecomplex', 'doublecomplex', 'floatcomplex', 'complex']
-other_types = ['bint', 'void', 'Py_tss_t']
+other_types = ['bint', 'void', 'Py_tss_t']
to_repr = {
'longlong': 'long long',
@@ -431,18 +431,18 @@ for name in complex_types:
gs[name] = typedef(py_complex, to_repr(name, name))
bint = typedef(bool, "bint")
-void = typedef(None, "void")
-Py_tss_t = typedef(None, "Py_tss_t")
+void = typedef(None, "void")
+Py_tss_t = typedef(None, "Py_tss_t")
for t in int_types + float_types + complex_types + other_types:
for i in range(1, 4):
- gs["%s_%s" % ('p'*i, t)] = gs[t]._pointer(i)
+ gs["%s_%s" % ('p'*i, t)] = gs[t]._pointer(i)
+
+NULL = gs['p_void'](0)
-NULL = gs['p_void'](0)
+# looks like 'gs' has some users out there by now...
+#del gs
-# looks like 'gs' has some users out there by now...
-#del gs
-
integral = floating = numeric = _FusedType()
type_ordering = [py_int, py_long, py_float, py_complex]
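
For reference (not part of the patch): Shadow.py's _EmptyDecoratorAndManager assignments above are what make compiler directives harmless under plain CPython; an illustrative pure-Python-mode sketch that runs uncompiled.

    # pure_demo.py (illustrative): the directives are no-ops here and only
    # take effect when the file is actually compiled by Cython.
    import cython

    @cython.boundscheck(False)
    @cython.wraparound(False)
    def total(xs):
        s = 0
        for x in xs:
            s += x
        return s
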
diff --git a/contrib/tools/cython/Cython/StringIOTree.pxd b/contrib/tools/cython/Cython/StringIOTree.pxd
index 2f2f4b7111..20455c9dfb 100644
--- a/contrib/tools/cython/Cython/StringIOTree.pxd
+++ b/contrib/tools/cython/Cython/StringIOTree.pxd
@@ -1,17 +1,17 @@
-cimport cython
-
-cdef class StringIOTree:
- cdef public list prepended_children
- cdef public object stream
- cdef public object write
- cdef public list markers
-
- @cython.locals(x=StringIOTree)
- cpdef getvalue(self)
- @cython.locals(child=StringIOTree)
- cpdef copyto(self, target)
- cpdef commit(self)
- #def insert(self, iotree)
- #def insertion_point(self)
- @cython.locals(c=StringIOTree)
- cpdef allmarkers(self)
+cimport cython
+
+cdef class StringIOTree:
+ cdef public list prepended_children
+ cdef public object stream
+ cdef public object write
+ cdef public list markers
+
+ @cython.locals(x=StringIOTree)
+ cpdef getvalue(self)
+ @cython.locals(child=StringIOTree)
+ cpdef copyto(self, target)
+ cpdef commit(self)
+ #def insert(self, iotree)
+ #def insertion_point(self)
+ @cython.locals(c=StringIOTree)
+ cpdef allmarkers(self)
diff --git a/contrib/tools/cython/Cython/StringIOTree.py b/contrib/tools/cython/Cython/StringIOTree.py
index cba83c1181..d8239efeda 100644
--- a/contrib/tools/cython/Cython/StringIOTree.py
+++ b/contrib/tools/cython/Cython/StringIOTree.py
@@ -1,44 +1,44 @@
# cython: auto_pickle=False
-r"""
-Implements a buffer with insertion points. When you know you need to
-"get back" to a place and write more later, simply call insertion_point()
-at that spot and get a new StringIOTree object that is "left behind".
-
-EXAMPLE:
-
->>> a = StringIOTree()
->>> _= a.write('first\n')
->>> b = a.insertion_point()
->>> _= a.write('third\n')
->>> _= b.write('second\n')
->>> a.getvalue().split()
-['first', 'second', 'third']
-
->>> c = b.insertion_point()
->>> d = c.insertion_point()
->>> _= d.write('alpha\n')
->>> _= b.write('gamma\n')
->>> _= c.write('beta\n')
->>> b.getvalue().split()
-['second', 'alpha', 'beta', 'gamma']
-
->>> i = StringIOTree()
->>> d.insert(i)
->>> _= i.write('inserted\n')
->>> out = StringIO()
->>> a.copyto(out)
->>> out.getvalue().split()
-['first', 'second', 'alpha', 'inserted', 'beta', 'gamma', 'third']
-"""
-
-from __future__ import absolute_import #, unicode_literals
-
+r"""
+Implements a buffer with insertion points. When you know you need to
+"get back" to a place and write more later, simply call insertion_point()
+at that spot and get a new StringIOTree object that is "left behind".
+
+EXAMPLE:
+
+>>> a = StringIOTree()
+>>> _= a.write('first\n')
+>>> b = a.insertion_point()
+>>> _= a.write('third\n')
+>>> _= b.write('second\n')
+>>> a.getvalue().split()
+['first', 'second', 'third']
+
+>>> c = b.insertion_point()
+>>> d = c.insertion_point()
+>>> _= d.write('alpha\n')
+>>> _= b.write('gamma\n')
+>>> _= c.write('beta\n')
+>>> b.getvalue().split()
+['second', 'alpha', 'beta', 'gamma']
+
+>>> i = StringIOTree()
+>>> d.insert(i)
+>>> _= i.write('inserted\n')
+>>> out = StringIO()
+>>> a.copyto(out)
+>>> out.getvalue().split()
+['first', 'second', 'alpha', 'inserted', 'beta', 'gamma', 'third']
+"""
+
+from __future__ import absolute_import #, unicode_literals
+
try:
- # Prefer cStringIO since io.StringIO() does not support writing 'str' in Py2.
+ # Prefer cStringIO since io.StringIO() does not support writing 'str' in Py2.
from cStringIO import StringIO
except ImportError:
- from io import StringIO
+ from io import StringIO
class StringIOTree(object):
diff --git a/contrib/tools/cython/Cython/Tempita/_tempita.py b/contrib/tools/cython/Cython/Tempita/_tempita.py
index 82ab4e720d..587f6e4841 100644
--- a/contrib/tools/cython/Cython/Tempita/_tempita.py
+++ b/contrib/tools/cython/Cython/Tempita/_tempita.py
@@ -119,7 +119,7 @@ class Template(object):
self.default_namespace['start_braces'] = delimeters[0]
self.default_namespace['end_braces'] = delimeters[1]
self.delimeters = delimeters
-
+
self._unicode = is_unicode(content)
if name is None and stacklevel is not None:
try:
diff --git a/contrib/tools/cython/Cython/Tests/TestCodeWriter.py b/contrib/tools/cython/Cython/Tests/TestCodeWriter.py
index dc638fea92..42e457da20 100644
--- a/contrib/tools/cython/Cython/Tests/TestCodeWriter.py
+++ b/contrib/tools/cython/Cython/Tests/TestCodeWriter.py
@@ -4,7 +4,7 @@ class TestCodeWriter(CythonTest):
# CythonTest uses the CodeWriter heavily, so do some checking by
# roundtripping Cython code through the test framework.
- # Note that this test is dependent upon the normal Cython parser
+ # Note that this test is dependent upon the normal Cython parser
    # to generate the input trees to the CodeWriter. This saves *a lot*
# of time; better to spend that time writing other tests than perfecting
# this one...
diff --git a/contrib/tools/cython/Cython/Tests/TestJediTyper.py b/contrib/tools/cython/Cython/Tests/TestJediTyper.py
index 6530f18b9d..253adef171 100644
--- a/contrib/tools/cython/Cython/Tests/TestJediTyper.py
+++ b/contrib/tools/cython/Cython/Tests/TestJediTyper.py
@@ -129,7 +129,7 @@ class TestJediTyper(TransformTest):
variables = types.pop((None, (1, 0)))
self.assertFalse(types)
self.assertEqual({'a': set(['list']), 'b': set(['list']), 'c': set(['list']), 'd': set(['list'])}, variables)
-
+
def test_typing_function_list(self):
code = '''\
def func(x):
@@ -149,14 +149,14 @@ class TestJediTyper(TransformTest):
code = '''\
a = dict()
b = {i: i**2 for i in range(10)}
- c = a
+ c = a
'''
types = self._test(code)
self.assertIn((None, (1, 0)), types)
variables = types.pop((None, (1, 0)))
self.assertFalse(types)
self.assertEqual({'a': set(['dict']), 'b': set(['dict']), 'c': set(['dict'])}, variables)
-
+
def test_typing_function_dict(self):
code = '''\
def func(x):
@@ -186,7 +186,7 @@ class TestJediTyper(TransformTest):
variables = types.pop((None, (1, 0)))
self.assertFalse(types)
self.assertEqual({'a': set(['set']), 'c': set(['set']), 'd': set(['set']), 'e': set(['set'])}, variables)
-
+
def test_typing_function_set(self):
code = '''\
def func(x):
diff --git a/contrib/tools/cython/Cython/Utility/AsyncGen.c b/contrib/tools/cython/Cython/Utility/AsyncGen.c
index ce36cd68b0..9a11d6a129 100644
--- a/contrib/tools/cython/Cython/Utility/AsyncGen.c
+++ b/contrib/tools/cython/Cython/Utility/AsyncGen.c
@@ -1,234 +1,234 @@
-// This is copied from genobject.c in CPython 3.6.
-// Try to keep it in sync by doing this from time to time:
-// sed -e 's|__pyx_||ig' Cython/Utility/AsyncGen.c | diff -udw - cpython/Objects/genobject.c | less
-
-//////////////////// AsyncGenerator.proto ////////////////////
-//@requires: Coroutine.c::Coroutine
-
-#define __Pyx_AsyncGen_USED
-typedef struct {
- __pyx_CoroutineObject coro;
- PyObject *ag_finalizer;
- int ag_hooks_inited;
- int ag_closed;
-} __pyx_PyAsyncGenObject;
-
-static PyTypeObject *__pyx__PyAsyncGenWrappedValueType = 0;
-static PyTypeObject *__pyx__PyAsyncGenASendType = 0;
-static PyTypeObject *__pyx__PyAsyncGenAThrowType = 0;
-static PyTypeObject *__pyx_AsyncGenType = 0;
-
-#define __Pyx_AsyncGen_CheckExact(obj) (Py_TYPE(obj) == __pyx_AsyncGenType)
-#define __pyx_PyAsyncGenASend_CheckExact(o) \
- (Py_TYPE(o) == __pyx__PyAsyncGenASendType)
-#define __pyx_PyAsyncGenAThrow_CheckExact(o) \
- (Py_TYPE(o) == __pyx__PyAsyncGenAThrowType)
-
-static PyObject *__Pyx_async_gen_anext(PyObject *o);
-static CYTHON_INLINE PyObject *__Pyx_async_gen_asend_iternext(PyObject *o);
-static PyObject *__Pyx_async_gen_asend_send(PyObject *o, PyObject *arg);
-static PyObject *__Pyx_async_gen_asend_close(PyObject *o, PyObject *args);
-static PyObject *__Pyx_async_gen_athrow_close(PyObject *o, PyObject *args);
-
-static PyObject *__Pyx__PyAsyncGenValueWrapperNew(PyObject *val);
-
-
-static __pyx_CoroutineObject *__Pyx_AsyncGen_New(
- __pyx_coroutine_body_t body, PyObject *code, PyObject *closure,
- PyObject *name, PyObject *qualname, PyObject *module_name) {
- __pyx_PyAsyncGenObject *gen = PyObject_GC_New(__pyx_PyAsyncGenObject, __pyx_AsyncGenType);
- if (unlikely(!gen))
- return NULL;
- gen->ag_finalizer = NULL;
- gen->ag_closed = 0;
- gen->ag_hooks_inited = 0;
- return __Pyx__Coroutine_NewInit((__pyx_CoroutineObject*)gen, body, code, closure, name, qualname, module_name);
-}
-
-static int __pyx_AsyncGen_init(void);
-static void __Pyx_PyAsyncGen_Fini(void);
-
-//////////////////// AsyncGenerator.cleanup ////////////////////
-
-__Pyx_PyAsyncGen_Fini();
-
-//////////////////// AsyncGeneratorInitFinalizer ////////////////////
-
-// this is separated out because it needs more adaptation
-
-#if PY_VERSION_HEX < 0x030600B0
-static int __Pyx_async_gen_init_hooks(__pyx_PyAsyncGenObject *o) {
-#if 0
- // TODO: implement finalizer support in older Python versions
- PyThreadState *tstate;
- PyObject *finalizer;
- PyObject *firstiter;
-#endif
-
- if (likely(o->ag_hooks_inited)) {
- return 0;
- }
-
- o->ag_hooks_inited = 1;
-
-#if 0
- tstate = __Pyx_PyThreadState_Current;
-
- finalizer = tstate->async_gen_finalizer;
- if (finalizer) {
- Py_INCREF(finalizer);
- o->ag_finalizer = finalizer;
- }
-
- firstiter = tstate->async_gen_firstiter;
- if (firstiter) {
- PyObject *res;
-
- Py_INCREF(firstiter);
- res = __Pyx_PyObject_CallOneArg(firstiter, (PyObject*)o);
- Py_DECREF(firstiter);
- if (res == NULL) {
- return 1;
- }
- Py_DECREF(res);
- }
-#endif
-
- return 0;
-}
-#endif
-
-
-//////////////////// AsyncGenerator ////////////////////
-//@requires: AsyncGeneratorInitFinalizer
-//@requires: Coroutine.c::Coroutine
-//@requires: Coroutine.c::ReturnWithStopIteration
+// This is copied from genobject.c in CPython 3.6.
+// Try to keep it in sync by doing this from time to time:
+// sed -e 's|__pyx_||ig' Cython/Utility/AsyncGen.c | diff -udw - cpython/Objects/genobject.c | less
+
+//////////////////// AsyncGenerator.proto ////////////////////
+//@requires: Coroutine.c::Coroutine
+
+#define __Pyx_AsyncGen_USED
+typedef struct {
+ __pyx_CoroutineObject coro;
+ PyObject *ag_finalizer;
+ int ag_hooks_inited;
+ int ag_closed;
+} __pyx_PyAsyncGenObject;
+
+static PyTypeObject *__pyx__PyAsyncGenWrappedValueType = 0;
+static PyTypeObject *__pyx__PyAsyncGenASendType = 0;
+static PyTypeObject *__pyx__PyAsyncGenAThrowType = 0;
+static PyTypeObject *__pyx_AsyncGenType = 0;
+
+#define __Pyx_AsyncGen_CheckExact(obj) (Py_TYPE(obj) == __pyx_AsyncGenType)
+#define __pyx_PyAsyncGenASend_CheckExact(o) \
+ (Py_TYPE(o) == __pyx__PyAsyncGenASendType)
+#define __pyx_PyAsyncGenAThrow_CheckExact(o) \
+ (Py_TYPE(o) == __pyx__PyAsyncGenAThrowType)
+
+static PyObject *__Pyx_async_gen_anext(PyObject *o);
+static CYTHON_INLINE PyObject *__Pyx_async_gen_asend_iternext(PyObject *o);
+static PyObject *__Pyx_async_gen_asend_send(PyObject *o, PyObject *arg);
+static PyObject *__Pyx_async_gen_asend_close(PyObject *o, PyObject *args);
+static PyObject *__Pyx_async_gen_athrow_close(PyObject *o, PyObject *args);
+
+static PyObject *__Pyx__PyAsyncGenValueWrapperNew(PyObject *val);
+
+
+static __pyx_CoroutineObject *__Pyx_AsyncGen_New(
+ __pyx_coroutine_body_t body, PyObject *code, PyObject *closure,
+ PyObject *name, PyObject *qualname, PyObject *module_name) {
+ __pyx_PyAsyncGenObject *gen = PyObject_GC_New(__pyx_PyAsyncGenObject, __pyx_AsyncGenType);
+ if (unlikely(!gen))
+ return NULL;
+ gen->ag_finalizer = NULL;
+ gen->ag_closed = 0;
+ gen->ag_hooks_inited = 0;
+ return __Pyx__Coroutine_NewInit((__pyx_CoroutineObject*)gen, body, code, closure, name, qualname, module_name);
+}
+
+static int __pyx_AsyncGen_init(void);
+static void __Pyx_PyAsyncGen_Fini(void);
+
+//////////////////// AsyncGenerator.cleanup ////////////////////
+
+__Pyx_PyAsyncGen_Fini();
+
+//////////////////// AsyncGeneratorInitFinalizer ////////////////////
+
+// this is separated out because it needs more adaptation
+
+#if PY_VERSION_HEX < 0x030600B0
+static int __Pyx_async_gen_init_hooks(__pyx_PyAsyncGenObject *o) {
+#if 0
+ // TODO: implement finalizer support in older Python versions
+ PyThreadState *tstate;
+ PyObject *finalizer;
+ PyObject *firstiter;
+#endif
+
+ if (likely(o->ag_hooks_inited)) {
+ return 0;
+ }
+
+ o->ag_hooks_inited = 1;
+
+#if 0
+ tstate = __Pyx_PyThreadState_Current;
+
+ finalizer = tstate->async_gen_finalizer;
+ if (finalizer) {
+ Py_INCREF(finalizer);
+ o->ag_finalizer = finalizer;
+ }
+
+ firstiter = tstate->async_gen_firstiter;
+ if (firstiter) {
+ PyObject *res;
+
+ Py_INCREF(firstiter);
+ res = __Pyx_PyObject_CallOneArg(firstiter, (PyObject*)o);
+ Py_DECREF(firstiter);
+ if (res == NULL) {
+ return 1;
+ }
+ Py_DECREF(res);
+ }
+#endif
+
+ return 0;
+}
+#endif
+
+
+//////////////////// AsyncGenerator ////////////////////
+//@requires: AsyncGeneratorInitFinalizer
+//@requires: Coroutine.c::Coroutine
+//@requires: Coroutine.c::ReturnWithStopIteration
//@requires: ObjectHandling.c::PyObjectCall2Args
-//@requires: ObjectHandling.c::PyObject_GenericGetAttrNoDict
-
-PyDoc_STRVAR(__Pyx_async_gen_send_doc,
-"send(arg) -> send 'arg' into generator,\n\
-return next yielded value or raise StopIteration.");
-
-PyDoc_STRVAR(__Pyx_async_gen_close_doc,
-"close() -> raise GeneratorExit inside generator.");
-
-PyDoc_STRVAR(__Pyx_async_gen_throw_doc,
-"throw(typ[,val[,tb]]) -> raise exception in generator,\n\
-return next yielded value or raise StopIteration.");
-
-PyDoc_STRVAR(__Pyx_async_gen_await_doc,
-"__await__() -> return a representation that can be passed into the 'await' expression.");
-
-// COPY STARTS HERE:
-
-static PyObject *__Pyx_async_gen_asend_new(__pyx_PyAsyncGenObject *, PyObject *);
-static PyObject *__Pyx_async_gen_athrow_new(__pyx_PyAsyncGenObject *, PyObject *);
-
-static const char *__Pyx_NON_INIT_CORO_MSG = "can't send non-None value to a just-started coroutine";
-static const char *__Pyx_ASYNC_GEN_IGNORED_EXIT_MSG = "async generator ignored GeneratorExit";
-
-typedef enum {
- __PYX_AWAITABLE_STATE_INIT, /* new awaitable, has not yet been iterated */
- __PYX_AWAITABLE_STATE_ITER, /* being iterated */
- __PYX_AWAITABLE_STATE_CLOSED, /* closed */
-} __pyx_AwaitableState;
-
-typedef struct {
- PyObject_HEAD
- __pyx_PyAsyncGenObject *ags_gen;
-
- /* Can be NULL, when in the __anext__() mode (equivalent of "asend(None)") */
- PyObject *ags_sendval;
-
- __pyx_AwaitableState ags_state;
-} __pyx_PyAsyncGenASend;
-
-
-typedef struct {
- PyObject_HEAD
- __pyx_PyAsyncGenObject *agt_gen;
-
- /* Can be NULL, when in the "aclose()" mode (equivalent of "athrow(GeneratorExit)") */
- PyObject *agt_args;
-
- __pyx_AwaitableState agt_state;
-} __pyx_PyAsyncGenAThrow;
-
-
-typedef struct {
- PyObject_HEAD
- PyObject *agw_val;
-} __pyx__PyAsyncGenWrappedValue;
-
-
-#ifndef _PyAsyncGen_MAXFREELIST
-#define _PyAsyncGen_MAXFREELIST 80
-#endif
-
-// Freelists boost performance 6-10%; they also reduce memory
-// fragmentation, as _PyAsyncGenWrappedValue and PyAsyncGenASend
-// are short-lived objects that are instantiated for every
-// __anext__ call.
-
-static __pyx__PyAsyncGenWrappedValue *__Pyx_ag_value_freelist[_PyAsyncGen_MAXFREELIST];
-static int __Pyx_ag_value_freelist_free = 0;
-
-static __pyx_PyAsyncGenASend *__Pyx_ag_asend_freelist[_PyAsyncGen_MAXFREELIST];
-static int __Pyx_ag_asend_freelist_free = 0;
-
-#define __pyx__PyAsyncGenWrappedValue_CheckExact(o) \
- (Py_TYPE(o) == __pyx__PyAsyncGenWrappedValueType)
-
-
-static int
-__Pyx_async_gen_traverse(__pyx_PyAsyncGenObject *gen, visitproc visit, void *arg)
-{
- Py_VISIT(gen->ag_finalizer);
- return __Pyx_Coroutine_traverse((__pyx_CoroutineObject*)gen, visit, arg);
-}
-
-
-static PyObject *
-__Pyx_async_gen_repr(__pyx_CoroutineObject *o)
-{
- // avoid NULL pointer dereference for qualname during garbage collection
- return PyUnicode_FromFormat("<async_generator object %S at %p>",
- o->gi_qualname ? o->gi_qualname : Py_None, o);
-}
-
-
-#if PY_VERSION_HEX >= 0x030600B0
-static int
-__Pyx_async_gen_init_hooks(__pyx_PyAsyncGenObject *o)
-{
- PyThreadState *tstate;
- PyObject *finalizer;
- PyObject *firstiter;
-
- if (o->ag_hooks_inited) {
- return 0;
- }
-
- o->ag_hooks_inited = 1;
-
- tstate = __Pyx_PyThreadState_Current;
-
- finalizer = tstate->async_gen_finalizer;
- if (finalizer) {
- Py_INCREF(finalizer);
- o->ag_finalizer = finalizer;
- }
-
- firstiter = tstate->async_gen_firstiter;
- if (firstiter) {
- PyObject *res;
+//@requires: ObjectHandling.c::PyObject_GenericGetAttrNoDict
+
+PyDoc_STRVAR(__Pyx_async_gen_send_doc,
+"send(arg) -> send 'arg' into generator,\n\
+return next yielded value or raise StopIteration.");
+
+PyDoc_STRVAR(__Pyx_async_gen_close_doc,
+"close() -> raise GeneratorExit inside generator.");
+
+PyDoc_STRVAR(__Pyx_async_gen_throw_doc,
+"throw(typ[,val[,tb]]) -> raise exception in generator,\n\
+return next yielded value or raise StopIteration.");
+
+PyDoc_STRVAR(__Pyx_async_gen_await_doc,
+"__await__() -> return a representation that can be passed into the 'await' expression.");
+
+// COPY STARTS HERE:
+
+static PyObject *__Pyx_async_gen_asend_new(__pyx_PyAsyncGenObject *, PyObject *);
+static PyObject *__Pyx_async_gen_athrow_new(__pyx_PyAsyncGenObject *, PyObject *);
+
+static const char *__Pyx_NON_INIT_CORO_MSG = "can't send non-None value to a just-started coroutine";
+static const char *__Pyx_ASYNC_GEN_IGNORED_EXIT_MSG = "async generator ignored GeneratorExit";
+
+typedef enum {
+ __PYX_AWAITABLE_STATE_INIT, /* new awaitable, has not yet been iterated */
+ __PYX_AWAITABLE_STATE_ITER, /* being iterated */
+ __PYX_AWAITABLE_STATE_CLOSED, /* closed */
+} __pyx_AwaitableState;
+
+typedef struct {
+ PyObject_HEAD
+ __pyx_PyAsyncGenObject *ags_gen;
+
+ /* Can be NULL, when in the __anext__() mode (equivalent of "asend(None)") */
+ PyObject *ags_sendval;
+
+ __pyx_AwaitableState ags_state;
+} __pyx_PyAsyncGenASend;
+
+
+typedef struct {
+ PyObject_HEAD
+ __pyx_PyAsyncGenObject *agt_gen;
+
+ /* Can be NULL, when in the "aclose()" mode (equivalent of "athrow(GeneratorExit)") */
+ PyObject *agt_args;
+
+ __pyx_AwaitableState agt_state;
+} __pyx_PyAsyncGenAThrow;
+
+
+typedef struct {
+ PyObject_HEAD
+ PyObject *agw_val;
+} __pyx__PyAsyncGenWrappedValue;
+
+
+#ifndef _PyAsyncGen_MAXFREELIST
+#define _PyAsyncGen_MAXFREELIST 80
+#endif
+
+// Freelists boost performance 6-10%; they also reduce memory
+// fragmentation, as _PyAsyncGenWrappedValue and PyAsyncGenASend
+// are short-lived objects that are instantiated for every
+// __anext__ call.
+
+static __pyx__PyAsyncGenWrappedValue *__Pyx_ag_value_freelist[_PyAsyncGen_MAXFREELIST];
+static int __Pyx_ag_value_freelist_free = 0;
+
+static __pyx_PyAsyncGenASend *__Pyx_ag_asend_freelist[_PyAsyncGen_MAXFREELIST];
+static int __Pyx_ag_asend_freelist_free = 0;
+
+#define __pyx__PyAsyncGenWrappedValue_CheckExact(o) \
+ (Py_TYPE(o) == __pyx__PyAsyncGenWrappedValueType)
+
+
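The freelist block above caches up to `_PyAsyncGen_MAXFREELIST` dead objects per type instead of returning them to the allocator, so the per-`__anext__` allocations become cheap pops. A stripped-down sketch of the same pattern for a generic GC type (hypothetical `mytype` names, not from this file):

typedef struct { PyObject_HEAD } mytype;      /* hypothetical object type */
static PyTypeObject mytype_type;              /* assumed set up elsewhere */

#define MY_MAXFREELIST 80
static mytype *my_freelist[MY_MAXFREELIST];
static int my_freelist_len = 0;

static mytype *
mytype_new(void)
{
    if (my_freelist_len) {                    /* pop a cached dead object */
        mytype *op = my_freelist[--my_freelist_len];
        _Py_NewReference((PyObject *)op);     /* refcount back to 1 */
        return op;
    }
    return PyObject_GC_New(mytype, &mytype_type);
}

static void
mytype_dealloc(mytype *op)
{
    PyObject_GC_UnTrack((PyObject *)op);
    if (my_freelist_len < MY_MAXFREELIST)     /* park it for reuse */
        my_freelist[my_freelist_len++] = op;
    else
        PyObject_GC_Del(op);
}
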
+static int
+__Pyx_async_gen_traverse(__pyx_PyAsyncGenObject *gen, visitproc visit, void *arg)
+{
+ Py_VISIT(gen->ag_finalizer);
+ return __Pyx_Coroutine_traverse((__pyx_CoroutineObject*)gen, visit, arg);
+}
+
+
+static PyObject *
+__Pyx_async_gen_repr(__pyx_CoroutineObject *o)
+{
+ // avoid NULL pointer dereference for qualname during garbage collection
+ return PyUnicode_FromFormat("<async_generator object %S at %p>",
+ o->gi_qualname ? o->gi_qualname : Py_None, o);
+}
+
+
+#if PY_VERSION_HEX >= 0x030600B0
+static int
+__Pyx_async_gen_init_hooks(__pyx_PyAsyncGenObject *o)
+{
+ PyThreadState *tstate;
+ PyObject *finalizer;
+ PyObject *firstiter;
+
+ if (o->ag_hooks_inited) {
+ return 0;
+ }
+
+ o->ag_hooks_inited = 1;
+
+ tstate = __Pyx_PyThreadState_Current;
+
+ finalizer = tstate->async_gen_finalizer;
+ if (finalizer) {
+ Py_INCREF(finalizer);
+ o->ag_finalizer = finalizer;
+ }
+
+ firstiter = tstate->async_gen_firstiter;
+ if (firstiter) {
+ PyObject *res;
#if CYTHON_UNPACK_METHODS
PyObject *self;
#endif
-
- Py_INCREF(firstiter);
- // at least asyncio stores methods here => optimise the call
+
+ Py_INCREF(firstiter);
+ // at least asyncio stores methods here => optimise the call
#if CYTHON_UNPACK_METHODS
if (likely(PyMethod_Check(firstiter)) && likely((self = PyMethod_GET_SELF(firstiter)) != NULL)) {
PyObject *function = PyMethod_GET_FUNCTION(firstiter);
@@ -237,193 +237,193 @@ __Pyx_async_gen_init_hooks(__pyx_PyAsyncGenObject *o)
#endif
res = __Pyx_PyObject_CallOneArg(firstiter, (PyObject*)o);
- Py_DECREF(firstiter);
- if (unlikely(res == NULL)) {
- return 1;
- }
- Py_DECREF(res);
- }
-
- return 0;
-}
-#endif
-
-
-static PyObject *
-__Pyx_async_gen_anext(PyObject *g)
-{
- __pyx_PyAsyncGenObject *o = (__pyx_PyAsyncGenObject*) g;
- if (__Pyx_async_gen_init_hooks(o)) {
- return NULL;
- }
- return __Pyx_async_gen_asend_new(o, NULL);
-}
-
+ Py_DECREF(firstiter);
+ if (unlikely(res == NULL)) {
+ return 1;
+ }
+ Py_DECREF(res);
+ }
+
+ return 0;
+}
+#endif
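
The `CYTHON_UNPACK_METHODS` branch above avoids the overhead of calling a bound method (asyncio's firstiter hook usually is one) by unpacking it into its function and `self` and issuing a single two-argument call. A sketch of that fast path using only public calls, where the real code uses the `__Pyx_PyObject_Call2Args` helper:

/* Sketch: call 'callable(arg)', taking the bound-method shortcut when
 * possible by calling the underlying function as func(self, arg). */
static PyObject *
call_maybe_bound(PyObject *callable, PyObject *arg)
{
    if (PyMethod_Check(callable)) {
        PyObject *self = PyMethod_GET_SELF(callable);   /* borrowed */
        if (self != NULL) {
            PyObject *func = PyMethod_GET_FUNCTION(callable);  /* borrowed */
            /* borrowed references stay valid for the duration of the call
               because 'callable' keeps owning them */
            return PyObject_CallFunctionObjArgs(func, self, arg, NULL);
        }
    }
    return PyObject_CallFunctionObjArgs(callable, arg, NULL);
}
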
+
+
+static PyObject *
+__Pyx_async_gen_anext(PyObject *g)
+{
+ __pyx_PyAsyncGenObject *o = (__pyx_PyAsyncGenObject*) g;
+ if (__Pyx_async_gen_init_hooks(o)) {
+ return NULL;
+ }
+ return __Pyx_async_gen_asend_new(o, NULL);
+}
+
static PyObject *
__Pyx_async_gen_anext_method(PyObject *g, CYTHON_UNUSED PyObject *arg) {
return __Pyx_async_gen_anext(g);
}
-
-
-static PyObject *
-__Pyx_async_gen_asend(__pyx_PyAsyncGenObject *o, PyObject *arg)
-{
- if (__Pyx_async_gen_init_hooks(o)) {
- return NULL;
- }
- return __Pyx_async_gen_asend_new(o, arg);
-}
-
-
-static PyObject *
-__Pyx_async_gen_aclose(__pyx_PyAsyncGenObject *o, CYTHON_UNUSED PyObject *arg)
-{
- if (__Pyx_async_gen_init_hooks(o)) {
- return NULL;
- }
- return __Pyx_async_gen_athrow_new(o, NULL);
-}
-
-
-static PyObject *
-__Pyx_async_gen_athrow(__pyx_PyAsyncGenObject *o, PyObject *args)
-{
- if (__Pyx_async_gen_init_hooks(o)) {
- return NULL;
- }
- return __Pyx_async_gen_athrow_new(o, args);
-}
-
-
+
+
+static PyObject *
+__Pyx_async_gen_asend(__pyx_PyAsyncGenObject *o, PyObject *arg)
+{
+ if (__Pyx_async_gen_init_hooks(o)) {
+ return NULL;
+ }
+ return __Pyx_async_gen_asend_new(o, arg);
+}
+
+
+static PyObject *
+__Pyx_async_gen_aclose(__pyx_PyAsyncGenObject *o, CYTHON_UNUSED PyObject *arg)
+{
+ if (__Pyx_async_gen_init_hooks(o)) {
+ return NULL;
+ }
+ return __Pyx_async_gen_athrow_new(o, NULL);
+}
+
+
+static PyObject *
+__Pyx_async_gen_athrow(__pyx_PyAsyncGenObject *o, PyObject *args)
+{
+ if (__Pyx_async_gen_init_hooks(o)) {
+ return NULL;
+ }
+ return __Pyx_async_gen_athrow_new(o, args);
+}
+
+
static PyObject *
__Pyx_async_gen_self_method(PyObject *g, CYTHON_UNUSED PyObject *arg) {
return __Pyx_NewRef(g);
}
-static PyGetSetDef __Pyx_async_gen_getsetlist[] = {
- {(char*) "__name__", (getter)__Pyx_Coroutine_get_name, (setter)__Pyx_Coroutine_set_name,
- (char*) PyDoc_STR("name of the async generator"), 0},
- {(char*) "__qualname__", (getter)__Pyx_Coroutine_get_qualname, (setter)__Pyx_Coroutine_set_qualname,
- (char*) PyDoc_STR("qualified name of the async generator"), 0},
- //REMOVED: {(char*) "ag_await", (getter)coro_get_cr_await, NULL,
- //REMOVED: (char*) PyDoc_STR("object being awaited on, or None")},
- {0, 0, 0, 0, 0} /* Sentinel */
-};
-
-static PyMemberDef __Pyx_async_gen_memberlist[] = {
- //REMOVED: {(char*) "ag_frame", T_OBJECT, offsetof(__pyx_PyAsyncGenObject, ag_frame), READONLY},
- {(char*) "ag_running", T_BOOL, offsetof(__pyx_CoroutineObject, is_running), READONLY, NULL},
- //REMOVED: {(char*) "ag_code", T_OBJECT, offsetof(__pyx_PyAsyncGenObject, ag_code), READONLY},
- //ADDED: "ag_await"
- {(char*) "ag_await", T_OBJECT, offsetof(__pyx_CoroutineObject, yieldfrom), READONLY,
- (char*) PyDoc_STR("object being awaited on, or None")},
- {0, 0, 0, 0, 0} /* Sentinel */
-};
-
-PyDoc_STRVAR(__Pyx_async_aclose_doc,
-"aclose() -> raise GeneratorExit inside generator.");
-
-PyDoc_STRVAR(__Pyx_async_asend_doc,
-"asend(v) -> send 'v' in generator.");
-
-PyDoc_STRVAR(__Pyx_async_athrow_doc,
-"athrow(typ[,val[,tb]]) -> raise exception in generator.");
-
-PyDoc_STRVAR(__Pyx_async_aiter_doc,
-"__aiter__(v) -> return an asynchronous iterator.");
-
-PyDoc_STRVAR(__Pyx_async_anext_doc,
-"__anext__(v) -> continue asynchronous iteration and return the next element.");
-
-static PyMethodDef __Pyx_async_gen_methods[] = {
- {"asend", (PyCFunction)__Pyx_async_gen_asend, METH_O, __Pyx_async_asend_doc},
- {"athrow",(PyCFunction)__Pyx_async_gen_athrow, METH_VARARGS, __Pyx_async_athrow_doc},
- {"aclose", (PyCFunction)__Pyx_async_gen_aclose, METH_NOARGS, __Pyx_async_aclose_doc},
+static PyGetSetDef __Pyx_async_gen_getsetlist[] = {
+ {(char*) "__name__", (getter)__Pyx_Coroutine_get_name, (setter)__Pyx_Coroutine_set_name,
+ (char*) PyDoc_STR("name of the async generator"), 0},
+ {(char*) "__qualname__", (getter)__Pyx_Coroutine_get_qualname, (setter)__Pyx_Coroutine_set_qualname,
+ (char*) PyDoc_STR("qualified name of the async generator"), 0},
+ //REMOVED: {(char*) "ag_await", (getter)coro_get_cr_await, NULL,
+ //REMOVED: (char*) PyDoc_STR("object being awaited on, or None")},
+ {0, 0, 0, 0, 0} /* Sentinel */
+};
+
+static PyMemberDef __Pyx_async_gen_memberlist[] = {
+ //REMOVED: {(char*) "ag_frame", T_OBJECT, offsetof(__pyx_PyAsyncGenObject, ag_frame), READONLY},
+ {(char*) "ag_running", T_BOOL, offsetof(__pyx_CoroutineObject, is_running), READONLY, NULL},
+ //REMOVED: {(char*) "ag_code", T_OBJECT, offsetof(__pyx_PyAsyncGenObject, ag_code), READONLY},
+ //ADDED: "ag_await"
+ {(char*) "ag_await", T_OBJECT, offsetof(__pyx_CoroutineObject, yieldfrom), READONLY,
+ (char*) PyDoc_STR("object being awaited on, or None")},
+ {0, 0, 0, 0, 0} /* Sentinel */
+};
+
+PyDoc_STRVAR(__Pyx_async_aclose_doc,
+"aclose() -> raise GeneratorExit inside generator.");
+
+PyDoc_STRVAR(__Pyx_async_asend_doc,
+"asend(v) -> send 'v' in generator.");
+
+PyDoc_STRVAR(__Pyx_async_athrow_doc,
+"athrow(typ[,val[,tb]]) -> raise exception in generator.");
+
+PyDoc_STRVAR(__Pyx_async_aiter_doc,
+"__aiter__(v) -> return an asynchronous iterator.");
+
+PyDoc_STRVAR(__Pyx_async_anext_doc,
+"__anext__(v) -> continue asynchronous iteration and return the next element.");
+
+static PyMethodDef __Pyx_async_gen_methods[] = {
+ {"asend", (PyCFunction)__Pyx_async_gen_asend, METH_O, __Pyx_async_asend_doc},
+ {"athrow",(PyCFunction)__Pyx_async_gen_athrow, METH_VARARGS, __Pyx_async_athrow_doc},
+ {"aclose", (PyCFunction)__Pyx_async_gen_aclose, METH_NOARGS, __Pyx_async_aclose_doc},
{"__aiter__", (PyCFunction)__Pyx_async_gen_self_method, METH_NOARGS, __Pyx_async_aiter_doc},
{"__anext__", (PyCFunction)__Pyx_async_gen_anext_method, METH_NOARGS, __Pyx_async_anext_doc},
- {0, 0, 0, 0} /* Sentinel */
-};
-
-
-#if CYTHON_USE_ASYNC_SLOTS
-static __Pyx_PyAsyncMethodsStruct __Pyx_async_gen_as_async = {
- 0, /* am_await */
- PyObject_SelfIter, /* am_aiter */
+ {0, 0, 0, 0} /* Sentinel */
+};
+
+
+#if CYTHON_USE_ASYNC_SLOTS
+static __Pyx_PyAsyncMethodsStruct __Pyx_async_gen_as_async = {
+ 0, /* am_await */
+ PyObject_SelfIter, /* am_aiter */
(unaryfunc)__Pyx_async_gen_anext, /* am_anext */
#if PY_VERSION_HEX >= 0x030A00A3
0, /*am_send*/
#endif
-};
-#endif
-
-static PyTypeObject __pyx_AsyncGenType_type = {
- PyVarObject_HEAD_INIT(0, 0)
- "async_generator", /* tp_name */
- sizeof(__pyx_PyAsyncGenObject), /* tp_basicsize */
- 0, /* tp_itemsize */
- (destructor)__Pyx_Coroutine_dealloc, /* tp_dealloc */
- 0, /* tp_print */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
-#if CYTHON_USE_ASYNC_SLOTS
- &__Pyx_async_gen_as_async, /* tp_as_async */
-#else
- 0, /*tp_reserved*/
-#endif
- (reprfunc)__Pyx_async_gen_repr, /* tp_repr */
- 0, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- 0, /* tp_hash */
- 0, /* tp_call */
- 0, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
- Py_TPFLAGS_HAVE_FINALIZE, /* tp_flags */
- 0, /* tp_doc */
- (traverseproc)__Pyx_async_gen_traverse, /* tp_traverse */
- 0, /* tp_clear */
-#if CYTHON_USE_ASYNC_SLOTS && CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 && PY_VERSION_HEX < 0x030500B1
- // in order to (mis-)use tp_reserved above, we must also implement tp_richcompare
- __Pyx_Coroutine_compare, /*tp_richcompare*/
-#else
- 0, /*tp_richcompare*/
-#endif
- offsetof(__pyx_CoroutineObject, gi_weakreflist), /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- __Pyx_async_gen_methods, /* tp_methods */
- __Pyx_async_gen_memberlist, /* tp_members */
- __Pyx_async_gen_getsetlist, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- 0, /* tp_init */
- 0, /* tp_alloc */
- 0, /* tp_new */
- 0, /* tp_free */
- 0, /* tp_is_gc */
- 0, /* tp_bases */
- 0, /* tp_mro */
- 0, /* tp_cache */
- 0, /* tp_subclasses */
- 0, /* tp_weaklist */
-#if CYTHON_USE_TP_FINALIZE
- 0, /*tp_del*/
-#else
- __Pyx_Coroutine_del, /*tp_del*/
-#endif
- 0, /* tp_version_tag */
-#if CYTHON_USE_TP_FINALIZE
- __Pyx_Coroutine_del, /* tp_finalize */
-#elif PY_VERSION_HEX >= 0x030400a1
- 0, /* tp_finalize */
-#endif
+};
+#endif
+
+static PyTypeObject __pyx_AsyncGenType_type = {
+ PyVarObject_HEAD_INIT(0, 0)
+ "async_generator", /* tp_name */
+ sizeof(__pyx_PyAsyncGenObject), /* tp_basicsize */
+ 0, /* tp_itemsize */
+ (destructor)__Pyx_Coroutine_dealloc, /* tp_dealloc */
+ 0, /* tp_print */
+ 0, /* tp_getattr */
+ 0, /* tp_setattr */
+#if CYTHON_USE_ASYNC_SLOTS
+ &__Pyx_async_gen_as_async, /* tp_as_async */
+#else
+ 0, /*tp_reserved*/
+#endif
+ (reprfunc)__Pyx_async_gen_repr, /* tp_repr */
+ 0, /* tp_as_number */
+ 0, /* tp_as_sequence */
+ 0, /* tp_as_mapping */
+ 0, /* tp_hash */
+ 0, /* tp_call */
+ 0, /* tp_str */
+ 0, /* tp_getattro */
+ 0, /* tp_setattro */
+ 0, /* tp_as_buffer */
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
+ Py_TPFLAGS_HAVE_FINALIZE, /* tp_flags */
+ 0, /* tp_doc */
+ (traverseproc)__Pyx_async_gen_traverse, /* tp_traverse */
+ 0, /* tp_clear */
+#if CYTHON_USE_ASYNC_SLOTS && CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 && PY_VERSION_HEX < 0x030500B1
+ // in order to (mis-)use tp_reserved above, we must also implement tp_richcompare
+ __Pyx_Coroutine_compare, /*tp_richcompare*/
+#else
+ 0, /*tp_richcompare*/
+#endif
+ offsetof(__pyx_CoroutineObject, gi_weakreflist), /* tp_weaklistoffset */
+ 0, /* tp_iter */
+ 0, /* tp_iternext */
+ __Pyx_async_gen_methods, /* tp_methods */
+ __Pyx_async_gen_memberlist, /* tp_members */
+ __Pyx_async_gen_getsetlist, /* tp_getset */
+ 0, /* tp_base */
+ 0, /* tp_dict */
+ 0, /* tp_descr_get */
+ 0, /* tp_descr_set */
+ 0, /* tp_dictoffset */
+ 0, /* tp_init */
+ 0, /* tp_alloc */
+ 0, /* tp_new */
+ 0, /* tp_free */
+ 0, /* tp_is_gc */
+ 0, /* tp_bases */
+ 0, /* tp_mro */
+ 0, /* tp_cache */
+ 0, /* tp_subclasses */
+ 0, /* tp_weaklist */
+#if CYTHON_USE_TP_FINALIZE
+ 0, /*tp_del*/
+#else
+ __Pyx_Coroutine_del, /*tp_del*/
+#endif
+ 0, /* tp_version_tag */
+#if CYTHON_USE_TP_FINALIZE
+ __Pyx_Coroutine_del, /* tp_finalize */
+#elif PY_VERSION_HEX >= 0x030400a1
+ 0, /* tp_finalize */
+#endif
#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800)
0, /*tp_vectorcall*/
#endif
@@ -433,238 +433,238 @@ static PyTypeObject __pyx_AsyncGenType_type = {
#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000
0, /*tp_pypy_flags*/
#endif
-};
-
-
-static int
-__Pyx_PyAsyncGen_ClearFreeLists(void)
-{
- int ret = __Pyx_ag_value_freelist_free + __Pyx_ag_asend_freelist_free;
-
- while (__Pyx_ag_value_freelist_free) {
- __pyx__PyAsyncGenWrappedValue *o;
- o = __Pyx_ag_value_freelist[--__Pyx_ag_value_freelist_free];
- assert(__pyx__PyAsyncGenWrappedValue_CheckExact(o));
- PyObject_GC_Del(o);
- }
-
- while (__Pyx_ag_asend_freelist_free) {
- __pyx_PyAsyncGenASend *o;
- o = __Pyx_ag_asend_freelist[--__Pyx_ag_asend_freelist_free];
- assert(Py_TYPE(o) == __pyx__PyAsyncGenASendType);
- PyObject_GC_Del(o);
- }
-
- return ret;
-}
-
-static void
-__Pyx_PyAsyncGen_Fini(void)
-{
- __Pyx_PyAsyncGen_ClearFreeLists();
-}
-
-
-static PyObject *
-__Pyx_async_gen_unwrap_value(__pyx_PyAsyncGenObject *gen, PyObject *result)
-{
- if (result == NULL) {
- PyObject *exc_type = PyErr_Occurred();
- if (!exc_type) {
- PyErr_SetNone(__Pyx_PyExc_StopAsyncIteration);
- gen->ag_closed = 1;
- } else if (__Pyx_PyErr_GivenExceptionMatches2(exc_type, __Pyx_PyExc_StopAsyncIteration, PyExc_GeneratorExit)) {
- gen->ag_closed = 1;
- }
-
- return NULL;
- }
-
- if (__pyx__PyAsyncGenWrappedValue_CheckExact(result)) {
- /* async yield */
- __Pyx_ReturnWithStopIteration(((__pyx__PyAsyncGenWrappedValue*)result)->agw_val);
- Py_DECREF(result);
- return NULL;
- }
-
- return result;
-}
-
-
-/* ---------- Async Generator ASend Awaitable ------------ */
-
-
-static void
-__Pyx_async_gen_asend_dealloc(__pyx_PyAsyncGenASend *o)
-{
- PyObject_GC_UnTrack((PyObject *)o);
- Py_CLEAR(o->ags_gen);
- Py_CLEAR(o->ags_sendval);
- if (__Pyx_ag_asend_freelist_free < _PyAsyncGen_MAXFREELIST) {
- assert(__pyx_PyAsyncGenASend_CheckExact(o));
- __Pyx_ag_asend_freelist[__Pyx_ag_asend_freelist_free++] = o;
- } else {
- PyObject_GC_Del(o);
- }
-}
-
-static int
-__Pyx_async_gen_asend_traverse(__pyx_PyAsyncGenASend *o, visitproc visit, void *arg)
-{
- Py_VISIT(o->ags_gen);
- Py_VISIT(o->ags_sendval);
- return 0;
-}
-
-
-static PyObject *
-__Pyx_async_gen_asend_send(PyObject *g, PyObject *arg)
-{
- __pyx_PyAsyncGenASend *o = (__pyx_PyAsyncGenASend*) g;
- PyObject *result;
-
- if (unlikely(o->ags_state == __PYX_AWAITABLE_STATE_CLOSED)) {
- PyErr_SetNone(PyExc_StopIteration);
- return NULL;
- }
-
- if (o->ags_state == __PYX_AWAITABLE_STATE_INIT) {
- if (arg == NULL || arg == Py_None) {
- arg = o->ags_sendval ? o->ags_sendval : Py_None;
- }
- o->ags_state = __PYX_AWAITABLE_STATE_ITER;
- }
-
- result = __Pyx_Coroutine_Send((PyObject*)o->ags_gen, arg);
- result = __Pyx_async_gen_unwrap_value(o->ags_gen, result);
-
- if (result == NULL) {
- o->ags_state = __PYX_AWAITABLE_STATE_CLOSED;
- }
-
- return result;
-}
-
-
-static CYTHON_INLINE PyObject *
-__Pyx_async_gen_asend_iternext(PyObject *o)
-{
- return __Pyx_async_gen_asend_send(o, Py_None);
-}
-
-
-static PyObject *
-__Pyx_async_gen_asend_throw(__pyx_PyAsyncGenASend *o, PyObject *args)
-{
- PyObject *result;
-
- if (unlikely(o->ags_state == __PYX_AWAITABLE_STATE_CLOSED)) {
- PyErr_SetNone(PyExc_StopIteration);
- return NULL;
- }
-
- result = __Pyx_Coroutine_Throw((PyObject*)o->ags_gen, args);
- result = __Pyx_async_gen_unwrap_value(o->ags_gen, result);
-
- if (result == NULL) {
- o->ags_state = __PYX_AWAITABLE_STATE_CLOSED;
- }
-
- return result;
-}
-
-
-static PyObject *
-__Pyx_async_gen_asend_close(PyObject *g, CYTHON_UNUSED PyObject *args)
-{
- __pyx_PyAsyncGenASend *o = (__pyx_PyAsyncGenASend*) g;
- o->ags_state = __PYX_AWAITABLE_STATE_CLOSED;
- Py_RETURN_NONE;
-}
-
-
-static PyMethodDef __Pyx_async_gen_asend_methods[] = {
- {"send", (PyCFunction)__Pyx_async_gen_asend_send, METH_O, __Pyx_async_gen_send_doc},
- {"throw", (PyCFunction)__Pyx_async_gen_asend_throw, METH_VARARGS, __Pyx_async_gen_throw_doc},
- {"close", (PyCFunction)__Pyx_async_gen_asend_close, METH_NOARGS, __Pyx_async_gen_close_doc},
+};
+
+
+static int
+__Pyx_PyAsyncGen_ClearFreeLists(void)
+{
+ int ret = __Pyx_ag_value_freelist_free + __Pyx_ag_asend_freelist_free;
+
+ while (__Pyx_ag_value_freelist_free) {
+ __pyx__PyAsyncGenWrappedValue *o;
+ o = __Pyx_ag_value_freelist[--__Pyx_ag_value_freelist_free];
+ assert(__pyx__PyAsyncGenWrappedValue_CheckExact(o));
+ PyObject_GC_Del(o);
+ }
+
+ while (__Pyx_ag_asend_freelist_free) {
+ __pyx_PyAsyncGenASend *o;
+ o = __Pyx_ag_asend_freelist[--__Pyx_ag_asend_freelist_free];
+ assert(Py_TYPE(o) == __pyx__PyAsyncGenASendType);
+ PyObject_GC_Del(o);
+ }
+
+ return ret;
+}
+
+static void
+__Pyx_PyAsyncGen_Fini(void)
+{
+ __Pyx_PyAsyncGen_ClearFreeLists();
+}
+
+
+static PyObject *
+__Pyx_async_gen_unwrap_value(__pyx_PyAsyncGenObject *gen, PyObject *result)
+{
+ if (result == NULL) {
+ PyObject *exc_type = PyErr_Occurred();
+ if (!exc_type) {
+ PyErr_SetNone(__Pyx_PyExc_StopAsyncIteration);
+ gen->ag_closed = 1;
+ } else if (__Pyx_PyErr_GivenExceptionMatches2(exc_type, __Pyx_PyExc_StopAsyncIteration, PyExc_GeneratorExit)) {
+ gen->ag_closed = 1;
+ }
+
+ return NULL;
+ }
+
+ if (__pyx__PyAsyncGenWrappedValue_CheckExact(result)) {
+ /* async yield */
+ __Pyx_ReturnWithStopIteration(((__pyx__PyAsyncGenWrappedValue*)result)->agw_val);
+ Py_DECREF(result);
+ return NULL;
+ }
+
+ return result;
+}
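
`__Pyx_ReturnWithStopIteration` turns the unwrapped value into the result of the `await`: an awaitable "returns" by raising `StopIteration` that carries the value. The portable equivalent, as a minimal sketch:

/* Sketch: the portable way to return a value from an awaitable's
 * iteration protocol. __Pyx_ReturnWithStopIteration is an optimised
 * version of this that also copes with tuple and exception values,
 * which this naive form does not. */
static PyObject *
return_value(PyObject *value)
{
    PyErr_SetObject(PyExc_StopIteration, value);  /* StopIteration(value) */
    return NULL;
}
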
+
+
+/* ---------- Async Generator ASend Awaitable ------------ */
+
+
+static void
+__Pyx_async_gen_asend_dealloc(__pyx_PyAsyncGenASend *o)
+{
+ PyObject_GC_UnTrack((PyObject *)o);
+ Py_CLEAR(o->ags_gen);
+ Py_CLEAR(o->ags_sendval);
+ if (__Pyx_ag_asend_freelist_free < _PyAsyncGen_MAXFREELIST) {
+ assert(__pyx_PyAsyncGenASend_CheckExact(o));
+ __Pyx_ag_asend_freelist[__Pyx_ag_asend_freelist_free++] = o;
+ } else {
+ PyObject_GC_Del(o);
+ }
+}
+
+static int
+__Pyx_async_gen_asend_traverse(__pyx_PyAsyncGenASend *o, visitproc visit, void *arg)
+{
+ Py_VISIT(o->ags_gen);
+ Py_VISIT(o->ags_sendval);
+ return 0;
+}
+
+
+static PyObject *
+__Pyx_async_gen_asend_send(PyObject *g, PyObject *arg)
+{
+ __pyx_PyAsyncGenASend *o = (__pyx_PyAsyncGenASend*) g;
+ PyObject *result;
+
+ if (unlikely(o->ags_state == __PYX_AWAITABLE_STATE_CLOSED)) {
+ PyErr_SetNone(PyExc_StopIteration);
+ return NULL;
+ }
+
+ if (o->ags_state == __PYX_AWAITABLE_STATE_INIT) {
+ if (arg == NULL || arg == Py_None) {
+ arg = o->ags_sendval ? o->ags_sendval : Py_None;
+ }
+ o->ags_state = __PYX_AWAITABLE_STATE_ITER;
+ }
+
+ result = __Pyx_Coroutine_Send((PyObject*)o->ags_gen, arg);
+ result = __Pyx_async_gen_unwrap_value(o->ags_gen, result);
+
+ if (result == NULL) {
+ o->ags_state = __PYX_AWAITABLE_STATE_CLOSED;
+ }
+
+ return result;
+}
+
+
+static CYTHON_INLINE PyObject *
+__Pyx_async_gen_asend_iternext(PyObject *o)
+{
+ return __Pyx_async_gen_asend_send(o, Py_None);
+}
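
As the pair above shows, iterating an ASend object is literally `send(None)`: both `__anext__()` and `asend()` hand back this awaitable, and the event loop advances it by sending into it. One such step through the public object protocol, for illustration:

/* Illustrative: advance an awaitable one step, i.e. awaitable.send(None).
 * A real event loop would suspend on the yielded value rather than
 * returning it directly. */
static PyObject *
step_awaitable(PyObject *awaitable)
{
    return PyObject_CallMethod(awaitable, "send", "O", Py_None);
}
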
+
+
+static PyObject *
+__Pyx_async_gen_asend_throw(__pyx_PyAsyncGenASend *o, PyObject *args)
+{
+ PyObject *result;
+
+ if (unlikely(o->ags_state == __PYX_AWAITABLE_STATE_CLOSED)) {
+ PyErr_SetNone(PyExc_StopIteration);
+ return NULL;
+ }
+
+ result = __Pyx_Coroutine_Throw((PyObject*)o->ags_gen, args);
+ result = __Pyx_async_gen_unwrap_value(o->ags_gen, result);
+
+ if (result == NULL) {
+ o->ags_state = __PYX_AWAITABLE_STATE_CLOSED;
+ }
+
+ return result;
+}
+
+
+static PyObject *
+__Pyx_async_gen_asend_close(PyObject *g, CYTHON_UNUSED PyObject *args)
+{
+ __pyx_PyAsyncGenASend *o = (__pyx_PyAsyncGenASend*) g;
+ o->ags_state = __PYX_AWAITABLE_STATE_CLOSED;
+ Py_RETURN_NONE;
+}
+
+
+static PyMethodDef __Pyx_async_gen_asend_methods[] = {
+ {"send", (PyCFunction)__Pyx_async_gen_asend_send, METH_O, __Pyx_async_gen_send_doc},
+ {"throw", (PyCFunction)__Pyx_async_gen_asend_throw, METH_VARARGS, __Pyx_async_gen_throw_doc},
+ {"close", (PyCFunction)__Pyx_async_gen_asend_close, METH_NOARGS, __Pyx_async_gen_close_doc},
{"__await__", (PyCFunction)__Pyx_async_gen_self_method, METH_NOARGS, __Pyx_async_gen_await_doc},
- {0, 0, 0, 0} /* Sentinel */
-};
-
-
-#if CYTHON_USE_ASYNC_SLOTS
-static __Pyx_PyAsyncMethodsStruct __Pyx_async_gen_asend_as_async = {
- PyObject_SelfIter, /* am_await */
- 0, /* am_aiter */
+ {0, 0, 0, 0} /* Sentinel */
+};
+
+
+#if CYTHON_USE_ASYNC_SLOTS
+static __Pyx_PyAsyncMethodsStruct __Pyx_async_gen_asend_as_async = {
+ PyObject_SelfIter, /* am_await */
+ 0, /* am_aiter */
0, /* am_anext */
#if PY_VERSION_HEX >= 0x030A00A3
0, /*am_send*/
#endif
-};
-#endif
-
-
-static PyTypeObject __pyx__PyAsyncGenASendType_type = {
- PyVarObject_HEAD_INIT(0, 0)
- "async_generator_asend", /* tp_name */
- sizeof(__pyx_PyAsyncGenASend), /* tp_basicsize */
- 0, /* tp_itemsize */
- /* methods */
- (destructor)__Pyx_async_gen_asend_dealloc, /* tp_dealloc */
- 0, /* tp_print */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
-#if CYTHON_USE_ASYNC_SLOTS
- &__Pyx_async_gen_asend_as_async, /* tp_as_async */
-#else
- 0, /*tp_reserved*/
-#endif
- 0, /* tp_repr */
- 0, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- 0, /* tp_hash */
- 0, /* tp_call */
- 0, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */
- 0, /* tp_doc */
- (traverseproc)__Pyx_async_gen_asend_traverse, /* tp_traverse */
- 0, /* tp_clear */
-#if CYTHON_USE_ASYNC_SLOTS && CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 && PY_VERSION_HEX < 0x030500B1
- // in order to (mis-)use tp_reserved above, we must also implement tp_richcompare
- __Pyx_Coroutine_compare, /*tp_richcompare*/
-#else
- 0, /*tp_richcompare*/
-#endif
- 0, /* tp_weaklistoffset */
- PyObject_SelfIter, /* tp_iter */
- (iternextfunc)__Pyx_async_gen_asend_iternext, /* tp_iternext */
- __Pyx_async_gen_asend_methods, /* tp_methods */
- 0, /* tp_members */
- 0, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- 0, /* tp_init */
- 0, /* tp_alloc */
- 0, /* tp_new */
- 0, /* tp_free */
- 0, /* tp_is_gc */
- 0, /* tp_bases */
- 0, /* tp_mro */
- 0, /* tp_cache */
- 0, /* tp_subclasses */
- 0, /* tp_weaklist */
- 0, /* tp_del */
- 0, /* tp_version_tag */
-#if PY_VERSION_HEX >= 0x030400a1
- 0, /* tp_finalize */
-#endif
+};
+#endif
+
+
+static PyTypeObject __pyx__PyAsyncGenASendType_type = {
+ PyVarObject_HEAD_INIT(0, 0)
+ "async_generator_asend", /* tp_name */
+ sizeof(__pyx_PyAsyncGenASend), /* tp_basicsize */
+ 0, /* tp_itemsize */
+ /* methods */
+ (destructor)__Pyx_async_gen_asend_dealloc, /* tp_dealloc */
+ 0, /* tp_print */
+ 0, /* tp_getattr */
+ 0, /* tp_setattr */
+#if CYTHON_USE_ASYNC_SLOTS
+ &__Pyx_async_gen_asend_as_async, /* tp_as_async */
+#else
+ 0, /*tp_reserved*/
+#endif
+ 0, /* tp_repr */
+ 0, /* tp_as_number */
+ 0, /* tp_as_sequence */
+ 0, /* tp_as_mapping */
+ 0, /* tp_hash */
+ 0, /* tp_call */
+ 0, /* tp_str */
+ 0, /* tp_getattro */
+ 0, /* tp_setattro */
+ 0, /* tp_as_buffer */
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */
+ 0, /* tp_doc */
+ (traverseproc)__Pyx_async_gen_asend_traverse, /* tp_traverse */
+ 0, /* tp_clear */
+#if CYTHON_USE_ASYNC_SLOTS && CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 && PY_VERSION_HEX < 0x030500B1
+ // in order to (mis-)use tp_reserved above, we must also implement tp_richcompare
+ __Pyx_Coroutine_compare, /*tp_richcompare*/
+#else
+ 0, /*tp_richcompare*/
+#endif
+ 0, /* tp_weaklistoffset */
+ PyObject_SelfIter, /* tp_iter */
+ (iternextfunc)__Pyx_async_gen_asend_iternext, /* tp_iternext */
+ __Pyx_async_gen_asend_methods, /* tp_methods */
+ 0, /* tp_members */
+ 0, /* tp_getset */
+ 0, /* tp_base */
+ 0, /* tp_dict */
+ 0, /* tp_descr_get */
+ 0, /* tp_descr_set */
+ 0, /* tp_dictoffset */
+ 0, /* tp_init */
+ 0, /* tp_alloc */
+ 0, /* tp_new */
+ 0, /* tp_free */
+ 0, /* tp_is_gc */
+ 0, /* tp_bases */
+ 0, /* tp_mro */
+ 0, /* tp_cache */
+ 0, /* tp_subclasses */
+ 0, /* tp_weaklist */
+ 0, /* tp_del */
+ 0, /* tp_version_tag */
+#if PY_VERSION_HEX >= 0x030400a1
+ 0, /* tp_finalize */
+#endif
#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800)
0, /*tp_vectorcall*/
#endif
@@ -674,115 +674,115 @@ static PyTypeObject __pyx__PyAsyncGenASendType_type = {
#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000
0, /*tp_pypy_flags*/
#endif
-};
-
-
-static PyObject *
-__Pyx_async_gen_asend_new(__pyx_PyAsyncGenObject *gen, PyObject *sendval)
-{
- __pyx_PyAsyncGenASend *o;
- if (__Pyx_ag_asend_freelist_free) {
- __Pyx_ag_asend_freelist_free--;
- o = __Pyx_ag_asend_freelist[__Pyx_ag_asend_freelist_free];
- _Py_NewReference((PyObject *)o);
- } else {
- o = PyObject_GC_New(__pyx_PyAsyncGenASend, __pyx__PyAsyncGenASendType);
- if (o == NULL) {
- return NULL;
- }
- }
-
- Py_INCREF(gen);
- o->ags_gen = gen;
-
- Py_XINCREF(sendval);
- o->ags_sendval = sendval;
-
- o->ags_state = __PYX_AWAITABLE_STATE_INIT;
-
- PyObject_GC_Track((PyObject*)o);
- return (PyObject*)o;
-}
-
-
-/* ---------- Async Generator Value Wrapper ------------ */
-
-
-static void
-__Pyx_async_gen_wrapped_val_dealloc(__pyx__PyAsyncGenWrappedValue *o)
-{
- PyObject_GC_UnTrack((PyObject *)o);
- Py_CLEAR(o->agw_val);
- if (__Pyx_ag_value_freelist_free < _PyAsyncGen_MAXFREELIST) {
- assert(__pyx__PyAsyncGenWrappedValue_CheckExact(o));
- __Pyx_ag_value_freelist[__Pyx_ag_value_freelist_free++] = o;
- } else {
- PyObject_GC_Del(o);
- }
-}
-
-
-static int
-__Pyx_async_gen_wrapped_val_traverse(__pyx__PyAsyncGenWrappedValue *o,
- visitproc visit, void *arg)
-{
- Py_VISIT(o->agw_val);
- return 0;
-}
-
-
-static PyTypeObject __pyx__PyAsyncGenWrappedValueType_type = {
- PyVarObject_HEAD_INIT(0, 0)
- "async_generator_wrapped_value", /* tp_name */
- sizeof(__pyx__PyAsyncGenWrappedValue), /* tp_basicsize */
- 0, /* tp_itemsize */
- /* methods */
- (destructor)__Pyx_async_gen_wrapped_val_dealloc, /* tp_dealloc */
- 0, /* tp_print */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_as_async */
- 0, /* tp_repr */
- 0, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- 0, /* tp_hash */
- 0, /* tp_call */
- 0, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */
- 0, /* tp_doc */
- (traverseproc)__Pyx_async_gen_wrapped_val_traverse, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- 0, /* tp_methods */
- 0, /* tp_members */
- 0, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- 0, /* tp_init */
- 0, /* tp_alloc */
- 0, /* tp_new */
- 0, /* tp_free */
- 0, /* tp_is_gc */
- 0, /* tp_bases */
- 0, /* tp_mro */
- 0, /* tp_cache */
- 0, /* tp_subclasses */
- 0, /* tp_weaklist */
- 0, /* tp_del */
- 0, /* tp_version_tag */
-#if PY_VERSION_HEX >= 0x030400a1
- 0, /* tp_finalize */
-#endif
+};
+
+
+static PyObject *
+__Pyx_async_gen_asend_new(__pyx_PyAsyncGenObject *gen, PyObject *sendval)
+{
+ __pyx_PyAsyncGenASend *o;
+ if (__Pyx_ag_asend_freelist_free) {
+ __Pyx_ag_asend_freelist_free--;
+ o = __Pyx_ag_asend_freelist[__Pyx_ag_asend_freelist_free];
+ _Py_NewReference((PyObject *)o);
+ } else {
+ o = PyObject_GC_New(__pyx_PyAsyncGenASend, __pyx__PyAsyncGenASendType);
+ if (o == NULL) {
+ return NULL;
+ }
+ }
+
+ Py_INCREF(gen);
+ o->ags_gen = gen;
+
+ Py_XINCREF(sendval);
+ o->ags_sendval = sendval;
+
+ o->ags_state = __PYX_AWAITABLE_STATE_INIT;
+
+ PyObject_GC_Track((PyObject*)o);
+ return (PyObject*)o;
+}
+
+
+/* ---------- Async Generator Value Wrapper ------------ */
+
+
+static void
+__Pyx_async_gen_wrapped_val_dealloc(__pyx__PyAsyncGenWrappedValue *o)
+{
+ PyObject_GC_UnTrack((PyObject *)o);
+ Py_CLEAR(o->agw_val);
+ if (__Pyx_ag_value_freelist_free < _PyAsyncGen_MAXFREELIST) {
+ assert(__pyx__PyAsyncGenWrappedValue_CheckExact(o));
+ __Pyx_ag_value_freelist[__Pyx_ag_value_freelist_free++] = o;
+ } else {
+ PyObject_GC_Del(o);
+ }
+}
+
+
+static int
+__Pyx_async_gen_wrapped_val_traverse(__pyx__PyAsyncGenWrappedValue *o,
+ visitproc visit, void *arg)
+{
+ Py_VISIT(o->agw_val);
+ return 0;
+}
+
+
+static PyTypeObject __pyx__PyAsyncGenWrappedValueType_type = {
+ PyVarObject_HEAD_INIT(0, 0)
+ "async_generator_wrapped_value", /* tp_name */
+ sizeof(__pyx__PyAsyncGenWrappedValue), /* tp_basicsize */
+ 0, /* tp_itemsize */
+ /* methods */
+ (destructor)__Pyx_async_gen_wrapped_val_dealloc, /* tp_dealloc */
+ 0, /* tp_print */
+ 0, /* tp_getattr */
+ 0, /* tp_setattr */
+ 0, /* tp_as_async */
+ 0, /* tp_repr */
+ 0, /* tp_as_number */
+ 0, /* tp_as_sequence */
+ 0, /* tp_as_mapping */
+ 0, /* tp_hash */
+ 0, /* tp_call */
+ 0, /* tp_str */
+ 0, /* tp_getattro */
+ 0, /* tp_setattro */
+ 0, /* tp_as_buffer */
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */
+ 0, /* tp_doc */
+ (traverseproc)__Pyx_async_gen_wrapped_val_traverse, /* tp_traverse */
+ 0, /* tp_clear */
+ 0, /* tp_richcompare */
+ 0, /* tp_weaklistoffset */
+ 0, /* tp_iter */
+ 0, /* tp_iternext */
+ 0, /* tp_methods */
+ 0, /* tp_members */
+ 0, /* tp_getset */
+ 0, /* tp_base */
+ 0, /* tp_dict */
+ 0, /* tp_descr_get */
+ 0, /* tp_descr_set */
+ 0, /* tp_dictoffset */
+ 0, /* tp_init */
+ 0, /* tp_alloc */
+ 0, /* tp_new */
+ 0, /* tp_free */
+ 0, /* tp_is_gc */
+ 0, /* tp_bases */
+ 0, /* tp_mro */
+ 0, /* tp_cache */
+ 0, /* tp_subclasses */
+ 0, /* tp_weaklist */
+ 0, /* tp_del */
+ 0, /* tp_version_tag */
+#if PY_VERSION_HEX >= 0x030400a1
+ 0, /* tp_finalize */
+#endif
#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800)
0, /*tp_vectorcall*/
#endif
@@ -792,288 +792,288 @@ static PyTypeObject __pyx__PyAsyncGenWrappedValueType_type = {
#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000
0, /*tp_pypy_flags*/
#endif
-};
-
-
-static PyObject *
-__Pyx__PyAsyncGenValueWrapperNew(PyObject *val)
-{
- // NOTE: steals a reference to val!
- __pyx__PyAsyncGenWrappedValue *o;
- assert(val);
-
- if (__Pyx_ag_value_freelist_free) {
- __Pyx_ag_value_freelist_free--;
- o = __Pyx_ag_value_freelist[__Pyx_ag_value_freelist_free];
- assert(__pyx__PyAsyncGenWrappedValue_CheckExact(o));
- _Py_NewReference((PyObject*)o);
- } else {
- o = PyObject_GC_New(__pyx__PyAsyncGenWrappedValue, __pyx__PyAsyncGenWrappedValueType);
- if (unlikely(!o)) {
- Py_DECREF(val);
- return NULL;
- }
- }
- o->agw_val = val;
- // no Py_INCREF(val) - steals reference!
- PyObject_GC_Track((PyObject*)o);
- return (PyObject*)o;
-}
-
-
-/* ---------- Async Generator AThrow awaitable ------------ */
-
-
-static void
-__Pyx_async_gen_athrow_dealloc(__pyx_PyAsyncGenAThrow *o)
-{
- PyObject_GC_UnTrack((PyObject *)o);
- Py_CLEAR(o->agt_gen);
- Py_CLEAR(o->agt_args);
- PyObject_GC_Del(o);
-}
-
-
-static int
-__Pyx_async_gen_athrow_traverse(__pyx_PyAsyncGenAThrow *o, visitproc visit, void *arg)
-{
- Py_VISIT(o->agt_gen);
- Py_VISIT(o->agt_args);
- return 0;
-}
-
-
-static PyObject *
-__Pyx_async_gen_athrow_send(__pyx_PyAsyncGenAThrow *o, PyObject *arg)
-{
- __pyx_CoroutineObject *gen = (__pyx_CoroutineObject*)o->agt_gen;
- PyObject *retval;
-
- if (o->agt_state == __PYX_AWAITABLE_STATE_CLOSED) {
- PyErr_SetNone(PyExc_StopIteration);
- return NULL;
- }
-
- if (o->agt_state == __PYX_AWAITABLE_STATE_INIT) {
- if (o->agt_gen->ag_closed) {
- PyErr_SetNone(PyExc_StopIteration);
- return NULL;
- }
-
- if (arg != Py_None) {
- PyErr_SetString(PyExc_RuntimeError, __Pyx_NON_INIT_CORO_MSG);
- return NULL;
- }
-
- o->agt_state = __PYX_AWAITABLE_STATE_ITER;
-
- if (o->agt_args == NULL) {
- /* aclose() mode */
- o->agt_gen->ag_closed = 1;
-
- retval = __Pyx__Coroutine_Throw((PyObject*)gen,
- /* Do not close generator when
- PyExc_GeneratorExit is passed */
- PyExc_GeneratorExit, NULL, NULL, NULL, 0);
-
- if (retval && __pyx__PyAsyncGenWrappedValue_CheckExact(retval)) {
- Py_DECREF(retval);
- goto yield_close;
- }
- } else {
- PyObject *typ;
- PyObject *tb = NULL;
- PyObject *val = NULL;
-
- if (!PyArg_UnpackTuple(o->agt_args, "athrow", 1, 3,
- &typ, &val, &tb)) {
- return NULL;
- }
-
- retval = __Pyx__Coroutine_Throw((PyObject*)gen,
- /* Do not close generator when PyExc_GeneratorExit is passed */
- typ, val, tb, o->agt_args, 0);
- retval = __Pyx_async_gen_unwrap_value(o->agt_gen, retval);
- }
- if (retval == NULL) {
- goto check_error;
- }
- return retval;
- }
-
- assert (o->agt_state == __PYX_AWAITABLE_STATE_ITER);
-
- retval = __Pyx_Coroutine_Send((PyObject *)gen, arg);
- if (o->agt_args) {
- return __Pyx_async_gen_unwrap_value(o->agt_gen, retval);
- } else {
- /* aclose() mode */
- if (retval) {
- if (__pyx__PyAsyncGenWrappedValue_CheckExact(retval)) {
- Py_DECREF(retval);
- goto yield_close;
- }
- else {
- return retval;
- }
- }
- else {
- goto check_error;
- }
- }
-
-yield_close:
- PyErr_SetString(
- PyExc_RuntimeError, __Pyx_ASYNC_GEN_IGNORED_EXIT_MSG);
- return NULL;
-
-check_error:
- if (PyErr_ExceptionMatches(__Pyx_PyExc_StopAsyncIteration)) {
- o->agt_state = __PYX_AWAITABLE_STATE_CLOSED;
- if (o->agt_args == NULL) {
- // when aclose() is called we don't want to propagate
- // StopAsyncIteration; just raise StopIteration, signalling
- // that 'aclose()' is done.
- PyErr_Clear();
- PyErr_SetNone(PyExc_StopIteration);
- }
- }
- else if (PyErr_ExceptionMatches(PyExc_GeneratorExit)) {
- o->agt_state = __PYX_AWAITABLE_STATE_CLOSED;
- PyErr_Clear(); /* ignore these errors */
- PyErr_SetNone(PyExc_StopIteration);
- }
- return NULL;
-}
-
-
-static PyObject *
-__Pyx_async_gen_athrow_throw(__pyx_PyAsyncGenAThrow *o, PyObject *args)
-{
- PyObject *retval;
-
- if (o->agt_state == __PYX_AWAITABLE_STATE_INIT) {
- PyErr_SetString(PyExc_RuntimeError, __Pyx_NON_INIT_CORO_MSG);
- return NULL;
- }
-
- if (o->agt_state == __PYX_AWAITABLE_STATE_CLOSED) {
- PyErr_SetNone(PyExc_StopIteration);
- return NULL;
- }
-
- retval = __Pyx_Coroutine_Throw((PyObject*)o->agt_gen, args);
- if (o->agt_args) {
- return __Pyx_async_gen_unwrap_value(o->agt_gen, retval);
- } else {
- /* aclose() mode */
- if (retval && __pyx__PyAsyncGenWrappedValue_CheckExact(retval)) {
- Py_DECREF(retval);
- PyErr_SetString(PyExc_RuntimeError, __Pyx_ASYNC_GEN_IGNORED_EXIT_MSG);
- return NULL;
- }
- return retval;
- }
-}
-
-
-static PyObject *
-__Pyx_async_gen_athrow_iternext(__pyx_PyAsyncGenAThrow *o)
-{
- return __Pyx_async_gen_athrow_send(o, Py_None);
-}
-
-
-static PyObject *
-__Pyx_async_gen_athrow_close(PyObject *g, CYTHON_UNUSED PyObject *args)
-{
- __pyx_PyAsyncGenAThrow *o = (__pyx_PyAsyncGenAThrow*) g;
- o->agt_state = __PYX_AWAITABLE_STATE_CLOSED;
- Py_RETURN_NONE;
-}
-
-
-static PyMethodDef __Pyx_async_gen_athrow_methods[] = {
- {"send", (PyCFunction)__Pyx_async_gen_athrow_send, METH_O, __Pyx_async_gen_send_doc},
- {"throw", (PyCFunction)__Pyx_async_gen_athrow_throw, METH_VARARGS, __Pyx_async_gen_throw_doc},
- {"close", (PyCFunction)__Pyx_async_gen_athrow_close, METH_NOARGS, __Pyx_async_gen_close_doc},
+};
+
+
+static PyObject *
+__Pyx__PyAsyncGenValueWrapperNew(PyObject *val)
+{
+ // NOTE: steals a reference to val!
+ __pyx__PyAsyncGenWrappedValue *o;
+ assert(val);
+
+ if (__Pyx_ag_value_freelist_free) {
+ __Pyx_ag_value_freelist_free--;
+ o = __Pyx_ag_value_freelist[__Pyx_ag_value_freelist_free];
+ assert(__pyx__PyAsyncGenWrappedValue_CheckExact(o));
+ _Py_NewReference((PyObject*)o);
+ } else {
+ o = PyObject_GC_New(__pyx__PyAsyncGenWrappedValue, __pyx__PyAsyncGenWrappedValueType);
+ if (unlikely(!o)) {
+ Py_DECREF(val);
+ return NULL;
+ }
+ }
+ o->agw_val = val;
+ // no Py_INCREF(val) - steals reference!
+ PyObject_GC_Track((PyObject*)o);
+ return (PyObject*)o;
+}
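
Because the constructor steals the reference, the caller hands over its only reference to `val` and must not release it afterwards; on failure the constructor has already dropped it. A hypothetical caller, for illustration:

/* Hypothetical caller: wrap a freshly created value. */
static PyObject *
wrap_fortytwo(void)
{
    PyObject *val = PyLong_FromLong(42);      /* new reference */
    if (val == NULL)
        return NULL;
    /* The wrapper steals 'val': no Py_DECREF(val) on either path below. */
    return __Pyx__PyAsyncGenValueWrapperNew(val);
}
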
+
+
+/* ---------- Async Generator AThrow awaitable ------------ */
+
+
+static void
+__Pyx_async_gen_athrow_dealloc(__pyx_PyAsyncGenAThrow *o)
+{
+ PyObject_GC_UnTrack((PyObject *)o);
+ Py_CLEAR(o->agt_gen);
+ Py_CLEAR(o->agt_args);
+ PyObject_GC_Del(o);
+}
+
+
+static int
+__Pyx_async_gen_athrow_traverse(__pyx_PyAsyncGenAThrow *o, visitproc visit, void *arg)
+{
+ Py_VISIT(o->agt_gen);
+ Py_VISIT(o->agt_args);
+ return 0;
+}
+
+
+static PyObject *
+__Pyx_async_gen_athrow_send(__pyx_PyAsyncGenAThrow *o, PyObject *arg)
+{
+ __pyx_CoroutineObject *gen = (__pyx_CoroutineObject*)o->agt_gen;
+ PyObject *retval;
+
+ if (o->agt_state == __PYX_AWAITABLE_STATE_CLOSED) {
+ PyErr_SetNone(PyExc_StopIteration);
+ return NULL;
+ }
+
+ if (o->agt_state == __PYX_AWAITABLE_STATE_INIT) {
+ if (o->agt_gen->ag_closed) {
+ PyErr_SetNone(PyExc_StopIteration);
+ return NULL;
+ }
+
+ if (arg != Py_None) {
+ PyErr_SetString(PyExc_RuntimeError, __Pyx_NON_INIT_CORO_MSG);
+ return NULL;
+ }
+
+ o->agt_state = __PYX_AWAITABLE_STATE_ITER;
+
+ if (o->agt_args == NULL) {
+ /* aclose() mode */
+ o->agt_gen->ag_closed = 1;
+
+ retval = __Pyx__Coroutine_Throw((PyObject*)gen,
+ /* Do not close generator when
+ PyExc_GeneratorExit is passed */
+ PyExc_GeneratorExit, NULL, NULL, NULL, 0);
+
+ if (retval && __pyx__PyAsyncGenWrappedValue_CheckExact(retval)) {
+ Py_DECREF(retval);
+ goto yield_close;
+ }
+ } else {
+ PyObject *typ;
+ PyObject *tb = NULL;
+ PyObject *val = NULL;
+
+ if (!PyArg_UnpackTuple(o->agt_args, "athrow", 1, 3,
+ &typ, &val, &tb)) {
+ return NULL;
+ }
+
+ retval = __Pyx__Coroutine_Throw((PyObject*)gen,
+ /* Do not close generator when PyExc_GeneratorExit is passed */
+ typ, val, tb, o->agt_args, 0);
+ retval = __Pyx_async_gen_unwrap_value(o->agt_gen, retval);
+ }
+ if (retval == NULL) {
+ goto check_error;
+ }
+ return retval;
+ }
+
+ assert (o->agt_state == __PYX_AWAITABLE_STATE_ITER);
+
+ retval = __Pyx_Coroutine_Send((PyObject *)gen, arg);
+ if (o->agt_args) {
+ return __Pyx_async_gen_unwrap_value(o->agt_gen, retval);
+ } else {
+ /* aclose() mode */
+ if (retval) {
+ if (__pyx__PyAsyncGenWrappedValue_CheckExact(retval)) {
+ Py_DECREF(retval);
+ goto yield_close;
+ }
+ else {
+ return retval;
+ }
+ }
+ else {
+ goto check_error;
+ }
+ }
+
+yield_close:
+ PyErr_SetString(
+ PyExc_RuntimeError, __Pyx_ASYNC_GEN_IGNORED_EXIT_MSG);
+ return NULL;
+
+check_error:
+ if (PyErr_ExceptionMatches(__Pyx_PyExc_StopAsyncIteration)) {
+ o->agt_state = __PYX_AWAITABLE_STATE_CLOSED;
+ if (o->agt_args == NULL) {
+ // when aclose() is called we don't want to propagate
+ // StopAsyncIteration; just raise StopIteration, signalling
+ // that 'aclose()' is done.
+ PyErr_Clear();
+ PyErr_SetNone(PyExc_StopIteration);
+ }
+ }
+ else if (PyErr_ExceptionMatches(PyExc_GeneratorExit)) {
+ o->agt_state = __PYX_AWAITABLE_STATE_CLOSED;
+ PyErr_Clear(); /* ignore these errors */
+ PyErr_SetNone(PyExc_StopIteration);
+ }
+ return NULL;
+}
+
+
+static PyObject *
+__Pyx_async_gen_athrow_throw(__pyx_PyAsyncGenAThrow *o, PyObject *args)
+{
+ PyObject *retval;
+
+ if (o->agt_state == __PYX_AWAITABLE_STATE_INIT) {
+ PyErr_SetString(PyExc_RuntimeError, __Pyx_NON_INIT_CORO_MSG);
+ return NULL;
+ }
+
+ if (o->agt_state == __PYX_AWAITABLE_STATE_CLOSED) {
+ PyErr_SetNone(PyExc_StopIteration);
+ return NULL;
+ }
+
+ retval = __Pyx_Coroutine_Throw((PyObject*)o->agt_gen, args);
+ if (o->agt_args) {
+ return __Pyx_async_gen_unwrap_value(o->agt_gen, retval);
+ } else {
+ /* aclose() mode */
+ if (retval && __pyx__PyAsyncGenWrappedValue_CheckExact(retval)) {
+ Py_DECREF(retval);
+ PyErr_SetString(PyExc_RuntimeError, __Pyx_ASYNC_GEN_IGNORED_EXIT_MSG);
+ return NULL;
+ }
+ return retval;
+ }
+}
+
+
+static PyObject *
+__Pyx_async_gen_athrow_iternext(__pyx_PyAsyncGenAThrow *o)
+{
+ return __Pyx_async_gen_athrow_send(o, Py_None);
+}
+
+
+static PyObject *
+__Pyx_async_gen_athrow_close(PyObject *g, CYTHON_UNUSED PyObject *args)
+{
+ __pyx_PyAsyncGenAThrow *o = (__pyx_PyAsyncGenAThrow*) g;
+ o->agt_state = __PYX_AWAITABLE_STATE_CLOSED;
+ Py_RETURN_NONE;
+}
+
+
+static PyMethodDef __Pyx_async_gen_athrow_methods[] = {
+ {"send", (PyCFunction)__Pyx_async_gen_athrow_send, METH_O, __Pyx_async_gen_send_doc},
+ {"throw", (PyCFunction)__Pyx_async_gen_athrow_throw, METH_VARARGS, __Pyx_async_gen_throw_doc},
+ {"close", (PyCFunction)__Pyx_async_gen_athrow_close, METH_NOARGS, __Pyx_async_gen_close_doc},
{"__await__", (PyCFunction)__Pyx_async_gen_self_method, METH_NOARGS, __Pyx_async_gen_await_doc},
- {0, 0, 0, 0} /* Sentinel */
-};
-
-
-#if CYTHON_USE_ASYNC_SLOTS
-static __Pyx_PyAsyncMethodsStruct __Pyx_async_gen_athrow_as_async = {
- PyObject_SelfIter, /* am_await */
- 0, /* am_aiter */
+ {0, 0, 0, 0} /* Sentinel */
+};
+
+
+#if CYTHON_USE_ASYNC_SLOTS
+static __Pyx_PyAsyncMethodsStruct __Pyx_async_gen_athrow_as_async = {
+ PyObject_SelfIter, /* am_await */
+ 0, /* am_aiter */
0, /* am_anext */
#if PY_VERSION_HEX >= 0x030A00A3
0, /*am_send*/
#endif
-};
-#endif
-
-
-static PyTypeObject __pyx__PyAsyncGenAThrowType_type = {
- PyVarObject_HEAD_INIT(0, 0)
- "async_generator_athrow", /* tp_name */
- sizeof(__pyx_PyAsyncGenAThrow), /* tp_basicsize */
- 0, /* tp_itemsize */
- (destructor)__Pyx_async_gen_athrow_dealloc, /* tp_dealloc */
- 0, /* tp_print */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
-#if CYTHON_USE_ASYNC_SLOTS
- &__Pyx_async_gen_athrow_as_async, /* tp_as_async */
-#else
- 0, /*tp_reserved*/
-#endif
- 0, /* tp_repr */
- 0, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- 0, /* tp_hash */
- 0, /* tp_call */
- 0, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */
- 0, /* tp_doc */
- (traverseproc)__Pyx_async_gen_athrow_traverse, /* tp_traverse */
- 0, /* tp_clear */
-#if CYTHON_USE_ASYNC_SLOTS && CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 && PY_VERSION_HEX < 0x030500B1
- // in order to (mis-)use tp_reserved above, we must also implement tp_richcompare
- __Pyx_Coroutine_compare, /*tp_richcompare*/
-#else
- 0, /*tp_richcompare*/
-#endif
- 0, /* tp_weaklistoffset */
- PyObject_SelfIter, /* tp_iter */
- (iternextfunc)__Pyx_async_gen_athrow_iternext, /* tp_iternext */
- __Pyx_async_gen_athrow_methods, /* tp_methods */
- 0, /* tp_members */
- 0, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- 0, /* tp_init */
- 0, /* tp_alloc */
- 0, /* tp_new */
- 0, /* tp_free */
- 0, /* tp_is_gc */
- 0, /* tp_bases */
- 0, /* tp_mro */
- 0, /* tp_cache */
- 0, /* tp_subclasses */
- 0, /* tp_weaklist */
- 0, /* tp_del */
- 0, /* tp_version_tag */
-#if PY_VERSION_HEX >= 0x030400a1
- 0, /* tp_finalize */
-#endif
+};
+#endif
+
+
+static PyTypeObject __pyx__PyAsyncGenAThrowType_type = {
+ PyVarObject_HEAD_INIT(0, 0)
+ "async_generator_athrow", /* tp_name */
+ sizeof(__pyx_PyAsyncGenAThrow), /* tp_basicsize */
+ 0, /* tp_itemsize */
+ (destructor)__Pyx_async_gen_athrow_dealloc, /* tp_dealloc */
+ 0, /* tp_print */
+ 0, /* tp_getattr */
+ 0, /* tp_setattr */
+#if CYTHON_USE_ASYNC_SLOTS
+ &__Pyx_async_gen_athrow_as_async, /* tp_as_async */
+#else
+ 0, /*tp_reserved*/
+#endif
+ 0, /* tp_repr */
+ 0, /* tp_as_number */
+ 0, /* tp_as_sequence */
+ 0, /* tp_as_mapping */
+ 0, /* tp_hash */
+ 0, /* tp_call */
+ 0, /* tp_str */
+ 0, /* tp_getattro */
+ 0, /* tp_setattro */
+ 0, /* tp_as_buffer */
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */
+ 0, /* tp_doc */
+ (traverseproc)__Pyx_async_gen_athrow_traverse, /* tp_traverse */
+ 0, /* tp_clear */
+#if CYTHON_USE_ASYNC_SLOTS && CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 && PY_VERSION_HEX < 0x030500B1
+ // in order to (mis-)use tp_reserved above, we must also implement tp_richcompare
+ __Pyx_Coroutine_compare, /*tp_richcompare*/
+#else
+ 0, /*tp_richcompare*/
+#endif
+ 0, /* tp_weaklistoffset */
+ PyObject_SelfIter, /* tp_iter */
+ (iternextfunc)__Pyx_async_gen_athrow_iternext, /* tp_iternext */
+ __Pyx_async_gen_athrow_methods, /* tp_methods */
+ 0, /* tp_members */
+ 0, /* tp_getset */
+ 0, /* tp_base */
+ 0, /* tp_dict */
+ 0, /* tp_descr_get */
+ 0, /* tp_descr_set */
+ 0, /* tp_dictoffset */
+ 0, /* tp_init */
+ 0, /* tp_alloc */
+ 0, /* tp_new */
+ 0, /* tp_free */
+ 0, /* tp_is_gc */
+ 0, /* tp_bases */
+ 0, /* tp_mro */
+ 0, /* tp_cache */
+ 0, /* tp_subclasses */
+ 0, /* tp_weaklist */
+ 0, /* tp_del */
+ 0, /* tp_version_tag */
+#if PY_VERSION_HEX >= 0x030400a1
+ 0, /* tp_finalize */
+#endif
#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800)
0, /*tp_vectorcall*/
#endif
@@ -1083,51 +1083,51 @@ static PyTypeObject __pyx__PyAsyncGenAThrowType_type = {
#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000
0, /*tp_pypy_flags*/
#endif
-};
-
-
-static PyObject *
-__Pyx_async_gen_athrow_new(__pyx_PyAsyncGenObject *gen, PyObject *args)
-{
- __pyx_PyAsyncGenAThrow *o;
- o = PyObject_GC_New(__pyx_PyAsyncGenAThrow, __pyx__PyAsyncGenAThrowType);
- if (o == NULL) {
- return NULL;
- }
- o->agt_gen = gen;
- o->agt_args = args;
- o->agt_state = __PYX_AWAITABLE_STATE_INIT;
- Py_INCREF(gen);
- Py_XINCREF(args);
- PyObject_GC_Track((PyObject*)o);
- return (PyObject*)o;
-}
-
-
-/* ---------- global type sharing ------------ */
-
-static int __pyx_AsyncGen_init(void) {
- // on Windows, C-API functions can't be used in slots statically
- __pyx_AsyncGenType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict;
- __pyx__PyAsyncGenWrappedValueType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict;
- __pyx__PyAsyncGenAThrowType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict;
- __pyx__PyAsyncGenASendType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict;
-
- __pyx_AsyncGenType = __Pyx_FetchCommonType(&__pyx_AsyncGenType_type);
- if (unlikely(!__pyx_AsyncGenType))
- return -1;
-
- __pyx__PyAsyncGenAThrowType = __Pyx_FetchCommonType(&__pyx__PyAsyncGenAThrowType_type);
- if (unlikely(!__pyx__PyAsyncGenAThrowType))
- return -1;
-
- __pyx__PyAsyncGenWrappedValueType = __Pyx_FetchCommonType(&__pyx__PyAsyncGenWrappedValueType_type);
- if (unlikely(!__pyx__PyAsyncGenWrappedValueType))
- return -1;
-
- __pyx__PyAsyncGenASendType = __Pyx_FetchCommonType(&__pyx__PyAsyncGenASendType_type);
- if (unlikely(!__pyx__PyAsyncGenASendType))
- return -1;
-
- return 0;
-}
+};
+
+
+static PyObject *
+__Pyx_async_gen_athrow_new(__pyx_PyAsyncGenObject *gen, PyObject *args)
+{
+ __pyx_PyAsyncGenAThrow *o;
+ o = PyObject_GC_New(__pyx_PyAsyncGenAThrow, __pyx__PyAsyncGenAThrowType);
+ if (o == NULL) {
+ return NULL;
+ }
+ o->agt_gen = gen;
+ o->agt_args = args;
+ o->agt_state = __PYX_AWAITABLE_STATE_INIT;
+ Py_INCREF(gen);
+ Py_XINCREF(args);
+ PyObject_GC_Track((PyObject*)o);
+ return (PyObject*)o;
+}
+
+
+/* ---------- global type sharing ------------ */
+
+static int __pyx_AsyncGen_init(void) {
+ // on Windows, C-API functions can't be used in slots statically
+ __pyx_AsyncGenType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict;
+ __pyx__PyAsyncGenWrappedValueType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict;
+ __pyx__PyAsyncGenAThrowType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict;
+ __pyx__PyAsyncGenASendType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict;
+
+ __pyx_AsyncGenType = __Pyx_FetchCommonType(&__pyx_AsyncGenType_type);
+ if (unlikely(!__pyx_AsyncGenType))
+ return -1;
+
+ __pyx__PyAsyncGenAThrowType = __Pyx_FetchCommonType(&__pyx__PyAsyncGenAThrowType_type);
+ if (unlikely(!__pyx__PyAsyncGenAThrowType))
+ return -1;
+
+ __pyx__PyAsyncGenWrappedValueType = __Pyx_FetchCommonType(&__pyx__PyAsyncGenWrappedValueType_type);
+ if (unlikely(!__pyx__PyAsyncGenWrappedValueType))
+ return -1;
+
+ __pyx__PyAsyncGenASendType = __Pyx_FetchCommonType(&__pyx__PyAsyncGenASendType_type);
+ if (unlikely(!__pyx__PyAsyncGenASendType))
+ return -1;
+
+ return 0;
+}
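
The runtime assignment of `tp_getattro` works around a Windows linking rule: when CPython is a DLL, the address of an imported function such as `PyObject_GenericGetAttr` is not an address constant, so it cannot appear in a static initializer. The general shape of the workaround, sketched with a hypothetical type:

/* Hypothetical type: slots pointing at C-API functions stay zero in the
 * static initializer and are patched in before PyType_Ready(). */
static PyTypeObject MyType = {
    PyVarObject_HEAD_INIT(0, 0)
    "mymodule.MyType",            /* tp_name */
    sizeof(PyObject),             /* tp_basicsize; remaining slots are 0 */
};

static int
mytype_ready(void)
{
    MyType.tp_getattro = PyObject_GenericGetAttr;  /* runtime assignment */
    return PyType_Ready(&MyType);
}
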
diff --git a/contrib/tools/cython/Cython/Utility/Buffer.c b/contrib/tools/cython/Cython/Utility/Buffer.c
index da9280173e..3c7105fa35 100644
--- a/contrib/tools/cython/Cython/Utility/Buffer.c
+++ b/contrib/tools/cython/Cython/Utility/Buffer.c
@@ -52,7 +52,7 @@ static void __Pyx_RaiseBufferFallbackError(void) {
}
/////////////// BufferFormatStructs.proto ///////////////
-//@proto_block: utility_code_proto_before_types
+//@proto_block: utility_code_proto_before_types
#define IS_UNSIGNED(type) (((type) -1) > 0)
@@ -96,9 +96,9 @@ typedef struct {
char is_valid_array;
} __Pyx_BufFmt_Context;
-
+
/////////////// GetAndReleaseBuffer.proto ///////////////
-
+
#if PY_MAJOR_VERSION < 3
static int __Pyx_GetBuffer(PyObject *obj, Py_buffer *view, int flags);
static void __Pyx_ReleaseBuffer(Py_buffer *view);
@@ -108,14 +108,14 @@ typedef struct {
#endif
/////////////// GetAndReleaseBuffer ///////////////
-
+
#if PY_MAJOR_VERSION < 3
static int __Pyx_GetBuffer(PyObject *obj, Py_buffer *view, int flags) {
if (PyObject_CheckBuffer(obj)) return PyObject_GetBuffer(obj, view, flags);
{{for type_ptr, getbuffer, releasebuffer in types}}
{{if getbuffer}}
- if (__Pyx_TypeCheck(obj, {{type_ptr}})) return {{getbuffer}}(obj, view, flags);
+ if (__Pyx_TypeCheck(obj, {{type_ptr}})) return {{getbuffer}}(obj, view, flags);
{{endif}}
{{endfor}}
@@ -132,111 +132,111 @@ static void __Pyx_ReleaseBuffer(Py_buffer *view) {
return;
}
- if ((0)) {}
+ if ((0)) {}
{{for type_ptr, getbuffer, releasebuffer in types}}
{{if releasebuffer}}
- else if (__Pyx_TypeCheck(obj, {{type_ptr}})) {{releasebuffer}}(obj, view);
+ else if (__Pyx_TypeCheck(obj, {{type_ptr}})) {{releasebuffer}}(obj, view);
{{endif}}
{{endfor}}
- view->obj = NULL;
+ view->obj = NULL;
Py_DECREF(obj);
}
#endif /* PY_MAJOR_VERSION < 3 */
-/////////////// BufferGetAndValidate.proto ///////////////
-
-#define __Pyx_GetBufferAndValidate(buf, obj, dtype, flags, nd, cast, stack) \
- ((obj == Py_None || obj == NULL) ? \
- (__Pyx_ZeroBuffer(buf), 0) : \
- __Pyx__GetBufferAndValidate(buf, obj, dtype, flags, nd, cast, stack))
-
-static int __Pyx__GetBufferAndValidate(Py_buffer* buf, PyObject* obj,
- __Pyx_TypeInfo* dtype, int flags, int nd, int cast, __Pyx_BufFmt_StackElem* stack);
-static void __Pyx_ZeroBuffer(Py_buffer* buf);
-static CYTHON_INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info);/*proto*/
-
-static Py_ssize_t __Pyx_minusones[] = { {{ ", ".join(["-1"] * max_dims) }} };
-static Py_ssize_t __Pyx_zeros[] = { {{ ", ".join(["0"] * max_dims) }} };
-
-
-/////////////// BufferGetAndValidate ///////////////
-//@requires: BufferFormatCheck
-
-static CYTHON_INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info) {
- if (unlikely(info->buf == NULL)) return;
- if (info->suboffsets == __Pyx_minusones) info->suboffsets = NULL;
- __Pyx_ReleaseBuffer(info);
-}
-
-static void __Pyx_ZeroBuffer(Py_buffer* buf) {
- buf->buf = NULL;
- buf->obj = NULL;
- buf->strides = __Pyx_zeros;
- buf->shape = __Pyx_zeros;
- buf->suboffsets = __Pyx_minusones;
-}
-
-static int __Pyx__GetBufferAndValidate(
- Py_buffer* buf, PyObject* obj, __Pyx_TypeInfo* dtype, int flags,
- int nd, int cast, __Pyx_BufFmt_StackElem* stack)
-{
- buf->buf = NULL;
- if (unlikely(__Pyx_GetBuffer(obj, buf, flags) == -1)) {
- __Pyx_ZeroBuffer(buf);
- return -1;
- }
- // From this point on, we have acquired the buffer and must release it on errors.
- if (unlikely(buf->ndim != nd)) {
- PyErr_Format(PyExc_ValueError,
- "Buffer has wrong number of dimensions (expected %d, got %d)",
- nd, buf->ndim);
- goto fail;
- }
- if (!cast) {
- __Pyx_BufFmt_Context ctx;
- __Pyx_BufFmt_Init(&ctx, stack, dtype);
- if (!__Pyx_BufFmt_CheckString(&ctx, buf->format)) goto fail;
- }
+/////////////// BufferGetAndValidate.proto ///////////////
+
+#define __Pyx_GetBufferAndValidate(buf, obj, dtype, flags, nd, cast, stack) \
+ ((obj == Py_None || obj == NULL) ? \
+ (__Pyx_ZeroBuffer(buf), 0) : \
+ __Pyx__GetBufferAndValidate(buf, obj, dtype, flags, nd, cast, stack))
+
+static int __Pyx__GetBufferAndValidate(Py_buffer* buf, PyObject* obj,
+ __Pyx_TypeInfo* dtype, int flags, int nd, int cast, __Pyx_BufFmt_StackElem* stack);
+static void __Pyx_ZeroBuffer(Py_buffer* buf);
+static CYTHON_INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info);/*proto*/
+
+static Py_ssize_t __Pyx_minusones[] = { {{ ", ".join(["-1"] * max_dims) }} };
+static Py_ssize_t __Pyx_zeros[] = { {{ ", ".join(["0"] * max_dims) }} };
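
The `{{ ... }}` expressions are Tempita template code that Cython evaluates while emitting this utility. Assuming `max_dims` is 8 (Cython's default buffer dimension limit), the two declarations expand to:

/* Expanded form, assuming max_dims == 8: */
static Py_ssize_t __Pyx_minusones[] = { -1, -1, -1, -1, -1, -1, -1, -1 };
static Py_ssize_t __Pyx_zeros[] = { 0, 0, 0, 0, 0, 0, 0, 0 };
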
+
+
+/////////////// BufferGetAndValidate ///////////////
+//@requires: BufferFormatCheck
+
+static CYTHON_INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info) {
+ if (unlikely(info->buf == NULL)) return;
+ if (info->suboffsets == __Pyx_minusones) info->suboffsets = NULL;
+ __Pyx_ReleaseBuffer(info);
+}
+
+static void __Pyx_ZeroBuffer(Py_buffer* buf) {
+ buf->buf = NULL;
+ buf->obj = NULL;
+ buf->strides = __Pyx_zeros;
+ buf->shape = __Pyx_zeros;
+ buf->suboffsets = __Pyx_minusones;
+}
+
+static int __Pyx__GetBufferAndValidate(
+ Py_buffer* buf, PyObject* obj, __Pyx_TypeInfo* dtype, int flags,
+ int nd, int cast, __Pyx_BufFmt_StackElem* stack)
+{
+ buf->buf = NULL;
+ if (unlikely(__Pyx_GetBuffer(obj, buf, flags) == -1)) {
+ __Pyx_ZeroBuffer(buf);
+ return -1;
+ }
+ // From this point on, we have acquired the buffer and must release it on errors.
+ if (unlikely(buf->ndim != nd)) {
+ PyErr_Format(PyExc_ValueError,
+ "Buffer has wrong number of dimensions (expected %d, got %d)",
+ nd, buf->ndim);
+ goto fail;
+ }
+ if (!cast) {
+ __Pyx_BufFmt_Context ctx;
+ __Pyx_BufFmt_Init(&ctx, stack, dtype);
+ if (!__Pyx_BufFmt_CheckString(&ctx, buf->format)) goto fail;
+ }
if (unlikely((size_t)buf->itemsize != dtype->size)) {
- PyErr_Format(PyExc_ValueError,
- "Item size of buffer (%" CYTHON_FORMAT_SSIZE_T "d byte%s) does not match size of '%s' (%" CYTHON_FORMAT_SSIZE_T "d byte%s)",
- buf->itemsize, (buf->itemsize > 1) ? "s" : "",
- dtype->name, (Py_ssize_t)dtype->size, (dtype->size > 1) ? "s" : "");
- goto fail;
- }
- if (buf->suboffsets == NULL) buf->suboffsets = __Pyx_minusones;
- return 0;
-fail:;
- __Pyx_SafeReleaseBuffer(buf);
- return -1;
-}
-
-
-/////////////// BufferFormatCheck.proto ///////////////
-
-// Buffer format string checking
-//
-// Buffer type checking. Utility code for checking that acquired
-// buffers match our assumptions. We only need to check ndim and
-// the format string; the access mode/flags is checked by the
-// exporter. See:
-//
-// http://docs.python.org/3/library/struct.html
-// http://legacy.python.org/dev/peps/pep-3118/#additions-to-the-struct-string-syntax
-//
-// The alignment code is copied from _struct.c in Python.
-
+ PyErr_Format(PyExc_ValueError,
+ "Item size of buffer (%" CYTHON_FORMAT_SSIZE_T "d byte%s) does not match size of '%s' (%" CYTHON_FORMAT_SSIZE_T "d byte%s)",
+ buf->itemsize, (buf->itemsize > 1) ? "s" : "",
+ dtype->name, (Py_ssize_t)dtype->size, (dtype->size > 1) ? "s" : "");
+ goto fail;
+ }
+ if (buf->suboffsets == NULL) buf->suboffsets = __Pyx_minusones;
+ return 0;
+fail:;
+ __Pyx_SafeReleaseBuffer(buf);
+ return -1;
+}
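
/* [illustration only, not part of the patch] A minimal sketch of how
 * generated module code typically drives the helpers above: acquire and
 * validate a 1-D buffer of doubles, walk it, release it. The descriptor
 * name __Pyx_TypeInfo_double and the flag combination are assumptions. */
static int sum_doubles(PyObject *obj, double *out) {
    __Pyx_BufFmt_StackElem stack[1];
    Py_buffer view;
    Py_ssize_t i;
    if (__Pyx_GetBufferAndValidate(&view, obj, &__Pyx_TypeInfo_double,
                                   PyBUF_FORMAT | PyBUF_STRIDES,
                                   1 /* nd */, 0 /* cast */, stack) == -1)
        return -1;                          /* exception already set */
    *out = 0.0;
    for (i = 0; i < view.shape[0]; i++)     /* strided 1-D access */
        *out += *(double *)((char *)view.buf + i * view.strides[0]);
    __Pyx_SafeReleaseBuffer(&view);         /* no-op for a Py_None "buffer" */
    return 0;
}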
+
+
+/////////////// BufferFormatCheck.proto ///////////////
+
+// Buffer format string checking
+//
+// Buffer type checking. Utility code for checking that acquired
+// buffers match our assumptions. We only need to check ndim and
+// the format string; the access mode/flags is checked by the
+// exporter. See:
+//
+// http://docs.python.org/3/library/struct.html
+// http://legacy.python.org/dev/peps/pep-3118/#additions-to-the-struct-string-syntax
+//
+// The alignment code is copied from _struct.c in Python.
+
static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const char* ts);
static void __Pyx_BufFmt_Init(__Pyx_BufFmt_Context* ctx,
__Pyx_BufFmt_StackElem* stack,
- __Pyx_TypeInfo* type); /*proto*/
+ __Pyx_TypeInfo* type); /*proto*/
/////////////// BufferFormatCheck ///////////////
-//@requires: ModuleSetupCode.c::IsLittleEndian
-//@requires: BufferFormatStructs
+//@requires: ModuleSetupCode.c::IsLittleEndian
+//@requires: BufferFormatStructs
static void __Pyx_BufFmt_Init(__Pyx_BufFmt_Context* ctx,
__Pyx_BufFmt_StackElem* stack,
@@ -598,7 +598,7 @@ static int __Pyx_BufFmt_ProcessTypeChunk(__Pyx_BufFmt_Context* ctx) {
}
/* Parse an array in the format string (e.g. (1,2,3)) */
-static PyObject *
+static PyObject *
__pyx_buffmt_parse_array(__Pyx_BufFmt_Context* ctx, const char** tsp)
{
const char *ts = *tsp;
@@ -680,7 +680,7 @@ static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const cha
++ts;
break;
case '<':
- if (!__Pyx_Is_Little_Endian()) {
+ if (!__Pyx_Is_Little_Endian()) {
PyErr_SetString(PyExc_ValueError, "Little-endian buffer not supported on big-endian compiler");
return NULL;
}
@@ -689,7 +689,7 @@ static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const cha
break;
case '>':
case '!':
- if (__Pyx_Is_Little_Endian()) {
+ if (__Pyx_Is_Little_Endian()) {
PyErr_SetString(PyExc_ValueError, "Big-endian buffer not supported on little-endian compiler");
return NULL;
}
@@ -754,7 +754,7 @@ static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const cha
__Pyx_BufFmt_RaiseUnexpectedChar('Z');
return NULL;
}
- CYTHON_FALLTHROUGH;
+ CYTHON_FALLTHROUGH;
case '?': case 'c': case 'b': case 'B': case 'h': case 'H': case 'i': case 'I':
case 'l': case 'L': case 'q': case 'Q':
case 'f': case 'd': case 'g':
@@ -768,7 +768,7 @@ static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const cha
++ts;
break;
}
- CYTHON_FALLTHROUGH;
+ CYTHON_FALLTHROUGH;
case 's':
/* 's' or new type (cannot be added to current pool) */
if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL;
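
/* [aside, not from the patch] The strings parsed above are the PEP 3118
 * format strings exposed through the standard buffer protocol; a
 * self-contained look at them via public CPython API only: */
#include <Python.h>
#include <stdio.h>

/* Print the PEP 3118 metadata of any buffer exporter, e.g. bytearray
 * ("B") or array('d', ...) ("d"); a NULL format means unsigned bytes. */
static void show_format(PyObject *obj) {
    Py_buffer view;
    if (PyObject_GetBuffer(obj, &view, PyBUF_FORMAT | PyBUF_ND) < 0)
        return;                             /* exception set by CPython */
    printf("format=%s itemsize=%zd ndim=%d\n",
           view.format ? view.format : "B",
           view.itemsize, view.ndim);
    PyBuffer_Release(&view);
}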
@@ -802,9 +802,9 @@ static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const cha
static int __pyx_typeinfo_cmp(__Pyx_TypeInfo *a, __Pyx_TypeInfo *b);
/////////////// TypeInfoCompare ///////////////
-//@requires: BufferFormatStructs
-
-// See if two dtypes are equal
+//@requires: BufferFormatStructs
+
+// See if two dtypes are equal
static int
__pyx_typeinfo_cmp(__Pyx_TypeInfo *a, __Pyx_TypeInfo *b)
{
@@ -870,10 +870,10 @@ struct __pyx_typeinfo_string {
static struct __pyx_typeinfo_string __Pyx_TypeInfoToFormat(__Pyx_TypeInfo *type);
/////////////// TypeInfoToFormat ///////////////
-//@requires: BufferFormatStructs
-// See also MemoryView.pyx:BufferFormatFromTypeInfo
-
+//@requires: BufferFormatStructs
+
+// See also MemoryView.pyx:BufferFormatFromTypeInfo
static struct __pyx_typeinfo_string __Pyx_TypeInfoToFormat(__Pyx_TypeInfo *type) {
struct __pyx_typeinfo_string result = { {0} };
char *buf = (char *) result.string;
diff --git a/contrib/tools/cython/Cython/Utility/Builtins.c b/contrib/tools/cython/Cython/Utility/Builtins.c
index aa3d58a72c..1ffb3bcebd 100644
--- a/contrib/tools/cython/Cython/Utility/Builtins.c
+++ b/contrib/tools/cython/Cython/Utility/Builtins.c
@@ -109,7 +109,7 @@ static PyObject* __Pyx_PyExec3(PyObject* o, PyObject* globals, PyObject* locals)
locals = globals;
}
- if (__Pyx_PyDict_GetItemStr(globals, PYIDENT("__builtins__")) == NULL) {
+ if (__Pyx_PyDict_GetItemStr(globals, PYIDENT("__builtins__")) == NULL) {
if (PyDict_SetItem(globals, PYIDENT("__builtins__"), PyEval_GetBuiltins()) < 0)
goto bad;
}
@@ -171,46 +171,46 @@ static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *
//////////////////// GetAttr3 ////////////////////
//@requires: ObjectHandling.c::GetAttr
-//@requires: Exceptions.c::PyThreadStateGet
-//@requires: Exceptions.c::PyErrFetchRestore
-//@requires: Exceptions.c::PyErrExceptionMatches
-
-static PyObject *__Pyx_GetAttr3Default(PyObject *d) {
- __Pyx_PyThreadState_declare
- __Pyx_PyThreadState_assign
- if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError)))
- return NULL;
- __Pyx_PyErr_Clear();
- Py_INCREF(d);
- return d;
-}
-
+//@requires: Exceptions.c::PyThreadStateGet
+//@requires: Exceptions.c::PyErrFetchRestore
+//@requires: Exceptions.c::PyErrExceptionMatches
+
+static PyObject *__Pyx_GetAttr3Default(PyObject *d) {
+ __Pyx_PyThreadState_declare
+ __Pyx_PyThreadState_assign
+ if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError)))
+ return NULL;
+ __Pyx_PyErr_Clear();
+ Py_INCREF(d);
+ return d;
+}
+
static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) {
PyObject *r = __Pyx_GetAttr(o, n);
- return (likely(r)) ? r : __Pyx_GetAttr3Default(d);
-}
-
-//////////////////// HasAttr.proto ////////////////////
-
-static CYTHON_INLINE int __Pyx_HasAttr(PyObject *, PyObject *); /*proto*/
-
-//////////////////// HasAttr ////////////////////
-//@requires: ObjectHandling.c::GetAttr
-
-static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) {
- PyObject *r;
- if (unlikely(!__Pyx_PyBaseString_Check(n))) {
- PyErr_SetString(PyExc_TypeError,
- "hasattr(): attribute name must be string");
- return -1;
- }
- r = __Pyx_GetAttr(o, n);
+ return (likely(r)) ? r : __Pyx_GetAttr3Default(d);
+}
+
+//////////////////// HasAttr.proto ////////////////////
+
+static CYTHON_INLINE int __Pyx_HasAttr(PyObject *, PyObject *); /*proto*/
+
+//////////////////// HasAttr ////////////////////
+//@requires: ObjectHandling.c::GetAttr
+
+static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) {
+ PyObject *r;
+ if (unlikely(!__Pyx_PyBaseString_Check(n))) {
+ PyErr_SetString(PyExc_TypeError,
+ "hasattr(): attribute name must be string");
+ return -1;
+ }
+ r = __Pyx_GetAttr(o, n);
if (unlikely(!r)) {
PyErr_Clear();
- return 0;
- } else {
- Py_DECREF(r);
- return 1;
+ return 0;
+ } else {
+ Py_DECREF(r);
+ return 1;
}
}
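
/* [illustration only] The two helpers in this block implement
 * getattr(o, n, d) and hasattr(o, n); a rough public-API equivalent of
 * the three-argument form, for orientation: swallow AttributeError and
 * hand back the default, propagate everything else. */
static PyObject *getattr3(PyObject *o, PyObject *name, PyObject *dflt) {
    PyObject *r = PyObject_GetAttr(o, name);
    if (!r && PyErr_ExceptionMatches(PyExc_AttributeError)) {
        PyErr_Clear();
        Py_INCREF(dflt);
        return dflt;
    }
    return r;   /* value, or NULL with a non-AttributeError still pending */
}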
@@ -236,22 +236,22 @@ static PyObject* __Pyx_Intern(PyObject* s) {
//////////////////// abs_longlong.proto ////////////////////
-static CYTHON_INLINE PY_LONG_LONG __Pyx_abs_longlong(PY_LONG_LONG x) {
+static CYTHON_INLINE PY_LONG_LONG __Pyx_abs_longlong(PY_LONG_LONG x) {
#if defined (__cplusplus) && __cplusplus >= 201103L
- return std::abs(x);
+ return std::abs(x);
#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
- return llabs(x);
-#elif defined (_MSC_VER)
+ return llabs(x);
+#elif defined (_MSC_VER)
 // abs() is defined for long, but the 64-bit type on MSVC is long long.
- // Use MS-specific _abs64() instead, which returns the original (negative) value for abs(-MAX-1)
- return _abs64(x);
+ // Use MS-specific _abs64() instead, which returns the original (negative) value for abs(-MAX-1)
+ return _abs64(x);
#elif defined (__GNUC__)
 // gcc or clang on 64-bit Windows.
- return __builtin_llabs(x);
+ return __builtin_llabs(x);
#else
if (sizeof(PY_LONG_LONG) <= sizeof(Py_ssize_t))
return __Pyx_sst_abs(x);
- return (x<0) ? -x : x;
+ return (x<0) ? -x : x;
#endif
}
@@ -468,12 +468,12 @@ static CYTHON_INLINE PyObject* __Pyx_PyDict_ViewItems(PyObject* d) {
return CALL_UNBOUND_METHOD(PyDict_Type, "viewitems", d);
}
-
+
//////////////////// pyfrozenset_new.proto ////////////////////
-
-static CYTHON_INLINE PyObject* __Pyx_PyFrozenSet_New(PyObject* it);
-
-//////////////////// pyfrozenset_new ////////////////////
+
+static CYTHON_INLINE PyObject* __Pyx_PyFrozenSet_New(PyObject* it);
+
+//////////////////// pyfrozenset_new ////////////////////
//@substitute: naming
static CYTHON_INLINE PyObject* __Pyx_PyFrozenSet_New(PyObject* it) {
diff --git a/contrib/tools/cython/Cython/Utility/CConvert.pyx b/contrib/tools/cython/Cython/Utility/CConvert.pyx
index 90583bd1f2..5969f6a582 100644
--- a/contrib/tools/cython/Cython/Utility/CConvert.pyx
+++ b/contrib/tools/cython/Cython/Utility/CConvert.pyx
@@ -8,8 +8,8 @@ cdef extern from *:
object PyErr_Format(exc, const char *format, ...)
@cname("{{funcname}}")
-cdef {{struct_type}} {{funcname}}(obj) except *:
- cdef {{struct_type}} result
+cdef {{struct_type}} {{funcname}}(obj) except *:
+ cdef {{struct_type}} result
if not PyMapping_Check(obj):
PyErr_Format(TypeError, b"Expected %.16s, got %.200s", b"a mapping", Py_TYPE(obj).tp_name)
@@ -33,8 +33,8 @@ cdef extern from *:
object PyErr_Format(exc, const char *format, ...)
@cname("{{funcname}}")
-cdef {{struct_type}} {{funcname}}(obj) except *:
- cdef {{struct_type}} result
+cdef {{struct_type}} {{funcname}}(obj) except *:
+ cdef {{struct_type}} result
cdef Py_ssize_t length
if not PyMapping_Check(obj):
PyErr_Format(TypeError, b"Expected %.16s, got %.200s", b"a mapping", Py_TYPE(obj).tp_name)
diff --git a/contrib/tools/cython/Cython/Utility/CMath.c b/contrib/tools/cython/Cython/Utility/CMath.c
index f5e59d27b3..2cd2223138 100644
--- a/contrib/tools/cython/Cython/Utility/CMath.c
+++ b/contrib/tools/cython/Cython/Utility/CMath.c
@@ -73,10 +73,10 @@ static CYTHON_INLINE %(type)s %(func_name)s(%(type)s b, %(type)s e) {
switch (e) {
case 3:
t *= b;
- CYTHON_FALLTHROUGH;
+ CYTHON_FALLTHROUGH;
case 2:
t *= b;
- CYTHON_FALLTHROUGH;
+ CYTHON_FALLTHROUGH;
case 1:
return t;
case 0:
diff --git a/contrib/tools/cython/Cython/Utility/CommonStructures.c b/contrib/tools/cython/Cython/Utility/CommonStructures.c
index c704a81c2e..c7945feb49 100644
--- a/contrib/tools/cython/Cython/Utility/CommonStructures.c
+++ b/contrib/tools/cython/Cython/Utility/CommonStructures.c
@@ -1,86 +1,86 @@
-/////////////// FetchCommonType.proto ///////////////
-
-static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type);
-
-/////////////// FetchCommonType ///////////////
-
-static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type) {
- PyObject* fake_module;
- PyTypeObject* cached_type = NULL;
-
- fake_module = PyImport_AddModule((char*) "_cython_" CYTHON_ABI);
- if (!fake_module) return NULL;
- Py_INCREF(fake_module);
-
- cached_type = (PyTypeObject*) PyObject_GetAttrString(fake_module, type->tp_name);
- if (cached_type) {
- if (!PyType_Check((PyObject*)cached_type)) {
- PyErr_Format(PyExc_TypeError,
- "Shared Cython type %.200s is not a type object",
- type->tp_name);
- goto bad;
- }
- if (cached_type->tp_basicsize != type->tp_basicsize) {
- PyErr_Format(PyExc_TypeError,
- "Shared Cython type %.200s has the wrong size, try recompiling",
- type->tp_name);
- goto bad;
- }
- } else {
- if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad;
- PyErr_Clear();
- if (PyType_Ready(type) < 0) goto bad;
- if (PyObject_SetAttrString(fake_module, type->tp_name, (PyObject*) type) < 0)
- goto bad;
- Py_INCREF(type);
- cached_type = type;
- }
-
-done:
- Py_DECREF(fake_module);
- // NOTE: always returns owned reference, or NULL on error
- return cached_type;
-
-bad:
- Py_XDECREF(cached_type);
- cached_type = NULL;
- goto done;
-}
-
-
-/////////////// FetchCommonPointer.proto ///////////////
-
-static void* __Pyx_FetchCommonPointer(void* pointer, const char* name);
-
-/////////////// FetchCommonPointer ///////////////
-
-
-static void* __Pyx_FetchCommonPointer(void* pointer, const char* name) {
-#if PY_VERSION_HEX >= 0x02070000
- PyObject* fake_module = NULL;
- PyObject* capsule = NULL;
- void* value = NULL;
-
- fake_module = PyImport_AddModule((char*) "_cython_" CYTHON_ABI);
- if (!fake_module) return NULL;
- Py_INCREF(fake_module);
-
- capsule = PyObject_GetAttrString(fake_module, name);
- if (!capsule) {
- if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad;
- PyErr_Clear();
- capsule = PyCapsule_New(pointer, name, NULL);
- if (!capsule) goto bad;
- if (PyObject_SetAttrString(fake_module, name, capsule) < 0)
- goto bad;
- }
- value = PyCapsule_GetPointer(capsule, name);
-
-bad:
- Py_XDECREF(capsule);
- Py_DECREF(fake_module);
- return value;
-#else
- return pointer;
-#endif
-}
+/////////////// FetchCommonType.proto ///////////////
+
+static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type);
+
+/////////////// FetchCommonType ///////////////
+
+static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type) {
+ PyObject* fake_module;
+ PyTypeObject* cached_type = NULL;
+
+ fake_module = PyImport_AddModule((char*) "_cython_" CYTHON_ABI);
+ if (!fake_module) return NULL;
+ Py_INCREF(fake_module);
+
+ cached_type = (PyTypeObject*) PyObject_GetAttrString(fake_module, type->tp_name);
+ if (cached_type) {
+ if (!PyType_Check((PyObject*)cached_type)) {
+ PyErr_Format(PyExc_TypeError,
+ "Shared Cython type %.200s is not a type object",
+ type->tp_name);
+ goto bad;
+ }
+ if (cached_type->tp_basicsize != type->tp_basicsize) {
+ PyErr_Format(PyExc_TypeError,
+ "Shared Cython type %.200s has the wrong size, try recompiling",
+ type->tp_name);
+ goto bad;
+ }
+ } else {
+ if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad;
+ PyErr_Clear();
+ if (PyType_Ready(type) < 0) goto bad;
+ if (PyObject_SetAttrString(fake_module, type->tp_name, (PyObject*) type) < 0)
+ goto bad;
+ Py_INCREF(type);
+ cached_type = type;
+ }
+
+done:
+ Py_DECREF(fake_module);
+ // NOTE: always returns owned reference, or NULL on error
+ return cached_type;
+
+bad:
+ Py_XDECREF(cached_type);
+ cached_type = NULL;
+ goto done;
+}
+
+
+/////////////// FetchCommonPointer.proto ///////////////
+
+static void* __Pyx_FetchCommonPointer(void* pointer, const char* name);
+
+/////////////// FetchCommonPointer ///////////////
+
+
+static void* __Pyx_FetchCommonPointer(void* pointer, const char* name) {
+#if PY_VERSION_HEX >= 0x02070000
+ PyObject* fake_module = NULL;
+ PyObject* capsule = NULL;
+ void* value = NULL;
+
+ fake_module = PyImport_AddModule((char*) "_cython_" CYTHON_ABI);
+ if (!fake_module) return NULL;
+ Py_INCREF(fake_module);
+
+ capsule = PyObject_GetAttrString(fake_module, name);
+ if (!capsule) {
+ if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad;
+ PyErr_Clear();
+ capsule = PyCapsule_New(pointer, name, NULL);
+ if (!capsule) goto bad;
+ if (PyObject_SetAttrString(fake_module, name, capsule) < 0)
+ goto bad;
+ }
+ value = PyCapsule_GetPointer(capsule, name);
+
+bad:
+ Py_XDECREF(capsule);
+ Py_DECREF(fake_module);
+ return value;
+#else
+ return pointer;
+#endif
+}
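
/* [illustration only, names made up] The trick in this file is to stash
 * shared objects as attributes of a process-wide fake module keyed by the
 * Cython ABI. A standalone sketch of the pointer-sharing half: the first
 * module to run publishes a capsule, later importers fetch the same one. */
static int shared_counter;

static void *fetch_shared_counter(void) {
    PyObject *mod = PyImport_AddModule("_my_shared_ns");   /* borrowed ref */
    PyObject *capsule;
    void *value = NULL;
    if (!mod) return NULL;
    capsule = PyObject_GetAttrString(mod, "shared_counter");
    if (!capsule) {
        if (!PyErr_ExceptionMatches(PyExc_AttributeError)) return NULL;
        PyErr_Clear();
        capsule = PyCapsule_New(&shared_counter, "shared_counter", NULL);
        if (!capsule) return NULL;
        if (PyObject_SetAttrString(mod, "shared_counter", capsule) < 0) {
            Py_DECREF(capsule);
            return NULL;
        }
    }
    value = PyCapsule_GetPointer(capsule, "shared_counter");
    Py_DECREF(capsule);
    return value;
}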
diff --git a/contrib/tools/cython/Cython/Utility/Complex.c b/contrib/tools/cython/Cython/Utility/Complex.c
index acd2e71f65..28062a0611 100644
--- a/contrib/tools/cython/Cython/Utility/Complex.c
+++ b/contrib/tools/cython/Cython/Utility/Complex.c
@@ -1,5 +1,5 @@
-/////////////// Header.proto ///////////////
-//@proto_block: h_code
+/////////////// Header.proto ///////////////
+//@proto_block: h_code
#if !defined(CYTHON_CCOMPLEX)
#if defined(__cplusplus)
@@ -50,8 +50,8 @@
#endif
-/////////////// Declarations.proto ///////////////
-//@proto_block: complex_type_declarations
+/////////////// Declarations.proto ///////////////
+//@proto_block: complex_type_declarations
#if CYTHON_CCOMPLEX
#ifdef __cplusplus
diff --git a/contrib/tools/cython/Cython/Utility/Coroutine.c b/contrib/tools/cython/Cython/Utility/Coroutine.c
index f430f9a72e..d26314083b 100644
--- a/contrib/tools/cython/Cython/Utility/Coroutine.c
+++ b/contrib/tools/cython/Cython/Utility/Coroutine.c
@@ -5,17 +5,17 @@ static CYTHON_INLINE PyObject* __Pyx_Generator_Yield_From(__pyx_CoroutineObject
//////////////////// GeneratorYieldFrom ////////////////////
//@requires: Generator
-static void __PyxPyIter_CheckErrorAndDecref(PyObject *source) {
- PyErr_Format(PyExc_TypeError,
- "iter() returned non-iterator of type '%.100s'",
- Py_TYPE(source)->tp_name);
- Py_DECREF(source);
-}
-
+static void __PyxPyIter_CheckErrorAndDecref(PyObject *source) {
+ PyErr_Format(PyExc_TypeError,
+ "iter() returned non-iterator of type '%.100s'",
+ Py_TYPE(source)->tp_name);
+ Py_DECREF(source);
+}
+
static CYTHON_INLINE PyObject* __Pyx_Generator_Yield_From(__pyx_CoroutineObject *gen, PyObject *source) {
PyObject *source_gen, *retval;
#ifdef __Pyx_Coroutine_USED
- if (__Pyx_Coroutine_Check(source)) {
+ if (__Pyx_Coroutine_Check(source)) {
// TODO: this should only happen for types.coroutine()ed generators, but we can't determine that here
Py_INCREF(source);
source_gen = source;
@@ -29,23 +29,23 @@ static CYTHON_INLINE PyObject* __Pyx_Generator_Yield_From(__pyx_CoroutineObject
if (unlikely(!source_gen))
return NULL;
if (unlikely(!PyIter_Check(source_gen))) {
- __PyxPyIter_CheckErrorAndDecref(source_gen);
+ __PyxPyIter_CheckErrorAndDecref(source_gen);
return NULL;
}
} else
- // CPython also allows non-iterable sequences to be iterated over
+ // CPython also allows non-iterable sequences to be iterated over
#endif
- {
+ {
source_gen = PyObject_GetIter(source);
- if (unlikely(!source_gen))
- return NULL;
- }
+ if (unlikely(!source_gen))
+ return NULL;
+ }
// source_gen is now the iterator, make the first next() call
-#if CYTHON_USE_TYPE_SLOTS
+#if CYTHON_USE_TYPE_SLOTS
retval = Py_TYPE(source_gen)->tp_iternext(source_gen);
-#else
- retval = PyIter_Next(source_gen);
-#endif
+#else
+ retval = PyIter_Next(source_gen);
+#endif
}
if (likely(retval)) {
gen->yieldfrom = source_gen;
@@ -58,59 +58,59 @@ static CYTHON_INLINE PyObject* __Pyx_Generator_Yield_From(__pyx_CoroutineObject
//////////////////// CoroutineYieldFrom.proto ////////////////////
-static CYTHON_INLINE PyObject* __Pyx_Coroutine_Yield_From(__pyx_CoroutineObject *gen, PyObject *source);
+static CYTHON_INLINE PyObject* __Pyx_Coroutine_Yield_From(__pyx_CoroutineObject *gen, PyObject *source);
//////////////////// CoroutineYieldFrom ////////////////////
//@requires: Coroutine
//@requires: GetAwaitIter
-static PyObject* __Pyx__Coroutine_Yield_From_Generic(__pyx_CoroutineObject *gen, PyObject *source) {
- PyObject *retval;
- PyObject *source_gen = __Pyx__Coroutine_GetAwaitableIter(source);
- if (unlikely(!source_gen)) {
- return NULL;
- }
- // source_gen is now the iterator, make the first next() call
- if (__Pyx_Coroutine_Check(source_gen)) {
- retval = __Pyx_Generator_Next(source_gen);
- } else {
-#if CYTHON_USE_TYPE_SLOTS
- retval = Py_TYPE(source_gen)->tp_iternext(source_gen);
+static PyObject* __Pyx__Coroutine_Yield_From_Generic(__pyx_CoroutineObject *gen, PyObject *source) {
+ PyObject *retval;
+ PyObject *source_gen = __Pyx__Coroutine_GetAwaitableIter(source);
+ if (unlikely(!source_gen)) {
+ return NULL;
+ }
+ // source_gen is now the iterator, make the first next() call
+ if (__Pyx_Coroutine_Check(source_gen)) {
+ retval = __Pyx_Generator_Next(source_gen);
+ } else {
+#if CYTHON_USE_TYPE_SLOTS
+ retval = Py_TYPE(source_gen)->tp_iternext(source_gen);
#else
- retval = PyIter_Next(source_gen);
+ retval = PyIter_Next(source_gen);
#endif
- }
- if (retval) {
- gen->yieldfrom = source_gen;
- return retval;
- }
- Py_DECREF(source_gen);
- return NULL;
+ }
+ if (retval) {
+ gen->yieldfrom = source_gen;
+ return retval;
+ }
+ Py_DECREF(source_gen);
+ return NULL;
}
-static CYTHON_INLINE PyObject* __Pyx_Coroutine_Yield_From(__pyx_CoroutineObject *gen, PyObject *source) {
+static CYTHON_INLINE PyObject* __Pyx_Coroutine_Yield_From(__pyx_CoroutineObject *gen, PyObject *source) {
PyObject *retval;
- if (__Pyx_Coroutine_Check(source)) {
- if (unlikely(((__pyx_CoroutineObject*)source)->yieldfrom)) {
- PyErr_SetString(
- PyExc_RuntimeError,
- "coroutine is being awaited already");
+ if (__Pyx_Coroutine_Check(source)) {
+ if (unlikely(((__pyx_CoroutineObject*)source)->yieldfrom)) {
+ PyErr_SetString(
+ PyExc_RuntimeError,
+ "coroutine is being awaited already");
return NULL;
}
retval = __Pyx_Generator_Next(source);
-#ifdef __Pyx_AsyncGen_USED
- // inlined "__pyx_PyAsyncGenASend" handling to avoid the series of generic calls
- } else if (__pyx_PyAsyncGenASend_CheckExact(source)) {
- retval = __Pyx_async_gen_asend_iternext(source);
-#endif
+#ifdef __Pyx_AsyncGen_USED
+ // inlined "__pyx_PyAsyncGenASend" handling to avoid the series of generic calls
+ } else if (__pyx_PyAsyncGenASend_CheckExact(source)) {
+ retval = __Pyx_async_gen_asend_iternext(source);
+#endif
} else {
- return __Pyx__Coroutine_Yield_From_Generic(gen, source);
+ return __Pyx__Coroutine_Yield_From_Generic(gen, source);
}
- if (retval) {
- Py_INCREF(source);
- gen->yieldfrom = source;
+ if (retval) {
+ Py_INCREF(source);
+ gen->yieldfrom = source;
}
- return retval;
+ return retval;
}
@@ -126,50 +126,50 @@ static PyObject *__Pyx__Coroutine_GetAwaitableIter(PyObject *o); /*proto*/
static CYTHON_INLINE PyObject *__Pyx_Coroutine_GetAwaitableIter(PyObject *o) {
#ifdef __Pyx_Coroutine_USED
- if (__Pyx_Coroutine_Check(o)) {
- return __Pyx_NewRef(o);
+ if (__Pyx_Coroutine_Check(o)) {
+ return __Pyx_NewRef(o);
}
#endif
return __Pyx__Coroutine_GetAwaitableIter(o);
}
-
-static void __Pyx_Coroutine_AwaitableIterError(PyObject *source) {
-#if PY_VERSION_HEX >= 0x030600B3 || defined(_PyErr_FormatFromCause)
- _PyErr_FormatFromCause(
- PyExc_TypeError,
- "'async for' received an invalid object "
- "from __anext__: %.100s",
- Py_TYPE(source)->tp_name);
-#elif PY_MAJOR_VERSION >= 3
- PyObject *exc, *val, *val2, *tb;
- assert(PyErr_Occurred());
- PyErr_Fetch(&exc, &val, &tb);
- PyErr_NormalizeException(&exc, &val, &tb);
- if (tb != NULL) {
- PyException_SetTraceback(val, tb);
- Py_DECREF(tb);
- }
- Py_DECREF(exc);
- assert(!PyErr_Occurred());
- PyErr_Format(
- PyExc_TypeError,
- "'async for' received an invalid object "
- "from __anext__: %.100s",
- Py_TYPE(source)->tp_name);
-
- PyErr_Fetch(&exc, &val2, &tb);
- PyErr_NormalizeException(&exc, &val2, &tb);
- Py_INCREF(val);
- PyException_SetCause(val2, val);
- PyException_SetContext(val2, val);
- PyErr_Restore(exc, val2, tb);
-#else
- // since Py2 does not have exception chaining, it's better to avoid shadowing exceptions there
- source++;
-#endif
-}
-
+
+static void __Pyx_Coroutine_AwaitableIterError(PyObject *source) {
+#if PY_VERSION_HEX >= 0x030600B3 || defined(_PyErr_FormatFromCause)
+ _PyErr_FormatFromCause(
+ PyExc_TypeError,
+ "'async for' received an invalid object "
+ "from __anext__: %.100s",
+ Py_TYPE(source)->tp_name);
+#elif PY_MAJOR_VERSION >= 3
+ PyObject *exc, *val, *val2, *tb;
+ assert(PyErr_Occurred());
+ PyErr_Fetch(&exc, &val, &tb);
+ PyErr_NormalizeException(&exc, &val, &tb);
+ if (tb != NULL) {
+ PyException_SetTraceback(val, tb);
+ Py_DECREF(tb);
+ }
+ Py_DECREF(exc);
+ assert(!PyErr_Occurred());
+ PyErr_Format(
+ PyExc_TypeError,
+ "'async for' received an invalid object "
+ "from __anext__: %.100s",
+ Py_TYPE(source)->tp_name);
+
+ PyErr_Fetch(&exc, &val2, &tb);
+ PyErr_NormalizeException(&exc, &val2, &tb);
+ Py_INCREF(val);
+ PyException_SetCause(val2, val);
+ PyException_SetContext(val2, val);
+ PyErr_Restore(exc, val2, tb);
+#else
+ // since Py2 does not have exception chaining, it's better to avoid shadowing exceptions there
+ source++;
+#endif
+}
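
/* [illustration only] On Py3 builds without _PyErr_FormatFromCause, the
 * fallback above chains exceptions by hand. The same dance in isolation,
 * public API only: replace the pending exception with a TypeError whose
 * __cause__/__context__ point at the original. */
static void chain_type_error(const char *msg) {
    PyObject *exc, *val, *tb, *exc2, *val2, *tb2;
    PyErr_Fetch(&exc, &val, &tb);
    PyErr_NormalizeException(&exc, &val, &tb);
    if (tb) { PyException_SetTraceback(val, tb); Py_DECREF(tb); }
    Py_DECREF(exc);
    PyErr_SetString(PyExc_TypeError, msg);
    PyErr_Fetch(&exc2, &val2, &tb2);
    PyErr_NormalizeException(&exc2, &val2, &tb2);
    Py_INCREF(val);                       /* one ref per stolen slot below */
    PyException_SetCause(val2, val);      /* as if "raise ... from val" */
    PyException_SetContext(val2, val);    /* implicit chaining context */
    PyErr_Restore(exc2, val2, tb2);
}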
+
// adapted from genobject.c in Py3.5
static PyObject *__Pyx__Coroutine_GetAwaitableIter(PyObject *obj) {
PyObject *res;
@@ -181,13 +181,13 @@ static PyObject *__Pyx__Coroutine_GetAwaitableIter(PyObject *obj) {
#endif
#if PY_VERSION_HEX >= 0x030500B2 || defined(PyCoro_CheckExact)
if (PyCoro_CheckExact(obj)) {
- return __Pyx_NewRef(obj);
+ return __Pyx_NewRef(obj);
} else
#endif
#if CYTHON_COMPILING_IN_CPYTHON && defined(CO_ITERABLE_COROUTINE)
if (PyGen_CheckExact(obj) && ((PyGenObject*)obj)->gi_code && ((PyCodeObject *)((PyGenObject*)obj)->gi_code)->co_flags & CO_ITERABLE_COROUTINE) {
// Python generator marked with "@types.coroutine" decorator
- return __Pyx_NewRef(obj);
+ return __Pyx_NewRef(obj);
} else
#endif
{
@@ -201,12 +201,12 @@ static PyObject *__Pyx__Coroutine_GetAwaitableIter(PyObject *obj) {
goto slot_error;
Py_DECREF(method);
}
- if (unlikely(!res)) {
- // surprisingly, CPython replaces the exception here...
- __Pyx_Coroutine_AwaitableIterError(obj);
- goto bad;
- }
- if (unlikely(!PyIter_Check(res))) {
+ if (unlikely(!res)) {
+ // surprisingly, CPython replaces the exception here...
+ __Pyx_Coroutine_AwaitableIterError(obj);
+ goto bad;
+ }
+ if (unlikely(!PyIter_Check(res))) {
PyErr_Format(PyExc_TypeError,
"__await__() returned non-iterator of type '%.100s'",
Py_TYPE(res)->tp_name);
@@ -214,7 +214,7 @@ static PyObject *__Pyx__Coroutine_GetAwaitableIter(PyObject *obj) {
} else {
int is_coroutine = 0;
#ifdef __Pyx_Coroutine_USED
- is_coroutine |= __Pyx_Coroutine_Check(res);
+ is_coroutine |= __Pyx_Coroutine_Check(res);
#endif
#if PY_VERSION_HEX >= 0x030500B2 || defined(PyCoro_CheckExact)
is_coroutine |= PyCoro_CheckExact(res);
@@ -246,7 +246,7 @@ static CYTHON_INLINE PyObject *__Pyx_Coroutine_AsyncIterNext(PyObject *o); /*pro
//@requires: GetAwaitIter
//@requires: ObjectHandling.c::PyObjectCallMethod0
-static PyObject *__Pyx_Coroutine_GetAsyncIter_Generic(PyObject *obj) {
+static PyObject *__Pyx_Coroutine_GetAsyncIter_Generic(PyObject *obj) {
#if PY_VERSION_HEX < 0x030500B1
{
PyObject *iter = __Pyx_PyObject_CallMethod0(obj, PYIDENT("__aiter__"));
@@ -258,7 +258,7 @@ static PyObject *__Pyx_Coroutine_GetAsyncIter_Generic(PyObject *obj) {
}
#else
// avoid C warning about 'unused function'
- if ((0)) (void) __Pyx_PyObject_CallMethod0(obj, PYIDENT("__aiter__"));
+ if ((0)) (void) __Pyx_PyObject_CallMethod0(obj, PYIDENT("__aiter__"));
#endif
PyErr_Format(PyExc_TypeError, "'async for' requires an object with __aiter__ method, got %.100s",
@@ -266,26 +266,26 @@ static PyObject *__Pyx_Coroutine_GetAsyncIter_Generic(PyObject *obj) {
return NULL;
}
-
-static CYTHON_INLINE PyObject *__Pyx_Coroutine_GetAsyncIter(PyObject *obj) {
-#ifdef __Pyx_AsyncGen_USED
- if (__Pyx_AsyncGen_CheckExact(obj)) {
- return __Pyx_NewRef(obj);
- }
-#endif
+
+static CYTHON_INLINE PyObject *__Pyx_Coroutine_GetAsyncIter(PyObject *obj) {
+#ifdef __Pyx_AsyncGen_USED
+ if (__Pyx_AsyncGen_CheckExact(obj)) {
+ return __Pyx_NewRef(obj);
+ }
+#endif
#if CYTHON_USE_ASYNC_SLOTS
- {
- __Pyx_PyAsyncMethodsStruct* am = __Pyx_PyType_AsAsync(obj);
- if (likely(am && am->am_aiter)) {
- return (*am->am_aiter)(obj);
- }
- }
-#endif
- return __Pyx_Coroutine_GetAsyncIter_Generic(obj);
-}
-
-
-static PyObject *__Pyx__Coroutine_AsyncIterNext(PyObject *obj) {
+ {
+ __Pyx_PyAsyncMethodsStruct* am = __Pyx_PyType_AsAsync(obj);
+ if (likely(am && am->am_aiter)) {
+ return (*am->am_aiter)(obj);
+ }
+ }
+#endif
+ return __Pyx_Coroutine_GetAsyncIter_Generic(obj);
+}
+
+
+static PyObject *__Pyx__Coroutine_AsyncIterNext(PyObject *obj) {
#if PY_VERSION_HEX < 0x030500B1
{
PyObject *value = __Pyx_PyObject_CallMethod0(obj, PYIDENT("__anext__"));
@@ -301,68 +301,68 @@ static PyObject *__Pyx__Coroutine_AsyncIterNext(PyObject *obj) {
}
-static CYTHON_INLINE PyObject *__Pyx_Coroutine_AsyncIterNext(PyObject *obj) {
-#ifdef __Pyx_AsyncGen_USED
- if (__Pyx_AsyncGen_CheckExact(obj)) {
- return __Pyx_async_gen_anext(obj);
- }
-#endif
-#if CYTHON_USE_ASYNC_SLOTS
- {
- __Pyx_PyAsyncMethodsStruct* am = __Pyx_PyType_AsAsync(obj);
- if (likely(am && am->am_anext)) {
- return (*am->am_anext)(obj);
- }
- }
-#endif
- return __Pyx__Coroutine_AsyncIterNext(obj);
-}
-
-
+static CYTHON_INLINE PyObject *__Pyx_Coroutine_AsyncIterNext(PyObject *obj) {
+#ifdef __Pyx_AsyncGen_USED
+ if (__Pyx_AsyncGen_CheckExact(obj)) {
+ return __Pyx_async_gen_anext(obj);
+ }
+#endif
+#if CYTHON_USE_ASYNC_SLOTS
+ {
+ __Pyx_PyAsyncMethodsStruct* am = __Pyx_PyType_AsAsync(obj);
+ if (likely(am && am->am_anext)) {
+ return (*am->am_anext)(obj);
+ }
+ }
+#endif
+ return __Pyx__Coroutine_AsyncIterNext(obj);
+}
+
+
//////////////////// pep479.proto ////////////////////
-static void __Pyx_Generator_Replace_StopIteration(int in_async_gen); /*proto*/
+static void __Pyx_Generator_Replace_StopIteration(int in_async_gen); /*proto*/
//////////////////// pep479 ////////////////////
//@requires: Exceptions.c::GetException
-static void __Pyx_Generator_Replace_StopIteration(CYTHON_UNUSED int in_async_gen) {
- PyObject *exc, *val, *tb, *cur_exc;
+static void __Pyx_Generator_Replace_StopIteration(CYTHON_UNUSED int in_async_gen) {
+ PyObject *exc, *val, *tb, *cur_exc;
__Pyx_PyThreadState_declare
- #ifdef __Pyx_StopAsyncIteration_USED
- int is_async_stopiteration = 0;
- #endif
-
- cur_exc = PyErr_Occurred();
- if (likely(!__Pyx_PyErr_GivenExceptionMatches(cur_exc, PyExc_StopIteration))) {
- #ifdef __Pyx_StopAsyncIteration_USED
- if (in_async_gen && unlikely(__Pyx_PyErr_GivenExceptionMatches(cur_exc, __Pyx_PyExc_StopAsyncIteration))) {
- is_async_stopiteration = 1;
- } else
- #endif
- return;
- }
-
+ #ifdef __Pyx_StopAsyncIteration_USED
+ int is_async_stopiteration = 0;
+ #endif
+
+ cur_exc = PyErr_Occurred();
+ if (likely(!__Pyx_PyErr_GivenExceptionMatches(cur_exc, PyExc_StopIteration))) {
+ #ifdef __Pyx_StopAsyncIteration_USED
+ if (in_async_gen && unlikely(__Pyx_PyErr_GivenExceptionMatches(cur_exc, __Pyx_PyExc_StopAsyncIteration))) {
+ is_async_stopiteration = 1;
+ } else
+ #endif
+ return;
+ }
+
__Pyx_PyThreadState_assign
- // Chain exceptions by moving Stop(Async)Iteration to exc_info before creating the RuntimeError.
- // In Py2.x, no chaining happens, but the exception still stays visible in exc_info.
+ // Chain exceptions by moving Stop(Async)Iteration to exc_info before creating the RuntimeError.
+ // In Py2.x, no chaining happens, but the exception still stays visible in exc_info.
__Pyx_GetException(&exc, &val, &tb);
Py_XDECREF(exc);
Py_XDECREF(val);
Py_XDECREF(tb);
- PyErr_SetString(PyExc_RuntimeError,
- #ifdef __Pyx_StopAsyncIteration_USED
- is_async_stopiteration ? "async generator raised StopAsyncIteration" :
- in_async_gen ? "async generator raised StopIteration" :
- #endif
- "generator raised StopIteration");
+ PyErr_SetString(PyExc_RuntimeError,
+ #ifdef __Pyx_StopAsyncIteration_USED
+ is_async_stopiteration ? "async generator raised StopAsyncIteration" :
+ in_async_gen ? "async generator raised StopIteration" :
+ #endif
+ "generator raised StopIteration");
}
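
/* [illustration only, simplified] The PEP 479 conversion performed above,
 * via public API: turn a pending StopIteration into a RuntimeError that
 * carries the original exception as its __context__. The real helper also
 * routes the old exception through exc_info via __Pyx_GetException. */
static void pep479_convert(void) {
    PyObject *exc, *val, *tb;
    if (!PyErr_ExceptionMatches(PyExc_StopIteration))
        return;                                  /* nothing to convert */
    PyErr_Fetch(&exc, &val, &tb);
    PyErr_NormalizeException(&exc, &val, &tb);
    PyErr_SetString(PyExc_RuntimeError, "generator raised StopIteration");
    {
        PyObject *exc2, *val2, *tb2;
        PyErr_Fetch(&exc2, &val2, &tb2);
        PyErr_NormalizeException(&exc2, &val2, &tb2);
        PyException_SetContext(val2, val);       /* steals the ref to val */
        PyErr_Restore(exc2, val2, tb2);
    }
    Py_XDECREF(exc);
    Py_XDECREF(tb);
}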
//////////////////// CoroutineBase.proto ////////////////////
-//@substitute: naming
+//@substitute: naming
-typedef PyObject *(*__pyx_coroutine_body_t)(PyObject *, PyThreadState *, PyObject *);
+typedef PyObject *(*__pyx_coroutine_body_t)(PyObject *, PyThreadState *, PyObject *);
#if CYTHON_USE_EXC_INFO_STACK
// See https://bugs.python.org/issue25612
@@ -387,7 +387,7 @@ typedef struct {
PyObject *gi_name;
PyObject *gi_qualname;
PyObject *gi_modulename;
- PyObject *gi_code;
+ PyObject *gi_code;
PyObject *gi_frame;
int resume_label;
// using T_BOOL for property below requires char value
@@ -395,42 +395,42 @@ typedef struct {
} __pyx_CoroutineObject;
static __pyx_CoroutineObject *__Pyx__Coroutine_New(
- PyTypeObject *type, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure,
+ PyTypeObject *type, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure,
PyObject *name, PyObject *qualname, PyObject *module_name); /*proto*/
-
-static __pyx_CoroutineObject *__Pyx__Coroutine_NewInit(
- __pyx_CoroutineObject *gen, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure,
- PyObject *name, PyObject *qualname, PyObject *module_name); /*proto*/
-
+
+static __pyx_CoroutineObject *__Pyx__Coroutine_NewInit(
+ __pyx_CoroutineObject *gen, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure,
+ PyObject *name, PyObject *qualname, PyObject *module_name); /*proto*/
+
static CYTHON_INLINE void __Pyx_Coroutine_ExceptionClear(__Pyx_ExcInfoStruct *self);
static int __Pyx_Coroutine_clear(PyObject *self); /*proto*/
-static PyObject *__Pyx_Coroutine_Send(PyObject *self, PyObject *value); /*proto*/
-static PyObject *__Pyx_Coroutine_Close(PyObject *self); /*proto*/
-static PyObject *__Pyx_Coroutine_Throw(PyObject *gen, PyObject *args); /*proto*/
+static PyObject *__Pyx_Coroutine_Send(PyObject *self, PyObject *value); /*proto*/
+static PyObject *__Pyx_Coroutine_Close(PyObject *self); /*proto*/
+static PyObject *__Pyx_Coroutine_Throw(PyObject *gen, PyObject *args); /*proto*/
-// macros for exception state swapping instead of inline functions to make use of the local thread state context
+// macros for exception state swapping instead of inline functions to make use of the local thread state context
#if CYTHON_USE_EXC_INFO_STACK
#define __Pyx_Coroutine_SwapException(self)
#define __Pyx_Coroutine_ResetAndClearException(self) __Pyx_Coroutine_ExceptionClear(&(self)->gi_exc_state)
#else
-#define __Pyx_Coroutine_SwapException(self) { \
+#define __Pyx_Coroutine_SwapException(self) { \
__Pyx_ExceptionSwap(&(self)->gi_exc_state.exc_type, &(self)->gi_exc_state.exc_value, &(self)->gi_exc_state.exc_traceback); \
__Pyx_Coroutine_ResetFrameBackpointer(&(self)->gi_exc_state); \
- }
-#define __Pyx_Coroutine_ResetAndClearException(self) { \
+ }
+#define __Pyx_Coroutine_ResetAndClearException(self) { \
__Pyx_ExceptionReset((self)->gi_exc_state.exc_type, (self)->gi_exc_state.exc_value, (self)->gi_exc_state.exc_traceback); \
(self)->gi_exc_state.exc_type = (self)->gi_exc_state.exc_value = (self)->gi_exc_state.exc_traceback = NULL; \
- }
+ }
#endif
-
-#if CYTHON_FAST_THREAD_STATE
-#define __Pyx_PyGen_FetchStopIterationValue(pvalue) \
- __Pyx_PyGen__FetchStopIterationValue($local_tstate_cname, pvalue)
+
+#if CYTHON_FAST_THREAD_STATE
+#define __Pyx_PyGen_FetchStopIterationValue(pvalue) \
+ __Pyx_PyGen__FetchStopIterationValue($local_tstate_cname, pvalue)
#else
-#define __Pyx_PyGen_FetchStopIterationValue(pvalue) \
- __Pyx_PyGen__FetchStopIterationValue(__Pyx_PyThreadState_Current, pvalue)
+#define __Pyx_PyGen_FetchStopIterationValue(pvalue) \
+ __Pyx_PyGen__FetchStopIterationValue(__Pyx_PyThreadState_Current, pvalue)
#endif
-static int __Pyx_PyGen__FetchStopIterationValue(PyThreadState *tstate, PyObject **pvalue); /*proto*/
+static int __Pyx_PyGen__FetchStopIterationValue(PyThreadState *tstate, PyObject **pvalue); /*proto*/
static CYTHON_INLINE void __Pyx_Coroutine_ResetFrameBackpointer(__Pyx_ExcInfoStruct *exc_state); /*proto*/
@@ -440,53 +440,53 @@ static CYTHON_INLINE void __Pyx_Coroutine_ResetFrameBackpointer(__Pyx_ExcInfoStr
static PyTypeObject *__pyx_CoroutineType = 0;
static PyTypeObject *__pyx_CoroutineAwaitType = 0;
#define __Pyx_Coroutine_CheckExact(obj) (Py_TYPE(obj) == __pyx_CoroutineType)
-// __Pyx_Coroutine_Check(obj): see override for IterableCoroutine below
-#define __Pyx_Coroutine_Check(obj) __Pyx_Coroutine_CheckExact(obj)
-#define __Pyx_CoroutineAwait_CheckExact(obj) (Py_TYPE(obj) == __pyx_CoroutineAwaitType)
+// __Pyx_Coroutine_Check(obj): see override for IterableCoroutine below
+#define __Pyx_Coroutine_Check(obj) __Pyx_Coroutine_CheckExact(obj)
+#define __Pyx_CoroutineAwait_CheckExact(obj) (Py_TYPE(obj) == __pyx_CoroutineAwaitType)
-#define __Pyx_Coroutine_New(body, code, closure, name, qualname, module_name) \
- __Pyx__Coroutine_New(__pyx_CoroutineType, body, code, closure, name, qualname, module_name)
+#define __Pyx_Coroutine_New(body, code, closure, name, qualname, module_name) \
+ __Pyx__Coroutine_New(__pyx_CoroutineType, body, code, closure, name, qualname, module_name)
static int __pyx_Coroutine_init(void); /*proto*/
static PyObject *__Pyx__Coroutine_await(PyObject *coroutine); /*proto*/
-typedef struct {
- PyObject_HEAD
- PyObject *coroutine;
-} __pyx_CoroutineAwaitObject;
+typedef struct {
+ PyObject_HEAD
+ PyObject *coroutine;
+} __pyx_CoroutineAwaitObject;
static PyObject *__Pyx_CoroutineAwait_Close(__pyx_CoroutineAwaitObject *self, PyObject *arg); /*proto*/
-static PyObject *__Pyx_CoroutineAwait_Throw(__pyx_CoroutineAwaitObject *self, PyObject *args); /*proto*/
-
-
+static PyObject *__Pyx_CoroutineAwait_Throw(__pyx_CoroutineAwaitObject *self, PyObject *args); /*proto*/
+
+
//////////////////// Generator.proto ////////////////////
#define __Pyx_Generator_USED
static PyTypeObject *__pyx_GeneratorType = 0;
#define __Pyx_Generator_CheckExact(obj) (Py_TYPE(obj) == __pyx_GeneratorType)
-#define __Pyx_Generator_New(body, code, closure, name, qualname, module_name) \
- __Pyx__Coroutine_New(__pyx_GeneratorType, body, code, closure, name, qualname, module_name)
+#define __Pyx_Generator_New(body, code, closure, name, qualname, module_name) \
+ __Pyx__Coroutine_New(__pyx_GeneratorType, body, code, closure, name, qualname, module_name)
static PyObject *__Pyx_Generator_Next(PyObject *self);
static int __pyx_Generator_init(void); /*proto*/
-//////////////////// AsyncGen ////////////////////
-//@requires: AsyncGen.c::AsyncGenerator
-// -> empty, only delegates to separate file
-
-
+//////////////////// AsyncGen ////////////////////
+//@requires: AsyncGen.c::AsyncGenerator
+// -> empty, only delegates to separate file
+
+
//////////////////// CoroutineBase ////////////////////
//@substitute: naming
//@requires: Exceptions.c::PyErrFetchRestore
//@requires: Exceptions.c::PyThreadStateGet
//@requires: Exceptions.c::SwapException
//@requires: Exceptions.c::RaiseException
-//@requires: Exceptions.c::SaveResetException
+//@requires: Exceptions.c::SaveResetException
//@requires: ObjectHandling.c::PyObjectCallMethod1
//@requires: ObjectHandling.c::PyObjectGetAttrStr
-//@requires: CommonStructures.c::FetchCommonType
+//@requires: CommonStructures.c::FetchCommonType
#include <structmember.h>
#include <frameobject.h>
@@ -499,7 +499,7 @@ static int __pyx_Generator_init(void); /*proto*/
// Returns 0 if no exception or StopIteration is set.
// If any other exception is set, returns -1 and leaves
// pvalue unchanged.
-static int __Pyx_PyGen__FetchStopIterationValue(CYTHON_UNUSED PyThreadState *$local_tstate_cname, PyObject **pvalue) {
+static int __Pyx_PyGen__FetchStopIterationValue(CYTHON_UNUSED PyThreadState *$local_tstate_cname, PyObject **pvalue) {
PyObject *et, *ev, *tb;
PyObject *value = NULL;
@@ -542,7 +542,7 @@ static int __Pyx_PyGen__FetchStopIterationValue(CYTHON_UNUSED PyThreadState *$lo
}
Py_DECREF(ev);
}
- else if (!__Pyx_TypeCheck(ev, (PyTypeObject*)PyExc_StopIteration)) {
+ else if (!__Pyx_TypeCheck(ev, (PyTypeObject*)PyExc_StopIteration)) {
// 'steal' reference to ev
value = ev;
}
@@ -552,7 +552,7 @@ static int __Pyx_PyGen__FetchStopIterationValue(CYTHON_UNUSED PyThreadState *$lo
*pvalue = value;
return 0;
}
- } else if (!__Pyx_PyErr_GivenExceptionMatches(et, PyExc_StopIteration)) {
+ } else if (!__Pyx_PyErr_GivenExceptionMatches(et, PyExc_StopIteration)) {
__Pyx_ErrRestore(et, ev, tb);
return -1;
}
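
/* [illustration only] The same StopIteration-value extraction through the
 * public API; the helper above avoids materialising the exception object
 * on its fast path. resume_gen is an illustrative name: it resumes a
 * generator, returns the yielded object, or StopIteration.value on normal
 * completion, or NULL on a real error. */
static PyObject *resume_gen(PyObject *gen) {
    PyObject *res = PyObject_CallMethod(gen, "send", "O", Py_None);
    if (res)
        return res;                              /* yielded value */
    if (PyErr_ExceptionMatches(PyExc_StopIteration)) {
        PyObject *exc, *val, *tb, *value = NULL;
        PyErr_Fetch(&exc, &val, &tb);
        PyErr_NormalizeException(&exc, &val, &tb);
        if (val)
            value = PyObject_GetAttrString(val, "value");
        Py_XDECREF(exc); Py_XDECREF(val); Py_XDECREF(tb);
        return value;                            /* may be Py_None */
    }
    return NULL;                                 /* real error: propagate */
}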
@@ -605,68 +605,68 @@ void __Pyx_Coroutine_ExceptionClear(__Pyx_ExcInfoStruct *exc_state) {
Py_XDECREF(tb);
}
-#define __Pyx_Coroutine_AlreadyRunningError(gen) (__Pyx__Coroutine_AlreadyRunningError(gen), (PyObject*)NULL)
-static void __Pyx__Coroutine_AlreadyRunningError(CYTHON_UNUSED __pyx_CoroutineObject *gen) {
- const char *msg;
+#define __Pyx_Coroutine_AlreadyRunningError(gen) (__Pyx__Coroutine_AlreadyRunningError(gen), (PyObject*)NULL)
+static void __Pyx__Coroutine_AlreadyRunningError(CYTHON_UNUSED __pyx_CoroutineObject *gen) {
+ const char *msg;
if ((0)) {
- #ifdef __Pyx_Coroutine_USED
- } else if (__Pyx_Coroutine_Check((PyObject*)gen)) {
- msg = "coroutine already executing";
- #endif
- #ifdef __Pyx_AsyncGen_USED
- } else if (__Pyx_AsyncGen_CheckExact((PyObject*)gen)) {
- msg = "async generator already executing";
- #endif
- } else {
- msg = "generator already executing";
- }
- PyErr_SetString(PyExc_ValueError, msg);
-}
-
-#define __Pyx_Coroutine_NotStartedError(gen) (__Pyx__Coroutine_NotStartedError(gen), (PyObject*)NULL)
-static void __Pyx__Coroutine_NotStartedError(CYTHON_UNUSED PyObject *gen) {
- const char *msg;
+ #ifdef __Pyx_Coroutine_USED
+ } else if (__Pyx_Coroutine_Check((PyObject*)gen)) {
+ msg = "coroutine already executing";
+ #endif
+ #ifdef __Pyx_AsyncGen_USED
+ } else if (__Pyx_AsyncGen_CheckExact((PyObject*)gen)) {
+ msg = "async generator already executing";
+ #endif
+ } else {
+ msg = "generator already executing";
+ }
+ PyErr_SetString(PyExc_ValueError, msg);
+}
+
+#define __Pyx_Coroutine_NotStartedError(gen) (__Pyx__Coroutine_NotStartedError(gen), (PyObject*)NULL)
+static void __Pyx__Coroutine_NotStartedError(CYTHON_UNUSED PyObject *gen) {
+ const char *msg;
if ((0)) {
- #ifdef __Pyx_Coroutine_USED
- } else if (__Pyx_Coroutine_Check(gen)) {
- msg = "can't send non-None value to a just-started coroutine";
- #endif
- #ifdef __Pyx_AsyncGen_USED
- } else if (__Pyx_AsyncGen_CheckExact(gen)) {
- msg = "can't send non-None value to a just-started async generator";
- #endif
- } else {
- msg = "can't send non-None value to a just-started generator";
- }
- PyErr_SetString(PyExc_TypeError, msg);
-}
-
-#define __Pyx_Coroutine_AlreadyTerminatedError(gen, value, closing) (__Pyx__Coroutine_AlreadyTerminatedError(gen, value, closing), (PyObject*)NULL)
-static void __Pyx__Coroutine_AlreadyTerminatedError(CYTHON_UNUSED PyObject *gen, PyObject *value, CYTHON_UNUSED int closing) {
- #ifdef __Pyx_Coroutine_USED
- if (!closing && __Pyx_Coroutine_Check(gen)) {
- // `self` is an exhausted coroutine: raise an error,
- // except when called from gen_close(), which should
- // always be a silent method.
- PyErr_SetString(PyExc_RuntimeError, "cannot reuse already awaited coroutine");
- } else
- #endif
- if (value) {
- // `gen` is an exhausted generator:
- // only set exception if called from send().
- #ifdef __Pyx_AsyncGen_USED
- if (__Pyx_AsyncGen_CheckExact(gen))
- PyErr_SetNone(__Pyx_PyExc_StopAsyncIteration);
- else
- #endif
- PyErr_SetNone(PyExc_StopIteration);
- }
-}
-
-static
-PyObject *__Pyx_Coroutine_SendEx(__pyx_CoroutineObject *self, PyObject *value, int closing) {
- __Pyx_PyThreadState_declare
- PyThreadState *tstate;
+ #ifdef __Pyx_Coroutine_USED
+ } else if (__Pyx_Coroutine_Check(gen)) {
+ msg = "can't send non-None value to a just-started coroutine";
+ #endif
+ #ifdef __Pyx_AsyncGen_USED
+ } else if (__Pyx_AsyncGen_CheckExact(gen)) {
+ msg = "can't send non-None value to a just-started async generator";
+ #endif
+ } else {
+ msg = "can't send non-None value to a just-started generator";
+ }
+ PyErr_SetString(PyExc_TypeError, msg);
+}
+
+#define __Pyx_Coroutine_AlreadyTerminatedError(gen, value, closing) (__Pyx__Coroutine_AlreadyTerminatedError(gen, value, closing), (PyObject*)NULL)
+static void __Pyx__Coroutine_AlreadyTerminatedError(CYTHON_UNUSED PyObject *gen, PyObject *value, CYTHON_UNUSED int closing) {
+ #ifdef __Pyx_Coroutine_USED
+ if (!closing && __Pyx_Coroutine_Check(gen)) {
+ // `self` is an exhausted coroutine: raise an error,
+ // except when called from gen_close(), which should
+ // always be a silent method.
+ PyErr_SetString(PyExc_RuntimeError, "cannot reuse already awaited coroutine");
+ } else
+ #endif
+ if (value) {
+ // `gen` is an exhausted generator:
+ // only set exception if called from send().
+ #ifdef __Pyx_AsyncGen_USED
+ if (__Pyx_AsyncGen_CheckExact(gen))
+ PyErr_SetNone(__Pyx_PyExc_StopAsyncIteration);
+ else
+ #endif
+ PyErr_SetNone(PyExc_StopIteration);
+ }
+}
+
+static
+PyObject *__Pyx_Coroutine_SendEx(__pyx_CoroutineObject *self, PyObject *value, int closing) {
+ __Pyx_PyThreadState_declare
+ PyThreadState *tstate;
__Pyx_ExcInfoStruct *exc_state;
PyObject *retval;
@@ -674,28 +674,28 @@ PyObject *__Pyx_Coroutine_SendEx(__pyx_CoroutineObject *self, PyObject *value, i
if (unlikely(self->resume_label == 0)) {
if (unlikely(value && value != Py_None)) {
- return __Pyx_Coroutine_NotStartedError((PyObject*)self);
+ return __Pyx_Coroutine_NotStartedError((PyObject*)self);
}
}
if (unlikely(self->resume_label == -1)) {
- return __Pyx_Coroutine_AlreadyTerminatedError((PyObject*)self, value, closing);
+ return __Pyx_Coroutine_AlreadyTerminatedError((PyObject*)self, value, closing);
}
-#if CYTHON_FAST_THREAD_STATE
+#if CYTHON_FAST_THREAD_STATE
__Pyx_PyThreadState_assign
- tstate = $local_tstate_cname;
-#else
- tstate = __Pyx_PyThreadState_Current;
-#endif
-
+ tstate = $local_tstate_cname;
+#else
+ tstate = __Pyx_PyThreadState_Current;
+#endif
+
// Traceback/Frame rules pre-Py3.7:
// - on entry, save external exception state in self->gi_exc_state, restore it on exit
// - on exit, keep internally generated exceptions in self->gi_exc_state, clear everything else
- // - on entry, set "f_back" pointer of internal exception traceback to (current) outer call frame
- // - on exit, clear "f_back" of internal exception traceback
- // - do not touch external frames and tracebacks
-
+ // - on entry, set "f_back" pointer of internal exception traceback to (current) outer call frame
+ // - on exit, clear "f_back" of internal exception traceback
+ // - do not touch external frames and tracebacks
+
// Traceback/Frame rules for Py3.7+ (CYTHON_USE_EXC_INFO_STACK):
// - on entry, push internal exception state in self->gi_exc_state on the exception stack
// - on exit, keep internally generated exceptions in self->gi_exc_state, clear everything else
@@ -720,8 +720,8 @@ PyObject *__Pyx_Coroutine_SendEx(__pyx_CoroutineObject *self, PyObject *value, i
// which is a valid state so no need to check
f->f_back = PyThreadState_GetFrame(tstate);
#else
- Py_XINCREF(tstate->frame);
- f->f_back = tstate->frame;
+ Py_XINCREF(tstate->frame);
+ f->f_back = tstate->frame;
#endif
}
#endif
@@ -733,20 +733,20 @@ PyObject *__Pyx_Coroutine_SendEx(__pyx_CoroutineObject *self, PyObject *value, i
tstate->exc_info = exc_state;
#else
if (exc_state->exc_type) {
- // We were in an except handler when we left,
- // restore the exception state which was put aside.
+ // We were in an except handler when we left,
+ // restore the exception state which was put aside.
__Pyx_ExceptionSwap(&exc_state->exc_type, &exc_state->exc_value, &exc_state->exc_traceback);
- // self->exc_* now holds the exception state of the caller
+ // self->exc_* now holds the exception state of the caller
} else {
- // save away the exception state of the caller
+ // save away the exception state of the caller
__Pyx_Coroutine_ExceptionClear(exc_state);
__Pyx_ExceptionSave(&exc_state->exc_type, &exc_state->exc_value, &exc_state->exc_traceback);
}
#endif
self->is_running = 1;
- PyObject* s = (PyObject *)(self);
- retval = self->body(s, tstate, value);
+ PyObject* s = (PyObject *)(self);
+ retval = self->body(s, tstate, value);
self->is_running = 0;
#if CYTHON_USE_EXC_INFO_STACK
@@ -758,40 +758,40 @@ PyObject *__Pyx_Coroutine_SendEx(__pyx_CoroutineObject *self, PyObject *value, i
__Pyx_Coroutine_ResetFrameBackpointer(exc_state);
#endif
- return retval;
-}
-
+ return retval;
+}
+
static CYTHON_INLINE void __Pyx_Coroutine_ResetFrameBackpointer(__Pyx_ExcInfoStruct *exc_state) {
- // Don't keep the reference to f_back any longer than necessary. It
- // may keep a chain of frames alive or it could create a reference
- // cycle.
+ // Don't keep the reference to f_back any longer than necessary. It
+ // may keep a chain of frames alive or it could create a reference
+ // cycle.
PyObject *exc_tb = exc_state->exc_traceback;
if (likely(exc_tb)) {
#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_PYSTON
- // FIXME: what to do in PyPy?
+ // FIXME: what to do in PyPy?
#else
PyTracebackObject *tb = (PyTracebackObject *) exc_tb;
- PyFrameObject *f = tb->tb_frame;
- Py_CLEAR(f->f_back);
+ PyFrameObject *f = tb->tb_frame;
+ Py_CLEAR(f->f_back);
#endif
}
}
static CYTHON_INLINE
-PyObject *__Pyx_Coroutine_MethodReturn(CYTHON_UNUSED PyObject* gen, PyObject *retval) {
- if (unlikely(!retval)) {
- __Pyx_PyThreadState_declare
- __Pyx_PyThreadState_assign
- if (!__Pyx_PyErr_Occurred()) {
- // method call must not terminate with NULL without setting an exception
- PyObject *exc = PyExc_StopIteration;
- #ifdef __Pyx_AsyncGen_USED
- if (__Pyx_AsyncGen_CheckExact(gen))
- exc = __Pyx_PyExc_StopAsyncIteration;
- #endif
- __Pyx_PyErr_SetNone(exc);
- }
+PyObject *__Pyx_Coroutine_MethodReturn(CYTHON_UNUSED PyObject* gen, PyObject *retval) {
+ if (unlikely(!retval)) {
+ __Pyx_PyThreadState_declare
+ __Pyx_PyThreadState_assign
+ if (!__Pyx_PyErr_Occurred()) {
+ // method call must not terminate with NULL without setting an exception
+ PyObject *exc = PyExc_StopIteration;
+ #ifdef __Pyx_AsyncGen_USED
+ if (__Pyx_AsyncGen_CheckExact(gen))
+ exc = __Pyx_PyExc_StopAsyncIteration;
+ #endif
+ __Pyx_PyErr_SetNone(exc);
+ }
}
return retval;
}
@@ -827,9 +827,9 @@ PyObject *__Pyx_Coroutine_FinishDelegation(__pyx_CoroutineObject *gen) {
PyObject *ret;
PyObject *val = NULL;
__Pyx_Coroutine_Undelegate(gen);
- __Pyx_PyGen__FetchStopIterationValue(__Pyx_PyThreadState_Current, &val);
+ __Pyx_PyGen__FetchStopIterationValue(__Pyx_PyThreadState_Current, &val);
// val == NULL on failure => pass on exception
- ret = __Pyx_Coroutine_SendEx(gen, val, 0);
+ ret = __Pyx_Coroutine_SendEx(gen, val, 0);
Py_XDECREF(val);
return ret;
}
@@ -838,8 +838,8 @@ static PyObject *__Pyx_Coroutine_Send(PyObject *self, PyObject *value) {
PyObject *retval;
__pyx_CoroutineObject *gen = (__pyx_CoroutineObject*) self;
PyObject *yf = gen->yieldfrom;
- if (unlikely(gen->is_running))
- return __Pyx_Coroutine_AlreadyRunningError(gen);
+ if (unlikely(gen->is_running))
+ return __Pyx_Coroutine_AlreadyRunningError(gen);
if (yf) {
PyObject *ret;
// FIXME: does this really need an INCREF() ?
@@ -851,27 +851,27 @@ static PyObject *__Pyx_Coroutine_Send(PyObject *self, PyObject *value) {
} else
#endif
#ifdef __Pyx_Coroutine_USED
- if (__Pyx_Coroutine_Check(yf)) {
+ if (__Pyx_Coroutine_Check(yf)) {
ret = __Pyx_Coroutine_Send(yf, value);
} else
#endif
- #ifdef __Pyx_AsyncGen_USED
- if (__pyx_PyAsyncGenASend_CheckExact(yf)) {
- ret = __Pyx_async_gen_asend_send(yf, value);
- } else
- #endif
- #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03030000 && (defined(__linux__) || PY_VERSION_HEX >= 0x030600B3)
- // _PyGen_Send() is not exported before Py3.6
- if (PyGen_CheckExact(yf)) {
+ #ifdef __Pyx_AsyncGen_USED
+ if (__pyx_PyAsyncGenASend_CheckExact(yf)) {
+ ret = __Pyx_async_gen_asend_send(yf, value);
+ } else
+ #endif
+ #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03030000 && (defined(__linux__) || PY_VERSION_HEX >= 0x030600B3)
+ // _PyGen_Send() is not exported before Py3.6
+ if (PyGen_CheckExact(yf)) {
ret = __Pyx_PyGen_Send((PyGenObject*)yf, value == Py_None ? NULL : value);
- } else
- #endif
- #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03050000 && defined(PyCoro_CheckExact) && (defined(__linux__) || PY_VERSION_HEX >= 0x030600B3)
- // _PyGen_Send() is not exported before Py3.6
- if (PyCoro_CheckExact(yf)) {
+ } else
+ #endif
+ #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03050000 && defined(PyCoro_CheckExact) && (defined(__linux__) || PY_VERSION_HEX >= 0x030600B3)
+ // _PyGen_Send() is not exported before Py3.6
+ if (PyCoro_CheckExact(yf)) {
ret = __Pyx_PyGen_Send((PyGenObject*)yf, value == Py_None ? NULL : value);
- } else
- #endif
+ } else
+ #endif
{
if (value == Py_None)
ret = Py_TYPE(yf)->tp_iternext(yf);
@@ -885,9 +885,9 @@ static PyObject *__Pyx_Coroutine_Send(PyObject *self, PyObject *value) {
}
retval = __Pyx_Coroutine_FinishDelegation(gen);
} else {
- retval = __Pyx_Coroutine_SendEx(gen, value, 0);
+ retval = __Pyx_Coroutine_SendEx(gen, value, 0);
}
- return __Pyx_Coroutine_MethodReturn(self, retval);
+ return __Pyx_Coroutine_MethodReturn(self, retval);
}
// This helper function is used by gen_close and gen_throw to
@@ -904,27 +904,27 @@ static int __Pyx_Coroutine_CloseIter(__pyx_CoroutineObject *gen, PyObject *yf) {
} else
#endif
#ifdef __Pyx_Coroutine_USED
- if (__Pyx_Coroutine_Check(yf)) {
+ if (__Pyx_Coroutine_Check(yf)) {
retval = __Pyx_Coroutine_Close(yf);
if (!retval)
return -1;
} else
- if (__Pyx_CoroutineAwait_CheckExact(yf)) {
+ if (__Pyx_CoroutineAwait_CheckExact(yf)) {
retval = __Pyx_CoroutineAwait_Close((__pyx_CoroutineAwaitObject*)yf, NULL);
- if (!retval)
- return -1;
- } else
+ if (!retval)
+ return -1;
+ } else
+ #endif
+ #ifdef __Pyx_AsyncGen_USED
+ if (__pyx_PyAsyncGenASend_CheckExact(yf)) {
+ retval = __Pyx_async_gen_asend_close(yf, NULL);
+ // cannot fail
+ } else
+ if (__pyx_PyAsyncGenAThrow_CheckExact(yf)) {
+ retval = __Pyx_async_gen_athrow_close(yf, NULL);
+ // cannot fail
+ } else
#endif
- #ifdef __Pyx_AsyncGen_USED
- if (__pyx_PyAsyncGenASend_CheckExact(yf)) {
- retval = __Pyx_async_gen_asend_close(yf, NULL);
- // cannot fail
- } else
- if (__pyx_PyAsyncGenAThrow_CheckExact(yf)) {
- retval = __Pyx_async_gen_athrow_close(yf, NULL);
- // cannot fail
- } else
- #endif
{
PyObject *meth;
gen->is_running = 1;
@@ -949,8 +949,8 @@ static int __Pyx_Coroutine_CloseIter(__pyx_CoroutineObject *gen, PyObject *yf) {
static PyObject *__Pyx_Generator_Next(PyObject *self) {
__pyx_CoroutineObject *gen = (__pyx_CoroutineObject*) self;
PyObject *yf = gen->yieldfrom;
- if (unlikely(gen->is_running))
- return __Pyx_Coroutine_AlreadyRunningError(gen);
+ if (unlikely(gen->is_running))
+ return __Pyx_Coroutine_AlreadyRunningError(gen);
if (yf) {
PyObject *ret;
// FIXME: does this really need an INCREF() ?
@@ -962,17 +962,17 @@ static PyObject *__Pyx_Generator_Next(PyObject *self) {
ret = __Pyx_Generator_Next(yf);
} else
#endif
- #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03030000 && (defined(__linux__) || PY_VERSION_HEX >= 0x030600B3)
- // _PyGen_Send() is not exported before Py3.6
- if (PyGen_CheckExact(yf)) {
+ #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03030000 && (defined(__linux__) || PY_VERSION_HEX >= 0x030600B3)
+ // _PyGen_Send() is not exported before Py3.6
+ if (PyGen_CheckExact(yf)) {
ret = __Pyx_PyGen_Send((PyGenObject*)yf, NULL);
- } else
- #endif
- #ifdef __Pyx_Coroutine_USED
- if (__Pyx_Coroutine_Check(yf)) {
- ret = __Pyx_Coroutine_Send(yf, Py_None);
- } else
- #endif
+ } else
+ #endif
+ #ifdef __Pyx_Coroutine_USED
+ if (__Pyx_Coroutine_Check(yf)) {
+ ret = __Pyx_Coroutine_Send(yf, Py_None);
+ } else
+ #endif
ret = Py_TYPE(yf)->tp_iternext(yf);
gen->is_running = 0;
//Py_DECREF(yf);
@@ -981,7 +981,7 @@ static PyObject *__Pyx_Generator_Next(PyObject *self) {
}
return __Pyx_Coroutine_FinishDelegation(gen);
}
- return __Pyx_Coroutine_SendEx(gen, Py_None, 0);
+ return __Pyx_Coroutine_SendEx(gen, Py_None, 0);
}
static PyObject *__Pyx_Coroutine_Close_Method(PyObject *self, CYTHON_UNUSED PyObject *arg) {
@@ -994,8 +994,8 @@ static PyObject *__Pyx_Coroutine_Close(PyObject *self) {
PyObject *yf = gen->yieldfrom;
int err = 0;
- if (unlikely(gen->is_running))
- return __Pyx_Coroutine_AlreadyRunningError(gen);
+ if (unlikely(gen->is_running))
+ return __Pyx_Coroutine_AlreadyRunningError(gen);
if (yf) {
Py_INCREF(yf);
@@ -1005,31 +1005,31 @@ static PyObject *__Pyx_Coroutine_Close(PyObject *self) {
}
if (err == 0)
PyErr_SetNone(PyExc_GeneratorExit);
- retval = __Pyx_Coroutine_SendEx(gen, NULL, 1);
- if (unlikely(retval)) {
- const char *msg;
+ retval = __Pyx_Coroutine_SendEx(gen, NULL, 1);
+ if (unlikely(retval)) {
+ const char *msg;
Py_DECREF(retval);
- if ((0)) {
- #ifdef __Pyx_Coroutine_USED
- } else if (__Pyx_Coroutine_Check(self)) {
- msg = "coroutine ignored GeneratorExit";
- #endif
- #ifdef __Pyx_AsyncGen_USED
- } else if (__Pyx_AsyncGen_CheckExact(self)) {
-#if PY_VERSION_HEX < 0x03060000
- msg = "async generator ignored GeneratorExit - might require Python 3.6+ finalisation (PEP 525)";
-#else
- msg = "async generator ignored GeneratorExit";
-#endif
- #endif
- } else {
- msg = "generator ignored GeneratorExit";
- }
- PyErr_SetString(PyExc_RuntimeError, msg);
+ if ((0)) {
+ #ifdef __Pyx_Coroutine_USED
+ } else if (__Pyx_Coroutine_Check(self)) {
+ msg = "coroutine ignored GeneratorExit";
+ #endif
+ #ifdef __Pyx_AsyncGen_USED
+ } else if (__Pyx_AsyncGen_CheckExact(self)) {
+#if PY_VERSION_HEX < 0x03060000
+ msg = "async generator ignored GeneratorExit - might require Python 3.6+ finalisation (PEP 525)";
+#else
+ msg = "async generator ignored GeneratorExit";
+#endif
+ #endif
+ } else {
+ msg = "generator ignored GeneratorExit";
+ }
+ PyErr_SetString(PyExc_RuntimeError, msg);
return NULL;
}
raised_exception = PyErr_Occurred();
- if (likely(!raised_exception || __Pyx_PyErr_GivenExceptionMatches2(raised_exception, PyExc_GeneratorExit, PyExc_StopIteration))) {
+ if (likely(!raised_exception || __Pyx_PyErr_GivenExceptionMatches2(raised_exception, PyExc_GeneratorExit, PyExc_StopIteration))) {
// ignore these errors
if (raised_exception) PyErr_Clear();
Py_INCREF(Py_None);
@@ -1038,43 +1038,43 @@ static PyObject *__Pyx_Coroutine_Close(PyObject *self) {
return NULL;
}
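
__Pyx_Coroutine_Close above mirrors generator.close(): a GeneratorExit is sent into the suspended frame, StopIteration or GeneratorExit coming back counts as a clean close, and a yielded value turns into the RuntimeError messages restored in this hunk. From plain C, the portable way to trigger the same machinery is to call the object's close method; a hedged sketch (the helper name is illustrative):

    static int close_generator(PyObject *gen) {
        PyObject *res = PyObject_CallMethod(gen, "close", NULL);
        if (res == NULL)
            return -1;   /* e.g. RuntimeError("generator ignored GeneratorExit") */
        Py_DECREF(res);
        return 0;
    }
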
-static PyObject *__Pyx__Coroutine_Throw(PyObject *self, PyObject *typ, PyObject *val, PyObject *tb,
- PyObject *args, int close_on_genexit) {
+static PyObject *__Pyx__Coroutine_Throw(PyObject *self, PyObject *typ, PyObject *val, PyObject *tb,
+ PyObject *args, int close_on_genexit) {
__pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self;
PyObject *yf = gen->yieldfrom;
- if (unlikely(gen->is_running))
- return __Pyx_Coroutine_AlreadyRunningError(gen);
+ if (unlikely(gen->is_running))
+ return __Pyx_Coroutine_AlreadyRunningError(gen);
if (yf) {
PyObject *ret;
Py_INCREF(yf);
- if (__Pyx_PyErr_GivenExceptionMatches(typ, PyExc_GeneratorExit) && close_on_genexit) {
- // Asynchronous generators *should not* be closed right away.
- // We have to allow some awaits to work it through, hence the
- // `close_on_genexit` parameter here.
+ if (__Pyx_PyErr_GivenExceptionMatches(typ, PyExc_GeneratorExit) && close_on_genexit) {
+ // Asynchronous generators *should not* be closed right away.
+ // We have to allow some awaits to work it through, hence the
+ // `close_on_genexit` parameter here.
int err = __Pyx_Coroutine_CloseIter(gen, yf);
Py_DECREF(yf);
__Pyx_Coroutine_Undelegate(gen);
if (err < 0)
- return __Pyx_Coroutine_MethodReturn(self, __Pyx_Coroutine_SendEx(gen, NULL, 0));
+ return __Pyx_Coroutine_MethodReturn(self, __Pyx_Coroutine_SendEx(gen, NULL, 0));
goto throw_here;
}
gen->is_running = 1;
- if (0
+ if (0
#ifdef __Pyx_Generator_USED
- || __Pyx_Generator_CheckExact(yf)
+ || __Pyx_Generator_CheckExact(yf)
+ #endif
+ #ifdef __Pyx_Coroutine_USED
+ || __Pyx_Coroutine_Check(yf)
#endif
+ ) {
+ ret = __Pyx__Coroutine_Throw(yf, typ, val, tb, args, close_on_genexit);
#ifdef __Pyx_Coroutine_USED
- || __Pyx_Coroutine_Check(yf)
+ } else if (__Pyx_CoroutineAwait_CheckExact(yf)) {
+ ret = __Pyx__Coroutine_Throw(((__pyx_CoroutineAwaitObject*)yf)->coroutine, typ, val, tb, args, close_on_genexit);
#endif
- ) {
- ret = __Pyx__Coroutine_Throw(yf, typ, val, tb, args, close_on_genexit);
- #ifdef __Pyx_Coroutine_USED
- } else if (__Pyx_CoroutineAwait_CheckExact(yf)) {
- ret = __Pyx__Coroutine_Throw(((__pyx_CoroutineAwaitObject*)yf)->coroutine, typ, val, tb, args, close_on_genexit);
- #endif
- } else {
+ } else {
PyObject *meth = __Pyx_PyObject_GetAttrStr(yf, PYIDENT("throw"));
if (unlikely(!meth)) {
Py_DECREF(yf);
@@ -1087,12 +1087,12 @@ static PyObject *__Pyx__Coroutine_Throw(PyObject *self, PyObject *typ, PyObject
gen->is_running = 0;
goto throw_here;
}
- if (likely(args)) {
- ret = PyObject_CallObject(meth, args);
- } else {
- // "tb" or even "val" might be NULL, but that also correctly terminates the argument list
- ret = PyObject_CallFunctionObjArgs(meth, typ, val, tb, NULL);
- }
+ if (likely(args)) {
+ ret = PyObject_CallObject(meth, args);
+ } else {
+ // "tb" or even "val" might be NULL, but that also correctly terminates the argument list
+ ret = PyObject_CallFunctionObjArgs(meth, typ, val, tb, NULL);
+ }
Py_DECREF(meth);
}
gen->is_running = 0;
@@ -1100,24 +1100,24 @@ static PyObject *__Pyx__Coroutine_Throw(PyObject *self, PyObject *typ, PyObject
if (!ret) {
ret = __Pyx_Coroutine_FinishDelegation(gen);
}
- return __Pyx_Coroutine_MethodReturn(self, ret);
+ return __Pyx_Coroutine_MethodReturn(self, ret);
}
throw_here:
__Pyx_Raise(typ, val, tb, NULL);
- return __Pyx_Coroutine_MethodReturn(self, __Pyx_Coroutine_SendEx(gen, NULL, 0));
+ return __Pyx_Coroutine_MethodReturn(self, __Pyx_Coroutine_SendEx(gen, NULL, 0));
}
-static PyObject *__Pyx_Coroutine_Throw(PyObject *self, PyObject *args) {
- PyObject *typ;
- PyObject *val = NULL;
- PyObject *tb = NULL;
+static PyObject *__Pyx_Coroutine_Throw(PyObject *self, PyObject *args) {
+ PyObject *typ;
+ PyObject *val = NULL;
+ PyObject *tb = NULL;
+
+ if (!PyArg_UnpackTuple(args, (char *)"throw", 1, 3, &typ, &val, &tb))
+ return NULL;
+
+ return __Pyx__Coroutine_Throw(self, typ, val, tb, args, 1);
+}
- if (!PyArg_UnpackTuple(args, (char *)"throw", 1, 3, &typ, &val, &tb))
- return NULL;
-
- return __Pyx__Coroutine_Throw(self, typ, val, tb, args, 1);
-}
-
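
The relocated __Pyx_Coroutine_Throw wrapper unpacks the one to three positional arguments of throw(type[, value[, traceback]]) with PyArg_UnpackTuple before delegating. A self-contained sketch of the same unpacking pattern (the function itself is hypothetical and deliberately simplified: it drops the traceback):

    #include <Python.h>

    static PyObject *my_throw(PyObject *self, PyObject *args) {
        PyObject *typ, *val = NULL, *tb = NULL;
        /* Borrowed references; val and tb stay NULL when omitted. */
        if (!PyArg_UnpackTuple(args, "throw", 1, 3, &typ, &val, &tb))
            return NULL;
        (void)self; (void)tb;
        PyErr_SetObject(typ, val);
        return NULL;
    }
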
static CYTHON_INLINE int __Pyx_Coroutine_traverse_excstate(__Pyx_ExcInfoStruct *exc_state, visitproc visit, void *arg) {
Py_VISIT(exc_state->exc_type);
Py_VISIT(exc_state->exc_value);
@@ -1125,7 +1125,7 @@ static CYTHON_INLINE int __Pyx_Coroutine_traverse_excstate(__Pyx_ExcInfoStruct *
return 0;
}
-static int __Pyx_Coroutine_traverse(__pyx_CoroutineObject *gen, visitproc visit, void *arg) {
+static int __Pyx_Coroutine_traverse(__pyx_CoroutineObject *gen, visitproc visit, void *arg) {
Py_VISIT(gen->closure);
Py_VISIT(gen->classobj);
Py_VISIT(gen->yieldfrom);
@@ -1139,16 +1139,16 @@ static int __Pyx_Coroutine_clear(PyObject *self) {
Py_CLEAR(gen->classobj);
Py_CLEAR(gen->yieldfrom);
__Pyx_Coroutine_ExceptionClear(&gen->gi_exc_state);
-#ifdef __Pyx_AsyncGen_USED
- if (__Pyx_AsyncGen_CheckExact(self)) {
- Py_CLEAR(((__pyx_PyAsyncGenObject*)gen)->ag_finalizer);
- }
-#endif
- Py_CLEAR(gen->gi_code);
+#ifdef __Pyx_AsyncGen_USED
+ if (__Pyx_AsyncGen_CheckExact(self)) {
+ Py_CLEAR(((__pyx_PyAsyncGenObject*)gen)->ag_finalizer);
+ }
+#endif
+ Py_CLEAR(gen->gi_code);
Py_CLEAR(gen->gi_frame);
Py_CLEAR(gen->gi_name);
Py_CLEAR(gen->gi_qualname);
- Py_CLEAR(gen->gi_modulename);
+ Py_CLEAR(gen->gi_modulename);
return 0;
}
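
__Pyx_Coroutine_traverse and the clear function above implement the standard cyclic-GC contract: tp_traverse reports every owned PyObject* with Py_VISIT, and tp_clear drops them with Py_CLEAR so reference cycles can be broken. A minimal sketch of that pair for a hypothetical one-field type:

    #include <Python.h>

    typedef struct {
        PyObject_HEAD
        PyObject *payload;   /* owned reference, may be NULL */
    } MyObject;

    static int MyObject_traverse(MyObject *self, visitproc visit, void *arg) {
        Py_VISIT(self->payload);   /* report each owned object to the collector */
        return 0;
    }

    static int MyObject_clear(MyObject *self) {
        Py_CLEAR(self->payload);   /* release references to break cycles */
        return 0;
    }
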
@@ -1159,10 +1159,10 @@ static void __Pyx_Coroutine_dealloc(PyObject *self) {
if (gen->gi_weakreflist != NULL)
PyObject_ClearWeakRefs(self);
- if (gen->resume_label >= 0) {
- // Generator is paused or unstarted, so we need to close
+ if (gen->resume_label >= 0) {
+ // Generator is paused or unstarted, so we need to close
PyObject_GC_Track(self);
-#if PY_VERSION_HEX >= 0x030400a1 && CYTHON_USE_TP_FINALIZE
+#if PY_VERSION_HEX >= 0x030400a1 && CYTHON_USE_TP_FINALIZE
if (PyObject_CallFinalizerFromDealloc(self))
#else
Py_TYPE(gen)->tp_del(self);
@@ -1175,14 +1175,14 @@ static void __Pyx_Coroutine_dealloc(PyObject *self) {
PyObject_GC_UnTrack(self);
}
-#ifdef __Pyx_AsyncGen_USED
- if (__Pyx_AsyncGen_CheckExact(self)) {
- /* We have to handle this case for asynchronous generators
- right here, because this code has to be between UNTRACK
- and GC_Del. */
- Py_CLEAR(((__pyx_PyAsyncGenObject*)self)->ag_finalizer);
- }
-#endif
+#ifdef __Pyx_AsyncGen_USED
+ if (__Pyx_AsyncGen_CheckExact(self)) {
+ /* We have to handle this case for asynchronous generators
+ right here, because this code has to be between UNTRACK
+ and GC_Del. */
+ Py_CLEAR(((__pyx_PyAsyncGenObject*)self)->ag_finalizer);
+ }
+#endif
__Pyx_Coroutine_clear(self);
PyObject_GC_Del(gen);
}
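
The dealloc above re-tracks a paused generator and routes destruction through PyObject_CallFinalizerFromDealloc on CPython 3.4+, so the PEP 442 finalizer (tp_finalize) can run, and possibly resurrect the object, before memory is released. The documented dealloc pattern, sketched for the hypothetical MyObject type from the previous example:

    static void MyObject_dealloc(PyObject *self) {
    #if PY_VERSION_HEX >= 0x030400a1
        /* A nonzero return means tp_finalize resurrected the object;
         * deallocation must then be abandoned. */
        if (PyObject_CallFinalizerFromDealloc(self))
            return;
    #endif
        PyObject_GC_UnTrack(self);
        MyObject_clear((MyObject *)self);
        PyObject_GC_Del(self);
    }
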
@@ -1192,93 +1192,93 @@ static void __Pyx_Coroutine_del(PyObject *self) {
__pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self;
__Pyx_PyThreadState_declare
- if (gen->resume_label < 0) {
- // already terminated => nothing to clean up
- return;
- }
+ if (gen->resume_label < 0) {
+ // already terminated => nothing to clean up
+ return;
+ }
-#if !CYTHON_USE_TP_FINALIZE
+#if !CYTHON_USE_TP_FINALIZE
// Temporarily resurrect the object.
assert(self->ob_refcnt == 0);
__Pyx_SET_REFCNT(self, 1);
#endif
- __Pyx_PyThreadState_assign
-
+ __Pyx_PyThreadState_assign
+
// Save the current exception, if any.
__Pyx_ErrFetch(&error_type, &error_value, &error_traceback);
-#ifdef __Pyx_AsyncGen_USED
- if (__Pyx_AsyncGen_CheckExact(self)) {
- __pyx_PyAsyncGenObject *agen = (__pyx_PyAsyncGenObject*)self;
- PyObject *finalizer = agen->ag_finalizer;
- if (finalizer && !agen->ag_closed) {
- PyObject *res = __Pyx_PyObject_CallOneArg(finalizer, self);
- if (unlikely(!res)) {
- PyErr_WriteUnraisable(self);
- } else {
- Py_DECREF(res);
- }
- // Restore the saved exception.
- __Pyx_ErrRestore(error_type, error_value, error_traceback);
- return;
- }
- }
-#endif
-
- if (unlikely(gen->resume_label == 0 && !error_value)) {
-#ifdef __Pyx_Coroutine_USED
-#ifdef __Pyx_Generator_USED
- // only warn about (async) coroutines
- if (!__Pyx_Generator_CheckExact(self))
-#endif
- {
- // untrack dead object as we are executing Python code (which might trigger GC)
- PyObject_GC_UnTrack(self);
-#if PY_MAJOR_VERSION >= 3 /* PY_VERSION_HEX >= 0x03030000*/ || defined(PyErr_WarnFormat)
- if (unlikely(PyErr_WarnFormat(PyExc_RuntimeWarning, 1, "coroutine '%.50S' was never awaited", gen->gi_qualname) < 0))
- PyErr_WriteUnraisable(self);
-#else
- {PyObject *msg;
- char *cmsg;
- #if CYTHON_COMPILING_IN_PYPY
- msg = NULL;
- cmsg = (char*) "coroutine was never awaited";
- #else
- char *cname;
- PyObject *qualname;
- qualname = gen->gi_qualname;
- cname = PyString_AS_STRING(qualname);
- msg = PyString_FromFormat("coroutine '%.50s' was never awaited", cname);
-
- if (unlikely(!msg)) {
- PyErr_Clear();
- cmsg = (char*) "coroutine was never awaited";
- } else {
- cmsg = PyString_AS_STRING(msg);
- }
- #endif
- if (unlikely(PyErr_WarnEx(PyExc_RuntimeWarning, cmsg, 1) < 0))
- PyErr_WriteUnraisable(self);
- Py_XDECREF(msg);}
-#endif
- PyObject_GC_Track(self);
- }
-#endif /*__Pyx_Coroutine_USED*/
- } else {
- PyObject *res = __Pyx_Coroutine_Close(self);
- if (unlikely(!res)) {
- if (PyErr_Occurred())
- PyErr_WriteUnraisable(self);
- } else {
- Py_DECREF(res);
- }
- }
-
+#ifdef __Pyx_AsyncGen_USED
+ if (__Pyx_AsyncGen_CheckExact(self)) {
+ __pyx_PyAsyncGenObject *agen = (__pyx_PyAsyncGenObject*)self;
+ PyObject *finalizer = agen->ag_finalizer;
+ if (finalizer && !agen->ag_closed) {
+ PyObject *res = __Pyx_PyObject_CallOneArg(finalizer, self);
+ if (unlikely(!res)) {
+ PyErr_WriteUnraisable(self);
+ } else {
+ Py_DECREF(res);
+ }
+ // Restore the saved exception.
+ __Pyx_ErrRestore(error_type, error_value, error_traceback);
+ return;
+ }
+ }
+#endif
+
+ if (unlikely(gen->resume_label == 0 && !error_value)) {
+#ifdef __Pyx_Coroutine_USED
+#ifdef __Pyx_Generator_USED
+ // only warn about (async) coroutines
+ if (!__Pyx_Generator_CheckExact(self))
+#endif
+ {
+ // untrack dead object as we are executing Python code (which might trigger GC)
+ PyObject_GC_UnTrack(self);
+#if PY_MAJOR_VERSION >= 3 /* PY_VERSION_HEX >= 0x03030000*/ || defined(PyErr_WarnFormat)
+ if (unlikely(PyErr_WarnFormat(PyExc_RuntimeWarning, 1, "coroutine '%.50S' was never awaited", gen->gi_qualname) < 0))
+ PyErr_WriteUnraisable(self);
+#else
+ {PyObject *msg;
+ char *cmsg;
+ #if CYTHON_COMPILING_IN_PYPY
+ msg = NULL;
+ cmsg = (char*) "coroutine was never awaited";
+ #else
+ char *cname;
+ PyObject *qualname;
+ qualname = gen->gi_qualname;
+ cname = PyString_AS_STRING(qualname);
+ msg = PyString_FromFormat("coroutine '%.50s' was never awaited", cname);
+
+ if (unlikely(!msg)) {
+ PyErr_Clear();
+ cmsg = (char*) "coroutine was never awaited";
+ } else {
+ cmsg = PyString_AS_STRING(msg);
+ }
+ #endif
+ if (unlikely(PyErr_WarnEx(PyExc_RuntimeWarning, cmsg, 1) < 0))
+ PyErr_WriteUnraisable(self);
+ Py_XDECREF(msg);}
+#endif
+ PyObject_GC_Track(self);
+ }
+#endif /*__Pyx_Coroutine_USED*/
+ } else {
+ PyObject *res = __Pyx_Coroutine_Close(self);
+ if (unlikely(!res)) {
+ if (PyErr_Occurred())
+ PyErr_WriteUnraisable(self);
+ } else {
+ Py_DECREF(res);
+ }
+ }
+
// Restore the saved exception.
__Pyx_ErrRestore(error_type, error_value, error_traceback);
-#if !CYTHON_USE_TP_FINALIZE
+#if !CYTHON_USE_TP_FINALIZE
// Undo the temporary resurrection; can't use DECREF here, it would
// cause a recursive call.
assert(Py_REFCNT(self) > 0);
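
The restored body of __Pyx_Coroutine_del warns when a coroutine is finalized without ever having been started, using PyErr_WarnFormat with the %.50S qualname format on Python 3 and a hand-built byte string on older interpreters. The Python 3 half in isolation (helper and argument names are hypothetical):

    static void warn_never_awaited(PyObject *self, PyObject *qualname) {
        if (PyErr_WarnFormat(PyExc_RuntimeWarning, 1,
                             "coroutine '%.50S' was never awaited", qualname) < 0)
            PyErr_WriteUnraisable(self);  /* warning was escalated to an error */
    }
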
@@ -1404,17 +1404,17 @@ __Pyx_Coroutine_get_frame(__pyx_CoroutineObject *self, CYTHON_UNUSED void *conte
}
static __pyx_CoroutineObject *__Pyx__Coroutine_New(
- PyTypeObject* type, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure,
+ PyTypeObject* type, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure,
PyObject *name, PyObject *qualname, PyObject *module_name) {
__pyx_CoroutineObject *gen = PyObject_GC_New(__pyx_CoroutineObject, type);
- if (unlikely(!gen))
+ if (unlikely(!gen))
return NULL;
- return __Pyx__Coroutine_NewInit(gen, body, code, closure, name, qualname, module_name);
-}
+ return __Pyx__Coroutine_NewInit(gen, body, code, closure, name, qualname, module_name);
+}
-static __pyx_CoroutineObject *__Pyx__Coroutine_NewInit(
- __pyx_CoroutineObject *gen, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure,
- PyObject *name, PyObject *qualname, PyObject *module_name) {
+static __pyx_CoroutineObject *__Pyx__Coroutine_NewInit(
+ __pyx_CoroutineObject *gen, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure,
+ PyObject *name, PyObject *qualname, PyObject *module_name) {
gen->body = body;
gen->closure = closure;
Py_XINCREF(closure);
@@ -1435,8 +1435,8 @@ static __pyx_CoroutineObject *__Pyx__Coroutine_NewInit(
gen->gi_name = name;
Py_XINCREF(module_name);
gen->gi_modulename = module_name;
- Py_XINCREF(code);
- gen->gi_code = code;
+ Py_XINCREF(code);
+ gen->gi_code = code;
gen->gi_frame = NULL;
PyObject_GC_Track(gen);
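
__Pyx__Coroutine_NewInit follows the usual allocate-initialize-track order: the object comes out of PyObject_GC_New untracked, every field (including gi_code, restored above) is filled in, and only then is the object handed to the collector. Sketched for the hypothetical MyObject type:

    static PyObject *MyObject_create(PyTypeObject *type, PyObject *payload) {
        MyObject *obj = PyObject_GC_New(MyObject, type);
        if (obj == NULL)
            return NULL;
        Py_XINCREF(payload);
        obj->payload = payload;      /* initialize every field first ...  */
        PyObject_GC_Track(obj);      /* ... then expose the object to GC  */
        return (PyObject *)obj;
    }
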
@@ -1447,7 +1447,7 @@ static __pyx_CoroutineObject *__Pyx__Coroutine_NewInit(
//////////////////// Coroutine ////////////////////
//@requires: CoroutineBase
//@requires: PatchGeneratorABC
-//@requires: ObjectHandling.c::PyObject_GenericGetAttrNoDict
+//@requires: ObjectHandling.c::PyObject_GenericGetAttrNoDict
static void __Pyx_CoroutineAwait_dealloc(PyObject *self) {
PyObject_GC_UnTrack(self);
@@ -1588,7 +1588,7 @@ static PyObject *__Pyx_Coroutine_await_method(PyObject *coroutine, CYTHON_UNUSED
#if defined(__Pyx_IterableCoroutine_USED) || CYTHON_USE_ASYNC_SLOTS
static PyObject *__Pyx_Coroutine_await(PyObject *coroutine) {
- if (unlikely(!coroutine || !__Pyx_Coroutine_Check(coroutine))) {
+ if (unlikely(!coroutine || !__Pyx_Coroutine_Check(coroutine))) {
PyErr_SetString(PyExc_TypeError, "invalid input, expected coroutine");
return NULL;
}
@@ -1628,7 +1628,7 @@ static PyMemberDef __pyx_Coroutine_memberlist[] = {
{(char *) "cr_running", T_BOOL, offsetof(__pyx_CoroutineObject, is_running), READONLY, NULL},
{(char*) "cr_await", T_OBJECT, offsetof(__pyx_CoroutineObject, yieldfrom), READONLY,
(char*) PyDoc_STR("object being awaited, or None")},
- {(char*) "cr_code", T_OBJECT, offsetof(__pyx_CoroutineObject, gi_code), READONLY, NULL},
+ {(char*) "cr_code", T_OBJECT, offsetof(__pyx_CoroutineObject, gi_code), READONLY, NULL},
{(char *) "__module__", T_OBJECT, offsetof(__pyx_CoroutineObject, gi_modulename), PY_WRITE_RESTRICTED, 0},
{0, 0, 0, 0, 0}
};
@@ -1638,8 +1638,8 @@ static PyGetSetDef __pyx_Coroutine_getsets[] = {
(char*) PyDoc_STR("name of the coroutine"), 0},
{(char *) "__qualname__", (getter)__Pyx_Coroutine_get_qualname, (setter)__Pyx_Coroutine_set_qualname,
(char*) PyDoc_STR("qualified name of the coroutine"), 0},
- {(char *) "cr_frame", (getter)__Pyx_Coroutine_get_frame, NULL,
- (char*) PyDoc_STR("Frame of the coroutine"), 0},
+ {(char *) "cr_frame", (getter)__Pyx_Coroutine_get_frame, NULL,
+ (char*) PyDoc_STR("Frame of the coroutine"), 0},
{0, 0, 0, 0, 0}
};
@@ -1659,7 +1659,7 @@ static PyTypeObject __pyx_CoroutineType_type = {
"coroutine", /*tp_name*/
sizeof(__pyx_CoroutineObject), /*tp_basicsize*/
0, /*tp_itemsize*/
- (destructor) __Pyx_Coroutine_dealloc,/*tp_dealloc*/
+ (destructor) __Pyx_Coroutine_dealloc,/*tp_dealloc*/
0, /*tp_print*/
0, /*tp_getattr*/
0, /*tp_setattr*/
@@ -1682,14 +1682,14 @@ static PyTypeObject __pyx_CoroutineType_type = {
0, /*tp_doc*/
(traverseproc) __Pyx_Coroutine_traverse, /*tp_traverse*/
0, /*tp_clear*/
-#if CYTHON_USE_ASYNC_SLOTS && CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 && PY_VERSION_HEX < 0x030500B1
+#if CYTHON_USE_ASYNC_SLOTS && CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 && PY_VERSION_HEX < 0x030500B1
// in order to (mis-)use tp_reserved above, we must also implement tp_richcompare
__Pyx_Coroutine_compare, /*tp_richcompare*/
#else
0, /*tp_richcompare*/
#endif
offsetof(__pyx_CoroutineObject, gi_weakreflist), /*tp_weaklistoffset*/
- // no tp_iter() as iterator is only available through __await__()
+ // no tp_iter() as iterator is only available through __await__()
0, /*tp_iter*/
0, /*tp_iternext*/
__pyx_Coroutine_methods, /*tp_methods*/
@@ -1710,16 +1710,16 @@ static PyTypeObject __pyx_CoroutineType_type = {
0, /*tp_cache*/
0, /*tp_subclasses*/
0, /*tp_weaklist*/
-#if CYTHON_USE_TP_FINALIZE
+#if CYTHON_USE_TP_FINALIZE
0, /*tp_del*/
#else
__Pyx_Coroutine_del, /*tp_del*/
#endif
0, /*tp_version_tag*/
-#if CYTHON_USE_TP_FINALIZE
+#if CYTHON_USE_TP_FINALIZE
__Pyx_Coroutine_del, /*tp_finalize*/
-#elif PY_VERSION_HEX >= 0x030400a1
- 0, /*tp_finalize*/
+#elif PY_VERSION_HEX >= 0x030400a1
+ 0, /*tp_finalize*/
#endif
#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800)
0, /*tp_vectorcall*/
@@ -1734,107 +1734,107 @@ static PyTypeObject __pyx_CoroutineType_type = {
static int __pyx_Coroutine_init(void) {
// on Windows, C-API functions can't be used in slots statically
- __pyx_CoroutineType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict;
+ __pyx_CoroutineType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict;
__pyx_CoroutineType = __Pyx_FetchCommonType(&__pyx_CoroutineType_type);
if (unlikely(!__pyx_CoroutineType))
return -1;
-#ifdef __Pyx_IterableCoroutine_USED
- if (unlikely(__pyx_IterableCoroutine_init() == -1))
- return -1;
-#endif
-
+#ifdef __Pyx_IterableCoroutine_USED
+ if (unlikely(__pyx_IterableCoroutine_init() == -1))
+ return -1;
+#endif
+
__pyx_CoroutineAwaitType = __Pyx_FetchCommonType(&__pyx_CoroutineAwaitType_type);
if (unlikely(!__pyx_CoroutineAwaitType))
return -1;
return 0;
}
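
Both init functions in this file fill tp_getattro at runtime rather than in the static initializer because, as the comment notes, C-API symbols are not compile-time constants on Windows DLL builds. The same workaround in miniature (type and module names are hypothetical):

    static PyTypeObject MyType_type = {
        PyVarObject_HEAD_INIT(NULL, 0)
        .tp_name = "mymodule.MyType",
        .tp_basicsize = sizeof(MyObject),
        .tp_flags = Py_TPFLAGS_DEFAULT,
    };

    static int init_my_type(void) {
        /* Slot filled at runtime: PyObject_GenericGetAttr is a DLL import
         * on Windows and cannot appear in the static initializer there. */
        MyType_type.tp_getattro = PyObject_GenericGetAttr;
        return PyType_Ready(&MyType_type) < 0 ? -1 : 0;
    }
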
-
-//////////////////// IterableCoroutine.proto ////////////////////
-
-#define __Pyx_IterableCoroutine_USED
-
-static PyTypeObject *__pyx_IterableCoroutineType = 0;
-
-#undef __Pyx_Coroutine_Check
-#define __Pyx_Coroutine_Check(obj) (__Pyx_Coroutine_CheckExact(obj) || (Py_TYPE(obj) == __pyx_IterableCoroutineType))
-
-#define __Pyx_IterableCoroutine_New(body, code, closure, name, qualname, module_name) \
- __Pyx__Coroutine_New(__pyx_IterableCoroutineType, body, code, closure, name, qualname, module_name)
-
-static int __pyx_IterableCoroutine_init(void);/*proto*/
-
-
-//////////////////// IterableCoroutine ////////////////////
-//@requires: Coroutine
-//@requires: CommonStructures.c::FetchCommonType
-
-static PyTypeObject __pyx_IterableCoroutineType_type = {
- PyVarObject_HEAD_INIT(0, 0)
- "iterable_coroutine", /*tp_name*/
- sizeof(__pyx_CoroutineObject), /*tp_basicsize*/
- 0, /*tp_itemsize*/
- (destructor) __Pyx_Coroutine_dealloc,/*tp_dealloc*/
- 0, /*tp_print*/
- 0, /*tp_getattr*/
- 0, /*tp_setattr*/
-#if CYTHON_USE_ASYNC_SLOTS
- &__pyx_Coroutine_as_async, /*tp_as_async (tp_reserved) - Py3 only! */
-#else
- 0, /*tp_reserved*/
-#endif
- 0, /*tp_repr*/
- 0, /*tp_as_number*/
- 0, /*tp_as_sequence*/
- 0, /*tp_as_mapping*/
- 0, /*tp_hash*/
- 0, /*tp_call*/
- 0, /*tp_str*/
- 0, /*tp_getattro*/
- 0, /*tp_setattro*/
- 0, /*tp_as_buffer*/
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_HAVE_FINALIZE, /*tp_flags*/
- 0, /*tp_doc*/
- (traverseproc) __Pyx_Coroutine_traverse, /*tp_traverse*/
- 0, /*tp_clear*/
-#if CYTHON_USE_ASYNC_SLOTS && CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 && PY_VERSION_HEX < 0x030500B1
- // in order to (mis-)use tp_reserved above, we must also implement tp_richcompare
- __Pyx_Coroutine_compare, /*tp_richcompare*/
-#else
- 0, /*tp_richcompare*/
-#endif
- offsetof(__pyx_CoroutineObject, gi_weakreflist), /*tp_weaklistoffset*/
- // enable iteration for legacy support of asyncio yield-from protocol
- __Pyx_Coroutine_await, /*tp_iter*/
- (iternextfunc) __Pyx_Generator_Next, /*tp_iternext*/
- __pyx_Coroutine_methods, /*tp_methods*/
- __pyx_Coroutine_memberlist, /*tp_members*/
- __pyx_Coroutine_getsets, /*tp_getset*/
- 0, /*tp_base*/
- 0, /*tp_dict*/
- 0, /*tp_descr_get*/
- 0, /*tp_descr_set*/
- 0, /*tp_dictoffset*/
- 0, /*tp_init*/
- 0, /*tp_alloc*/
- 0, /*tp_new*/
- 0, /*tp_free*/
- 0, /*tp_is_gc*/
- 0, /*tp_bases*/
- 0, /*tp_mro*/
- 0, /*tp_cache*/
- 0, /*tp_subclasses*/
- 0, /*tp_weaklist*/
-#if PY_VERSION_HEX >= 0x030400a1
- 0, /*tp_del*/
-#else
- __Pyx_Coroutine_del, /*tp_del*/
-#endif
- 0, /*tp_version_tag*/
-#if PY_VERSION_HEX >= 0x030400a1
- __Pyx_Coroutine_del, /*tp_finalize*/
-#endif
+
+//////////////////// IterableCoroutine.proto ////////////////////
+
+#define __Pyx_IterableCoroutine_USED
+
+static PyTypeObject *__pyx_IterableCoroutineType = 0;
+
+#undef __Pyx_Coroutine_Check
+#define __Pyx_Coroutine_Check(obj) (__Pyx_Coroutine_CheckExact(obj) || (Py_TYPE(obj) == __pyx_IterableCoroutineType))
+
+#define __Pyx_IterableCoroutine_New(body, code, closure, name, qualname, module_name) \
+ __Pyx__Coroutine_New(__pyx_IterableCoroutineType, body, code, closure, name, qualname, module_name)
+
+static int __pyx_IterableCoroutine_init(void);/*proto*/
+
+
+//////////////////// IterableCoroutine ////////////////////
+//@requires: Coroutine
+//@requires: CommonStructures.c::FetchCommonType
+
+static PyTypeObject __pyx_IterableCoroutineType_type = {
+ PyVarObject_HEAD_INIT(0, 0)
+ "iterable_coroutine", /*tp_name*/
+ sizeof(__pyx_CoroutineObject), /*tp_basicsize*/
+ 0, /*tp_itemsize*/
+ (destructor) __Pyx_Coroutine_dealloc,/*tp_dealloc*/
+ 0, /*tp_print*/
+ 0, /*tp_getattr*/
+ 0, /*tp_setattr*/
+#if CYTHON_USE_ASYNC_SLOTS
+ &__pyx_Coroutine_as_async, /*tp_as_async (tp_reserved) - Py3 only! */
+#else
+ 0, /*tp_reserved*/
+#endif
+ 0, /*tp_repr*/
+ 0, /*tp_as_number*/
+ 0, /*tp_as_sequence*/
+ 0, /*tp_as_mapping*/
+ 0, /*tp_hash*/
+ 0, /*tp_call*/
+ 0, /*tp_str*/
+ 0, /*tp_getattro*/
+ 0, /*tp_setattro*/
+ 0, /*tp_as_buffer*/
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_HAVE_FINALIZE, /*tp_flags*/
+ 0, /*tp_doc*/
+ (traverseproc) __Pyx_Coroutine_traverse, /*tp_traverse*/
+ 0, /*tp_clear*/
+#if CYTHON_USE_ASYNC_SLOTS && CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 && PY_VERSION_HEX < 0x030500B1
+ // in order to (mis-)use tp_reserved above, we must also implement tp_richcompare
+ __Pyx_Coroutine_compare, /*tp_richcompare*/
+#else
+ 0, /*tp_richcompare*/
+#endif
+ offsetof(__pyx_CoroutineObject, gi_weakreflist), /*tp_weaklistoffset*/
+ // enable iteration for legacy support of asyncio yield-from protocol
+ __Pyx_Coroutine_await, /*tp_iter*/
+ (iternextfunc) __Pyx_Generator_Next, /*tp_iternext*/
+ __pyx_Coroutine_methods, /*tp_methods*/
+ __pyx_Coroutine_memberlist, /*tp_members*/
+ __pyx_Coroutine_getsets, /*tp_getset*/
+ 0, /*tp_base*/
+ 0, /*tp_dict*/
+ 0, /*tp_descr_get*/
+ 0, /*tp_descr_set*/
+ 0, /*tp_dictoffset*/
+ 0, /*tp_init*/
+ 0, /*tp_alloc*/
+ 0, /*tp_new*/
+ 0, /*tp_free*/
+ 0, /*tp_is_gc*/
+ 0, /*tp_bases*/
+ 0, /*tp_mro*/
+ 0, /*tp_cache*/
+ 0, /*tp_subclasses*/
+ 0, /*tp_weaklist*/
+#if PY_VERSION_HEX >= 0x030400a1
+ 0, /*tp_del*/
+#else
+ __Pyx_Coroutine_del, /*tp_del*/
+#endif
+ 0, /*tp_version_tag*/
+#if PY_VERSION_HEX >= 0x030400a1
+ __Pyx_Coroutine_del, /*tp_finalize*/
+#endif
#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800)
0, /*tp_vectorcall*/
#endif
@@ -1844,22 +1844,22 @@ static PyTypeObject __pyx_IterableCoroutineType_type = {
#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000
0, /*tp_pypy_flags*/
#endif
-};
-
-
-static int __pyx_IterableCoroutine_init(void) {
- __pyx_IterableCoroutineType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict;
- __pyx_IterableCoroutineType = __Pyx_FetchCommonType(&__pyx_IterableCoroutineType_type);
- if (unlikely(!__pyx_IterableCoroutineType))
- return -1;
- return 0;
-}
-
-
+};
+
+
+static int __pyx_IterableCoroutine_init(void) {
+ __pyx_IterableCoroutineType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict;
+ __pyx_IterableCoroutineType = __Pyx_FetchCommonType(&__pyx_IterableCoroutineType_type);
+ if (unlikely(!__pyx_IterableCoroutineType))
+ return -1;
+ return 0;
+}
+
+
//////////////////// Generator ////////////////////
//@requires: CoroutineBase
//@requires: PatchGeneratorABC
-//@requires: ObjectHandling.c::PyObject_GenericGetAttrNoDict
+//@requires: ObjectHandling.c::PyObject_GenericGetAttrNoDict
static PyMethodDef __pyx_Generator_methods[] = {
{"send", (PyCFunction) __Pyx_Coroutine_Send, METH_O,
@@ -1875,7 +1875,7 @@ static PyMemberDef __pyx_Generator_memberlist[] = {
{(char *) "gi_running", T_BOOL, offsetof(__pyx_CoroutineObject, is_running), READONLY, NULL},
{(char*) "gi_yieldfrom", T_OBJECT, offsetof(__pyx_CoroutineObject, yieldfrom), READONLY,
(char*) PyDoc_STR("object being iterated by 'yield from', or None")},
- {(char*) "gi_code", T_OBJECT, offsetof(__pyx_CoroutineObject, gi_code), READONLY, NULL},
+ {(char*) "gi_code", T_OBJECT, offsetof(__pyx_CoroutineObject, gi_code), READONLY, NULL},
{0, 0, 0, 0, 0}
};
@@ -1935,16 +1935,16 @@ static PyTypeObject __pyx_GeneratorType_type = {
0, /*tp_cache*/
0, /*tp_subclasses*/
0, /*tp_weaklist*/
-#if CYTHON_USE_TP_FINALIZE
+#if CYTHON_USE_TP_FINALIZE
0, /*tp_del*/
#else
__Pyx_Coroutine_del, /*tp_del*/
#endif
0, /*tp_version_tag*/
-#if CYTHON_USE_TP_FINALIZE
+#if CYTHON_USE_TP_FINALIZE
__Pyx_Coroutine_del, /*tp_finalize*/
-#elif PY_VERSION_HEX >= 0x030400a1
- 0, /*tp_finalize*/
+#elif PY_VERSION_HEX >= 0x030400a1
+ 0, /*tp_finalize*/
#endif
#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800)
0, /*tp_vectorcall*/
@@ -1959,7 +1959,7 @@ static PyTypeObject __pyx_GeneratorType_type = {
static int __pyx_Generator_init(void) {
// on Windows, C-API functions can't be used in slots statically
- __pyx_GeneratorType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict;
+ __pyx_GeneratorType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict;
__pyx_GeneratorType_type.tp_iter = PyObject_SelfIter;
__pyx_GeneratorType = __Pyx_FetchCommonType(&__pyx_GeneratorType_type);
@@ -1984,15 +1984,15 @@ static void __Pyx__ReturnWithStopIteration(PyObject* value); /*proto*/
// 1) Instantiating an exception just to pass back a value is costly.
// 2) CPython 3.3 <= x < 3.5b1 crash in yield-from when the StopIteration is not instantiated.
// 3) Passing a tuple as value into PyErr_SetObject() passes its items on as arguments.
-// 4) Passing an exception as value will interpret it as an exception on unpacking and raise it (or unpack its value).
-// 5) If there is currently an exception being handled, we need to chain it.
+// 4) Passing an exception as value will interpret it as an exception on unpacking and raise it (or unpack its value).
+// 5) If there is currently an exception being handled, we need to chain it.
static void __Pyx__ReturnWithStopIteration(PyObject* value) {
PyObject *exc, *args;
#if CYTHON_COMPILING_IN_CPYTHON || CYTHON_COMPILING_IN_PYSTON
__Pyx_PyThreadState_declare
- if ((PY_VERSION_HEX >= 0x03030000 && PY_VERSION_HEX < 0x030500B1)
- || unlikely(PyTuple_Check(value) || PyExceptionInstance_Check(value))) {
+ if ((PY_VERSION_HEX >= 0x03030000 && PY_VERSION_HEX < 0x030500B1)
+ || unlikely(PyTuple_Check(value) || PyExceptionInstance_Check(value))) {
args = PyTuple_New(1);
if (unlikely(!args)) return;
Py_INCREF(value);
@@ -2005,20 +2005,20 @@ static void __Pyx__ReturnWithStopIteration(PyObject* value) {
Py_INCREF(value);
exc = value;
}
- #if CYTHON_FAST_THREAD_STATE
+ #if CYTHON_FAST_THREAD_STATE
__Pyx_PyThreadState_assign
#if CYTHON_USE_EXC_INFO_STACK
if (!$local_tstate_cname->exc_info->exc_type)
- #else
- if (!$local_tstate_cname->exc_type)
- #endif
- {
+ #else
+ if (!$local_tstate_cname->exc_type)
+ #endif
+ {
// no chaining needed => avoid the overhead in PyErr_SetObject()
Py_INCREF(PyExc_StopIteration);
__Pyx_ErrRestore(PyExc_StopIteration, exc, NULL);
return;
}
- #endif
+ #endif
#else
args = PyTuple_Pack(1, value);
if (unlikely(!args)) return;
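
The comment block above lists why returning a value from a generator cannot simply instantiate StopIteration: tuples passed to PyErr_SetObject are splatted into constructor arguments and exception instances are raised as-is, so non-trivial values are wrapped in a 1-tuple first. The portable fallback branch, condensed into a standalone sketch:

    static void return_with_stopiteration(PyObject *value) {
        PyObject *args = PyTuple_Pack(1, value);   /* new ref to (value,) */
        if (args == NULL)
            return;                                /* error already set */
        /* The 1-tuple becomes StopIteration's argument list, so
         * StopIteration(value) is raised whatever type value has. */
        PyErr_SetObject(PyExc_StopIteration, args);
        Py_DECREF(args);
    }
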
@@ -2089,10 +2089,10 @@ static int __Pyx_patch_abc(void); /*proto*/
//////////////////// PatchGeneratorABC ////////////////////
//@requires: PatchModuleWithCoroutine
-#ifndef CYTHON_REGISTER_ABCS
-#define CYTHON_REGISTER_ABCS 1
-#endif
-
+#ifndef CYTHON_REGISTER_ABCS
+#define CYTHON_REGISTER_ABCS 1
+#endif
+
#if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED)
static PyObject* __Pyx_patch_abc_module(PyObject *module); /*proto*/
static PyObject* __Pyx_patch_abc_module(PyObject *module) {
@@ -2115,13 +2115,13 @@ if _cython_coroutine_type is not None:
static int __Pyx_patch_abc(void) {
#if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED)
static int abc_patched = 0;
- if (CYTHON_REGISTER_ABCS && !abc_patched) {
+ if (CYTHON_REGISTER_ABCS && !abc_patched) {
PyObject *module;
- module = PyImport_ImportModule((PY_MAJOR_VERSION >= 3) ? "collections.abc" : "collections");
+ module = PyImport_ImportModule((PY_MAJOR_VERSION >= 3) ? "collections.abc" : "collections");
if (!module) {
PyErr_WriteUnraisable(NULL);
if (unlikely(PyErr_WarnEx(PyExc_RuntimeWarning,
- ((PY_MAJOR_VERSION >= 3) ?
+ ((PY_MAJOR_VERSION >= 3) ?
"Cython module failed to register with collections.abc module" :
"Cython module failed to register with collections module"), 1) < 0)) {
return -1;
@@ -2145,7 +2145,7 @@ static int __Pyx_patch_abc(void) {
}
#else
// avoid "unused" warning for __Pyx_Coroutine_patch_module()
- if ((0)) __Pyx_Coroutine_patch_module(NULL, NULL);
+ if ((0)) __Pyx_Coroutine_patch_module(NULL, NULL);
#endif
return 0;
}
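
__Pyx_patch_abc runs once per process and registers Cython's generator and coroutine types with collections.abc (plain collections on Python 2) so isinstance() checks keep working. Registering an extension type as a virtual subclass from C looks roughly like this sketch:

    static int register_with_abc(PyTypeObject *type, const char *abc_name) {
        int ok = -1;
        PyObject *mod = PyImport_ImportModule("collections.abc");
        if (mod == NULL)
            return -1;
        PyObject *abc = PyObject_GetAttrString(mod, abc_name);
        if (abc != NULL) {
            /* ABCMeta.register(cls) makes `type` a virtual subclass. */
            PyObject *res = PyObject_CallMethod(abc, "register", "O",
                                                (PyObject *)type);
            if (res != NULL) {
                Py_DECREF(res);
                ok = 0;
            }
            Py_DECREF(abc);
        }
        Py_DECREF(mod);
        return ok;
    }
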
@@ -2186,8 +2186,8 @@ _module._COROUTINE_TYPES = coro_types
);
} else {
PyErr_Clear();
-// Always enable fallback: even if we compile against 3.4.2, we might be running on 3.4.1 at some point.
-//#if PY_VERSION_HEX < 0x03040200
+// Always enable fallback: even if we compile against 3.4.2, we might be running on 3.4.1 at some point.
+//#if PY_VERSION_HEX < 0x03040200
// Py3.4.1 used to have asyncio.tasks instead of asyncio.coroutines
package = __Pyx_Import(PYIDENT("asyncio.tasks"), NULL, 0);
if (unlikely(!package)) goto asyncio_done;
@@ -2208,15 +2208,15 @@ if hasattr(_module, 'iscoroutine'):
old_types.add(_cython_generator_type)
""")
);
-//#endif
+//#endif
// Py < 0x03040200
}
Py_DECREF(package);
if (unlikely(!patch_module)) goto ignore;
-//#if PY_VERSION_HEX < 0x03040200
+//#if PY_VERSION_HEX < 0x03040200
asyncio_done:
PyErr_Clear();
-//#endif
+//#endif
asyncio_patched = 1;
#ifdef __Pyx_Generator_USED
// now patch inspect.isgenerator() by looking up the imported module in the patched asyncio module
@@ -2238,7 +2238,7 @@ asyncio_done:
}
#else
// avoid "unused" warning for __Pyx_patch_inspect()
- if ((0)) return __Pyx_patch_inspect(module);
+ if ((0)) return __Pyx_patch_inspect(module);
#endif
}
return module;
@@ -2250,7 +2250,7 @@ ignore:
}
#else
// avoid "unused" warning for __Pyx_Coroutine_patch_module()
- if ((0)) return __Pyx_patch_inspect(__Pyx_Coroutine_patch_module(module, NULL));
+ if ((0)) return __Pyx_patch_inspect(__Pyx_Coroutine_patch_module(module, NULL));
#endif
return module;
}
@@ -2285,7 +2285,7 @@ old_types.add(_cython_generator_type)
}
#else
// avoid "unused" warning for __Pyx_Coroutine_patch_module()
- if ((0)) return __Pyx_Coroutine_patch_module(module, NULL);
+ if ((0)) return __Pyx_Coroutine_patch_module(module, NULL);
#endif
return module;
}
diff --git a/contrib/tools/cython/Cython/Utility/CppConvert.pyx b/contrib/tools/cython/Cython/Utility/CppConvert.pyx
index 27cf0164d9..5f7859dd0e 100644
--- a/contrib/tools/cython/Cython/Utility/CppConvert.pyx
+++ b/contrib/tools/cython/Cython/Utility/CppConvert.pyx
@@ -7,12 +7,12 @@ cdef extern from *:
cdef cppclass string "{{type}}":
string()
string(char* c_str, size_t size)
- cdef const char* __Pyx_PyObject_AsStringAndSize(object, Py_ssize_t*) except NULL
+ cdef const char* __Pyx_PyObject_AsStringAndSize(object, Py_ssize_t*) except NULL
@cname("{{cname}}")
cdef string {{cname}}(object o) except *:
cdef Py_ssize_t length = 0
- cdef const char* data = __Pyx_PyObject_AsStringAndSize(o, &length)
+ cdef const char* data = __Pyx_PyObject_AsStringAndSize(o, &length)
return string(data, length)
@@ -27,7 +27,7 @@ cdef extern from *:
{{for py_type in ['PyObject', 'PyUnicode', 'PyStr', 'PyBytes', 'PyByteArray']}}
cdef extern from *:
- cdef object __Pyx_{{py_type}}_FromStringAndSize(const char*, size_t)
+ cdef object __Pyx_{{py_type}}_FromStringAndSize(const char*, size_t)
@cname("{{cname.replace("PyObject", py_type, 1)}}")
cdef inline object {{cname.replace("PyObject", py_type, 1)}}(const string& s):
@@ -45,7 +45,7 @@ cdef extern from *:
cdef vector[X] {{cname}}(object o) except *:
cdef vector[X] v
for item in o:
- v.push_back(<X>item)
+ v.push_back(<X>item)
return v
@@ -58,7 +58,7 @@ cdef extern from *:
@cname("{{cname}}")
cdef object {{cname}}(vector[X]& v):
- return [v[i] for i in range(v.size())]
+ return [v[i] for i in range(v.size())]
#################### list.from_py ####################
@@ -71,7 +71,7 @@ cdef extern from *:
cdef cpp_list[X] {{cname}}(object o) except *:
cdef cpp_list[X] l
for item in o:
- l.push_back(<X>item)
+ l.push_back(<X>item)
return l
@@ -93,7 +93,7 @@ cdef object {{cname}}(const cpp_list[X]& v):
o = []
cdef cpp_list[X].const_iterator iter = v.begin()
while iter != v.end():
- o.append(cython.operator.dereference(iter))
+ o.append(cython.operator.dereference(iter))
cython.operator.preincrement(iter)
return o
@@ -108,7 +108,7 @@ cdef extern from *:
cdef set[X] {{cname}}(object o) except *:
cdef set[X] s
for item in o:
- s.insert(<X>item)
+ s.insert(<X>item)
return s
@@ -130,7 +130,7 @@ cdef object {{cname}}(const cpp_set[X]& s):
o = set()
cdef cpp_set[X].const_iterator iter = s.begin()
while iter != s.end():
- o.add(cython.operator.dereference(iter))
+ o.add(cython.operator.dereference(iter))
cython.operator.preincrement(iter)
return o
@@ -144,7 +144,7 @@ cdef extern from *:
@cname("{{cname}}")
cdef pair[X,Y] {{cname}}(object o) except *:
x, y = o
- return pair[X,Y](<X>x, <Y>y)
+ return pair[X,Y](<X>x, <Y>y)
#################### pair.to_py ####################
@@ -156,7 +156,7 @@ cdef extern from *:
@cname("{{cname}}")
cdef object {{cname}}(const pair[X,Y]& p):
- return p.first, p.second
+ return p.first, p.second
#################### map.from_py ####################
@@ -175,7 +175,7 @@ cdef map[X,Y] {{cname}}(object o) except *:
cdef dict d = o
cdef map[X,Y] m
for key, value in d.iteritems():
- m.insert(pair[X,Y](<X>key, <Y>value))
+ m.insert(pair[X,Y](<X>key, <Y>value))
return m
@@ -204,7 +204,7 @@ cdef object {{cname}}(const map[X,Y]& s):
cdef map[X,Y].const_iterator iter = s.begin()
while iter != s.end():
key_value = &cython.operator.dereference(iter)
- o[key_value.first] = key_value.second
+ o[key_value.first] = key_value.second
cython.operator.preincrement(iter)
return o
@@ -249,7 +249,7 @@ cdef extern from *:
cdef TMaybe[X] {{cname}}(object o) except *:
cdef TMaybe[X] result
if o is not None:
- result = <X>o
+ result = <X>o
return result
#################### arcadia_TMaybe.to_py ####################
@@ -262,7 +262,7 @@ cdef extern from *:
@cname("{{cname}}")
cdef object {{cname}}(const TMaybe[X]& s):
if s.Defined():
- return s.GetRef()
+ return s.GetRef()
return None
@@ -276,7 +276,7 @@ cdef extern from *:
cdef TVector[X] {{cname}}(object o) except *:
cdef TVector[X] v
for item in o:
- v.push_back(<X>item)
+ v.push_back(<X>item)
return v
@@ -289,7 +289,7 @@ cdef extern from *:
@cname("{{cname}}")
cdef object {{cname}}(const TVector[X]& v):
- return [v[i] for i in range(v.size())]
+ return [v[i] for i in range(v.size())]
#################### arcadia_THashMap.from_py ####################
@@ -306,7 +306,7 @@ cdef THashMap[X,Y] {{cname}}(object o) except *:
cdef dict d = o
cdef THashMap[X,Y] m
for key, value in d.iteritems():
- m.insert(pair[X,Y](<X>key, <Y>value))
+ m.insert(pair[X,Y](<X>key, <Y>value))
return m
@@ -333,7 +333,7 @@ cdef dict {{cname}}(const THashMap[X,Y]& s):
cdef THashMap[X,Y].const_iterator iter = s.begin()
while iter != s.end():
key_value = &cython.operator.dereference(iter)
- result[key_value.first] = key_value.second
+ result[key_value.first] = key_value.second
cython.operator.preincrement(iter)
return result
diff --git a/contrib/tools/cython/Cython/Utility/CppSupport.cpp b/contrib/tools/cython/Cython/Utility/CppSupport.cpp
index a46df521d3..b8fcff0643 100644
--- a/contrib/tools/cython/Cython/Utility/CppSupport.cpp
+++ b/contrib/tools/cython/Cython/Utility/CppSupport.cpp
@@ -46,13 +46,13 @@ static void __Pyx_CppExn2PyErr() {
}
}
#endif
-
-/////////////// PythranConversion.proto ///////////////
-
-template <class T>
-auto __Pyx_pythran_to_python(T &&value) -> decltype(to_python(
- typename pythonic::returnable<typename std::remove_cv<typename std::remove_reference<T>::type>::type>::type{std::forward<T>(value)}))
-{
- using returnable_type = typename pythonic::returnable<typename std::remove_cv<typename std::remove_reference<T>::type>::type>::type;
- return to_python(returnable_type{std::forward<T>(value)});
-}
+
+/////////////// PythranConversion.proto ///////////////
+
+template <class T>
+auto __Pyx_pythran_to_python(T &&value) -> decltype(to_python(
+ typename pythonic::returnable<typename std::remove_cv<typename std::remove_reference<T>::type>::type>::type{std::forward<T>(value)}))
+{
+ using returnable_type = typename pythonic::returnable<typename std::remove_cv<typename std::remove_reference<T>::type>::type>::type;
+ return to_python(returnable_type{std::forward<T>(value)});
+}
diff --git a/contrib/tools/cython/Cython/Utility/CythonFunction.c b/contrib/tools/cython/Cython/Utility/CythonFunction.c
index 01ec4b37f3..d51b308a8d 100644
--- a/contrib/tools/cython/Cython/Utility/CythonFunction.c
+++ b/contrib/tools/cython/Cython/Utility/CythonFunction.c
@@ -72,11 +72,11 @@ static int __pyx_CyFunction_init(void);
//////////////////// CythonFunctionShared ////////////////////
//@substitute: naming
-//@requires: CommonStructures.c::FetchCommonType
+//@requires: CommonStructures.c::FetchCommonType
////@requires: ObjectHandling.c::PyObjectGetAttrStr
-#include <structmember.h>
-
+#include <structmember.h>
+
static PyObject *
__Pyx_CyFunction_get_doc(__pyx_CyFunctionObject *op, CYTHON_UNUSED void *closure)
{
@@ -418,7 +418,7 @@ static PyGetSetDef __pyx_CyFunction_getsets[] = {
};
static PyMemberDef __pyx_CyFunction_members[] = {
- {(char *) "__module__", T_OBJECT, offsetof(PyCFunctionObject, m_module), PY_WRITE_RESTRICTED, 0},
+ {(char *) "__module__", T_OBJECT, offsetof(PyCFunctionObject, m_module), PY_WRITE_RESTRICTED, 0},
{0, 0, 0, 0, 0}
};
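
The members table above exposes __module__ straight out of the PyCFunctionObject layout via PyMemberDef. The same mechanism for a hypothetical type with one object field (this sketch uses READONLY, which additionally blocks assignment from Python):

    #include <structmember.h>

    typedef struct {
        PyObject_HEAD
        PyObject *payload;
    } WrapperObject;

    static PyMemberDef WrapperObject_members[] = {
        {"payload", T_OBJECT, offsetof(WrapperObject, payload), READONLY,
         PyDoc_STR("object wrapped by this instance")},
        {NULL, 0, 0, 0, NULL}   /* sentinel */
    };
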
@@ -508,7 +508,7 @@ __Pyx_CyFunction_clear(__pyx_CyFunctionObject *m)
return 0;
}
-static void __Pyx__CyFunction_dealloc(__pyx_CyFunctionObject *m)
+static void __Pyx__CyFunction_dealloc(__pyx_CyFunctionObject *m)
{
if (__Pyx_CyFunction_weakreflist(m) != NULL)
PyObject_ClearWeakRefs((PyObject *) m);
@@ -517,12 +517,12 @@ static void __Pyx__CyFunction_dealloc(__pyx_CyFunctionObject *m)
}
static void __Pyx_CyFunction_dealloc(PyObject *obj)
-{
+{
__pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) obj;
- PyObject_GC_UnTrack(m);
- __Pyx__CyFunction_dealloc(m);
-}
-
+ PyObject_GC_UnTrack(m);
+ __Pyx__CyFunction_dealloc(m);
+}
+
static int __Pyx_CyFunction_traverse(__pyx_CyFunctionObject *m, visitproc visit, void *arg)
{
Py_VISIT(m->func_closure);
@@ -610,16 +610,16 @@ static PyObject * __Pyx_CyFunction_CallMethod(PyObject *func, PyObject *self, Py
if (likely(kw == NULL || PyDict_Size(kw) == 0)) {
size = PyTuple_GET_SIZE(arg);
if (likely(size == 1)) {
- PyObject *result, *arg0;
- #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
- arg0 = PyTuple_GET_ITEM(arg, 0);
- #else
- arg0 = PySequence_ITEM(arg, 0); if (unlikely(!arg0)) return NULL;
- #endif
+ PyObject *result, *arg0;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ arg0 = PyTuple_GET_ITEM(arg, 0);
+ #else
+ arg0 = PySequence_ITEM(arg, 0); if (unlikely(!arg0)) return NULL;
+ #endif
result = (*meth)(self, arg0);
- #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS)
+ #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS)
Py_DECREF(arg0);
- #endif
+ #endif
return result;
}
PyErr_Format(PyExc_TypeError,
@@ -745,7 +745,7 @@ static PyTypeObject __pyx_CyFunctionType_type = {
static int __pyx_CyFunction_init(void) {
__pyx_CyFunctionType = __Pyx_FetchCommonType(&__pyx_CyFunctionType_type);
- if (unlikely(__pyx_CyFunctionType == NULL)) {
+ if (unlikely(__pyx_CyFunctionType == NULL)) {
return -1;
}
return 0;
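
A few hunks up, the restored METH_O fast path fetches the call's single argument either with PyTuple_GET_ITEM (a borrowed reference, when CYTHON_ASSUME_SAFE_MACROS applies) or with PySequence_ITEM (a new reference that must be released). The two flavours side by side, as standalone sketches:

    /* Borrowed reference: valid only while `args` stays alive; no DECREF. */
    static PyObject *first_arg_borrowed(PyObject *args) {
        return PyTuple_GET_ITEM(args, 0);
    }

    /* New reference: works for any sequence; the caller must DECREF. */
    static PyObject *first_arg_owned(PyObject *args) {
        return PySequence_ITEM(args, 0);   /* NULL on error */
    }
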
@@ -755,7 +755,7 @@ static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *func, size_t
__pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func;
m->defaults = PyObject_Malloc(size);
- if (unlikely(!m->defaults))
+ if (unlikely(!m->defaults))
return PyErr_NoMemory();
memset(m->defaults, 0, size);
m->defaults_pyobjects = pyobjects;
@@ -807,12 +807,12 @@ static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, int flags, PyObject* qual
//////////////////// CyFunctionClassCell.proto ////////////////////
-static int __Pyx_CyFunction_InitClassCell(PyObject *cyfunctions, PyObject *classobj);/*proto*/
+static int __Pyx_CyFunction_InitClassCell(PyObject *cyfunctions, PyObject *classobj);/*proto*/
//////////////////// CyFunctionClassCell ////////////////////
//@requires: CythonFunctionShared
-static int __Pyx_CyFunction_InitClassCell(PyObject *cyfunctions, PyObject *classobj) {
+static int __Pyx_CyFunction_InitClassCell(PyObject *cyfunctions, PyObject *classobj) {
Py_ssize_t i, count = PyList_GET_SIZE(cyfunctions);
for (i = 0; i < count; i++) {
@@ -878,14 +878,14 @@ __pyx_FusedFunction_New(PyMethodDef *ml, int flags,
return op;
}
-static void
-__pyx_FusedFunction_dealloc(__pyx_FusedFunctionObject *self)
-{
- PyObject_GC_UnTrack(self);
- Py_CLEAR(self->self);
- Py_CLEAR(self->type);
- Py_CLEAR(self->__signatures__);
- __Pyx__CyFunction_dealloc((__pyx_CyFunctionObject *) self);
+static void
+__pyx_FusedFunction_dealloc(__pyx_FusedFunctionObject *self)
+{
+ PyObject_GC_UnTrack(self);
+ Py_CLEAR(self->self);
+ Py_CLEAR(self->type);
+ Py_CLEAR(self->__signatures__);
+ __Pyx__CyFunction_dealloc((__pyx_CyFunctionObject *) self);
}
static int
@@ -1292,14 +1292,14 @@ static CYTHON_UNUSED PyObject* __Pyx_Method_ClassMethod(PyObject *method); /*pro
//////////////////// ClassMethod ////////////////////
static PyObject* __Pyx_Method_ClassMethod(PyObject *method) {
-#if CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM <= 0x05080000
+#if CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM <= 0x05080000
if (PyObject_TypeCheck(method, &PyWrapperDescr_Type)) {
// cdef classes
return PyClassMethod_New(method);
}
#else
-#if CYTHON_COMPILING_IN_PYSTON || CYTHON_COMPILING_IN_PYPY
- // special C-API function only in Pyston and PyPy >= 5.9
+#if CYTHON_COMPILING_IN_PYSTON || CYTHON_COMPILING_IN_PYPY
+ // special C-API function only in Pyston and PyPy >= 5.9
if (PyMethodDescr_Check(method))
#else
#if PY_MAJOR_VERSION == 2
diff --git a/contrib/tools/cython/Cython/Utility/Embed.c b/contrib/tools/cython/Cython/Utility/Embed.c
index 8a95435d07..60da8f2330 100644
--- a/contrib/tools/cython/Cython/Utility/Embed.c
+++ b/contrib/tools/cython/Cython/Utility/Embed.c
@@ -40,20 +40,20 @@ static int __Pyx_main(int argc, wchar_t **argv) {
%(module_is_main)s = 1;
#if PY_MAJOR_VERSION < 3
init%(module_name)s();
- #elif CYTHON_PEP489_MULTI_PHASE_INIT
- m = PyInit_%(module_name)s();
- if (!PyModule_Check(m)) {
- PyModuleDef *mdef = (PyModuleDef *) m;
- PyObject *modname = PyUnicode_FromString("__main__");
- m = NULL;
- if (modname) {
- // FIXME: not currently calling PyModule_FromDefAndSpec() here because we do not have a module spec!
- // FIXME: not currently setting __file__, __path__, __spec__, ...
- m = PyModule_NewObject(modname);
- Py_DECREF(modname);
- if (m) PyModule_ExecDef(m, mdef);
- }
- }
+ #elif CYTHON_PEP489_MULTI_PHASE_INIT
+ m = PyInit_%(module_name)s();
+ if (!PyModule_Check(m)) {
+ PyModuleDef *mdef = (PyModuleDef *) m;
+ PyObject *modname = PyUnicode_FromString("__main__");
+ m = NULL;
+ if (modname) {
+ // FIXME: not currently calling PyModule_FromDefAndSpec() here because we do not have a module spec!
+ // FIXME: not currently setting __file__, __path__, __spec__, ...
+ m = PyModule_NewObject(modname);
+ Py_DECREF(modname);
+ if (m) PyModule_ExecDef(m, mdef);
+ }
+ }
#else
m = PyInit_%(module_name)s();
#endif
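
Under CYTHON_PEP489_MULTI_PHASE_INIT the module's PyInit function returns a PyModuleDef rather than a ready module, so the embedding main above builds a "__main__" module by hand and executes the definition against it. The detection-and-exec step, reduced to a sketch (error handling trimmed; no module spec is available here, as the FIXME notes):

    static PyObject *exec_init_result(PyObject *init_result, const char *name) {
        if (PyModule_Check(init_result))
            return init_result;             /* single-phase init: done */
        PyModuleDef *mdef = (PyModuleDef *)init_result;
        PyObject *m = PyModule_New(name);   /* minimal, spec-less module */
        if (m != NULL && PyModule_ExecDef(m, mdef) < 0)
            Py_CLEAR(m);
        return m;
    }
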
diff --git a/contrib/tools/cython/Cython/Utility/Exceptions.c b/contrib/tools/cython/Cython/Utility/Exceptions.c
index 35c2b385f4..b0411f6956 100644
--- a/contrib/tools/cython/Cython/Utility/Exceptions.c
+++ b/contrib/tools/cython/Cython/Utility/Exceptions.c
@@ -11,12 +11,12 @@
#if CYTHON_FAST_THREAD_STATE
#define __Pyx_PyThreadState_declare PyThreadState *$local_tstate_cname;
-#define __Pyx_PyThreadState_assign $local_tstate_cname = __Pyx_PyThreadState_Current;
-#define __Pyx_PyErr_Occurred() $local_tstate_cname->curexc_type
+#define __Pyx_PyThreadState_assign $local_tstate_cname = __Pyx_PyThreadState_Current;
+#define __Pyx_PyErr_Occurred() $local_tstate_cname->curexc_type
#else
#define __Pyx_PyThreadState_declare
#define __Pyx_PyThreadState_assign
-#define __Pyx_PyErr_Occurred() PyErr_Occurred()
+#define __Pyx_PyErr_Occurred() PyErr_Occurred()
#endif
@@ -33,28 +33,28 @@ static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tsta
/////////////// PyErrExceptionMatches ///////////////
#if CYTHON_FAST_THREAD_STATE
-static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {
- Py_ssize_t i, n;
- n = PyTuple_GET_SIZE(tuple);
-#if PY_MAJOR_VERSION >= 3
- // the tighter subtype checking in Py3 allows faster out-of-order comparison
- for (i=0; i<n; i++) {
- if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1;
- }
-#endif
- for (i=0; i<n; i++) {
- if (__Pyx_PyErr_GivenExceptionMatches(exc_type, PyTuple_GET_ITEM(tuple, i))) return 1;
- }
- return 0;
-}
-
+static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {
+ Py_ssize_t i, n;
+ n = PyTuple_GET_SIZE(tuple);
+#if PY_MAJOR_VERSION >= 3
+ // the tighter subtype checking in Py3 allows faster out-of-order comparison
+ for (i=0; i<n; i++) {
+ if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1;
+ }
+#endif
+ for (i=0; i<n; i++) {
+ if (__Pyx_PyErr_GivenExceptionMatches(exc_type, PyTuple_GET_ITEM(tuple, i))) return 1;
+ }
+ return 0;
+}
+
static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err) {
PyObject *exc_type = tstate->curexc_type;
if (exc_type == err) return 1;
if (unlikely(!exc_type)) return 0;
- if (unlikely(PyTuple_Check(err)))
- return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err);
- return __Pyx_PyErr_GivenExceptionMatches(exc_type, err);
+ if (unlikely(PyTuple_Check(err)))
+ return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err);
+ return __Pyx_PyErr_GivenExceptionMatches(exc_type, err);
}
#endif
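
__Pyx_PyErr_ExceptionMatchesTuple specializes the tuple case of exception matching: on Python 3 a cheap identity pre-pass runs first, then each element goes through the full subclass check. The public API it shadows can be used directly, as in this sketch:

    /* Does the currently pending exception match exc (an exception class
     * or a tuple of classes)? Returns 0 when nothing is pending. */
    static int pending_exception_matches(PyObject *exc) {
        PyObject *pending = PyErr_Occurred();   /* borrowed, may be NULL */
        if (pending == NULL)
            return 0;
        return PyErr_GivenExceptionMatches(pending, exc);
    }
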
@@ -63,7 +63,7 @@ static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tsta
//@requires: PyThreadStateGet
#if CYTHON_FAST_THREAD_STATE
-#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL)
+#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL)
#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb)
#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb)
#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState($local_tstate_cname, type, value, tb)
@@ -71,19 +71,19 @@ static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tsta
static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); /*proto*/
static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); /*proto*/
-#if CYTHON_COMPILING_IN_CPYTHON
-#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL))
+#if CYTHON_COMPILING_IN_CPYTHON
+#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL))
#else
-#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
-#endif
-
-#else
-#define __Pyx_PyErr_Clear() PyErr_Clear()
-#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
+#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
+#endif
+
+#else
+#define __Pyx_PyErr_Clear() PyErr_Clear()
+#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb)
#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb)
-#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb)
-#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb)
+#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb)
+#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb)
#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb)
#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb)
#endif
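
When CYTHON_FAST_THREAD_STATE is off, all of the __Pyx_Err* macros above collapse to plain PyErr_Fetch/PyErr_Restore. The idiom they wrap, protecting a live exception across code that may itself touch the error indicator, looks like this sketch:

    static void release_preserving_exception(PyObject *resource) {
        PyObject *type, *value, *tb;
        PyErr_Fetch(&type, &value, &tb);   /* take ownership, clear indicator */
        Py_XDECREF(resource);              /* may run arbitrary destructors   */
        PyErr_Restore(type, value, tb);    /* steals the references back      */
    }
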
@@ -291,7 +291,7 @@ static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject
PyErr_Restore(tmp_type, tmp_value, tb);
Py_XDECREF(tmp_tb);
#else
- PyThreadState *tstate = __Pyx_PyThreadState_Current;
+ PyThreadState *tstate = __Pyx_PyThreadState_Current;
PyObject* tmp_tb = tstate->curexc_traceback;
if (tb != tmp_tb) {
Py_INCREF(tb);
@@ -395,14 +395,14 @@ static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb)
exc_info->exc_value = local_value;
exc_info->exc_traceback = local_tb;
}
- #else
+ #else
tmp_type = tstate->exc_type;
tmp_value = tstate->exc_value;
tmp_tb = tstate->exc_traceback;
tstate->exc_type = local_type;
tstate->exc_value = local_value;
tstate->exc_traceback = local_tb;
- #endif
+ #endif
// Make sure tstate is in a consistent state when we XDECREF
// these objects (DECREF may run arbitrary code).
Py_XDECREF(tmp_type);
@@ -438,11 +438,11 @@ static CYTHON_INLINE void __Pyx_ReraiseException(void) {
type = exc_info->exc_type;
value = exc_info->exc_value;
tb = exc_info->exc_traceback;
- #else
+ #else
type = tstate->exc_type;
value = tstate->exc_value;
tb = tstate->exc_traceback;
- #endif
+ #endif
#else
PyErr_GetExcInfo(&type, &value, &tb);
#endif
@@ -492,11 +492,11 @@ static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject *
*type = exc_info->exc_type;
*value = exc_info->exc_value;
*tb = exc_info->exc_traceback;
- #else
+ #else
*type = tstate->exc_type;
*value = tstate->exc_value;
*tb = tstate->exc_traceback;
- #endif
+ #endif
Py_XINCREF(*type);
Py_XINCREF(*value);
Py_XINCREF(*tb);
@@ -504,7 +504,7 @@ static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject *
static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) {
PyObject *tmp_type, *tmp_value, *tmp_tb;
-
+
#if CYTHON_USE_EXC_INFO_STACK
_PyErr_StackItem *exc_info = tstate->exc_info;
tmp_type = exc_info->exc_type;
@@ -513,14 +513,14 @@ static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject
exc_info->exc_type = type;
exc_info->exc_value = value;
exc_info->exc_traceback = tb;
- #else
+ #else
tmp_type = tstate->exc_type;
tmp_value = tstate->exc_value;
tmp_tb = tstate->exc_traceback;
tstate->exc_type = type;
tstate->exc_value = value;
tstate->exc_traceback = tb;
- #endif
+ #endif
Py_XDECREF(tmp_type);
Py_XDECREF(tmp_value);
Py_XDECREF(tmp_tb);
@@ -543,17 +543,17 @@ static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value,
#if CYTHON_FAST_THREAD_STATE
static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) {
PyObject *tmp_type, *tmp_value, *tmp_tb;
-
+
#if CYTHON_USE_EXC_INFO_STACK
_PyErr_StackItem *exc_info = tstate->exc_info;
tmp_type = exc_info->exc_type;
tmp_value = exc_info->exc_value;
tmp_tb = exc_info->exc_traceback;
-
+
exc_info->exc_type = *type;
exc_info->exc_value = *value;
exc_info->exc_traceback = *tb;
- #else
+ #else
tmp_type = tstate->exc_type;
tmp_value = tstate->exc_value;
tmp_tb = tstate->exc_traceback;
@@ -561,7 +561,7 @@ static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject *
tstate->exc_type = *type;
tstate->exc_value = *value;
tstate->exc_traceback = *tb;
- #endif
+ #endif
*type = tmp_type;
*value = tmp_value;
@@ -632,66 +632,66 @@ static void __Pyx_WriteUnraisable(const char *name, CYTHON_UNUSED int clineno,
#endif
}
-/////////////// CLineInTraceback.proto ///////////////
-
-#ifdef CYTHON_CLINE_IN_TRACEBACK /* 0 or 1 to disable/enable C line display in tracebacks at C compile time */
-#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0)
-#else
-static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line);/*proto*/
-#endif
-
-/////////////// CLineInTraceback ///////////////
-//@requires: ObjectHandling.c::PyObjectGetAttrStr
+/////////////// CLineInTraceback.proto ///////////////
+
+#ifdef CYTHON_CLINE_IN_TRACEBACK /* 0 or 1 to disable/enable C line display in tracebacks at C compile time */
+#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0)
+#else
+static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line);/*proto*/
+#endif
+
+/////////////// CLineInTraceback ///////////////
+//@requires: ObjectHandling.c::PyObjectGetAttrStr
//@requires: ObjectHandling.c::PyDictVersioning
-//@requires: PyErrFetchRestore
-//@substitute: naming
-
-#ifndef CYTHON_CLINE_IN_TRACEBACK
+//@requires: PyErrFetchRestore
+//@substitute: naming
+
+#ifndef CYTHON_CLINE_IN_TRACEBACK
static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) {
- PyObject *use_cline;
- PyObject *ptype, *pvalue, *ptraceback;
-#if CYTHON_COMPILING_IN_CPYTHON
- PyObject **cython_runtime_dict;
-#endif
+ PyObject *use_cline;
+ PyObject *ptype, *pvalue, *ptraceback;
+#if CYTHON_COMPILING_IN_CPYTHON
+ PyObject **cython_runtime_dict;
+#endif
if (unlikely(!${cython_runtime_cname})) {
// Very early error where the runtime module is not set up yet.
return c_line;
}
- __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback);
-
-#if CYTHON_COMPILING_IN_CPYTHON
- cython_runtime_dict = _PyObject_GetDictPtr(${cython_runtime_cname});
- if (likely(cython_runtime_dict)) {
+ __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback);
+
+#if CYTHON_COMPILING_IN_CPYTHON
+ cython_runtime_dict = _PyObject_GetDictPtr(${cython_runtime_cname});
+ if (likely(cython_runtime_dict)) {
__PYX_PY_DICT_LOOKUP_IF_MODIFIED(
use_cline, *cython_runtime_dict,
__Pyx_PyDict_GetItemStr(*cython_runtime_dict, PYIDENT("cline_in_traceback")))
- } else
-#endif
- {
- PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(${cython_runtime_cname}, PYIDENT("cline_in_traceback"));
- if (use_cline_obj) {
- use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True;
- Py_DECREF(use_cline_obj);
- } else {
- PyErr_Clear();
- use_cline = NULL;
- }
- }
- if (!use_cline) {
- c_line = 0;
+ } else
+#endif
+ {
+ PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(${cython_runtime_cname}, PYIDENT("cline_in_traceback"));
+ if (use_cline_obj) {
+ use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True;
+ Py_DECREF(use_cline_obj);
+ } else {
+ PyErr_Clear();
+ use_cline = NULL;
+ }
+ }
+ if (!use_cline) {
+ c_line = 0;
// No need to handle errors here when we reset the exception state just afterwards.
(void) PyObject_SetAttr(${cython_runtime_cname}, PYIDENT("cline_in_traceback"), Py_False);
- }
+ }
else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) {
- c_line = 0;
- }
- __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback);
- return c_line;
-}
-#endif
-
+ c_line = 0;
+ }
+ __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback);
+ return c_line;
+}
+#endif
+
/////////////// AddTraceback.proto ///////////////
static void __Pyx_AddTraceback(const char *funcname, int c_line,
@@ -699,7 +699,7 @@ static void __Pyx_AddTraceback(const char *funcname, int c_line,
/////////////// AddTraceback ///////////////
//@requires: ModuleSetupCode.c::CodeObjectCache
-//@requires: CLineInTraceback
+//@requires: CLineInTraceback
//@substitute: naming
#include "compile.h"
@@ -771,25 +771,25 @@ static void __Pyx_AddTraceback(const char *funcname, int c_line,
int py_line, const char *filename) {
PyCodeObject *py_code = 0;
PyFrameObject *py_frame = 0;
- PyThreadState *tstate = __Pyx_PyThreadState_Current;
-
- if (c_line) {
- c_line = __Pyx_CLineForTraceback(tstate, c_line);
- }
-
- // Negate to avoid collisions between py and c lines.
- py_code = $global_code_object_cache_find(c_line ? -c_line : py_line);
+ PyThreadState *tstate = __Pyx_PyThreadState_Current;
+
+ if (c_line) {
+ c_line = __Pyx_CLineForTraceback(tstate, c_line);
+ }
+
+ // Negate to avoid collisions between py and c lines.
+ py_code = $global_code_object_cache_find(c_line ? -c_line : py_line);
if (!py_code) {
py_code = __Pyx_CreateCodeObjectForTraceback(
funcname, c_line, py_line, filename);
if (!py_code) goto bad;
- $global_code_object_cache_insert(c_line ? -c_line : py_line, py_code);
+ $global_code_object_cache_insert(c_line ? -c_line : py_line, py_code);
}
py_frame = PyFrame_New(
- tstate, /*PyThreadState *tstate,*/
- py_code, /*PyCodeObject *code,*/
- $moddict_cname, /*PyObject *globals,*/
- 0 /*PyObject *locals*/
+ tstate, /*PyThreadState *tstate,*/
+ py_code, /*PyCodeObject *code,*/
+ $moddict_cname, /*PyObject *globals,*/
+ 0 /*PyObject *locals*/
);
if (!py_frame) goto bad;
__Pyx_PyFrame_SetLineNumber(py_frame, py_line);
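For orientation, __Pyx_AddTraceback's trick is to fabricate a frame pointing at the right file and line and hand it to the traceback machinery. A compressed sketch of the same technique against pre-3.11 CPython headers, where PyFrameObject fields are still directly accessible (error handling trimmed; all names illustrative):

static void add_fake_traceback(const char *funcname, const char *filename,
                               int py_line) {
    PyObject *globals = PyDict_New();
    PyCodeObject *code = PyCode_NewEmpty(filename, funcname, py_line);
    PyFrameObject *frame = NULL;
    if (!globals || !code) goto done;
    frame = PyFrame_New(PyThreadState_Get(), code, globals, NULL);
    if (!frame) goto done;
    frame->f_lineno = py_line;        /* line shown in the traceback */
    PyTraceBack_Here(frame);          /* append to the current exception */
done:
    Py_XDECREF(frame);
    Py_XDECREF(code);
    Py_XDECREF(globals);
}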
diff --git a/contrib/tools/cython/Cython/Utility/ExtensionTypes.c b/contrib/tools/cython/Cython/Utility/ExtensionTypes.c
index 31c9889ee4..0d8c41dee1 100644
--- a/contrib/tools/cython/Cython/Utility/ExtensionTypes.c
+++ b/contrib/tools/cython/Cython/Utility/ExtensionTypes.c
@@ -1,59 +1,59 @@
-/////////////// PyType_Ready.proto ///////////////
-
-static int __Pyx_PyType_Ready(PyTypeObject *t);
-
-/////////////// PyType_Ready ///////////////
-
-// Wrapper around PyType_Ready() with some runtime checks and fixes
-// to deal with multiple inheritance.
-static int __Pyx_PyType_Ready(PyTypeObject *t) {
- // Loop over all bases (except the first) and check that those
- // really are heap types. Otherwise, it would not be safe to
- // subclass them.
- //
- // We also check tp_dictoffset: it is unsafe to inherit
- // tp_dictoffset from a base class because the object structures
- // would not be compatible. So, if our extension type doesn't set
- // tp_dictoffset (i.e. there is no __dict__ attribute in the object
- // structure), we need to check that none of the base classes sets
- // it either.
- int r;
- PyObject *bases = t->tp_bases;
- if (bases)
- {
- Py_ssize_t i, n = PyTuple_GET_SIZE(bases);
- for (i = 1; i < n; i++) /* Skip first base */
- {
- PyObject *b0 = PyTuple_GET_ITEM(bases, i);
- PyTypeObject *b;
-#if PY_MAJOR_VERSION < 3
- /* Disallow old-style classes */
- if (PyClass_Check(b0))
- {
- PyErr_Format(PyExc_TypeError, "base class '%.200s' is an old-style class",
- PyString_AS_STRING(((PyClassObject*)b0)->cl_name));
- return -1;
- }
-#endif
- b = (PyTypeObject*)b0;
- if (!PyType_HasFeature(b, Py_TPFLAGS_HEAPTYPE))
- {
- PyErr_Format(PyExc_TypeError, "base class '%.200s' is not a heap type",
- b->tp_name);
- return -1;
- }
- if (t->tp_dictoffset == 0 && b->tp_dictoffset)
- {
- PyErr_Format(PyExc_TypeError,
- "extension type '%.200s' has no __dict__ slot, but base type '%.200s' has: "
- "either add 'cdef dict __dict__' to the extension type "
- "or add '__slots__ = [...]' to the base type",
- t->tp_name, b->tp_name);
- return -1;
- }
- }
- }
-
+/////////////// PyType_Ready.proto ///////////////
+
+static int __Pyx_PyType_Ready(PyTypeObject *t);
+
+/////////////// PyType_Ready ///////////////
+
+// Wrapper around PyType_Ready() with some runtime checks and fixes
+// to deal with multiple inheritance.
+static int __Pyx_PyType_Ready(PyTypeObject *t) {
+ // Loop over all bases (except the first) and check that those
+ // really are heap types. Otherwise, it would not be safe to
+ // subclass them.
+ //
+ // We also check tp_dictoffset: it is unsafe to inherit
+ // tp_dictoffset from a base class because the object structures
+ // would not be compatible. So, if our extension type doesn't set
+ // tp_dictoffset (i.e. there is no __dict__ attribute in the object
+ // structure), we need to check that none of the base classes sets
+ // it either.
+ int r;
+ PyObject *bases = t->tp_bases;
+ if (bases)
+ {
+ Py_ssize_t i, n = PyTuple_GET_SIZE(bases);
+ for (i = 1; i < n; i++) /* Skip first base */
+ {
+ PyObject *b0 = PyTuple_GET_ITEM(bases, i);
+ PyTypeObject *b;
+#if PY_MAJOR_VERSION < 3
+ /* Disallow old-style classes */
+ if (PyClass_Check(b0))
+ {
+ PyErr_Format(PyExc_TypeError, "base class '%.200s' is an old-style class",
+ PyString_AS_STRING(((PyClassObject*)b0)->cl_name));
+ return -1;
+ }
+#endif
+ b = (PyTypeObject*)b0;
+ if (!PyType_HasFeature(b, Py_TPFLAGS_HEAPTYPE))
+ {
+ PyErr_Format(PyExc_TypeError, "base class '%.200s' is not a heap type",
+ b->tp_name);
+ return -1;
+ }
+ if (t->tp_dictoffset == 0 && b->tp_dictoffset)
+ {
+ PyErr_Format(PyExc_TypeError,
+ "extension type '%.200s' has no __dict__ slot, but base type '%.200s' has: "
+ "either add 'cdef dict __dict__' to the extension type "
+ "or add '__slots__ = [...]' to the base type",
+ t->tp_name, b->tp_name);
+ return -1;
+ }
+ }
+ }
+
#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION)
{
// Make sure GC does not pick up our non-heap type as heap type with this hack!
@@ -89,10 +89,10 @@ static int __Pyx_PyType_Ready(PyTypeObject *t) {
// Other than this check, the Py_TPFLAGS_HEAPTYPE flag is unused
// in PyType_Ready().
t->tp_flags |= Py_TPFLAGS_HEAPTYPE;
-#endif
-
- r = PyType_Ready(t);
-
+#endif
+
+ r = PyType_Ready(t);
+
#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION)
t->tp_flags &= ~Py_TPFLAGS_HEAPTYPE;
@@ -114,11 +114,11 @@ static int __Pyx_PyType_Ready(PyTypeObject *t) {
}
Py_DECREF(gc);
}
-#endif
-
- return r;
-}
-
+#endif
+
+ return r;
+}
+
/////////////// CallNextTpDealloc.proto ///////////////
static void __Pyx_call_next_tp_dealloc(PyObject* obj, destructor current_tp_dealloc);
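The __Pyx_PyType_Ready wrapper restored above enforces two invariants before delegating to PyType_Ready(). Reduced to its core, the check looks roughly like this (a sketch, not the full wrapper):

/* Every base after the first must be a heap type, and if the new type
 * has no __dict__ slot, no base may have one either. */
static int bases_are_safe(PyTypeObject *t) {
    PyObject *bases = t->tp_bases;
    Py_ssize_t i, n = bases ? PyTuple_GET_SIZE(bases) : 0;
    for (i = 1; i < n; i++) {                       /* skip primary base */
        PyTypeObject *b = (PyTypeObject *)PyTuple_GET_ITEM(bases, i);
        if (!PyType_HasFeature(b, Py_TPFLAGS_HEAPTYPE))
            return 0;                               /* static base: unsafe */
        if (t->tp_dictoffset == 0 && b->tp_dictoffset)
            return 0;                               /* layout mismatch */
    }
    return 1;
}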
@@ -171,69 +171,69 @@ static void __Pyx_call_next_tp_clear(PyObject* obj, inquiry current_tp_clear) {
if (type && type->tp_clear)
type->tp_clear(obj);
}
-
-/////////////// SetupReduce.proto ///////////////
-
-static int __Pyx_setup_reduce(PyObject* type_obj);
-
-/////////////// SetupReduce ///////////////
+
+/////////////// SetupReduce.proto ///////////////
+
+static int __Pyx_setup_reduce(PyObject* type_obj);
+
+/////////////// SetupReduce ///////////////
//@requires: ObjectHandling.c::PyObjectGetAttrStrNoError
-//@requires: ObjectHandling.c::PyObjectGetAttrStr
-//@substitute: naming
-
-static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) {
- int ret;
- PyObject *name_attr;
-
- name_attr = __Pyx_PyObject_GetAttrStr(meth, PYIDENT("__name__"));
- if (likely(name_attr)) {
- ret = PyObject_RichCompareBool(name_attr, name, Py_EQ);
- } else {
- ret = -1;
- }
-
- if (unlikely(ret < 0)) {
- PyErr_Clear();
- ret = 0;
- }
-
- Py_XDECREF(name_attr);
- return ret;
-}
-
-static int __Pyx_setup_reduce(PyObject* type_obj) {
- int ret = 0;
- PyObject *object_reduce = NULL;
- PyObject *object_reduce_ex = NULL;
- PyObject *reduce = NULL;
- PyObject *reduce_ex = NULL;
- PyObject *reduce_cython = NULL;
- PyObject *setstate = NULL;
- PyObject *setstate_cython = NULL;
-
-#if CYTHON_USE_PYTYPE_LOOKUP
+//@requires: ObjectHandling.c::PyObjectGetAttrStr
+//@substitute: naming
+
+static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) {
+ int ret;
+ PyObject *name_attr;
+
+ name_attr = __Pyx_PyObject_GetAttrStr(meth, PYIDENT("__name__"));
+ if (likely(name_attr)) {
+ ret = PyObject_RichCompareBool(name_attr, name, Py_EQ);
+ } else {
+ ret = -1;
+ }
+
+ if (unlikely(ret < 0)) {
+ PyErr_Clear();
+ ret = 0;
+ }
+
+ Py_XDECREF(name_attr);
+ return ret;
+}
+
+static int __Pyx_setup_reduce(PyObject* type_obj) {
+ int ret = 0;
+ PyObject *object_reduce = NULL;
+ PyObject *object_reduce_ex = NULL;
+ PyObject *reduce = NULL;
+ PyObject *reduce_ex = NULL;
+ PyObject *reduce_cython = NULL;
+ PyObject *setstate = NULL;
+ PyObject *setstate_cython = NULL;
+
+#if CYTHON_USE_PYTYPE_LOOKUP
if (_PyType_Lookup((PyTypeObject*)type_obj, PYIDENT("__getstate__"))) goto __PYX_GOOD;
-#else
+#else
if (PyObject_HasAttr(type_obj, PYIDENT("__getstate__"))) goto __PYX_GOOD;
-#endif
-
-#if CYTHON_USE_PYTYPE_LOOKUP
+#endif
+
+#if CYTHON_USE_PYTYPE_LOOKUP
object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, PYIDENT("__reduce_ex__")); if (!object_reduce_ex) goto __PYX_BAD;
-#else
+#else
object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, PYIDENT("__reduce_ex__")); if (!object_reduce_ex) goto __PYX_BAD;
-#endif
-
+#endif
+
reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, PYIDENT("__reduce_ex__")); if (unlikely(!reduce_ex)) goto __PYX_BAD;
- if (reduce_ex == object_reduce_ex) {
-
-#if CYTHON_USE_PYTYPE_LOOKUP
+ if (reduce_ex == object_reduce_ex) {
+
+#if CYTHON_USE_PYTYPE_LOOKUP
object_reduce = _PyType_Lookup(&PyBaseObject_Type, PYIDENT("__reduce__")); if (!object_reduce) goto __PYX_BAD;
-#else
+#else
object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, PYIDENT("__reduce__")); if (!object_reduce) goto __PYX_BAD;
-#endif
+#endif
reduce = __Pyx_PyObject_GetAttrStr(type_obj, PYIDENT("__reduce__")); if (unlikely(!reduce)) goto __PYX_BAD;
-
- if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, PYIDENT("__reduce_cython__"))) {
+
+ if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, PYIDENT("__reduce_cython__"))) {
reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, PYIDENT("__reduce_cython__"));
if (likely(reduce_cython)) {
ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, PYIDENT("__reduce__"), reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
@@ -243,10 +243,10 @@ static int __Pyx_setup_reduce(PyObject* type_obj) {
// Otherwise: error.
goto __PYX_BAD;
}
-
- setstate = __Pyx_PyObject_GetAttrStr(type_obj, PYIDENT("__setstate__"));
- if (!setstate) PyErr_Clear();
- if (!setstate || __Pyx_setup_reduce_is_named(setstate, PYIDENT("__setstate_cython__"))) {
+
+ setstate = __Pyx_PyObject_GetAttrStr(type_obj, PYIDENT("__setstate__"));
+ if (!setstate) PyErr_Clear();
+ if (!setstate || __Pyx_setup_reduce_is_named(setstate, PYIDENT("__setstate_cython__"))) {
setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, PYIDENT("__setstate_cython__"));
if (likely(setstate_cython)) {
ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, PYIDENT("__setstate__"), setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
@@ -256,25 +256,25 @@ static int __Pyx_setup_reduce(PyObject* type_obj) {
// Otherwise: error.
goto __PYX_BAD;
}
- }
- PyType_Modified((PyTypeObject*)type_obj);
- }
- }
+ }
+ PyType_Modified((PyTypeObject*)type_obj);
+ }
+ }
goto __PYX_GOOD;
-
+
__PYX_BAD:
- if (!PyErr_Occurred())
- PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name);
- ret = -1;
+ if (!PyErr_Occurred())
+ PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name);
+ ret = -1;
__PYX_GOOD:
-#if !CYTHON_USE_PYTYPE_LOOKUP
- Py_XDECREF(object_reduce);
- Py_XDECREF(object_reduce_ex);
-#endif
- Py_XDECREF(reduce);
- Py_XDECREF(reduce_ex);
- Py_XDECREF(reduce_cython);
- Py_XDECREF(setstate);
- Py_XDECREF(setstate_cython);
- return ret;
-}
+#if !CYTHON_USE_PYTYPE_LOOKUP
+ Py_XDECREF(object_reduce);
+ Py_XDECREF(object_reduce_ex);
+#endif
+ Py_XDECREF(reduce);
+ Py_XDECREF(reduce_ex);
+ Py_XDECREF(reduce_cython);
+ Py_XDECREF(setstate);
+ Py_XDECREF(setstate_cython);
+ return ret;
+}
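The SetupReduce logic above is careful bookkeeping around one small operation: once it has decided the type still inherits object's default pickling, it swaps __reduce__ / __setstate__ in tp_dict and invalidates the type caches. The core of that swap in isolation (a sketch; `impl` stands for the __reduce_cython__ method fetched above):

static int install_reduce(PyTypeObject *type, PyObject *impl) {
    /* Overwrite the inherited method directly in the type dict... */
    if (PyDict_SetItemString(type->tp_dict, "__reduce__", impl) < 0)
        return -1;
    /* ...and tell CPython that cached attribute lookups are now stale. */
    PyType_Modified(type);
    return 0;
}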
diff --git a/contrib/tools/cython/Cython/Utility/FunctionArguments.c b/contrib/tools/cython/Cython/Utility/FunctionArguments.c
index f36cbbe723..8333d93666 100644
--- a/contrib/tools/cython/Cython/Utility/FunctionArguments.c
+++ b/contrib/tools/cython/Cython/Utility/FunctionArguments.c
@@ -1,15 +1,15 @@
//////////////////// ArgTypeTest.proto ////////////////////
-#define __Pyx_ArgTypeTest(obj, type, none_allowed, name, exact) \
- ((likely((Py_TYPE(obj) == type) | (none_allowed && (obj == Py_None)))) ? 1 : \
- __Pyx__ArgTypeTest(obj, type, name, exact))
-
-static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact); /*proto*/
-
+#define __Pyx_ArgTypeTest(obj, type, none_allowed, name, exact) \
+ ((likely((Py_TYPE(obj) == type) | (none_allowed && (obj == Py_None)))) ? 1 : \
+ __Pyx__ArgTypeTest(obj, type, name, exact))
+
+static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact); /*proto*/
+
//////////////////// ArgTypeTest ////////////////////
-static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact)
+static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact)
{
if (unlikely(!type)) {
PyErr_SetString(PyExc_SystemError, "Missing type object");
@@ -17,15 +17,15 @@ static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *nam
}
else if (exact) {
#if PY_MAJOR_VERSION == 2
- if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1;
+ if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1;
#endif
}
else {
- if (likely(__Pyx_TypeCheck(obj, type))) return 1;
+ if (likely(__Pyx_TypeCheck(obj, type))) return 1;
}
- PyErr_Format(PyExc_TypeError,
- "Argument '%.200s' has incorrect type (expected %.200s, got %.200s)",
- name, type->tp_name, Py_TYPE(obj)->tp_name);
+ PyErr_Format(PyExc_TypeError,
+ "Argument '%.200s' has incorrect type (expected %.200s, got %.200s)",
+ name, type->tp_name, Py_TYPE(obj)->tp_name);
return 0;
}
@@ -69,11 +69,11 @@ static void __Pyx_RaiseArgtupleInvalid(
//////////////////// RaiseKeywordRequired.proto ////////////////////
-static void __Pyx_RaiseKeywordRequired(const char* func_name, PyObject* kw_name); /*proto*/
+static void __Pyx_RaiseKeywordRequired(const char* func_name, PyObject* kw_name); /*proto*/
//////////////////// RaiseKeywordRequired ////////////////////
-static void __Pyx_RaiseKeywordRequired(const char* func_name, PyObject* kw_name) {
+static void __Pyx_RaiseKeywordRequired(const char* func_name, PyObject* kw_name) {
PyErr_Format(PyExc_TypeError,
#if PY_MAJOR_VERSION >= 3
"%s() needs keyword-only argument %U", func_name, kw_name);
@@ -117,7 +117,7 @@ static void __Pyx_RaiseMappingExpectedError(PyObject* arg) {
//////////////////// KeywordStringCheck.proto ////////////////////
-static int __Pyx_CheckKeywordStrings(PyObject *kwdict, const char* function_name, int kw_allowed); /*proto*/
+static int __Pyx_CheckKeywordStrings(PyObject *kwdict, const char* function_name, int kw_allowed); /*proto*/
//////////////////// KeywordStringCheck ////////////////////
@@ -125,7 +125,7 @@ static int __Pyx_CheckKeywordStrings(PyObject *kwdict, const char* function_name
// were passed to a function, or if any keywords were passed to a
// function that does not accept them.
-static int __Pyx_CheckKeywordStrings(
+static int __Pyx_CheckKeywordStrings(
PyObject *kwdict,
const char* function_name,
int kw_allowed)
@@ -140,7 +140,7 @@ static int __Pyx_CheckKeywordStrings(
#else
while (PyDict_Next(kwdict, &pos, &key, 0)) {
#if PY_MAJOR_VERSION < 3
- if (unlikely(!PyString_Check(key)))
+ if (unlikely(!PyString_Check(key)))
#endif
if (unlikely(!PyUnicode_Check(key)))
goto invalid_keyword_type;
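__Pyx_CheckKeywordStrings walks the kwargs dict once, checking each key's type; on Python 2 both str and unicode keys pass, on Python 3 only str. A Python-3-only sketch of the same loop, without the surrounding error-message plumbing (helper name illustrative):

static int keywords_are_strings(PyObject *kwdict) {
    PyObject *key, *value;
    Py_ssize_t pos = 0;
    while (PyDict_Next(kwdict, &pos, &key, &value)) {
        if (!PyUnicode_Check(key)) {
            PyErr_SetString(PyExc_TypeError, "keywords must be strings");
            return 0;
        }
    }
    return 1;   /* all keys are str */
}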
diff --git a/contrib/tools/cython/Cython/Utility/ImportExport.c b/contrib/tools/cython/Cython/Utility/ImportExport.c
index 4206cbc66b..532ec326f6 100644
--- a/contrib/tools/cython/Cython/Utility/ImportExport.c
+++ b/contrib/tools/cython/Cython/Utility/ImportExport.c
@@ -23,7 +23,7 @@ static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) {
PyObject *global_dict = 0;
PyObject *empty_dict = 0;
PyObject *list;
- #if PY_MAJOR_VERSION < 3
+ #if PY_MAJOR_VERSION < 3
PyObject *py_import;
py_import = __Pyx_PyObject_GetAttrStr($builtins_cname, PYIDENT("__import__"));
if (!py_import)
@@ -61,7 +61,7 @@ static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) {
}
#endif
if (!module) {
- #if PY_MAJOR_VERSION < 3
+ #if PY_MAJOR_VERSION < 3
PyObject *py_level = PyInt_FromLong(level);
if (!py_level)
goto bad;
@@ -75,7 +75,7 @@ static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) {
}
}
bad:
- #if PY_MAJOR_VERSION < 3
+ #if PY_MAJOR_VERSION < 3
Py_XDECREF(py_import);
#endif
Py_XDECREF(empty_list);
@@ -226,8 +226,8 @@ bad:
/////////////// SetPackagePathFromImportLib.proto ///////////////
-// PY_VERSION_HEX >= 0x03030000
-#if PY_MAJOR_VERSION >= 3 && !CYTHON_PEP489_MULTI_PHASE_INIT
+// PY_VERSION_HEX >= 0x03030000
+#if PY_MAJOR_VERSION >= 3 && !CYTHON_PEP489_MULTI_PHASE_INIT
static int __Pyx_SetPackagePathFromImportLib(const char* parent_package_name, PyObject *module_name);
#else
#define __Pyx_SetPackagePathFromImportLib(a, b) 0
@@ -237,8 +237,8 @@ static int __Pyx_SetPackagePathFromImportLib(const char* parent_package_name, Py
//@requires: ObjectHandling.c::PyObjectGetAttrStr
//@substitute: naming
-// PY_VERSION_HEX >= 0x03030000
-#if PY_MAJOR_VERSION >= 3 && !CYTHON_PEP489_MULTI_PHASE_INIT
+// PY_VERSION_HEX >= 0x03030000
+#if PY_MAJOR_VERSION >= 3 && !CYTHON_PEP489_MULTI_PHASE_INIT
static int __Pyx_SetPackagePathFromImportLib(const char* parent_package_name, PyObject *module_name) {
PyObject *importlib, *loader, *osmod, *ossep, *parts, *package_path;
PyObject *path = NULL, *file_path = NULL;
@@ -636,104 +636,104 @@ bad:
Py_XDECREF(ob);
return NULL;
}
-
-
-/////////////// MergeVTables.proto ///////////////
-//@requires: GetVTable
-
-static int __Pyx_MergeVtables(PyTypeObject *type); /*proto*/
-
-/////////////// MergeVTables ///////////////
-
-static int __Pyx_MergeVtables(PyTypeObject *type) {
- int i;
- void** base_vtables;
- void* unknown = (void*)-1;
- PyObject* bases = type->tp_bases;
- int base_depth = 0;
- {
- PyTypeObject* base = type->tp_base;
- while (base) {
- base_depth += 1;
- base = base->tp_base;
- }
- }
+
+
+/////////////// MergeVTables.proto ///////////////
+//@requires: GetVTable
+
+static int __Pyx_MergeVtables(PyTypeObject *type); /*proto*/
+
+/////////////// MergeVTables ///////////////
+
+static int __Pyx_MergeVtables(PyTypeObject *type) {
+ int i;
+ void** base_vtables;
+ void* unknown = (void*)-1;
+ PyObject* bases = type->tp_bases;
+ int base_depth = 0;
+ {
+ PyTypeObject* base = type->tp_base;
+ while (base) {
+ base_depth += 1;
+ base = base->tp_base;
+ }
+ }
base_vtables = (void**) malloc(sizeof(void*) * (size_t)(base_depth + 1));
- base_vtables[0] = unknown;
- // Could do MRO resolution of individual methods in the future, assuming
- // compatible vtables, but for now simply require a common vtable base.
- // Note that if the vtables of various bases are extended separately,
- // resolution isn't possible and we must reject it just as when the
- // instance struct is so extended. (It would be good to also do this
- // check when a multiple-base class is created in pure Python as well.)
- for (i = 1; i < PyTuple_GET_SIZE(bases); i++) {
- void* base_vtable = __Pyx_GetVtable(((PyTypeObject*)PyTuple_GET_ITEM(bases, i))->tp_dict);
- if (base_vtable != NULL) {
- int j;
- PyTypeObject* base = type->tp_base;
- for (j = 0; j < base_depth; j++) {
- if (base_vtables[j] == unknown) {
- base_vtables[j] = __Pyx_GetVtable(base->tp_dict);
- base_vtables[j + 1] = unknown;
- }
- if (base_vtables[j] == base_vtable) {
- break;
- } else if (base_vtables[j] == NULL) {
- // No more potential matching bases (with vtables).
- goto bad;
- }
- base = base->tp_base;
- }
- }
- }
- PyErr_Clear();
- free(base_vtables);
- return 0;
-bad:
- PyErr_Format(
- PyExc_TypeError,
- "multiple bases have vtable conflict: '%s' and '%s'",
- type->tp_base->tp_name, ((PyTypeObject*)PyTuple_GET_ITEM(bases, i))->tp_name);
- free(base_vtables);
- return -1;
-}
-
-
-/////////////// ImportNumPyArray.proto ///////////////
-
-static PyObject *__pyx_numpy_ndarray = NULL;
-
-static PyObject* __Pyx_ImportNumPyArrayTypeIfAvailable(void); /*proto*/
-
-/////////////// ImportNumPyArray.cleanup ///////////////
-Py_CLEAR(__pyx_numpy_ndarray);
-
-/////////////// ImportNumPyArray ///////////////
-//@requires: ImportExport.c::Import
-
-static PyObject* __Pyx__ImportNumPyArray(void) {
- PyObject *numpy_module, *ndarray_object = NULL;
- numpy_module = __Pyx_Import(PYIDENT("numpy"), NULL, 0);
- if (likely(numpy_module)) {
- ndarray_object = PyObject_GetAttrString(numpy_module, "ndarray");
- Py_DECREF(numpy_module);
- }
- if (unlikely(!ndarray_object)) {
- // ImportError, AttributeError, ...
- PyErr_Clear();
- }
- if (unlikely(!ndarray_object || !PyObject_TypeCheck(ndarray_object, &PyType_Type))) {
- Py_XDECREF(ndarray_object);
- Py_INCREF(Py_None);
- ndarray_object = Py_None;
- }
- return ndarray_object;
-}
-
-static CYTHON_INLINE PyObject* __Pyx_ImportNumPyArrayTypeIfAvailable(void) {
- if (unlikely(!__pyx_numpy_ndarray)) {
- __pyx_numpy_ndarray = __Pyx__ImportNumPyArray();
- }
- Py_INCREF(__pyx_numpy_ndarray);
- return __pyx_numpy_ndarray;
-}
+ base_vtables[0] = unknown;
+ // Could do MRO resolution of individual methods in the future, assuming
+ // compatible vtables, but for now simply require a common vtable base.
+ // Note that if the vtables of various bases are extended separately,
+ // resolution isn't possible and we must reject it just as when the
+ // instance struct is so extended. (It would be good to do this
+ // check when a multiple-base class is created in pure Python as well.)
+ for (i = 1; i < PyTuple_GET_SIZE(bases); i++) {
+ void* base_vtable = __Pyx_GetVtable(((PyTypeObject*)PyTuple_GET_ITEM(bases, i))->tp_dict);
+ if (base_vtable != NULL) {
+ int j;
+ PyTypeObject* base = type->tp_base;
+ for (j = 0; j < base_depth; j++) {
+ if (base_vtables[j] == unknown) {
+ base_vtables[j] = __Pyx_GetVtable(base->tp_dict);
+ base_vtables[j + 1] = unknown;
+ }
+ if (base_vtables[j] == base_vtable) {
+ break;
+ } else if (base_vtables[j] == NULL) {
+ // No more potential matching bases (with vtables).
+ goto bad;
+ }
+ base = base->tp_base;
+ }
+ }
+ }
+ PyErr_Clear();
+ free(base_vtables);
+ return 0;
+bad:
+ PyErr_Format(
+ PyExc_TypeError,
+ "multiple bases have vtable conflict: '%s' and '%s'",
+ type->tp_base->tp_name, ((PyTypeObject*)PyTuple_GET_ITEM(bases, i))->tp_name);
+ free(base_vtables);
+ return -1;
+}
+
+
+/////////////// ImportNumPyArray.proto ///////////////
+
+static PyObject *__pyx_numpy_ndarray = NULL;
+
+static PyObject* __Pyx_ImportNumPyArrayTypeIfAvailable(void); /*proto*/
+
+/////////////// ImportNumPyArray.cleanup ///////////////
+Py_CLEAR(__pyx_numpy_ndarray);
+
+/////////////// ImportNumPyArray ///////////////
+//@requires: ImportExport.c::Import
+
+static PyObject* __Pyx__ImportNumPyArray(void) {
+ PyObject *numpy_module, *ndarray_object = NULL;
+ numpy_module = __Pyx_Import(PYIDENT("numpy"), NULL, 0);
+ if (likely(numpy_module)) {
+ ndarray_object = PyObject_GetAttrString(numpy_module, "ndarray");
+ Py_DECREF(numpy_module);
+ }
+ if (unlikely(!ndarray_object)) {
+ // ImportError, AttributeError, ...
+ PyErr_Clear();
+ }
+ if (unlikely(!ndarray_object || !PyObject_TypeCheck(ndarray_object, &PyType_Type))) {
+ Py_XDECREF(ndarray_object);
+ Py_INCREF(Py_None);
+ ndarray_object = Py_None;
+ }
+ return ndarray_object;
+}
+
+static CYTHON_INLINE PyObject* __Pyx_ImportNumPyArrayTypeIfAvailable(void) {
+ if (unlikely(!__pyx_numpy_ndarray)) {
+ __pyx_numpy_ndarray = __Pyx__ImportNumPyArray();
+ }
+ Py_INCREF(__pyx_numpy_ndarray);
+ return __pyx_numpy_ndarray;
+}
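__Pyx__ImportNumPyArray above illustrates a reusable pattern: attempt an optional import, and normalise every failure mode to a harmless sentinel (Py_None) so the cached result can be used in type checks without NULL checks. The pattern in isolation, as a sketch using the plain C API rather than Cython's __Pyx_Import:

static PyObject *ndarray_or_none(void) {
    PyObject *numpy = PyImport_ImportModule("numpy");
    PyObject *nd = NULL;
    if (numpy) {
        nd = PyObject_GetAttrString(numpy, "ndarray");
        Py_DECREF(numpy);
    }
    if (!nd || !PyType_Check(nd)) {
        PyErr_Clear();               /* ImportError, AttributeError, ... */
        Py_XDECREF(nd);
        Py_INCREF(Py_None);
        nd = Py_None;                /* sentinel: never matches a type check */
    }
    return nd;                       /* new reference in both cases */
}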
diff --git a/contrib/tools/cython/Cython/Utility/MemoryView.pyx b/contrib/tools/cython/Cython/Utility/MemoryView.pyx
index 98898a8bd2..6ca5fab9ba 100644
--- a/contrib/tools/cython/Cython/Utility/MemoryView.pyx
+++ b/contrib/tools/cython/Cython/Utility/MemoryView.pyx
@@ -66,7 +66,7 @@ cdef extern from *:
PyBUF_INDIRECT
PyBUF_ND
PyBUF_RECORDS
- PyBUF_RECORDS_RO
+ PyBUF_RECORDS_RO
ctypedef struct __Pyx_TypeInfo:
pass
@@ -227,8 +227,8 @@ cdef class array:
flags = PyBUF_ANY_CONTIGUOUS|PyBUF_FORMAT|PyBUF_WRITABLE
return memoryview(self, flags, self.dtype_is_object)
- def __len__(self):
- return self._shape[0]
+ def __len__(self):
+ return self._shape[0]
def __getattr__(self, attr):
return getattr(self.memview, attr)
@@ -414,9 +414,9 @@ cdef class memoryview(object):
return self.convert_item_to_object(itemp)
def __setitem__(memoryview self, object index, object value):
- if self.view.readonly:
- raise TypeError("Cannot assign to read-only memoryview")
-
+ if self.view.readonly:
+ raise TypeError("Cannot assign to read-only memoryview")
+
have_slices, index = _unellipsify(index, self.view.ndim)
if have_slices:
@@ -516,9 +516,9 @@ cdef class memoryview(object):
@cname('getbuffer')
def __getbuffer__(self, Py_buffer *info, int flags):
- if flags & PyBUF_WRITABLE and self.view.readonly:
- raise ValueError("Cannot create writable memory view from read-only memoryview")
-
+ if flags & PyBUF_WRITABLE and self.view.readonly:
+ raise ValueError("Cannot create writable memory view from read-only memoryview")
+
if flags & PyBUF_ND:
info.shape = self.view.shape
else:
@@ -543,12 +543,12 @@ cdef class memoryview(object):
info.ndim = self.view.ndim
info.itemsize = self.view.itemsize
info.len = self.view.len
- info.readonly = self.view.readonly
+ info.readonly = self.view.readonly
info.obj = self
__pyx_getbuffer = capsule(<void *> &__pyx_memoryview_getbuffer, "getbuffer(obj, view, flags)")
- # Some properties that have the same semantics as in NumPy
+ # Some properties that have the same semantics as in NumPy
@property
def T(self):
cdef _memoryviewslice result = memoryview_copy(self)
@@ -1024,10 +1024,10 @@ cdef memoryview_fromslice({{memviewslice_name}} memviewslice,
(<__pyx_buffer *> &result.view).obj = Py_None
Py_INCREF(Py_None)
- if (<memoryview>memviewslice.memview).flags & PyBUF_WRITABLE:
- result.flags = PyBUF_RECORDS
- else:
- result.flags = PyBUF_RECORDS_RO
+ if (<memoryview>memviewslice.memview).flags & PyBUF_WRITABLE:
+ result.flags = PyBUF_RECORDS
+ else:
+ result.flags = PyBUF_RECORDS_RO
result.view.shape = <Py_ssize_t *> result.from_slice.shape
result.view.strides = <Py_ssize_t *> result.from_slice.strides
@@ -1354,7 +1354,7 @@ cdef void broadcast_leading({{memviewslice_name}} *mslice,
mslice.suboffsets[i] = -1
#
-### Take care of refcounting the objects in slices. Do this separately from any copying,
+### Take care of refcounting the objects in slices. Do this separately from any copying,
### to minimize acquiring the GIL
#
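The read-only support restored above has a consumer-side counterpart: a writable buffer request against a read-only exporter must fail cleanly. A sketch of probing an object's writability through the buffer protocol (illustrative helper, plain C API):

/* Returns 1 if obj exports a writable buffer, 0 if it is read-only,
 * -1 if it is not a buffer provider at all. */
static int buffer_writability(PyObject *obj) {
    Py_buffer view;
    if (PyObject_GetBuffer(obj, &view, PyBUF_WRITABLE) == 0) {
        PyBuffer_Release(&view);
        return 1;
    }
    PyErr_Clear();
    if (PyObject_GetBuffer(obj, &view, PyBUF_SIMPLE) == 0) {
        PyBuffer_Release(&view);
        return 0;                    /* exporter exists but is read-only */
    }
    PyErr_Clear();
    return -1;
}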
diff --git a/contrib/tools/cython/Cython/Utility/MemoryView_C.c b/contrib/tools/cython/Cython/Utility/MemoryView_C.c
index 8f91ca16b4..0a5d8ee2c2 100644
--- a/contrib/tools/cython/Cython/Utility/MemoryView_C.c
+++ b/contrib/tools/cython/Cython/Utility/MemoryView_C.c
@@ -1,5 +1,5 @@
////////// MemviewSliceStruct.proto //////////
-//@proto_block: utility_code_proto_before_types
+//@proto_block: utility_code_proto_before_types
/* memoryview slice struct */
struct {{memview_struct_name}};
@@ -12,12 +12,12 @@ typedef struct {
Py_ssize_t suboffsets[{{max_dims}}];
} {{memviewslice_name}};
-// used for "len(memviewslice)"
-#define __Pyx_MemoryView_Len(m) (m.shape[0])
+// used for "len(memviewslice)"
+#define __Pyx_MemoryView_Len(m) (m.shape[0])
+
-
/////////// Atomics.proto /////////////
-//@proto_block: utility_code_proto_before_types
+//@proto_block: utility_code_proto_before_types
#include <pythread.h>
@@ -82,7 +82,7 @@ typedef volatile __pyx_atomic_int_type __pyx_atomic_int;
/////////////// ObjectToMemviewSlice.proto ///////////////
-static CYTHON_INLINE {{memviewslice_name}} {{funcname}}(PyObject *, int writable_flag);
+static CYTHON_INLINE {{memviewslice_name}} {{funcname}}(PyObject *, int writable_flag);
////////// MemviewSliceInit.proto //////////
@@ -127,7 +127,7 @@ static CYTHON_INLINE char *__pyx_memviewslice_index_full(
/////////////// ObjectToMemviewSlice ///////////////
//@requires: MemviewSliceValidateAndInit
-static CYTHON_INLINE {{memviewslice_name}} {{funcname}}(PyObject *obj, int writable_flag) {
+static CYTHON_INLINE {{memviewslice_name}} {{funcname}}(PyObject *obj, int writable_flag) {
{{memviewslice_name}} result = {{memslice_init}};
__Pyx_BufFmt_StackElem stack[{{struct_nesting_depth}}];
int axes_specs[] = { {{axes_specs}} };
@@ -140,7 +140,7 @@ static CYTHON_INLINE {{memviewslice_name}} {{funcname}}(PyObject *obj, int writa
}
retcode = __Pyx_ValidateAndInit_memviewslice(axes_specs, {{c_or_f_flag}},
- {{buf_flag}} | writable_flag, {{ndim}},
+ {{buf_flag}} | writable_flag, {{ndim}},
&{{dtype_typeinfo}}, stack,
&result, obj);
@@ -169,8 +169,8 @@ static int __Pyx_ValidateAndInit_memviewslice(
/////////////// MemviewSliceValidateAndInit ///////////////
//@requires: Buffer.c::TypeInfoCompare
-//@requires: Buffer.c::BufferFormatStructs
-//@requires: Buffer.c::BufferFormatCheck
+//@requires: Buffer.c::BufferFormatStructs
+//@requires: Buffer.c::BufferFormatCheck
static int
__pyx_check_strides(Py_buffer *buf, int dim, int ndim, int spec)
@@ -441,12 +441,12 @@ no_fail:
return retval;
}
-#ifndef Py_NO_RETURN
-// available since Py3.3
-#define Py_NO_RETURN
-#endif
+#ifndef Py_NO_RETURN
+// available since Py3.3
+#define Py_NO_RETURN
+#endif
-static void __pyx_fatalerror(const char *fmt, ...) Py_NO_RETURN {
+static void __pyx_fatalerror(const char *fmt, ...) Py_NO_RETURN {
va_list vargs;
char msg[200];
@@ -455,8 +455,8 @@ static void __pyx_fatalerror(const char *fmt, ...) Py_NO_RETURN {
#else
va_start(vargs);
#endif
- vsnprintf(msg, 200, fmt, vargs);
- va_end(vargs);
+ vsnprintf(msg, 200, fmt, vargs);
+ va_end(vargs);
Py_FatalError(msg);
}
@@ -696,21 +696,21 @@ __pyx_slices_overlap({{memviewslice_name}} *slice1,
}
-////////// MemviewSliceCheckContig.proto //////////
+////////// MemviewSliceCheckContig.proto //////////
-#define __pyx_memviewslice_is_contig_{{contig_type}}{{ndim}}(slice) \
- __pyx_memviewslice_is_contig(slice, '{{contig_type}}', {{ndim}})
+#define __pyx_memviewslice_is_contig_{{contig_type}}{{ndim}}(slice) \
+ __pyx_memviewslice_is_contig(slice, '{{contig_type}}', {{ndim}})
////////// MemviewSliceIsContig.proto //////////
-static int __pyx_memviewslice_is_contig(const {{memviewslice_name}} mvs, char order, int ndim);/*proto*/
+static int __pyx_memviewslice_is_contig(const {{memviewslice_name}} mvs, char order, int ndim);/*proto*/
////////// MemviewSliceIsContig //////////
static int
-__pyx_memviewslice_is_contig(const {{memviewslice_name}} mvs, char order, int ndim)
+__pyx_memviewslice_is_contig(const {{memviewslice_name}} mvs, char order, int ndim)
{
int i, index, step, start;
Py_ssize_t itemsize = mvs.memview->view.itemsize;
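__pyx_memviewslice_is_contig, whose signature is touched above, answers a purely arithmetic question: do the strides match a dense layout in the requested order? For C order the test walks dimensions last-to-first; a sketch:

static int is_c_contiguous(const Py_ssize_t *shape, const Py_ssize_t *strides,
                           int ndim, Py_ssize_t itemsize) {
    Py_ssize_t expected = itemsize;
    int i;
    for (i = ndim - 1; i >= 0; i--) {
        if (shape[i] != 1 && strides[i] != expected)
            return 0;                /* gap or reordering: not contiguous */
        expected *= shape[i];        /* stride a dense layout would have */
    }
    return 1;
}

Fortran-order contiguity is the same walk taken from the first dimension to the last.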
diff --git a/contrib/tools/cython/Cython/Utility/ModuleSetupCode.c b/contrib/tools/cython/Cython/Utility/ModuleSetupCode.c
index e811ac9186..0c7059b354 100644
--- a/contrib/tools/cython/Cython/Utility/ModuleSetupCode.c
+++ b/contrib/tools/cython/Cython/Utility/ModuleSetupCode.c
@@ -34,12 +34,12 @@
#define DL_EXPORT(t) t
#endif
-// For use in DL_IMPORT/DL_EXPORT macros.
-#define __PYX_COMMA ,
-
+// For use in DL_IMPORT/DL_EXPORT macros.
+#define __PYX_COMMA ,
+
#ifndef HAVE_LONG_LONG
// CPython has required PY_LONG_LONG support for years, even if HAVE_LONG_LONG is not defined for us
- #if PY_VERSION_HEX >= 0x02070000
+ #if PY_VERSION_HEX >= 0x02070000
#define HAVE_LONG_LONG
#endif
#endif
@@ -59,14 +59,14 @@
#undef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 0
- #undef CYTHON_USE_PYTYPE_LOOKUP
- #define CYTHON_USE_PYTYPE_LOOKUP 0
- #if PY_VERSION_HEX < 0x03050000
- #undef CYTHON_USE_ASYNC_SLOTS
- #define CYTHON_USE_ASYNC_SLOTS 0
- #elif !defined(CYTHON_USE_ASYNC_SLOTS)
- #define CYTHON_USE_ASYNC_SLOTS 1
- #endif
+ #undef CYTHON_USE_PYTYPE_LOOKUP
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
+ #if PY_VERSION_HEX < 0x03050000
+ #undef CYTHON_USE_ASYNC_SLOTS
+ #define CYTHON_USE_ASYNC_SLOTS 0
+ #elif !defined(CYTHON_USE_ASYNC_SLOTS)
+ #define CYTHON_USE_ASYNC_SLOTS 1
+ #endif
#undef CYTHON_USE_PYLIST_INTERNALS
#define CYTHON_USE_PYLIST_INTERNALS 0
#undef CYTHON_USE_UNICODE_INTERNALS
@@ -85,10 +85,10 @@
#define CYTHON_FAST_THREAD_STATE 0
#undef CYTHON_FAST_PYCALL
#define CYTHON_FAST_PYCALL 0
- #undef CYTHON_PEP489_MULTI_PHASE_INIT
- #define CYTHON_PEP489_MULTI_PHASE_INIT 0
- #undef CYTHON_USE_TP_FINALIZE
- #define CYTHON_USE_TP_FINALIZE 0
+ #undef CYTHON_PEP489_MULTI_PHASE_INIT
+ #define CYTHON_PEP489_MULTI_PHASE_INIT 0
+ #undef CYTHON_USE_TP_FINALIZE
+ #define CYTHON_USE_TP_FINALIZE 0
#undef CYTHON_USE_DICT_VERSIONS
#define CYTHON_USE_DICT_VERSIONS 0
#undef CYTHON_USE_EXC_INFO_STACK
@@ -102,8 +102,8 @@
#ifndef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 1
#endif
- #undef CYTHON_USE_PYTYPE_LOOKUP
- #define CYTHON_USE_PYTYPE_LOOKUP 0
+ #undef CYTHON_USE_PYTYPE_LOOKUP
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
#undef CYTHON_USE_ASYNC_SLOTS
#define CYTHON_USE_ASYNC_SLOTS 0
#undef CYTHON_USE_PYLIST_INTERNALS
@@ -128,10 +128,10 @@
#define CYTHON_FAST_THREAD_STATE 0
#undef CYTHON_FAST_PYCALL
#define CYTHON_FAST_PYCALL 0
- #undef CYTHON_PEP489_MULTI_PHASE_INIT
- #define CYTHON_PEP489_MULTI_PHASE_INIT 0
- #undef CYTHON_USE_TP_FINALIZE
- #define CYTHON_USE_TP_FINALIZE 0
+ #undef CYTHON_PEP489_MULTI_PHASE_INIT
+ #define CYTHON_PEP489_MULTI_PHASE_INIT 0
+ #undef CYTHON_USE_TP_FINALIZE
+ #define CYTHON_USE_TP_FINALIZE 0
#undef CYTHON_USE_DICT_VERSIONS
#define CYTHON_USE_DICT_VERSIONS 0
#undef CYTHON_USE_EXC_INFO_STACK
@@ -145,13 +145,13 @@
#ifndef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 1
#endif
- #if PY_VERSION_HEX < 0x02070000
- // looks like calling _PyType_Lookup() isn't safe in Py<=2.6/3.1
- #undef CYTHON_USE_PYTYPE_LOOKUP
- #define CYTHON_USE_PYTYPE_LOOKUP 0
- #elif !defined(CYTHON_USE_PYTYPE_LOOKUP)
- #define CYTHON_USE_PYTYPE_LOOKUP 1
- #endif
+ #if PY_VERSION_HEX < 0x02070000
+ // looks like calling _PyType_Lookup() isn't safe in Py<=2.6/3.1
+ #undef CYTHON_USE_PYTYPE_LOOKUP
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
+ #elif !defined(CYTHON_USE_PYTYPE_LOOKUP)
+ #define CYTHON_USE_PYTYPE_LOOKUP 1
+ #endif
#if PY_MAJOR_VERSION < 3
#undef CYTHON_USE_ASYNC_SLOTS
#define CYTHON_USE_ASYNC_SLOTS 0
@@ -195,12 +195,12 @@
// fast_pycall code
#define CYTHON_FAST_PYCALL (PY_VERSION_HEX < 0x030B00A1)
#endif
- #ifndef CYTHON_PEP489_MULTI_PHASE_INIT
+ #ifndef CYTHON_PEP489_MULTI_PHASE_INIT
#define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000)
- #endif
- #ifndef CYTHON_USE_TP_FINALIZE
- #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1)
- #endif
+ #endif
+ #ifndef CYTHON_USE_TP_FINALIZE
+ #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1)
+ #endif
#ifndef CYTHON_USE_DICT_VERSIONS
#define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1)
#endif
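Every macro in this block follows one idiom: #undef/#define forces a value on limited ports (PyPy, Pyston), while #ifndef-guarded defaults keyed on PY_VERSION_HEX apply elsewhere, so a build can always pre-define a macro to override the guess. The idiom in miniature (the macro name here is illustrative only):

#include <Python.h>

#ifndef MY_USE_FEATURE                        /* build may pre-define this */
  #define MY_USE_FEATURE (PY_VERSION_HEX >= 0x03060000)
#endif

#if MY_USE_FEATURE
  /* feature path: taken on 3.6+ unless explicitly disabled */
#else
  /* fallback path */
#endif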
@@ -227,162 +227,162 @@
#endif
#endif
-#ifndef __has_attribute
- #define __has_attribute(x) 0
-#endif
-
-#ifndef __has_cpp_attribute
- #define __has_cpp_attribute(x) 0
-#endif
-
-// restrict
-#ifndef CYTHON_RESTRICT
- #if defined(__GNUC__)
- #define CYTHON_RESTRICT __restrict__
- #elif defined(_MSC_VER) && _MSC_VER >= 1400
- #define CYTHON_RESTRICT __restrict
- #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
- #define CYTHON_RESTRICT restrict
- #else
- #define CYTHON_RESTRICT
- #endif
-#endif
-
-// unused attribute
-#ifndef CYTHON_UNUSED
-# if defined(__GNUC__)
-# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))
-# define CYTHON_UNUSED __attribute__ ((__unused__))
-# else
-# define CYTHON_UNUSED
-# endif
-# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER))
-# define CYTHON_UNUSED __attribute__ ((__unused__))
-# else
-# define CYTHON_UNUSED
-# endif
-#endif
-
-#ifndef CYTHON_MAYBE_UNUSED_VAR
-# if defined(__cplusplus)
- template<class T> void CYTHON_MAYBE_UNUSED_VAR( const T& ) { }
-# else
-# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x)
-# endif
-#endif
-
-#ifndef CYTHON_NCP_UNUSED
-# if CYTHON_COMPILING_IN_CPYTHON
-# define CYTHON_NCP_UNUSED
-# else
-# define CYTHON_NCP_UNUSED CYTHON_UNUSED
-# endif
-#endif
-
-#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None)
-
-#ifdef _MSC_VER
- #ifndef _MSC_STDINT_H_
- #if _MSC_VER < 1300
- typedef unsigned char uint8_t;
- typedef unsigned int uint32_t;
- #else
- typedef unsigned __int8 uint8_t;
- typedef unsigned __int32 uint32_t;
- #endif
- #endif
-#else
- #include <stdint.h>
-#endif
-
-
-#ifndef CYTHON_FALLTHROUGH
- #if defined(__cplusplus) && __cplusplus >= 201103L
- #if __has_cpp_attribute(fallthrough)
- #define CYTHON_FALLTHROUGH [[fallthrough]]
- #elif __has_cpp_attribute(clang::fallthrough)
- #define CYTHON_FALLTHROUGH [[clang::fallthrough]]
- #elif __has_cpp_attribute(gnu::fallthrough)
- #define CYTHON_FALLTHROUGH [[gnu::fallthrough]]
- #endif
- #endif
-
- #ifndef CYTHON_FALLTHROUGH
- #if __has_attribute(fallthrough)
- #define CYTHON_FALLTHROUGH __attribute__((fallthrough))
- #else
- #define CYTHON_FALLTHROUGH
- #endif
- #endif
-
- #if defined(__clang__ ) && defined(__apple_build_version__)
- #if __apple_build_version__ < 7000000 /* Xcode < 7.0 */
- #undef CYTHON_FALLTHROUGH
- #define CYTHON_FALLTHROUGH
- #endif
- #endif
-#endif
-
-/////////////// CInitCode ///////////////
-
-// inline attribute
-#ifndef CYTHON_INLINE
- #if defined(__clang__)
- #define CYTHON_INLINE __inline__ __attribute__ ((__unused__))
- #elif defined(__GNUC__)
- #define CYTHON_INLINE __inline__
- #elif defined(_MSC_VER)
- #define CYTHON_INLINE __inline
- #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
- #define CYTHON_INLINE inline
- #else
- #define CYTHON_INLINE
- #endif
-#endif
-
-
-/////////////// CppInitCode ///////////////
-
-#ifndef __cplusplus
- #error "Cython files generated with the C++ option must be compiled with a C++ compiler."
-#endif
-
-// inline attribute
-#ifndef CYTHON_INLINE
- #if defined(__clang__)
- #define CYTHON_INLINE __inline__ __attribute__ ((__unused__))
- #else
- #define CYTHON_INLINE inline
- #endif
-#endif
-
-// Work around clang bug http://stackoverflow.com/questions/21847816/c-invoke-nested-template-class-destructor
-template<typename T>
-void __Pyx_call_destructor(T& x) {
- x.~T();
-}
-
-// Used for temporary variables of "reference" type.
-template<typename T>
-class __Pyx_FakeReference {
- public:
- __Pyx_FakeReference() : ptr(NULL) { }
- // __Pyx_FakeReference(T& ref) : ptr(&ref) { }
- // Const version needed as Cython doesn't know about const overloads (e.g. for stl containers).
- __Pyx_FakeReference(const T& ref) : ptr(const_cast<T*>(&ref)) { }
- T *operator->() { return ptr; }
- T *operator&() { return ptr; }
- operator T&() { return *ptr; }
- // TODO(robertwb): Delegate all operators (or auto-generate unwrapping code where needed).
- template<typename U> bool operator ==(U other) { return *ptr == other; }
- template<typename U> bool operator !=(U other) { return *ptr != other; }
- private:
- T *ptr;
-};
-
-
-/////////////// PythonCompatibility ///////////////
-
+#ifndef __has_attribute
+ #define __has_attribute(x) 0
+#endif
+
+#ifndef __has_cpp_attribute
+ #define __has_cpp_attribute(x) 0
+#endif
+
+// restrict
+#ifndef CYTHON_RESTRICT
+ #if defined(__GNUC__)
+ #define CYTHON_RESTRICT __restrict__
+ #elif defined(_MSC_VER) && _MSC_VER >= 1400
+ #define CYTHON_RESTRICT __restrict
+ #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+ #define CYTHON_RESTRICT restrict
+ #else
+ #define CYTHON_RESTRICT
+ #endif
+#endif
+
+// unused attribute
+#ifndef CYTHON_UNUSED
+# if defined(__GNUC__)
+# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))
+# define CYTHON_UNUSED __attribute__ ((__unused__))
+# else
+# define CYTHON_UNUSED
+# endif
+# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER))
+# define CYTHON_UNUSED __attribute__ ((__unused__))
+# else
+# define CYTHON_UNUSED
+# endif
+#endif
+
+#ifndef CYTHON_MAYBE_UNUSED_VAR
+# if defined(__cplusplus)
+ template<class T> void CYTHON_MAYBE_UNUSED_VAR( const T& ) { }
+# else
+# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x)
+# endif
+#endif
+
+#ifndef CYTHON_NCP_UNUSED
+# if CYTHON_COMPILING_IN_CPYTHON
+# define CYTHON_NCP_UNUSED
+# else
+# define CYTHON_NCP_UNUSED CYTHON_UNUSED
+# endif
+#endif
+
+#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None)
+
+#ifdef _MSC_VER
+ #ifndef _MSC_STDINT_H_
+ #if _MSC_VER < 1300
+ typedef unsigned char uint8_t;
+ typedef unsigned int uint32_t;
+ #else
+ typedef unsigned __int8 uint8_t;
+ typedef unsigned __int32 uint32_t;
+ #endif
+ #endif
+#else
+ #include <stdint.h>
+#endif
+
+
+#ifndef CYTHON_FALLTHROUGH
+ #if defined(__cplusplus) && __cplusplus >= 201103L
+ #if __has_cpp_attribute(fallthrough)
+ #define CYTHON_FALLTHROUGH [[fallthrough]]
+ #elif __has_cpp_attribute(clang::fallthrough)
+ #define CYTHON_FALLTHROUGH [[clang::fallthrough]]
+ #elif __has_cpp_attribute(gnu::fallthrough)
+ #define CYTHON_FALLTHROUGH [[gnu::fallthrough]]
+ #endif
+ #endif
+
+ #ifndef CYTHON_FALLTHROUGH
+ #if __has_attribute(fallthrough)
+ #define CYTHON_FALLTHROUGH __attribute__((fallthrough))
+ #else
+ #define CYTHON_FALLTHROUGH
+ #endif
+ #endif
+
+ #if defined(__clang__ ) && defined(__apple_build_version__)
+ #if __apple_build_version__ < 7000000 /* Xcode < 7.0 */
+ #undef CYTHON_FALLTHROUGH
+ #define CYTHON_FALLTHROUGH
+ #endif
+ #endif
+#endif
+
+/////////////// CInitCode ///////////////
+
+// inline attribute
+#ifndef CYTHON_INLINE
+ #if defined(__clang__)
+ #define CYTHON_INLINE __inline__ __attribute__ ((__unused__))
+ #elif defined(__GNUC__)
+ #define CYTHON_INLINE __inline__
+ #elif defined(_MSC_VER)
+ #define CYTHON_INLINE __inline
+ #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+ #define CYTHON_INLINE inline
+ #else
+ #define CYTHON_INLINE
+ #endif
+#endif
+
+
+/////////////// CppInitCode ///////////////
+
+#ifndef __cplusplus
+ #error "Cython files generated with the C++ option must be compiled with a C++ compiler."
+#endif
+
+// inline attribute
+#ifndef CYTHON_INLINE
+ #if defined(__clang__)
+ #define CYTHON_INLINE __inline__ __attribute__ ((__unused__))
+ #else
+ #define CYTHON_INLINE inline
+ #endif
+#endif
+
+// Work around clang bug http://stackoverflow.com/questions/21847816/c-invoke-nested-template-class-destructor
+template<typename T>
+void __Pyx_call_destructor(T& x) {
+ x.~T();
+}
+
+// Used for temporary variables of "reference" type.
+template<typename T>
+class __Pyx_FakeReference {
+ public:
+ __Pyx_FakeReference() : ptr(NULL) { }
+ // __Pyx_FakeReference(T& ref) : ptr(&ref) { }
+ // Const version needed as Cython doesn't know about const overloads (e.g. for stl containers).
+ __Pyx_FakeReference(const T& ref) : ptr(const_cast<T*>(&ref)) { }
+ T *operator->() { return ptr; }
+ T *operator&() { return ptr; }
+ operator T&() { return *ptr; }
+ // TODO(robertwb): Delegate all operators (or auto-generate unwrapping code where needed).
+ template<typename U> bool operator ==(U other) { return *ptr == other; }
+ template<typename U> bool operator !=(U other) { return *ptr != other; }
+ private:
+ T *ptr;
+};
+
+
+/////////////// PythonCompatibility ///////////////
+
#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag)
#define Py_OptimizeFlag 0
#endif
@@ -493,22 +493,22 @@ class __Pyx_FakeReference {
// value if defined: Stackless Python < 3.6: 0x80 else 0x100
#define METH_STACKLESS 0
#endif
-#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL)
- // new in CPython 3.6, but changed in 3.7 - see
- // positional-only parameters:
- // https://bugs.python.org/issue29464
- // const args:
- // https://bugs.python.org/issue32240
- #ifndef METH_FASTCALL
- #define METH_FASTCALL 0x80
- #endif
- typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs);
- // new in CPython 3.7, used to be old signature of _PyCFunctionFast() in 3.6
- typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args,
- Py_ssize_t nargs, PyObject *kwnames);
+#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL)
+ // new in CPython 3.6, but changed in 3.7 - see
+ // positional-only parameters:
+ // https://bugs.python.org/issue29464
+ // const args:
+ // https://bugs.python.org/issue32240
+ #ifndef METH_FASTCALL
+ #define METH_FASTCALL 0x80
+ #endif
+ typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs);
+ // new in CPython 3.7; this used to be the signature of _PyCFunctionFast() in 3.6
+ typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args,
+ Py_ssize_t nargs, PyObject *kwnames);
#else
#define __Pyx_PyCFunctionFast _PyCFunctionFast
- #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords
+ #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords
#endif
#if CYTHON_FAST_PYCCALL
#define __Pyx_PyFastCFunction_Check(func) \
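The __Pyx_PyCFunctionFast typedefs above exist so the same function body can be registered under METH_FASTCALL on 3.7+ and under the backported definition on 3.6. A fastcall function receives a C array and a count instead of an argument tuple; a sketch with 3.7+ semantics (hypothetical method, not part of Cython):

static PyObject *sum_longs(PyObject *self, PyObject *const *args,
                           Py_ssize_t nargs) {
    long total = 0;
    Py_ssize_t i;
    (void)self;
    for (i = 0; i < nargs; i++) {
        long v = PyLong_AsLong(args[i]);
        if (v == -1 && PyErr_Occurred())
            return NULL;             /* non-integer argument */
        total += v;
    }
    return PyLong_FromLong(total);
}
/* In the method table:
 *   {"sum_longs", (PyCFunction)(void(*)(void))sum_longs, METH_FASTCALL, NULL}
 */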
@@ -517,93 +517,93 @@ class __Pyx_FakeReference {
#define __Pyx_PyFastCFunction_Check(func) 0
#endif
-#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc)
- #define PyObject_Malloc(s) PyMem_Malloc(s)
- #define PyObject_Free(p) PyMem_Free(p)
- #define PyObject_Realloc(p) PyMem_Realloc(p)
-#endif
-
+#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc)
+ #define PyObject_Malloc(s) PyMem_Malloc(s)
+ #define PyObject_Free(p) PyMem_Free(p)
+ #define PyObject_Realloc(p) PyMem_Realloc(p)
+#endif
+
#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1
#define PyMem_RawMalloc(n) PyMem_Malloc(n)
#define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n)
#define PyMem_RawFree(p) PyMem_Free(p)
#endif
-#if CYTHON_COMPILING_IN_PYSTON
- // special C-API functions only in Pyston
- #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co)
- #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno)
-#else
- #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0)
- #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno)
-#endif
-
-#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000
- #define __Pyx_PyThreadState_Current PyThreadState_GET()
-#elif PY_VERSION_HEX >= 0x03060000
- //#elif PY_VERSION_HEX >= 0x03050200
- // Actually added in 3.5.2, but compiling against that does not guarantee that we get imported there.
- #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet()
-#elif PY_VERSION_HEX >= 0x03000000
- #define __Pyx_PyThreadState_Current PyThreadState_GET()
-#else
- #define __Pyx_PyThreadState_Current _PyThreadState_Current
-#endif
-
-// TSS (Thread Specific Storage) API
-#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT)
-#include "pythread.h"
-#define Py_tss_NEEDS_INIT 0
-typedef int Py_tss_t;
-static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) {
- *key = PyThread_create_key();
+#if CYTHON_COMPILING_IN_PYSTON
+ // special C-API functions only in Pyston
+ #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co)
+ #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno)
+#else
+ #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0)
+ #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno)
+#endif
+
+#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000
+ #define __Pyx_PyThreadState_Current PyThreadState_GET()
+#elif PY_VERSION_HEX >= 0x03060000
+ //#elif PY_VERSION_HEX >= 0x03050200
+ // Actually added in 3.5.2, but compiling against that does not guarantee that we get imported there.
+ #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet()
+#elif PY_VERSION_HEX >= 0x03000000
+ #define __Pyx_PyThreadState_Current PyThreadState_GET()
+#else
+ #define __Pyx_PyThreadState_Current _PyThreadState_Current
+#endif
+
+// TSS (Thread Specific Storage) API
+#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT)
+#include "pythread.h"
+#define Py_tss_NEEDS_INIT 0
+typedef int Py_tss_t;
+static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) {
+ *key = PyThread_create_key();
return 0; /* PyThread_create_key reports success always */
-}
-static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) {
- Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t));
- *key = Py_tss_NEEDS_INIT;
- return key;
-}
-static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) {
- PyObject_Free(key);
-}
-static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) {
- return *key != Py_tss_NEEDS_INIT;
-}
-static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) {
- PyThread_delete_key(*key);
- *key = Py_tss_NEEDS_INIT;
-}
-static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) {
- return PyThread_set_key_value(*key, value);
-}
-static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
- return PyThread_get_key_value(*key);
-}
-// PyThread_delete_key_value(key) is equalivalent to PyThread_set_key_value(key, NULL)
-// PyThread_ReInitTLS() is a no-op
+}
+static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) {
+ Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t));
+ *key = Py_tss_NEEDS_INIT;
+ return key;
+}
+static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) {
+ PyObject_Free(key);
+}
+static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) {
+ return *key != Py_tss_NEEDS_INIT;
+}
+static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) {
+ PyThread_delete_key(*key);
+ *key = Py_tss_NEEDS_INIT;
+}
+static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) {
+ return PyThread_set_key_value(*key, value);
+}
+static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
+ return PyThread_get_key_value(*key);
+}
+// PyThread_delete_key_value(key) is equivalent to PyThread_set_key_value(key, NULL)
+// PyThread_ReInitTLS() is a no-op
#endif /* TSS (Thread Specific Storage) API */
-
-#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized)
-#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n))
-#else
-#define __Pyx_PyDict_NewPresized(n) PyDict_New()
-#endif
-
-#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION
- #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y)
- #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y)
-#else
- #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y)
- #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y)
-#endif
-
-#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS
-#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash)
-#else
-#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name)
-#endif
-
+
+#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized)
+#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n))
+#else
+#define __Pyx_PyDict_NewPresized(n) PyDict_New()
+#endif
+
+#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION
+ #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y)
+ #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y)
+#else
+ #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y)
+ #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y)
+#endif
+
+#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS
+#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash)
+#else
+#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name)
+#endif
+
/* new Py3.3 unicode type (PEP 393) */
#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND)
#define CYTHON_PEP393_ENABLED 1
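__Pyx_PyDict_GetItemStr above exploits the fact that CPython's str objects cache their hash: when the key is an already-hashed interned string, _PyDict_GetItem_KnownHash avoids rehashing it on every lookup. The shape of the helper, simplified to the same version guard used above (a sketch; the real macro also checks CYTHON_USE_UNICODE_INTERNALS):

static PyObject *dict_get_str(PyObject *dict, PyObject *name) {
#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1
    /* reuse the hash already stored inside the str object */
    return _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *)name)->hash);
#else
    return PyDict_GetItem(dict, name);   /* borrowed reference */
#endif
}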
@@ -719,12 +719,12 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size)
#endif
-#if CYTHON_ASSUME_SAFE_MACROS
- #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq)
-#else
- // NOTE: might fail with exception => check for -1
- #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq)
-#endif
+#if CYTHON_ASSUME_SAFE_MACROS
+ #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq)
+#else
+ // NOTE: might fail with exception => check for -1
+ #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq)
+#endif
#if PY_MAJOR_VERSION >= 3
#define PyIntObject PyLongObject
@@ -776,12 +776,12 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#define __Pyx_PyAsyncMethodsStruct PyAsyncMethods
#define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async)
#else
- #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved))
- #endif
-#else
- #define __Pyx_PyType_AsAsync(obj) NULL
-#endif
-#ifndef __Pyx_PyAsyncMethodsStruct
+ #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved))
+ #endif
+#else
+ #define __Pyx_PyType_AsAsync(obj) NULL
+#endif
+#ifndef __Pyx_PyAsyncMethodsStruct
typedef struct {
unaryfunc am_await;
unaryfunc am_aiter;
@@ -803,7 +803,7 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#endif
-/////////////// PyModInitFuncType.proto ///////////////
+/////////////// PyModInitFuncType.proto ///////////////
#ifndef CYTHON_NO_PYINIT_EXPORT
#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC
@@ -813,106 +813,106 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#ifdef __cplusplus
#define __Pyx_PyMODINIT_FUNC extern "C" void
#else
-#define __Pyx_PyMODINIT_FUNC void
+#define __Pyx_PyMODINIT_FUNC void
#endif
-#else
+#else
// Py3+: define this to PyObject * manually because PyMODINIT_FUNC adds __declspec(dllexport) to its definition.
#ifdef __cplusplus
#define __Pyx_PyMODINIT_FUNC extern "C" PyObject *
#else
-#define __Pyx_PyMODINIT_FUNC PyObject *
-#endif
-#endif
-
-
-/////////////// FastTypeChecks.proto ///////////////
-
-#if CYTHON_COMPILING_IN_CPYTHON
-#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type)
-static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b);/*proto*/
-static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type);/*proto*/
-static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2);/*proto*/
-#else
-#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type)
-#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type)
-#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2))
-#endif
-
-#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception)
-
-/////////////// FastTypeChecks ///////////////
-//@requires: Exceptions.c::PyThreadStateGet
-//@requires: Exceptions.c::PyErrFetchRestore
-
-#if CYTHON_COMPILING_IN_CPYTHON
-static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) {
- while (a) {
- a = a->tp_base;
- if (a == b)
- return 1;
- }
- return b == &PyBaseObject_Type;
-}
-
-static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) {
- PyObject *mro;
- if (a == b) return 1;
- mro = a->tp_mro;
- if (likely(mro)) {
- Py_ssize_t i, n;
- n = PyTuple_GET_SIZE(mro);
- for (i = 0; i < n; i++) {
- if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b)
- return 1;
- }
- return 0;
- }
- // should only get here for incompletely initialised types, i.e. never under normal usage patterns
- return __Pyx_InBases(a, b);
-}
-
-
-#if PY_MAJOR_VERSION == 2
-static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) {
- // PyObject_IsSubclass() can recurse and therefore is not safe
- PyObject *exception, *value, *tb;
- int res;
- __Pyx_PyThreadState_declare
- __Pyx_PyThreadState_assign
- __Pyx_ErrFetch(&exception, &value, &tb);
-
- res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0;
- // This function must not fail, so print the error here (which also clears it)
- if (unlikely(res == -1)) {
- PyErr_WriteUnraisable(err);
- res = 0;
- }
- if (!res) {
- res = PyObject_IsSubclass(err, exc_type2);
- // This function must not fail, so print the error here (which also clears it)
- if (unlikely(res == -1)) {
- PyErr_WriteUnraisable(err);
- res = 0;
- }
- }
-
- __Pyx_ErrRestore(exception, value, tb);
- return res;
-}
-#else
-static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) {
- int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0;
- if (!res) {
- res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2);
- }
- return res;
-}
-#endif
-
-// so far, we only call PyErr_GivenExceptionMatches() with an exception type (not instance) as first argument
-// => optimise for that case
-
+#define __Pyx_PyMODINIT_FUNC PyObject *
+#endif
+#endif
+
+
+/////////////// FastTypeChecks.proto ///////////////
+
+#if CYTHON_COMPILING_IN_CPYTHON
+#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type)
+static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b);/*proto*/
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type);/*proto*/
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2);/*proto*/
+#else
+#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type)
+#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type)
+#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2))
+#endif
+
+#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception)
+
+/////////////// FastTypeChecks ///////////////
+//@requires: Exceptions.c::PyThreadStateGet
+//@requires: Exceptions.c::PyErrFetchRestore
+
+#if CYTHON_COMPILING_IN_CPYTHON
+static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) {
+ while (a) {
+ a = a->tp_base;
+ if (a == b)
+ return 1;
+ }
+ return b == &PyBaseObject_Type;
+}
+
+static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) {
+ PyObject *mro;
+ if (a == b) return 1;
+ mro = a->tp_mro;
+ if (likely(mro)) {
+ Py_ssize_t i, n;
+ n = PyTuple_GET_SIZE(mro);
+ for (i = 0; i < n; i++) {
+ if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b)
+ return 1;
+ }
+ return 0;
+ }
+ // should only get here for incompletely initialised types, i.e. never under normal usage patterns
+ return __Pyx_InBases(a, b);
+}
+
+
+#if PY_MAJOR_VERSION == 2
+static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) {
+ // PyObject_IsSubclass() can recurse and therefore is not safe
+ PyObject *exception, *value, *tb;
+ int res;
+ __Pyx_PyThreadState_declare
+ __Pyx_PyThreadState_assign
+ __Pyx_ErrFetch(&exception, &value, &tb);
+
+ res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0;
+ // This function must not fail, so print the error here (which also clears it)
+ if (unlikely(res == -1)) {
+ PyErr_WriteUnraisable(err);
+ res = 0;
+ }
+ if (!res) {
+ res = PyObject_IsSubclass(err, exc_type2);
+ // This function must not fail, so print the error here (which also clears it)
+ if (unlikely(res == -1)) {
+ PyErr_WriteUnraisable(err);
+ res = 0;
+ }
+ }
+
+ __Pyx_ErrRestore(exception, value, tb);
+ return res;
+}
+#else
+static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) {
+ int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0;
+ if (!res) {
+ res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2);
+ }
+ return res;
+}
+#endif
+
+// so far, we only call PyErr_GivenExceptionMatches() with an exception type (not instance) as first argument
+// => optimise for that case
+
static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {
Py_ssize_t i, n;
assert(PyExceptionClass_Check(exc_type));
@@ -937,9 +937,9 @@ static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *
return 0;
}
-static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject* exc_type) {
- if (likely(err == exc_type)) return 1;
- if (likely(PyExceptionClass_Check(err))) {
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject* exc_type) {
+ if (likely(err == exc_type)) return 1;
+ if (likely(PyExceptionClass_Check(err))) {
if (likely(PyExceptionClass_Check(exc_type))) {
return __Pyx_inner_PyErr_GivenExceptionMatches2(err, NULL, exc_type);
} else if (likely(PyTuple_Check(exc_type))) {
@@ -947,24 +947,24 @@ static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObje
} else {
// FIXME: Py3: PyErr_SetString(PyExc_TypeError, "catching classes that do not inherit from BaseException is not allowed");
}
- }
- return PyErr_GivenExceptionMatches(err, exc_type);
-}
-
-static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *exc_type1, PyObject *exc_type2) {
+ }
+ return PyErr_GivenExceptionMatches(err, exc_type);
+}
+
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *exc_type1, PyObject *exc_type2) {
// Only used internally with known exception types => pure safety check assertions.
assert(PyExceptionClass_Check(exc_type1));
assert(PyExceptionClass_Check(exc_type2));
- if (likely(err == exc_type1 || err == exc_type2)) return 1;
- if (likely(PyExceptionClass_Check(err))) {
- return __Pyx_inner_PyErr_GivenExceptionMatches2(err, exc_type1, exc_type2);
- }
- return (PyErr_GivenExceptionMatches(err, exc_type1) || PyErr_GivenExceptionMatches(err, exc_type2));
-}
-
-#endif
-
-
+ if (likely(err == exc_type1 || err == exc_type2)) return 1;
+ if (likely(PyExceptionClass_Check(err))) {
+ return __Pyx_inner_PyErr_GivenExceptionMatches2(err, exc_type1, exc_type2);
+ }
+ return (PyErr_GivenExceptionMatches(err, exc_type1) || PyErr_GivenExceptionMatches(err, exc_type2));
+}
+
+#endif
+
+
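The FastTypeChecks helpers above avoid PyErr_GivenExceptionMatches() on the hot path by scanning the type's pre-computed MRO tuple directly. A minimal sketch of that idea in isolation (assuming CPython and a fully initialised exception type; this is an illustration, not the Cython source):

    #include <Python.h>

    /* Decide whether exc_type is caught_type or a subclass of it by
     * scanning the pre-computed MRO tuple, falling back to the generic
     * API if the type is not fully initialised (tp_mro == NULL). */
    static int mro_matches(PyObject *exc_type, PyObject *caught_type) {
        PyObject *mro = ((PyTypeObject *)exc_type)->tp_mro;
        Py_ssize_t i, n;
        if (exc_type == caught_type)
            return 1;
        if (!mro)
            return PyErr_GivenExceptionMatches(exc_type, caught_type);
        n = PyTuple_GET_SIZE(mro);
        for (i = 0; i < n; i++) {
            if (PyTuple_GET_ITEM(mro, i) == caught_type)
                return 1;
        }
        return 0;
    }

For example, mro_matches(PyExc_KeyError, PyExc_LookupError) returns 1, since KeyError's MRO contains LookupError.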
/////////////// MathInitCode ///////////////
#if defined(WIN32) || defined(MS_WINDOWS)
@@ -998,7 +998,7 @@ typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* enc
const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; /*proto*/
/////////////// ForceInitThreads.proto ///////////////
-//@proto_block: utility_code_proto_before_types
+//@proto_block: utility_code_proto_before_types
#ifndef __PYX_FORCE_INIT_THREADS
#define __PYX_FORCE_INIT_THREADS 0
@@ -1010,11 +1010,11 @@ typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* enc
PyEval_InitThreads();
#endif
-
-/////////////// ModuleCreationPEP489 ///////////////
-//@substitute: naming
-
-//#if CYTHON_PEP489_MULTI_PHASE_INIT
+
+/////////////// ModuleCreationPEP489 ///////////////
+//@substitute: naming
+
+//#if CYTHON_PEP489_MULTI_PHASE_INIT
static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) {
#if PY_VERSION_HEX >= 0x030700A1
static PY_INT64_T main_interpreter_id = -1;
@@ -1042,54 +1042,54 @@ static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) {
}
static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) {
- PyObject *value = PyObject_GetAttrString(spec, from_name);
- int result = 0;
- if (likely(value)) {
+ PyObject *value = PyObject_GetAttrString(spec, from_name);
+ int result = 0;
+ if (likely(value)) {
if (allow_none || value != Py_None) {
result = PyDict_SetItemString(moddict, to_name, value);
}
- Py_DECREF(value);
- } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
- PyErr_Clear();
- } else {
- result = -1;
- }
- return result;
-}
-
+ Py_DECREF(value);
+ } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
+ PyErr_Clear();
+ } else {
+ result = -1;
+ }
+ return result;
+}
+
static CYTHON_SMALL_CODE PyObject* ${pymodule_create_func_cname}(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) {
- PyObject *module = NULL, *moddict, *modname;
-
- // For now, we only have exactly one module instance.
+ PyObject *module = NULL, *moddict, *modname;
+
+ // For now, we only have exactly one module instance.
if (__Pyx_check_single_interpreter())
return NULL;
- if (${module_cname})
- return __Pyx_NewRef(${module_cname});
-
- modname = PyObject_GetAttrString(spec, "name");
- if (unlikely(!modname)) goto bad;
-
- module = PyModule_NewObject(modname);
- Py_DECREF(modname);
- if (unlikely(!module)) goto bad;
-
- moddict = PyModule_GetDict(module);
- if (unlikely(!moddict)) goto bad;
- // moddict is a borrowed reference
-
+ if (${module_cname})
+ return __Pyx_NewRef(${module_cname});
+
+ modname = PyObject_GetAttrString(spec, "name");
+ if (unlikely(!modname)) goto bad;
+
+ module = PyModule_NewObject(modname);
+ Py_DECREF(modname);
+ if (unlikely(!module)) goto bad;
+
+ moddict = PyModule_GetDict(module);
+ if (unlikely(!moddict)) goto bad;
+ // moddict is a borrowed reference
+
if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad;
if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad;
if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad;
if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad;
-
- return module;
-bad:
- Py_XDECREF(module);
- return NULL;
-}
-//#endif
-
-
+
+ return module;
+bad:
+ Py_XDECREF(module);
+ return NULL;
+}
+//#endif
+
+
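Because this PEP 489 create hook builds the module itself rather than letting importlib do it, the attributes that module-level code may read (__loader__, __file__, __package__, __path__) have to be copied from the spec into the module dict by hand before execution, which is what __Pyx_copy_spec_to_module does above. A stripped-down sketch of such a create hook (names are illustrative and error handling abbreviated; not the Cython implementation):

    #include <Python.h>

    /* Hypothetical Py_mod_create slot: make a module named after the
     * spec and return it; returning NULL propagates the error. */
    static PyObject *demo_create(PyObject *spec, PyModuleDef *def) {
        PyObject *name, *mod;
        (void)def;                          /* unused in this sketch */
        name = PyObject_GetAttrString(spec, "name");
        if (!name)
            return NULL;
        mod = PyModule_NewObject(name);     /* new module, new reference */
        Py_DECREF(name);
        return mod;
    }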
/////////////// CodeObjectCache.proto ///////////////
typedef struct {
@@ -1229,22 +1229,22 @@ static int __Pyx_check_binary_version(void) {
return 0;
}
-/////////////// IsLittleEndian.proto ///////////////
-
-static CYTHON_INLINE int __Pyx_Is_Little_Endian(void);
-
-/////////////// IsLittleEndian ///////////////
-
-static CYTHON_INLINE int __Pyx_Is_Little_Endian(void)
-{
- union {
- uint32_t u32;
- uint8_t u8[4];
- } S;
- S.u32 = 0x01020304;
- return S.u8[0] == 4;
-}
-
+/////////////// IsLittleEndian.proto ///////////////
+
+static CYTHON_INLINE int __Pyx_Is_Little_Endian(void);
+
+/////////////// IsLittleEndian ///////////////
+
+static CYTHON_INLINE int __Pyx_Is_Little_Endian(void)
+{
+ union {
+ uint32_t u32;
+ uint8_t u8[4];
+ } S;
+ S.u32 = 0x01020304;
+ return S.u8[0] == 4;
+}
+
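The probe stores 0x01020304 into a four-byte union and reads back the lowest-addressed byte: on a little-endian machine that byte is 0x04, on a big-endian one it is 0x01. The same check as a standalone program:

    #include <stdint.h>
    #include <stdio.h>

    int main(void) {
        union { uint32_t u32; uint8_t u8[4]; } s;
        s.u32 = 0x01020304;
        /* prints 1 on little-endian hardware, 0 on big-endian */
        printf("little-endian: %d\n", s.u8[0] == 4);
        return 0;
    }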
/////////////// Refnanny.proto ///////////////
#ifndef CYTHON_REFNANNY
@@ -1330,20 +1330,20 @@ end:
}
#endif /* CYTHON_REFNANNY */
-
-/////////////// ImportRefnannyAPI ///////////////
-
-#if CYTHON_REFNANNY
-__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny");
-if (!__Pyx_RefNanny) {
- PyErr_Clear();
- __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny");
- if (!__Pyx_RefNanny)
- Py_FatalError("failed to import 'refnanny' module");
-}
-#endif
-
-
+
+/////////////// ImportRefnannyAPI ///////////////
+
+#if CYTHON_REFNANNY
+__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny");
+if (!__Pyx_RefNanny) {
+ PyErr_Clear();
+ __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny");
+ if (!__Pyx_RefNanny)
+ Py_FatalError("failed to import 'refnanny' module");
+}
+#endif
+
+
/////////////// RegisterModuleCleanup.proto ///////////////
//@substitute: naming
@@ -1358,7 +1358,7 @@ static int __Pyx_RegisterCleanup(void); /*proto*/
/////////////// RegisterModuleCleanup ///////////////
//@substitute: naming
-#if PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY
+#if PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY
static PyObject* ${cleanup_cname}_atexit(PyObject *module, CYTHON_UNUSED PyObject *unused) {
${cleanup_cname}(module);
Py_INCREF(Py_None); return Py_None;
@@ -1429,178 +1429,178 @@ bad:
return ret;
}
#endif
-
-/////////////// FastGil.init ///////////////
-#ifdef WITH_THREAD
-__Pyx_FastGilFuncInit();
-#endif
-
-/////////////// NoFastGil.proto ///////////////
-//@proto_block: utility_code_proto_before_types
-
-#define __Pyx_PyGILState_Ensure PyGILState_Ensure
-#define __Pyx_PyGILState_Release PyGILState_Release
-#define __Pyx_FastGIL_Remember()
-#define __Pyx_FastGIL_Forget()
-#define __Pyx_FastGilFuncInit()
-
-/////////////// FastGil.proto ///////////////
-//@proto_block: utility_code_proto_before_types
-
-struct __Pyx_FastGilVtab {
- PyGILState_STATE (*Fast_PyGILState_Ensure)(void);
- void (*Fast_PyGILState_Release)(PyGILState_STATE oldstate);
- void (*FastGIL_Remember)(void);
- void (*FastGIL_Forget)(void);
-};
-
-static void __Pyx_FastGIL_Noop(void) {}
-static struct __Pyx_FastGilVtab __Pyx_FastGilFuncs = {
- PyGILState_Ensure,
- PyGILState_Release,
- __Pyx_FastGIL_Noop,
- __Pyx_FastGIL_Noop
-};
-
-static void __Pyx_FastGilFuncInit(void);
-
-#define __Pyx_PyGILState_Ensure __Pyx_FastGilFuncs.Fast_PyGILState_Ensure
-#define __Pyx_PyGILState_Release __Pyx_FastGilFuncs.Fast_PyGILState_Release
-#define __Pyx_FastGIL_Remember __Pyx_FastGilFuncs.FastGIL_Remember
-#define __Pyx_FastGIL_Forget __Pyx_FastGilFuncs.FastGIL_Forget
-
-#ifdef WITH_THREAD
- #ifndef CYTHON_THREAD_LOCAL
- #if __STDC_VERSION__ >= 201112
- #define CYTHON_THREAD_LOCAL _Thread_local
- #elif defined(__GNUC__)
- #define CYTHON_THREAD_LOCAL __thread
- #elif defined(_MSC_VER)
- #define CYTHON_THREAD_LOCAL __declspec(thread)
- #endif
- #endif
-#endif
-
-/////////////// FastGil ///////////////
-//@requires: CommonStructures.c::FetchCommonPointer
-// The implementations of PyGILState_Ensure/Release call PyThread_get_key_value
-// several times, which turns out to be quite slow (slower, in fact, than
-// acquiring the GIL itself). Simply storing it in a thread local for the
-// common case is much faster.
-// To make optimal use of this thread local, we attempt to share it between
-// modules.
-
-#define __Pyx_FastGIL_ABI_module "_cython_" CYTHON_ABI
-#define __Pyx_FastGIL_PyCapsuleName "FastGilFuncs"
-#define __Pyx_FastGIL_PyCapsule \
- __Pyx_FastGIL_ABI_module "." __Pyx_FastGIL_PyCapsuleName
-
-#if PY_VERSION_HEX < 0x02070000
- #undef CYTHON_THREAD_LOCAL
-#endif
-
-#ifdef CYTHON_THREAD_LOCAL
-
-#include "pythread.h"
-#include "pystate.h"
-
-static CYTHON_THREAD_LOCAL PyThreadState *__Pyx_FastGil_tcur = NULL;
-static CYTHON_THREAD_LOCAL int __Pyx_FastGil_tcur_depth = 0;
-static int __Pyx_FastGil_autoTLSkey = -1;
-
-static CYTHON_INLINE void __Pyx_FastGIL_Remember0(void) {
- ++__Pyx_FastGil_tcur_depth;
-}
-
-static CYTHON_INLINE void __Pyx_FastGIL_Forget0(void) {
- if (--__Pyx_FastGil_tcur_depth == 0) {
- __Pyx_FastGil_tcur = NULL;
- }
-}
-
-static CYTHON_INLINE PyThreadState *__Pyx_FastGil_get_tcur(void) {
- PyThreadState *tcur = __Pyx_FastGil_tcur;
- if (tcur == NULL) {
- tcur = __Pyx_FastGil_tcur = (PyThreadState*)PyThread_get_key_value(__Pyx_FastGil_autoTLSkey);
- }
- return tcur;
-}
-
-static PyGILState_STATE __Pyx_FastGil_PyGILState_Ensure(void) {
- int current;
+
+/////////////// FastGil.init ///////////////
+#ifdef WITH_THREAD
+__Pyx_FastGilFuncInit();
+#endif
+
+/////////////// NoFastGil.proto ///////////////
+//@proto_block: utility_code_proto_before_types
+
+#define __Pyx_PyGILState_Ensure PyGILState_Ensure
+#define __Pyx_PyGILState_Release PyGILState_Release
+#define __Pyx_FastGIL_Remember()
+#define __Pyx_FastGIL_Forget()
+#define __Pyx_FastGilFuncInit()
+
+/////////////// FastGil.proto ///////////////
+//@proto_block: utility_code_proto_before_types
+
+struct __Pyx_FastGilVtab {
+ PyGILState_STATE (*Fast_PyGILState_Ensure)(void);
+ void (*Fast_PyGILState_Release)(PyGILState_STATE oldstate);
+ void (*FastGIL_Remember)(void);
+ void (*FastGIL_Forget)(void);
+};
+
+static void __Pyx_FastGIL_Noop(void) {}
+static struct __Pyx_FastGilVtab __Pyx_FastGilFuncs = {
+ PyGILState_Ensure,
+ PyGILState_Release,
+ __Pyx_FastGIL_Noop,
+ __Pyx_FastGIL_Noop
+};
+
+static void __Pyx_FastGilFuncInit(void);
+
+#define __Pyx_PyGILState_Ensure __Pyx_FastGilFuncs.Fast_PyGILState_Ensure
+#define __Pyx_PyGILState_Release __Pyx_FastGilFuncs.Fast_PyGILState_Release
+#define __Pyx_FastGIL_Remember __Pyx_FastGilFuncs.FastGIL_Remember
+#define __Pyx_FastGIL_Forget __Pyx_FastGilFuncs.FastGIL_Forget
+
+#ifdef WITH_THREAD
+ #ifndef CYTHON_THREAD_LOCAL
+ #if __STDC_VERSION__ >= 201112
+ #define CYTHON_THREAD_LOCAL _Thread_local
+ #elif defined(__GNUC__)
+ #define CYTHON_THREAD_LOCAL __thread
+ #elif defined(_MSC_VER)
+ #define CYTHON_THREAD_LOCAL __declspec(thread)
+ #endif
+ #endif
+#endif
+
+/////////////// FastGil ///////////////
+//@requires: CommonStructures.c::FetchCommonPointer
+// The implementations of PyGILState_Ensure/Release call PyThread_get_key_value
+// several times, which turns out to be quite slow (slower, in fact, than
+// acquiring the GIL itself). Simply storing it in a thread local for the
+// common case is much faster.
+// To make optimal use of this thread local, we attempt to share it between
+// modules.
+
+#define __Pyx_FastGIL_ABI_module "_cython_" CYTHON_ABI
+#define __Pyx_FastGIL_PyCapsuleName "FastGilFuncs"
+#define __Pyx_FastGIL_PyCapsule \
+ __Pyx_FastGIL_ABI_module "." __Pyx_FastGIL_PyCapsuleName
+
+#if PY_VERSION_HEX < 0x02070000
+ #undef CYTHON_THREAD_LOCAL
+#endif
+
+#ifdef CYTHON_THREAD_LOCAL
+
+#include "pythread.h"
+#include "pystate.h"
+
+static CYTHON_THREAD_LOCAL PyThreadState *__Pyx_FastGil_tcur = NULL;
+static CYTHON_THREAD_LOCAL int __Pyx_FastGil_tcur_depth = 0;
+static int __Pyx_FastGil_autoTLSkey = -1;
+
+static CYTHON_INLINE void __Pyx_FastGIL_Remember0(void) {
+ ++__Pyx_FastGil_tcur_depth;
+}
+
+static CYTHON_INLINE void __Pyx_FastGIL_Forget0(void) {
+ if (--__Pyx_FastGil_tcur_depth == 0) {
+ __Pyx_FastGil_tcur = NULL;
+ }
+}
+
+static CYTHON_INLINE PyThreadState *__Pyx_FastGil_get_tcur(void) {
+ PyThreadState *tcur = __Pyx_FastGil_tcur;
+ if (tcur == NULL) {
+ tcur = __Pyx_FastGil_tcur = (PyThreadState*)PyThread_get_key_value(__Pyx_FastGil_autoTLSkey);
+ }
+ return tcur;
+}
+
+static PyGILState_STATE __Pyx_FastGil_PyGILState_Ensure(void) {
+ int current;
PyThreadState *tcur;
- __Pyx_FastGIL_Remember0();
+ __Pyx_FastGIL_Remember0();
tcur = __Pyx_FastGil_get_tcur();
- if (tcur == NULL) {
- // Uninitialized, need to initialize now.
- return PyGILState_Ensure();
- }
- current = tcur == __Pyx_PyThreadState_Current;
- if (current == 0) {
- PyEval_RestoreThread(tcur);
- }
- ++tcur->gilstate_counter;
- return current ? PyGILState_LOCKED : PyGILState_UNLOCKED;
-}
-
-static void __Pyx_FastGil_PyGILState_Release(PyGILState_STATE oldstate) {
- PyThreadState *tcur = __Pyx_FastGil_get_tcur();
- __Pyx_FastGIL_Forget0();
- if (tcur->gilstate_counter == 1) {
- // This is the last lock, do all the cleanup as well.
- PyGILState_Release(oldstate);
- } else {
- --tcur->gilstate_counter;
- if (oldstate == PyGILState_UNLOCKED) {
- PyEval_SaveThread();
- }
- }
-}
-
-static void __Pyx_FastGilFuncInit0(void) {
- /* Try to detect autoTLSkey. */
- int key;
- void* this_thread_state = (void*) PyGILState_GetThisThreadState();
- for (key = 0; key < 100; key++) {
- if (PyThread_get_key_value(key) == this_thread_state) {
- __Pyx_FastGil_autoTLSkey = key;
- break;
- }
- }
- if (__Pyx_FastGil_autoTLSkey != -1) {
- PyObject* capsule = NULL;
- PyObject* abi_module = NULL;
- __Pyx_PyGILState_Ensure = __Pyx_FastGil_PyGILState_Ensure;
- __Pyx_PyGILState_Release = __Pyx_FastGil_PyGILState_Release;
- __Pyx_FastGIL_Remember = __Pyx_FastGIL_Remember0;
- __Pyx_FastGIL_Forget = __Pyx_FastGIL_Forget0;
- capsule = PyCapsule_New(&__Pyx_FastGilFuncs, __Pyx_FastGIL_PyCapsule, NULL);
- abi_module = PyImport_AddModule(__Pyx_FastGIL_ABI_module);
- if (capsule && abi_module) {
- PyObject_SetAttrString(abi_module, __Pyx_FastGIL_PyCapsuleName, capsule);
- }
- Py_XDECREF(capsule);
- }
-}
-
-#else
-
-static void __Pyx_FastGilFuncInit0(void) {
- CYTHON_UNUSED void* force_use = (void*)&__Pyx_FetchCommonPointer;
-}
-
-#endif
-
-static void __Pyx_FastGilFuncInit(void) {
-#if PY_VERSION_HEX >= 0x02070000
- struct __Pyx_FastGilVtab* shared = (struct __Pyx_FastGilVtab*)PyCapsule_Import(__Pyx_FastGIL_PyCapsule, 1);
-#else
- struct __Pyx_FastGilVtab* shared = NULL;
-#endif
- if (shared) {
- __Pyx_FastGilFuncs = *shared;
- } else {
- PyErr_Clear();
- __Pyx_FastGilFuncInit0();
- }
-}
+ if (tcur == NULL) {
+ // Uninitialized, need to initialize now.
+ return PyGILState_Ensure();
+ }
+ current = tcur == __Pyx_PyThreadState_Current;
+ if (current == 0) {
+ PyEval_RestoreThread(tcur);
+ }
+ ++tcur->gilstate_counter;
+ return current ? PyGILState_LOCKED : PyGILState_UNLOCKED;
+}
+
+static void __Pyx_FastGil_PyGILState_Release(PyGILState_STATE oldstate) {
+ PyThreadState *tcur = __Pyx_FastGil_get_tcur();
+ __Pyx_FastGIL_Forget0();
+ if (tcur->gilstate_counter == 1) {
+ // This is the last lock, do all the cleanup as well.
+ PyGILState_Release(oldstate);
+ } else {
+ --tcur->gilstate_counter;
+ if (oldstate == PyGILState_UNLOCKED) {
+ PyEval_SaveThread();
+ }
+ }
+}
+
+static void __Pyx_FastGilFuncInit0(void) {
+ /* Try to detect autoTLSkey. */
+ int key;
+ void* this_thread_state = (void*) PyGILState_GetThisThreadState();
+ for (key = 0; key < 100; key++) {
+ if (PyThread_get_key_value(key) == this_thread_state) {
+ __Pyx_FastGil_autoTLSkey = key;
+ break;
+ }
+ }
+ if (__Pyx_FastGil_autoTLSkey != -1) {
+ PyObject* capsule = NULL;
+ PyObject* abi_module = NULL;
+ __Pyx_PyGILState_Ensure = __Pyx_FastGil_PyGILState_Ensure;
+ __Pyx_PyGILState_Release = __Pyx_FastGil_PyGILState_Release;
+ __Pyx_FastGIL_Remember = __Pyx_FastGIL_Remember0;
+ __Pyx_FastGIL_Forget = __Pyx_FastGIL_Forget0;
+ capsule = PyCapsule_New(&__Pyx_FastGilFuncs, __Pyx_FastGIL_PyCapsule, NULL);
+ abi_module = PyImport_AddModule(__Pyx_FastGIL_ABI_module);
+ if (capsule && abi_module) {
+ PyObject_SetAttrString(abi_module, __Pyx_FastGIL_PyCapsuleName, capsule);
+ }
+ Py_XDECREF(capsule);
+ }
+}
+
+#else
+
+static void __Pyx_FastGilFuncInit0(void) {
+ CYTHON_UNUSED void* force_use = (void*)&__Pyx_FetchCommonPointer;
+}
+
+#endif
+
+static void __Pyx_FastGilFuncInit(void) {
+#if PY_VERSION_HEX >= 0x02070000
+ struct __Pyx_FastGilVtab* shared = (struct __Pyx_FastGilVtab*)PyCapsule_Import(__Pyx_FastGIL_PyCapsule, 1);
+#else
+ struct __Pyx_FastGilVtab* shared = NULL;
+#endif
+ if (shared) {
+ __Pyx_FastGilFuncs = *shared;
+ } else {
+ PyErr_Clear();
+ __Pyx_FastGilFuncInit0();
+ }
+}
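The core of the FastGil optimisation is a one-entry, per-thread cache: the PyThreadState pointer that PyGILState_Ensure() would otherwise fetch through PyThread_get_key_value() on every call is remembered in a C thread-local. A minimal sketch of just that caching step (C11 _Thread_local assumed; the real code above additionally tracks nesting depth and shares its function table between modules through a capsule):

    #include <Python.h>
    #include "pythread.h"

    static _Thread_local PyThreadState *cached_tstate;

    /* Return this thread's PyThreadState, hitting the TLS key only on
     * the first call per thread. autoTLSkey is the key detected at
     * init time, as in __Pyx_FastGilFuncInit0() above. */
    static PyThreadState *get_tstate_cached(int autoTLSkey) {
        if (!cached_tstate)
            cached_tstate = (PyThreadState *)PyThread_get_key_value(autoTLSkey);
        return cached_tstate;
    }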
diff --git a/contrib/tools/cython/Cython/Utility/ObjectHandling.c b/contrib/tools/cython/Cython/Utility/ObjectHandling.c
index dad5f1db5c..c1b1c60bda 100644
--- a/contrib/tools/cython/Cython/Utility/ObjectHandling.c
+++ b/contrib/tools/cython/Cython/Utility/ObjectHandling.c
@@ -80,69 +80,69 @@ static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) {
/////////////// UnpackTuple2.proto ///////////////
-#define __Pyx_unpack_tuple2(tuple, value1, value2, is_tuple, has_known_size, decref_tuple) \
- (likely(is_tuple || PyTuple_Check(tuple)) ? \
- (likely(has_known_size || PyTuple_GET_SIZE(tuple) == 2) ? \
- __Pyx_unpack_tuple2_exact(tuple, value1, value2, decref_tuple) : \
- (__Pyx_UnpackTupleError(tuple, 2), -1)) : \
- __Pyx_unpack_tuple2_generic(tuple, value1, value2, has_known_size, decref_tuple))
-
-static CYTHON_INLINE int __Pyx_unpack_tuple2_exact(
- PyObject* tuple, PyObject** value1, PyObject** value2, int decref_tuple);
-static int __Pyx_unpack_tuple2_generic(
- PyObject* tuple, PyObject** value1, PyObject** value2, int has_known_size, int decref_tuple);
-
+#define __Pyx_unpack_tuple2(tuple, value1, value2, is_tuple, has_known_size, decref_tuple) \
+ (likely(is_tuple || PyTuple_Check(tuple)) ? \
+ (likely(has_known_size || PyTuple_GET_SIZE(tuple) == 2) ? \
+ __Pyx_unpack_tuple2_exact(tuple, value1, value2, decref_tuple) : \
+ (__Pyx_UnpackTupleError(tuple, 2), -1)) : \
+ __Pyx_unpack_tuple2_generic(tuple, value1, value2, has_known_size, decref_tuple))
+
+static CYTHON_INLINE int __Pyx_unpack_tuple2_exact(
+ PyObject* tuple, PyObject** value1, PyObject** value2, int decref_tuple);
+static int __Pyx_unpack_tuple2_generic(
+ PyObject* tuple, PyObject** value1, PyObject** value2, int has_known_size, int decref_tuple);
+
/////////////// UnpackTuple2 ///////////////
//@requires: UnpackItemEndCheck
//@requires: UnpackTupleError
//@requires: RaiseNeedMoreValuesToUnpack
-static CYTHON_INLINE int __Pyx_unpack_tuple2_exact(
- PyObject* tuple, PyObject** pvalue1, PyObject** pvalue2, int decref_tuple) {
- PyObject *value1 = NULL, *value2 = NULL;
+static CYTHON_INLINE int __Pyx_unpack_tuple2_exact(
+ PyObject* tuple, PyObject** pvalue1, PyObject** pvalue2, int decref_tuple) {
+ PyObject *value1 = NULL, *value2 = NULL;
#if CYTHON_COMPILING_IN_PYPY
- value1 = PySequence_ITEM(tuple, 0); if (unlikely(!value1)) goto bad;
- value2 = PySequence_ITEM(tuple, 1); if (unlikely(!value2)) goto bad;
+ value1 = PySequence_ITEM(tuple, 0); if (unlikely(!value1)) goto bad;
+ value2 = PySequence_ITEM(tuple, 1); if (unlikely(!value2)) goto bad;
#else
- value1 = PyTuple_GET_ITEM(tuple, 0); Py_INCREF(value1);
- value2 = PyTuple_GET_ITEM(tuple, 1); Py_INCREF(value2);
+ value1 = PyTuple_GET_ITEM(tuple, 0); Py_INCREF(value1);
+ value2 = PyTuple_GET_ITEM(tuple, 1); Py_INCREF(value2);
#endif
- if (decref_tuple) {
- Py_DECREF(tuple);
+ if (decref_tuple) {
+ Py_DECREF(tuple);
}
-
+
*pvalue1 = value1;
*pvalue2 = value2;
return 0;
-#if CYTHON_COMPILING_IN_PYPY
-bad:
- Py_XDECREF(value1);
- Py_XDECREF(value2);
- if (decref_tuple) { Py_XDECREF(tuple); }
- return -1;
-#endif
-}
-
-static int __Pyx_unpack_tuple2_generic(PyObject* tuple, PyObject** pvalue1, PyObject** pvalue2,
- int has_known_size, int decref_tuple) {
- Py_ssize_t index;
- PyObject *value1 = NULL, *value2 = NULL, *iter = NULL;
- iternextfunc iternext;
-
- iter = PyObject_GetIter(tuple);
- if (unlikely(!iter)) goto bad;
- if (decref_tuple) { Py_DECREF(tuple); tuple = NULL; }
-
- iternext = Py_TYPE(iter)->tp_iternext;
- value1 = iternext(iter); if (unlikely(!value1)) { index = 0; goto unpacking_failed; }
- value2 = iternext(iter); if (unlikely(!value2)) { index = 1; goto unpacking_failed; }
- if (!has_known_size && unlikely(__Pyx_IternextUnpackEndCheck(iternext(iter), 2))) goto bad;
-
- Py_DECREF(iter);
- *pvalue1 = value1;
- *pvalue2 = value2;
- return 0;
-
+#if CYTHON_COMPILING_IN_PYPY
+bad:
+ Py_XDECREF(value1);
+ Py_XDECREF(value2);
+ if (decref_tuple) { Py_XDECREF(tuple); }
+ return -1;
+#endif
+}
+
+static int __Pyx_unpack_tuple2_generic(PyObject* tuple, PyObject** pvalue1, PyObject** pvalue2,
+ int has_known_size, int decref_tuple) {
+ Py_ssize_t index;
+ PyObject *value1 = NULL, *value2 = NULL, *iter = NULL;
+ iternextfunc iternext;
+
+ iter = PyObject_GetIter(tuple);
+ if (unlikely(!iter)) goto bad;
+ if (decref_tuple) { Py_DECREF(tuple); tuple = NULL; }
+
+ iternext = Py_TYPE(iter)->tp_iternext;
+ value1 = iternext(iter); if (unlikely(!value1)) { index = 0; goto unpacking_failed; }
+ value2 = iternext(iter); if (unlikely(!value2)) { index = 1; goto unpacking_failed; }
+ if (!has_known_size && unlikely(__Pyx_IternextUnpackEndCheck(iternext(iter), 2))) goto bad;
+
+ Py_DECREF(iter);
+ *pvalue1 = value1;
+ *pvalue2 = value2;
+ return 0;
+
unpacking_failed:
if (!has_known_size && __Pyx_IterFinish() == 0)
__Pyx_RaiseNeedMoreValuesError(index);
@@ -154,77 +154,77 @@ bad:
return -1;
}
-
+
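Using UnpackTuple2 is a single macro call; for instance, unpacking an object of unknown type into exactly two owned references (a usage sketch assuming the macro above is in scope; pair, first and second are illustrative names):

    PyObject *first, *second;
    /* is_tuple=0: type unknown; has_known_size=0: length must be
     * verified; decref_tuple=0: caller keeps its reference to pair. */
    if (__Pyx_unpack_tuple2(pair, &first, &second, 0, 0, 0) < 0) {
        /* exception already set: wrong length or iteration failure */
    }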
/////////////// IterNext.proto ///////////////
#define __Pyx_PyIter_Next(obj) __Pyx_PyIter_Next2(obj, NULL)
static CYTHON_INLINE PyObject *__Pyx_PyIter_Next2(PyObject *, PyObject *); /*proto*/
/////////////// IterNext ///////////////
-//@requires: Exceptions.c::PyThreadStateGet
-//@requires: Exceptions.c::PyErrFetchRestore
-
-static PyObject *__Pyx_PyIter_Next2Default(PyObject* defval) {
- PyObject* exc_type;
- __Pyx_PyThreadState_declare
- __Pyx_PyThreadState_assign
- exc_type = __Pyx_PyErr_Occurred();
- if (unlikely(exc_type)) {
- if (!defval || unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)))
- return NULL;
- __Pyx_PyErr_Clear();
- Py_INCREF(defval);
- return defval;
- }
- if (defval) {
- Py_INCREF(defval);
- return defval;
- }
- __Pyx_PyErr_SetNone(PyExc_StopIteration);
- return NULL;
-}
-
-static void __Pyx_PyIter_Next_ErrorNoIterator(PyObject *iterator) {
- PyErr_Format(PyExc_TypeError,
- "%.200s object is not an iterator", Py_TYPE(iterator)->tp_name);
-}
-
+//@requires: Exceptions.c::PyThreadStateGet
+//@requires: Exceptions.c::PyErrFetchRestore
+
+static PyObject *__Pyx_PyIter_Next2Default(PyObject* defval) {
+ PyObject* exc_type;
+ __Pyx_PyThreadState_declare
+ __Pyx_PyThreadState_assign
+ exc_type = __Pyx_PyErr_Occurred();
+ if (unlikely(exc_type)) {
+ if (!defval || unlikely(!__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)))
+ return NULL;
+ __Pyx_PyErr_Clear();
+ Py_INCREF(defval);
+ return defval;
+ }
+ if (defval) {
+ Py_INCREF(defval);
+ return defval;
+ }
+ __Pyx_PyErr_SetNone(PyExc_StopIteration);
+ return NULL;
+}
+
+static void __Pyx_PyIter_Next_ErrorNoIterator(PyObject *iterator) {
+ PyErr_Format(PyExc_TypeError,
+ "%.200s object is not an iterator", Py_TYPE(iterator)->tp_name);
+}
+
// originally copied from Py3's builtin_next()
static CYTHON_INLINE PyObject *__Pyx_PyIter_Next2(PyObject* iterator, PyObject* defval) {
PyObject* next;
- // We always do a quick slot check because calling PyIter_Check() is so wasteful.
+ // We always do a quick slot check because calling PyIter_Check() is so wasteful.
iternextfunc iternext = Py_TYPE(iterator)->tp_iternext;
- if (likely(iternext)) {
+ if (likely(iternext)) {
#if CYTHON_USE_TYPE_SLOTS
- next = iternext(iterator);
- if (likely(next))
- return next;
- #if PY_VERSION_HEX >= 0x02070000
- if (unlikely(iternext == &_PyObject_NextNotImplemented))
- return NULL;
- #endif
+ next = iternext(iterator);
+ if (likely(next))
+ return next;
+ #if PY_VERSION_HEX >= 0x02070000
+ if (unlikely(iternext == &_PyObject_NextNotImplemented))
+ return NULL;
+ #endif
#else
- // Since the slot was set, assume that PyIter_Next() will likely succeed, and properly fail otherwise.
- // Note: PyIter_Next() crashes in CPython if "tp_iternext" is NULL.
- next = PyIter_Next(iterator);
- if (likely(next))
- return next;
-#endif
- } else if (CYTHON_USE_TYPE_SLOTS || unlikely(!PyIter_Check(iterator))) {
- // If CYTHON_USE_TYPE_SLOTS, then the slot was not set and we don't have an iterable.
- // Otherwise, don't trust "tp_iternext" and rely on PyIter_Check().
- __Pyx_PyIter_Next_ErrorNoIterator(iterator);
+ // Since the slot was set, assume that PyIter_Next() will likely succeed, and properly fail otherwise.
+ // Note: PyIter_Next() crashes in CPython if "tp_iternext" is NULL.
+ next = PyIter_Next(iterator);
+ if (likely(next))
+ return next;
+#endif
+ } else if (CYTHON_USE_TYPE_SLOTS || unlikely(!PyIter_Check(iterator))) {
+ // If CYTHON_USE_TYPE_SLOTS, then the slot was not set and we don't have an iterable.
+ // Otherwise, don't trust "tp_iternext" and rely on PyIter_Check().
+ __Pyx_PyIter_Next_ErrorNoIterator(iterator);
return NULL;
}
-#if !CYTHON_USE_TYPE_SLOTS
- else {
- // We have an iterator with an empty "tp_iternext", but didn't call next() on it yet.
- next = PyIter_Next(iterator);
- if (likely(next))
- return next;
- }
-#endif
- return __Pyx_PyIter_Next2Default(defval);
+#if !CYTHON_USE_TYPE_SLOTS
+ else {
+ // We have an iterator with an empty "tp_iternext", but didn't call next() on it yet.
+ next = PyIter_Next(iterator);
+ if (likely(next))
+ return next;
+ }
+#endif
+ return __Pyx_PyIter_Next2Default(defval);
}
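In effect __Pyx_PyIter_Next2() is a C-level next(iterator, default): with a default it swallows StopIteration and returns the default; without one it returns NULL with StopIteration set. A usage sketch (it is an illustrative name):

    /* Equivalent of Python's next(it, None); returns a new reference,
     * or NULL only on a real (non-StopIteration) error. */
    PyObject *item = __Pyx_PyIter_Next2(it, Py_None);
    if (!item) {
        /* propagate the error */
    }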
/////////////// IterFinish.proto ///////////////
@@ -239,10 +239,10 @@ static CYTHON_INLINE int __Pyx_IterFinish(void); /*proto*/
static CYTHON_INLINE int __Pyx_IterFinish(void) {
#if CYTHON_FAST_THREAD_STATE
- PyThreadState *tstate = __Pyx_PyThreadState_Current;
+ PyThreadState *tstate = __Pyx_PyThreadState_Current;
PyObject* exc_type = tstate->curexc_type;
if (unlikely(exc_type)) {
- if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) {
+ if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) {
PyObject *exc_value, *exc_tb;
exc_value = tstate->curexc_value;
exc_tb = tstate->curexc_traceback;
@@ -271,68 +271,68 @@ static CYTHON_INLINE int __Pyx_IterFinish(void) {
#endif
}
-
-/////////////// ObjectGetItem.proto ///////////////
-
-#if CYTHON_USE_TYPE_SLOTS
-static CYTHON_INLINE PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject* key);/*proto*/
-#else
-#define __Pyx_PyObject_GetItem(obj, key) PyObject_GetItem(obj, key)
-#endif
-
-/////////////// ObjectGetItem ///////////////
-// //@requires: GetItemInt - added in IndexNode as it uses templating.
-
-#if CYTHON_USE_TYPE_SLOTS
-static PyObject *__Pyx_PyObject_GetIndex(PyObject *obj, PyObject* index) {
- PyObject *runerr;
- Py_ssize_t key_value;
- PySequenceMethods *m = Py_TYPE(obj)->tp_as_sequence;
- if (unlikely(!(m && m->sq_item))) {
- PyErr_Format(PyExc_TypeError, "'%.200s' object is not subscriptable", Py_TYPE(obj)->tp_name);
- return NULL;
- }
-
- key_value = __Pyx_PyIndex_AsSsize_t(index);
- if (likely(key_value != -1 || !(runerr = PyErr_Occurred()))) {
- return __Pyx_GetItemInt_Fast(obj, key_value, 0, 1, 1);
- }
-
- // Error handling code -- only manage OverflowError differently.
- if (PyErr_GivenExceptionMatches(runerr, PyExc_OverflowError)) {
- PyErr_Clear();
- PyErr_Format(PyExc_IndexError, "cannot fit '%.200s' into an index-sized integer", Py_TYPE(index)->tp_name);
- }
- return NULL;
-}
-
-static PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject* key) {
- PyMappingMethods *m = Py_TYPE(obj)->tp_as_mapping;
- if (likely(m && m->mp_subscript)) {
- return m->mp_subscript(obj, key);
- }
- return __Pyx_PyObject_GetIndex(obj, key);
-}
-#endif
-
-
+
+/////////////// ObjectGetItem.proto ///////////////
+
+#if CYTHON_USE_TYPE_SLOTS
+static CYTHON_INLINE PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject* key);/*proto*/
+#else
+#define __Pyx_PyObject_GetItem(obj, key) PyObject_GetItem(obj, key)
+#endif
+
+/////////////// ObjectGetItem ///////////////
+// //@requires: GetItemInt - added in IndexNode as it uses templating.
+
+#if CYTHON_USE_TYPE_SLOTS
+static PyObject *__Pyx_PyObject_GetIndex(PyObject *obj, PyObject* index) {
+ PyObject *runerr;
+ Py_ssize_t key_value;
+ PySequenceMethods *m = Py_TYPE(obj)->tp_as_sequence;
+ if (unlikely(!(m && m->sq_item))) {
+ PyErr_Format(PyExc_TypeError, "'%.200s' object is not subscriptable", Py_TYPE(obj)->tp_name);
+ return NULL;
+ }
+
+ key_value = __Pyx_PyIndex_AsSsize_t(index);
+ if (likely(key_value != -1 || !(runerr = PyErr_Occurred()))) {
+ return __Pyx_GetItemInt_Fast(obj, key_value, 0, 1, 1);
+ }
+
+ // Error handling code -- only manage OverflowError differently.
+ if (PyErr_GivenExceptionMatches(runerr, PyExc_OverflowError)) {
+ PyErr_Clear();
+ PyErr_Format(PyExc_IndexError, "cannot fit '%.200s' into an index-sized integer", Py_TYPE(index)->tp_name);
+ }
+ return NULL;
+}
+
+static PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject* key) {
+ PyMappingMethods *m = Py_TYPE(obj)->tp_as_mapping;
+ if (likely(m && m->mp_subscript)) {
+ return m->mp_subscript(obj, key);
+ }
+ return __Pyx_PyObject_GetIndex(obj, key);
+}
+#endif
+
+
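The ObjectGetItem helper prefers the mapping slot (mp_subscript) and only falls back to integer indexing through the sequence slot, converting an index too large for Py_ssize_t from OverflowError into the IndexError that Python code expects. Calling it is a drop-in for PyObject_GetItem() (sketch; obj and key are illustrative names):

    /* obj[key]: new reference on success, NULL with an exception set
     * otherwise; an oversized integer index raises IndexError. */
    PyObject *v = __Pyx_PyObject_GetItem(obj, key);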
/////////////// DictGetItem.proto ///////////////
#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY
-static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key);/*proto*/
-
-#define __Pyx_PyObject_Dict_GetItem(obj, name) \
- (likely(PyDict_CheckExact(obj)) ? \
- __Pyx_PyDict_GetItem(obj, name) : PyObject_GetItem(obj, name))
-
-#else
-#define __Pyx_PyDict_GetItem(d, key) PyObject_GetItem(d, key)
-#define __Pyx_PyObject_Dict_GetItem(obj, name) PyObject_GetItem(obj, name)
-#endif
-
-/////////////// DictGetItem ///////////////
-
-#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY
+static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key);/*proto*/
+
+#define __Pyx_PyObject_Dict_GetItem(obj, name) \
+ (likely(PyDict_CheckExact(obj)) ? \
+ __Pyx_PyDict_GetItem(obj, name) : PyObject_GetItem(obj, name))
+
+#else
+#define __Pyx_PyDict_GetItem(d, key) PyObject_GetItem(d, key)
+#define __Pyx_PyObject_Dict_GetItem(obj, name) PyObject_GetItem(obj, name)
+#endif
+
+/////////////// DictGetItem ///////////////
+
+#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY
static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key) {
PyObject *value;
value = PyDict_GetItemWithError(d, key);
@@ -375,13 +375,13 @@ static CYTHON_INLINE PyObject *__Pyx_GetItemInt_{{type}}_Fast(PyObject *o, Py_ss
int wraparound, int boundscheck);
{{endfor}}
-static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j);
+static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j);
static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i,
int is_list, int wraparound, int boundscheck);
/////////////// GetItemInt ///////////////
-static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) {
+static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) {
PyObject *r;
if (!j) return NULL;
r = PyObject_GetItem(o, j);
@@ -394,12 +394,12 @@ static CYTHON_INLINE PyObject *__Pyx_GetItemInt_{{type}}_Fast(PyObject *o, Py_ss
CYTHON_NCP_UNUSED int wraparound,
CYTHON_NCP_UNUSED int boundscheck) {
#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
- Py_ssize_t wrapped_i = i;
- if (wraparound & unlikely(i < 0)) {
- wrapped_i += Py{{type}}_GET_SIZE(o);
- }
+ Py_ssize_t wrapped_i = i;
+ if (wraparound & unlikely(i < 0)) {
+ wrapped_i += Py{{type}}_GET_SIZE(o);
+ }
if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, Py{{type}}_GET_SIZE(o)))) {
- PyObject *r = Py{{type}}_GET_ITEM(o, wrapped_i);
+ PyObject *r = Py{{type}}_GET_ITEM(o, wrapped_i);
Py_INCREF(r);
return r;
}
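The wraparound branch adds the container length to a negative index exactly once, so -1 maps to len-1 and -len maps to 0; anything below -len stays negative and fails the bounds check. The rule in isolation:

    /* Python-style index normalisation used above (single shift only). */
    static Py_ssize_t wrap_index(Py_ssize_t i, Py_ssize_t len) {
        return (i < 0) ? i + len : i;   /* -1 -> len-1, -len -> 0 */
    }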
@@ -463,13 +463,13 @@ static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i,
(is_list ? (PyErr_SetString(PyExc_IndexError, "list assignment index out of range"), -1) : \
__Pyx_SetItemInt_Generic(o, to_py_func(i), v)))
-static int __Pyx_SetItemInt_Generic(PyObject *o, PyObject *j, PyObject *v);
+static int __Pyx_SetItemInt_Generic(PyObject *o, PyObject *j, PyObject *v);
static CYTHON_INLINE int __Pyx_SetItemInt_Fast(PyObject *o, Py_ssize_t i, PyObject *v,
int is_list, int wraparound, int boundscheck);
/////////////// SetItemInt ///////////////
-static int __Pyx_SetItemInt_Generic(PyObject *o, PyObject *j, PyObject *v) {
+static int __Pyx_SetItemInt_Generic(PyObject *o, PyObject *j, PyObject *v) {
int r;
if (!j) return -1;
r = PyObject_SetItem(o, j, v);
@@ -529,13 +529,13 @@ static CYTHON_INLINE int __Pyx_SetItemInt_Fast(PyObject *o, Py_ssize_t i, PyObje
(is_list ? (PyErr_SetString(PyExc_IndexError, "list assignment index out of range"), -1) : \
__Pyx_DelItem_Generic(o, to_py_func(i))))
-static int __Pyx_DelItem_Generic(PyObject *o, PyObject *j);
+static int __Pyx_DelItem_Generic(PyObject *o, PyObject *j);
static CYTHON_INLINE int __Pyx_DelItemInt_Fast(PyObject *o, Py_ssize_t i,
int is_list, int wraparound);
/////////////// DelItemInt ///////////////
-static int __Pyx_DelItem_Generic(PyObject *o, PyObject *j) {
+static int __Pyx_DelItem_Generic(PyObject *o, PyObject *j) {
int r;
if (!j) return -1;
r = PyObject_DelItem(o, j);
@@ -882,7 +882,7 @@ static PyObject *__Pyx_Py3MetaclassGet(PyObject *bases, PyObject *mkw); /*proto*
//@requires: CalculateMetaclass
static PyObject *__Pyx_Py3MetaclassGet(PyObject *bases, PyObject *mkw) {
- PyObject *metaclass = mkw ? __Pyx_PyDict_GetItemStr(mkw, PYIDENT("metaclass")) : NULL;
+ PyObject *metaclass = mkw ? __Pyx_PyDict_GetItemStr(mkw, PYIDENT("metaclass")) : NULL;
if (metaclass) {
Py_INCREF(metaclass);
if (PyDict_DelItem(mkw, PYIDENT("metaclass")) < 0) {
@@ -919,7 +919,7 @@ static PyObject *__Pyx_CreateClass(PyObject *bases, PyObject *dict, PyObject *na
return NULL;
/* Python2 __metaclass__ */
- metaclass = __Pyx_PyDict_GetItemStr(dict, PYIDENT("__metaclass__"));
+ metaclass = __Pyx_PyDict_GetItemStr(dict, PYIDENT("__metaclass__"));
if (metaclass) {
Py_INCREF(metaclass);
if (PyType_Check(metaclass)) {
@@ -1030,7 +1030,7 @@ static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) {
PyErr_SetString(PyExc_SystemError, "Missing type object");
return 0;
}
- if (likely(__Pyx_TypeCheck(obj, type)))
+ if (likely(__Pyx_TypeCheck(obj, type)))
return 1;
PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s",
Py_TYPE(obj)->tp_name, type->tp_name);
@@ -1052,37 +1052,37 @@ static CYTHON_INLINE int __Pyx_PyDict_ContainsTF(PyObject* item, PyObject* dict,
return unlikely(result < 0) ? result : (result == (eq == Py_EQ));
}
-/////////////// PySetContains.proto ///////////////
-
-static CYTHON_INLINE int __Pyx_PySet_ContainsTF(PyObject* key, PyObject* set, int eq); /* proto */
-
-/////////////// PySetContains ///////////////
-//@requires: Builtins.c::pyfrozenset_new
-
-static int __Pyx_PySet_ContainsUnhashable(PyObject *set, PyObject *key) {
- int result = -1;
- if (PySet_Check(key) && PyErr_ExceptionMatches(PyExc_TypeError)) {
- /* Convert key to frozenset */
- PyObject *tmpkey;
- PyErr_Clear();
- tmpkey = __Pyx_PyFrozenSet_New(key);
- if (tmpkey != NULL) {
- result = PySet_Contains(set, tmpkey);
- Py_DECREF(tmpkey);
- }
- }
- return result;
-}
-
-static CYTHON_INLINE int __Pyx_PySet_ContainsTF(PyObject* key, PyObject* set, int eq) {
- int result = PySet_Contains(set, key);
-
- if (unlikely(result < 0)) {
- result = __Pyx_PySet_ContainsUnhashable(set, key);
- }
- return unlikely(result < 0) ? result : (result == (eq == Py_EQ));
-}
-
+/////////////// PySetContains.proto ///////////////
+
+static CYTHON_INLINE int __Pyx_PySet_ContainsTF(PyObject* key, PyObject* set, int eq); /* proto */
+
+/////////////// PySetContains ///////////////
+//@requires: Builtins.c::pyfrozenset_new
+
+static int __Pyx_PySet_ContainsUnhashable(PyObject *set, PyObject *key) {
+ int result = -1;
+ if (PySet_Check(key) && PyErr_ExceptionMatches(PyExc_TypeError)) {
+ /* Convert key to frozenset */
+ PyObject *tmpkey;
+ PyErr_Clear();
+ tmpkey = __Pyx_PyFrozenSet_New(key);
+ if (tmpkey != NULL) {
+ result = PySet_Contains(set, tmpkey);
+ Py_DECREF(tmpkey);
+ }
+ }
+ return result;
+}
+
+static CYTHON_INLINE int __Pyx_PySet_ContainsTF(PyObject* key, PyObject* set, int eq) {
+ int result = PySet_Contains(set, key);
+
+ if (unlikely(result < 0)) {
+ result = __Pyx_PySet_ContainsUnhashable(set, key);
+ }
+ return unlikely(result < 0) ? result : (result == (eq == Py_EQ));
+}
+
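PySet_Contains() raises TypeError for an unhashable key, but Python defines membership of a set inside a set of frozensets, so the helper above retries with a frozenset copy before giving up. A usage sketch, where eq selects between the "in" (Py_EQ) and "not in" (Py_NE) results (key and some_set are illustrative names):

    /* 1 if the containment test (as selected by Py_EQ/Py_NE) holds,
     * 0 if not, negative on error with an exception set. */
    int hit = __Pyx_PySet_ContainsTF(key, some_set, Py_EQ);
    if (hit < 0) {
        /* propagate the error */
    }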
/////////////// PySequenceContains.proto ///////////////
static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) {
@@ -1125,45 +1125,45 @@ static PyObject *__Pyx__GetNameInClass(PyObject *nmspace, PyObject *name); /*pro
/////////////// GetNameInClass ///////////////
//@requires: PyObjectGetAttrStr
//@requires: GetModuleGlobalName
-//@requires: Exceptions.c::PyThreadStateGet
-//@requires: Exceptions.c::PyErrFetchRestore
-//@requires: Exceptions.c::PyErrExceptionMatches
+//@requires: Exceptions.c::PyThreadStateGet
+//@requires: Exceptions.c::PyErrFetchRestore
+//@requires: Exceptions.c::PyErrExceptionMatches
-static PyObject *__Pyx_GetGlobalNameAfterAttributeLookup(PyObject *name) {
+static PyObject *__Pyx_GetGlobalNameAfterAttributeLookup(PyObject *name) {
PyObject *result;
- __Pyx_PyThreadState_declare
- __Pyx_PyThreadState_assign
- if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError)))
- return NULL;
- __Pyx_PyErr_Clear();
+ __Pyx_PyThreadState_declare
+ __Pyx_PyThreadState_assign
+ if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError)))
+ return NULL;
+ __Pyx_PyErr_Clear();
__Pyx_GetModuleGlobalNameUncached(result, name);
return result;
-}
-
+}
+
static PyObject *__Pyx__GetNameInClass(PyObject *nmspace, PyObject *name) {
PyObject *result;
result = __Pyx_PyObject_GetAttrStr(nmspace, name);
- if (!result) {
- result = __Pyx_GetGlobalNameAfterAttributeLookup(name);
- }
+ if (!result) {
+ result = __Pyx_GetGlobalNameAfterAttributeLookup(name);
+ }
return result;
}
-
-/////////////// SetNameInClass.proto ///////////////
-
-#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1
-// Identifier names are always interned and have a pre-calculated hash value.
-#define __Pyx_SetNameInClass(ns, name, value) \
- (likely(PyDict_CheckExact(ns)) ? _PyDict_SetItem_KnownHash(ns, name, value, ((PyASCIIObject *) name)->hash) : PyObject_SetItem(ns, name, value))
-#elif CYTHON_COMPILING_IN_CPYTHON
-#define __Pyx_SetNameInClass(ns, name, value) \
- (likely(PyDict_CheckExact(ns)) ? PyDict_SetItem(ns, name, value) : PyObject_SetItem(ns, name, value))
-#else
-#define __Pyx_SetNameInClass(ns, name, value) PyObject_SetItem(ns, name, value)
-#endif
-
-
+
+/////////////// SetNameInClass.proto ///////////////
+
+#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1
+// Identifier names are always interned and have a pre-calculated hash value.
+#define __Pyx_SetNameInClass(ns, name, value) \
+ (likely(PyDict_CheckExact(ns)) ? _PyDict_SetItem_KnownHash(ns, name, value, ((PyASCIIObject *) name)->hash) : PyObject_SetItem(ns, name, value))
+#elif CYTHON_COMPILING_IN_CPYTHON
+#define __Pyx_SetNameInClass(ns, name, value) \
+ (likely(PyDict_CheckExact(ns)) ? PyDict_SetItem(ns, name, value) : PyObject_SetItem(ns, name, value))
+#else
+#define __Pyx_SetNameInClass(ns, name, value) PyObject_SetItem(ns, name, value)
+#endif
+
+
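Class-body names are interned identifier strings, and CPython caches their hash on the string object itself, so the macro can hand that hash straight to the dict instead of recomputing it. The shortcut in isolation (CPython >= 3.5, ASCII identifier assumed; an illustration, using the same private _PyDict_SetItem_KnownHash call as the macro above):

    /* Insert value under the interned name, reusing the hash already
     * stored on the PyASCIIObject header; negative result is an error. */
    Py_hash_t h = ((PyASCIIObject *)name)->hash;
    if (_PyDict_SetItem_KnownHash(ns, name, value, h) < 0) {
        /* propagate the error */
    }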
/////////////// GetModuleGlobalName.proto ///////////////
//@requires: PyDictVersioning
//@substitute: naming
@@ -1201,22 +1201,22 @@ static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name)
{
PyObject *result;
#if !CYTHON_AVOID_BORROWED_REFS
-#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1
- // Identifier names are always interned and have a pre-calculated hash value.
- result = _PyDict_GetItem_KnownHash($moddict_cname, name, ((PyASCIIObject *) name)->hash);
+#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1
+ // Identifier names are always interned and have a pre-calculated hash value.
+ result = _PyDict_GetItem_KnownHash($moddict_cname, name, ((PyASCIIObject *) name)->hash);
__PYX_UPDATE_DICT_CACHE($moddict_cname, result, *dict_cached_value, *dict_version)
- if (likely(result)) {
+ if (likely(result)) {
return __Pyx_NewRef(result);
- } else if (unlikely(PyErr_Occurred())) {
+ } else if (unlikely(PyErr_Occurred())) {
return NULL;
}
-#else
+#else
result = PyDict_GetItem($moddict_cname, name);
__PYX_UPDATE_DICT_CACHE($moddict_cname, result, *dict_cached_value, *dict_version)
if (likely(result)) {
return __Pyx_NewRef(result);
}
-#endif
+#endif
#else
result = PyObject_GetItem($moddict_cname, name);
__PYX_UPDATE_DICT_CACHE($moddict_cname, result, *dict_cached_value, *dict_version)
@@ -1236,7 +1236,7 @@ static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *); /*proto*/
//@requires: PyObjectGetAttrStr
static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) {
-#if CYTHON_USE_TYPE_SLOTS
+#if CYTHON_USE_TYPE_SLOTS
#if PY_MAJOR_VERSION >= 3
if (likely(PyUnicode_Check(n)))
#else
@@ -1250,7 +1250,7 @@ static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) {
/////////////// PyObjectLookupSpecial.proto ///////////////
//@requires: PyObjectGetAttrStr
-#if CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS
+#if CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS
static CYTHON_INLINE PyObject* __Pyx_PyObject_LookupSpecial(PyObject* obj, PyObject* attr_name) {
PyObject *res;
PyTypeObject *tp = Py_TYPE(obj);
@@ -1276,91 +1276,91 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_LookupSpecial(PyObject* obj, PyObj
#define __Pyx_PyObject_LookupSpecial(o,n) __Pyx_PyObject_GetAttrStr(o,n)
#endif
-
-/////////////// PyObject_GenericGetAttrNoDict.proto ///////////////
-
-// Setting "tp_getattro" to anything but "PyObject_GenericGetAttr" disables fast method calls in Py3.7.
-#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
-static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name);
-#else
-// No-args macro to allow function pointer assignment.
-#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr
-#endif
-
-/////////////// PyObject_GenericGetAttrNoDict ///////////////
-
-#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
-
-static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) {
- PyErr_Format(PyExc_AttributeError,
-#if PY_MAJOR_VERSION >= 3
- "'%.50s' object has no attribute '%U'",
- tp->tp_name, attr_name);
-#else
- "'%.50s' object has no attribute '%.400s'",
- tp->tp_name, PyString_AS_STRING(attr_name));
-#endif
- return NULL;
-}
-
-static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) {
- // Copied and adapted from _PyObject_GenericGetAttrWithDict() in CPython 2.6/3.7.
- // To be used in the "tp_getattro" slot of extension types that have no instance dict and cannot be subclassed.
- PyObject *descr;
- PyTypeObject *tp = Py_TYPE(obj);
-
- if (unlikely(!PyString_Check(attr_name))) {
- return PyObject_GenericGetAttr(obj, attr_name);
- }
-
- assert(!tp->tp_dictoffset);
- descr = _PyType_Lookup(tp, attr_name);
- if (unlikely(!descr)) {
- return __Pyx_RaiseGenericGetAttributeError(tp, attr_name);
- }
-
- Py_INCREF(descr);
-
- #if PY_MAJOR_VERSION < 3
- if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS)))
- #endif
- {
- descrgetfunc f = Py_TYPE(descr)->tp_descr_get;
- // Optimise for the non-descriptor case because it is faster.
- if (unlikely(f)) {
- PyObject *res = f(descr, obj, (PyObject *)tp);
- Py_DECREF(descr);
- return res;
- }
- }
- return descr;
-}
-#endif
-
-
-/////////////// PyObject_GenericGetAttr.proto ///////////////
-
-// Setting "tp_getattro" to anything but "PyObject_GenericGetAttr" disables fast method calls in Py3.7.
-#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
-static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name);
-#else
-// No-args macro to allow function pointer assignment.
-#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr
-#endif
-
-/////////////// PyObject_GenericGetAttr ///////////////
-//@requires: PyObject_GenericGetAttrNoDict
-
-#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
-static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) {
- if (unlikely(Py_TYPE(obj)->tp_dictoffset)) {
- return PyObject_GenericGetAttr(obj, attr_name);
- }
- return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name);
-}
-#endif
-
-
+
+/////////////// PyObject_GenericGetAttrNoDict.proto ///////////////
+
+// Setting "tp_getattro" to anything but "PyObject_GenericGetAttr" disables fast method calls in Py3.7.
+#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
+static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name);
+#else
+// No-args macro to allow function pointer assignment.
+#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr
+#endif
+
+/////////////// PyObject_GenericGetAttrNoDict ///////////////
+
+#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
+
+static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) {
+ PyErr_Format(PyExc_AttributeError,
+#if PY_MAJOR_VERSION >= 3
+ "'%.50s' object has no attribute '%U'",
+ tp->tp_name, attr_name);
+#else
+ "'%.50s' object has no attribute '%.400s'",
+ tp->tp_name, PyString_AS_STRING(attr_name));
+#endif
+ return NULL;
+}
+
+static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) {
+ // Copied and adapted from _PyObject_GenericGetAttrWithDict() in CPython 2.6/3.7.
+ // To be used in the "tp_getattro" slot of extension types that have no instance dict and cannot be subclassed.
+ PyObject *descr;
+ PyTypeObject *tp = Py_TYPE(obj);
+
+ if (unlikely(!PyString_Check(attr_name))) {
+ return PyObject_GenericGetAttr(obj, attr_name);
+ }
+
+ assert(!tp->tp_dictoffset);
+ descr = _PyType_Lookup(tp, attr_name);
+ if (unlikely(!descr)) {
+ return __Pyx_RaiseGenericGetAttributeError(tp, attr_name);
+ }
+
+ Py_INCREF(descr);
+
+ #if PY_MAJOR_VERSION < 3
+ if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS)))
+ #endif
+ {
+ descrgetfunc f = Py_TYPE(descr)->tp_descr_get;
+ // Optimise for the non-descriptor case because it is faster.
+ if (unlikely(f)) {
+ PyObject *res = f(descr, obj, (PyObject *)tp);
+ Py_DECREF(descr);
+ return res;
+ }
+ }
+ return descr;
+}
+#endif
+
+
+/////////////// PyObject_GenericGetAttr.proto ///////////////
+
+// Setting "tp_getattro" to anything but "PyObject_GenericGetAttr" disables fast method calls in Py3.7.
+#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
+static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name);
+#else
+// No-args macro to allow function pointer assignment.
+#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr
+#endif
+
+/////////////// PyObject_GenericGetAttr ///////////////
+//@requires: PyObject_GenericGetAttrNoDict
+
+#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
+static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) {
+ if (unlikely(Py_TYPE(obj)->tp_dictoffset)) {
+ return PyObject_GenericGetAttr(obj, attr_name);
+ }
+ return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name);
+}
+#endif
+
+
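With tp_dictoffset == 0 an attribute can only live on the type, so generic lookup collapses to a single _PyType_Lookup() plus an optional descriptor call. A condensed sketch of that fast path (an illustration, not the code above; assumes attr_name is a str and the type has no instance dict):

    #include <Python.h>

    static PyObject *getattr_no_dict_sketch(PyObject *obj, PyObject *attr_name) {
        PyObject *descr = _PyType_Lookup(Py_TYPE(obj), attr_name); /* borrowed */
        if (descr) {
            descrgetfunc f = Py_TYPE(descr)->tp_descr_get;
            if (f)                         /* method or data descriptor */
                return f(descr, obj, (PyObject *)Py_TYPE(obj));
            Py_INCREF(descr);              /* plain class attribute */
            return descr;
        }
        PyErr_SetObject(PyExc_AttributeError, attr_name);
        return NULL;
    }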
/////////////// PyObjectGetAttrStrNoError.proto ///////////////
static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name);/*proto*/
@@ -1399,14 +1399,14 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, P
/////////////// PyObjectGetAttrStr.proto ///////////////
#if CYTHON_USE_TYPE_SLOTS
-static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name);/*proto*/
-#else
-#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n)
-#endif
-
-/////////////// PyObjectGetAttrStr ///////////////
-
-#if CYTHON_USE_TYPE_SLOTS
+static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name);/*proto*/
+#else
+#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n)
+#endif
+
+/////////////// PyObjectGetAttrStr ///////////////
+
+#if CYTHON_USE_TYPE_SLOTS
static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) {
PyTypeObject* tp = Py_TYPE(obj);
if (likely(tp->tp_getattro))
@@ -1419,20 +1419,20 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject
}
#endif
-
+
/////////////// PyObjectSetAttrStr.proto ///////////////
#if CYTHON_USE_TYPE_SLOTS
-#define __Pyx_PyObject_DelAttrStr(o,n) __Pyx_PyObject_SetAttrStr(o, n, NULL)
-static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr_name, PyObject* value);/*proto*/
-#else
-#define __Pyx_PyObject_DelAttrStr(o,n) PyObject_DelAttr(o,n)
-#define __Pyx_PyObject_SetAttrStr(o,n,v) PyObject_SetAttr(o,n,v)
-#endif
-
-/////////////// PyObjectSetAttrStr ///////////////
-
-#if CYTHON_USE_TYPE_SLOTS
+#define __Pyx_PyObject_DelAttrStr(o,n) __Pyx_PyObject_SetAttrStr(o, n, NULL)
+static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr_name, PyObject* value);/*proto*/
+#else
+#define __Pyx_PyObject_DelAttrStr(o,n) PyObject_DelAttr(o,n)
+#define __Pyx_PyObject_SetAttrStr(o,n,v) PyObject_SetAttr(o,n,v)
+#endif
+
+/////////////// PyObjectSetAttrStr ///////////////
+
+#if CYTHON_USE_TYPE_SLOTS
static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr_name, PyObject* value) {
PyTypeObject* tp = Py_TYPE(obj);
if (likely(tp->tp_setattro))
@@ -1588,7 +1588,7 @@ static int __Pyx_TryUnpackUnboundCMethod(__Pyx_CachedCFunction* target) {
#if CYTHON_COMPILING_IN_CPYTHON
#if PY_MAJOR_VERSION >= 3
 // the method descriptor type isn't exported in Py2.x, so we cannot easily check the type there
- if (likely(__Pyx_TypeCheck(method, &PyMethodDescr_Type)))
+ if (likely(__Pyx_TypeCheck(method, &PyMethodDescr_Type)))
#endif
{
PyMethodDescrObject *descr = (PyMethodDescrObject*) method;
@@ -1605,19 +1605,19 @@ static int __Pyx_TryUnpackUnboundCMethod(__Pyx_CachedCFunction* target) {
static PyObject* __Pyx__CallUnboundCMethod0(__Pyx_CachedCFunction* cfunc, PyObject* self); /*proto*/
#if CYTHON_COMPILING_IN_CPYTHON
-// FASTCALL methods receive "&empty_tuple" as a simple "PyObject[0]*"
+// FASTCALL methods receive "&empty_tuple" as a simple "PyObject[0]*"
#define __Pyx_CallUnboundCMethod0(cfunc, self) \
- (likely((cfunc)->func) ? \
+ (likely((cfunc)->func) ? \
(likely((cfunc)->flag == METH_NOARGS) ? (*((cfunc)->func))(self, NULL) : \
- (PY_VERSION_HEX >= 0x030600B1 && likely((cfunc)->flag == METH_FASTCALL) ? \
- (PY_VERSION_HEX >= 0x030700A0 ? \
+ (PY_VERSION_HEX >= 0x030600B1 && likely((cfunc)->flag == METH_FASTCALL) ? \
+ (PY_VERSION_HEX >= 0x030700A0 ? \
(*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)(cfunc)->func)(self, &$empty_tuple, 0) : \
(*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)(cfunc)->func)(self, &$empty_tuple, 0, NULL)) : \
- (PY_VERSION_HEX >= 0x030700A0 && (cfunc)->flag == (METH_FASTCALL | METH_KEYWORDS) ? \
+ (PY_VERSION_HEX >= 0x030700A0 && (cfunc)->flag == (METH_FASTCALL | METH_KEYWORDS) ? \
(*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)(cfunc)->func)(self, &$empty_tuple, 0, NULL) : \
(likely((cfunc)->flag == (METH_VARARGS | METH_KEYWORDS)) ? ((*(PyCFunctionWithKeywords)(void*)(PyCFunction)(cfunc)->func)(self, $empty_tuple, NULL)) : \
- ((cfunc)->flag == METH_VARARGS ? (*((cfunc)->func))(self, $empty_tuple) : \
- __Pyx__CallUnboundCMethod0(cfunc, self)))))) : \
+ ((cfunc)->flag == METH_VARARGS ? (*((cfunc)->func))(self, $empty_tuple) : \
+ __Pyx__CallUnboundCMethod0(cfunc, self)))))) : \
__Pyx__CallUnboundCMethod0(cfunc, self))
#else
#define __Pyx_CallUnboundCMethod0(cfunc, self) __Pyx__CallUnboundCMethod0(cfunc, self)
@@ -1648,10 +1648,10 @@ bad:
/////////////// CallUnboundCMethod1.proto ///////////////
-static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg);/*proto*/
+static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg);/*proto*/
#if CYTHON_COMPILING_IN_CPYTHON
-static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg);/*proto*/
+static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg);/*proto*/
#else
#define __Pyx_CallUnboundCMethod1(cfunc, self, arg) __Pyx__CallUnboundCMethod1(cfunc, self, arg)
#endif
@@ -1660,30 +1660,30 @@ static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction*
//@requires: UnpackUnboundCMethod
//@requires: PyObjectCall
-#if CYTHON_COMPILING_IN_CPYTHON
-static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg) {
- if (likely(cfunc->func)) {
- int flag = cfunc->flag;
- // Not using #ifdefs for PY_VERSION_HEX to avoid C compiler warnings about unused functions.
- if (flag == METH_O) {
- return (*(cfunc->func))(self, arg);
- } else if (PY_VERSION_HEX >= 0x030600B1 && flag == METH_FASTCALL) {
- if (PY_VERSION_HEX >= 0x030700A0) {
+#if CYTHON_COMPILING_IN_CPYTHON
+static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg) {
+ if (likely(cfunc->func)) {
+ int flag = cfunc->flag;
+ // Not using #ifdefs for PY_VERSION_HEX to avoid C compiler warnings about unused functions.
+ if (flag == METH_O) {
+ return (*(cfunc->func))(self, arg);
+ } else if (PY_VERSION_HEX >= 0x030600B1 && flag == METH_FASTCALL) {
+ if (PY_VERSION_HEX >= 0x030700A0) {
return (*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)cfunc->func)(self, &arg, 1);
- } else {
+ } else {
return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL);
- }
- } else if (PY_VERSION_HEX >= 0x030700A0 && flag == (METH_FASTCALL | METH_KEYWORDS)) {
+ }
+ } else if (PY_VERSION_HEX >= 0x030700A0 && flag == (METH_FASTCALL | METH_KEYWORDS)) {
return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL);
- }
- }
- return __Pyx__CallUnboundCMethod1(cfunc, self, arg);
-}
-#endif
-
+ }
+ }
+ return __Pyx__CallUnboundCMethod1(cfunc, self, arg);
+}
+#endif
+
static PyObject* __Pyx__CallUnboundCMethod1(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg){
PyObject *args, *result = NULL;
- if (unlikely(!cfunc->func && !cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL;
+ if (unlikely(!cfunc->func && !cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL;
#if CYTHON_COMPILING_IN_CPYTHON
if (cfunc->func && (cfunc->flag & METH_VARARGS)) {
args = PyTuple_New(1);
@@ -1714,77 +1714,77 @@ bad:
}
-/////////////// CallUnboundCMethod2.proto ///////////////
-
-static PyObject* __Pyx__CallUnboundCMethod2(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg1, PyObject* arg2); /*proto*/
-
-#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030600B1
-static CYTHON_INLINE PyObject *__Pyx_CallUnboundCMethod2(__Pyx_CachedCFunction *cfunc, PyObject *self, PyObject *arg1, PyObject *arg2); /*proto*/
-#else
-#define __Pyx_CallUnboundCMethod2(cfunc, self, arg1, arg2) __Pyx__CallUnboundCMethod2(cfunc, self, arg1, arg2)
-#endif
-
-/////////////// CallUnboundCMethod2 ///////////////
-//@requires: UnpackUnboundCMethod
-//@requires: PyObjectCall
-
-#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030600B1
-static CYTHON_INLINE PyObject *__Pyx_CallUnboundCMethod2(__Pyx_CachedCFunction *cfunc, PyObject *self, PyObject *arg1, PyObject *arg2) {
- if (likely(cfunc->func)) {
- PyObject *args[2] = {arg1, arg2};
- if (cfunc->flag == METH_FASTCALL) {
- #if PY_VERSION_HEX >= 0x030700A0
+/////////////// CallUnboundCMethod2.proto ///////////////
+
+static PyObject* __Pyx__CallUnboundCMethod2(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg1, PyObject* arg2); /*proto*/
+
+#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030600B1
+static CYTHON_INLINE PyObject *__Pyx_CallUnboundCMethod2(__Pyx_CachedCFunction *cfunc, PyObject *self, PyObject *arg1, PyObject *arg2); /*proto*/
+#else
+#define __Pyx_CallUnboundCMethod2(cfunc, self, arg1, arg2) __Pyx__CallUnboundCMethod2(cfunc, self, arg1, arg2)
+#endif
+
+/////////////// CallUnboundCMethod2 ///////////////
+//@requires: UnpackUnboundCMethod
+//@requires: PyObjectCall
+
+#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030600B1
+static CYTHON_INLINE PyObject *__Pyx_CallUnboundCMethod2(__Pyx_CachedCFunction *cfunc, PyObject *self, PyObject *arg1, PyObject *arg2) {
+ if (likely(cfunc->func)) {
+ PyObject *args[2] = {arg1, arg2};
+ if (cfunc->flag == METH_FASTCALL) {
+ #if PY_VERSION_HEX >= 0x030700A0
return (*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)cfunc->func)(self, args, 2);
- #else
+ #else
return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, 2, NULL);
- #endif
- }
- #if PY_VERSION_HEX >= 0x030700A0
- if (cfunc->flag == (METH_FASTCALL | METH_KEYWORDS))
+ #endif
+ }
+ #if PY_VERSION_HEX >= 0x030700A0
+ if (cfunc->flag == (METH_FASTCALL | METH_KEYWORDS))
return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, 2, NULL);
- #endif
- }
- return __Pyx__CallUnboundCMethod2(cfunc, self, arg1, arg2);
-}
-#endif
-
-static PyObject* __Pyx__CallUnboundCMethod2(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg1, PyObject* arg2){
- PyObject *args, *result = NULL;
- if (unlikely(!cfunc->func && !cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL;
-#if CYTHON_COMPILING_IN_CPYTHON
- if (cfunc->func && (cfunc->flag & METH_VARARGS)) {
- args = PyTuple_New(2);
- if (unlikely(!args)) goto bad;
- Py_INCREF(arg1);
- PyTuple_SET_ITEM(args, 0, arg1);
- Py_INCREF(arg2);
- PyTuple_SET_ITEM(args, 1, arg2);
- if (cfunc->flag & METH_KEYWORDS)
+ #endif
+ }
+ return __Pyx__CallUnboundCMethod2(cfunc, self, arg1, arg2);
+}
+#endif
+
+static PyObject* __Pyx__CallUnboundCMethod2(__Pyx_CachedCFunction* cfunc, PyObject* self, PyObject* arg1, PyObject* arg2){
+ PyObject *args, *result = NULL;
+ if (unlikely(!cfunc->func && !cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL;
+#if CYTHON_COMPILING_IN_CPYTHON
+ if (cfunc->func && (cfunc->flag & METH_VARARGS)) {
+ args = PyTuple_New(2);
+ if (unlikely(!args)) goto bad;
+ Py_INCREF(arg1);
+ PyTuple_SET_ITEM(args, 0, arg1);
+ Py_INCREF(arg2);
+ PyTuple_SET_ITEM(args, 1, arg2);
+ if (cfunc->flag & METH_KEYWORDS)
result = (*(PyCFunctionWithKeywords)(void*)(PyCFunction)cfunc->func)(self, args, NULL);
- else
- result = (*cfunc->func)(self, args);
- } else {
- args = PyTuple_New(3);
- if (unlikely(!args)) goto bad;
- Py_INCREF(self);
- PyTuple_SET_ITEM(args, 0, self);
- Py_INCREF(arg1);
- PyTuple_SET_ITEM(args, 1, arg1);
- Py_INCREF(arg2);
- PyTuple_SET_ITEM(args, 2, arg2);
- result = __Pyx_PyObject_Call(cfunc->method, args, NULL);
- }
-#else
- args = PyTuple_Pack(3, self, arg1, arg2);
- if (unlikely(!args)) goto bad;
- result = __Pyx_PyObject_Call(cfunc->method, args, NULL);
-#endif
-bad:
- Py_XDECREF(args);
- return result;
-}
-
-
+ else
+ result = (*cfunc->func)(self, args);
+ } else {
+ args = PyTuple_New(3);
+ if (unlikely(!args)) goto bad;
+ Py_INCREF(self);
+ PyTuple_SET_ITEM(args, 0, self);
+ Py_INCREF(arg1);
+ PyTuple_SET_ITEM(args, 1, arg1);
+ Py_INCREF(arg2);
+ PyTuple_SET_ITEM(args, 2, arg2);
+ result = __Pyx_PyObject_Call(cfunc->method, args, NULL);
+ }
+#else
+ args = PyTuple_Pack(3, self, arg1, arg2);
+ if (unlikely(!args)) goto bad;
+ result = __Pyx_PyObject_Call(cfunc->method, args, NULL);
+#endif
+bad:
+ Py_XDECREF(args);
+ return result;
+}
+
+
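
The generic fallback above packs (self, arg1, arg2) into a tuple and calls the unbound method object. A minimal sketch of that slow path using only public API calls, with dict.get as a stand-in target (helper name illustrative):

    #include <Python.h>

    /* Sketch: call dict.get(d, key, dflt) through the unbound type
     * method, which is what the tuple-packing fallback above amounts to. */
    static PyObject *call_dict_get(PyObject *d, PyObject *key, PyObject *dflt) {
        PyObject *get = PyObject_GetAttrString((PyObject *)&PyDict_Type, "get");
        PyObject *result;
        if (!get) return NULL;
        result = PyObject_CallFunctionObjArgs(get, d, key, dflt, NULL);
        Py_DECREF(get);
        return result;
    }
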
/////////////// PyObjectCallMethod0.proto ///////////////
static PyObject* __Pyx_PyObject_CallMethod0(PyObject* obj, PyObject* method_name); /*proto*/
@@ -1819,14 +1819,14 @@ static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name
//@requires: PyObjectCallOneArg
//@requires: PyObjectCall2Args
-static PyObject* __Pyx__PyObject_CallMethod1(PyObject* method, PyObject* arg) {
+static PyObject* __Pyx__PyObject_CallMethod1(PyObject* method, PyObject* arg) {
// Separate function to avoid excessive inlining.
PyObject *result = __Pyx_PyObject_CallOneArg(method, arg);
Py_DECREF(method);
- return result;
-}
-
-static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg) {
+ return result;
+}
+
+static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg) {
PyObject *method = NULL, *result;
int is_method = __Pyx_PyObject_GetMethod(obj, method_name, &method);
if (likely(is_method)) {
@@ -1834,7 +1834,7 @@ static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name
Py_DECREF(method);
return result;
}
- if (unlikely(!method)) return NULL;
+ if (unlikely(!method)) return NULL;
return __Pyx__PyObject_CallMethod1(method, arg);
}
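
__Pyx_PyObject_CallMethod1 above saves the temporary bound-method object where it can. A minimal public-API sketch of the behaviour it implements (helper name illustrative):

    #include <Python.h>

    /* Sketch: look `method_name` up on `obj` and call it with one argument. */
    static PyObject *call_method1(PyObject *obj, PyObject *method_name, PyObject *arg) {
        return PyObject_CallMethodObjArgs(obj, method_name, arg, NULL);
    }
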
@@ -2018,7 +2018,7 @@ static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args,
static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na,
PyObject *globals) {
PyFrameObject *f;
- PyThreadState *tstate = __Pyx_PyThreadState_Current;
+ PyThreadState *tstate = __Pyx_PyThreadState_Current;
PyObject **fastlocals;
Py_ssize_t i;
PyObject *result;
@@ -2160,8 +2160,8 @@ done:
Py_LeaveRecursiveCall();
return result;
}
-#endif /* CPython < 3.6 */
-#endif /* CYTHON_FAST_PYCALL */
+#endif /* CPython < 3.6 */
+#endif /* CYTHON_FAST_PYCALL */
/////////////// PyCFunctionFastCall.proto ///////////////
@@ -2179,7 +2179,7 @@ static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, P
PyCFunctionObject *func = (PyCFunctionObject*)func_obj;
PyCFunction meth = PyCFunction_GET_FUNCTION(func);
PyObject *self = PyCFunction_GET_SELF(func);
- int flags = PyCFunction_GET_FLAGS(func);
+ int flags = PyCFunction_GET_FLAGS(func);
assert(PyCFunction_Check(func));
assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS)));
@@ -2191,13 +2191,13 @@ static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, P
caller loses its exception */
assert(!PyErr_Occurred());
- if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) {
+ if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) {
return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL);
- } else {
+ } else {
return (*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs);
- }
+ }
}
-#endif /* CYTHON_FAST_PYCCALL */
+#endif /* CYTHON_FAST_PYCCALL */
/////////////// PyObjectCall2Args.proto ///////////////
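
The FASTCALL paths in this file exist to pass arguments as a plain C array instead of an allocated tuple. CPython 3.9+ exposes the same idea publicly as the vectorcall protocol; a minimal sketch under that version assumption (helper name illustrative):

    #include <Python.h>

    /* Sketch (CPython >= 3.9): call func(a, b) from a stack array,
     * skipping the argument-tuple allocation of the classic protocol. */
    static PyObject *call2_fast(PyObject *func, PyObject *a, PyObject *b) {
        PyObject *args[2] = {a, b};
        return PyObject_Vectorcall(func, args, 2, NULL);
    }
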
diff --git a/contrib/tools/cython/Cython/Utility/Optimize.c b/contrib/tools/cython/Cython/Utility/Optimize.c
index 9ecdd14176..d18c9b78ec 100644
--- a/contrib/tools/cython/Cython/Utility/Optimize.c
+++ b/contrib/tools/cython/Cython/Utility/Optimize.c
@@ -198,8 +198,8 @@ static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObjec
value = default_value;
}
Py_INCREF(value);
- // avoid C compiler warning about unused utility functions
- if ((1));
+ // avoid C compiler warning about unused utility functions
+ if ((1));
#else
if (PyString_CheckExact(key) || PyUnicode_CheckExact(key) || PyInt_CheckExact(key)) {
/* these presumably have safe hash functions */
@@ -208,13 +208,13 @@ static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObjec
value = default_value;
}
Py_INCREF(value);
- }
-#endif
- else {
+ }
+#endif
+ else {
if (default_value == Py_None)
- value = CALL_UNBOUND_METHOD(PyDict_Type, "get", d, key);
- else
- value = CALL_UNBOUND_METHOD(PyDict_Type, "get", d, key, default_value);
+ value = CALL_UNBOUND_METHOD(PyDict_Type, "get", d, key);
+ else
+ value = CALL_UNBOUND_METHOD(PyDict_Type, "get", d, key, default_value);
}
return value;
}
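
For reference, the dict.get(key, default) semantics implemented above reduce to one lookup plus an error check in the public API; a minimal sketch (helper name illustrative):

    #include <Python.h>

    /* Sketch: d.get(key, dflt). PyDict_GetItemWithError returns a borrowed
     * reference, and NULL both for a missing key and for real errors such
     * as an unhashable key, so the two cases must be told apart. */
    static PyObject *dict_get_default(PyObject *d, PyObject *key, PyObject *dflt) {
        PyObject *value = PyDict_GetItemWithError(d, key);
        if (!value) {
            if (PyErr_Occurred())
                return NULL;
            value = dflt;
        }
        Py_INCREF(value);
        return value;
    }
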
@@ -231,7 +231,7 @@ static CYTHON_INLINE PyObject *__Pyx_PyDict_SetDefault(PyObject *d, PyObject *ke
PyObject* value;
#if PY_VERSION_HEX >= 0x030400A0
// we keep the method call at the end to avoid "unused" C compiler warnings
- if ((1)) {
+ if ((1)) {
value = PyDict_SetDefault(d, key, default_value);
if (unlikely(!value)) return NULL;
Py_INCREF(value);
@@ -261,7 +261,7 @@ static CYTHON_INLINE PyObject *__Pyx_PyDict_SetDefault(PyObject *d, PyObject *ke
#endif
#endif
} else {
- value = CALL_UNBOUND_METHOD(PyDict_Type, "setdefault", d, key, default_value);
+ value = CALL_UNBOUND_METHOD(PyDict_Type, "setdefault", d, key, default_value);
}
return value;
}
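
The fast branch above is PyDict_SetDefault, which is why the hunk is guarded on CPython >= 3.4; a minimal usage sketch (helper name illustrative):

    #include <Python.h>

    /* Sketch (CPython >= 3.4): dict.setdefault(key, dflt) in one call.
     * The result is a borrowed reference, so take ownership explicitly. */
    static PyObject *setdefault_owned(PyObject *d, PyObject *key, PyObject *dflt) {
        PyObject *value = PyDict_SetDefault(d, key, dflt);
        Py_XINCREF(value);
        return value;
    }
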
@@ -271,28 +271,28 @@ static CYTHON_INLINE PyObject *__Pyx_PyDict_SetDefault(PyObject *d, PyObject *ke
#define __Pyx_PyDict_Clear(d) (PyDict_Clear(d), 0)
-
-/////////////// py_dict_pop.proto ///////////////
-
-static CYTHON_INLINE PyObject *__Pyx_PyDict_Pop(PyObject *d, PyObject *key, PyObject *default_value); /*proto*/
-
-/////////////// py_dict_pop ///////////////
-
-static CYTHON_INLINE PyObject *__Pyx_PyDict_Pop(PyObject *d, PyObject *key, PyObject *default_value) {
-#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX > 0x030600B3
- if ((1)) {
- return _PyDict_Pop(d, key, default_value);
- } else
- // avoid "function unused" warnings
-#endif
- if (default_value) {
- return CALL_UNBOUND_METHOD(PyDict_Type, "pop", d, key, default_value);
- } else {
- return CALL_UNBOUND_METHOD(PyDict_Type, "pop", d, key);
- }
-}
-
-
+
+/////////////// py_dict_pop.proto ///////////////
+
+static CYTHON_INLINE PyObject *__Pyx_PyDict_Pop(PyObject *d, PyObject *key, PyObject *default_value); /*proto*/
+
+/////////////// py_dict_pop ///////////////
+
+static CYTHON_INLINE PyObject *__Pyx_PyDict_Pop(PyObject *d, PyObject *key, PyObject *default_value) {
+#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX > 0x030600B3
+ if ((1)) {
+ return _PyDict_Pop(d, key, default_value);
+ } else
+ // avoid "function unused" warnings
+#endif
+ if (default_value) {
+ return CALL_UNBOUND_METHOD(PyDict_Type, "pop", d, key, default_value);
+ } else {
+ return CALL_UNBOUND_METHOD(PyDict_Type, "pop", d, key);
+ }
+}
+
+
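
_PyDict_Pop above is a private CPython helper, so the code keeps a portable fallback that calls dict.pop as an unbound method. A minimal sketch of that path over the public API (helper name illustrative):

    #include <Python.h>

    /* Sketch: d.pop(key[, dflt]) through the unbound type method,
     * mirroring the CALL_UNBOUND_METHOD branch of __Pyx_PyDict_Pop. */
    static PyObject *dict_pop(PyObject *d, PyObject *key, PyObject *dflt) {
        PyObject *pop = PyObject_GetAttrString((PyObject *)&PyDict_Type, "pop");
        PyObject *result;
        if (!pop) return NULL;
        result = dflt ? PyObject_CallFunctionObjArgs(pop, d, key, dflt, NULL)
                      : PyObject_CallFunctionObjArgs(pop, d, key, NULL);
        Py_DECREF(pop);
        return result;
    }
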
/////////////// dict_iter.proto ///////////////
static CYTHON_INLINE PyObject* __Pyx_dict_iterator(PyObject* dict, int is_dict, PyObject* method_name,
@@ -309,31 +309,31 @@ static CYTHON_INLINE PyObject* __Pyx_dict_iterator(PyObject* iterable, int is_di
Py_ssize_t* p_orig_length, int* p_source_is_dict) {
is_dict = is_dict || likely(PyDict_CheckExact(iterable));
*p_source_is_dict = is_dict;
- if (is_dict) {
+ if (is_dict) {
#if !CYTHON_COMPILING_IN_PYPY
*p_orig_length = PyDict_Size(iterable);
Py_INCREF(iterable);
return iterable;
-#elif PY_MAJOR_VERSION >= 3
-    // On PyPy3, we need to manually translate a few method names.
- // This logic is not needed on CPython thanks to the fast case above.
- static PyObject *py_items = NULL, *py_keys = NULL, *py_values = NULL;
- PyObject **pp = NULL;
- if (method_name) {
- const char *name = PyUnicode_AsUTF8(method_name);
- if (strcmp(name, "iteritems") == 0) pp = &py_items;
- else if (strcmp(name, "iterkeys") == 0) pp = &py_keys;
- else if (strcmp(name, "itervalues") == 0) pp = &py_values;
- if (pp) {
- if (!*pp) {
- *pp = PyUnicode_FromString(name + 4);
- if (!*pp)
- return NULL;
- }
- method_name = *pp;
- }
- }
-#endif
+#elif PY_MAJOR_VERSION >= 3
+    // On PyPy3, we need to manually translate a few method names.
+ // This logic is not needed on CPython thanks to the fast case above.
+ static PyObject *py_items = NULL, *py_keys = NULL, *py_values = NULL;
+ PyObject **pp = NULL;
+ if (method_name) {
+ const char *name = PyUnicode_AsUTF8(method_name);
+ if (strcmp(name, "iteritems") == 0) pp = &py_items;
+ else if (strcmp(name, "iterkeys") == 0) pp = &py_keys;
+ else if (strcmp(name, "itervalues") == 0) pp = &py_values;
+ if (pp) {
+ if (!*pp) {
+ *pp = PyUnicode_FromString(name + 4);
+ if (!*pp)
+ return NULL;
+ }
+ method_name = *pp;
+ }
+ }
+#endif
}
*p_orig_length = 0;
if (method_name) {
@@ -421,143 +421,143 @@ static CYTHON_INLINE int __Pyx_dict_iter_next(
}
-/////////////// set_iter.proto ///////////////
-
-static CYTHON_INLINE PyObject* __Pyx_set_iterator(PyObject* iterable, int is_set,
- Py_ssize_t* p_orig_length, int* p_source_is_set); /*proto*/
-static CYTHON_INLINE int __Pyx_set_iter_next(
- PyObject* iter_obj, Py_ssize_t orig_length,
- Py_ssize_t* ppos, PyObject **value,
- int source_is_set); /*proto*/
-
-/////////////// set_iter ///////////////
-//@requires: ObjectHandling.c::IterFinish
-
-static CYTHON_INLINE PyObject* __Pyx_set_iterator(PyObject* iterable, int is_set,
- Py_ssize_t* p_orig_length, int* p_source_is_set) {
-#if CYTHON_COMPILING_IN_CPYTHON
- is_set = is_set || likely(PySet_CheckExact(iterable) || PyFrozenSet_CheckExact(iterable));
- *p_source_is_set = is_set;
+/////////////// set_iter.proto ///////////////
+
+static CYTHON_INLINE PyObject* __Pyx_set_iterator(PyObject* iterable, int is_set,
+ Py_ssize_t* p_orig_length, int* p_source_is_set); /*proto*/
+static CYTHON_INLINE int __Pyx_set_iter_next(
+ PyObject* iter_obj, Py_ssize_t orig_length,
+ Py_ssize_t* ppos, PyObject **value,
+ int source_is_set); /*proto*/
+
+/////////////// set_iter ///////////////
+//@requires: ObjectHandling.c::IterFinish
+
+static CYTHON_INLINE PyObject* __Pyx_set_iterator(PyObject* iterable, int is_set,
+ Py_ssize_t* p_orig_length, int* p_source_is_set) {
+#if CYTHON_COMPILING_IN_CPYTHON
+ is_set = is_set || likely(PySet_CheckExact(iterable) || PyFrozenSet_CheckExact(iterable));
+ *p_source_is_set = is_set;
if (likely(is_set)) {
*p_orig_length = PySet_Size(iterable);
Py_INCREF(iterable);
return iterable;
}
-#else
- (void)is_set;
- *p_source_is_set = 0;
+#else
+ (void)is_set;
+ *p_source_is_set = 0;
#endif
- *p_orig_length = 0;
- return PyObject_GetIter(iterable);
-}
-
-static CYTHON_INLINE int __Pyx_set_iter_next(
- PyObject* iter_obj, Py_ssize_t orig_length,
- Py_ssize_t* ppos, PyObject **value,
- int source_is_set) {
- if (!CYTHON_COMPILING_IN_CPYTHON || unlikely(!source_is_set)) {
- *value = PyIter_Next(iter_obj);
- if (unlikely(!*value)) {
- return __Pyx_IterFinish();
- }
- (void)orig_length;
- (void)ppos;
+ *p_orig_length = 0;
+ return PyObject_GetIter(iterable);
+}
+
+static CYTHON_INLINE int __Pyx_set_iter_next(
+ PyObject* iter_obj, Py_ssize_t orig_length,
+ Py_ssize_t* ppos, PyObject **value,
+ int source_is_set) {
+ if (!CYTHON_COMPILING_IN_CPYTHON || unlikely(!source_is_set)) {
+ *value = PyIter_Next(iter_obj);
+ if (unlikely(!*value)) {
+ return __Pyx_IterFinish();
+ }
+ (void)orig_length;
+ (void)ppos;
return 1;
- }
-#if CYTHON_COMPILING_IN_CPYTHON
- if (unlikely(PySet_GET_SIZE(iter_obj) != orig_length)) {
- PyErr_SetString(
- PyExc_RuntimeError,
- "set changed size during iteration");
- return -1;
- }
- {
- Py_hash_t hash;
- int ret = _PySet_NextEntry(iter_obj, ppos, value, &hash);
- // CPython does not raise errors here, only if !isinstance(iter_obj, set/frozenset)
- assert (ret != -1);
- if (likely(ret)) {
- Py_INCREF(*value);
- return 1;
- }
- }
-#endif
+ }
+#if CYTHON_COMPILING_IN_CPYTHON
+ if (unlikely(PySet_GET_SIZE(iter_obj) != orig_length)) {
+ PyErr_SetString(
+ PyExc_RuntimeError,
+ "set changed size during iteration");
+ return -1;
+ }
+ {
+ Py_hash_t hash;
+ int ret = _PySet_NextEntry(iter_obj, ppos, value, &hash);
+ // CPython does not raise errors here, only if !isinstance(iter_obj, set/frozenset)
+ assert (ret != -1);
+ if (likely(ret)) {
+ Py_INCREF(*value);
+ return 1;
+ }
+ }
+#endif
+ return 0;
+}
+
+/////////////// py_set_discard_unhashable ///////////////
+//@requires: Builtins.c::pyfrozenset_new
+
+static int __Pyx_PySet_DiscardUnhashable(PyObject *set, PyObject *key) {
+ PyObject *tmpkey;
+ int rv;
+
+ if (likely(!PySet_Check(key) || !PyErr_ExceptionMatches(PyExc_TypeError)))
+ return -1;
+ PyErr_Clear();
+ tmpkey = __Pyx_PyFrozenSet_New(key);
+ if (tmpkey == NULL)
+ return -1;
+ rv = PySet_Discard(set, tmpkey);
+ Py_DECREF(tmpkey);
+ return rv;
+}
+
+
+/////////////// py_set_discard.proto ///////////////
+
+static CYTHON_INLINE int __Pyx_PySet_Discard(PyObject *set, PyObject *key); /*proto*/
+
+/////////////// py_set_discard ///////////////
+//@requires: py_set_discard_unhashable
+
+static CYTHON_INLINE int __Pyx_PySet_Discard(PyObject *set, PyObject *key) {
+ int found = PySet_Discard(set, key);
+ // Convert *key* to frozenset if necessary
+ if (unlikely(found < 0)) {
+ found = __Pyx_PySet_DiscardUnhashable(set, key);
+ }
+ // note: returns -1 on error, 0 (not found) or 1 (found) otherwise => error check for -1 or < 0 works
+ return found;
+}
+
+
+/////////////// py_set_remove.proto ///////////////
+
+static CYTHON_INLINE int __Pyx_PySet_Remove(PyObject *set, PyObject *key); /*proto*/
+
+/////////////// py_set_remove ///////////////
+//@requires: py_set_discard_unhashable
+
+static int __Pyx_PySet_RemoveNotFound(PyObject *set, PyObject *key, int found) {
+ // Convert *key* to frozenset if necessary
+ if (unlikely(found < 0)) {
+ found = __Pyx_PySet_DiscardUnhashable(set, key);
+ }
+ if (likely(found == 0)) {
+ // Not found
+ PyObject *tup;
+ tup = PyTuple_Pack(1, key);
+ if (!tup)
+ return -1;
+ PyErr_SetObject(PyExc_KeyError, tup);
+ Py_DECREF(tup);
+ return -1;
+ }
+ // note: returns -1 on error, 0 (not found) or 1 (found) otherwise => error check for -1 or < 0 works
+ return found;
+}
+
+static CYTHON_INLINE int __Pyx_PySet_Remove(PyObject *set, PyObject *key) {
+ int found = PySet_Discard(set, key);
+ if (unlikely(found != 1)) {
+ // note: returns -1 on error, 0 (not found) or 1 (found) otherwise => error check for -1 or < 0 works
+ return __Pyx_PySet_RemoveNotFound(set, key, found);
+ }
return 0;
-}
-
-/////////////// py_set_discard_unhashable ///////////////
-//@requires: Builtins.c::pyfrozenset_new
-
-static int __Pyx_PySet_DiscardUnhashable(PyObject *set, PyObject *key) {
- PyObject *tmpkey;
- int rv;
-
- if (likely(!PySet_Check(key) || !PyErr_ExceptionMatches(PyExc_TypeError)))
- return -1;
- PyErr_Clear();
- tmpkey = __Pyx_PyFrozenSet_New(key);
- if (tmpkey == NULL)
- return -1;
- rv = PySet_Discard(set, tmpkey);
- Py_DECREF(tmpkey);
- return rv;
-}
-
-
-/////////////// py_set_discard.proto ///////////////
-
-static CYTHON_INLINE int __Pyx_PySet_Discard(PyObject *set, PyObject *key); /*proto*/
-
-/////////////// py_set_discard ///////////////
-//@requires: py_set_discard_unhashable
-
-static CYTHON_INLINE int __Pyx_PySet_Discard(PyObject *set, PyObject *key) {
- int found = PySet_Discard(set, key);
- // Convert *key* to frozenset if necessary
- if (unlikely(found < 0)) {
- found = __Pyx_PySet_DiscardUnhashable(set, key);
- }
- // note: returns -1 on error, 0 (not found) or 1 (found) otherwise => error check for -1 or < 0 works
- return found;
-}
-
-
-/////////////// py_set_remove.proto ///////////////
-
-static CYTHON_INLINE int __Pyx_PySet_Remove(PyObject *set, PyObject *key); /*proto*/
-
-/////////////// py_set_remove ///////////////
-//@requires: py_set_discard_unhashable
-
-static int __Pyx_PySet_RemoveNotFound(PyObject *set, PyObject *key, int found) {
- // Convert *key* to frozenset if necessary
- if (unlikely(found < 0)) {
- found = __Pyx_PySet_DiscardUnhashable(set, key);
- }
- if (likely(found == 0)) {
- // Not found
- PyObject *tup;
- tup = PyTuple_Pack(1, key);
- if (!tup)
- return -1;
- PyErr_SetObject(PyExc_KeyError, tup);
- Py_DECREF(tup);
- return -1;
- }
- // note: returns -1 on error, 0 (not found) or 1 (found) otherwise => error check for -1 or < 0 works
- return found;
-}
-
-static CYTHON_INLINE int __Pyx_PySet_Remove(PyObject *set, PyObject *key) {
- int found = PySet_Discard(set, key);
- if (unlikely(found != 1)) {
- // note: returns -1 on error, 0 (not found) or 1 (found) otherwise => error check for -1 or < 0 works
- return __Pyx_PySet_RemoveNotFound(set, key, found);
- }
- return 0;
-}
-
-
+}
+
+
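
The two helpers restored above differ only in the not-found case: discard returns quietly while remove raises KeyError. A minimal sketch over the public API (helper name illustrative):

    #include <Python.h>

    /* Sketch: set.remove(key) on top of PySet_Discard, which returns
     * 1 (removed), 0 (absent) or -1 (error). The key is wrapped in a
     * 1-tuple, as above, so tuple keys survive KeyError unpacking. */
    static int set_remove(PyObject *set, PyObject *key) {
        int found = PySet_Discard(set, key);
        if (found == 1) return 0;
        if (found == 0) {
            PyObject *tup = PyTuple_Pack(1, key);
            if (tup) {
                PyErr_SetObject(PyExc_KeyError, tup);
                Py_DECREF(tup);
            }
        }
        return -1;
    }
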
/////////////// unicode_iter.proto ///////////////
static CYTHON_INLINE int __Pyx_init_unicode_iteration(
@@ -866,7 +866,7 @@ static {{c_ret_type}} {{cfunc_name}}(PyObject *op1, PyObject *op2, CYTHON_UNUSED
return PyInt_FromLong(x);
{{elif op == 'TrueDivide'}}
{{zerodiv_check('b')}}
- if (8 * sizeof(long) <= 53 || likely(labs({{ival}}) <= ((PY_LONG_LONG)1 << 53))) {
+ if (8 * sizeof(long) <= 53 || likely(labs({{ival}}) <= ((PY_LONG_LONG)1 << 53))) {
return PyFloat_FromDouble((double)a / (double)b);
}
// let Python do the rounding
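
The 1 << 53 bound above is the point up to which an IEEE-754 double represents every integer exactly, so converting both operands and dividing rounds at most once. A worked sketch of the guard (helper name illustrative):

    #include <assert.h>
    #include <stdlib.h>

    /* Sketch: true division is correct to one rounding only while both
     * operands fit in the 53-bit significand of a double. */
    static double exact_true_divide(long long a, long long b) {
        assert(b != 0);
        assert(llabs(a) <= (1LL << 53) && llabs(b) <= (1LL << 53));
        return (double)a / (double)b;
    }
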
@@ -929,7 +929,7 @@ static {{c_ret_type}} {{cfunc_name}}(PyObject *op1, PyObject *op2, CYTHON_UNUSED
{{endif}}
}
// if size doesn't fit into a long or PY_LONG_LONG anymore, fall through to default
- CYTHON_FALLTHROUGH;
+ CYTHON_FALLTHROUGH;
{{endfor}}
{{endfor}}
@@ -961,7 +961,7 @@ static {{c_ret_type}} {{cfunc_name}}(PyObject *op1, PyObject *op2, CYTHON_UNUSED
x += ((x != 0) & ((x ^ b) < 0)) * b;
{{elif op == 'TrueDivide'}}
{{zerodiv_check('b')}}
- if ((8 * sizeof(long) <= 53 || likely(labs({{ival}}) <= ((PY_LONG_LONG)1 << 53)))
+ if ((8 * sizeof(long) <= 53 || likely(labs({{ival}}) <= ((PY_LONG_LONG)1 << 53)))
|| __Pyx_sst_abs(size) <= 52 / PyLong_SHIFT) {
return PyFloat_FromDouble((double)a / (double)b);
}
@@ -1132,7 +1132,7 @@ static {{c_ret_type}} {{cfunc_name}}(PyObject *op1, PyObject *op2, double floatv
if (8 * sizeof(unsigned long) > {{_size}} * PyLong_SHIFT && ((8 * sizeof(unsigned long) < 53) || ({{_size-1}} * PyLong_SHIFT < 53))) {
{{fval}} = (double) {{pylong_join(_size, 'digits')}};
// let CPython do its own float rounding from 2**53 on (max. consecutive integer in double float)
- if ((8 * sizeof(unsigned long) < 53) || ({{_size}} * PyLong_SHIFT < 53) || ({{fval}} < (double) ((PY_LONG_LONG)1 << 53))) {
+ if ((8 * sizeof(unsigned long) < 53) || ({{_size}} * PyLong_SHIFT < 53) || ({{fval}} < (double) ((PY_LONG_LONG)1 << 53))) {
if (size == {{-_size}})
{{fval}} = -{{fval}};
break;
@@ -1143,7 +1143,7 @@ static {{c_ret_type}} {{cfunc_name}}(PyObject *op1, PyObject *op2, double floatv
// check above. However, the number of digits that CPython uses for a given PyLong
// value is minimal, and together with the "(size-1) * SHIFT < 53" check above,
// this should make it safe.
- CYTHON_FALLTHROUGH;
+ CYTHON_FALLTHROUGH;
{{endfor}}
default:
#else
diff --git a/contrib/tools/cython/Cython/Utility/Overflow.c b/contrib/tools/cython/Cython/Utility/Overflow.c
index 171a3b8470..0259c58f01 100644
--- a/contrib/tools/cython/Cython/Utility/Overflow.c
+++ b/contrib/tools/cython/Cython/Utility/Overflow.c
@@ -1,7 +1,7 @@
/*
These functions provide integer arithmetic with overflow checking. They do not
actually raise an exception when an overflow is detected, but rather set a bit
-in the overflow parameter. (This parameter may be re-used across several
+in the overflow parameter. (This parameter may be re-used across several
arithmetic operations, so should be or-ed rather than assigned to.)
The implementation is divided into two parts, the signed and unsigned basecases,
@@ -273,7 +273,7 @@ static CYTHON_INLINE {{TYPE}} __Pyx_{{BINOP}}_{{NAME}}_checking_overflow({{TYPE}
return ({{TYPE}}) __Pyx_{{BINOP}}_unsigned_long_long_checking_overflow(a, b, overflow);
#endif
} else {
- abort(); return 0; /* handled elsewhere */
+ abort(); return 0; /* handled elsewhere */
}
} else {
if ((sizeof({{TYPE}}) == sizeof(int))) {
@@ -285,7 +285,7 @@ static CYTHON_INLINE {{TYPE}} __Pyx_{{BINOP}}_{{NAME}}_checking_overflow({{TYPE}
return ({{TYPE}}) __Pyx_{{BINOP}}_long_long_checking_overflow(a, b, overflow);
#endif
} else {
- abort(); return 0; /* handled elsewhere */
+ abort(); return 0; /* handled elsewhere */
}
}
}
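
On GCC and Clang the per-size dispatch above can be expressed with a compiler builtin (a toolchain assumption, not something this file relies on); note how the result is OR-ed into the flag, as the header comment requires:

    /* Sketch (GCC/Clang builtin): overflow-checked addition that
     * accumulates into *overflow with |= instead of assigning. */
    static long checked_add(long a, long b, int *overflow) {
        long result;
        *overflow |= __builtin_add_overflow(a, b, &result);
        return result;
    }
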
diff --git a/contrib/tools/cython/Cython/Utility/Profile.c b/contrib/tools/cython/Cython/Utility/Profile.c
index 90453293e2..921eb67529 100644
--- a/contrib/tools/cython/Cython/Utility/Profile.c
+++ b/contrib/tools/cython/Cython/Utility/Profile.c
@@ -1,5 +1,5 @@
/////////////// Profile.proto ///////////////
-//@requires: Exceptions.c::PyErrFetchRestore
+//@requires: Exceptions.c::PyErrFetchRestore
//@substitute: naming
// Note that CPython ignores PyTrace_EXCEPTION,
@@ -105,9 +105,9 @@
if (CYTHON_TRACE_NOGIL) { \
PyThreadState *tstate; \
PyGILState_STATE state = PyGILState_Ensure(); \
- tstate = __Pyx_PyThreadState_Current; \
+ tstate = __Pyx_PyThreadState_Current; \
if (__Pyx_IsTracing(tstate, 1, 1)) { \
- __Pyx_use_tracing = __Pyx_TraceSetupAndCall(&$frame_code_cname, &$frame_cname, tstate, funcname, srcfile, firstlineno); \
+ __Pyx_use_tracing = __Pyx_TraceSetupAndCall(&$frame_code_cname, &$frame_cname, tstate, funcname, srcfile, firstlineno); \
} \
PyGILState_Release(state); \
if (unlikely(__Pyx_use_tracing < 0)) goto_error; \
@@ -115,7 +115,7 @@
} else { \
PyThreadState* tstate = PyThreadState_GET(); \
if (__Pyx_IsTracing(tstate, 1, 1)) { \
- __Pyx_use_tracing = __Pyx_TraceSetupAndCall(&$frame_code_cname, &$frame_cname, tstate, funcname, srcfile, firstlineno); \
+ __Pyx_use_tracing = __Pyx_TraceSetupAndCall(&$frame_code_cname, &$frame_cname, tstate, funcname, srcfile, firstlineno); \
if (unlikely(__Pyx_use_tracing < 0)) goto_error; \
} \
}
@@ -123,7 +123,7 @@
#define __Pyx_TraceCall(funcname, srcfile, firstlineno, nogil, goto_error) \
{ PyThreadState* tstate = PyThreadState_GET(); \
if (__Pyx_IsTracing(tstate, 1, 1)) { \
- __Pyx_use_tracing = __Pyx_TraceSetupAndCall(&$frame_code_cname, &$frame_cname, tstate, funcname, srcfile, firstlineno); \
+ __Pyx_use_tracing = __Pyx_TraceSetupAndCall(&$frame_code_cname, &$frame_cname, tstate, funcname, srcfile, firstlineno); \
if (unlikely(__Pyx_use_tracing < 0)) goto_error; \
} \
}
@@ -131,7 +131,7 @@
#define __Pyx_TraceException() \
if (likely(!__Pyx_use_tracing)); else { \
- PyThreadState* tstate = __Pyx_PyThreadState_Current; \
+ PyThreadState* tstate = __Pyx_PyThreadState_Current; \
if (__Pyx_IsTracing(tstate, 0, 1)) { \
__Pyx_EnterTracing(tstate); \
PyObject *exc_info = __Pyx_GetExceptionTuple(tstate); \
@@ -149,7 +149,7 @@
static void __Pyx_call_return_trace_func(PyThreadState *tstate, PyFrameObject *frame, PyObject *result) {
PyObject *type, *value, *traceback;
- __Pyx_ErrFetchInState(tstate, &type, &value, &traceback);
+ __Pyx_ErrFetchInState(tstate, &type, &value, &traceback);
__Pyx_EnterTracing(tstate);
if (CYTHON_TRACE && tstate->c_tracefunc)
tstate->c_tracefunc(tstate->c_traceobj, frame, PyTrace_RETURN, result);
@@ -157,7 +157,7 @@
tstate->c_profilefunc(tstate->c_profileobj, frame, PyTrace_RETURN, result);
CYTHON_FRAME_DEL(frame);
__Pyx_LeaveTracing(tstate);
- __Pyx_ErrRestoreInState(tstate, type, value, traceback);
+ __Pyx_ErrRestoreInState(tstate, type, value, traceback);
}
#ifdef WITH_THREAD
@@ -167,14 +167,14 @@
if (CYTHON_TRACE_NOGIL) { \
PyThreadState *tstate; \
PyGILState_STATE state = PyGILState_Ensure(); \
- tstate = __Pyx_PyThreadState_Current; \
+ tstate = __Pyx_PyThreadState_Current; \
if (__Pyx_IsTracing(tstate, 0, 0)) { \
__Pyx_call_return_trace_func(tstate, $frame_cname, (PyObject*)result); \
} \
PyGILState_Release(state); \
} \
} else { \
- PyThreadState* tstate = __Pyx_PyThreadState_Current; \
+ PyThreadState* tstate = __Pyx_PyThreadState_Current; \
if (__Pyx_IsTracing(tstate, 0, 0)) { \
__Pyx_call_return_trace_func(tstate, $frame_cname, (PyObject*)result); \
} \
@@ -183,7 +183,7 @@
#else
#define __Pyx_TraceReturn(result, nogil) \
if (likely(!__Pyx_use_tracing)); else { \
- PyThreadState* tstate = __Pyx_PyThreadState_Current; \
+ PyThreadState* tstate = __Pyx_PyThreadState_Current; \
if (__Pyx_IsTracing(tstate, 0, 0)) { \
__Pyx_call_return_trace_func(tstate, $frame_cname, (PyObject*)result); \
} \
@@ -191,14 +191,14 @@
#endif
static PyCodeObject *__Pyx_createFrameCodeObject(const char *funcname, const char *srcfile, int firstlineno); /*proto*/
- static int __Pyx_TraceSetupAndCall(PyCodeObject** code, PyFrameObject** frame, PyThreadState* tstate, const char *funcname, const char *srcfile, int firstlineno); /*proto*/
+ static int __Pyx_TraceSetupAndCall(PyCodeObject** code, PyFrameObject** frame, PyThreadState* tstate, const char *funcname, const char *srcfile, int firstlineno); /*proto*/
#else
#define __Pyx_TraceDeclarations
#define __Pyx_TraceFrameInit(codeobj)
// mark error label as used to avoid compiler warnings
- #define __Pyx_TraceCall(funcname, srcfile, firstlineno, nogil, goto_error) if ((1)); else goto_error;
+ #define __Pyx_TraceCall(funcname, srcfile, firstlineno, nogil, goto_error) if ((1)); else goto_error;
#define __Pyx_TraceException()
#define __Pyx_TraceReturn(result, nogil)
@@ -209,7 +209,7 @@
static int __Pyx_call_line_trace_func(PyThreadState *tstate, PyFrameObject *frame, int lineno) {
int ret;
PyObject *type, *value, *traceback;
- __Pyx_ErrFetchInState(tstate, &type, &value, &traceback);
+ __Pyx_ErrFetchInState(tstate, &type, &value, &traceback);
__Pyx_PyFrame_SetLineNumber(frame, lineno);
__Pyx_EnterTracing(tstate);
@@ -217,7 +217,7 @@
__Pyx_LeaveTracing(tstate);
if (likely(!ret)) {
- __Pyx_ErrRestoreInState(tstate, type, value, traceback);
+ __Pyx_ErrRestoreInState(tstate, type, value, traceback);
} else {
Py_XDECREF(type);
Py_XDECREF(value);
@@ -229,46 +229,46 @@
#ifdef WITH_THREAD
#define __Pyx_TraceLine(lineno, nogil, goto_error) \
if (likely(!__Pyx_use_tracing)); else { \
- // mark error label as used to avoid compiler warnings \
- if ((1)); else goto_error; \
+ // mark error label as used to avoid compiler warnings \
+ if ((1)); else goto_error; \
if (nogil) { \
if (CYTHON_TRACE_NOGIL) { \
int ret = 0; \
PyThreadState *tstate; \
PyGILState_STATE state = PyGILState_Ensure(); \
- tstate = __Pyx_PyThreadState_Current; \
+ tstate = __Pyx_PyThreadState_Current; \
if (__Pyx_IsTracing(tstate, 0, 0) && tstate->c_tracefunc && $frame_cname->f_trace) { \
ret = __Pyx_call_line_trace_func(tstate, $frame_cname, lineno); \
} \
PyGILState_Release(state); \
- // XXX https://github.com/cython/cython/issues/2274 \
- if (unlikely(ret)) { fprintf(stderr, "cython: line_trace_func returned %d\n", ret); } \
+ // XXX https://github.com/cython/cython/issues/2274 \
+ if (unlikely(ret)) { fprintf(stderr, "cython: line_trace_func returned %d\n", ret); } \
} \
} else { \
- PyThreadState* tstate = __Pyx_PyThreadState_Current; \
+ PyThreadState* tstate = __Pyx_PyThreadState_Current; \
if (__Pyx_IsTracing(tstate, 0, 0) && tstate->c_tracefunc && $frame_cname->f_trace) { \
int ret = __Pyx_call_line_trace_func(tstate, $frame_cname, lineno); \
- // XXX https://github.com/cython/cython/issues/2274 \
- if (unlikely(ret)) { fprintf(stderr, "cython: line_trace_func returned %d\n", ret); } \
+ // XXX https://github.com/cython/cython/issues/2274 \
+ if (unlikely(ret)) { fprintf(stderr, "cython: line_trace_func returned %d\n", ret); } \
} \
} \
}
#else
#define __Pyx_TraceLine(lineno, nogil, goto_error) \
if (likely(!__Pyx_use_tracing)); else { \
- // mark error label as used to avoid compiler warnings \
- if ((1)); else goto_error; \
- PyThreadState* tstate = __Pyx_PyThreadState_Current; \
+ // mark error label as used to avoid compiler warnings \
+ if ((1)); else goto_error; \
+ PyThreadState* tstate = __Pyx_PyThreadState_Current; \
if (__Pyx_IsTracing(tstate, 0, 0) && tstate->c_tracefunc && $frame_cname->f_trace) { \
int ret = __Pyx_call_line_trace_func(tstate, $frame_cname, lineno); \
- // XXX https://github.com/cython/cython/issues/2274 \
- if (unlikely(ret)) { fprintf(stderr, "cython: line_trace_func returned %d\n", ret); } \
+ // XXX https://github.com/cython/cython/issues/2274 \
+ if (unlikely(ret)) { fprintf(stderr, "cython: line_trace_func returned %d\n", ret); } \
} \
}
#endif
#else
// mark error label as used to avoid compiler warnings
- #define __Pyx_TraceLine(lineno, nogil, goto_error) if ((1)); else goto_error;
+ #define __Pyx_TraceLine(lineno, nogil, goto_error) if ((1)); else goto_error;
#endif
/////////////// Profile ///////////////
@@ -278,7 +278,7 @@
static int __Pyx_TraceSetupAndCall(PyCodeObject** code,
PyFrameObject** frame,
- PyThreadState* tstate,
+ PyThreadState* tstate,
const char *funcname,
const char *srcfile,
int firstlineno) {
@@ -310,7 +310,7 @@ static int __Pyx_TraceSetupAndCall(PyCodeObject** code,
retval = 1;
__Pyx_EnterTracing(tstate);
- __Pyx_ErrFetchInState(tstate, &type, &value, &traceback);
+ __Pyx_ErrFetchInState(tstate, &type, &value, &traceback);
#if CYTHON_TRACE
if (tstate->c_tracefunc)
@@ -321,7 +321,7 @@ static int __Pyx_TraceSetupAndCall(PyCodeObject** code,
__Pyx_LeaveTracing(tstate);
if (retval) {
- __Pyx_ErrRestoreInState(tstate, type, value, traceback);
+ __Pyx_ErrRestoreInState(tstate, type, value, traceback);
return __Pyx_IsTracing(tstate, 0, 0) && retval;
} else {
Py_XDECREF(type);
@@ -353,8 +353,8 @@ static PyCodeObject *__Pyx_createFrameCodeObject(const char *funcname, const cha
0, /*int argcount,*/
0, /*int nlocals,*/
0, /*int stacksize,*/
- // make CPython use a fresh dict for "f_locals" at need (see GH #1836)
- CO_OPTIMIZED | CO_NEWLOCALS, /*int flags,*/
+ // make CPython use a fresh dict for "f_locals" at need (see GH #1836)
+ CO_OPTIMIZED | CO_NEWLOCALS, /*int flags,*/
$empty_bytes, /*PyObject *code,*/
$empty_tuple, /*PyObject *consts,*/
$empty_tuple, /*PyObject *names,*/
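
The macros in this file ultimately feed the same callback interface CPython exposes publicly; a minimal sketch of a line-tracing hook (function name illustrative):

    #include <Python.h>
    #include <stdio.h>

    /* Sketch: a trace hook receiving the PyTrace_* event codes used above.
     * Install with PyEval_SetTrace(trace_func, NULL); return 0 on success,
     * or -1 with an exception set to abort tracing. */
    static int trace_func(PyObject *obj, PyFrameObject *frame, int what, PyObject *arg) {
        if (what == PyTrace_LINE)
            printf("executing line %d\n", PyFrame_GetLineNumber(frame));
        return 0;
    }
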
diff --git a/contrib/tools/cython/Cython/Utility/StringTools.c b/contrib/tools/cython/Cython/Utility/StringTools.c
index c3247ece8a..2fdae812a0 100644
--- a/contrib/tools/cython/Cython/Utility/StringTools.c
+++ b/contrib/tools/cython/Cython/Utility/StringTools.c
@@ -38,9 +38,9 @@ static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) {
#endif
if (!*t->p)
return -1;
- // initialise cached hash value
- if (PyObject_Hash(*t->p) == -1)
- return -1;
+ // initialise cached hash value
+ if (PyObject_Hash(*t->p) == -1)
+ return -1;
++t;
}
return 0;
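
The PyObject_Hash call restored above pays off because str objects cache their hash in the object itself: computing it once at module init lets every later dict and attribute lookup with these interned names skip the hashing step. A minimal sketch (helper name illustrative):

    #include <Python.h>

    /* Sketch: force and cache the hash of an interned name; for str,
     * PyObject_Hash returns -1 only on error. */
    static int prehash(PyObject *interned_name) {
        return (PyObject_Hash(interned_name) == -1) ? -1 : 0;
    }
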
@@ -79,30 +79,30 @@ static CYTHON_INLINE int __Pyx_UnicodeContainsUCS4(PyObject* unicode, Py_UCS4 ch
#endif
#if !defined(Py_UNICODE_SIZE) || Py_UNICODE_SIZE == 2
-static int __Pyx_PyUnicodeBufferContainsUCS4_SP(Py_UNICODE* buffer, Py_ssize_t length, Py_UCS4 character) {
- /* handle surrogate pairs for Py_UNICODE buffers in 16bit Unicode builds */
- Py_UNICODE high_val, low_val;
- Py_UNICODE* pos;
- high_val = (Py_UNICODE) (0xD800 | (((character - 0x10000) >> 10) & ((1<<10)-1)));
- low_val = (Py_UNICODE) (0xDC00 | ( (character - 0x10000) & ((1<<10)-1)));
- for (pos=buffer; pos < buffer+length-1; pos++) {
- if (unlikely((high_val == pos[0]) & (low_val == pos[1]))) return 1;
- }
- return 0;
-}
+static int __Pyx_PyUnicodeBufferContainsUCS4_SP(Py_UNICODE* buffer, Py_ssize_t length, Py_UCS4 character) {
+ /* handle surrogate pairs for Py_UNICODE buffers in 16bit Unicode builds */
+ Py_UNICODE high_val, low_val;
+ Py_UNICODE* pos;
+ high_val = (Py_UNICODE) (0xD800 | (((character - 0x10000) >> 10) & ((1<<10)-1)));
+ low_val = (Py_UNICODE) (0xDC00 | ( (character - 0x10000) & ((1<<10)-1)));
+ for (pos=buffer; pos < buffer+length-1; pos++) {
+ if (unlikely((high_val == pos[0]) & (low_val == pos[1]))) return 1;
+ }
+ return 0;
+}
#endif
-
-static int __Pyx_PyUnicodeBufferContainsUCS4_BMP(Py_UNICODE* buffer, Py_ssize_t length, Py_UCS4 character) {
- Py_UNICODE uchar;
- Py_UNICODE* pos;
- uchar = (Py_UNICODE) character;
- for (pos=buffer; pos < buffer+length; pos++) {
- if (unlikely(uchar == pos[0])) return 1;
- }
- return 0;
-}
+
+static int __Pyx_PyUnicodeBufferContainsUCS4_BMP(Py_UNICODE* buffer, Py_ssize_t length, Py_UCS4 character) {
+ Py_UNICODE uchar;
+ Py_UNICODE* pos;
+ uchar = (Py_UNICODE) character;
+ for (pos=buffer; pos < buffer+length; pos++) {
+ if (unlikely(uchar == pos[0])) return 1;
+ }
+ return 0;
+}
#endif
-
+
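
A worked example of the surrogate-pair arithmetic in __Pyx_PyUnicodeBufferContainsUCS4_SP above, for U+1F600:

    /* high = 0xD800 | ((0x1F600 - 0x10000) >> 10)   = 0xD800 | 0x3D  = 0xD83D
     * low  = 0xDC00 | ((0x1F600 - 0x10000) & 0x3FF) = 0xDC00 | 0x200 = 0xDE00
     * so a 16-bit Py_UNICODE buffer stores U+1F600 as the pair D83D DE00. */
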
static CYTHON_INLINE int __Pyx_UnicodeContainsUCS4(PyObject* unicode, Py_UCS4 character) {
#if CYTHON_PEP393_ENABLED
const int kind = PyUnicode_KIND(unicode);
@@ -127,17 +127,17 @@ static CYTHON_INLINE int __Pyx_UnicodeContainsUCS4(PyObject* unicode, Py_UCS4 ch
#if PY_VERSION_HEX < 0x03090000 || (defined(PyUnicode_WCHAR_KIND) && defined(PyUnicode_AS_UNICODE))
#if !defined(Py_UNICODE_SIZE) || Py_UNICODE_SIZE == 2
if ((sizeof(Py_UNICODE) == 2) && unlikely(character > 65535)) {
- return __Pyx_PyUnicodeBufferContainsUCS4_SP(
+ return __Pyx_PyUnicodeBufferContainsUCS4_SP(
__Pyx_PyUnicode_AS_UNICODE(unicode),
__Pyx_PyUnicode_GET_SIZE(unicode),
- character);
+ character);
} else
#endif
{
- return __Pyx_PyUnicodeBufferContainsUCS4_BMP(
+ return __Pyx_PyUnicodeBufferContainsUCS4_BMP(
__Pyx_PyUnicode_AS_UNICODE(unicode),
__Pyx_PyUnicode_GET_SIZE(unicode),
- character);
+ character);
}
#endif
@@ -223,21 +223,21 @@ static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int
if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) {
goto return_ne;
}
-#if CYTHON_USE_UNICODE_INTERNALS
- {
- Py_hash_t hash1, hash2;
- #if CYTHON_PEP393_ENABLED
- hash1 = ((PyASCIIObject*)s1)->hash;
- hash2 = ((PyASCIIObject*)s2)->hash;
- #else
- hash1 = ((PyUnicodeObject*)s1)->hash;
- hash2 = ((PyUnicodeObject*)s2)->hash;
- #endif
- if (hash1 != hash2 && hash1 != -1 && hash2 != -1) {
- goto return_ne;
- }
- }
-#endif
+#if CYTHON_USE_UNICODE_INTERNALS
+ {
+ Py_hash_t hash1, hash2;
+ #if CYTHON_PEP393_ENABLED
+ hash1 = ((PyASCIIObject*)s1)->hash;
+ hash2 = ((PyASCIIObject*)s2)->hash;
+ #else
+ hash1 = ((PyUnicodeObject*)s1)->hash;
+ hash2 = ((PyUnicodeObject*)s2)->hash;
+ #endif
+ if (hash1 != hash2 && hash1 != -1 && hash2 != -1) {
+ goto return_ne;
+ }
+ }
+#endif
// len(s1) == len(s2) >= 1 (empty string is interned, and "s1 is not s2")
kind = __Pyx_PyUnicode_KIND(s1);
if (kind != __Pyx_PyUnicode_KIND(s2)) {
@@ -313,16 +313,16 @@ static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int eq
} else if (length == 1) {
return (equals == Py_EQ);
} else {
- int result;
-#if CYTHON_USE_UNICODE_INTERNALS
- Py_hash_t hash1, hash2;
- hash1 = ((PyBytesObject*)s1)->ob_shash;
- hash2 = ((PyBytesObject*)s2)->ob_shash;
- if (hash1 != hash2 && hash1 != -1 && hash2 != -1) {
- return (equals == Py_NE);
- }
-#endif
- result = memcmp(ps1, ps2, (size_t)length);
+ int result;
+#if CYTHON_USE_UNICODE_INTERNALS
+ Py_hash_t hash1, hash2;
+ hash1 = ((PyBytesObject*)s1)->ob_shash;
+ hash2 = ((PyBytesObject*)s2)->ob_shash;
+ if (hash1 != hash2 && hash1 != -1 && hash2 != -1) {
+ return (equals == Py_NE);
+ }
+#endif
+ result = memcmp(ps1, ps2, (size_t)length);
return (equals == Py_EQ) ? (result == 0) : (result != 0);
}
} else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) {
@@ -434,21 +434,21 @@ static CYTHON_INLINE Py_UCS4 __Pyx_GetItemInt_Unicode_Fast(PyObject* ustring, Py
}
-/////////////// decode_c_string_utf16.proto ///////////////
-
-static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16(const char *s, Py_ssize_t size, const char *errors) {
- int byteorder = 0;
- return PyUnicode_DecodeUTF16(s, size, errors, &byteorder);
-}
-static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16LE(const char *s, Py_ssize_t size, const char *errors) {
- int byteorder = -1;
- return PyUnicode_DecodeUTF16(s, size, errors, &byteorder);
-}
-static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16BE(const char *s, Py_ssize_t size, const char *errors) {
- int byteorder = 1;
- return PyUnicode_DecodeUTF16(s, size, errors, &byteorder);
-}
-
+/////////////// decode_c_string_utf16.proto ///////////////
+
+static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16(const char *s, Py_ssize_t size, const char *errors) {
+ int byteorder = 0;
+ return PyUnicode_DecodeUTF16(s, size, errors, &byteorder);
+}
+static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16LE(const char *s, Py_ssize_t size, const char *errors) {
+ int byteorder = -1;
+ return PyUnicode_DecodeUTF16(s, size, errors, &byteorder);
+}
+static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16BE(const char *s, Py_ssize_t size, const char *errors) {
+ int byteorder = 1;
+ return PyUnicode_DecodeUTF16(s, size, errors, &byteorder);
+}
+
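
The three wrappers above exist because PyUnicode_DecodeUTF16 takes the byte order through a pointer: 0 selects native order with BOM detection, -1 little endian, +1 big endian. A minimal usage sketch (helper name illustrative):

    #include <Python.h>

    /* Sketch: decode a little-endian UTF-16 buffer into a unicode object. */
    static PyObject *decode_utf16le(const char *s, Py_ssize_t size) {
        int byteorder = -1;
        return PyUnicode_DecodeUTF16(s, size, "strict", &byteorder);
    }
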
/////////////// decode_cpp_string.proto ///////////////
//@requires: IncludeCppStringH
//@requires: decode_c_bytes
@@ -470,7 +470,7 @@ static CYTHON_INLINE PyObject* __Pyx_decode_c_string(
/////////////// decode_c_string ///////////////
//@requires: IncludeStringH
-//@requires: decode_c_string_utf16
+//@requires: decode_c_string_utf16
//@substitute: naming
/* duplicate code to avoid calling strlen() if start >= 0 and stop >= 0 */
@@ -514,7 +514,7 @@ static CYTHON_INLINE PyObject* __Pyx_decode_c_bytes(
PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors));
/////////////// decode_c_bytes ///////////////
-//@requires: decode_c_string_utf16
+//@requires: decode_c_string_utf16
//@substitute: naming
static CYTHON_INLINE PyObject* __Pyx_decode_c_bytes(
@@ -617,8 +617,8 @@ static CYTHON_INLINE int __Pyx_Py_UNICODE_ISTITLE(Py_UCS4 uchar)
/////////////// unicode_tailmatch.proto ///////////////
-static int __Pyx_PyUnicode_Tailmatch(
- PyObject* s, PyObject* substr, Py_ssize_t start, Py_ssize_t end, int direction); /*proto*/
+static int __Pyx_PyUnicode_Tailmatch(
+ PyObject* s, PyObject* substr, Py_ssize_t start, Py_ssize_t end, int direction); /*proto*/
/////////////// unicode_tailmatch ///////////////
@@ -626,32 +626,32 @@ static int __Pyx_PyUnicode_Tailmatch(
// tuple of prefixes/suffixes, whereas it's much more common to
// test for a single unicode string.
-static int __Pyx_PyUnicode_TailmatchTuple(PyObject* s, PyObject* substrings,
- Py_ssize_t start, Py_ssize_t end, int direction) {
- Py_ssize_t i, count = PyTuple_GET_SIZE(substrings);
- for (i = 0; i < count; i++) {
- Py_ssize_t result;
+static int __Pyx_PyUnicode_TailmatchTuple(PyObject* s, PyObject* substrings,
+ Py_ssize_t start, Py_ssize_t end, int direction) {
+ Py_ssize_t i, count = PyTuple_GET_SIZE(substrings);
+ for (i = 0; i < count; i++) {
+ Py_ssize_t result;
#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
- result = PyUnicode_Tailmatch(s, PyTuple_GET_ITEM(substrings, i),
- start, end, direction);
+ result = PyUnicode_Tailmatch(s, PyTuple_GET_ITEM(substrings, i),
+ start, end, direction);
#else
- PyObject* sub = PySequence_ITEM(substrings, i);
- if (unlikely(!sub)) return -1;
- result = PyUnicode_Tailmatch(s, sub, start, end, direction);
- Py_DECREF(sub);
+ PyObject* sub = PySequence_ITEM(substrings, i);
+ if (unlikely(!sub)) return -1;
+ result = PyUnicode_Tailmatch(s, sub, start, end, direction);
+ Py_DECREF(sub);
#endif
- if (result) {
- return (int) result;
+ if (result) {
+ return (int) result;
}
}
- return 0;
-}
-
-static int __Pyx_PyUnicode_Tailmatch(PyObject* s, PyObject* substr,
- Py_ssize_t start, Py_ssize_t end, int direction) {
- if (unlikely(PyTuple_Check(substr))) {
- return __Pyx_PyUnicode_TailmatchTuple(s, substr, start, end, direction);
- }
+ return 0;
+}
+
+static int __Pyx_PyUnicode_Tailmatch(PyObject* s, PyObject* substr,
+ Py_ssize_t start, Py_ssize_t end, int direction) {
+ if (unlikely(PyTuple_Check(substr))) {
+ return __Pyx_PyUnicode_TailmatchTuple(s, substr, start, end, direction);
+ }
return (int) PyUnicode_Tailmatch(s, substr, start, end, direction);
}
@@ -721,33 +721,33 @@ static int __Pyx_PyBytes_SingleTailmatch(PyObject* self, PyObject* arg,
return retval;
}
-static int __Pyx_PyBytes_TailmatchTuple(PyObject* self, PyObject* substrings,
- Py_ssize_t start, Py_ssize_t end, int direction) {
- Py_ssize_t i, count = PyTuple_GET_SIZE(substrings);
- for (i = 0; i < count; i++) {
- int result;
+static int __Pyx_PyBytes_TailmatchTuple(PyObject* self, PyObject* substrings,
+ Py_ssize_t start, Py_ssize_t end, int direction) {
+ Py_ssize_t i, count = PyTuple_GET_SIZE(substrings);
+ for (i = 0; i < count; i++) {
+ int result;
#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
- result = __Pyx_PyBytes_SingleTailmatch(self, PyTuple_GET_ITEM(substrings, i),
- start, end, direction);
+ result = __Pyx_PyBytes_SingleTailmatch(self, PyTuple_GET_ITEM(substrings, i),
+ start, end, direction);
#else
- PyObject* sub = PySequence_ITEM(substrings, i);
- if (unlikely(!sub)) return -1;
- result = __Pyx_PyBytes_SingleTailmatch(self, sub, start, end, direction);
- Py_DECREF(sub);
+ PyObject* sub = PySequence_ITEM(substrings, i);
+ if (unlikely(!sub)) return -1;
+ result = __Pyx_PyBytes_SingleTailmatch(self, sub, start, end, direction);
+ Py_DECREF(sub);
#endif
- if (result) {
- return result;
+ if (result) {
+ return result;
}
}
- return 0;
-}
-
-static int __Pyx_PyBytes_Tailmatch(PyObject* self, PyObject* substr,
- Py_ssize_t start, Py_ssize_t end, int direction) {
- if (unlikely(PyTuple_Check(substr))) {
- return __Pyx_PyBytes_TailmatchTuple(self, substr, start, end, direction);
- }
-
+ return 0;
+}
+
+static int __Pyx_PyBytes_Tailmatch(PyObject* self, PyObject* substr,
+ Py_ssize_t start, Py_ssize_t end, int direction) {
+ if (unlikely(PyTuple_Check(substr))) {
+ return __Pyx_PyBytes_TailmatchTuple(self, substr, start, end, direction);
+ }
+
return __Pyx_PyBytes_SingleTailmatch(self, substr, start, end, direction);
}
@@ -788,7 +788,7 @@ static CYTHON_INLINE char __Pyx_PyBytes_GetItemInt(PyObject* bytes, Py_ssize_t i
Py_ssize_t size = PyBytes_GET_SIZE(bytes);
if (unlikely(!__Pyx_is_valid_index(index, size))) {
PyErr_SetString(PyExc_IndexError, "string index out of range");
- return (char) -1;
+ return (char) -1;
}
}
return PyBytes_AS_STRING(bytes)[index];
@@ -873,7 +873,7 @@ static PyObject* __Pyx_PyUnicode_Join(PyObject* value_tuple, Py_ssize_t value_co
if (!CYTHON_PEP393_ENABLED || ukind == result_ukind) {
memcpy((char *)result_udata + char_pos * result_ukind, udata, (size_t) (ulength * result_ukind));
} else {
- #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030300F0 || defined(_PyUnicode_FastCopyCharacters)
+ #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030300F0 || defined(_PyUnicode_FastCopyCharacters)
_PyUnicode_FastCopyCharacters(result_uval, char_pos, uval, 0, ulength);
#else
Py_ssize_t j;
@@ -1169,27 +1169,27 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_FormatAndDecref(PyObject* s, PyObj
Py_DECREF(s);
return result;
}
-
-
-//////////////////// PyUnicode_Unicode.proto ////////////////////
-
-static CYTHON_INLINE PyObject* __Pyx_PyUnicode_Unicode(PyObject *obj);/*proto*/
-
-//////////////////// PyUnicode_Unicode ////////////////////
-
-static CYTHON_INLINE PyObject* __Pyx_PyUnicode_Unicode(PyObject *obj) {
- if (unlikely(obj == Py_None))
- obj = PYUNICODE("None");
- return __Pyx_NewRef(obj);
-}
-
-
-//////////////////// PyObject_Unicode.proto ////////////////////
-
-#if PY_MAJOR_VERSION >= 3
-#define __Pyx_PyObject_Unicode(obj) \
- (likely(PyUnicode_CheckExact(obj)) ? __Pyx_NewRef(obj) : PyObject_Str(obj))
-#else
-#define __Pyx_PyObject_Unicode(obj) \
- (likely(PyUnicode_CheckExact(obj)) ? __Pyx_NewRef(obj) : PyObject_Unicode(obj))
-#endif
+
+
+//////////////////// PyUnicode_Unicode.proto ////////////////////
+
+static CYTHON_INLINE PyObject* __Pyx_PyUnicode_Unicode(PyObject *obj);/*proto*/
+
+//////////////////// PyUnicode_Unicode ////////////////////
+
+static CYTHON_INLINE PyObject* __Pyx_PyUnicode_Unicode(PyObject *obj) {
+ if (unlikely(obj == Py_None))
+ obj = PYUNICODE("None");
+ return __Pyx_NewRef(obj);
+}
+
+
+//////////////////// PyObject_Unicode.proto ////////////////////
+
+#if PY_MAJOR_VERSION >= 3
+#define __Pyx_PyObject_Unicode(obj) \
+ (likely(PyUnicode_CheckExact(obj)) ? __Pyx_NewRef(obj) : PyObject_Str(obj))
+#else
+#define __Pyx_PyObject_Unicode(obj) \
+ (likely(PyUnicode_CheckExact(obj)) ? __Pyx_NewRef(obj) : PyObject_Unicode(obj))
+#endif
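
Spelled out without the macro, the Py3 branch of __Pyx_PyObject_Unicode above reads as follows (helper name illustrative):

    #include <Python.h>

    /* Sketch: return obj unchanged when it is already an exact str,
     * otherwise build its str() form. */
    static PyObject *as_unicode(PyObject *obj) {
        if (PyUnicode_CheckExact(obj)) {
            Py_INCREF(obj);
            return obj;
        }
        return PyObject_Str(obj);
    }
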
diff --git a/contrib/tools/cython/Cython/Utility/TypeConversion.c b/contrib/tools/cython/Cython/Utility/TypeConversion.c
index dc58606363..7a7bf0f799 100644
--- a/contrib/tools/cython/Cython/Utility/TypeConversion.c
+++ b/contrib/tools/cython/Cython/Utility/TypeConversion.c
@@ -32,10 +32,10 @@ static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) {
#define __Pyx_sst_abs(value) abs(value)
#elif SIZEOF_LONG >= SIZEOF_SIZE_T
#define __Pyx_sst_abs(value) labs(value)
-#elif defined (_MSC_VER)
+#elif defined (_MSC_VER)
// abs() is defined for long, but the 64-bit type on MSVC is long long.
// Use MS-specific _abs64 instead.
- #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value))
+ #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value))
#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
#define __Pyx_sst_abs(value) llabs(value)
#elif defined (__GNUC__)
@@ -45,8 +45,8 @@ static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) {
#define __Pyx_sst_abs(value) ((value<0) ? -value : value)
#endif
-static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*);
-static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length);
+static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*);
+static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length);
#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s))
#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l)
@@ -62,25 +62,25 @@ static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*);
#define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize
#endif
-#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s))
-#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s))
-#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s))
-#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s))
-#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s))
-#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s))
-#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s))
-#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s))
-#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s))
-#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s))
-#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s))
#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s)
#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s)
#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s)
#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s)
#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s)
-// There used to be a Py_UNICODE_strlen() in CPython 3.x, but it has been deprecated since Py3.3.
-static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) {
+// There used to be a Py_UNICODE_strlen() in CPython 3.x, but it has been deprecated since Py3.3.
+static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) {
const Py_UNICODE *u_end = u;
while (*u_end++) ;
return (size_t)(u_end - u - 1);
@@ -97,9 +97,9 @@ static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*);
static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*);
static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x);
-#define __Pyx_PySequence_Tuple(obj) \
- (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj))
-
+#define __Pyx_PySequence_Tuple(obj) \
+ (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj))
+
static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*);
static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t);
static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*);
@@ -207,67 +207,67 @@ static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) {
return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str));
}
-// Py3.7 returns a "const char*" for unicode strings
-static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) {
+// Py3.7 returns a "const char*" for unicode strings
+static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) {
Py_ssize_t ignore;
return __Pyx_PyObject_AsStringAndSize(o, &ignore);
}
-#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
-#if !CYTHON_PEP393_ENABLED
-static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
- char* defenc_c;
- // borrowed reference, cached internally in 'o' by CPython
- PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL);
- if (!defenc) return NULL;
- defenc_c = PyBytes_AS_STRING(defenc);
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
+#if !CYTHON_PEP393_ENABLED
+static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
+ char* defenc_c;
+ // borrowed reference, cached internally in 'o' by CPython
+ PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL);
+ if (!defenc) return NULL;
+ defenc_c = PyBytes_AS_STRING(defenc);
#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
- {
- char* end = defenc_c + PyBytes_GET_SIZE(defenc);
- char* c;
- for (c = defenc_c; c < end; c++) {
- if ((unsigned char) (*c) >= 128) {
- // raise the error
- PyUnicode_AsASCIIString(o);
- return NULL;
+ {
+ char* end = defenc_c + PyBytes_GET_SIZE(defenc);
+ char* c;
+ for (c = defenc_c; c < end; c++) {
+ if ((unsigned char) (*c) >= 128) {
+ // raise the error
+ PyUnicode_AsASCIIString(o);
+ return NULL;
}
}
- }
+ }
#endif /*__PYX_DEFAULT_STRING_ENCODING_IS_ASCII*/
- *length = PyBytes_GET_SIZE(defenc);
- return defenc_c;
-}
-
-#else /* CYTHON_PEP393_ENABLED: */
-
-static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
- if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL;
+ *length = PyBytes_GET_SIZE(defenc);
+ return defenc_c;
+}
+
+#else /* CYTHON_PEP393_ENABLED: */
+
+static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
+ if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL;
#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
- if (likely(PyUnicode_IS_ASCII(o))) {
- // cached for the lifetime of the object
- *length = PyUnicode_GET_LENGTH(o);
- return PyUnicode_AsUTF8(o);
- } else {
- // raise the error
- PyUnicode_AsASCIIString(o);
- return NULL;
- }
+ if (likely(PyUnicode_IS_ASCII(o))) {
+ // cached for the lifetime of the object
+ *length = PyUnicode_GET_LENGTH(o);
+ return PyUnicode_AsUTF8(o);
+ } else {
+ // raise the error
+ PyUnicode_AsASCIIString(o);
+ return NULL;
+ }
#else /* __PYX_DEFAULT_STRING_ENCODING_IS_ASCII */
- return PyUnicode_AsUTF8AndSize(o, length);
+ return PyUnicode_AsUTF8AndSize(o, length);
#endif /* __PYX_DEFAULT_STRING_ENCODING_IS_ASCII */
-}
-#endif /* CYTHON_PEP393_ENABLED */
-#endif
-
-// Py3.7 returns a "const char*" for unicode strings
-static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
-#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
- if (
-#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
- __Pyx_sys_getdefaultencoding_not_ascii &&
-#endif
- PyUnicode_Check(o)) {
- return __Pyx_PyUnicode_AsStringAndSize(o, length);
+}
+#endif /* CYTHON_PEP393_ENABLED */
+#endif
+
+// Py3.7 returns a "const char*" for unicode strings
+static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
+ if (
+#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+ __Pyx_sys_getdefaultencoding_not_ascii &&
+#endif
+ PyUnicode_Check(o)) {
+ return __Pyx_PyUnicode_AsStringAndSize(o, length);
} else
#endif /* __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT */
@@ -303,28 +303,28 @@ static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) {
return retval;
}
-static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) {
-#if PY_MAJOR_VERSION >= 3
- if (PyLong_Check(result)) {
- // CPython issue #17576: warn if 'result' not of exact type int.
- if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1,
- "__int__ returned non-int (type %.200s). "
- "The ability to return an instance of a strict subclass of int "
- "is deprecated, and may be removed in a future version of Python.",
- Py_TYPE(result)->tp_name)) {
- Py_DECREF(result);
- return NULL;
- }
- return result;
- }
-#endif
- PyErr_Format(PyExc_TypeError,
- "__%.4s__ returned non-%.4s (type %.200s)",
- type_name, type_name, Py_TYPE(result)->tp_name);
- Py_DECREF(result);
- return NULL;
-}
-
+static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) {
+#if PY_MAJOR_VERSION >= 3
+ if (PyLong_Check(result)) {
+ // CPython issue #17576: warn if 'result' not of exact type int.
+ if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1,
+ "__int__ returned non-int (type %.200s). "
+ "The ability to return an instance of a strict subclass of int "
+ "is deprecated, and may be removed in a future version of Python.",
+ Py_TYPE(result)->tp_name)) {
+ Py_DECREF(result);
+ return NULL;
+ }
+ return result;
+ }
+#endif
+ PyErr_Format(PyExc_TypeError,
+ "__%.4s__ returned non-%.4s (type %.200s)",
+ type_name, type_name, Py_TYPE(result)->tp_name);
+ Py_DECREF(result);
+ return NULL;
+}
+
static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) {
#if CYTHON_USE_TYPE_SLOTS
PyNumberMethods *m;
@@ -332,9 +332,9 @@ static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) {
const char *name = NULL;
PyObject *res = NULL;
#if PY_MAJOR_VERSION < 3
- if (likely(PyInt_Check(x) || PyLong_Check(x)))
+ if (likely(PyInt_Check(x) || PyLong_Check(x)))
#else
- if (likely(PyLong_Check(x)))
+ if (likely(PyLong_Check(x)))
#endif
return __Pyx_NewRef(x);
#if CYTHON_USE_TYPE_SLOTS
@@ -342,30 +342,30 @@ static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) {
#if PY_MAJOR_VERSION < 3
if (m && m->nb_int) {
name = "int";
- res = m->nb_int(x);
+ res = m->nb_int(x);
}
else if (m && m->nb_long) {
name = "long";
- res = m->nb_long(x);
+ res = m->nb_long(x);
}
#else
- if (likely(m && m->nb_int)) {
+ if (likely(m && m->nb_int)) {
name = "int";
- res = m->nb_int(x);
+ res = m->nb_int(x);
}
#endif
#else
- if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) {
- res = PyNumber_Int(x);
- }
+ if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) {
+ res = PyNumber_Int(x);
+ }
#endif
- if (likely(res)) {
+ if (likely(res)) {
#if PY_MAJOR_VERSION < 3
- if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) {
+ if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) {
#else
- if (unlikely(!PyLong_CheckExact(res))) {
+ if (unlikely(!PyLong_CheckExact(res))) {
#endif
- return __Pyx_PyNumber_IntOrLongWrongResultType(res, name);
+ return __Pyx_PyNumber_IntOrLongWrongResultType(res, name);
}
}
else if (!PyErr_Occurred()) {
@@ -712,8 +712,8 @@ static const char DIGIT_PAIRS_8[2*8*8+1] = {
};
static const char DIGITS_HEX[2*16+1] = {
- "0123456789abcdef"
- "0123456789ABCDEF"
+ "0123456789abcdef"
+ "0123456789ABCDEF"
};
@@ -751,39 +751,39 @@ static CYTHON_INLINE PyObject* {{TO_PY_FUNCTION}}({{TYPE}} value, Py_ssize_t wid
if (format_char == 'X') {
hex_digits += 16;
format_char = 'x';
- }
+ }
// surprise: even trivial sprintf() calls don't get optimised in gcc (4.8)
remaining = value; /* not using abs(value) to avoid overflow problems */
last_one_off = 0;
dpos = end;
- do {
+ do {
int digit_pos;
switch (format_char) {
case 'o':
- digit_pos = abs((int)(remaining % (8*8)));
- remaining = ({{TYPE}}) (remaining / (8*8));
+ digit_pos = abs((int)(remaining % (8*8)));
+ remaining = ({{TYPE}}) (remaining / (8*8));
dpos -= 2;
memcpy(dpos, DIGIT_PAIRS_8 + digit_pos * 2, 2); /* copy 2 digits at a time, unaligned */
last_one_off = (digit_pos < 8);
break;
case 'd':
- digit_pos = abs((int)(remaining % (10*10)));
- remaining = ({{TYPE}}) (remaining / (10*10));
+ digit_pos = abs((int)(remaining % (10*10)));
+ remaining = ({{TYPE}}) (remaining / (10*10));
dpos -= 2;
memcpy(dpos, DIGIT_PAIRS_10 + digit_pos * 2, 2); /* copy 2 digits at a time, unaligned */
last_one_off = (digit_pos < 10);
break;
case 'x':
- *(--dpos) = hex_digits[abs((int)(remaining % 16))];
- remaining = ({{TYPE}}) (remaining / 16);
+ *(--dpos) = hex_digits[abs((int)(remaining % 16))];
+ remaining = ({{TYPE}}) (remaining / 16);
break;
default:
assert(0);
break;
}
- } while (unlikely(remaining != 0));
-
+ } while (unlikely(remaining != 0));
+
if (last_one_off) {
assert(*dpos == '0');
dpos++;
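
The hunk above restores Cython's digit-pair integer formatting: rather than performing one division per digit, it peels two digits per iteration out of a precomputed pair table (DIGIT_PAIRS_8/DIGIT_PAIRS_10), then uses last_one_off to drop the spurious leading zero of the final pair. A rough Python sketch of the same idea; the table and function names here are illustrative, not Cython's:

    # Two-digits-at-a-time decimal formatting, mirroring the hunk above.
    # DIGIT_PAIRS_10[2*k : 2*k+2] is the two-character rendering of k (0..99).
    DIGIT_PAIRS_10 = "".join("%d%d" % (i, j) for i in range(10) for j in range(10))

    def format_decimal(value):
        if value == 0:
            return "0"
        remaining = abs(value)   # Python ints cannot overflow, unlike the C code
        pairs = []
        last_one_off = False
        while remaining:
            digit_pos = remaining % 100
            remaining //= 100
            pairs.append(DIGIT_PAIRS_10[digit_pos * 2 : digit_pos * 2 + 2])
            last_one_off = digit_pos < 10   # final pair may carry a padding zero
        digits = "".join(reversed(pairs))
        if last_one_off:
            digits = digits[1:]             # the dpos++ step in the hunk above
        return ("-" if value < 0 else "") + digits

    assert format_decimal(100) == "100" and format_decimal(-7) == "-7"

Halving the number of divisions is the point of the table; per the comment in the hunk, gcc 4.8 does not optimise even trivial sprintf() calls, so the conversion is done by hand.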
diff --git a/contrib/tools/cython/Cython/Utility/arrayarray.h b/contrib/tools/cython/Cython/Utility/arrayarray.h
index f2f07b9f84..a9e4923785 100644
--- a/contrib/tools/cython/Cython/Utility/arrayarray.h
+++ b/contrib/tools/cython/Cython/Utility/arrayarray.h
@@ -29,7 +29,7 @@ typedef struct arraydescr {
int (*setitem)(struct arrayobject *, Py_ssize_t, PyObject *);
#if PY_MAJOR_VERSION >= 3
char *formats;
-#endif
+#endif
} arraydescr;
@@ -47,10 +47,10 @@ struct arrayobject {
char *as_chars;
unsigned long *as_ulongs;
long *as_longs;
-#if PY_MAJOR_VERSION >= 3
- unsigned long long *as_ulonglongs;
- long long *as_longlongs;
-#endif
+#if PY_MAJOR_VERSION >= 3
+ unsigned long long *as_ulonglongs;
+ long long *as_longlongs;
+#endif
short *as_shorts;
unsigned short *as_ushorts;
Py_UNICODE *as_pyunicodes;
@@ -114,7 +114,7 @@ static CYTHON_INLINE int resize(arrayobject *self, Py_ssize_t n) {
if (items == NULL) {
PyErr_NoMemory();
return -1;
- }
+ }
self->data.ob_item = (char*) items;
__Pyx_SET_SIZE(self, n);
self->allocated = n;
diff --git a/contrib/tools/cython/Cython/Utils.py b/contrib/tools/cython/Cython/Utils.py
index c23e11be30..d59d67d78b 100644
--- a/contrib/tools/cython/Cython/Utils.py
+++ b/contrib/tools/cython/Cython/Utils.py
@@ -385,22 +385,22 @@ class LazyStr:
return left + self.callback()
-class OrderedSet(object):
- def __init__(self, elements=()):
- self._list = []
- self._set = set()
- self.update(elements)
- def __iter__(self):
- return iter(self._list)
- def update(self, elements):
- for e in elements:
- self.add(e)
- def add(self, e):
- if e not in self._set:
- self._list.append(e)
- self._set.add(e)
-
-
+class OrderedSet(object):
+ def __init__(self, elements=()):
+ self._list = []
+ self._set = set()
+ self.update(elements)
+ def __iter__(self):
+ return iter(self._list)
+ def update(self, elements):
+ for e in elements:
+ self.add(e)
+ def add(self, e):
+ if e not in self._set:
+ self._list.append(e)
+ self._set.add(e)
+
+
# Class decorator that adds a metaclass and recreates the class with it.
# Copied from 'six'.
def add_metaclass(metaclass):
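
The OrderedSet restored above deduplicates while preserving insertion order by keeping a list and a set in lockstep. A quick usage sketch, assuming the class exactly as defined in the hunk:

    # Assumes OrderedSet as defined in the hunk above.
    s = OrderedSet("abracadabra")      # update() adds each element once
    assert list(s) == ['a', 'b', 'r', 'c', 'd']
    s.add('z')
    s.update('ab')                     # already present: no effect on order
    assert list(s) == ['a', 'b', 'r', 'c', 'd', 'z']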
diff --git a/contrib/tools/cython/Cython/ya.make b/contrib/tools/cython/Cython/ya.make
index 7012412a22..f4aacbdf94 100644
--- a/contrib/tools/cython/Cython/ya.make
+++ b/contrib/tools/cython/Cython/ya.make
@@ -1,20 +1,20 @@
PY23_LIBRARY()
-
-OWNER(g:yatool)
-
+
+OWNER(g:yatool)
+
NO_LINT()
-# Minimal set of files required to support coverage (DEVTOOLS-4095)
-PY_SRCS(
- __init__.py
- Coverage.py
- Shadow.py
- Utils.py
-)
-
-PEERDIR(
- contrib/python/six
+# Minimal set of files required to support coverage (DEVTOOLS-4095)
+PY_SRCS(
+ __init__.py
+ Coverage.py
+ Shadow.py
+ Utils.py
+)
+
+PEERDIR(
+ contrib/python/six
library/python/resource
-)
-
-END()
+)
+
+END()
diff --git a/contrib/tools/python/ya.make b/contrib/tools/python/ya.make
index 6f3f55bda7..fc95aaca64 100644
--- a/contrib/tools/python/ya.make
+++ b/contrib/tools/python/ya.make
@@ -14,7 +14,7 @@ PEERDIR(
)
END()
-
-RECURSE_FOR_TESTS(
- tests
-)
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/contrib/tools/python3/lib/lsan.supp b/contrib/tools/python3/lib/lsan.supp
index ab7a4ead80..37121682e7 100644
--- a/contrib/tools/python3/lib/lsan.supp
+++ b/contrib/tools/python3/lib/lsan.supp
@@ -1,2 +1,2 @@
-leak:PyBytes_FromStringAndSize
-leak:PyUnicode_New
+leak:PyBytes_FromStringAndSize
+leak:PyUnicode_New
diff --git a/contrib/tools/python3/lib/ya.make b/contrib/tools/python3/lib/ya.make
index c4cbbe9427..c78425d6cf 100644
--- a/contrib/tools/python3/lib/ya.make
+++ b/contrib/tools/python3/lib/ya.make
@@ -12,6 +12,6 @@ PEERDIR(
contrib/tools/python3/src/Modules
)
-SUPPRESSIONS(lsan.supp)
-
+SUPPRESSIONS(lsan.supp)
+
END()
diff --git a/contrib/tools/python3/src/tsan.supp b/contrib/tools/python3/src/tsan.supp
index efce024ff0..65dce089ff 100644
--- a/contrib/tools/python3/src/tsan.supp
+++ b/contrib/tools/python3/src/tsan.supp
@@ -1,3 +1,3 @@
# Thread sanitizer reports a data race during main thread shutdown.
# This race was considered to be safe. See DEVTOOLSSUPPORT-8443 for details.
-race:PyInterpreterState_Delete
+race:PyInterpreterState_Delete
diff --git a/contrib/tools/yasm/libyasm/errwarn.c b/contrib/tools/yasm/libyasm/errwarn.c
index c69360ab9a..f759cf8f71 100644
--- a/contrib/tools/yasm/libyasm/errwarn.c
+++ b/contrib/tools/yasm/libyasm/errwarn.c
@@ -27,7 +27,7 @@
#include "util.h"
#include <ctype.h>
-#include <errno.h>
+#include <errno.h>
#include <stdarg.h>
#include "coretype.h"
@@ -528,10 +528,10 @@ yasm__fatal(const char *message, ...)
/*@notreached@*/
va_end(va);
}
-
-void
-yasm__fatal_missing_input_file(const char *message, const char *filename)
-{
- fprintf(stderr, "%s: %s %s (%s)\n", yasm_gettext_hook(N_("FATAL")), yasm_gettext_hook(message), strerror(errno), yasm_gettext_hook(filename));
- exit(EXIT_FAILURE);
-}
+
+void
+yasm__fatal_missing_input_file(const char *message, const char *filename)
+{
+ fprintf(stderr, "%s: %s %s (%s)\n", yasm_gettext_hook(N_("FATAL")), yasm_gettext_hook(message), strerror(errno), yasm_gettext_hook(filename));
+ exit(EXIT_FAILURE);
+}
diff --git a/contrib/tools/yasm/libyasm/errwarn.h b/contrib/tools/yasm/libyasm/errwarn.h
index b877d04e66..f8e4f10ec3 100644
--- a/contrib/tools/yasm/libyasm/errwarn.h
+++ b/contrib/tools/yasm/libyasm/errwarn.h
@@ -114,9 +114,9 @@ extern /*@exits@*/ void (*yasm_fatal) (const char *message, va_list va);
YASM_LIB_DECL
/*@exits@*/ void yasm__fatal(const char *message, ...);
-YASM_LIB_DECL
-/*@exits@*/ void yasm__fatal_missing_input_file(const char *message, const char *filename);
-
+YASM_LIB_DECL
+/*@exits@*/ void yasm__fatal_missing_input_file(const char *message, const char *filename);
+
/** Unconditionally clear the error indicator, freeing any associated data.
* Has no effect if the error indicator is not set.
*/
diff --git a/contrib/tools/yasm/modules/preprocs/gas/gas-preproc.c b/contrib/tools/yasm/modules/preprocs/gas/gas-preproc.c
index bdc4ff79bd..60c9e4f0cd 100644
--- a/contrib/tools/yasm/modules/preprocs/gas/gas-preproc.c
+++ b/contrib/tools/yasm/modules/preprocs/gas/gas-preproc.c
@@ -1235,7 +1235,7 @@ gas_preproc_create(const char *in_filename, yasm_symtab *symtab,
if (strcmp(in_filename, "-") != 0) {
f = fopen(in_filename, "r");
if (!f) {
- yasm__fatal_missing_input_file(N_("Could not open input file"), in_filename);
+ yasm__fatal_missing_input_file(N_("Could not open input file"), in_filename);
}
} else {
f = stdin;
diff --git a/contrib/tools/yasm/modules/preprocs/nasm/nasm-preproc.c b/contrib/tools/yasm/modules/preprocs/nasm/nasm-preproc.c
index 0f57838720..566dd8004b 100644
--- a/contrib/tools/yasm/modules/preprocs/nasm/nasm-preproc.c
+++ b/contrib/tools/yasm/modules/preprocs/nasm/nasm-preproc.c
@@ -142,7 +142,7 @@ nasm_preproc_create(const char *in_filename, yasm_symtab *symtab,
if (strcmp(in_filename, "-") != 0) {
f = fopen(in_filename, "r");
if (!f)
- yasm__fatal_missing_input_file( N_("Could not open input file"), in_filename );
+ yasm__fatal_missing_input_file( N_("Could not open input file"), in_filename );
}
else
f = stdin;
diff --git a/contrib/tools/yasm/modules/preprocs/raw/raw-preproc.c b/contrib/tools/yasm/modules/preprocs/raw/raw-preproc.c
index 7c2cda084b..9745b242fe 100644
--- a/contrib/tools/yasm/modules/preprocs/raw/raw-preproc.c
+++ b/contrib/tools/yasm/modules/preprocs/raw/raw-preproc.c
@@ -51,7 +51,7 @@ raw_preproc_create(const char *in_filename, yasm_symtab *symtab,
if (strcmp(in_filename, "-") != 0) {
f = fopen(in_filename, "r");
if (!f)
- yasm__fatal_missing_input_file( N_("Could not open input file"), in_filename );
+ yasm__fatal_missing_input_file( N_("Could not open input file"), in_filename );
}
else
f = stdin;
diff --git a/library/cpp/accurate_accumulate/benchmark/metrics/ya.make b/library/cpp/accurate_accumulate/benchmark/metrics/ya.make
index 9c62aad212..5d532e1479 100644
--- a/library/cpp/accurate_accumulate/benchmark/metrics/ya.make
+++ b/library/cpp/accurate_accumulate/benchmark/metrics/ya.make
@@ -2,12 +2,12 @@ OWNER(yazevnul)
PY2TEST()
-SIZE(LARGE)
+SIZE(LARGE)
TAG(
ya:force_sandbox
sb:intel_e5_2660v1
- ya:fat
+ ya:fat
)
TEST_SRCS(main.py)
diff --git a/library/cpp/actors/interconnect/ut_fat/ya.make b/library/cpp/actors/interconnect/ut_fat/ya.make
index 5218401045..6e58d08154 100644
--- a/library/cpp/actors/interconnect/ut_fat/ya.make
+++ b/library/cpp/actors/interconnect/ut_fat/ya.make
@@ -7,7 +7,7 @@ OWNER(
SIZE(LARGE)
-TAG(ya:fat)
+TAG(ya:fat)
SRCS(
main.cpp
diff --git a/library/cpp/digest/argonish/ut_fat/ya.make b/library/cpp/digest/argonish/ut_fat/ya.make
index 605524bedb..94ebda9225 100644
--- a/library/cpp/digest/argonish/ut_fat/ya.make
+++ b/library/cpp/digest/argonish/ut_fat/ya.make
@@ -16,6 +16,6 @@ TAG(
ya:force_sandbox
)
-SIZE(LARGE)
+SIZE(LARGE)
END()
diff --git a/library/cpp/http/io/fuzz/ya.make b/library/cpp/http/io/fuzz/ya.make
index d4d4b8cdbf..8b3ccb1969 100644
--- a/library/cpp/http/io/fuzz/ya.make
+++ b/library/cpp/http/io/fuzz/ya.make
@@ -9,8 +9,8 @@ PEERDIR(
library/cpp/http/io
)
-SIZE(MEDIUM)
-
+SIZE(MEDIUM)
+
SRCS(
main.cpp
)
diff --git a/library/cpp/http/io/stream_ut.cpp b/library/cpp/http/io/stream_ut.cpp
index 48c9307dd4..1ea35df675 100644
--- a/library/cpp/http/io/stream_ut.cpp
+++ b/library/cpp/http/io/stream_ut.cpp
@@ -84,8 +84,8 @@ Y_UNIT_TEST_SUITE(THttpStreamTest) {
Y_UNIT_TEST(TestHttpInput) {
TString res = "I'm a teapot";
- TPortManager pm;
- const ui16 port = pm.GetPort();
+ TPortManager pm;
+ const ui16 port = pm.GetPort();
TTestHttpServer serverImpl(res);
THttpServer server(&serverImpl, THttpServer::TOptions(port).EnableKeepAlive(true).EnableCompression(true));
@@ -238,8 +238,8 @@ Y_UNIT_TEST_SUITE(THttpStreamTest) {
Y_UNIT_TEST(TestMinRequest) {
TString res = "qqqqqq";
- TPortManager pm;
- const ui16 port = pm.GetPort();
+ TPortManager pm;
+ const ui16 port = pm.GetPort();
TTestHttpServer serverImpl(res);
THttpServer server(&serverImpl, THttpServer::TOptions(port).EnableKeepAlive(true).EnableCompression(true));
@@ -264,8 +264,8 @@ Y_UNIT_TEST_SUITE(THttpStreamTest) {
Y_UNIT_TEST(TestResponseWithBlanks) {
TString res = "qqqqqq\r\n\r\nsdasdsad\r\n";
- TPortManager pm;
- const ui16 port = pm.GetPort();
+ TPortManager pm;
+ const ui16 port = pm.GetPort();
TTestHttpServer serverImpl(res);
THttpServer server(&serverImpl, THttpServer::TOptions(port).EnableKeepAlive(true).EnableCompression(true));
diff --git a/library/cpp/http/server/http_ut.cpp b/library/cpp/http/server/http_ut.cpp
index cf6f9c2c63..cc62bb988e 100644
--- a/library/cpp/http/server/http_ut.cpp
+++ b/library/cpp/http/server/http_ut.cpp
@@ -322,8 +322,8 @@ Y_UNIT_TEST_SUITE(THttpServerTest) {
Y_UNIT_TEST(TestEchoServer) {
TString res = TestData();
- TPortManager pm;
- const ui16 port = pm.GetPort();
+ TPortManager pm;
+ const ui16 port = pm.GetPort();
const bool trueFalse[] = {true, false};
TEchoServer serverImpl(res);
@@ -428,8 +428,8 @@ Y_UNIT_TEST_SUITE(THttpServerTest) {
* Data should be large enough not to fit into the socket buffer
**/
TString res = TestData(10 * 1024 * 1024);
- TPortManager portManager;
- const ui16 port = portManager.GetPort();
+ TPortManager portManager;
+ const ui16 port = portManager.GetPort();
TEchoServer serverImpl(res);
THttpServer::TOptions options(port);
options.EnableKeepAlive(true);
diff --git a/library/cpp/json/yson/ut/ya.make b/library/cpp/json/yson/ut/ya.make
index 1a4a193b55..4ceb65b279 100644
--- a/library/cpp/json/yson/ut/ya.make
+++ b/library/cpp/json/yson/ut/ya.make
@@ -15,10 +15,10 @@ PEERDIR(
library/cpp/testing/unittest
)
-SIZE(LARGE)
+SIZE(LARGE)
+
+TAG(ya:fat)
-TAG(ya:fat)
-
TIMEOUT(600)
SRCS(
diff --git a/library/cpp/scheme/tests/fuzz_json/ya.make b/library/cpp/scheme/tests/fuzz_json/ya.make
index e398cfc5dd..0d91c70585 100644
--- a/library/cpp/scheme/tests/fuzz_json/ya.make
+++ b/library/cpp/scheme/tests/fuzz_json/ya.make
@@ -7,8 +7,8 @@ OWNER(
velavokr
)
-SIZE(MEDIUM)
-
+SIZE(MEDIUM)
+
SRCS(
fuzz_json.cpp
)
diff --git a/library/cpp/string_utils/base64/bench/metrics/ya.make b/library/cpp/string_utils/base64/bench/metrics/ya.make
index 7185c725b6..b0406516c3 100644
--- a/library/cpp/string_utils/base64/bench/metrics/ya.make
+++ b/library/cpp/string_utils/base64/bench/metrics/ya.make
@@ -5,12 +5,12 @@ OWNER(
PY2TEST()
-SIZE(LARGE)
+SIZE(LARGE)
TAG(
ya:force_sandbox
sb:intel_e5_2660v1
- ya:fat
+ ya:fat
)
TEST_SRCS(main.py)
diff --git a/library/cpp/testing/benchmark/examples/metrics/ya.make b/library/cpp/testing/benchmark/examples/metrics/ya.make
index 0ba17e4545..a9dbdca9fa 100644
--- a/library/cpp/testing/benchmark/examples/metrics/ya.make
+++ b/library/cpp/testing/benchmark/examples/metrics/ya.make
@@ -5,12 +5,12 @@ OWNER(
PY2TEST()
-SIZE(LARGE)
+SIZE(LARGE)
TAG(
ya:force_sandbox
sb:intel_e5_2660v1
- ya:fat
+ ya:fat
)
TEST_SRCS(main.py)
diff --git a/library/cpp/testing/common/env.cpp b/library/cpp/testing/common/env.cpp
index 436af91845..fa3a47fe16 100644
--- a/library/cpp/testing/common/env.cpp
+++ b/library/cpp/testing/common/env.cpp
@@ -3,19 +3,19 @@
#include <build/scripts/c_templates/svnversion.h>
#include <util/folder/dirut.h>
-#include <util/folder/path.h>
-#include <util/generic/singleton.h>
+#include <util/folder/path.h>
+#include <util/generic/singleton.h>
#include <util/stream/file.h>
-#include <util/stream/fwd.h>
-#include <util/system/env.h>
-#include <util/system/file.h>
-#include <util/system/file_lock.h>
-#include <util/system/guard.h>
-
+#include <util/stream/fwd.h>
+#include <util/system/env.h>
+#include <util/system/file.h>
+#include <util/system/file_lock.h>
+#include <util/system/guard.h>
+
#include <library/cpp/json/json_reader.h>
#include <library/cpp/json/json_value.h>
-#include <library/cpp/json/json_writer.h>
-
+#include <library/cpp/json/json_writer.h>
+
TString ArcadiaSourceRoot() {
if (const auto& sourceRoot = NPrivate::GetTestEnv().SourceRoot) {
return sourceRoot;
@@ -63,89 +63,89 @@ TString GetArcadiaTestsData() {
return {};
}
-
+
TString GetWorkPath() {
TString workPath = NPrivate::GetTestEnv().WorkPath;
if (workPath) {
return workPath;
- }
+ }
return NPrivate::GetCwd();
-}
-
+}
+
TFsPath GetOutputPath() {
return GetWorkPath() + "/testing_out_stuff";
}
-const TString& GetRamDrivePath() {
+const TString& GetRamDrivePath() {
return NPrivate::GetTestEnv().RamDrivePath;
-}
-
+}
+
const TString& GetYtHddPath() {
return NPrivate::GetTestEnv().YtHddPath;
}
-const TString& GetOutputRamDrivePath() {
+const TString& GetOutputRamDrivePath() {
return NPrivate::GetTestEnv().TestOutputRamDrivePath;
-}
-
-const TString& GdbPath() {
- return NPrivate::GetTestEnv().GdbPath;
-}
-
-const TString& GetTestParam(TStringBuf name) {
- const static TString def = "";
- return GetTestParam(name, def);
-}
-
-const TString& GetTestParam(TStringBuf name, const TString& def) {
- auto& testParameters = NPrivate::GetTestEnv().TestParameters;
- auto it = testParameters.find(name.data());
- if (it != testParameters.end()) {
- return it->second;
- }
- return def;
-}
-
-void AddEntryToCoreSearchFile(const TString& filename, TStringBuf cmd, int pid, const TFsPath& binaryPath = TFsPath(), const TFsPath& cwd = TFsPath()) {
- auto lock = TFileLock(filename);
- TGuard<TFileLock> guard(lock);
-
- TOFStream output(TFile(filename, WrOnly | ForAppend | OpenAlways));
-
- NJson::TJsonWriter writer(&output, false);
- writer.OpenMap();
- writer.Write("cmd", cmd);
- writer.Write("pid", pid);
- if (binaryPath) {
- writer.Write("binary_path", binaryPath);
- }
- if (cwd) {
- writer.Write("cwd", cwd);
- }
- writer.CloseMap();
- writer.Flush();
-
- output.Write("\n");
-}
-
-void WatchProcessCore(int pid, const TFsPath& binaryPath, const TFsPath& cwd) {
- auto& filename = NPrivate::GetTestEnv().CoreSearchFile;
- if (filename) {
- AddEntryToCoreSearchFile(filename, "add", pid, binaryPath, cwd);
- }
-}
-
-void StopProcessCoreWatching(int pid) {
- auto& filename = NPrivate::GetTestEnv().CoreSearchFile;
- if (filename) {
- AddEntryToCoreSearchFile(filename, "drop", pid);
- }
-}
-
+}
+
+const TString& GdbPath() {
+ return NPrivate::GetTestEnv().GdbPath;
+}
+
+const TString& GetTestParam(TStringBuf name) {
+ const static TString def = "";
+ return GetTestParam(name, def);
+}
+
+const TString& GetTestParam(TStringBuf name, const TString& def) {
+ auto& testParameters = NPrivate::GetTestEnv().TestParameters;
+ auto it = testParameters.find(name.data());
+ if (it != testParameters.end()) {
+ return it->second;
+ }
+ return def;
+}
+
+void AddEntryToCoreSearchFile(const TString& filename, TStringBuf cmd, int pid, const TFsPath& binaryPath = TFsPath(), const TFsPath& cwd = TFsPath()) {
+ auto lock = TFileLock(filename);
+ TGuard<TFileLock> guard(lock);
+
+ TOFStream output(TFile(filename, WrOnly | ForAppend | OpenAlways));
+
+ NJson::TJsonWriter writer(&output, false);
+ writer.OpenMap();
+ writer.Write("cmd", cmd);
+ writer.Write("pid", pid);
+ if (binaryPath) {
+ writer.Write("binary_path", binaryPath);
+ }
+ if (cwd) {
+ writer.Write("cwd", cwd);
+ }
+ writer.CloseMap();
+ writer.Flush();
+
+ output.Write("\n");
+}
+
+void WatchProcessCore(int pid, const TFsPath& binaryPath, const TFsPath& cwd) {
+ auto& filename = NPrivate::GetTestEnv().CoreSearchFile;
+ if (filename) {
+ AddEntryToCoreSearchFile(filename, "add", pid, binaryPath, cwd);
+ }
+}
+
+void StopProcessCoreWatching(int pid) {
+ auto& filename = NPrivate::GetTestEnv().CoreSearchFile;
+ if (filename) {
+ AddEntryToCoreSearchFile(filename, "drop", pid);
+ }
+}
+
bool FromYaTest() {
return NPrivate::GetTestEnv().IsRunningFromTest;
-}
+}
namespace NPrivate {
TTestEnv::TTestEnv() {
@@ -161,9 +161,9 @@ namespace NPrivate {
RamDrivePath = "";
YtHddPath = "";
TestOutputRamDrivePath = "";
- GdbPath = "";
- CoreSearchFile = "";
- TestParameters.clear();
+ GdbPath = "";
+ CoreSearchFile = "";
+ TestParameters.clear();
const TString contextFilename = GetEnv("YA_TEST_CONTEXT_FILE");
if (contextFilename) {
@@ -206,23 +206,23 @@ namespace NPrivate {
if (value) {
TestOutputRamDrivePath = value->GetStringSafe("");
}
-
- value = context.GetValueByPath("runtime.gdb_bin");
- if (value) {
- GdbPath = value->GetStringSafe("");
- }
-
- value = context.GetValueByPath("runtime.test_params");
- if (value) {
- for (const auto& entry : context.GetValueByPath("runtime.test_params")->GetMap()) {
- TestParameters[entry.first] = entry.second.GetStringSafe("");
- }
- }
-
- value = context.GetValueByPath("internal.core_search_file");
- if (value) {
- CoreSearchFile = value->GetStringSafe("");
- }
+
+ value = context.GetValueByPath("runtime.gdb_bin");
+ if (value) {
+ GdbPath = value->GetStringSafe("");
+ }
+
+ value = context.GetValueByPath("runtime.test_params");
+ if (value) {
+ for (const auto& entry : context.GetValueByPath("runtime.test_params")->GetMap()) {
+ TestParameters[entry.first] = entry.second.GetStringSafe("");
+ }
+ }
+
+ value = context.GetValueByPath("internal.core_search_file");
+ if (value) {
+ CoreSearchFile = value->GetStringSafe("");
+ }
}
if (!YtHddPath) {
@@ -257,10 +257,10 @@ namespace NPrivate {
IsRunningFromTest = (fromEnv == "1");
}
- void TTestEnv::AddTestParam(TStringBuf name, TStringBuf value) {
- TestParameters[TString{name}] = value;
- }
-
+ void TTestEnv::AddTestParam(TStringBuf name, TStringBuf value) {
+ TestParameters[TString{name}] = value;
+ }
+
TString GetCwd() {
try {
return NFs::CurrentWorkingDirectory();
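
The restored core-search machinery above appends one JSON object per line, each with "cmd" and "pid" plus optional "binary_path" and "cwd", taking a file lock around the append. A minimal Python sketch of the same JSON-lines format; the helper names are hypothetical and the lock is omitted here:

    import json

    # Append one {"cmd", "pid", ...} object per line, as
    # AddEntryToCoreSearchFile does above (minus the file lock).
    def add_entry(path, cmd, pid, binary_path=None, cwd=None):
        entry = {"cmd": cmd, "pid": pid}
        if binary_path:
            entry["binary_path"] = binary_path
        if cwd:
            entry["cwd"] = cwd
        with open(path, "a") as f:
            f.write(json.dumps(entry) + "\n")

    def read_entries(path):
        with open(path) as f:
            return [json.loads(line) for line in f if line.strip()]

    add_entry("watch_core.txt", "add", 1, binary_path="bin1", cwd="pwd")
    add_entry("watch_core.txt", "drop", 1)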
diff --git a/library/cpp/testing/common/env.h b/library/cpp/testing/common/env.h
index 9f405145f1..7b89aa1bed 100644
--- a/library/cpp/testing/common/env.h
+++ b/library/cpp/testing/common/env.h
@@ -1,7 +1,7 @@
#pragma once
-#include <unordered_map>
-
+#include <unordered_map>
+
#include <util/folder/path.h>
#include <util/generic/string.h>
#include <util/generic/strbuf.h>
@@ -14,7 +14,7 @@ TString ArcadiaSourceRoot();
// for instance: there are 2 files in the folder test, example_ut.cpp and example.data, so the full path to test/example.data can be obtained
// from example_ut.cpp as ArcadiaFromCurrentLocation(__SOURCE_FILE__, "example.data")
TString ArcadiaFromCurrentLocation(TStringBuf where, TStringBuf path);
-
+
// @brief return build folder path
TString BuildRoot();
@@ -34,26 +34,26 @@ TString GetWorkPath();
TFsPath GetOutputPath();
// @brief return path from env:YA_TEST_RAM_DRIVE_PATH
-const TString& GetRamDrivePath();
-
+const TString& GetRamDrivePath();
+
// @brief return path from env:YA_TEST_OUTPUT_RAM_DRIVE_PATH
-const TString& GetOutputRamDrivePath();
-
-// @brief return a test parameter by name. If it does not exist, return an empty string
-const TString& GetTestParam(TStringBuf name);
-
-// @brief return a test parameter by name. If it does not exist, return the specified default value
-const TString& GetTestParam(TStringBuf name, const TString& def);
-
-// @brief return the path to gdb
-const TString& GdbPath();
-
-// @brief register the process. The test suite will be marked as failed if the process is terminated with a core dump file after testing
-void WatchProcessCore(int pid, const TFsPath& binaryPath, const TFsPath& cwd = TFsPath());
-
-// @brief mark the process as successfully completed - the test machinery won't try to recover a core dump file for the process
-void StopProcessCoreWatching(int pid);
-
+const TString& GetOutputRamDrivePath();
+
+// @brief return a test parameter by name. If it does not exist, return an empty string
+const TString& GetTestParam(TStringBuf name);
+
+// @brief return a test parameter by name. If it does not exist, return the specified default value
+const TString& GetTestParam(TStringBuf name, const TString& def);
+
+// @brief return the path to gdb
+const TString& GdbPath();
+
+// @brief register the process. The test suite will be marked as failed if the process is terminated with a core dump file after testing
+void WatchProcessCore(int pid, const TFsPath& binaryPath, const TFsPath& cwd = TFsPath());
+
+// @brief mark the process as successfully completed - the test machinery won't try to recover a core dump file for the process
+void StopProcessCoreWatching(int pid);
+
#define SRC_(path) ArcadiaFromCurrentLocation(__SOURCE_FILE__, path)
namespace NPrivate {
@@ -63,8 +63,8 @@ namespace NPrivate {
void ReInitialize();
- void AddTestParam(TStringBuf name, TStringBuf value);
-
+ void AddTestParam(TStringBuf name, TStringBuf value);
+
bool IsRunningFromTest;
TString ArcadiaTestsDataDir;
TString SourceRoot;
@@ -73,9 +73,9 @@ namespace NPrivate {
TString RamDrivePath;
TString YtHddPath;
TString TestOutputRamDrivePath;
- TString GdbPath;
- TString CoreSearchFile;
- std::unordered_map<TString, TString> TestParameters;
+ TString GdbPath;
+ TString CoreSearchFile;
+ std::unordered_map<TString, TString> TestParameters;
};
TString GetCwd();
diff --git a/library/cpp/testing/common/ut/env_ut.cpp b/library/cpp/testing/common/ut/env_ut.cpp
index 408661d84d..2aed1e4a25 100644
--- a/library/cpp/testing/common/ut/env_ut.cpp
+++ b/library/cpp/testing/common/ut/env_ut.cpp
@@ -1,12 +1,12 @@
#include <library/cpp/testing/common/env.h>
#include <library/cpp/testing/common/scope.h>
-#include <library/cpp/testing/gtest/gtest.h>
+#include <library/cpp/testing/gtest/gtest.h>
#include <util/folder/dirut.h>
-#include <util/stream/file.h>
+#include <util/stream/file.h>
#include <util/system/env.h>
#include <util/system/execpath.h>
-#include <util/system/fs.h>
+#include <util/system/fs.h>
TEST(Runtime, ArcadiaSourceRoot) {
@@ -99,64 +99,64 @@ TEST(Runtime, GetOutputRamDrivePath) {
Singleton<NPrivate::TTestEnv>()->ReInitialize();
EXPECT_EQ(tmpDir, GetOutputRamDrivePath());
}
-
-#ifdef _linux_
-TEST(Runtime, GdbPath) {
- Singleton<NPrivate::TTestEnv>()->ReInitialize();
- EXPECT_TRUE(NFs::Exists(::GdbPath()));
-}
-#endif
-
-TString ReInitializeContext(TStringBuf data) {
- auto tmpDir = ::GetSystemTempDir();
- auto filename = tmpDir + "/context.json";
- TOFStream stream(filename);
- stream.Write(data.data(), data.size());
- stream.Finish();
-
- NTesting::TScopedEnvironment contextGuard("YA_TEST_CONTEXT_FILE", filename);
- Singleton<NPrivate::TTestEnv>()->ReInitialize();
-
- return filename;
-}
-
-TEST(Runtime, GetTestParam) {
- TString context = R"json({
- "runtime": {
- "test_params": {
- "a": "b",
- "c": "d"
- }
- }
- })json";
- auto filename = ReInitializeContext(context);
-
- EXPECT_EQ("b", GetTestParam("a"));
- EXPECT_EQ("d", GetTestParam("c"));
- EXPECT_EQ("", GetTestParam("e"));
- EXPECT_EQ("w", GetTestParam("e", "w"));
-
- Singleton<NPrivate::TTestEnv>()->AddTestParam("e", "e");
- EXPECT_EQ("e", GetTestParam("e"));
-}
-
-TEST(Runtime, WatchProcessCore) {
- TString context = R"json({
- "internal": {
- "core_search_file": "watch_core.txt"
- }
- })json";
- auto filename = ReInitializeContext(context);
-
- WatchProcessCore(1, "bin1", "pwd");
- WatchProcessCore(2, "bin1");
- StopProcessCoreWatching(2);
-
- TIFStream file("watch_core.txt");
- auto data = file.ReadAll();
- TString expected = R"json({"cmd":"add","pid":1,"binary_path":"bin1","cwd":"pwd"}
-{"cmd":"add","pid":2,"binary_path":"bin1"}
-{"cmd":"drop","pid":2}
-)json";
- EXPECT_EQ(expected, data);
-}
+
+#ifdef _linux_
+TEST(Runtime, GdbPath) {
+ Singleton<NPrivate::TTestEnv>()->ReInitialize();
+ EXPECT_TRUE(NFs::Exists(::GdbPath()));
+}
+#endif
+
+TString ReInitializeContext(TStringBuf data) {
+ auto tmpDir = ::GetSystemTempDir();
+ auto filename = tmpDir + "/context.json";
+ TOFStream stream(filename);
+ stream.Write(data.data(), data.size());
+ stream.Finish();
+
+ NTesting::TScopedEnvironment contextGuard("YA_TEST_CONTEXT_FILE", filename);
+ Singleton<NPrivate::TTestEnv>()->ReInitialize();
+
+ return filename;
+}
+
+TEST(Runtime, GetTestParam) {
+ TString context = R"json({
+ "runtime": {
+ "test_params": {
+ "a": "b",
+ "c": "d"
+ }
+ }
+ })json";
+ auto filename = ReInitializeContext(context);
+
+ EXPECT_EQ("b", GetTestParam("a"));
+ EXPECT_EQ("d", GetTestParam("c"));
+ EXPECT_EQ("", GetTestParam("e"));
+ EXPECT_EQ("w", GetTestParam("e", "w"));
+
+ Singleton<NPrivate::TTestEnv>()->AddTestParam("e", "e");
+ EXPECT_EQ("e", GetTestParam("e"));
+}
+
+TEST(Runtime, WatchProcessCore) {
+ TString context = R"json({
+ "internal": {
+ "core_search_file": "watch_core.txt"
+ }
+ })json";
+ auto filename = ReInitializeContext(context);
+
+ WatchProcessCore(1, "bin1", "pwd");
+ WatchProcessCore(2, "bin1");
+ StopProcessCoreWatching(2);
+
+ TIFStream file("watch_core.txt");
+ auto data = file.ReadAll();
+ TString expected = R"json({"cmd":"add","pid":1,"binary_path":"bin1","cwd":"pwd"}
+{"cmd":"add","pid":2,"binary_path":"bin1"}
+{"cmd":"drop","pid":2}
+)json";
+ EXPECT_EQ(expected, data);
+}
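
The tests above exercise the context-file mechanism: YA_TEST_CONTEXT_FILE points at a JSON document, and its runtime.test_params section backs GetTestParam(). A small Python sketch of producing such a context file; the helper name is hypothetical:

    import json
    import os
    import tempfile

    # Write a minimal test context and point the environment at it,
    # mirroring ReInitializeContext in the test above.
    def write_context(params):
        fd, path = tempfile.mkstemp(suffix=".json")
        with os.fdopen(fd, "w") as f:
            json.dump({"runtime": {"test_params": params}}, f)
        os.environ["YA_TEST_CONTEXT_FILE"] = path
        return path

    write_context({"a": "b", "c": "d"})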
diff --git a/library/cpp/testing/unittest/fat/test_port_manager.cpp b/library/cpp/testing/unittest/fat/test_port_manager.cpp
index 472a66249e..f77d2e3a25 100644
--- a/library/cpp/testing/unittest/fat/test_port_manager.cpp
+++ b/library/cpp/testing/unittest/fat/test_port_manager.cpp
@@ -1,36 +1,36 @@
#include <library/cpp/testing/unittest/registar.h>
#include <library/cpp/testing/unittest/tests_data.h>
-
-bool IsFreePort(ui16 port) {
- TInet6StreamSocket sock;
- TSockAddrInet6 addr("::", port);
- Y_ENSURE(SetSockOpt(sock, SOL_SOCKET, SO_REUSEADDR, 1) == 0);
- SetReuseAddressAndPort(sock);
- if (sock.Bind(&addr) == 0) {
- return true;
- }
- return false;
-}
-
-void get_port_ranges() {
+
+bool IsFreePort(ui16 port) {
+ TInet6StreamSocket sock;
+ TSockAddrInet6 addr("::", port);
+ Y_ENSURE(SetSockOpt(sock, SOL_SOCKET, SO_REUSEADDR, 1) == 0);
+ SetReuseAddressAndPort(sock);
+ if (sock.Bind(&addr) == 0) {
+ return true;
+ }
+ return false;
+}
+
+void get_port_ranges() {
for (int i = 1; i < 10; ++i) {
- TPortManager pm;
- ui16 port = pm.GetPortsRange(1024, i);
- for (int p = port; p < port + i; ++p) {
- UNIT_ASSERT(IsFreePort(p));
- }
- }
-}
-
-Y_UNIT_TEST_SUITE(TestTPortManager) {
- Y_UNIT_TEST(ParallelRun0) {get_port_ranges();}
- Y_UNIT_TEST(ParallelRun1) {get_port_ranges();}
- Y_UNIT_TEST(ParallelRun2) {get_port_ranges();}
- Y_UNIT_TEST(ParallelRun3) {get_port_ranges();}
- Y_UNIT_TEST(ParallelRun4) {get_port_ranges();}
- Y_UNIT_TEST(ParallelRun5) {get_port_ranges();}
- Y_UNIT_TEST(ParallelRun6) {get_port_ranges();}
- Y_UNIT_TEST(ParallelRun7) {get_port_ranges();}
- Y_UNIT_TEST(ParallelRun8) {get_port_ranges();}
- Y_UNIT_TEST(ParallelRun9) {get_port_ranges();}
-}
+ TPortManager pm;
+ ui16 port = pm.GetPortsRange(1024, i);
+ for (int p = port; p < port + i; ++p) {
+ UNIT_ASSERT(IsFreePort(p));
+ }
+ }
+}
+
+Y_UNIT_TEST_SUITE(TestTPortManager) {
+ Y_UNIT_TEST(ParallelRun0) {get_port_ranges();}
+ Y_UNIT_TEST(ParallelRun1) {get_port_ranges();}
+ Y_UNIT_TEST(ParallelRun2) {get_port_ranges();}
+ Y_UNIT_TEST(ParallelRun3) {get_port_ranges();}
+ Y_UNIT_TEST(ParallelRun4) {get_port_ranges();}
+ Y_UNIT_TEST(ParallelRun5) {get_port_ranges();}
+ Y_UNIT_TEST(ParallelRun6) {get_port_ranges();}
+ Y_UNIT_TEST(ParallelRun7) {get_port_ranges();}
+ Y_UNIT_TEST(ParallelRun8) {get_port_ranges();}
+ Y_UNIT_TEST(ParallelRun9) {get_port_ranges();}
+}
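
IsFreePort above reduces to a bind probe: a port counts as free if an IPv6 any-address socket with SO_REUSEADDR can bind to it. A rough Python equivalent of the same check, for illustration only:

    import socket

    # A port is considered free if a SO_REUSEADDR socket binds successfully.
    def is_free_port(port):
        sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        try:
            sock.bind(("::", port))
            return True
        except OSError:
            return False
        finally:
            sock.close()

Such a probe is inherently racy; the test above only uses it to validate ranges just handed out by TPortManager.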
diff --git a/library/cpp/testing/unittest/fat/ya.make b/library/cpp/testing/unittest/fat/ya.make
index 6eb68767b4..d405e599ee 100644
--- a/library/cpp/testing/unittest/fat/ya.make
+++ b/library/cpp/testing/unittest/fat/ya.make
@@ -1,19 +1,19 @@
-UNITTEST()
-
-OWNER(g:yatool)
-
-SRCS(
- test_port_manager.cpp
-)
-
-SIZE(LARGE)
-
-# We need to run tests at the same time on a single machine
-FORK_SUBTESTS()
-
+UNITTEST()
+
+OWNER(g:yatool)
+
+SRCS(
+ test_port_manager.cpp
+)
+
+SIZE(LARGE)
+
+# We need to run tests at the same time on a single machine
+FORK_SUBTESTS()
+
TAG(
ya:fat
ya:force_sandbox
)
-END()
+END()
diff --git a/library/cpp/testing/unittest/registar.cpp b/library/cpp/testing/unittest/registar.cpp
index 06882dd1ae..3679b768ed 100644
--- a/library/cpp/testing/unittest/registar.cpp
+++ b/library/cpp/testing/unittest/registar.cpp
@@ -106,10 +106,10 @@ struct TDiffColorizer {
}
};
-struct TTraceDiffFormatter {
+struct TTraceDiffFormatter {
bool Reverse = false;
- explicit TTraceDiffFormatter(bool reverse = false)
+ explicit TTraceDiffFormatter(bool reverse = false)
: Reverse(reverse)
{
}
@@ -123,26 +123,26 @@ struct TTraceDiffFormatter {
}
TString Left(TArrayRef<const char> str) const {
- return NUnitTest::GetFormatTag("good") +
+ return NUnitTest::GetFormatTag("good") +
TString(str.begin(), str.end()) +
NUnitTest::GetResetTag();
}
TString Right(TArrayRef<const char> str) const {
- return NUnitTest::GetFormatTag("bad") +
+ return NUnitTest::GetFormatTag("bad") +
TString(str.begin(), str.end()) +
NUnitTest::GetResetTag();
}
};
TString NUnitTest::GetFormatTag(const char* name) {
- return Sprintf("[[%s]]", name);
-}
-
+ return Sprintf("[[%s]]", name);
+}
+
TString NUnitTest::GetResetTag() {
return TString("[[rst]]");
-}
-
+}
+
TString NUnitTest::ColoredDiff(TStringBuf s1, TStringBuf s2, const TString& delims, bool reverse) {
TStringStream res;
TVector<NDiff::TChunk<char>> chunks;
@@ -150,8 +150,8 @@ TString NUnitTest::ColoredDiff(TStringBuf s1, TStringBuf s2, const TString& deli
if (NUnitTest::ShouldColorizeDiff) {
NDiff::PrintChunks(res, TDiffColorizer(reverse), chunks);
} else {
- res << NUnitTest::GetResetTag();
- NDiff::PrintChunks(res, TTraceDiffFormatter(reverse), chunks);
+ res << NUnitTest::GetResetTag();
+ NDiff::PrintChunks(res, TTraceDiffFormatter(reverse), chunks);
}
return res.Str();
}
@@ -478,18 +478,18 @@ unsigned NUnitTest::TTestFactory::Execute() {
#ifdef _unix_ // on Windows RTTI causes memory leaks
TString type = test->TypeId();
if (types.insert(type).second == false) {
-            warnx("Duplicate suite found: %s (%s). Probably you have copy-pasted a suite without changing its name", factory->Name().c_str(), type.c_str());
+            warnx("Duplicate suite found: %s (%s). Probably you have copy-pasted a suite without changing its name", factory->Name().c_str(), type.c_str());
return 1;
}
#endif // _unix_
test->Parent_ = this;
-#ifdef UT_SKIP_EXCEPTIONS
+#ifdef UT_SKIP_EXCEPTIONS
try {
#endif
test->Execute();
-#ifdef UT_SKIP_EXCEPTIONS
+#ifdef UT_SKIP_EXCEPTIONS
} catch (...) {
}
#endif
@@ -497,7 +497,7 @@ unsigned NUnitTest::TTestFactory::Execute() {
Processor_->End();
- return bool(Processor_->FailTests());
+ return bool(Processor_->FailTests());
}
void NUnitTest::TTestFactory::SetProcessor(ITestSuiteProcessor* processor) {
diff --git a/library/cpp/testing/unittest/tests_data.cpp b/library/cpp/testing/unittest/tests_data.cpp
index d1edd84196..b51cbc4b87 100644
--- a/library/cpp/testing/unittest/tests_data.cpp
+++ b/library/cpp/testing/unittest/tests_data.cpp
@@ -6,52 +6,52 @@
#include <util/system/env.h>
#include <util/system/mutex.h>
-class TPortManager::TPortManagerImpl {
-public:
+class TPortManager::TPortManagerImpl {
+public:
TPortManagerImpl(bool reservePortsForCurrentTest)
: EnableReservePortsForCurrentTest(reservePortsForCurrentTest)
, DisableRandomPorts(!GetEnv("NO_RANDOM_PORTS").empty())
{
- }
-
- ui16 GetPort(ui16 port) {
+ }
+
+ ui16 GetPort(ui16 port) {
if (port && DisableRandomPorts) {
- return port;
- }
-
+ return port;
+ }
+
TAtomicSharedPtr<NTesting::IPort> holder(NTesting::GetFreePort().Release());
ReservePortForCurrentTest(holder);
-
+
TGuard<TMutex> g(Lock);
ReservedPorts.push_back(holder);
return holder->Get();
}
-
+
ui16 GetUdpPort(ui16 port) {
return GetPort(port);
}
-
+
ui16 GetTcpPort(ui16 port) {
return GetPort(port);
- }
-
+ }
+
ui16 GetTcpAndUdpPort(ui16 port) {
return GetPort(port);
}
- ui16 GetPortsRange(const ui16 startPort, const ui16 range) {
+ ui16 GetPortsRange(const ui16 startPort, const ui16 range) {
Y_UNUSED(startPort);
auto ports = NTesting::NLegacy::GetFreePortsRange(range);
ui16 first = ports[0];
- TGuard<TMutex> g(Lock);
+ TGuard<TMutex> g(Lock);
for (auto& port : ports) {
ReservedPorts.emplace_back(port.Release());
ReservePortForCurrentTest(ReservedPorts.back());
- }
+ }
return first;
- }
-
-private:
+ }
+
+private:
void ReservePortForCurrentTest(const TAtomicSharedPtr<NTesting::IPort>& portGuard) {
if (EnableReservePortsForCurrentTest) {
TTestBase* currentTest = NUnitTest::NPrivate::GetCurrentTest();
@@ -64,40 +64,40 @@ private:
}
private:
- TMutex Lock;
+ TMutex Lock;
TVector<TAtomicSharedPtr<NTesting::IPort>> ReservedPorts;
const bool EnableReservePortsForCurrentTest;
const bool DisableRandomPorts;
-};
-
+};
+
TPortManager::TPortManager(bool reservePortsForCurrentTest)
: Impl_(new TPortManagerImpl(reservePortsForCurrentTest))
-{
-}
-
-TPortManager::~TPortManager() {
-}
-
-ui16 TPortManager::GetPort(ui16 port) {
- return Impl_->GetTcpPort(port);
-}
+{
+}
+
+TPortManager::~TPortManager() {
+}
+
+ui16 TPortManager::GetPort(ui16 port) {
+ return Impl_->GetTcpPort(port);
+}
ui16 TPortManager::GetTcpPort(ui16 port) {
- return Impl_->GetTcpPort(port);
+ return Impl_->GetTcpPort(port);
}
ui16 TPortManager::GetUdpPort(ui16 port) {
- return Impl_->GetUdpPort(port);
+ return Impl_->GetUdpPort(port);
}
ui16 TPortManager::GetTcpAndUdpPort(ui16 port) {
- return Impl_->GetTcpAndUdpPort(port);
+ return Impl_->GetTcpAndUdpPort(port);
+}
+
+ui16 TPortManager::GetPortsRange(const ui16 startPort, const ui16 range) {
+ return Impl_->GetPortsRange(startPort, range);
}
-ui16 TPortManager::GetPortsRange(const ui16 startPort, const ui16 range) {
- return Impl_->GetPortsRange(startPort, range);
-}
-
ui16 GetRandomPort() {
TPortManager* pm = Singleton<TPortManager>(false);
return pm->GetPort();
diff --git a/library/cpp/testing/unittest/tests_data.h b/library/cpp/testing/unittest/tests_data.h
index dac65dfc72..6536bc1ae6 100644
--- a/library/cpp/testing/unittest/tests_data.h
+++ b/library/cpp/testing/unittest/tests_data.h
@@ -2,8 +2,8 @@
#include <library/cpp/testing/common/env.h>
-#include <util/generic/noncopyable.h>
-#include <util/generic/ptr.h>
+#include <util/generic/noncopyable.h>
+#include <util/generic/ptr.h>
#include <util/generic/string.h>
#include <util/network/sock.h>
@@ -28,12 +28,12 @@ void SetReuseAddressAndPort(const TSocketType& sock) {
}
class TPortManager: public TNonCopyable {
-public:
+public:
TPortManager(bool reservePortsForCurrentTest = true);
- ~TPortManager();
+ ~TPortManager();
// Gets free TCP port
- ui16 GetPort(ui16 port = 0);
+ ui16 GetPort(ui16 port = 0);
// Gets free TCP port
ui16 GetTcpPort(ui16 port = 0);
@@ -44,11 +44,11 @@ public:
// Gets one free port for use in both TCP and UDP protocols
ui16 GetTcpAndUdpPort(ui16 port = 0);
- ui16 GetPortsRange(const ui16 startPort, const ui16 range);
-
-private:
- class TPortManagerImpl;
- THolder<TPortManagerImpl> Impl_;
+ ui16 GetPortsRange(const ui16 startPort, const ui16 range);
+
+private:
+ class TPortManagerImpl;
+ THolder<TPortManagerImpl> Impl_;
};
ui16 GetRandomPort();
diff --git a/library/cpp/testing/unittest/ut/main.cpp b/library/cpp/testing/unittest/ut/main.cpp
index 0614e77fe2..e303e21e30 100644
--- a/library/cpp/testing/unittest/ut/main.cpp
+++ b/library/cpp/testing/unittest/ut/main.cpp
@@ -1,11 +1,11 @@
#include <library/cpp/testing/unittest/gtest.h>
#include <library/cpp/testing/unittest/registar.h>
#include <library/cpp/testing/unittest/tests_data.h>
-
-#include <util/generic/set.h>
-#include <util/network/sock.h>
-#include <util/system/env.h>
-#include <util/system/fs.h>
+
+#include <util/generic/set.h>
+#include <util/network/sock.h>
+#include <util/system/env.h>
+#include <util/system/fs.h>
TEST(GTest, Test1) {
UNIT_ASSERT_EQUAL(1, 1);
@@ -60,7 +60,7 @@ TEST(ETest, Test1) {
UNIT_CHECK_GENERATED_EXCEPTION(ythrow yexception(), yexception);
UNIT_CHECK_GENERATED_NO_EXCEPTION(true, yexception);
}
-
+
Y_UNIT_TEST_SUITE(TestSingleTestFixture)
{
Y_UNIT_TEST_F(Test3, TSimpleFixture) {
diff --git a/library/cpp/testing/unittest/utmain.cpp b/library/cpp/testing/unittest/utmain.cpp
index e5c4185001..305bc6b40f 100644
--- a/library/cpp/testing/unittest/utmain.cpp
+++ b/library/cpp/testing/unittest/utmain.cpp
@@ -6,7 +6,7 @@
#include <library/cpp/json/writer/json.h>
#include <library/cpp/json/writer/json_value.h>
-#include <library/cpp/testing/common/env.h>
+#include <library/cpp/testing/common/env.h>
#include <library/cpp/testing/hook/hook.h>
#include <util/datetime/base.h>
@@ -19,9 +19,9 @@
#include <util/network/init.h>
-#include <util/stream/file.h>
+#include <util/stream/file.h>
#include <util/stream/output.h>
-#include <util/string/join.h>
+#include <util/string/join.h>
#include <util/string/util.h>
#include <util/system/defaults.h>
@@ -46,8 +46,8 @@
#define NOTE_IN_VALGRIND(test)
#endif
-const size_t MAX_COMMENT_MESSAGE_LENGTH = 1024 * 1024; // 1 MB
-
+const size_t MAX_COMMENT_MESSAGE_LENGTH = 1024 * 1024; // 1 MB
+
using namespace NUnitTest;
class TNullTraceWriterProcessor: public ITestSuiteProcessor {
@@ -56,8 +56,8 @@ class TNullTraceWriterProcessor: public ITestSuiteProcessor {
class TTraceWriterProcessor: public ITestSuiteProcessor {
public:
inline TTraceWriterProcessor(const char* traceFilePath, EOpenMode mode)
- : PrevTime(TInstant::Now())
- {
+ : PrevTime(TInstant::Now())
+ {
TraceFile = new TUnbufferedFileOutput(TFile(traceFilePath, mode | WrOnly | Seq));
}
@@ -68,17 +68,17 @@ private:
TVector<TString> ErrorMessages;
inline void Trace(const TString eventName, const NJson::TJsonValue eventValue) {
- NJsonWriter::TBuf json(NJsonWriter::HEM_UNSAFE);
- json.BeginObject();
-
- json.WriteKey("name").WriteString(eventName);
- json.WriteKey("value").WriteJsonValue(&eventValue);
- json.WriteKey("timestamp").WriteDouble(TInstant::Now().SecondsFloat(), PREC_NDIGITS, 14);
-
- json.EndObject();
-
- json.FlushTo(TraceFile.Get());
- *TraceFile << "\n";
+ NJsonWriter::TBuf json(NJsonWriter::HEM_UNSAFE);
+ json.BeginObject();
+
+ json.WriteKey("name").WriteString(eventName);
+ json.WriteKey("value").WriteJsonValue(&eventValue);
+ json.WriteKey("timestamp").WriteDouble(TInstant::Now().SecondsFloat(), PREC_NDIGITS, 14);
+
+ json.EndObject();
+
+ json.FlushTo(TraceFile.Get());
+ *TraceFile << "\n";
}
inline void TraceSubtestFinished(const char* className, const char* subtestName, const char* status, const TString comment, const TTestContext* context) {
@@ -88,43 +88,43 @@ private:
event.InsertValue("subtest", subtestName);
event.InsertValue("status", status);
event.InsertValue("comment", comment.data());
- event.InsertValue("time", (now - PrevTime).SecondsFloat());
+ event.InsertValue("time", (now - PrevTime).SecondsFloat());
if (context) {
for (const auto& metric : context->Metrics) {
event["metrics"].InsertValue(metric.first, metric.second);
}
}
Trace("subtest-finished", event);
-
+
PrevTime = now;
TString marker = Join("", "\n###subtest-finished:", className, "::", subtestName, "\n");
- Cout << marker;
- Cout.Flush();
- Cerr << comment;
- Cerr << marker;
- Cerr.Flush();
+ Cout << marker;
+ Cout.Flush();
+ Cerr << comment;
+ Cerr << marker;
+ Cerr.Flush();
}
virtual TString BuildComment(const char* message, const char* backTrace) {
- return NUnitTest::GetFormatTag("bad") +
+ return NUnitTest::GetFormatTag("bad") +
TString(message).substr(0, MAX_COMMENT_MESSAGE_LENGTH) +
NUnitTest::GetResetTag() +
TString("\n") +
NUnitTest::GetFormatTag("alt1") +
TString(backTrace).substr(0, MAX_COMMENT_MESSAGE_LENGTH) +
NUnitTest::GetResetTag();
- }
-
+ }
+
void OnBeforeTest(const TTest* test) override {
NJson::TJsonValue event;
event.InsertValue("class", test->unit->name);
event.InsertValue("subtest", test->name);
Trace("subtest-started", event);
TString marker = Join("", "\n###subtest-started:", test->unit->name, "::", test->name, "\n");
- Cout << marker;
- Cout.Flush();
- Cerr << marker;
- Cerr.Flush();
+ Cout << marker;
+ Cout.Flush();
+ Cerr << marker;
+ Cerr.Flush();
}
void OnUnitStart(const TUnit* unit) override {
@@ -552,13 +552,13 @@ public:
if (Verbose_) {
return true;
} else {
- Stream_ << name << "\n";
+ Stream_ << name << "\n";
return false;
}
}
bool CheckAccessTest(TString suite, const char* name) override {
- Stream_ << suite << "::" << name << "\n";
+ Stream_ << suite << "::" << name << "\n";
return false;
}
@@ -601,7 +601,7 @@ static const TWinEnvironment Instance;
#endif // _win_
static int DoList(bool verbose, IOutputStream& stream) {
- TEnumeratingProcessor eproc(verbose, stream);
+ TEnumeratingProcessor eproc(verbose, stream);
TTestFactory::Instance().SetProcessor(&eproc);
TTestFactory::Instance().Execute();
return 0;
@@ -625,28 +625,28 @@ static int DoUsage(const char* progname) {
return 0;
}
-#if defined(_linux_) && defined(CLANG_COVERAGE)
-extern "C" int __llvm_profile_write_file(void);
-
-static void GracefulShutdownHandler(int) {
- try {
- __llvm_profile_write_file();
- } catch (...) {
- }
- abort();
-}
-#endif
-
+#if defined(_linux_) && defined(CLANG_COVERAGE)
+extern "C" int __llvm_profile_write_file(void);
+
+static void GracefulShutdownHandler(int) {
+ try {
+ __llvm_profile_write_file();
+ } catch (...) {
+ }
+ abort();
+}
+#endif
+
int NUnitTest::RunMain(int argc, char** argv) {
-#if defined(_linux_) && defined(CLANG_COVERAGE)
- {
- struct sigaction sa;
- memset(&sa, 0, sizeof(sa));
- sa.sa_handler = GracefulShutdownHandler;
- sa.sa_flags = SA_SIGINFO | SA_RESTART;
- Y_VERIFY(!sigaction(SIGUSR2, &sa, nullptr));
- }
-#endif
+#if defined(_linux_) && defined(CLANG_COVERAGE)
+ {
+ struct sigaction sa;
+ memset(&sa, 0, sizeof(sa));
+ sa.sa_handler = GracefulShutdownHandler;
+ sa.sa_flags = SA_SIGINFO | SA_RESTART;
+ Y_VERIFY(!sigaction(SIGUSR2, &sa, nullptr));
+ }
+#endif
NTesting::THook::CallBeforeInit();
InitNetworkSubSystem();
@@ -668,13 +668,13 @@ int NUnitTest::RunMain(int argc, char** argv) {
IOutputStream* listStream = &Cout;
THolder<IOutputStream> listFile;
- enum EListType {
- DONT_LIST,
- LIST,
- LIST_VERBOSE
- };
- EListType listTests = DONT_LIST;
-
+ enum EListType {
+ DONT_LIST,
+ LIST,
+ LIST_VERBOSE
+ };
+ EListType listTests = DONT_LIST;
+
for (size_t i = 1; i < (size_t)argc; ++i) {
const char* name = argv[i];
@@ -682,9 +682,9 @@ int NUnitTest::RunMain(int argc, char** argv) {
if (strcmp(name, "--help") == 0 || strcmp(name, "-h") == 0) {
return DoUsage(argv[0]);
} else if (strcmp(name, "--list") == 0 || strcmp(name, "-l") == 0) {
- listTests = LIST;
+ listTests = LIST;
} else if (strcmp(name, "--list-verbose") == 0 || strcmp(name, "-A") == 0) {
- listTests = LIST_VERBOSE;
+ listTests = LIST_VERBOSE;
} else if (strcmp(name, "--print-before-suite=false") == 0) {
processor.SetPrintBeforeSuite(false);
} else if (strcmp(name, "--print-before-test=false") == 0) {
@@ -718,20 +718,20 @@ int NUnitTest::RunMain(int argc, char** argv) {
processor.BeQuiet();
NUnitTest::ShouldColorizeDiff = false;
processor.SetTraceProcessor(new TTraceWriterProcessor(argv[i], CreateAlways));
- } else if (strcmp(name, "--trace-path-append") == 0) {
+ } else if (strcmp(name, "--trace-path-append") == 0) {
++i;
processor.BeQuiet();
NUnitTest::ShouldColorizeDiff = false;
- processor.SetTraceProcessor(new TTraceWriterProcessor(argv[i], OpenAlways | ForAppend));
- } else if (strcmp(name, "--list-path") == 0) {
- ++i;
+ processor.SetTraceProcessor(new TTraceWriterProcessor(argv[i], OpenAlways | ForAppend));
+ } else if (strcmp(name, "--list-path") == 0) {
+ ++i;
listFile = MakeHolder<TFixedBufferFileOutput>(argv[i]);
- listStream = listFile.Get();
+ listStream = listFile.Get();
} else if (strcmp(name, "--test-param") == 0) {
++i;
TString param(argv[i]);
size_t assign = param.find('=');
- Singleton<::NPrivate::TTestEnv>()->AddTestParam(param.substr(0, assign), param.substr(assign + 1));
+ Singleton<::NPrivate::TTestEnv>()->AddTestParam(param.substr(0, assign), param.substr(assign + 1));
} else if (TString(name).StartsWith("--")) {
return DoUsage(argv[0]), 1;
} else if (*name == '-') {
@@ -743,9 +743,9 @@ int NUnitTest::RunMain(int argc, char** argv) {
}
}
}
- if (listTests != DONT_LIST) {
- return DoList(listTests == LIST_VERBOSE, *listStream);
- }
+ if (listTests != DONT_LIST) {
+ return DoList(listTests == LIST_VERBOSE, *listStream);
+ }
TTestFactory::Instance().SetProcessor(&processor);
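
TTraceWriterProcessor above serialises every test event as one JSON line with "name", "value" and "timestamp" keys and flushes it immediately. A minimal Python sketch of emitting that trace format; the helper is hypothetical and only the field set follows the hunk:

    import json
    import sys
    import time

    # One {"name", "value", "timestamp"} object per line, as Trace() above.
    def trace(out, event_name, event_value):
        out.write(json.dumps({
            "name": event_name,
            "value": event_value,
            "timestamp": time.time(),
        }) + "\n")
        out.flush()

    trace(sys.stdout, "subtest-finished",
          {"class": "TSuite", "subtest": "TestCase", "status": "good",
           "comment": "", "time": 0.01})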
diff --git a/library/cpp/testing/ya.make b/library/cpp/testing/ya.make
index bff91ef52d..6a57ac2ee6 100644
--- a/library/cpp/testing/ya.make
+++ b/library/cpp/testing/ya.make
@@ -6,7 +6,7 @@ RECURSE(
benchmark/main
boost_test
boost_test_main
- dump_clang_coverage
+ dump_clang_coverage
gbenchmark_main
gmock
gmock_in_unittest
diff --git a/library/cpp/ya.make b/library/cpp/ya.make
index 48e44aea55..8c1193b007 100644
--- a/library/cpp/ya.make
+++ b/library/cpp/ya.make
@@ -29,14 +29,14 @@ RECURSE(
bucket_quoter
build_info
cache
- case_insensitive_string
+ case_insensitive_string
cgiparam
cgiparam/fuzz
cgiparam/ut
charset
charset/ut
chromium_trace
- clang_tidy
+ clang_tidy
clickhouse
clustered_hnsw
clustered_hnsw/ut
@@ -60,8 +60,8 @@ RECURSE(
config
config/extra
config/ut
- consistent_hash_ring
- consistent_hash_ring/ut
+ consistent_hash_ring
+ consistent_hash_ring/ut
consistent_hashing
consistent_hashing/ut
containers
@@ -142,7 +142,7 @@ RECURSE(
getopt/last_getopt_demo
getopt/small
getopt/ut
- getoptpb
+ getoptpb
gettimeofday
gradient_optimize
gradient_optimize/ut
@@ -290,7 +290,7 @@ RECURSE(
proto_config/plugin
proto_config/protos
proto_config/ut
- protobuf
+ protobuf
pybind
pybind/example
pybind/example/dynamic
@@ -342,7 +342,7 @@ RECURSE(
sqlite3/ut
sse
ssh
- ssh/ut
+ ssh/ut
ssh_sign
ssh_sign/ut
stat-handle
@@ -372,9 +372,9 @@ RECURSE(
token/serialization
token/serialization/ut
token/ut
- tokenclassifiers
- tokenizer
- tokenizer/ut
+ tokenclassifiers
+ tokenizer
+ tokenizer/ut
trace_usage
trace_usage/benchmark
trace_usage/ut
diff --git a/library/python/cores/__init__.py b/library/python/cores/__init__.py
index 137056a915..fdb1f82a46 100644
--- a/library/python/cores/__init__.py
+++ b/library/python/cores/__init__.py
@@ -28,7 +28,7 @@ def recover_core_dump_file(binary_path, cwd, pid):
self.path = path
self.mask = mask
- cwd = cwd or os.getcwd()
+ cwd = cwd or os.getcwd()
system = platform.system().lower()
if system.startswith("linux"):
import stat
diff --git a/library/python/cores/ya.make b/library/python/cores/ya.make
index f99c4c1da6..76264e9cce 100644
--- a/library/python/cores/ya.make
+++ b/library/python/cores/ya.make
@@ -1,6 +1,6 @@
OWNER(
prettyboy
- g:yatest
+ g:yatest
)
PY23_LIBRARY()
diff --git a/library/python/filelock/__init__.py b/library/python/filelock/__init__.py
index f62d08b66c..f81ff67f37 100644
--- a/library/python/filelock/__init__.py
+++ b/library/python/filelock/__init__.py
@@ -21,7 +21,7 @@ class AbstractFileLock(object):
def __init__(self, path):
self.path = path
- def acquire(self, blocking=True):
+ def acquire(self, blocking=True):
raise NotImplementedError
def release(self):
@@ -39,24 +39,24 @@ class _NixFileLock(AbstractFileLock):
def __init__(self, path):
super(_NixFileLock, self).__init__(path)
- from fcntl import flock, LOCK_EX, LOCK_UN, LOCK_NB
- self._locker = lambda lock, blocking: flock(lock, LOCK_EX if blocking else LOCK_EX | LOCK_NB)
+ from fcntl import flock, LOCK_EX, LOCK_UN, LOCK_NB
+ self._locker = lambda lock, blocking: flock(lock, LOCK_EX if blocking else LOCK_EX | LOCK_NB)
self._unlocker = lambda lock: flock(lock, LOCK_UN)
- self._lock = open(self.path, 'a')
- set_close_on_exec(self._lock)
-
- def acquire(self, blocking=True):
- import errno
- try:
- self._locker(self._lock, blocking)
- except IOError as e:
- if e.errno in (errno.EAGAIN, errno.EACCES) and not blocking:
- return False
- raise
- return True
+ self._lock = open(self.path, 'a')
+ set_close_on_exec(self._lock)
+
+ def acquire(self, blocking=True):
+ import errno
+ try:
+ self._locker(self._lock, blocking)
+ except IOError as e:
+ if e.errno in (errno.EAGAIN, errno.EACCES) and not blocking:
+ return False
+ raise
+ return True
def release(self):
- self._unlocker(self._lock)
+ self._unlocker(self._lock)
def __del__(self):
if hasattr(self, "_lock"):
@@ -81,26 +81,26 @@ class _WinFileLock(AbstractFileLock):
if e.errno != errno.EACCES or not os.path.isfile(path):
raise
- def acquire(self, blocking=True):
+ def acquire(self, blocking=True):
self._lock = open(self.path)
set_close_on_exec(self._lock)
-
+
import time
locked = False
while not locked:
locked = library.python.windows.lock_file(self._lock, 0, self._LOCKED_BYTES_NUM, raises=False)
- if locked:
- return True
- if blocking:
+ if locked:
+ return True
+ if blocking:
time.sleep(.5)
- else:
- return False
+ else:
+ return False
def release(self):
if self._lock:
library.python.windows.unlock_file(self._lock, 0, self._LOCKED_BYTES_NUM, raises=False)
- self._lock.close()
- self._lock = None
+ self._lock.close()
+ self._lock = None
class FileLock(AbstractFileLock):
@@ -113,9 +113,9 @@ class FileLock(AbstractFileLock):
else:
self._lock = _NixFileLock(path)
- def acquire(self, blocking=True):
- logger.debug('Acquiring filelock (blocking=%s): %s', blocking, self.path)
- return self._lock.acquire(blocking)
+ def acquire(self, blocking=True):
+ logger.debug('Acquiring filelock (blocking=%s): %s', blocking, self.path)
+ return self._lock.acquire(blocking)
def release(self):
logger.debug('Ensuring filelock released: %s', self.path)
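Note: the `_NixFileLock` restored above maps a failed non-blocking `flock(LOCK_EX | LOCK_NB)` (`EAGAIN`/`EACCES`) to a `False` return instead of raising, while the Windows variant polls `lock_file` every 0.5 s. A minimal usage sketch, assuming an Arcadia-style checkout where this import path resolves:

```python
# Minimal usage sketch for the FileLock API shown in this hunk
# (illustrative; the import path assumes an in-repo run).
import library.python.filelock

lock = library.python.filelock.FileLock("/tmp/example.lock")
if lock.acquire(blocking=False):  # returns False if someone else holds it
    try:
        pass  # critical section goes here
    finally:
        lock.release()
```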
diff --git a/library/python/filelock/ut/lib/test_filelock.py b/library/python/filelock/ut/lib/test_filelock.py
index a5624f827c..1b11d89123 100644
--- a/library/python/filelock/ut/lib/test_filelock.py
+++ b/library/python/filelock/ut/lib/test_filelock.py
@@ -3,7 +3,7 @@ import time
import logging
import multiprocessing
import tempfile
-import threading
+import threading
import library.python.filelock
@@ -48,36 +48,36 @@ def test_filelock():
time1 = time2
-def test_filelock_init_acquired():
+def test_filelock_init_acquired():
temp_dir = tempfile.mkdtemp()
lock_path = os.path.join(temp_dir, "file.lock")
with library.python.filelock.FileLock(lock_path):
sublock = library.python.filelock.FileLock(lock_path)
del sublock
-
-
-def test_concurrent_lock():
- filename = 'con.lock'
-
- def lock():
- l = library.python.filelock.FileLock(filename)
- time.sleep(1)
- l.acquire()
- l.release()
- try:
- os.unlink(filename)
- except OSError:
- pass
-
- threads = []
- for i in range(100):
- t = threading.Thread(target=lock)
- t.daemon = True
- threads.append(t)
-
- for t in threads:
- t.start()
-
- for t in threads:
- t.join()
+
+
+def test_concurrent_lock():
+ filename = 'con.lock'
+
+ def lock():
+ l = library.python.filelock.FileLock(filename)
+ time.sleep(1)
+ l.acquire()
+ l.release()
+ try:
+ os.unlink(filename)
+ except OSError:
+ pass
+
+ threads = []
+ for i in range(100):
+ t = threading.Thread(target=lock)
+ t.daemon = True
+ threads.append(t)
+
+ for t in threads:
+ t.start()
+
+ for t in threads:
+ t.join()
diff --git a/library/python/fs/__init__.py b/library/python/fs/__init__.py
index c9651b03ae..b1b7cde079 100644
--- a/library/python/fs/__init__.py
+++ b/library/python/fs/__init__.py
@@ -4,11 +4,11 @@ import codecs
import errno
import logging
import os
-import random
+import random
import shutil
import six
import stat
-import sys
+import sys
import library.python.func
import library.python.strings
@@ -202,13 +202,13 @@ def hardlink_or_copy(src, lnk):
if WindowsError is not None and isinstance(exc, WindowsError) and exc.winerror == 1142: # too many hardlinks
return True
# cross-device hardlink or too many hardlinks, or some known WSL error
- if isinstance(exc, OSError) and exc.errno in (
- errno.EXDEV,
- errno.EMLINK,
- errno.EINVAL,
- errno.EACCES,
- errno.EPERM,
- ):
+ if isinstance(exc, OSError) and exc.errno in (
+ errno.EXDEV,
+ errno.EMLINK,
+ errno.EINVAL,
+ errno.EACCES,
+ errno.EPERM,
+ ):
return True
return False
@@ -312,9 +312,9 @@ def read_file_unicode(path, binary=True, enc='utf-8'):
@errorfix_win
def open_file(*args, **kwargs):
- return (
- library.python.windows.open_file(*args, **kwargs) if library.python.windows.on_win() else open(*args, **kwargs)
- )
+ return (
+ library.python.windows.open_file(*args, **kwargs) if library.python.windows.on_win() else open(*args, **kwargs)
+ )
# Atomic file write
@@ -363,15 +363,15 @@ def get_tree_size(path, recursive=False, raise_all_errors=False):
# Directory copy ported from Python 3
-def copytree3(
- src,
- dst,
- symlinks=False,
- ignore=None,
- copy_function=shutil.copy2,
- ignore_dangling_symlinks=False,
- dirs_exist_ok=False,
-):
+def copytree3(
+ src,
+ dst,
+ symlinks=False,
+ ignore=None,
+ copy_function=shutil.copy2,
+ ignore_dangling_symlinks=False,
+ dirs_exist_ok=False,
+):
"""Recursively copy a directory tree.
    copytree3 is a port of the shutil.copytree function from Python 3.2.
@@ -467,35 +467,35 @@ def walk_relative(path, topdown=True, onerror=None, followlinks=False):
for dirpath, dirnames, filenames in os.walk(path, topdown=topdown, onerror=onerror, followlinks=followlinks):
yield os.path.relpath(dirpath, path), dirnames, filenames
-
+
def supports_clone():
if 'darwin' in sys.platform:
import platform
-
+
return list(map(int, platform.mac_ver()[0].split('.'))) >= [10, 13]
return False
-
-
-def commonpath(paths):
- assert paths
- if len(paths) == 1:
- return next(iter(paths))
-
- split_paths = [path.split(os.sep) for path in paths]
- smin = min(split_paths)
- smax = max(split_paths)
-
- common = smin
- for i, c in enumerate(smin):
- if c != smax[i]:
- common = smin[:i]
- break
-
- return os.path.sep.join(common)
-
-
-def set_execute_bits(filename):
- stm = os.stat(filename).st_mode
- exe = stm | 0o111
- if stm != exe:
- os.chmod(filename, exe)
+
+
+def commonpath(paths):
+ assert paths
+ if len(paths) == 1:
+ return next(iter(paths))
+
+ split_paths = [path.split(os.sep) for path in paths]
+ smin = min(split_paths)
+ smax = max(split_paths)
+
+ common = smin
+ for i, c in enumerate(smin):
+ if c != smax[i]:
+ common = smin[:i]
+ break
+
+ return os.path.sep.join(common)
+
+
+def set_execute_bits(filename):
+ stm = os.stat(filename).st_mode
+ exe = stm | 0o111
+ if stm != exe:
+ os.chmod(filename, exe)
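Note: `commonpath` above only compares the lexicographically smallest and largest of the split paths; that is sufficient because list comparison is componentwise, so any prefix shared by those two extremes is shared by every path sorting between them. A quick check with POSIX paths, assuming the in-repo import path resolves:

```python
# Quick check of the min()/max() prefix argument behind commonpath()
# (illustrative; the import path assumes an Arcadia-style checkout).
import library.python.fs

paths = ['t/1/1', 't/1/2', 't/2']
# min() splits to ['t', '1', '1'], max() to ['t', '2']; their shared
# prefix ['t'] is also a prefix of every path sorting between them.
assert library.python.fs.commonpath(paths) == 't'
```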
diff --git a/library/python/fs/test/test_fs.py b/library/python/fs/test/test_fs.py
index 5c1c6030b5..9e2c70c069 100644
--- a/library/python/fs/test/test_fs.py
+++ b/library/python/fs/test/test_fs.py
@@ -18,16 +18,16 @@ def in_env(case):
def wrapped_case(*args, **kwargs):
with library.python.tmp.temp_dir() as temp_dir:
case(lambda path: os.path.join(temp_dir, path))
-
+
return wrapped_case
def mkfile(path, data=''):
with open(path, 'wb') as f:
if data:
- f.write(data) if isinstance(data, six.binary_type) else f.write(
- data.encode(library.python.strings.fs_encoding())
- )
+ f.write(data) if isinstance(data, six.binary_type) else f.write(
+ data.encode(library.python.strings.fs_encoding())
+ )
def mktree_example(path, name):
@@ -820,20 +820,20 @@ def test_read_file_empty(path):
@in_env
def test_read_file_multiline(path):
mkfile(path('src'), 'SRC line 1\nSRC line 2\n')
- assert (
- library.python.fs.read_file(path('src')).decode(library.python.strings.fs_encoding())
- == 'SRC line 1\nSRC line 2\n'
- )
+ assert (
+ library.python.fs.read_file(path('src')).decode(library.python.strings.fs_encoding())
+ == 'SRC line 1\nSRC line 2\n'
+ )
assert library.python.fs.read_file(path('src'), binary=False) == 'SRC line 1\nSRC line 2\n'
@in_env
def test_read_file_multiline_crlf(path):
mkfile(path('src'), 'SRC line 1\r\nSRC line 2\r\n')
- assert (
- library.python.fs.read_file(path('src')).decode(library.python.strings.fs_encoding())
- == 'SRC line 1\r\nSRC line 2\r\n'
- )
+ assert (
+ library.python.fs.read_file(path('src')).decode(library.python.strings.fs_encoding())
+ == 'SRC line 1\r\nSRC line 2\r\n'
+ )
if library.python.windows.on_win() or six.PY3: # universal newlines are by default in text mode in python3
assert library.python.fs.read_file(path('src'), binary=False) == 'SRC line 1\nSRC line 2\n'
else:
@@ -1005,9 +1005,9 @@ def test_copy_tree_custom_copy_function():
shutil.copy2(src, dst)
copied.append(dst)
- library.python.fs.copy_tree(
- "test_copy_tree_src", yatest.common.work_path("test_copy_tree_dst"), copy_function=copy_function
- )
+ library.python.fs.copy_tree(
+ "test_copy_tree_src", yatest.common.work_path("test_copy_tree_dst"), copy_function=copy_function
+ )
assert len(copied) == 2
assert yatest.common.work_path("test_copy_tree_dst/deepper/deepper.txt") in copied
assert yatest.common.work_path("test_copy_tree_dst/deepper/inner/inner.txt") in copied
@@ -1019,19 +1019,19 @@ def test_copy2():
assert os.path.islink("link2")
assert os.readlink("link2") == "non-existent"
-
-
-def test_commonpath():
- pj = os.path.join
- pja = lambda *x: os.path.abspath(pj(*x))
-
- assert library.python.fs.commonpath(['a', 'b']) == ''
- assert library.python.fs.commonpath([pj('t', '1')]) == pj('t', '1')
- assert library.python.fs.commonpath([pj('t', '1'), pj('t', '2')]) == pj('t')
- assert library.python.fs.commonpath([pj('t', '1', '2'), pj('t', '1', '2')]) == pj('t', '1', '2')
- assert library.python.fs.commonpath([pj('t', '1', '1'), pj('t', '1', '2')]) == pj('t', '1')
- assert library.python.fs.commonpath([pj('t', '1', '1'), pj('t', '1', '2'), pj('t', '1', '3')]) == pj('t', '1')
-
- assert library.python.fs.commonpath([pja('t', '1', '1'), pja('t', '1', '2')]) == pja('t', '1')
-
- assert library.python.fs.commonpath({pj('t', '1'), pj('t', '2')}) == pj('t')
+
+
+def test_commonpath():
+ pj = os.path.join
+ pja = lambda *x: os.path.abspath(pj(*x))
+
+ assert library.python.fs.commonpath(['a', 'b']) == ''
+ assert library.python.fs.commonpath([pj('t', '1')]) == pj('t', '1')
+ assert library.python.fs.commonpath([pj('t', '1'), pj('t', '2')]) == pj('t')
+ assert library.python.fs.commonpath([pj('t', '1', '2'), pj('t', '1', '2')]) == pj('t', '1', '2')
+ assert library.python.fs.commonpath([pj('t', '1', '1'), pj('t', '1', '2')]) == pj('t', '1')
+ assert library.python.fs.commonpath([pj('t', '1', '1'), pj('t', '1', '2'), pj('t', '1', '3')]) == pj('t', '1')
+
+ assert library.python.fs.commonpath([pja('t', '1', '1'), pja('t', '1', '2')]) == pja('t', '1')
+
+ assert library.python.fs.commonpath({pj('t', '1'), pj('t', '2')}) == pj('t')
diff --git a/library/python/func/__init__.py b/library/python/func/__init__.py
index e37ea95c7c..7424361635 100644
--- a/library/python/func/__init__.py
+++ b/library/python/func/__init__.py
@@ -1,6 +1,6 @@
import functools
-import threading
-import collections
+import threading
+import collections
def map0(func, value):
@@ -20,12 +20,12 @@ class _Result(object):
def lazy(func):
result = _Result()
- @functools.wraps(func)
- def wrapper(*args):
+ @functools.wraps(func)
+ def wrapper(*args):
try:
return result.result
except AttributeError:
- result.result = func(*args)
+ result.result = func(*args)
return result.result
@@ -64,54 +64,54 @@ class lazy_classproperty(object):
return getattr(owner, attr_name)
-def memoize(limit=0, thread_local=False):
- assert limit >= 0
-
+def memoize(limit=0, thread_local=False):
+ assert limit >= 0
+
def decorator(func):
- memory = {}
- lock = threading.Lock()
-
- if limit:
- keys = collections.deque()
-
- def get(args):
- try:
- return memory[args]
- except KeyError:
- with lock:
- if args not in memory:
- fargs = args[-1]
- memory[args] = func(*fargs)
- keys.append(args)
- if len(keys) > limit:
- del memory[keys.popleft()]
- return memory[args]
-
- else:
-
- def get(args):
- if args not in memory:
- with lock:
- if args not in memory:
- fargs = args[-1]
- memory[args] = func(*fargs)
- return memory[args]
-
- if thread_local:
-
- @functools.wraps(func)
- def wrapper(*args):
- th = threading.current_thread()
- return get((th.ident, th.name, args))
-
- else:
-
- @functools.wraps(func)
- def wrapper(*args):
- return get(('', '', args))
-
- return wrapper
-
+ memory = {}
+ lock = threading.Lock()
+
+ if limit:
+ keys = collections.deque()
+
+ def get(args):
+ try:
+ return memory[args]
+ except KeyError:
+ with lock:
+ if args not in memory:
+ fargs = args[-1]
+ memory[args] = func(*fargs)
+ keys.append(args)
+ if len(keys) > limit:
+ del memory[keys.popleft()]
+ return memory[args]
+
+ else:
+
+ def get(args):
+ if args not in memory:
+ with lock:
+ if args not in memory:
+ fargs = args[-1]
+ memory[args] = func(*fargs)
+ return memory[args]
+
+ if thread_local:
+
+ @functools.wraps(func)
+ def wrapper(*args):
+ th = threading.current_thread()
+ return get((th.ident, th.name, args))
+
+ else:
+
+ @functools.wraps(func)
+ def wrapper(*args):
+ return get(('', '', args))
+
+ return wrapper
+
return decorator
@@ -119,52 +119,52 @@ def memoize(limit=0, thread_local=False):
def compose(*functions):
def compose2(f, g):
return lambda x: f(g(x))
-
+
return functools.reduce(compose2, functions, lambda x: x)
-
-
-class Singleton(type):
- _instances = {}
-
- def __call__(cls, *args, **kwargs):
- if cls not in cls._instances:
- cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
- return cls._instances[cls]
-
-
-def stable_uniq(it):
- seen = set()
- res = []
- for e in it:
- if e not in seen:
- res.append(e)
- seen.add(e)
- return res
-
-
-def first(it):
- for d in it:
- if d:
- return d
-
-
-def split(data, func):
- l, r = [], []
- for e in data:
- if func(e):
- l.append(e)
- else:
- r.append(e)
- return l, r
+
+
+class Singleton(type):
+ _instances = {}
+
+ def __call__(cls, *args, **kwargs):
+ if cls not in cls._instances:
+ cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
+ return cls._instances[cls]
+
+
+def stable_uniq(it):
+ seen = set()
+ res = []
+ for e in it:
+ if e not in seen:
+ res.append(e)
+ seen.add(e)
+ return res
+
+
+def first(it):
+ for d in it:
+ if d:
+ return d
+
+
+def split(data, func):
+ l, r = [], []
+ for e in data:
+ if func(e):
+ l.append(e)
+ else:
+ r.append(e)
+ return l, r
def flatten_dict(dd, separator='.', prefix=''):
- return (
- {
- prefix + separator + k if prefix else k: v
- for kk, vv in dd.items()
- for k, v in flatten_dict(vv, separator, kk).items()
- }
- if isinstance(dd, dict)
- else {prefix: dd}
- )
+ return (
+ {
+ prefix + separator + k if prefix else k: v
+ for kk, vv in dd.items()
+ for k, v in flatten_dict(vv, separator, kk).items()
+ }
+ if isinstance(dd, dict)
+ else {prefix: dd}
+ )
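Note: in `memoize` above, the `limit` branch evicts in insertion order (`keys.popleft()`), i.e. a FIFO bound rather than true LRU, and `thread_local=True` keys the cache by `(thread ident, thread name, args)`. A usage sketch, assuming the in-repo import path resolves:

```python
# Usage sketch for memoize() as restored above (illustrative; the
# import path assumes an Arcadia-style checkout).
import library.python.func as func

calls = []

@func.memoize(limit=2)
def slow_add(a, b):
    calls.append((a, b))
    return a + b

slow_add(1, 2); slow_add(1, 2)  # second call is a cache hit
slow_add(3, 4); slow_add(5, 6)  # third distinct key evicts (1, 2): FIFO
slow_add(1, 2)                  # recomputed after eviction
assert len(calls) == 4
```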
diff --git a/library/python/func/ut/test_func.py b/library/python/func/ut/test_func.py
index d283402374..3c4fad1a07 100644
--- a/library/python/func/ut/test_func.py
+++ b/library/python/func/ut/test_func.py
@@ -1,13 +1,13 @@
import pytest
-import threading
+import threading
import library.python.func as func
def test_map0():
- assert None is func.map0(lambda x: x + 1, None)
+ assert None is func.map0(lambda x: x + 1, None)
assert 3 == func.map0(lambda x: x + 1, 2)
- assert None is func.map0(len, None)
+ assert None is func.map0(len, None)
assert 2 == func.map0(len, [1, 2])
@@ -26,34 +26,34 @@ def test_memoize():
Counter._qty = getattr(Counter, '_qty', 0) + 1
return Counter._qty
- @func.memoize()
+ @func.memoize()
def t1(a):
return a, Counter.inc()
- @func.memoize()
+ @func.memoize()
def t2(a):
return a, Counter.inc()
- @func.memoize()
+ @func.memoize()
def t3(a):
return a, Counter.inc()
- @func.memoize()
+ @func.memoize()
def t4(a):
return a, Counter.inc()
- @func.memoize()
+ @func.memoize()
def t5(a, b, c):
return a + b + c, Counter.inc()
- @func.memoize()
+ @func.memoize()
def t6():
return Counter.inc()
- @func.memoize(limit=2)
- def t7(a, _b):
- return a, Counter.inc()
-
+ @func.memoize(limit=2)
+ def t7(a, _b):
+ return a, Counter.inc()
+
assert (1, 1) == t1(1)
assert (1, 1) == t1(1)
assert (2, 2) == t1(2)
@@ -82,48 +82,48 @@ def test_memoize():
assert 11 == t6()
assert 11 == t6()
- assert (1, 12) == t7(1, None)
- assert (2, 13) == t7(2, None)
- assert (1, 12) == t7(1, None)
- assert (2, 13) == t7(2, None)
- # removed result for (1, None)
- assert (3, 14) == t7(3, None)
- assert (1, 15) == t7(1, None)
-
- class ClassWithMemoizedMethod(object):
- def __init__(self):
- self.a = 0
-
- @func.memoize(True)
- def t(self, i):
- self.a += i
- return i
-
- obj = ClassWithMemoizedMethod()
- assert 10 == obj.t(10)
- assert 10 == obj.a
- assert 10 == obj.t(10)
- assert 10 == obj.a
-
- assert 20 == obj.t(20)
- assert 30 == obj.a
- assert 20 == obj.t(20)
- assert 30 == obj.a
-
-
-def test_first():
- assert func.first([0, [], (), None, False, {}, 0.0, '1', 0]) == '1'
- assert func.first([]) is None
- assert func.first([0]) is None
-
-
-def test_split():
- assert func.split([1, 1], lambda x: x) == ([1, 1], [])
- assert func.split([0, 0], lambda x: x) == ([], [0, 0])
- assert func.split([], lambda x: x) == ([], [])
- assert func.split([1, 0, 1], lambda x: x) == ([1, 1], [0])
-
-
+ assert (1, 12) == t7(1, None)
+ assert (2, 13) == t7(2, None)
+ assert (1, 12) == t7(1, None)
+ assert (2, 13) == t7(2, None)
+ # removed result for (1, None)
+ assert (3, 14) == t7(3, None)
+ assert (1, 15) == t7(1, None)
+
+ class ClassWithMemoizedMethod(object):
+ def __init__(self):
+ self.a = 0
+
+ @func.memoize(True)
+ def t(self, i):
+ self.a += i
+ return i
+
+ obj = ClassWithMemoizedMethod()
+ assert 10 == obj.t(10)
+ assert 10 == obj.a
+ assert 10 == obj.t(10)
+ assert 10 == obj.a
+
+ assert 20 == obj.t(20)
+ assert 30 == obj.a
+ assert 20 == obj.t(20)
+ assert 30 == obj.a
+
+
+def test_first():
+ assert func.first([0, [], (), None, False, {}, 0.0, '1', 0]) == '1'
+ assert func.first([]) is None
+ assert func.first([0]) is None
+
+
+def test_split():
+ assert func.split([1, 1], lambda x: x) == ([1, 1], [])
+ assert func.split([0, 0], lambda x: x) == ([], [0, 0])
+ assert func.split([], lambda x: x) == ([], [])
+ assert func.split([1, 0, 1], lambda x: x) == ([1, 1], [0])
+
+
def test_flatten_dict():
assert func.flatten_dict({"a": 1, "b": 2}) == {"a": 1, "b": 2}
assert func.flatten_dict({"a": 1}) == {"a": 1}
@@ -132,31 +132,31 @@ def test_flatten_dict():
assert func.flatten_dict({"a": 1, "b": {"c": {"d": 2}}}, separator="/") == {"a": 1, "b/c/d": 2}
-def test_memoize_thread_local():
- class Counter(object):
- def __init__(self, s):
- self.val = s
-
- def inc(self):
- self.val += 1
- return self.val
-
- @func.memoize(thread_local=True)
- def get_counter(start):
- return Counter(start)
-
- def th_inc():
- assert get_counter(0).inc() == 1
- assert get_counter(0).inc() == 2
- assert get_counter(10).inc() == 11
- assert get_counter(10).inc() == 12
-
- th_inc()
-
- th = threading.Thread(target=th_inc)
- th.start()
- th.join()
-
-
+def test_memoize_thread_local():
+ class Counter(object):
+ def __init__(self, s):
+ self.val = s
+
+ def inc(self):
+ self.val += 1
+ return self.val
+
+ @func.memoize(thread_local=True)
+ def get_counter(start):
+ return Counter(start)
+
+ def th_inc():
+ assert get_counter(0).inc() == 1
+ assert get_counter(0).inc() == 2
+ assert get_counter(10).inc() == 11
+ assert get_counter(10).inc() == 12
+
+ th_inc()
+
+ th = threading.Thread(target=th_inc)
+ th.start()
+ th.join()
+
+
if __name__ == '__main__':
pytest.main([__file__])
diff --git a/library/python/func/ut/ya.make b/library/python/func/ut/ya.make
index 4d7e8b8f5b..5ec6c1225e 100644
--- a/library/python/func/ut/ya.make
+++ b/library/python/func/ut/ya.make
@@ -1,11 +1,11 @@
-OWNER(g:yatool)
-
-PY23_TEST()
-
-TEST_SRCS(test_func.py)
-
-PEERDIR(
- library/python/func
-)
-
-END()
+OWNER(g:yatool)
+
+PY23_TEST()
+
+TEST_SRCS(test_func.py)
+
+PEERDIR(
+ library/python/func
+)
+
+END()
diff --git a/library/python/func/ya.make b/library/python/func/ya.make
index 2f7b4890db..9d414a976e 100644
--- a/library/python/func/ya.make
+++ b/library/python/func/ya.make
@@ -5,7 +5,7 @@ PY23_LIBRARY()
PY_SRCS(__init__.py)
END()
-
-RECURSE_FOR_TESTS(
- ut
-)
+
+RECURSE_FOR_TESTS(
+ ut
+)
diff --git a/library/python/pytest/main.py b/library/python/pytest/main.py
index aa08f846b1..6296bd6f0f 100644
--- a/library/python/pytest/main.py
+++ b/library/python/pytest/main.py
@@ -1,47 +1,47 @@
-import os
+import os
import sys
import time
-
+
import __res
-FORCE_EXIT_TESTSFAILED_ENV = 'FORCE_EXIT_TESTSFAILED'
+FORCE_EXIT_TESTSFAILED_ENV = 'FORCE_EXIT_TESTSFAILED'
+
-
def main():
import library.python.pytest.context as context
context.Ctx["YA_PYTEST_START_TIMESTAMP"] = time.time()
- profile = None
- if '--profile-pytest' in sys.argv:
- sys.argv.remove('--profile-pytest')
+ profile = None
+ if '--profile-pytest' in sys.argv:
+ sys.argv.remove('--profile-pytest')
+
+ import pstats
+ import cProfile
+ profile = cProfile.Profile()
+ profile.enable()
- import pstats
- import cProfile
- profile = cProfile.Profile()
- profile.enable()
+ # Reset influencing env. vars
+ # For more info see library/python/testing/yatest_common/yatest/common/errors.py
+ if FORCE_EXIT_TESTSFAILED_ENV in os.environ:
+ del os.environ[FORCE_EXIT_TESTSFAILED_ENV]
- # Reset influencing env. vars
- # For more info see library/python/testing/yatest_common/yatest/common/errors.py
- if FORCE_EXIT_TESTSFAILED_ENV in os.environ:
- del os.environ[FORCE_EXIT_TESTSFAILED_ENV]
-
if "Y_PYTHON_CLEAR_ENTRY_POINT" in os.environ:
if "Y_PYTHON_ENTRY_POINT" in os.environ:
del os.environ["Y_PYTHON_ENTRY_POINT"]
del os.environ["Y_PYTHON_CLEAR_ENTRY_POINT"]
- listing_mode = '--collect-only' in sys.argv
- yatest_runner = os.environ.get('YA_TEST_RUNNER') == '1'
-
- import pytest
-
- import library.python.pytest.plugins.collection as collection
- import library.python.pytest.plugins.ya as ya
- import library.python.pytest.plugins.conftests as conftests
-
+ listing_mode = '--collect-only' in sys.argv
+ yatest_runner = os.environ.get('YA_TEST_RUNNER') == '1'
+
+ import pytest
+
+ import library.python.pytest.plugins.collection as collection
+ import library.python.pytest.plugins.ya as ya
+ import library.python.pytest.plugins.conftests as conftests
+
import _pytest.assertion
from _pytest.monkeypatch import MonkeyPatch
- from . import rewrite
+ from . import rewrite
m = MonkeyPatch()
m.setattr(_pytest.assertion.rewrite, "AssertionRewritingHook", rewrite.AssertionRewritingHook)
@@ -52,10 +52,10 @@ def main():
if name.startswith(prefix) and not name.endswith('.conftest')
]
- doctest_packages = __res.find("PY_DOCTEST_PACKAGES") or ""
- if isinstance(doctest_packages, bytes):
- doctest_packages = doctest_packages.decode('utf-8')
- doctest_packages = doctest_packages.split()
+ doctest_packages = __res.find("PY_DOCTEST_PACKAGES") or ""
+ if isinstance(doctest_packages, bytes):
+ doctest_packages = doctest_packages.decode('utf-8')
+ doctest_packages = doctest_packages.split()
def is_doctest_module(name):
for package in doctest_packages:
@@ -85,31 +85,31 @@ def main():
return new_paths
sys.path = remove_user_site(sys.path)
- rc = pytest.main(plugins=[
+ rc = pytest.main(plugins=[
collection.CollectionPlugin(test_modules, doctest_modules),
ya,
conftests,
- ])
-
+ ])
+
if rc == 5:
# don't care about EXIT_NOTESTSCOLLECTED
rc = 0
- if rc == 1 and yatest_runner and not listing_mode and not os.environ.get(FORCE_EXIT_TESTSFAILED_ENV) == '1':
-        # XXX this is a place for future improvements
-        # Test wrapper should terminate with 0 exit code if there are common test failures
-        # and report it with trace-file machinery.
-        # However, there are several cases when we don't want to suppress exit_code:
-        # - listing machinery doesn't use trace-file currently and relies on stdout and exit_code
-        # - RestartTestException and InfrastructureException require a non-zero exit_code to be processed correctly
- rc = 0
-
- if profile:
- profile.disable()
- ps = pstats.Stats(profile, stream=sys.stderr).sort_stats('cumulative')
- ps.print_stats()
-
- sys.exit(rc)
+ if rc == 1 and yatest_runner and not listing_mode and not os.environ.get(FORCE_EXIT_TESTSFAILED_ENV) == '1':
+        # XXX this is a place for future improvements
+        # Test wrapper should terminate with 0 exit code if there are common test failures
+        # and report it with trace-file machinery.
+        # However, there are several cases when we don't want to suppress exit_code:
+        # - listing machinery doesn't use trace-file currently and relies on stdout and exit_code
+        # - RestartTestException and InfrastructureException require a non-zero exit_code to be processed correctly
+ rc = 0
+
+ if profile:
+ profile.disable()
+ ps = pstats.Stats(profile, stream=sys.stderr).sort_stats('cumulative')
+ ps.print_stats()
+
+ sys.exit(rc)
if __name__ == '__main__':
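Note: the `--profile-pytest` branch restored above is the standard `cProfile`/`pstats` recipe wrapped around `pytest.main`. A self-contained sketch of the same flow:

```python
# Self-contained sketch of the --profile-pytest flow above: profile a
# workload and print cumulative-time stats to stderr via pstats.
import cProfile
import pstats
import sys

profile = cProfile.Profile()
profile.enable()

sum(i * i for i in range(100000))  # stand-in for pytest.main(...)

profile.disable()
stats = pstats.Stats(profile, stream=sys.stderr)
stats.sort_stats('cumulative').print_stats(5)
```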
diff --git a/library/python/pytest/plugins/collection.py b/library/python/pytest/plugins/collection.py
index 93932e4b02..e36f47a78f 100644
--- a/library/python/pytest/plugins/collection.py
+++ b/library/python/pytest/plugins/collection.py
@@ -89,7 +89,7 @@ def pytest_ignore_collect(module, session, filenames_from_full_filters, accept_f
if test_file_filter is None:
return False
if module.name != test_file_filter.replace('/', '.'):
- return True
+ return True
return False
diff --git a/library/python/pytest/plugins/fakeid_py2.py b/library/python/pytest/plugins/fakeid_py2.py
index 8efc368629..8b26148e2e 100644
--- a/library/python/pytest/plugins/fakeid_py2.py
+++ b/library/python/pytest/plugins/fakeid_py2.py
@@ -1,2 +1,2 @@
-# Inc this number to change uid for every PYTEST() target
-fake_id = 0
+# Inc this number to change uid for every PYTEST() target
+fake_id = 0
diff --git a/library/python/pytest/plugins/fakeid_py3.py b/library/python/pytest/plugins/fakeid_py3.py
index d6812eadba..247cc8b29d 100644
--- a/library/python/pytest/plugins/fakeid_py3.py
+++ b/library/python/pytest/plugins/fakeid_py3.py
@@ -1,2 +1,2 @@
-# Inc this number to change uid for every PY3TEST() target
-fake_id = 10
+# Inc this number to change uid for every PY3TEST() target
+fake_id = 10
diff --git a/library/python/pytest/plugins/ya.make b/library/python/pytest/plugins/ya.make
index 638c532e86..c15d6f759d 100644
--- a/library/python/pytest/plugins/ya.make
+++ b/library/python/pytest/plugins/ya.make
@@ -1,4 +1,4 @@
-OWNER(g:yatest)
+OWNER(g:yatest)
PY23_LIBRARY()
@@ -10,23 +10,23 @@ PY_SRCS(
)
PEERDIR(
- library/python/filelock
+ library/python/filelock
library/python/find_root
library/python/testing/filter
)
-IF (PYTHON2)
- PY_SRCS(
- fakeid_py2.py
- )
-
- PEERDIR(
- contrib/python/faulthandler
- )
-ELSE()
- PY_SRCS(
- fakeid_py3.py
- )
-ENDIF()
-
+IF (PYTHON2)
+ PY_SRCS(
+ fakeid_py2.py
+ )
+
+ PEERDIR(
+ contrib/python/faulthandler
+ )
+ELSE()
+ PY_SRCS(
+ fakeid_py3.py
+ )
+ENDIF()
+
END()
diff --git a/library/python/pytest/plugins/ya.py b/library/python/pytest/plugins/ya.py
index d7398ae90d..1bde03042d 100644
--- a/library/python/pytest/plugins/ya.py
+++ b/library/python/pytest/plugins/ya.py
@@ -1,15 +1,15 @@
-# coding: utf-8
-
-import base64
-import errno
-import re
+# coding: utf-8
+
+import base64
+import errno
+import re
import sys
import os
import logging
import fnmatch
import json
import time
-import traceback
+import traceback
import collections
import signal
import inspect
@@ -28,7 +28,7 @@ import _pytest.outcomes
import _pytest.skipping
from _pytest.warning_types import PytestUnhandledCoroutineWarning
-
+
from yatest_lib import test_splitter
try:
@@ -42,12 +42,12 @@ except ImportError:
# fallback for pytest script mode
import yatest_tools as tools
-try:
- from library.python import filelock
-except ImportError:
- filelock = None
-
-
+try:
+ from library.python import filelock
+except ImportError:
+ filelock = None
+
+
import yatest_lib.tools
import yatest_lib.external as canon
@@ -61,7 +61,7 @@ yatest_logger = logging.getLogger("ya.test")
_pytest.main.EXIT_NOTESTSCOLLECTED = 0
-SHUTDOWN_REQUESTED = False
+SHUTDOWN_REQUESTED = False
pytest_config = None
@@ -71,8 +71,8 @@ def configure_pdb_on_demand():
if hasattr(signal, "SIGUSR1"):
def on_signal(*args):
- import ipdb
- ipdb.set_trace()
+ import ipdb
+ ipdb.set_trace()
signal.signal(signal.SIGUSR1, on_signal)
@@ -147,40 +147,40 @@ def pytest_addoption(parser):
parser.addoption("--python-path", action="store", dest="python_path", default="", help="path the canonical python binary")
parser.addoption("--valgrind-path", action="store", dest="valgrind_path", default="", help="path the canonical valgring binary")
parser.addoption("--test-filter", action="append", dest="test_filter", default=None, help="test filter")
- parser.addoption("--test-file-filter", action="store", dest="test_file_filter", default=None, help="test file filter")
+ parser.addoption("--test-file-filter", action="store", dest="test_file_filter", default=None, help="test file filter")
parser.addoption("--test-param", action="append", dest="test_params", default=None, help="test parameters")
parser.addoption("--test-log-level", action="store", dest="test_log_level", choices=["critical", "error", "warning", "info", "debug"], default="debug", help="test log level")
parser.addoption("--mode", action="store", choices=[yatest_lib.ya.RunMode.List, yatest_lib.ya.RunMode.Run], dest="mode", default=yatest_lib.ya.RunMode.Run, help="testing mode")
- parser.addoption("--test-list-file", action="store", dest="test_list_file")
+ parser.addoption("--test-list-file", action="store", dest="test_list_file")
parser.addoption("--modulo", default=1, type=int)
parser.addoption("--modulo-index", default=0, type=int)
parser.addoption("--partition-mode", default='SEQUENTIAL', help="Split tests according to partitoin mode")
parser.addoption("--split-by-tests", action='store_true', help="Split test execution by tests instead of suites", default=False)
parser.addoption("--project-path", action="store", default="", help="path to CMakeList where test is declared")
parser.addoption("--build-type", action="store", default="", help="build type")
- parser.addoption("--flags", action="append", dest="flags", default=[], help="build flags (-D)")
+ parser.addoption("--flags", action="append", dest="flags", default=[], help="build flags (-D)")
parser.addoption("--sanitize", action="store", default="", help="sanitize mode")
parser.addoption("--test-stderr", action="store_true", default=False, help="test stderr")
parser.addoption("--test-debug", action="store_true", default=False, help="test debug mode")
parser.addoption("--root-dir", action="store", default=None)
parser.addoption("--ya-trace", action="store", dest="ya_trace_path", default=None, help="path to ya trace report")
- parser.addoption("--ya-version", action="store", dest="ya_version", default=0, type=int, help="allows to be compatible with ya and the new changes in ya-dev")
+ parser.addoption("--ya-version", action="store", dest="ya_version", default=0, type=int, help="allows to be compatible with ya and the new changes in ya-dev")
parser.addoption(
"--test-suffix", action="store", dest="test_suffix", default=None, help="add suffix to every test name"
)
parser.addoption("--gdb-path", action="store", dest="gdb_path", default="", help="path the canonical gdb binary")
parser.addoption("--collect-cores", action="store_true", dest="collect_cores", default=False, help="allows core dump file recovering during test")
- parser.addoption("--sanitizer-extra-checks", action="store_true", dest="sanitizer_extra_checks", default=False, help="enables extra checks for tests built with sanitizers")
+ parser.addoption("--sanitizer-extra-checks", action="store_true", dest="sanitizer_extra_checks", default=False, help="enables extra checks for tests built with sanitizers")
parser.addoption("--report-deselected", action="store_true", dest="report_deselected", default=False, help="report deselected tests to the trace file")
parser.addoption("--pdb-on-sigusr1", action="store_true", default=False, help="setup pdb.set_trace on SIGUSR1")
- parser.addoption("--test-tool-bin", help="Path to test_tool")
+ parser.addoption("--test-tool-bin", help="Path to test_tool")
parser.addoption("--test-list-path", dest="test_list_path", action="store", help="path to test list", default="")
-def from_ya_test():
- return "YA_TEST_RUNNER" in os.environ
-
-
+def from_ya_test():
+ return "YA_TEST_RUNNER" in os.environ
+
+
def pytest_configure(config):
global pytest_config
pytest_config = config
@@ -189,7 +189,7 @@ def pytest_configure(config):
config.addinivalue_line("markers", "ya:external")
- config.from_ya_test = from_ya_test()
+ config.from_ya_test = from_ya_test()
config.test_logs = collections.defaultdict(dict)
config.test_metrics = {}
config.suite_metrics = {}
@@ -234,65 +234,65 @@ def pytest_configure(config):
config.current_test_name = None
config.test_cores_count = 0
config.collect_cores = config.option.collect_cores
- config.sanitizer_extra_checks = config.option.sanitizer_extra_checks
+ config.sanitizer_extra_checks = config.option.sanitizer_extra_checks
try:
config.test_tool_bin = config.option.test_tool_bin
except AttributeError:
logging.info("test_tool_bin not specified")
if config.sanitizer_extra_checks:
- for envvar in ['LSAN_OPTIONS', 'ASAN_OPTIONS']:
- if envvar in os.environ:
- os.environ.pop(envvar)
- if envvar + '_ORIGINAL' in os.environ:
- os.environ[envvar] = os.environ[envvar + '_ORIGINAL']
+ for envvar in ['LSAN_OPTIONS', 'ASAN_OPTIONS']:
+ if envvar in os.environ:
+ os.environ.pop(envvar)
+ if envvar + '_ORIGINAL' in os.environ:
+ os.environ[envvar] = os.environ[envvar + '_ORIGINAL']
if config.option.root_dir:
config.rootdir = py.path.local(config.option.root_dir)
config.invocation_params = attr.evolve(config.invocation_params, dir=config.rootdir)
- extra_sys_path = []
- # Arcadia paths from the test DEPENDS section of ya.make
- extra_sys_path.append(os.path.join(config.option.source_root, config.option.project_path))
+ extra_sys_path = []
+ # Arcadia paths from the test DEPENDS section of ya.make
+ extra_sys_path.append(os.path.join(config.option.source_root, config.option.project_path))
    # Build root is required for correct import of protobufs, because imports are relative to the root
# (like import devtools.dummy_arcadia.protos.lib.my_proto_pb2)
- extra_sys_path.append(config.option.build_root)
-
- for path in config.option.dep_roots:
- if os.path.isabs(path):
- extra_sys_path.append(path)
- else:
- extra_sys_path.append(os.path.join(config.option.source_root, path))
-
- sys_path_set = set(sys.path)
- for path in extra_sys_path:
- if path not in sys_path_set:
- sys.path.append(path)
- sys_path_set.add(path)
-
- os.environ["PYTHONPATH"] = os.pathsep.join(sys.path)
-
+ extra_sys_path.append(config.option.build_root)
+
+ for path in config.option.dep_roots:
+ if os.path.isabs(path):
+ extra_sys_path.append(path)
+ else:
+ extra_sys_path.append(os.path.join(config.option.source_root, path))
+
+ sys_path_set = set(sys.path)
+ for path in extra_sys_path:
+ if path not in sys_path_set:
+ sys.path.append(path)
+ sys_path_set.add(path)
+
+ os.environ["PYTHONPATH"] = os.pathsep.join(sys.path)
+
if not config.option.collectonly:
if config.option.ya_trace_path:
config.ya_trace_reporter = TraceReportGenerator(config.option.ya_trace_path)
else:
config.ya_trace_reporter = DryTraceReportGenerator(config.option.ya_trace_path)
- config.ya_version = config.option.ya_version
+ config.ya_version = config.option.ya_version
sys.meta_path.append(CustomImporter([config.option.build_root] + [os.path.join(config.option.build_root, dep) for dep in config.option.dep_roots]))
if config.option.pdb_on_sigusr1:
configure_pdb_on_demand()
- # Dump python backtrace in case of any errors
- faulthandler.enable()
- if hasattr(signal, "SIGQUIT"):
-        # SIGQUIT is used by test_tool to tear down tests which overrun the timeout
- faulthandler.register(signal.SIGQUIT, chain=True)
-
- if hasattr(signal, "SIGUSR2"):
- signal.signal(signal.SIGUSR2, _graceful_shutdown)
+ # Dump python backtrace in case of any errors
+ faulthandler.enable()
+ if hasattr(signal, "SIGQUIT"):
+        # SIGQUIT is used by test_tool to tear down tests which overrun the timeout
+ faulthandler.register(signal.SIGQUIT, chain=True)
+
+ if hasattr(signal, "SIGUSR2"):
+ signal.signal(signal.SIGUSR2, _graceful_shutdown)
+
-
session_should_exit = False
@@ -313,20 +313,20 @@ def pytest_runtest_logfinish(nodeid, location):
_graceful_shutdown_on_log(session_should_exit)
-def _graceful_shutdown(*args):
+def _graceful_shutdown(*args):
global session_should_exit
session_should_exit = True
- try:
- import library.python.coverage
- library.python.coverage.stop_coverage_tracing()
- except ImportError:
- pass
- traceback.print_stack(file=sys.stderr)
+ try:
+ import library.python.coverage
+ library.python.coverage.stop_coverage_tracing()
+ except ImportError:
+ pass
+ traceback.print_stack(file=sys.stderr)
capman = pytest_config.pluginmanager.getplugin("capturemanager")
capman.suspend(in_=True)
_graceful_shutdown_on_log(not capman.is_globally_capturing())
-
-
+
+
def _get_rusage():
return resource and resource.getrusage(resource.RUSAGE_SELF)
@@ -342,7 +342,7 @@ def _collect_test_rusage(item):
if not modifier:
modifier = lambda x: x
if hasattr(item.rusage, attr_name):
- ya_inst.set_metric_value(metric_name, modifier(getattr(finish_rusage, attr_name) - getattr(item.rusage, attr_name)))
+ ya_inst.set_metric_value(metric_name, modifier(getattr(finish_rusage, attr_name) - getattr(item.rusage, attr_name)))
for args in [
("ru_maxrss", "ru_rss", lambda x: x*1024), # to be the same as in util/system/rusage.cpp
@@ -431,7 +431,7 @@ def pytest_collection_modifyitems(items, config):
canonical_node_id = str(CustomTestItem(item.nodeid, pytest_config.option.test_suffix))
matched = False
for flt in filters:
- if "::" not in flt and "*" not in flt:
+ if "::" not in flt and "*" not in flt:
flt += "*" # add support for filtering by module name
if canonical_node_id.endswith(flt) or fnmatch.fnmatch(tools.escape_for_fnmatch(canonical_node_id), tools.escape_for_fnmatch(flt)):
matched = True
@@ -507,10 +507,10 @@ def pytest_collection_modifyitems(items, config):
"tags": _get_item_tags(item),
}
tests.append(record)
- if config.option.test_list_file:
- with open(config.option.test_list_file, 'w') as afile:
- json.dump(tests, afile)
- # TODO prettyboy remove after test_tool release - currently it's required for backward compatibility
+ if config.option.test_list_file:
+ with open(config.option.test_list_file, 'w') as afile:
+ json.dump(tests, afile)
+ # TODO prettyboy remove after test_tool release - currently it's required for backward compatibility
sys.stderr.write(json.dumps(tests))
@@ -548,7 +548,7 @@ def pytest_runtest_makereport(item, call):
if not pytest_config.suite_metrics and context.Ctx.get("YA_PYTEST_START_TIMESTAMP"):
pytest_config.suite_metrics["pytest_startup_duration"] = call.start - context.Ctx["YA_PYTEST_START_TIMESTAMP"]
pytest_config.ya_trace_reporter.dump_suite_metrics()
-
+
pytest_config.ya_trace_reporter.on_log_report(test_item)
if report.outcome == "failed":
@@ -591,48 +591,48 @@ def pytest_make_parametrize_id(config, val, argname):
return None
-def get_formatted_error(report):
- if isinstance(report.longrepr, tuple):
- text = ""
- for entry in report.longrepr:
- text += colorize(entry)
- else:
- text = colorize(report.longrepr)
+def get_formatted_error(report):
+ if isinstance(report.longrepr, tuple):
+ text = ""
+ for entry in report.longrepr:
+ text += colorize(entry)
+ else:
+ text = colorize(report.longrepr)
text = yatest_lib.tools.to_utf8(text)
- return text
-
-
-def colorize(longrepr):
- # use default pytest colorization
+ return text
+
+
+def colorize(longrepr):
+ # use default pytest colorization
if pytest_config.option.tbstyle != "short":
- io = py.io.TextIO()
+ io = py.io.TextIO()
if six.PY2:
writer = py.io.TerminalWriter(file=io)
else:
writer = _pytest._io.TerminalWriter(file=io)
- # enable colorization
- writer.hasmarkup = True
-
- if hasattr(longrepr, 'reprtraceback') and hasattr(longrepr.reprtraceback, 'toterminal'):
- longrepr.reprtraceback.toterminal(writer)
- return io.getvalue().strip()
+ # enable colorization
+ writer.hasmarkup = True
+
+ if hasattr(longrepr, 'reprtraceback') and hasattr(longrepr.reprtraceback, 'toterminal'):
+ longrepr.reprtraceback.toterminal(writer)
+ return io.getvalue().strip()
return yatest_lib.tools.to_utf8(longrepr)
-
+
text = yatest_lib.tools.to_utf8(longrepr)
- pos = text.find("E ")
- if pos == -1:
- return text
-
- bt, error = text[:pos], text[pos:]
- filters = [
- # File path, line number and function name
- (re.compile(r"^(.*?):(\d+): in (\S+)", flags=re.MULTILINE), r"[[unimp]]\1[[rst]]:[[alt2]]\2[[rst]]: in [[alt1]]\3[[rst]]"),
- ]
- for regex, substitution in filters:
- bt = regex.sub(substitution, bt)
- return "{}[[bad]]{}".format(bt, error)
-
-
+ pos = text.find("E ")
+ if pos == -1:
+ return text
+
+ bt, error = text[:pos], text[pos:]
+ filters = [
+ # File path, line number and function name
+ (re.compile(r"^(.*?):(\d+): in (\S+)", flags=re.MULTILINE), r"[[unimp]]\1[[rst]]:[[alt2]]\2[[rst]]: in [[alt1]]\3[[rst]]"),
+ ]
+ for regex, substitution in filters:
+ bt = regex.sub(substitution, bt)
+ return "{}[[bad]]{}".format(bt, error)
+
+
class TestItem(object):
def __init__(self, report, result, test_suffix):
@@ -691,7 +691,7 @@ class TestItem(object):
def error(self):
return self._error
- def set_error(self, entry, marker='bad'):
+ def set_error(self, entry, marker='bad'):
if isinstance(entry, _pytest.reports.BaseReport):
self._error = get_formatted_error(entry)
else:
@@ -750,80 +750,80 @@ class DeselectedTestItem(CustomTestItem):
class TraceReportGenerator(object):
def __init__(self, out_file_path):
- self._filename = out_file_path
- self._file = open(out_file_path, 'w')
- self._wreckage_filename = out_file_path + '.wreckage'
+ self._filename = out_file_path
+ self._file = open(out_file_path, 'w')
+ self._wreckage_filename = out_file_path + '.wreckage'
self._test_messages = {}
self._test_duration = {}
- # Some machinery to avoid data corruption due sloppy fork()
- self._current_test = (None, None)
- self._pid = os.getpid()
- self._check_intricate_respawn()
-
- def _check_intricate_respawn(self):
- pid_file = self._filename + '.pid'
- try:
- # python2 doesn't support open(f, 'x')
- afile = os.fdopen(os.open(pid_file, os.O_WRONLY | os.O_EXCL | os.O_CREAT), 'w')
- afile.write(str(self._pid))
- afile.close()
- return
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
-
- # Looks like the test binary was respawned
- if from_ya_test():
- try:
- with open(pid_file) as afile:
- prev_pid = afile.read()
- except Exception as e:
- prev_pid = '(failed to obtain previous pid: {})'.format(e)
-
- parts = [
- "Aborting test run: test machinery found that the test binary {} has already been run before.".format(sys.executable),
- "Looks like test has incorrect respawn/relaunch logic within test binary.",
- "Test should not try to restart itself - this is a poorly designed test case that leads to errors and could corrupt internal test machinery files.",
- "Debug info: previous pid:{} current:{}".format(prev_pid, self._pid),
- ]
- msg = '\n'.join(parts)
- yatest_logger.error(msg)
-
- if filelock:
- lock = filelock.FileLock(self._wreckage_filename + '.lock')
- lock.acquire()
-
- with open(self._wreckage_filename, 'a') as afile:
- self._file = afile
-
- self._dump_trace('chunk_event', {"errors": [('fail', '[[bad]]' + msg)]})
-
- raise Exception(msg)
- else:
- # Test binary is launched without `ya make -t`'s testing machinery - don't rely on clean environment
- pass
-
+ # Some machinery to avoid data corruption due sloppy fork()
+ self._current_test = (None, None)
+ self._pid = os.getpid()
+ self._check_intricate_respawn()
+
+ def _check_intricate_respawn(self):
+ pid_file = self._filename + '.pid'
+ try:
+ # python2 doesn't support open(f, 'x')
+ afile = os.fdopen(os.open(pid_file, os.O_WRONLY | os.O_EXCL | os.O_CREAT), 'w')
+ afile.write(str(self._pid))
+ afile.close()
+ return
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+ # Looks like the test binary was respawned
+ if from_ya_test():
+ try:
+ with open(pid_file) as afile:
+ prev_pid = afile.read()
+ except Exception as e:
+ prev_pid = '(failed to obtain previous pid: {})'.format(e)
+
+ parts = [
+ "Aborting test run: test machinery found that the test binary {} has already been run before.".format(sys.executable),
+ "Looks like test has incorrect respawn/relaunch logic within test binary.",
+ "Test should not try to restart itself - this is a poorly designed test case that leads to errors and could corrupt internal test machinery files.",
+ "Debug info: previous pid:{} current:{}".format(prev_pid, self._pid),
+ ]
+ msg = '\n'.join(parts)
+ yatest_logger.error(msg)
+
+ if filelock:
+ lock = filelock.FileLock(self._wreckage_filename + '.lock')
+ lock.acquire()
+
+ with open(self._wreckage_filename, 'a') as afile:
+ self._file = afile
+
+ self._dump_trace('chunk_event', {"errors": [('fail', '[[bad]]' + msg)]})
+
+ raise Exception(msg)
+ else:
+ # Test binary is launched without `ya make -t`'s testing machinery - don't rely on clean environment
+ pass
+
def on_start_test_class(self, test_item):
pytest_config.ya.set_test_item_node_id(test_item.nodeid)
- class_name = test_item.class_name.decode('utf-8') if sys.version_info[0] < 3 else test_item.class_name
- self._current_test = (class_name, None)
- self.trace('test-started', {'class': class_name})
+ class_name = test_item.class_name.decode('utf-8') if sys.version_info[0] < 3 else test_item.class_name
+ self._current_test = (class_name, None)
+ self.trace('test-started', {'class': class_name})
def on_finish_test_class(self, test_item):
pytest_config.ya.set_test_item_node_id(test_item.nodeid)
self.trace('test-finished', {'class': test_item.class_name.decode('utf-8') if sys.version_info[0] < 3 else test_item.class_name})
def on_start_test_case(self, test_item):
- class_name = yatest_lib.tools.to_utf8(test_item.class_name)
- subtest_name = yatest_lib.tools.to_utf8(test_item.test_name)
+ class_name = yatest_lib.tools.to_utf8(test_item.class_name)
+ subtest_name = yatest_lib.tools.to_utf8(test_item.test_name)
message = {
- 'class': class_name,
- 'subtest': subtest_name,
+ 'class': class_name,
+ 'subtest': subtest_name,
}
if test_item.nodeid in pytest_config.test_logs:
message['logs'] = pytest_config.test_logs[test_item.nodeid]
pytest_config.ya.set_test_item_node_id(test_item.nodeid)
- self._current_test = (class_name, subtest_name)
+ self._current_test = (class_name, subtest_name)
self.trace('subtest-started', message)
def on_finish_test_case(self, test_item, duration_only=False):
@@ -865,9 +865,9 @@ class TraceReportGenerator(object):
message = {"metrics": pytest_config.suite_metrics}
self.trace("suite-event", message)
- def on_error(self, test_item):
- self.trace('chunk_event', {"errors": [(test_item.status, self._get_comment(test_item))]})
-
+ def on_error(self, test_item):
+ self.trace('chunk_event', {"errors": [(test_item.status, self._get_comment(test_item))]})
+
def on_log_report(self, test_item):
if test_item.nodeid in self._test_duration:
self._test_duration[test_item.nodeid] += test_item._duration
@@ -879,77 +879,77 @@ class TraceReportGenerator(object):
msg = yatest_lib.tools.to_utf8(test_item.error)
if not msg:
return ""
- return msg + "[[rst]]"
+ return msg + "[[rst]]"
- def _dump_trace(self, name, value):
+ def _dump_trace(self, name, value):
event = {
'timestamp': time.time(),
'value': value,
'name': name
}
-
+
data = yatest_lib.tools.to_str(json.dumps(event, ensure_ascii=False))
- self._file.write(data + '\n')
- self._file.flush()
-
- def _check_sloppy_fork(self, name, value):
- if self._pid == os.getpid():
- return
-
- yatest_logger.error("Skip tracing to avoid data corruption, name = %s, value = %s", name, value)
-
- try:
- # Lock wreckage tracefile to avoid race if multiple tests use fork sloppily
- if filelock:
- lock = filelock.FileLock(self._wreckage_filename + '.lock')
- lock.acquire()
-
- with open(self._wreckage_filename, 'a') as afile:
- self._file = afile
-
- parts = [
- "It looks like you have leaked process - it could corrupt internal test machinery files.",
- "Usually it happens when you casually use fork() without os._exit(),",
- "which results in two pytest processes running at the same time.",
- "Pid of the original pytest's process is {}, however current process has {} pid.".format(self._pid, os.getpid()),
- ]
- if self._current_test[1]:
- parts.append("Most likely the problem is in '{}' test.".format(self._current_test))
- else:
- parts.append("Most likely new process was created before any test was launched (during the import stage?).")
-
- if value.get('comment'):
- comment = value.get('comment', '').strip()
- # multiline comment
- newline_required = '\n' if '\n' in comment else ''
- parts.append("Debug info: name = '{}' comment:{}{}".format(name, newline_required, comment))
- else:
- val_str = json.dumps(value, ensure_ascii=False).encode('utf-8')
- parts.append("Debug info: name = '{}' value = '{}'".format(name, base64.b64encode(val_str)))
-
- msg = "[[bad]]{}".format('\n'.join(parts))
- class_name, subtest_name = self._current_test
- if subtest_name:
- data = {
- 'class': class_name,
- 'subtest': subtest_name,
- 'status': 'fail',
- 'comment': msg,
- }
- # overwrite original status
- self._dump_trace('subtest-finished', data)
- else:
- self._dump_trace('chunk_event', {"errors": [('fail', msg)]})
- except Exception as e:
- yatest_logger.exception(e)
- finally:
- os._exit(38)
-
- def trace(self, name, value):
- self._check_sloppy_fork(name, value)
- self._dump_trace(name, value)
-
-
+ self._file.write(data + '\n')
+ self._file.flush()
+
+ def _check_sloppy_fork(self, name, value):
+ if self._pid == os.getpid():
+ return
+
+ yatest_logger.error("Skip tracing to avoid data corruption, name = %s, value = %s", name, value)
+
+ try:
+ # Lock wreckage tracefile to avoid race if multiple tests use fork sloppily
+ if filelock:
+ lock = filelock.FileLock(self._wreckage_filename + '.lock')
+ lock.acquire()
+
+ with open(self._wreckage_filename, 'a') as afile:
+ self._file = afile
+
+ parts = [
+ "It looks like you have leaked process - it could corrupt internal test machinery files.",
+ "Usually it happens when you casually use fork() without os._exit(),",
+ "which results in two pytest processes running at the same time.",
+ "Pid of the original pytest's process is {}, however current process has {} pid.".format(self._pid, os.getpid()),
+ ]
+ if self._current_test[1]:
+ parts.append("Most likely the problem is in '{}' test.".format(self._current_test))
+ else:
+ parts.append("Most likely new process was created before any test was launched (during the import stage?).")
+
+ if value.get('comment'):
+ comment = value.get('comment', '').strip()
+ # multiline comment
+ newline_required = '\n' if '\n' in comment else ''
+ parts.append("Debug info: name = '{}' comment:{}{}".format(name, newline_required, comment))
+ else:
+ val_str = json.dumps(value, ensure_ascii=False).encode('utf-8')
+ parts.append("Debug info: name = '{}' value = '{}'".format(name, base64.b64encode(val_str)))
+
+ msg = "[[bad]]{}".format('\n'.join(parts))
+ class_name, subtest_name = self._current_test
+ if subtest_name:
+ data = {
+ 'class': class_name,
+ 'subtest': subtest_name,
+ 'status': 'fail',
+ 'comment': msg,
+ }
+ # overwrite original status
+ self._dump_trace('subtest-finished', data)
+ else:
+ self._dump_trace('chunk_event', {"errors": [('fail', msg)]})
+ except Exception as e:
+ yatest_logger.exception(e)
+ finally:
+ os._exit(38)
+
+ def trace(self, name, value):
+ self._check_sloppy_fork(name, value)
+ self._dump_trace(name, value)
+
+
class DryTraceReportGenerator(TraceReportGenerator):
"""
Generator does not write any information.
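Note: the `_check_sloppy_fork` machinery restored above guards the trace file against leaked `fork()` children by remembering the pid that created the reporter and hard-exiting any other process that tries to write. A stripped-down sketch of just that guard (`TraceGuard` is a hypothetical stand-in, not the real reporter class):

```python
# Stripped-down sketch of the pid guard in _check_sloppy_fork() above.
import os

class TraceGuard(object):
    def __init__(self):
        self._pid = os.getpid()  # pid that owns the trace file

    def trace(self, name, value):
        if self._pid != os.getpid():
            # a leaked fork() child reached test machinery code;
            # bail out hard, as the reporter above does
            os._exit(38)
        print(name, value)  # stand-in for the JSON trace write
```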
diff --git a/library/python/pytest/ya.make b/library/python/pytest/ya.make
index 662c7787b3..060c92c313 100644
--- a/library/python/pytest/ya.make
+++ b/library/python/pytest/ya.make
@@ -6,7 +6,7 @@ OWNER(
)
PY_SRCS(
- __init__.py
+ __init__.py
main.py
rewrite.py
yatest_tools.py
@@ -14,19 +14,19 @@ PY_SRCS(
)
PEERDIR(
- contrib/python/dateutil
- contrib/python/ipdb
- contrib/python/py
- contrib/python/pytest
- contrib/python/requests
+ contrib/python/dateutil
+ contrib/python/ipdb
+ contrib/python/py
+ contrib/python/pytest
+ contrib/python/requests
library/python/pytest/plugins
library/python/testing/yatest_common
library/python/testing/yatest_lib
)
-RESOURCE_FILES(
- PREFIX library/python/pytest/
- pytest.yatest.ini
-)
-
+RESOURCE_FILES(
+ PREFIX library/python/pytest/
+ pytest.yatest.ini
+)
+
END()
diff --git a/library/python/pytest/yatest_tools.py b/library/python/pytest/yatest_tools.py
index 0c336250c6..6b8b896394 100644
--- a/library/python/pytest/yatest_tools.py
+++ b/library/python/pytest/yatest_tools.py
@@ -1,25 +1,25 @@
-# coding: utf-8
-
-import collections
-import functools
-import math
+# coding: utf-8
+
+import collections
+import functools
+import math
import os
import re
-import sys
+import sys
import yatest_lib.tools
class Subtest(object):
- def __init__(self, name, test_name, status, comment, elapsed, result=None, test_type=None, logs=None, cwd=None, metrics=None):
- self._name = name
- self._test_name = test_name
+ def __init__(self, name, test_name, status, comment, elapsed, result=None, test_type=None, logs=None, cwd=None, metrics=None):
+ self._name = name
+ self._test_name = test_name
self.status = status
self.elapsed = elapsed
self.comment = comment
self.result = result
self.test_type = test_type
- self.logs = logs or {}
+ self.logs = logs or {}
self.cwd = cwd
self.metrics = metrics
@@ -31,17 +31,17 @@ class Subtest(object):
def __str__(self):
return yatest_lib.tools.to_utf8(unicode(self))
- def __unicode__(self):
- return u"{}::{}".format(self.test_name, self.test_name)
-
- @property
- def name(self):
+ def __unicode__(self):
+ return u"{}::{}".format(self.test_name, self.test_name)
+
+ @property
+ def name(self):
return yatest_lib.tools.to_utf8(self._name)
-
- @property
- def test_name(self):
+
+ @property
+ def test_name(self):
return yatest_lib.tools.to_utf8(self._test_name)
-
+
def __repr__(self):
return "Subtest [{}::{} - {}[{}]: {}]".format(self.name, self.test_name, self.status, self.elapsed, self.comment)
@@ -84,7 +84,7 @@ class SubtestInfo(object):
class Status(object):
- GOOD, XFAIL, FAIL, XPASS, MISSING, CRASHED, TIMEOUT = range(7)
+ GOOD, XFAIL, FAIL, XPASS, MISSING, CRASHED, TIMEOUT = range(7)
SKIPPED = -100
NOT_LAUNCHED = -200
CANON_DIFF = -300
@@ -152,76 +152,76 @@ ya_ctx = YaCtx()
TRACE_FILE_NAME = "ytest.report.trace"
-def lazy(func):
- mem = {}
-
- @functools.wraps(func)
- def wrapper():
- if "results" not in mem:
- mem["results"] = func()
- return mem["results"]
-
- return wrapper
-
-
-@lazy
-def _get_mtab():
- if os.path.exists("/etc/mtab"):
- with open("/etc/mtab") as afile:
- data = afile.read()
- return [line.split(" ") for line in data.split("\n") if line]
- return []
-
-
-def get_max_filename_length(dirname):
+def lazy(func):
+ mem = {}
+
+ @functools.wraps(func)
+ def wrapper():
+ if "results" not in mem:
+ mem["results"] = func()
+ return mem["results"]
+
+ return wrapper
+
+
+@lazy
+def _get_mtab():
+ if os.path.exists("/etc/mtab"):
+ with open("/etc/mtab") as afile:
+ data = afile.read()
+ return [line.split(" ") for line in data.split("\n") if line]
+ return []
+
+
+def get_max_filename_length(dirname):
"""
- Return maximum filename length for the filesystem
- :return:
- """
- if sys.platform.startswith("linux"):
-        # Linux users may work on a mounted ecryptfs filesystem
- # which has filename length limitations
- for entry in _get_mtab():
- mounted_dir, filesystem = entry[1], entry[2]
- # http://unix.stackexchange.com/questions/32795/what-is-the-maximum-allowed-filename-and-folder-size-with-ecryptfs
- if filesystem == "ecryptfs" and dirname and dirname.startswith(mounted_dir):
- return 140
- # default maximum filename length for most filesystems
- return 255
-
-
-def get_unique_file_path(dir_path, filename, cache=collections.defaultdict(set)):
- """
- Get unique filename in dir with proper filename length, using given filename/dir.
-    File/dir won't be created (not thread-safe)
+ Return maximum filename length for the filesystem
+ :return:
+ """
+ if sys.platform.startswith("linux"):
+        # Linux users may work on a mounted ecryptfs filesystem
+ # which has filename length limitations
+ for entry in _get_mtab():
+ mounted_dir, filesystem = entry[1], entry[2]
+ # http://unix.stackexchange.com/questions/32795/what-is-the-maximum-allowed-filename-and-folder-size-with-ecryptfs
+ if filesystem == "ecryptfs" and dirname and dirname.startswith(mounted_dir):
+ return 140
+ # default maximum filename length for most filesystems
+ return 255
+
+
+def get_unique_file_path(dir_path, filename, cache=collections.defaultdict(set)):
+ """
+ Get unique filename in dir with proper filename length, using given filename/dir.
+    File/dir won't be created (not thread-safe)
:param dir_path: path to dir
- :param filename: original filename
- :return: unique filename
+ :param filename: original filename
+ :return: unique filename
"""
- max_suffix = 10000
- # + 1 symbol for dot before suffix
- tail_length = int(round(math.log(max_suffix, 10))) + 1
- # truncate filename length in accordance with filesystem limitations
- filename, extension = os.path.splitext(filename)
- # XXX
- if sys.platform.startswith("win"):
- # Trying to fit into MAX_PATH if it's possible.
- # Remove after DEVTOOLS-1646
- max_path = 260
- filename_len = len(dir_path) + len(extension) + tail_length + len(os.sep)
- if filename_len < max_path:
+ max_suffix = 10000
+ # + 1 symbol for dot before suffix
+ tail_length = int(round(math.log(max_suffix, 10))) + 1
+ # truncate filename length in accordance with filesystem limitations
+ filename, extension = os.path.splitext(filename)
+ # XXX
+ if sys.platform.startswith("win"):
+ # Trying to fit into MAX_PATH if it's possible.
+ # Remove after DEVTOOLS-1646
+ max_path = 260
+ filename_len = len(dir_path) + len(extension) + tail_length + len(os.sep)
+ if filename_len < max_path:
filename = yatest_lib.tools.trim_string(filename, max_path - filename_len)
filename = yatest_lib.tools.trim_string(filename, get_max_filename_length(dir_path) - tail_length - len(extension)) + extension
- candidate = os.path.join(dir_path, filename)
-
- key = dir_path + filename
- counter = sorted(cache.get(key, {0, }))[-1]
- while os.path.exists(candidate):
- cache[key].add(counter)
- counter += 1
- assert counter < max_suffix
- candidate = os.path.join(dir_path, filename + ".{}".format(counter))
- return candidate
+ candidate = os.path.join(dir_path, filename)
+
+ key = dir_path + filename
+ counter = sorted(cache.get(key, {0, }))[-1]
+ while os.path.exists(candidate):
+ cache[key].add(counter)
+ counter += 1
+ assert counter < max_suffix
+ candidate = os.path.join(dir_path, filename + ".{}".format(counter))
+ return candidate
def escape_for_fnmatch(s):
@@ -251,18 +251,18 @@ def normalize_name(name):
return name
-def normalize_filename(filename):
+def normalize_filename(filename):
"""
Replace invalid for file names characters with string equivalents
:param some_string: string to be converted to a valid file name
:return: valid file name
"""
not_allowed_pattern = r"[\[\]\/:*?\"\'<>|+\0\\\s\x0b\x0c]"
- filename = re.sub(not_allowed_pattern, ".", filename)
- return re.sub(r"\.{2,}", ".", filename)
+ filename = re.sub(not_allowed_pattern, ".", filename)
+ return re.sub(r"\.{2,}", ".", filename)
-def get_test_log_file_path(output_dir, class_name, test_name, extension="log"):
+def get_test_log_file_path(output_dir, class_name, test_name, extension="log"):
"""
    get test log file path, platform dependent
:param output_dir: dir where log file should be placed
@@ -271,16 +271,16 @@ def get_test_log_file_path(output_dir, class_name, test_name, extension="log"):
:return: test log file name
"""
if os.name == "nt":
- # don't add class name to the log's filename
- # to reduce it's length on windows
- filename = test_name
+        # do not add the class name to the log's filename
+        # to reduce its length on Windows
+        filename = test_name
else:
- filename = "{}.{}".format(class_name, test_name)
- if not filename:
- filename = "test"
- filename += "." + extension
- filename = normalize_filename(filename)
- return get_unique_file_path(output_dir, filename)
+ filename = "{}.{}".format(class_name, test_name)
+ if not filename:
+ filename = "test"
+ filename += "." + extension
+ filename = normalize_filename(filename)
+ return get_unique_file_path(output_dir, filename)
def split_node_id(nodeid, test_suffix=None):
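The hunk above restores `get_unique_file_path`, which trims the name to the filesystem limit and then probes numbered suffixes. Below is a minimal standalone sketch of the suffixing idea; `unique_path_sketch` is an invented name, and the real helper additionally keeps a per-key counter cache and the length trimming shown above.

```python
import os

def unique_path_sketch(dir_path, filename, max_suffix=10000):
    # Try the plain name first, then append ".1", ".2", ... until a
    # non-existing path is found. The real get_unique_file_path also
    # trims the name to the filesystem's filename-length limit
    # (140 on ecryptfs, 255 elsewhere).
    candidate = os.path.join(dir_path, filename)
    counter = 0
    while os.path.exists(candidate):
        counter += 1
        assert counter < max_suffix
        candidate = os.path.join(dir_path, "{}.{}".format(filename, counter))
    return candidate

print(unique_path_sketch("/tmp", "test.log"))
```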
diff --git a/library/python/reservoir_sampling/README.md b/library/python/reservoir_sampling/README.md
index 30ff3fcfcb..27674ba4f0 100644
--- a/library/python/reservoir_sampling/README.md
+++ b/library/python/reservoir_sampling/README.md
@@ -1,11 +1,11 @@
-### Overview
-Reservoir sampling is a family of randomized algorithms for choosing a simple random sample, without replacement, of k items from a population of unknown size n in a single pass over the items.
-
-### Example
-
-```jupyter
-In [1]: from library.python import reservoir_sampling
-
-In [2]: reservoir_sampling.reservoir_sampling(data=range(100), nsamples=10)
-Out[2]: [27, 19, 81, 45, 89, 78, 13, 36, 29, 9]
-```
+### Overview
+Reservoir sampling is a family of randomized algorithms for choosing a simple random sample, without replacement, of k items from a population of unknown size n in a single pass over the items.
+
+### Example
+
+```jupyter
+In [1]: from library.python import reservoir_sampling
+
+In [2]: reservoir_sampling.reservoir_sampling(data=range(100), nsamples=10)
+Out[2]: [27, 19, 81, 45, 89, 78, 13, 36, 29, 9]
+```
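The README describes the technique in one sentence; for reference, here is a minimal standalone sketch of Algorithm R, the classic reservoir-sampling scheme. It is not the library's implementation (that lives in library/python/reservoir_sampling), and the sampled output differs from run to run.

```python
import random

def reservoir_sample(iterable, nsamples):
    # Algorithm R: keep the first nsamples items, then replace a random
    # slot with probability nsamples / (i + 1) for the i-th item.
    # One pass, O(nsamples) memory, stream length need not be known.
    reservoir = []
    for i, item in enumerate(iterable):
        if i < nsamples:
            reservoir.append(item)
        else:
            j = random.randint(0, i)
            if j < nsamples:
                reservoir[j] = item
    return reservoir

print(reservoir_sample(range(100), 10))  # e.g. [27, 19, 81, ...]
```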
diff --git a/library/python/resource/__init__.py b/library/python/resource/__init__.py
index 1e4f0526b7..26503ef7fc 100644
--- a/library/python/resource/__init__.py
+++ b/library/python/resource/__init__.py
@@ -30,8 +30,8 @@ def iteritems(prefix='', strip_prefix=False):
if strip_prefix:
key = key[len(prefix):]
yield key, value
-
-
+
+
def resfs_file_exists(path):
return resfs_src(path, resfs_file=True) is not None
@@ -41,9 +41,9 @@ def resfs_files(prefix=''):
if isinstance(prefix, six.text_type):
decode = lambda s: s.decode('utf-8')
return [decode(s) for s in __resfs_files(prefix=prefix)]
-
-
-def find(path):
- if isinstance(path, six.text_type):
- path = path.encode('utf-8')
- return __find(path)
+
+
+def find(path):
+ if isinstance(path, six.text_type):
+ path = path.encode('utf-8')
+ return __find(path)
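`find` normalizes text keys to UTF-8 bytes before hitting the resource store, which is keyed by bytes. A small sketch of that boundary convention; `find_sketch` and the dict are stand-ins for the built-in `__find`:

```python
import six

def find_sketch(path, storage):
    # The underlying store is keyed by bytes, so text keys are encoded
    # to UTF-8 before lookup. `storage` stands in for __find.
    if isinstance(path, six.text_type):
        path = path.encode('utf-8')
    return storage.get(path)

print(find_sketch(u'/qw.txt', {b'/qw.txt': b'data'}))  # b'data'
```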
diff --git a/library/python/resource/ut/lib/test_simple.py b/library/python/resource/ut/lib/test_simple.py
index f76b656cb9..52f006ff91 100644
--- a/library/python/resource/ut/lib/test_simple.py
+++ b/library/python/resource/ut/lib/test_simple.py
@@ -1,11 +1,11 @@
-import six # noqa
-
+import six # noqa
+
import library.python.resource as rs
text = b'na gorshke sidel korol\n'
-def test_find():
+def test_find():
assert rs.find('/qw.txt') == text
@@ -21,11 +21,11 @@ def test_iter():
('/1.txt', text),
('/2.txt', text),
}
-
-
-def test_resfs_files():
- assert 'contrib/python/py/.dist-info/METADATA' in set(rs.resfs_files())
-
-
-def test_resfs_read():
- assert 'Metadata-Version' in rs.resfs_read('contrib/python/py/.dist-info/METADATA').decode('utf-8')
+
+
+def test_resfs_files():
+ assert 'contrib/python/py/.dist-info/METADATA' in set(rs.resfs_files())
+
+
+def test_resfs_read():
+ assert 'Metadata-Version' in rs.resfs_read('contrib/python/py/.dist-info/METADATA').decode('utf-8')
diff --git a/library/python/runtime_py3/entry_points.py b/library/python/runtime_py3/entry_points.py
index fe2dff2c77..05098723cb 100644
--- a/library/python/runtime_py3/entry_points.py
+++ b/library/python/runtime_py3/entry_points.py
@@ -42,11 +42,11 @@ def repl():
def resource_files():
sys.stdout.buffer.write(b'\n'.join(sorted(__res.resfs_files()) + [b'']))
-
-
-def run_constructors():
- for key, module_name in __res.iter_keys(b'py/constructors/'):
- import importlib
- module = importlib.import_module(module_name.decode())
- init_func = getattr(module, __res.find(key).decode())
- init_func()
+
+
+def run_constructors():
+ for key, module_name in __res.iter_keys(b'py/constructors/'):
+ import importlib
+ module = importlib.import_module(module_name.decode())
+ init_func = getattr(module, __res.find(key).decode())
+ init_func()
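`run_constructors` walks registered resource keys and invokes the named init function of each module. A sketch of the same dispatch, with a plain list standing in for the `__res` key/value store (which maps b'py/constructors/<module>' keys to function names):

```python
import importlib

def run_constructors_sketch(registry):
    # Each entry names a module and an init function inside it; the
    # module is imported lazily and the function invoked, mirroring
    # run_constructors() above.
    for module_name, func_name in registry:
        module = importlib.import_module(module_name)
        getattr(module, func_name)()

run_constructors_sketch([("logging", "basicConfig")])  # harmless demo entry
```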
diff --git a/library/python/runtime_py3/main/main.c b/library/python/runtime_py3/main/main.c
index d135e00e99..3159800615 100644
--- a/library/python/runtime_py3/main/main.c
+++ b/library/python/runtime_py3/main/main.c
@@ -160,21 +160,21 @@ static int pymain(int argc, char** argv) {
Py_InitArgcArgv(argc, argv_copy);
PySys_SetArgv(argc, argv_copy);
- {
- PyObject* module = PyImport_ImportModule("library.python.runtime_py3.entry_points");
- if (module == NULL) {
- PyErr_Print();
- } else {
- PyObject* res = PyObject_CallMethod(module, "run_constructors", NULL);
- if (res == NULL) {
- PyErr_Print();
- } else {
- Py_DECREF(res);
- }
- Py_DECREF(module);
- }
- }
-
+ {
+ PyObject* module = PyImport_ImportModule("library.python.runtime_py3.entry_points");
+ if (module == NULL) {
+ PyErr_Print();
+ } else {
+ PyObject* res = PyObject_CallMethod(module, "run_constructors", NULL);
+ if (res == NULL) {
+ PyErr_Print();
+ } else {
+ Py_DECREF(res);
+ }
+ Py_DECREF(module);
+ }
+ }
+
const char* module_name = entry_point_copy;
const char* func_name = NULL;
diff --git a/library/python/strings/__init__.py b/library/python/strings/__init__.py
index 47a731b1de..bd6bf6e7ce 100644
--- a/library/python/strings/__init__.py
+++ b/library/python/strings/__init__.py
@@ -1,17 +1,17 @@
-# flake8 noqa: F401
-
-from .strings import (
- DEFAULT_ENCODING,
- ENCODING_ERRORS_POLICY,
- encode,
- fs_encoding,
- get_stream_encoding,
- guess_default_encoding,
- left_strip,
- locale_encoding,
- stringize_deep,
- to_basestring,
- to_str,
- to_unicode,
- unicodize_deep,
-)
+# flake8 noqa: F401
+
+from .strings import (
+ DEFAULT_ENCODING,
+ ENCODING_ERRORS_POLICY,
+ encode,
+ fs_encoding,
+ get_stream_encoding,
+ guess_default_encoding,
+ left_strip,
+ locale_encoding,
+ stringize_deep,
+ to_basestring,
+ to_str,
+ to_unicode,
+ unicodize_deep,
+)
diff --git a/library/python/strings/strings.py b/library/python/strings/strings.py
index 476a797117..5bfddfe78a 100644
--- a/library/python/strings/strings.py
+++ b/library/python/strings/strings.py
@@ -9,7 +9,7 @@ import library.python.func
logger = logging.getLogger(__name__)
-DEFAULT_ENCODING = 'utf-8'
+DEFAULT_ENCODING = 'utf-8'
ENCODING_ERRORS_POLICY = 'replace'
@@ -40,7 +40,7 @@ def to_basestring(value):
to_text = to_basestring
-def to_unicode(value, from_enc=DEFAULT_ENCODING):
+def to_unicode(value, from_enc=DEFAULT_ENCODING):
if isinstance(value, six.text_type):
return value
if isinstance(value, six.binary_type):
@@ -52,7 +52,7 @@ def to_unicode(value, from_enc=DEFAULT_ENCODING):
# Optional from_enc enables transcoding
-def to_str(value, to_enc=DEFAULT_ENCODING, from_enc=None):
+def to_str(value, to_enc=DEFAULT_ENCODING, from_enc=None):
if isinstance(value, six.binary_type):
if from_enc is None or to_enc == from_enc:
# Unknown input encoding or input and output encoding are the same
@@ -63,32 +63,32 @@ def to_str(value, to_enc=DEFAULT_ENCODING, from_enc=None):
return six.binary_type(value)
-def _convert_deep(x, enc, convert, relaxed=True):
+def _convert_deep(x, enc, convert, relaxed=True):
if x is None:
return None
if isinstance(x, (six.text_type, six.binary_type)):
return convert(x, enc)
if isinstance(x, dict):
return {convert(k, enc): _convert_deep(v, enc, convert, relaxed) for k, v in six.iteritems(x)}
- if isinstance(x, list):
- return [_convert_deep(e, enc, convert, relaxed) for e in x]
- if isinstance(x, tuple):
- return tuple([_convert_deep(e, enc, convert, relaxed) for e in x])
-
- if relaxed:
- return x
+ if isinstance(x, list):
+ return [_convert_deep(e, enc, convert, relaxed) for e in x]
+ if isinstance(x, tuple):
+ return tuple([_convert_deep(e, enc, convert, relaxed) for e in x])
+
+ if relaxed:
+ return x
raise TypeError('unsupported type')
-def unicodize_deep(x, enc=DEFAULT_ENCODING, relaxed=True):
- return _convert_deep(x, enc, to_unicode, relaxed)
+def unicodize_deep(x, enc=DEFAULT_ENCODING, relaxed=True):
+ return _convert_deep(x, enc, to_unicode, relaxed)
-def stringize_deep(x, enc=DEFAULT_ENCODING, relaxed=True):
- return _convert_deep(x, enc, to_str, relaxed)
+def stringize_deep(x, enc=DEFAULT_ENCODING, relaxed=True):
+ return _convert_deep(x, enc, to_str, relaxed)
-@library.python.func.memoize()
+@library.python.func.memoize()
def locale_encoding():
try:
loc = locale.getdefaultlocale()[1]
@@ -109,10 +109,10 @@ def fs_encoding():
def guess_default_encoding():
enc = locale_encoding()
- return enc if enc else DEFAULT_ENCODING
+ return enc if enc else DEFAULT_ENCODING
-@library.python.func.memoize()
+@library.python.func.memoize()
def get_stream_encoding(stream):
if stream.encoding:
try:
@@ -120,10 +120,10 @@ def get_stream_encoding(stream):
return stream.encoding
except LookupError:
pass
- return DEFAULT_ENCODING
-
-
-def encode(value, encoding=DEFAULT_ENCODING):
+ return DEFAULT_ENCODING
+
+
+def encode(value, encoding=DEFAULT_ENCODING):
if isinstance(value, six.binary_type):
- value = value.decode(encoding, errors='ignore')
- return value.encode(encoding)
+ value = value.decode(encoding, errors='ignore')
+ return value.encode(encoding)
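`stringize_deep`/`unicodize_deep` recurse through containers and convert only string-like leaves. A compact sketch of that recursion, simplified to relaxed mode with the codec folded into the callback (the library's `_convert_deep` also threads an encoding argument and can raise on unknown types):

```python
import six

def convert_deep(x, convert):
    # Strings are converted, containers rebuilt element-wise,
    # anything else passes through (relaxed mode).
    if isinstance(x, (six.text_type, six.binary_type)):
        return convert(x)
    if isinstance(x, dict):
        return {convert_deep(k, convert): convert_deep(v, convert) for k, v in six.iteritems(x)}
    if isinstance(x, (list, tuple)):
        return type(x)(convert_deep(e, convert) for e in x)
    return x

data = {u'key': [u'значение', 42]}
encode = lambda s: s.encode('utf-8') if isinstance(s, six.text_type) else s
print(convert_deep(data, encode))  # {b'key': [b'...', 42]}
```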
diff --git a/library/python/strings/ut/test_strings.py b/library/python/strings/ut/test_strings.py
index 663ff24b14..dd0c694ee1 100644
--- a/library/python/strings/ut/test_strings.py
+++ b/library/python/strings/ut/test_strings.py
@@ -115,7 +115,7 @@ def test_stringize_deep():
assert library.python.strings.stringize_deep({
'key 1': 'value 1',
u'ключ 2': u'значение 2',
- 'list': [u'ключ 2', 'key 1', (u'к', 2)]
+ 'list': [u'ключ 2', 'key 1', (u'к', 2)]
}) == {
'key 1' if six.PY2 else b'key 1': 'value 1' if six.PY2 else b'value 1',
u'ключ 2'.encode('utf-8'): u'значение 2'.encode('utf-8'),
@@ -157,9 +157,9 @@ def test_stringize_deep_plain():
def test_stringize_deep_nonstr():
with pytest.raises(TypeError):
- library.python.strings.stringize_deep(Convertible(), relaxed=False)
- x = Convertible()
- assert x == library.python.strings.stringize_deep(x)
+ library.python.strings.stringize_deep(Convertible(), relaxed=False)
+ x = Convertible()
+ assert x == library.python.strings.stringize_deep(x)
def test_unicodize_deep():
@@ -200,6 +200,6 @@ def test_unicodize_deep_plain():
def test_unicodize_deep_nonstr():
with pytest.raises(TypeError):
- library.python.strings.unicodize_deep(Convertible(), relaxed=False)
- x = Convertible()
- assert x == library.python.strings.stringize_deep(x)
+ library.python.strings.unicodize_deep(Convertible(), relaxed=False)
+ x = Convertible()
+    assert x == library.python.strings.unicodize_deep(x)
diff --git a/library/python/strings/ya.make b/library/python/strings/ya.make
index 5285c13774..7e0b033717 100644
--- a/library/python/strings/ya.make
+++ b/library/python/strings/ya.make
@@ -2,11 +2,11 @@ OWNER(g:yatool)
PY23_LIBRARY()
-PY_SRCS(
- __init__.py
- CYTHONIZE_PY
- strings.py
-)
+PY_SRCS(
+ __init__.py
+ CYTHONIZE_PY
+ strings.py
+)
PEERDIR(
library/python/func
diff --git a/library/python/testing/filter/ya.make b/library/python/testing/filter/ya.make
index 7944b713a5..22c485d258 100644
--- a/library/python/testing/filter/ya.make
+++ b/library/python/testing/filter/ya.make
@@ -1,5 +1,5 @@
PY23_LIBRARY()
-OWNER(g:yatest)
+OWNER(g:yatest)
PY_SRCS(filter.py)
END()
diff --git a/library/python/testing/import_test/import_test.py b/library/python/testing/import_test/import_test.py
index 440690af59..3e3b7234ef 100644
--- a/library/python/testing/import_test/import_test.py
+++ b/library/python/testing/import_test/import_test.py
@@ -3,8 +3,8 @@ from __future__ import print_function
import os
import re
import sys
-import time
-import traceback
+import time
+import traceback
import __res
from __res import importer
@@ -32,7 +32,7 @@ def check_imports(no_check=(), extra=(), skip_func=None, py_main=None):
rx = re.compile('^({})$'.format('|'.join(patterns)))
failed = []
- import_times = {}
+ import_times = {}
norm = lambda s: s[:-9] if s.endswith('.__init__') else s
@@ -59,10 +59,10 @@ def check_imports(no_check=(), extra=(), skip_func=None, py_main=None):
try:
print('TRY', module)
- # XXX waiting for py3 to use print(..., flush=True)
- sys.stdout.flush()
-
- s = time.time()
+ # XXX waiting for py3 to use print(..., flush=True)
+ sys.stdout.flush()
+
+ s = time.time()
if module == '__main__':
importer.load_module('__main__', '__main__py')
elif module.endswith('.__init__'):
@@ -70,10 +70,10 @@ def check_imports(no_check=(), extra=(), skip_func=None, py_main=None):
else:
__import__(module)
- delay = time.time() - s
- import_times[str(module)] = delay
- print('OK ', module, '{:.3f}s'.format(delay))
-
+ delay = time.time() - s
+ import_times[str(module)] = delay
+ print('OK ', module, '{:.3f}s'.format(delay))
+
except Exception as e:
print('FAIL:', module, e, file=sys.stderr)
print_backtrace_marked(sys.exc_info())
@@ -86,10 +86,10 @@ def check_imports(no_check=(), extra=(), skip_func=None, py_main=None):
failed.append('{}: {}'.format(module, e))
raise
- print("Slowest imports:")
- for m, t in sorted(import_times.items(), key=lambda x: x[1], reverse=True)[:30]:
- print(' ', '{:.3f}s'.format(t), m)
-
+ print("Slowest imports:")
+ for m, t in sorted(import_times.items(), key=lambda x: x[1], reverse=True)[:30]:
+ print(' ', '{:.3f}s'.format(t), m)
+
if failed:
raise ImportError('modules not imported:\n' + '\n'.join(failed))
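The import test above times every module import and reports the slowest ones. A self-contained sketch of the measurement pattern; `timed_import` is an invented helper name:

```python
import sys
import time

def timed_import(module_name):
    # Flush stdout before the attempt (py2 print() has no flush=),
    # time the import, and keep the delay so the slowest modules can
    # be reported afterwards, as check_imports() does.
    sys.stdout.flush()
    start = time.time()
    __import__(module_name)
    return time.time() - start

import_times = {m: timed_import(m) for m in ("json", "re", "logging")}
print("Slowest imports:")
for module, delay in sorted(import_times.items(), key=lambda kv: kv[1], reverse=True):
    print(' ', '{:.3f}s'.format(delay), module)
```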
diff --git a/library/python/testing/import_test/ya.make b/library/python/testing/import_test/ya.make
index b996336159..fae36ffe8f 100644
--- a/library/python/testing/import_test/ya.make
+++ b/library/python/testing/import_test/ya.make
@@ -1,5 +1,5 @@
OWNER(
- g:yatest
+ g:yatest
exprmntr
)
diff --git a/library/python/testing/recipe/__init__.py b/library/python/testing/recipe/__init__.py
index 3ea95ebb1d..5ef9c5c189 100644
--- a/library/python/testing/recipe/__init__.py
+++ b/library/python/testing/recipe/__init__.py
@@ -16,7 +16,7 @@ collect_cores = None
sanitizer_extra_checks = None
-def _setup_logging(level=logging.DEBUG):
+def _setup_logging(level=logging.DEBUG):
root_logger = logging.getLogger()
root_logger.setLevel(level)
@@ -41,7 +41,7 @@ def get_options():
args, opts = parser.parse_known_args()
global ya, sanitizer_extra_checks, collect_cores
- _setup_logging()
+ _setup_logging()
context = {
"test_stderr": args.test_stderr,
@@ -96,7 +96,7 @@ def declare_recipe(start, stop):
except Exception:
if parsed_args.pdb:
tty()
- import ipdb
- ipdb.post_mortem()
+ import ipdb
+ ipdb.post_mortem()
else:
raise
diff --git a/library/python/testing/recipe/ya.make b/library/python/testing/recipe/ya.make
index 239223c789..dd323aa245 100644
--- a/library/python/testing/recipe/ya.make
+++ b/library/python/testing/recipe/ya.make
@@ -1,6 +1,6 @@
OWNER(
exprmntr
- g:yatest
+ g:yatest
)
PY23_LIBRARY()
@@ -10,10 +10,10 @@ PY_SRCS(
ports.py
)
-PEERDIR(
- contrib/python/ipdb
+PEERDIR(
+ contrib/python/ipdb
library/python/testing/yatest_common
library/python/testing/yatest_lib
-)
-
+)
+
END()
diff --git a/library/python/testing/ya.make b/library/python/testing/ya.make
index c232bc663e..883bc8d7ab 100644
--- a/library/python/testing/ya.make
+++ b/library/python/testing/ya.make
@@ -1,8 +1,8 @@
-OWNER(g:yatest)
+OWNER(g:yatest)
RECURSE(
behave
- deprecated
+ deprecated
fake_ya_package
filter
gtest
@@ -15,8 +15,8 @@ RECURSE(
recipe
system_info
types_test
- yapackage
- yapackage/test
+ yapackage
+ yapackage/test
yatest_common
yatest_lib
)
diff --git a/library/python/testing/yatest_common/ya.make b/library/python/testing/yatest_common/ya.make
index 0098e1be85..5662db4c5d 100644
--- a/library/python/testing/yatest_common/ya.make
+++ b/library/python/testing/yatest_common/ya.make
@@ -1,31 +1,31 @@
-OWNER(g:yatest)
+OWNER(g:yatest)
PY23_LIBRARY()
-OWNER(g:yatest)
+OWNER(g:yatest)
NO_EXTENDED_SOURCE_SEARCH()
PY_SRCS(
TOP_LEVEL
- yatest/__init__.py
+ yatest/__init__.py
yatest/common/__init__.py
yatest/common/benchmark.py
yatest/common/canonical.py
yatest/common/environment.py
yatest/common/errors.py
yatest/common/legacy.py
- yatest/common/misc.py
+ yatest/common/misc.py
yatest/common/network.py
yatest/common/path.py
yatest/common/process.py
yatest/common/runtime.py
yatest/common/runtime_java.py
yatest/common/tags.py
-)
+)
-PEERDIR(
- contrib/python/six
+PEERDIR(
+ contrib/python/six
library/python/cores
library/python/filelock
library/python/fs
diff --git a/library/python/testing/yatest_common/yatest/common/canonical.py b/library/python/testing/yatest_common/yatest/common/canonical.py
index 7a258d070b..b6a136d3e9 100644
--- a/library/python/testing/yatest_common/yatest/common/canonical.py
+++ b/library/python/testing/yatest_common/yatest/common/canonical.py
@@ -68,7 +68,7 @@ def canonical_execute(
):
"""
Shortcut to execute a binary and canonize its stdout
- :param binary: absolute path to the binary
+ :param binary: absolute path to the binary
:param args: binary arguments
:param check_exit_code: will raise ExecutionError if the command exits with non zero code
:param shell: use shell to run the command
@@ -78,7 +78,7 @@ def canonical_execute(
:param stdin: command stdin
:param stderr: command stderr
:param creationflags: command creation flags
- :param file_name: output file name. if not specified program name will be used
+    :param file_name: output file name. If not specified, the program name will be used
    :param diff_tool: path to custom diff tool
:param diff_file_name: custom diff file name to create when diff is found
:param diff_tool_timeout: timeout for running diff tool
@@ -94,7 +94,7 @@ def canonical_execute(
execute_args = locals()
del execute_args["binary"]
del execute_args["args"]
- del execute_args["file_name"]
+ del execute_args["file_name"]
del execute_args["save_locally"]
del execute_args["diff_tool"]
del execute_args["diff_file_name"]
@@ -123,7 +123,7 @@ def canonical_py_execute(
:param stdin: command stdin
:param stderr: command stderr
:param creationflags: command creation flags
- :param file_name: output file name. if not specified program name will be used
+    :param file_name: output file name. If not specified, the program name will be used
    :param diff_tool: path to custom diff tool
:param diff_file_name: custom diff file name to create when diff is found
:param diff_tool_timeout: timeout for running diff tool
@@ -135,7 +135,7 @@ def canonical_py_execute(
execute_args = locals()
del execute_args["script_path"]
del execute_args["args"]
- del execute_args["file_name"]
+ del execute_args["file_name"]
del execute_args["save_locally"]
del execute_args["diff_tool"]
del execute_args["diff_file_name"]
diff --git a/library/python/testing/yatest_common/yatest/common/environment.py b/library/python/testing/yatest_common/yatest/common/environment.py
index 62fc501d52..43f48d0958 100644
--- a/library/python/testing/yatest_common/yatest/common/environment.py
+++ b/library/python/testing/yatest_common/yatest/common/environment.py
@@ -1,5 +1,5 @@
-# coding: utf-8
-
+# coding: utf-8
-def extend_env_var(env, name, value, sep=":"):
- return sep.join(filter(None, [env.get(name), value]))
+
+def extend_env_var(env, name, value, sep=":"):
+ return sep.join(filter(None, [env.get(name), value]))
diff --git a/library/python/testing/yatest_common/yatest/common/errors.py b/library/python/testing/yatest_common/yatest/common/errors.py
index 8d5abc9b96..8c038fc381 100644
--- a/library/python/testing/yatest_common/yatest/common/errors.py
+++ b/library/python/testing/yatest_common/yatest/common/errors.py
@@ -1,4 +1,4 @@
-import os
+import os
import sys
@@ -8,7 +8,7 @@ class RestartTestException(Exception):
super(RestartTestException, self).__init__(*args, **kwargs)
sys.stderr.write("##restart-test##\n")
sys.stderr.flush()
- os.environ["FORCE_EXIT_TESTSFAILED"] = "1"
+ os.environ["FORCE_EXIT_TESTSFAILED"] = "1"
class InfrastructureException(Exception):
@@ -17,4 +17,4 @@ class InfrastructureException(Exception):
super(InfrastructureException, self).__init__(*args, **kwargs)
sys.stderr.write("##infrastructure-error##\n")
sys.stderr.flush()
- os.environ["FORCE_EXIT_TESTSFAILED"] = "1"
+ os.environ["FORCE_EXIT_TESTSFAILED"] = "1"
diff --git a/library/python/testing/yatest_common/yatest/common/misc.py b/library/python/testing/yatest_common/yatest/common/misc.py
index 3c8d239a40..20d3725ac9 100644
--- a/library/python/testing/yatest_common/yatest/common/misc.py
+++ b/library/python/testing/yatest_common/yatest/common/misc.py
@@ -1,19 +1,19 @@
-import functools
-
-
-def first(it):
- for d in it:
- if d:
- return d
-
-
-def lazy(func):
- res = []
-
- @functools.wraps(func)
- def wrapper(*args, **kwargs):
- if not res:
- res.append(func(*args, **kwargs))
- return res[0]
-
- return wrapper
+import functools
+
+
+def first(it):
+ for d in it:
+ if d:
+ return d
+
+
+def lazy(func):
+ res = []
+
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ if not res:
+ res.append(func(*args, **kwargs))
+ return res[0]
+
+ return wrapper
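The restored `lazy` caches the first result and returns it for every later call, even if the arguments differ. A short usage sketch of the same decorator shape; the demo function is invented:

```python
import functools

def lazy(func):
    # Same memoization shape as yatest.common.misc.lazy: the first call
    # computes and caches the result; every later call returns the
    # cached value regardless of arguments.
    res = []

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        if not res:
            res.append(func(*args, **kwargs))
        return res[0]

    return wrapper

@lazy
def expensive():
    print("computed once")
    return 42

assert expensive() == expensive() == 42
```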
diff --git a/library/python/testing/yatest_common/yatest/common/network.py b/library/python/testing/yatest_common/yatest/common/network.py
index e3e3b0abc2..37bcb1b8e0 100644
--- a/library/python/testing/yatest_common/yatest/common/network.py
+++ b/library/python/testing/yatest_common/yatest/common/network.py
@@ -1,47 +1,47 @@
# coding=utf-8
-
-import os
-import errno
-import socket
-import random
-import logging
-import platform
-import threading
-
+
+import os
+import errno
+import socket
+import random
+import logging
+import platform
+import threading
+
import six
-UI16MAXVAL = (1 << 16) - 1
-logger = logging.getLogger(__name__)
-
-
-class PortManagerException(Exception):
- pass
-
-
-class PortManager(object):
+UI16MAXVAL = (1 << 16) - 1
+logger = logging.getLogger(__name__)
+
+
+class PortManagerException(Exception):
+ pass
+
+
+class PortManager(object):
"""
See documentation here
-
+
https://wiki.yandex-team.ru/yatool/test/#python-acquire-ports
"""
- def __init__(self, sync_dir=None):
- self._sync_dir = sync_dir or os.environ.get('PORT_SYNC_PATH')
- if self._sync_dir:
- _makedirs(self._sync_dir)
-
- self._valid_range = get_valid_port_range()
- self._valid_port_count = self._count_valid_ports()
- self._filelocks = {}
- self._lock = threading.Lock()
-
- def __enter__(self):
- return self
-
- def __exit__(self, type, value, traceback):
- self.release()
-
- def get_port(self, port=0):
+ def __init__(self, sync_dir=None):
+ self._sync_dir = sync_dir or os.environ.get('PORT_SYNC_PATH')
+ if self._sync_dir:
+ _makedirs(self._sync_dir)
+
+ self._valid_range = get_valid_port_range()
+ self._valid_port_count = self._count_valid_ports()
+ self._filelocks = {}
+ self._lock = threading.Lock()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, value, traceback):
+ self.release()
+
+ def get_port(self, port=0):
'''
Gets free TCP port
'''
@@ -64,8 +64,8 @@ class PortManager(object):
Gets one free port for use in both TCP and UDP protocols
'''
if port and self._no_random_ports():
- return port
-
+ return port
+
retries = 20
while retries > 0:
retries -= 1
@@ -73,199 +73,199 @@ class PortManager(object):
result_port = self.get_tcp_port()
if not self.is_port_free(result_port, socket.SOCK_DGRAM):
self.release_port(result_port)
- # Don't try to _capture_port(), it's already captured in the get_tcp_port()
+ # Don't try to _capture_port(), it's already captured in the get_tcp_port()
return result_port
raise Exception('Failed to find port')
def release_port(self, port):
- with self._lock:
- self._release_port_no_lock(port)
-
- def _release_port_no_lock(self, port):
- filelock = self._filelocks.pop(port, None)
- if filelock:
- filelock.release()
-
+ with self._lock:
+ self._release_port_no_lock(port)
+
+ def _release_port_no_lock(self, port):
+ filelock = self._filelocks.pop(port, None)
+ if filelock:
+ filelock.release()
+
def release(self):
- with self._lock:
- while self._filelocks:
- _, filelock = self._filelocks.popitem()
- if filelock:
- filelock.release()
-
- def get_port_range(self, start_port, count, random_start=True):
- assert count > 0
- if start_port and self._no_random_ports():
- return start_port
-
- candidates = []
-
- def drop_candidates():
- for port in candidates:
- self._release_port_no_lock(port)
- candidates[:] = []
-
- with self._lock:
- for attempts in six.moves.range(128):
- for left, right in self._valid_range:
- if right - left < count:
- continue
-
- if random_start:
- start = random.randint(left, right - ((right - left) // 2))
- else:
- start = left
- for probe_port in six.moves.range(start, right):
- if self._capture_port_no_lock(probe_port, socket.SOCK_STREAM):
- candidates.append(probe_port)
- else:
- drop_candidates()
-
- if len(candidates) == count:
- return candidates[0]
- # Can't find required number of ports without gap in the current range
- drop_candidates()
-
- raise PortManagerException("Failed to find valid port range (start_port: {} count: {}) (range: {} used: {})".format(
- start_port, count, self._valid_range, self._filelocks))
-
- def _count_valid_ports(self):
- res = 0
- for left, right in self._valid_range:
- res += right - left
- assert res, ('There are no available valid ports', self._valid_range)
- return res
-
+ with self._lock:
+ while self._filelocks:
+ _, filelock = self._filelocks.popitem()
+ if filelock:
+ filelock.release()
+
+ def get_port_range(self, start_port, count, random_start=True):
+ assert count > 0
+ if start_port and self._no_random_ports():
+ return start_port
+
+ candidates = []
+
+ def drop_candidates():
+ for port in candidates:
+ self._release_port_no_lock(port)
+ candidates[:] = []
+
+ with self._lock:
+ for attempts in six.moves.range(128):
+ for left, right in self._valid_range:
+ if right - left < count:
+ continue
+
+ if random_start:
+ start = random.randint(left, right - ((right - left) // 2))
+ else:
+ start = left
+ for probe_port in six.moves.range(start, right):
+ if self._capture_port_no_lock(probe_port, socket.SOCK_STREAM):
+ candidates.append(probe_port)
+ else:
+ drop_candidates()
+
+ if len(candidates) == count:
+ return candidates[0]
+ # Can't find required number of ports without gap in the current range
+ drop_candidates()
+
+ raise PortManagerException("Failed to find valid port range (start_port: {} count: {}) (range: {} used: {})".format(
+ start_port, count, self._valid_range, self._filelocks))
+
+ def _count_valid_ports(self):
+ res = 0
+ for left, right in self._valid_range:
+ res += right - left
+ assert res, ('There are no available valid ports', self._valid_range)
+ return res
+
def _get_port(self, port, sock_type):
if port and self._no_random_ports():
return port
- if len(self._filelocks) >= self._valid_port_count:
- raise PortManagerException("All valid ports are taken ({}): {}".format(self._valid_range, self._filelocks))
-
- salt = random.randint(0, UI16MAXVAL)
+ if len(self._filelocks) >= self._valid_port_count:
+ raise PortManagerException("All valid ports are taken ({}): {}".format(self._valid_range, self._filelocks))
+
+ salt = random.randint(0, UI16MAXVAL)
for attempt in six.moves.range(self._valid_port_count):
- probe_port = (salt + attempt) % self._valid_port_count
-
- for left, right in self._valid_range:
- if probe_port >= (right - left):
- probe_port -= right - left
- else:
- probe_port += left
- break
- if not self._capture_port(probe_port, sock_type):
- continue
- return probe_port
-
- raise PortManagerException("Failed to find valid port (range: {} used: {})".format(self._valid_range, self._filelocks))
-
- def _capture_port(self, port, sock_type):
- with self._lock:
- return self._capture_port_no_lock(port, sock_type)
-
+ probe_port = (salt + attempt) % self._valid_port_count
+
+ for left, right in self._valid_range:
+ if probe_port >= (right - left):
+ probe_port -= right - left
+ else:
+ probe_port += left
+ break
+ if not self._capture_port(probe_port, sock_type):
+ continue
+ return probe_port
+
+ raise PortManagerException("Failed to find valid port (range: {} used: {})".format(self._valid_range, self._filelocks))
+
+ def _capture_port(self, port, sock_type):
+ with self._lock:
+ return self._capture_port_no_lock(port, sock_type)
+
def is_port_free(self, port, sock_type=socket.SOCK_STREAM):
- sock = socket.socket(socket.AF_INET6, sock_type)
- try:
- sock.bind(('::', port))
- sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
- except socket.error as e:
- if e.errno == errno.EADDRINUSE:
- return False
- raise
- finally:
- sock.close()
- return True
-
- def _capture_port_no_lock(self, port, sock_type):
- if port in self._filelocks:
- return False
-
- filelock = None
- if self._sync_dir:
- # yatest.common should try to be hermetic and don't have peerdirs
- # otherwise, PYTEST_SCRIPT (aka USE_ARCADIA_PYTHON=no) won't work
- import library.python.filelock
-
- filelock = library.python.filelock.FileLock(os.path.join(self._sync_dir, str(port)))
- if not filelock.acquire(blocking=False):
- return False
+ sock = socket.socket(socket.AF_INET6, sock_type)
+ try:
+ sock.bind(('::', port))
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ except socket.error as e:
+ if e.errno == errno.EADDRINUSE:
+ return False
+ raise
+ finally:
+ sock.close()
+ return True
+
+ def _capture_port_no_lock(self, port, sock_type):
+ if port in self._filelocks:
+ return False
+
+ filelock = None
+ if self._sync_dir:
+            # yatest.common should try to be hermetic and avoid peerdirs;
+            # otherwise PYTEST_SCRIPT (aka USE_ARCADIA_PYTHON=no) won't work
+ import library.python.filelock
+
+ filelock = library.python.filelock.FileLock(os.path.join(self._sync_dir, str(port)))
+ if not filelock.acquire(blocking=False):
+ return False
if self.is_port_free(port, sock_type):
- self._filelocks[port] = filelock
- return True
- else:
- filelock.release()
- return False
-
+ self._filelocks[port] = filelock
+ return True
+ else:
+ filelock.release()
+ return False
+
if self.is_port_free(port, sock_type):
- self._filelocks[port] = filelock
- return True
- if filelock:
- filelock.release()
- return False
-
+ self._filelocks[port] = filelock
+ return True
+ if filelock:
+ filelock.release()
+ return False
+
def _no_random_ports(self):
return os.environ.get("NO_RANDOM_PORTS")
-
-
-def get_valid_port_range():
- first_valid = 1025
- last_valid = UI16MAXVAL
-
- given_range = os.environ.get('VALID_PORT_RANGE')
- if given_range and ':' in given_range:
- return [list(int(x) for x in given_range.split(':', 2))]
-
- first_eph, last_eph = get_ephemeral_range()
- first_invalid = max(first_eph, first_valid)
- last_invalid = min(last_eph, last_valid)
-
- ranges = []
- if first_invalid > first_valid:
- ranges.append((first_valid, first_invalid - 1))
- if last_invalid < last_valid:
- ranges.append((last_invalid + 1, last_valid))
- return ranges
-
-
-def get_ephemeral_range():
- if platform.system() == 'Linux':
- filename = "/proc/sys/net/ipv4/ip_local_port_range"
- if os.path.exists(filename):
- with open(filename) as afile:
+
+
+def get_valid_port_range():
+ first_valid = 1025
+ last_valid = UI16MAXVAL
+
+ given_range = os.environ.get('VALID_PORT_RANGE')
+ if given_range and ':' in given_range:
+ return [list(int(x) for x in given_range.split(':', 2))]
+
+ first_eph, last_eph = get_ephemeral_range()
+ first_invalid = max(first_eph, first_valid)
+ last_invalid = min(last_eph, last_valid)
+
+ ranges = []
+ if first_invalid > first_valid:
+ ranges.append((first_valid, first_invalid - 1))
+ if last_invalid < last_valid:
+ ranges.append((last_invalid + 1, last_valid))
+ return ranges
+
+
+def get_ephemeral_range():
+ if platform.system() == 'Linux':
+ filename = "/proc/sys/net/ipv4/ip_local_port_range"
+ if os.path.exists(filename):
+ with open(filename) as afile:
data = afile.read(1024) # fix for musl
- port_range = tuple(map(int, data.strip().split()))
- if len(port_range) == 2:
- return port_range
- else:
- logger.warning("Bad ip_local_port_range format: '%s'. Going to use IANA suggestion", data)
- elif platform.system() == 'Darwin':
- first = _sysctlbyname_uint("net.inet.ip.portrange.first")
- last = _sysctlbyname_uint("net.inet.ip.portrange.last")
- if first and last:
- return first, last
- # IANA suggestion
- return (1 << 15) + (1 << 14), UI16MAXVAL
-
-
-def _sysctlbyname_uint(name):
- try:
- from ctypes import CDLL, c_uint, byref
- from ctypes.util import find_library
- except ImportError:
- return
-
- libc = CDLL(find_library("c"))
- size = c_uint(0)
- res = c_uint(0)
- libc.sysctlbyname(name, None, byref(size), None, 0)
- libc.sysctlbyname(name, byref(res), byref(size), None, 0)
- return res.value
-
-
-def _makedirs(path):
- try:
- os.makedirs(path)
- except OSError as e:
- if e.errno == errno.EEXIST:
- return
- raise
+ port_range = tuple(map(int, data.strip().split()))
+ if len(port_range) == 2:
+ return port_range
+ else:
+ logger.warning("Bad ip_local_port_range format: '%s'. Going to use IANA suggestion", data)
+ elif platform.system() == 'Darwin':
+ first = _sysctlbyname_uint("net.inet.ip.portrange.first")
+ last = _sysctlbyname_uint("net.inet.ip.portrange.last")
+ if first and last:
+ return first, last
+ # IANA suggestion
+ return (1 << 15) + (1 << 14), UI16MAXVAL
+
+
+def _sysctlbyname_uint(name):
+ try:
+ from ctypes import CDLL, c_uint, byref
+ from ctypes.util import find_library
+ except ImportError:
+ return
+
+ libc = CDLL(find_library("c"))
+ size = c_uint(0)
+ res = c_uint(0)
+ libc.sysctlbyname(name, None, byref(size), None, 0)
+ libc.sysctlbyname(name, byref(res), byref(size), None, 0)
+ return res.value
+
+
+def _makedirs(path):
+ try:
+ os.makedirs(path)
+ except OSError as e:
+ if e.errno == errno.EEXIST:
+ return
+ raise
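A hypothetical usage sketch for the `PortManager` restored above, assuming a yatest environment where `yatest.common` is importable: ports are both bind-checked and, when PORT_SYNC_PATH is set, file-locked, so concurrent test processes on one host do not race for the same port.

```python
import socket
from yatest.common.network import PortManager

with PortManager() as pm:
    port = pm.get_port()  # free TCP port outside the ephemeral range
    server = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
    server.bind(('::', port))
    server.close()
# leaving the block calls release(), dropping all held file locks
```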
diff --git a/library/python/testing/yatest_common/yatest/common/path.py b/library/python/testing/yatest_common/yatest/common/path.py
index d4f52f6333..6fed7dda8a 100644
--- a/library/python/testing/yatest_common/yatest/common/path.py
+++ b/library/python/testing/yatest_common/yatest/common/path.py
@@ -1,12 +1,12 @@
# coding=utf-8
import errno
-import os
-import shutil
-import contextlib
-
+import os
+import shutil
+import contextlib
+
import library.python.fs as lpf
-
+
def replace_in_file(path, old, new):
"""
@@ -21,32 +21,32 @@ def replace_in_file(path, old, new):
lpf.ensure_removed(path)
with open(path, 'w') as fp:
fp.write(content.replace(old, new))
-
-
-@contextlib.contextmanager
-def change_dir(path):
- old = os.getcwd()
- try:
- os.chdir(path)
- yield path
- finally:
- os.chdir(old)
-
-
+
+
+@contextlib.contextmanager
+def change_dir(path):
+ old = os.getcwd()
+ try:
+ os.chdir(path)
+ yield path
+ finally:
+ os.chdir(old)
+
+
def copytree(src, dst, symlinks=False, ignore=None, postprocessing=None):
- '''
- Copy an entire directory of files into an existing directory
- instead of raising Exception what shtuil.copytree does
- '''
+ '''
+ Copy an entire directory of files into an existing directory
+    instead of raising an Exception as shutil.copytree does
+ '''
if not os.path.exists(dst) and os.path.isdir(src):
os.makedirs(dst)
- for item in os.listdir(src):
- s = os.path.join(src, item)
- d = os.path.join(dst, item)
- if os.path.isdir(s):
- shutil.copytree(s, d, symlinks, ignore)
- else:
- shutil.copy2(s, d)
+ for item in os.listdir(src):
+ s = os.path.join(src, item)
+ d = os.path.join(dst, item)
+ if os.path.isdir(s):
+ shutil.copytree(s, d, symlinks, ignore)
+ else:
+ shutil.copy2(s, d)
if postprocessing:
postprocessing(dst, False)
for root, dirs, files in os.walk(dst):
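A usage sketch for the two helpers above, again assuming `yatest.common` is importable: `change_dir` restores the previous working directory even if the body raises, and `copytree` merges a source tree into an existing destination instead of failing the way `shutil.copytree` does when dst already exists. The temp directories are created only for the demo.

```python
import os
import tempfile
from yatest.common.path import change_dir, copytree

src = tempfile.mkdtemp()
dst = tempfile.mkdtemp()  # already exists: shutil.copytree would raise
open(os.path.join(src, "a.txt"), "w").close()

with change_dir(src):
    print(os.getcwd())      # inside src
print(os.getcwd())          # original directory restored

copytree(src, dst)
print(os.listdir(dst))      # ['a.txt']
```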
diff --git a/library/python/testing/yatest_common/yatest/common/process.py b/library/python/testing/yatest_common/yatest/common/process.py
index f9eeb16525..a8bcc21f51 100644
--- a/library/python/testing/yatest_common/yatest/common/process.py
+++ b/library/python/testing/yatest_common/yatest/common/process.py
@@ -1,10 +1,10 @@
-# coding: utf-8
-
+# coding: utf-8
+
import os
-import re
+import re
import time
import signal
-import shutil
+import shutil
import logging
import tempfile
import subprocess
@@ -13,19 +13,19 @@ import distutils.version
import six
-try:
- # yatest.common should try to be hermetic, otherwise, PYTEST_SCRIPT (aka USE_ARCADIA_PYTHON=no) won't work.
- import library.python.cores as cores
-except ImportError:
- cores = None
-
+try:
+ # yatest.common should try to be hermetic, otherwise, PYTEST_SCRIPT (aka USE_ARCADIA_PYTHON=no) won't work.
+ import library.python.cores as cores
+except ImportError:
+ cores = None
+
from . import runtime
from . import path
from . import environment
MAX_OUT_LEN = 1000 * 1000 # 1 mb
-MAX_MESSAGE_LEN = 1500
+MAX_MESSAGE_LEN = 1500
SANITIZER_ERROR_PATTERN = br": ([A-Z][\w]+Sanitizer)"
GLIBC_PATTERN = re.compile(r"\S+@GLIBC_([0-9.]+)")
yatest_logger = logging.getLogger("ya.test")
@@ -55,12 +55,12 @@ class ExecutionError(Exception):
command=command,
code=execution_result.exit_code,
err=_format_error(execution_result.std_err))
- if cores:
- if execution_result.backtrace:
- message += "Backtrace:\n[[rst]]{}[[bad]]\n".format(cores.colorize_backtrace(execution_result._backtrace))
- else:
- message += "Backtrace is not available: module cores isn't available"
-
+ if cores:
+ if execution_result.backtrace:
+ message += "Backtrace:\n[[rst]]{}[[bad]]\n".format(cores.colorize_backtrace(execution_result._backtrace))
+ else:
+ message += "Backtrace is not available: module cores isn't available"
+
super(ExecutionError, self).__init__(message)
self.execution_result = execution_result
@@ -85,31 +85,31 @@ class SignalInterruptionError(Exception):
self.res = None
-class InvalidCommandError(Exception):
- pass
-
-
+class InvalidCommandError(Exception):
+ pass
+
+
class _Execution(object):
- def __init__(self, command, process, out_file, err_file, process_progress_listener=None, cwd=None, collect_cores=True, check_sanitizer=True, started=0, user_stdout=False, user_stderr=False):
+ def __init__(self, command, process, out_file, err_file, process_progress_listener=None, cwd=None, collect_cores=True, check_sanitizer=True, started=0, user_stdout=False, user_stderr=False):
self._command = command
self._process = process
self._out_file = out_file
self._err_file = err_file
self._std_out = None
self._std_err = None
- self._elapsed = None
- self._start = time.time()
+ self._elapsed = None
+ self._start = time.time()
self._process_progress_listener = process_progress_listener
- self._cwd = cwd or os.getcwd()
- self._collect_cores = collect_cores
- self._backtrace = ''
- self._check_sanitizer = check_sanitizer
+ self._cwd = cwd or os.getcwd()
+ self._collect_cores = collect_cores
+ self._backtrace = ''
+ self._check_sanitizer = check_sanitizer
self._metrics = {}
- self._started = started
- self._user_stdout = bool(user_stdout)
- self._user_stderr = bool(user_stderr)
- self._exit_code = None
+ self._started = started
+ self._user_stdout = bool(user_stdout)
+ self._user_stderr = bool(user_stderr)
+ self._exit_code = None
if process_progress_listener:
process_progress_listener.open(command, process, out_file, err_file)
@@ -122,21 +122,21 @@ class _Execution(object):
self._save_outputs(False)
_kill_process_tree(self._process.pid)
self._clean_files()
- # DEVTOOLS-2347
- yatest_logger.debug("Process status before wait_for: %s", self.running)
- try:
- wait_for(lambda: not self.running, timeout=5, fail_message="Could not kill process {}".format(self._process.pid), sleep_time=.1)
- except TimeoutError:
- yatest_logger.debug("Process status after wait_for: %s", self.running)
- yatest_logger.debug("Process %d info: %s", self._process.pid, _get_proc_tree_info([self._process.pid]))
- raise
+ # DEVTOOLS-2347
+ yatest_logger.debug("Process status before wait_for: %s", self.running)
+ try:
+ wait_for(lambda: not self.running, timeout=5, fail_message="Could not kill process {}".format(self._process.pid), sleep_time=.1)
+ except TimeoutError:
+ yatest_logger.debug("Process status after wait_for: %s", self.running)
+ yatest_logger.debug("Process %d info: %s", self._process.pid, _get_proc_tree_info([self._process.pid]))
+ raise
else:
raise InvalidExecutionStateError("Cannot kill a stopped process")
- def terminate(self):
- if self.running:
- self._process.terminate()
-
+ def terminate(self):
+ if self.running:
+ self._process.terminate()
+
@property
def process(self):
return self._process
@@ -146,56 +146,56 @@ class _Execution(object):
return self._command
@property
- def returncode(self):
- return self.exit_code
-
- @property
+ def returncode(self):
+ return self.exit_code
+
+ @property
def exit_code(self):
- """
- Deprecated, use returncode
- """
- if self._exit_code is None:
- self._exit_code = self._process.returncode
- return self._exit_code
+ """
+ Deprecated, use returncode
+ """
+ if self._exit_code is None:
+ self._exit_code = self._process.returncode
+ return self._exit_code
+
+ @property
+ def stdout(self):
+ return self.std_out
@property
- def stdout(self):
- return self.std_out
-
- @property
def std_out(self):
- """
- Deprecated, use stdout
- """
+ """
+ Deprecated, use stdout
+ """
if self._std_out is not None:
return self._std_out
- if self._process.stdout and not self._user_stdout:
- self._std_out = self._process.stdout.read()
- return self._std_out
+ if self._process.stdout and not self._user_stdout:
+ self._std_out = self._process.stdout.read()
+ return self._std_out
+
+ @property
+ def stderr(self):
+ return self.std_err
@property
- def stderr(self):
- return self.std_err
-
- @property
def std_err(self):
- """
- Deprecated, use stderr
- """
+ """
+ Deprecated, use stderr
+ """
if self._std_err is not None:
return self._std_err
- if self._process.stderr and not self._user_stderr:
- self._std_err = self._process.stderr.read()
- return self._std_err
-
- @property
- def elapsed(self):
- return self._elapsed
-
- @property
- def backtrace(self):
- return self._backtrace
-
+ if self._process.stderr and not self._user_stderr:
+ self._std_err = self._process.stderr.read()
+ return self._std_err
+
+ @property
+ def elapsed(self):
+ return self._elapsed
+
+ @property
+ def backtrace(self):
+ return self._backtrace
+
@property
def metrics(self):
return self._metrics
@@ -204,25 +204,25 @@ class _Execution(object):
if self._process_progress_listener:
self._process_progress_listener()
self._process_progress_listener.close()
- if not self._user_stdout:
- if self._out_file is None:
- pass
- elif self._out_file != subprocess.PIPE:
- self._out_file.flush()
- self._out_file.seek(0, os.SEEK_SET)
- self._std_out = self._out_file.read()
- else:
- self._std_out = self._process.stdout.read()
- if not self._user_stderr:
- if self._err_file is None:
- pass
- elif self._err_file != subprocess.PIPE:
- self._err_file.flush()
- self._err_file.seek(0, os.SEEK_SET)
- self._std_err = self._err_file.read()
- else:
- self._std_err = self._process.stderr.read()
-
+ if not self._user_stdout:
+ if self._out_file is None:
+ pass
+ elif self._out_file != subprocess.PIPE:
+ self._out_file.flush()
+ self._out_file.seek(0, os.SEEK_SET)
+ self._std_out = self._out_file.read()
+ else:
+ self._std_out = self._process.stdout.read()
+ if not self._user_stderr:
+ if self._err_file is None:
+ pass
+ elif self._err_file != subprocess.PIPE:
+ self._err_file.flush()
+ self._err_file.seek(0, os.SEEK_SET)
+ self._std_err = self._err_file.read()
+ else:
+ self._std_err = self._process.stderr.read()
+
if clean_files:
self._clean_files()
yatest_logger.debug("Command (pid %s) rc: %s", self._process.pid, self.exit_code)
@@ -245,45 +245,45 @@ class _Execution(object):
yatest_logger.debug("Command (pid %s) errors:\n%s", self._process.pid, truncate(printable_std_err, MAX_OUT_LEN))
def _clean_files(self):
- if self._err_file and not self._user_stderr and self._err_file != subprocess.PIPE:
- self._err_file.close()
- self._err_file = None
- if self._out_file and not self._user_stdout and self._out_file != subprocess.PIPE:
- self._out_file.close()
- self._out_file = None
-
- def _recover_core(self):
- core_path = cores.recover_core_dump_file(self.command[0], self._cwd, self.process.pid)
- if core_path:
- # Core dump file recovering may be disabled (for distbuild for example) - produce only bt
- store_cores = runtime._get_ya_config().collect_cores
- if store_cores:
- new_core_path = path.get_unique_file_path(runtime.output_path(), "{}.{}.core".format(os.path.basename(self.command[0]), self._process.pid))
- # Copy core dump file, because it may be overwritten
+ if self._err_file and not self._user_stderr and self._err_file != subprocess.PIPE:
+ self._err_file.close()
+ self._err_file = None
+ if self._out_file and not self._user_stdout and self._out_file != subprocess.PIPE:
+ self._out_file.close()
+ self._out_file = None
+
+ def _recover_core(self):
+ core_path = cores.recover_core_dump_file(self.command[0], self._cwd, self.process.pid)
+ if core_path:
+            # Core dump file recovering may be disabled (e.g. on distbuild) - produce only a backtrace
+ store_cores = runtime._get_ya_config().collect_cores
+ if store_cores:
+ new_core_path = path.get_unique_file_path(runtime.output_path(), "{}.{}.core".format(os.path.basename(self.command[0]), self._process.pid))
+ # Copy core dump file, because it may be overwritten
yatest_logger.debug("Coping core dump file from '%s' to the '%s'", core_path, new_core_path)
- shutil.copyfile(core_path, new_core_path)
- core_path = new_core_path
-
- bt_filename = None
- pbt_filename = None
-
- if os.path.exists(runtime.gdb_path()):
- self._backtrace = cores.get_gdb_full_backtrace(self.command[0], core_path, runtime.gdb_path())
- bt_filename = path.get_unique_file_path(runtime.output_path(), "{}.{}.backtrace".format(os.path.basename(self.command[0]), self._process.pid))
+ shutil.copyfile(core_path, new_core_path)
+ core_path = new_core_path
+
+ bt_filename = None
+ pbt_filename = None
+
+ if os.path.exists(runtime.gdb_path()):
+ self._backtrace = cores.get_gdb_full_backtrace(self.command[0], core_path, runtime.gdb_path())
+ bt_filename = path.get_unique_file_path(runtime.output_path(), "{}.{}.backtrace".format(os.path.basename(self.command[0]), self._process.pid))
with open(bt_filename, "wb") as afile:
afile.write(six.ensure_binary(self._backtrace))
- # generate pretty html version of backtrace aka Tri Korochki
- pbt_filename = bt_filename + ".html"
+ # generate pretty html version of backtrace aka Tri Korochki
+ pbt_filename = bt_filename + ".html"
backtrace_to_html(bt_filename, pbt_filename)
-
- if store_cores:
- runtime._register_core(os.path.basename(self.command[0]), self.command[0], core_path, bt_filename, pbt_filename)
- else:
- runtime._register_core(os.path.basename(self.command[0]), None, None, bt_filename, pbt_filename)
-
+
+ if store_cores:
+ runtime._register_core(os.path.basename(self.command[0]), self.command[0], core_path, bt_filename, pbt_filename)
+ else:
+ runtime._register_core(os.path.basename(self.command[0]), None, None, bt_filename, pbt_filename)
+
def wait(self, check_exit_code=True, timeout=None, on_timeout=None):
def _wait():
- finished = None
+ finished = None
interrupted = False
try:
if hasattr(os, "wait4"):
@@ -293,7 +293,7 @@ class _Execution(object):
else:
# PEP 475
pid, sts, rusage = os.wait4(self._process.pid, 0)
- finished = time.time()
+ finished = time.time()
self._process._handle_exitstatus(sts)
for field in [
"ru_idrss",
@@ -328,46 +328,46 @@ class _Execution(object):
if not interrupted:
self._process.wait() # this has to be here unconditionally, so that all process properties are set
- if not finished:
- finished = time.time()
- self._metrics["wtime"] = round(finished - self._started, 3)
-
+ if not finished:
+ finished = time.time()
+ self._metrics["wtime"] = round(finished - self._started, 3)
+
try:
- if timeout:
- process_is_finished = lambda: not self.running
- fail_message = "Command '%s' stopped by %d seconds timeout" % (self._command, timeout)
+ if timeout:
+ process_is_finished = lambda: not self.running
+ fail_message = "Command '%s' stopped by %d seconds timeout" % (self._command, timeout)
try:
wait_for(process_is_finished, timeout, fail_message, sleep_time=0.1, on_check_condition=self._process_progress_listener)
except TimeoutError as e:
if on_timeout:
- yatest_logger.debug("Calling user specified on_timeout function")
+ yatest_logger.debug("Calling user specified on_timeout function")
try:
on_timeout(self, timeout)
except Exception:
yatest_logger.exception("Exception while calling on_timeout")
raise ExecutionTimeoutError(self, str(e))
- # Wait should be always called here, it finalizes internal states of its process and sets up return code
+            # Wait should always be called here; it finalizes the internal state of the process and sets the return code
_wait()
- except BaseException as e:
+ except BaseException as e:
_kill_process_tree(self._process.pid)
_wait()
yatest_logger.debug("Command exception: %s", e)
- raise
+ raise
finally:
- self._elapsed = time.time() - self._start
- self._save_outputs()
+ self._elapsed = time.time() - self._start
+ self._save_outputs()
self.verify_no_coredumps()
- self._finalise(check_exit_code)
-
- def _finalise(self, check_exit_code):
+ self._finalise(check_exit_code)
+
+ def _finalise(self, check_exit_code):
# Set the signal (negative number) which caused the process to exit
if check_exit_code and self.exit_code != 0:
yatest_logger.error("Execution failed with exit code: %s\n\t,std_out:%s\n\tstd_err:%s\n",
self.exit_code, truncate(self.std_out, MAX_OUT_LEN), truncate(self.std_err, MAX_OUT_LEN))
raise ExecutionError(self)
- # Don't search for sanitize errors if stderr was redirected
+ # Don't search for sanitize errors if stderr was redirected
self.verify_sanitize_errors()
def verify_no_coredumps(self):
@@ -375,44 +375,44 @@ class _Execution(object):
Verify there is no coredump from this binary. If there is then report backtrace.
"""
if self.exit_code < 0 and self._collect_cores:
- if cores:
- try:
- self._recover_core()
- except Exception:
- yatest_logger.exception("Exception while recovering core")
- else:
- yatest_logger.warning("Core dump file recovering is skipped: module cores isn't available")
+ if cores:
+ try:
+ self._recover_core()
+ except Exception:
+ yatest_logger.exception("Exception while recovering core")
+ else:
+ yatest_logger.warning("Core dump file recovering is skipped: module cores isn't available")
def verify_sanitize_errors(self):
"""
Verify there are no sanitizer (ASAN, MSAN, TSAN, etc) errors for this binary. If there are any report them.
"""
- if self._std_err and self._check_sanitizer and runtime._get_ya_config().sanitizer_extra_checks:
- build_path = runtime.build_path()
- if self.command[0].startswith(build_path):
- match = re.search(SANITIZER_ERROR_PATTERN, self._std_err)
- if match:
- yatest_logger.error("%s sanitizer found errors:\n\tstd_err:%s\n", match.group(1), truncate(self.std_err, MAX_OUT_LEN))
- raise ExecutionError(self)
+ if self._std_err and self._check_sanitizer and runtime._get_ya_config().sanitizer_extra_checks:
+ build_path = runtime.build_path()
+ if self.command[0].startswith(build_path):
+ match = re.search(SANITIZER_ERROR_PATTERN, self._std_err)
+ if match:
+ yatest_logger.error("%s sanitizer found errors:\n\tstd_err:%s\n", match.group(1), truncate(self.std_err, MAX_OUT_LEN))
+ raise ExecutionError(self)
else:
yatest_logger.debug("No sanitizer errors found")
- else:
+ else:
yatest_logger.debug("'%s' doesn't belong to '%s' - no check for sanitize errors", self.command[0], build_path)
-
-def on_timeout_gen_coredump(exec_obj, _):
- """
- Function can be passed to the execute(..., timeout=X, on_timeout=on_timeout_gen_coredump)
- to generate core dump file, backtrace ahd html-version of the backtrace in case of timeout.
- All files will be available in the testing_out_stuff and via links.
- """
- try:
- os.kill(exec_obj.process.pid, signal.SIGQUIT)
- except OSError:
- # process might be already terminated
- pass
-
-
+
+def on_timeout_gen_coredump(exec_obj, _):
+ """
+ Function can be passed to the execute(..., timeout=X, on_timeout=on_timeout_gen_coredump)
+    to generate a core dump file, a backtrace and an html version of the backtrace in case of timeout.
+ All files will be available in the testing_out_stuff and via links.
+ """
+ try:
+ os.kill(exec_obj.process.pid, signal.SIGQUIT)
+ except OSError:
+ # process might be already terminated
+ pass
+
+
def execute(
command, check_exit_code=True,
shell=False, timeout=None,
@@ -421,7 +421,7 @@ def execute(
creationflags=0, wait=True,
process_progress_listener=None, close_fds=False,
collect_cores=True, check_sanitizer=True, preexec_fn=None, on_timeout=None,
- executor=_Execution,
+ executor=_Execution,
):
"""
Executes a command
@@ -438,26 +438,26 @@ def execute(
:param wait: should wait until the command finishes
    :param process_progress_listener: object that is polled while execution is in progress
    :param close_fds: subprocess.Popen close_fds arg
- :param collect_cores: recover core dump files if shell == False
- :param check_sanitizer: raise ExecutionError if stderr contains sanitize errors
+ :param collect_cores: recover core dump files if shell == False
+ :param check_sanitizer: raise ExecutionError if stderr contains sanitize errors
    :param preexec_fn: subprocess.Popen preexec_fn arg
:param on_timeout: on_timeout(<execution object>, <timeout value>) callback
:return _Execution: Execution object
"""
- if env is None:
- env = os.environ.copy()
+ if env is None:
+ env = os.environ.copy()
else:
- # Certain environment variables must be present for programs to work properly.
- # For more info see DEVTOOLSSUPPORT-4907
- mandatory_env_name = 'YA_MANDATORY_ENV_VARS'
- mandatory_vars = env.get(mandatory_env_name, os.environ.get(mandatory_env_name)) or ''
- if mandatory_vars:
- env[mandatory_env_name] = mandatory_vars
- mandatory_system_vars = filter(None, mandatory_vars.split(':'))
- else:
- mandatory_system_vars = ['TMPDIR']
-
+ # Certain environment variables must be present for programs to work properly.
+ # For more info see DEVTOOLSSUPPORT-4907
+ mandatory_env_name = 'YA_MANDATORY_ENV_VARS'
+ mandatory_vars = env.get(mandatory_env_name, os.environ.get(mandatory_env_name)) or ''
+ if mandatory_vars:
+ env[mandatory_env_name] = mandatory_vars
+ mandatory_system_vars = filter(None, mandatory_vars.split(':'))
+ else:
+ mandatory_system_vars = ['TMPDIR']
+
for var in mandatory_system_vars:
if var not in env and var in os.environ:
env[var] = os.environ[var]
@@ -465,13 +465,13 @@ def execute(
if not wait and timeout is not None:
raise ValueError("Incompatible arguments 'timeout' and wait=False")
- # if subprocess.PIPE in [stdout, stderr]:
- # raise ValueError("Don't use pipe to obtain stream data - it may lead to a deadlock")
-
+ # if subprocess.PIPE in [stdout, stderr]:
+ # raise ValueError("Don't use pipe to obtain stream data - it may lead to a deadlock")
+
def get_out_stream(stream, default_name):
if stream is None:
# No stream is supplied: open new temp file
- return _get_command_output_file(command, default_name), False
+ return _get_command_output_file(command, default_name), False
if isinstance(stream, six.string_types):
# User filename is supplied: open file for writing
@@ -490,86 +490,86 @@ def execute(
if shell and type(command) == list:
command = " ".join(command)
- if shell:
- collect_cores = False
- check_sanitizer = False
- else:
- if isinstance(command, (list, tuple)):
- executable = command[0]
- else:
- executable = command
- if os.path.isabs(executable):
- if not os.path.isfile(executable) and not os.path.isfile(executable + ".exe"):
- exists = os.path.exists(executable)
- if exists:
- stat = os.stat(executable)
- else:
- stat = None
- raise InvalidCommandError("Target program is not a file: {} (exists: {} stat: {})".format(executable, exists, stat))
- if not os.access(executable, os.X_OK) and not os.access(executable + ".exe", os.X_OK):
- raise InvalidCommandError("Target program is not executable: {}".format(executable))
-
- if check_sanitizer:
- env["LSAN_OPTIONS"] = environment.extend_env_var(os.environ, "LSAN_OPTIONS", "exitcode=100")
-
- if stdin:
- name = "PIPE" if stdin == subprocess.PIPE else stdin.name
- yatest_logger.debug("Executing '%s' with input '%s' in '%s'", command, name, cwd)
- else:
+ if shell:
+ collect_cores = False
+ check_sanitizer = False
+ else:
+ if isinstance(command, (list, tuple)):
+ executable = command[0]
+ else:
+ executable = command
+ if os.path.isabs(executable):
+ if not os.path.isfile(executable) and not os.path.isfile(executable + ".exe"):
+ exists = os.path.exists(executable)
+ if exists:
+ stat = os.stat(executable)
+ else:
+ stat = None
+ raise InvalidCommandError("Target program is not a file: {} (exists: {} stat: {})".format(executable, exists, stat))
+ if not os.access(executable, os.X_OK) and not os.access(executable + ".exe", os.X_OK):
+ raise InvalidCommandError("Target program is not executable: {}".format(executable))
+
+ if check_sanitizer:
+ env["LSAN_OPTIONS"] = environment.extend_env_var(os.environ, "LSAN_OPTIONS", "exitcode=100")
+
+ if stdin:
+ name = "PIPE" if stdin == subprocess.PIPE else stdin.name
+ yatest_logger.debug("Executing '%s' with input '%s' in '%s'", command, name, cwd)
+ else:
yatest_logger.debug("Executing '%s' in '%s'", command, cwd)
- # XXX
-
- started = time.time()
- process = subprocess.Popen(
- command, shell=shell, universal_newlines=True,
- stdout=out_file, stderr=err_file, stdin=in_file,
- cwd=cwd, env=env, creationflags=creationflags, close_fds=close_fds, preexec_fn=preexec_fn,
- )
- yatest_logger.debug("Command pid: %s", process.pid)
-
- res = executor(command, process, out_file, err_file, process_progress_listener, cwd, collect_cores, check_sanitizer, started, user_stdout=user_stdout, user_stderr=user_stderr)
+ # XXX
+
+ started = time.time()
+ process = subprocess.Popen(
+ command, shell=shell, universal_newlines=True,
+ stdout=out_file, stderr=err_file, stdin=in_file,
+ cwd=cwd, env=env, creationflags=creationflags, close_fds=close_fds, preexec_fn=preexec_fn,
+ )
+ yatest_logger.debug("Command pid: %s", process.pid)
+
+ res = executor(command, process, out_file, err_file, process_progress_listener, cwd, collect_cores, check_sanitizer, started, user_stdout=user_stdout, user_stderr=user_stderr)
if wait:
res.wait(check_exit_code, timeout, on_timeout)
return res
-def _get_command_output_file(cmd, ext):
- parts = [get_command_name(cmd)]
- if 'YA_RETRY_INDEX' in os.environ:
- parts.append('retry{}'.format(os.environ.get('YA_RETRY_INDEX')))
- if int(os.environ.get('YA_SPLIT_COUNT', '0')) > 1:
- parts.append('chunk{}'.format(os.environ.get('YA_SPLIT_INDEX', '0')))
-
- filename = '.'.join(parts + [ext])
- try:
- # if execution is performed from a test, save out/err to the test logs dir
- import yatest.common
+def _get_command_output_file(cmd, ext):
+ parts = [get_command_name(cmd)]
+ if 'YA_RETRY_INDEX' in os.environ:
+ parts.append('retry{}'.format(os.environ.get('YA_RETRY_INDEX')))
+ if int(os.environ.get('YA_SPLIT_COUNT', '0')) > 1:
+ parts.append('chunk{}'.format(os.environ.get('YA_SPLIT_INDEX', '0')))
+
+ filename = '.'.join(parts + [ext])
+ try:
+ # if execution is performed from a test, save out/err to the test logs dir
+ import yatest.common
import library.python.pytest.plugins.ya
if getattr(library.python.pytest.plugins.ya, 'pytest_config', None) is None:
- raise ImportError("not in test")
- filename = path.get_unique_file_path(yatest.common.output_path(), filename)
- yatest_logger.debug("Command %s will be placed to %s", ext, os.path.basename(filename))
- return open(filename, "wb+")
- except ImportError:
- return tempfile.NamedTemporaryFile(delete=False, suffix=filename)
-
-
-def _get_proc_tree_info(pids):
- if os.name == 'nt':
- return 'Not supported'
- else:
- stdout, _ = subprocess.Popen(["/bin/ps", "-wufp"] + [str(p) for p in pids], stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
- return stdout
-
-
-def py_execute(
- command, check_exit_code=True,
- shell=False, timeout=None,
- cwd=None, env=None,
- stdin=None, stdout=None, stderr=None,
- creationflags=0, wait=True,
- process_progress_listener=None, close_fds=False
-):
+ raise ImportError("not in test")
+ filename = path.get_unique_file_path(yatest.common.output_path(), filename)
+ yatest_logger.debug("Command %s will be placed to %s", ext, os.path.basename(filename))
+ return open(filename, "wb+")
+ except ImportError:
+ return tempfile.NamedTemporaryFile(delete=False, suffix=filename)
+
+
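The resulting filenames look like this (all values below are made up):

# get_command_name(cmd) == "my_tool", YA_RETRY_INDEX == "2",
# YA_SPLIT_COUNT == "4", YA_SPLIT_INDEX == "1", ext == "out"
#   -> "my_tool.retry2.chunk1.out" (placed in the test output dir,
#      or used as a NamedTemporaryFile suffix outside of tests)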
+def _get_proc_tree_info(pids):
+ if os.name == 'nt':
+ return 'Not supported'
+ else:
+ stdout, _ = subprocess.Popen(["/bin/ps", "-wufp"] + [str(p) for p in pids], stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
+ return stdout
+
+
+def py_execute(
+ command, check_exit_code=True,
+ shell=False, timeout=None,
+ cwd=None, env=None,
+ stdin=None, stdout=None, stderr=None,
+ creationflags=0, wait=True,
+ process_progress_listener=None, close_fds=False
+):
"""
Executes a command with the arcadia python
:param command: command to pass to python
@@ -635,50 +635,50 @@ def _kill_process_tree(process_pid, target_pid_signal=None):
if os.name == 'nt':
_win_kill_process_tree(process_pid)
else:
- _nix_kill_process_tree(process_pid, target_pid_signal)
-
-
-def _nix_get_proc_children(pid):
- try:
- cmd = ["pgrep", "-P", str(pid)]
- return [int(p) for p in subprocess.check_output(cmd).split()]
- except Exception:
- return []
-
-
-def _get_binname(pid):
- try:
- return os.path.basename(os.readlink('/proc/{}/exe'.format(pid)))
- except Exception as e:
- return "error({})".format(e)
-
-
+ _nix_kill_process_tree(process_pid, target_pid_signal)
+
+
+def _nix_get_proc_children(pid):
+ try:
+ cmd = ["pgrep", "-P", str(pid)]
+ return [int(p) for p in subprocess.check_output(cmd).split()]
+ except Exception:
+ return []
+
+
+def _get_binname(pid):
+ try:
+ return os.path.basename(os.readlink('/proc/{}/exe'.format(pid)))
+ except Exception as e:
+ return "error({})".format(e)
+
+
def _nix_kill_process_tree(pid, target_pid_signal=None):
"""
Kills the process tree.
"""
- yatest_logger.debug("Killing process tree for pid {} (bin:'{}')".format(pid, _get_binname(pid)))
+ yatest_logger.debug("Killing process tree for pid {} (bin:'{}')".format(pid, _get_binname(pid)))
def try_to_send_signal(pid, sig):
try:
os.kill(pid, sig)
- yatest_logger.debug("Sent signal %d to the pid %d", sig, pid)
+ yatest_logger.debug("Sent signal %d to the pid %d", sig, pid)
except Exception as exc:
- yatest_logger.debug("Error while sending signal {sig} to pid {pid}: {error}".format(sig=sig, pid=pid, error=str(exc)))
+ yatest_logger.debug("Error while sending signal {sig} to pid {pid}: {error}".format(sig=sig, pid=pid, error=str(exc)))
try_to_send_signal(pid, signal.SIGSTOP) # Stop the process to prevent it from starting any child processes.
# Get the child process PID list.
- child_pids = _nix_get_proc_children(pid)
- # Stop the child processes.
- for child_pid in child_pids:
- try:
- # Kill the child recursively.
- _kill_process_tree(int(child_pid))
- except Exception as e:
- # Skip the error and continue killing.
- yatest_logger.debug("Killing child pid {pid} failed: {error}".format(pid=child_pid, error=e))
- continue
+ child_pids = _nix_get_proc_children(pid)
+ # Stop the child processes.
+ for child_pid in child_pids:
+ try:
+ # Kill the child recursively.
+ _kill_process_tree(int(child_pid))
+ except Exception as e:
+ # Skip the error and continue killing.
+ yatest_logger.debug("Killing child pid {pid} failed: {error}".format(pid=child_pid, error=e))
+ continue
try_to_send_signal(pid, target_pid_signal or signal.SIGKILL) # Kill the root process.
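The ordering above is deliberate; sketched for a root pid with two children:

# 1. SIGSTOP root      - frozen, so it cannot spawn replacement children
# 2. recurse children  - each child gets the same stop/recurse/kill pass
# 3. SIGKILL root      - or target_pid_signal when the caller supplied one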
@@ -689,16 +689,16 @@ def _nix_kill_process_tree(pid, target_pid_signal=None):
def _win_kill_process_tree(pid):
subprocess.call(['taskkill', '/F', '/T', '/PID', str(pid)])
-
-
-def _run_readelf(binary_path):
+
+
+def _run_readelf(binary_path):
return str(subprocess.check_output([runtime.binary_path('contrib/python/pyelftools/readelf/readelf'), '-s', runtime.binary_path(binary_path)]))
-
-
-def check_glibc_version(binary_path):
+
+
+def check_glibc_version(binary_path):
lucid_glibc_version = distutils.version.LooseVersion("2.11")
- for l in _run_readelf(binary_path).split('\n'):
+ for l in _run_readelf(binary_path).split('\n'):
match = GLIBC_PATTERN.search(l)
if not match:
continue
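For orientation, a hedged example of the readelf symbol lines scanned here (GLIBC_PATTERN itself is defined elsewhere in this module):

# '... FUNC GLOBAL DEFAULT UND memcpy@GLIBC_2.14'
#   -> the extracted version (2.14) is compared against the 2.11 baseline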
@@ -706,14 +706,14 @@ def check_glibc_version(binary_path):
def backtrace_to_html(bt_filename, output):
- try:
+ try:
from library.python import coredump_filter
- with open(output, "wb") as afile:
+ with open(output, "wb") as afile:
coredump_filter.filter_stackdump(bt_filename, stream=afile)
- except ImportError as e:
- yatest_logger.debug("Failed to import coredump_filter: %s", e)
- with open(output, "wb") as afile:
- afile.write("<html>Failed to import coredump_filter in USE_ARCADIA_PYTHON=no mode</html>")
+ except ImportError as e:
+ yatest_logger.debug("Failed to import coredump_filter: %s", e)
+ with open(output, "wb") as afile:
+ afile.write("<html>Failed to import coredump_filter in USE_ARCADIA_PYTHON=no mode</html>")
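A sketch of the degradation path (filenames made up):

# backtrace_to_html("test.backtrace", "test.backtrace.html")
#   with coredump_filter importable: a filtered HTML stack dump is written
#   without it (USE_ARCADIA_PYTHON=no): a stub <html> page is written instead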
def _try_convert_bytes_to_string(source):
diff --git a/library/python/testing/yatest_common/yatest/common/runtime.py b/library/python/testing/yatest_common/yatest/common/runtime.py
index 07a41ebbbc..e55e193446 100644
--- a/library/python/testing/yatest_common/yatest/common/runtime.py
+++ b/library/python/testing/yatest_common/yatest/common/runtime.py
@@ -1,7 +1,7 @@
-import errno
-import functools
+import errno
+import functools
import json
-import os
+import os
import threading
import six
@@ -10,13 +10,13 @@ import six
_lock = threading.Lock()
-def _get_ya_config():
+def _get_ya_config():
try:
import library.python.pytest.plugins.ya as ya_plugin
if ya_plugin.pytest_config is not None:
return ya_plugin.pytest_config
import pytest
- return pytest.config
+ return pytest.config
except (ImportError, AttributeError):
try:
import library.python.testing.recipe
@@ -27,10 +27,10 @@ def _get_ya_config():
raise NotImplementedError("yatest.common.* is only available from the testing runtime")
-def _get_ya_plugin_instance():
- return _get_ya_config().ya
-
-
+def _get_ya_plugin_instance():
+ return _get_ya_config().ya
+
+
def _norm_path(path):
if path is None:
return None
@@ -46,35 +46,35 @@ def _join_path(main_path, path):
return os.path.join(main_path, _norm_path(path))
-def not_test(func):
- """
- Mark any function as not a test for py.test
- :param func:
- :return:
- """
- @functools.wraps(func)
- def wrapper(*args, **kwds):
- return func(*args, **kwds)
- setattr(wrapper, '__test__', False)
- return wrapper
-
-
-def source_path(path=None):
+def not_test(func):
+ """
+ Mark any function as not a test for py.test
+ :param func:
+ :return:
+ """
+ @functools.wraps(func)
+ def wrapper(*args, **kwds):
+ return func(*args, **kwds)
+ setattr(wrapper, '__test__', False)
+ return wrapper
+
+
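A usage sketch of the decorator above (the helper name is made up):

@not_test
def test_data_builder():       # looks like a test by name,
    return {"sample": 1}       # but __test__ = False keeps py.test away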
+def source_path(path=None):
"""
Get source path inside arcadia
- :param path: path arcadia relative, e.g. yatest.common.source_path('devtools/ya')
+ :param path: path arcadia relative, e.g. yatest.common.source_path('devtools/ya')
:return: absolute path to the source folder
"""
- return _join_path(_get_ya_plugin_instance().source_root, path)
+ return _join_path(_get_ya_plugin_instance().source_root, path)
-def build_path(path=None):
+def build_path(path=None):
"""
Get path inside build directory
- :param path: path relative to the build directory, e.g. yatest.common.build_path('devtools/ya/bin')
+ :param path: path relative to the build directory, e.g. yatest.common.build_path('devtools/ya/bin')
:return: absolute path inside build directory
"""
- return _join_path(_get_ya_plugin_instance().build_root, path)
+ return _join_path(_get_ya_plugin_instance().build_root, path)
def java_path():
@@ -82,7 +82,7 @@ def java_path():
[DEPRECATED] Get path to java
:return: absolute path to java
"""
- from . import runtime_java
+ from . import runtime_java
return runtime_java.get_java_path(binary_path(os.path.join('contrib', 'tools', 'jdk')))
@@ -90,7 +90,7 @@ def java_home():
"""
Get jdk directory path
"""
- from . import runtime_java
+ from . import runtime_java
jdk_dir = runtime_java.get_build_java_dir(binary_path('jdk'))
if not jdk_dir:
raise Exception("Cannot find jdk - make sure 'jdk' is added to the DEPENDS section and exists for the current platform")
@@ -100,60 +100,60 @@ def java_home():
def java_bin():
"""
Get path to the java binary
- Requires DEPENDS(jdk)
+ Requires DEPENDS(jdk)
"""
return os.path.join(java_home(), "bin", "java")
-def data_path(path=None):
+def data_path(path=None):
"""
Get path inside arcadia_tests_data directory
- :param path: path relative to the arcadia_tests_data directory, e.g. yatest.common.data_path("pers/rerank_service")
+ :param path: path relative to the arcadia_tests_data directory, e.g. yatest.common.data_path("pers/rerank_service")
:return: absolute path inside arcadia_tests_data
"""
- return _join_path(_get_ya_plugin_instance().data_root, path)
+ return _join_path(_get_ya_plugin_instance().data_root, path)
-def output_path(path=None):
+def output_path(path=None):
"""
Get path inside the current test suite output dir.
Placing files in this dir guarantees that files will be accessible after the test suite execution.
:param path: path relative to the test suite output dir
:return: absolute path inside the test suite output dir
"""
- return _join_path(_get_ya_plugin_instance().output_dir, path)
-
-
-def ram_drive_path(path=None):
- """
- :param path: path relative to the ram drive.
- :return: absolute path inside the ram drive directory, or None if no ram drive was provided by the environment.
- """
- if 'YA_TEST_RAM_DRIVE_PATH' in os.environ:
- return _join_path(os.environ['YA_TEST_RAM_DRIVE_PATH'], path)
-
-
-def output_ram_drive_path(path=None):
- """
- Returns a path inside the ram drive directory which will be saved in the testing_out_stuff directory after testing.
- Returns None if no ram drive was provided by the environment.
- :param path: path relative to the output ram drive directory
- """
- if 'YA_TEST_OUTPUT_RAM_DRIVE_PATH' in os.environ:
- return _join_path(os.environ['YA_TEST_OUTPUT_RAM_DRIVE_PATH'], path)
-
-
-def binary_path(path=None):
+ return _join_path(_get_ya_plugin_instance().output_dir, path)
+
+
+def ram_drive_path(path=None):
+ """
+ :param path: path relative to the ram drive.
+ :return: absolute path inside the ram drive directory, or None if no ram drive was provided by the environment.
+ """
+ if 'YA_TEST_RAM_DRIVE_PATH' in os.environ:
+ return _join_path(os.environ['YA_TEST_RAM_DRIVE_PATH'], path)
+
+
+def output_ram_drive_path(path=None):
+ """
+ Returns a path inside the ram drive directory which will be saved in the testing_out_stuff directory after testing.
+ Returns None if no ram drive was provided by the environment.
+ :param path: path relative to the output ram drive directory
+ """
+ if 'YA_TEST_OUTPUT_RAM_DRIVE_PATH' in os.environ:
+ return _join_path(os.environ['YA_TEST_OUTPUT_RAM_DRIVE_PATH'], path)
+
+
+def binary_path(path=None):
"""
Get path to the built binary
- :param path: path to the binary relative to the build directory e.g. yatest.common.binary_path('devtools/ya/bin/ya-bin')
+ :param path: path to the binary relative to the build directory e.g. yatest.common.binary_path('devtools/ya/bin/ya-bin')
:return: absolute path to the binary
"""
- path = _norm_path(path)
- return _get_ya_plugin_instance().get_binary(path)
+ path = _norm_path(path)
+ return _get_ya_plugin_instance().get_binary(path)
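Taken together, the helpers above compose like this (paths illustrative):

src = source_path('devtools/ya')              # file in the arcadia checkout
exe = binary_path('devtools/ya/bin/ya-bin')   # built artifact
out = output_path('artifacts/report.txt')     # survives the suite run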
-def work_path(path=None):
+def work_path(path=None):
"""
Get path inside the current test suite working directory. Creating files in the work directory does not guarantee
that files will be accessible after the test suite execution
@@ -169,12 +169,12 @@ def work_path(path=None):
def python_path():
"""
- Get path to the arcadia python.
-
- Warning: if you are using a build with system python (-DUSE_SYSTEM_PYTHON=X), beware that some python bundles
- are built in a stripped-down form that is suitable for building, not for running tests.
- See comments in the file below to find out which version of python is compatible with tests.
- https://a.yandex-team.ru/arc/trunk/arcadia/build/platform/python/resources.inc
+ Get path to the arcadia python.
+
+ Warning: if you are using a build with system python (-DUSE_SYSTEM_PYTHON=X), beware that some python bundles
+ are built in a stripped-down form that is suitable for building, not for running tests.
+ See comments in the file below to find out which version of python is compatible with tests.
+ https://a.yandex-team.ru/arc/trunk/arcadia/build/platform/python/resources.inc
:return: absolute path to python
"""
return _get_ya_plugin_instance().python_path
@@ -207,18 +207,18 @@ def get_param_dict_copy():
return _get_ya_plugin_instance().get_param_dict_copy()
-@not_test
-def test_output_path(path=None):
+@not_test
+def test_output_path(path=None):
"""
Get dir in the suite output_path for the current test case
"""
test_out_dir = os.path.splitext(_get_ya_config().current_test_log_path)[0]
- try:
+ try:
os.makedirs(test_out_dir)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
- return _join_path(test_out_dir, path)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+ return _join_path(test_out_dir, path)
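The errno.EEXIST handling above is the Python 2 compatible spelling of an idempotent mkdir; on Python 3 alone it could read:

# os.makedirs(test_out_dir, exist_ok=True)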
def project_path(path=None):
@@ -228,13 +228,13 @@ def project_path(path=None):
return _join_path(os.path.join(build_path(), context.project_path), path)
-def gdb_path():
- """
- Get path to the gdb
- """
- return _get_ya_plugin_instance().gdb_path
-
-
+def gdb_path():
+ """
+ Get path to the gdb
+ """
+ return _get_ya_plugin_instance().gdb_path
+
+
def c_compiler_path():
"""
Get path to the C compiler
@@ -261,27 +261,27 @@ def global_resources():
return {}
-def _register_core(name, binary_path, core_path, bt_path, pbt_path):
- config = _get_ya_config()
+def _register_core(name, binary_path, core_path, bt_path, pbt_path):
+ config = _get_ya_config()
with _lock:
- if not hasattr(config, 'test_cores_count'):
- config.test_cores_count = 0
+ if not hasattr(config, 'test_cores_count'):
+ config.test_cores_count = 0
config.test_cores_count += 1
count_str = '' if config.test_cores_count == 1 else str(config.test_cores_count)
- log_entry = config.test_logs[config.current_item_nodeid]
- if binary_path:
- log_entry['{} binary{}'.format(name, count_str)] = binary_path
- if core_path:
- log_entry['{} core{}'.format(name, count_str)] = core_path
- if bt_path:
- log_entry['{} backtrace{}'.format(name, count_str)] = bt_path
- if pbt_path:
- log_entry['{} backtrace html{}'.format(name, count_str)] = pbt_path
-
-
-@not_test
+ log_entry = config.test_logs[config.current_item_nodeid]
+ if binary_path:
+ log_entry['{} binary{}'.format(name, count_str)] = binary_path
+ if core_path:
+ log_entry['{} core{}'.format(name, count_str)] = core_path
+ if bt_path:
+ log_entry['{} backtrace{}'.format(name, count_str)] = bt_path
+ if pbt_path:
+ log_entry['{} backtrace html{}'.format(name, count_str)] = pbt_path
+
+
+@not_test
def test_source_path(path=None):
return _join_path(os.path.join(source_path(), context.project_path), path)
diff --git a/library/python/testing/yatest_common/yatest/common/runtime_java.py b/library/python/testing/yatest_common/yatest/common/runtime_java.py
index 5ed678a457..39bbb45570 100644
--- a/library/python/testing/yatest_common/yatest/common/runtime_java.py
+++ b/library/python/testing/yatest_common/yatest/common/runtime_java.py
@@ -2,11 +2,11 @@ import os
import tarfile
import contextlib
-from . import runtime
+from . import runtime
+
+_JAVA_DIR = []
+
-_JAVA_DIR = []
-
-
def get_java_path(jdk_dir):
# deprecated - to be deleted
java_paths = (os.path.join(jdk_dir, 'bin', 'java'), os.path.join(jdk_dir, 'bin', 'java.exe'))
@@ -30,17 +30,17 @@ def get_java_path(jdk_dir):
def get_build_java_dir(jdk_dir):
versions = [8, 10, 11, 12, 13, 14, 15]
- if not _JAVA_DIR:
- for version in versions:
- jdk_tar_path = os.path.join(jdk_dir, "jdk{}.tar".format(version))
- if os.path.exists(jdk_tar_path):
- jdk_dir = runtime.build_path('jdk4test')
- with contextlib.closing(tarfile.open(jdk_tar_path)) as tf:
- tf.extractall(jdk_dir)
- assert os.path.exists(os.path.join(jdk_dir, "bin", "java"))
- _JAVA_DIR.append(jdk_dir)
- break
- else:
- _JAVA_DIR.append(None)
-
- return _JAVA_DIR[0]
+ if not _JAVA_DIR:
+ for version in versions:
+ jdk_tar_path = os.path.join(jdk_dir, "jdk{}.tar".format(version))
+ if os.path.exists(jdk_tar_path):
+ jdk_dir = runtime.build_path('jdk4test')
+ with contextlib.closing(tarfile.open(jdk_tar_path)) as tf:
+ tf.extractall(jdk_dir)
+ assert os.path.exists(os.path.join(jdk_dir, "bin", "java"))
+ _JAVA_DIR.append(jdk_dir)
+ break
+ else:
+ _JAVA_DIR.append(None)
+
+ return _JAVA_DIR[0]
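Note the memoization above: _JAVA_DIR is a module-level one-slot list, so the tar extraction happens at most once per process.

# first call : finds jdk<N>.tar, extracts into build_path('jdk4test'), caches it
# later calls: return _JAVA_DIR[0] directly (a path, or None if no tar exists)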
diff --git a/library/python/testing/yatest_common/yatest/common/ya.make b/library/python/testing/yatest_common/yatest/common/ya.make
index fe4ea86d2d..f7c50dfe64 100644
--- a/library/python/testing/yatest_common/yatest/common/ya.make
+++ b/library/python/testing/yatest_common/yatest/common/ya.make
@@ -1 +1 @@
-OWNER(g:yatest)
+OWNER(g:yatest)
diff --git a/library/python/testing/yatest_lib/external.py b/library/python/testing/yatest_lib/external.py
index 69874dece4..39113230d9 100644
--- a/library/python/testing/yatest_lib/external.py
+++ b/library/python/testing/yatest_lib/external.py
@@ -1,20 +1,20 @@
from __future__ import absolute_import
-import re
+import re
import sys
import copy
-import logging
+import logging
from . import tools
from datetime import date, datetime
-import enum
+import enum
import six
-logger = logging.getLogger(__name__)
-MDS_URI_PREFIX = 'https://storage.yandex-team.ru/get-devtools/'
+logger = logging.getLogger(__name__)
+MDS_URI_PREFIX = 'https://storage.yandex-team.ru/get-devtools/'
+
-
def apply(func, value, apply_to_keys=False):
"""
Applies func to every possible member of value
@@ -67,8 +67,8 @@ def serialize(value):
return val
if isinstance(val, six.string_types) or isinstance(val, bytes):
return tools.to_utf8(val)
- if isinstance(val, enum.Enum):
- return str(val)
+ if isinstance(val, enum.Enum):
+ return str(val)
if isinstance(val, six.integer_types) or type(val) in [float, bool]:
return val
if is_external(val):
@@ -136,19 +136,19 @@ class ExternalDataInfo(object):
@property
def path(self):
- if self.uri.count("://") != 1:
- logger.error("Invalid external data uri: '%s'", self.uri)
- return self.uri
+ if self.uri.count("://") != 1:
+ logger.error("Invalid external data uri: '%s'", self.uri)
+ return self.uri
_, path = self.uri.split("://")
return path
- def get_mds_key(self):
- assert self.is_http
- m = re.match(re.escape(MDS_URI_PREFIX) + r'(.*?)($|#)', self.uri)
- if m:
- return m.group(1)
- raise AssertionError("Failed to extract mds key properly from '{}'".format(self.uri))
-
+ def get_mds_key(self):
+ assert self.is_http
+ m = re.match(re.escape(MDS_URI_PREFIX) + r'(.*?)($|#)', self.uri)
+ if m:
+ return m.group(1)
+ raise AssertionError("Failed to extract mds key properly from '{}'".format(self.uri))
+
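A sketch of the extraction above (the key itself is made up):

# uri == MDS_URI_PREFIX + '123456/some/file.txt#md5=abc'
# get_mds_key() -> '123456/some/file.txt'  (the match stops at '#' or the end)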
@property
def size(self):
return self._data.get("size")
diff --git a/library/python/testing/yatest_lib/test_splitter.py b/library/python/testing/yatest_lib/test_splitter.py
index bc7beba568..acbcd4300e 100644
--- a/library/python/testing/yatest_lib/test_splitter.py
+++ b/library/python/testing/yatest_lib/test_splitter.py
@@ -1,15 +1,15 @@
-# coding: utf-8
+# coding: utf-8
import collections
-def flatten_tests(test_classes):
+def flatten_tests(test_classes):
"""
>>> test_classes = {x: [x] for x in range(5)}
- >>> flatten_tests(test_classes)
+ >>> flatten_tests(test_classes)
[(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)]
>>> test_classes = {x: [x + 1, x + 2] for x in range(2)}
- >>> flatten_tests(test_classes)
+ >>> flatten_tests(test_classes)
[(0, 1), (0, 2), (1, 2), (1, 3)]
"""
tests = []
diff --git a/library/python/testing/yatest_lib/tests/test_external.py b/library/python/testing/yatest_lib/tests/test_external.py
index ea5ebf97a3..18cb560b17 100644
--- a/library/python/testing/yatest_lib/tests/test_external.py
+++ b/library/python/testing/yatest_lib/tests/test_external.py
@@ -1,20 +1,20 @@
-import enum
-import pytest
-
-from yatest_lib import external
-
-
-class MyEnum(enum.Enum):
- VAL1 = 1
- VAL2 = 2
-
-
-@pytest.mark.parametrize("data, expected_val, expected_type", [
- ({}, {}, dict),
- (MyEnum.VAL1, "MyEnum.VAL1", str),
- ({MyEnum.VAL1: MyEnum.VAL2}, {"MyEnum.VAL1": "MyEnum.VAL2"}, dict),
-])
-def test_serialize(data, expected_val, expected_type):
- data = external.serialize(data)
- assert expected_type == type(data), data
- assert expected_val == data
+import enum
+import pytest
+
+from yatest_lib import external
+
+
+class MyEnum(enum.Enum):
+ VAL1 = 1
+ VAL2 = 2
+
+
+@pytest.mark.parametrize("data, expected_val, expected_type", [
+ ({}, {}, dict),
+ (MyEnum.VAL1, "MyEnum.VAL1", str),
+ ({MyEnum.VAL1: MyEnum.VAL2}, {"MyEnum.VAL1": "MyEnum.VAL2"}, dict),
+])
+def test_serialize(data, expected_val, expected_type):
+ data = external.serialize(data)
+ assert expected_type == type(data), data
+ assert expected_val == data
diff --git a/library/python/testing/yatest_lib/tests/ya.make b/library/python/testing/yatest_lib/tests/ya.make
index 89396b733e..8586c6ef7d 100644
--- a/library/python/testing/yatest_lib/tests/ya.make
+++ b/library/python/testing/yatest_lib/tests/ya.make
@@ -1,13 +1,13 @@
-OWNER(g:yatest)
+OWNER(g:yatest)
-PY23_TEST()
+PY23_TEST()
PEERDIR(
library/python/testing/yatest_lib
)
TEST_SRCS(
- test_external.py
+ test_external.py
test_testsplitter.py
)
diff --git a/library/python/testing/yatest_lib/ya.make b/library/python/testing/yatest_lib/ya.make
index 1b9d7aa8c2..342bae82ba 100644
--- a/library/python/testing/yatest_lib/ya.make
+++ b/library/python/testing/yatest_lib/ya.make
@@ -1,4 +1,4 @@
-OWNER(g:yatest)
+OWNER(g:yatest)
PY23_LIBRARY()
@@ -15,12 +15,12 @@ PEERDIR(
contrib/python/six
)
-IF(PYTHON2)
- PEERDIR(
- contrib/python/enum34
- )
-ENDIF()
-
+IF(PYTHON2)
+ PEERDIR(
+ contrib/python/enum34
+ )
+ENDIF()
+
END()
RECURSE_FOR_TESTS(tests)
diff --git a/library/python/windows/__init__.py b/library/python/windows/__init__.py
index ab19b95144..62861b3309 100644
--- a/library/python/windows/__init__.py
+++ b/library/python/windows/__init__.py
@@ -1,10 +1,10 @@
-# coding: utf-8
-
+# coding: utf-8
+
import os
import stat
-import sys
+import sys
import shutil
-import logging
+import logging
from six import reraise
@@ -75,9 +75,9 @@ def errorfix(f):
def f_wrapped(*args, **kwargs):
try:
return f(*args, **kwargs)
- except WindowsError:
- tp, value, tb = sys.exc_info()
- fix_error(value)
+ except WindowsError:
+ tp, value, tb = sys.exc_info()
+ fix_error(value)
reraise(tp, value, tb)
return f_wrapped
@@ -105,7 +105,7 @@ if on_win():
_has_ctypes = True
try:
import ctypes
- from ctypes import wintypes
+ from ctypes import wintypes
except ImportError:
_has_ctypes = False
@@ -141,8 +141,8 @@ if on_win():
def run_diehard(f, winerrors, tries, delay, *args, **kwargs):
if isinstance(winerrors, int):
winerrors = (winerrors,)
-
- ei = None
+
+ ei = None
for t in xrange(tries):
if t:
logger.debug('Diehard [errs %s]: try #%d in %s', ','.join(str(x) for x in winerrors), t, f)
@@ -151,7 +151,7 @@ if on_win():
except WindowsError as e:
if e.winerror not in winerrors:
raise
- ei = sys.exc_info()
+ ei = sys.exc_info()
time.sleep(delay)
reraise(ei[0], ei[1], ei[2])
@@ -323,34 +323,34 @@ if on_win():
@require_ctypes
def _high_dword(x):
return ctypes.c_ulong((x >> 32) & ((1 << 32) - 1))
-
- @win_only
- @require_ctypes
- def get_current_process():
- handle = ctypes.windll.kernel32.GetCurrentProcess()
- if not handle:
- raise ctypes.WinError()
- return wintypes.HANDLE(handle)
-
- @win_only
- @require_ctypes
- def get_process_handle_count(proc_handle):
- assert isinstance(proc_handle, wintypes.HANDLE)
-
- GetProcessHandleCount = ctypes.WINFUNCTYPE(wintypes.BOOL, wintypes.HANDLE, wintypes.POINTER(wintypes.DWORD))(("GetProcessHandleCount", ctypes.windll.kernel32))
- hndcnt = wintypes.DWORD()
- if not GetProcessHandleCount(proc_handle, ctypes.byref(hndcnt)):
- raise ctypes.WinError()
- return hndcnt.value
-
- @win_only
- @require_ctypes
- def set_handle_information(file, inherit=None, protect_from_close=None):
- for flag, value in [(inherit, 1), (protect_from_close, 2)]:
- if flag is not None:
- assert isinstance(flag, bool)
- if not ctypes.windll.kernel32.SetHandleInformation(file_handle(file), _low_dword(value), _low_dword(int(flag))):
- raise ctypes.WinError()
+
+ @win_only
+ @require_ctypes
+ def get_current_process():
+ handle = ctypes.windll.kernel32.GetCurrentProcess()
+ if not handle:
+ raise ctypes.WinError()
+ return wintypes.HANDLE(handle)
+
+ @win_only
+ @require_ctypes
+ def get_process_handle_count(proc_handle):
+ assert isinstance(proc_handle, wintypes.HANDLE)
+
+ GetProcessHandleCount = ctypes.WINFUNCTYPE(wintypes.BOOL, wintypes.HANDLE, wintypes.POINTER(wintypes.DWORD))(("GetProcessHandleCount", ctypes.windll.kernel32))
+ hndcnt = wintypes.DWORD()
+ if not GetProcessHandleCount(proc_handle, ctypes.byref(hndcnt)):
+ raise ctypes.WinError()
+ return hndcnt.value
+
+ @win_only
+ @require_ctypes
+ def set_handle_information(file, inherit=None, protect_from_close=None):
+ for flag, value in [(inherit, 1), (protect_from_close, 2)]:
+ if flag is not None:
+ assert isinstance(flag, bool)
+ if not ctypes.windll.kernel32.SetHandleInformation(file_handle(file), _low_dword(value), _low_dword(int(flag))):
+ raise ctypes.WinError()
@win_only
@require_ctypes
diff --git a/library/python/ya.make b/library/python/ya.make
index 813ca4ef0d..2e1eb6e0e1 100644
--- a/library/python/ya.make
+++ b/library/python/ya.make
@@ -42,7 +42,7 @@ RECURSE(
cookiemy
coredump_filter
cores
- coverage
+ coverage
cpp_test
cppdemangle
cqueue
diff --git a/tools/archiver/tests/test.py b/tools/archiver/tests/test.py
index 947a0968b2..b92d58f6a9 100644
--- a/tools/archiver/tests/test.py
+++ b/tools/archiver/tests/test.py
@@ -8,7 +8,7 @@ logger = logging.getLogger("test_logger")
class TestArchiver(object):
@classmethod
def setup_class(cls):
- cls.archiver_path = ytc.binary_path("tools/archiver/archiver")
+ cls.archiver_path = ytc.binary_path("tools/archiver/archiver")
def test_recursive(self):
assert 'archiver' == os.path.basename(self.archiver_path)
diff --git a/util/charset/benchmark/to_lower/metrics/ya.make b/util/charset/benchmark/to_lower/metrics/ya.make
index 52428664c6..5bb3461a8d 100644
--- a/util/charset/benchmark/to_lower/metrics/ya.make
+++ b/util/charset/benchmark/to_lower/metrics/ya.make
@@ -6,12 +6,12 @@ SUBSCRIBER(g:util-subscribers)
PY2TEST()
-SIZE(LARGE)
+SIZE(LARGE)
TAG(
ya:force_sandbox
sb:intel_e5_2660v1
- ya:fat
+ ya:fat
)
TEST_SRCS(main.py)
diff --git a/util/charset/benchmark/utf8_to_wide/metrics/ya.make b/util/charset/benchmark/utf8_to_wide/metrics/ya.make
index e16123bc5f..c406e25bee 100644
--- a/util/charset/benchmark/utf8_to_wide/metrics/ya.make
+++ b/util/charset/benchmark/utf8_to_wide/metrics/ya.make
@@ -6,12 +6,12 @@ SUBSCRIBER(g:util-subscribers)
PY2TEST()
-SIZE(LARGE)
+SIZE(LARGE)
TAG(
ya:force_sandbox
sb:intel_e5_2660v1
- ya:fat
+ ya:fat
)
TEST_SRCS(main.py)
diff --git a/util/charset/ut/ya.make b/util/charset/ut/ya.make
index c3574cc63d..6526815e92 100644
--- a/util/charset/ut/ya.make
+++ b/util/charset/ut/ya.make
@@ -10,8 +10,8 @@ SRCS(
wide_ut.cpp
)
-INCLUDE(${ARCADIA_ROOT}/util/tests/ya_util_tests.inc)
-
+INCLUDE(${ARCADIA_ROOT}/util/tests/ya_util_tests.inc)
+
REQUIREMENTS(ram:17)
END()
diff --git a/util/datetime/ut/ya.make b/util/datetime/ut/ya.make
index 0c1acb4b47..c2bc714059 100644
--- a/util/datetime/ut/ya.make
+++ b/util/datetime/ut/ya.make
@@ -10,6 +10,6 @@ SRCS(
datetime/uptime_ut.cpp
)
-INCLUDE(${ARCADIA_ROOT}/util/tests/ya_util_tests.inc)
-
+INCLUDE(${ARCADIA_ROOT}/util/tests/ya_util_tests.inc)
+
END()
diff --git a/util/digest/ut/ya.make b/util/digest/ut/ya.make
index 5557ed8bd2..245b2cf6d2 100644
--- a/util/digest/ut/ya.make
+++ b/util/digest/ut/ya.make
@@ -10,6 +10,6 @@ SRCS(
digest/sequence_ut.cpp
)
-INCLUDE(${ARCADIA_ROOT}/util/tests/ya_util_tests.inc)
-
+INCLUDE(${ARCADIA_ROOT}/util/tests/ya_util_tests.inc)
+
END()
diff --git a/util/draft/ut/ya.make b/util/draft/ut/ya.make
index 33265f02c1..37ab9413c5 100644
--- a/util/draft/ut/ya.make
+++ b/util/draft/ut/ya.make
@@ -16,6 +16,6 @@ SRCS(
memory_ut.cpp
)
-INCLUDE(${ARCADIA_ROOT}/util/tests/ya_util_tests.inc)
-
+INCLUDE(${ARCADIA_ROOT}/util/tests/ya_util_tests.inc)
+
END()
diff --git a/util/folder/ut/ya.make b/util/folder/ut/ya.make
index c77353ea82..64877d9b58 100644
--- a/util/folder/ut/ya.make
+++ b/util/folder/ut/ya.make
@@ -12,6 +12,6 @@ SRCS(
folder/pathsplit_ut.cpp
)
-INCLUDE(${ARCADIA_ROOT}/util/tests/ya_util_tests.inc)
-
+INCLUDE(${ARCADIA_ROOT}/util/tests/ya_util_tests.inc)
+
END()
diff --git a/util/generic/benchmark/fastclp2/metrics/ya.make b/util/generic/benchmark/fastclp2/metrics/ya.make
index 2cd33c8057..b2d17ebad3 100644
--- a/util/generic/benchmark/fastclp2/metrics/ya.make
+++ b/util/generic/benchmark/fastclp2/metrics/ya.make
@@ -6,12 +6,12 @@ SUBSCRIBER(g:util-subscribers)
PY2TEST()
-SIZE(LARGE)
+SIZE(LARGE)
TAG(
ya:force_sandbox
sb:intel_e5_2660v1
- ya:fat
+ ya:fat
)
TEST_SRCS(main.py)
diff --git a/util/generic/benchmark/log2/metrics/ya.make b/util/generic/benchmark/log2/metrics/ya.make
index 2cbdda35fa..eb987e38d2 100644
--- a/util/generic/benchmark/log2/metrics/ya.make
+++ b/util/generic/benchmark/log2/metrics/ya.make
@@ -6,12 +6,12 @@ SUBSCRIBER(g:util-subscribers)
PY2TEST()
-SIZE(LARGE)
+SIZE(LARGE)
TAG(
ya:force_sandbox
sb:intel_e5_2660v1
- ya:fat
+ ya:fat
)
TEST_SRCS(main.py)
diff --git a/util/generic/benchmark/rotate_bits/metrics/ya.make b/util/generic/benchmark/rotate_bits/metrics/ya.make
index 7cd7b66f9d..ac27d2f845 100644
--- a/util/generic/benchmark/rotate_bits/metrics/ya.make
+++ b/util/generic/benchmark/rotate_bits/metrics/ya.make
@@ -6,12 +6,12 @@ SUBSCRIBER(g:util-subscribers)
PY2TEST()
-SIZE(LARGE)
+SIZE(LARGE)
TAG(
ya:force_sandbox
sb:intel_e5_2660v1
- ya:fat
+ ya:fat
)
TEST_SRCS(main.py)
diff --git a/util/generic/benchmark/vector_count_ctor/metrics/ya.make b/util/generic/benchmark/vector_count_ctor/metrics/ya.make
index 0713760c81..c48f89b564 100644
--- a/util/generic/benchmark/vector_count_ctor/metrics/ya.make
+++ b/util/generic/benchmark/vector_count_ctor/metrics/ya.make
@@ -6,12 +6,12 @@ SUBSCRIBER(g:util-subscribers)
PY2TEST()
-SIZE(LARGE)
+SIZE(LARGE)
TAG(
ya:force_sandbox
sb:intel_e5_2660v1
- ya:fat
+ ya:fat
)
TEST_SRCS(main.py)
diff --git a/util/generic/string.pxd b/util/generic/string.pxd
index ffcf591910..c25f7392a1 100644
--- a/util/generic/string.pxd
+++ b/util/generic/string.pxd
@@ -3,8 +3,8 @@ from libcpp.string cimport string as _std_string
cdef extern from "<util/generic/strbuf.h>" nogil:
cdef cppclass TStringBuf:
- TStringBuf() except +
- TStringBuf(const char*) except +
+ TStringBuf() except +
+ TStringBuf(const char*) except +
TStringBuf(const char*, size_t) except +
const char* data()
char* Data()
diff --git a/util/generic/ut/ya.make b/util/generic/ut/ya.make
index 69fc3f705a..6eaf24cc5f 100644
--- a/util/generic/ut/ya.make
+++ b/util/generic/ut/ya.make
@@ -3,8 +3,8 @@ UNITTEST_FOR(util)
OWNER(g:util)
SUBSCRIBER(g:util-subscribers)
-FORK_TESTS()
-
+FORK_TESTS()
+
SRCS(
generic/adaptor_ut.cpp
generic/algorithm_ut.cpp
@@ -56,8 +56,8 @@ SRCS(
generic/scope_ut.cpp
)
-INCLUDE(${ARCADIA_ROOT}/util/tests/ya_util_tests.inc)
-
+INCLUDE(${ARCADIA_ROOT}/util/tests/ya_util_tests.inc)
+
IF (NOT OS_IOS AND NOT ARCH_PPC64LE)
# Abseil fails to build (with linkage error) on ios and with compilation error on PowerPC
# (somewhere in unscaledcycleclock.cc).
diff --git a/util/memory/benchmark/pool/metrics/ya.make b/util/memory/benchmark/pool/metrics/ya.make
index 75dc9b37b3..c671bc5c1c 100644
--- a/util/memory/benchmark/pool/metrics/ya.make
+++ b/util/memory/benchmark/pool/metrics/ya.make
@@ -3,12 +3,12 @@ SUBSCRIBER(g:util-subscribers)
PY2TEST()
-SIZE(LARGE)
+SIZE(LARGE)
TAG(
ya:force_sandbox
sb:intel_e5_2660v1
- ya:fat
+ ya:fat
)
TEST_SRCS(main.py)
diff --git a/util/memory/ut/ya.make b/util/memory/ut/ya.make
index a8f28f1d0f..d3a988617d 100644
--- a/util/memory/ut/ya.make
+++ b/util/memory/ut/ya.make
@@ -11,6 +11,6 @@ SRCS(
memory/tempbuf_ut.cpp
)
-INCLUDE(${ARCADIA_ROOT}/util/tests/ya_util_tests.inc)
-
+INCLUDE(${ARCADIA_ROOT}/util/tests/ya_util_tests.inc)
+
END()
diff --git a/util/network/ut/ya.make b/util/network/ut/ya.make
index 7bf3e91561..1ba03e167c 100644
--- a/util/network/ut/ya.make
+++ b/util/network/ut/ya.make
@@ -18,6 +18,6 @@ SRCS(
network/socket_ut.cpp
)
-INCLUDE(${ARCADIA_ROOT}/util/tests/ya_util_tests.inc)
-
+INCLUDE(${ARCADIA_ROOT}/util/tests/ya_util_tests.inc)
+
END()
diff --git a/util/random/benchmark/prng/metrics/ya.make b/util/random/benchmark/prng/metrics/ya.make
index 37b4b835ac..1f56aac0bd 100644
--- a/util/random/benchmark/prng/metrics/ya.make
+++ b/util/random/benchmark/prng/metrics/ya.make
@@ -6,12 +6,12 @@ SUBSCRIBER(g:util-subscribers)
PY2TEST()
-SIZE(LARGE)
+SIZE(LARGE)
TAG(
ya:force_sandbox
sb:intel_e5_2660v1
- ya:fat
+ ya:fat
)
TEST_SRCS(main.py)
diff --git a/util/random/ut/ya.make b/util/random/ut/ya.make
index 9c2cc693c4..5080b339de 100644
--- a/util/random/ut/ya.make
+++ b/util/random/ut/ya.make
@@ -14,6 +14,6 @@ SRCS(
random/shuffle_ut.cpp
)
-INCLUDE(${ARCADIA_ROOT}/util/tests/ya_util_tests.inc)
-
+INCLUDE(${ARCADIA_ROOT}/util/tests/ya_util_tests.inc)
+
END()
diff --git a/util/stream/ut/ya.make b/util/stream/ut/ya.make
index c1414bc2cf..f0176dd7b4 100644
--- a/util/stream/ut/ya.make
+++ b/util/stream/ut/ya.make
@@ -25,6 +25,6 @@ SRCS(
stream/zlib_ut.cpp
)
-INCLUDE(${ARCADIA_ROOT}/util/tests/ya_util_tests.inc)
-
+INCLUDE(${ARCADIA_ROOT}/util/tests/ya_util_tests.inc)
+
END()
diff --git a/util/string/benchmark/float_to_string/metrics/ya.make b/util/string/benchmark/float_to_string/metrics/ya.make
index 24b522e8a6..4b8c4cc07d 100644
--- a/util/string/benchmark/float_to_string/metrics/ya.make
+++ b/util/string/benchmark/float_to_string/metrics/ya.make
@@ -6,12 +6,12 @@ SUBSCRIBER(g:util-subscribers)
PY2TEST()
-SIZE(LARGE)
+SIZE(LARGE)
TAG(
ya:force_sandbox
sb:intel_e5_2660v1
- ya:fat
+ ya:fat
)
TEST_SRCS(main.py)
diff --git a/util/string/benchmark/join/metrics/ya.make b/util/string/benchmark/join/metrics/ya.make
index fcb948f61f..08ff3a149f 100644
--- a/util/string/benchmark/join/metrics/ya.make
+++ b/util/string/benchmark/join/metrics/ya.make
@@ -6,12 +6,12 @@ SUBSCRIBER(g:util-subscribers)
PY2TEST()
-SIZE(LARGE)
+SIZE(LARGE)
TAG(
ya:force_sandbox
sb:intel_e5_2660v1
- ya:fat
+ ya:fat
)
TEST_SRCS(main.py)
diff --git a/util/string/benchmark/subst_global/metrics/ya.make b/util/string/benchmark/subst_global/metrics/ya.make
index ee90840c5e..d8c30ad460 100644
--- a/util/string/benchmark/subst_global/metrics/ya.make
+++ b/util/string/benchmark/subst_global/metrics/ya.make
@@ -6,12 +6,12 @@ SUBSCRIBER(g:util-subscribers)
PY2TEST()
-SIZE(LARGE)
+SIZE(LARGE)
TAG(
ya:force_sandbox
sb:intel_e5_2660v1
- ya:fat
+ ya:fat
)
TEST_SRCS(main.py)
diff --git a/util/string/ut/ya.make b/util/string/ut/ya.make
index e1e3690f45..6e80812825 100644
--- a/util/string/ut/ya.make
+++ b/util/string/ut/ya.make
@@ -19,6 +19,6 @@ SRCS(
string/ascii_ut.cpp
)
-INCLUDE(${ARCADIA_ROOT}/util/tests/ya_util_tests.inc)
-
+INCLUDE(${ARCADIA_ROOT}/util/tests/ya_util_tests.inc)
+
END()
diff --git a/util/system/atomic_ut.cpp b/util/system/atomic_ut.cpp
index 9806f9c441..07211ffba7 100644
--- a/util/system/atomic_ut.cpp
+++ b/util/system/atomic_ut.cpp
@@ -212,16 +212,16 @@ using TAltAtomic = volatile TChooser<TAtomicBase, long, long long>::TdType;
#endif
class TTTest: public TAtomicTest<TAltAtomic> {
-public:
+public:
TString Name() const noexcept override {
- return "TAtomicTest<TAltAtomic>";
- }
+ return "TAtomicTest<TAltAtomic>";
+ }
static TString StaticName() noexcept {
- return "TAtomicTest<TAltAtomic>";
- }
-};
-
-UNIT_TEST_SUITE_REGISTRATION(TTTest);
-
+ return "TAtomicTest<TAltAtomic>";
+ }
+};
+
+UNIT_TEST_SUITE_REGISTRATION(TTTest);
+
#endif
diff --git a/util/system/benchmark/cpu_id/metrics/ya.make b/util/system/benchmark/cpu_id/metrics/ya.make
index e2860a7ec4..8c55def99b 100644
--- a/util/system/benchmark/cpu_id/metrics/ya.make
+++ b/util/system/benchmark/cpu_id/metrics/ya.make
@@ -6,12 +6,12 @@ SUBSCRIBER(g:util-subscribers)
PY2TEST()
-SIZE(LARGE)
+SIZE(LARGE)
TAG(
ya:force_sandbox
sb:intel_e5_2660v1
- ya:fat
+ ya:fat
)
TEST_SRCS(main.py)
diff --git a/util/system/benchmark/create_destroy_thread/metrics/ya.make b/util/system/benchmark/create_destroy_thread/metrics/ya.make
index 4912369927..d526487e1a 100644
--- a/util/system/benchmark/create_destroy_thread/metrics/ya.make
+++ b/util/system/benchmark/create_destroy_thread/metrics/ya.make
@@ -6,12 +6,12 @@ SUBSCRIBER(g:util-subscribers)
PY2TEST()
-SIZE(LARGE)
+SIZE(LARGE)
TAG(
ya:force_sandbox
sb:intel_e5_2660v1
- ya:fat
+ ya:fat
)
TEST_SRCS(main.py)
diff --git a/util/system/shellcommand.cpp b/util/system/shellcommand.cpp
index 6ac9eae94f..b1989b5c8c 100644
--- a/util/system/shellcommand.cpp
+++ b/util/system/shellcommand.cpp
@@ -54,8 +54,8 @@ using TExitStatus = DWORD;
// #define DBG(stmt) stmt
namespace {
- constexpr static size_t DATA_BUFFER_SIZE = 128 * 1024;
-
+ constexpr static size_t DATA_BUFFER_SIZE = 128 * 1024;
+
#if defined(_unix_)
void SetUserGroups(const passwd* pw) {
int ngroups = 1;
@@ -265,14 +265,14 @@ private:
}
};
- struct TPipePump {
- TRealPipeHandle* Pipe;
- IOutputStream* OutputStream;
- IInputStream* InputStream;
- TAtomic* ShouldClosePipe;
- TString InternalError;
- };
-
+ struct TPipePump {
+ TRealPipeHandle* Pipe;
+ IOutputStream* OutputStream;
+ IInputStream* InputStream;
+ TAtomic* ShouldClosePipe;
+ TString InternalError;
+ };
+
#if defined(_unix_)
void OnFork(TPipes& pipes, sigset_t oldmask, char* const* argv, char* const* envp, const std::function<void()>& afterFork) const;
#else
@@ -448,66 +448,66 @@ public:
Communicate(pi);
return nullptr;
}
-
- inline static void* ReadStream(void* data) noexcept {
- TPipePump* pump = reinterpret_cast<TPipePump*>(data);
- try {
- int bytes = 0;
- TBuffer buffer(DATA_BUFFER_SIZE);
-
- while (true) {
- bytes = pump->Pipe->Read(buffer.Data(), buffer.Capacity());
+
+ inline static void* ReadStream(void* data) noexcept {
+ TPipePump* pump = reinterpret_cast<TPipePump*>(data);
+ try {
+ int bytes = 0;
+ TBuffer buffer(DATA_BUFFER_SIZE);
+
+ while (true) {
+ bytes = pump->Pipe->Read(buffer.Data(), buffer.Capacity());
if (bytes > 0) {
- pump->OutputStream->Write(buffer.Data(), bytes);
+ pump->OutputStream->Write(buffer.Data(), bytes);
} else {
- break;
+ break;
}
- }
+ }
if (pump->Pipe->IsOpen()) {
- pump->Pipe->Close();
+ pump->Pipe->Close();
}
- } catch (...) {
- pump->InternalError = CurrentExceptionMessage();
- }
- return nullptr;
- }
-
- inline static void* WriteStream(void* data) noexcept {
- TPipePump* pump = reinterpret_cast<TPipePump*>(data);
- try {
- int bytes = 0;
- int bytesToWrite = 0;
- char* bufPos = nullptr;
- TBuffer buffer(DATA_BUFFER_SIZE);
-
- while (true) {
- if (!bytesToWrite) {
- bytesToWrite = pump->InputStream->Read(buffer.Data(), buffer.Capacity());
- if (bytesToWrite == 0) {
+ } catch (...) {
+ pump->InternalError = CurrentExceptionMessage();
+ }
+ return nullptr;
+ }
+
+ inline static void* WriteStream(void* data) noexcept {
+ TPipePump* pump = reinterpret_cast<TPipePump*>(data);
+ try {
+ int bytes = 0;
+ int bytesToWrite = 0;
+ char* bufPos = nullptr;
+ TBuffer buffer(DATA_BUFFER_SIZE);
+
+ while (true) {
+ if (!bytesToWrite) {
+ bytesToWrite = pump->InputStream->Read(buffer.Data(), buffer.Capacity());
+ if (bytesToWrite == 0) {
if (AtomicGet(pump->ShouldClosePipe)) {
- break;
+ break;
}
- continue;
- }
- bufPos = buffer.Data();
- }
-
- bytes = pump->Pipe->Write(bufPos, bytesToWrite);
- if (bytes > 0) {
- bytesToWrite -= bytes;
- bufPos += bytes;
- } else {
- break;
- }
- }
+ continue;
+ }
+ bufPos = buffer.Data();
+ }
+
+ bytes = pump->Pipe->Write(bufPos, bytesToWrite);
+ if (bytes > 0) {
+ bytesToWrite -= bytes;
+ bufPos += bytes;
+ } else {
+ break;
+ }
+ }
if (pump->Pipe->IsOpen()) {
- pump->Pipe->Close();
+ pump->Pipe->Close();
}
- } catch (...) {
- pump->InternalError = CurrentExceptionMessage();
- }
- return nullptr;
- }
+ } catch (...) {
+ pump->InternalError = CurrentExceptionMessage();
+ }
+ return nullptr;
+ }
TString GetQuotedCommand() const;
};
@@ -549,9 +549,9 @@ void TShellCommand::TImpl::StartProcess(TShellCommand::TImpl::TPipes& pipes) {
if (InputMode != TShellCommandOptions::HANDLE_INHERIT) {
startup_info.hStdInput = pipes.InputPipeFd[0];
} else {
- // Don't leave hStdInput unfilled, otherwise any attempt to retrieve the operating-system file handle
- // that is associated with the specified file descriptor will lead to errors.
- startup_info.hStdInput = GetStdHandle(STD_INPUT_HANDLE);
+ // Don't leave hStdInput unfilled, otherwise any attempt to retrieve the operating-system file handle
+ // that is associated with the specified file descriptor will lead to errors.
+ startup_info.hStdInput = GetStdHandle(STD_INPUT_HANDLE);
}
PROCESS_INFORMATION process_info;
@@ -561,7 +561,7 @@ void TShellCommand::TImpl::StartProcess(TShellCommand::TImpl::TPipes& pipes) {
TString cmd = UseShell ? "cmd /A /Q /S /C \"" + qcmd + "\"" : qcmd;
// winapi can modify command text, copy it
- Y_ENSURE_EX(cmd.size() < MAX_COMMAND_LINE, yexception() << "Command is too long (length=" << cmd.size() << ")");
+ Y_ENSURE_EX(cmd.size() < MAX_COMMAND_LINE, yexception() << "Command is too long (length=" << cmd.size() << ")");
TTempArray<wchar_t> cmdcopy(MAX_COMMAND_LINE);
Copy(cmd.data(), cmd.data() + cmd.size(), cmdcopy.Data());
*(cmdcopy.Data() + cmd.size()) = 0;
@@ -644,18 +644,18 @@ void ShellQuoteArgSp(TString& dst, TStringBuf argument) {
ShellQuoteArg(dst, argument);
}
-bool ArgNeedsQuotes(TStringBuf arg) noexcept {
+bool ArgNeedsQuotes(TStringBuf arg) noexcept {
if (arg.empty()) {
- return true;
+ return true;
}
- return arg.find_first_of(" \"\'\t&()*<>\\`^|") != TString::npos;
-}
-
+ return arg.find_first_of(" \"\'\t&()*<>\\`^|") != TString::npos;
+}
+
TString TShellCommand::TImpl::GetQuotedCommand() const {
TString quoted = Command; /// @todo command itself should be quoted too
for (const auto& argument : Arguments) {
- // Don't add unnecessary quotes. It's especially important for Windows with its 32k command line length limit.
- if (QuoteArguments && ArgNeedsQuotes(argument)) {
+ // Don't add unnecessary quotes. It's especially important for Windows with its 32k command line length limit.
+ if (QuoteArguments && ArgNeedsQuotes(argument)) {
::ShellQuoteArgSp(quoted, argument);
} else {
quoted.append(" ").append(argument);
@@ -731,8 +731,8 @@ void TShellCommand::TImpl::OnFork(TPipes& pipes, sigset_t oldmask, char* const*
}
if (Nice) {
- // Don't verify Nice() call - it does not work properly with WSL https://github.com/Microsoft/WSL/issues/1838
- ::Nice(Nice);
+ // Don't verify Nice() call - it does not work properly with WSL https://github.com/Microsoft/WSL/issues/1838
+ ::Nice(Nice);
}
if (afterFork) {
afterFork();
@@ -751,7 +751,7 @@ void TShellCommand::TImpl::OnFork(TPipes& pipes, sigset_t oldmask, char* const*
<< "unknown error" << Endl;
}
- _exit(-1);
+ _exit(-1);
}
#endif
@@ -901,29 +901,29 @@ void TShellCommand::TImpl::Communicate(TProcessInfo* pi) {
#endif
try {
-#if defined(_win_)
- TPipePump pumps[3] = {0};
- pumps[0] = {&pi->ErrorFd, error};
- pumps[1] = {&pi->OutputFd, output};
-
- TVector<THolder<TThread>> streamThreads;
- streamThreads.emplace_back(new TThread(&TImpl::ReadStream, &pumps[0]));
- streamThreads.emplace_back(new TThread(&TImpl::ReadStream, &pumps[1]));
-
- if (input) {
- pumps[2] = {&pi->InputFd, nullptr, input, &pi->Parent->ShouldCloseInput};
- streamThreads.emplace_back(new TThread(&TImpl::WriteStream, &pumps[2]));
- }
-
- for (auto& threadHolder : streamThreads)
- threadHolder->Start();
-#else
- TBuffer buffer(DATA_BUFFER_SIZE);
- TBuffer inputBuffer(DATA_BUFFER_SIZE);
+#if defined(_win_)
+ TPipePump pumps[3] = {0};
+ pumps[0] = {&pi->ErrorFd, error};
+ pumps[1] = {&pi->OutputFd, output};
+
+ TVector<THolder<TThread>> streamThreads;
+ streamThreads.emplace_back(new TThread(&TImpl::ReadStream, &pumps[0]));
+ streamThreads.emplace_back(new TThread(&TImpl::ReadStream, &pumps[1]));
+
+ if (input) {
+ pumps[2] = {&pi->InputFd, nullptr, input, &pi->Parent->ShouldCloseInput};
+ streamThreads.emplace_back(new TThread(&TImpl::WriteStream, &pumps[2]));
+ }
+
+ for (auto& threadHolder : streamThreads)
+ threadHolder->Start();
+#else
+ TBuffer buffer(DATA_BUFFER_SIZE);
+ TBuffer inputBuffer(DATA_BUFFER_SIZE);
int bytes;
int bytesToWrite = 0;
char* bufPos = nullptr;
-#endif
+#endif
TWaitResult waitPidResult;
TExitStatus status = 0;
@@ -939,7 +939,7 @@ void TShellCommand::TImpl::Communicate(TProcessInfo* pi) {
#if defined(_unix_)
waitpid(pi->Parent->Pid, &status, WNOHANG);
#else
- WaitForSingleObject(pi->Parent->Pid /* process_info.hProcess */, pi->Parent->PollDelayMs /* ms */);
+ WaitForSingleObject(pi->Parent->Pid /* process_info.hProcess */, pi->Parent->PollDelayMs /* ms */);
Y_UNUSED(status);
#endif
// DBG(Cerr << "wait result: " << waitPidResult << Endl);
@@ -947,8 +947,8 @@ void TShellCommand::TImpl::Communicate(TProcessInfo* pi) {
break;
}
}
-/// @todo factor out (poll + wfmo)
-#if defined(_unix_)
+/// @todo factor out (poll + wfmo)
+#if defined(_unix_)
bool haveIn = false;
bool haveOut = false;
bool haveErr = false;
@@ -1011,7 +1011,7 @@ void TShellCommand::TImpl::Communicate(TProcessInfo* pi) {
if (input && ((fds[0].revents & POLLOUT) == POLLOUT)) {
haveIn = true;
}
-
+
if (haveOut) {
bytes = pi->OutputFd.Read(buffer.Data(), buffer.Capacity());
DBG(Cerr << "transferred " << bytes << " bytes of output" << Endl);
@@ -1053,7 +1053,7 @@ void TShellCommand::TImpl::Communicate(TProcessInfo* pi) {
DBG(Cerr << "transferred " << bytes << " bytes of input" << Endl);
}
-#endif
+#endif
}
DBG(Cerr << "process finished" << Endl);
@@ -1068,7 +1068,7 @@ void TShellCommand::TImpl::Communicate(TProcessInfo* pi) {
if (WIFEXITED(status) && processExitCode == 0) {
cleanExit = true;
} else if (WIFSIGNALED(status)) {
- processExitCode = -WTERMSIG(status);
+ processExitCode = -WTERMSIG(status);
}
#else
if (waitPidResult == WAIT_OBJECT_0) {
diff --git a/util/system/shellcommand.h b/util/system/shellcommand.h
index f56a458351..8730627fe5 100644
--- a/util/system/shellcommand.h
+++ b/util/system/shellcommand.h
@@ -480,6 +480,6 @@ void ShellQuoteArg(TString& dst, TStringBuf arg);
/// Appends to dst: space, quoted arg
void ShellQuoteArgSp(TString& dst, TStringBuf arg);
-
-/// Returns true if arg should be quoted
-bool ArgNeedsQuotes(TStringBuf arg) noexcept;
+
+/// Returns true if arg should be quoted
+bool ArgNeedsQuotes(TStringBuf arg) noexcept;
diff --git a/util/system/shellcommand_ut.cpp b/util/system/shellcommand_ut.cpp
index 5ee48ccf90..9d849279d2 100644
--- a/util/system/shellcommand_ut.cpp
+++ b/util/system/shellcommand_ut.cpp
@@ -84,19 +84,19 @@ Y_UNIT_TEST_SUITE(TShellCommandTest) {
UNIT_ASSERT_VALUES_EQUAL(cmd.GetQuotedCommand(), "echo hello");
}
- Y_UNIT_TEST(TestOnlyNecessaryQuotes) {
- TShellCommandOptions options;
- options.SetQuoteArguments(true);
- TShellCommand cmd("echo");
+ Y_UNIT_TEST(TestOnlyNecessaryQuotes) {
+ TShellCommandOptions options;
+ options.SetQuoteArguments(true);
+ TShellCommand cmd("echo");
cmd << "hey"
<< "hello&world";
- cmd.Run();
- UNIT_ASSERT_VALUES_EQUAL(cmd.GetError(), "");
- UNIT_ASSERT_VALUES_EQUAL(cmd.GetOutput(), "hey hello&world" NL);
- UNIT_ASSERT(TShellCommand::SHELL_FINISHED == cmd.GetStatus());
- UNIT_ASSERT(cmd.GetExitCode().Defined() && 0 == cmd.GetExitCode());
- }
-
+ cmd.Run();
+ UNIT_ASSERT_VALUES_EQUAL(cmd.GetError(), "");
+ UNIT_ASSERT_VALUES_EQUAL(cmd.GetOutput(), "hey hello&world" NL);
+ UNIT_ASSERT(TShellCommand::SHELL_FINISHED == cmd.GetStatus());
+ UNIT_ASSERT(cmd.GetExitCode().Defined() && 0 == cmd.GetExitCode());
+ }
+
Y_UNIT_TEST(TestRun) {
TShellCommand cmd("echo");
cmd << "hello";
@@ -312,7 +312,7 @@ Y_UNIT_TEST_SUITE(TShellCommandTest) {
cmd.Terminate();
cmd.Wait();
UNIT_ASSERT(TShellCommand::SHELL_ERROR == cmd.GetStatus());
- UNIT_ASSERT(cmd.GetExitCode().Defined() && -15 == cmd.GetExitCode());
+ UNIT_ASSERT(cmd.GetExitCode().Defined() && -15 == cmd.GetExitCode());
}
sleep(1);
UNIT_ASSERT(!NFs::Exists(tmpfile));
@@ -368,17 +368,17 @@ Y_UNIT_TEST_SUITE(TShellCommandTest) {
rc = SigProcMask(SIG_SETMASK, &oldmask, nullptr);
UNIT_ASSERT(rc == 0);
}
-#else
- // This ut is windows-only
- Y_UNIT_TEST(TestStdinProperlyConstructed) {
- TShellCommandOptions options;
- options.SetErrorStream(&Cerr);
-
- TShellCommand cmd(BinaryPath("util/system/ut/stdin_osfhandle/stdin_osfhandle"), options);
- cmd.Run().Wait();
- UNIT_ASSERT(TShellCommand::SHELL_FINISHED == cmd.GetStatus());
- UNIT_ASSERT(cmd.GetExitCode().Defined() && 0 == cmd.GetExitCode());
- }
+#else
+ // This ut is windows-only
+ Y_UNIT_TEST(TestStdinProperlyConstructed) {
+ TShellCommandOptions options;
+ options.SetErrorStream(&Cerr);
+
+ TShellCommand cmd(BinaryPath("util/system/ut/stdin_osfhandle/stdin_osfhandle"), options);
+ cmd.Run().Wait();
+ UNIT_ASSERT(TShellCommand::SHELL_FINISHED == cmd.GetStatus());
+ UNIT_ASSERT(cmd.GetExitCode().Defined() && 0 == cmd.GetExitCode());
+ }
#endif
Y_UNIT_TEST(TestInternalError) {
TString input = (TString("a") * 2000).append("\n");
@@ -392,32 +392,32 @@ Y_UNIT_TEST_SUITE(TShellCommandTest) {
UNIT_ASSERT(TShellCommand::SHELL_INTERNAL_ERROR == cmd.GetStatus());
UNIT_ASSERT_VALUES_UNEQUAL(cmd.GetInternalError().size(), 0u);
}
- Y_UNIT_TEST(TestHugeOutput) {
- TShellCommandOptions options;
- TGuardedStringStream stream;
- options.SetOutputStream(&stream);
- options.SetUseShell(true);
-
- TString input = TString(7000, 'a');
- TString command = TStringBuilder{} << "echo " << input;
- TShellCommand cmd(command, options);
- cmd.Run().Wait();
-
- UNIT_ASSERT_VALUES_EQUAL(stream.Str(), input + NL);
- }
- Y_UNIT_TEST(TestHugeError) {
- TShellCommandOptions options;
- TGuardedStringStream stream;
- options.SetErrorStream(&stream);
- options.SetUseShell(true);
-
- TString input = TString(7000, 'a');
- TString command = TStringBuilder{} << "echo " << input << ">&2";
- TShellCommand cmd(command, options);
- cmd.Run().Wait();
-
- UNIT_ASSERT_VALUES_EQUAL(stream.Str(), input + NL);
- }
+ Y_UNIT_TEST(TestHugeOutput) {
+ TShellCommandOptions options;
+ TGuardedStringStream stream;
+ options.SetOutputStream(&stream);
+ options.SetUseShell(true);
+
+ TString input = TString(7000, 'a');
+ TString command = TStringBuilder{} << "echo " << input;
+ TShellCommand cmd(command, options);
+ cmd.Run().Wait();
+
+ UNIT_ASSERT_VALUES_EQUAL(stream.Str(), input + NL);
+ }
+ Y_UNIT_TEST(TestHugeError) {
+ TShellCommandOptions options;
+ TGuardedStringStream stream;
+ options.SetErrorStream(&stream);
+ options.SetUseShell(true);
+
+ TString input = TString(7000, 'a');
+ TString command = TStringBuilder{} << "echo " << input << ">&2";
+ TShellCommand cmd(command, options);
+ cmd.Run().Wait();
+
+ UNIT_ASSERT_VALUES_EQUAL(stream.Str(), input + NL);
+ }
Y_UNIT_TEST(TestPipeInput) {
TShellCommandOptions options;
options.SetAsync(true);
diff --git a/util/system/ut/stdin_osfhandle/main.cpp b/util/system/ut/stdin_osfhandle/main.cpp
index 35c12e67d1..fe2ea836a9 100644
--- a/util/system/ut/stdin_osfhandle/main.cpp
+++ b/util/system/ut/stdin_osfhandle/main.cpp
@@ -1,15 +1,15 @@
-#include <io.h>
-#include <stdio.h>
-#include <windows.h>
-
-int main() {
- auto handle = (unsigned long long)_get_osfhandle(0);
- fprintf(stderr, "_get_osfhandle(0)=%llu\n", handle);
- // It look's like classic windows undocumented behaviour
- // https://docs.microsoft.com/en-us/cpp/c-runtime-library/reference/get-osfhandle
- // _get_osfhandle returns INVALID_HANDLE_VALUE - 1 without any sign of error if specified fd was closed.
- // Working with such handle will lead to future various errors.
- if (handle + 1 == (unsigned long long)INVALID_HANDLE_VALUE)
- return 1;
- return 0;
-}
+#include <io.h>
+#include <stdio.h>
+#include <windows.h>
+
+int main() {
+ auto handle = (unsigned long long)_get_osfhandle(0);
+ fprintf(stderr, "_get_osfhandle(0)=%llu\n", handle);
+    // It looks like classic Windows undocumented behaviour:
+    // https://docs.microsoft.com/en-us/cpp/c-runtime-library/reference/get-osfhandle
+    // _get_osfhandle returns INVALID_HANDLE_VALUE - 1, with no sign of error, if the specified fd was closed.
+    // Working with such a handle will lead to various errors later on.
+ if (handle + 1 == (unsigned long long)INVALID_HANDLE_VALUE)
+ return 1;
+ return 0;
+}
diff --git a/util/system/ut/stdin_osfhandle/ya.make b/util/system/ut/stdin_osfhandle/ya.make
index a21cb037ec..d71ab22e69 100644
--- a/util/system/ut/stdin_osfhandle/ya.make
+++ b/util/system/ut/stdin_osfhandle/ya.make
@@ -1,14 +1,14 @@
-PROGRAM()
-
-OWNER(g:util)
+PROGRAM()
+
+OWNER(g:util)
SUBSCRIBER(g:util-subscribers)
-
-SRCS(
- main.cpp
-)
-
-NO_UTIL()
-
-BUILD_ONLY_IF(OS_WINDOWS)
-
-END()
+
+SRCS(
+ main.cpp
+)
+
+NO_UTIL()
+
+BUILD_ONLY_IF(OS_WINDOWS)
+
+END()
diff --git a/util/system/ut/ya.make b/util/system/ut/ya.make
index 919bbae969..127e7c261e 100644
--- a/util/system/ut/ya.make
+++ b/util/system/ut/ya.make
@@ -13,12 +13,12 @@ TIMEOUT(300)
SIZE(MEDIUM)
-IF (OS_DARWIN)
- SIZE(LARGE)
- TAG(ya:fat ya:force_sandbox ya:exotic_platform)
- TIMEOUT(3600)
-ENDIF()
-
+IF (OS_DARWIN)
+ SIZE(LARGE)
+ TAG(ya:fat ya:force_sandbox ya:exotic_platform)
+ TIMEOUT(3600)
+ENDIF()
+
PEERDIR(
library/cpp/testing/benchmark
)
@@ -77,12 +77,12 @@ SRCS(
system/yassert_ut.cpp
)
-IF (OS_WINDOWS)
- DEPENDS(
- util/system/ut/stdin_osfhandle
- )
-ENDIF()
-
+IF (OS_WINDOWS)
+ DEPENDS(
+ util/system/ut/stdin_osfhandle
+ )
+ENDIF()
+
REQUIREMENTS(ram:12)
END()
diff --git a/util/system/yassert.cpp b/util/system/yassert.cpp
index fd35a8a12f..0f586648b7 100644
--- a/util/system/yassert.cpp
+++ b/util/system/yassert.cpp
@@ -17,8 +17,8 @@
#include <stdarg.h>
#include <stdio.h>
-#ifdef CLANG_COVERAGE
-extern "C" {
+#ifdef CLANG_COVERAGE
+extern "C" {
// __llvm_profile_write_file may not be provided if the executable target uses NO_CLANG_COVERAGE() macro and
// arrives as test's dependency via DEPENDS() macro.
// That's why we provide a weak no-op implementation for __llvm_profile_write_file,
@@ -26,10 +26,10 @@ extern "C" {
Y_WEAK int __llvm_profile_write_file(void) {
return 0;
}
-}
-
-#endif
-
+}
+
+#endif
+
namespace {
struct TPanicLockHolder: public TAdaptiveLock {
};
@@ -82,11 +82,11 @@ namespace NPrivate {
#ifndef WITH_VALGRIND
PrintBackTrace();
#endif
-#ifdef CLANG_COVERAGE
- if (__llvm_profile_write_file()) {
- Cerr << "Failed to dump clang coverage" << Endl;
- }
-#endif
+#ifdef CLANG_COVERAGE
+ if (__llvm_profile_write_file()) {
+ Cerr << "Failed to dump clang coverage" << Endl;
+ }
+#endif
abort();
} catch (...) {
abort();
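The hunks above rely on the weak-symbol fallback described in their comment. A standalone sketch of the same trick, with a hypothetical do_flush and assuming Y_WEAK expands to __attribute__((weak)) on GCC/Clang:

// weak_stub.cpp: standalone sketch of the weak-symbol fallback pattern.
// do_flush is a hypothetical name, not from the patch.
extern "C" __attribute__((weak)) int do_flush(void) {
    return 0; // no-op, used only when no strong definition is linked in
}

int main() {
    // Safe to call unconditionally: the linker prefers a strong do_flush
    // from any other object file and falls back to the stub above.
    return do_flush();
}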
diff --git a/util/tests/sym_versions/test_glibc.py b/util/tests/sym_versions/test_glibc.py
index 04486232c4..c76359c54c 100644
--- a/util/tests/sym_versions/test_glibc.py
+++ b/util/tests/sym_versions/test_glibc.py
@@ -26,4 +26,4 @@ def iter_binaries():
for p in iter_binaries():
- globals()['test_' + os.path.basename(p).replace('-', '_')] = functools.partial(yc.process.check_glibc_version, p)
+ globals()['test_' + os.path.basename(p).replace('-', '_')] = functools.partial(yc.process.check_glibc_version, p)
diff --git a/util/tests/ya_util_tests.inc b/util/tests/ya_util_tests.inc
index 2789c911f2..57855aee6b 100644
--- a/util/tests/ya_util_tests.inc
+++ b/util/tests/ya_util_tests.inc
@@ -1,4 +1,4 @@
-IF (OS_DARWIN)
- SIZE(LARGE)
- TAG(ya:fat ya:force_sandbox ya:exotic_platform)
-ENDIF()
+IF (OS_DARWIN)
+ SIZE(LARGE)
+ TAG(ya:fat ya:force_sandbox ya:exotic_platform)
+ENDIF()
diff --git a/ydb/core/blobstorage/backpressure/ut/ya.make b/ydb/core/blobstorage/backpressure/ut/ya.make
index 6e43e32cd7..ed318404ab 100644
--- a/ydb/core/blobstorage/backpressure/ut/ya.make
+++ b/ydb/core/blobstorage/backpressure/ut/ya.make
@@ -7,7 +7,7 @@ FORK_SUBTESTS()
IF (WITH_VALGRIND)
TIMEOUT(1800)
SIZE(LARGE)
- TAG(ya:fat)
+ TAG(ya:fat)
ELSE()
TIMEOUT(600)
SIZE(MEDIUM)
diff --git a/ydb/core/blobstorage/base/ut/ya.make b/ydb/core/blobstorage/base/ut/ya.make
index 17606b67f3..3e223c0bfd 100644
--- a/ydb/core/blobstorage/base/ut/ya.make
+++ b/ydb/core/blobstorage/base/ut/ya.make
@@ -6,8 +6,8 @@ FORK_SUBTESTS()
IF (WITH_VALGRIND)
TIMEOUT(1800)
- SIZE(LARGE)
- TAG(ya:fat)
+ SIZE(LARGE)
+ TAG(ya:fat)
ELSE()
TIMEOUT(600)
SIZE(MEDIUM)
diff --git a/ydb/core/blobstorage/dsproxy/ut/ya.make b/ydb/core/blobstorage/dsproxy/ut/ya.make
index 829fc74563..76825e572b 100644
--- a/ydb/core/blobstorage/dsproxy/ut/ya.make
+++ b/ydb/core/blobstorage/dsproxy/ut/ya.make
@@ -12,8 +12,8 @@ OWNER(
IF (SANITIZER_TYPE OR WITH_VALGRIND)
TIMEOUT(3600)
- SIZE(LARGE)
- TAG(ya:fat)
+ SIZE(LARGE)
+ TAG(ya:fat)
ELSE()
TIMEOUT(600)
SIZE(MEDIUM)
diff --git a/ydb/core/blobstorage/groupinfo/ut/ya.make b/ydb/core/blobstorage/groupinfo/ut/ya.make
index 13fa9ed518..78d020c48b 100644
--- a/ydb/core/blobstorage/groupinfo/ut/ya.make
+++ b/ydb/core/blobstorage/groupinfo/ut/ya.make
@@ -6,8 +6,8 @@ FORK_SUBTESTS()
IF (WITH_VALGRIND)
TIMEOUT(1800)
- SIZE(LARGE)
- TAG(ya:fat)
+ SIZE(LARGE)
+ TAG(ya:fat)
ELSE()
TIMEOUT(600)
SIZE(MEDIUM)
diff --git a/ydb/core/blobstorage/incrhuge/incrhuge_keeper_write.cpp b/ydb/core/blobstorage/incrhuge/incrhuge_keeper_write.cpp
index 6b90c8e1c4..ca6cedf5d8 100644
--- a/ydb/core/blobstorage/incrhuge/incrhuge_keeper_write.cpp
+++ b/ydb/core/blobstorage/incrhuge/incrhuge_keeper_write.cpp
@@ -120,7 +120,7 @@ namespace NKikimr {
auto it = WriteQueue.insert(WriteQueue.end(), std::move(item));
// fill in callback
- auto callback = [it](NKikimrProto::EReplyStatus status, IEventBase* /*msg*/, const TActorContext& ctx) {
+ auto callback = [it](NKikimrProto::EReplyStatus status, IEventBase* /*msg*/, const TActorContext& ctx) {
ctx.Send(it->Sender, new TEvIncrHugeWriteResult(status, it->Id, std::move(it->Payload)), 0, it->Cookie);
};
it->Callback = MakeCallback(std::move(callback));
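The callback above captures the freshly inserted queue iterator by value. A standalone sketch of why this is safe for node-based containers (hypothetical TItem; std::list iterators stay valid while other elements are inserted or erased):

// iterator_capture.cpp: sketch of the capture-by-iterator pattern above.
#include <functional>
#include <iostream>
#include <list>

struct TItem {
    int Id;
    std::function<void()> Callback;
};

int main() {
    std::list<TItem> writeQueue;
    auto it = writeQueue.insert(writeQueue.end(), TItem{42, {}});
    // Fill in the callback after insertion, as the code above does;
    // the captured iterator keeps referring to this node.
    it->Callback = [it]() {
        std::cout << "completed item " << it->Id << "\n";
    };
    writeQueue.front().Callback();
    return 0;
}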
diff --git a/ydb/core/blobstorage/ut_vdisk/lib/test_gc.cpp b/ydb/core/blobstorage/ut_vdisk/lib/test_gc.cpp
index 132dda2149..eb602a0b46 100644
--- a/ydb/core/blobstorage/ut_vdisk/lib/test_gc.cpp
+++ b/ydb/core/blobstorage/ut_vdisk/lib/test_gc.cpp
@@ -29,7 +29,7 @@ virtual void Scenario(const TActorContext &ctx) {
// read command
TAutoPtr<IActor> readCmd;
- auto sendFunc = [part0, &instance](const TActorContext &ctx) {
+ auto sendFunc = [part0, &instance](const TActorContext &ctx) {
auto req = TEvBlobStorage::TEvVGet::CreateExtremeDataQuery(instance.VDiskID,
TInstant::Max(),
NKikimrBlobStorage::EGetHandleClass::AsyncRead,
@@ -38,7 +38,7 @@ virtual void Scenario(const TActorContext &ctx) {
{part0});
ctx.Send(instance.ActorID, req.release());
};
- auto checkFunc = [](TEvBlobStorage::TEvVGetResult::TPtr &ev, const TActorContext &ctx) {
+ auto checkFunc = [](TEvBlobStorage::TEvVGetResult::TPtr &ev, const TActorContext &ctx) {
CheckQueryResult(ev, ctx, EQR_OK_NODATA, nullptr);
};
readCmd.Reset(CreateOneGet(SyncRunner->NotifyID(), sendFunc, checkFunc));
@@ -293,7 +293,7 @@ virtual void Scenario(const TActorContext &ctx) {
10);
ctx.Send(instance.ActorID, req.release());
};
- auto checkFunc = [&done](TEvBlobStorage::TEvVGetResult::TPtr &ev, const TActorContext &ctx) {
+ auto checkFunc = [&done](TEvBlobStorage::TEvVGetResult::TPtr &ev, const TActorContext &ctx) {
TEvBlobStorage::TEvVGetResult *msg = ev->Get();
Y_VERIFY(msg->Record.GetStatus() == NKikimrProto::OK);
done = msg->Record.ResultSize() == 0;
@@ -359,7 +359,7 @@ virtual void Scenario(const TActorContext &ctx) {
10);
ctx.Send(instance.ActorID, req.release());
};
- auto checkFunc = [&done](TEvBlobStorage::TEvVGetResult::TPtr &ev, const TActorContext &ctx) {
+ auto checkFunc = [&done](TEvBlobStorage::TEvVGetResult::TPtr &ev, const TActorContext &ctx) {
TEvBlobStorage::TEvVGetResult *msg = ev->Get();
Y_VERIFY(msg->Record.GetStatus() == NKikimrProto::OK);
done = msg->Record.ResultSize() == 0;
diff --git a/ydb/core/blobstorage/vdisk/hulldb/blobstorage_hullgcmap.h b/ydb/core/blobstorage/vdisk/hulldb/blobstorage_hullgcmap.h
index 6b17575b18..5161a503f7 100644
--- a/ydb/core/blobstorage/vdisk/hulldb/blobstorage_hullgcmap.h
+++ b/ydb/core/blobstorage/vdisk/hulldb/blobstorage_hullgcmap.h
@@ -127,7 +127,7 @@ namespace NKikimr {
// subsMerger must return circaLsn
// dbMerger must return max circaLsn for the record with _data_
// we must switch to a special kind of TIndexRecordMerger
- auto newItem = [] (const TIterator &subsIt, const TIndexRecordMerger &subsMerger) {
+ auto newItem = [] (const TIterator &subsIt, const TIndexRecordMerger &subsMerger) {
Y_UNUSED(subsIt);
Y_UNUSED(subsMerger);
};
diff --git a/ydb/core/client/ut/ya.make b/ydb/core/client/ut/ya.make
index d630ef9f16..5d839f47c8 100644
--- a/ydb/core/client/ut/ya.make
+++ b/ydb/core/client/ut/ya.make
@@ -11,12 +11,12 @@ SPLIT_FACTOR(60)
IF (SANITIZER_TYPE == "thread" OR WITH_VALGRIND)
TIMEOUT(3600)
- SIZE(LARGE)
+ SIZE(LARGE)
REQUIREMENTS(
cpu:4
ram:32
)
- TAG(ya:fat)
+ TAG(ya:fat)
ELSE()
REQUIREMENTS(
cpu:4
diff --git a/ydb/core/driver_lib/cli_base/cli_cmds_db.cpp b/ydb/core/driver_lib/cli_base/cli_cmds_db.cpp
index e68460ff1e..450d4135c6 100644
--- a/ydb/core/driver_lib/cli_base/cli_cmds_db.cpp
+++ b/ydb/core/driver_lib/cli_base/cli_cmds_db.cpp
@@ -398,7 +398,7 @@ public:
for(;;) {
MessageBusCall<NMsgBusProxy::TBusSchemeDescribe, NMsgBusProxy::TBusResponse>(config, request,
- [&entries](const NMsgBusProxy::TBusResponse& response) -> int {
+ [&entries](const NMsgBusProxy::TBusResponse& response) -> int {
entries.push_front(response.Record.GetPathDescription());
return 0;
});
@@ -712,7 +712,7 @@ public:
}
modifyAcl.SetDiffACL(diffAcl.SerializeAsString());
int result = MessageBusCall<NMsgBusProxy::TBusSchemeOperation, NMsgBusProxy::TBusResponse>(config, request,
- [](const NMsgBusProxy::TBusResponse& response) -> int {
+ [](const NMsgBusProxy::TBusResponse& response) -> int {
if (response.Record.GetStatus() != NMsgBusProxy::MSTATUS_OK) {
Cerr << ToCString(static_cast<NMsgBusProxy::EResponseStatus>(response.Record.GetStatus())) << " " << response.Record.GetErrorReason() << Endl;
return 1;
@@ -780,7 +780,7 @@ public:
}
modifyAcl.SetDiffACL(diffAcl.SerializeAsString());
int result = MessageBusCall<NMsgBusProxy::TBusSchemeOperation, NMsgBusProxy::TBusResponse>(config, request,
- [](const NMsgBusProxy::TBusResponse& response) -> int {
+ [](const NMsgBusProxy::TBusResponse& response) -> int {
if (response.Record.GetStatus() != NMsgBusProxy::MSTATUS_OK) {
Cerr << ToCString(static_cast<NMsgBusProxy::EResponseStatus>(response.Record.GetStatus())) << " " << response.Record.GetErrorReason() << Endl;
return 1;
diff --git a/ydb/core/driver_lib/cli_utils/cli_cmds_config.cpp b/ydb/core/driver_lib/cli_utils/cli_cmds_config.cpp
index ea9eb5d841..aae54255e9 100644
--- a/ydb/core/driver_lib/cli_utils/cli_cmds_config.cpp
+++ b/ydb/core/driver_lib/cli_utils/cli_cmds_config.cpp
@@ -35,7 +35,7 @@ public:
auto *cmd = request.MutableRequest()->AddCommand();
cmd->MutableProposeStoragePools();
- auto callback = [](const NMsgBusProxy::TBusResponse& response) {
+ auto callback = [](const NMsgBusProxy::TBusResponse& response) {
const auto& record = response.Record;
if (!record.HasBlobStorageConfigResponse()) {
return 1;
@@ -217,7 +217,7 @@ public:
request.MutableRequest()->SetRollback(true);
}
- auto callback = [](const NMsgBusProxy::TBusResponse& response) {
+ auto callback = [](const NMsgBusProxy::TBusResponse& response) {
const auto& record = response.Record;
if (record.HasBlobStorageConfigResponse()) {
TString data;
diff --git a/ydb/core/driver_lib/cli_utils/cli_cmds_get.cpp b/ydb/core/driver_lib/cli_utils/cli_cmds_get.cpp
index fb794ed1a6..f659360087 100644
--- a/ydb/core/driver_lib/cli_utils/cli_cmds_get.cpp
+++ b/ydb/core/driver_lib/cli_utils/cli_cmds_get.cpp
@@ -50,7 +50,7 @@ public:
LogoBlobIDFromLogoBlobID(*ExtremeQuery, record.MutableExtreme());
}
- auto callback = [](const TResponse& response) -> int {
+ auto callback = [](const TResponse& response) -> int {
if (response.Record.HasErrorDescription()) {
Cerr << "error: " << response.Record.GetErrorDescription() << Endl;
return 1;
diff --git a/ydb/core/driver_lib/cli_utils/cli_cmds_node.cpp b/ydb/core/driver_lib/cli_utils/cli_cmds_node.cpp
index 4231ca0b67..11e89d1960 100644
--- a/ydb/core/driver_lib/cli_utils/cli_cmds_node.cpp
+++ b/ydb/core/driver_lib/cli_utils/cli_cmds_node.cpp
@@ -64,7 +64,7 @@ public:
request->Record.SetHost(node);
}
MessageBusCall<NMsgBusProxy::TBusResolveNode, NMsgBusProxy::TBusResponse>(config, request,
- [this](const NMsgBusProxy::TBusResponse& response) -> int {
+ [this](const NMsgBusProxy::TBusResponse& response) -> int {
BusResponse = response.Record;
return 0;
});
diff --git a/ydb/core/engine/mkql_engine_flat.cpp b/ydb/core/engine/mkql_engine_flat.cpp
index 4efaa26340..c7fe388eef 100644
--- a/ydb/core/engine/mkql_engine_flat.cpp
+++ b/ydb/core/engine/mkql_engine_flat.cpp
@@ -1648,18 +1648,18 @@ private:
return lpoFunc;
if (name == Strings.CombineByKeyMerge) {
- return TCallableVisitFunc([](TCallable& callable, const TTypeEnvironment& env) {
+ return TCallableVisitFunc([](TCallable& callable, const TTypeEnvironment& env) {
Y_UNUSED(env);
return callable.GetInput(0);
});
} else
if (name == Strings.PartialSort) {
- return TCallableVisitFunc([](TCallable& callable, const TTypeEnvironment& env) {
+ return TCallableVisitFunc([](TCallable& callable, const TTypeEnvironment& env) {
return RenameCallable(callable, "Sort", env);
});
} else
if (name == Strings.PartialTake) {
- return TCallableVisitFunc([](TCallable& callable, const TTypeEnvironment& env) {
+ return TCallableVisitFunc([](TCallable& callable, const TTypeEnvironment& env) {
return RenameCallable(callable, "Take", env);
});
}
@@ -1835,7 +1835,7 @@ private:
firstPass, Env, false, wereChanges);
ProxyProgramExplorer.Walk(ProxyProgram.GetNode(), Env, {});
- auto getCallableForPushdown = [&pureCallables, &callableConsumers] (TRuntimeNode node,
+ auto getCallableForPushdown = [&pureCallables, &callableConsumers] (TRuntimeNode node,
TInternName name) -> TCallable*
{
if (!node.GetNode()->GetType()->IsCallable()) {
diff --git a/ydb/core/erasure/ut/ya.make b/ydb/core/erasure/ut/ya.make
index 4c16561cce..549ac7f896 100644
--- a/ydb/core/erasure/ut/ya.make
+++ b/ydb/core/erasure/ut/ya.make
@@ -5,8 +5,8 @@ SPLIT_FACTOR(30)
IF (WITH_VALGRIND)
TIMEOUT(1800)
- SIZE(LARGE)
- TAG(ya:fat)
+ SIZE(LARGE)
+ TAG(ya:fat)
ELSE()
TIMEOUT(600)
SIZE(MEDIUM)
diff --git a/ydb/core/keyvalue/ut/ya.make b/ydb/core/keyvalue/ut/ya.make
index 83b9279d7e..7124f2ccf8 100644
--- a/ydb/core/keyvalue/ut/ya.make
+++ b/ydb/core/keyvalue/ut/ya.make
@@ -9,9 +9,9 @@ FORK_SUBTESTS()
IF (WITH_VALGRIND OR SANITIZER_TYPE)
TIMEOUT(1800)
- SIZE(LARGE)
+ SIZE(LARGE)
SPLIT_FACTOR(20)
- TAG(ya:fat)
+ TAG(ya:fat)
ELSE()
TIMEOUT(600)
SIZE(MEDIUM)
diff --git a/ydb/core/kqp/provider/ut/ya.make b/ydb/core/kqp/provider/ut/ya.make
index 8a3255bc41..b8762ed422 100644
--- a/ydb/core/kqp/provider/ut/ya.make
+++ b/ydb/core/kqp/provider/ut/ya.make
@@ -17,8 +17,8 @@ FORK_SUBTESTS()
IF (SANITIZER_TYPE OR WITH_VALGRIND)
TIMEOUT(1800)
- SIZE(LARGE)
- TAG(ya:fat)
+ SIZE(LARGE)
+ TAG(ya:fat)
ELSE()
TIMEOUT(600)
SIZE(MEDIUM)
diff --git a/ydb/core/mind/ut/ya.make b/ydb/core/mind/ut/ya.make
index 703370c66f..26e6412fc7 100644
--- a/ydb/core/mind/ut/ya.make
+++ b/ydb/core/mind/ut/ya.make
@@ -10,8 +10,8 @@ FORK_SUBTESTS()
IF (SANITIZER_TYPE OR WITH_VALGRIND)
TIMEOUT(3600)
- SIZE(LARGE)
- TAG(ya:fat)
+ SIZE(LARGE)
+ TAG(ya:fat)
SPLIT_FACTOR(80)
REQUIREMENTS(
cpu:4
diff --git a/ydb/core/tablet_flat/test/libs/rows/layout.h b/ydb/core/tablet_flat/test/libs/rows/layout.h
index e2432a6201..36e64feac0 100644
--- a/ydb/core/tablet_flat/test/libs/rows/layout.h
+++ b/ydb/core/tablet_flat/test/libs/rows/layout.h
@@ -36,7 +36,7 @@ namespace NTest{
TPos keyOrder = 0;
for (auto tag: keys) {
- auto pred = [tag](const TColumn &col) {
+ auto pred = [tag](const TColumn &col) {
return tag == col.Id;
};
diff --git a/ydb/core/testlib/test_client.cpp b/ydb/core/testlib/test_client.cpp
index c1047ead3c..d4907c26f1 100644
--- a/ydb/core/testlib/test_client.cpp
+++ b/ydb/core/testlib/test_client.cpp
@@ -2267,10 +2267,10 @@ namespace Tests {
}
TServerSetup GetServerSetup() {
- if (!IsServerRedirected()) {
+ if (!IsServerRedirected()) {
return TServerSetup("localhost", 0);
- }
-
+ }
+
TStringBuf str(GetEnv(ServerRedirectEnvVar));
TStringBuf address;
TStringBuf port;
diff --git a/ydb/core/tx/schemeshard/ut_bsvolume/ya.make b/ydb/core/tx/schemeshard/ut_bsvolume/ya.make
index 601d9c8c90..005d942ac4 100644
--- a/ydb/core/tx/schemeshard/ut_bsvolume/ya.make
+++ b/ydb/core/tx/schemeshard/ut_bsvolume/ya.make
@@ -7,7 +7,7 @@ FORK_SUBTESTS()
IF (WITH_VALGRIND)
TIMEOUT(3600)
SIZE(LARGE)
- TAG(ya:fat)
+ TAG(ya:fat)
ELSE()
TIMEOUT(600)
SIZE(MEDIUM)
diff --git a/ydb/core/tx/schemeshard/ut_pq/ya.make b/ydb/core/tx/schemeshard/ut_pq/ya.make
index b1863a093b..fe304ef882 100644
--- a/ydb/core/tx/schemeshard/ut_pq/ya.make
+++ b/ydb/core/tx/schemeshard/ut_pq/ya.make
@@ -12,7 +12,7 @@ SPLIT_FACTOR(10)
IF (SANITIZER_TYPE OR WITH_VALGRIND)
TIMEOUT(3600)
SIZE(LARGE)
- TAG(ya:fat)
+ TAG(ya:fat)
ELSE()
TIMEOUT(600)
SIZE(MEDIUM)
diff --git a/ydb/core/tx/schemeshard/ut_rtmr/ya.make b/ydb/core/tx/schemeshard/ut_rtmr/ya.make
index 6440c26ea7..43ff241e87 100644
--- a/ydb/core/tx/schemeshard/ut_rtmr/ya.make
+++ b/ydb/core/tx/schemeshard/ut_rtmr/ya.make
@@ -10,7 +10,7 @@ FORK_SUBTESTS()
IF (WITH_VALGRIND)
TIMEOUT(3600)
SIZE(LARGE)
- TAG(ya:fat)
+ TAG(ya:fat)
ELSE()
TIMEOUT(600)
SIZE(MEDIUM)
diff --git a/ydb/core/tx/schemeshard/ut_subdomain/ya.make b/ydb/core/tx/schemeshard/ut_subdomain/ya.make
index 1bd2f62f8e..aaa80b7090 100644
--- a/ydb/core/tx/schemeshard/ut_subdomain/ya.make
+++ b/ydb/core/tx/schemeshard/ut_subdomain/ya.make
@@ -12,7 +12,7 @@ SPLIT_FACTOR(60)
IF (SANITIZER_TYPE OR WITH_VALGRIND)
TIMEOUT(3600)
SIZE(LARGE)
- TAG(ya:fat)
+ TAG(ya:fat)
ELSE()
TIMEOUT(600)
SIZE(MEDIUM)
diff --git a/ydb/core/tx/tx_proxy/ut_base_tenant/ya.make b/ydb/core/tx/tx_proxy/ut_base_tenant/ya.make
index 7b9f0aaed0..a07ffd818a 100644
--- a/ydb/core/tx/tx_proxy/ut_base_tenant/ya.make
+++ b/ydb/core/tx/tx_proxy/ut_base_tenant/ya.make
@@ -10,7 +10,7 @@ FORK_SUBTESTS()
IF (WITH_VALGRIND)
TIMEOUT(3600)
SIZE(LARGE)
- TAG(ya:fat)
+ TAG(ya:fat)
ELSE()
TIMEOUT(600)
SIZE(MEDIUM)
diff --git a/ydb/core/tx/tx_proxy/ut_storage_tenant/ya.make b/ydb/core/tx/tx_proxy/ut_storage_tenant/ya.make
index a22950a07b..3192b7f7d2 100644
--- a/ydb/core/tx/tx_proxy/ut_storage_tenant/ya.make
+++ b/ydb/core/tx/tx_proxy/ut_storage_tenant/ya.make
@@ -10,7 +10,7 @@ FORK_SUBTESTS()
IF (WITH_VALGRIND)
TIMEOUT(3600)
SIZE(LARGE)
- TAG(ya:fat)
+ TAG(ya:fat)
ELSE()
TIMEOUT(600)
SIZE(MEDIUM)
@@ -34,6 +34,6 @@ SRCS(
proxy_ut_helpers.cpp
)
-REQUIREMENTS(ram:32)
+REQUIREMENTS(ram:32)
END()
diff --git a/ydb/library/yql/core/yql_graph_transformer.cpp b/ydb/library/yql/core/yql_graph_transformer.cpp
index 5193de15c6..26193c4c40 100644
--- a/ydb/library/yql/core/yql_graph_transformer.cpp
+++ b/ydb/library/yql/core/yql_graph_transformer.cpp
@@ -380,7 +380,7 @@ void AsyncTransform(IGraphTransformer& transformer, TExprNode::TPtr& root, TExpr
std::function<void(const IGraphTransformer::TStatus&)> asyncCallback) {
NThreading::TFuture<IGraphTransformer::TStatus> status = AsyncTransform(transformer, root, ctx, applyAsyncChanges);
status.Subscribe(
- [asyncCallback](const NThreading::TFuture<IGraphTransformer::TStatus>& status) mutable -> void {
+ [asyncCallback](const NThreading::TFuture<IGraphTransformer::TStatus>& status) mutable -> void {
YQL_ENSURE(!status.HasException());
asyncCallback(status.GetValue());
});
diff --git a/ydb/library/yql/minikql/benchmark/pack_num/metrics/ya.make b/ydb/library/yql/minikql/benchmark/pack_num/metrics/ya.make
index 09667d68e2..203af2de49 100644
--- a/ydb/library/yql/minikql/benchmark/pack_num/metrics/ya.make
+++ b/ydb/library/yql/minikql/benchmark/pack_num/metrics/ya.make
@@ -6,12 +6,12 @@ OWNER(
PY2TEST()
-SIZE(LARGE)
+SIZE(LARGE)
TAG(
ya:force_sandbox
sb:intel_e5_2660v1
- ya:fat
+ ya:fat
)
TEST_SRCS(
diff --git a/ydb/library/yql/minikql/comp_nodes/ut/ya.make b/ydb/library/yql/minikql/comp_nodes/ut/ya.make
index 6e67702a03..dffda1318f 100644
--- a/ydb/library/yql/minikql/comp_nodes/ut/ya.make
+++ b/ydb/library/yql/minikql/comp_nodes/ut/ya.make
@@ -6,8 +6,8 @@ SPLIT_FACTOR(60)
IF (SANITIZER_TYPE OR WITH_VALGRIND)
TIMEOUT(3600)
- SIZE(LARGE)
- TAG(ya:fat)
+ SIZE(LARGE)
+ TAG(ya:fat)
ELSE()
TIMEOUT(600)
SIZE(MEDIUM)
diff --git a/ydb/library/yql/minikql/computation/ut/ya.make b/ydb/library/yql/minikql/computation/ut/ya.make
index 88147548e9..ab27689c7b 100644
--- a/ydb/library/yql/minikql/computation/ut/ya.make
+++ b/ydb/library/yql/minikql/computation/ut/ya.make
@@ -4,8 +4,8 @@ FORK_SUBTESTS()
IF (SANITIZER_TYPE OR WITH_VALGRIND)
TIMEOUT(1800)
- SIZE(LARGE)
- TAG(ya:fat)
+ SIZE(LARGE)
+ TAG(ya:fat)
ELSE()
TIMEOUT(600)
SIZE(MEDIUM)
diff --git a/ydb/library/yql/minikql/invoke_builtins/ut/ya.make b/ydb/library/yql/minikql/invoke_builtins/ut/ya.make
index 00de0503e7..ca5f9974c3 100644
--- a/ydb/library/yql/minikql/invoke_builtins/ut/ya.make
+++ b/ydb/library/yql/minikql/invoke_builtins/ut/ya.make
@@ -4,8 +4,8 @@ FORK_SUBTESTS()
IF (SANITIZER_TYPE OR WITH_VALGRIND)
TIMEOUT(1800)
- SIZE(LARGE)
- TAG(ya:fat)
+ SIZE(LARGE)
+ TAG(ya:fat)
ELSE()
TIMEOUT(600)
SIZE(MEDIUM)
diff --git a/ydb/library/yql/minikql/ut/ya.make b/ydb/library/yql/minikql/ut/ya.make
index d6c724b3e7..098a77d0c6 100644
--- a/ydb/library/yql/minikql/ut/ya.make
+++ b/ydb/library/yql/minikql/ut/ya.make
@@ -4,8 +4,8 @@ FORK_SUBTESTS()
IF (SANITIZER_TYPE OR WITH_VALGRIND)
TIMEOUT(1800)
- SIZE(LARGE)
- TAG(ya:fat)
+ SIZE(LARGE)
+ TAG(ya:fat)
ELSE()
TIMEOUT(600)
SIZE(MEDIUM)
diff --git a/ydb/public/sdk/cpp/client/ydb_params/ut/ya.make b/ydb/public/sdk/cpp/client/ydb_params/ut/ya.make
index 1755edcdf2..cd5ee478c9 100644
--- a/ydb/public/sdk/cpp/client/ydb_params/ut/ya.make
+++ b/ydb/public/sdk/cpp/client/ydb_params/ut/ya.make
@@ -8,7 +8,7 @@ OWNER(
IF (SANITIZER_TYPE)
TIMEOUT(1200)
SIZE(LARGE)
- TAG(ya:fat)
+ TAG(ya:fat)
ELSE()
TIMEOUT(600)
SIZE(MEDIUM)
diff --git a/ydb/services/persqueue_v1/ut/ya.make b/ydb/services/persqueue_v1/ut/ya.make
index b83e7d4453..70c088383d 100644
--- a/ydb/services/persqueue_v1/ut/ya.make
+++ b/ydb/services/persqueue_v1/ut/ya.make
@@ -15,7 +15,7 @@ FORK_SUBTESTS()
IF (SANITIZER_TYPE OR WITH_VALGRIND)
TIMEOUT(1800)
SIZE(LARGE)
- TAG(ya:fat)
+ TAG(ya:fat)
REQUIREMENTS(ram:32)
ELSE()
TIMEOUT(600)
diff --git a/ydb/tests/functional/blobstorage/ya.make b/ydb/tests/functional/blobstorage/ya.make
index d0c9345d4e..3248849d57 100644
--- a/ydb/tests/functional/blobstorage/ya.make
+++ b/ydb/tests/functional/blobstorage/ya.make
@@ -16,7 +16,7 @@ IF (SANITIZER_TYPE)
ram:16
)
TIMEOUT(1800)
- SIZE(LARGE)
+ SIZE(LARGE)
TAG(ya:fat)
ELSE()
REQUIREMENTS(