| author | robot-piglet <robot-piglet@yandex-team.com> | 2024-07-19 13:30:18 +0300 |
|---|---|---|
| committer | robot-piglet <robot-piglet@yandex-team.com> | 2024-07-19 13:52:55 +0300 |
| commit | ccfb768b9bf9dff2336c9b0931b4e35c1bfb18df (patch) | |
| tree | 6e40750714479264189fadf3a81b199ecedf05ed | |
| parent | cc6fbb0ddda7fdd63033840536eea785946b40d9 (diff) | |
| download | ydb-ccfb768b9bf9dff2336c9b0931b4e35c1bfb18df.tar.gz | |
Intermediate changes
13 files changed, 93 insertions, 125 deletions
diff --git a/contrib/python/Flask/py3/flask/scaffold.py b/contrib/python/Flask/py3/flask/scaffold.py
index a58941c01d..0171239c63 100644
--- a/contrib/python/Flask/py3/flask/scaffold.py
+++ b/contrib/python/Flask/py3/flask/scaffold.py
@@ -10,7 +10,7 @@
 from functools import update_wrapper
 from json import JSONDecoder
 from json import JSONEncoder
 
-from jinja2 import ChoiceLoader, FileSystemLoader, ResourceLoader
+from jinja2 import ChoiceLoader, FileSystemLoader, ResourceLoader, PackageLoader
 from werkzeug.exceptions import default_exceptions
 from werkzeug.exceptions import HTTPException
@@ -359,6 +359,7 @@ class Scaffold:
         if self.template_folder is not None:
             return ChoiceLoader([
                 FileSystemLoader(os.path.join(self.root_path, self.template_folder)),
+                PackageLoader(self.import_name, self.template_folder),
                 ResourceLoader(os.path.join(self._builtin_resource_prefix, self.template_folder), self.module_loader),
             ])
         else:
diff --git a/contrib/python/hypothesis/py3/.dist-info/METADATA b/contrib/python/hypothesis/py3/.dist-info/METADATA
index 310e5a5007..8ec8c1803d 100644
--- a/contrib/python/hypothesis/py3/.dist-info/METADATA
+++ b/contrib/python/hypothesis/py3/.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: hypothesis
-Version: 6.104.2
+Version: 6.105.0
 Summary: A library for property-based testing
 Home-page: https://hypothesis.works
 Author: David R. MacIver and Zac Hatfield-Dodds
@@ -41,10 +41,10 @@ Requires-Dist: exceptiongroup >=1.0.0 ; python_version < "3.11"
 Provides-Extra: all
 Requires-Dist: black >=19.10b0 ; extra == 'all'
 Requires-Dist: click >=7.0 ; extra == 'all'
-Requires-Dist: crosshair-tool >=0.0.55 ; extra == 'all'
+Requires-Dist: crosshair-tool >=0.0.58 ; extra == 'all'
 Requires-Dist: django >=3.2 ; extra == 'all'
 Requires-Dist: dpcontracts >=0.4 ; extra == 'all'
-Requires-Dist: hypothesis-crosshair >=0.0.4 ; extra == 'all'
+Requires-Dist: hypothesis-crosshair >=0.0.6 ; extra == 'all'
 Requires-Dist: lark >=0.10.1 ; extra == 'all'
 Requires-Dist: libcst >=0.3.16 ; extra == 'all'
 Requires-Dist: numpy >=1.17.3 ; extra == 'all'
@@ -63,8 +63,8 @@ Requires-Dist: rich >=9.0.0 ; extra == 'cli'
 Provides-Extra: codemods
 Requires-Dist: libcst >=0.3.16 ; extra == 'codemods'
 Provides-Extra: crosshair
-Requires-Dist: hypothesis-crosshair >=0.0.4 ; extra == 'crosshair'
-Requires-Dist: crosshair-tool >=0.0.55 ; extra == 'crosshair'
+Requires-Dist: hypothesis-crosshair >=0.0.6 ; extra == 'crosshair'
+Requires-Dist: crosshair-tool >=0.0.58 ; extra == 'crosshair'
 Provides-Extra: dateutil
 Requires-Dist: python-dateutil >=1.4 ; extra == 'dateutil'
 Provides-Extra: django
diff --git a/contrib/python/hypothesis/py3/hypothesis/core.py b/contrib/python/hypothesis/py3/hypothesis/core.py
index c2767421cb..51710a2e1f 100644
--- a/contrib/python/hypothesis/py3/hypothesis/core.py
+++ b/contrib/python/hypothesis/py3/hypothesis/core.py
@@ -18,6 +18,7 @@ import io
 import math
 import sys
 import time
+import traceback
 import types
 import unittest
 import warnings
@@ -60,6 +61,7 @@ from hypothesis.errors import (
     Flaky,
     Found,
     HypothesisDeprecationWarning,
+    HypothesisException,
     HypothesisWarning,
     InvalidArgument,
     NoSuchExample,
@@ -86,9 +88,9 @@ from hypothesis.internal.entropy import deterministic_PRNG
 from hypothesis.internal.escalation import (
     InterestingOrigin,
     current_pytest_item,
-    escalate_hypothesis_internal_error,
     format_exception,
     get_trimmed_traceback,
+    is_hypothesis_file,
 )
 from hypothesis.internal.healthcheck import fail_health_check
 from hypothesis.internal.observability import (
@@ -1071,7 +1073,11 @@ class StateForActualGivenExecution:
         except failure_exceptions_to_catch() as e:
             # If the error was raised by Hypothesis-internal code, re-raise it
             # as a fatal error instead of treating it as a test failure.
-            escalate_hypothesis_internal_error()
+            filepath = traceback.extract_tb(e.__traceback__)[-1][0]
+            if is_hypothesis_file(filepath) and not isinstance(
+                e, (HypothesisException, StopTest, UnsatisfiedAssumption)
+            ):
+                raise
 
             if data.frozen:
                 # This can happen if an error occurred in a finally
diff --git a/contrib/python/hypothesis/py3/hypothesis/errors.py b/contrib/python/hypothesis/py3/hypothesis/errors.py
index 0d376a7493..0b2c297084 100644
--- a/contrib/python/hypothesis/py3/hypothesis/errors.py
+++ b/contrib/python/hypothesis/py3/hypothesis/errors.py
@@ -175,11 +175,9 @@ class DidNotReproduce(HypothesisException):
     pass
 
 
-class Found(Exception):
+class Found(HypothesisException):
     """Signal that the example matches condition. Internal use only."""
 
-    hypothesis_internal_never_escalate = True
-
 
 class RewindRecursive(Exception):
     """Signal that the type inference should be rewound due to recursive types. Internal use only."""
diff --git a/contrib/python/hypothesis/py3/hypothesis/extra/django/_fields.py b/contrib/python/hypothesis/py3/hypothesis/extra/django/_fields.py
index 181c8869f9..29f6dcf00a 100644
--- a/contrib/python/hypothesis/py3/hypothesis/extra/django/_fields.py
+++ b/contrib/python/hypothesis/py3/hypothesis/extra/django/_fields.py
@@ -57,7 +57,9 @@ def integers_for_field(min_value, max_value):
 def timezones():
     # From Django 4.0, the default is to use zoneinfo instead of pytz.
     assert getattr(django.conf.settings, "USE_TZ", False)
-    if getattr(django.conf.settings, "USE_DEPRECATED_PYTZ", True):
+    if django.VERSION < (5, 0, 0) and getattr(
+        django.conf.settings, "USE_DEPRECATED_PYTZ", True
+    ):
         from hypothesis.extra.pytz import timezones
     else:
         from hypothesis.strategies import timezones
diff --git a/contrib/python/hypothesis/py3/hypothesis/extra/django/_impl.py b/contrib/python/hypothesis/py3/hypothesis/extra/django/_impl.py
index 5a7ab8f0e3..d4bcefb0c1 100644
--- a/contrib/python/hypothesis/py3/hypothesis/extra/django/_impl.py
+++ b/contrib/python/hypothesis/py3/hypothesis/extra/django/_impl.py
@@ -105,6 +105,7 @@ def from_model(
             name not in field_strategies
             and not field.auto_created
             and not isinstance(field, dm.AutoField)
+            and not isinstance(field, getattr(dm, "GeneratedField", ()))
             and field.default is dm.fields.NOT_PROVIDED
         ):
             field_strategies[name] = from_field(field)
diff --git a/contrib/python/hypothesis/py3/hypothesis/internal/cache.py b/contrib/python/hypothesis/py3/hypothesis/internal/cache.py
index eae61a2578..49c1956867 100644
--- a/contrib/python/hypothesis/py3/hypothesis/internal/cache.py
+++ b/contrib/python/hypothesis/py3/hypothesis/internal/cache.py
@@ -12,6 +12,8 @@ import threading
 
 import attr
 
+from hypothesis.errors import InvalidArgument
+
 
 @attr.s(slots=True)
 class Entry:
@@ -37,7 +39,8 @@ class GenericCache:
     Defines a dict-like mapping with a maximum size, where as well as mapping
     to a value, each key also maps to a score. When a write would cause the
     dict to exceed its maximum size, it first evicts the existing key with
-    the smallest score, then adds the new key to the map.
+    the smallest score, then adds the new key to the map. If due to pinning
+    no key can be evicted, ValueError is raised.
 
     A key has the following lifecycle:
 
@@ -45,7 +48,7 @@ class GenericCache:
        self.new_entry(key, value)
     2. whenever an existing key is read or written, self.on_access(key, value,
        score) is called. This returns a new score for the key.
-    3. When a key is evicted, self.on_evict(key, value, score) is called.
+    3. After a key is evicted, self.on_evict(key, value, score) is called.
 
     The cache will be in a valid state in all of these cases.
 
@@ -56,6 +59,9 @@ class GenericCache:
     __slots__ = ("max_size", "_threadlocal")
 
     def __init__(self, max_size):
+        if max_size <= 0:
+            raise InvalidArgument("Cache size must be at least one.")
+
         self.max_size = max_size
 
         # Implementation: We store a binary heap of Entry objects in self.data,
@@ -81,14 +87,6 @@ class GenericCache:
             self._threadlocal.data = []
         return self._threadlocal.data
 
-    @property
-    def __pinned_entry_count(self):
-        return getattr(self._threadlocal, "_pinned_entry_count", 0)
-
-    @__pinned_entry_count.setter
-    def __pinned_entry_count(self, value):
-        self._threadlocal._pinned_entry_count = value
-
     def __len__(self):
         assert len(self.keys_to_indices) == len(self.data)
         return len(self.data)
@@ -99,25 +97,21 @@ class GenericCache:
     def __getitem__(self, key):
         i = self.keys_to_indices[key]
         result = self.data[i]
-        self.on_access(result.key, result.value, result.score)
-        self.__balance(i)
+        self.__entry_was_accessed(i)
         return result.value
 
     def __setitem__(self, key, value):
-        if self.max_size == 0:
-            return
         evicted = None
         try:
             i = self.keys_to_indices[key]
         except KeyError:
-            if self.max_size == self.__pinned_entry_count:
-                raise ValueError(
-                    "Cannot increase size of cache where all keys have been pinned."
-                ) from None
             entry = Entry(key, value, self.new_entry(key, value))
             if len(self.data) >= self.max_size:
                 evicted = self.data[0]
-                assert evicted.pins == 0
+                if evicted.pins > 0:
+                    raise ValueError(
+                        "Cannot increase size of cache where all keys have been pinned."
+                    ) from None
                 del self.keys_to_indices[evicted.key]
                 i = 0
                 self.data[0] = entry
@@ -125,45 +119,44 @@ class GenericCache:
                 i = len(self.data)
                 self.data.append(entry)
             self.keys_to_indices[key] = i
+            self.__balance(i)
         else:
             entry = self.data[i]
             assert entry.key == key
             entry.value = value
-            entry.score = self.on_access(entry.key, entry.value, entry.score)
-
-        self.__balance(i)
+            self.__entry_was_accessed(i)
 
         if evicted is not None:
             if self.data[0] is not entry:
-                assert evicted.score <= self.data[0].score
+                assert evicted.sort_key <= self.data[0].sort_key
             self.on_evict(evicted.key, evicted.value, evicted.score)
 
     def __iter__(self):
         return iter(self.keys_to_indices)
 
-    def pin(self, key):
-        """Mark ``key`` as pinned. That is, it may not be evicted until
-        ``unpin(key)`` has been called. The same key may be pinned multiple
-        times and will not be unpinned until the same number of calls to
-        unpin have been made."""
+    def pin(self, key, value):
+        """Mark ``key`` as pinned (with the given value). That is, it may not
+        be evicted until ``unpin(key)`` has been called. The same key may be
+        pinned multiple times, possibly changing its value, and will not be
+        unpinned until the same number of calls to unpin have been made.
+        """
+        self[key] = value
+
         i = self.keys_to_indices[key]
         entry = self.data[i]
         entry.pins += 1
         if entry.pins == 1:
-            self.__pinned_entry_count += 1
-            assert self.__pinned_entry_count <= self.max_size
             self.__balance(i)
 
     def unpin(self, key):
-        """Undo one previous call to ``pin(key)``. Once all calls are
-        undone this key may be evicted as normal."""
+        """Undo one previous call to ``pin(key)``. The value stays the same.
+        Once all calls are undone this key may be evicted as normal."""
        i = self.keys_to_indices[key]
         entry = self.data[i]
         if entry.pins == 0:
             raise ValueError(f"Key {key!r} has not been pinned")
         entry.pins -= 1
         if entry.pins == 0:
-            self.__pinned_entry_count -= 1
             self.__balance(i)
 
     def is_pinned(self, key):
@@ -172,10 +165,9 @@ class GenericCache:
         return self.data[i].pins > 0
 
     def clear(self):
-        """Remove all keys, clearing their pinned status."""
+        """Remove all keys, regardless of their pinned status."""
         del self.data[:]
         self.keys_to_indices.clear()
-        self.__pinned_entry_count = 0
 
     def __repr__(self):
         return "{" + ", ".join(f"{e.key!r}: {e.value!r}" for e in self.data) + "}"
@@ -206,11 +198,22 @@ class GenericCache:
         Asserts that all of the cache's invariants hold. When everything
         is working correctly this should be an expensive no-op.
         """
+        assert len(self.keys_to_indices) == len(self.data)
         for i, e in enumerate(self.data):
             assert self.keys_to_indices[e.key] == i
             for j in [i * 2 + 1, i * 2 + 2]:
                 if j < len(self.data):
-                    assert e.score <= self.data[j].score, self.data
+                    assert e.sort_key <= self.data[j].sort_key, self.data
+
+    def __entry_was_accessed(self, i):
+        entry = self.data[i]
+        new_score = self.on_access(entry.key, entry.value, entry.score)
+        if new_score != entry.score:
+            entry.score = new_score
+            # changing the score of a pinned entry cannot unbalance the heap, as
+            # we place all pinned entries after unpinned ones, regardless of score.
+            if entry.pins == 0:
+                self.__balance(i)
 
     def __swap(self, i, j):
         assert i < j
@@ -220,28 +223,23 @@ class GenericCache:
         self.keys_to_indices[self.data[j].key] = j
 
     def __balance(self, i):
-        """When we have made a modification to the heap such that means that
+        """When we have made a modification to the heap such that
         the heap property has been violated locally around i but previously
         held for all other indexes (and no other values have been modified),
         this fixes the heap so that the heap property holds everywhere."""
-        while i > 0:
-            parent = (i - 1) // 2
+        # bubble up (if score is too low for current position)
+        while (parent := (i - 1) // 2) >= 0:
             if self.__out_of_order(parent, i):
                 self.__swap(parent, i)
                 i = parent
             else:
-                # This branch is never taken on versions of Python where dicts
-                # preserve their insertion order (pypy or cpython >= 3.7)
-                break  # pragma: no cover
-        while True:
-            children = [j for j in (2 * i + 1, 2 * i + 2) if j < len(self.data)]
-            if len(children) == 2:
-                children.sort(key=lambda j: self.data[j].score)
-            for j in children:
-                if self.__out_of_order(i, j):
-                    self.__swap(i, j)
-                    i = j
-                    break
+                break
+        # or bubble down (if score is too high for current position)
+        while children := [j for j in (2 * i + 1, 2 * i + 2) if j < len(self.data)]:
+            smallest_child = min(children, key=lambda j: self.data[j].sort_key)
+            if self.__out_of_order(i, smallest_child):
+                self.__swap(i, smallest_child)
+                i = smallest_child
             else:
                 break
 
@@ -258,10 +256,10 @@ class LRUReusedCache(GenericCache):
     """The only concrete implementation of GenericCache we use outside of
    tests currently.
 
-    Adopts a modified least-frequently used eviction policy: It evicts the key
+    Adopts a modified least-recently used eviction policy: It evicts the key
     that has been used least recently, but it will always preferentially evict
-    keys that have only ever been accessed once. Among keys that have been
-    accessed more than once, it ignores the number of accesses.
+    keys that have never been accessed after insertion. Among keys that have been
+    accessed, it ignores the number of accesses.
 
     This retains most of the benefits of an LRU cache, but adds an element of
     scan-resistance to the process: If we end up scanning through a large
@@ -280,22 +278,7 @@ class LRUReusedCache(GenericCache):
         return self.__tick
 
     def new_entry(self, key, value):
-        return [1, self.tick()]
+        return (1, self.tick())
 
     def on_access(self, key, value, score):
-        score[0] = 2
-        score[1] = self.tick()
-        return score
-
-    def pin(self, key):
-        try:
-            super().pin(key)
-        except KeyError:
-            # The whole point of an LRU cache is that it might drop things for you
-            assert key not in self.keys_to_indices
-
-    def unpin(self, key):
-        try:
-            super().unpin(key)
-        except KeyError:
-            assert key not in self.keys_to_indices
+        return (2, self.tick())
diff --git a/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/data.py b/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/data.py
index 7d1e010a6e..960a61cc98 100644
--- a/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/data.py
+++ b/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/data.py
@@ -1072,10 +1072,8 @@ def ir_value_permitted(value, ir_type, kwargs):
         if max_value is not None and value > max_value:
             return False
 
-        if (max_value is None or min_value is None) and (
-            value - shrink_towards
-        ).bit_length() >= 128:
-            return False
+        if max_value is None or min_value is None:
+            return (value - shrink_towards).bit_length() < 128
 
         return True
     elif ir_type == "float":
diff --git a/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/engine.py b/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/engine.py
index eb326f59a5..b9914bd0dd 100644
--- a/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/engine.py
+++ b/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/engine.py
@@ -552,7 +552,7 @@ class ConjectureRunner:
         if changed:
             self.save_buffer(data.buffer)
             self.interesting_examples[key] = data.as_result()  # type: ignore
-            self.__data_cache.pin(data.buffer)
+            self.__data_cache.pin(data.buffer, data.as_result())
             self.shrunk_examples.discard(key)
 
         if self.shrinks >= MAX_SHRINKS:
@@ -899,7 +899,9 @@ class ConjectureRunner:
         zero_data = self.cached_test_function(bytes(BUFFER_SIZE))
         if zero_data.status > Status.OVERRUN:
             assert isinstance(zero_data, ConjectureResult)
-            self.__data_cache.pin(zero_data.buffer)
+            self.__data_cache.pin(
+                zero_data.buffer, zero_data.as_result()
+            )  # Pin forever
 
         if zero_data.status == Status.OVERRUN or (
             zero_data.status == Status.VALID
diff --git a/contrib/python/hypothesis/py3/hypothesis/internal/escalation.py b/contrib/python/hypothesis/py3/hypothesis/internal/escalation.py
index b85d9fcdc9..9c242ba0c2 100644
--- a/contrib/python/hypothesis/py3/hypothesis/internal/escalation.py
+++ b/contrib/python/hypothesis/py3/hypothesis/internal/escalation.py
@@ -18,13 +18,7 @@ from pathlib import Path
 from typing import Dict, NamedTuple, Optional, Type
 
 import hypothesis
-from hypothesis.errors import (
-    DeadlineExceeded,
-    HypothesisException,
-    StopTest,
-    UnsatisfiedAssumption,
-    _Trimmable,
-)
+from hypothesis.errors import _Trimmable
 from hypothesis.internal.compat import BaseExceptionGroup
 from hypothesis.utils.dynamicvariables import DynamicVariable
 
@@ -54,31 +48,11 @@ def belongs_to(package):
     return accept
 
 
-PREVENT_ESCALATION = os.getenv("HYPOTHESIS_DO_NOT_ESCALATE") == "true"
-
 FILE_CACHE: Dict[bytes, bool] = {}
 
 
 is_hypothesis_file = belongs_to(hypothesis)
 
-HYPOTHESIS_CONTROL_EXCEPTIONS = (DeadlineExceeded, StopTest, UnsatisfiedAssumption)
-
-
-def escalate_hypothesis_internal_error():
-    if PREVENT_ESCALATION:
-        return
-
-    _, e, tb = sys.exc_info()
-
-    if getattr(e, "hypothesis_internal_never_escalate", False):
-        return
-
-    filepath = None if tb is None else traceback.extract_tb(tb)[-1][0]
-    if is_hypothesis_file(filepath) and not isinstance(
-        e, (HypothesisException, *HYPOTHESIS_CONTROL_EXCEPTIONS)
-    ):
-        raise
-
 
 def get_trimmed_traceback(exception=None):
     """Return the current traceback, minus any frames added by Hypothesis."""
diff --git a/contrib/python/hypothesis/py3/hypothesis/strategies/_internal/types.py b/contrib/python/hypothesis/py3/hypothesis/strategies/_internal/types.py
index 11e6aa381b..8753bfb784 100644
--- a/contrib/python/hypothesis/py3/hypothesis/strategies/_internal/types.py
+++ b/contrib/python/hypothesis/py3/hypothesis/strategies/_internal/types.py
@@ -514,8 +514,9 @@ def from_typing_type(thing):
             for T in [*union_elems, elem_type]
         ):
             mapping.pop(bytes, None)
-            mapping.pop(collections.abc.ByteString, None)
-            mapping.pop(typing.ByteString, None)
+            if sys.version_info[:2] <= (3, 13):
+                mapping.pop(collections.abc.ByteString, None)
+                mapping.pop(typing.ByteString, None)
     elif (
         (not mapping)
         and isinstance(thing, typing.ForwardRef)
@@ -699,14 +700,16 @@ if sys.version_info[:2] >= (3, 9):
     # which includes this... but we don't actually ever want to build one.
     _global_type_lookup[os._Environ] = st.just(os.environ)
 
+if sys.version_info[:2] <= (3, 13):
+    # Note: while ByteString notionally also represents the bytearray and
+    # memoryview types, it is a subclass of Hashable and those types are not.
+    # We therefore only generate the bytes type. type-ignored due to deprecation.
+    _global_type_lookup[typing.ByteString] = st.binary()  # type: ignore
+    _global_type_lookup[collections.abc.ByteString] = st.binary()  # type: ignore
+
 
 _global_type_lookup.update(
     {
-        # Note: while ByteString notionally also represents the bytearray and
-        # memoryview types, it is a subclass of Hashable and those types are not.
-        # We therefore only generate the bytes type. type-ignored due to deprecation.
-        typing.ByteString: st.binary(),  # type: ignore
-        collections.abc.ByteString: st.binary(),  # type: ignore
         # TODO: SupportsAbs and SupportsRound should be covariant, ie have functions.
         typing.SupportsAbs: st.one_of(
             st.booleans(),
diff --git a/contrib/python/hypothesis/py3/hypothesis/version.py b/contrib/python/hypothesis/py3/hypothesis/version.py
index 5259509771..d94d798ba2 100644
--- a/contrib/python/hypothesis/py3/hypothesis/version.py
+++ b/contrib/python/hypothesis/py3/hypothesis/version.py
@@ -8,5 +8,5 @@
 # v. 2.0. If a copy of the MPL was not distributed with this file, You can
 # obtain one at https://mozilla.org/MPL/2.0/.
 
-__version_info__ = (6, 104, 2)
+__version_info__ = (6, 105, 0)
 __version__ = ".".join(map(str, __version_info__))
diff --git a/contrib/python/hypothesis/py3/ya.make b/contrib/python/hypothesis/py3/ya.make
index 3e32db3afa..648c900671 100644
--- a/contrib/python/hypothesis/py3/ya.make
+++ b/contrib/python/hypothesis/py3/ya.make
@@ -2,7 +2,7 @@
 
 PY3_LIBRARY()
 
-VERSION(6.104.2)
+VERSION(6.105.0)
 
 LICENSE(MPL-2.0)
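The cache rework above changes two behaviours of Hypothesis' internal example cache that the engine.py hunks depend on: `pin()` now takes the pinned value and performs the write itself, and a non-positive `max_size` is rejected with `InvalidArgument` instead of producing a cache that silently drops writes. A minimal illustrative sketch of the updated internal API, assuming hypothesis 6.105.0 is importable and that `LRUReusedCache` forwards `max_size` to `GenericCache.__init__` as in the released source:

```python
# Illustrative sketch, not part of the commit: exercising the internal cache
# API as changed by the hunks above (hypothesis.internal.cache is not public).
from hypothesis.errors import InvalidArgument
from hypothesis.internal.cache import LRUReusedCache

cache = LRUReusedCache(3)
cache[b"a"] = 1

# pin() now takes the value and writes it itself (old signature: pin(key)).
cache.pin(b"b", 2)
assert cache.is_pinned(b"b")

cache.unpin(b"b")  # the value stays cached, but the key may now be evicted

# A non-positive size is rejected up front instead of yielding a cache that
# silently drops every write (the removed `if self.max_size == 0: return`).
try:
    LRUReusedCache(0)
except InvalidArgument:
    pass
```

The engine.py hunks show the matching call-site change: `self.__data_cache.pin(data.buffer, data.as_result())` in place of the old single-argument `pin(data.buffer)`.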