author     robot-piglet <robot-piglet@yandex-team.com>    2024-08-30 09:36:18 +0300
committer  robot-piglet <robot-piglet@yandex-team.com>    2024-08-30 10:01:19 +0300
commit     9b34e4f5ca33d29cd33ff5c496601b956d09cd16 (patch)
tree       ceba3037f4be1cbe9a7cb0188360e52126ce2e6f
parent     42f7f485434730ee4e48b85108780fe031cb3f24 (diff)
download   ydb-9b34e4f5ca33d29cd33ff5c496601b956d09cd16.tar.gz
Intermediate changes
-rw-r--r--  contrib/python/hypothesis/py3/.dist-info/METADATA                            2
-rw-r--r--  contrib/python/hypothesis/py3/hypothesis/extra/django/_fields.py             9
-rw-r--r--  contrib/python/hypothesis/py3/hypothesis/internal/conjecture/data.py       163
-rw-r--r--  contrib/python/hypothesis/py3/hypothesis/internal/conjecture/datatree.py    27
-rw-r--r--  contrib/python/hypothesis/py3/hypothesis/internal/conjecture/engine.py      25
-rw-r--r--  contrib/python/hypothesis/py3/hypothesis/internal/conjecture/shrinker.py    12
-rw-r--r--  contrib/python/hypothesis/py3/hypothesis/version.py                          2
-rw-r--r--  contrib/python/hypothesis/py3/ya.make                                        2
8 files changed, 106 insertions, 136 deletions
diff --git a/contrib/python/hypothesis/py3/.dist-info/METADATA b/contrib/python/hypothesis/py3/.dist-info/METADATA
index accf483679..3e743f0029 100644
--- a/contrib/python/hypothesis/py3/.dist-info/METADATA
+++ b/contrib/python/hypothesis/py3/.dist-info/METADATA
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: hypothesis
-Version: 6.111.0
+Version: 6.111.1
Summary: A library for property-based testing
Home-page: https://hypothesis.works
Author: David R. MacIver and Zac Hatfield-Dodds
diff --git a/contrib/python/hypothesis/py3/hypothesis/extra/django/_fields.py b/contrib/python/hypothesis/py3/hypothesis/extra/django/_fields.py
index 29f6dcf00a..d27eafd3d6 100644
--- a/contrib/python/hypothesis/py3/hypothesis/extra/django/_fields.py
+++ b/contrib/python/hypothesis/py3/hypothesis/extra/django/_fields.py
@@ -10,7 +10,7 @@
import re
import string
-from datetime import timedelta
+from datetime import datetime, timedelta
from decimal import Decimal
from functools import lru_cache
from typing import Any, Callable, Dict, Type, TypeVar, Union
@@ -115,7 +115,12 @@ def register_for(field_type):
@register_for(df.DateTimeField)
def _for_datetime(field):
if getattr(django.conf.settings, "USE_TZ", False):
- return st.datetimes(timezones=timezones())
+ # avoid https://code.djangoproject.com/ticket/35683
+ return st.datetimes(
+ min_value=datetime.min + timedelta(days=1),
+ max_value=datetime.max - timedelta(days=1),
+ timezones=timezones(),
+ )
return st.datetimes()
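
For context, a minimal standard-library sketch of the overflow this one-day margin guards against (illustrative only; the exact Django code path behind ticket 35683 may differ): an aware datetime at the very edge of the supported range cannot always be normalised into another timezone.

from datetime import datetime, timedelta, timezone

# An aware value right at datetime.max cannot be converted to UTC:
extreme = datetime.max.replace(tzinfo=timezone(timedelta(hours=-1)))
try:
    extreme.astimezone(timezone.utc)
except OverflowError:
    pass  # raised because the UTC equivalent falls outside the supported range

# With a one-day margin, as in the patched strategy, conversion stays in range:
safe = (datetime.max - timedelta(days=1)).replace(tzinfo=timezone(timedelta(hours=-1)))
assert safe.astimezone(timezone.utc) < datetime.max.replace(tzinfo=timezone.utc)
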
diff --git a/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/data.py b/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/data.py
index 4661141544..cc1f71da96 100644
--- a/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/data.py
+++ b/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/data.py
@@ -127,8 +127,8 @@ IRKWargsType: TypeAlias = Union[
IntegerKWargs, FloatKWargs, StringKWargs, BytesKWargs, BooleanKWargs
]
IRTypeName: TypeAlias = Literal["integer", "string", "boolean", "float", "bytes"]
-# ir_type, kwargs, forced
-InvalidAt: TypeAlias = Tuple[IRTypeName, IRKWargsType, Optional[IRType]]
+# index, ir_type, kwargs, forced
+MisalignedAt: TypeAlias = Tuple[int, IRTypeName, IRKWargsType, Optional[IRType]]
class ExtraInformation:
@@ -954,9 +954,6 @@ class DataObserver:
) -> None:
pass
- def mark_invalid(self, invalid_at: InvalidAt) -> None:
- pass
-
@attr.s(slots=True, repr=False, eq=False)
class IRNode:
@@ -1169,7 +1166,7 @@ class ConjectureResult:
examples: Examples = attr.ib(repr=False, eq=False)
arg_slices: Set[Tuple[int, int]] = attr.ib(repr=False)
slice_comments: Dict[Tuple[int, int], str] = attr.ib(repr=False)
- invalid_at: Optional[InvalidAt] = attr.ib(repr=False)
+ misaligned_at: Optional[MisalignedAt] = attr.ib(repr=False)
index: int = attr.ib(init=False)
@@ -2060,7 +2057,7 @@ class ConjectureData:
self.extra_information = ExtraInformation()
self.ir_tree_nodes = ir_tree_prefix
- self.invalid_at: Optional[InvalidAt] = None
+ self.misaligned_at: Optional[MisalignedAt] = None
self._node_index = 0
self.start_example(TOP_LABEL)
@@ -2144,10 +2141,10 @@ class ConjectureData:
)
if self.ir_tree_nodes is not None and observe:
- node = self._pop_ir_tree_node("integer", kwargs, forced=forced)
+ node_value = self._pop_ir_tree_node("integer", kwargs, forced=forced)
if forced is None:
- assert isinstance(node.value, int)
- forced = node.value
+ assert isinstance(node_value, int)
+ forced = node_value
fake_forced = True
value = self.provider.draw_integer(
@@ -2201,10 +2198,10 @@ class ConjectureData:
)
if self.ir_tree_nodes is not None and observe:
- node = self._pop_ir_tree_node("float", kwargs, forced=forced)
+ node_value = self._pop_ir_tree_node("float", kwargs, forced=forced)
if forced is None:
- assert isinstance(node.value, float)
- forced = node.value
+ assert isinstance(node_value, float)
+ forced = node_value
fake_forced = True
value = self.provider.draw_float(
@@ -2243,10 +2240,10 @@ class ConjectureData:
},
)
if self.ir_tree_nodes is not None and observe:
- node = self._pop_ir_tree_node("string", kwargs, forced=forced)
+ node_value = self._pop_ir_tree_node("string", kwargs, forced=forced)
if forced is None:
- assert isinstance(node.value, str)
- forced = node.value
+ assert isinstance(node_value, str)
+ forced = node_value
fake_forced = True
value = self.provider.draw_string(
@@ -2279,10 +2276,10 @@ class ConjectureData:
kwargs: BytesKWargs = self._pooled_kwargs("bytes", {"size": size})
if self.ir_tree_nodes is not None and observe:
- node = self._pop_ir_tree_node("bytes", kwargs, forced=forced)
+ node_value = self._pop_ir_tree_node("bytes", kwargs, forced=forced)
if forced is None:
- assert isinstance(node.value, bytes)
- forced = node.value
+ assert isinstance(node_value, bytes)
+ forced = node_value
fake_forced = True
value = self.provider.draw_bytes(
@@ -2320,10 +2317,10 @@ class ConjectureData:
kwargs: BooleanKWargs = self._pooled_kwargs("boolean", {"p": p})
if self.ir_tree_nodes is not None and observe:
- node = self._pop_ir_tree_node("boolean", kwargs, forced=forced)
+ node_value = self._pop_ir_tree_node("boolean", kwargs, forced=forced)
if forced is None:
- assert isinstance(node.value, bool)
- forced = node.value
+ assert isinstance(node_value, bool)
+ forced = node_value
fake_forced = True
value = self.provider.draw_boolean(
@@ -2367,41 +2364,57 @@ class ConjectureData:
def _pop_ir_tree_node(
self, ir_type: IRTypeName, kwargs: IRKWargsType, *, forced: Optional[IRType]
- ) -> IRNode:
+ ) -> IRType:
+ from hypothesis.internal.conjecture.engine import BUFFER_SIZE
+
assert self.ir_tree_nodes is not None
if self._node_index == len(self.ir_tree_nodes):
self.mark_overrun()
node = self.ir_tree_nodes[self._node_index]
- # If we're trying to draw a different ir type at the same location, then
- # this ir tree has become badly misaligned. We don't have many good/simple
- # options here for realigning beyond giving up.
+ value = node.value
+ # If we're trying to:
+ # * draw a different ir type at the same location
+ # * draw the same ir type with different kwargs
+ #
+ # then we call this a misalignment, because the choice sequence has
+ # slipped from what we expected at some point. An easy misalignment is
+ #
+ # st.one_of(st.integers(0, 100), st.integers(101, 200))
#
- # This is more of an issue for ir nodes while shrinking than it was for
- # buffers: misaligned buffers are still usually valid, just interpreted
- # differently. This would be somewhat like drawing a random value for
- # the new ir type here. For what it's worth, misaligned buffers are
- # rather unlikely to be *useful* buffers, so giving up isn't a big downgrade.
- # (in fact, it is possible that giving up early here results in more time
- # for useful shrinks to run).
- if node.ir_type != ir_type:
- invalid_at = (ir_type, kwargs, forced)
- self.invalid_at = invalid_at
- self.observer.mark_invalid(invalid_at)
- self.mark_invalid(f"(internal) want a {ir_type} but have a {node.ir_type}")
-
- # if a node has different kwargs (and so is misaligned), but has a value
- # that is allowed by the expected kwargs, then we can coerce this node
- # into an aligned one by using its value. It's unclear how useful this is.
- if not ir_value_permitted(node.value, node.ir_type, kwargs):
- invalid_at = (ir_type, kwargs, forced)
- self.invalid_at = invalid_at
- self.observer.mark_invalid(invalid_at)
- self.mark_invalid(f"(internal) got a {ir_type} but outside the valid range")
+ # where the choice sequence [0, 100] has kwargs {min_value: 0, max_value: 100}
+ # at position 2, but [1, 101] has kwargs {min_value: 101, max_value: 200} at
+ # position 2.
+ #
+ # When we see a misalignment, we can't offer up the stored node value as-is.
+ # We need to make it appropriate for the requested kwargs and ir type.
+ # Right now we do that by using bytes as the intermediary to convert between
+ # ir types/kwargs. In the future we'll probably use the index into a custom
+ # ordering for an (ir_type, kwargs) pair.
+ if node.ir_type != ir_type or not ir_value_permitted(
+ node.value, node.ir_type, kwargs
+ ):
+ # only track first misalignment for now.
+ if self.misaligned_at is None:
+ self.misaligned_at = (self._node_index, ir_type, kwargs, forced)
+ (_value, buffer) = ir_to_buffer(
+ node.ir_type, node.kwargs, forced=node.value
+ )
+ try:
+ value = buffer_to_ir(
+ ir_type, kwargs, buffer=buffer + bytes(BUFFER_SIZE - len(buffer))
+ )
+ except StopTest:
+ # must have been an overrun.
+ #
+ # maybe we should fall back to an arbitrary small value here
+ # instead? eg
+ # buffer_to_ir(ir_type, kwargs, buffer=bytes(BUFFER_SIZE))
+ self.mark_overrun()
self._node_index += 1
- return node
+ return value
def as_result(self) -> Union[ConjectureResult, _Overrun]:
"""Convert the result of running this test into
@@ -2429,7 +2442,7 @@ class ConjectureData:
forced_indices=frozenset(self.forced_indices),
arg_slices=self.arg_slices,
slice_comments=self.slice_comments,
- invalid_at=self.invalid_at,
+ misaligned_at=self.misaligned_at,
)
assert self.__result is not None
self.blocks.transfer_ownership(self.__result)
@@ -2578,38 +2591,9 @@ class ConjectureData:
self.stop_example()
self.__example_record.freeze()
-
self.frozen = True
-
self.buffer = bytes(self.buffer)
-
- # if we were invalid because of a misalignment in the tree, we don't
- # want to tell the DataTree that. Doing so would lead to inconsistent behavior.
- # Given an empty DataTree
- # ┌──────┐
- # │ root │
- # └──────┘
- # and supposing the very first draw is misaligned, concluding here would
- # tell the datatree that the *only* possibility at the root node is Status.INVALID:
- # ┌──────┐
- # │ root │
- # └──┬───┘
- # ┌───────────┴───────────────┐
- # │ Conclusion(Status.INVALID)│
- # └───────────────────────────┘
- # when in fact this is only the case when we try to draw a misaligned node.
- # For instance, suppose we come along in the second test case and try a
- # valid node as the first draw from the root. The DataTree thinks this
- # is flaky (because root must lead to Status.INVALID in the tree) while
- # in fact nothing in the test function has changed and the only change
- # is in the ir tree prefix we are supplying.
- #
- # From the perspective of DataTree, it is safe to not conclude here. This
- # tells the datatree that we don't know what happens after this node - which
- # is true! We are aborting early here because the ir tree became misaligned,
- # which is a semantically different invalidity than an assume or filter failing.
- if self.invalid_at is None:
- self.observer.conclude_test(self.status, self.interesting_origin)
+ self.observer.conclude_test(self.status, self.interesting_origin)
def choice(
self,
@@ -2716,3 +2700,24 @@ def bits_to_bytes(n: int) -> int:
Equivalent to (n + 7) // 8, but slightly faster. This really is
called enough times that that matters."""
return (n + 7) >> 3
+
+
+def ir_to_buffer(ir_type, kwargs, *, forced=None, random=None):
+ from hypothesis.internal.conjecture.engine import BUFFER_SIZE
+
+ if forced is None:
+ assert random is not None
+
+ cd = ConjectureData(
+ max_length=BUFFER_SIZE,
+ # buffer doesn't matter if forced is passed since we're forcing the sole draw
+ prefix=b"" if forced is None else bytes(BUFFER_SIZE),
+ random=random,
+ )
+ value = getattr(cd.provider, f"draw_{ir_type}")(**kwargs, forced=forced)
+ return (value, cd.buffer)
+
+
+def buffer_to_ir(ir_type, kwargs, *, buffer):
+ cd = ConjectureData.for_buffer(buffer)
+ return getattr(cd.provider, f"draw_{ir_type}")(**kwargs)
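
A hedged usage sketch of the two helpers above (illustrative; it leans on the internal provider API shown in this patch, so treat the exact calls as assumptions rather than a stable interface): a stored value is serialised through its byte-stream form and re-read under the new kwargs, which is how _pop_ir_tree_node coerces a misaligned node.

from hypothesis.internal.conjecture.data import buffer_to_ir, ir_to_buffer
from hypothesis.internal.conjecture.engine import BUFFER_SIZE

# Serialise a forced boolean draw under one set of kwargs...
old_value, buf = ir_to_buffer("boolean", {"p": 0.5}, forced=True)
# ...then reinterpret the same bytes under different kwargs, padding the
# buffer out to BUFFER_SIZE exactly as _pop_ir_tree_node does.
new_value = buffer_to_ir("boolean", {"p": 0.25}, buffer=buf + bytes(BUFFER_SIZE - len(buf)))
# new_value need not equal old_value; it is whatever the old byte stream
# decodes to under the new kwargs.
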
diff --git a/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/datatree.py b/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/datatree.py
index c5602a35f4..60c1610bc3 100644
--- a/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/datatree.py
+++ b/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/datatree.py
@@ -29,7 +29,6 @@ from hypothesis.internal.conjecture.data import (
DataObserver,
FloatKWargs,
IntegerKWargs,
- InvalidAt,
IRKWargsType,
IRType,
IRTypeName,
@@ -441,8 +440,6 @@ class TreeNode:
# be explored when generating novel prefixes)
transition: Union[None, Branch, Conclusion, Killed] = attr.ib(default=None)
- invalid_at: Optional[InvalidAt] = attr.ib(default=None)
-
# A tree node is exhausted if every possible sequence of draws below it has
# been explored. We only update this when performing operations that could
# change the answer.
@@ -496,8 +493,6 @@ class TreeNode:
del self.ir_types[i:]
del self.values[i:]
del self.kwargs[i:]
- # we have a transition now, so we don't need to carry around invalid_at.
- self.invalid_at = None
assert len(self.values) == len(self.kwargs) == len(self.ir_types) == i
def check_exhausted(self):
@@ -866,13 +861,6 @@ class DataTree:
t = node.transition
data.conclude_test(t.status, t.interesting_origin)
elif node.transition is None:
- if node.invalid_at is not None:
- (ir_type, kwargs, forced) = node.invalid_at
- try:
- draw(ir_type, kwargs, forced=forced, convert_forced=False)
- except StopTest:
- if data.invalid_at is not None:
- raise
raise PreviouslyUnseenBehaviour
elif isinstance(node.transition, Branch):
v = draw(node.transition.ir_type, node.transition.kwargs)
@@ -891,16 +879,9 @@ class DataTree:
return TreeRecordingObserver(self)
def _draw(self, ir_type, kwargs, *, random, forced=None):
- # we should possibly pull out BUFFER_SIZE to a common file to avoid this
- # circular import.
- from hypothesis.internal.conjecture.engine import BUFFER_SIZE
-
- cd = ConjectureData(max_length=BUFFER_SIZE, prefix=b"", random=random)
- draw_func = getattr(cd, f"draw_{ir_type}")
-
- value = draw_func(**kwargs, forced=forced)
- buf = cd.buffer
+ from hypothesis.internal.conjecture.data import ir_to_buffer
+ (value, buf) = ir_to_buffer(ir_type, kwargs, forced=forced, random=random)
# using floats as keys into branch.children breaks things, because
# e.g. hash(0.0) == hash(-0.0) would collide as keys when they are
# in fact distinct child branches.
@@ -1018,10 +999,6 @@ class TreeRecordingObserver(DataObserver):
) -> None:
self.draw_value("boolean", value, was_forced=was_forced, kwargs=kwargs)
- def mark_invalid(self, invalid_at: InvalidAt) -> None:
- if self.__current_node.transition is None:
- self.__current_node.invalid_at = invalid_at
-
def draw_value(
self,
ir_type: IRTypeName,
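
The float-key caveat kept in the _draw context above (hash(0.0) == hash(-0.0)) can be checked with a few lines of plain Python; this is an illustration only, not part of the patch.

# 0.0 and -0.0 compare equal and hash identically, so as raw dict keys they
# collapse into a single entry even though they name distinct child branches.
assert 0.0 == -0.0 and hash(0.0) == hash(-0.0)
children = {0.0: "positive-zero branch"}
children[-0.0] = "negative-zero branch"
assert len(children) == 1  # the second insert overwrote the first
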
diff --git a/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/engine.py b/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/engine.py
index 91f731cf44..ff330cf59e 100644
--- a/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/engine.py
+++ b/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/engine.py
@@ -331,15 +331,8 @@ class ConjectureRunner:
data: Union[ConjectureData, ConjectureResult, None] = None,
) -> Tuple[Tuple[Any, ...], ...]:
assert (nodes is not None) ^ (data is not None)
- extension = []
if data is not None:
nodes = data.examples.ir_tree_nodes
- if data.invalid_at is not None:
- # if we're invalid then we should have at least one node left (the invalid one).
- assert isinstance(data, ConjectureData)
- assert data.ir_tree_nodes is not None
- assert data._node_index < len(data.ir_tree_nodes)
- extension = [data.ir_tree_nodes[data._node_index]]
assert nodes is not None
# intentionally drop was_forced from equality here, because the was_forced
@@ -350,24 +343,12 @@ class ConjectureRunner:
ir_value_key(node.ir_type, node.value),
ir_kwargs_key(node.ir_type, node.kwargs),
)
- for node in nodes + extension
+ for node in nodes
)
def _cache(self, data: ConjectureData) -> None:
result = data.as_result()
- # when we shrink, we try out of bounds things, which can lead to the same
- # data.buffer having multiple outcomes. eg data.buffer=b'' is Status.OVERRUN
- # in normal circumstances, but a data with
- # ir_nodes=[integer -5 {min_value: 0, max_value: 10}] will also have
- # data.buffer=b'' but will be Status.INVALID instead. We do not want to
- # change the cached value to INVALID in this case.
- #
- # We handle this specially for the ir cache by keying off the misaligned node
- # as well, but we cannot do the same for buffers as we do not know ahead of
- # time what buffer a node maps to. I think it's largely fine that we don't
- # write to the buffer cache here as we move more things to the ir cache.
- if data.invalid_at is None:
- self.__data_cache[data.buffer] = result
+ self.__data_cache[data.buffer] = result
# interesting buffer-based data can mislead the shrinker if we cache them.
#
@@ -489,7 +470,7 @@ class ConjectureRunner:
node.kwargs = kwargs
self._cache(data)
- if data.invalid_at is not None: # pragma: no branch # coverage bug?
+ if data.misaligned_at is not None: # pragma: no branch # coverage bug?
self.misaligned_count += 1
self.debug_data(data)
diff --git a/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/shrinker.py b/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/shrinker.py
index d441821787..3be703a18e 100644
--- a/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/shrinker.py
+++ b/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/shrinker.py
@@ -1037,10 +1037,10 @@ class Shrinker:
if attempt.status is Status.OVERRUN:
return False
- if attempt.status is Status.INVALID and attempt.invalid_at is None:
+ if attempt.status is Status.INVALID:
return False
- if attempt.status is Status.INVALID and attempt.invalid_at is not None:
+ if attempt.misaligned_at is not None:
# we're invalid due to a misalignment in the tree. We'll try to fix
# a very specific type of misalignment here: where we have a node of
# {"size": n} and tried to draw the same node, but with {"size": m < n}.
@@ -1065,10 +1065,12 @@ class Shrinker:
# case of this function of preserving from the right instead of
# preserving from the left. see test_can_shrink_variable_string_draws.
- node = self.nodes[len(attempt.examples.ir_tree_nodes)]
- (attempt_ir_type, attempt_kwargs, _attempt_forced) = attempt.invalid_at
+ (index, attempt_ir_type, attempt_kwargs, _attempt_forced) = (
+ attempt.misaligned_at
+ )
+ node = self.nodes[index]
if node.ir_type != attempt_ir_type:
- return False
+ return False # pragma: no cover
if node.was_forced:
return False # pragma: no cover
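
For the {"size": n} versus {"size": m < n} misalignment this shrinker pass targets, a public-API sketch of the kind of test that produces it (illustrative only; test_can_shrink_variable_string_draws, referenced in the comment above, is the case in Hypothesis's own suite):

from hypothesis import given, strategies as st

# The first draw (an integer) decides the kwargs of the second draw (a string
# of exactly that length), so shrinking the integer misaligns the string node.
@given(st.integers(1, 10).flatmap(lambda n: st.text(min_size=n, max_size=n)))
def test_variable_length_text(s):
    assert 1 <= len(s) <= 10
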
diff --git a/contrib/python/hypothesis/py3/hypothesis/version.py b/contrib/python/hypothesis/py3/hypothesis/version.py
index 3f18637632..7b68749f4f 100644
--- a/contrib/python/hypothesis/py3/hypothesis/version.py
+++ b/contrib/python/hypothesis/py3/hypothesis/version.py
@@ -8,5 +8,5 @@
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at https://mozilla.org/MPL/2.0/.
-__version_info__ = (6, 111, 0)
+__version_info__ = (6, 111, 1)
__version__ = ".".join(map(str, __version_info__))
diff --git a/contrib/python/hypothesis/py3/ya.make b/contrib/python/hypothesis/py3/ya.make
index 21fbb328a7..d00e9b6081 100644
--- a/contrib/python/hypothesis/py3/ya.make
+++ b/contrib/python/hypothesis/py3/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(6.111.0)
+VERSION(6.111.1)
LICENSE(MPL-2.0)