path: root/contrib/tools/python3/src/Python/hamt.c
author      shadchin <shadchin@yandex-team.com>    2024-02-12 07:53:52 +0300
committer   shadchin <shadchin@yandex-team.com>    2024-02-12 08:07:36 +0300
commit      ce1b7ca3171f9158180640c6a02a74b4afffedea (patch)
tree        e47c1e8391b1b0128262c1e9b1e6ed4c8fff2348 /contrib/tools/python3/src/Python/hamt.c
parent      57350d96f030db90f220ce50ee591d5c5d403df7 (diff)
download    ydb-ce1b7ca3171f9158180640c6a02a74b4afffedea.tar.gz
Update Python from 3.11.8 to 3.12.2
Diffstat (limited to 'contrib/tools/python3/src/Python/hamt.c')
-rw-r--r--   contrib/tools/python3/src/Python/hamt.c | 227
1 file changed, 76 insertions(+), 151 deletions(-)
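
The changes below fall into two groups: a mechanical move from the old
incref-then-assign idiom to the Py_NewRef()/Py_XNewRef() helpers, and the
replacement of the lazily cached empty HAMT objects with statically
allocated singletons. As a quick reference, here is a minimal sketch of the
reference helpers; copy_slot() is an illustrative name that does not appear
in hamt.c:

#include <Python.h>

/* Py_NewRef(o) increments o's refcount and returns o; Py_XNewRef(o) does
   the same but also accepts o == NULL.  Both collapse the old two-line
   pattern
       Py_XINCREF(o);
       dst = o;
   into a single expression, which accounts for most of this diff. */
static void
copy_slot(PyObject **dst, PyObject *o)
{
    *dst = Py_XNewRef(o);
}
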
diff --git a/contrib/tools/python3/src/Python/hamt.c b/contrib/tools/python3/src/Python/hamt.c
index 908c253187..8cb94641be 100644
--- a/contrib/tools/python3/src/Python/hamt.c
+++ b/contrib/tools/python3/src/Python/hamt.c
@@ -321,22 +321,11 @@ typedef struct {
typedef struct {
PyObject_VAR_HEAD
- uint32_t b_bitmap;
- PyObject *b_array[1];
-} PyHamtNode_Bitmap;
-
-
-typedef struct {
- PyObject_VAR_HEAD
int32_t c_hash;
PyObject *c_array[1];
} PyHamtNode_Collision;
-static PyHamtNode_Bitmap *_empty_bitmap_node;
-static PyHamtObject *_empty_hamt;
-
-
static PyHamtObject *
hamt_alloc(void);
@@ -496,11 +485,7 @@ _hamt_dump_format(_PyUnicodeWriter *writer, const char *format, ...)
int ret;
va_list vargs;
-#ifdef HAVE_STDARG_PROTOTYPES
va_start(vargs, format);
-#else
- va_start(vargs);
-#endif
msg = PyUnicode_FromFormatV(format, vargs);
va_end(vargs);
@@ -525,14 +510,16 @@ hamt_node_bitmap_new(Py_ssize_t size)
PyHamtNode_Bitmap *node;
Py_ssize_t i;
+ if (size == 0) {
+ /* Since bitmap nodes are immutable, we can cache the instance
+ for size=0 and reuse it whenever we need an empty bitmap node.
+ */
+ return (PyHamtNode *)Py_NewRef(&_Py_SINGLETON(hamt_bitmap_node_empty));
+ }
+
assert(size >= 0);
assert(size % 2 == 0);
- if (size == 0 && _empty_bitmap_node != NULL) {
- Py_INCREF(_empty_bitmap_node);
- return (PyHamtNode *)_empty_bitmap_node;
- }
-
/* No freelist; allocate a new bitmap node */
node = PyObject_GC_NewVar(
PyHamtNode_Bitmap, &_PyHamt_BitmapNode_Type, size);
@@ -550,14 +537,6 @@ hamt_node_bitmap_new(Py_ssize_t size)
_PyObject_GC_TRACK(node);
- if (size == 0 && _empty_bitmap_node == NULL) {
- /* Since bitmap nodes are immutable, we can cache the instance
- for size=0 and reuse it whenever we need an empty bitmap node.
- */
- _empty_bitmap_node = node;
- Py_INCREF(_empty_bitmap_node);
- }
-
return (PyHamtNode *)node;
}
@@ -581,8 +560,7 @@ hamt_node_bitmap_clone(PyHamtNode_Bitmap *node)
}
for (i = 0; i < Py_SIZE(node); i++) {
- Py_XINCREF(node->b_array[i]);
- clone->b_array[i] = node->b_array[i];
+ clone->b_array[i] = Py_XNewRef(node->b_array[i]);
}
clone->b_bitmap = node->b_bitmap;
@@ -607,14 +585,12 @@ hamt_node_bitmap_clone_without(PyHamtNode_Bitmap *o, uint32_t bit)
uint32_t i;
for (i = 0; i < key_idx; i++) {
- Py_XINCREF(o->b_array[i]);
- new->b_array[i] = o->b_array[i];
+ new->b_array[i] = Py_XNewRef(o->b_array[i]);
}
assert(Py_SIZE(o) >= 0 && Py_SIZE(o) <= 32);
for (i = val_idx + 1; i < (uint32_t)Py_SIZE(o); i++) {
- Py_XINCREF(o->b_array[i]);
- new->b_array[i - 2] = o->b_array[i];
+ new->b_array[i - 2] = Py_XNewRef(o->b_array[i]);
}
new->b_bitmap = o->b_bitmap & ~bit;
@@ -647,15 +623,11 @@ hamt_node_new_bitmap_or_collision(uint32_t shift,
return NULL;
}
- Py_INCREF(key1);
- n->c_array[0] = key1;
- Py_INCREF(val1);
- n->c_array[1] = val1;
+ n->c_array[0] = Py_NewRef(key1);
+ n->c_array[1] = Py_NewRef(val1);
- Py_INCREF(key2);
- n->c_array[2] = key2;
- Py_INCREF(val2);
- n->c_array[3] = val2;
+ n->c_array[2] = Py_NewRef(key2);
+ n->c_array[3] = Py_NewRef(val2);
return (PyHamtNode *)n;
}
@@ -740,8 +712,7 @@ hamt_node_bitmap_assoc(PyHamtNode_Bitmap *self,
if (val_or_node == (PyObject *)sub_node) {
Py_DECREF(sub_node);
- Py_INCREF(self);
- return (PyHamtNode *)self;
+ return (PyHamtNode *)Py_NewRef(self);
}
PyHamtNode_Bitmap *ret = hamt_node_bitmap_clone(self);
@@ -763,8 +734,7 @@ hamt_node_bitmap_assoc(PyHamtNode_Bitmap *self,
if (comp_err == 1) { /* key == key_or_null */
if (val == val_or_node) {
/* we already have the same key/val pair; return self. */
- Py_INCREF(self);
- return (PyHamtNode *)self;
+ return (PyHamtNode *)Py_NewRef(self);
}
/* We're setting a new value for the key we had before.
@@ -773,8 +743,7 @@ hamt_node_bitmap_assoc(PyHamtNode_Bitmap *self,
if (ret == NULL) {
return NULL;
}
- Py_INCREF(val);
- Py_SETREF(ret->b_array[val_idx], val);
+ Py_SETREF(ret->b_array[val_idx], Py_NewRef(val));
return (PyHamtNode *)ret;
}
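
The Py_SETREF() change above is more than shorthand: the macro repoints the
slot before it drops the old reference, so a destructor that runs while the
old value is being released can never observe a dangling entry. A
self-contained sketch of that ordering; set_slot() is an illustrative
helper, not part of hamt.c:

#include <Python.h>

/* Equivalent of Py_SETREF(*slot, Py_NewRef(value)), spelled out. */
static void
set_slot(PyObject **slot, PyObject *value)
{
    PyObject *old = *slot;       /* 1. remember the old value            */
    *slot = Py_NewRef(value);    /* 2. install the new strong reference  */
    Py_DECREF(old);              /* 3. release the old value last        */
}
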
@@ -869,8 +838,7 @@ hamt_node_bitmap_assoc(PyHamtNode_Bitmap *self,
if (self->b_array[j] == NULL) {
new_node->a_array[i] =
- (PyHamtNode *)self->b_array[j + 1];
- Py_INCREF(new_node->a_array[i]);
+ (PyHamtNode *)Py_NewRef(self->b_array[j + 1]);
}
else {
int32_t rehash = hamt_hash(self->b_array[j]);
@@ -927,22 +895,18 @@ hamt_node_bitmap_assoc(PyHamtNode_Bitmap *self,
/* Copy all keys/values that will be before the new key/value
we are adding. */
for (i = 0; i < key_idx; i++) {
- Py_XINCREF(self->b_array[i]);
- new_node->b_array[i] = self->b_array[i];
+ new_node->b_array[i] = Py_XNewRef(self->b_array[i]);
}
/* Set the new key/value to the new Bitmap node. */
- Py_INCREF(key);
- new_node->b_array[key_idx] = key;
- Py_INCREF(val);
- new_node->b_array[val_idx] = val;
+ new_node->b_array[key_idx] = Py_NewRef(key);
+ new_node->b_array[val_idx] = Py_NewRef(val);
/* Copy all keys/values that will be after the new key/value
we are adding. */
assert(Py_SIZE(self) >= 0 && Py_SIZE(self) <= 32);
for (i = key_idx; i < (uint32_t)Py_SIZE(self); i++) {
- Py_XINCREF(self->b_array[i]);
- new_node->b_array[i + 2] = self->b_array[i];
+ new_node->b_array[i + 2] = Py_XNewRef(self->b_array[i]);
}
new_node->b_bitmap = self->b_bitmap | bit;
@@ -1023,10 +987,8 @@ hamt_node_bitmap_without(PyHamtNode_Bitmap *self,
PyObject *key = sub_tree->b_array[0];
PyObject *val = sub_tree->b_array[1];
- Py_INCREF(key);
- Py_XSETREF(clone->b_array[key_idx], key);
- Py_INCREF(val);
- Py_SETREF(clone->b_array[val_idx], val);
+ Py_XSETREF(clone->b_array[key_idx], Py_NewRef(key));
+ Py_SETREF(clone->b_array[val_idx], Py_NewRef(val));
Py_DECREF(sub_tree);
@@ -1164,6 +1126,16 @@ hamt_node_bitmap_dealloc(PyHamtNode_Bitmap *self)
Py_ssize_t len = Py_SIZE(self);
Py_ssize_t i;
+ if (Py_SIZE(self) == 0) {
+ /* The empty node is statically allocated. */
+ assert(self == &_Py_SINGLETON(hamt_bitmap_node_empty));
+#ifdef Py_DEBUG
+ _Py_FatalRefcountError("deallocating the empty hamt node bitmap singleton");
+#else
+ return;
+#endif
+ }
+
PyObject_GC_UnTrack(self);
Py_TRASHCAN_BEGIN(self, hamt_node_bitmap_dealloc)
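
Because the empty bitmap node is now statically allocated, its deallocator
has to refuse to run: a stray Py_DECREF that drove the singleton's refcount
to zero would otherwise hand a pointer that was never heap allocated to the
normal free path. The guard above aborts in debug builds and silently
returns in release builds. A generic sketch of the same pattern, with
my_static_singleton and example_dealloc as placeholder names, and
_Py_FatalRefcountError() coming from the internal pycore_object.h header
(Py_BUILD_CORE only):

#include <Python.h>
#include "pycore_object.h"      /* _Py_FatalRefcountError(), internal API */

static PyObject my_static_singleton;    /* stands in for a statically
                                           initialized instance */

static void
example_dealloc(PyObject *self)
{
    if (self == &my_static_singleton) {
#ifdef Py_DEBUG
        /* never returns */
        _Py_FatalRefcountError("deallocating a statically allocated object");
#else
        return;    /* never free memory we did not allocate */
#endif
    }
    Py_TYPE(self)->tp_free(self);
}
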
@@ -1347,14 +1319,11 @@ hamt_node_collision_assoc(PyHamtNode_Collision *self,
}
for (i = 0; i < Py_SIZE(self); i++) {
- Py_INCREF(self->c_array[i]);
- new_node->c_array[i] = self->c_array[i];
+ new_node->c_array[i] = Py_NewRef(self->c_array[i]);
}
- Py_INCREF(key);
- new_node->c_array[i] = key;
- Py_INCREF(val);
- new_node->c_array[i + 1] = val;
+ new_node->c_array[i] = Py_NewRef(key);
+ new_node->c_array[i + 1] = Py_NewRef(val);
*added_leaf = 1;
return (PyHamtNode *)new_node;
@@ -1368,8 +1337,7 @@ hamt_node_collision_assoc(PyHamtNode_Collision *self,
if (self->c_array[val_idx] == val) {
/* We're setting a key/value pair that's already set. */
- Py_INCREF(self);
- return (PyHamtNode *)self;
+ return (PyHamtNode *)Py_NewRef(self);
}
/* We need to replace old value for the key
@@ -1382,14 +1350,11 @@ hamt_node_collision_assoc(PyHamtNode_Collision *self,
/* Copy all elements of the old node to the new one. */
for (i = 0; i < Py_SIZE(self); i++) {
- Py_INCREF(self->c_array[i]);
- new_node->c_array[i] = self->c_array[i];
+ new_node->c_array[i] = Py_NewRef(self->c_array[i]);
}
/* Replace the old value with the new value for the our key. */
- Py_DECREF(new_node->c_array[val_idx]);
- Py_INCREF(val);
- new_node->c_array[val_idx] = val;
+ Py_SETREF(new_node->c_array[val_idx], Py_NewRef(val));
return (PyHamtNode *)new_node;
@@ -1414,8 +1379,7 @@ hamt_node_collision_assoc(PyHamtNode_Collision *self,
return NULL;
}
new_node->b_bitmap = hamt_bitpos(self->c_hash, shift);
- Py_INCREF(self);
- new_node->b_array[1] = (PyObject*) self;
+ new_node->b_array[1] = Py_NewRef(self);
assoc_res = hamt_node_bitmap_assoc(
new_node, shift, hash, key, val, added_leaf);
@@ -1477,17 +1441,13 @@ hamt_node_collision_without(PyHamtNode_Collision *self,
}
if (key_idx == 0) {
- Py_INCREF(self->c_array[2]);
- node->b_array[0] = self->c_array[2];
- Py_INCREF(self->c_array[3]);
- node->b_array[1] = self->c_array[3];
+ node->b_array[0] = Py_NewRef(self->c_array[2]);
+ node->b_array[1] = Py_NewRef(self->c_array[3]);
}
else {
assert(key_idx == 2);
- Py_INCREF(self->c_array[0]);
- node->b_array[0] = self->c_array[0];
- Py_INCREF(self->c_array[1]);
- node->b_array[1] = self->c_array[1];
+ node->b_array[0] = Py_NewRef(self->c_array[0]);
+ node->b_array[1] = Py_NewRef(self->c_array[1]);
}
node->b_bitmap = hamt_bitpos(hash, shift);
@@ -1508,12 +1468,10 @@ hamt_node_collision_without(PyHamtNode_Collision *self,
/* Copy all other keys from `self` to `new` */
Py_ssize_t i;
for (i = 0; i < key_idx; i++) {
- Py_INCREF(self->c_array[i]);
- new->c_array[i] = self->c_array[i];
+ new->c_array[i] = Py_NewRef(self->c_array[i]);
}
for (i = key_idx + 2; i < Py_SIZE(self); i++) {
- Py_INCREF(self->c_array[i]);
- new->c_array[i - 2] = self->c_array[i];
+ new->c_array[i - 2] = Py_NewRef(self->c_array[i]);
}
*new_node = (PyHamtNode*)new;
@@ -1665,8 +1623,7 @@ hamt_node_array_clone(PyHamtNode_Array *node)
/* Copy all elements from the current Array node to the new one. */
for (i = 0; i < HAMT_ARRAY_NODE_SIZE; i++) {
- Py_XINCREF(node->a_array[i]);
- clone->a_array[i] = node->a_array[i];
+ clone->a_array[i] = (PyHamtNode*)Py_XNewRef(node->a_array[i]);
}
VALIDATE_ARRAY_NODE(clone)
@@ -1723,8 +1680,7 @@ hamt_node_array_assoc(PyHamtNode_Array *self,
/* Copy all elements from the current Array node to the
new one. */
for (i = 0; i < HAMT_ARRAY_NODE_SIZE; i++) {
- Py_XINCREF(self->a_array[i]);
- new_node->a_array[i] = self->a_array[i];
+ new_node->a_array[i] = (PyHamtNode*)Py_XNewRef(self->a_array[i]);
}
assert(new_node->a_array[idx] == NULL);
@@ -1872,15 +1828,12 @@ hamt_node_array_without(PyHamtNode_Array *self,
PyObject *key = child->b_array[0];
PyObject *val = child->b_array[1];
- Py_INCREF(key);
- new->b_array[new_i] = key;
- Py_INCREF(val);
- new->b_array[new_i + 1] = val;
+ new->b_array[new_i] = Py_NewRef(key);
+ new->b_array[new_i + 1] = Py_NewRef(val);
}
else {
new->b_array[new_i] = NULL;
- Py_INCREF(node);
- new->b_array[new_i + 1] = (PyObject*)node;
+ new->b_array[new_i + 1] = Py_NewRef(node);
}
}
else {
@@ -1898,8 +1851,7 @@ hamt_node_array_without(PyHamtNode_Array *self,
/* Just copy the node into our new Bitmap */
new->b_array[new_i] = NULL;
- Py_INCREF(node);
- new->b_array[new_i + 1] = (PyObject*)node;
+ new->b_array[new_i + 1] = Py_NewRef(node);
}
new_i += 2;
@@ -2315,8 +2267,7 @@ _PyHamt_Assoc(PyHamtObject *o, PyObject *key, PyObject *val)
if (new_root == o->h_root) {
Py_DECREF(new_root);
- Py_INCREF(o);
- return o;
+ return (PyHamtObject*)Py_NewRef(o);
}
new_o = hamt_alloc();
@@ -2352,8 +2303,7 @@ _PyHamt_Without(PyHamtObject *o, PyObject *key)
case W_EMPTY:
return _PyHamt_New();
case W_NOT_FOUND:
- Py_INCREF(o);
- return o;
+ return (PyHamtObject*)Py_NewRef(o);
case W_NEWNODE: {
assert(new_root != NULL);
@@ -2474,35 +2424,15 @@ hamt_alloc(void)
return o;
}
+#define _empty_hamt \
+ (&_Py_INTERP_SINGLETON(_PyInterpreterState_Get(), hamt_empty))
+
PyHamtObject *
_PyHamt_New(void)
{
- if (_empty_hamt != NULL) {
- /* HAMT is an immutable object so we can easily cache an
- empty instance. */
- Py_INCREF(_empty_hamt);
- return _empty_hamt;
- }
-
- PyHamtObject *o = hamt_alloc();
- if (o == NULL) {
- return NULL;
- }
-
- o->h_root = hamt_node_bitmap_new(0);
- if (o->h_root == NULL) {
- Py_DECREF(o);
- return NULL;
- }
-
- o->h_count = 0;
-
- if (_empty_hamt == NULL) {
- Py_INCREF(o);
- _empty_hamt = o;
- }
-
- return o;
+ /* HAMT is an immutable object so we can easily cache an
+ empty instance. */
+ return (PyHamtObject*)Py_NewRef(_empty_hamt);
}
#ifdef Py_DEBUG
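
Two different singleton macros appear in this diff: _Py_SINGLETON() (used
earlier for the empty bitmap node) names a process-wide object in the
static runtime state, while the _empty_hamt macro above uses
_Py_INTERP_SINGLETON() to pick the copy embedded in the current
PyInterpreterState. Each subinterpreter therefore owns its own empty HAMT,
and there is nothing left to release at shutdown, which is why
_PyHamt_Fini() disappears at the end of this diff. A sketch of the new
lookup; empty_hamt_for_current_interp() is an illustrative name, and the
exact internal headers (all Py_BUILD_CORE only) are an assumption:

#include <Python.h>
#include "pycore_hamt.h"              /* PyHamtObject (internal header) */
#include "pycore_global_objects.h"    /* _Py_INTERP_SINGLETON(), assumed location */

/* Equivalent to what _PyHamt_New() now does. */
static PyHamtObject *
empty_hamt_for_current_interp(void)
{
    PyInterpreterState *interp = _PyInterpreterState_Get();
    return (PyHamtObject *)Py_NewRef(&_Py_INTERP_SINGLETON(interp, hamt_empty));
}
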
@@ -2595,8 +2525,7 @@ hamt_baseiter_new(PyTypeObject *type, binaryfunc yield, PyHamtObject *o)
return NULL;
}
- Py_INCREF(o);
- it->hi_obj = o;
+ it->hi_obj = (PyHamtObject*)Py_NewRef(o);
it->hi_yield = yield;
hamt_iterator_init(&it->hi_iter, o->h_root);
@@ -2652,8 +2581,7 @@ PyTypeObject _PyHamtKeys_Type = {
static PyObject *
hamt_iter_yield_keys(PyObject *key, PyObject *val)
{
- Py_INCREF(key);
- return key;
+ return Py_NewRef(key);
}
PyObject *
@@ -2676,8 +2604,7 @@ PyTypeObject _PyHamtValues_Type = {
static PyObject *
hamt_iter_yield_values(PyObject *key, PyObject *val)
{
- Py_INCREF(val);
- return val;
+ return Py_NewRef(val);
}
PyObject *
@@ -2721,6 +2648,15 @@ hamt_tp_traverse(PyHamtObject *self, visitproc visit, void *arg)
static void
hamt_tp_dealloc(PyHamtObject *self)
{
+ if (self == _empty_hamt) {
+ /* The empty one is statically allocated. */
+#ifdef Py_DEBUG
+ _Py_FatalRefcountError("deallocating the empty hamt singleton");
+#else
+ return;
+#endif
+ }
+
PyObject_GC_UnTrack(self);
if (self->h_weakreflist != NULL) {
PyObject_ClearWeakRefs((PyObject*)self);
@@ -2770,8 +2706,7 @@ hamt_tp_subscript(PyHamtObject *self, PyObject *key)
case F_ERROR:
return NULL;
case F_FOUND:
- Py_INCREF(val);
- return val;
+ return Py_NewRef(val);
case F_NOT_FOUND:
PyErr_SetObject(PyExc_KeyError, key);
return NULL;
@@ -2821,14 +2756,12 @@ hamt_py_get(PyHamtObject *self, PyObject *args)
case F_ERROR:
return NULL;
case F_FOUND:
- Py_INCREF(val);
- return val;
+ return Py_NewRef(val);
case F_NOT_FOUND:
if (def == NULL) {
Py_RETURN_NONE;
}
- Py_INCREF(def);
- return def;
+ return Py_NewRef(def);
default:
Py_UNREACHABLE();
}
@@ -2959,11 +2892,3 @@ PyTypeObject _PyHamt_CollisionNode_Type = {
.tp_free = PyObject_GC_Del,
.tp_hash = PyObject_HashNotImplemented,
};
-
-
-void
-_PyHamt_Fini(PyInterpreterState *interp)
-{
- Py_CLEAR(_empty_hamt);
- Py_CLEAR(_empty_bitmap_node);
-}