path: root/cython0-python313.patch
author     Patrick Northon    2024-12-28 22:26:12 -0500
committer  Patrick Northon    2024-12-28 22:26:12 -0500
commit     be93f560a1a26f9c215cb71bc218fec6dbba0c7d (patch)
tree       4a0c636cc05ad8babca911cf50d881365b800d64 /cython0-python313.patch
parent     3144b216e08c7044d166d7b837afe02e6a3b1971 (diff)
download   aur-be93f560a1a26f9c215cb71bc218fec6dbba0c7d.tar.gz
Backport patches for python 3.13.
Diffstat (limited to 'cython0-python313.patch')
-rw-r--r--  cython0-python313.patch  480
1 file changed, 480 insertions, 0 deletions
diff --git a/cython0-python313.patch b/cython0-python313.patch
new file mode 100644
index 000000000000..56eebb33bd78
--- /dev/null
+++ b/cython0-python313.patch
@@ -0,0 +1,480 @@
+diff --git a/Cython/Utility/Exceptions.c b/Cython/Utility/Exceptions.c
+index 8117b92d4..3d2dfd6f1 100644
+--- a/Cython/Utility/Exceptions.c
++++ b/Cython/Utility/Exceptions.c
+@@ -7,28 +7,45 @@
+
+
+ /////////////// AssertionsEnabled.init ///////////////
+-__Pyx_init_assertions_enabled();
++if (likely(__Pyx_init_assertions_enabled() == 0)); else
+
+ /////////////// AssertionsEnabled.proto ///////////////
+
+-#define __Pyx_init_assertions_enabled()
+-
+ #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag)
+- #define __pyx_assertions_enabled() (1)
+-#elif PY_VERSION_HEX < 0x03080000 || CYTHON_COMPILING_IN_PYPY || defined(Py_LIMITED_API)
+- #define __pyx_assertions_enabled() (!Py_OptimizeFlag)
+-#elif CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030900A6
+- // Py3.8+ has PyConfig from PEP 587, but only Py3.9 added read access to it.
+- // Py_OptimizeFlag is deprecated in Py3.12+
++ #define __Pyx_init_assertions_enabled() (0)
++ #define __pyx_assertions_enabled() (1)
++#elif CYTHON_COMPILING_IN_LIMITED_API || (CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030C0000)
+ static int __pyx_assertions_enabled_flag;
+ #define __pyx_assertions_enabled() (__pyx_assertions_enabled_flag)
+-
+- #undef __Pyx_init_assertions_enabled
+- static void __Pyx_init_assertions_enabled(void) {
+- __pyx_assertions_enabled_flag = ! _PyInterpreterState_GetConfig(__Pyx_PyThreadState_Current->interp)->optimization_level;
++ #if PY_VERSION_HEX >= 0x030D0000
++ #ifndef Py_BUILD_CORE
++ #define Py_BUILD_CORE 1
++ #endif
++ #include "internal/pycore_interp.h"
++ #endif
++ static int __Pyx_init_assertions_enabled(void) {
++ PyObject *builtins, *debug, *debug_str;
++ int flag;
++ builtins = PyEval_GetBuiltins();
++ if (!builtins) goto bad;
++ debug_str = PyUnicode_FromStringAndSize("__debug__", 9);
++ if (!debug_str) goto bad;
++ debug = PyObject_GetItem(builtins, debug_str);
++ Py_DECREF(debug_str);
++ if (!debug) goto bad;
++ flag = PyObject_IsTrue(debug);
++ Py_DECREF(debug);
++ if (flag == -1) goto bad;
++ __pyx_assertions_enabled_flag = flag;
++ return 0;
++ bad:
++ __pyx_assertions_enabled_flag = 1;
++ // We (rarely) may not have an exception set, but the calling code will call PyErr_Occurred() either way.
++ return -1;
+ }
+ #else
+- #define __pyx_assertions_enabled() (!Py_OptimizeFlag)
++ #define __Pyx_init_assertions_enabled() (0)
++ #define __pyx_assertions_enabled() (!Py_OptimizeFlag)
+ #endif
+
+
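
Note: the hunk above drops the Py_OptimizeFlag and _PyInterpreterState_GetConfig() paths and instead reads builtins.__debug__ at module init. The following standalone C sketch (not part of the patch; it assumes an embedding program linked against CPython) illustrates the same detection idea using only public API calls:

#include <Python.h>
#include <stdio.h>

/* Return 1 if assertions are enabled, mirroring the patched
 * __Pyx_init_assertions_enabled(): look up builtins.__debug__ instead of
 * the deprecated Py_OptimizeFlag or private interpreter-config access. */
static int assertions_enabled(void)
{
    PyObject *builtins = PyEval_GetBuiltins();   /* borrowed reference */
    if (!builtins) { PyErr_Clear(); return 1; }  /* be permissive on error */
    PyObject *debug = PyMapping_GetItemString(builtins, "__debug__");
    if (!debug) { PyErr_Clear(); return 1; }
    int flag = PyObject_IsTrue(debug);
    Py_DECREF(debug);
    return (flag < 0) ? 1 : flag;
}

int main(void)
{
    Py_Initialize();
    printf("assertions enabled: %d\n", assertions_enabled());
    Py_Finalize();
    return 0;
}
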
+diff --git a/Cython/Utility/ModuleSetupCode.c b/Cython/Utility/ModuleSetupCode.c
+index cfff60642..06c84385a 100644
+--- a/Cython/Utility/ModuleSetupCode.c
++++ b/Cython/Utility/ModuleSetupCode.c
+@@ -602,8 +602,14 @@ class __Pyx_FakeReference {
+ #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno)
+ #endif
+
+-#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000
++#if CYTHON_COMPILING_IN_LIMITED_API
++ #define __Pyx_PyThreadState_Current PyThreadState_Get()
++#elif !CYTHON_FAST_THREAD_STATE
+ #define __Pyx_PyThreadState_Current PyThreadState_GET()
++#elif PY_VERSION_HEX >= 0x030d00A1
++ //#elif PY_VERSION_HEX >= 0x03050200
++ // Actually added in 3.5.2, but compiling against that does not guarantee that we get imported there.
++ #define __Pyx_PyThreadState_Current PyThreadState_GetUnchecked()
+ #elif PY_VERSION_HEX >= 0x03060000
+ //#elif PY_VERSION_HEX >= 0x03050200
+ // Actually added in 3.5.2, but compiling against that does not guarantee that we get imported there.
+@@ -648,7 +654,7 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
+ // PyThread_ReInitTLS() is a no-op
+ #endif /* TSS (Thread Specific Storage) API */
+
+-#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized)
++#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030d0000 || defined(_PyDict_NewPresized)
+ #define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n))
+ #else
+ #define __Pyx_PyDict_NewPresized(n) PyDict_New()
+@@ -662,7 +668,7 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
+ #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y)
+ #endif
+
+-#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS
++#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && PY_VERSION_HEX < 0x030d0000 && CYTHON_USE_UNICODE_INTERNALS
+ #define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash)
+ #else
+ #define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name)
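
Note: PyThreadState_GetUnchecked() is the public 3.13 spelling of the previously private _PyThreadState_UncheckedGet(). A minimal sketch of the same compile-time dispatch, assuming an embedding program (the MY_THREADSTATE macro name is made up for illustration):

#include <Python.h>
#include <stdio.h>

/* Pick a thread-state accessor the way the patched __Pyx_PyThreadState_Current
 * macro does: the public PyThreadState_GetUnchecked() on 3.13+, otherwise the
 * checked PyThreadState_Get(). */
#if PY_VERSION_HEX >= 0x030d00A1
  #define MY_THREADSTATE() PyThreadState_GetUnchecked()
#else
  #define MY_THREADSTATE() PyThreadState_Get()
#endif

int main(void)
{
    Py_Initialize();
    PyThreadState *ts = MY_THREADSTATE();
    printf("current thread state: %p\n", (void *)ts);
    Py_Finalize();
    return 0;
}
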
+diff --git a/Cython/Utility/ObjectHandling.c b/Cython/Utility/ObjectHandling.c
+index 5c4d9e160..6064fc6cc 100644
+--- a/Cython/Utility/ObjectHandling.c
++++ b/Cython/Utility/ObjectHandling.c
+@@ -198,7 +198,7 @@ static CYTHON_INLINE PyObject *__Pyx_PyIter_Next2(PyObject* iterator, PyObject*
+ next = iternext(iterator);
+ if (likely(next))
+ return next;
+- #if PY_VERSION_HEX >= 0x02070000 && CYTHON_COMPILING_IN_CPYTHON
++ #if PY_VERSION_HEX >= 0x02070000 && CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030d0000
+ if (unlikely(iternext == &_PyObject_NextNotImplemented))
+ return NULL;
+ #endif
+@@ -1151,7 +1151,7 @@ static PyObject *__Pyx__GetNameInClass(PyObject *nmspace, PyObject *name) {
+
+ /////////////// SetNameInClass.proto ///////////////
+
+-#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1
++#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && PY_VERSION_HEX < 0x030d0000
+ // Identifier names are always interned and have a pre-calculated hash value.
+ #define __Pyx_SetNameInClass(ns, name, value) \
+ (likely(PyDict_CheckExact(ns)) ? _PyDict_SetItem_KnownHash(ns, name, value, ((PyASCIIObject *) name)->hash) : PyObject_SetItem(ns, name, value))
+@@ -1200,7 +1200,7 @@ static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name)
+ {
+ PyObject *result;
+ #if !CYTHON_AVOID_BORROWED_REFS
+-#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1
++#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && PY_VERSION_HEX < 0x030d0000
+ // Identifier names are always interned and have a pre-calculated hash value.
+ result = _PyDict_GetItem_KnownHash($moddict_cname, name, ((PyASCIIObject *) name)->hash);
+ __PYX_UPDATE_DICT_CACHE($moddict_cname, result, *dict_cached_value, *dict_version)
+@@ -1370,15 +1370,21 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, P
+ //@requires: Exceptions.c::PyErrFetchRestore
+ //@requires: Exceptions.c::PyErrExceptionMatches
+
++#if __PYX_LIMITED_VERSION_HEX < 0x030d00A1
+ static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) {
+ __Pyx_PyThreadState_declare
+ __Pyx_PyThreadState_assign
+ if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError)))
+ __Pyx_PyErr_Clear();
+ }
++#endif
+
+ static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) {
+ PyObject *result;
++#if __PYX_LIMITED_VERSION_HEX >= 0x030d00A1
++ (void) PyObject_GetOptionalAttr(obj, attr_name, &result);
++ return result;
++#else
+ #if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1
+ // _PyObject_GenericGetAttrWithDict() in CPython 3.7+ can avoid raising the AttributeError.
+ // See https://bugs.python.org/issue32544
+@@ -1392,6 +1398,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, P
+ __Pyx_PyObject_GetAttrStr_ClearAttributeError();
+ }
+ return result;
++#endif
+ }
+
+
+@@ -1818,14 +1825,24 @@ static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name
+ //@requires: PyObjectCallOneArg
+ //@requires: PyObjectCall2Args
+
++#if !(CYTHON_VECTORCALL && __PYX_LIMITED_VERSION_HEX >= 0x030C00A2)
+ static PyObject* __Pyx__PyObject_CallMethod1(PyObject* method, PyObject* arg) {
+ // Separate function to avoid excessive inlining.
+ PyObject *result = __Pyx_PyObject_CallOneArg(method, arg);
+ Py_DECREF(method);
+ return result;
+ }
++#endif
+
+ static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg) {
++#if CYTHON_VECTORCALL && __PYX_LIMITED_VERSION_HEX >= 0x030C00A2
++ PyObject *args[2] = {obj, arg};
++ // avoid unused functions
++ (void) __Pyx_PyObject_GetMethod;
++ (void) __Pyx_PyObject_CallOneArg;
++ (void) __Pyx_PyObject_Call2Args;
++ return PyObject_VectorcallMethod(method_name, args, 2 | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL);
++#else
+ PyObject *method = NULL, *result;
+ int is_method = __Pyx_PyObject_GetMethod(obj, method_name, &method);
+ if (likely(is_method)) {
+@@ -1835,6 +1852,7 @@ static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name
+ }
+ if (unlikely(!method)) return NULL;
+ return __Pyx__PyObject_CallMethod1(method, arg);
++#endif
+ }
+
+
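
Note: both replacements above rely on public calls. PyObject_GetOptionalAttr() (new in 3.13) fetches an attribute without raising AttributeError, and PyObject_VectorcallMethod() (3.9+, limited API since 3.12) calls a method without building a bound-method object. A small sketch of the two, assuming an embedding program:

#include <Python.h>
#include <stdio.h>

int main(void)
{
    Py_Initialize();
    PyObject *obj  = PyUnicode_FromString("hello");
    PyObject *name = PyUnicode_FromString("upper");
    if (!obj || !name) return 1;

#if PY_VERSION_HEX >= 0x030d0000
    /* 3.13+: attribute lookup that reports "missing" via the return value
     * instead of an AttributeError round-trip. */
    PyObject *attr = NULL;
    (void) PyObject_GetOptionalAttr(obj, name, &attr);
    Py_XDECREF(attr);
#endif

#if PY_VERSION_HEX >= 0x03090000
    /* 3.9+: call obj.upper() without materialising a bound method;
     * args[0] is the receiver, hence nargsf = 1. */
    PyObject *args[1] = { obj };
    PyObject *res = PyObject_VectorcallMethod(
        name, args, 1 | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL);
    if (res) {
        printf("%s\n", PyUnicode_AsUTF8(res));
        Py_DECREF(res);
    }
#endif

    Py_DECREF(name);
    Py_DECREF(obj);
    Py_Finalize();
    return 0;
}
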
+diff --git a/Cython/Utility/Optimize.c b/Cython/Utility/Optimize.c
+index 35f3a67c9..715bc8073 100644
+--- a/Cython/Utility/Optimize.c
++++ b/Cython/Utility/Optimize.c
+@@ -34,7 +34,14 @@ static CYTHON_INLINE int __Pyx_PyList_Append(PyObject* list, PyObject* x) {
+ Py_ssize_t len = Py_SIZE(list);
+ if (likely(L->allocated > len) & likely(len > (L->allocated >> 1))) {
+ Py_INCREF(x);
++ #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000
++ // In Py3.13a1, PyList_SET_ITEM() checks that the end index is lower than the current size.
++ // However, extending the size *before* setting the value would not be correct,
++ // so we cannot call PyList_SET_ITEM().
++ L->ob_item[len] = x;
++ #else
+ PyList_SET_ITEM(list, len, x);
++ #endif
+ __Pyx_SET_SIZE(list, len + 1);
+ return 0;
+ }
+@@ -52,7 +59,14 @@ static CYTHON_INLINE int __Pyx_ListComp_Append(PyObject* list, PyObject* x) {
+ Py_ssize_t len = Py_SIZE(list);
+ if (likely(L->allocated > len)) {
+ Py_INCREF(x);
++ #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030d0000
++ // In Py3.13a1, PyList_SET_ITEM() checks that the end index is lower than the current size.
++ // However, extending the size *before* setting the value would not be correct,
++ // so we cannot call PyList_SET_ITEM().
++ L->ob_item[len] = x;
++ #else
+ PyList_SET_ITEM(list, len, x);
++ #endif
+ __Pyx_SET_SIZE(list, len + 1);
+ return 0;
+ }
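
Note: the work-around above exists because, as of 3.13a1, PyList_SET_ITEM() asserts that the index is below the current size, while this fast path must store the element before publishing the new length. A standalone sketch of the same ordering, assuming CPython 3.9+ (for Py_SET_SIZE) and non-limited-API access to the PyListObject layout; real code would otherwise just call PyList_Append():

#include <Python.h>
#include <stdio.h>

/* Fast append mirroring the patched __Pyx_PyList_Append(): if spare capacity
 * exists, store into ob_item[] first and only then bump ob_size. */
static int fast_append(PyObject *list, PyObject *x)
{
    PyListObject *L = (PyListObject *)list;
    Py_ssize_t len = Py_SIZE(list);
    if (L->allocated > len) {
        Py_INCREF(x);
#if PY_VERSION_HEX >= 0x030d0000
        /* PyList_SET_ITEM() now checks index < size, so write the slot
         * directly before growing the size. */
        L->ob_item[len] = x;
#else
        PyList_SET_ITEM(list, len, x);
#endif
        Py_SET_SIZE(list, len + 1);
        return 0;
    }
    return PyList_Append(list, x);    /* slow path: let CPython reallocate */
}

int main(void)
{
    Py_Initialize();
    PyObject *lst = PyList_New(0);
    PyObject *val = PyLong_FromLong(42);
    if (lst && val && fast_append(lst, val) == 0)
        printf("len = %zd\n", PyList_GET_SIZE(lst));
    Py_XDECREF(val);
    Py_XDECREF(lst);
    Py_Finalize();
    return 0;
}
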
+@@ -65,7 +79,7 @@ static CYTHON_INLINE int __Pyx_ListComp_Append(PyObject* list, PyObject* x) {
+ //////////////////// ListExtend.proto ////////////////////
+
+ static CYTHON_INLINE int __Pyx_PyList_Extend(PyObject* L, PyObject* v) {
+-#if CYTHON_COMPILING_IN_CPYTHON
++#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030d0000
+ PyObject* none = _PyList_Extend((PyListObject*)L, v);
+ if (unlikely(!none))
+ return -1;
+@@ -279,7 +293,7 @@ static CYTHON_INLINE PyObject *__Pyx_PyDict_Pop(PyObject *d, PyObject *key, PyOb
+ /////////////// py_dict_pop ///////////////
+
+ static CYTHON_INLINE PyObject *__Pyx_PyDict_Pop(PyObject *d, PyObject *key, PyObject *default_value) {
+-#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX > 0x030600B3
++#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX > 0x030600B3 && PY_VERSION_HEX < 0x030d0000
+ if ((1)) {
+ return _PyDict_Pop(d, key, default_value);
+ } else
+@@ -435,7 +449,7 @@ static CYTHON_INLINE int __Pyx_set_iter_next(
+
+ static CYTHON_INLINE PyObject* __Pyx_set_iterator(PyObject* iterable, int is_set,
+ Py_ssize_t* p_orig_length, int* p_source_is_set) {
+-#if CYTHON_COMPILING_IN_CPYTHON
++#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030d0000
+ is_set = is_set || likely(PySet_CheckExact(iterable) || PyFrozenSet_CheckExact(iterable));
+ *p_source_is_set = is_set;
+ if (likely(is_set)) {
+@@ -455,7 +469,7 @@ static CYTHON_INLINE int __Pyx_set_iter_next(
+ PyObject* iter_obj, Py_ssize_t orig_length,
+ Py_ssize_t* ppos, PyObject **value,
+ int source_is_set) {
+- if (!CYTHON_COMPILING_IN_CPYTHON || unlikely(!source_is_set)) {
++ if (!CYTHON_COMPILING_IN_CPYTHON || PY_VERSION_HEX >= 0x030d0000 || unlikely(!source_is_set)) {
+ *value = PyIter_Next(iter_obj);
+ if (unlikely(!*value)) {
+ return __Pyx_IterFinish();
+@@ -464,7 +478,7 @@ static CYTHON_INLINE int __Pyx_set_iter_next(
+ (void)ppos;
+ return 1;
+ }
+-#if CYTHON_COMPILING_IN_CPYTHON
++#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030d0000
+ if (unlikely(PySet_GET_SIZE(iter_obj) != orig_length)) {
+ PyErr_SetString(
+ PyExc_RuntimeError,
+diff --git a/Cython/Utility/StringTools.c b/Cython/Utility/StringTools.c
+index 98b5e260e..55b62a1d6 100644
+--- a/Cython/Utility/StringTools.c
++++ b/Cython/Utility/StringTools.c
+@@ -804,25 +804,22 @@ static CYTHON_INLINE char __Pyx_PyBytes_GetItemInt(PyObject* bytes, Py_ssize_t i
+ #define __Pyx_PyString_Join PyUnicode_Join
+ #define __Pyx_PyBaseString_Join PyUnicode_Join
+ #endif
+-
+-#if CYTHON_COMPILING_IN_CPYTHON
+- #if PY_MAJOR_VERSION < 3
+- #define __Pyx_PyBytes_Join _PyString_Join
+- #else
+- #define __Pyx_PyBytes_Join _PyBytes_Join
+- #endif
+-#else
+ static CYTHON_INLINE PyObject* __Pyx_PyBytes_Join(PyObject* sep, PyObject* values); /*proto*/
+-#endif
+-
+
+ //////////////////// StringJoin ////////////////////
++//@requires: ObjectHandling.c::PyObjectCallMethod1
+
+-#if !CYTHON_COMPILING_IN_CPYTHON
+ static CYTHON_INLINE PyObject* __Pyx_PyBytes_Join(PyObject* sep, PyObject* values) {
+- return PyObject_CallMethodObjArgs(sep, PYIDENT("join"), values, NULL);
+-}
++ // avoid unused function
++ (void) __Pyx_PyObject_CallMethod1;
++#if CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION < 3
++ return _PyString_Join(sep, values);
++#elif CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030d0000
++ return _PyBytes_Join(sep, values);
++#else
++ return __Pyx_PyObject_CallMethod1(sep, PYIDENT("join"), values);
+ #endif
++}
+
+
+ /////////////// JoinPyUnicode.proto ///////////////
+@@ -873,7 +870,9 @@ static PyObject* __Pyx_PyUnicode_Join(PyObject* value_tuple, Py_ssize_t value_co
+ if (!CYTHON_PEP393_ENABLED || ukind == result_ukind) {
+ memcpy((char *)result_udata + char_pos * result_ukind, udata, (size_t) (ulength * result_ukind));
+ } else {
+- #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030300F0 || defined(_PyUnicode_FastCopyCharacters)
++ #if PY_VERSION_HEX >= 0x030D0000
++ PyUnicode_CopyCharacters(result_uval, char_pos, uval, 0, ulength);
++ #elif CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030300F0 || defined(_PyUnicode_FastCopyCharacters)
+ _PyUnicode_FastCopyCharacters(result_uval, char_pos, uval, 0, ulength);
+ #else
+ Py_ssize_t j;
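
Note: on 3.13 the join loop above falls back to the public PyUnicode_CopyCharacters(), which can copy between strings of different kinds, instead of the private _PyUnicode_FastCopyCharacters(). A minimal sketch of that call, assuming an embedding program:

#include <Python.h>
#include <stdio.h>

int main(void)
{
    Py_Initialize();
    /* Pre-sized target with room for any code point (UCS-4 storage). */
    PyObject *dst = PyUnicode_New(5, 0x10FFFF);
    PyObject *a = PyUnicode_FromString("ab");
    PyObject *b = PyUnicode_FromString("cde");
    if (dst && a && b) {
        /* Public API: copies characters across strings of possibly different
         * kinds; returns the number of characters copied, or -1 on error. */
        PyUnicode_CopyCharacters(dst, 0, a, 0, 2);
        PyUnicode_CopyCharacters(dst, 2, b, 0, 3);
        printf("%s\n", PyUnicode_AsUTF8(dst));
    }
    Py_XDECREF(b);
    Py_XDECREF(a);
    Py_XDECREF(dst);
    Py_Finalize();
    return 0;
}
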
+diff --git a/Cython/Utility/TypeConversion.c b/Cython/Utility/TypeConversion.c
+index 404814907..3a3ee1524 100644
+--- a/Cython/Utility/TypeConversion.c
++++ b/Cython/Utility/TypeConversion.c
+@@ -679,8 +679,39 @@ static CYTHON_INLINE PyObject* {{TO_PY_FUNCTION}}({{TYPE}} value) {
+ {
+ int one = 1; int little = (int)*(unsigned char *)&one;
+ unsigned char *bytes = (unsigned char *)&value;
++#if !CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030d0000
+ return _PyLong_FromByteArray(bytes, sizeof({{TYPE}}),
+ little, !is_unsigned);
++#else
++ // call int.from_bytes()
++ PyObject *from_bytes, *result = NULL;
++ PyObject *py_bytes = NULL, *arg_tuple = NULL, *kwds = NULL, *order_str = NULL;
++ from_bytes = PyObject_GetAttrString((PyObject*)&PyLong_Type, "from_bytes");
++ if (!from_bytes) return NULL;
++ py_bytes = PyBytes_FromStringAndSize((char*)bytes, sizeof({{TYPE}}));
++ if (!py_bytes) goto limited_bad;
++ // I'm deliberately not using PYIDENT here because this code path is very unlikely
++ // to ever run so it seems a pessimization mostly.
++ order_str = PyUnicode_FromString(little ? "little" : "big");
++ if (!order_str) goto limited_bad;
++ arg_tuple = PyTuple_Pack(2, py_bytes, order_str);
++ if (!arg_tuple) goto limited_bad;
++ if (!is_unsigned) {
++ // default is signed=False
++ kwds = PyDict_New();
++ if (!kwds) goto limited_bad;
++ if (PyDict_SetItemString(kwds, "signed", __Pyx_NewRef(Py_True))) goto limited_bad;
++ }
++ result = PyObject_Call(from_bytes, arg_tuple, kwds);
++
++ limited_bad:
++ Py_XDECREF(kwds);
++ Py_XDECREF(arg_tuple);
++ Py_XDECREF(order_str);
++ Py_XDECREF(py_bytes);
++ Py_XDECREF(from_bytes);
++ return result;
++#endif
+ }
+ }
+
+@@ -984,11 +1015,106 @@ static CYTHON_INLINE {{TYPE}} {{FROM_PY_FUNCTION}}(PyObject *x) {
+ }
+ #endif
+ if (likely(v)) {
++int ret = -1;
++#if PY_VERSION_HEX < 0x030d0000 && !(CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_LIMITED_API) || defined(_PyLong_AsByteArray)
+ int one = 1; int is_little = (int)*(unsigned char *)&one;
+ unsigned char *bytes = (unsigned char *)&val;
+- int ret = _PyLong_AsByteArray((PyLongObject *)v,
+- bytes, sizeof(val),
+- is_little, !is_unsigned);
++ ret = _PyLong_AsByteArray((PyLongObject *)v,
++ bytes, sizeof(val),
++ is_little, !is_unsigned);
++#else
++// Inefficient copy of bit chunks through the C-API. Probably still better than a "cannot do this" exception.
++// This is substantially faster in CPython (>30%) than calling "int.to_bytes()"
++ PyObject *stepval = NULL, *mask = NULL, *shift = NULL;
++ int bits, remaining_bits, is_negative = 0;
++ long idigit;
++ int chunk_size = (sizeof(long) < 8) ? 30 : 62;
++
++ // use exact PyLong to prevent user defined &&/<</etc. implementations
++ if (unlikely(!PyLong_CheckExact(v))) {
++ PyObject *tmp = v;
++ v = PyNumber_Long(v);
++ assert(PyLong_CheckExact(v));
++ Py_DECREF(tmp);
++ if (unlikely(!v)) return ({{TYPE}}) -1;
++ }
++
++#if CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030B0000
++ if (Py_SIZE(x) == 0)
++ return ({{TYPE}}) 0;
++ is_negative = Py_SIZE(x) < 0;
++#else
++ {
++ // misuse Py_False as a quick way to compare to a '0' int object
++ int result = PyObject_RichCompareBool(x, Py_False, Py_LT);
++ if (unlikely(result < 0))
++ return ({{TYPE}}) -1;
++ is_negative = result == 1;
++ }
++#endif
++
++ if (is_unsigned && unlikely(is_negative)) {
++ goto raise_neg_overflow;
++ } else if (is_negative) {
++ // bit-invert to make sure we can safely convert it
++ stepval = PyNumber_Invert(v);
++ if (unlikely(!stepval))
++ return ({{TYPE}}) -1;
++ } else {
++ stepval = __Pyx_NewRef(v);
++ }
++
++ // unpack full chunks of bits
++ val = ({{TYPE}}) 0;
++ mask = PyLong_FromLong((1L << chunk_size) - 1); if (unlikely(!mask)) goto done;
++ shift = PyLong_FromLong(chunk_size); if (unlikely(!shift)) goto done;
++ for (bits = 0; bits < (int) sizeof({{TYPE}}) * 8 - chunk_size; bits += chunk_size) {
++ PyObject *tmp, *digit;
++
++ digit = PyNumber_And(stepval, mask);
++ if (unlikely(!digit)) goto done;
++ idigit = PyLong_AsLong(digit);
++ Py_DECREF(digit);
++ if (unlikely(idigit < 0)) goto done;
++
++ tmp = PyNumber_Rshift(stepval, shift);
++ if (unlikely(!tmp)) goto done;
++ Py_DECREF(stepval); stepval = tmp;
++
++ val |= (({{TYPE}}) idigit) << bits;
++
++ #if CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030B0000
++ if (Py_SIZE(stepval) == 0)
++ goto unpacking_done;
++ #endif
++ }
++
++ // detect overflow when adding the last bits
++ idigit = PyLong_AsLong(stepval);
++ if (unlikely(idigit < 0)) goto done;
++ remaining_bits = ((int) sizeof({{TYPE}}) * 8) - bits - (is_unsigned ? 0 : 1);
++ if (unlikely(idigit >= (1L << remaining_bits)))
++ goto raise_overflow;
++ val |= (({{TYPE}}) idigit) << bits;
++
++ #if CYTHON_COMPILING_IN_LIMITED_API && PY_VERSION_HEX < 0x030B0000
++ unpacking_done:
++ #endif
++ // handle sign and overflow into sign bit
++ if (!is_unsigned) {
++ // gcc warns about unsigned (val < 0) => test sign bit instead
++ if (unlikely(val & ((({{TYPE}}) 1) << (sizeof({{TYPE}}) * 8 - 1))))
++ goto raise_overflow;
++ // undo the PyNumber_Invert() above
++ if (is_negative)
++ val = ~val;
++ }
++ ret = 0;
++ done:
++ Py_XDECREF(shift);
++ Py_XDECREF(mask);
++ Py_XDECREF(stepval);
++#endif
+ Py_DECREF(v);
+ if (likely(!ret))
+ return val;
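
Note: the chunked-extraction loop above replaces _PyLong_AsByteArray() on 3.13/limited-API builds by masking and shifting through PyNumber_And() / PyNumber_Rshift(). A stripped-down sketch of the same technique for non-negative values, with the sign and overflow handling of the patch omitted (u64_from_pylong is a made-up name for illustration):

#include <Python.h>
#include <stdio.h>

/* Extract a C uint64 from an arbitrary PyLong using only public calls,
 * 30 bits per iteration, mirroring the loop in the patched FROM_PY helper.
 * Values wider than 64 bits are silently truncated here; the patch instead
 * raises an overflow error. */
static int u64_from_pylong(PyObject *v, unsigned long long *out)
{
    const int chunk = 30;
    PyObject *mask  = PyLong_FromLong((1L << chunk) - 1);
    PyObject *shift = PyLong_FromLong(chunk);
    PyObject *step  = v;
    unsigned long long val = 0;
    int bits, rc = -1;
    Py_INCREF(step);
    if (!mask || !shift) goto done;
    for (bits = 0; bits < 64; bits += chunk) {
        PyObject *digit = PyNumber_And(step, mask);   /* low 30 bits */
        PyObject *next;
        long idigit;
        if (!digit) goto done;
        idigit = PyLong_AsLong(digit);
        Py_DECREF(digit);
        if (idigit < 0) goto done;
        val |= (unsigned long long)idigit << bits;
        next = PyNumber_Rshift(step, shift);          /* drop consumed bits */
        if (!next) goto done;
        Py_DECREF(step);
        step = next;
    }
    *out = val;
    rc = 0;
done:
    Py_XDECREF(step);
    Py_XDECREF(shift);
    Py_XDECREF(mask);
    return rc;
}

int main(void)
{
    Py_Initialize();
    PyObject *n = PyLong_FromUnsignedLongLong(1234567890123ULL);
    unsigned long long out = 0;
    if (n && u64_from_pylong(n, &out) == 0)
        printf("%llu\n", out);
    Py_XDECREF(n);
    Py_Finalize();
    return 0;
}
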