PyUnicode_AsUTF32String() now calls _PyUnicode_EncodeUTF32() directly,
instead of going through the deprecated PyUnicode_EncodeUTF32() function.
parent 77faf69ca1
commit b960b34577
@@ -5150,12 +5150,7 @@ PyUnicode_EncodeUTF32(const Py_UNICODE *s,
 PyObject *
 PyUnicode_AsUTF32String(PyObject *unicode)
 {
-    const Py_UNICODE *wstr;
-    Py_ssize_t wlen;
-    wstr = PyUnicode_AsUnicodeAndSize(unicode, &wlen);
-    if (wstr == NULL)
-        return NULL;
-    return PyUnicode_EncodeUTF32(wstr, wlen, NULL, 0);
+    return _PyUnicode_EncodeUTF32(unicode, NULL, 0);
 }

 /* --- UTF-16 Codec ------------------------------------------------------- */
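For context, a minimal sketch (not part of this commit) of extension code calling the public PyUnicode_AsUTF32String() API, whose observable behavior is unchanged by routing the implementation through _PyUnicode_EncodeUTF32() internally. The wrapper name encode_utf32_example and its method-style signature are assumptions for illustration only; the C API calls themselves (PyUnicode_Check, PyErr_SetString, PyUnicode_AsUTF32String) are standard.

/* Sketch (assumption, not from this commit): call the public
 * PyUnicode_AsUTF32String() API from a C extension function. */
#include <Python.h>

static PyObject *
encode_utf32_example(PyObject *self, PyObject *arg)
{
    if (!PyUnicode_Check(arg)) {
        PyErr_SetString(PyExc_TypeError, "expected a str object");
        return NULL;
    }
    /* Returns a new bytes object holding the UTF-32 encoding of arg
     * (native byte order, starting with a BOM), or NULL with an
     * exception set on failure. */
    return PyUnicode_AsUTF32String(arg);
}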