From 3abc03c8088b77d9da6b74a2092f3ae05e70df01 Mon Sep 17 00:00:00 2001 From: AN Long Date: Tue, 3 Feb 2026 21:25:29 +0900 Subject: [PATCH 1/7] gh-132888: Fix Windows API error checking in pyrepl.windows_console (#144248) --- Lib/_pyrepl/windows_console.py | 33 ++++++++++--------- Lib/test/test_pyrepl/test_windows_console.py | 17 +++++++++- ...-01-27-00-03-41.gh-issue-132888.yhTfUN.rst | 2 ++ 3 files changed, 35 insertions(+), 17 deletions(-) create mode 100644 Misc/NEWS.d/next/Library/2026-01-27-00-03-41.gh-issue-132888.yhTfUN.rst diff --git a/Lib/_pyrepl/windows_console.py b/Lib/_pyrepl/windows_console.py index 303af8a354ff00..6c949c046875f3 100644 --- a/Lib/_pyrepl/windows_console.py +++ b/Lib/_pyrepl/windows_console.py @@ -43,14 +43,11 @@ from .windows_eventqueue import EventQueue try: - from ctypes import get_last_error, GetLastError, WinDLL, windll, WinError # type: ignore[attr-defined] + from ctypes import get_last_error, WinDLL, windll, WinError # type: ignore[attr-defined] except: # Keep MyPy happy off Windows from ctypes import CDLL as WinDLL, cdll as windll - def GetLastError() -> int: - return 42 - def get_last_error() -> int: return 42 @@ -149,16 +146,18 @@ def __init__( # Save original console modes so we can recover on cleanup. original_input_mode = DWORD() - GetConsoleMode(InHandle, original_input_mode) + if not GetConsoleMode(InHandle, original_input_mode): + raise WinError(get_last_error()) trace(f'saved original input mode 0x{original_input_mode.value:x}') self.__original_input_mode = original_input_mode.value - SetConsoleMode( + if not SetConsoleMode( OutHandle, ENABLE_WRAP_AT_EOL_OUTPUT | ENABLE_PROCESSED_OUTPUT | ENABLE_VIRTUAL_TERMINAL_PROCESSING, - ) + ): + raise WinError(get_last_error()) self.screen: list[str] = [] self.width = 80 @@ -301,7 +300,7 @@ def _scroll( if not ScrollConsoleScreenBuffer( OutHandle, scroll_rect, None, destination_origin, fill_info ): - raise WinError(GetLastError()) + raise WinError(get_last_error()) def _hide_cursor(self): self.__write("\x1b[?25l") @@ -335,7 +334,7 @@ def __write(self, text: str) -> None: def screen_xy(self) -> tuple[int, int]: info = CONSOLE_SCREEN_BUFFER_INFO() if not GetConsoleScreenBufferInfo(OutHandle, info): - raise WinError(GetLastError()) + raise WinError(get_last_error()) return info.dwCursorPosition.X, info.dwCursorPosition.Y def _erase_to_end(self) -> None: @@ -350,14 +349,16 @@ def prepare(self) -> None: self.__offset = 0 if self.__vt_support: - SetConsoleMode(InHandle, self.__original_input_mode | ENABLE_VIRTUAL_TERMINAL_INPUT) + if not SetConsoleMode(InHandle, self.__original_input_mode | ENABLE_VIRTUAL_TERMINAL_INPUT): + raise WinError(get_last_error()) self._enable_bracketed_paste() def restore(self) -> None: if self.__vt_support: # Recover to original mode before running REPL self._disable_bracketed_paste() - SetConsoleMode(InHandle, self.__original_input_mode) + if not SetConsoleMode(InHandle, self.__original_input_mode): + raise WinError(get_last_error()) def _move_relative(self, x: int, y: int) -> None: """Moves relative to the current posxy""" @@ -394,7 +395,7 @@ def getheightwidth(self) -> tuple[int, int]: and width of the terminal window in characters.""" info = CONSOLE_SCREEN_BUFFER_INFO() if not GetConsoleScreenBufferInfo(OutHandle, info): - raise WinError(GetLastError()) + raise WinError(get_last_error()) return ( info.srWindow.Bottom - info.srWindow.Top + 1, info.srWindow.Right - info.srWindow.Left + 1, @@ -403,7 +404,7 @@ def getheightwidth(self) -> tuple[int, int]: def _getscrollbacksize(self) 
-> int: info = CONSOLE_SCREEN_BUFFER_INFO() if not GetConsoleScreenBufferInfo(OutHandle, info): - raise WinError(GetLastError()) + raise WinError(get_last_error()) return info.srWindow.Bottom # type: ignore[no-any-return] @@ -411,7 +412,7 @@ def _read_input(self) -> INPUT_RECORD | None: rec = INPUT_RECORD() read = DWORD() if not ReadConsoleInput(InHandle, rec, 1, read): - raise WinError(GetLastError()) + raise WinError(get_last_error()) return rec @@ -421,7 +422,7 @@ def _read_input_bulk( rec = (n * INPUT_RECORD)() read = DWORD() if not ReadConsoleInput(InHandle, rec, n, read): - raise WinError(GetLastError()) + raise WinError(get_last_error()) return rec, read.value @@ -523,7 +524,7 @@ def flushoutput(self) -> None: def forgetinput(self) -> None: """Forget all pending, but not yet processed input.""" if not FlushConsoleInputBuffer(InHandle): - raise WinError(GetLastError()) + raise WinError(get_last_error()) def getpending(self) -> Event: """Return the characters that have been typed but not yet diff --git a/Lib/test/test_pyrepl/test_windows_console.py b/Lib/test/test_pyrepl/test_windows_console.py index 3587b834f3cd07..f03f84e0985c1f 100644 --- a/Lib/test/test_pyrepl/test_windows_console.py +++ b/Lib/test/test_pyrepl/test_windows_console.py @@ -10,7 +10,7 @@ from test.support import force_not_colorized_test_class from typing import Iterable from unittest import TestCase -from unittest.mock import MagicMock, call +from unittest.mock import MagicMock, call, patch from .support import handle_all_events, code_to_events from .support import prepare_reader as default_prepare_reader @@ -30,7 +30,21 @@ pass +def _mock_console_init(self, f_in=0, f_out=1, term="", encoding="utf-8"): + """Mock __init__ to avoid real Windows API calls in headless environments.""" + super(WindowsConsole, self).__init__(f_in, f_out, term, encoding) + self.screen = [] + self.width = 80 + self.height = 25 + self._WindowsConsole__offset = 0 + self.posxy = (0, 0) + self._WindowsConsole__vt_support = False + self._WindowsConsole_original_input_mode = 0 + self.event_queue = wc.EventQueue('utf-8') + + @force_not_colorized_test_class +@patch.object(WindowsConsole, '__init__', _mock_console_init) class WindowsConsoleTests(TestCase): def console(self, events, **kwargs) -> Console: console = WindowsConsole() @@ -373,6 +387,7 @@ def test_multiline_ctrl_z(self): con.restore() +@patch.object(WindowsConsole, '__init__', _mock_console_init) class WindowsConsoleGetEventTests(TestCase): # Virtual-Key Codes: https://learn.microsoft.com/en-us/windows/win32/inputdev/virtual-key-codes VK_BACK = 0x08 diff --git a/Misc/NEWS.d/next/Library/2026-01-27-00-03-41.gh-issue-132888.yhTfUN.rst b/Misc/NEWS.d/next/Library/2026-01-27-00-03-41.gh-issue-132888.yhTfUN.rst new file mode 100644 index 00000000000000..71b984c69c5c29 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2026-01-27-00-03-41.gh-issue-132888.yhTfUN.rst @@ -0,0 +1,2 @@ +Fix incorrect use of :func:`ctypes.GetLastError` and add missing error +checks for Windows API calls in :mod:`!_pyrepl.windows_console`. 
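
The fix above hinges on a ctypes detail: ctypes.get_last_error() returns the error code that ctypes captured immediately after a foreign call made through a DLL opened with use_last_error=True, whereas ctypes.GetLastError() queries the live per-thread error slot, which unrelated calls may have overwritten by the time the failure is handled. Below is a minimal, Windows-only sketch of the resulting check-then-raise pattern; the kernel32 handle and the console_mode() helper are illustrative stand-ins, not code taken from _pyrepl.windows_console.

    import ctypes
    from ctypes import WinDLL, WinError, get_last_error
    from ctypes.wintypes import DWORD, HANDLE

    # use_last_error=True tells ctypes to snapshot GetLastError() right after
    # each foreign call; get_last_error() then returns that snapshot instead
    # of whatever the thread's error slot happens to hold later.
    kernel32 = WinDLL("kernel32", use_last_error=True)

    def console_mode(handle: int) -> int:
        # Illustrative helper (not from the patch): console APIs report
        # failure by returning zero, so check the result and raise with the
        # captured error code, as the patch now does consistently.
        mode = DWORD()
        if not kernel32.GetConsoleMode(HANDLE(handle), ctypes.byref(mode)):
            raise WinError(get_last_error())
        return mode.value
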
From 74c1f4145e0c214305e571ce83cdb3ab5f1d83b5 Mon Sep 17 00:00:00 2001 From: Thomas Kluyver Date: Tue, 3 Feb 2026 12:36:12 +0000 Subject: [PATCH 2/7] gh-144377: Clean up sqlite3 Connection's list of weakrefs to Cursor objects (#144378) --- Modules/_sqlite/connection.c | 52 +----------------------------------- Modules/_sqlite/connection.h | 7 +---- Modules/_sqlite/cursor.c | 26 ------------------ 3 files changed, 2 insertions(+), 83 deletions(-) diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c index cde06c965ad4e3..af63271b9fd971 100644 --- a/Modules/_sqlite/connection.c +++ b/Modules/_sqlite/connection.c @@ -38,7 +38,6 @@ #include "pycore_pyerrors.h" // _PyErr_ChainExceptions1() #include "pycore_pylifecycle.h" // _Py_IsInterpreterFinalizing() #include "pycore_unicodeobject.h" // _PyUnicode_AsUTF8NoNUL -#include "pycore_weakref.h" #include @@ -144,7 +143,6 @@ class _sqlite3.Connection "pysqlite_Connection *" "clinic_state()->ConnectionTyp [clinic start generated code]*/ /*[clinic end generated code: output=da39a3ee5e6b4b0d input=67369db2faf80891]*/ -static int _pysqlite_drop_unused_cursor_references(pysqlite_Connection* self); static void incref_callback_context(callback_context *ctx); static void decref_callback_context(callback_context *ctx); static void set_callback_context(callback_context **ctx_pp, @@ -285,17 +283,10 @@ pysqlite_connection_init_impl(pysqlite_Connection *self, PyObject *database, goto error; } - /* Create lists of weak references to cursors and blobs */ - PyObject *cursors = PyList_New(0); - if (cursors == NULL) { - Py_DECREF(statement_cache); - goto error; - } - + /* Create lists of weak references to blobs */ PyObject *blobs = PyList_New(0); if (blobs == NULL) { Py_DECREF(statement_cache); - Py_DECREF(cursors); goto error; } @@ -308,9 +299,7 @@ pysqlite_connection_init_impl(pysqlite_Connection *self, PyObject *database, self->check_same_thread = check_same_thread; self->thread_ident = PyThread_get_thread_ident(); self->statement_cache = statement_cache; - self->cursors = cursors; self->blobs = blobs; - self->created_cursors = 0; self->row_factory = Py_NewRef(Py_None); self->text_factory = Py_NewRef(&PyUnicode_Type); self->trace_ctx = NULL; @@ -392,7 +381,6 @@ connection_traverse(PyObject *op, visitproc visit, void *arg) pysqlite_Connection *self = _pysqlite_Connection_CAST(op); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->statement_cache); - Py_VISIT(self->cursors); Py_VISIT(self->blobs); Py_VISIT(self->row_factory); Py_VISIT(self->text_factory); @@ -417,7 +405,6 @@ connection_clear(PyObject *op) { pysqlite_Connection *self = _pysqlite_Connection_CAST(op); Py_CLEAR(self->statement_cache); - Py_CLEAR(self->cursors); Py_CLEAR(self->blobs); Py_CLEAR(self->row_factory); Py_CLEAR(self->text_factory); @@ -562,11 +549,6 @@ pysqlite_connection_cursor_impl(pysqlite_Connection *self, PyObject *factory) return NULL; } - if (_pysqlite_drop_unused_cursor_references(self) < 0) { - Py_DECREF(cursor); - return NULL; - } - if (cursor && self->row_factory != Py_None) { Py_INCREF(self->row_factory); Py_XSETREF(((pysqlite_Cursor *)cursor)->row_factory, self->row_factory); @@ -1067,38 +1049,6 @@ final_callback(sqlite3_context *context) PyGILState_Release(threadstate); } -static int -_pysqlite_drop_unused_cursor_references(pysqlite_Connection* self) -{ - /* we only need to do this once in a while */ - if (self->created_cursors++ < 200) { - return 0; - } - - self->created_cursors = 0; - - PyObject* new_list = PyList_New(0); - if (!new_list) { - return -1; - } - - 
assert(PyList_CheckExact(self->cursors)); - Py_ssize_t imax = PyList_GET_SIZE(self->cursors); - for (Py_ssize_t i = 0; i < imax; i++) { - PyObject* weakref = PyList_GET_ITEM(self->cursors, i); - if (_PyWeakref_IsDead(weakref)) { - continue; - } - if (PyList_Append(new_list, weakref) != 0) { - Py_DECREF(new_list); - return -1; - } - } - - Py_SETREF(self->cursors, new_list); - return 0; -} - /* Allocate a UDF/callback context structure. In order to ensure that the state * pointer always outlives the callback context, we make sure it owns a * reference to the module itself. create_callback_context() is always called diff --git a/Modules/_sqlite/connection.h b/Modules/_sqlite/connection.h index 703396a0c8db53..a2241bd540669c 100644 --- a/Modules/_sqlite/connection.h +++ b/Modules/_sqlite/connection.h @@ -70,14 +70,9 @@ typedef struct PyObject *statement_cache; - /* Lists of weak references to cursors and blobs used within this connection */ - PyObject *cursors; + /* Lists of weak references to blobs used within this connection */ PyObject *blobs; - /* Counters for how many cursors were created in the connection. May be - * reset to 0 at certain intervals */ - int created_cursors; - PyObject* row_factory; /* Determines how bytestrings from SQLite are converted to Python objects: diff --git a/Modules/_sqlite/cursor.c b/Modules/_sqlite/cursor.c index 4611c9e5e3e437..5a61e43617984d 100644 --- a/Modules/_sqlite/cursor.c +++ b/Modules/_sqlite/cursor.c @@ -99,28 +99,6 @@ class _sqlite3.Cursor "pysqlite_Cursor *" "clinic_state()->CursorType" [clinic start generated code]*/ /*[clinic end generated code: output=da39a3ee5e6b4b0d input=3c5b8115c5cf30f1]*/ -/* - * Registers a cursor with the connection. - * - * 0 => error; 1 => ok - */ -static int -register_cursor(pysqlite_Connection *connection, PyObject *cursor) -{ - PyObject *weakref = PyWeakref_NewRef((PyObject *)cursor, NULL); - if (weakref == NULL) { - return 0; - } - - if (PyList_Append(connection->cursors, weakref) < 0) { - Py_CLEAR(weakref); - return 0; - } - - Py_DECREF(weakref); - return 1; -} - /*[clinic input] _sqlite3.Cursor.__init__ as pysqlite_cursor_init @@ -160,10 +138,6 @@ pysqlite_cursor_init_impl(pysqlite_Cursor *self, return -1; } - if (!register_cursor(connection, (PyObject *)self)) { - return -1; - } - self->initialized = 1; return 0; From bb917d83b16231ad5193731f0405fbc53122d68b Mon Sep 17 00:00:00 2001 From: Taneli Hukkinen Date: Tue, 3 Feb 2026 14:41:57 +0200 Subject: [PATCH 3/7] gh-142956: Update `tomllib` to parse TOML 1.1.0 (#144243) Co-authored-by: Petr Viktorin Co-authored-by: Stan Ulbrych <89152624+StanFromIreland@users.noreply.github.com> --- Doc/library/tomllib.rst | 12 +++- Doc/whatsnew/3.15.rst | 50 ++++++++++++++++ Lib/test/test_tomllib/burntsushi.py | 33 +++-------- .../data/valid/dates-and-times/datetimes.json | 1 + .../data/valid/dates-and-times/datetimes.toml | 1 + .../data/valid/dates-and-times/localtime.json | 4 +- .../data/valid/dates-and-times/localtime.toml | 3 +- .../empty-inline-table.json | 0 .../empty-inline-table.toml | 0 .../inline-table/multiline-inline-table.json | 35 +++++++++++ .../inline-table/multiline-inline-table.toml | 12 ++++ .../multiline-basic-str/replacements.json | 6 ++ .../multiline-basic-str/replacements.toml | 4 ++ Lib/test/test_tomllib/test_data.py | 19 +----- Lib/tomllib/_parser.py | 59 ++++++++++--------- Lib/tomllib/_re.py | 26 ++++---- Makefile.pre.in | 1 + ...-01-26-12-30-57.gh-issue-142956.X9CS8J.rst | 1 + 18 files changed, 182 insertions(+), 85 deletions(-) rename 
Lib/test/test_tomllib/data/valid/{ => inline-table}/empty-inline-table.json (100%) rename Lib/test/test_tomllib/data/valid/{ => inline-table}/empty-inline-table.toml (100%) create mode 100644 Lib/test/test_tomllib/data/valid/inline-table/multiline-inline-table.json create mode 100644 Lib/test/test_tomllib/data/valid/inline-table/multiline-inline-table.toml create mode 100644 Lib/test/test_tomllib/data/valid/multiline-basic-str/replacements.json create mode 100644 Lib/test/test_tomllib/data/valid/multiline-basic-str/replacements.toml create mode 100644 Misc/NEWS.d/next/Library/2026-01-26-12-30-57.gh-issue-142956.X9CS8J.rst diff --git a/Doc/library/tomllib.rst b/Doc/library/tomllib.rst index 30d7ff50a1acc1..d3767798055da4 100644 --- a/Doc/library/tomllib.rst +++ b/Doc/library/tomllib.rst @@ -4,8 +4,6 @@ .. module:: tomllib :synopsis: Parse TOML files. -.. versionadded:: 3.11 - .. moduleauthor:: Taneli Hukkinen .. sectionauthor:: Taneli Hukkinen @@ -13,10 +11,18 @@ -------------- -This module provides an interface for parsing TOML 1.0.0 (Tom's Obvious Minimal +This module provides an interface for parsing TOML 1.1.0 (Tom's Obvious Minimal Language, `https://toml.io `_). This module does not support writing TOML. +.. versionadded:: 3.11 + The module was added with support for TOML 1.0.0. + +.. versionchanged:: next + Added TOML 1.1.0 support. + See the :ref:`What's New ` for details. + + .. seealso:: The :pypi:`Tomli-W package ` diff --git a/Doc/whatsnew/3.15.rst b/Doc/whatsnew/3.15.rst index b9178fb794a71e..7979933d7e8e79 100644 --- a/Doc/whatsnew/3.15.rst +++ b/Doc/whatsnew/3.15.rst @@ -867,6 +867,56 @@ tkinter with outdated names. (Contributed by Serhiy Storchaka in :gh:`143754`) + +.. _whatsnew315-tomllib-1-1-0: + +tomllib +------- + +* The :mod:`tomllib` module now supports TOML 1.1.0. + This is a backwards compatible update, meaning that all valid TOML 1.0.0 + documents are parsed the same way. + + The changes, according to the `official TOML changelog`_, are: + + - Allow newlines and trailing commas in inline tables. + + Previously an inline table had to be on a single line and couldn't end + with a trailing comma. This is now relaxed so that the following is valid: + + .. syntax highlighting needs TOML 1.1.0 support in Pygments, + see https://github.com/pygments/pygments/issues/3026 + + .. code-block:: text + + tbl = { + key = "a string", + moar-tbl = { + key = 1, + }, + } + + - Add ``\xHH`` notation to basic strings for codepoints under 255, + and the ``\e`` escape for the escape character: + + .. code-block:: text + + null = "null byte: \x00; letter a: \x61" + csi = "\e[" + + - Seconds in datetime and time values are now optional. + The following are now valid: + + .. code-block:: text + + dt = 2010-02-03 14:15 + t = 14:15 + + (Contributed by Taneli Hukkinen in :gh:`142956`.) + +.. 
_official TOML changelog: https://github.com/toml-lang/toml/blob/main/CHANGELOG.md + + types ------ diff --git a/Lib/test/test_tomllib/burntsushi.py b/Lib/test/test_tomllib/burntsushi.py index 71228c65369572..0ec50eb1a98a45 100644 --- a/Lib/test/test_tomllib/burntsushi.py +++ b/Lib/test/test_tomllib/burntsushi.py @@ -7,19 +7,8 @@ import datetime from typing import Any -# Aliases for converting TOML compliance format [1] to BurntSushi format [2] -# [1] https://github.com/toml-lang/compliance/blob/db7c3211fda30ff9ddb10292f4aeda7e2e10abc4/docs/json-encoding.md # noqa: E501 -# [2] https://github.com/BurntSushi/toml-test/blob/4634fdf3a6ecd6aaea5f4cdcd98b2733c2694993/README.md # noqa: E501 -_aliases = { - "boolean": "bool", - "offset datetime": "datetime", - "local datetime": "datetime-local", - "local date": "date-local", - "local time": "time-local", -} - - -def convert(obj): # noqa: C901 + +def convert(obj): if isinstance(obj, str): return {"type": "string", "value": obj} elif isinstance(obj, bool): @@ -53,31 +42,25 @@ def convert(obj): # noqa: C901 def normalize(obj: Any) -> Any: """Normalize test objects. - This normalizes primitive values (e.g. floats), and also converts from - TOML compliance format [1] to BurntSushi format [2]. - - [1] https://github.com/toml-lang/compliance/blob/db7c3211fda30ff9ddb10292f4aeda7e2e10abc4/docs/json-encoding.md # noqa: E501 - [2] https://github.com/BurntSushi/toml-test/blob/4634fdf3a6ecd6aaea5f4cdcd98b2733c2694993/README.md # noqa: E501 - """ + This normalizes primitive values (e.g. floats).""" if isinstance(obj, list): return [normalize(item) for item in obj] if isinstance(obj, dict): if "type" in obj and "value" in obj: type_ = obj["type"] - norm_type = _aliases.get(type_, type_) value = obj["value"] - if norm_type == "float": + if type_ == "float": norm_value = _normalize_float_str(value) - elif norm_type in {"datetime", "datetime-local"}: + elif type_ in {"datetime", "datetime-local"}: norm_value = _normalize_datetime_str(value) - elif norm_type == "time-local": + elif type_ == "time-local": norm_value = _normalize_localtime_str(value) else: norm_value = value - if norm_type == "array": + if type_ == "array": return [normalize(item) for item in value] - return {"type": norm_type, "value": norm_value} + return {"type": type_, "value": norm_value} return {k: normalize(v) for k, v in obj.items()} raise AssertionError("Burntsushi fixtures should be dicts/lists only") diff --git a/Lib/test/test_tomllib/data/valid/dates-and-times/datetimes.json b/Lib/test/test_tomllib/data/valid/dates-and-times/datetimes.json index 99aca873480ec3..09a7c083d14f88 100644 --- a/Lib/test/test_tomllib/data/valid/dates-and-times/datetimes.json +++ b/Lib/test/test_tomllib/data/valid/dates-and-times/datetimes.json @@ -1,4 +1,5 @@ { "local-dt": {"type":"datetime-local","value":"1988-10-27t01:01:01"}, + "local-dt-no-seconds": {"type":"datetime-local","value":"2025-04-18t20:05:00"}, "zulu-dt": {"type":"datetime","value":"1988-10-27t01:01:01z"} } diff --git a/Lib/test/test_tomllib/data/valid/dates-and-times/datetimes.toml b/Lib/test/test_tomllib/data/valid/dates-and-times/datetimes.toml index cf84159de46fd8..5dc4b318256198 100644 --- a/Lib/test/test_tomllib/data/valid/dates-and-times/datetimes.toml +++ b/Lib/test/test_tomllib/data/valid/dates-and-times/datetimes.toml @@ -1,2 +1,3 @@ local-dt=1988-10-27t01:01:01 +local-dt-no-seconds=2025-04-18T20:05 zulu-dt=1988-10-27t01:01:01z diff --git a/Lib/test/test_tomllib/data/valid/dates-and-times/localtime.json 
b/Lib/test/test_tomllib/data/valid/dates-and-times/localtime.json index 4d96abcbc799e6..1f66348b237161 100644 --- a/Lib/test/test_tomllib/data/valid/dates-and-times/localtime.json +++ b/Lib/test/test_tomllib/data/valid/dates-and-times/localtime.json @@ -1,2 +1,4 @@ {"t": - {"type":"time-local","value":"00:00:00.999999"}} + {"type":"time-local","value":"00:00:00.999999"}, +"t2": + {"type":"time-local","value":"00:00:00"}} diff --git a/Lib/test/test_tomllib/data/valid/dates-and-times/localtime.toml b/Lib/test/test_tomllib/data/valid/dates-and-times/localtime.toml index 87547c1cf3bd89..6579b30c94f8d6 100644 --- a/Lib/test/test_tomllib/data/valid/dates-and-times/localtime.toml +++ b/Lib/test/test_tomllib/data/valid/dates-and-times/localtime.toml @@ -1 +1,2 @@ -t=00:00:00.99999999999999 \ No newline at end of file +t=00:00:00.99999999999999 +t2=00:00 \ No newline at end of file diff --git a/Lib/test/test_tomllib/data/valid/empty-inline-table.json b/Lib/test/test_tomllib/data/valid/inline-table/empty-inline-table.json similarity index 100% rename from Lib/test/test_tomllib/data/valid/empty-inline-table.json rename to Lib/test/test_tomllib/data/valid/inline-table/empty-inline-table.json diff --git a/Lib/test/test_tomllib/data/valid/empty-inline-table.toml b/Lib/test/test_tomllib/data/valid/inline-table/empty-inline-table.toml similarity index 100% rename from Lib/test/test_tomllib/data/valid/empty-inline-table.toml rename to Lib/test/test_tomllib/data/valid/inline-table/empty-inline-table.toml diff --git a/Lib/test/test_tomllib/data/valid/inline-table/multiline-inline-table.json b/Lib/test/test_tomllib/data/valid/inline-table/multiline-inline-table.json new file mode 100644 index 00000000000000..d253884fbac9f0 --- /dev/null +++ b/Lib/test/test_tomllib/data/valid/inline-table/multiline-inline-table.json @@ -0,0 +1,35 @@ +{ + "multiline": { + "a": { + "type": "integer", + "value": "1" + }, + "b": { + "type": "integer", + "value": "2" + }, + "c": [ + { + "type": "integer", + "value": "1" + }, + { + "type": "integer", + "value": "2" + }, + { + "type": "integer", + "value": "3" + } + ], + "d": { + "type": "integer", + "value": "3" + }, + "e": { + "type": "integer", + "value": "4" + }, + "f": {} + } +} \ No newline at end of file diff --git a/Lib/test/test_tomllib/data/valid/inline-table/multiline-inline-table.toml b/Lib/test/test_tomllib/data/valid/inline-table/multiline-inline-table.toml new file mode 100644 index 00000000000000..6a98a08a576a06 --- /dev/null +++ b/Lib/test/test_tomllib/data/valid/inline-table/multiline-inline-table.toml @@ -0,0 +1,12 @@ +multiline = { + "a" = 1, "b" = 2, + c = [ + 1, + 2, + 3, + ],# comment + d = 3, + e = 4, f = { + # comment + }, +} diff --git a/Lib/test/test_tomllib/data/valid/multiline-basic-str/replacements.json b/Lib/test/test_tomllib/data/valid/multiline-basic-str/replacements.json new file mode 100644 index 00000000000000..699f556248d880 --- /dev/null +++ b/Lib/test/test_tomllib/data/valid/multiline-basic-str/replacements.json @@ -0,0 +1,6 @@ +{ + "escape": {"type":"string","value":"\u001B"}, + "tab": {"type":"string","value":"\t"}, + "upper-j": {"type":"string","value":"J"}, + "upper-j-2": {"type":"string","value":"J"} +} diff --git a/Lib/test/test_tomllib/data/valid/multiline-basic-str/replacements.toml b/Lib/test/test_tomllib/data/valid/multiline-basic-str/replacements.toml new file mode 100644 index 00000000000000..fa5647e5938ee4 --- /dev/null +++ b/Lib/test/test_tomllib/data/valid/multiline-basic-str/replacements.toml @@ -0,0 +1,4 @@ +escape = "\e" +tab = 
"\x09" +upper-j = "\x4a" +upper-j-2 = "\x4A" diff --git a/Lib/test/test_tomllib/test_data.py b/Lib/test/test_tomllib/test_data.py index 3483d93022b01b..9db1a37466e7bf 100644 --- a/Lib/test/test_tomllib/test_data.py +++ b/Lib/test/test_tomllib/test_data.py @@ -8,12 +8,6 @@ from . import burntsushi, tomllib - -class MissingFile: - def __init__(self, path: Path): - self.path = path - - DATA_DIR = Path(__file__).parent / "data" VALID_FILES = tuple((DATA_DIR / "valid").glob("**/*.toml")) @@ -22,10 +16,7 @@ def __init__(self, path: Path): _expected_files = [] for p in VALID_FILES: json_path = p.with_suffix(".json") - try: - text = json.loads(json_path.read_bytes().decode()) - except FileNotFoundError: - text = MissingFile(json_path) + text = json.loads(json_path.read_bytes().decode()) _expected_files.append(text) VALID_FILES_EXPECTED = tuple(_expected_files) @@ -49,14 +40,6 @@ def test_invalid(self): def test_valid(self): for valid, expected in zip(VALID_FILES, VALID_FILES_EXPECTED): with self.subTest(msg=valid.stem): - if isinstance(expected, MissingFile): - # For a poor man's xfail, assert that this is one of the - # test cases where expected data is known to be missing. - assert valid.stem in { - "qa-array-inline-nested-1000", - "qa-table-inline-nested-1000", - } - continue toml_str = valid.read_bytes().decode() actual = tomllib.loads(toml_str) actual = burntsushi.convert(actual) diff --git a/Lib/tomllib/_parser.py b/Lib/tomllib/_parser.py index 3ee47aa9e0afba..b59d0f7d54bdc3 100644 --- a/Lib/tomllib/_parser.py +++ b/Lib/tomllib/_parser.py @@ -18,39 +18,40 @@ TYPE_CHECKING = False if TYPE_CHECKING: from collections.abc import Iterable - from typing import IO, Any + from typing import IO, Any, Final from ._types import Key, ParseFloat, Pos -ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127)) +ASCII_CTRL: Final = frozenset(chr(i) for i in range(32)) | frozenset(chr(127)) # Neither of these sets include quotation mark or backslash. They are # currently handled as separate cases in the parser functions. 
-ILLEGAL_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t") -ILLEGAL_MULTILINE_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t\n") +ILLEGAL_BASIC_STR_CHARS: Final = ASCII_CTRL - frozenset("\t") +ILLEGAL_MULTILINE_BASIC_STR_CHARS: Final = ASCII_CTRL - frozenset("\t\n") -ILLEGAL_LITERAL_STR_CHARS = ILLEGAL_BASIC_STR_CHARS -ILLEGAL_MULTILINE_LITERAL_STR_CHARS = ILLEGAL_MULTILINE_BASIC_STR_CHARS +ILLEGAL_LITERAL_STR_CHARS: Final = ILLEGAL_BASIC_STR_CHARS +ILLEGAL_MULTILINE_LITERAL_STR_CHARS: Final = ILLEGAL_MULTILINE_BASIC_STR_CHARS -ILLEGAL_COMMENT_CHARS = ILLEGAL_BASIC_STR_CHARS +ILLEGAL_COMMENT_CHARS: Final = ILLEGAL_BASIC_STR_CHARS -TOML_WS = frozenset(" \t") -TOML_WS_AND_NEWLINE = TOML_WS | frozenset("\n") -BARE_KEY_CHARS = frozenset( +TOML_WS: Final = frozenset(" \t") +TOML_WS_AND_NEWLINE: Final = TOML_WS | frozenset("\n") +BARE_KEY_CHARS: Final = frozenset( "abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789" "-_" ) -KEY_INITIAL_CHARS = BARE_KEY_CHARS | frozenset("\"'") -HEXDIGIT_CHARS = frozenset("abcdef" "ABCDEF" "0123456789") +KEY_INITIAL_CHARS: Final = BARE_KEY_CHARS | frozenset("\"'") +HEXDIGIT_CHARS: Final = frozenset("abcdef" "ABCDEF" "0123456789") -BASIC_STR_ESCAPE_REPLACEMENTS = MappingProxyType( +BASIC_STR_ESCAPE_REPLACEMENTS: Final = MappingProxyType( { "\\b": "\u0008", # backspace "\\t": "\u0009", # tab - "\\n": "\u000A", # linefeed - "\\f": "\u000C", # form feed - "\\r": "\u000D", # carriage return + "\\n": "\u000a", # linefeed + "\\f": "\u000c", # form feed + "\\r": "\u000d", # carriage return + "\\e": "\u001b", # escape '\\"': "\u0022", # quote - "\\\\": "\u005C", # backslash + "\\\\": "\u005c", # backslash } ) @@ -133,7 +134,7 @@ def load(fp: IO[bytes], /, *, parse_float: ParseFloat = float) -> dict[str, Any] return loads(s, parse_float=parse_float) -def loads(s: str, /, *, parse_float: ParseFloat = float) -> dict[str, Any]: # noqa: C901 +def loads(s: str, /, *, parse_float: ParseFloat = float) -> dict[str, Any]: """Parse TOML from a string.""" # The spec allows converting "\r\n" to "\n", even in string @@ -208,10 +209,10 @@ class Flags: """Flags that map to parsed keys/namespaces.""" # Marks an immutable namespace (inline array or inline table). - FROZEN = 0 + FROZEN: Final = 0 # Marks a nest that has been explicitly created and can no longer # be opened using the "[table]" syntax. 
- EXPLICIT_NEST = 1 + EXPLICIT_NEST: Final = 1 def __init__(self) -> None: self._flags: dict[str, dict[Any, Any]] = {} @@ -257,8 +258,8 @@ def is_(self, key: Key, flag: int) -> bool: cont = inner_cont["nested"] key_stem = key[-1] if key_stem in cont: - cont = cont[key_stem] - return flag in cont["flags"] or flag in cont["recursive_flags"] + inner_cont = cont[key_stem] + return flag in inner_cont["flags"] or flag in inner_cont["recursive_flags"] return False @@ -515,7 +516,7 @@ def parse_inline_table(src: str, pos: Pos, parse_float: ParseFloat) -> tuple[Pos nested_dict = NestedDict() flags = Flags() - pos = skip_chars(src, pos, TOML_WS) + pos = skip_comments_and_array_ws(src, pos) if src.startswith("}", pos): return pos + 1, nested_dict.dict while True: @@ -530,16 +531,18 @@ def parse_inline_table(src: str, pos: Pos, parse_float: ParseFloat) -> tuple[Pos if key_stem in nest: raise TOMLDecodeError(f"Duplicate inline table key {key_stem!r}", src, pos) nest[key_stem] = value - pos = skip_chars(src, pos, TOML_WS) + pos = skip_comments_and_array_ws(src, pos) c = src[pos : pos + 1] if c == "}": return pos + 1, nested_dict.dict if c != ",": raise TOMLDecodeError("Unclosed inline table", src, pos) + pos += 1 + pos = skip_comments_and_array_ws(src, pos) + if src.startswith("}", pos): + return pos + 1, nested_dict.dict if isinstance(value, (dict, list)): flags.set(key, Flags.FROZEN, recursive=True) - pos += 1 - pos = skip_chars(src, pos, TOML_WS) def parse_basic_str_escape( @@ -561,6 +564,8 @@ def parse_basic_str_escape( pos += 1 pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE) return pos, "" + if escape_id == "\\x": + return parse_hex_char(src, pos, 2) if escape_id == "\\u": return parse_hex_char(src, pos, 4) if escape_id == "\\U": @@ -660,7 +665,7 @@ def parse_basic_str(src: str, pos: Pos, *, multiline: bool) -> tuple[Pos, str]: pos += 1 -def parse_value( # noqa: C901 +def parse_value( src: str, pos: Pos, parse_float: ParseFloat ) -> tuple[Pos, Any]: try: diff --git a/Lib/tomllib/_re.py b/Lib/tomllib/_re.py index eb8beb19747288..fc374ed63d3e37 100644 --- a/Lib/tomllib/_re.py +++ b/Lib/tomllib/_re.py @@ -10,16 +10,20 @@ TYPE_CHECKING = False if TYPE_CHECKING: - from typing import Any + from typing import Any, Final from ._types import ParseFloat -# E.g. -# - 00:32:00.999999 -# - 00:32:00 -_TIME_RE_STR = r"([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(?:\.([0-9]{1,6})[0-9]*)?" +_TIME_RE_STR: Final = r""" +([01][0-9]|2[0-3]) # hours +:([0-5][0-9]) # minutes +(?: + :([0-5][0-9]) # optional seconds + (?:\.([0-9]{1,6})[0-9]*)? # optional fractions of a second +)? +""" -RE_NUMBER = re.compile( +RE_NUMBER: Final = re.compile( r""" 0 (?: @@ -38,8 +42,8 @@ """, flags=re.VERBOSE, ) -RE_LOCALTIME = re.compile(_TIME_RE_STR) -RE_DATETIME = re.compile( +RE_LOCALTIME: Final = re.compile(_TIME_RE_STR, flags=re.VERBOSE) +RE_DATETIME: Final = re.compile( rf""" ([0-9]{{4}})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01]) # date, e.g. 
1988-10-27 (?: @@ -74,7 +78,8 @@ def match_to_datetime(match: re.Match[str]) -> datetime | date: year, month, day = int(year_str), int(month_str), int(day_str) if hour_str is None: return date(year, month, day) - hour, minute, sec = int(hour_str), int(minute_str), int(sec_str) + hour, minute = int(hour_str), int(minute_str) + sec = int(sec_str) if sec_str else 0 micros = int(micros_str.ljust(6, "0")) if micros_str else 0 if offset_sign_str: tz: tzinfo | None = cached_tz( @@ -103,8 +108,9 @@ def cached_tz(hour_str: str, minute_str: str, sign_str: str) -> timezone: def match_to_localtime(match: re.Match[str]) -> time: hour_str, minute_str, sec_str, micros_str = match.groups() + sec = int(sec_str) if sec_str else 0 micros = int(micros_str.ljust(6, "0")) if micros_str else 0 - return time(int(hour_str), int(minute_str), int(sec_str), micros) + return time(int(hour_str), int(minute_str), sec, micros) def match_to_number(match: re.Match[str], parse_float: ParseFloat) -> Any: diff --git a/Makefile.pre.in b/Makefile.pre.in index b362a4dfdc6894..d27e3301666868 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -2748,6 +2748,7 @@ TESTSUBDIRS= idlelib/idle_test \ test/test_tomllib/data/valid \ test/test_tomllib/data/valid/array \ test/test_tomllib/data/valid/dates-and-times \ + test/test_tomllib/data/valid/inline-table \ test/test_tomllib/data/valid/multiline-basic-str \ test/test_tools \ test/test_tools/i18n_data \ diff --git a/Misc/NEWS.d/next/Library/2026-01-26-12-30-57.gh-issue-142956.X9CS8J.rst b/Misc/NEWS.d/next/Library/2026-01-26-12-30-57.gh-issue-142956.X9CS8J.rst new file mode 100644 index 00000000000000..27f104fa0b62f9 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2026-01-26-12-30-57.gh-issue-142956.X9CS8J.rst @@ -0,0 +1 @@ +Updated :mod:`tomllib` to parse TOML 1.1.0. From 45d00a0791a53f07c0050b985c936281ed825d9b Mon Sep 17 00:00:00 2001 From: Adorilson Bezerra Date: Tue, 3 Feb 2026 13:29:05 +0000 Subject: [PATCH 4/7] gh-106318: Add examples for str.rindex() method (#143887) Co-authored-by: Victor Stinner --- Doc/library/stdtypes.rst | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index ce0d7cbb2e4276..99479091cd5bd2 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -2163,6 +2163,8 @@ expression support in the :mod:`re` module). .. doctest:: + >>> 'spam, spam, spam'.index('spam') + 0 >>> 'spam, spam, spam'.index('eggs') Traceback (most recent call last): File "", line 1, in @@ -2546,6 +2548,20 @@ expression support in the :mod:`re` module). Like :meth:`rfind` but raises :exc:`ValueError` when the substring *sub* is not found. + For example: + + .. doctest:: + + >>> 'spam, spam, spam'.rindex('spam') + 12 + >>> 'spam, spam, spam'.rindex('eggs') + Traceback (most recent call last): + File "", line 1, in + 'spam, spam, spam'.rindex('eggs') + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^ + ValueError: substring not found + + See also :meth:`index` and :meth:`find`. .. 
method:: str.rjust(width, fillchar=' ', /) From 4e15b8d95da9a0f58ad58283979c37e43ff61229 Mon Sep 17 00:00:00 2001 From: Seth Michael Larson Date: Tue, 3 Feb 2026 08:07:59 -0600 Subject: [PATCH 5/7] gh-74453: Add stronger security warning to os.path.commonprefix (GH-144401) --- Doc/library/os.path.rst | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/Doc/library/os.path.rst b/Doc/library/os.path.rst index 3cfe08a1fe1f7a..bfd59fc5a82049 100644 --- a/Doc/library/os.path.rst +++ b/Doc/library/os.path.rst @@ -97,15 +97,17 @@ the :mod:`glob` module.) .. function:: commonprefix(list, /) - Return the longest path prefix (taken character-by-character) that is a - prefix of all paths in *list*. If *list* is empty, return the empty string + Return the longest string prefix (taken character-by-character) that is a + prefix of all strings in *list*. If *list* is empty, return the empty string (``''``). - .. note:: + .. warning:: This function may return invalid paths because it works a - character at a time. To obtain a valid path, see - :func:`commonpath`. + character at a time. + If you need a **common path prefix**, then the algorithm + implemented in this function is not secure. Use + :func:`commonpath` for finding a common path prefix. :: From 53fecbe6e116a4426058b7d0f6c451719c72cb5b Mon Sep 17 00:00:00 2001 From: AN Long Date: Tue, 3 Feb 2026 23:49:12 +0900 Subject: [PATCH 6/7] gh-127313: Use getLogger() without argument to get root logger in logging cookbook (GH-143683) Use getLogger() to get root logger in logging cookbook --- Doc/howto/logging-cookbook.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Doc/howto/logging-cookbook.rst b/Doc/howto/logging-cookbook.rst index 52537a91df542c..9633bc75f2c914 100644 --- a/Doc/howto/logging-cookbook.rst +++ b/Doc/howto/logging-cookbook.rst @@ -229,7 +229,7 @@ messages should not. Here's how you can achieve this:: # tell the handler to use this format console.setFormatter(formatter) # add the handler to the root logger - logging.getLogger('').addHandler(console) + logging.getLogger().addHandler(console) # Now, we can log to the root logger, or any other logger. First the root... logging.info('Jackdaws love my big sphinx of quartz.') @@ -650,7 +650,7 @@ the receiving end. A simple way of doing this is attaching a import logging, logging.handlers - rootLogger = logging.getLogger('') + rootLogger = logging.getLogger() rootLogger.setLevel(logging.DEBUG) socketHandler = logging.handlers.SocketHandler('localhost', logging.handlers.DEFAULT_TCP_LOGGING_PORT) From 79c43e7c249e61d959550c20f798a88c8829a8a8 Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Tue, 3 Feb 2026 12:24:35 -0500 Subject: [PATCH 7/7] gh-139103: Use borrowed references for positional args in _PyStack_UnpackDict (gh-144407) The positional arguments passed to _PyStack_UnpackDict are already kept alive by the caller, so we can avoid the extra reference count operations by using borrowed references instead of creating new ones. This reduces reference count contention in the free-threaded build when calling functions with keyword arguments. In particular, this avoids contention on the type argument to `__new__` when instantiating namedtuples with keyword arguments. 
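
As an illustration of the call pattern this change targets (not a benchmark shipped with the patch), the sketch below instantiates a namedtuple with keyword arguments from several threads; per the description above, such calls are unpacked by _PyStack_UnpackDict and previously incremented and decremented the refcount of the shared type object passed to __new__ on every call. The Point type, thread count, and iteration count are arbitrary stand-ins.

    import threading
    from collections import namedtuple

    Point = namedtuple("Point", ["x", "y"])

    def spin(n: int) -> None:
        for i in range(n):
            # Keyword-argument calls are the case described above: the
            # positional slot holding the Point type is now borrowed rather
            # than re-counted for each call.
            Point(x=i, y=i)

    threads = [threading.Thread(target=spin, args=(100_000,)) for _ in range(8)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
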
--- Objects/call.c | 15 ++++++++++----- Python/ceval.c | 11 ++++++++--- 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/Objects/call.c b/Objects/call.c index af42fc8f7f2dbf..4b1b4bd52a2e56 100644 --- a/Objects/call.c +++ b/Objects/call.c @@ -935,6 +935,10 @@ _PyStack_AsDict(PyObject *const *values, PyObject *kwnames) The newly allocated argument vector supports PY_VECTORCALL_ARGUMENTS_OFFSET. + The positional arguments are borrowed references from the input array + (which must be kept alive by the caller). The keyword argument values + are new references. + When done, you must call _PyStack_UnpackDict_Free(stack, nargs, kwnames) */ PyObject *const * _PyStack_UnpackDict(PyThreadState *tstate, @@ -970,9 +974,9 @@ _PyStack_UnpackDict(PyThreadState *tstate, stack++; /* For PY_VECTORCALL_ARGUMENTS_OFFSET */ - /* Copy positional arguments */ + /* Copy positional arguments (borrowed references) */ for (Py_ssize_t i = 0; i < nargs; i++) { - stack[i] = Py_NewRef(args[i]); + stack[i] = args[i]; } PyObject **kwstack = stack + nargs; @@ -1009,9 +1013,10 @@ void _PyStack_UnpackDict_Free(PyObject *const *stack, Py_ssize_t nargs, PyObject *kwnames) { - Py_ssize_t n = PyTuple_GET_SIZE(kwnames) + nargs; - for (Py_ssize_t i = 0; i < n; i++) { - Py_DECREF(stack[i]); + /* Only decref kwargs values, positional args are borrowed */ + Py_ssize_t nkwargs = PyTuple_GET_SIZE(kwnames); + for (Py_ssize_t i = 0; i < nkwargs; i++) { + Py_DECREF(stack[nargs + i]); } _PyStack_UnpackDict_FreeNoDecRef(stack, kwnames); } diff --git a/Python/ceval.c b/Python/ceval.c index c59f20bbf1e803..590b315ab65c2c 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -2000,11 +2000,16 @@ _PyEvalFramePushAndInit_Ex(PyThreadState *tstate, _PyStackRef func, PyStackRef_CLOSE(func); goto error; } - size_t total_args = nargs + PyDict_GET_SIZE(kwargs); + size_t nkwargs = PyDict_GET_SIZE(kwargs); assert(sizeof(PyObject *) == sizeof(_PyStackRef)); newargs = (_PyStackRef *)object_array; - for (size_t i = 0; i < total_args; i++) { - newargs[i] = PyStackRef_FromPyObjectSteal(object_array[i]); + /* Positional args are borrowed from callargs tuple, need new reference */ + for (Py_ssize_t i = 0; i < nargs; i++) { + newargs[i] = PyStackRef_FromPyObjectNew(object_array[i]); + } + /* Keyword args are owned by _PyStack_UnpackDict, steal them */ + for (size_t i = 0; i < nkwargs; i++) { + newargs[nargs + i] = PyStackRef_FromPyObjectSteal(object_array[nargs + i]); } } else {