Skip to content

Commit 9c6cd55

Browse files
Strip internal issue-tracker references from tests and comments
Also renames two test files with clearer names:
- test_dbapi_hardening.py
- test_cursor_after_external_close.py

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
1 parent 5638bc7 commit 9c6cd55

22 files changed: 61 additions and 65 deletions

src/dqlitedbapi/__init__.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -109,11 +109,11 @@ def connect(
109109
underlying connection.
110110
max_total_rows: Cumulative row cap across continuation frames
111111
for a single query. Forwarded to the underlying
112-
:class:`Connection` (ISSUE-111). ``None`` disables the cap.
113-
max_continuation_frames: Per-query continuation-frame cap
114-
(ISSUE-98). Forwarded to the underlying :class:`Connection`.
112+
:class:`Connection`. ``None`` disables the cap.
113+
max_continuation_frames: Per-query continuation-frame cap.
114+
Forwarded to the underlying :class:`Connection`.
115115
trust_server_heartbeat: Let the server-advertised heartbeat
116-
widen the per-read deadline (ISSUE-101). Default False.
116+
widen the per-read deadline. Default False.
117117
118118
Returns:
119119
A Connection object

src/dqlitedbapi/aio/connection.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -35,10 +35,10 @@ def __init__(
3535
max_total_rows: Cumulative row cap across continuation
3636
frames. Forwarded to the underlying DqliteConnection;
3737
``None`` disables the cap.
38-
max_continuation_frames: Per-query continuation-frame cap
39-
(ISSUE-98). Forwarded to the underlying DqliteConnection.
38+
max_continuation_frames: Per-query continuation-frame cap.
39+
Forwarded to the underlying DqliteConnection.
4040
trust_server_heartbeat: When True, let the server-advertised
41-
heartbeat widen the per-read deadline (ISSUE-101).
41+
heartbeat widen the per-read deadline.
4242
"""
4343
if not math.isfinite(timeout) or timeout <= 0:
4444
raise ProgrammingError(f"timeout must be a positive finite number, got {timeout}")

src/dqlitedbapi/aio/cursor.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,7 @@ def rownumber(self) -> int | None:
6969
"""0-based index of the next row in the current result set.
7070
7171
PEP 249 optional extension. ``None`` when no result set is
72-
active (ISSUE-80).
72+
active.
7373
"""
7474
if self._description is None:
7575
return None
@@ -198,7 +198,7 @@ async def fetchmany(self, size: int | None = None) -> list[tuple[Any, ...]]:
198198
size = self._arraysize
199199
if size < 0:
200200
# See sync Cursor.fetchmany: silently returning [] on a
201-
# negative size hides caller bugs (ISSUE-82).
201+
# negative size hides caller bugs.
202202
raise ProgrammingError(f"fetchmany size must be >= 0, got {size}")
203203

204204
result: list[tuple[Any, ...]] = []

src/dqlitedbapi/connection.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@
1919
# time; some code paths return ``SQLITE_MISUSE`` (21). Check the
2020
# numeric code first so a malicious or impostor server cannot silence
2121
# unrelated errors just by crafting a message string that contains the
22-
# magic substring (ISSUE-97). The substring remains as a secondary
22+
# magic substring. The substring remains as a secondary
2323
# filter because SQLite has many uses of code=1.
2424
_NO_TX_CODES = frozenset({1, 21})
2525
_NO_TX_SUBSTRING = "no transaction is active"
@@ -65,7 +65,7 @@ def _cleanup_loop_thread(
6565
stacklevel=2,
6666
)
6767
# Narrow suppression to the specific exceptions loop/thread teardown
68-
# can legitimately raise during finalization (ISSUE-113). Wider
68+
# can legitimately raise during finalization. Wider
6969
# ``except Exception: pass`` would hide programmer bugs like a
7070
# missing attribute reference introduced during a refactor.
7171
try:
@@ -109,14 +109,14 @@ def __init__(
109109
max_total_rows: Cumulative row cap across continuation
110110
frames for a single query. Forwarded to the underlying
111111
:class:`DqliteConnection`. ``None`` disables the cap.
112-
max_continuation_frames: Per-query continuation-frame cap
113-
(ISSUE-98). Bounds Python-side decode work a hostile
114-
server can inflict by drip-feeding 1-row frames.
115-
Forwarded to the underlying :class:`DqliteConnection`.
112+
max_continuation_frames: Per-query continuation-frame cap.
113+
Bounds Python-side decode work a hostile server can
114+
inflict by drip-feeding 1-row frames. Forwarded to the
115+
underlying :class:`DqliteConnection`.
116116
trust_server_heartbeat: When True, widen the per-read
117117
deadline to the server-advertised heartbeat (subject to
118118
a 300 s hard cap). Default False so the configured
119-
``timeout`` is authoritative (ISSUE-101).
119+
``timeout`` is authoritative.
120120
"""
121121
if not math.isfinite(timeout) or timeout <= 0:
122122
raise ProgrammingError(f"timeout must be a positive finite number, got {timeout}")

src/dqlitedbapi/cursor.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ async def _call_client(coro: Coroutine[Any, Any, Any]) -> Any:
3535
client.ProtocolError → dbapi.InterfaceError
3636
client.DataError → dbapi.DataError
3737
client.InterfaceError → dbapi.InterfaceError
38-
any other DqliteError → dbapi.InterfaceError (ISSUE-85)
38+
any other DqliteError → dbapi.InterfaceError
3939
4040
Every ``dqliteclient`` exception is a subclass of ``DqliteError``;
4141
the trailing catch-all ensures a new client exception class cannot
@@ -61,7 +61,7 @@ async def _call_client(coro: Coroutine[Any, Any, Any]) -> Any:
6161
except _client_exc.DqliteError as e:
6262
# Catch-all for any future subclass of DqliteError not enumerated
6363
# above. Surface as InterfaceError rather than leaking to the
64-
# caller as a non-DBAPI exception (ISSUE-85).
64+
# caller as a non-DBAPI exception.
6565
raise _DbapiInterfaceError(f"unrecognized client error ({type(e).__name__}): {e}") from e
6666

6767

@@ -97,7 +97,7 @@ def _reject_non_sequence_params(params: Any) -> None:
9797
scrambles positional bindings. And we reject ``str`` /
9898
``bytes`` / ``bytearray`` / ``memoryview`` — they are iterable, so
9999
they would silently "explode" into character/byte binds and the
100-
caller almost always meant ``(value,)`` instead (ISSUE-86).
100+
caller almost always meant ``(value,)`` instead.
101101
"""
102102
if params is None:
103103
return
@@ -152,7 +152,7 @@ def _strip_leading_comments(sql: str) -> str:
152152
def _is_row_returning(sql: str) -> bool:
153153
"""Heuristic for "does this statement return a result set?"
154154
155-
Single source of truth for sync and async cursors (ISSUE-110).
155+
Single source of truth for sync and async cursors.
156156
Matches leading SELECT/PRAGMA/EXPLAIN/WITH after stripping comments,
157157
and catches trailing/embedded RETURNING clauses on DML.
158158
@@ -232,7 +232,7 @@ def rownumber(self) -> int | None:
232232
PEP 249 optional extension: returns ``None`` if no result set is
233233
active (no query executed, or last statement was DML without
234234
RETURNING); otherwise returns the index of the row that the next
235-
``fetchone()`` would produce (ISSUE-80).
235+
``fetchone()`` would produce.
236236
"""
237237
if self._description is None:
238238
return None
@@ -369,8 +369,8 @@ def fetchmany(self, size: int | None = None) -> list[tuple[Any, ...]]:
369369
size = self._arraysize
370370
if size < 0:
371371
# Previously ``range(-5)`` silently returned [] — hid caller
372-
# bugs (ISSUE-82). ``arraysize`` setter already validates
373-
# >= 1; mirror that here.
372+
# bugs. ``arraysize`` setter already validates >= 1; mirror
373+
# that here.
374374
raise ProgrammingError(f"fetchmany size must be >= 0, got {size}")
375375

376376
result: list[tuple[Any, ...]] = []

src/dqlitedbapi/types.py

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ def Time( # noqa: N802
2424
"""Construct a time value.
2525
2626
Accepts optional ``microsecond`` and ``tzinfo`` for parity with
27-
stdlib ``datetime.time`` (ISSUE-88). PEP 249 does not require this,
27+
stdlib ``datetime.time``. PEP 249 does not require this,
2828
but mixing the driver's ``Time()`` with ``datetime.time`` would
2929
otherwise drop sub-second precision silently.
3030
"""
@@ -51,7 +51,7 @@ def _validate_ticks(ticks: float) -> None:
5151
``fromtimestamp`` raises different stdlib exceptions depending on
5252
the failure mode (``ValueError`` for NaN on some platforms,
5353
``OverflowError`` / ``OSError`` for out-of-range). Guard up front so
54-
the caller always sees a single DB-API ``DataError`` (ISSUE-84).
54+
the caller always sees a single DB-API ``DataError``.
5555
"""
5656
if isinstance(ticks, float) and not math.isfinite(ticks):
5757
raise DataError(f"Invalid timestamp ticks: {ticks}")
@@ -209,8 +209,7 @@ def _datetime_from_iso8601(text: str) -> datetime.datetime | None:
209209
210210
A malformed string from the server (bug, corruption, or MitM) would
211211
otherwise escape as a raw ``ValueError``; wrap as ``DataError`` to
212-
satisfy PEP 249's "all DB errors funnel through Error" contract
213-
(ISSUE-102).
212+
satisfy PEP 249's "all DB errors funnel through Error" contract.
214213
"""
215214
if not text:
216215
return None
@@ -234,7 +233,7 @@ def _datetime_from_unixtime(value: int) -> datetime.datetime:
234233
235234
A corrupt server or MitM-modified bytes could deliver a non-integer
236235
or out-of-range value; wrap the resulting stdlib exceptions as
237-
``DataError`` (ISSUE-107).
236+
``DataError``.
238237
"""
239238
try:
240239
return datetime.datetime.fromtimestamp(value, tz=datetime.UTC)

tests/integration/test_executemany_returning.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
1-
"""Integration test for ISSUE-57: executemany must accumulate rows from
2-
every parameter set when the statement has a RETURNING clause."""
1+
"""Executemany must accumulate rows from every parameter set when the
2+
statement has a RETURNING clause."""
33

44
import pytest
55

tests/integration/test_misc_coverage.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
"""Integration tests for previously-uncovered territory.
22
3-
- ISSUE-26: large result set (forces continuation frames) + large BLOB.
4-
- ISSUE-27: Unicode in identifiers + emoji in TEXT.
5-
- ISSUE-28: multi-statement SQL is rejected with a specific error.
3+
- Large result set (forces continuation frames) + large BLOB.
4+
- Unicode in identifiers + emoji in TEXT.
5+
- Multi-statement SQL is rejected with a specific error.
66
"""
77

88
import pytest

tests/test_arraysize_validation.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
"""Cursor.arraysize rejects non-positive values (ISSUE-33)."""
1+
"""Cursor.arraysize rejects non-positive values."""
22

33
import pytest
44

tests/test_async_close_race.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
"""AsyncConnection.close serializes with in-flight operations (ISSUE-32).
1+
"""AsyncConnection.close serializes with in-flight operations.
22
33
Previously close() called await self._async_conn.close() without
44
acquiring _op_lock; a concurrent task mid-execute would find the

0 commit comments

Comments (0)