Skip to content

Commit ac952d4

Browse files
Close dbapi test-coverage gaps
ISSUE-25 — fetchmany edge cases: size=0 returns empty, oversized returns remaining, default uses arraysize, cursor iteration drains, fetchone+fetchmany/fetchall mix behaves correctly, fetch before a query raises. ISSUE-26 — large data: 5k-row SELECT (forces continuation frames on most server chunk sizes), 2 MiB BLOB round-trip. ISSUE-27 — Unicode in identifiers (café table, ☕ column name) and non-BMP codepoints (U+1F600 emoji) in TEXT values. ISSUE-28 — multi-statement SQL rejection with "nonempty statement tail". dqlite deviates from stdlib sqlite3 here; pinning the error so regressions can't silently change the behavior. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
1 parent cf2107a commit ac952d4

2 files changed

Lines changed: 159 additions & 0 deletions

File tree

Lines changed: 95 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,95 @@
1+
"""Integration tests for previously-uncovered territory.
2+
3+
- ISSUE-26: large result set (forces continuation frames) + large BLOB.
4+
- ISSUE-27: Unicode in identifiers + emoji in TEXT.
5+
- ISSUE-28: multi-statement SQL is rejected with a specific error.
6+
"""
7+
8+
import dqliteclient.exceptions
9+
import pytest
10+
11+
from dqlitedbapi import connect
12+
from dqlitedbapi.exceptions import OperationalError
13+
14+
15+
@pytest.mark.integration
class TestLargeData:
    """ISSUE-26 — data large enough to stress the wire protocol."""

    def test_large_result_set_round_trips(self, cluster_address: str) -> None:
        """Insert 5k rows and read them all back; exercises continuation frames
        on most server chunk sizes."""
        with connect(cluster_address, database="test_large") as conn:
            c = conn.cursor()
            c.execute("DROP TABLE IF EXISTS many")
            c.execute("CREATE TABLE many (i INTEGER PRIMARY KEY, s TEXT)")
            # PEP 249 executemany() batches all 5000 inserts through a single
            # DBAPI call instead of 5000 separate execute() round-trips.
            c.executemany(
                "INSERT INTO many (i, s) VALUES (?, ?)",
                [[i, f"row-{i}"] for i in range(5000)],
            )
            conn.commit()

            c.execute("SELECT i, s FROM many ORDER BY i")
            rows = c.fetchall()
            assert len(rows) == 5000
            assert rows[0] == (0, "row-0")
            assert rows[-1] == (4999, "row-4999")
            c.execute("DROP TABLE many")

    def test_multi_megabyte_blob(self, cluster_address: str) -> None:
        """A 2 MiB BLOB round-trips byte-for-byte."""
        payload = bytes(range(256)) * 8192  # 2 MiB of 0x00..0xFF pattern
        assert len(payload) == 2 * 1024 * 1024
        with connect(cluster_address, database="test_blob") as conn:
            c = conn.cursor()
            c.execute("DROP TABLE IF EXISTS big_blob")
            c.execute("CREATE TABLE big_blob (id INTEGER PRIMARY KEY, data BLOB)")
            c.execute("INSERT INTO big_blob (data) VALUES (?)", [payload])
            conn.commit()
            c.execute("SELECT data FROM big_blob")
            (value,) = c.fetchone()
            assert value == payload
            c.execute("DROP TABLE big_blob")
49+
50+
51+
@pytest.mark.integration
class TestUnicode:
    """ISSUE-27 — non-ASCII identifiers and non-BMP text values."""

    def test_unicode_identifier_and_emoji_value(self, cluster_address: str) -> None:
        """Quoted non-ASCII table/column names and an emoji TEXT value survive."""
        with connect(cluster_address, database="test_unicode") as conn:
            cur = conn.cursor()
            cur.execute('DROP TABLE IF EXISTS "café"')
            cur.execute('CREATE TABLE "café" (id INTEGER PRIMARY KEY, "☕" TEXT)')
            cur.execute('INSERT INTO "café" ("☕") VALUES (?)', ["hello 🚀 world"])
            conn.commit()

            cur.execute('SELECT "☕" FROM "café"')
            (value,) = cur.fetchone()
            assert value == "hello 🚀 world"
            # The column metadata must carry the Unicode name through too.
            assert cur.description is not None
            assert cur.description[0][0] == "☕"
            cur.execute('DROP TABLE "café"')

    def test_non_bmp_codepoint_round_trip(self, cluster_address: str) -> None:
        """4-byte UTF-8 codepoint survives the wire round-trip."""
        grinning = "\U0001f600"  # 😀
        payload = grinning * 1000  # 4000 bytes of non-BMP codepoints
        with connect(cluster_address, database="test_non_bmp") as conn:
            cur = conn.cursor()
            cur.execute("DROP TABLE IF EXISTS nbmp")
            cur.execute("CREATE TABLE nbmp (id INTEGER PRIMARY KEY, s TEXT)")
            cur.execute("INSERT INTO nbmp (s) VALUES (?)", [payload])
            conn.commit()

            cur.execute("SELECT s FROM nbmp")
            (value,) = cur.fetchone()
            assert value == payload
            cur.execute("DROP TABLE nbmp")
81+
82+
83+
@pytest.mark.integration
class TestMultiStatementRejection:
    """ISSUE-28 — semicolon-separated SQL must be rejected, not half-executed."""

    def test_semicolon_separated_select_rejected(self, cluster_address: str) -> None:
        """dqlite rejects multi-statement SQL — a real deviation from stdlib
        sqlite3 that applications commonly trip over. Pinning the error so
        regressions don't silently change the behavior."""
        # Today the raised class is dqliteclient's OperationalError (the DBAPI
        # layer doesn't wrap it); accept either until wrapping is settled.
        expected = (OperationalError, dqliteclient.exceptions.OperationalError)
        with connect(cluster_address, database="test_multi_stmt") as conn:
            cur = conn.cursor()
            with pytest.raises(expected, match="nonempty statement tail"):
                cur.execute("SELECT 1; SELECT 2;")

tests/test_fetchmany_edges.py

Lines changed: 64 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,64 @@
1+
"""PEP 249 fetchmany edge cases (ISSUE-25)."""
2+
3+
from unittest.mock import AsyncMock, MagicMock
4+
5+
import pytest
6+
7+
from dqlitedbapi.cursor import Cursor
8+
9+
10+
def _seeded_cursor(rows: list[tuple[int, ...]]) -> Cursor:
    """Return a Cursor whose result set is pre-populated with ``rows``.

    Skips the event-loop/network layer entirely — these tests exercise
    only the client-side fetch bookkeeping, so no live cluster is needed.
    """
    connection = MagicMock()
    connection._get_async_connection = AsyncMock()
    connection._run_sync = MagicMock()

    cursor = Cursor(connection)
    # Seed the private result-set state exactly as execute() would leave it.
    cursor._rows = rows  # type: ignore[assignment]
    cursor._row_index = 0
    cursor._description = [("id", None, None, None, None, None, None)]
    cursor._rowcount = len(rows)
    return cursor
25+
26+
27+
class TestFetchmanyEdges:
    """PEP 249 fetchmany/fetchone/fetchall edge cases against a seeded cursor."""

    def test_fetchmany_zero_returns_empty(self) -> None:
        cur = _seeded_cursor([(1,), (2,), (3,)])
        assert cur.fetchmany(0) == []
        # A zero-size fetch must not consume anything.
        assert cur._row_index == 0

    def test_fetchmany_larger_than_remaining_returns_all_remaining(self) -> None:
        cur = _seeded_cursor([(1,), (2,), (3,)])
        assert cur.fetchmany(100) == [(1,), (2,), (3,)]
        assert cur._row_index == 3

    def test_fetchmany_default_uses_arraysize(self) -> None:
        cur = _seeded_cursor([(1,), (2,), (3,)])
        cur.arraysize = 2
        assert cur.fetchmany() == [(1,), (2,)]

    def test_for_row_in_cursor_iterates_all(self) -> None:
        cur = _seeded_cursor([(1,), (2,), (3,)])
        assert list(cur) == [(1,), (2,), (3,)]

    def test_fetchone_then_fetchmany_continues(self) -> None:
        cur = _seeded_cursor([(1,), (2,), (3,)])
        assert cur.fetchone() == (1,)
        assert cur.fetchmany(10) == [(2,), (3,)]

    def test_fetchall_after_partial_fetch(self) -> None:
        cur = _seeded_cursor([(1,), (2,), (3,)])
        cur.fetchone()
        assert cur.fetchall() == [(2,), (3,)]

    def test_fetch_on_no_result_set_raises(self) -> None:
        # Fresh cursor, no execute() → description is None.
        cur = Cursor(MagicMock())
        # NOTE(review): Exception is deliberately broad — the message is what
        # we pin; narrow to the concrete error class once it is part of the API.
        with pytest.raises(Exception, match="No result set"):
            cur.fetchone()

0 commit comments

Comments
 (0)