Skip to content

Commit 61ba166

Browse files
committed
Ruff RUF: fixed all remaining ruff specific linter issues
1 parent 7f3104b commit 61ba166

20 files changed

Lines changed: 36 additions & 35 deletions

duckdb/experimental/spark/exception.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,13 @@
1+
from typing import Optional
2+
3+
14
class ContributionsAcceptedError(NotImplementedError): # noqa: D100
25
"""This method is not planned to be implemented, if you would like to implement this method
36
or show your interest in this method to other members of the community,
47
feel free to open up a PR or a Discussion over on https://github.com/duckdb/duckdb.
58
""" # noqa: D205
69

7-
def __init__(self, message: str = None) -> None: # noqa: D107
10+
def __init__(self, message: Optional[str] = None) -> None: # noqa: D107
811
doc = self.__class__.__doc__
912
if message:
1013
doc = message + "\n" + doc

duckdb/experimental/spark/sql/types.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
import time
1010
from builtins import tuple
1111
from collections.abc import Iterator
12+
from types import MappingProxyType
1213
from typing import (
1314
Any,
1415
ClassVar,
@@ -17,7 +18,7 @@
1718
TypeVar,
1819
Union,
1920
cast,
20-
overload,
21+
overload, Mapping,
2122
)
2223

2324
import duckdb
@@ -512,14 +513,14 @@ class DayTimeIntervalType(AtomicType):
512513
MINUTE = 2
513514
SECOND = 3
514515

515-
_fields = {
516+
_fields: Mapping[str, int] = MappingProxyType({
516517
DAY: "day",
517518
HOUR: "hour",
518519
MINUTE: "minute",
519520
SECOND: "second",
520-
}
521+
})
521522

522-
_inverted_fields = dict(zip(_fields.values(), _fields.keys()))
523+
_inverted_fields: Mapping[int, str] = MappingProxyType(dict(zip(_fields.values(), _fields.keys())))
523524

524525
def __init__(self, startField: Optional[int] = None, endField: Optional[int] = None) -> None: # noqa: D107
525526
super().__init__(DuckDBPyType("INTERVAL"))

duckdb/query_graph/__main__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -228,7 +228,7 @@ def generate_timing_html(graph_json: object, query_timings: object) -> object:
228228
all_phases = query_timings.get_phases()
229229
query_timings.add_node_timing(NodeTiming("TOTAL TIME", total_time))
230230
query_timings.add_node_timing(NodeTiming("Execution Time", execution_time))
231-
all_phases = ["TOTAL TIME", "Execution Time"] + all_phases
231+
all_phases = ["TOTAL TIME", "Execution Time", *all_phases]
232232
for phase in all_phases:
233233
summarized_phase = query_timings.get_summary_phase_timings(phase)
234234
summarized_phase.calculate_percentage(total_time)

duckdb_packaging/pypi_cleanup.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -122,7 +122,7 @@ def session_with_retries() -> Generator[Session, None, None]:
122122
connect=3, # try 3 times before giving up on connection errors
123123
read=3, # try 3 times before giving up on read errors
124124
status=3, # try 3 times before giving up on status errors (see forcelist below)
125-
status_forcelist=[429] + list(range(500, 512)),
125+
status_forcelist=[429, *list(range(500, 512))],
126126
other=0, # whatever else may cause an error should break
127127
backoff_factor=0.1, # [0.0s, 0.2s, 0.4s]
128128
raise_on_redirect=True, # raise exception when redirect error retries are exhausted

tests/extensions/json/test_read_json.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ def test_read_json_sample_size(self):
3838

3939
def test_read_json_format(self):
4040
# Wrong option
41-
with pytest.raises(duckdb.BinderException, match="format must be one of .* not 'test'"):
41+
with pytest.raises(duckdb.BinderException, match=r"format must be one of .* not 'test'"):
4242
rel = duckdb.read_json(TestFile("example.json"), format="test")
4343

4444
rel = duckdb.read_json(TestFile("example.json"), format="unstructured")

tests/fast/adbc/test_statement_bind.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ def test_bind_multiple_rows(self):
5151
with pytest.raises(
5252
adbc_driver_manager.NotSupportedError, match="Binding multiple rows at once is not supported yet"
5353
):
54-
res, number_of_rows = statement.execute_query()
54+
statement.execute_query()
5555

5656
def test_bind_single_row(self):
5757
expected_result = pa.array([8], type=pa.int64())
@@ -171,7 +171,7 @@ def test_too_many_parameters(self):
171171
adbc_driver_manager.ProgrammingError,
172172
match="Input data has more column than prepared statement has parameters",
173173
):
174-
res, _ = statement.execute_query()
174+
statement.execute_query()
175175

176176
def test_not_enough_parameters(self):
177177
data = pa.record_batch(
@@ -197,4 +197,4 @@ def test_not_enough_parameters(self):
197197
adbc_driver_manager.ProgrammingError,
198198
match="Values were not provided for the following prepared statement parameters: 2",
199199
):
200-
res, _ = statement.execute_query()
200+
statement.execute_query()

tests/fast/api/test_duckdb_connection.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
1+
import re
2+
13
import pytest
24
from conftest import ArrowPandas, NumpyPandas
35

@@ -384,7 +386,7 @@ def test_connect_with_path(self, tmp_database):
384386
assert con.sql("select 42").fetchall() == [(42,)]
385387

386388
with pytest.raises(
387-
duckdb.InvalidInputException, match="Please provide either a str or a pathlib.Path, not <class 'int'>"
389+
duckdb.InvalidInputException, match=re.escape("Please provide either a str or a pathlib.Path, not <class 'int'>")
388390
):
389391
con = duckdb.connect(5)
390392

tests/fast/api/test_read_csv.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -128,7 +128,7 @@ def test_quotechar(self, duckdb_cursor):
128128

129129
def test_quote(self, duckdb_cursor):
130130
with pytest.raises(
131-
duckdb.Error, match='The methods read_csv and read_csv_auto do not have the "quote" argument.'
131+
duckdb.Error, match='The methods read_csv and read_csv_auto do not have the "quote" argument'
132132
):
133133
duckdb_cursor.read_csv(TestFile("unquote_without_delimiter.csv"), quote="", header=False)
134134

@@ -445,8 +445,6 @@ def scoped_objects(duckdb_cursor) -> None:
445445
assert CountedObject.instance_count == 0
446446

447447
def test_read_csv_glob(self, tmp_path, create_temp_csv):
448-
file1_path, file2_path = create_temp_csv
449-
450448
# Use the temporary file paths to read CSV files
451449
con = duckdb.connect()
452450
rel = con.read_csv(f"{tmp_path}/file*.csv") # noqa: F841

tests/fast/api/test_with_propagating_exceptions.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
class TestWithPropagatingExceptions:
77
def test_with(self):
88
# Should propagate exception raised in the 'with duckdb.connect() ..'
9-
with pytest.raises(duckdb.ParserException, match="syntax error at or near *"), duckdb.connect() as con:
9+
with pytest.raises(duckdb.ParserException, match=r"syntax error at or near *"), duckdb.connect() as con:
1010
con.execute("invalid")
1111

1212
# Does not raise an exception

tests/fast/arrow/test_arrow_ipc.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,6 @@ def test_single_buffer(self, duckdb_cursor):
2929
stream = ipc.MessageReader.open_stream(buf_reader)
3030
# This fails
3131
with pytest.raises(
32-
duckdb.Error, match="The nanoarrow community extension is needed to read the Arrow IPC protocol."
32+
duckdb.Error, match="The nanoarrow community extension is needed to read the Arrow IPC protocol"
3333
):
3434
duckdb_cursor.from_arrow(stream).fetchall()

0 commit comments

Comments (0)