Skip to content

Commit dc95538

Browse files
committed
Ruff linting - remaining unsafe fixes
1 parent 1ee6b42 commit dc95538

24 files changed

Lines changed: 56 additions & 69 deletions

duckdb/experimental/spark/_globals.py

Lines changed: 2 additions & 4 deletions
@@ -31,8 +31,6 @@ def foo(arg=pyducdkb.spark._NoValue):
     Note that this approach is taken after from NumPy.
     """

-from typing import Type
-
 __ALL__ = ["_NoValue"]


@@ -59,7 +57,7 @@ class _NoValueType:
     def __new__(cls) -> "_NoValueType":
         # ensure that only one instance exists
         if not cls.__instance:
-            cls.__instance = super(_NoValueType, cls).__new__(cls)
+            cls.__instance = super().__new__(cls)
         return cls.__instance

     # Make the _NoValue instance falsey
@@ -69,7 +67,7 @@ def __nonzero__(self) -> bool:
     __bool__ = __nonzero__

     # needed for python 2 to preserve identity through a pickle
-    def __reduce__(self) -> tuple[Type, tuple]:
+    def __reduce__(self) -> tuple[type, tuple]:
         return (self.__class__, ())

     def __repr__(self) -> str:

duckdb/experimental/spark/_typing.py

Lines changed: 2 additions & 2 deletions
@@ -20,7 +20,7 @@
 from typing import Callable, TypeVar, Union

 from numpy import float32, float64, int32, int64, ndarray
-from typing_extensions import Literal, Protocol
+from typing_extensions import Literal, Protocol, Self

 F = TypeVar("F", bound=Callable)
 T_co = TypeVar("T_co", covariant=True)
@@ -31,7 +31,7 @@


 class SupportsIAdd(Protocol):
-    def __iadd__(self, other: "SupportsIAdd") -> "SupportsIAdd": ...
+    def __iadd__(self, other: "SupportsIAdd") -> Self: ...


 class SupportsOrdering(Protocol):

duckdb/experimental/spark/sql/dataframe.py

Lines changed: 3 additions & 2 deletions
@@ -16,7 +16,6 @@

 from ..errors import PySparkIndexError, PySparkTypeError, PySparkValueError
 from ..exception import ContributionsAcceptedError
-from ._typing import ColumnOrName
 from .column import Column
 from .readwriter import DataFrameWriter
 from .type_utils import duckdb_to_spark_schema
@@ -26,6 +25,7 @@
     import pyarrow as pa
     from pandas.core.frame import DataFrame as PandasDataFrame

+    from ._typing import ColumnOrName
     from .group import GroupedData
     from .session import SparkSession

@@ -896,7 +896,8 @@ def __getattr__(self, name: str) -> Column:
         [Row(age=2), Row(age=5)]
         """
         if name not in self.relation.columns:
-            raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'")
+            msg = f"'{self.__class__.__name__}' object has no attribute '{name}'"
+            raise AttributeError(msg)
         return Column(duckdb.ColumnExpression(self.relation.alias, name))

     @overload

duckdb/experimental/spark/sql/group.py

Lines changed: 6 additions & 3 deletions
@@ -15,16 +15,19 @@
 # limitations under the License.
 #

-from typing import Callable, Union, overload
+from typing import TYPE_CHECKING, Callable, Union, overload

 from ..exception import ContributionsAcceptedError
-from ._typing import ColumnOrName
 from .column import Column
 from .dataframe import DataFrame
 from .functions import _to_column_expr
-from .session import SparkSession
 from .types import NumericType

+# Only import symbols needed for type checking if something is type checking
+if TYPE_CHECKING:
+    from ._typing import ColumnOrName
+    from .session import SparkSession
+
 __all__ = ["GroupedData", "Grouping"]


duckdb/experimental/spark/sql/types.py

Lines changed: 12 additions & 8 deletions
@@ -123,7 +123,7 @@ class DataTypeSingleton(type):

     def __call__(cls: type[T]) -> T:  # type: ignore[override]
         if cls not in cls._instances:  # type: ignore[attr-defined]
-            cls._instances[cls] = super(DataTypeSingleton, cls).__call__()  # type: ignore[misc, attr-defined]
+            cls._instances[cls] = super().__call__()  # type: ignore[misc, attr-defined]
         return cls._instances[cls]  # type: ignore[attr-defined]


@@ -535,7 +535,8 @@ def __init__(self, startField: Optional[int] = None, endField: Optional[int] = N

         fields = DayTimeIntervalType._fields
         if startField not in fields or endField not in fields:
-            raise RuntimeError(f"interval {startField} to {endField} is invalid")
+            msg = f"interval {startField} to {endField} is invalid"
+            raise RuntimeError(msg)
         self.startField = cast("int", startField)
         self.endField = cast("int", endField)

@@ -917,7 +918,8 @@ def toInternal(self, obj: tuple) -> tuple:  # noqa: D102
                    for n, f, c in zip(self.names, self.fields, self._needConversion)
                )
            else:
-                raise ValueError(f"Unexpected tuple {obj!r} with StructType")
+                msg = f"Unexpected tuple {obj!r} with StructType"
+                raise ValueError(msg)
        else:
            if isinstance(obj, dict):
                return tuple(obj.get(n) for n in self.names)
@@ -927,7 +929,8 @@ def toInternal(self, obj: tuple) -> tuple:  # noqa: D102
                d = obj.__dict__
                return tuple(d.get(n) for n in self.names)
            else:
-                raise ValueError(f"Unexpected tuple {obj!r} with StructType")
+                msg = f"Unexpected tuple {obj!r} with StructType"
+                raise ValueError(msg)

    def fromInternal(self, obj: tuple) -> "Row":  # noqa: D102
        if obj is None:
@@ -1177,23 +1180,24 @@ def __contains__(self, item: Any) -> bool:  # noqa: D105, ANN401
        if hasattr(self, "__fields__"):
            return item in self.__fields__
        else:
-            return super(Row, self).__contains__(item)
+            return super().__contains__(item)

    # let object acts like class
    def __call__(self, *args: Any) -> "Row":  # noqa: ANN401
        """Create new Row object."""
        if len(args) > len(self):
-            raise ValueError(f"Can not create Row with fields {self}, expected {len(self):d} values but got {args}")
+            msg = f"Can not create Row with fields {self}, expected {len(self):d} values but got {args}"
+            raise ValueError(msg)
        return _create_row(self, args)

    def __getitem__(self, item: Any) -> Any:  # noqa: D105, ANN401
        if isinstance(item, (int, slice)):
-            return super(Row, self).__getitem__(item)
+            return super().__getitem__(item)
        try:
            # it will be slow when it has many fields,
            # but this will not be used in normal cases
            idx = self.__fields__.index(item)
-            return super(Row, self).__getitem__(idx)
+            return super().__getitem__(idx)
        except IndexError:
            raise KeyError(item)  # noqa: B904
        except ValueError:

duckdb/query_graph/__main__.py

Lines changed: 0 additions & 4 deletions
@@ -1,7 +1,6 @@
 import argparse  # noqa: D100
 import json
 import re
-import sys
 import webbrowser
 from functools import reduce
 from pathlib import Path
@@ -322,9 +321,6 @@ def translate_json_to_html(input_file: str, output_file: str) -> None:  # noqa:


 def main() -> None:  # noqa: D103
-    if sys.version_info[0] < 3:
-        print("Please use python3")
-        exit(1)
     parser = argparse.ArgumentParser(
         prog="Query Graph Generator",
         description="""Given a json profile output, generate a html file showing the query graph and

duckdb_packaging/pypi_cleanup.py

Lines changed: 2 additions & 1 deletion
@@ -230,7 +230,8 @@ def run(self) -> int:
         elif self._mode == CleanMode.LIST_ONLY:
             logging.info("Running in DRY RUN mode, nothing will be deleted")
         else:
-            raise RuntimeError("Unexpected mode")
+            msg = "Unexpected mode"
+            raise RuntimeError(msg)

         logging.info(f"Max development releases to keep per unreleased version: {self._max_dev_releases}")

duckdb_packaging/setuptools_scm_version.py

Lines changed: 2 additions & 1 deletion
@@ -68,7 +68,8 @@ def _tag_to_version(tag: str) -> str:
 def _bump_dev_version(base_version: str, distance: int) -> str:
     """Bump the given version."""
     if distance == 0:
-        raise ValueError("Dev distance is 0, cannot bump version.")
+        msg = "Dev distance is 0, cannot bump version."
+        raise ValueError(msg)
     major, minor, patch, post, rc = parse_version(base_version)

     if post != 0:

tests/extensions/test_httpfs.py

Lines changed: 1 addition & 1 deletion
@@ -48,7 +48,7 @@ def test_httpfs(self, require, pandas):
             if str(e).startswith("HTTP HEAD error") or str(e).startswith("Unable to connect"):
                 return
             else:
-                raise e
+                raise

         result_df = connection.fetchdf()
         exp_result = pandas.DataFrame(

tests/fast/adbc/test_adbc.py

Lines changed: 0 additions & 7 deletions
@@ -1,18 +1,11 @@
 import datetime
-import sys
 from pathlib import Path

 import numpy as np
 import pytest

 import duckdb

-if sys.version_info < (3, 9):
-    pytest.skip(
-        "Python Version must be higher or equal to 3.9 to run this test",
-        allow_module_level=True,
-    )
-
 adbc_driver_manager = pytest.importorskip("adbc_driver_manager.dbapi")
 adbc_driver_manager_lib = pytest.importorskip("adbc_driver_manager._lib")
0 commit comments

Comments
 (0)