Skip to content

Apply ruff rules (RUF) #9731

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 5 commits into from
Nov 6, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion asv_bench/benchmarks/dataset_io.py
Original file line number Diff line number Diff line change
Expand Up @@ -722,7 +722,7 @@ class PerformanceBackend(xr.backends.BackendEntrypoint):
def open_dataset(
self,
filename_or_obj: str | os.PathLike | None,
drop_variables: tuple[str, ...] = None,
drop_variables: tuple[str, ...] | None = None,
*,
mask_and_scale=True,
decode_times=True,
Expand Down
2 changes: 1 addition & 1 deletion ci/min_deps_check.py
Original file line number Diff line number Diff line change
Expand Up @@ -186,7 +186,7 @@ def process_pkg(
)


def fmt_version(major: int, minor: int, patch: int = None) -> str:
def fmt_version(major: int, minor: int, patch: int | None = None) -> str:
if patch is None:
return f"{major}.{minor}"
else:
Expand Down
7 changes: 7 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -245,6 +245,12 @@ ignore = [
"E501",
"E731",
"UP007",
"RUF001",
"RUF002",
"RUF003",
"RUF005",
"RUF007",
"RUF012",
]
extend-select = [
"B", # flake8-bugbear
Expand All @@ -254,6 +260,7 @@ extend-select = [
"TID", # flake8-tidy-imports (absolute imports)
"I", # isort
"PGH", # pygrep-hooks
"RUF",
"UP", # Pyupgrade
]

Expand Down
8 changes: 4 additions & 4 deletions xarray/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,13 +78,13 @@
"combine_by_coords",
"combine_nested",
"concat",
"corr",
"cov",
"cross",
"date_range",
"date_range_like",
"decode_cf",
"dot",
"cov",
"corr",
"cross",
"full_like",
"get_options",
"group_subtrees",
Expand Down Expand Up @@ -121,8 +121,8 @@
"Index",
"IndexSelResult",
"IndexVariable",
"Variable",
"NamedArray",
"Variable",
# Exceptions
"InvalidTreeError",
"MergeError",
Expand Down
12 changes: 6 additions & 6 deletions xarray/backends/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,21 +23,21 @@
"AbstractDataStore",
"BackendArray",
"BackendEntrypoint",
"FileManager",
"CachingFileManager",
"DummyFileManager",
"InMemoryDataStore",
"NetCDF4DataStore",
"PydapDataStore",
"ScipyDataStore",
"FileManager",
"H5NetCDFStore",
"ZarrStore",
"H5netcdfBackendEntrypoint",
"InMemoryDataStore",
"NetCDF4BackendEntrypoint",
"NetCDF4DataStore",
"PydapBackendEntrypoint",
"PydapDataStore",
"ScipyBackendEntrypoint",
"ScipyDataStore",
"StoreBackendEntrypoint",
"ZarrBackendEntrypoint",
"ZarrStore",
"list_engines",
"refresh_engines",
]
2 changes: 1 addition & 1 deletion xarray/backends/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -276,7 +276,7 @@ def __exit__(self, exception_type, exception_value, traceback):


class ArrayWriter:
__slots__ = ("sources", "targets", "regions", "lock")
__slots__ = ("lock", "regions", "sources", "targets")

def __init__(self, lock=None):
self.sources = []
Expand Down
8 changes: 4 additions & 4 deletions xarray/backends/h5netcdf_.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,14 +100,14 @@ class H5NetCDFStore(WritableCFDataStore):
"""Store for reading and writing data via h5netcdf"""

__slots__ = (
"autoclose",
"format",
"is_remote",
"lock",
"_filename",
"_group",
"_manager",
"_mode",
"autoclose",
"format",
"is_remote",
"lock",
)

def __init__(self, manager, group=None, mode=None, lock=HDF5_LOCK, autoclose=False):
Expand Down
2 changes: 1 addition & 1 deletion xarray/backends/locks.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,7 +149,7 @@ def _get_scheduler(get=None, collection=None) -> str | None:
# Fix for bug caused by dask installation that doesn't involve the toolz library
# Issue: 4164
import dask
from dask.base import get_scheduler # noqa: F401
from dask.base import get_scheduler

actual_get = get_scheduler(get, collection)
except ImportError:
Expand Down
8 changes: 4 additions & 4 deletions xarray/backends/netCDF4_.py
Original file line number Diff line number Diff line change
Expand Up @@ -361,14 +361,14 @@ class NetCDF4DataStore(WritableCFDataStore):
"""

__slots__ = (
"autoclose",
"format",
"is_remote",
"lock",
"_filename",
"_group",
"_manager",
"_mode",
"autoclose",
"format",
"is_remote",
"lock",
)

def __init__(
Expand Down
12 changes: 6 additions & 6 deletions xarray/backends/zarr.py
Original file line number Diff line number Diff line change
Expand Up @@ -182,7 +182,7 @@ def encode_zarr_attr_value(value):


class ZarrArrayWrapper(BackendArray):
__slots__ = ("dtype", "shape", "_array")
__slots__ = ("_array", "dtype", "shape")

def __init__(self, zarr_array):
# some callers attempt to evaluate an array if an `array` property exists on the object.
Expand Down Expand Up @@ -598,18 +598,18 @@ class ZarrStore(AbstractWritableDataStore):
"""Store for reading and writing data via zarr"""

__slots__ = (
"zarr_group",
"_append_dim",
"_close_store_on_close",
"_consolidate_on_close",
"_group",
"_mode",
"_read_only",
"_synchronizer",
"_write_region",
"_safe_chunks",
"_write_empty",
"_close_store_on_close",
"_synchronizer",
"_use_zarr_fill_value_as_mask",
"_write_empty",
"_write_region",
"zarr_group",
)

@classmethod
Expand Down
14 changes: 6 additions & 8 deletions xarray/conventions.py
Original file line number Diff line number Diff line change
Expand Up @@ -368,13 +368,13 @@ def _update_bounds_encoding(variables: T_Variables) -> None:
and attrs["bounds"] in variables
):
emit_user_level_warning(
f"Variable {name:s} has datetime type and a "
f"bounds variable but {name:s}.encoding does not have "
f"units specified. The units encodings for {name:s} "
f"Variable {name} has datetime type and a "
f"bounds variable but {name}.encoding does not have "
f"units specified. The units encodings for {name} "
f"and {attrs['bounds']} will be determined independently "
"and may not be equal, counter to CF-conventions. "
"If this is a concern, specify a units encoding for "
f"{name:s} before writing to a file.",
f"{name} before writing to a file.",
)

if has_date_units and "bounds" in attrs:
Expand Down Expand Up @@ -486,9 +486,7 @@ def stackable(dim: Hashable) -> bool:
for role_or_name in part.split()
]
if len(roles_and_names) % 2 == 1:
emit_user_level_warning(
f"Attribute {attr_name:s} malformed"
)
emit_user_level_warning(f"Attribute {attr_name} malformed")
var_names = roles_and_names[1::2]
if all(var_name in variables for var_name in var_names):
new_vars[k].encoding[attr_name] = attr_val
Expand All @@ -500,7 +498,7 @@ def stackable(dim: Hashable) -> bool:
if proj_name not in variables
]
emit_user_level_warning(
f"Variable(s) referenced in {attr_name:s} not in variables: {referenced_vars_not_in_variables!s}",
f"Variable(s) referenced in {attr_name} not in variables: {referenced_vars_not_in_variables}",
)
del var_attrs[attr_name]

Expand Down
6 changes: 3 additions & 3 deletions xarray/core/computation.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,11 +77,11 @@ class _UFuncSignature:
"""

__slots__ = (
"input_core_dims",
"output_core_dims",
"_all_core_dims",
"_all_input_core_dims",
"_all_output_core_dims",
"_all_core_dims",
"input_core_dims",
"output_core_dims",
)

def __init__(self, input_core_dims, output_core_dims=((),)):
Expand Down
6 changes: 3 additions & 3 deletions xarray/core/dataarray.py
Original file line number Diff line number Diff line change
Expand Up @@ -421,13 +421,13 @@ class DataArray(
_variable: Variable

__slots__ = (
"__weakref__",
"_cache",
"_coords",
"_close",
"_coords",
"_indexes",
"_name",
"_variable",
"__weakref__",
)

dt = utils.UncachedAccessor(CombinedDatetimelikeAccessor["DataArray"])
Expand Down Expand Up @@ -4537,7 +4537,7 @@ def from_dict(cls, d: Mapping[str, Any]) -> Self:
except KeyError as e:
raise ValueError(
"cannot convert dict when coords are missing the key "
f"'{str(e.args[0])}'"
f"'{e.args[0]}'"
) from e
try:
data = d["data"]
Expand Down
10 changes: 5 additions & 5 deletions xarray/core/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -684,15 +684,15 @@ class Dataset(
_variables: dict[Hashable, Variable]

__slots__ = (
"__weakref__",
"_attrs",
"_cache",
"_close",
"_coord_names",
"_dims",
"_encoding",
"_close",
"_indexes",
"_variables",
"__weakref__",
)

def __init__(
Expand Down Expand Up @@ -6430,7 +6430,7 @@ def transpose(
"""
# Raise error if list is passed as dim
if (len(dim) > 0) and (isinstance(dim[0], list)):
list_fix = [f"{repr(x)}" if isinstance(x, str) else f"{x}" for x in dim[0]]
list_fix = [f"{x!r}" if isinstance(x, str) else f"{x}" for x in dim[0]]
raise TypeError(
f'transpose requires dim to be passed as multiple arguments. Expected `{", ".join(list_fix)}`. Received `{dim[0]}` instead'
)
Expand Down Expand Up @@ -7800,7 +7800,7 @@ def from_dict(cls, d: Mapping[Any, Any]) -> Self:
}
except KeyError as e:
raise ValueError(
f"cannot convert dict without the key '{str(e.args[0])}'"
f"cannot convert dict without the key '{e.args[0]}'"
) from e
obj = cls(variable_dict)

Expand Down Expand Up @@ -10155,7 +10155,7 @@ def _wrapper(Y, *args, **kwargs):
if name is _THIS_ARRAY:
name = ""
else:
name = f"{str(name)}_"
name = f"{name}_"

input_core_dims = [reduce_dims_ for _ in range(n_coords + 1)]
input_core_dims.extend(
Expand Down
14 changes: 7 additions & 7 deletions xarray/core/datatree.py
Original file line number Diff line number Diff line change
Expand Up @@ -217,10 +217,10 @@ class DatasetView(Dataset):
__slots__ = (
"_attrs",
"_cache", # used by _CachedAccessor
"_close",
"_coord_names",
"_dims",
"_encoding",
"_close",
"_indexes",
"_variables",
)
Expand Down Expand Up @@ -457,17 +457,17 @@ class DataTree(
_close: Callable[[], None] | None

__slots__ = (
"_name",
"_parent",
"_children",
"_attrs",
"_cache", # used by _CachedAccessor
"_children",
"_close",
"_data_variables",
"_encoding",
"_name",
"_node_coord_variables",
"_node_dims",
"_node_indexes",
"_attrs",
"_encoding",
"_close",
"_parent",
)

def __init__(
Expand Down
4 changes: 2 additions & 2 deletions xarray/core/datatree_render.py
Original file line number Diff line number Diff line change
Expand Up @@ -205,8 +205,8 @@ def __repr__(self) -> str:
classname = self.__class__.__name__
args = [
repr(self.node),
f"style={repr(self.style)}",
f"childiter={repr(self.childiter)}",
f"style={self.style!r}",
f"childiter={self.childiter!r}",
]
return f"{classname}({', '.join(args)})"

Expand Down
2 changes: 1 addition & 1 deletion xarray/core/dtypes.py
Original file line number Diff line number Diff line change
Expand Up @@ -208,7 +208,7 @@ def isdtype(dtype, kind: str | tuple[str, ...], xp=None) -> bool:
if not isinstance(kind, str) and not (
isinstance(kind, tuple) and all(isinstance(k, str) for k in kind) # type: ignore[redundant-expr]
):
raise TypeError(f"kind must be a string or a tuple of strings: {repr(kind)}")
raise TypeError(f"kind must be a string or a tuple of strings: {kind!r}")

if isinstance(dtype, np.dtype):
return npcompat.isdtype(dtype, kind)
Expand Down
2 changes: 1 addition & 1 deletion xarray/core/extension_array.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,7 @@ def __array_ufunc__(ufunc, method, *inputs, **kwargs):
return ufunc(*inputs, **kwargs)

def __repr__(self):
return f"{type(self)}(array={repr(self.array)})"
return f"{type(self)}(array={self.array!r})"

def __getattr__(self, attr: str) -> object:
return getattr(self.array, attr)
Expand Down
4 changes: 1 addition & 3 deletions xarray/core/formatting.py
Original file line number Diff line number Diff line change
Expand Up @@ -293,9 +293,7 @@ def inline_sparse_repr(array):
"""Similar to sparse.COO.__repr__, but without the redundant shape/dtype."""
sparse_array_type = array_type("sparse")
assert isinstance(array, sparse_array_type), array
return (
f"<{type(array).__name__}: nnz={array.nnz:d}, fill_value={array.fill_value!s}>"
)
return f"<{type(array).__name__}: nnz={array.nnz:d}, fill_value={array.fill_value}>"


def inline_variable_array_repr(var, max_width):
Expand Down
Loading
Loading