Skip to content

Enforce ruff/flake8-simplify rules (SIM) #10462

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 8 commits into from
Jun 30, 2025
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Apply ruff/flake8-simplify rule SIM118
SIM118 Use `key in dict` instead of `key in dict.keys()`
  • Loading branch information
DimitriPapadopoulos committed Jun 29, 2025
commit c805b8a61eec162535d3a9838614889ac796cefa
2 changes: 1 addition & 1 deletion xarray/backends/netCDF4_.py
Original file line number Diff line number Diff line change
Expand Up @@ -298,7 +298,7 @@ def _extract_nc4_variable_encoding(
del encoding["chunksizes"]

var_has_unlim_dim = any(dim in unlimited_dims for dim in variable.dims)
if not raise_on_invalid and var_has_unlim_dim and "contiguous" in encoding.keys():
if not raise_on_invalid and var_has_unlim_dim and "contiguous" in encoding:
del encoding["contiguous"]

for k in safe_to_drop:
Expand Down
8 changes: 4 additions & 4 deletions xarray/backends/pydap_.py
Original file line number Diff line number Diff line change
Expand Up @@ -348,14 +348,14 @@ def group_fqn(store, path=None, g_fqn=None) -> dict[str, str]:
if not g_fqn:
g_fqn = {}
groups = [
store[key].id
for key in store.keys()
if isinstance(store[key], GroupType)
var.id for var in store.values() if isinstance(var, GroupType)
]
for g in groups:
g_fqn.update({g: path})
subgroups = [
var for var in store[g] if isinstance(store[g][var], GroupType)
key
for key, var in store[g].items()
if isinstance(var, GroupType)
]
if len(subgroups) > 0:
npath = path + g
Expand Down
2 changes: 1 addition & 1 deletion xarray/computation/rolling.py
Original file line number Diff line number Diff line change
Expand Up @@ -1081,7 +1081,7 @@ def __init__(
self.side = side
self.boundary = boundary

missing_dims = tuple(dim for dim in windows.keys() if dim not in self.obj.dims)
missing_dims = tuple(dim for dim in windows if dim not in self.obj.dims)
if missing_dims:
raise ValueError(
f"Window dimensions {missing_dims} not found in {self.obj.__class__.__name__} "
Expand Down
4 changes: 1 addition & 3 deletions xarray/core/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -1247,9 +1247,7 @@ def _dataset_indexer(dim: Hashable) -> DataArray:
_dataarray_indexer if isinstance(cond, DataArray) else _dataset_indexer
)

indexers = {}
for dim in cond.sizes.keys():
indexers[dim] = _get_indexer(dim)
indexers = {dim: _get_indexer(dim) for dim in cond.sizes}

self = self.isel(**indexers)
cond = cond.isel(**indexers)
Expand Down
2 changes: 1 addition & 1 deletion xarray/core/dataarray.py
Original file line number Diff line number Diff line change
Expand Up @@ -212,7 +212,7 @@ def _check_data_shape(
data_shape = tuple(
(
as_variable(coords[k], k, auto_convert=False).size
if k in coords.keys()
if k in coords
else 1
)
for k in dims
Expand Down
15 changes: 8 additions & 7 deletions xarray/core/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -4088,21 +4088,20 @@ def _rename(
is raised at the right stack level.
"""
name_dict = either_dict_or_kwargs(name_dict, names, "rename")
for k in name_dict.keys():
for k, new_k in name_dict.items():
if k not in self and k not in self.dims:
raise ValueError(
f"cannot rename {k!r} because it is not a "
"variable or dimension in this dataset"
)

create_dim_coord = False
new_k = name_dict[k]

if k == new_k:
continue # Same name, nothing to do

if k in self.dims and new_k in self._coord_names:
coord_dims = self._variables[name_dict[k]].dims
coord_dims = self._variables[new_k].dims
if coord_dims == (k,):
create_dim_coord = True
elif k in self._coord_names and new_k in self.dims:
Expand All @@ -4112,7 +4111,7 @@ def _rename(

if create_dim_coord:
warnings.warn(
f"rename {k!r} to {name_dict[k]!r} does not create an index "
f"rename {k!r} to {new_k!r} does not create an index "
"anymore. Try using swap_dims instead or use set_index "
"after rename to create an indexed coordinate.",
UserWarning,
Expand Down Expand Up @@ -8980,16 +8979,18 @@ def pad(
variables[name] = var
elif name in self.data_vars:
if utils.is_dict_like(constant_values):
if name in constant_values.keys():
if name in constant_values:
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't see how this change causes the new mypy errors:

Found 2 errors in 1 file (checked 191 source files)
xarray/core/dataset.py:8986: error: Value expression in dictionary comprehension has incompatible type "float | tuple[float, float] | Mapping[Any, float | tuple[float, float]]"; expected type "float | tuple[float, float]"  [misc]
xarray/core/dataset.py:8991: error: Incompatible types in assignment (expression has type "float | tuple[float, float] | Mapping[Any, float | tuple[float, float]] | Mapping[Any, float | tuple[float, float] | Mapping[Any, float | tuple[float, float]]] | None", variable has type "float | tuple[float, float] | Mapping[Any, float | tuple[float, float]]")  [assignment]

Wasn't this a pre-existing issue? If so, not sure why mypy only flags it after this change.

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

yes agree it's confusing...

filtered_constant_values = constant_values[name]
elif not set(var.dims).isdisjoint(constant_values.keys()):
filtered_constant_values = {
k: v for k, v in constant_values.items() if k in var.dims
k: v # type: ignore[misc]
for k, v in constant_values.items()
if k in var.dims
}
else:
filtered_constant_values = 0 # TODO: https://github.com/pydata/xarray/pull/9353#discussion_r1724018352
else:
filtered_constant_values = constant_values
filtered_constant_values = constant_values # type: ignore[assignment]
variables[name] = var.pad(
pad_width=var_pad_width,
mode=mode,
Expand Down
2 changes: 1 addition & 1 deletion xarray/core/parallel.py
Original file line number Diff line number Diff line change
Expand Up @@ -624,7 +624,7 @@ def _wrapper(
{**hlg.layers, **new_layers},
dependencies={
**hlg.dependencies,
**{name: {gname} for name in new_layers.keys()},
**{name: {gname} for name in new_layers},
},
)

Expand Down
2 changes: 1 addition & 1 deletion xarray/plot/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -1825,7 +1825,7 @@ def _guess_coords_to_plot(
"""
coords_to_plot_exist = {k: v for k, v in coords_to_plot.items() if v is not None}
available_coords = tuple(
k for k in darray.coords.keys() if k not in coords_to_plot_exist.values()
k for k in darray.coords if k not in coords_to_plot_exist.values()
)

# If dims_plot[k] isn't defined then fill with one of the available dims, unless
Expand Down
6 changes: 3 additions & 3 deletions xarray/tests/test_backends.py
Original file line number Diff line number Diff line change
Expand Up @@ -2671,13 +2671,13 @@ def test_hidden_zarr_keys(self) -> None:
# check that a variable hidden attribute is present and correct
# JSON only has a single array type, which maps to list in Python.
# In contrast, dims in xarray is always a tuple.
for var in expected.variables.keys():
for var in expected.variables:
dims = zarr_group[var].attrs[self.DIMENSION_KEY]
assert dims == list(expected[var].dims)

with xr.decode_cf(store):
# make sure it is hidden
for var in expected.variables.keys():
for var in expected.variables:
assert self.DIMENSION_KEY not in expected[var].attrs

# put it back and try removing from a variable
Expand Down Expand Up @@ -3731,7 +3731,7 @@ def test_chunk_key_encoding_v2(self) -> None:

# Verify the chunk keys in store use the slash separator
if not has_zarr_v3:
chunk_keys = [k for k in store.keys() if k.startswith("var1/")]
chunk_keys = [k for k in store if k.startswith("var1/")]
assert len(chunk_keys) > 0
for key in chunk_keys:
assert "/" in key
Expand Down
22 changes: 11 additions & 11 deletions xarray/tests/test_backends_datatree.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ def assert_chunks_equal(
and node1.variables[name].chunksizes == node2.variables[name].chunksizes
)
for path, (node1, node2) in xr.group_subtrees(actual, expected)
for name in node1.variables.keys()
for name in node1.variables
}

assert all(comparison.values()), diff_chunks(comparison, actual, expected)
Expand Down Expand Up @@ -312,9 +312,9 @@ def test_open_groups(self, unaligned_datatree_nc) -> None:
unaligned_dict_of_datasets = open_groups(unaligned_datatree_nc)

# Check that group names are keys in the dictionary of `xr.Datasets`
assert "/" in unaligned_dict_of_datasets.keys()
assert "/Group1" in unaligned_dict_of_datasets.keys()
assert "/Group1/subgroup1" in unaligned_dict_of_datasets.keys()
assert "/" in unaligned_dict_of_datasets
assert "/Group1" in unaligned_dict_of_datasets
assert "/Group1/subgroup1" in unaligned_dict_of_datasets
# Check that group name returns the correct datasets
with xr.open_dataset(unaligned_datatree_nc, group="/") as expected:
assert_identical(unaligned_dict_of_datasets["/"], expected)
Expand Down Expand Up @@ -453,9 +453,9 @@ def test_open_groups(self, url=unaligned_datatree_url) -> None:
unaligned_dict_of_datasets = open_groups(url, engine=self.engine)

# Check that group names are keys in the dictionary of `xr.Datasets`
assert "/" in unaligned_dict_of_datasets.keys()
assert "/Group1" in unaligned_dict_of_datasets.keys()
assert "/Group1/subgroup1" in unaligned_dict_of_datasets.keys()
assert "/" in unaligned_dict_of_datasets
assert "/Group1" in unaligned_dict_of_datasets
assert "/Group1/subgroup1" in unaligned_dict_of_datasets
# Check that group name returns the correct datasets
with xr.open_dataset(url, engine=self.engine, group="/") as expected:
assert_identical(unaligned_dict_of_datasets["/"], expected)
Expand Down Expand Up @@ -782,10 +782,10 @@ def test_open_groups(self, unaligned_datatree_zarr_factory, zarr_format) -> None
storepath = unaligned_datatree_zarr_factory(zarr_format=zarr_format)
unaligned_dict_of_datasets = open_groups(storepath, engine="zarr")

assert "/" in unaligned_dict_of_datasets.keys()
assert "/Group1" in unaligned_dict_of_datasets.keys()
assert "/Group1/subgroup1" in unaligned_dict_of_datasets.keys()
assert "/Group2" in unaligned_dict_of_datasets.keys()
assert "/" in unaligned_dict_of_datasets
assert "/Group1" in unaligned_dict_of_datasets
assert "/Group1/subgroup1" in unaligned_dict_of_datasets
assert "/Group2" in unaligned_dict_of_datasets
# Check that group name returns the correct datasets
with xr.open_dataset(storepath, group="/", engine="zarr") as expected:
assert_identical(unaligned_dict_of_datasets["/"], expected)
Expand Down
4 changes: 2 additions & 2 deletions xarray/tests/test_combine.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,8 @@

def assert_combined_tile_ids_equal(dict1, dict2):
assert len(dict1) == len(dict2)
for k in dict1.keys():
assert k in dict2.keys()
for k in dict1:
assert k in dict2
assert_equal(dict1[k], dict2[k])


Expand Down
2 changes: 1 addition & 1 deletion xarray/tests/test_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -3164,7 +3164,7 @@ def test_drop_encoding(self) -> None:
vencoding = {"scale_factor": 10}
orig.encoding = {"foo": "bar"}

for k in orig.variables.keys():
for k in orig.variables:
orig[k].encoding = vencoding

actual = orig.drop_encoding()
Expand Down
2 changes: 1 addition & 1 deletion xarray/tests/test_strategies.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ def test_number_of_dims(self, data, ndims):
def test_restrict_names(self, data):
capitalized_names = st.text(st.characters(), min_size=1).map(str.upper)
dim_sizes = data.draw(dimension_sizes(dim_names=capitalized_names))
for dim in dim_sizes.keys():
for dim in dim_sizes:
assert dim.upper() == dim


Expand Down
2 changes: 1 addition & 1 deletion xarray/tests/test_units.py
Original file line number Diff line number Diff line change
Expand Up @@ -3052,7 +3052,7 @@ def is_compatible(a, b):
other_units = extract_units(other)

equal_arrays = all(
is_compatible(units[name], other_units[name]) for name in units.keys()
is_compatible(units[name], other_units[name]) for name in units
) and (
strip_units(data_array).equals(
strip_units(convert_units(other, extract_units(data_array)))
Expand Down
pFad - Phonifier reborn

Pfad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy