Added many flake8 checks in ruff (#597)
* Added many `flake8` checks in `ruff`
* Did a lot of fixes to pass the additional tests
* Updated documentation on code style
david-zwicker authored Aug 16, 2024
1 parent c5e68ac commit 20bd997
Showing 77 changed files with 401 additions and 353 deletions.
8 changes: 4 additions & 4 deletions docs/source/conf.py
@@ -15,9 +15,9 @@
import sys

sys.path.insert(0, ".")
sys.path.insert(0, os.path.abspath("../.."))
sys.path.insert(0, os.path.abspath("../../scripts"))
sys.path.insert(0, os.path.abspath("../sphinx_ext/"))
sys.path.insert(0, os.path.abspath("../..")) # noqa: PTH100
sys.path.insert(0, os.path.abspath("../../scripts")) # noqa: PTH100
sys.path.insert(0, os.path.abspath("../sphinx_ext/")) # noqa: PTH100

from datetime import date

@@ -26,7 +26,7 @@
project = "py-pde"
module_name = "pde"
author = "Zwicker Group"
copyright = f"{date.today().year}, {author}" # @ReservedAssignment
copyright = f"{date.today().year}, {author}" # @ReservedAssignment # noqa: A001
html_logo = "_images/logo_small.png"

# Determine the version from the actual package
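The `# noqa: PTH100` and `# noqa: A001` comments added above silence a single ruff rule on a single line instead of disabling it project-wide. A minimal illustration of the mechanism (the surrounding code is a sketch, not taken from the commit):

```python
import os
import sys

# PTH100 (flake8-use-pathlib) flags os.path.abspath(); keeping the os.path
# call is intentional here, so the rule is exempted for this line only.
sys.path.insert(0, os.path.abspath(".."))  # noqa: PTH100

# A001 (flake8-builtins) flags names that shadow a builtin such as `copyright`;
# Sphinx expects exactly this variable name in conf.py, hence the exemption.
copyright = "2024, Zwicker Group"  # noqa: A001
```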
5 changes: 3 additions & 2 deletions docs/source/manual/contributing.rst
@@ -55,8 +55,9 @@ where ``vector_component`` is either 0 or 1.

Coding style
""""""""""""
The coding style is enforced using `isort <https://timothycrosley.github.io/isort/>`_
and `black <https://black.readthedocs.io/>`_. Moreover, we use `Google Style docstrings
The coding style is enforced using `ruff <https://docs.astral.sh/ruff/>`_, based on the
styles suggested by `isort <https://timothycrosley.github.io/isort/>`_ and
`black <https://black.readthedocs.io/>`_. Moreover, we use `Google Style docstrings
<https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings>`_,
which might be best `learned by example
<https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html>`_.
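Since the updated paragraph recommends learning Google Style docstrings by example, here is a brief illustrative sketch of the format (not taken from the py-pde sources):

```python
def advance(state: float, rate: float, dt: float = 1e-3) -> float:
    """Advance a scalar state by one explicit Euler step.

    Args:
        state: Current value of the state variable.
        rate: Time derivative of the state.
        dt: Time step of the integration.

    Returns:
        The state after one step of length `dt`.
    """
    return state + rate * dt
```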
11 changes: 6 additions & 5 deletions docs/source/run_autodoc.py
@@ -4,6 +4,7 @@
import logging
import os
import subprocess as sp
from pathlib import Path

logging.basicConfig(level=logging.INFO)

@@ -31,21 +32,21 @@ def replace_in_file(infile, replacements, outfile=None):
if outfile is None:
outfile = infile

with open(infile) as fp:
with Path(infile).open() as fp:
content = fp.read()

for key, value in replacements.items():
content = content.replace(key, value)

with open(outfile, "w") as fp:
with Path(outfile).open("w") as fp:
fp.write(content)


def main():
# remove old files
for path in glob.glob(f"{OUTPUT_PATH}/*.rst"):
for path in Path(OUTPUT_PATH).glob("*.rst"):
logging.info("Remove file `%s`", path)
os.remove(path)
path.unlink()

# run sphinx-apidoc
sp.check_call(
Expand All @@ -65,7 +66,7 @@ def main():
)

# replace unwanted information
for path in glob.glob(f"{OUTPUT_PATH}/*.rst"):
for path in Path(OUTPUT_PATH).glob("*.rst"):
logging.info("Patch file `%s`", path)
replace_in_file(path, REPLACEMENTS)

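The hunks above replace `open`, `glob.glob`, and `os.remove` with their `pathlib` equivalents, which is what ruff's flake8-use-pathlib (PTH) rules ask for. The substitutions in isolation (paths are placeholders):

```python
from pathlib import Path

output = Path("packages")  # placeholder directory

# Path.glob replaces glob.glob and yields Path objects directly
for rst_file in output.glob("*.rst"):
    rst_file.unlink()  # replaces os.remove(path)

# Path.open replaces the open() builtin for both reading and writing
template = Path("template.rst")  # placeholder file
with template.open() as fp:
    content = fp.read()
with (output / "result.rst").open("w") as fp:
    fp.write(content)
```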
4 changes: 2 additions & 2 deletions examples/pde_brusselator_class.py
@@ -27,11 +27,11 @@
class BrusselatorPDE(PDEBase):
"""Brusselator with diffusive mobility."""

def __init__(self, a=1, b=3, diffusivity=[1, 0.1], bc="auto_periodic_neumann"):
def __init__(self, a=1, b=3, diffusivity=None, bc="auto_periodic_neumann"):
super().__init__()
self.a = a
self.b = b
self.diffusivity = diffusivity # spatial mobility
self.diffusivity = [1, 0.1] if diffusivity is None else diffusivity
self.bc = bc # boundary condition

def get_initial_state(self, grid):
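The `BrusselatorPDE` change above removes a mutable default argument, a pitfall flagged by flake8-bugbear (B006): the default list is created once at function definition and shared between all calls. A standalone demonstration of the problem and of the `None`-sentinel fix used above:

```python
def bad_append(item, items=[]):  # B006: one shared list for every call
    items.append(item)
    return items


def good_append(item, items=None):
    items = [] if items is None else items  # fresh list per call
    items.append(item)
    return items


print(bad_append(1), bad_append(2))    # [1, 2] [1, 2] -- state leaks between calls
print(good_append(1), good_append(2))  # [1] [2]
```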
11 changes: 6 additions & 5 deletions pde/__init__.py
@@ -4,7 +4,7 @@
# determine the package version
try:
# try reading version of the automatically generated module
from ._version import __version__ # type: ignore
from ._version import __version__
except ImportError:
# determine version automatically from VCS information
from importlib.metadata import PackageNotFoundError, version
@@ -20,7 +20,8 @@
from .tools.config import Config, environment

config = Config() # initialize the default configuration
del Config # clean name space

import contextlib

# import all other modules that should occupy the main name space
from .fields import * # @UnusedWildImport
@@ -32,7 +33,7 @@
from .trackers import * # @UnusedWildImport
from .visualization import * # @UnusedWildImport

try:
with contextlib.suppress(ImportError):
from .tools.modelrunner import *
except ImportError:
pass # modelrunner extensions are simply not loaded

del contextlib, Config # clean name space
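The `__init__.py` hunk above replaces a `try`/`except ImportError`/`pass` block with `contextlib.suppress`, the form suggested by ruff's flake8-simplify rule SIM105 when an exception is deliberately ignored. Both spellings behave the same, as this sketch (with a hypothetical optional module) shows:

```python
import contextlib

# before: silently swallow the ImportError
try:
    import optional_extension  # hypothetical optional dependency
except ImportError:
    pass

# after: identical behavior, with the intent stated up front
with contextlib.suppress(ImportError):
    import optional_extension  # hypothetical optional dependency
```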
2 changes: 1 addition & 1 deletion pde/fields/datafield_base.py
@@ -1131,7 +1131,7 @@ def get_vector_data(self, transpose: bool = False, **kwargs) -> dict[str, Any]:
Returns:
dict: Information useful for plotting a vector field
"""
raise NotImplementedError()
raise NotImplementedError

def _plot_line(
self,
2 changes: 1 addition & 1 deletion pde/fields/scalar.py
@@ -372,7 +372,7 @@ def slice(
raise ValueError(
f"The axes {ax} is not contained in "
f"{self.grid} with axes {self.grid.axes}"
)
) from None
ax_remove.append(i)

# check the position
4 changes: 2 additions & 2 deletions pde/fields/tensorial.py
@@ -124,8 +124,8 @@ def _get_axes_index(self, key: tuple[int | str, int | str]) -> tuple[int, int]:
try:
if len(key) != 2:
raise IndexError("Index must be given as two integers")
except TypeError:
raise IndexError("Index must be given as two values")
except TypeError as err:
raise IndexError("Index must be given as two values") from err
return tuple(self.grid.get_axis_index(k) for k in key) # type: ignore

def __getitem__(self, key: tuple[int | str, int | str]) -> ScalarField:
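Several hunks above and below add `from err` or `from None` when raising inside an `except` block, which flake8-bugbear's B904 (enabled through ruff) requires. A short sketch of what the two spellings mean, using a hypothetical helper:

```python
def parse_index(key):
    """Interpret `key` as a pair of indices (illustrative only)."""
    try:
        first, second = key
    except TypeError as err:
        # chain explicitly: the original TypeError remains visible as the cause
        raise IndexError("Index must be given as two values") from err
    except ValueError:
        # suppress the context: the caller only sees the cleaner message
        raise IndexError("Index must be given as exactly two values") from None
    return first, second
```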
4 changes: 2 additions & 2 deletions pde/fields/vectorial.py
@@ -317,8 +317,8 @@ def check_rank(arr: nb.types.Type | nb.types.Optional) -> None:
@register_jitable
def calc(a: np.ndarray, b: np.ndarray, out: np.ndarray) -> np.ndarray:
"""Calculate outer product between fields `a` and `b`"""
for i in range(0, dim):
for j in range(0, dim):
for i in range(dim):
for j in range(dim):
out[i, j, :] = a[i] * b[j]
return out

3 changes: 2 additions & 1 deletion pde/grids/base.py
@@ -1021,7 +1021,7 @@ def contains_point(
the grid
"""
cell_coords = self.transform(points, source=coords, target="cell", full=full)
return np.all((0 <= cell_coords) & (cell_coords <= self.shape), axis=-1) # type: ignore
return np.all((cell_coords >= 0) & (cell_coords <= self.shape), axis=-1) # type: ignore

def iter_mirror_points(
self, point: np.ndarray, with_self: bool = False, only_periodic: bool = True
@@ -1222,6 +1222,7 @@ def register_operator(factor_func_arg: OperatorFactory):
else:
# method is used directly
register_operator(factory_func)
return None

@hybridmethod # type: ignore
@property
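Two smaller fixes appear in the `grids/base.py` hunks above: the comparison now keeps the variable on the left (`cell_coords >= 0` instead of `0 <= cell_coords`, ruff's "Yoda condition" check SIM300), and an explicit `return None` is added because another branch of the same function returns a value (flake8-return, RET503). A compact sketch of the latter pattern, with hypothetical names:

```python
registry = []


def register(factory=None):
    """Register `factory` directly, or return a decorator when called bare."""
    if factory is None:
        # decorator usage: this branch returns a value ...
        return lambda func: register(func)
    registry.append(factory)
    # ... so flake8-return wants the fall-through return spelled out as well
    return None
```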
9 changes: 5 additions & 4 deletions pde/grids/boundaries/axes.py
@@ -264,7 +264,7 @@ def __setitem__(self, index, data) -> None:

else:
# handle all other cases, in particular integer indices
return super().__setitem__(index, data)
super().__setitem__(index, data)

def get_mathematical_representation(self, field_name: str = "C") -> str:
"""Return mathematical representation of the boundary condition."""
@@ -303,7 +303,7 @@ def set_ghost_cells(
for i, j in itertools.product([0, -1], [0, -1]):
d[..., i, j] = (d[..., nxt[i], j] + d[..., i, nxt[j]]) / 2

elif self.grid.num_axes >= 3:
elif self.grid.num_axes == 3:
# iterate all edges
for i, j in itertools.product([0, -1], [0, -1]):
d[..., :, i, j] = (+d[..., :, nxt[i], j] + d[..., :, i, nxt[j]]) / 2
@@ -316,8 +316,9 @@
+ d[..., i, nxt[j], k]
+ d[..., i, j, nxt[k]]
) / 3
else:
logging.getLogger(self.__class__.__name__).warning(

elif self.grid.num_axes > 3:
raise NotImplementedError(
f"Can't interpolate corners for grid with {self.grid.num_axes} axes"
)

2 changes: 1 addition & 1 deletion pde/grids/boundaries/axis.py
@@ -287,7 +287,7 @@ def from_data(cls, grid: GridBase, axis: int, data, rank: int = 0) -> BoundaryPa
# if len is not supported, the format must be wrong
raise BCDataError(
f"Unsupported boundary format: `{data}`. " + cls.get_help()
)
) from None
else:
if data_len == 2:
# assume that data is given for each boundary
25 changes: 13 additions & 12 deletions pde/grids/boundaries/local.py
@@ -426,7 +426,7 @@ def from_str(
except KeyError:
raise BCDataError(
f"Boundary condition `{condition}` not defined. " + cls.get_help()
)
) from None

# create the actual class
return boundary_class(grid=grid, axis=axis, upper=upper, rank=rank, **kwargs)
@@ -459,7 +459,7 @@ def from_dict(
data = data.copy() # need to make a copy since we modify it below

# parse all possible variants that could be given
if "type" in data.keys():
if "type" in data:
# type is given (optionally with a value)
b_type = data.pop("type")
return cls.from_str(grid, axis, upper, condition=b_type, rank=rank, **data)
@@ -1224,14 +1224,13 @@ def __init__(
except Exception as err:
if self._is_func:
raise BCDataError(
f"Could not evaluate BC function. Expected signature "
f"{signature}.\nEncountered error: {err}"
)
f"Could not evaluate BC function. Expected signature {signature}."
) from err
else:
raise BCDataError(
f"Could not evaluate BC expression `{expression}` with signature "
f"{signature}.\nEncountered error: {err}"
)
f"{signature}."
) from err

@property
def _test_values(self) -> tuple[float, ...]:
@@ -1275,7 +1274,7 @@ def value_func(*args):

except nb.NumbaError:
# if compilation fails, we simply fall back to pure-python mode
self._logger.warning(f"Cannot compile BC {self}")
self._logger.warning("Cannot compile BC %s", self)

@register_jitable
def value_func(*args):
@@ -1360,7 +1359,7 @@ def _get_function_from_expression(self, do_jit: bool) -> Callable:

except nb.NumbaError:
# if compilation fails, we simply fall back to pure-python mode
self._logger.warning(f"Cannot compile BC {self._func_expression}")
self._logger.warning("Cannot compile BC %s", self._func_expression)
# calculate the expected value to test this later (and fail early)
expected = func(*self._test_values)

@@ -1391,12 +1390,14 @@ def value_func(grid_value, dx, x, y, z, t):

else:
# cheap way to signal a problem
raise ValueError
raise ValueError from None

# compile the actual function and check the result
result_compiled = value_func(*self._test_values)
if not np.allclose(result_compiled, expected):
raise RuntimeError("Compiled function does not give same value")
raise RuntimeError(
"Compiled function does not give same value"
) from None

return value_func # type: ignore

@@ -2974,4 +2975,4 @@ def registered_boundary_condition_names() -> dict[str, type[BCBase]]:
Returns:
dict: a dictionary with the names of the boundary conditions that can be used
"""
return {cls_name: cls for cls_name, cls in BCBase._conditions.items()}
return dict(BCBase._conditions.items())
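The logging calls in `local.py` above switch from f-strings to %-style placeholders, which ruff's logging rules (G004 from flake8-logging-format) prefer: the message is only interpolated when the record is actually emitted, and handlers receive the unformatted template plus its arguments. A minimal sketch:

```python
import logging

logger = logging.getLogger("pde.example")  # hypothetical logger name
bc = "derivative"

# eager: the f-string is built even if WARNING records are filtered out
logger.warning(f"Cannot compile BC {bc}")

# lazy: formatting is deferred until the record is handled
logger.warning("Cannot compile BC %s", bc)
```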
2 changes: 1 addition & 1 deletion pde/grids/cartesian.py
@@ -319,7 +319,7 @@ def _get_axis(axis):
try:
axis = self.axes.index(axis)
except ValueError:
raise ValueError(f"Axis `{axis}` not defined")
raise ValueError(f"Axis `{axis}` not defined") from None
return axis

if extract == "auto":
3 changes: 2 additions & 1 deletion pde/grids/operators/cartesian.py
@@ -986,7 +986,8 @@ def _make_divergence_scipy_nd(

def divergence(arr: np.ndarray, out: np.ndarray) -> None:
"""Apply divergence operator to array `arr`"""
assert arr.shape[0] == len(data_shape) and arr.shape[1:] == data_shape
assert arr.shape[0] == len(data_shape)
assert arr.shape[1:] == data_shape

# need to initialize with zeros since data is added later
if out is None:
11 changes: 5 additions & 6 deletions pde/grids/operators/polar_sym.py
@@ -300,12 +300,11 @@ def _get_laplace_matrix(bcs: Boundaries) -> tuple[np.ndarray, np.ndarray]:
if r_min == 0:
matrix[i, i + 1] = 2 * scale
continue # the special case of the inner boundary is handled
else:
const, entries = bcs[0].get_sparse_matrix_data((-1,))
factor = scale - scale_i
vector[i] += const * factor
for k, v in entries.items():
matrix[i, k] += v * factor
const, entries = bcs[0].get_sparse_matrix_data((-1,))
factor = scale - scale_i
vector[i] += const * factor
for k, v in entries.items():
matrix[i, k] += v * factor

else:
matrix[i, i - 1] = scale - scale_i
6 changes: 4 additions & 2 deletions pde/grids/operators/spherical_sym.py
@@ -57,7 +57,8 @@ def make_laplace(grid: SphericalSymGrid, *, conservative: bool = True) -> Operat
# create a conservative spherical laplace operator
rl = rs - dr / 2 # inner radii of spherical shells
rh = rs + dr / 2 # outer radii
assert np.isclose(rl[0], r_min) and np.isclose(rh[-1], r_max)
assert np.isclose(rl[0], r_min)
assert np.isclose(rh[-1], r_max)
volumes = (rh**3 - rl**3) / 3 # volume of the spherical shells
factor_l = rl**2 / (dr * volumes)
factor_h = rh**2 / (dr * volumes)
@@ -496,7 +497,8 @@ def make_tensor_double_divergence(
rl = rs - dr / 2 # inner radii of spherical shells
rh = rs + dr / 2 # outer radii
r_min, r_max = grid.axes_bounds[0]
assert np.isclose(rl[0], r_min) and np.isclose(rh[-1], r_max)
assert np.isclose(rl[0], r_min)
assert np.isclose(rh[-1], r_max)
volumes = (rh**3 - rl**3) / 3 # volume of the spherical shells
factor_l = rl / volumes
factor_h = rh / volumes
1 change: 0 additions & 1 deletion pde/pdes/base.py
@@ -447,7 +447,6 @@ def noise_realization(state_data: np.ndarray, t: float) -> np.ndarray:
@jit
def noise_realization(state_data: np.ndarray, t: float) -> None:
"""Helper function returning a noise realization."""
return None

return noise_realization # type: ignore

4 changes: 2 additions & 2 deletions pde/pdes/laplace.py
@@ -65,14 +65,14 @@ def solve_poisson_equation(
result = ScalarField(rhs.grid, label=label)
try:
solver(rhs.data, result.data)
except RuntimeError:
except RuntimeError as err:
magnitude = rhs.magnitude
if magnitude > 1e-10:
raise RuntimeError(
"Could not solve the Poisson problem. One possible reason for this is "
"that only periodic or Neumann conditions are applied although the "
f"magnitude of the field is {magnitude} and thus non-zero."
)
) from err
else:
raise # another error occurred

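The `solve_poisson_equation` hunk above combines both exception idioms: a recognized failure is wrapped in a more informative `RuntimeError` chained with `from err`, while anything unexpected is passed on unchanged by the bare `raise`. A reduced sketch of that shape (the solver call is a stand-in):

```python
import numpy as np


def solve(rhs: np.ndarray) -> np.ndarray:
    try:
        return run_solver(rhs)  # stand-in for the actual linear solve
    except RuntimeError as err:
        if abs(rhs.mean()) > 1e-10:
            # recognized situation: explain it and keep the original as the cause
            raise RuntimeError(
                "Could not solve: the right-hand side does not average to zero"
            ) from err
        raise  # unrelated failure: re-raise untouched


def run_solver(rhs: np.ndarray) -> np.ndarray:  # hypothetical placeholder
    raise RuntimeError("solver did not converge")
```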