Skip to content

Commit

Permalink
Made definition of cell coordinates more precise
Browse files Browse the repository at this point in the history
Two different versions of cell coordinates have been used, where integral
coordinates either refer to the cell center or its left edge. We now
define cell coordinates to use the latter, since then the integral part
denotes the cell, while the fractional part denotes the position within
the cell.

This change might break other packages downstream, so we will release a
new version of the package soon.
  • Loading branch information
david-zwicker committed Sep 22, 2023
1 parent 2b83ea6 commit 1412889
Show file tree
Hide file tree
Showing 8 changed files with 101 additions and 98 deletions.
5 changes: 3 additions & 2 deletions pde/fields/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -1346,7 +1346,8 @@ def interpolate(
coordinates.
bc:
The boundary conditions applied to the field, which affects values close
to the boundary. If omitted, the argument `fill` is used.
to the boundary. If omitted, the argument `fill` is used to determine
values outside the domain.
{ARG_BOUNDARIES_OPTIONAL}
fill (Number, optional):
Determines how values out of bounds are handled. If `None`, a
Expand Down Expand Up @@ -1453,7 +1454,7 @@ def insert(self, point: np.ndarray, amount: ArrayLike) -> None:
# determine the grid coordinates next to the chosen points
low = np.array(grid.axes_bounds)[:, 0]
c_l, d_l = np.divmod((point - low) / grid.discretization - 0.5, 1.0)
c_l = c_l.astype(np.intc) # support points to the left of the chosen points
c_l = c_l.astype(int) # support points to the left of the chosen points
w_l = 1 - d_l # weights of the low point
w_h = d_l # weights of the high point

Expand Down
18 changes: 9 additions & 9 deletions pde/fields/vectorial.py
Original file line number Diff line number Diff line change
Expand Up @@ -368,7 +368,7 @@ def divergence(
Additional arguments affecting how the operator behaves.
Returns:
:class:`~pde.fields.scalar.ScalarField`: result of applying the operator
:class:`~pde.fields.scalar.ScalarField`: Divergence of the field
"""
return self.apply_operator("divergence", bc=bc, out=out, **kwargs) # type: ignore

Expand Down Expand Up @@ -398,7 +398,7 @@ def gradient(
Additional arguments affecting how the operator behaves.
Returns:
:class:`~pde.fields.tensorial.Tensor2Field`: result of applying the operator
:class:`~pde.fields.tensorial.Tensor2Field`: Gradient of the field
"""
return self.apply_operator("vector_gradient", bc=bc, out=out, **kwargs) # type: ignore

Expand Down Expand Up @@ -428,7 +428,7 @@ def laplace(
Additional arguments affecting how the operator behaves.
Returns:
:class:`~pde.fields.vectorial.VectorField`: result of applying the operator
:class:`~pde.fields.vectorial.VectorField`: Laplacian of the field
"""
return self.apply_operator("vector_laplace", bc=bc, out=out, **kwargs) # type: ignore

Expand Down Expand Up @@ -457,8 +457,8 @@ def to_scalar(
Name of the returned field
Returns:
:class:`pde.fields.scalar.ScalarField`: the scalar field after
applying the operation
:class:`pde.fields.scalar.ScalarField`:
The scalar field after applying the operation
"""
if scalar == "auto":
if self.grid.dim > 1 or np.iscomplexobj(self.data):
Expand Down Expand Up @@ -503,8 +503,8 @@ def get_vector_data(
max_points (int):
The maximal number of points that is used along each axis. This
option can be used to sub-sample the data.
\**kwargs: Additional parameters are forwarded to
`grid.get_image_data`
\**kwargs:
Additional parameters forwarded to `grid.get_image_data`
Returns:
dict: Information useful for plotting an vector field
Expand Down Expand Up @@ -559,8 +559,8 @@ def _get_napari_layer_data( # type: ignore
Args:
max_points (int):
The maximal number of points that is used along each axis. This
option can be used to subsample the data.
The maximal number of points that is used along each axis. This option
can be used to subsample the data.
args (dict):
Additional arguments returned in the result, which affect how the layer
is shown.
Expand Down
39 changes: 25 additions & 14 deletions pde/grids/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -640,7 +640,8 @@ def _grid_to_cell(
marking what cell the point belongs to or whether fractional coordinates
are returned. The default is to return integers.
normalize (bool):
Flag indicating whether the points should be normalized
Flag indicating whether the points should be normalized, e.g., whether
periodic boundary conditions should be imposed.
Returns:
:class:`~numpy.ndarray`: The cell coordinates
Expand All @@ -654,7 +655,7 @@ def _grid_to_cell(
c_min = np.array(self.axes_bounds)[:, 0]
cells = (coords - c_min) / self.discretization
if truncate:
return cells.astype(np.intc) # type: ignore
return cells.astype(int) # type: ignore
else:
return cells # type: ignore

Expand All @@ -666,11 +667,21 @@ def transform(
) -> np.ndarray:
"""converts coordinates from one coordinate system to another
Supported coordinate systems include
* `cartesian`: Cartesian coordinates where each point carries `dim` values
* `cell`: Grid coordinates based on indexing the discretization cells
* `grid`: Grid coordinates where each point carries `num_axes` values
Supported coordinate systems include the following:
* `cartesian`:
Cartesian coordinates where each point carries `dim` values. These are the
true physical coordinates in space.
* `grid`:
Coordinates values in the coordinate system defined by the grid. A point is
thus characterized by `grid.num_axes` values.
* `cell`:
Normalized grid coordinates based on indexing the discretization cells. A
point is characterized by `grid.num_axes` values and the range of values for
a given axis is between `0` and `N`, where `N` is the number of grid points.
Consequently, the integral part of the cell coordinate denotes the cell,
while the fractional part denotes the relative position within the cell. In
particular, the cell center is located at `i + 0.5` with `i = 0, ..., N-1`.
Note:
Some conversion might involve projections if the coordinate system imposes
Expand Down Expand Up @@ -706,7 +717,7 @@ def transform(
if target == "grid":
return grid_coords
if target == "cell":
return self._grid_to_cell(grid_coords, normalize=False)
return self._grid_to_cell(grid_coords, truncate=False, normalize=False)

elif source == "cell":
# Cell coordinates given
Expand All @@ -719,7 +730,7 @@ def transform(

# convert cell coordinates to grid coordinates
c_min = np.array(self.axes_bounds)[:, 0]
grid_coords = c_min + (cells + 0.5) * self.discretization
grid_coords = c_min + cells * self.discretization

if target == "grid":
return grid_coords
Expand All @@ -735,7 +746,7 @@ def transform(
if target == "cartesian":
return self.point_to_cartesian(grid_coords, full=False)
elif target == "cell":
return self._grid_to_cell(grid_coords, normalize=False)
return self._grid_to_cell(grid_coords, truncate=False, normalize=False)
elif target == "grid":
return grid_coords

Expand Down Expand Up @@ -768,7 +779,7 @@ def contains_point(
the grid
"""
cell_coords = self.transform(points, source=coords, target="cell")
return np.all((0 <= cell_coords) & (cell_coords < self.shape), axis=-1) # type: ignore
return np.all((0 <= cell_coords) & (cell_coords <= self.shape), axis=-1) # type: ignore

@abstractmethod
def iter_mirror_points(
Expand Down Expand Up @@ -1401,9 +1412,9 @@ def _make_interpolator_compiled(
Args:
fill (Number, optional):
Determines how values out of bounds are handled. If `None`, a
`ValueError` is raised when out-of-bounds points are requested.
Otherwise, the given value is returned.
Determines how values out of bounds are handled. If `None`, `ValueError`
is raised when out-of-bounds points are requested. Otherwise, the given
value is returned.
with_ghost_cells (bool):
Flag indicating that the interpolator should work on the full data array
that includes values for the ghost points. If this is the case, the
Expand Down
4 changes: 2 additions & 2 deletions pde/grids/cartesian.py
Original file line number Diff line number Diff line change
Expand Up @@ -260,8 +260,8 @@ def get_random_point(
"""return a random point within the grid
Args:
boundary_distance (float): The minimal distance this point needs to
have from all boundaries.
boundary_distance (float):
The minimal distance this point needs to have from all boundaries.
coords (str):
Determines the coordinate system in which the point is specified. Valid
values are `cartesian`, `cell`, and `grid`;
Expand Down
76 changes: 39 additions & 37 deletions scripts/create_requirements.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ class Requirement:
"""simple class collecting data for a single required python package"""

name: str # name of the python package
version: str # minimal version
version_min: str # minimal version
usage: str = "" # description for how the package is used in py-pde
relation: Optional[str] = None # relation used to compare version number
essential: bool = False # basic requirement for the package
Expand All @@ -30,8 +30,8 @@ class Requirement:

@property
def short_version(self) -> str:
"""str: simplified version string"""
version = self.version.split(",")[0] # only use the first part
"""str: simplified version_min string"""
version = self.version_min.split(",")[0] # only use the first part
processing = True
while processing:
if version.endswith(".0"):
Expand All @@ -45,120 +45,122 @@ def line(self, relation: str = ">=") -> str:
Args:
relation (str):
The relation used for version comparison if self.relation is None
The relation used for version_min comparison if self.relation is None
Returns:
str: A string that can be written to a requirements file
"""
if self.relation is not None:
relation = self.relation
return f"{self.name}{relation}{self.version}"
return f"{self.name}{relation}{self.version_min}"


REQUIREMENTS = [
# essential requirements
Requirement(
name="matplotlib",
version="3.1.0",
version_min="3.1.0",
usage="Visualizing results",
essential=True,
),
Requirement(
name="numba",
version="0.56.0",
version_min="0.56.0",
usage="Just-in-time compilation to accelerate numerics",
essential=True,
),
Requirement(
name="numpy",
version="1.22.0",
version_min="1.22.0",
usage="Handling numerical data",
essential=True,
),
Requirement(
name="scipy",
version="1.10.0",
version_min="1.10.0",
usage="Miscellaneous scientific functions",
essential=True,
),
Requirement(
name="sympy",
version="1.9.0",
version_min="1.9.0",
usage="Dealing with user-defined mathematical expressions",
essential=True,
),
Requirement(
name="tqdm",
version="4.60",
version_min="4.60",
usage="Display progress bars during calculations",
essential=True,
),
# general, optional requirements
Requirement(
name="h5py",
version="2.10",
version_min="2.10",
usage="Storing data in the hierarchical file format",
collections={"full", "multiprocessing", "docs"},
),
Requirement(
name="pandas",
version="1.2",
version_min="1.2",
usage="Handling tabular data",
collections={"full", "multiprocessing", "docs"},
),
Requirement(
name="pyfftw",
version="0.12",
version_min="0.12",
usage="Faster Fourier transforms",
collections={"full"},
),
Requirement(
name="rocket-fft",
version="0.2",
version_min="0.2",
usage="Numba-compiled fast Fourier transforms",
collections={"full"},
),
Requirement(
name="ipywidgets",
version="7",
version_min="7",
usage="Jupyter notebook support",
collections={"interactive"},
),
Requirement(
name="mpi4py",
version="3",
version_min="3",
usage="Parallel processing using MPI",
collections={"multiprocessing"},
),
Requirement(
name="napari",
version="0.4.8",
version_min="0.4.8",
usage="Displaying images interactively",
collections={"interactive"},
),
Requirement(
name="numba-mpi",
version="0.22",
version_min="0.22",
usage="Parallel processing using MPI+numba",
collections={"multiprocessing"},
),
# for documentation only
Requirement(name="Sphinx", version="4", docs_only=True),
Requirement(name="sphinx-autodoc-annotation", version="1.0", docs_only=True),
Requirement(name="sphinx-gallery", version="0.6", docs_only=True),
Requirement(name="sphinx-rtd-theme", version="1", docs_only=True),
Requirement(name="Pillow", version="7.0", docs_only=True),
Requirement(name="Sphinx", version_min="4", docs_only=True),
Requirement(name="sphinx-autodoc-annotation", version_min="1.0", docs_only=True),
Requirement(name="sphinx-gallery", version_min="0.6", docs_only=True),
Requirement(name="sphinx-rtd-theme", version_min="1", docs_only=True),
Requirement(name="Pillow", version_min="7.0", docs_only=True),
# for tests only
Requirement(name="jupyter_contrib_nbextensions", version="0.5", tests_only=True),
Requirement(name="black", version="19.*", tests_only=True),
Requirement(name="importlib-metadata", version="5", tests_only=True),
Requirement(name="isort", version="5.1", tests_only=True),
Requirement(name="mypy", version="0.770", tests_only=True),
Requirement(name="notebook", version="6.5", relation="~=", tests_only=True),
Requirement(name="pyinstrument", version="3", tests_only=True),
Requirement(name="pytest", version="5.4", tests_only=True),
Requirement(name="pytest-cov", version="2.8", tests_only=True),
Requirement(name="pytest-xdist", version="1.30", tests_only=True),
Requirement(
name="jupyter_contrib_nbextensions", version_min="0.5", tests_only=True
),
Requirement(name="black", version_min="19.*", tests_only=True),
Requirement(name="importlib-metadata", version_min="5", tests_only=True),
Requirement(name="isort", version_min="5.1", tests_only=True),
Requirement(name="mypy", version_min="0.770", tests_only=True),
Requirement(name="notebook", version_min="6.5", relation="~=", tests_only=True),
Requirement(name="pyinstrument", version_min="3", tests_only=True),
Requirement(name="pytest", version_min="5.4", tests_only=True),
Requirement(name="pytest-cov", version_min="2.8", tests_only=True),
Requirement(name="pytest-xdist", version_min="1.30", tests_only=True),
]


Expand Down Expand Up @@ -211,7 +213,7 @@ def write_requirements_csv(
with open(path, "w") as fp:
writer = csv.writer(fp)
if incl_version:
writer.writerow(["Package", "Minimal version", "Usage"])
writer.writerow(["Package", "Minimal version_min", "Usage"])
else:
writer.writerow(["Package", "Usage"])
for r in sorted(requirements, key=lambda r: r.name.lower()):
Expand Down Expand Up @@ -243,7 +245,7 @@ def write_requirements_py(path: Path, requirements: List[Requirement]):

# add generated code
for r in sorted(requirements, key=lambda r: r.name.lower()):
content.append(f'check_package_version("{r.name}", "{r.version}")\n')
content.append(f'check_package_version("{r.name}", "{r.version_min}")\n')
content.append("del check_package_version\n")

# write content back to file
Expand Down Expand Up @@ -275,7 +277,7 @@ def write_from_template(
with template_path.open("r") as fp:
template = Template(fp.read())

# parse python version
# parse python version_min
major, minor = MAX_PYTHON_VERSION.split(".")
minor_next = int(minor) + 1

Expand Down
Loading

0 comments on commit 1412889

Please sign in to comment.