diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index c847e5e..e84faf9 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -18,7 +18,7 @@ jobs:
       - uses: actions/setup-python@v5.2.0
         name: Install Python
         with:
-          python-version: "3.10"
+          python-version: "3.12"
 
       - name: Install PyBuild
         run: |
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 52ab0ef..14f698f 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -20,7 +20,7 @@ jobs:
 
     strategy:
       matrix:
-        python-version: ["3.10", "3.13"]
+        python-version: ["3.11", "3.13"]
         os: ["ubuntu-latest"]
 
     runs-on: ${{ matrix.os }}
diff --git a/pyproject.toml b/pyproject.toml
index e4781ed..5e242e8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -14,14 +14,13 @@ name = "rasterix"
 description = "Raster extensions for Xarray"
 license = "Apache-2.0"
 readme = "README.md"
-requires-python = ">=3.10"
+requires-python = ">=3.11"
 keywords = ["xarray"]
 classifiers = [
     "Development Status :: 4 - Beta",
     "Natural Language :: English",
     "Operating System :: OS Independent",
     "Programming Language :: Python",
-    "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
     "Programming Language :: Python :: 3.13",
@@ -93,7 +92,7 @@ dependencies = [
 features = ["test"]
 
 [[tool.hatch.envs.test.matrix]]
-python = ["3.10", "3.13"]
+python = ["3.11", "3.13"]
 
 [tool.hatch.envs.test.scripts]
 run-coverage = "pytest -nauto --cov-config=pyproject.toml --cov=pkg --cov-report xml --cov=src --junitxml=junit.xml -o junit_family=legacy"
diff --git a/src/rasterix/odc_compat.py b/src/rasterix/odc_compat.py
new file mode 100644
index 0000000..53c8552
--- /dev/null
+++ b/src/rasterix/odc_compat.py
@@ -0,0 +1,473 @@
+# The code below is reproduced from the odc-geo project under terms of their Apache-2.0 license (reproduced below)
+#
+# Apache License
+# Version 2.0, January 2004
+# http://www.apache.org/licenses/
+#
+# TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+#
+# 1. Definitions.
+#
+# "License" shall mean the terms and conditions for use, reproduction,
+# and distribution as defined by Sections 1 through 9 of this document.
+#
+# "Licensor" shall mean the copyright owner or entity authorized by
+# the copyright owner that is granting the License.
+#
+# "Legal Entity" shall mean the union of the acting entity and all
+# other entities that control, are controlled by, or are under common
+# control with that entity. For the purposes of this definition,
+# "control" means (i) the power, direct or indirect, to cause the
+# direction or management of such entity, whether by contract or
+# otherwise, or (ii) ownership of fifty percent (50%) or more of the
+# outstanding shares, or (iii) beneficial ownership of such entity.
+#
+# "You" (or "Your") shall mean an individual or Legal Entity
+# exercising permissions granted by this License.
+#
+# "Source" form shall mean the preferred form for making modifications,
+# including but not limited to software source code, documentation
+# source, and configuration files.
+#
+# "Object" form shall mean any form resulting from mechanical
+# transformation or translation of a Source form, including but
+# not limited to compiled object code, generated documentation,
+# and conversions to other media types.
+# +# "Work" shall mean the work of authorship, whether in Source or +# Object form, made available under the License, as indicated by a +# copyright notice that is included in or attached to the work +# (an example is provided in the Appendix below). +# +# "Derivative Works" shall mean any work, whether in Source or Object +# form, that is based on (or derived from) the Work and for which the +# editorial revisions, annotations, elaborations, or other modifications +# represent, as a whole, an original work of authorship. For the purposes +# of this License, Derivative Works shall not include works that remain +# separable from, or merely link (or bind by name) to the interfaces of, +# the Work and Derivative Works thereof. +# +# "Contribution" shall mean any work of authorship, including +# the original version of the Work and any modifications or additions +# to that Work or Derivative Works thereof, that is intentionally +# submitted to Licensor for inclusion in the Work by the copyright owner +# or by an individual or Legal Entity authorized to submit on behalf of +# the copyright owner. For the purposes of this definition, "submitted" +# means any form of electronic, verbal, or written communication sent +# to the Licensor or its representatives, including but not limited to +# communication on electronic mailing lists, source code control systems, +# and issue tracking systems that are managed by, or on behalf of, the +# Licensor for the purpose of discussing and improving the Work, but +# excluding communication that is conspicuously marked or otherwise +# designated in writing by the copyright owner as "Not a Contribution." +# +# "Contributor" shall mean Licensor and any individual or Legal Entity +# on behalf of whom a Contribution has been received by Licensor and +# subsequently incorporated within the Work. +# +# 2. Grant of Copyright License. Subject to the terms and conditions of +# this License, each Contributor hereby grants to You a perpetual, +# worldwide, non-exclusive, no-charge, royalty-free, irrevocable +# copyright license to reproduce, prepare Derivative Works of, +# publicly display, publicly perform, sublicense, and distribute the +# Work and such Derivative Works in Source or Object form. +# +# 3. Grant of Patent License. Subject to the terms and conditions of +# this License, each Contributor hereby grants to You a perpetual, +# worldwide, non-exclusive, no-charge, royalty-free, irrevocable +# (except as stated in this section) patent license to make, have made, +# use, offer to sell, sell, import, and otherwise transfer the Work, +# where such license applies only to those patent claims licensable +# by such Contributor that are necessarily infringed by their +# Contribution(s) alone or by combination of their Contribution(s) +# with the Work to which such Contribution(s) was submitted. If You +# institute patent litigation against any entity (including a +# cross-claim or counterclaim in a lawsuit) alleging that the Work +# or a Contribution incorporated within the Work constitutes direct +# or contributory patent infringement, then any patent licenses +# granted to You under this License for that Work shall terminate +# as of the date such litigation is filed. +# +# 4. Redistribution. 
You may reproduce and distribute copies of the +# Work or Derivative Works thereof in any medium, with or without +# modifications, and in Source or Object form, provided that You +# meet the following conditions: +# +# (a) You must give any other recipients of the Work or +# Derivative Works a copy of this License; and +# +# (b) You must cause any modified files to carry prominent notices +# stating that You changed the files; and +# +# (c) You must retain, in the Source form of any Derivative Works +# that You distribute, all copyright, patent, trademark, and +# attribution notices from the Source form of the Work, +# excluding those notices that do not pertain to any part of +# the Derivative Works; and +# +# (d) If the Work includes a "NOTICE" text file as part of its +# distribution, then any Derivative Works that You distribute must +# include a readable copy of the attribution notices contained +# within such NOTICE file, excluding those notices that do not +# pertain to any part of the Derivative Works, in at least one +# of the following places: within a NOTICE text file distributed +# as part of the Derivative Works; within the Source form or +# documentation, if provided along with the Derivative Works; or, +# within a display generated by the Derivative Works, if and +# wherever such third-party notices normally appear. The contents +# of the NOTICE file are for informational purposes only and +# do not modify the License. You may add Your own attribution +# notices within Derivative Works that You distribute, alongside +# or as an addendum to the NOTICE text from the Work, provided +# that such additional attribution notices cannot be construed +# as modifying the License. +# +# You may add Your own copyright statement to Your modifications and +# may provide additional or different license terms and conditions +# for use, reproduction, or distribution of Your modifications, or +# for any such Derivative Works as a whole, provided Your use, +# reproduction, and distribution of the Work otherwise complies with +# the conditions stated in this License. +# +# 5. Submission of Contributions. Unless You explicitly state otherwise, +# any Contribution intentionally submitted for inclusion in the Work +# by You to the Licensor shall be under the terms and conditions of +# this License, without any additional terms or conditions. +# Notwithstanding the above, nothing herein shall supersede or modify +# the terms of any separate license agreement you may have executed +# with Licensor regarding such Contributions. +# +# 6. Trademarks. This License does not grant permission to use the trade +# names, trademarks, service marks, or product names of the Licensor, +# except as required for reasonable and customary use in describing the +# origin of the Work and reproducing the content of the NOTICE file. +# +# 7. Disclaimer of Warranty. Unless required by applicable law or +# agreed to in writing, Licensor provides the Work (and each +# Contributor provides its Contributions) on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied, including, without limitation, any warranties or conditions +# of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A +# PARTICULAR PURPOSE. You are solely responsible for determining the +# appropriateness of using or redistributing the Work and assume any +# risks associated with Your exercise of permissions under this License. +# +# 8. Limitation of Liability. 
In no event and under no legal theory, +# whether in tort (including negligence), contract, or otherwise, +# unless required by applicable law (such as deliberate and grossly +# negligent acts) or agreed to in writing, shall any Contributor be +# liable to You for damages, including any direct, indirect, special, +# incidental, or consequential damages of any character arising as a +# result of this License or out of the use or inability to use the +# Work (including but not limited to damages for loss of goodwill, +# work stoppage, computer failure or malfunction, or any and all +# other commercial damages or losses), even if such Contributor +# has been advised of the possibility of such damages. +# +# 9. Accepting Warranty or Additional Liability. While redistributing +# the Work or Derivative Works thereof, You may choose to offer, +# and charge a fee for, acceptance of support, warranty, indemnity, +# or other liability obligations and/or rights consistent with this +# License. However, in accepting such obligations, You may act only +# on Your own behalf and on Your sole responsibility, not on behalf +# of any other Contributor, and only if You agree to indemnify, +# defend, and hold each Contributor harmless for any liability +# incurred by, or claims asserted against, such Contributor by reason +# of your accepting any such warranty or additional liability. +# +# END OF TERMS AND CONDITIONS +# +# APPENDIX: How to apply the Apache License to your work. +# +# To apply the Apache License to your work, attach the following +# boilerplate notice, with the fields enclosed by brackets "[]" +# replaced with your own identifying information. (Don't include +# the brackets!) The text should be enclosed in the appropriate +# comment syntax for the file format. We also recommend that a +# file or class name and description of purpose be included on the +# same "printed page" as the copyright notice for easier +# identification within third-party archives. +# +# Copyright [yyyy] [name of copyright owner] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import itertools +import math +from collections.abc import Iterable, Sequence +from typing import Any + +from affine import Affine + + +def _snap_edge_pos(x0: float, x1: float, res: float, tol: float) -> tuple[float, int]: + assert res > 0 + assert x1 >= x0 + _x0 = math.floor(maybe_int(x0 / res, tol)) + _x1 = math.ceil(maybe_int(x1 / res, tol)) + nx = max(1, _x1 - _x0) + return _x0 * res, nx + + +def _snap_edge(x0: float, x1: float, res: float, tol: float) -> tuple[float, int]: + assert x1 >= x0 + if res > 0: + return _snap_edge_pos(x0, x1, res, tol) + _tx, nx = _snap_edge_pos(x0, x1, -res, tol) + tx = _tx + nx * (-res) + return tx, nx + + +def snap_grid( + x0: float, x1: float, res: float, off_pix: float | None = 0, tol: float = 1e-6 +) -> tuple[float, int]: + """ + Compute grid snapping for single axis. 
+
+    :param x0: In point ``x0 <= x1``
+    :param x1: Out point ``x0 <= x1``
+    :param res: Pixel size and direction (can be negative)
+    :param off_pix:
+       Pixel fraction to align to ``x=0``.
+       0 - edge aligned
+       0.5 - center aligned
+       None - don't snap
+
+    :return: ``tx, nx`` that defines 1-d grid, such that ``x0`` and ``x1`` are within edge pixels.
+    """
+    assert (off_pix is None) or (0 <= off_pix < 1)
+    if off_pix is None:
+        if res > 0:
+            nx = math.ceil(maybe_int((x1 - x0) / res, tol))
+            return x0, max(1, nx)
+        nx = math.ceil(maybe_int((x1 - x0) / (-res), tol))
+        return x1, max(nx, 1)
+
+    off = off_pix * abs(res)
+    _tx, nx = _snap_edge(x0 - off, x1 - off, res, tol)
+    if x1 == x0:
+        nx = 0
+    return _tx + off, nx
+
+
+def split_float(x: float) -> tuple[float, float]:
+    """
+    Split float number into whole and fractional parts.
+
+    Adding the two numbers back together should result in the original value.
+    Fractional part is always in the ``(-0.5, +0.5)`` interval, and whole part
+    is equivalent to ``round(x)``.
+
+    :param x: floating point number
+    :return: ``whole, fraction``
+    """
+    if not math.isfinite(x):
+        return (x, 0)
+
+    x_part = math.fmod(x, 1.0)
+    x_whole = x - x_part
+    if x_part > 0.5:
+        x_part -= 1
+        x_whole += 1
+    elif x_part < -0.5:
+        x_part += 1
+        x_whole -= 1
+    return (x_whole, x_part)
+
+
+def maybe_int(x: float, tol: float) -> int | float:
+    """
+    Turn almost ints to actual ints.
+
+    pass through other values unmodified.
+    """
+    if not math.isfinite(x):
+        return x
+
+    x_whole, x_part = split_float(x)
+
+    if abs(x_part) < tol:  # almost int
+        return int(x_whole)
+    return x
+
+
+class BoundingBox(Sequence[float]):
+    """Bounding box, defining extent in cartesian coordinates."""
+
+    __slots__ = "_box"
+
+    def __init__(self, left: float, bottom: float, right: float, top: float):
+        self._box = (left, bottom, right, top)
+
+    @property
+    def left(self):
+        return self._box[0]
+
+    @property
+    def bottom(self):
+        return self._box[1]
+
+    @property
+    def right(self):
+        return self._box[2]
+
+    @property
+    def top(self):
+        return self._box[3]
+
+    @property
+    def bbox(self) -> tuple[float, float, float, float]:
+        return self._box
+
+    def __iter__(self):
+        return self._box.__iter__()
+
+    def __eq__(self, other: Any) -> bool:
+        if isinstance(other, BoundingBox):
+            return self._box == other._box
+        return self._box == other
+
+    def __hash__(self) -> int:
+        return hash(self._box)
+
+    def __len__(self) -> int:
+        return 4
+
+    def __getitem__(self, idx):
+        return self._box[idx]
+
+    def __repr__(self) -> str:
+        return f"BoundingBox(left={self.left}, bottom={self.bottom}, right={self.right}, top={self.top})"
+
+    def __str__(self) -> str:
+        return self.__repr__()
+
+    def __and__(self, other: "BoundingBox") -> "BoundingBox":
+        return bbox_intersection([self, other])
+
+    def __or__(self, other: "BoundingBox") -> "BoundingBox":
+        return bbox_union([self, other])
+
+    @property
+    def shape(self) -> tuple[int, int]:
+        """``(int(span_y), int(span_x))``."""
+        return (self.height, self.width)
+
+    @property
+    def points(self) -> list[tuple[float, float]]:
+        """Extract four corners of the bounding box."""
+        x0, y0, x1, y1 = self._box
+        return list(itertools.product((x0, x1), (y0, y1)))
+
+    def transform(self, transform: Affine) -> "BoundingBox":
+        """
+        Map bounding box through a linear transform.
+
+        Apply linear transform on four points of the bounding box and compute bounding box of these
+        four points.
+ """ + pts = [transform * pt for pt in self.points] + xx = [x for x, _ in pts] + yy = [y for _, y in pts] + return BoundingBox(min(xx), min(yy), max(xx), max(yy)) + + @staticmethod + def from_xy(x: tuple[float, float], y: tuple[float, float]) -> "BoundingBox": + """ + Construct :py:class:`BoundingBox` from x and y ranges. + + :param x: (left, right) + :param y: (bottom, top) + """ + x1, x2 = sorted(x) + y1, y2 = sorted(y) + return BoundingBox(x1, y1, x2, y2) + + @staticmethod + def from_points(p1: tuple[float, float], p2: tuple[float, float]) -> "BoundingBox": + """ + Construct :py:class:`BoundingBox` from two points. + + :param p1: (x, y) + :param p2: (x, y) + """ + return BoundingBox.from_xy((p1[0], p2[0]), (p1[1], p2[1])) + + @staticmethod + def from_transform(shape: tuple[int, int], transform: Affine) -> "BoundingBox": + """ + Construct :py:class:`BoundingBox` from image shape and transform. + + :param shape: image dimensions + :param transform: Affine mapping from pixel to world + """ + ny, nx = shape + pts = [(0, 0), (nx, 0), (nx, ny), (0, ny)] + transform.itransform(pts) + xx, yy = list(zip(*pts)) + return BoundingBox.from_xy((min(xx), max(xx)), (min(yy), max(yy))) + + def round(self) -> "BoundingBox": + """ + Expand bounding box to nearest integer on all sides. + """ + x0, y0, x1, y1 = self._box + return BoundingBox(math.floor(x0), math.floor(y0), math.ceil(x1), math.ceil(y1)) + + +def bbox_union(bbs: Iterable[BoundingBox]) -> BoundingBox: + """ + Compute union of bounding boxes. + + Given a stream of bounding boxes compute enclosing :py:class:`~odc.geo.geom.BoundingBox`. + """ + # pylint: disable=invalid-name + try: + bb, *bbs = bbs + except ValueError: + raise ValueError("Union of empty stream is undefined") from None + + L, B, R, T = bb + + for bb in bbs: + l, b, r, t = bb # noqa: E741 + L = min(l, L) + B = min(b, B) + R = max(r, R) + T = max(t, T) + + return BoundingBox(L, B, R, T) + + +def bbox_intersection(bbs: Iterable[BoundingBox]) -> BoundingBox: + """ + Compute intersection of bounding boxes. + + Given a stream of bounding boxes compute the overlap :py:class:`~odc.geo.geom.BoundingBox`. 
+ """ + # pylint: disable=invalid-name + try: + bb, *bbs = bbs + except ValueError: + raise ValueError("Intersection of empty stream is undefined") from None + + L, B, R, T = bb + + for bb in bbs: + l, b, r, t = bb # noqa: E741 + L = max(l, L) + B = max(b, B) + R = min(r, R) + T = min(t, T) + + return BoundingBox(L, B, R, T) diff --git a/src/rasterix/raster_index.py b/src/rasterix/raster_index.py index 25bfe3c..e89bde7 100644 --- a/src/rasterix/raster_index.py +++ b/src/rasterix/raster_index.py @@ -1,8 +1,9 @@ from __future__ import annotations +import math import textwrap -from collections.abc import Hashable, Mapping -from typing import Any, TypeVar +from collections.abc import Hashable, Iterable, Mapping, Sequence +from typing import Any, Self, TypeVar import numpy as np import pandas as pd @@ -14,6 +15,7 @@ from xarray.core.indexes import CoordinateTransformIndex, PandasIndex from xarray.core.indexing import IndexSelResult, merge_sel_results +from rasterix.odc_compat import BoundingBox, bbox_intersection, bbox_union, maybe_int, snap_grid from rasterix.rioxarray_compat import guess_dims T_Xarray = TypeVar("T_Xarray", "DataArray", "Dataset") @@ -32,6 +34,15 @@ def assign_index(obj: T_Xarray, *, x_dim: str | None = None, y_dim: str | None = return obj.assign_coords(coords) +def _assert_transforms_are_compatible(*affines) -> None: + A1 = affines[0] + for index, A2 in enumerate(affines[1:]): + if A1.a != A2.a or A1.b != A2.b or A1.d != A2.d or A1.e != A2.e: + raise ValueError( + f"Transform parameters are not compatible for affine 0: {A1}, and affine {index + 1} {A2}" + ) + + class AffineTransform(CoordinateTransform): """Affine 2D transform wrapper.""" @@ -76,7 +87,9 @@ def reverse(self, coord_labels): return results - def equals(self, other): + def equals(self, other, *, exclude): + if exclude is not None: + raise NotImplementedError if not isinstance(other, AffineTransform): return False return self.affine == other.affine and self.dim_size == other.dim_size @@ -129,9 +142,11 @@ def reverse(self, coord_labels: dict[Hashable, Any]) -> dict[str, Any]: return {self.dim: positions} - def equals(self, other): + def equals(self, other, *, exclude): if not isinstance(other, AxisAffineTransform): return False + if exclude is not None: + raise NotImplementedError # only compare the affine parameters of the relevant axis if self.is_xaxis: @@ -304,6 +319,7 @@ class RasterIndex(Index): """ _wrapped_indexes: dict[WrappedIndexCoords, WrappedIndex] + _shape: dict[str, int] def __init__(self, indexes: Mapping[WrappedIndexCoords, WrappedIndex]): idx_keys = list(indexes) @@ -321,6 +337,13 @@ def __init__(self, indexes: Mapping[WrappedIndexCoords, WrappedIndex]): assert axis_dependent ^ axis_independent self._wrapped_indexes = dict(indexes) + if axis_independent: + self._shape = { + "x": self._wrapped_indexes["x"].axis_transform.size, + "y": self._wrapped_indexes["y"].axis_transform.size, + } + else: + self._shape = next(iter(self._wrapped_indexes.values())).dim_size @classmethod def from_transform( @@ -396,7 +419,9 @@ def sel(self, labels: dict[Any, Any], method=None, tolerance=None) -> IndexSelRe return merge_sel_results(results) - def equals(self, other: Index) -> bool: + def equals(self, other: Index, *, exclude=None) -> bool: + if exclude is None: + exclude = {} if not isinstance(other, RasterIndex): return False if set(self._wrapped_indexes) != set(other._wrapped_indexes): @@ -405,6 +430,7 @@ def equals(self, other: Index) -> bool: return all( index.equals(other._wrapped_indexes[k]) # type: 
ignore[arg-type] for k, index in self._wrapped_indexes.items() + if k not in exclude ) def to_pandas_index(self) -> pd.Index: @@ -428,6 +454,9 @@ def __repr__(self): def transform(self) -> Affine: """Returns Affine transform for top-left corners.""" + return self.center_transform() * Affine.translation(-0.5, -0.5) + + def center_transform(self) -> Affine: if len(self._wrapped_indexes) > 1: x = self._wrapped_indexes["x"].axis_transform.affine y = self._wrapped_indexes["y"].axis_transform.affine @@ -435,4 +464,142 @@ def transform(self) -> Affine: else: index = next(iter(self._wrapped_indexes.values())) aff = index.affine - return aff * Affine.translation(-0.5, -0.5) + return aff + + @property + def bbox(self) -> BoundingBox: + return BoundingBox.from_transform( + shape=tuple(self._shape[k] for k in ("y", "x")), + transform=self.transform(), + ) + + @classmethod + def concat( + cls, + indexes: Sequence[Self], + dim: Hashable, + positions: Iterable[Iterable[int]] | None = None, + ) -> Self: + if len(indexes) == 1: + return next(iter(indexes)) + + if positions is not None: + raise NotImplementedError + + # Note: I am assuming that xarray has calling `align(..., exclude="x" [or 'y'])` already + # and checked for equality along "y" [or 'x'] + new_bbox = bbox_union(as_compatible_bboxes(*indexes, concat_dim=dim)) + return indexes[0]._new_with_bbox(new_bbox) + + def _new_with_bbox(self, bbox: BoundingBox) -> Self: + affine = self.transform() + new_affine, Nx, Ny = bbox_to_affine(bbox, rx=affine.a, ry=affine.e) + # TODO: set xdim, ydim explicitly + new_index = self.from_transform(new_affine, width=Nx, height=Ny) + assert new_index.bbox == bbox + return new_index + + def join(self, other: RasterIndex, how) -> RasterIndex: + if set(self._wrapped_indexes.keys()) != set(other._wrapped_indexes.keys()): + # TODO: better error message + raise ValueError( + "Alignment is only supported between RasterIndexes, when both contain compatible transforms." + ) + + ours, theirs = as_compatible_bboxes(self, other, concat_dim=None) + if how == "outer": + new_bbox = ours | theirs + elif how == "inner": + new_bbox = ours & theirs + else: + raise NotImplementedError(f"{how=!r} not implemented yet for RasterIndex.") + + return self._new_with_bbox(new_bbox) + + def reindex_like(self, other: Self, method=None, tolerance=None) -> dict[Hashable, Any]: + affine = self.transform() + ours, theirs = as_compatible_bboxes(self, other, concat_dim=None) + inter = bbox_intersection([ours, theirs]) + dx = affine.a + dy = affine.e + + # Fraction of a pixel that can be ignored, defaults to 1/100. Bounding box of the output + # geobox is allowed to be smaller than supplied bounding box by that amount. 
+        # FIXME: translate user-provided `tolerance` to `tol`
+        tol: float = 0.01
+
+        indexers = {}
+        indexers["x"] = get_indexer(
+            theirs.left, ours.left, inter.left, inter.right, spacing=dx, tol=tol, size=other._shape["x"]
+        )
+        indexers["y"] = get_indexer(
+            theirs.top, ours.top, inter.top, inter.bottom, spacing=dy, tol=tol, size=other._shape["y"]
+        )
+        return indexers
+
+
+def get_indexer(off, our_off, start, stop, spacing, tol, size) -> np.ndarray:
+    istart = math.ceil(maybe_int((start - off) / spacing, tol))
+
+    ours_istart = math.ceil(maybe_int((start - our_off) / spacing, tol))
+    ours_istop = math.ceil(maybe_int((stop - our_off) / spacing, tol))
+
+    idxr = np.concatenate(
+        [
+            np.full((istart,), fill_value=-1),
+            np.arange(ours_istart, ours_istop),
+            np.full((size - istart - (ours_istop - ours_istart),), fill_value=-1),
+        ]
+    )
+    return idxr
+
+
+def bbox_to_affine(bbox: BoundingBox, rx, ry) -> tuple[Affine, int, int]:
+    # Fraction of a pixel that can be ignored, defaults to 1/100. Bounding box of the output
+    # geobox is allowed to be smaller than supplied bounding box by that amount.
+    # FIXME: translate user-provided `tolerance` to `tol`
+    tol: float = 0.01
+
+    offx, nx = snap_grid(bbox.left, bbox.right, rx, 0, tol=tol)
+    offy, ny = snap_grid(bbox.bottom, bbox.top, ry, 0, tol=tol)
+
+    affine = Affine.translation(offx, offy) * Affine.scale(rx, ry)
+
+    return affine, nx, ny
+
+
+def as_compatible_bboxes(*indexes: RasterIndex, concat_dim: Hashable | None) -> tuple[BoundingBox, ...]:
+    transforms = tuple(i.transform() for i in indexes)
+    _assert_transforms_are_compatible(*transforms)
+
+    expected_off_x = (transforms[0].c,) + tuple(
+        t.c + i._shape["x"] * t.a for i, t in zip(indexes[:-1], transforms[:-1])
+    )
+    expected_off_y = (transforms[0].f,) + tuple(
+        t.f + i._shape["y"] * t.e for i, t in zip(indexes[:-1], transforms[:-1])
+    )
+
+    off_x = tuple(t.c for t in transforms)
+    off_y = tuple(t.f for t in transforms)
+
+    if concat_dim is not None:
+        if all(o == off_x[0] for o in off_x[1:]) and all(o == off_y[0] for o in off_y[1:]):
+            raise ValueError("Attempting to concatenate arrays with same transform along X or Y.")
+
+        if concat_dim == "x":
+            if any(off_y[0] != o for o in off_y[1:]):
+                raise ValueError("offsets must be identical in Y when concatenating along X")
+            if any(a != b for a, b in zip(off_x, expected_off_x)):
+                raise ValueError(
+                    f"X offsets are incompatible. Provided offsets {off_x}, expected offsets: {expected_off_x}"
+                )
+        elif concat_dim == "y":
+            if any(off_x[0] != o for o in off_x[1:]):
+                raise ValueError("offsets must be identical in X when concatenating along Y")
+
+            if any(a != b for a, b in zip(off_y, expected_off_y)):
+                raise ValueError(
+                    f"Y offsets are incompatible. Provided offsets {off_y}, expected offsets: {expected_off_y}"
+                )
+
+    return tuple(i.bbox for i in indexes)
diff --git a/tests/test_raster_index.py b/tests/test_raster_index.py
index 045ae3a..1bfe331 100644
--- a/tests/test_raster_index.py
+++ b/tests/test_raster_index.py
@@ -1,10 +1,22 @@
 import numpy as np
+import pyproj
+import pytest
 import rioxarray  # noqa
 import xarray as xr
 from affine import Affine
+from xarray.testing import assert_identical
 
 from rasterix import RasterIndex, assign_index
 
+CRS_ATTRS = pyproj.CRS.from_epsg(4326).to_cf()
+
+
+def dataset_from_transform(transform: str) -> xr.Dataset:
+    return xr.Dataset(
+        {"foo": (("y", "x"), np.ones((4, 2)), {"grid_mapping": "spatial_ref"})},
+        coords={"spatial_ref": ((), 0, CRS_ATTRS | {"GeoTransform": transform})},
+    ).pipe(assign_index)
+
 
 def test_rectilinear():
     source = "/vsicurl/https://noaadata.apps.nsidc.org/NOAA/G02135/south/daily/geotiff/2024/01_Jan/S_20240101_concentration_v3.0.tif"
     da = rioxarray.open_rasterio(source)
@@ -31,3 +43,205 @@ def test_sel_slice():
 
     assert actual_transform == actual.rio.transform()
     assert actual_transform == (transform * Affine.translation(0, 3))
+
+
+@pytest.mark.parametrize(
+    "transforms, concat_dim",
+    [
+        (
+            [
+                "-50.0 5 0.0 0.0 0.0 -0.25",
+                "-40.0 5 0.0 0.0 0.0 -0.25",
+                "-30.0 5 0.0 0.0 0.0 -0.25",
+            ],
+            "x",
+        ),
+        (
+            [
+                # decreasing Δy
+                "-40.0 5 0.0 2.0 0.0 -0.5",
+                "-40.0 5 0.0 0.0 0.0 -0.5",
+                "-40.0 5 0.0 -2.0 0.0 -0.5",
+            ],
+            "y",
+        ),
+        (
+            [
+                # increasing Δy
+                "-40.0 5 0.0 -2.0 0.0 0.5",
+                "-40.0 5 0.0 0.0 0.0 0.5",
+                "-40.0 5 0.0 2.0 0.0 0.5",
+            ],
+            "y",
+        ),
+    ],
+)
+def test_concat_and_combine_nested_1D(transforms, concat_dim):
+    """Models two side-by-side tiles"""
+    datasets = list(map(dataset_from_transform, transforms))
+
+    if concat_dim == "x":
+        new_data = np.ones((4, 2 * len(transforms)))
+    else:
+        new_data = np.ones((4 * len(transforms), 2))
+    expected = xr.Dataset(
+        {"foo": (("y", "x"), new_data, {"grid_mapping": "spatial_ref"})},
+        coords={"spatial_ref": ((), 0, CRS_ATTRS | {"GeoTransform": transforms[0]})},
+    ).pipe(assign_index)
+
+    for actual in [
+        xr.combine_nested(datasets, concat_dim=concat_dim, combine_attrs="override"),
+        xr.concat(datasets, dim=concat_dim),
+    ]:
+        assert_identical(actual, expected)
+        assert_identical(actual, expected)
+        concat_coord = xr.concat([ds[concat_dim] for ds in datasets], dim=concat_dim)
+        assert_identical(actual[concat_dim], concat_coord)
+
+
+@pytest.mark.parametrize(
+    "transforms, concat_dim",
+    [
+        (
+            [
+                # out-of-order for Y
+                "-40.0 5 0.0 -2.0 0.0 -0.5",
+                "-40.0 5 0.0 0.0 0.0 -0.5",
+                "-40.0 5 0.0 2.0 0.0 -0.5",
+            ],
+            "y",
+        ),
+        (
+            [
+                # incompatible, different Δx
+                "-50.0 2 0.0 0.0 0.0 -0.5",
+                "-40.0 5 0.0 0.0 0.0 -0.5",
+            ],
+            "x",
+        ),
+        (
+            [
+                # incompatible, different Δy
+                "-50.0 2 0.0 0.0 0.0 -0.5",
+                "-40.0 5 0.0 0.0 0.0 -0.25",
+            ],
+            "x",
+        ),
+        (
+            [
+                # exact same transform, makes no sense to concat in X or Y
+                "-50.0 5 0.0 0.0 0.0 -0.5",
+                "-50.0 5 0.0 0.0 0.0 -0.5",
+            ],
+            "x",
+        ),
+    ],
+)
+def test_concat_errors(transforms, concat_dim):
+    datasets = list(map(dataset_from_transform, transforms))
+    with pytest.raises(ValueError):
+        xr.combine_nested(datasets, concat_dim=concat_dim, combine_attrs="override")
+    with pytest.raises(ValueError):
+        xr.concat(datasets, dim=concat_dim, combine_attrs="override")
+
+
+def test_concat_error_alignment():
+    transforms, concat_dim = (
+        [
+            # incompatible, different origins
+            "-50.0 5 0.0 -2.0 0.0 -0.5",
+            "-40.0 5 0.0 0.0 0.0 -0.5",
+        ],
+        "x",
+    )
+
+    datasets = list(map(dataset_from_transform, transforms))
+    with pytest.raises(AssertionError):
+        xr.combine_nested(datasets, concat_dim=concat_dim, combine_attrs="override")
+    with pytest.raises(AssertionError):
+        xr.concat(datasets, dim=concat_dim, combine_attrs="override")
+
+
+def test_concat_different_shape_compatible_transform_error():
+    crs_attrs = pyproj.CRS.from_epsg(4326).to_cf()
+    concat_dim = "x"
+
+    ds1 = xr.Dataset(
+        {"foo": (("y", "x"), np.ones((4, 3)), {"grid_mapping": "spatial_ref"})},
+        coords={"spatial_ref": ((), 0, crs_attrs | {"GeoTransform": "-50.0 5 0.0 0.0 0.0 -0.5"})},
+    )
+    ds2 = xr.Dataset(
+        {"foo": (("y", "x"), np.ones((4, 2)), {"grid_mapping": "spatial_ref"})},
+        coords={"spatial_ref": ((), 0, crs_attrs | {"GeoTransform": "-40.0 5 0.0 0.0 0.0 -0.5"})},
+    )
+
+    datasets = list(map(assign_index, [ds1, ds2]))
+    with pytest.raises(ValueError):
+        xr.combine_nested(datasets, concat_dim=concat_dim, combine_attrs="override")
+    with pytest.raises(ValueError):
+        xr.concat(datasets, dim=concat_dim, combine_attrs="override")
+
+
+def test_concat_new_dim():
+    """models concat along `time` for two tiles with same transform."""
+    transforms = [
+        "-50.0 0.5 0.0 0.0 0.0 -0.25",
+        "-50.0 0.5 0.0 0.0 0.0 -0.25",
+    ]
+    datasets = list(map(dataset_from_transform, transforms))
+    xr.concat(datasets, dim="time", join="exact")
+
+
+def test_combine_nested_2d():
+    """models 2d tiling"""
+    transforms = [
+        # row 1
+        "-50.0 5 0.0 0.0 0.0 -0.25",
+        "-40.0 5 0.0 0.0 0.0 -0.25",
+        "-30.0 5 0.0 0.0 0.0 -0.25",
+        # row 2
+        "-50.0 5 0.0 -1 0.0 -0.25",
+        "-40.0 5 0.0 -1 0.0 -0.25",
+        "-30.0 5 0.0 -1 0.0 -0.25",
+    ]
+
+    datasets = list(map(dataset_from_transform, transforms))
+    datasets = [datasets[:3], datasets[3:]]
+    xr.combine_nested(datasets, concat_dim=["y", "x"])
+
+
+@pytest.mark.skip(reason="xarray converts to PandasIndex")
+def test_combine_by_coords():
+    """models 2d tiling"""
+    transforms = [
+        # row 1
+        "-50.0 5 0.0 0.0 0.0 -0.25",
+        "-40.0 5 0.0 0.0 0.0 -0.25",
+        "-30.0 5 0.0 0.0 0.0 -0.25",
+        # row 2
+        "-50.0 5 0.0 -1 0.0 -0.25",
+        "-40.0 5 0.0 -1 0.0 -0.25",
+        "-30.0 5 0.0 -1 0.0 -0.25",
+    ]
+
+    datasets = list(map(dataset_from_transform, transforms))
+    xr.combine_by_coords(datasets)
+
+
+def test_align():
+    transforms = [
+        "-50.0 5 0.0 0.0 0.0 -0.25",
+        "-40.0 5 0.0 0.0 0.0 -0.25",
+    ]
+    expected_affine = Affine(5, 0, -50, 0, -0.25, 0)
+    datasets = list(map(dataset_from_transform, transforms))
+
+    aligned = xr.align(*datasets, join="outer")
+    assert all(a.sizes == {"x": 4, "y": 4} for a in aligned)
+    assert all(a.xindexes["x"].transform() == expected_affine for a in aligned)
+
+    aligned = xr.align(*datasets, join="inner")
+    assert all(a.sizes["x"] == 0 for a in aligned)
+
+    with pytest.raises(xr.AlignmentError):
+        aligned = xr.align(*datasets, join="exact")
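Example usage (not part of the diff): a minimal sketch of how the concatenation and alignment support added above is exercised, based on the tests in this PR. The GeoTransform strings, shapes, and the helper name `tile` are illustrative assumptions.

# Sketch, assuming rasterix with this change installed. Mirrors dataset_from_transform() above.
import numpy as np
import pyproj
import xarray as xr

from rasterix import assign_index

CRS_ATTRS = pyproj.CRS.from_epsg(4326).to_cf()


def tile(transform: str) -> xr.Dataset:
    # A 4x2 tile whose x/y coordinates are derived from the CF "GeoTransform" attribute.
    return xr.Dataset(
        {"foo": (("y", "x"), np.ones((4, 2)), {"grid_mapping": "spatial_ref"})},
        coords={"spatial_ref": ((), 0, CRS_ATTRS | {"GeoTransform": transform})},
    ).pipe(assign_index)


left = tile("-50.0 5 0.0 0.0 0.0 -0.25")   # x origin at -50, pixel size 5 x -0.25
right = tile("-40.0 5 0.0 0.0 0.0 -0.25")  # x origin at -40, i.e. exactly two pixels east

# Side-by-side tiles concatenate along "x"; the result carries a single RasterIndex
# whose affine transform starts at the leftmost origin.
combined = xr.concat([left, right], dim="x")

# join="outer" snaps both inputs onto the union bounding box of their transforms;
# join="inner" keeps only the overlap (empty here, as in test_align above).
aligned_left, aligned_right = xr.align(left, right, join="outer")

The outer/inner joins work by taking the union or intersection of the per-index bounding boxes and snapping the result back to the shared pixel grid via the odc-derived snap_grid helper.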