diff --git a/benchmarks/README.md b/benchmarks/README.md
index 6c69a786b..9f8903620 100644
--- a/benchmarks/README.md
+++ b/benchmarks/README.md
@@ -14,25 +14,63 @@ pip install -e '.[docs,test,benchmark]'
 
 ## Usage
 
-Running all the benchmarks is usually not needed. You run the benchmark using `asv run`. See the [asv documentation](https://asv.readthedocs.io/en/stable/commands.html#asv-run) for interesting arguments, like selecting the benchmarks you're interested in by providing a regex pattern `-b` or `--bench` that links to a function or class method e.g. the option `-b timeraw_import_inspect` selects the function `timeraw_import_inspect` in `benchmarks/spatialdata_benchmark.py`. You can run the benchmark in your current environment with `--python=same`. Some example benchmarks:
+Running all the benchmarks is usually not needed. Run a benchmark with `asv run`. See the [asv documentation](https://asv.readthedocs.io/en/stable/commands.html#asv-run) for useful arguments, such as `-b`/`--bench`, which selects benchmarks by a regex pattern matching a function or class method name. You can run the benchmarks in your current environment with `--python=same`. Some example benchmarks:
 
-Importing the SpatialData library can take around 4 seconds:
+### Import time benchmarks
+
+Import benchmarks live in `benchmarks/benchmark_imports.py`. Each `timeraw_*` function returns a Python code snippet that asv runs in a fresh interpreter, so the measured time reflects a cold import with an empty module cache.
+
+Run all import benchmarks in your current environment:
 
 ```
-PYTHONWARNINGS="ignore" asv run --python=same --show-stderr -b timeraw_import_inspect
-Couldn't load asv.plugins._mamba_helpers because
-No module named 'conda'
-· Discovering benchmarks
-· Running 1 total benchmarks (1 commits * 1 environments * 1 benchmarks)
-[ 0.00%] ·· Benchmarking existing-py_opt_homebrew_Caskroom_mambaforge_base_envs_spatialdata2_bin_python3.12
-[50.00%] ··· Running (spatialdata_benchmark.timeraw_import_inspect--).
-[100.00%] ··· spatialdata_benchmark.timeraw_import_inspect        3.65±0.2s
+asv run --python=same --show-stderr -b timeraw
+```
+
+Or a single one:
+
+```
+asv run --python=same --show-stderr -b timeraw_import_spatialdata
+```
+
+### Comparing the current branch against `main`
+
+The simplest way is `asv continuous`, which builds both commits, runs the benchmarks, and prints the comparison in one shot:
+
+```bash
+asv continuous --show-stderr -v -b timeraw main faster-import
 ```
 
+Replace `faster-import` with any branch name or commit hash. The `-v` flag prints per-sample timings; drop it for a shorter summary.
+
+Alternatively, collect results separately and compare afterwards:
+
+```bash
+# 1. Collect results for the tip of main and the tip of your branch
+asv run --show-stderr -b timeraw main
+asv run --show-stderr -b timeraw HEAD
+
+# 2. Print a side-by-side comparison
+asv compare main HEAD
+```
+
+Both approaches build isolated environments from scratch. If you prefer to skip the rebuild and reuse your current environment (faster, less accurate):
+
+```bash
+asv run --python=same --show-stderr -b timeraw HEAD
+
+git stash && git checkout main
+asv run --python=same --show-stderr -b timeraw HEAD
+git checkout - && git stash pop
+
+asv compare main HEAD
+```
+
+### Querying benchmarks
+
 Querying using a bounding box without a spatial index is highly impacted by large amounts of points (transcripts), more than table rows (cells).
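For context, the operation this benchmark exercises is a bounding-box query through the public API. The sketch below is illustrative only: it uses the bundled `blobs()` example dataset and arbitrary box coordinates, not the benchmark's own synthetic data or parameters.

```python
from spatialdata import bounding_box_query
from spatialdata.datasets import blobs

# Small synthetic SpatialData object shipped with the package
# (images, labels, points, shapes, and an annotating table).
sdata = blobs()

# Subset every element to the region inside the box, expressed in the
# "global" coordinate system used by the example dataset.
subset = bounding_box_query(
    sdata,
    axes=("y", "x"),
    min_coordinate=[100, 100],
    max_coordinate=[300, 300],
    target_coordinate_system="global",
)
print(subset)
```

The asv output below reports this query timed across the `filter_table` and `n_transcripts_per_cell` parameter combinations.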
 ```
-$ PYTHONWARNINGS="ignore" asv run --python=same --show-stderr -b time_query_bounding_box
+$ asv run --python=same --show-stderr -b time_query_bounding_box
 [100.00%] ··· ======== ============ ============= ============= ==============
               --                    filter_table / n_transcripts_per_cell
diff --git a/benchmarks/benchmark_imports.py b/benchmarks/benchmark_imports.py
new file mode 100644
index 000000000..e1a47ea8b
--- /dev/null
+++ b/benchmarks/benchmark_imports.py
@@ -0,0 +1,56 @@
+"""Benchmarks for import times of the spatialdata package and its submodules.
+
+Each ``timeraw_*`` function returns a snippet of Python code that asv runs in
+a fresh interpreter, so the measured time reflects a cold import with an empty
+module cache.
+"""
+
+from collections.abc import Callable
+from typing import Any
+
+
+def _timeraw(func: Callable[[], Any]) -> Callable[[], Any]:
+    """Set asv benchmark attributes for a cold-import timeraw function."""
+    func.repeat = 5  # number of independent subprocess measurements
+    func.number = 1  # must be 1: second import in same process hits module cache
+    return func
+
+
+@_timeraw
+def timeraw_import_spatialdata() -> str:
+    """Time a bare ``import spatialdata``."""
+    return """
+    import spatialdata
+    """
+
+
+@_timeraw
+def timeraw_import_SpatialData() -> str:
+    """Time importing the top-level ``SpatialData`` class."""
+    return """
+    from spatialdata import SpatialData
+    """
+
+
+@_timeraw
+def timeraw_import_read_zarr() -> str:
+    """Time importing ``read_zarr`` from the top-level namespace."""
+    return """
+    from spatialdata import read_zarr
+    """
+
+
+@_timeraw
+def timeraw_import_models_elements() -> str:
+    """Time importing the main element model classes."""
+    return """
+    from spatialdata.models import Image2DModel, Labels2DModel, PointsModel, ShapesModel, TableModel
+    """
+
+
+@_timeraw
+def timeraw_import_transformations() -> str:
+    """Time importing the ``spatialdata.transformations`` submodule."""
+    return """
+    from spatialdata.transformations import Affine, Scale, Translation, Sequence
+    """
diff --git a/benchmarks/spatialdata_benchmark.py b/benchmarks/spatialdata_benchmark.py
index 408ad14e8..4d1020fe8 100644
--- a/benchmarks/spatialdata_benchmark.py
+++ b/benchmarks/spatialdata_benchmark.py
@@ -20,13 +20,6 @@ def peakmem_list2(self):
         return sdata
 
 
-def timeraw_import_inspect():
-    """Time the import of the spatialdata module."""
-    return """
-    import spatialdata
-    """
-
-
 class TimeMapRaster:
     """Time the."""
 
diff --git a/docs/conf.py b/docs/conf.py
index 4ea29943b..6bd7b2430 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -4,6 +4,8 @@
 # list see the documentation:
 # https://www.sphinx-doc.org/en/master/usage/configuration.html
 
+from __future__ import annotations
+
 # -- Path setup --------------------------------------------------------------
 import sys
 from datetime import datetime
diff --git a/docs/extensions/typed_returns.py b/docs/extensions/typed_returns.py
index d044c6985..5eefc02a6 100644
--- a/docs/extensions/typed_returns.py
+++ b/docs/extensions/typed_returns.py
@@ -1,5 +1,7 @@
 # code from https://github.com/theislab/scanpy/blob/master/docs/extensions/typed_returns.py
 # with some minor adjustment
+from __future__ import annotations
+
 import re
 
 from sphinx.application import Sphinx
diff --git a/pyproject.toml b/pyproject.toml
index 89e9e0235..b3e539158 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -55,7 +55,6 @@ dependencies = [
 [project.optional-dependencies]
 dev = [
     "bump2version",
-    "sentry-prevent-cli",
 ]
 test = [
     "pytest",
@@ -79,6 +78,7 @@ docs = [
 benchmark = [
     "asv",
"memray", + "profimp", ] torch = [ "torch" @@ -183,6 +183,9 @@ select = [ ] unfixable = ["B", "C4", "UP", "BLE", "T20", "RET"] +[tool.ruff.lint.isort] +required-imports = ["from __future__ import annotations"] + [tool.ruff.lint.pydocstyle] convention = "numpy" diff --git a/src/spatialdata/__init__.py b/src/spatialdata/__init__.py index bb24f04ed..349780287 100644 --- a/src/spatialdata/__init__.py +++ b/src/spatialdata/__init__.py @@ -1,76 +1,227 @@ -from importlib.metadata import version +from __future__ import annotations -import spatialdata.models._accessor # noqa: F401 +import importlib +from importlib.metadata import version +from typing import TYPE_CHECKING, Any __version__ = version("spatialdata") -__all__ = [ +_submodules = { + "dataloader", + "datasets", "models", "transformations", - "datasets", - "dataloader", +} + +_LAZY_IMPORTS: dict[str, str] = { + # _core._deepcopy + "deepcopy": "spatialdata._core._deepcopy", + # _core._utils + "sanitize_name": "spatialdata._core._utils", + "sanitize_table": "spatialdata._core._utils", + # _core.centroids + "get_centroids": "spatialdata._core.centroids", + # _core.concatenate + "concatenate": "spatialdata._core.concatenate", + # _core.data_extent + "are_extents_equal": "spatialdata._core.data_extent", + "get_extent": "spatialdata._core.data_extent", + # _core.operations.aggregate + "aggregate": "spatialdata._core.operations.aggregate", + # _core.operations.map + "map_raster": "spatialdata._core.operations.map", + "relabel_sequential": "spatialdata._core.operations.map", + # _core.operations.rasterize + "rasterize": "spatialdata._core.operations.rasterize", + # _core.operations.rasterize_bins + "rasterize_bins": "spatialdata._core.operations.rasterize_bins", + "rasterize_bins_link_table_to_labels": "spatialdata._core.operations.rasterize_bins", + # _core.operations.transform + "transform": "spatialdata._core.operations.transform", + # _core.operations.vectorize + "to_circles": "spatialdata._core.operations.vectorize", + "to_polygons": "spatialdata._core.operations.vectorize", + # _core.query._utils + "get_bounding_box_corners": "spatialdata._core.query._utils", + # _core.query.relational_query + "filter_by_table_query": "spatialdata._core.query.relational_query", + "get_element_annotators": "spatialdata._core.query.relational_query", + "get_element_instances": "spatialdata._core.query.relational_query", + "get_values": "spatialdata._core.query.relational_query", + "join_spatialelement_table": "spatialdata._core.query.relational_query", + "match_element_to_table": "spatialdata._core.query.relational_query", + "match_sdata_to_table": "spatialdata._core.query.relational_query", + "match_table_to_element": "spatialdata._core.query.relational_query", + # _core.query.spatial_query + "bounding_box_query": "spatialdata._core.query.spatial_query", + "polygon_query": "spatialdata._core.query.spatial_query", + # _core.spatialdata + "SpatialData": "spatialdata._core.spatialdata", + # _io._utils + "get_dask_backing_files": "spatialdata._io._utils", + # _io.format + "SpatialDataFormatType": "spatialdata._io.format", + # _io.io_zarr + "read_zarr": "spatialdata._io.io_zarr", + # _utils + "get_pyramid_levels": "spatialdata._utils", + "unpad_raster": "spatialdata._utils", + # config + "settings": "spatialdata.config", +} + +__all__ = [ + # _core._deepcopy + "deepcopy", + # _core._utils + "sanitize_name", + "sanitize_table", + # _core.centroids + "get_centroids", + # _core.concatenate "concatenate", + # _core.data_extent + "are_extents_equal", + "get_extent", + 
# _core.operations.aggregate + "aggregate", + # _core.operations.map + "map_raster", + "relabel_sequential", + # _core.operations.rasterize "rasterize", + # _core.operations.rasterize_bins "rasterize_bins", "rasterize_bins_link_table_to_labels", + # _core.operations.transform + "transform", + # _core.operations.vectorize "to_circles", "to_polygons", - "transform", - "aggregate", - "bounding_box_query", - "polygon_query", + # _core.query._utils + "get_bounding_box_corners", + # _core.query.relational_query + "filter_by_table_query", "get_element_annotators", "get_element_instances", "get_values", "join_spatialelement_table", "match_element_to_table", - "match_table_to_element", "match_sdata_to_table", - "filter_by_table_query", + "match_table_to_element", + # _core.query.spatial_query + "bounding_box_query", + "polygon_query", + # _core.spatialdata "SpatialData", - "get_extent", - "get_centroids", + # _io._utils + "get_dask_backing_files", + # _io.format "SpatialDataFormatType", + # _io.io_zarr "read_zarr", - "unpad_raster", + # _utils "get_pyramid_levels", - "get_dask_backing_files", - "are_extents_equal", - "relabel_sequential", - "map_raster", - "deepcopy", - "sanitize_table", - "sanitize_name", + "unpad_raster", + # config "settings", ] -from spatialdata import dataloader, datasets, models, transformations -from spatialdata._core._deepcopy import deepcopy -from spatialdata._core._utils import sanitize_name, sanitize_table -from spatialdata._core.centroids import get_centroids -from spatialdata._core.concatenate import concatenate -from spatialdata._core.data_extent import are_extents_equal, get_extent -from spatialdata._core.operations.aggregate import aggregate -from spatialdata._core.operations.map import map_raster, relabel_sequential -from spatialdata._core.operations.rasterize import rasterize -from spatialdata._core.operations.rasterize_bins import rasterize_bins, rasterize_bins_link_table_to_labels -from spatialdata._core.operations.transform import transform -from spatialdata._core.operations.vectorize import to_circles, to_polygons -from spatialdata._core.query._utils import get_bounding_box_corners -from spatialdata._core.query.relational_query import ( - filter_by_table_query, - get_element_annotators, - get_element_instances, - get_values, - join_spatialelement_table, - match_element_to_table, - match_sdata_to_table, - match_table_to_element, -) -from spatialdata._core.query.spatial_query import bounding_box_query, polygon_query -from spatialdata._core.spatialdata import SpatialData -from spatialdata._io._utils import get_dask_backing_files -from spatialdata._io.format import SpatialDataFormatType -from spatialdata._io.io_zarr import read_zarr -from spatialdata._utils import get_pyramid_levels, unpad_raster -from spatialdata.config import settings +_accessor_loaded = False + + +def __getattr__(name: str) -> Any: + global _accessor_loaded + if not _accessor_loaded: + _accessor_loaded = True + import spatialdata.models._accessor # noqa: F401 + + if name in _submodules: + return importlib.import_module(f"spatialdata.{name}") + if name in _LAZY_IMPORTS: + mod = importlib.import_module(_LAZY_IMPORTS[name]) + attr = getattr(mod, name) + globals()[name] = attr + return attr + try: + return globals()[name] + except KeyError as e: + raise AttributeError(f"module 'spatialdata' has no attribute {name!r}") from e + + +def __dir__() -> list[str]: + return __all__ + ["__version__"] + + +if TYPE_CHECKING: + # submodules + from spatialdata import dataloader, datasets, models, 
transformations + + # _core._deepcopy + from spatialdata._core._deepcopy import deepcopy + + # _core._utils + from spatialdata._core._utils import sanitize_name, sanitize_table + + # _core.centroids + from spatialdata._core.centroids import get_centroids + + # _core.concatenate + from spatialdata._core.concatenate import concatenate + + # _core.data_extent + from spatialdata._core.data_extent import are_extents_equal, get_extent + + # _core.operations.aggregate + from spatialdata._core.operations.aggregate import aggregate + + # _core.operations.map + from spatialdata._core.operations.map import map_raster, relabel_sequential + + # _core.operations.rasterize + from spatialdata._core.operations.rasterize import rasterize + + # _core.operations.rasterize_bins + from spatialdata._core.operations.rasterize_bins import rasterize_bins, rasterize_bins_link_table_to_labels + + # _core.operations.transform + from spatialdata._core.operations.transform import transform + + # _core.operations.vectorize + from spatialdata._core.operations.vectorize import to_circles, to_polygons + + # _core.query._utils + from spatialdata._core.query._utils import get_bounding_box_corners + + # _core.query.relational_query + from spatialdata._core.query.relational_query import ( + filter_by_table_query, + get_element_annotators, + get_element_instances, + get_values, + join_spatialelement_table, + match_element_to_table, + match_sdata_to_table, + match_table_to_element, + ) + + # _core.query.spatial_query + from spatialdata._core.query.spatial_query import bounding_box_query, polygon_query + + # _core.spatialdata + from spatialdata._core.spatialdata import SpatialData + + # _io._utils + from spatialdata._io._utils import get_dask_backing_files + + # _io.format + from spatialdata._io.format import SpatialDataFormatType + + # _io.io_zarr + from spatialdata._io.io_zarr import read_zarr + + # _utils + from spatialdata._utils import get_pyramid_levels, unpad_raster + + # config + from spatialdata.config import settings diff --git a/src/spatialdata/__main__.py b/src/spatialdata/__main__.py index 22b62cc1c..6d15de398 100644 --- a/src/spatialdata/__main__.py +++ b/src/spatialdata/__main__.py @@ -6,6 +6,8 @@ the contents of a SpatialData .zarr dataset. Additional CLI functionalities will be implemented in the future. 
""" +from __future__ import annotations + from typing import Literal import click diff --git a/src/spatialdata/_core/_deepcopy.py b/src/spatialdata/_core/_deepcopy.py index 67d709f0a..9e2e7f00c 100644 --- a/src/spatialdata/_core/_deepcopy.py +++ b/src/spatialdata/_core/_deepcopy.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from copy import deepcopy as _deepcopy from functools import singledispatch diff --git a/src/spatialdata/_core/_elements.py b/src/spatialdata/_core/_elements.py index 88205c7eb..a3267a701 100644 --- a/src/spatialdata/_core/_elements.py +++ b/src/spatialdata/_core/_elements.py @@ -1,5 +1,7 @@ """SpatialData elements.""" +from __future__ import annotations + from collections import UserDict from collections.abc import Iterable, KeysView, ValuesView from typing import TypeVar diff --git a/src/spatialdata/_core/_utils.py b/src/spatialdata/_core/_utils.py index a6806334d..a55815655 100644 --- a/src/spatialdata/_core/_utils.py +++ b/src/spatialdata/_core/_utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Iterable from anndata import AnnData diff --git a/src/spatialdata/_core/centroids.py b/src/spatialdata/_core/centroids.py index cd43fb2fb..24ea616c6 100644 --- a/src/spatialdata/_core/centroids.py +++ b/src/spatialdata/_core/centroids.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections import defaultdict from functools import singledispatch diff --git a/src/spatialdata/_core/concatenate.py b/src/spatialdata/_core/concatenate.py index f90043604..b0639eacc 100644 --- a/src/spatialdata/_core/concatenate.py +++ b/src/spatialdata/_core/concatenate.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections import defaultdict from collections.abc import Callable, Iterable from copy import copy # Should probably go up at the top diff --git a/src/spatialdata/_core/data_extent.py b/src/spatialdata/_core/data_extent.py index 6abd46ca8..6504bb082 100644 --- a/src/spatialdata/_core/data_extent.py +++ b/src/spatialdata/_core/data_extent.py @@ -1,4 +1,6 @@ # Functions to compute the bounding box describing the extent of a SpatialElement or SpatialData object +from __future__ import annotations + from collections import defaultdict from functools import singledispatch diff --git a/src/spatialdata/_core/operations/aggregate.py b/src/spatialdata/_core/operations/aggregate.py index 131b8d7f3..d0c4741e7 100644 --- a/src/spatialdata/_core/operations/aggregate.py +++ b/src/spatialdata/_core/operations/aggregate.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import warnings from typing import Any @@ -9,10 +11,8 @@ import pandas as pd from dask.dataframe import DataFrame as DaskDataFrame from geopandas import GeoDataFrame -from scipy import sparse from shapely import Point from xarray import DataArray, DataTree -from xrspatial import zonal_stats from spatialdata._core.operations._utils import _parse_element from spatialdata._core.operations.transform import transform @@ -270,6 +270,9 @@ def _aggregate_image_by_labels( ------- AnnData of shape `(by.shape[0], len(agg_func)]`. 
""" + from scipy import sparse + from xrspatial import zonal_stats + if isinstance(by, DataTree): assert len(by["scale0"]) == 1 by = next(iter(by["scale0"].values())) @@ -468,6 +471,8 @@ def _aggregate_shapes( columns_categories * (numel // len(columns_categories)), categories=columns_categories ) + from scipy import sparse + X = sparse.coo_matrix( ( aggregated_values.ravel(), diff --git a/src/spatialdata/_core/operations/map.py b/src/spatialdata/_core/operations/map.py index 823295e10..0f5a380a9 100644 --- a/src/spatialdata/_core/operations/map.py +++ b/src/spatialdata/_core/operations/map.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import math import operator from collections.abc import Callable, Iterable, Mapping diff --git a/src/spatialdata/_core/operations/rasterize.py b/src/spatialdata/_core/operations/rasterize.py index 12bbb68fe..d5b28c281 100644 --- a/src/spatialdata/_core/operations/rasterize.py +++ b/src/spatialdata/_core/operations/rasterize.py @@ -1,5 +1,7 @@ -import dask_image.ndinterp -import datashader as ds +from __future__ import annotations + +from typing import TYPE_CHECKING + import numpy as np from dask.array import Array as DaskArray from dask.dataframe import DataFrame as DaskDataFrame @@ -7,6 +9,9 @@ from shapely import Point from xarray import DataArray, DataTree +if TYPE_CHECKING: + import datashader as ds + from spatialdata._core.operations._utils import _parse_element from spatialdata._core.operations.transform import transform from spatialdata._core.operations.vectorize import to_polygons @@ -505,6 +510,8 @@ def rasterize_images_labels( target_height: float | None = None, target_depth: float | None = None, ) -> DataArray: + import dask_image.ndinterp + min_coordinate = _parse_list_into_array(min_coordinate) max_coordinate = _parse_list_into_array(max_coordinate) # get dimensions of the target image @@ -624,6 +631,8 @@ def rasterize_shapes_points( agg_func: str | ds.reductions.Reduction | None = None, return_single_channel: bool | None = None, ) -> DataArray: + import datashader as ds + min_coordinate = _parse_list_into_array(min_coordinate) max_coordinate = _parse_list_into_array(max_coordinate) target_width, target_height, target_depth = _compute_target_dimensions( @@ -737,6 +746,8 @@ def rasterize_shapes_points( def _default_agg_func( data: DaskDataFrame | GeoDataFrame, value_key: str | None, return_single_channel: bool ) -> ds.reductions.Reduction: + import datashader as ds + if value_key is None: return ds.count() diff --git a/src/spatialdata/_core/operations/rasterize_bins.py b/src/spatialdata/_core/operations/rasterize_bins.py index f61d1758c..87af14de7 100644 --- a/src/spatialdata/_core/operations/rasterize_bins.py +++ b/src/spatialdata/_core/operations/rasterize_bins.py @@ -9,9 +9,7 @@ from dask.dataframe import DataFrame as DaskDataFrame from geopandas import GeoDataFrame from numpy.random import default_rng -from scipy.sparse import csc_matrix from shapely import MultiPolygon, Point, Polygon -from skimage.transform import estimate_transform from xarray import DataArray from spatialdata._core.query.relational_query import get_values @@ -126,6 +124,8 @@ def rasterize_bins( transformations = get_transformation(element, get_all=True) assert isinstance(transformations, dict) else: + from skimage.transform import estimate_transform + # get the transformation if table.n_obs < 6: raise ValueError("At least 6 bins are needed to estimate the transformation.") @@ -177,6 +177,8 @@ def rasterize_bins( keys = ([value_key] if isinstance(value_key, str) 
else value_key) if value_key is not None else table.var_names + from scipy.sparse import csc_matrix + if (value_key is None or any(key in table.var_names for key in keys)) and not isinstance( table.X, csc_matrix | np.ndarray ): diff --git a/src/spatialdata/_core/operations/vectorize.py b/src/spatialdata/_core/operations/vectorize.py index 40d3e31f4..414584589 100644 --- a/src/spatialdata/_core/operations/vectorize.py +++ b/src/spatialdata/_core/operations/vectorize.py @@ -1,17 +1,20 @@ +from __future__ import annotations + from functools import singledispatch -from typing import Any +from typing import TYPE_CHECKING, Any import dask import numpy as np import pandas as pd import shapely -import skimage.measure from dask.dataframe import DataFrame as DaskDataFrame from geopandas import GeoDataFrame from shapely import MultiPolygon, Point, Polygon -from skimage.measure._regionprops import RegionProperties from xarray import DataArray, DataTree +if TYPE_CHECKING: + from skimage.measure._regionprops import RegionProperties + from spatialdata._core.centroids import get_centroids from spatialdata._core.operations.aggregate import aggregate from spatialdata._logging import logger @@ -226,6 +229,8 @@ def _vectorize_chunk(chunk: np.ndarray, yoff: int, xoff: int) -> GeoDataFrame: def _region_props_to_polygon(region_props: RegionProperties) -> MultiPolygon | Polygon: + import skimage.measure + mask = np.pad(region_props.image, 1) contours = skimage.measure.find_contours(mask, 0.5) @@ -244,6 +249,8 @@ def _region_props_to_polygon(region_props: RegionProperties) -> MultiPolygon | P def _vectorize_mask( mask: np.ndarray, # type: ignore[type-arg] ) -> GeoDataFrame: + import skimage.measure + if mask.max() == 0: return GeoDataFrame({"label": []}, geometry=[]) diff --git a/src/spatialdata/_core/query/_utils.py b/src/spatialdata/_core/query/_utils.py index 5ab904d96..f66679ca2 100644 --- a/src/spatialdata/_core/query/_utils.py +++ b/src/spatialdata/_core/query/_utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Any import numba as nb diff --git a/src/spatialdata/_core/query/relational_query.py b/src/spatialdata/_core/query/relational_query.py index 01cfb745b..c83cd770e 100644 --- a/src/spatialdata/_core/query/relational_query.py +++ b/src/spatialdata/_core/query/relational_query.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import math import warnings from collections import defaultdict diff --git a/src/spatialdata/_core/query/spatial_query.py b/src/spatialdata/_core/query/spatial_query.py index b112b68c5..c29e3bc0e 100644 --- a/src/spatialdata/_core/query/spatial_query.py +++ b/src/spatialdata/_core/query/spatial_query.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import warnings from abc import abstractmethod from collections.abc import Callable, Mapping diff --git a/src/spatialdata/_core/validation.py b/src/spatialdata/_core/validation.py index 50e1b65d5..df354fc3b 100644 --- a/src/spatialdata/_core/validation.py +++ b/src/spatialdata/_core/validation.py @@ -1,9 +1,12 @@ +from __future__ import annotations + from collections.abc import Collection from types import TracebackType -from typing import NamedTuple, cast +from typing import TYPE_CHECKING, NamedTuple, cast -import pandas as pd -from anndata import AnnData +if TYPE_CHECKING: + import pandas as pd + from anndata import AnnData class ErrorDetails(NamedTuple): @@ -278,7 +281,7 @@ def __call__( self, location: str | tuple[str, ...] 
= (), expected_exception: type[BaseException] | tuple[type[BaseException], ...] | None = None, - ) -> "_ErrorDetailsCollector": + ) -> _ErrorDetailsCollector: """ Set or override error details in advance before an exception is raised. diff --git a/src/spatialdata/_docs.py b/src/spatialdata/_docs.py index b8e13bc79..691ed765b 100644 --- a/src/spatialdata/_docs.py +++ b/src/spatialdata/_docs.py @@ -1,4 +1,6 @@ # from https://stackoverflow.com/questions/10307696/how-to-put-a-variable-into-python-docstring +from __future__ import annotations + from collections.abc import Callable from typing import Any, TypeVar diff --git a/src/spatialdata/_io/__init__.py b/src/spatialdata/_io/__init__.py index b0dc914e6..38ff8c6bb 100644 --- a/src/spatialdata/_io/__init__.py +++ b/src/spatialdata/_io/__init__.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from spatialdata._io._utils import get_dask_backing_files from spatialdata._io.format import SpatialDataFormatType from spatialdata._io.io_points import write_points diff --git a/src/spatialdata/_io/_utils.py b/src/spatialdata/_io/_utils.py index b58d67445..6690d1118 100644 --- a/src/spatialdata/_io/_utils.py +++ b/src/spatialdata/_io/_utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import filecmp import os.path import re diff --git a/src/spatialdata/_io/format.py b/src/spatialdata/_io/format.py index cce6654ef..cfb5be1b6 100644 --- a/src/spatialdata/_io/format.py +++ b/src/spatialdata/_io/format.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Iterator from typing import Any diff --git a/src/spatialdata/_io/io_points.py b/src/spatialdata/_io/io_points.py index 5f42497c7..b47fc418c 100644 --- a/src/spatialdata/_io/io_points.py +++ b/src/spatialdata/_io/io_points.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from pathlib import Path import zarr diff --git a/src/spatialdata/_io/io_raster.py b/src/spatialdata/_io/io_raster.py index 86b537d0c..df7e1cb8f 100644 --- a/src/spatialdata/_io/io_raster.py +++ b/src/spatialdata/_io/io_raster.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from pathlib import Path from typing import Any, Literal diff --git a/src/spatialdata/_io/io_shapes.py b/src/spatialdata/_io/io_shapes.py index 65cb099a0..b07256273 100644 --- a/src/spatialdata/_io/io_shapes.py +++ b/src/spatialdata/_io/io_shapes.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from pathlib import Path from typing import Any, Literal diff --git a/src/spatialdata/_io/io_table.py b/src/spatialdata/_io/io_table.py index d2e5c3cca..8cd7b8385 100644 --- a/src/spatialdata/_io/io_table.py +++ b/src/spatialdata/_io/io_table.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from pathlib import Path import numpy as np diff --git a/src/spatialdata/_io/io_zarr.py b/src/spatialdata/_io/io_zarr.py index 0312dc965..4c410fab0 100644 --- a/src/spatialdata/_io/io_zarr.py +++ b/src/spatialdata/_io/io_zarr.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import warnings from collections.abc import Callable diff --git a/src/spatialdata/_logging.py b/src/spatialdata/_logging.py index c49c1c884..0b117a7a8 100644 --- a/src/spatialdata/_logging.py +++ b/src/spatialdata/_logging.py @@ -1,8 +1,10 @@ +from __future__ import annotations + import logging import os -def _setup_logger() -> "logging.Logger": +def _setup_logger() -> logging.Logger: from rich.console import Console from rich.logging import RichHandler diff --git a/src/spatialdata/_types.py b/src/spatialdata/_types.py index 
e1415f74c..c5bd76de9 100644 --- a/src/spatialdata/_types.py +++ b/src/spatialdata/_types.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Any, TypeAlias import numpy as np diff --git a/src/spatialdata/_utils.py b/src/spatialdata/_utils.py index 64dd76389..3df133854 100644 --- a/src/spatialdata/_utils.py +++ b/src/spatialdata/_utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import functools import re import warnings diff --git a/src/spatialdata/config.py b/src/spatialdata/config.py index dab848b35..35b96e5f7 100644 --- a/src/spatialdata/config.py +++ b/src/spatialdata/config.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from dataclasses import dataclass from typing import Literal diff --git a/src/spatialdata/dataloader/datasets.py b/src/spatialdata/dataloader/datasets.py index 4bf14a916..6a105b681 100644 --- a/src/spatialdata/dataloader/datasets.py +++ b/src/spatialdata/dataloader/datasets.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import warnings from collections.abc import Callable, Mapping from functools import partial diff --git a/src/spatialdata/datasets.py b/src/spatialdata/datasets.py index de144be74..cb7a7b55b 100644 --- a/src/spatialdata/datasets.py +++ b/src/spatialdata/datasets.py @@ -1,19 +1,19 @@ """SpatialData datasets.""" +from __future__ import annotations + import warnings from typing import Any, Literal import dask.dataframe.core import numpy as np import pandas as pd -import scipy from anndata import AnnData from dask.dataframe import DataFrame as DaskDataFrame from geopandas import GeoDataFrame from numpy.random import default_rng from shapely.affinity import translate from shapely.geometry import MultiPolygon, Point, Polygon -from skimage.segmentation import slic from xarray import DataArray, DataTree from spatialdata._core.operations.aggregate import aggregate @@ -89,6 +89,9 @@ def raccoon( self, ) -> SpatialData: """Raccoon dataset.""" + import scipy.datasets + from skimage.segmentation import slic + im_data = scipy.datasets.face() im = Image2DModel.parse(im_data, dims=["y", "x", "c"]) labels_data = slic(im_data, n_segments=100, compactness=10, sigma=1) diff --git a/src/spatialdata/io/__init__.py b/src/spatialdata/io/__init__.py index a23181cad..eba096e3e 100644 --- a/src/spatialdata/io/__init__.py +++ b/src/spatialdata/io/__init__.py @@ -1,5 +1,7 @@ """Experimental bridge to the spatialdata_io package.""" +from __future__ import annotations + try: from spatialdata_io import * # noqa: F403 except ImportError as e: diff --git a/src/spatialdata/models/__init__.py b/src/spatialdata/models/__init__.py index ba064e0a6..2f25b4f3c 100644 --- a/src/spatialdata/models/__init__.py +++ b/src/spatialdata/models/__init__.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from spatialdata._core.validation import check_target_region_column_symmetry from spatialdata.models._utils import ( C, diff --git a/src/spatialdata/models/_accessor.py b/src/spatialdata/models/_accessor.py index a8b19653d..1b635427c 100644 --- a/src/spatialdata/models/_accessor.py +++ b/src/spatialdata/models/_accessor.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Iterator, MutableMapping from typing import Any, Literal diff --git a/src/spatialdata/models/_utils.py b/src/spatialdata/models/_utils.py index 6631f6c9d..26e9aafea 100644 --- a/src/spatialdata/models/_utils.py +++ b/src/spatialdata/models/_utils.py @@ -8,7 +8,6 @@ import geopandas import numpy as np import pandas as pd -from anndata import 
AnnData from dask.dataframe import DataFrame as DaskDataFrame from geopandas import GeoDataFrame from shapely.geometry import MultiPolygon, Point, Polygon @@ -29,6 +28,8 @@ X = "x" if TYPE_CHECKING: + from anndata import AnnData + from spatialdata.models.models import RasterSchema diff --git a/src/spatialdata/models/chunks_utils.py b/src/spatialdata/models/chunks_utils.py index 14ac208e8..474067011 100644 --- a/src/spatialdata/models/chunks_utils.py +++ b/src/spatialdata/models/chunks_utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping, Sequence from typing import Any, TypeAlias diff --git a/src/spatialdata/models/models.py b/src/spatialdata/models/models.py index c8001a847..16883c6aa 100644 --- a/src/spatialdata/models/models.py +++ b/src/spatialdata/models/models.py @@ -1,5 +1,7 @@ """Models and schema for SpatialData.""" +from __future__ import annotations + import warnings from collections.abc import Mapping, Sequence from functools import singledispatchmethod diff --git a/src/spatialdata/models/pyramids_utils.py b/src/spatialdata/models/pyramids_utils.py index 8a0a0ba1b..bef7dfb3e 100644 --- a/src/spatialdata/models/pyramids_utils.py +++ b/src/spatialdata/models/pyramids_utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Sequence from typing import Any diff --git a/src/spatialdata/testing.py b/src/spatialdata/testing.py index 1d1455479..17945818a 100644 --- a/src/spatialdata/testing.py +++ b/src/spatialdata/testing.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from anndata import AnnData from anndata.tests.helpers import assert_equal as assert_anndata_equal from dask.dataframe import DataFrame as DaskDataFrame diff --git a/src/spatialdata/transformations/__init__.py b/src/spatialdata/transformations/__init__.py index f6affcafe..e95a92766 100644 --- a/src/spatialdata/transformations/__init__.py +++ b/src/spatialdata/transformations/__init__.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from spatialdata.transformations.operations import ( align_elements_using_landmarks, get_transformation, diff --git a/src/spatialdata/transformations/ngff/_utils.py b/src/spatialdata/transformations/ngff/_utils.py index 4d0eaf216..07dff152c 100644 --- a/src/spatialdata/transformations/ngff/_utils.py +++ b/src/spatialdata/transformations/ngff/_utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import copy from spatialdata.models import C, X, Y, Z diff --git a/src/spatialdata/transformations/ngff/ngff_transformations.py b/src/spatialdata/transformations/ngff/ngff_transformations.py index 6e25646f9..1758159bf 100644 --- a/src/spatialdata/transformations/ngff/ngff_transformations.py +++ b/src/spatialdata/transformations/ngff/ngff_transformations.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import math from abc import ABC, abstractmethod from numbers import Number @@ -71,11 +73,11 @@ def __repr__(self) -> str: @classmethod @abstractmethod - def _from_dict(cls, d: Transformation_t) -> "NgffBaseTransformation": + def _from_dict(cls, d: Transformation_t) -> NgffBaseTransformation: pass @classmethod - def from_dict(cls, d: Transformation_t) -> "NgffBaseTransformation": + def from_dict(cls, d: Transformation_t) -> NgffBaseTransformation: """ Initialize a transformation from the Python dict of its json representation. 
@@ -134,7 +136,7 @@ def _update_dict_with_input_output_cs(self, d: Transformation_t) -> None: d["output"] = d["output"].to_dict() @abstractmethod - def inverse(self) -> "NgffBaseTransformation": + def inverse(self) -> NgffBaseTransformation: """Return the inverse of the transformation.""" @abstractmethod @@ -156,7 +158,7 @@ def transform_points(self, points: ArrayLike) -> ArrayLike: """ @abstractmethod - def to_affine(self) -> "NgffAffine": + def to_affine(self) -> NgffAffine: """Convert the transformation to an affine transformation, whenever the conversion can be made.""" def _validate_transform_points_shapes(self, input_size: int, points_shape: tuple[int, ...]) -> None: @@ -177,7 +179,7 @@ def _validate_transform_points_shapes(self, input_size: int, points_shape: tuple ) # order of the composition: self is applied first, then the transformation passed as argument - def compose_with(self, transformation: "NgffBaseTransformation") -> "NgffBaseTransformation": + def compose_with(self, transformation: NgffBaseTransformation) -> NgffBaseTransformation: """ Compose the transfomation object with another transformation @@ -343,7 +345,7 @@ def transform_points(self, points: ArrayLike) -> ArrayLike: assert isinstance(res, np.ndarray) return res - def to_affine(self) -> "NgffAffine": + def to_affine(self) -> NgffAffine: return NgffAffine( self.affine, input_coordinate_system=self.input_coordinate_system, @@ -381,7 +383,7 @@ def from_input_output_coordinate_systems( cls, input_coordinate_system: NgffCoordinateSystem, output_coordinate_system: NgffCoordinateSystem, - ) -> "NgffAffine": + ) -> NgffAffine: input_axes = input_coordinate_system.axes_names output_axes = output_coordinate_system.axes_names m = cls._affine_matrix_from_input_and_output_axes(input_axes, output_axes) diff --git a/src/spatialdata/transformations/operations.py b/src/spatialdata/transformations/operations.py index 15fe3d57c..31bd5cccb 100644 --- a/src/spatialdata/transformations/operations.py +++ b/src/spatialdata/transformations/operations.py @@ -3,16 +3,16 @@ import contextlib from typing import TYPE_CHECKING -import networkx as nx import numpy as np from dask.dataframe import DataFrame as DaskDataFrame from geopandas import GeoDataFrame -from skimage.transform import estimate_transform from spatialdata._logging import logger from spatialdata.transformations._utils import _get_transformations, _set_transformations if TYPE_CHECKING: + import networkx as nx + from spatialdata._core.spatialdata import SpatialData from spatialdata.models._utils import SpatialElement from spatialdata.transformations.transformations import Affine, BaseTransformation @@ -194,6 +194,8 @@ def remove_transformation( def _build_transformations_graph(sdata: SpatialData) -> nx.Graph: + import networkx as nx + g = nx.DiGraph() gen = sdata._gen_spatial_element_values() for cs in sdata.coordinate_systems: @@ -236,6 +238,8 @@ def get_transformation_between_coordinate_systems( ------- The transformation to map the source coordinate system to the target coordinate system. 
""" + import networkx as nx + from spatialdata.models._utils import has_type_spatial_element from spatialdata.transformations import Identity, Sequence @@ -373,6 +377,8 @@ def get_transformation_between_landmarks( >>> references_coords = PointsModel(points_reference) >>> transformation = get_transformation_between_landmarks(references_coords, moving_coords) """ + from skimage.transform import estimate_transform + from spatialdata import transform from spatialdata.models import get_axes_names from spatialdata.transformations.transformations import Affine, Sequence diff --git a/tests/conftest.py b/tests/conftest.py index 7149b4e72..c97939129 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Sequence from pathlib import Path from typing import Any diff --git a/tests/core/operations/test_aggregations.py b/tests/core/operations/test_aggregations.py index eb2ed089c..d471b79cc 100644 --- a/tests/core/operations/test_aggregations.py +++ b/tests/core/operations/test_aggregations.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import geopandas import numpy as np import pandas as pd diff --git a/tests/core/operations/test_map.py b/tests/core/operations/test_map.py index a032d381b..cde00fd31 100644 --- a/tests/core/operations/test_map.py +++ b/tests/core/operations/test_map.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import math import re diff --git a/tests/core/operations/test_rasterize.py b/tests/core/operations/test_rasterize.py index a2ffde3d4..53d362d15 100644 --- a/tests/core/operations/test_rasterize.py +++ b/tests/core/operations/test_rasterize.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import dask.dataframe as dd import numpy as np import pandas as pd diff --git a/tests/core/operations/test_rasterize_bins.py b/tests/core/operations/test_rasterize_bins.py index b99508efa..2918855db 100644 --- a/tests/core/operations/test_rasterize_bins.py +++ b/tests/core/operations/test_rasterize_bins.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import logging import re diff --git a/tests/core/operations/test_spatialdata_operations.py b/tests/core/operations/test_spatialdata_operations.py index 6ea4661d3..68b538e0a 100644 --- a/tests/core/operations/test_spatialdata_operations.py +++ b/tests/core/operations/test_spatialdata_operations.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import math import numpy as np diff --git a/tests/core/operations/test_transform.py b/tests/core/operations/test_transform.py index 1bb494fb7..e006cf280 100644 --- a/tests/core/operations/test_transform.py +++ b/tests/core/operations/test_transform.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import math import tempfile from pathlib import Path diff --git a/tests/core/operations/test_vectorize.py b/tests/core/operations/test_vectorize.py index cf5e2794c..ae83f6c95 100644 --- a/tests/core/operations/test_vectorize.py +++ b/tests/core/operations/test_vectorize.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import math import numpy as np diff --git a/tests/core/query/test_relational_query.py b/tests/core/query/test_relational_query.py index 3267b1e55..63e7a6f19 100644 --- a/tests/core/query/test_relational_query.py +++ b/tests/core/query/test_relational_query.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import annsel as an import numpy as np import pandas as pd diff --git a/tests/core/query/test_relational_query_match_sdata_to_table.py 
b/tests/core/query/test_relational_query_match_sdata_to_table.py index 6d4fcadf3..0c47ca914 100644 --- a/tests/core/query/test_relational_query_match_sdata_to_table.py +++ b/tests/core/query/test_relational_query_match_sdata_to_table.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import pytest from spatialdata import SpatialData, concatenate, match_sdata_to_table diff --git a/tests/core/query/test_spatial_query.py b/tests/core/query/test_spatial_query.py index d7147dbfb..7dadc6f63 100644 --- a/tests/core/query/test_spatial_query.py +++ b/tests/core/query/test_spatial_query.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from dataclasses import FrozenInstanceError import dask.dataframe as dd diff --git a/tests/core/test_centroids.py b/tests/core/test_centroids.py index aa332f9da..94cd40b09 100644 --- a/tests/core/test_centroids.py +++ b/tests/core/test_centroids.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import math import numpy as np diff --git a/tests/core/test_data_extent.py b/tests/core/test_data_extent.py index de5875cfa..4d2316acc 100644 --- a/tests/core/test_data_extent.py +++ b/tests/core/test_data_extent.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import math import numpy as np diff --git a/tests/core/test_deepcopy.py b/tests/core/test_deepcopy.py index b21cc9259..b63b43616 100644 --- a/tests/core/test_deepcopy.py +++ b/tests/core/test_deepcopy.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from pandas.testing import assert_frame_equal from spatialdata import SpatialData diff --git a/tests/core/test_get_attrs.py b/tests/core/test_get_attrs.py index aedf32711..a6b467fa9 100644 --- a/tests/core/test_get_attrs.py +++ b/tests/core/test_get_attrs.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import pandas as pd import pytest diff --git a/tests/core/test_validation.py b/tests/core/test_validation.py index 258c14711..bb612a4e3 100644 --- a/tests/core/test_validation.py +++ b/tests/core/test_validation.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import pytest from spatialdata._core.validation import ValidationError, raise_validation_errors diff --git a/tests/dataloader/__init__.py b/tests/dataloader/__init__.py index 328f74034..c2a5fdd8e 100644 --- a/tests/dataloader/__init__.py +++ b/tests/dataloader/__init__.py @@ -1,3 +1,5 @@ +from __future__ import annotations + try: from spatialdata.dataloader.datasets import ImageTilesDataset except ImportError as e: diff --git a/tests/dataloader/test_datasets.py b/tests/dataloader/test_datasets.py index 48881b0b0..6b6607e80 100644 --- a/tests/dataloader/test_datasets.py +++ b/tests/dataloader/test_datasets.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import numpy as np import pytest diff --git a/tests/datasets/test_datasets.py b/tests/datasets/test_datasets.py index e22182b44..2237e253c 100644 --- a/tests/datasets/test_datasets.py +++ b/tests/datasets/test_datasets.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from spatialdata.datasets import blobs, raccoon diff --git a/tests/io/test_attrs_io.py b/tests/io/test_attrs_io.py index 4dda6f0d5..1f9895eaa 100644 --- a/tests/io/test_attrs_io.py +++ b/tests/io/test_attrs_io.py @@ -1,5 +1,7 @@ """Test attrs read/write for all SpatialData container formats.""" +from __future__ import annotations + import tempfile from pathlib import Path diff --git a/tests/io/test_format.py b/tests/io/test_format.py index c8d9f04c1..3ef1319c3 100644 --- a/tests/io/test_format.py +++ b/tests/io/test_format.py @@ -1,3 +1,5 @@ 
+from __future__ import annotations + import json import tempfile from pathlib import Path diff --git a/tests/io/test_metadata.py b/tests/io/test_metadata.py index bb993b00d..dd0ae704f 100644 --- a/tests/io/test_metadata.py +++ b/tests/io/test_metadata.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import logging import os import tempfile diff --git a/tests/io/test_multi_table.py b/tests/io/test_multi_table.py index 5c90109d8..abaaea8d2 100644 --- a/tests/io/test_multi_table.py +++ b/tests/io/test_multi_table.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from pathlib import Path import pandas as pd diff --git a/tests/io/test_pyramids_performance.py b/tests/io/test_pyramids_performance.py index 875879541..82501ae87 100644 --- a/tests/io/test_pyramids_performance.py +++ b/tests/io/test_pyramids_performance.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from pathlib import Path from typing import TYPE_CHECKING @@ -18,7 +20,7 @@ @pytest.fixture -def sdata_with_image(request: "_pytest.fixtures.SubRequest", tmp_path: Path) -> SpatialData: +def sdata_with_image(request: _pytest.fixtures.SubRequest, tmp_path: Path) -> SpatialData: params = request.param if request.param is not None else {} width = params.get("width", 2048) chunksize = params.get("chunk_size", 1024) diff --git a/tests/io/test_readwrite.py b/tests/io/test_readwrite.py index af028d29c..b46620f4d 100644 --- a/tests/io/test_readwrite.py +++ b/tests/io/test_readwrite.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json import os import tempfile diff --git a/tests/io/test_utils.py b/tests/io/test_utils.py index 57bfe6e42..1f7be3581 100644 --- a/tests/io/test_utils.py +++ b/tests/io/test_utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import tempfile from contextlib import nullcontext diff --git a/tests/models/test_accessor.py b/tests/models/test_accessor.py index 7356f52cc..6f21a1905 100644 --- a/tests/models/test_accessor.py +++ b/tests/models/test_accessor.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import dask.dataframe as dd import pandas as pd import pytest diff --git a/tests/models/test_chunks_utils.py b/tests/models/test_chunks_utils.py index 5bfcfa812..a450ec664 100644 --- a/tests/models/test_chunks_utils.py +++ b/tests/models/test_chunks_utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import pytest from spatialdata.models.chunks_utils import Chunks_t, normalize_chunks diff --git a/tests/models/test_models.py b/tests/models/test_models.py index e2087ace0..348db1b2e 100644 --- a/tests/models/test_models.py +++ b/tests/models/test_models.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import re import tempfile diff --git a/tests/models/test_pyramids_utils.py b/tests/models/test_pyramids_utils.py index da0a33547..5d5e96086 100644 --- a/tests/models/test_pyramids_utils.py +++ b/tests/models/test_pyramids_utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import numpy as np import pytest from multiscale_spatial_image.to_multiscale.to_multiscale import Methods diff --git a/tests/test_init.py b/tests/test_init.py new file mode 100644 index 000000000..94107c937 --- /dev/null +++ b/tests/test_init.py @@ -0,0 +1,25 @@ +from __future__ import annotations + +import spatialdata + + +def test_package_has_version() -> None: + assert spatialdata.__version__ + + +def test_all_matches_lazy_imports() -> None: + """Ensure __all__ and _LAZY_IMPORTS stay in sync.""" + assert set(spatialdata.__all__) == 
set(spatialdata._LAZY_IMPORTS.keys()) + + +def test_all_are_importable() -> None: + """Every name in __all__ should be accessible on the module.""" + for name in spatialdata.__all__: + assert hasattr(spatialdata, name), f"{name!r} listed in __all__ but not resolvable" + + +def test_all_are_in_dir() -> None: + """dir(spatialdata) should expose everything in __all__.""" + module_dir = dir(spatialdata) + for name in spatialdata.__all__: + assert name in module_dir, f"{name!r} missing from dir()" diff --git a/tests/transformations/ngff/conftest.py b/tests/transformations/ngff/conftest.py index cc0089cd3..a5ce90247 100644 --- a/tests/transformations/ngff/conftest.py +++ b/tests/transformations/ngff/conftest.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from spatialdata.transformations.ngff.ngff_coordinate_system import ( NgffAxis, NgffCoordinateSystem, diff --git a/tests/transformations/ngff/test_ngff_coordinate_system.py b/tests/transformations/ngff/test_ngff_coordinate_system.py index 6a8474b71..ae1412fcc 100644 --- a/tests/transformations/ngff/test_ngff_coordinate_system.py +++ b/tests/transformations/ngff/test_ngff_coordinate_system.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import copy import json diff --git a/tests/transformations/ngff/test_ngff_transformations.py b/tests/transformations/ngff/test_ngff_transformations.py index 8e1ea0f3c..7051f3646 100644 --- a/tests/transformations/ngff/test_ngff_transformations.py +++ b/tests/transformations/ngff/test_ngff_transformations.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import contextlib import copy import json diff --git a/tests/transformations/test_transformations.py b/tests/transformations/test_transformations.py index b8571dc9a..a8de25f47 100644 --- a/tests/transformations/test_transformations.py +++ b/tests/transformations/test_transformations.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from contextlib import nullcontext from copy import deepcopy diff --git a/tests/transformations/test_transformations_utils.py b/tests/transformations/test_transformations_utils.py index 2c69d96e8..dc5f884b9 100644 --- a/tests/transformations/test_transformations_utils.py +++ b/tests/transformations/test_transformations_utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from spatialdata.transformations._utils import convert_transformations_to_affine from spatialdata.transformations.operations import get_transformation, set_transformation from spatialdata.transformations.transformations import Affine, Scale, Sequence, Translation diff --git a/tests/utils/test_element_utils.py b/tests/utils/test_element_utils.py index 1bfd20aa4..341f66872 100644 --- a/tests/utils/test_element_utils.py +++ b/tests/utils/test_element_utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import itertools import dask_image.ndinterp diff --git a/tests/utils/test_sanitize.py b/tests/utils/test_sanitize.py index b61f19084..6b2fd3150 100644 --- a/tests/utils/test_sanitize.py +++ b/tests/utils/test_sanitize.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import numpy as np import pandas as pd import pytest diff --git a/tests/utils/test_testing.py b/tests/utils/test_testing.py index a181c87ff..322b6ad50 100644 --- a/tests/utils/test_testing.py +++ b/tests/utils/test_testing.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import copy import numpy as np