diff --git a/examples/zarr_arr.py b/examples/zarr_arr.py
index bee4eb09..0c6790a2 100644
--- a/examples/zarr_arr.py
+++ b/examples/zarr_arr.py
@@ -20,5 +20,4 @@
 except ImportError:
     raise ImportError("Please `pip install zarr aiohttp` to run this example")
-
-ndv.imshow(zarr_arr["s4"], current_index={1: 30}, visible_axes=(0, 2))
+ndv.imshow(zarr_arr["s4"].astype("uint16"), current_index={1: 30}, visible_axes=(0, 2))
diff --git a/pyproject.toml b/pyproject.toml
index cf954e61..84d7bdf3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -44,6 +44,9 @@ dependencies = [
     "typing_extensions >= 4.0",
 ]
 
+[project.scripts]
+ndv = "ndv.cli:main"
+
 # https://peps.python.org/pep-0621/#dependencies-optional-dependencies
 [project.optional-dependencies]
 # Supported GUI frontends
@@ -64,15 +67,12 @@ pyside = [
     "superqt[iconify,pyside6] >=0.7.2",
     # https://github.com/pyapp-kit/ndv/issues/59
     "pyside6 ==6.6.3; sys_platform == 'win32'",
-    "numpy >=1.23,<2; sys_platform == 'win32'", # needed for pyside6.6
+    "numpy >=1.23,<2; sys_platform == 'win32'",  # needed for pyside6.6
     "pyside6 >=6.4",
     "pyside6 >=6.6; python_version >= '3.12'",
     "qtpy >=2",
 ]
-wxpython = [
-    "pyconify>=0.2.1",
-    "wxpython >=4.2.2",
-]
+wxpython = ["pyconify>=0.2.1", "wxpython >=4.2.2"]
 
 # Supported Canavs backends
 vispy = ["vispy>=0.14.3", "pyopengl >=3.1"]
@@ -83,6 +83,17 @@
 qt = ["ndv[pygfx,pyqt]", "imageio[tifffile] >=2.20"]
 jup = ["ndv[pygfx,jupyter]", "imageio[tifffile] >=2.20"]
 wx = ["ndv[pygfx,wxpython]", "imageio[tifffile] >=2.20"]
+# full support for reading files
+io = [
+    "bioio",
+    "bioio-ome-tiff",
+    "bioio-ome-zarr",
+    "bioio-tifffile",
+    "bioio-lif",
+    "bioio-nd2",
+    "bioio-bioformats",
+    "zarr",
+]
 
 [project.urls]
 homepage = "https://github.com/pyapp-kit/ndv"
@@ -110,7 +121,7 @@ dev = [
     # omitting wxpython from dev env for now
     # because `uv sync && pytest hangs` on a wx test in the "full" env
     # use `make test extras=wx,[pygfx|vispy] isolated=1` to test
-    "ndv[vispy,pygfx,pyqt,jupyter]",
+    "ndv[vispy,pygfx,pyqt,jupyter]",
     "imageio[tifffile] >=2.20",
     "ipykernel>=6.29.5",
     "ipython>=8.18.1",
diff --git a/src/ndv/cli.py b/src/ndv/cli.py
new file mode 100644
index 00000000..e099187d
--- /dev/null
+++ b/src/ndv/cli.py
@@ -0,0 +1,20 @@
+"""command-line program."""
+
+import argparse
+
+from ndv.util import imshow
+
+
+def _parse_args() -> argparse.Namespace:
+    parser = argparse.ArgumentParser(description="ndv: ndarray viewer")
+    parser.add_argument("path", type=str, help="Path to the data file to view")
+    return parser.parse_args()
+
+
+def main() -> None:
+    """Run the command-line program."""
+    from ndv import io
+
+    args = _parse_args()
+
+    imshow(io.imread(args.path))
diff --git a/src/ndv/io.py b/src/ndv/io.py
new file mode 100644
index 00000000..7dd92547
--- /dev/null
+++ b/src/ndv/io.py
@@ -0,0 +1,139 @@
+"""All the io we can think of."""
+
+from __future__ import annotations
+
+import json
+import logging
+from contextlib import suppress
+from pathlib import Path
+from textwrap import indent, wrap
+from typing import TYPE_CHECKING, Any
+
+import numpy as np
+
+if TYPE_CHECKING:
+    import xarray as xr
+    import zarr
+
+logger = logging.getLogger(__name__)
+
+
+class collect_errors:
+    """Store exceptions in `errors` under `key`, rather than raising."""
+
+    def __init__(self, errors: dict, key: str):
+        self.errors = errors
+        self.key = key
+
+    def __enter__(self) -> None:
+        pass
+
+    def __exit__(
+        self, exc_type: type[BaseException], exc_value: BaseException, traceback: Any
+    ) -> bool:
+        if exc_type is not None:
+            self.errors[self.key] = exc_value
+        return True
+
+
+def imread(path: str | Path) -> Any:
+    """Just read the thing already.
+
+    Try to read `path` and return something that ndv can open.
+    """
+    path_str = str(path)
+    if path_str.endswith(".npy"):
+        return np.load(path_str)
+
+    errors: dict[str, Exception] = {}
+
+    if _is_zarr_folder(path):
+        with collect_errors(errors, "tensorstore-zarr"):
+            return _read_tensorstore(path)
+        with collect_errors(errors, "zarr"):
+            return _read_zarr_python(path)
+
+    if _is_n5_folder(path):
+        with collect_errors(errors, "tensorstore-n5"):
+            return _read_tensorstore(path, driver="n5")
+
+    with collect_errors(errors, "bioio"):
+        return _read_bioio(path)
+
+    raise ValueError(_format_error_message(errors))
+
+
+def _is_n5_folder(path: str | Path) -> bool:
+    path = Path(path)
+    return path.is_dir() and any(path.glob("attributes.json"))
+
+
+def _is_zarr_folder(path: str | Path) -> bool:
+    if str(path).endswith(".zarr"):
+        return True
+    path = Path(path)
+    return path.is_dir() and any(path.glob("*.zarr"))
+
+
+def _read_tensorstore(path: str | Path, driver: str = "zarr", level: int = 0) -> Any:
+    import tensorstore as ts
+
+    sub = _array_path(path, level=level)
+    store = ts.open({"driver": driver, "kvstore": str(path) + sub}).result()
+    logger.info("using tensorstore")
+    return store
+
+
+def _format_error_message(errors: dict[str, Exception]) -> str:
+    lines = ["\nCould not read file. Here's what we tried and errors we got", ""]
+    for _key, err in errors.items():
+        lines.append(f"{_key}:")
+        wrapped = wrap(str(err), width=120)
+        indented = indent("\n".join(wrapped), "    ")
+        lines.append(indented)
+    return "\n".join(lines)
+
+
+def _read_bioio(path: str | Path) -> xr.DataArray:
+    from bioio import BioImage
+
+    data = BioImage(str(path))
+    logger.info("using bioio")
+    return data.xarray_dask_data
+
+
+def _read_zarr_python(path: str | Path, level: int = 0) -> zarr.Array:
+    import zarr
+
+    _subpath = _array_path(path, level=level)
+    z = zarr.open(str(path) + _subpath, mode="r")
+    logger.info("using zarr python")
+    return z
+
+
+def _array_path(path: str | Path, level: int = 0) -> str:
+    import zarr
+
+    z = zarr.open(path, mode="r")
+    if isinstance(z, zarr.Array):
+        return "/"
+    if isinstance(z, zarr.Group):
+        with suppress(TypeError):
+            zattrs = json.loads(z.store.get(".zattrs"))
+            if "multiscales" in zattrs:
+                levels: list[str] = []
+                for dset in zattrs["multiscales"][0]["datasets"]:
+                    if "path" in dset:
+                        levels.append(dset["path"])
+                if levels:
+                    return "/" + levels[level]
+
+        arrays = list(z.array_keys())
+        if arrays:
+            return f"/{arrays[0]}"
+
+    if level != 0:
+        raise ValueError(
+            f"Could not find a dataset with level {level} in the group."
+        )
+    raise ValueError("Could not find an array or multiscales information in the group.")
diff --git a/src/ndv/util.py b/src/ndv/util.py
index e9284272..82b1438b 100644
--- a/src/ndv/util.py
+++ b/src/ndv/util.py
@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, overload
+from typing import TYPE_CHECKING, Any, overload
 
 from ndv.controllers import ArrayViewer
 from ndv.views._app import run_app