diff --git a/.github/workflows/release.yaml.rej b/.github/workflows/release.yaml.rej deleted file mode 100644 index 83203fce..00000000 --- a/.github/workflows/release.yaml.rej +++ /dev/null @@ -1,10 +0,0 @@ -diff a/.github/workflows/release.yaml b/.github/workflows/release.yaml (rejected hunks) -@@ -11,7 +11,7 @@ jobs: - runs-on: ubuntu-latest - environment: - name: pypi -- url: https://pypi.org/p/{{ cookiecutter.package_name }} -+ url: https://pypi.org/p/spatialdata_plot - permissions: - id-token: write # IMPORTANT: this permission is mandatory for trusted publishing - steps: diff --git a/.github/workflows/test.yaml.rej b/.github/workflows/test.yaml.rej deleted file mode 100644 index e165fb84..00000000 --- a/.github/workflows/test.yaml.rej +++ /dev/null @@ -1,16 +0,0 @@ -diff a/.github/workflows/test.yaml b/.github/workflows/test.yaml (rejected hunks) -@@ -24,11 +24,11 @@ jobs: - matrix: - include: - - os: ubuntu-latest -- python: "3.9" -+ python: "3.10" - - os: ubuntu-latest -- python: "3.11" -+ python: "3.12" - - os: ubuntu-latest -- python: "3.11" -+ python: "3.12" - pip-flags: "--pre" - name: PRE-RELEASE DEPENDENCIES - diff --git a/.gitignore.rej b/.gitignore.rej deleted file mode 100644 index 9d0a7e93..00000000 --- a/.gitignore.rej +++ /dev/null @@ -1,9 +0,0 @@ -diff a/.gitignore b/.gitignore (rejected hunks) -@@ -18,6 +18,7 @@ __pycache__/ - /.pytest_cache/ - /.cache/ - /data/ -+/node_modules/ - - # docs - /docs/generated/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8c216d86..6da8aed0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -21,13 +21,13 @@ repos: hooks: - id: blacken-docs - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.11.1 + rev: v1.11.2 hooks: - id: mypy additional_dependencies: [numpy, types-requests] exclude: tests/|docs/ - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.5.6 + rev: v0.6.2 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] diff --git a/.pre-commit-config.yaml.rej b/.pre-commit-config.yaml.rej deleted file mode 100644 index 5f9ac183..00000000 --- a/.pre-commit-config.yaml.rej +++ /dev/null @@ -1,34 +0,0 @@ -diff a/.pre-commit-config.yaml b/.pre-commit-config.yaml (rejected hunks) -@@ -6,29 +6,18 @@ default_stages: - - push - minimum_pre_commit_version: 2.16.0 - repos: -- - repo: https://github.com/psf/black -- rev: "24.4.2" -- hooks: -- - id: black -- - repo: https://github.com/asottile/blacken-docs -- rev: 1.16.0 -- hooks: -- - id: blacken-docs - - repo: https://github.com/pre-commit/mirrors-prettier - rev: v4.0.0-alpha.8 - hooks: - - id: prettier -- # Newer versions of node don't work on systems that have an older version of GLIBC -- # (in particular Ubuntu 18.04 and Centos 7) -- # EOL of Centos 7 is in 2024-06, we can probably get rid of this then. -- # See https://github.com/scverse/cookiecutter-scverse/issues/143 and -- # https://github.com/jupyterlab/jupyterlab/issues/12675 -- language_version: "17.9.1" - - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.4.4 - hooks: - - id: ruff -+ types_or: [python, pyi, jupyter] - args: [--fix, --exit-non-zero-on-fix] -+ - id: ruff-format -+ types_or: [python, pyi, jupyter] - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 - hooks: diff --git a/CHANGELOG.md b/CHANGELOG.md index 01f600e7..84ce9881 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,17 +12,17 @@ and this project adheres to [Semantic Versioning][]. 
### Added -- Replaced `outline` parameter in `render_labels` with alpha-based logic (#323) -- Lowered RMSE-threshold for plot-based tests from 60 to 45 (#323) -- Minor fixes for several tests as a result of the threshold change (#323) +- ### Changed -- +- Replaced `outline` parameter in `render_labels` with alpha-based logic (#323) +- Lowered RMSE-threshold for plot-based tests from 60 to 45 (#323) +- Removed `preprocessing` (.pp) accessor (#329) ### Fixed -- +- Minor fixes for several tests as a result of the threshold change (#323) ## [0.2.4] - 2024-08-07 diff --git a/docs/index.md.rej b/docs/index.md.rej deleted file mode 100644 index 14a8d31f..00000000 --- a/docs/index.md.rej +++ /dev/null @@ -1,9 +0,0 @@ -diff a/docs/index.md b/docs/index.md (rejected hunks) -@@ -8,7 +8,6 @@ - - api.md - changelog.md --template_usage.md - contributing.md - references.md - diff --git a/docs/notebooks/preprocessing.ipynb b/docs/notebooks/preprocessing.ipynb deleted file mode 100644 index 56baf5fa..00000000 --- a/docs/notebooks/preprocessing.ipynb +++ /dev/null @@ -1,456 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "id": "62f06dbc-5073-4d1c-8767-91db1a54ffe9", - "metadata": { - "tags": [] - }, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/Users/harald/Documents/opt/spatdata/env/lib/python3.9/site-packages/geopandas/_compat.py:123: UserWarning: The Shapely GEOS version (3.11.1-CAPI-1.17.1) is incompatible with the GEOS version PyGEOS was compiled with (3.10.4-CAPI-1.16.2). Conversions between both will be slow.\n", - " warnings.warn(\n", - "/Users/harald/Documents/opt/spatdata/env/lib/python3.9/site-packages/spatialdata/__init__.py:9: UserWarning: Geopandas was set to use PyGEOS, changing to shapely 2.0 with:\n", - "\n", - "\tgeopandas.options.use_pygeos = True\n", - "\n", - "If you intended to use PyGEOS, set the option to False.\n", - " _check_geopandas_using_shapely()\n" - ] - } - ], - "source": [ - "import spatialdata as sd" - ] - }, - { - "cell_type": "markdown", - "id": "0a37df26-4d31-4695-ab60-f7532327b3d8", - "metadata": { - "tags": [] - }, - "source": [ - "# Subsetting Spatialdata objects\n", - "\n", - "In this tutorial, we illustrate how spatialdata objects may be subsetted using the `spatialdata-plot` preprocessing accessor `.pp`. We illustrate this on the MIBI-TOF dataset which can be obtained from the spatialdata-sandbox repo (https://github.com/giovp/spatialdata-sandbox)." - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "09731d43-664e-456b-9f3b-ac6b4f20c0a2", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "data_dir = \"../../../spatialdata-sandbox/mibitof/data.zarr\"" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "02c31376-88d4-409c-a1cc-b5806773fd60", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "mibi = sd.read_zarr(data_dir)" - ] - }, - { - "cell_type": "markdown", - "id": "e33dce14-6d3a-4941-81d9-9d07df608b39", - "metadata": {}, - "source": [ - "\n", - "Spatialdata objects may contain various *elements* including images, labels, shapes and points, as well as *coordinate systems* which represent groups of associated elements. The content of a spatialdata object may be inspected simply by invoking its `__repr__` method." 
- ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "2e7043a9-9de0-4366-b1e1-adcce488e134", - "metadata": { - "tags": [] - }, - "outputs": [ - { - "data": { - "text/plain": [ - "SpatialData object with:\n", - "├── Images\n", - "│ ├── 'point8_image': SpatialImage[cyx] (3, 1024, 1024)\n", - "│ ├── 'point16_image': SpatialImage[cyx] (3, 1024, 1024)\n", - "│ └── 'point23_image': SpatialImage[cyx] (3, 1024, 1024)\n", - "├── Labels\n", - "│ ├── 'point8_labels': SpatialImage[yx] (1024, 1024)\n", - "│ ├── 'point16_labels': SpatialImage[yx] (1024, 1024)\n", - "│ └── 'point23_labels': SpatialImage[yx] (1024, 1024)\n", - "└── Table\n", - " └── AnnData object with n_obs × n_vars = 3309 × 36\n", - " obs: 'row_num', 'point', 'cell_id', 'X1', 'center_rowcoord', 'center_colcoord', 'cell_size', 'category', 'donor', 'Cluster', 'batch', 'library_id'\n", - " uns: 'spatialdata_attrs'\n", - " obsm: 'X_scanorama', 'X_umap', 'spatial': AnnData (3309, 36)\n", - "with coordinate systems:\n", - "▸ 'point8', with elements:\n", - " point8_image (Images), point8_labels (Labels)\n", - "▸ 'point16', with elements:\n", - " point16_image (Images), point16_labels (Labels)\n", - "▸ 'point23', with elements:\n", - " point23_image (Images), point23_labels (Labels)" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "mibi" - ] - }, - { - "cell_type": "markdown", - "id": "c2e1729d-a151-4ccf-8fbd-f43d4fb1857d", - "metadata": {}, - "source": [ - "Importing `spatialdata-plot` equips spatialdata objects with so called accessors that extend the object with additional methods. The preprocessing accessor `.pp` allows to subset spatialdata objects and exposes the methods `.pp.get_elements` and `.pp.get_bb`." - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "36a27f5f-9c47-435f-99e2-d5c691ead000", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "import spatialdata_plot" - ] - }, - { - "cell_type": "markdown", - "id": "4fcdc2c5-4ddb-4dc8-8207-b1cc70498d5a", - "metadata": {}, - "source": [ - "## Subsetting spatialdata objects\n", - "\n", - "Any element or coordinate system may be extracted using `pp.get_elements` which receives the respective key(s) as an argument and returns a copy of the subsetted spatialdata object." 
- ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "af0dab49-dfed-4023-ba78-5127f495d178", - "metadata": { - "tags": [] - }, - "outputs": [ - { - "data": { - "text/plain": [ - "SpatialData object with:\n", - "└── Images\n", - " └── 'point8_image': SpatialImage[cyx] (3, 1024, 1024)\n", - "with coordinate systems:\n", - "▸ 'point8', with elements:\n", - " point8_image (Images)" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "mibi.pp.get_elements(\"point8_image\") # extract the image point8_image" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "db3fee6a-c9f6-4d04-949c-fc42f86c83b0", - "metadata": { - "tags": [] - }, - "outputs": [ - { - "data": { - "text/plain": [ - "SpatialData object with:\n", - "├── Labels\n", - "│ └── 'point16_labels': SpatialImage[yx] (1024, 1024)\n", - "└── Table\n", - " └── AnnData object with n_obs × n_vars = 1023 × 36\n", - " obs: 'row_num', 'point', 'cell_id', 'X1', 'center_rowcoord', 'center_colcoord', 'cell_size', 'category', 'donor', 'Cluster', 'batch', 'library_id'\n", - " uns: 'spatialdata_attrs'\n", - " obsm: 'X_scanorama', 'X_umap', 'spatial': AnnData (1023, 36)\n", - "with coordinate systems:\n", - "▸ 'point16', with elements:\n", - " point16_labels (Labels)" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "mibi.pp.get_elements(\"point16_labels\") # extract point16_labels" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "bf849e97-a9e4-4f97-a1ef-71f9a4ac1b01", - "metadata": { - "tags": [] - }, - "outputs": [ - { - "data": { - "text/plain": [ - "SpatialData object with:\n", - "├── Images\n", - "│ └── 'point23_image': SpatialImage[cyx] (3, 1024, 1024)\n", - "├── Labels\n", - "│ └── 'point23_labels': SpatialImage[yx] (1024, 1024)\n", - "└── Table\n", - " └── AnnData object with n_obs × n_vars = 1241 × 36\n", - " obs: 'row_num', 'point', 'cell_id', 'X1', 'center_rowcoord', 'center_colcoord', 'cell_size', 'category', 'donor', 'Cluster', 'batch', 'library_id'\n", - " uns: 'spatialdata_attrs'\n", - " obsm: 'X_scanorama', 'X_umap', 'spatial': AnnData (1241, 36)\n", - "with coordinate systems:\n", - "▸ 'point23', with elements:\n", - " point23_image (Images), point23_labels (Labels)" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "mibi.pp.get_elements(\"point23\") # extracts the coordinate system point23" - ] - }, - { - "cell_type": "markdown", - "id": "249006de-b363-42fc-a4a0-28f333f9f154", - "metadata": {}, - "source": [ - "Multiple elements/coordinate systems may be selected if the keys are provided in a list." 
- ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "957be3c6-0005-4e4a-b2a9-7be8d97b3f0d", - "metadata": { - "tags": [] - }, - "outputs": [ - { - "data": { - "text/plain": [ - "SpatialData object with:\n", - "├── Images\n", - "│ └── 'point23_image': SpatialImage[cyx] (3, 1024, 1024)\n", - "├── Labels\n", - "│ └── 'point23_labels': SpatialImage[yx] (1024, 1024)\n", - "└── Table\n", - " └── AnnData object with n_obs × n_vars = 1241 × 36\n", - " obs: 'row_num', 'point', 'cell_id', 'X1', 'center_rowcoord', 'center_colcoord', 'cell_size', 'category', 'donor', 'Cluster', 'batch', 'library_id'\n", - " uns: 'spatialdata_attrs'\n", - " obsm: 'X_scanorama', 'X_umap', 'spatial': AnnData (1241, 36)\n", - "with coordinate systems:\n", - "▸ 'point23', with elements:\n", - " point23_image (Images), point23_labels (Labels)" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "mibi.pp.get_elements([\"point23_image\", \"point23_labels\"]) # extract image and labels of point23" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "abc74625-2988-4376-b54c-49af4be09a2d", - "metadata": { - "tags": [] - }, - "outputs": [ - { - "data": { - "text/plain": [ - "SpatialData object with:\n", - "├── Images\n", - "│ ├── 'point8_image': SpatialImage[cyx] (3, 1024, 1024)\n", - "│ └── 'point16_image': SpatialImage[cyx] (3, 1024, 1024)\n", - "├── Labels\n", - "│ ├── 'point8_labels': SpatialImage[yx] (1024, 1024)\n", - "│ └── 'point16_labels': SpatialImage[yx] (1024, 1024)\n", - "└── Table\n", - " └── AnnData object with n_obs × n_vars = 2068 × 36\n", - " obs: 'row_num', 'point', 'cell_id', 'X1', 'center_rowcoord', 'center_colcoord', 'cell_size', 'category', 'donor', 'Cluster', 'batch', 'library_id'\n", - " uns: 'spatialdata_attrs'\n", - " obsm: 'X_scanorama', 'X_umap', 'spatial': AnnData (2068, 36)\n", - "with coordinate systems:\n", - "▸ 'point8', with elements:\n", - " point8_image (Images), point8_labels (Labels)\n", - "▸ 'point16', with elements:\n", - " point16_image (Images), point16_labels (Labels)" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "mibi.pp.get_elements([\"point8\", \"point16\"]) # extract coordinatesystems point8 and point16" - ] - }, - { - "cell_type": "markdown", - "id": "8672c517-98fe-422d-813c-322e6e06d102", - "metadata": {}, - "source": [ - "## Extracting bounding boxes\n", - "\n", - "The function `.pp.get_bb` allows to select bounding boxes. The method receives the x and y coordinates of the region of interest, and by default applies the selection to all elements within the object." 
- ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "5126af41-1fd8-4b20-b825-73d54790b39a", - "metadata": { - "tags": [] - }, - "outputs": [ - { - "data": { - "text/plain": [ - "SpatialData object with:\n", - "├── Images\n", - "│ ├── 'point8_image': SpatialImage[cyx] (3, 300, 300)\n", - "│ ├── 'point16_image': SpatialImage[cyx] (3, 300, 300)\n", - "│ └── 'point23_image': SpatialImage[cyx] (3, 300, 300)\n", - "├── Labels\n", - "│ ├── 'point8_labels': SpatialImage[yx] (300, 300)\n", - "│ ├── 'point16_labels': SpatialImage[yx] (300, 300)\n", - "│ └── 'point23_labels': SpatialImage[yx] (300, 300)\n", - "└── Table\n", - " └── AnnData object with n_obs × n_vars = 3309 × 36\n", - " obs: 'row_num', 'point', 'cell_id', 'X1', 'center_rowcoord', 'center_colcoord', 'cell_size', 'category', 'donor', 'Cluster', 'batch', 'library_id'\n", - " uns: 'spatialdata_attrs'\n", - " obsm: 'X_scanorama', 'X_umap', 'spatial': AnnData (3309, 36)\n", - "with coordinate systems:\n", - "▸ 'point8', with elements:\n", - " point8_image (Images), point8_labels (Labels)\n", - "▸ 'point16', with elements:\n", - " point16_image (Images), point16_labels (Labels)\n", - "▸ 'point23', with elements:\n", - " point23_image (Images), point23_labels (Labels)" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "mibi.pp.get_bb([200, 500], [200, 500]) # select the area within the range x and y of [200, 500]" - ] - }, - { - "cell_type": "markdown", - "id": "b213885d-bb19-45a4-b127-171862644b85", - "metadata": { - "tags": [] - }, - "source": [ - "## Chaining preprocessing methods\n", - "\n", - "Methods of preprocessing accessor may be chained such that specific parts of element(s) may be extracted. " - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "cc56844b-1813-4cc3-9697-c6d969cc07b9", - "metadata": { - "tags": [] - }, - "outputs": [ - { - "data": { - "text/plain": [ - "SpatialData object with:\n", - "├── Images\n", - "│ └── 'point16_image': SpatialImage[cyx] (3, 300, 300)\n", - "├── Labels\n", - "│ └── 'point16_labels': SpatialImage[yx] (300, 300)\n", - "└── Table\n", - " └── AnnData object with n_obs × n_vars = 1023 × 36\n", - " obs: 'row_num', 'point', 'cell_id', 'X1', 'center_rowcoord', 'center_colcoord', 'cell_size', 'category', 'donor', 'Cluster', 'batch', 'library_id'\n", - " uns: 'spatialdata_attrs'\n", - " obsm: 'X_scanorama', 'X_umap', 'spatial': AnnData (1023, 36)\n", - "with coordinate systems:\n", - "▸ 'point16', with elements:\n", - " point16_image (Images), point16_labels (Labels)" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "mibi.pp.get_elements(\"point16\").pp.get_bb([200, 500], [200, 500]) # first select the coordinate system, then the ROI" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "c7cd3648-8bdb-4d95-95f7-b4836a8567ff", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.0" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/pyproject.toml.rej b/pyproject.toml.rej deleted file mode 100644 index 60fa1660..00000000 --- a/pyproject.toml.rej +++ 
/dev/null @@ -1,91 +0,0 @@ -diff a/pyproject.toml b/pyproject.toml (rejected hunks) -@@ -7,7 +7,7 @@ name = "spatialdata-plot" - version = "0.0.1" - description = "A very interesting piece of code" - readme = "README.md" --requires-python = ">=3.9" -+requires-python = ">=3.10" - license = {file = "LICENSE"} - authors = [ - {name = "scverse"}, -@@ -21,19 +21,19 @@ urls.Home-page = "https://github.com/scverse/spatialdata-plot" - dependencies = [ - "anndata", - # for debug logging (referenced from the issue template) -- "session-info" -+ "session-info", - ] - - [project.optional-dependencies] - dev = [ - "pre-commit", -- "twine>=4.0.2" -+ "twine>=4.0.2", - ] - doc = [ - "docutils>=0.8,!=0.18.*,!=0.19.*", - "sphinx>=4", - "sphinx-book-theme>=1.0.0", -- "myst-nb", -+ "myst-nb>=1.1.0", - "sphinxcontrib-bibtex>=1.0.0", - "sphinx-autodoc-typehints", - "sphinxext-opengraph", -@@ -61,12 +61,15 @@ addopts = [ - "--import-mode=importlib", # allow using test files with same name - ] - --[tool.black] --line-length = 120 -- - [tool.ruff] --src = ["src"] - line-length = 120 -+src = ["src"] -+extend-include = ["*.ipynb"] -+ -+[tool.ruff.format] -+docstring-code-format = true -+ -+[tool.ruff.lint] - select = [ - "F", # Errors detected by Pyflakes - "E", # Error detected by Pycodestyle -@@ -81,7 +84,7 @@ select = [ - "RUF100", # Report unused noqa directives - ] - ignore = [ -- # line too long -> we accept long comment lines; black gets rid of long code lines -+ # line too long -> we accept long comment lines; formatter gets rid of long code lines - "E501", - # Do not assign a lambda expression, use a def -> lambda expression assignments are convenient - "E731", -@@ -95,7 +98,7 @@ ignore = [ - "D107", - # Errors from function calls in argument defaults. These are fine when the result is immutable. - "B008", -- # __magic__ methods are are often self-explanatory, allow missing docstrings -+ # __magic__ methods are often self-explanatory, allow missing docstrings - "D105", - # first line should end with a period [Bug: doesn't work with single-line docstrings] - "D400", -@@ -108,10 +111,10 @@ ignore = [ - "D213", - ] - --[tool.ruff.pydocstyle] -+[tool.ruff.lint.pydocstyle] - convention = "numpy" - --[tool.ruff.per-file-ignores] -+[tool.ruff.lint.per-file-ignores] - "docs/*" = ["I"] - "tests/*" = ["D"] - "*/__init__.py" = ["F401"] -@@ -125,5 +128,5 @@ skip = [ - "docs/changelog.md", - "docs/references.bib", - "docs/references.md", -- "docs/notebooks/example.ipynb" -+ "docs/notebooks/example.ipynb", - ] diff --git a/src/spatialdata_plot/__init__.py b/src/spatialdata_plot/__init__.py index 029ca8b7..fd8c82c0 100644 --- a/src/spatialdata_plot/__init__.py +++ b/src/spatialdata_plot/__init__.py @@ -1,7 +1,7 @@ from importlib.metadata import version -from . import pl, pp +from . 
import pl -__all__ = ["pl", "pp"] +__all__ = ["pl"] __version__ = version("spatialdata-plot") diff --git a/src/spatialdata_plot/pl/basic.py b/src/spatialdata_plot/pl/basic.py index 78a7158d..2143f645 100644 --- a/src/spatialdata_plot/pl/basic.py +++ b/src/spatialdata_plot/pl/basic.py @@ -55,9 +55,9 @@ _validate_points_render_params, _validate_shape_render_params, _validate_show_parameters, + _verify_plotting_tree, save_fig, ) -from spatialdata_plot.pp.utils import _verify_plotting_tree # replace with # from spatialdata._types import ColorLike @@ -743,8 +743,6 @@ def show( # Evaluate execution tree for plotting valid_commands = [ - "get_elements", - "get_bb", "render_images", "render_shapes", "render_labels", diff --git a/src/spatialdata_plot/pl/utils.py b/src/spatialdata_plot/pl/utils.py index c2941d91..d04f5c4e 100644 --- a/src/spatialdata_plot/pl/utils.py +++ b/src/spatialdata_plot/pl/utils.py @@ -2,6 +2,7 @@ import os import warnings +from collections import OrderedDict from collections.abc import Iterable, Mapping, Sequence from copy import copy from functools import partial @@ -73,7 +74,6 @@ _FontSize, _FontWeight, ) -from spatialdata_plot.pp.utils import _get_coordinate_system_mapping to_hex = partial(colors.to_hex, keep_alpha=True) @@ -83,6 +83,56 @@ ColorLike = Union[tuple[float, ...], str] +def _verify_plotting_tree(sdata: SpatialData) -> SpatialData: + """Verify that the plotting tree exists, and if not, create it.""" + if not hasattr(sdata, "plotting_tree"): + sdata.plotting_tree = OrderedDict() + + return sdata + + +def _get_coordinate_system_mapping(sdata: SpatialData) -> dict[str, list[str]]: + coordsys_keys = sdata.coordinate_systems + image_keys = [] if sdata.images is None else sdata.images.keys() + label_keys = [] if sdata.labels is None else sdata.labels.keys() + shape_keys = [] if sdata.shapes is None else sdata.shapes.keys() + point_keys = [] if sdata.points is None else sdata.points.keys() + + mapping: dict[str, list[str]] = {} + + if len(coordsys_keys) < 1: + raise ValueError("SpatialData object must have at least one coordinate system to generate a mapping.") + + for key in coordsys_keys: + mapping[key] = [] + + for image_key in image_keys: + transformations = get_transformation(sdata.images[image_key], get_all=True) + + if key in list(transformations.keys()): + mapping[key].append(image_key) + + for label_key in label_keys: + transformations = get_transformation(sdata.labels[label_key], get_all=True) + + if key in list(transformations.keys()): + mapping[key].append(label_key) + + for shape_key in shape_keys: + transformations = get_transformation(sdata.shapes[shape_key], get_all=True) + + if key in list(transformations.keys()): + mapping[key].append(shape_key) + + for point_key in point_keys: + transformations = get_transformation(sdata.points[point_key], get_all=True) + + if key in list(transformations.keys()): + mapping[key].append(point_key) + + return mapping + + def _is_color_like(color: Any) -> bool: """Check if a value is a valid color, returns False for pseudo-bools. 
diff --git a/src/spatialdata_plot/pp/__init__.py b/src/spatialdata_plot/pp/__init__.py deleted file mode 100644 index c73a42e5..00000000 --- a/src/spatialdata_plot/pp/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from .basic import PreprocessingAccessor - -__all__ = [ - "PreprocessingAccessor", -] diff --git a/src/spatialdata_plot/pp/basic.py b/src/spatialdata_plot/pp/basic.py deleted file mode 100644 index 00378405..00000000 --- a/src/spatialdata_plot/pp/basic.py +++ /dev/null @@ -1,330 +0,0 @@ -from collections import OrderedDict -from typing import Union - -import spatialdata as sd -from anndata import AnnData -from dask.dataframe import DataFrame as DaskDataFrame -from datatree import DataTree -from geopandas import GeoDataFrame -from spatialdata.models import get_table_keys -from xarray import DataArray - -from spatialdata_plot._accessor import register_spatial_data_accessor -from spatialdata_plot.pp.utils import ( - _get_coordinate_system_mapping, - _verify_plotting_tree, -) - -# from .colorize import _colorize - - -@register_spatial_data_accessor("pp") -class PreprocessingAccessor: - """ - Preprocessing functions for SpatialData objects. - - Parameters - ---------- - sdata : - A spatial data object. - """ - - @property - def sdata(self) -> sd.SpatialData: - """The `SpatialData` object to provide preprocessing functions for.""" - return self._sdata - - @sdata.setter - def sdata(self, sdata: sd.SpatialData) -> None: - self._sdata = sdata - - def __init__(self, sdata: sd.SpatialData) -> None: - self._sdata = sdata - - def _copy( - self, - images: Union[None, dict[str, Union[DataArray, DataTree]]] = None, - labels: Union[None, dict[str, Union[DataArray, DataTree]]] = None, - points: Union[None, dict[str, DaskDataFrame]] = None, - shapes: Union[None, dict[str, GeoDataFrame]] = None, - tables: Union[None, dict[str, AnnData]] = None, - ) -> sd.SpatialData: - """Copy the references from the original to the new SpatialData object.""" - sdata = sd.SpatialData( - images=self._sdata.images if images is None else images, - labels=self._sdata.labels if labels is None else labels, - points=self._sdata.points if points is None else points, - shapes=self._sdata.shapes if shapes is None else shapes, - tables=self._sdata.tables if tables is None else tables, - ) - sdata.plotting_tree = self._sdata.plotting_tree if hasattr(self._sdata, "plotting_tree") else OrderedDict() - - return sdata - - def _verify_plotting_tree_exists(self) -> None: - if not hasattr(self._sdata, "plotting_tree"): - self._sdata.plotting_tree = OrderedDict() - - def get_elements(self, elements: Union[str, list[str]]) -> sd.SpatialData: - """ - Get a subset of the spatial data object by specifying elements to keep. - - Parameters - ---------- - elements : - A string or a list of strings specifying the elements to keep. - Valid element types are: - - - 'coordinate_systems' - - 'images' - - 'labels' - - 'shapes' - - Returns - ------- - sd.SpatialData - A new spatial data object containing only the specified elements. - - Raises - ------ - TypeError - If `elements` is not a string or a list of strings. - If `elements` is a list of strings but one or more of the strings - are not valid element types. - - ValueError - If any of the specified elements is not present in the original - spatialdata object. - - AssertionError - If `label_keys` is not an empty list but the spatial data object - does not have a table or the table does not have 'uns' or 'obs' - attributes. 
- - Notes - ----- - If the original spatialdata object has a table, and `elements` - includes label keys, the returned spatialdata object will have a - subset of the original table with only the rows corresponding to the - specified label keys. The `region` attribute of the returned spatial - data object's table will be set to the list of specified label keys. - - If the original spatial data object has no table, or if `elements` does - not include label keys, the returned spatialdata object will have no - table. - """ - if not isinstance(elements, (str, list)): - raise TypeError("Parameter 'elements' must be a string or a list of strings.") - - if not all(isinstance(e, str) for e in elements): - raise TypeError("When parameter 'elements' is a list, all elements must be strings.") - - if isinstance(elements, str): - elements = [elements] - - coord_keys = [] - image_keys = [] - label_keys = [] - shape_keys = [] - point_keys = [] - - # prepare list of valid keys to sort elements on - valid_coord_keys = self._sdata.coordinate_systems if hasattr(self._sdata, "coordinate_systems") else None - valid_image_keys = list(self._sdata.images.keys()) if hasattr(self._sdata, "images") else None - valid_label_keys = list(self._sdata.labels.keys()) if hasattr(self._sdata, "labels") else None - valid_shape_keys = list(self._sdata.shapes.keys()) if hasattr(self._sdata, "shapes") else None - valid_point_keys = list(self._sdata.points.keys()) if hasattr(self._sdata, "points") else None - - # first, extract coordinate system keys becasuse they generate implicit keys - mapping = _get_coordinate_system_mapping(self._sdata) - implicit_keys = [] - for e in elements: - for valid_coord_key in valid_coord_keys: - if (valid_coord_keys is not None) and (e == valid_coord_key): - coord_keys.append(e) - implicit_keys += mapping[e] - - for e in elements + implicit_keys: - found = False - - if valid_coord_keys is not None: - for valid_coord_key in valid_coord_keys: - if e == valid_coord_key: - coord_keys.append(e) - found = True - - if valid_image_keys is not None: - for valid_image_key in valid_image_keys: - if e == valid_image_key: - image_keys.append(e) - found = True - - if valid_label_keys is not None: - for valid_label_key in valid_label_keys: - if e == valid_label_key: - label_keys.append(e) - found = True - - if valid_shape_keys is not None: - for valid_shape_key in valid_shape_keys: - if e == valid_shape_key: - shape_keys.append(e) - found = True - - if valid_point_keys is not None: - for valid_point_key in valid_point_keys: - if e == valid_point_key: - point_keys.append(e) - found = True - - if not found: - msg = f"Element '{e}' not found. 
Valid choices are:" - if valid_coord_keys is not None: - msg += "\n\ncoordinate_systems\n├ " - msg += "\n├ ".join(valid_coord_keys) - if valid_image_keys is not None: - msg += "\n\nimages\n├ " - msg += "\n├ ".join(valid_image_keys) - if valid_label_keys is not None: - msg += "\n\nlabels\n├ " - msg += "\n├ ".join(valid_label_keys) - if valid_shape_keys is not None: - msg += "\n\nshapes\n├ " - msg += "\n├ ".join(valid_shape_keys) - raise ValueError(msg) - - # copy that we hard-modify - sdata = self._copy() - - if (valid_coord_keys is not None) and (len(coord_keys) > 0): - sdata = sdata.filter_by_coordinate_system(coord_keys, filter_tables=False) - - elif len(coord_keys) == 0: - if valid_image_keys is not None: - if len(image_keys) == 0: - for valid_image_key in valid_image_keys: - del sdata.images[valid_image_key] - elif len(image_keys) > 0: - for valid_image_key in valid_image_keys: - if valid_image_key not in image_keys: - del sdata.images[valid_image_key] - - if valid_label_keys is not None: - if len(label_keys) == 0: - for valid_label_key in valid_label_keys: - del sdata.labels[valid_label_key] - elif len(label_keys) > 0: - for valid_label_key in valid_label_keys: - if valid_label_key not in label_keys: - del sdata.labels[valid_label_key] - - if valid_shape_keys is not None: - if len(shape_keys) == 0: - for valid_shape_key in valid_shape_keys: - del sdata.shapes[valid_shape_key] - elif len(shape_keys) > 0: - for valid_shape_key in valid_shape_keys: - if valid_shape_key not in shape_keys: - del sdata.shapes[valid_shape_key] - - if valid_point_keys is not None: - if len(point_keys) == 0: - for valid_point_key in valid_point_keys: - del sdata.points[valid_point_key] - elif len(point_keys) > 0: - for valid_point_key in valid_point_keys: - if valid_point_key not in point_keys: - del sdata.points[valid_point_key] - - # subset table if it is present and the region key is a valid column - if len(sdata.tables) != 0 and len(shape_keys + label_keys + point_keys) > 0: - for name, table in sdata.tables.items(): - assert hasattr(table, "uns"), "Table in SpatialData object does not have 'uns'." - assert hasattr(table, "obs"), "Table in SpatialData object does not have 'obs'." - - # create mask of used keys - _, region_key, _ = get_table_keys(table) - mask = table.obs[region_key] - mask = list(mask.str.contains("|".join(shape_keys + label_keys))) - - # create copy and delete original so we can reuse slot - old_table = table.copy() - new_table = old_table[mask, :].copy() - new_table.uns["spatialdata_attrs"]["region"] = list(set(new_table.obs[region_key])) - sdata.tables[name] = new_table - - else: - sdata.tables = {} - - return sdata - - def get_bb( - self, - x: Union[slice, list[int], tuple[int, int]] = (0, 0), - y: Union[slice, list[int], tuple[int, int]] = (0, 0), - ) -> sd.SpatialData: - """Get bounding box around a point. - - Parameters - ---------- - x : - x range of the bounding box. Stepsize will be ignored if slice - y : - y range of the bounding box. 
Stepsize will be ignored if slice - - Returns - ------- - sd.SpatialData - subsetted SpatialData object - """ - if not isinstance(x, (slice, list, tuple)): - raise TypeError("Parameter 'x' must be one of 'slice', 'list', 'tuple'.") - - if isinstance(x, (list, tuple)) and len(x) == 2: - if x[1] <= x[0]: - raise ValueError("The current choice of 'x' would result in an empty slice.") - - x = slice(x[0], x[1]) - - elif isinstance(x, slice): - if x.stop <= x.start: - raise ValueError("The current choice of 'x' would result in an empty slice.") - else: - raise ValueError("Parameter 'x' must be of length 2.") - - if not isinstance(y, (slice, list, tuple)): - raise TypeError("Parameter 'y' must be one of 'slice', 'list', 'tuple'.") - - if isinstance(y, (list, tuple)): - if len(y) != 2: - raise ValueError("Parameter 'y' must be of length 2.") - - if y[1] <= y[0]: - raise ValueError("The current choice of 'y' would result in an empty slice.") - - # y is clean - y = slice(y[0], y[1]) - - elif isinstance(y, slice) and y.stop <= y.start: - raise ValueError("The current choice of 'x' would result in an empty slice.") - - selection = {"x": x, "y": y} # makes use of xarray sel method - - # TODO: error handling if selection is out of bounds - cropped_images = {key: img.sel(selection) for key, img in self._sdata.images.items()} - cropped_labels = {key: img.sel(selection) for key, img in self._sdata.labels.items()} - - sdata = self._copy( - images=cropped_images, - labels=cropped_labels, - ) - self._sdata = _verify_plotting_tree(self._sdata) - - # get current number of steps to create a unique key - n_steps = len(self._sdata.plotting_tree.keys()) - sdata.plotting_tree[f"{n_steps+1}_get_bb"] = { - "x": x, - "y": y, - } - - return sdata diff --git a/src/spatialdata_plot/pp/utils.py b/src/spatialdata_plot/pp/utils.py deleted file mode 100644 index 2947f831..00000000 --- a/src/spatialdata_plot/pp/utils.py +++ /dev/null @@ -1,54 +0,0 @@ -from collections import OrderedDict - -import spatialdata as sd -from spatialdata.transformations import get_transformation - - -def _verify_plotting_tree(sdata: sd.SpatialData) -> sd.SpatialData: - """Verify that the plotting tree exists, and if not, create it.""" - if not hasattr(sdata, "plotting_tree"): - sdata.plotting_tree = OrderedDict() - - return sdata - - -def _get_coordinate_system_mapping(sdata: sd.SpatialData) -> dict[str, list[str]]: - coordsys_keys = sdata.coordinate_systems - image_keys = [] if sdata.images is None else sdata.images.keys() - label_keys = [] if sdata.labels is None else sdata.labels.keys() - shape_keys = [] if sdata.shapes is None else sdata.shapes.keys() - point_keys = [] if sdata.points is None else sdata.points.keys() - - mapping: dict[str, list[str]] = {} - - if len(coordsys_keys) < 1: - raise ValueError("SpatialData object must have at least one coordinate system to generate a mapping.") - - for key in coordsys_keys: - mapping[key] = [] - - for image_key in image_keys: - transformations = get_transformation(sdata.images[image_key], get_all=True) - - if key in list(transformations.keys()): - mapping[key].append(image_key) - - for label_key in label_keys: - transformations = get_transformation(sdata.labels[label_key], get_all=True) - - if key in list(transformations.keys()): - mapping[key].append(label_key) - - for shape_key in shape_keys: - transformations = get_transformation(sdata.shapes[shape_key], get_all=True) - - if key in list(transformations.keys()): - mapping[key].append(shape_key) - - for point_key in point_keys: - transformations = 
get_transformation(sdata.points[point_key], get_all=True) - - if key in list(transformations.keys()): - mapping[key].append(point_key) - - return mapping diff --git a/tests/conftest.py b/tests/conftest.py index dc409920..67ffab5a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -9,7 +9,6 @@ import pyarrow as pa import pytest import spatialdata as sd -import spatialdata_plot # noqa: F401 from anndata import AnnData from datatree import DataTree from geopandas import GeoDataFrame @@ -28,6 +27,8 @@ ) from xarray import DataArray +import spatialdata_plot # noqa: F401 + HERE: Path = Path(__file__).parent EXPECTED = HERE / "_images" diff --git a/tests/pl/test_get_extent.py b/tests/pl/test_get_extent.py index c1e5c48e..338a17b7 100644 --- a/tests/pl/test_get_extent.py +++ b/tests/pl/test_get_extent.py @@ -4,13 +4,13 @@ import matplotlib.pyplot as plt import numpy as np import scanpy as sc -import spatialdata_plot # noqa: F401 from geopandas import GeoDataFrame from shapely.geometry import MultiPolygon, Point, Polygon from spatialdata import SpatialData from spatialdata.models import PointsModel, ShapesModel from spatialdata.transformations import Affine, set_transformation +import spatialdata_plot # noqa: F401 from tests.conftest import DPI, PlotTester, PlotTesterMeta RNG = np.random.default_rng(seed=42) @@ -48,7 +48,7 @@ def test_plot_extent_calculation_respects_element_selection_circles_and_polygons sdata_blobs.pl.render_shapes("blobs_circles").pl.render_shapes("blobs_polygons").pl.show() def test_plot_extent_of_img_is_correct_after_spatial_query(self, sdata_blobs: SpatialData): - cropped_blobs = sdata_blobs.pp.get_elements(["blobs_image"]).query.bounding_box( + cropped_blobs = sdata_blobs.query.bounding_box( axes=["x", "y"], min_coordinate=[100, 100], max_coordinate=[400, 400], target_coordinate_system="global" ) cropped_blobs.pl.render_images().pl.show() diff --git a/tests/pl/test_render_images.py b/tests/pl/test_render_images.py index 38a285e6..ceea6da5 100644 --- a/tests/pl/test_render_images.py +++ b/tests/pl/test_render_images.py @@ -2,12 +2,12 @@ import matplotlib import numpy as np import scanpy as sc -import spatialdata_plot # noqa: F401 from matplotlib import pyplot as plt from matplotlib.colors import Normalize from spatial_image import to_spatial_image from spatialdata import SpatialData +import spatialdata_plot # noqa: F401 from tests.conftest import DPI, PlotTester, PlotTesterMeta RNG = np.random.default_rng(seed=42) diff --git a/tests/pl/test_render_labels.py b/tests/pl/test_render_labels.py index 639c51d9..d1e7c128 100644 --- a/tests/pl/test_render_labels.py +++ b/tests/pl/test_render_labels.py @@ -5,12 +5,12 @@ import pandas as pd import pytest import scanpy as sc -import spatialdata_plot # noqa: F401 from anndata import AnnData from spatial_image import to_spatial_image from spatialdata import SpatialData, deepcopy, get_element_instances from spatialdata.models import TableModel +import spatialdata_plot # noqa: F401 from tests.conftest import DPI, PlotTester, PlotTesterMeta RNG = np.random.default_rng(seed=42) diff --git a/tests/pl/test_render_points.py b/tests/pl/test_render_points.py index 472b3c92..29278f47 100644 --- a/tests/pl/test_render_points.py +++ b/tests/pl/test_render_points.py @@ -2,11 +2,11 @@ import numpy as np import pandas as pd import scanpy as sc -import spatialdata_plot # noqa: F401 from anndata import AnnData from spatialdata import SpatialData from spatialdata.models import TableModel +import spatialdata_plot # noqa: F401 from tests.conftest 
import DPI, PlotTester, PlotTesterMeta RNG = np.random.default_rng(seed=42) diff --git a/tests/pl/test_render_shapes.py b/tests/pl/test_render_shapes.py index 442fcb69..9f26777f 100644 --- a/tests/pl/test_render_shapes.py +++ b/tests/pl/test_render_shapes.py @@ -4,12 +4,12 @@ import numpy as np import pandas as pd import scanpy as sc -import spatialdata_plot # noqa: F401 from anndata import AnnData from shapely.geometry import MultiPolygon, Point, Polygon from spatialdata import SpatialData, deepcopy from spatialdata.models import ShapesModel, TableModel +import spatialdata_plot # noqa: F401 from tests.conftest import DPI, PlotTester, PlotTesterMeta RNG = np.random.default_rng(seed=42) diff --git a/tests/pl/test_show.py b/tests/pl/test_show.py index acd6a77b..cd775fb5 100644 --- a/tests/pl/test_show.py +++ b/tests/pl/test_show.py @@ -1,9 +1,9 @@ import matplotlib import numpy as np import scanpy as sc -import spatialdata_plot # noqa: F401 from spatialdata import SpatialData +import spatialdata_plot # noqa: F401 from tests.conftest import DPI, PlotTester, PlotTesterMeta RNG = np.random.default_rng(seed=42) diff --git a/tests/pl/test_upstream_plots.py b/tests/pl/test_upstream_plots.py index cc8be529..d052a8a5 100644 --- a/tests/pl/test_upstream_plots.py +++ b/tests/pl/test_upstream_plots.py @@ -4,7 +4,6 @@ import matplotlib.pyplot as plt import numpy as np import scanpy as sc -import spatialdata_plot # noqa: F401 from spatialdata import SpatialData from spatialdata.transformations import ( Affine, @@ -15,6 +14,7 @@ set_transformation, ) +import spatialdata_plot # noqa: F401 from tests.conftest import DPI, PlotTester, PlotTesterMeta RNG = np.random.default_rng(seed=42) diff --git a/tests/pl/test_utils.py b/tests/pl/test_utils.py index 498c738d..756737e4 100644 --- a/tests/pl/test_utils.py +++ b/tests/pl/test_utils.py @@ -5,10 +5,10 @@ import numpy as np import pytest import scanpy as sc -import spatialdata_plot from spatialdata import SpatialData -from spatialdata_plot.pl.utils import _get_subplots, _sanitise_na_color +import spatialdata_plot +from spatialdata_plot.pl.utils import _get_subplots, _sanitise_na_color from tests.conftest import DPI, PlotTester, PlotTesterMeta SEED = 42 diff --git a/tests/pp/test_basic.py b/tests/pp/test_basic.py deleted file mode 100644 index 2bced3fe..00000000 --- a/tests/pp/test_basic.py +++ /dev/null @@ -1,47 +0,0 @@ -import pytest - - -@pytest.mark.parametrize( - "sdata, keys ", - [ - ("get_sdata_with_multiple_images", 0), - ("get_sdata_with_multiple_images", {"a": 0}), - ("get_sdata_with_multiple_images", None), - ("get_sdata_with_multiple_images", ["my_key", 0]), - ], -) -def test_typerror_when_key_is_invalid(sdata, keys, request): - """Tests wether the images inside sdata can be clipped to a bounding box.""" - sdata = request.getfixturevalue(sdata)(share_coordinate_system="all") - - with pytest.raises(TypeError): - sdata.pp.get_elements(keys) - - -@pytest.mark.parametrize( - "sdata, keys ", - [ - ("get_sdata_with_multiple_images", "data4"), - ("get_sdata_with_multiple_images", ["data1", "data4"]), - ], -) -def test_valuerror_when_key_is_of_correct_type_but_not_in_sdata(sdata, keys, request): - sdata = request.getfixturevalue(sdata)(share_coordinate_system="all") - - with pytest.raises(ValueError): - sdata.pp.get_elements(keys) - - -def test_get_elements_correctly_filters_coordinate_systems(request): - """Tests that get_elements correctly filters coordinate systems by their name.""" - - fun = request.getfixturevalue("get_sdata_with_multiple_images") - 
sdata_all_cs_shared = fun("all") - sdata_no_cs_shared = fun("none") - sdata_two_cs_shared = fun("two") - sdata_two_cs_similar_name = fun("similar_name") - - assert len(sdata_all_cs_shared.images.keys()) == 3 - assert len(sdata_two_cs_shared.pp.get_elements("coord_sys1").images.keys()) == 2 - assert len(sdata_no_cs_shared.pp.get_elements("coord_sys1").images.keys()) == 1 - assert len(sdata_two_cs_similar_name.pp.get_elements("coord_sys1").images.keys()) == 1 diff --git a/tests/test_pp.py b/tests/test_pp.py deleted file mode 100644 index c40efd67..00000000 --- a/tests/test_pp.py +++ /dev/null @@ -1,152 +0,0 @@ -import pytest - - -@pytest.mark.parametrize( - "sdata, keys", - [ - ("get_sdata_with_multiple_images", "data1"), - ("get_sdata_with_multiple_images", ["data1"]), - ("get_sdata_with_multiple_images", ["data1", "data2"]), - ], -) -def test_can_subset_to_one_or_more_images(sdata, keys, request): - """Tests whether a subset of images can be selected from the sdata object.""" - - sdata = request.getfixturevalue(sdata)(share_coordinate_system="all") - - clipped_sdata = sdata.pp.get_elements(keys) - - assert list(clipped_sdata.images.keys()) == ([keys] if isinstance(keys, str) else keys) - - -@pytest.mark.parametrize( - "sdata", - [ - "test_sdata_single_image", - "test_sdata_multiple_images", - "test_sdata_single_image_with_label", - "test_sdata_multiple_images_with_table", - # "full_sdata" that one is broken - ], -) -def test_get_bb_correct_inputs(sdata, request): - """Tests whether a subset of images can be selected from the sdata object.""" - sdata = request.getfixturevalue(sdata) - - # use all possible inputs - sliced_slice = sdata.pp.get_bb(slice(0, 5), slice(0, 5)) - sliced_list = sdata.pp.get_bb([0, 5], [0, 5]) - sliced_tuple = sdata.pp.get_bb((0, 5), (0, 5)) - - for sliced_object in [sliced_slice, sliced_list, sliced_tuple]: - for _k, v in sliced_object.images.items(): - # test if images have the correct dimensionality - assert v.shape[1] == 5 - assert v.shape[2] == 5 - - if hasattr(sliced_object, "labels"): - for _k, v in sliced_object.labels.items(): - # test if images have the correct dimensionality - assert v.shape[0] == 5 - assert v.shape[1] == 5 - - # check if the plotting tree was appended - assert hasattr(sliced_object, "plotting_tree") - - -@pytest.mark.parametrize( - "sdata", - [ - "test_sdata_single_image", - "test_sdata_multiple_images", - "test_sdata_single_image_with_label", - "test_sdata_multiple_images_with_table", - # "full_sdata" that one is broken - ], -) -def test_get_bb_wrong_input_types(sdata, request): - """Tests whether a subset of images can be selected from the sdata object.""" - sdata = request.getfixturevalue(sdata) - - with pytest.raises(TypeError, match="Parameter 'x' must be one "): - sdata.pp.get_bb(4, 5) - - -@pytest.mark.parametrize( - "sdata", - [ - "test_sdata_single_image", - "test_sdata_multiple_images", - "test_sdata_single_image_with_label", - "test_sdata_multiple_images_with_table", - # "full_sdata" that one is broken - ], -) -def test_get_bb_wrong_input_dims(sdata, request): - """Tests whether a subset of images can be selected from the sdata object.""" - sdata = request.getfixturevalue(sdata) - - # x values - with pytest.raises(ValueError, match="The current choice of 'x' would result in an empty slice."): - sdata.pp.get_bb(slice(5, 0), slice(0, 5)) - - with pytest.raises(ValueError, match="The current choice of 'x' would result in an empty slice."): - sdata.pp.get_bb(slice(5, 5), slice(0, 5)) - - with pytest.raises(ValueError, match="The 
current choice of 'x' would result in an empty slice."): - sdata.pp.get_bb([5, 0], [0, 5]) - - with pytest.raises(ValueError, match="The current choice of 'x' would result in an empty slice."): - sdata.pp.get_bb([5, 5], [0, 5]) - - with pytest.raises(ValueError, match="The current choice of 'x' would result in an empty slice."): - sdata.pp.get_bb((5, 0), (0, 5)) - - with pytest.raises(ValueError, match="The current choice of 'x' would result in an empty slice."): - sdata.pp.get_bb((5, 5), (0, 5)) - - # y values - with pytest.raises(ValueError, match="The current choice of 'x' would result in an empty slice."): - sdata.pp.get_bb(slice(0, 5), slice(5, 0)) - - with pytest.raises(ValueError, match="The current choice of 'x' would result in an empty slice."): - sdata.pp.get_bb(slice(0, 5), slice(5, 5)) - - with pytest.raises(ValueError, match="The current choice of 'y' would result in an empty slice."): - sdata.pp.get_bb([0, 5], [5, 0]) - - with pytest.raises(ValueError, match="The current choice of 'y' would result in an empty slice."): - sdata.pp.get_bb([0, 5], [5, 5]) - - with pytest.raises(ValueError, match="The current choice of 'y' would result in an empty slice."): - sdata.pp.get_bb((0, 5), (5, 0)) - - with pytest.raises(ValueError, match="The current choice of 'y' would result in an empty slice."): - sdata.pp.get_bb((0, 5), (5, 5)) - - -@pytest.mark.parametrize( - "sdata", - [ - "test_sdata_single_image", - "test_sdata_multiple_images", - "test_sdata_single_image_with_label", - "test_sdata_multiple_images_with_table", - # "full_sdata" that one is broken - ], -) -def test_get_bb_wrong_input_length(sdata, request): - """Tests whether a subset of images can be selected from the sdata object.""" - sdata = request.getfixturevalue(sdata) - - with pytest.raises(ValueError, match="Parameter 'x' must be of length 2."): - sdata.pp.get_bb([0, 5, 6], [0, 5]) - - with pytest.raises(ValueError, match="Parameter 'x' must be of length 2."): - sdata.pp.get_bb((0, 5, 1), (0, 5)) - - with pytest.raises(ValueError, match="Parameter 'y' must be of length 2."): - sdata.pp.get_bb([0, 5], [0, 5, 5]) - - with pytest.raises(ValueError, match="Parameter 'y' must be of length 2."): - sdata.pp.get_bb((0, 5), (0, 5, 2))
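With the `.pp` accessor removed, subsetting goes through the core SpatialData API instead of `pp.get_elements`/`pp.get_bb`. A minimal migration sketch, assuming the multi-coordinate-system MIBI-TOF dataset from the deleted notebook (the Zarr path and the `point16` names come from that example; `filter_by_coordinate_system` and `query.bounding_box` are the calls already used in the deleted accessor code and in the updated `tests/pl/test_get_extent.py`):

```python
import spatialdata as sd
import spatialdata_plot  # noqa: F401  # registers the .pl accessor

# Placeholder path to the MIBI-TOF store from the spatialdata-sandbox repo.
mibi = sd.read_zarr("mibitof/data.zarr")

# Previously: mibi.pp.get_elements("point16")
subset = mibi.filter_by_coordinate_system("point16", filter_tables=False)

# Previously: mibi.pp.get_elements("point16").pp.get_bb([200, 500], [200, 500])
# The bounding-box query mirrors the updated call in tests/pl/test_get_extent.py.
cropped = subset.query.bounding_box(
    axes=["x", "y"],
    min_coordinate=[200, 200],
    max_coordinate=[500, 500],
    target_coordinate_system="point16",
)

cropped.pl.render_images().pl.show()
```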