From 7c280b701117588c07023b8495c50a3675641c58 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Tue, 28 Jan 2025 14:45:35 +0100
Subject: [PATCH 1/5] [pre-commit.ci] pre-commit autoupdate (#848)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

updates:
- [github.com/astral-sh/ruff-pre-commit: v0.9.2 → v0.9.3](https://github.com/astral-sh/ruff-pre-commit/compare/v0.9.2...v0.9.3)

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 .pre-commit-config.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index dd9103ab..985d6c9d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -27,7 +27,7 @@ repos:
         additional_dependencies: [numpy, types-requests]
         exclude: tests/|docs/
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.9.2
+    rev: v0.9.3
    hooks:
      - id: ruff
        args: [--fix, --exit-non-zero-on-fix]

From bc5a917fcc8e4f228f4d87329e27fe4f0586dbe6 Mon Sep 17 00:00:00 2001
From: Luca Marconato
Date: Thu, 30 Jan 2025 14:49:07 +0100
Subject: [PATCH 2/5] fix JoinTypes.__dict__ empty python 3.13

---
 .github/workflows/test.yaml                     | 2 +-
 src/spatialdata/_core/query/relational_query.py | 7 +++++--
 2 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml
index 1d7a79b9..fa5f44e1 100644
--- a/.github/workflows/test.yaml
+++ b/.github/workflows/test.yaml
@@ -18,7 +18,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python: ["3.10", "3.12"]
+        python: ["3.10", "3.12", "3.13"]
         os: [ubuntu-latest]
         include:
           - os: macos-latest
diff --git a/src/spatialdata/_core/query/relational_query.py b/src/spatialdata/_core/query/relational_query.py
index 4154c5f2..7d65ad14 100644
--- a/src/spatialdata/_core/query/relational_query.py
+++ b/src/spatialdata/_core/query/relational_query.py
@@ -693,8 +693,11 @@ def _call_join(
         raise TypeError(
             f"`{match_rows}` is an invalid argument for `match_rows`. Can be either `no`, ``'left'`` or ``'right'``"
         )
-    if how in JoinTypes.__dict__["_member_names_"]:
-        elements_dict, table = JoinTypes[how](elements_dict, table, match_rows)
+    # bug with Python 3.13 (https://github.com/scverse/spatialdata/issues/852)
+    # if how in JoinTypes.__dict__["_member_names_"]:
+    # hotfix for bug with Python 3.13:
+    if how in JoinTypes.__dict__:
+        elements_dict, table = getattr(JoinTypes, how)(elements_dict, table, match_rows)
     else:
         raise TypeError(f"`{how}` is not a valid type of join.")

From fbdd0cb413ae761a2a65d730bc659ebae4dc593e Mon Sep 17 00:00:00 2001
From: Lukas Heumos
Date: Fri, 31 Jan 2025 10:19:44 +0100
Subject: [PATCH 3/5] Use Ruff for code formatting (#851)

Signed-off-by: Lukas Heumos
---
 .pre-commit-config.yaml                        |  9 +-------
 benchmarks/spatialdata_benchmark.py            |  1 -
 benchmarks/utils.py                            |  2 +-
 docs/extensions/typed_returns.py               |  2 +-
 pyproject.toml                                 | 22 +------------------
 src/spatialdata/_core/operations/aggregate.py  | 10 ++++-----
 src/spatialdata/_core/operations/rasterize.py  |  6 ++---
 src/spatialdata/_core/operations/transform.py  |  6 ++---
 src/spatialdata/_core/operations/vectorize.py  |  4 ++--
 .../_core/query/relational_query.py            |  6 ++---
 src/spatialdata/_core/spatialdata.py           |  8 +++----
 src/spatialdata/_io/_utils.py                  |  5 ++---
 src/spatialdata/dataloader/datasets.py         |  6 ++---
 src/spatialdata/testing.py                     |  6 ++---
 src/spatialdata/transformations/operations.py  |  2 +-
 tests/conftest.py                              |  3 +--
 tests/core/operations/test_rasterize_bins.py   |  2 +-
 tests/core/query/test_spatial_query.py         |  7 ++++--
 tests/io/test_metadata.py                      |  3 +--
 tests/io/test_multi_table.py                   |  2 +-
 tests/models/test_models.py                    |  6 ++---
 tests/transformations/test_transformations.py  |  2 +-
 22 files changed, 45 insertions(+), 75 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 985d6c9d..5db24359 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -8,18 +8,10 @@ minimum_pre_commit_version: 2.16.0
 ci:
   skip: []
 repos:
-  - repo: https://github.com/psf/black
-    rev: 24.10.0
-    hooks:
-      - id: black
   - repo: https://github.com/rbubley/mirrors-prettier
     rev: v3.4.2
     hooks:
      - id: prettier
-  - repo: https://github.com/asottile/blacken-docs
-    rev: 1.19.1
-    hooks:
-      - id: blacken-docs
   - repo: https://github.com/pre-commit/mirrors-mypy
     rev: v1.14.1
     hooks:
@@ -31,3 +23,4 @@ repos:
     hooks:
       - id: ruff
         args: [--fix, --exit-non-zero-on-fix]
+      - id: ruff-format
diff --git a/benchmarks/spatialdata_benchmark.py b/benchmarks/spatialdata_benchmark.py
index af383556..408ad14e 100644
--- a/benchmarks/spatialdata_benchmark.py
+++ b/benchmarks/spatialdata_benchmark.py
@@ -44,7 +44,6 @@ def time_map_blocks(self, _):
 
 
 class TimeQueries:
-
     params = ([100, 1_000, 10_000], [True, False], [100, 1_000])
     param_names = ["length", "filter_table", "n_transcripts_per_cell"]
 
diff --git a/benchmarks/utils.py b/benchmarks/utils.py
index 75f566b7..9c865d58 100644
--- a/benchmarks/utils.py
+++ b/benchmarks/utils.py
@@ -8,7 +8,7 @@
 - function run_benchmark is used to run the benchmarks.
 
 Performant dataset generation functions so the benchmarks run fast even for large artificial datasets.
-The object is to generate a dataset containing many cells. By copying the same cell values instead of 
+The object is to generate a dataset containing many cells. By copying the same cell values instead of
 doing gaussian blur on the whole image, we can generate the same dataset in a fraction of the time.
 - function labeled_particles is used to generate labeled blobs.
 - function _generate_ball is used to generate a ball of given radius and dimension.
diff --git a/docs/extensions/typed_returns.py b/docs/extensions/typed_returns.py
index 94478130..d044c698 100644
--- a/docs/extensions/typed_returns.py
+++ b/docs/extensions/typed_returns.py
@@ -11,7 +11,7 @@ def _process_return(lines):
         m = re.fullmatch(r"(?P<param>\w+)\s+:\s+(?P<type>[\w.]+)", line)
         if m:
             # Once this is in scanpydoc, we can use the fancy hover stuff
-            yield f'-{m["param"]} (:class:`~{m["type"]}`)'
+            yield f"-{m['param']} (:class:`~{m['type']}`)"
         else:
             yield line
 
diff --git a/pyproject.toml b/pyproject.toml
index 0474f6f0..e01ffb56 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -103,27 +103,6 @@ filterwarnings = [
     # "ignore:.*U.*mode is deprecated:DeprecationWarning",
 ]
 
-[tool.black]
-line-length = 120
-target-version = ['py310']
-include = '\.pyi?$'
-exclude = '''
-(
-  /(
-      \.eggs
-    | \.git
-    | \.hg
-    | \.mypy_cache
-    | \.tox
-    | \.venv
-    | _build
-    | buck-out
-    | build
-    | dist
-  )/
-)
-'''
-
 [tool.jupytext]
 formats = "ipynb,md"
 
@@ -148,6 +127,7 @@ exclude = [
     "docs/_build",
     "dist",
     "setup.py",
+
 ]
 line-length = 120
 target-version = "py310"
diff --git a/src/spatialdata/_core/operations/aggregate.py b/src/spatialdata/_core/operations/aggregate.py
index dde0338a..0e89bf11 100644
--- a/src/spatialdata/_core/operations/aggregate.py
+++ b/src/spatialdata/_core/operations/aggregate.py
@@ -180,9 +180,9 @@ def aggregate(
     ONES_KEY = None
     if value_key is None:
         ONES_KEY = "__ones_column_aggregate"
-        assert (
-            ONES_KEY not in values_.columns
-        ), f"Column {ONES_KEY} is reserved for internal use and cannot be already present in values_"
+        assert ONES_KEY not in values_.columns, (
+            f"Column {ONES_KEY} is reserved for internal use and cannot be already present in values_"
+        )
         values_[ONES_KEY] = 1
         value_key = ONES_KEY
 
@@ -384,10 +384,10 @@ def _aggregate_shapes(
            "agg_func='sum' instead."
        )
    assert not isinstance(values.iloc[0].geometry, Point), (
-        "Fractions cannot be computed when values are points. " "Please use fractions=False."
+        "Fractions cannot be computed when values are points. Please use fractions=False."
    )
    assert not (categorical and agg_func == "mean"), (
-        "Incompatible choice: aggregating a categorical column with " "agg_func='mean'"
+        "Incompatible choice: aggregating a categorical column with agg_func='mean'"
    )
 
    # we need to add a column of ones to the values dataframe to be able to count the number of instances in each zone
diff --git a/src/spatialdata/_core/operations/rasterize.py b/src/spatialdata/_core/operations/rasterize.py
index 5acb5e4e..6bcca5ec 100644
--- a/src/spatialdata/_core/operations/rasterize.py
+++ b/src/spatialdata/_core/operations/rasterize.py
@@ -678,9 +678,9 @@ def rasterize_shapes_points(
 
    elif isinstance(agg_func, str):
        AGGREGATIONS = ["sum", "count", "count_cat", "first"]
-        assert np.isin(
-            agg_func, AGGREGATIONS
-        ), f"Aggregation function must be one of {', '.join(AGGREGATIONS)}. Found {agg_func}"
+        assert np.isin(agg_func, AGGREGATIONS), (
+            f"Aggregation function must be one of {', '.join(AGGREGATIONS)}. Found {agg_func}"
+        )
 
    assert agg_func == "count" or value_key is not None, f"value_key cannot be done for agg_func={agg_func}"
 
diff --git a/src/spatialdata/_core/operations/transform.py b/src/spatialdata/_core/operations/transform.py
index b92b7757..d74bbf2f 100644
--- a/src/spatialdata/_core/operations/transform.py
+++ b/src/spatialdata/_core/operations/transform.py
@@ -287,9 +287,9 @@ def _(
        if transformation is None and to_coordinate_system is not None:
            return data.transform_to_coordinate_system(target_coordinate_system=to_coordinate_system)
        raise RuntimeError(ERROR_MSG_AFTER_0_0_15)
-    assert bool(transformation is None) != bool(
-        to_coordinate_system is None
-    ), "When maintain_positioning is True, only one of transformation and to_coordinate_system can be None"
+    assert bool(transformation is None) != bool(to_coordinate_system is None), (
+        "When maintain_positioning is True, only one of transformation and to_coordinate_system can be None"
+    )
    new_elements: dict[str, dict[str, Any]] = {}
    for element_type in ["images", "labels", "points", "shapes"]:
        d = getattr(data, element_type)
diff --git a/src/spatialdata/_core/operations/vectorize.py b/src/spatialdata/_core/operations/vectorize.py
index d750d12e..c34ce525 100644
--- a/src/spatialdata/_core/operations/vectorize.py
+++ b/src/spatialdata/_core/operations/vectorize.py
@@ -113,7 +113,7 @@ def _(element: GeoDataFrame, **kwargs: Any) -> GeoDataFrame:
        return _make_circles(element, obs)
    if isinstance(element.geometry.iloc[0], Point):
        return element
-    raise RuntimeError("Unsupported geometry type: " f"{type(element.geometry.iloc[0])}")
+    raise RuntimeError(f"Unsupported geometry type: {type(element.geometry.iloc[0])}")
 
 
 @to_circles.register(DaskDataFrame)
@@ -281,7 +281,7 @@ def _(gdf: GeoDataFrame, buffer_resolution: int = 16) -> GeoDataFrame:
        return buffered_df
    assert isinstance(gdf.geometry.iloc[0], Polygon | MultiPolygon)
    return gdf
-    raise RuntimeError("Unsupported geometry type: " f"{type(gdf.geometry.iloc[0])}")
+    raise RuntimeError(f"Unsupported geometry type: {type(gdf.geometry.iloc[0])}")
 
 
 @to_polygons.register(DaskDataFrame)
diff --git a/src/spatialdata/_core/query/relational_query.py b/src/spatialdata/_core/query/relational_query.py
index 4154c5f2..b2413b27 100644
--- a/src/spatialdata/_core/query/relational_query.py
+++ b/src/spatialdata/_core/query/relational_query.py
@@ -164,9 +164,9 @@ def _filter_table_by_elements(
    """
    assert set(elements_dict.keys()).issubset({"images", "labels", "shapes", "points"})
    assert len(elements_dict) > 0, "elements_dict must not be empty"
-    assert any(
-        len(elements) > 0 for elements in elements_dict.values()
-    ), "elements_dict must contain at least one dict which contains at least one element"
+    assert any(len(elements) > 0 for elements in elements_dict.values()), (
+        "elements_dict must contain at least one dict which contains at least one element"
+    )
    if table is None:
        return None
    to_keep = np.zeros(len(table), dtype=bool)
diff --git a/src/spatialdata/_core/spatialdata.py b/src/spatialdata/_core/spatialdata.py
index a931839b..c4cce214 100644
--- a/src/spatialdata/_core/spatialdata.py
+++ b/src/spatialdata/_core/spatialdata.py
@@ -158,7 +158,6 @@ def __init__(
            "For renaming, please see the discussion here https://github.com/scverse/spatialdata/discussions/707 .",
            exc_type=(ValueError, KeyError),
        ) as collect_error:
-
            if images is not None:
                for k, v in images.items():
                    with collect_error(location=("images", k)):
@@ -1715,7 +1714,6 @@ def get_attrs(
        """
 
        def _flatten_mapping(m: Mapping[str, Any], parent_key: str = "", sep: str = "_") -> dict[str, Any]:
-
            items: list[tuple[str, Any]] = []
            for k, v in m.items():
                new_key = f"{parent_key}{sep}{k}" if parent_key else k
@@ -2007,7 +2005,7 @@ def h(s: str) -> str:
                descr += f"{h('empty_line')}"
                descr_class = v.__class__.__name__
                if attr == "shapes":
-                    descr += f"{h(attr + 'level1.1')}{k!r}: {descr_class} " f"shape: {v.shape} (2D shapes)"
+                    descr += f"{h(attr + 'level1.1')}{k!r}: {descr_class} shape: {v.shape} (2D shapes)"
                elif attr == "points":
                    length: int | None = None
                    if len(v.dask) == 1:
@@ -2037,7 +2035,7 @@ def h(s: str) -> str:
                        + ", ".join([str(dim) if not isinstance(dim, Delayed) else "" for dim in v.shape])
                        + ")"
                    )
-                    descr += f"{h(attr + 'level1.1')}{k!r}: {descr_class} " f"with shape: {shape_str} {dim_string}"
+                    descr += f"{h(attr + 'level1.1')}{k!r}: {descr_class} with shape: {shape_str} {dim_string}"
                elif attr == "tables":
                    descr += f"{h(attr + 'level1.1')}{k!r}: {descr_class} {v.shape}"
                else:
@@ -2055,7 +2053,7 @@ def h(s: str) -> str:
                        if dims is None:
                            dims = "".join(vv.dims)
                        shapes.append(shape)
-                    descr += f"{h(attr + 'level1.1')}{k!r}: {descr_class}[{dims}] " f"{', '.join(map(str, shapes))}"
+                    descr += f"{h(attr + 'level1.1')}{k!r}: {descr_class}[{dims}] {', '.join(map(str, shapes))}"
                else:
                    raise TypeError(f"Unknown type {type(v)}")
        if last_attr is True:
diff --git a/src/spatialdata/_io/_utils.py b/src/spatialdata/_io/_utils.py
index cf52edda..71947261 100644
--- a/src/spatialdata/_io/_utils.py
+++ b/src/spatialdata/_io/_utils.py
@@ -47,7 +47,7 @@ def ome_zarr_logger(level: Any) -> Generator[None, None, None]:
 
 
 def _get_transformations_from_ngff_dict(
-    list_of_encoded_ngff_transformations: list[dict[str, Any]]
+    list_of_encoded_ngff_transformations: list[dict[str, Any]],
 ) -> MappingToCoordinateSystem_t:
    list_of_ngff_transformations = [NgffBaseTransformation.from_dict(d) for d in list_of_encoded_ngff_transformations]
    list_of_transformations = [BaseTransformation.from_ngff(t) for t in list_of_ngff_transformations]
@@ -118,8 +118,7 @@ def overwrite_channel_names(group: zarr.Group, element: DataArray | DataTree) ->
    multiscales_meta = group.attrs["multiscales"]
    if len(multiscales_meta) != 1:
        raise ValueError(
-            f"Multiscale metadata must be of length one but got length {len(multiscales_meta)}. Data might"
-            f"be corrupted."
+            f"Multiscale metadata must be of length one but got length {len(multiscales_meta)}. Data mightbe corrupted."
        )
    multiscales_meta[0]["metadata"]["omero"]["channels"] = channel_metadata
    group.attrs["multiscales"] = multiscales_meta
diff --git a/src/spatialdata/dataloader/datasets.py b/src/spatialdata/dataloader/datasets.py
index e4aee42c..bbefdf34 100644
--- a/src/spatialdata/dataloader/datasets.py
+++ b/src/spatialdata/dataloader/datasets.py
@@ -163,9 +163,9 @@ def _validate(
            raise ValueError("`table_name` must be provided if `return_annotations` is not `None`.")
 
        # check that the regions specified in the two dicts are the same
-        assert set(regions_to_images.keys()) == set(
-            regions_to_coordinate_systems.keys()
-        ), "The keys in `regions_to_images` and `regions_to_coordinate_systems` must be the same."
+        assert set(regions_to_images.keys()) == set(regions_to_coordinate_systems.keys()), (
+            "The keys in `regions_to_images` and `regions_to_coordinate_systems` must be the same."
+        )
 
        self.regions = list(regions_to_coordinate_systems.keys())  # all regions for the dataloader
        cs_region_image: list[tuple[str, str, str]] = []  # list of tuples (coordinate_system, region, image)
diff --git a/src/spatialdata/testing.py b/src/spatialdata/testing.py
index 253f6e50..18199a23 100644
--- a/src/spatialdata/testing.py
+++ b/src/spatialdata/testing.py
@@ -106,9 +106,9 @@ def assert_elements_are_identical(
    if check_transformations:
        assert transformations0.keys() == transformations1.keys()
        for key in transformations0:
-            assert (
-                transformations0[key] == transformations1[key]
-            ), f"transformations0[{key}] != transformations1[{key}]"
+            assert transformations0[key] == transformations1[key], (
+                f"transformations0[{key}] != transformations1[{key}]"
+            )
 
    # compare the elements
    if isinstance(element0, AnnData):
diff --git a/src/spatialdata/transformations/operations.py b/src/spatialdata/transformations/operations.py
index d9ceb61c..f08c3911 100644
--- a/src/spatialdata/transformations/operations.py
+++ b/src/spatialdata/transformations/operations.py
@@ -63,7 +63,7 @@ def set_transformation(
            _set_transformations(element, transformations)
    else:
        assert isinstance(transformation, dict), (
-            "If set_all=True, transformation must be of type " "dict[str, BaseTransformation]."
+            "If set_all=True, transformation must be of type dict[str, BaseTransformation]."
        )
        assert to_coordinate_system is None, "If set_all=True, to_coordinate_system must be None."
        _set_transformations(element, transformation)
diff --git a/tests/conftest.py b/tests/conftest.py
index 2cfa67f4..5ced646e 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -117,8 +117,7 @@ def full_sdata() -> SpatialData:
 
 @pytest.fixture(
    # params=["labels"]
-    params=["full", "empty"]
-    + ["images", "labels", "points", "table_single_annotation", "table_multiple_annotations"]
+    params=["full", "empty"] + ["images", "labels", "points", "table_single_annotation", "table_multiple_annotations"]
    # + ["empty_" + x for x in ["table"]]  # TODO: empty table not supported yet
 )
 def sdata(request) -> SpatialData:
diff --git a/tests/core/operations/test_rasterize_bins.py b/tests/core/operations/test_rasterize_bins.py
index 3d66ca9f..18931f3f 100644
--- a/tests/core/operations/test_rasterize_bins.py
+++ b/tests/core/operations/test_rasterize_bins.py
@@ -172,7 +172,7 @@ def _get_sdata(n: int):
    table.obs["region"] = regions
    with pytest.raises(
        ValueError,
-        match="Found multiple regions annotated by the table: " "points, shapes.",
+        match="Found multiple regions annotated by the table: points, shapes.",
    ):
        _ = rasterize_bins(
            sdata=sdata,
diff --git a/tests/core/query/test_spatial_query.py b/tests/core/query/test_spatial_query.py
index b4575e8d..65905a97 100644
--- a/tests/core/query/test_spatial_query.py
+++ b/tests/core/query/test_spatial_query.py
@@ -280,7 +280,11 @@ def test_query_raster(
    model = (
        Labels3DModel
        if is_labels and is_3d
-        else Labels2DModel if is_labels else Image3DModel if is_3d else Image2DModel
+        else Labels2DModel
+        if is_labels
+        else Image3DModel
+        if is_3d
+        else Image2DModel
    )
 
    image_element = model.parse(image)
@@ -420,7 +424,6 @@ def test_query_polygons(is_bb_3d: bool, with_polygon_query: bool, multiple_boxes
        assert isinstance(polygons_result, list)
        assert len(polygons_result) == 2
        if box_outside_polygon:
-
            assert polygons_result[0] is None
            assert polygons_result[1].index[0] == 3
        else:
diff --git a/tests/io/test_metadata.py b/tests/io/test_metadata.py
index f0c6ff6a..bb993b00 100644
--- a/tests/io/test_metadata.py
+++ b/tests/io/test_metadata.py
@@ -42,8 +42,7 @@ def test_validate_can_write_metadata_on_element(full_sdata, element_name):
    # trying to save metadata before writing the data
    with pytest.warns(
        UserWarning,
-        match="The SpatialData object appears not to be backed by a Zarr storage, so metadata cannot be "
-        "written.",
+        match="The SpatialData object appears not to be backed by a Zarr storage, so metadata cannot be written.",
    ):
        full_sdata._validate_can_write_metadata_on_element(element_name)
 
diff --git a/tests/io/test_multi_table.py b/tests/io/test_multi_table.py
index b49b9bd4..dd43cfa8 100644
--- a/tests/io/test_multi_table.py
+++ b/tests/io/test_multi_table.py
@@ -99,7 +99,7 @@ def test_set_table_annotates_spatialelement(self, full_sdata, tmp_path):
        tmpdir = Path(tmp_path) / "tmp.zarr"
        del full_sdata["table"].uns[TableModel.ATTRS_KEY]
        with pytest.raises(
-            TypeError, match="No current annotation metadata found. " "Please specify both region_key and instance_key."
+            TypeError, match="No current annotation metadata found. Please specify both region_key and instance_key."
        ):
            full_sdata.set_table_annotates_spatialelement("table", "labels2d", region_key="non_existent")
        with pytest.raises(ValueError, match="Instance key column 'non_existent' not found in table.obs."):
diff --git a/tests/models/test_models.py b/tests/models/test_models.py
index f5889f96..012ca03e 100644
--- a/tests/models/test_models.py
+++ b/tests/models/test_models.py
@@ -213,9 +213,9 @@ def test_labels_model_with_multiscales(self, model):
        assert actual.scale0.image.dtype == image.dtype
        assert actual.scale1.image.dtype == image.dtype
        assert set(np.unique(image)) == set(np.unique(actual.scale0.image)), "Scale0 should be preserved"
-        assert set(np.unique(image)) >= set(
-            np.unique(actual.scale1.image)
-        ), "Subsequent scales should not have interpolation artifacts"
+        assert set(np.unique(image)) >= set(np.unique(actual.scale1.image)), (
+            "Subsequent scales should not have interpolation artifacts"
+        )
 
    @pytest.mark.parametrize("model", [ShapesModel])
    @pytest.mark.parametrize("path", [POLYGON_PATH, MULTIPOLYGON_PATH, POINT_PATH])
diff --git a/tests/transformations/test_transformations.py b/tests/transformations/test_transformations.py
index 833ecc9c..180d007d 100644
--- a/tests/transformations/test_transformations.py
+++ b/tests/transformations/test_transformations.py
@@ -765,7 +765,7 @@ def test_get_affine_for_element(images):
        np.array(
            [
                # fmt: off
-                #c y x # noqa: E265
+                # c y x  # noqa: E265
                [1, 0, 0, 0],  # c
                [0, 0, 1, 1],  # x
                [0, 1, 0, 2],  # y

From e7fa0206a0245d566929ef1d0f3128f6d1d02509 Mon Sep 17 00:00:00 2001
From: Luca Marconato
Date: Fri, 31 Jan 2025 10:52:20 +0100
Subject: [PATCH 4/5] Revert "Update pyproject.toml"

This reverts commit 1f8a01cf97dee96a3fa957234b2f5e8a0732d7e0.
---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index e01ffb56..c95c2571 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -15,7 +15,7 @@ maintainers = [
 urls.Documentation = "https://spatialdata.scverse.org/en/latest"
 urls.Source = "https://github.com/scverse/spatialdata.git"
 urls.Home-page = "https://github.com/scverse/spatialdata.git"
-requires-python = ">=3.10"
+requires-python = ">=3.10, <3.13" # include 3.13 once multiscale-spatial-image conflicts are resolved
 dynamic= [
   "version" # allow version to be set by git tags
 ]

From a9c08011c3db6a8611f629ff28211fd7aaed238d Mon Sep 17 00:00:00 2001
From: Luca Marconato
Date: Fri, 31 Jan 2025 10:54:10 +0100
Subject: [PATCH 5/5] removed 3.13 from test ci

---
 .github/workflows/test.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml
index fa5f44e1..1d7a79b9 100644
--- a/.github/workflows/test.yaml
+++ b/.github/workflows/test.yaml
@@ -18,7 +18,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python: ["3.10", "3.12", "3.13"]
+        python: ["3.10", "3.12"]
         os: [ubuntu-latest]
         include:
           - os: macos-latest
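
Note on the Python 3.13 hotfix in PATCH 2/5: the old code looked up the private `_member_names_` entry in `JoinTypes.__dict__`, which, per the commit message and issue #852, is no longer usable on Python 3.13; the patch instead checks whether the requested join name is a key of the enum class `__dict__` and resolves the member with `getattr`. The snippet below is a minimal, self-contained sketch of the two membership checks, not part of the patches: it uses a hypothetical stand-in enum `JoinKinds` with string values instead of the real `JoinTypes`, whose members wrap the actual join implementations.

    from enum import Enum


    class JoinKinds(Enum):
        # Hypothetical stand-in for JoinTypes; member names mirror join names.
        left = "left"
        right = "right"
        inner = "inner"


    how = "left"

    # Old check (pre-3.13), relying on a private enum attribute:
    #   how in JoinKinds.__dict__["_member_names_"]

    # Hotfix from the patch: enum members are class attributes, so their names
    # appear as keys of the class __dict__, and getattr returns the member.
    assert how in JoinKinds.__dict__
    member = getattr(JoinKinds, how)

    # Documented, version-stable alternative: the __members__ name-to-member mapping.
    assert how in JoinKinds.__members__
    assert JoinKinds.__members__[how] is member

A check through `JoinTypes.__members__` (or `JoinTypes[how]` guarded by `KeyError`) expresses the same intent without touching `__dict__` internals.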