Deprecate partial read/writes in v2 #2844

Open · wants to merge 3 commits into base: support/v2
19 changes: 13 additions & 6 deletions docs/release.rst
@@ -14,9 +14,19 @@ Release notes
# re-indented so that it does not show up in the notes.

.. note::
Zarr-Python 2.18.* is expected to be the final release in the 2.* series. Work on Zarr-Python 3.0 is underway.
See `GH1777 <https://github.com/zarr-developers/zarr-python/issues/1777>`_ for more details on the upcoming
3.0 release.
Zarr-Python 2.* is in support mode now, and no new features will be added.


Unreleased
----------

Deprecations
~~~~~~~~~~~~

* Deprecated support for ``partial_decompress`` when creating an array.
This functionality is no longer supported in ``numcodecs``, and will be removed
in ``zarr-python`` 2.19.0.
By :user:`David Stansby <dstansby>`

.. _release_2.18.4:

@@ -40,9 +50,6 @@ Maintenance
the Delta filter (see https://github.com/zarr-developers/numcodecs/issues/653 for more information).
By :user:`David Stansby <dstansby>` (:issue:`2544`).

Deprecations
~~~~~~~~~~~~

.. _release_2.18.3:

2.18.3
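To illustrate the deprecation recorded in the release note above, here is a minimal sketch (not part of this diff; the in-memory store and array shape are arbitrary) showing that any explicit partial_decompress value now emits a DeprecationWarning:

import warnings

import zarr

# Create a small array in an in-memory store, then reopen it via zarr.Array
# with the deprecated keyword to observe the new warning.
store = zarr.MemoryStore()
zarr.create(shape=(100,), chunks=(10,), store=store)

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    arr = zarr.Array(store, partial_decompress=True)  # deprecated keyword

print(caught[0].category)  # <class 'DeprecationWarning'>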
17 changes: 9 additions & 8 deletions zarr/core.py
@@ -6,6 +6,7 @@
import re
from functools import reduce
from typing import Any
import warnings

import numpy as np
from numcodecs.compat import ensure_bytes
@@ -90,13 +91,6 @@ class Array:
If True (default), user attributes will be cached for attribute read
operations. If False, user attributes are reloaded from the store prior
to all attribute read operations.
partial_decompress : bool, optional
If True and while the chunk_store is a FSStore and the compression used
is Blosc, when getting data from the array chunks will be partially
read and decompressed when possible.

.. versionadded:: 2.7

write_empty_chunks : bool, optional
If True, all chunks will be stored regardless of their contents. If
False (default), each chunk is compared to the array's fill value prior
@@ -124,7 +118,7 @@ def __init__(
synchronizer=None,
cache_metadata=True,
cache_attrs=True,
partial_decompress=False,
partial_decompress=None,
write_empty_chunks=True,
zarr_version=None,
meta_array=None,
@@ -154,6 +148,13 @@
self._synchronizer = synchronizer
self._cache_metadata = cache_metadata
self._is_view = False
if partial_decompress is not None:
    warnings.warn(
        "Partial decompression is no longer supported in numcodecs. "
        "Support for partial decompression will be removed in a future version of zarr-python v2.",
        DeprecationWarning,
        stacklevel=1,
    )
self._partial_decompress = partial_decompress
self._write_empty_chunks = write_empty_chunks
if meta_array is not None:
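The hunk above uses the common keyword-deprecation pattern: the default switches from False to None so that "not passed" is distinguishable from "passed False", and any explicit value triggers the warning. A generic sketch of the pattern, with an assumed function name, might look like this (the diff itself uses stacklevel=1; stacklevel=2, as below, would point the warning at the caller):

import warnings


def open_thing(*, partial_decompress=None):
    # None means "not supplied"; True/False mean the caller asked explicitly.
    if partial_decompress is not None:
        warnings.warn(
            "partial_decompress is deprecated and will be removed.",
            DeprecationWarning,
            stacklevel=2,  # attribute the warning to the caller
        )
    # Behave as if the old default (False) had been requested.
    return bool(partial_decompress)


open_thing()                          # silent: nothing was passed
open_thing(partial_decompress=False)  # warns: any explicit value triggers it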
6 changes: 1 addition & 5 deletions zarr/creation.py
@@ -466,7 +466,7 @@ def open_array(
object_codec=None,
chunk_store=None,
storage_options=None,
partial_decompress=False,
partial_decompress=None,
write_empty_chunks=True,
*,
zarr_version=None,
@@ -522,10 +522,6 @@
storage_options : dict
If using an fsspec URL to create the store, these will be passed to
the backend implementation. Ignored otherwise.
partial_decompress : bool, optional
If True and while the chunk_store is a FSStore and the compression used
is Blosc, when getting data from the array chunks will be partially
read and decompressed when possible.
write_empty_chunks : bool, optional
If True (default), all chunks will be stored regardless of their
contents. If False, each chunk is compared to the array's fill value
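For open_array callers the migration is simply to stop passing the keyword, as in this sketch (store, mode and shape are illustrative); since numcodecs no longer supports partial Blosc decompression, chunks are read and decompressed in full either way:

import zarr

store = zarr.MemoryStore()

# Before (still accepted, but now emits a DeprecationWarning):
#   z = zarr.open_array(store=store, mode="r", partial_decompress=True)

# After: drop the keyword; the default of None stays silent.
z = zarr.open_array(store=store, mode="w", shape=(100,), chunks=(10,))
z[:] = 42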
20 changes: 19 additions & 1 deletion zarr/tests/test_core.py
@@ -2,6 +2,7 @@
import sys
import pickle
import shutil

from typing import Any, Literal, Optional, Tuple, Union, Sequence
import unittest
from itertools import zip_longest
@@ -84,6 +85,11 @@

# noinspection PyMethodMayBeStatic

pytestmark = [
    pytest.mark.filterwarnings(r"ignore:Call to deprecated function .* \_cbuffer\_sizes.*"),
    pytest.mark.filterwarnings(r"ignore:Call to deprecated function .* \_cbuffer\_metainfo.*"),
]


class TestArray:
version = 2
@@ -94,7 +100,7 @@ class TestArray:
dimension_separator: Optional[DIMENSION_SEPARATOR] = None
cache_metadata = True
cache_attrs = True
partial_decompress: bool = False
partial_decompress: Optional[bool] = None
write_empty_chunks = True
read_only = False
storage_transformers: Tuple[Any, ...] = ()
@@ -2481,6 +2487,9 @@ def expected(self):


@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec")
@pytest.mark.filterwarnings(
"ignore:.*Support for partial decompression will be removed in a future version.*"
)
class TestArrayWithFSStorePartialRead(TestArray):
compressor = Blosc(blocksize=256)
partial_decompress = True
@@ -2547,6 +2556,9 @@ def expected(self):


@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec")
@pytest.mark.filterwarnings(
"ignore:.*Support for partial decompression will be removed in a future version.*"
)
class TestArrayWithFSStoreNestedPartialRead(TestArrayWithFSStore):
compressor = Blosc()
dimension_separator = "/"
@@ -3020,6 +3032,9 @@ def expected(self):

@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec")
@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled")
@pytest.mark.filterwarnings(
"ignore:.*Support for partial decompression will be removed in a future version.*"
)
class TestArrayWithFSStoreV3PartialRead(TestArrayWithFSStoreV3):
partial_decompress = True

@@ -3038,6 +3053,9 @@ def expected(self):
@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec")
@pytest.mark.skipif(not v3_api_available, reason="V3 is disabled")
@pytest.mark.skipif(not v3_sharding_available, reason="sharding is disabled")
@pytest.mark.filterwarnings(
"ignore:.*Support for partial decompression will be removed in a future version.*"
)
class TestArrayWithFSStoreV3PartialReadUncompressedSharded(TestArrayWithFSStoreV3):
partial_decompress = True
compressor = None
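The filterwarnings marks above silence the new warning for the existing partial-read test classes; a complementary test asserting that the warning actually fires might look like this hypothetical sketch (not part of this diff):

import pytest
import zarr


def test_partial_decompress_is_deprecated():
    store = zarr.MemoryStore()
    zarr.create(shape=(10,), chunks=(5,), store=store)
    with pytest.warns(DeprecationWarning, match="partial decompression"):
        zarr.Array(store, partial_decompress=True)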