Skip to content

Commit 28c17d7

Browse files
committed
cleanup
1 parent 56cf3cf commit 28c17d7

4 files changed

Lines changed: 24 additions & 105 deletions

File tree

src/zarr/abc/store.py

Lines changed: 6 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -16,19 +16,10 @@
1616

1717
from zarr.core.buffer import Buffer, BufferPrototype
1818

19-
__all__ = [
20-
"ByteGetter",
21-
"ByteSetter",
22-
"Store",
23-
"SupportsDeleteSync",
24-
"SupportsGetSync",
25-
"SupportsSetSync",
26-
"SupportsSyncStore",
27-
"set_or_delete",
28-
]
29-
30-
31-
@dataclass(frozen=True, slots=True)
19+
__all__ = ["ByteGetter", "ByteSetter", "Store", "set_or_delete"]
20+
21+
22+
@dataclass
3223
class RangeByteRequest:
3324
"""Request a specific byte range"""
3425

@@ -38,15 +29,15 @@ class RangeByteRequest:
3829
"""The end of the byte range request (exclusive)."""
3930

4031

41-
@dataclass(frozen=True, slots=True)
32+
@dataclass
4233
class OffsetByteRequest:
4334
"""Request all bytes starting from a given byte offset"""
4435

4536
offset: int
4637
"""The byte offset for the offset range request."""
4738

4839

49-
@dataclass(frozen=True, slots=True)
40+
@dataclass
5041
class SuffixByteRequest:
5142
"""Request up to the last `n` bytes"""
5243

@@ -721,31 +712,6 @@ async def delete(self) -> None: ...
721712
async def set_if_not_exists(self, default: Buffer) -> None: ...
722713

723714

724-
@runtime_checkable
725-
class SupportsGetSync(Protocol):
726-
def get_sync(
727-
self,
728-
key: str,
729-
*,
730-
prototype: BufferPrototype | None = None,
731-
byte_range: ByteRequest | None = None,
732-
) -> Buffer | None: ...
733-
734-
735-
@runtime_checkable
736-
class SupportsSetSync(Protocol):
737-
def set_sync(self, key: str, value: Buffer) -> None: ...
738-
739-
740-
@runtime_checkable
741-
class SupportsDeleteSync(Protocol):
742-
def delete_sync(self, key: str) -> None: ...
743-
744-
745-
@runtime_checkable
746-
class SupportsSyncStore(SupportsGetSync, SupportsSetSync, SupportsDeleteSync, Protocol): ...
747-
748-
749715
async def set_or_delete(byte_setter: ByteSetter, value: Buffer | None) -> None:
750716
"""Set or delete a value in a byte setter
751717

src/zarr/codecs/sharding.py

Lines changed: 10 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -129,7 +129,9 @@ class _ShardIndex(NamedTuple):
129129

130130
@property
131131
def chunks_per_shard(self) -> tuple[int, ...]:
132-
return tuple(self.offsets_and_lengths.shape[0:-1])
132+
result = tuple(self.offsets_and_lengths.shape[0:-1])
133+
# The cast is required until https://github.com/numpy/numpy/pull/27211 is merged
134+
return cast("tuple[int, ...]", result)
133135

134136
def _localize_chunk(self, chunk_coords: tuple[int, ...]) -> tuple[int, ...]:
135137
return tuple(
@@ -302,9 +304,7 @@ def to_dict_vectorized(
302304

303305
@dataclass(frozen=True)
304306
class ShardingCodec(
305-
ArrayBytesCodec,
306-
ArrayBytesCodecPartialDecodeMixin,
307-
ArrayBytesCodecPartialEncodeMixin,
307+
ArrayBytesCodec, ArrayBytesCodecPartialDecodeMixin, ArrayBytesCodecPartialEncodeMixin
308308
):
309309
"""Sharding codec"""
310310

@@ -486,7 +486,9 @@ async def _decode_partial_single(
486486
if self._is_total_shard(all_chunk_coords, chunks_per_shard):
487487
# read entire shard
488488
shard_dict_maybe = await self._load_full_shard_maybe(
489-
byte_getter, chunk_spec.prototype, chunks_per_shard
489+
byte_getter=byte_getter,
490+
prototype=chunk_spec.prototype,
491+
chunks_per_shard=chunks_per_shard,
490492
)
491493
else:
492494
# read some chunks within the shard
@@ -584,9 +586,7 @@ async def _encode_partial_single(
584586

585587
indexer = list(
586588
get_indexer(
587-
selection,
588-
shape=shard_shape,
589-
chunk_grid=RegularChunkGrid(chunk_shape=chunk_shape),
589+
selection, shape=shard_shape, chunk_grid=RegularChunkGrid(chunk_shape=chunk_shape)
590590
)
591591
)
592592

@@ -702,8 +702,7 @@ def _shard_index_size(self, chunks_per_shard: tuple[int, ...]) -> int:
702702
get_pipeline_class()
703703
.from_codecs(self.index_codecs)
704704
.compute_encoded_size(
705-
16 * product(chunks_per_shard),
706-
self._get_index_chunk_spec(chunks_per_shard),
705+
16 * product(chunks_per_shard), self._get_index_chunk_spec(chunks_per_shard)
707706
)
708707
)
709708

@@ -748,8 +747,7 @@ async def _load_shard_index_maybe(
748747
)
749748
else:
750749
index_bytes = await byte_getter.get(
751-
prototype=numpy_buffer_prototype(),
752-
byte_range=SuffixByteRequest(shard_index_size),
750+
prototype=numpy_buffer_prototype(), byte_range=SuffixByteRequest(shard_index_size)
753751
)
754752
if index_bytes is not None:
755753
return await self._decode_shard_index(index_bytes, chunks_per_shard)

src/zarr/storage/_common.py

Lines changed: 1 addition & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -5,13 +5,7 @@
55
from pathlib import Path
66
from typing import TYPE_CHECKING, Any, Literal, Self, TypeAlias
77

8-
from zarr.abc.store import (
9-
ByteRequest,
10-
Store,
11-
SupportsDeleteSync,
12-
SupportsGetSync,
13-
SupportsSetSync,
14-
)
8+
from zarr.abc.store import ByteRequest, Store
159
from zarr.core.buffer import Buffer, default_buffer_prototype
1610
from zarr.core.common import (
1711
ANY_ACCESS_MODE,
@@ -260,37 +254,6 @@ async def is_empty(self) -> bool:
260254
"""
261255
return await self.store.is_empty(self.path)
262256

263-
# -------------------------------------------------------------------
264-
# Synchronous IO delegation
265-
# -------------------------------------------------------------------
266-
267-
def get_sync(
268-
self,
269-
*,
270-
prototype: BufferPrototype | None = None,
271-
byte_range: ByteRequest | None = None,
272-
) -> Buffer | None:
273-
"""Synchronous read — delegates to ``self.store.get_sync(self.path, ...)``."""
274-
if not isinstance(self.store, SupportsGetSync):
275-
raise TypeError(f"Store {type(self.store).__name__} does not support synchronous get.")
276-
if prototype is None:
277-
prototype = default_buffer_prototype()
278-
return self.store.get_sync(self.path, prototype=prototype, byte_range=byte_range)
279-
280-
def set_sync(self, value: Buffer) -> None:
281-
"""Synchronous write — delegates to ``self.store.set_sync(self.path, value)``."""
282-
if not isinstance(self.store, SupportsSetSync):
283-
raise TypeError(f"Store {type(self.store).__name__} does not support synchronous set.")
284-
self.store.set_sync(self.path, value)
285-
286-
def delete_sync(self) -> None:
287-
"""Synchronous delete — delegates to ``self.store.delete_sync(self.path)``."""
288-
if not isinstance(self.store, SupportsDeleteSync):
289-
raise TypeError(
290-
f"Store {type(self.store).__name__} does not support synchronous delete."
291-
)
292-
self.store.delete_sync(self.path)
293-
294257
def __truediv__(self, other: str) -> StorePath:
295258
"""Combine this store path with another path"""
296259
return self.__class__(self.store, _dereference_path(self.path, other))

tests/test_codecs/test_sharding.py

Lines changed: 7 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -112,9 +112,7 @@ def test_sharding_scalar(
112112
indirect=["array_fixture"],
113113
)
114114
def test_sharding_partial(
115-
store: Store,
116-
array_fixture: npt.NDArray[Any],
117-
index_location: ShardingCodecIndexLocation,
115+
store: Store, array_fixture: npt.NDArray[Any], index_location: ShardingCodecIndexLocation
118116
) -> None:
119117
data = array_fixture
120118
spath = StorePath(store)
@@ -150,9 +148,7 @@ def test_sharding_partial(
150148
indirect=["array_fixture"],
151149
)
152150
def test_sharding_partial_readwrite(
153-
store: Store,
154-
array_fixture: npt.NDArray[Any],
155-
index_location: ShardingCodecIndexLocation,
151+
store: Store, array_fixture: npt.NDArray[Any], index_location: ShardingCodecIndexLocation
156152
) -> None:
157153
data = array_fixture
158154
spath = StorePath(store)
@@ -184,9 +180,7 @@ def test_sharding_partial_readwrite(
184180
@pytest.mark.parametrize("index_location", ["start", "end"])
185181
@pytest.mark.parametrize("store", ["local", "memory", "zip"], indirect=["store"])
186182
def test_sharding_partial_read(
187-
store: Store,
188-
array_fixture: npt.NDArray[Any],
189-
index_location: ShardingCodecIndexLocation,
183+
store: Store, array_fixture: npt.NDArray[Any], index_location: ShardingCodecIndexLocation
190184
) -> None:
191185
data = array_fixture
192186
spath = StorePath(store)
@@ -476,9 +470,7 @@ async def fail_chunk_reads(prototype: Any, key_ranges: Any, **kwargs: Any) -> li
476470
@pytest.mark.parametrize("index_location", ["start", "end"])
477471
@pytest.mark.parametrize("store", ["local", "memory", "zip"], indirect=["store"])
478472
def test_sharding_partial_overwrite(
479-
store: Store,
480-
array_fixture: npt.NDArray[Any],
481-
index_location: ShardingCodecIndexLocation,
473+
store: Store, array_fixture: npt.NDArray[Any], index_location: ShardingCodecIndexLocation
482474
) -> None:
483475
data = array_fixture[:10, :10, :10]
484476
spath = StorePath(store)
@@ -653,6 +645,7 @@ async def test_delete_empty_shards(store: Store) -> None:
653645
compressors=None,
654646
fill_value=1,
655647
)
648+
print(a.metadata.to_dict())
656649
await _AsyncArrayProxy(a)[:, :].set(np.zeros((16, 16)))
657650
await _AsyncArrayProxy(a)[8:, :].set(np.ones((8, 16)))
658651
await _AsyncArrayProxy(a)[:, 8:].set(np.ones((16, 8)))
@@ -697,6 +690,7 @@ async def test_sharding_with_empty_inner_chunk(
697690
)
698691
data[:4, :4] = fill_value
699692
await a.setitem(..., data)
693+
print("read data")
700694
data_read = await a.getitem(...)
701695
assert np.array_equal(data_read, data)
702696

@@ -708,9 +702,7 @@ async def test_sharding_with_empty_inner_chunk(
708702
)
709703
@pytest.mark.parametrize("chunks_per_shard", [(5, 2), (2, 5), (5, 5)])
710704
async def test_sharding_with_chunks_per_shard(
711-
store: Store,
712-
index_location: ShardingCodecIndexLocation,
713-
chunks_per_shard: tuple[int],
705+
store: Store, index_location: ShardingCodecIndexLocation, chunks_per_shard: tuple[int]
714706
) -> None:
715707
chunk_shape = (2, 1)
716708
shape = tuple(x * y for x, y in zip(chunks_per_shard, chunk_shape, strict=False))

0 commit comments

Comments (0)