
Commit

Merge branch 'main' into icechunk-append
TomNicholas authored Dec 5, 2024
2 parents 8496359 + 20dd9dc commit 7dc9186
Showing 1 changed file with 8 additions and 8 deletions.
16 changes: 8 additions & 8 deletions virtualizarr/readers/hdf/hdf.py
@@ -28,11 +28,11 @@
 
 h5py = soft_import("h5py", "For reading hdf files", strict=False)
 if h5py:
-    Dataset = h5py.Dataset
-    Group = h5py.Group
+    Dataset = h5py.Dataset  # type: ignore
+    Group = h5py.Group  # type: ignore
 else:
-    Dataset = dict()
-    Group = dict()
+    Dataset = dict()  # type: ignore
+    Group = dict()  # type: ignore
 
 
 class HDFVirtualBackend(VirtualBackend):
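
Note: the `# type: ignore` comments added above silence the type checker's complaint about rebinding `Dataset` and `Group` to objects of different types (an h5py class when the library is present, a plain dict otherwise). A minimal sketch of that optional-import pattern, using `importlib.util.find_spec` in place of the `soft_import` helper used above:

from importlib.util import find_spec

if find_spec("h5py") is not None:
    import h5py
    Dataset = h5py.Dataset  # type: ignore
    Group = h5py.Group  # type: ignore
else:
    # Placeholder objects keep the module importable when h5py is missing
    Dataset = dict()  # type: ignore
    Group = dict()  # type: ignore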
@@ -183,14 +183,14 @@ def _dataset_dims(dataset: Dataset) -> Union[List[str], List[None]]:
         rank = len(dataset.shape)
         if rank:
             for n in range(rank):
-                num_scales = len(dataset.dims[n])
+                num_scales = len(dataset.dims[n])  # type: ignore
                 if num_scales == 1:
-                    dims.append(dataset.dims[n][0].name[1:])
+                    dims.append(dataset.dims[n][0].name[1:])  # type: ignore
                 elif h5py.h5ds.is_scale(dataset.id):
                     dims.append(dataset.name[1:])
                 elif num_scales > 1:
                     raise ValueError(
-                        f"{dataset.name}: {len(dataset.dims[n])} "
+                        f"{dataset.name}: {len(dataset.dims[n])} "  # type: ignore
                         f"dimension scales attached to dimension #{n}"
                     )
                 elif num_scales == 0:
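
For context, a small self-contained sketch (with made-up file and variable names) of the h5py dimension-scale API that `_dataset_dims` walks here, where each entry of `dataset.dims[n]` is an attached scale whose `.name` carries a leading "/":

import h5py
import numpy as np

with h5py.File("example.h5", "w") as f:
    x = f.create_dataset("x", data=np.arange(4))
    x.make_scale("x")                    # mark "x" as a dimension scale
    data = f.create_dataset("data", data=np.zeros((4, 3)))
    data.dims[0].attach_scale(x)         # attach it to dimension 0 of "data"

    for n in range(len(data.shape)):
        scales = data.dims[n]
        if len(scales) == 1:
            print(n, scales[0].name[1:])             # prints: 0 x
        elif len(scales) == 0:
            print(n, "no dimension scale attached")  # prints: 1 ...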
@@ -287,7 +287,7 @@ def _dataset_to_variable(path: str, dataset: Dataset) -> Optional[Variable]:
             fill_value = fill_value.item()
         filters = [codec.get_config() for codec in codecs]
         zarray = ZArray(
-            chunks=chunks,
+            chunks=chunks,  # type: ignore
             compressor=None,
             dtype=dtype,
             fill_value=fill_value,
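
Similarly, a hedged sketch (file and dataset names are illustrative) of the per-dataset HDF5 metadata that `_dataset_to_variable` gathers before constructing the `ZArray` above:

import h5py
import numpy as np

with h5py.File("example.h5", "w") as f:
    dset = f.create_dataset(
        "data", shape=(100, 100), chunks=(10, 10), dtype="f4", fillvalue=-9999.0
    )
    print(dset.chunks)     # (10, 10)  -> chunks=
    print(dset.dtype)      # float32   -> dtype=
    print(dset.fillvalue)  # -9999.0   -> fill_value= (numpy scalars go through .item())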
