diff --git a/src/anndata/_io/specs/methods.py b/src/anndata/_io/specs/methods.py index 5c84d83d8..91e11044b 100644 --- a/src/anndata/_io/specs/methods.py +++ b/src/anndata/_io/specs/methods.py @@ -541,6 +541,9 @@ def write_vlen_string_array_zarr( from numcodecs import VLenUTF8 from zarr.codecs import VLenUTF8Codec + dataset_kwargs = dataset_kwargs.copy() + if "compressor" in dataset_kwargs: + dataset_kwargs.pop("compressor") f.create_array( k, shape=elem.shape, @@ -1165,6 +1168,7 @@ def _remove_scalar_compression_args(dataset_kwargs: Mapping[str, Any]) -> dict: "shuffle", "fletcher32", "scaleoffset", + "compressor", ): dataset_kwargs.pop(arg, None) return dataset_kwargs diff --git a/tests/test_readwrite.py b/tests/test_readwrite.py index 50861ac6b..21cfca1fc 100644 --- a/tests/test_readwrite.py +++ b/tests/test_readwrite.py @@ -342,13 +342,14 @@ def test_zarr_compression(tmp_path): ad.io.write_zarr(pth, adata, compressor=compressor) - def check_compressed(key, value): - if isinstance(value, zarr.Array) and value.shape != (): + def check_compressed(value, key): + if value.shape != (): if value.compressor != compressor: not_compressed.append(key) - with zarr.open(str(pth), "r") as f: - f.visititems(check_compressed) + f = zarr.open(str(pth), "r") + for key in f.array_keys(): + check_compressed(f[key], key) if not_compressed: msg = "\n\t".join(not_compressed)