From e9da3d01850ce26350bb0319c7523439e0e6ed25 Mon Sep 17 00:00:00 2001
From: Callan Gray
Date: Thu, 26 Sep 2024 18:20:24 +0800
Subject: [PATCH 1/4] Add complex dtype support

---
 include/z5/filesystem/factory.hxx |  4 ++++
 include/z5/metadata.hxx           |  2 ++
 include/z5/types/types.hxx        |  9 ++++++---
 src/python/lib/dataset.cxx        | 11 +++++++++++
 4 files changed, 23 insertions(+), 3 deletions(-)

diff --git a/include/z5/filesystem/factory.hxx b/include/z5/filesystem/factory.hxx
index adaf7b7e..bf86bb92 100644
--- a/include/z5/filesystem/factory.hxx
+++ b/include/z5/filesystem/factory.hxx
@@ -42,6 +42,10 @@ namespace filesystem {
             ptr.reset(new Dataset<float>(dataset, metadata)); break;
         case types::float64:
             ptr.reset(new Dataset<double>(dataset, metadata)); break;
+        case types::complex64:
+            ptr.reset(new Dataset<std::complex<float>>(dataset, metadata)); break;
+        case types::complex128:
+            ptr.reset(new Dataset<std::complex<double>>(dataset, metadata)); break;
         }
         return ptr;
     }
diff --git a/include/z5/metadata.hxx b/include/z5/metadata.hxx
index 4a0f06ca..f1c106a8 100644
--- a/include/z5/metadata.hxx
+++ b/include/z5/metadata.hxx
@@ -143,6 +143,8 @@ namespace z5 {
                 } else {
                     throw std::runtime_error("Invalid string value for fillValue");
                 }
+            } else if(fillValJson.type() == nlohmann::json::value_t::null) {
+                fillValue = std::numeric_limits<double>::quiet_NaN();
             } else {
                 fillValue = static_cast<double>(fillValJson);
             }
diff --git a/include/z5/types/types.hxx b/include/z5/types/types.hxx
index 7f9d5097..5da1ec36 100644
--- a/include/z5/types/types.hxx
+++ b/include/z5/types/types.hxx
@@ -28,7 +28,8 @@ namespace types {
         enum Datatype {
             int8, int16, int32, int64,
             uint8, uint16, uint32, uint64,
-            float32, float64
+            float32, float64,
+            complex64, complex128
         };

         struct Datatypes {
@@ -38,7 +39,8 @@ namespace types {
             static DtypeMap & zarrToDtype() {
                 static DtypeMap dtypeMap({{{"|i1", int8}, {"
diff --git a/src/python/lib/dataset.cxx b/src/python/lib/dataset.cxx
         exportIoT<float>(module, "float32");
         exportIoT<double>(module, "float64");
+        // complex types
+        exportIoT<std::complex<float>>(module, "complex64");
+        exportIoT<std::complex<double>>(module, "complex128");

         // export writing scalars
         // The overloads cannot be properly resolved,
@@ -281,6 +284,14 @@ namespace z5 {
                                                            static_cast<double>(val),
                                                            numberOfThreads);
                break;
+            case types::Datatype::complex64 : writePyScalar<std::complex<float>>(ds, roiBegin, roiShape,
+                                                                                 static_cast<std::complex<float>>(val),
+                                                                                 numberOfThreads);
+                break;
+            case types::Datatype::complex128 : writePyScalar<std::complex<double>>(ds, roiBegin, roiShape,
+                                                                static_cast<std::complex<double>>(val),
+                                                                numberOfThreads);
+                break;
            default:
                throw(std::runtime_error("Invalid datatype"));
        }

From ea80fe20db0a13fd55f95ca7b67ba41f20669bea Mon Sep 17 00:00:00 2001
From: Callan Gray
Date: Mon, 14 Oct 2024 16:32:09 +0800
Subject: [PATCH 2/4] Add complex dtype support

---
 include/z5/filesystem/factory.hxx | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/include/z5/filesystem/factory.hxx b/include/z5/filesystem/factory.hxx
index bf86bb92..831fa640 100644
--- a/include/z5/filesystem/factory.hxx
+++ b/include/z5/filesystem/factory.hxx
@@ -82,6 +82,10 @@ namespace filesystem {
             ptr.reset(new Dataset<float>(dataset, metadata)); break;
         case types::float64:
             ptr.reset(new Dataset<double>(dataset, metadata)); break;
+        case types::complex64:
+            ptr.reset(new Dataset<std::complex<float>>(dataset, metadata)); break;
+        case types::complex128:
+            ptr.reset(new Dataset<std::complex<double>>(dataset, metadata)); break;
         }
         return ptr;
     }

From f0bc7511221eff4872658ad2e5934ae7643e1060 Mon Sep 17 00:00:00 2001
From: Callan Gray
Date: Thu, 17 Oct 2024 10:55:39 +0800
Subject: [PATCH 3/4] Add complex256 dtype support

---
 include/z5/filesystem/factory.hxx | 4 ++++
 include/z5/types/types.hxx        | 7 ++++---
 src/python/lib/dataset.cxx        | 9 +++++++--
 3 files changed, 15 insertions(+), 5 deletions(-)
diff --git a/include/z5/filesystem/factory.hxx b/include/z5/filesystem/factory.hxx
index 831fa640..5fc35312 100644
--- a/include/z5/filesystem/factory.hxx
+++ b/include/z5/filesystem/factory.hxx
@@ -46,6 +46,8 @@ namespace filesystem {
             ptr.reset(new Dataset<std::complex<float>>(dataset, metadata)); break;
         case types::complex128:
             ptr.reset(new Dataset<std::complex<double>>(dataset, metadata)); break;
+        case types::complex256:
+            ptr.reset(new Dataset<std::complex<long double>>(dataset, metadata)); break;
         }
         return ptr;
     }
@@ -86,6 +88,8 @@ namespace filesystem {
             ptr.reset(new Dataset<std::complex<float>>(dataset, metadata)); break;
         case types::complex128:
             ptr.reset(new Dataset<std::complex<double>>(dataset, metadata)); break;
+        case types::complex256:
+            ptr.reset(new Dataset<std::complex<long double>>(dataset, metadata)); break;
         }
         return ptr;
     }
diff --git a/include/z5/types/types.hxx b/include/z5/types/types.hxx
index 5da1ec36..99d6aead 100644
--- a/include/z5/types/types.hxx
+++ b/include/z5/types/types.hxx
@@ -5,6 +5,7 @@
 #include
 #include
 #include
+#include

 #include "nlohmann/json.hpp"

@@ -29,7 +30,7 @@ namespace types {
             int8, int16, int32, int64,
             uint8, uint16, uint32, uint64,
             float32, float64,
-            complex64, complex128
+            complex64, complex128, complex256
         };

         struct Datatypes {
@@ -40,7 +41,7 @@ namespace types {
                 static DtypeMap dtypeMap({{{"|i1", int8}, {"
diff --git a/src/python/lib/dataset.cxx b/src/python/lib/dataset.cxx
         exportIoT<std::complex<float>>(module, "complex64");
         exportIoT<std::complex<double>>(module, "complex128");
+        exportIoT<std::complex<long double>>(module, "complex256");

         // export writing scalars
         // The overloads cannot be properly resolved,
@@ -289,8 +290,12 @@ namespace z5 {
                                                                                 numberOfThreads);
                break;
             case types::Datatype::complex128 : writePyScalar<std::complex<double>>(ds, roiBegin, roiShape,
-                                                                static_cast<std::complex<double>>(val),
-                                                                numberOfThreads);
+                                                                                   static_cast<std::complex<double>>(val),
+                                                                                   numberOfThreads);
+                break;
+            case types::Datatype::complex256 : writePyScalar<std::complex<long double>>(ds, roiBegin, roiShape,
+                                                                                        static_cast<std::complex<long double>>(val),
+                                                                                        numberOfThreads);
                break;
            default:
                throw(std::runtime_error("Invalid datatype"));

From 202eee47a3e967df51d19d7e46872a19d0b2f4e6 Mon Sep 17 00:00:00 2001
From: Callan Gray
Date: Mon, 28 Oct 2024 18:15:12 +0800
Subject: [PATCH 4/4] Add complex dataset tests

---
 src/test/test_dataset.cxx | 101 +++++++++++++++++++++++++++++++++++---
 1 file changed, 93 insertions(+), 8 deletions(-)

diff --git a/src/test/test_dataset.cxx b/src/test/test_dataset.cxx
index 1596fd56..ec0f141d 100644
--- a/src/test/test_dataset.cxx
+++ b/src/test/test_dataset.cxx
@@ -13,10 +13,11 @@ namespace z5 {
     class DatasetTest : public ::testing::Test {

     protected:
-        DatasetTest() : fileHandle_("data.zr"), floatHandle_(fileHandle_, "float"), intHandle_(fileHandle_, "int") {
+        DatasetTest() : fileHandle_("data.zr"), intHandle_(fileHandle_, "int"), floatHandle_(fileHandle_, "float"), complexFloatHandle_(fileHandle_, "complexFloat") {
             // int zarray metadata
             jInt_ = "{ \"chunks\": [10, 10, 10], \"compressor\": { \"clevel\": 5, \"cname\": \"lz4\", \"id\": \"blosc\", \"shuffle\": 1}, \"dtype\": \"
+        std::complex<float> dataComplexFloat_[size_];
     };
@@ -222,6 +237,76 @@ namespace z5 {
         }
     }

+    TEST_F(DatasetTest, OpenComplexFloatDataset) {
+
+        auto ds = openDataset(fileHandle_, "complexFloat");
+        const auto & chunksPerDim = ds->chunksPerDimension();
+
+        std::default_random_engine generator;
+
+        // test uninitialized chunk -> this is expected to throw a runtime error
+        std::complex<float> dataTmp[size_];
+        ASSERT_THROW(ds->readChunk(types::ShapeType({0, 0, 0}), dataTmp), std::runtime_error);
+
+        // test for 10 random chunks
+        for(unsigned t = 0; t < 10; ++t) {
+
+            // get a random chunk
+            types::ShapeType chunkId(ds->dimension());
+            for(unsigned d = 0; d < ds->dimension(); ++d) {
+                std::uniform_int_distribution<std::size_t> distr(0, chunksPerDim[d] - 1);
+                chunkId[d] = distr(generator);
+            }
+
+            ds->writeChunk(chunkId, dataComplexFloat_);
+
+            // read a chunk
+            std::complex<float> dataTmp[size_];
+            ds->readChunk(chunkId, dataTmp);
+
+            // check
+            for(std::size_t i = 0; i < size_; ++i) {
+                ASSERT_EQ(dataTmp[i], dataComplexFloat_[i]);
+            }
+        }
+    }
+
+    TEST_F(DatasetTest, CreateComplexFloatDataset) {
+
+        DatasetMetadata complexFloatMeta;
+        complexFloatMeta.fromJson(jComplexFloat_, true);
+
+        auto ds = createDataset(fileHandle_, "complexFloat1", complexFloatMeta);
+        const auto & chunksPerDim = ds->chunksPerDimension();
+
+        std::default_random_engine generator;
+
+        // test uninitialized chunk -> this is expected to throw a runtime error
+        std::complex<float> dataTmp[size_];
+        ASSERT_THROW(ds->readChunk(types::ShapeType({0, 0, 0}), dataTmp), std::runtime_error);
+
+        // test for 10 random chunks
+        for(unsigned t = 0; t < 10; ++t) {
+
+            // get a random chunk
+            types::ShapeType chunkId(ds->dimension());
+            for(unsigned d = 0; d < ds->dimension(); ++d) {
+                std::uniform_int_distribution<std::size_t> distr(0, chunksPerDim[d] - 1);
+                chunkId[d] = distr(generator);
+            }
+
+            ds->writeChunk(chunkId, dataComplexFloat_);
+
+            // read a chunk
+            std::complex<float> dataTmp[size_];
+            ds->readChunk(chunkId, dataTmp);
+
+            // check
+            for(std::size_t i = 0; i < size_; ++i) {
+                ASSERT_EQ(dataTmp[i], dataComplexFloat_[i]);
+            }
+        }
+    }

     TEST_F(DatasetTest, CreateBloscDataset) {