diff --git a/include/z5/filesystem/factory.hxx b/include/z5/filesystem/factory.hxx
index adaf7b7..5fc3531 100644
--- a/include/z5/filesystem/factory.hxx
+++ b/include/z5/filesystem/factory.hxx
@@ -42,6 +42,12 @@ namespace filesystem {
             ptr.reset(new Dataset<float>(dataset, metadata)); break;
         case types::float64:
             ptr.reset(new Dataset<double>(dataset, metadata)); break;
+        case types::complex64:
+            ptr.reset(new Dataset<std::complex<float>>(dataset, metadata)); break;
+        case types::complex128:
+            ptr.reset(new Dataset<std::complex<double>>(dataset, metadata)); break;
+        case types::complex256:
+            ptr.reset(new Dataset<std::complex<long double>>(dataset, metadata)); break;
     }
     return ptr;
 }
@@ -78,6 +84,12 @@ namespace filesystem {
             ptr.reset(new Dataset<float>(dataset, metadata)); break;
         case types::float64:
             ptr.reset(new Dataset<double>(dataset, metadata)); break;
+        case types::complex64:
+            ptr.reset(new Dataset<std::complex<float>>(dataset, metadata)); break;
+        case types::complex128:
+            ptr.reset(new Dataset<std::complex<double>>(dataset, metadata)); break;
+        case types::complex256:
+            ptr.reset(new Dataset<std::complex<long double>>(dataset, metadata)); break;
     }
     return ptr;
 }
diff --git a/include/z5/metadata.hxx b/include/z5/metadata.hxx
index b50acc1..4e92f57 100644
--- a/include/z5/metadata.hxx
+++ b/include/z5/metadata.hxx
@@ -143,6 +143,8 @@ namespace z5 {
             } else {
                 throw std::runtime_error("Invalid string value for fillValue");
             }
+        } else if(fillValJson.type() == nlohmann::json::value_t::null) {
+            fillValue = std::numeric_limits<double>::quiet_NaN();
         } else {
             fillValue = static_cast<double>(fillValJson);
         }
diff --git a/include/z5/types/types.hxx b/include/z5/types/types.hxx
index 7f9d509..99d6aea 100644
--- a/include/z5/types/types.hxx
+++ b/include/z5/types/types.hxx
@@ -5,6 +5,7 @@
 #include <map>
 #include <string>
 #include <vector>
+#include <complex>
 
 #include "nlohmann/json.hpp"
@@ -28,7 +29,8 @@ namespace types {
     enum Datatype {
         int8, int16, int32, int64,
         uint8, uint16, uint32, uint64,
-        float32, float64
+        float32, float64,
+        complex64, complex128, complex256
     };
 
     struct Datatypes {
@@ -38,7 +40,8 @@
        static DtypeMap & zarrToDtype() {
            static DtypeMap dtypeMap({{{"|i1", int8}, {"<i2", int16}, {"<i4", int32}, {"<i8", int64},
                                       {"|u1", uint8}, {"<u2", uint16}, {"<u4", uint32}, {"<u8", uint64},
-                                      {"<f4", float32}, {"<f8", float64}}});
+                                      {"<f4", float32}, {"<f8", float64},
+                                      {"<c8", complex64}, {"<c16", complex128}, {"<c32", complex256}}});
            return dtypeMap;
        }
[...]
     exportIoT<float>(module, "float32");
     exportIoT<double>(module, "float64");
+    // complex types
+    exportIoT<std::complex<float>>(module, "complex64");
+    exportIoT<std::complex<double>>(module, "complex128");
+    exportIoT<std::complex<long double>>(module, "complex256");
 
     // export writing scalars
     // The overloads cannot be properly resolved,
@@ -281,6 +285,18 @@
                                          static_cast<double>(val),
                                          numberOfThreads);
                                          break;
+        case types::Datatype::complex64 : writePyScalar<std::complex<float>>(ds, roiBegin, roiShape,
+                                                                             static_cast<std::complex<float>>(val),
+                                                                             numberOfThreads);
+                                                                             break;
+        case types::Datatype::complex128 : writePyScalar<std::complex<double>>(ds, roiBegin, roiShape,
+                                                                               static_cast<std::complex<double>>(val),
+                                                                               numberOfThreads);
+                                                                               break;
+        case types::Datatype::complex256 : writePyScalar<std::complex<long double>>(ds, roiBegin, roiShape,
+                                                                                    static_cast<std::complex<long double>>(val),
+                                                                                    numberOfThreads);
+                                                                                    break;
         default: throw(std::runtime_error("Invalid datatype"));
     }
diff --git a/src/test/test_dataset.cxx b/src/test/test_dataset.cxx
index 1596fd5..ec0f141 100644
--- a/src/test/test_dataset.cxx
+++ b/src/test/test_dataset.cxx
@@ -13,10 +13,11 @@ namespace z5 {
     class DatasetTest : public ::testing::Test {
     protected:
-        DatasetTest() : fileHandle_("data.zr"), floatHandle_(fileHandle_, "float"), intHandle_(fileHandle_, "int") {
+        DatasetTest() : fileHandle_("data.zr"), intHandle_(fileHandle_, "int"), floatHandle_(fileHandle_, "float"), complexFloatHandle_(fileHandle_, "complexFloat") {
             // int zarray metadata
             jInt_ = "{ \"chunks\": [10, 10, 10], \"compressor\": { \"clevel\": 5, \"cname\": \"lz4\", \"id\": \"blosc\", \"shuffle\": 1}, \"dtype\": \"<i8\", [...]
[...]
+        std::complex<float> dataComplexFloat_[size_];
     };
@@ -222,6 +237,76 @@
         }
     }
 
+    TEST_F(DatasetTest, OpenComplexFloatDataset) {
+
+        auto ds = openDataset(fileHandle_, "complexFloat");
+        const auto & chunksPerDim = ds->chunksPerDimension();
+
+        std::default_random_engine generator;
+
+        // test uninitialized chunk -> this is expected to throw a runtime error
+        std::complex<float> dataTmp[size_];
+        ASSERT_THROW(ds->readChunk(types::ShapeType({0, 0, 0}), dataTmp), std::runtime_error);
+
+        // test for 10 random chunks
+        for(unsigned t = 0; t < 10; ++t) {
+
+            // get a random chunk
+            types::ShapeType chunkId(ds->dimension());
+            for(unsigned d = 0; d < ds->dimension(); ++d) {
+                std::uniform_int_distribution<std::size_t> distr(0, chunksPerDim[d] - 1);
+                chunkId[d] = distr(generator);
+            }
+
+            ds->writeChunk(chunkId, dataComplexFloat_);
+
+            // read a chunk
+            std::complex<float> dataTmp[size_];
+            ds->readChunk(chunkId, dataTmp);
+
+            // check
+            for(std::size_t i = 0; i < size_; ++i) {
+                ASSERT_EQ(dataTmp[i], dataComplexFloat_[i]);
+            }
+        }
+    }
+
+    TEST_F(DatasetTest, CreateComplexFloatDataset) {
+
+        DatasetMetadata complexFloatMeta;
+        complexFloatMeta.fromJson(jComplexFloat_, true);
+
+        auto ds = createDataset(fileHandle_, "complexFloat1", complexFloatMeta);
+        const auto & chunksPerDim = ds->chunksPerDimension();
+
+        std::default_random_engine generator;
+
+        // test uninitialized chunk -> this is expected to throw a runtime error
+        std::complex<float> dataTmp[size_];
+        ASSERT_THROW(ds->readChunk(types::ShapeType({0, 0, 0}), dataTmp), std::runtime_error);
+
+        // test for 10 random chunks
+        for(unsigned t = 0; t < 10; ++t) {
+
+            // get a random chunk
+            types::ShapeType chunkId(ds->dimension());
+            for(unsigned d = 0; d < ds->dimension(); ++d) {
+                std::uniform_int_distribution<std::size_t> distr(0, chunksPerDim[d] - 1);
+                chunkId[d] = distr(generator);
+            }
+
+            ds->writeChunk(chunkId, dataComplexFloat_);
+
+            // read a chunk
+            std::complex<float> dataTmp[size_];
+            ds->readChunk(chunkId, dataTmp);
+
+            // check
+            for(std::size_t i = 0; i < size_; ++i) {
+                ASSERT_EQ(dataTmp[i], dataComplexFloat_[i]);
+            }
+        }
+    }
 
     TEST_F(DatasetTest, CreateBloscDataset) {
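
For context, here is a minimal sketch of how the new complex64 support would be exercised through the C++ API this patch touches. It only uses calls that appear in the patch itself (openDataset, writeChunk, readChunk, types::ShapeType); the two include paths are assumed from this repo's layout, and the container and dataset names simply mirror the test fixture above.

    // Usage sketch, not part of the patch. Header locations are assumptions.
    #include <complex>
    #include <cstddef>

    #include "z5/filesystem/handle.hxx"   // assumed header for handle::File
    #include "z5/factory.hxx"             // assumed header for openDataset

    int main() {
        // open the zarr container and a dataset created with dtype "<c8"
        z5::filesystem::handle::File file("data.zr");
        auto ds = z5::openDataset(file, "complexFloat");

        // fill one 10 x 10 x 10 chunk (the chunk shape used in the test metadata)
        const std::size_t size = 10 * 10 * 10;
        std::complex<float> chunk[size];
        for(std::size_t i = 0; i < size; ++i) {
            chunk[i] = std::complex<float>(float(i), -float(i));
        }

        // write the first chunk and read it back, as the tests above do
        z5::types::ShapeType chunkId({0, 0, 0});
        ds->writeChunk(chunkId, chunk);

        std::complex<float> out[size];
        ds->readChunk(chunkId, out);
        return 0;
    }

On the Python side, the same datasets would surface through the exportIoT bindings added above, under the numpy-style names "complex64", "complex128", and "complex256".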