diff --git a/.github/workflows/testreporting.yml b/.github/workflows/testreporting.yml index a30807935d..8a512683f6 100644 --- a/.github/workflows/testreporting.yml +++ b/.github/workflows/testreporting.yml @@ -79,7 +79,7 @@ jobs: run: | export CC=${{env.CC}} export CXX=${{env.CXX}} - cmake -G Ninja -DCMAKE_BUILD_TYPE=${{env.BUILD_TYPE}} -B build -S . -DCMAKE_INSTALL_PREFIX=/usr/local -DBAG_BUILD_TESTS:BOOL=ON -DBAG_CODE_COVERAGE:BOOL=ON + cmake -G Ninja -DCMAKE_BUILD_TYPE=${{env.BUILD_TYPE}} -B build -S . -DCMAKE_INSTALL_PREFIX=/usr/local -DBAG_BUILD_EXAMPLES:BOOL=ON -DBAG_BUILD_TESTS:BOOL=ON -DBAG_CODE_COVERAGE:BOOL=ON - name: Build # Build your program with the given configuration @@ -90,7 +90,7 @@ jobs: - name: Run tests run: | - BAG_SAMPLES_PATH=${{github.workspace}}/examples/sample-data ./build/tests/bag_tests_d -r junit -o build/tests/bag_tests-testreport.xml + BAG_SAMPLES_PATH=${{github.workspace}}/examples/sample-data ./build/tests/bag_tests_d -r junit | tee build/tests/bag_tests-testreport.xml - name: Test Reporter uses: mikepenz/action-junit-report@v5 diff --git a/api/bag.cpp b/api/bag.cpp index 66ec97b61b..e2623a58d0 100644 --- a/api/bag.cpp +++ b/api/bag.cpp @@ -824,7 +824,7 @@ BagError bagGetErrorString( strncpy(str, "Metadata One or more elements of the requested coverage are missing from the XML file", MAX_STR-1); break; case BAG_METADTA_INVLID_DIMENSIONS: - sprintf(str, "Metadata The number of dimensions is incorrect (not equal to %d)", RANK); + snprintf(str, MAX_STR, "Metadata The number of dimensions is incorrect (not equal to %d)", RANK); break; case BAG_METADTA_BUFFER_EXCEEDED: strncpy(str, "Metadata supplied buffer is too large to be stored in the internal array", MAX_STR-1); @@ -866,7 +866,7 @@ BagError bagGetErrorString( strncpy(str, "HDF Bag is not an HDF5 File", MAX_STR-1); break; case BAG_HDF_RANK_INCOMPATIBLE: - sprintf(str, "HDF Bag's rank is incompatible with expected Rank of the Datasets: %d", RANK); + snprintf(str, MAX_STR, "HDF 
Bag's rank is incompatible with expected Rank of the Datasets: %d", RANK); break; case BAG_HDF_TYPE_NOT_FOUND: strncpy(str, "HDF Bag surface Datatype parameter not available", MAX_STR-1); diff --git a/api/bag_dataset.cpp b/api/bag_dataset.cpp index 02f332d900..f564e737df 100644 --- a/api/bag_dataset.cpp +++ b/api/bag_dataset.cpp @@ -502,12 +502,12 @@ void Dataset::createDataset( // Mandatory Layers // Elevation - this->addLayer(SimpleLayer::create(*this, Elevation, chunkSize, - compressionLevel)); + this->addLayer(SimpleLayer::create(*this, Elevation, m_pMetadata->rows(), m_pMetadata->columns(), + chunkSize, compressionLevel)); // Uncertainty - this->addLayer(SimpleLayer::create(*this, Uncertainty, chunkSize, - compressionLevel)); + this->addLayer(SimpleLayer::create(*this, Uncertainty, m_pMetadata->rows(), m_pMetadata->columns(), + chunkSize, compressionLevel)); } //! Create an optional simple layer. @@ -546,8 +546,8 @@ Layer& Dataset::createSimpleLayer( case Num_Soundings: //[[fallthrough]]; case Average_Elevation: //[[fallthrough]]; case Nominal_Elevation: - return this->addLayer(SimpleLayer::create(*this, type, chunkSize, - compressionLevel)); + return this->addLayer(SimpleLayer::create(*this, type, + m_pMetadata->rows(), m_pMetadata->columns(), chunkSize, compressionLevel)); case Surface_Correction: //[[fallthrough]]; case Georef_Metadata: //[[fallthrough]]; default: @@ -1073,10 +1073,17 @@ void Dataset::readDataset( OpenMode openMode) { signal(SIGABRT, handleAbrt); - m_pH5file = std::unique_ptr<::H5::H5File, DeleteH5File>(new ::H5::H5File{ - fileName.c_str(), - (openMode == BAG_OPEN_READONLY) ? H5F_ACC_RDONLY : H5F_ACC_RDWR}, - DeleteH5File{}); + try { + m_pH5file = std::unique_ptr<::H5::H5File, DeleteH5File>(new ::H5::H5File{ + fileName.c_str(), + (openMode == BAG_OPEN_READONLY) ? 
H5F_ACC_RDONLY : H5F_ACC_RDWR}, + DeleteH5File{}); + } + catch( ::H5::FileIException& e ) + { + std::cerr << "Unable to read BAG file, error was: " << e.getCDetailMsg() << std::endl; + e.printErrorStack(); + } m_pMetadata = std::make_unique(*this); @@ -1103,7 +1110,10 @@ void Dataset::readDataset( H5Dclose(id); - auto layerDesc = SimpleLayerDescriptor::open(*this, layerType); + // Pre-stage the layer-specific descriptor. Note that we don't need to specify the + // dimensions of the layer here, since they're set from the HDF5 dataset when it + // gets opened with SimpleLayer::open(). + auto layerDesc = SimpleLayerDescriptor::open(*this, layerType, 0, 0); this->addLayer(SimpleLayer::open(*this, *layerDesc)); } @@ -1166,7 +1176,10 @@ void Dataset::readDataset( } { - auto descriptor = VRRefinementsDescriptor::open(*this); + // Pre-stage the layer-specific descriptor for the refinements; note that this + // doesn't have to have specific dimensions since they're set when the refinements + // layer is read in VRRefinements::open(). + auto descriptor = VRRefinementsDescriptor::open(*this, 0, 0); this->addLayer(VRRefinements::open(*this, *descriptor)); } @@ -1176,7 +1189,10 @@ void Dataset::readDataset( { H5Dclose(id); - auto descriptor = VRNodeDescriptor::open(*this); + // Pre-stage the layer-specific descriptor for the nodes; note that this doesn't + // have to have specific dimensions since they're set when the nodes layer is + // read in VRNode::open(). 
+ auto descriptor = VRNodeDescriptor::open(*this, 0, 0); this->addLayer(VRNode::open(*this, *descriptor)); } } diff --git a/api/bag_exceptions.h b/api/bag_exceptions.h index 6f1979b2c4..31abc59058 100644 --- a/api/bag_exceptions.h +++ b/api/bag_exceptions.h @@ -441,7 +441,7 @@ struct BAG_API InvalidVRRefinementDimensions final : virtual std::exception { const char* what() const noexcept override { - return "The variable resolution refinement layer is not 1 dimensional."; + return "The variable resolution refinement layer is inconsistent with specification."; } }; diff --git a/api/bag_georefmetadatalayer.cpp b/api/bag_georefmetadatalayer.cpp index bde3e0e66e..0eefbf499b 100644 --- a/api/bag_georefmetadatalayer.cpp +++ b/api/bag_georefmetadatalayer.cpp @@ -104,8 +104,13 @@ std::shared_ptr GeorefMetadataLayer::create( keyType != DT_UINT64) throw InvalidKeyType{}; + // The keys array should be the same dimensions as the mandatory elevation layer, so read + // from the file global descriptor, and set. + uint32_t rows = 0, cols = 0; + std::tie(rows, cols) = dataset.getDescriptor().getDims(); auto pDescriptor = GeorefMetadataLayerDescriptor::create(dataset, name, profile, keyType, - definition, chunkSize, compressionLevel); + definition, rows, cols, + chunkSize, compressionLevel); // Create the H5 Group to hold keys & values. 
const auto& h5file = dataset.getH5file(); @@ -122,7 +127,8 @@ std::shared_ptr GeorefMetadataLayer::create( auto h5valueDataSet = GeorefMetadataLayer::createH5valueDataSet(dataset, *pDescriptor); auto layer = std::make_shared(dataset, - *pDescriptor, std::move(h5keyDataSet), std::move(h5vrKeyDataSet), + *pDescriptor, std::move(h5keyDataSet), + std::move(h5vrKeyDataSet), std::move(h5valueDataSet)); layer->setValueTable(std::unique_ptr(new ValueTable{*layer})); @@ -150,6 +156,12 @@ std::shared_ptr GeorefMetadataLayer::open( new ::H5::DataSet{h5file.openDataSet(internalPath + COMPOUND_KEYS)}, DeleteH5dataSet{}); + // The keys array has the dimensions of the layer, so we can read and reset the + // descriptor dimensions, in case they were inconsistent (or not set). + std::array dims; + h5keyDataSet->getSpace().getSimpleExtentDims(dims.data(), nullptr); + descriptor.setDims(dims[0], dims[1]); + std::unique_ptr<::H5::DataSet, DeleteH5dataSet> h5vrKeyDataSet{}; if (dataset.getVRMetadata()) h5vrKeyDataSet = std::unique_ptr<::H5::DataSet, DeleteH5dataSet>( @@ -161,7 +173,9 @@ std::shared_ptr GeorefMetadataLayer::open( DeleteH5dataSet{}); auto layer = std::make_shared(dataset, - descriptor, std::move(h5keyDataSet), std::move(h5vrKeyDataSet), + descriptor, + std::move(h5keyDataSet), + std::move(h5vrKeyDataSet), std::move(h5valueDataSet)); layer->setValueTable(std::unique_ptr(new ValueTable{*layer})); @@ -188,7 +202,9 @@ GeorefMetadataLayer::createH5keyDataSet( std::unique_ptr<::H5::DataSet, DeleteH5dataSet> pH5dataSet; { - // Use the dimensions from the descriptor. + // Use the dimensions from the descriptor. We could do this from the specific + // descriptor for the layer, too, which should mirror the size of the file global + // descriptor used here. 
uint32_t dim0 = 0, dim1 = 0; std::tie(dim0, dim1) = dataset.getDescriptor().getDims(); const std::array fileDims{dim0, dim1}; diff --git a/api/bag_georefmetadatalayer.h b/api/bag_georefmetadatalayer.h index 32f940f371..faceb66d78 100644 --- a/api/bag_georefmetadatalayer.h +++ b/api/bag_georefmetadatalayer.h @@ -64,8 +64,8 @@ class BAG_API GeorefMetadataLayer final : public Layer protected: static std::shared_ptr create(DataType keyType, const std::string& name, GeorefMetadataProfile profile, Dataset& dataset, - const RecordDefinition& definition, uint64_t chunkSize, - int compressionLevel); + const RecordDefinition& definition, + uint64_t chunkSize, int compressionLevel); static std::shared_ptr open(Dataset& dataset, GeorefMetadataLayerDescriptor& descriptor); diff --git a/api/bag_georefmetadatalayerdescriptor.cpp b/api/bag_georefmetadatalayerdescriptor.cpp index 527d0fd1a9..9a9bd0e69d 100644 --- a/api/bag_georefmetadatalayerdescriptor.cpp +++ b/api/bag_georefmetadatalayerdescriptor.cpp @@ -35,10 +35,11 @@ GeorefMetadataLayerDescriptor::GeorefMetadataLayerDescriptor( GeorefMetadataProfile profile, DataType keyType, RecordDefinition definition, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) : LayerDescriptor(dataset.getNextId(), GEOREF_METADATA_PATH + name, name, - Georef_Metadata, chunkSize, compressionLevel) + Georef_Metadata, rows, cols, chunkSize, compressionLevel) , m_pBagDataset(dataset.shared_from_this()) , m_profile(profile) , m_keyType(keyType) @@ -72,12 +73,14 @@ std::shared_ptr GeorefMetadataLayerDescriptor::cr GeorefMetadataProfile profile, DataType keyType, RecordDefinition definition, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) { return std::shared_ptr( new GeorefMetadataLayerDescriptor{dataset, name, profile, keyType, - std::move(definition), chunkSize, compressionLevel}); + std::move(definition), rows, cols, + chunkSize, compressionLevel}); } //! 
Open an existing georeferenced metadata layer descriptor. @@ -165,8 +168,13 @@ std::shared_ptr GeorefMetadataLayerDescriptor::op profile = UNKNOWN_METADATA_PROFILE; } + std::array dims; + h5dataSet.getSpace().getSimpleExtentDims(dims.data(), nullptr); + return std::shared_ptr( new GeorefMetadataLayerDescriptor{dataset, name, profile, keyType, definition, + static_cast(dims[0]), + static_cast(dims[1]), chunkSize, compressionLevel}); } diff --git a/api/bag_georefmetadatalayerdescriptor.h b/api/bag_georefmetadatalayerdescriptor.h index 6749d09568..bc3d029037 100644 --- a/api/bag_georefmetadatalayerdescriptor.h +++ b/api/bag_georefmetadatalayerdescriptor.h @@ -21,12 +21,13 @@ namespace BAG { class BAG_API GeorefMetadataLayerDescriptor final : public LayerDescriptor { public: - static std::shared_ptr create(Dataset& dataset, - const std::string& name, GeorefMetadataProfile profile, DataType keyType, - RecordDefinition definition, uint64_t chunkSize, - int compressionLevel); - static std::shared_ptr open(Dataset& dataset, - const std::string& name); + static std::shared_ptr + create(Dataset& dataset, + const std::string& name, GeorefMetadataProfile profile, DataType keyType, + RecordDefinition definition, uint32_t rows, uint32_t cols, + uint64_t chunkSize, int compressionLevel); + static std::shared_ptr + open(Dataset& dataset, const std::string& name); GeorefMetadataLayerDescriptor(const GeorefMetadataLayerDescriptor&) = delete; GeorefMetadataLayerDescriptor(GeorefMetadataLayerDescriptor&&) = delete; @@ -52,7 +53,8 @@ class BAG_API GeorefMetadataLayerDescriptor final : public LayerDescriptor protected: GeorefMetadataLayerDescriptor(Dataset& dataset, const std::string& name, GeorefMetadataProfile profile, - DataType keyType, RecordDefinition definition, uint64_t chunkSize, + DataType keyType, RecordDefinition definition, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel); private: diff --git a/api/bag_interleavedlegacylayer.cpp 
b/api/bag_interleavedlegacylayer.cpp index d18e6f3fd7..c4e7124197 100644 --- a/api/bag_interleavedlegacylayer.cpp +++ b/api/bag_interleavedlegacylayer.cpp @@ -51,6 +51,10 @@ std::shared_ptr InterleavedLegacyLayer::open( descriptor.setMinMax(std::get<1>(possibleMinMax), std::get<2>(possibleMinMax)); + std::array dims; + h5dataSet->getSpace().getSimpleExtentDims(dims.data(), nullptr); + descriptor.setDims(dims[0], dims[1]); + return std::make_shared(dataset, descriptor, std::move(h5dataSet)); } diff --git a/api/bag_interleavedlegacylayerdescriptor.cpp b/api/bag_interleavedlegacylayerdescriptor.cpp index 179d5685f8..1567ad2df0 100644 --- a/api/bag_interleavedlegacylayerdescriptor.cpp +++ b/api/bag_interleavedlegacylayerdescriptor.cpp @@ -20,9 +20,10 @@ namespace BAG { InterleavedLegacyLayerDescriptor::InterleavedLegacyLayerDescriptor( uint32_t id, LayerType layerType, - GroupType groupType) + GroupType groupType, + uint32_t rows, uint32_t cols) : LayerDescriptor(id, Layer::getInternalPath(layerType), - kLayerTypeMapString.at(layerType), layerType, 0, 0) + kLayerTypeMapString.at(layerType), layerType, rows, cols, 0, 0) , m_groupType(groupType) , m_elementSize(Layer::getElementSize(Layer::getDataType(layerType))) { @@ -45,8 +46,10 @@ InterleavedLegacyLayerDescriptor::InterleavedLegacyLayerDescriptor( InterleavedLegacyLayerDescriptor::InterleavedLegacyLayerDescriptor( const Dataset& dataset, LayerType layerType, - GroupType groupType) + GroupType groupType, + uint32_t rows, uint32_t cols) : LayerDescriptor(dataset, layerType, + rows, cols, groupType == NODE ? NODE_GROUP_PATH : groupType == ELEVATION @@ -76,9 +79,11 @@ std::shared_ptr InterleavedLegacyLayerDescript LayerType layerType, GroupType groupType) { + uint32_t rows, cols; + std::tie(rows, cols) = dataset.getDescriptor().getDims(); return std::shared_ptr( new InterleavedLegacyLayerDescriptor{dataset.getNextId(), layerType, - groupType}); + groupType, rows, cols}); } //! Open an interleaved layer descriptor. 
@@ -99,8 +104,10 @@ std::shared_ptr InterleavedLegacyLayerDescript LayerType layerType, GroupType groupType) { + uint32_t rows, cols; + std::tie(rows, cols) = dataset.getDescriptor().getDims(); return std::shared_ptr( - new InterleavedLegacyLayerDescriptor{dataset, layerType, groupType}); + new InterleavedLegacyLayerDescriptor{dataset, layerType, groupType, rows, cols}); } diff --git a/api/bag_interleavedlegacylayerdescriptor.h b/api/bag_interleavedlegacylayerdescriptor.h index 6e5fc9d642..1d5beeeb9f 100644 --- a/api/bag_interleavedlegacylayerdescriptor.h +++ b/api/bag_interleavedlegacylayerdescriptor.h @@ -43,9 +43,9 @@ class BAG_API InterleavedLegacyLayerDescriptor final : public LayerDescriptor protected: InterleavedLegacyLayerDescriptor(uint32_t id, LayerType layerType, - GroupType groupType); + GroupType groupType, uint32_t rows, uint32_t cols); InterleavedLegacyLayerDescriptor(const Dataset& dataset, LayerType layerType, - GroupType groupType); + GroupType groupType, uint32_t rows, uint32_t cols); private: static void validateTypes(LayerType layerType, GroupType groupType); diff --git a/api/bag_layer.cpp b/api/bag_layer.cpp index dbec860765..454ef7d78d 100644 --- a/api/bag_layer.cpp +++ b/api/bag_layer.cpp @@ -208,9 +208,8 @@ UInt8Array Layer::read( if (m_pBagDataset.expired()) throw DatasetNotFound{}; - const auto pDataset = m_pBagDataset.lock(); uint32_t numRows = 0, numColumns = 0; - std::tie(numRows, numColumns) = pDataset->getDescriptor().getDims(); + std::tie(numRows, numColumns) = m_pLayerDescriptor->getDims(); if (columnEnd >= numColumns || rowEnd >= numRows) throw InvalidReadSize{}; diff --git a/api/bag_layerdescriptor.cpp b/api/bag_layerdescriptor.cpp index 931d69e7b7..531a531ced 100644 --- a/api/bag_layerdescriptor.cpp +++ b/api/bag_layerdescriptor.cpp @@ -30,6 +30,7 @@ LayerDescriptor::LayerDescriptor( std::string internalPath, std::string name, LayerType type, + uint64_t rows, uint64_t cols, uint64_t chunkSize, int compressionLevel) : m_id(id) @@ 
-39,6 +40,7 @@ LayerDescriptor::LayerDescriptor( , m_compressionLevel(compressionLevel) , m_chunkSize(chunkSize) , m_minMax(std::numeric_limits::max(), std::numeric_limits::lowest()) + , m_dims({rows, cols}) { } @@ -56,11 +58,13 @@ LayerDescriptor::LayerDescriptor( LayerDescriptor::LayerDescriptor( const Dataset& dataset, LayerType type, + uint64_t rows, uint64_t cols, std::string internalPath, std::string name) : m_id(dataset.getNextId()) , m_layerType(type) , m_minMax(std::numeric_limits::max(), std::numeric_limits::lowest()) + , m_dims({rows, cols}) { m_internalPath = internalPath.empty() ? Layer::getInternalPath(type) @@ -169,6 +173,21 @@ const std::string& LayerDescriptor::getName() const & noexcept return m_name; } +//! Retrieve the dimensions (shape) of the layer. +//! Return dimensions as uint32_t rather than the underlying uint64_t to maintain compatibility with the rest of the +//! BAG API, which assumes rows and cols are uint32_t. +/*! +\return + The row and column spacing/resolution of the grid +*/ +std::tuple LayerDescriptor::getDims() const & noexcept +{ + return std::tuple{ + static_cast(std::get<0>(m_dims)), + static_cast(std::get<1>(m_dims)) + }; +} + //! Get the size of a buffer for reading a specified number rows and columns. /*! \param rows @@ -180,8 +199,8 @@ const std::string& LayerDescriptor::getName() const & noexcept A buffer that can hold rows x columns of values of this layer. */ size_t LayerDescriptor::getReadBufferSize( - uint32_t rows, - uint32_t columns) const noexcept + uint64_t rows, + uint64_t columns) const noexcept { return rows * columns * this->getElementSize(); } @@ -204,6 +223,12 @@ LayerDescriptor& LayerDescriptor::setMinMax( return *this; } +LayerDescriptor& LayerDescriptor::setDims(uint64_t rows, uint64_t cols) & noexcept +{ + m_dims = {rows, cols}; + return *this; +} + //! Set the HDF5 path of the layer. /*! 
\param inPath diff --git a/api/bag_layerdescriptor.h b/api/bag_layerdescriptor.h index 98ea835a8d..937caf55a8 100644 --- a/api/bag_layerdescriptor.h +++ b/api/bag_layerdescriptor.h @@ -53,17 +53,21 @@ class BAG_API LayerDescriptor : public std::enable_shared_from_this getMinMax() const noexcept; const std::string& getName() const & noexcept; + std::tuple getDims() const & noexcept; LayerDescriptor& setName(std::string inName) & noexcept; LayerDescriptor& setMinMax(float min, float max) & noexcept; + LayerDescriptor& setDims(uint64_t rows, uint64_t cols) & noexcept; protected: LayerDescriptor(uint32_t id, std::string internalPath, std::string name, - LayerType type, uint64_t chunkSize, int compressionLevel); + LayerType type, uint64_t rows, uint64_t cols, uint64_t chunkSize, + int compressionLevel); LayerDescriptor(const Dataset& dataset, LayerType type, + uint64_t rows, uint64_t cols, std::string internalPath = {}, std::string name = {}); - size_t getReadBufferSize(uint32_t rows, uint32_t columns) const noexcept; + size_t getReadBufferSize(uint64_t rows, uint64_t columns) const noexcept; LayerDescriptor& setInternalPath(std::string inPath) & noexcept; @@ -85,6 +89,9 @@ class BAG_API LayerDescriptor : public std::enable_shared_from_this m_minMax{}; + //! The dimensions of the layer. These are uint64_t (rather than uint32_t like elsewhere in the API) because + //! dimensions are initialized from HDF5 hsize_t values, which are uint64_t. 
+ std::tuple m_dims{}; friend GeorefMetadataLayer; friend InterleavedLegacyLayer; diff --git a/api/bag_metadata_export.cpp b/api/bag_metadata_export.cpp index 45fc3ec2f4..50087c87e9 100644 --- a/api/bag_metadata_export.cpp +++ b/api/bag_metadata_export.cpp @@ -993,7 +993,7 @@ bool addSpatialRepresentation(xmlNode &parentNode, const BagSpatialRepresentatio xmlSetProp(pPointNode, XMLCast("gml:id"), XMLCast("id1")); char pointsString[88]; - sprintf(pointsString, "%.12lf,%.12lf %.12lf,%.12lf", spatialRepresentationInfo.llCornerX, spatialRepresentationInfo.llCornerY, spatialRepresentationInfo.urCornerX, spatialRepresentationInfo.urCornerY); + snprintf(pointsString, 88, "%.12lf,%.12lf %.12lf,%.12lf", spatialRepresentationInfo.llCornerX, spatialRepresentationInfo.llCornerY, spatialRepresentationInfo.urCornerX, spatialRepresentationInfo.urCornerY); xmlNode *pCoordNode = xmlNewChild(pPointNode, pGmlNamespace, XMLCast("coordinates"), EncodedString(*parentNode.doc, pointsString)); xmlSetProp(pCoordNode, XMLCast("decimal"), XMLCast(".")); diff --git a/api/bag_metadata_import.cpp b/api/bag_metadata_import.cpp index ec05792435..bd16c012a2 100644 --- a/api/bag_metadata_import.cpp +++ b/api/bag_metadata_import.cpp @@ -1249,7 +1249,7 @@ bool decodeReferenceSystemInfoFromSpatial( { char buffer[2048]; - sprintf(buffer, "%d", epsg); + snprintf(buffer, 2048, "%d", epsg); referenceSystemInfo->definition = copyString(buffer); referenceSystemInfo->type = copyString("EPSG"); diff --git a/api/bag_simplelayer.cpp b/api/bag_simplelayer.cpp index 56b059d6ce..b5d6d5846b 100644 --- a/api/bag_simplelayer.cpp +++ b/api/bag_simplelayer.cpp @@ -46,11 +46,11 @@ SimpleLayer::SimpleLayer( std::shared_ptr SimpleLayer::create( Dataset& dataset, LayerType type, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) { - auto descriptor = SimpleLayerDescriptor::create(dataset, type, chunkSize, - compressionLevel); + auto descriptor = SimpleLayerDescriptor::create(dataset, type, rows, 
cols, chunkSize, compressionLevel); auto h5dataSet = SimpleLayer::createH5dataSet(dataset, *descriptor); return std::make_shared(dataset, *descriptor, std::move(h5dataSet)); @@ -74,6 +74,12 @@ std::shared_ptr SimpleLayer::open( auto h5dataSet = std::unique_ptr<::H5::DataSet, DeleteH5dataSet>( new ::H5::DataSet{h5file.openDataSet(descriptor.getInternalPath())}, DeleteH5dataSet{}); + + // Configure the layer dimensions in the descriptor (we implicitly expect the layer + // to be two-dimensional) + hsize_t dims[2]; + h5dataSet->getSpace().getSimpleExtentDims(dims); + descriptor.setDims(dims[0], dims[1]); // Read the min/max attribute values. const auto possibleMinMax = dataset.getMinMax(descriptor.getLayerType()); @@ -101,7 +107,7 @@ SimpleLayer::createH5dataSet( const Dataset& dataset, const SimpleLayerDescriptor& descriptor) { uint32_t dim0 = 0, dim1 = 0; - std::tie(dim0, dim1) = dataset.getDescriptor().getDims(); + std::tie(dim0, dim1) = descriptor.getDims(); const std::array fileDims{dim0, dim1}; ::H5::DataSpace h5dataSpace{kRank, fileDims.data(), fileDims.data()}; diff --git a/api/bag_simplelayer.h b/api/bag_simplelayer.h index 6925f4eeba..4d280c0420 100644 --- a/api/bag_simplelayer.h +++ b/api/bag_simplelayer.h @@ -46,7 +46,7 @@ class BAG_API SimpleLayer final : public Layer protected: static std::shared_ptr create(Dataset& dataset, - LayerType type, uint64_t chunkSize, int compressionLevel); + LayerType type, uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel); static std::shared_ptr open(Dataset& dataset, SimpleLayerDescriptor& descriptor); diff --git a/api/bag_simplelayerdescriptor.cpp b/api/bag_simplelayerdescriptor.cpp index ff266f1d26..2e36968490 100644 --- a/api/bag_simplelayerdescriptor.cpp +++ b/api/bag_simplelayerdescriptor.cpp @@ -19,10 +19,11 @@ namespace BAG { SimpleLayerDescriptor::SimpleLayerDescriptor( uint32_t id, LayerType type, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) : LayerDescriptor(id, 
Layer::getInternalPath(type), - kLayerTypeMapString.at(type), type, chunkSize, compressionLevel) + kLayerTypeMapString.at(type), type, rows, cols, chunkSize, compressionLevel) , m_elementSize(Layer::getElementSize(Layer::getDataType(type))) { } @@ -36,8 +37,9 @@ SimpleLayerDescriptor::SimpleLayerDescriptor( */ SimpleLayerDescriptor::SimpleLayerDescriptor( const Dataset& dataset, - LayerType type) - : LayerDescriptor(dataset, type) + LayerType type, + uint32_t rows, uint32_t cols) + : LayerDescriptor(dataset, type, rows, cols) , m_elementSize(Layer::getElementSize(Layer::getDataType(type))) { } @@ -59,11 +61,12 @@ SimpleLayerDescriptor::SimpleLayerDescriptor( std::shared_ptr SimpleLayerDescriptor::create( const Dataset& dataset, LayerType type, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) { return std::shared_ptr( - new SimpleLayerDescriptor{dataset.getNextId(), type, chunkSize, + new SimpleLayerDescriptor{dataset.getNextId(), type, rows, cols, chunkSize, compressionLevel}); } @@ -79,10 +82,10 @@ std::shared_ptr SimpleLayerDescriptor::create( */ std::shared_ptr SimpleLayerDescriptor::open( const Dataset& dataset, - LayerType type) + LayerType type, uint32_t rows, uint32_t cols) { return std::shared_ptr( - new SimpleLayerDescriptor{dataset, type}); + new SimpleLayerDescriptor{dataset, type, rows, cols}); } diff --git a/api/bag_simplelayerdescriptor.h b/api/bag_simplelayerdescriptor.h index b784f398fd..ca48599777 100644 --- a/api/bag_simplelayerdescriptor.h +++ b/api/bag_simplelayerdescriptor.h @@ -16,10 +16,11 @@ class BAG_API SimpleLayerDescriptor final : public LayerDescriptor { public: static std::shared_ptr create(const Dataset& dataset, - LayerType type, uint64_t chunkSize, int compressionLevel); + LayerType type, uint32_t rows, uint32_t cols, + uint64_t chunkSize, int compressionLevel); static std::shared_ptr open(const Dataset& dataset, - LayerType type); + LayerType type, uint32_t rows, uint32_t cols); SimpleLayerDescriptor(const 
SimpleLayerDescriptor&) = delete; SimpleLayerDescriptor(SimpleLayerDescriptor&&) = delete; @@ -36,9 +37,11 @@ class BAG_API SimpleLayerDescriptor final : public LayerDescriptor } protected: - SimpleLayerDescriptor(uint32_t id, LayerType type, uint64_t chunkSize, + SimpleLayerDescriptor(uint32_t id, LayerType type, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel); - SimpleLayerDescriptor(const Dataset& dataset, LayerType type); + SimpleLayerDescriptor(const Dataset& dataset, LayerType type, + uint32_t rows, uint32_t cols); private: DataType getDataTypeProxy() const noexcept override; diff --git a/api/bag_surfacecorrections.cpp b/api/bag_surfacecorrections.cpp index 7b6f4c912e..10be24b4ac 100644 --- a/api/bag_surfacecorrections.cpp +++ b/api/bag_surfacecorrections.cpp @@ -358,7 +358,7 @@ UInt8Array SurfaceCorrections::readCorrectedRow( --corrector; // This is 0 based when used. auto originalRow = layer.read(row, row, columnStart, columnEnd); - auto* data = reinterpret_cast(originalRow.data()); + auto data = reinterpret_cast(originalRow.data()); // Obtain cell resolution and SW origin (0,1,1,0). 
double swCornerX = 0., swCornerY = 0.; diff --git a/api/bag_surfacecorrectionsdescriptor.cpp b/api/bag_surfacecorrectionsdescriptor.cpp index 6e1057cb22..2039d299dd 100644 --- a/api/bag_surfacecorrectionsdescriptor.cpp +++ b/api/bag_surfacecorrectionsdescriptor.cpp @@ -54,7 +54,7 @@ SurfaceCorrectionsDescriptor::SurfaceCorrectionsDescriptor( int compressionLevel) : LayerDescriptor(id, Layer::getInternalPath(Surface_Correction), kLayerTypeMapString.at(Surface_Correction), Surface_Correction, - chunkSize, compressionLevel) + 0, 0, chunkSize, compressionLevel) // Dims default to 0,0 like derived type , m_surfaceType(type) , m_elementSize(BAG::getElementSize(type)) , m_numCorrectors(numCorrectors) @@ -68,7 +68,7 @@ SurfaceCorrectionsDescriptor::SurfaceCorrectionsDescriptor( */ SurfaceCorrectionsDescriptor::SurfaceCorrectionsDescriptor( const Dataset& dataset) - : LayerDescriptor(dataset, Surface_Correction) + : LayerDescriptor(dataset, Surface_Correction, 0, 0) // Dims set in body { const auto h5dataSet = dataset.getH5file().openDataSet( Layer::getInternalPath(Surface_Correction)); @@ -279,6 +279,7 @@ SurfaceCorrectionsDescriptor& SurfaceCorrectionsDescriptor::setDims( { m_numRows = numRows; m_numColumns = numColumns; + LayerDescriptor::setDims(numRows, numColumns); return *this; } diff --git a/api/bag_vrmetadata.cpp b/api/bag_vrmetadata.cpp index 3c77c222fb..73dda33d3e 100644 --- a/api/bag_vrmetadata.cpp +++ b/api/bag_vrmetadata.cpp @@ -373,6 +373,12 @@ void VRMetadata::writeProxy( auto pDataset = this->getDataset().lock(); pDataset->getDescriptor().setDims(static_cast(newDims[0]), static_cast(newDims[1])); + // The file descriptor is global (and the size of the mandatory layers) and specified + // in the metadata; each layer has its own size, however, which we need to update. 
In + // this case, the VRMetadataDescriptor has the same dimensions as the mandatory layer + // (since there should be a refinement for each fixed-resolution cell), so it's formally + // redundant. But we want to make sure that it's consistent, so ... + pDescriptor->setDims(fileDims[0], fileDims[1]); } fileDataSpace.selectHyperslab(H5S_SELECT_SET, count.data(), offset.data()); diff --git a/api/bag_vrmetadatadescriptor.cpp b/api/bag_vrmetadatadescriptor.cpp index 20aca27cec..4c64885f7c 100644 --- a/api/bag_vrmetadatadescriptor.cpp +++ b/api/bag_vrmetadatadescriptor.cpp @@ -17,11 +17,12 @@ namespace BAG { */ VRMetadataDescriptor::VRMetadataDescriptor( uint32_t id, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) : LayerDescriptor(id, VR_METADATA_PATH, - kLayerTypeMapString.at(VarRes_Metadata), VarRes_Metadata, chunkSize, - compressionLevel) + kLayerTypeMapString.at(VarRes_Metadata), VarRes_Metadata, + rows, cols, chunkSize, compressionLevel) { } @@ -31,8 +32,9 @@ VRMetadataDescriptor::VRMetadataDescriptor( The BAG Dataset this layer belongs to. */ VRMetadataDescriptor::VRMetadataDescriptor( - const Dataset& dataset) - : LayerDescriptor(dataset, VarRes_Metadata, VR_METADATA_PATH) + const Dataset& dataset, + uint32_t rows, uint32_t cols) + : LayerDescriptor(dataset, VarRes_Metadata, rows, cols, VR_METADATA_PATH) { } @@ -53,9 +55,15 @@ std::shared_ptr VRMetadataDescriptor::create( uint64_t chunkSize, int compressionLevel) { + // The VRMetadataLayer has the same dimensions as the overall BAG file + // (since there should be one element for each cell in the mandatory + // layers). Reading this from the dataset layer descriptor enforces this + // and keeps the call signature simpler. 
+ uint32_t rows, cols; + std::tie(rows, cols) = dataset.getDescriptor().getDims(); return std::shared_ptr( - new VRMetadataDescriptor{dataset.getNextId(), chunkSize, - compressionLevel}); + new VRMetadataDescriptor{dataset.getNextId(), rows, cols, + chunkSize, compressionLevel}); } //! Open an existing variable resolution metadata descriptor. @@ -69,8 +77,14 @@ std::shared_ptr VRMetadataDescriptor::create( std::shared_ptr VRMetadataDescriptor::open( const Dataset& dataset) { + // The VRMetadataLayer has the same dimensions as the overall BAG file + // (since there should be one element for each cell in the mandatory + // layers). Reading this from the dataset layer descriptor enforces this + // and keeps the call signature simpler. + uint32_t rows, cols; + std::tie(rows, cols) = dataset.getDescriptor().getDims(); return std::shared_ptr( - new VRMetadataDescriptor{dataset}); + new VRMetadataDescriptor{dataset, rows, cols}); } diff --git a/api/bag_vrmetadatadescriptor.h b/api/bag_vrmetadatadescriptor.h index 98718c0065..e661993b25 100644 --- a/api/bag_vrmetadatadescriptor.h +++ b/api/bag_vrmetadatadescriptor.h @@ -44,9 +44,9 @@ class BAG_API VRMetadataDescriptor final : public LayerDescriptor VRMetadataDescriptor& setMinResolution(float minResX, float minResY) & noexcept; protected: - VRMetadataDescriptor(uint32_t id, uint64_t chunkSize, + VRMetadataDescriptor(uint32_t id, uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel); - explicit VRMetadataDescriptor(const Dataset& dataset); + explicit VRMetadataDescriptor(const Dataset& dataset, uint32_t rows, uint32_t cols); static std::shared_ptr create(const Dataset& dataset, uint64_t chunkSize, int compressionLevel); diff --git a/api/bag_vrnode.cpp b/api/bag_vrnode.cpp index dc9ba27a8c..3caabc1aea 100644 --- a/api/bag_vrnode.cpp +++ b/api/bag_vrnode.cpp @@ -166,11 +166,20 @@ std::shared_ptr VRNode::open( descriptor.setMinMaxNSamples(minNSamples, maxNSamples); auto h5dataSet = 
std::unique_ptr<::H5::DataSet, DeleteH5dataSet>( - new ::H5::DataSet{h5file.openDataSet(VR_REFINEMENT_PATH)}, + new ::H5::DataSet{h5file.openDataSet(VR_NODE_PATH)}, DeleteH5dataSet{}); - return std::make_unique(dataset, - descriptor, std::move(h5dataSet)); + // We need to know the dimensions of the array on file so that we can update the + // descriptor for the layer. + hsize_t dims[2]; + int ndims = h5dataSet->getSpace().getSimpleExtentDims(dims, nullptr); + if (ndims != 2) { + // Should be 1D according to BAG spec, but some implementations use a 2D array, + // so for compatibility's sake, use 2D. + throw InvalidVRRefinementDimensions{}; + } + descriptor.setDims(dims[0], dims[1]); + return std::make_unique(dataset, descriptor, std::move(h5dataSet)); } @@ -189,9 +198,9 @@ VRNode::createH5dataSet( const Dataset& dataset, const VRNodeDescriptor& descriptor) { - const hsize_t fileLength = 0; - const hsize_t kMaxFileLength = H5S_UNLIMITED; - const ::H5::DataSpace h5fileDataSpace{1, &fileLength, &kMaxFileLength}; + std::array fileDims{0, 0}; + const std::array kMaxFileDims{H5S_UNLIMITED, H5S_UNLIMITED}; + const ::H5::DataSpace h5fileDataSpace{kRank, fileDims.data(), kMaxFileDims.data()}; // Create the creation property list. 
const ::H5::DSetCreatPropList h5createPropList{}; @@ -201,7 +210,8 @@ VRNode::createH5dataSet( const auto compressionLevel = descriptor.getCompressionLevel(); if (chunkSize > 0) { - h5createPropList.setChunk(1, &chunkSize); + const std::array chunkDims{chunkSize, chunkSize}; + h5createPropList.setChunk(kRank, chunkDims.data()); if (compressionLevel > 0 && compressionLevel <= kMaxCompressionLevel) h5createPropList.setDeflate(compressionLevel); @@ -263,18 +273,20 @@ UInt8Array VRNode::readProxy( const hsize_t columns = (columnEnd - columnStart) + 1; const hsize_t offset = columnStart; - const auto fileDataSpace = m_pH5dataSet->getSpace(); - fileDataSpace.selectHyperslab(H5S_SELECT_SET, &columns, &offset); + const std::array sizes{1, columns}; + const std::array offsets{0, offset}; + + const auto h5fileDataSpace = m_pH5dataSet->getSpace(); + h5fileDataSpace.selectHyperslab(H5S_SELECT_SET, sizes.data(), offsets.data()); - const auto bufferSize = pDescriptor->getReadBufferSize(1, - static_cast(columns)); + const auto bufferSize = pDescriptor->getReadBufferSize(1, columns); UInt8Array buffer{bufferSize}; - const ::H5::DataSpace memDataSpace{1, &columns, &columns}; + const ::H5::DataSpace memDataSpace{kRank, sizes.data(), sizes.data()}; const auto memDataType = makeDataType(); - m_pH5dataSet->read(buffer.data(), memDataType, memDataSpace, fileDataSpace); + m_pH5dataSet->read(buffer.data(), memDataType, memDataSpace, h5fileDataSpace); return buffer; } @@ -314,11 +326,10 @@ void VRNode::writeAttributesProxy() const } //! \copydoc Layer::write -//! Ignore rows since the data is 1 dimensional. 
void VRNode::writeProxy( - uint32_t /*rowStart*/, + uint32_t rowStart, uint32_t columnStart, - uint32_t /*rowEnd*/, + uint32_t rowEnd, uint32_t columnEnd, const uint8_t* buffer) { @@ -327,25 +338,31 @@ void VRNode::writeProxy( if (!pDescriptor) throw InvalidLayerDescriptor{}; - const hsize_t columns = (columnEnd - columnStart) + 1; - const hsize_t offset = columnStart; - const ::H5::DataSpace memDataSpace{1, &columns, &columns}; + const auto rows = (rowEnd - rowStart) + 1; + const auto columns = (columnEnd - columnStart) + 1; + const std::array count{rows, columns}; + const std::array offset{rowStart, columnStart}; + const ::H5::DataSpace memDataSpace{kRank, count.data(), count.data()}; + + ::H5::DataSpace fileDataSpace = m_pH5dataSet->getSpace(); // Expand the file data space if needed. - std::array fileLength{}; - std::array maxFileLength{}; + std::array fileDims{}; + std::array maxFileDims{}; - ::H5::DataSpace fileDataSpace = m_pH5dataSet->getSpace(); - const int numDims = fileDataSpace.getSimpleExtentDims(fileLength.data(), - maxFileLength.data()); - if (numDims != 1) + const int numDims = fileDataSpace.getSimpleExtentDims(fileDims.data(), + maxFileDims.data()); + if (numDims != kRank) { throw InvalidVRRefinementDimensions{}; + } - if (fileLength[0] < (columnEnd + 1)) + if ((fileDims[0] < (rowEnd + 1)) || + (fileDims[1] < (columnEnd + 1))) { - const auto newMaxLength = std::max(fileLength[0], columnEnd + 1); - - m_pH5dataSet->extend(&newMaxLength); + const std::array newDims{ + std::max(fileDims[0], rowEnd + 1), + std::max(fileDims[1], columnEnd + 1)}; + m_pH5dataSet->extend(newDims.data()); fileDataSpace = m_pH5dataSet->getSpace(); @@ -353,11 +370,18 @@ void VRNode::writeProxy( if (this->getDataset().expired()) throw DatasetNotFound{}; - auto pDataset = this->getDataset().lock(); - pDataset->getDescriptor().setDims(1, static_cast(newMaxLength)); +// auto pDataset = this->getDataset().lock(); + // TODO: Confirm that this is what we want --- this resets the 
dimensions of the + // overall BAG, rather than the layer, which means that it's going to set the + // metadata size of the mandatory layers to 1xN ... which is odd. +// pDataset->getDescriptor().setDims(1, static_cast(newMaxLength)); + // So that the read() call checks correctly against the size of the array, rather + // than the dimensions of the mandatory layer, we need to keep track of the size + // of the layer in the layer-specific descriptor. + pDescriptor->setDims(newDims[0], newDims[1]); } - fileDataSpace.selectHyperslab(H5S_SELECT_SET, &columns, &offset); + fileDataSpace.selectHyperslab(H5S_SELECT_SET, count.data(), offset.data()); const auto memDataType = makeDataType(); diff --git a/api/bag_vrnodedescriptor.cpp b/api/bag_vrnodedescriptor.cpp index aecb0f22ff..1f86d1ec97 100644 --- a/api/bag_vrnodedescriptor.cpp +++ b/api/bag_vrnodedescriptor.cpp @@ -17,11 +17,13 @@ namespace BAG { */ VRNodeDescriptor::VRNodeDescriptor( uint32_t id, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) : LayerDescriptor(id, VR_NODE_PATH, - kLayerTypeMapString.at(VarRes_Node), VarRes_Node, chunkSize, - compressionLevel) + kLayerTypeMapString.at(VarRes_Node), VarRes_Node, + rows, cols, + chunkSize, compressionLevel) { } @@ -31,8 +33,9 @@ VRNodeDescriptor::VRNodeDescriptor( The BAG Dataset this layer belongs to. */ VRNodeDescriptor::VRNodeDescriptor( - const Dataset& dataset) - : LayerDescriptor(dataset, VarRes_Node, VR_NODE_PATH) + const Dataset& dataset, + uint32_t rows, uint32_t cols) + : LayerDescriptor(dataset, VarRes_Node, rows, cols, VR_NODE_PATH) { } @@ -55,7 +58,7 @@ std::shared_ptr VRNodeDescriptor::create( int compressionLevel) { return std::shared_ptr( - new VRNodeDescriptor{dataset.getNextId(), chunkSize, + new VRNodeDescriptor{dataset.getNextId(), 0, 0, chunkSize, compressionLevel}); } @@ -65,13 +68,12 @@ std::shared_ptr VRNodeDescriptor::create( The BAG Dataset this layer belongs to. 
*/ std::shared_ptr VRNodeDescriptor::open( - const Dataset& dataset) + const Dataset& dataset, uint32_t rows, uint32_t cols) { return std::shared_ptr( - new VRNodeDescriptor{dataset}); + new VRNodeDescriptor{dataset, rows, cols}); } - //! \copydoc LayerDescriptor::getDataType DataType VRNodeDescriptor::getDataTypeProxy() const noexcept { diff --git a/api/bag_vrnodedescriptor.h b/api/bag_vrnodedescriptor.h index cd8fd526c9..8de0fe7ed5 100644 --- a/api/bag_vrnodedescriptor.h +++ b/api/bag_vrnodedescriptor.h @@ -43,14 +43,15 @@ class BAG_API VRNodeDescriptor final : public LayerDescriptor uint32_t maxNumHypotheses) & noexcept; protected: - VRNodeDescriptor(uint32_t id, uint64_t chunkSize, + VRNodeDescriptor(uint32_t id, uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel); - explicit VRNodeDescriptor(const Dataset& dataset); + explicit VRNodeDescriptor(const Dataset& dataset, uint32_t rows, uint32_t cols); static std::shared_ptr create(const Dataset& dataset, uint64_t chunkSize, int compressionLevel); - static std::shared_ptr open(const Dataset& dataset); + static std::shared_ptr open(const Dataset& dataset, + uint32_t rows, uint32_t cols); private: DataType getDataTypeProxy() const noexcept override; diff --git a/api/bag_vrrefinements.cpp b/api/bag_vrrefinements.cpp index 291412208d..2897b0d421 100644 --- a/api/bag_vrrefinements.cpp +++ b/api/bag_vrrefinements.cpp @@ -4,11 +4,11 @@ #include "bag_vrrefinements.h" #include "bag_vrrefinementsdescriptor.h" +#include #include #include //memset #include - namespace BAG { namespace { @@ -59,10 +59,10 @@ T readAttribute( The layer's descriptor. Will never be nullptr. */ - std::shared_ptr VRRefinements::getDescriptor() & noexcept - { - return std::dynamic_pointer_cast(Layer::getDescriptor()); - } +std::shared_ptr VRRefinements::getDescriptor() & noexcept +{ + return std::dynamic_pointer_cast(Layer::getDescriptor()); +} //! Retrieve the layer's descriptor. Note: this shadows BAG::Layer.getDescriptor() /*! 
@@ -70,9 +70,10 @@ T readAttribute( The layer's descriptor. Will never be nullptr. */ - std::shared_ptr VRRefinements::getDescriptor() const & noexcept { - return std::dynamic_pointer_cast(Layer::getDescriptor()); - } +std::shared_ptr VRRefinements::getDescriptor() const & noexcept +{ + return std::dynamic_pointer_cast(Layer::getDescriptor()); +} //! Constructor. /*! @@ -151,6 +152,16 @@ std::unique_ptr VRRefinements::open( new ::H5::DataSet{h5file.openDataSet(VR_REFINEMENT_PATH)}, DeleteH5dataSet{}); + // We need to know the dimensions of the array on file so that we can update the + // descriptor for the layer. + hsize_t dims[2]; + int ndims = h5dataSet->getSpace().getSimpleExtentDims(dims, nullptr); + if (ndims != 2) { + // Should be 1D according to BAG spec, but some implementations use a 2D array, + // so for compatibility's sake, use 2D. + throw InvalidVRRefinementDimensions{}; + } + descriptor.setDims(dims[0], dims[1]); return std::unique_ptr(new VRRefinements{dataset, descriptor, std::move(h5dataSet)}); } @@ -171,9 +182,9 @@ VRRefinements::createH5dataSet( const Dataset& dataset, const VRRefinementsDescriptor& descriptor) { - constexpr hsize_t fileLength = 0; - constexpr hsize_t kMaxFileLength = H5S_UNLIMITED; - const ::H5::DataSpace h5fileDataSpace{1, &fileLength, &kMaxFileLength}; + std::array fileDims{0, 0}; + const std::array kMaxFileDims{H5S_UNLIMITED, H5S_UNLIMITED}; + const ::H5::DataSpace h5fileDataSpace{kRank, fileDims.data(), kMaxFileDims.data()}; // Create the creation property list. 
const ::H5::DSetCreatPropList h5createPropList{}; @@ -183,7 +194,8 @@ VRRefinements::createH5dataSet( const auto compressionLevel = descriptor.getCompressionLevel(); if (chunkSize > 0) { - h5createPropList.setChunk(1, &chunkSize); + const std::array chunkDims{chunkSize, chunkSize}; + h5createPropList.setChunk(kRank, chunkDims.data()); if (compressionLevel > 0 && compressionLevel <= kMaxCompressionLevel) h5createPropList.setDeflate(compressionLevel); @@ -237,18 +249,20 @@ UInt8Array VRRefinements::readProxy( const hsize_t columns = (columnEnd - columnStart) + 1; const hsize_t offset = columnStart; - const auto fileDataSpace = m_pH5dataSet->getSpace(); - fileDataSpace.selectHyperslab(H5S_SELECT_SET, &columns, &offset); + const std::array sizes{1, columns}; + const std::array offsets{0, offset}; - const auto bufferSize = pDescriptor->getReadBufferSize(1, - static_cast(columns)); - UInt8Array buffer{bufferSize}; + const auto h5fileDataSpace = m_pH5dataSet->getSpace(); + h5fileDataSpace.selectHyperslab(H5S_SELECT_SET, sizes.data(), offsets.data()); - const ::H5::DataSpace memDataSpace{1, &columns, &columns}; + const auto bufferSize = pDescriptor->getReadBufferSize(1, columns); + UInt8Array buffer{bufferSize}; + + const ::H5::DataSpace memDataSpace{kRank, sizes.data(), sizes.data()}; const auto memDataType = makeDataType(); - m_pH5dataSet->read(buffer.data(), memDataType, memDataSpace, fileDataSpace); + m_pH5dataSet->read(buffer.data(), memDataType, memDataSpace, h5fileDataSpace); return buffer; } @@ -281,11 +295,10 @@ void VRRefinements::writeAttributesProxy() const } //! \copydoc Layer::write -//! Ignore rows since the data is 1 dimensional. 
void VRRefinements::writeProxy( - uint32_t /*rowStart*/, + uint32_t rowStart, uint32_t columnStart, - uint32_t /*rowEnd*/, + uint32_t rowEnd, uint32_t columnEnd, const uint8_t* buffer) { @@ -294,25 +307,31 @@ void VRRefinements::writeProxy( if (!pDescriptor) throw InvalidLayerDescriptor{}; - const hsize_t columns = (columnEnd - columnStart) + 1; - const hsize_t offset = columnStart; - const ::H5::DataSpace memDataSpace{1, &columns, &columns}; + const auto rows = (rowEnd - rowStart) + 1; + const auto columns = (columnEnd - columnStart) + 1; + const std::array count{rows, columns}; + const std::array offset{rowStart, columnStart}; + const ::H5::DataSpace memDataSpace{kRank, count.data(), count.data()}; + + ::H5::DataSpace fileDataSpace = m_pH5dataSet->getSpace(); // Expand the file data space if needed. - std::array fileLength{}; - std::array maxFileLength{}; + std::array fileDims{}; + std::array maxFileDims{}; - ::H5::DataSpace fileDataSpace = m_pH5dataSet->getSpace(); - const int numDims = fileDataSpace.getSimpleExtentDims(fileLength.data(), - maxFileLength.data()); - if (numDims != 1) + const int numDims = fileDataSpace.getSimpleExtentDims(fileDims.data(), + maxFileDims.data()); + if (numDims != kRank) { throw InvalidVRRefinementDimensions{}; + } - if (fileLength[0] < (columnEnd + 1)) + if ((fileDims[0] < (rowEnd + 1)) || + (fileDims[1] < (columnEnd + 1))) { - const auto newMaxLength = std::max(fileLength[0], columnEnd + 1); - - m_pH5dataSet->extend(&newMaxLength); + const std::array newDims{ + std::max(fileDims[0], rowEnd + 1), + std::max(fileDims[1], columnEnd + 1)}; + m_pH5dataSet->extend(newDims.data()); fileDataSpace = m_pH5dataSet->getSpace(); @@ -320,11 +339,18 @@ void VRRefinements::writeProxy( if (this->getDataset().expired()) throw DatasetNotFound{}; - auto pDataset = this->getDataset().lock(); - pDataset->getDescriptor().setDims(1, static_cast(newMaxLength)); +// auto pDataset = this->getDataset().lock(); + // TODO: Confirm that this is what we want --- 
this resets the dimensions of the + // overall BAG, rather than the layer, which means that it's going to set the + // metadata size of the mandatory layers to 1xN ... which is odd. +// pDataset->getDescriptor().setDims(1, static_cast(newMaxLength)); + // So that the read() call checks correctly against the size of the array, rather + // than the dimensions of the mandatory layer, we need to keep track of the size + // of the layer in the layer-specific descriptor. + pDescriptor->setDims(newDims[0], newDims[1]); } - fileDataSpace.selectHyperslab(H5S_SELECT_SET, &columns, &offset); + fileDataSpace.selectHyperslab(H5S_SELECT_SET, count.data(), offset.data()); const auto memDataType = makeDataType(); diff --git a/api/bag_vrrefinementsdescriptor.cpp b/api/bag_vrrefinementsdescriptor.cpp index 8a5630daca..f422a7950e 100644 --- a/api/bag_vrrefinementsdescriptor.cpp +++ b/api/bag_vrrefinementsdescriptor.cpp @@ -17,11 +17,13 @@ namespace BAG { */ VRRefinementsDescriptor::VRRefinementsDescriptor( uint32_t id, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) : LayerDescriptor(id, VR_REFINEMENT_PATH, - kLayerTypeMapString.at(VarRes_Refinement), VarRes_Refinement, chunkSize, - compressionLevel) + kLayerTypeMapString.at(VarRes_Refinement), VarRes_Refinement, + rows, cols, + chunkSize, compressionLevel) { } @@ -31,8 +33,8 @@ VRRefinementsDescriptor::VRRefinementsDescriptor( The BAG Dataset this layer belongs to. 
*/ VRRefinementsDescriptor::VRRefinementsDescriptor( - const Dataset& dataset) - : LayerDescriptor(dataset, VarRes_Refinement, VR_REFINEMENT_PATH) + const Dataset& dataset, uint32_t rows, uint32_t cols) + : LayerDescriptor(dataset, VarRes_Refinement, rows, cols, VR_REFINEMENT_PATH) { } @@ -54,7 +56,7 @@ std::shared_ptr VRRefinementsDescriptor::create( int compressionLevel) { return std::shared_ptr( - new VRRefinementsDescriptor{dataset.getNextId(), chunkSize, + new VRRefinementsDescriptor{dataset.getNextId(), 0, 0, chunkSize, compressionLevel}); } @@ -67,10 +69,10 @@ std::shared_ptr VRRefinementsDescriptor::create( The existing variable resolution refinements descriptor. */ std::shared_ptr VRRefinementsDescriptor::open( - const Dataset& dataset) + const Dataset& dataset, uint32_t rows, uint32_t cols) { return std::shared_ptr( - new VRRefinementsDescriptor{dataset}); + new VRRefinementsDescriptor{dataset, rows, cols}); } diff --git a/api/bag_vrrefinementsdescriptor.h b/api/bag_vrrefinementsdescriptor.h index 9fa283adc9..0c7190aa93 100644 --- a/api/bag_vrrefinementsdescriptor.h +++ b/api/bag_vrrefinementsdescriptor.h @@ -38,14 +38,15 @@ class BAG_API VRRefinementsDescriptor final : public LayerDescriptor float maxUncertainty) & noexcept; protected: - VRRefinementsDescriptor(uint32_t id, uint64_t chunkSize, - int compressionLevel); - explicit VRRefinementsDescriptor(const Dataset& dataset); + VRRefinementsDescriptor(uint32_t id, uint32_t rows, uint32_t cols, + uint64_t chunkSize, int compressionLevel); + explicit VRRefinementsDescriptor(const Dataset& dataset, uint32_t rows, uint32_t cols); static std::shared_ptr create(const Dataset& dataset, uint64_t chunkSize, int compressionLevel); - static std::shared_ptr open(const Dataset& dataset); + static std::shared_ptr open(const Dataset& dataset, + uint32_t rows, uint32_t cols); private: DataType getDataTypeProxy() const noexcept override; diff --git a/api/swig/include/bag_georefmetadatalayerdescriptor.i 
b/api/swig/include/bag_georefmetadatalayerdescriptor.i index 44acbc8617..f1fe8c20c2 100644 --- a/api/swig/include/bag_georefmetadatalayerdescriptor.i +++ b/api/swig/include/bag_georefmetadatalayerdescriptor.i @@ -34,8 +34,8 @@ class GeorefMetadataLayerDescriptor final : public LayerDescriptor public: static std::shared_ptr create(Dataset& dataset, const std::string& name, GeorefMetadataProfile profile, DataType indexType, - RecordDefinition definition, uint64_t chunkSize, - int compressionLevel); + RecordDefinition definition, uint32_t rows, uint32_t cols, + uint64_t chunkSize, int compressionLevel); %rename(openDataset) open(Dataset& dataset, const std::string& name); static std::shared_ptr open(Dataset& dataset, const std::string& name); diff --git a/api/swig/include/bag_layerdescriptor.i b/api/swig/include/bag_layerdescriptor.i index cbe5a813c1..473876f0e6 100644 --- a/api/swig/include/bag_layerdescriptor.i +++ b/api/swig/include/bag_layerdescriptor.i @@ -54,11 +54,13 @@ public: //! Intentionally omit exposing of std::tuple method (unsupported by SWIG), //! so it can be exposed with std::pair below. 
//std::tuple getMinMax() const noexcept; + //const std::tuple& getDims() const & noexcept; const std::string& getName() const & noexcept; LayerDescriptor& setName(std::string inName) & noexcept; LayerDescriptor& setMinMax(float min, float max) & noexcept; + LayerDescriptor& setDims(uint64_t rows, uint64_t cols) & noexcept; }; %extend LayerDescriptor @@ -69,6 +71,13 @@ public: std::tie(min, max) = self->getMinMax(); return std::pair(min, max); } + + std::pair getDims() const noexcept + { + uint64_t rows=0, cols=0; + std::tie(rows, cols) = self->getDims(); + return std::pair(rows, cols); + } } } // namespace BAG diff --git a/api/swig/include/bag_simplelayerdescriptor.i b/api/swig/include/bag_simplelayerdescriptor.i index 84d048852d..e59f086f27 100644 --- a/api/swig/include/bag_simplelayerdescriptor.i +++ b/api/swig/include/bag_simplelayerdescriptor.i @@ -28,11 +28,12 @@ class SimpleLayerDescriptor final : public LayerDescriptor { public: static std::shared_ptr create(const Dataset& dataset, - LayerType type, uint64_t chunkSize, int compressionLevel); + LayerType type, uint32_t rows, uint32_t cols, + uint64_t chunkSize, int compressionLevel); - %rename(openDataset) open(const Dataset&, LayerType); + %rename(openDataset) open(const Dataset&, LayerType, uint32_t rows, uint32_t cols); static std::shared_ptr open( - const Dataset& dataset, LayerType type); + const Dataset& dataset, LayerType type, uint32_t rows, uint32_t cols); SimpleLayerDescriptor(const SimpleLayerDescriptor&) = delete; SimpleLayerDescriptor(SimpleLayerDescriptor&&) = delete; diff --git a/api/swig/python/CMakeLists.txt b/api/swig/python/CMakeLists.txt index a21208aac9..e5d5044b92 100644 --- a/api/swig/python/CMakeLists.txt +++ b/api/swig/python/CMakeLists.txt @@ -31,7 +31,11 @@ endif() set_property(GLOBAL PROPERTY BAGPY_BINARY_DIR_PROP ${CMAKE_CURRENT_BINARY_DIR}) set(BAGPY_MODULE_NAME bagPy.py) -set(CMAKE_SWIG_FLAGS "-Wextra" "-v" "-py3") +if (MSVC) + set(CMAKE_SWIG_FLAGS "-Wextra" "-v") +else() + 
set(CMAKE_SWIG_FLAGS "-Wextra" "-v" "-DSWIGWORDSIZE64") +endif() set_source_files_properties("bagpy.i" PROPERTIES SWIG_MODULE_NAME bagPy diff --git a/api/swig/python/bagpy.i b/api/swig/python/bagpy.i index 49d2fc6e5e..6956ba64a5 100644 --- a/api/swig/python/bagpy.i +++ b/api/swig/python/bagpy.i @@ -35,6 +35,7 @@ namespace std %template(DoublePair) pair; %template(FloatPair) pair; %template(UInt32Pair) pair; + %template(UInt64Pair) pair; %template(Cover) pair, pair >; %template(FloatVector) vector; %template(UInt32Vector) vector; diff --git a/examples/sample-data/Sample_VR_BAG-gzip.bag b/examples/sample-data/Sample_VR_BAG-gzip.bag new file mode 100644 index 0000000000..99430ca884 Binary files /dev/null and b/examples/sample-data/Sample_VR_BAG-gzip.bag differ diff --git a/examples/sample-data/test_vr.bag b/examples/sample-data/test_vr.bag new file mode 100644 index 0000000000..679bf1582c Binary files /dev/null and b/examples/sample-data/test_vr.bag differ diff --git a/python/test_simplelayerdescriptor.py b/python/test_simplelayerdescriptor.py index 2909d1fdcb..ed172bcbf3 100644 --- a/python/test_simplelayerdescriptor.py +++ b/python/test_simplelayerdescriptor.py @@ -12,6 +12,8 @@ datapath = str(pathlib.Path(__file__).parent.absolute()) + "/../examples/sample-data" kExpectedChunkSize = 100 kExpectedCompressionLevel = 6 +kRows = 30 +kCols = 40 class TestSimpleLayerDescriptor(unittest.TestCase): @@ -26,7 +28,7 @@ def testCreation(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) @@ -47,7 +49,7 @@ def testGetSetName(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, 
Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) @@ -68,7 +70,7 @@ def testGetDataType(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) @@ -87,13 +89,13 @@ def testGetLayerType(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getLayerType(), Elevation) - descriptor = SimpleLayerDescriptor.create(dataset, Std_Dev, + descriptor = SimpleLayerDescriptor.create(dataset, Std_Dev, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) @@ -112,7 +114,7 @@ def testGetSetMinMax(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) @@ -137,12 +139,12 @@ def testGetInternalPath(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getInternalPath(), Layer.getInternalPath(Elevation)) - descriptor = SimpleLayerDescriptor.create(dataset, Uncertainty, + descriptor = SimpleLayerDescriptor.create(dataset, Uncertainty, kRows, kCols, 
kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getInternalPath(), Layer.getInternalPath(Uncertainty)) @@ -160,7 +162,7 @@ def testGetElementSize(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getElementSize(), @@ -179,7 +181,7 @@ def testGetChunkSize(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getChunkSize(), kExpectedChunkSize) @@ -197,7 +199,7 @@ def testGetCompressionLevel(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getCompressionLevel(), kExpectedCompressionLevel) diff --git a/python/test_surfacecorrectionsdescriptor.py b/python/test_surfacecorrectionsdescriptor.py index e73d02f07d..e07a3ddf6b 100644 --- a/python/test_surfacecorrectionsdescriptor.py +++ b/python/test_surfacecorrectionsdescriptor.py @@ -12,6 +12,8 @@ datapath = str(pathlib.Path(__file__).parent.absolute()) + "/../examples/sample-data" kExpectedChunkSize = 100 kExpectedCompressionLevel = 6 +kRows = 30 +kCols = 40 class TestSurfaceCorrectionsDescriptor(unittest.TestCase): @@ -27,7 +29,7 @@ def testCreation(self): kExpectedChunkSize, kExpectedCompressionLevel) 
self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) @@ -48,7 +50,7 @@ def testGetSetName(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) @@ -69,7 +71,7 @@ def testGetDataType(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) @@ -88,13 +90,13 @@ def testGetLayerType(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getLayerType(), Elevation) - descriptor = SimpleLayerDescriptor.create(dataset, Std_Dev, + descriptor = SimpleLayerDescriptor.create(dataset, Std_Dev, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) @@ -113,7 +115,7 @@ def testGetSetMinMax(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) @@ -138,12 +140,12 @@ def testGetInternalPath(self): kExpectedChunkSize, 
kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getInternalPath(), Layer.getInternalPath(Elevation)) - descriptor = SimpleLayerDescriptor.create(dataset, Uncertainty, + descriptor = SimpleLayerDescriptor.create(dataset, Uncertainty, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getInternalPath(), Layer.getInternalPath(Uncertainty)) @@ -161,7 +163,7 @@ def testGetElementSize(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getElementSize(), @@ -180,7 +182,7 @@ def testGetChunkSize(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getChunkSize(), kExpectedChunkSize) @@ -198,7 +200,7 @@ def testGetCompressionLevel(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getCompressionLevel(), kExpectedCompressionLevel) diff --git a/python/test_vrmetadata.py b/python/test_vrmetadata.py index 
66544ad714..6443e1aa43 100644 --- a/python/test_vrmetadata.py +++ b/python/test_vrmetadata.py @@ -142,6 +142,13 @@ def testWriteRead(self): buffer = VRMetadataLayerItems((kExpectedItem0,)) vrMetadata.write(kRowStart, kColumnStart, kRowEnd, kColumnEnd, buffer) + # Force a close. + del vrMetadata + del dataset + + # Re-open read-only + dataset = Dataset.openDataset(tmpBagFile.getName(), BAG_OPEN_READONLY) + vrMetadata = dataset.getVRMetadata() # Read the record back. buffer = vrMetadata.read(kRowStart, kColumnStart, kRowEnd, kColumnEnd) diff --git a/scripts/dev-cont-build-bag.sh b/scripts/dev-cont-build-bag.sh index 5dc4778958..504f27b523 100755 --- a/scripts/dev-cont-build-bag.sh +++ b/scripts/dev-cont-build-bag.sh @@ -3,16 +3,24 @@ set -ex # Abort on error. # Note: This script is meant to be run within the development container defined by ../Dockerfile.dev. +rm -rf venv-docker +python3 -m venv venv-docker +source venv-docker/bin/activate +pip install -r requirements.txt +pip install mypy numpy GDAL==3.9.3 + # Configure CMake cmake -G Ninja -DCMAKE_BUILD_TYPE=Debug -B build -S . 
\ - -DCMAKE_INSTALL_PREFIX=/usr -DBAG_BUILD_TESTS:BOOL=ON + -DCMAKE_INSTALL_PREFIX=/usr -DBAG_BUILD_TESTS:BOOL=ON -DBAG_BUILD_EXAMPLES:BOOL=ON # Build cmake --build build -python3 -m pip wheel -w ./wheel/ ./build/api/swig/python +pip wheel -w ./wheel/ ./build/api/swig/python # Install cmake --install build -python3 -m pip install --break-system-packages ./wheel/bagPy-*.whl +pip install --force-reinstall ./wheel/bagPy-*.whl +# Generate PEP484 stub file +stubgen -m bagPy -o ./python # Run tests export BAG_SAMPLES_PATH=/tmp/bag/examples/sample-data ./build/tests/bag_tests_d -python3 -m pytest python/test_*.py +pytest python/test_*.py diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index fd293931c2..7e6d0ce859 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -30,6 +30,7 @@ set(TEST_SOURCE_FILES test_bag_vrrefinements.cpp test_bag_vrrefinementsdescriptor.cpp test_bag_vrtrackinglist.cpp + test_vr_bag.cpp ) source_group("Source Files" FILES ${TEST_SOURCE_FILES}) diff --git a/tests/test_bag_simplelayerdescriptor.cpp b/tests/test_bag_simplelayerdescriptor.cpp index 3c0797d3c3..4723b07c71 100644 --- a/tests/test_bag_simplelayerdescriptor.cpp +++ b/tests/test_bag_simplelayerdescriptor.cpp @@ -9,6 +9,7 @@ #include #include +#include using BAG::Dataset; @@ -315,6 +316,11 @@ const std::string kMetadataXML{R"( )"}; +constexpr const uint32_t kRows = 30; +constexpr const uint32_t kCols = 40; +constexpr uint64_t kExpectedChunkSize = 100; +constexpr unsigned int kExpectedCompressionLevel = 6; + } // namespace // NOTE The base class is also tested here. 
@@ -328,26 +334,30 @@ TEST_CASE("test layer descriptor creation", Metadata metadata; metadata.loadFromBuffer(kMetadataXML); - constexpr uint64_t kExpectedChunkSize = 100; - constexpr unsigned int kExpectedCompressionLevel = 6; - auto pDataset = Dataset::create(tmpBagFile, std::move(metadata), kExpectedChunkSize, kExpectedCompressionLevel); REQUIRE(pDataset); UNSCOPED_INFO("Check that creating a simple layer descriptor returns something."); auto pDescriptor = SimpleLayerDescriptor::create(*pDataset, Elevation, - kExpectedChunkSize, kExpectedCompressionLevel); + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); REQUIRE(pDescriptor); UNSCOPED_INFO("Check that the layer descriptor type matches that was created."); CHECK(pDescriptor->getLayerType() == Elevation); + auto dims = pDescriptor->getDims(); + UNSCOPED_INFO("Check that the layer descriptor dimensions match that which was created."); + CHECK( (std::get<0>(dims) == kRows && std::get<1>(dims) == kCols) ); + UNSCOPED_INFO("Check the chunk size is read properly."); CHECK(pDescriptor->getChunkSize() == kExpectedChunkSize); UNSCOPED_INFO("Check the compression level is read properly."); CHECK(pDescriptor->getCompressionLevel() == kExpectedCompressionLevel); + } // const std::string& getName() const & noexcept; @@ -360,16 +370,15 @@ TEST_CASE("test layer descriptor get/set name", Metadata metadata; metadata.loadFromBuffer(kMetadataXML); - constexpr uint64_t kExpectedChunkSize = 100; - constexpr unsigned int kExpectedCompressionLevel = 6; - auto pDataset = Dataset::create(tmpBagFile, std::move(metadata), kExpectedChunkSize, kExpectedCompressionLevel); REQUIRE(pDataset); UNSCOPED_INFO("Check that creating a simple layer descriptor returns something."); const auto pDescriptor = SimpleLayerDescriptor::create(*pDataset, Elevation, - kExpectedChunkSize, kExpectedCompressionLevel); + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); REQUIRE(pDescriptor); const std::string 
kExpectedName{"Expected Name"}; @@ -389,23 +398,24 @@ TEST_CASE("test layer descriptor get data type", Metadata metadata; metadata.loadFromBuffer(kMetadataXML); - constexpr uint64_t kExpectedChunkSize = 100; - constexpr unsigned int kExpectedCompressionLevel = 6; - auto pDataset = Dataset::create(tmpBagFile, std::move(metadata), kExpectedChunkSize, kExpectedCompressionLevel); REQUIRE(pDataset); UNSCOPED_INFO("Check that creating a simple layer descriptor returns something."); auto pDescriptor = SimpleLayerDescriptor::create(*pDataset, Elevation, - kExpectedChunkSize, kExpectedCompressionLevel); + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); REQUIRE(pDescriptor); UNSCOPED_INFO("Verify the data type of an Elevation layer descriptor is correct."); CHECK(pDescriptor->getDataType() == Layer::getDataType(Elevation)); pDescriptor = SimpleLayerDescriptor::create(*pDataset, Num_Hypotheses, - kExpectedChunkSize, kExpectedCompressionLevel); + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); UNSCOPED_INFO("Verify the data type of an Num_Hypotheses layer descriptor is correct."); CHECK(pDescriptor->getDataType() == Layer::getDataType(Num_Hypotheses)); @@ -420,23 +430,24 @@ TEST_CASE("test layer descriptor get layer type", Metadata metadata; metadata.loadFromBuffer(kMetadataXML); - constexpr uint64_t kExpectedChunkSize = 100; - constexpr unsigned int kExpectedCompressionLevel = 6; - auto pDataset = Dataset::create(tmpBagFile, std::move(metadata), kExpectedChunkSize, kExpectedCompressionLevel); REQUIRE(pDataset); UNSCOPED_INFO("Check that creating a simple layer descriptor returns something."); auto pDescriptor = SimpleLayerDescriptor::create(*pDataset, Elevation, - kExpectedChunkSize, kExpectedCompressionLevel); + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); REQUIRE(pDescriptor); UNSCOPED_INFO("Verify the layer type of an Elevation layer descriptor is correct."); CHECK(pDescriptor->getLayerType() == Elevation); 
pDescriptor = SimpleLayerDescriptor::create(*pDataset, Std_Dev, - kExpectedChunkSize, kExpectedCompressionLevel); + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); REQUIRE(pDescriptor); UNSCOPED_INFO("Verify the layer type of an Std_Dev layer descriptor is correct."); @@ -453,16 +464,15 @@ TEST_CASE("test layer descriptor get/set min max", Metadata metadata; metadata.loadFromBuffer(kMetadataXML); - constexpr uint64_t kExpectedChunkSize = 100; - constexpr unsigned int kExpectedCompressionLevel = 6; - auto pDataset = Dataset::create(tmpBagFile, std::move(metadata), kExpectedChunkSize, kExpectedCompressionLevel); REQUIRE(pDataset); UNSCOPED_INFO("Check that creating a simple layer descriptor returns something."); auto pDescriptor = SimpleLayerDescriptor::create(*pDataset, Elevation, - kExpectedChunkSize, kExpectedCompressionLevel); + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); REQUIRE(pDescriptor); UNSCOPED_INFO("Verify setting min max does not throw."); @@ -484,16 +494,15 @@ TEST_CASE("test layer descriptor get internal path", Metadata metadata; metadata.loadFromBuffer(kMetadataXML); - constexpr uint64_t kExpectedChunkSize = 100; - constexpr unsigned int kExpectedCompressionLevel = 6; - auto pDataset = Dataset::create(tmpBagFile, std::move(metadata), kExpectedChunkSize, kExpectedCompressionLevel); REQUIRE(pDataset); UNSCOPED_INFO("Check that creating a simple layer descriptor returns something."); - auto pDescriptor = SimpleLayerDescriptor::create(*pDataset,Elevation, - kExpectedChunkSize, kExpectedCompressionLevel); + auto pDescriptor = SimpleLayerDescriptor::create(*pDataset, Elevation, + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); REQUIRE(pDescriptor); UNSCOPED_INFO("Verify Elevation internal path is as expected."); @@ -501,7 +510,9 @@ TEST_CASE("test layer descriptor get internal path", CHECK(pDescriptor->getInternalPath() == Layer::getInternalPath(Elevation)); pDescriptor = 
SimpleLayerDescriptor::create(*pDataset, Uncertainty, - kExpectedChunkSize, kExpectedCompressionLevel); + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); REQUIRE(pDescriptor); UNSCOPED_INFO("Verify Uncertainty internal path is as expected."); @@ -518,16 +529,15 @@ TEST_CASE("test layer descriptor get element size", Metadata metadata; metadata.loadFromBuffer(kMetadataXML); - constexpr uint64_t kExpectedChunkSize = 100; - constexpr unsigned int kExpectedCompressionLevel = 6; - auto pDataset = Dataset::create(tmpBagFile, std::move(metadata), kExpectedChunkSize, kExpectedCompressionLevel); REQUIRE(pDataset); UNSCOPED_INFO("Check that creating a simple layer descriptor returns something."); auto pDescriptor = SimpleLayerDescriptor::create(*pDataset, Elevation, - kExpectedChunkSize, kExpectedCompressionLevel); + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); REQUIRE(pDescriptor); UNSCOPED_INFO("Verify Elevation element size is as expected."); @@ -545,16 +555,15 @@ TEST_CASE("test descriptor get chunk size", Metadata metadata; metadata.loadFromBuffer(kMetadataXML); - constexpr uint64_t kExpectedChunkSize = 100; - constexpr unsigned int kExpectedCompressionLevel = 6; - auto pDataset = Dataset::create(tmpBagFile, std::move(metadata), kExpectedChunkSize, kExpectedCompressionLevel); REQUIRE(pDataset); UNSCOPED_INFO("Check that creating a simple layer descriptor returns something."); auto pDescriptor = SimpleLayerDescriptor::create(*pDataset, Elevation, - kExpectedChunkSize, kExpectedCompressionLevel); + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); REQUIRE(pDescriptor); UNSCOPED_INFO("Verify getting the chunk size does not throw."); @@ -573,16 +582,15 @@ TEST_CASE("test descriptor get compression level", Metadata metadata; metadata.loadFromBuffer(kMetadataXML); - constexpr uint64_t kExpectedChunkSize = 100; - constexpr unsigned int kExpectedCompressionLevel = 6; - auto pDataset = Dataset::create(tmpBagFile, 
std::move(metadata), kExpectedChunkSize, kExpectedCompressionLevel); REQUIRE(pDataset); UNSCOPED_INFO("Check that creating a simple layer descriptor returns something."); auto pDescriptor = SimpleLayerDescriptor::create(*pDataset, Elevation, - kExpectedChunkSize, kExpectedCompressionLevel); + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); REQUIRE(pDescriptor); UNSCOPED_INFO("Verify getting the compression level does not throw."); @@ -591,5 +599,3 @@ TEST_CASE("test descriptor get compression level", UNSCOPED_INFO("Verify getting the compression level matches the expected."); CHECK(pDescriptor->getCompressionLevel() == kExpectedCompressionLevel); } - - diff --git a/tests/test_bag_vrmetadata.cpp b/tests/test_bag_vrmetadata.cpp index 638f64cd67..cb5e710cdf 100644 --- a/tests/test_bag_vrmetadata.cpp +++ b/tests/test_bag_vrmetadata.cpp @@ -411,61 +411,68 @@ TEST_CASE("test vr metadata write read", "[vrmetadata][write][read]") { const TestUtils::RandomFileGuard tmpBagFile; - UNSCOPED_INFO("Check dataset was created successfully."); - constexpr uint64_t kChunkSize = 100; - constexpr unsigned int kCompressionLevel = 6; + constexpr uint32_t kRowStart = 0; + constexpr uint32_t kColumnStart = 0; + constexpr uint32_t kRowEnd = 0; + constexpr uint32_t kColumnEnd = 0; - BAG::Metadata metadata; - metadata.loadFromBuffer(kMetadataXML); + constexpr BAG::VRMetadataItem kExpectedItem0{ + 0, 1, 2, 3.45f, 6.789f, 1001.01f, 4004.004f}; - auto pDataset = Dataset::create(tmpBagFile, std::move(metadata), kChunkSize, - kCompressionLevel); - REQUIRE(pDataset); + { // test create + UNSCOPED_INFO("Check dataset was created successfully."); + constexpr uint64_t kChunkSize = 100; + constexpr unsigned int kCompressionLevel = 6; - UNSCOPED_INFO("Check creating variable resolution layers does not throw."); - REQUIRE_NOTHROW(pDataset->createVR(kChunkSize, kCompressionLevel, false)); + BAG::Metadata metadata; + metadata.loadFromBuffer(kMetadataXML); - UNSCOPED_INFO("Check the 
variable resolution metadata exists."); - auto pVrMetadata = pDataset->getVRMetadata(); - REQUIRE(pVrMetadata); + auto pDataset = Dataset::create(tmpBagFile, std::move(metadata), kChunkSize, + kCompressionLevel); + REQUIRE(pDataset); - UNSCOPED_INFO("Check VRMetadataDescriptor is the default descriptor."); - auto pVrMetadataDescriptor = - std::dynamic_pointer_cast<BAG::VRMetadataDescriptor>( - pVrMetadata->getDescriptor()); - REQUIRE(pVrMetadataDescriptor); + UNSCOPED_INFO("Check creating variable resolution layers does not throw."); + REQUIRE_NOTHROW(pDataset->createVR(kChunkSize, kCompressionLevel, false)); - UNSCOPED_INFO("Write one record."); - constexpr BAG::VRMetadataItem kExpectedItem0{ - 0, 1, 2, 3.45f, 6.789f, 1001.01f, 4004.004f}; + UNSCOPED_INFO("Check the variable resolution metadata exists."); + auto pVrMetadata = pDataset->getVRMetadata(); + REQUIRE(pVrMetadata); - const auto* buffer = reinterpret_cast<const uint8_t*>(&kExpectedItem0); - constexpr uint32_t kRowStart = 0; - constexpr uint32_t kColumnStart = 0; - constexpr uint32_t kRowEnd = 0; - constexpr uint32_t kColumnEnd = 0; + UNSCOPED_INFO("Check VRMetadataDescriptor is the default descriptor."); + auto pVrMetadataDescriptor = + std::dynamic_pointer_cast<BAG::VRMetadataDescriptor>( + pVrMetadata->getDescriptor()); + REQUIRE(pVrMetadataDescriptor); + + UNSCOPED_INFO("Write one record."); + const auto *buffer = reinterpret_cast<const uint8_t*>(&kExpectedItem0); + REQUIRE_NOTHROW(pVrMetadata->write(kRowStart, kColumnStart, kRowEnd, + kColumnEnd, buffer)); + } + + { // test open + auto pDataset = Dataset::open(tmpBagFile, BAG_OPEN_READONLY); - REQUIRE_NOTHROW(pVrMetadata->write(kRowStart, kColumnStart, kRowEnd, - kColumnEnd, buffer)); - - UNSCOPED_INFO("Read the record back."); - auto result = pVrMetadata->read(kRowStart, kColumnStart, kRowEnd, kColumnEnd); - CHECK(result); - - const auto* res = reinterpret_cast<const BAG::VRMetadataItem*>(result.data()); - UNSCOPED_INFO("Check the expected value of VRMetadataItem::index."); - CHECK(res->index == kExpectedItem0.index); - UNSCOPED_INFO("Check the expected value
of VRMetadataItem::dimensions_x."); - CHECK(res->dimensions_x == kExpectedItem0.dimensions_x); - UNSCOPED_INFO("Check the expected value of VRMetadataItem::dimensions_y."); - CHECK(res->dimensions_y == kExpectedItem0.dimensions_y); - UNSCOPED_INFO("Check the expected value of VRMetadataItem::resolution_x."); - CHECK(res->resolution_x == kExpectedItem0.resolution_x); - UNSCOPED_INFO("Check the expected value of VRMetadataItem::resolution_y."); - CHECK(res->resolution_y == kExpectedItem0.resolution_y); - UNSCOPED_INFO("Check the expected value of VRMetadataItem::sw_corner_x."); - CHECK(res->sw_corner_x == kExpectedItem0.sw_corner_x); - UNSCOPED_INFO("Check the expected value of VRMetadataItem::sw_corner_y."); - CHECK(res->sw_corner_y == kExpectedItem0.sw_corner_y); + UNSCOPED_INFO("Read the record back."); + auto pVrMetadata = pDataset->getVRMetadata(); + auto result = pVrMetadata->read(kRowStart, kColumnStart, kRowEnd, kColumnEnd); + CHECK(result); + + const auto *res = reinterpret_cast<const BAG::VRMetadataItem*>(result.data()); + UNSCOPED_INFO("Check the expected value of VRMetadataItem::index."); + CHECK(res->index == kExpectedItem0.index); + UNSCOPED_INFO("Check the expected value of VRMetadataItem::dimensions_x."); + CHECK(res->dimensions_x == kExpectedItem0.dimensions_x); + UNSCOPED_INFO("Check the expected value of VRMetadataItem::dimensions_y."); + CHECK(res->dimensions_y == kExpectedItem0.dimensions_y); + UNSCOPED_INFO("Check the expected value of VRMetadataItem::resolution_x."); + CHECK(res->resolution_x == kExpectedItem0.resolution_x); + UNSCOPED_INFO("Check the expected value of VRMetadataItem::resolution_y."); + CHECK(res->resolution_y == kExpectedItem0.resolution_y); + UNSCOPED_INFO("Check the expected value of VRMetadataItem::sw_corner_x."); + CHECK(res->sw_corner_x == kExpectedItem0.sw_corner_x); + UNSCOPED_INFO("Check the expected value of VRMetadataItem::sw_corner_y."); + CHECK(res->sw_corner_y == kExpectedItem0.sw_corner_y); + } } diff --git a/tests/test_vr_bag.cpp
b/tests/test_vr_bag.cpp new file mode 100644 index 0000000000..0cdec4e884 --- /dev/null +++ b/tests/test_vr_bag.cpp @@ -0,0 +1,85 @@ +#include +#include +#include +#include +#include + +#include + + +using BAG::Dataset; +using BAG::VRRefinements; +using BAG::VRRefinementsDescriptor; +using BAG::VRMetadata; +using BAG::VRMetadataDescriptor; + + +// Test basic reading of an existing VR BAG from https://github.com/OSGeo/gdal/blob/master/autotest/gdrivers/data/bag/test_vr.bag +TEST_CASE("test VR BAG reading GDAL", "[dataset][open][VR][GDAL]") +{ + const std::string bagFileName{std::string{std::getenv("BAG_SAMPLES_PATH")} + + "/test_vr.bag"}; + + const size_t kNumExpectedLayers = 4; + const auto dataset = Dataset::open(bagFileName, BAG_OPEN_READONLY); + REQUIRE(dataset); + + CHECK(dataset->getLayerTypes().size() == kNumExpectedLayers); + + const uint32_t kExpectedRows = 4; + const uint32_t kExpectedCols = 6; + CHECK(dataset->getDescriptor().getVersion() == "1.6.2"); + auto dims = dataset->getDescriptor().getDims(); + CHECK(std::get<0>(dims) == kExpectedRows); + CHECK(std::get<1>(dims) == kExpectedCols); + + auto vrMeta = dataset->getVRMetadata(); + REQUIRE(vrMeta); + const auto vrMetaDesc = vrMeta->getDescriptor(); + auto vrMetaDescDims = vrMetaDesc->getDims(); + // VR metadata descriptor dims should be the same as BAG dataset dims... 
+ CHECK(std::get<0>(vrMetaDescDims) == kExpectedRows); + CHECK(std::get<1>(vrMetaDescDims) == kExpectedCols); + + auto vrRef = dataset->getVRRefinements(); + REQUIRE(vrRef); + const auto vrRefDesc = vrRef->getDescriptor(); + auto vrRefDescDims = vrRefDesc->getDims(); + CHECK(std::get<0>(vrRefDescDims) == 1); + CHECK(std::get<1>(vrRefDescDims) == 556); +} + +// Test basic reading of an existing VR BAG from the National Bathymetric Source archive (https://www.nauticalcharts.noaa.gov/learn/nbs.html) +TEST_CASE("test VR BAG reading NBS", "[dataset][open][VR][NBS]") +{ + const std::string bagFileName{std::string{std::getenv("BAG_SAMPLES_PATH")} + + "/Sample_VR_BAG-gzip.bag"}; + + const size_t kNumExpectedLayers = 4; + const auto dataset = Dataset::open(bagFileName, BAG_OPEN_READONLY); + REQUIRE(dataset); + + CHECK(dataset->getLayerTypes().size() == kNumExpectedLayers); + + const uint32_t kExpectedRows = 4; + const uint32_t kExpectedCols = 4; + CHECK(dataset->getDescriptor().getVersion() == "1.6.0"); + auto dims = dataset->getDescriptor().getDims(); + CHECK(std::get<0>(dims) == kExpectedRows); + CHECK(std::get<1>(dims) == kExpectedCols); + + auto vrMeta = dataset->getVRMetadata(); + REQUIRE(vrMeta); + const auto vrMetaDesc = vrMeta->getDescriptor(); + auto vrMetaDescDims = vrMetaDesc->getDims(); + // VR metadata descriptor dims should be the same as BAG dataset dims... + CHECK(std::get<0>(vrMetaDescDims) == kExpectedRows); + CHECK(std::get<1>(vrMetaDescDims) == kExpectedCols); + + auto vrRef = dataset->getVRRefinements(); + REQUIRE(vrRef); + const auto vrRefDesc = vrRef->getDescriptor(); + auto vrRefDescDims = vrRefDesc->getDims(); + CHECK(std::get<0>(vrRefDescDims) == 1); + CHECK(std::get<1>(vrRefDescDims) == 3750); +}