diff --git a/modules/README.md b/modules/README.md
index dbdbfe19a..46b6530b4 100644
--- a/modules/README.md
+++ b/modules/README.md
@@ -53,3 +53,5 @@ $ cmake -D OPENCV_EXTRA_MODULES_PATH=<opencv_contrib>/modules -D BUILD_opencv_re
 22. **opencv_xphoto**: Additional photo processing algorithms: Color balance / Denoising / Inpainting.
 
 23. **opencv_stereo**: Stereo Correspondence done with different descriptors: Census / CS-Census / MCT / BRIEF / MV.
+
+24. **opencv_hdf**: Hierarchical Data Format I/O.
diff --git a/modules/hdf/CMakeLists.txt b/modules/hdf/CMakeLists.txt
new file mode 100644
index 000000000..9bba8b48a
--- /dev/null
+++ b/modules/hdf/CMakeLists.txt
@@ -0,0 +1,21 @@
+set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_CURRENT_SOURCE_DIR})
+
+find_package(HDF5)
+if(HDF5_FOUND)
+  set(HAVE_HDF5 1)
+  message(STATUS "HDF5: YES")
+else()
+  message(STATUS "HDF5: NO")
+  ocv_module_disable(hdf)
+endif()
+
+if(HDF5_FOUND)
+  include_directories(${HDF5_INCLUDE_DIRS})
+endif()
+
+set(the_description "Hierarchical Data Format I/O")
+ocv_define_module(hdf opencv_core WRAP python)
+
+if(HDF5_FOUND)
+  target_link_libraries(opencv_hdf ${HDF5_LIBRARIES})
+endif()
diff --git a/modules/hdf/README.md b/modules/hdf/README.md
new file mode 100644
index 000000000..92c1a8484
--- /dev/null
+++ b/modules/hdf/README.md
@@ -0,0 +1,4 @@
+HDF I/O
+============================================================
+
+The module contains I/O routines for Hierarchical Data Formats.
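+
+A minimal write/read round-trip sketch (the file name "eye.h5" and the dataset
+label "A" are illustrative only; error handling omitted):
+
+```cpp
+#include <opencv2/core.hpp>
+#include <opencv2/hdf.hpp>
+
+int main()
+{
+    cv::Mat A = cv::Mat::eye( 3, 3, CV_32F );
+    // open / autocreate hdf5 file
+    cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "eye.h5" );
+    // create and write dataset "A"
+    h5io->dswrite( A, "A" );
+    // read it back
+    cv::Mat B;
+    h5io->dsread( B, "A" );
+    // release
+    h5io->close();
+    return 0;
+}
+```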
diff --git a/modules/hdf/doc/pics/hdfview_demo.gif b/modules/hdf/doc/pics/hdfview_demo.gif
new file mode 100644
index 000000000..410bdf59a
Binary files /dev/null and b/modules/hdf/doc/pics/hdfview_demo.gif differ
diff --git a/modules/hdf/include/opencv2/hdf.hpp b/modules/hdf/include/opencv2/hdf.hpp
new file mode 100644
index 000000000..4ca6d11bb
--- /dev/null
+++ b/modules/hdf/include/opencv2/hdf.hpp
@@ -0,0 +1,54 @@
+/*********************************************************************
+ * Software License Agreement (BSD License)
+ *
+ * Copyright (c) 2015
+ * Balint Cristian
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above
+ *    copyright notice, this list of conditions and the following
+ *    disclaimer in the documentation and/or other materials provided
+ *    with the distribution.
+ *  * Neither the name of the copyright holders nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *********************************************************************/
+
+#ifndef __OPENCV_HDF_HPP__
+#define __OPENCV_HDF_HPP__
+
+#include "opencv2/hdf/hdf5.hpp"
+
+/** @defgroup hdf Hierarchical Data Format I/O routines
+
+This module provides storage routines for Hierarchical Data Format objects.
+
+  @{
+    @defgroup hdf5 Hierarchical Data Format version 5
+
+Hierarchical Data Format version 5
+--------------------------------------------------------
+
+  @}
+*/
+
+#endif
diff --git a/modules/hdf/include/opencv2/hdf/hdf5.hpp b/modules/hdf/include/opencv2/hdf/hdf5.hpp
new file mode 100644
index 000000000..504cca105
--- /dev/null
+++ b/modules/hdf/include/opencv2/hdf/hdf5.hpp
@@ -0,0 +1,681 @@
+/*********************************************************************
+ * Software License Agreement (BSD License)
+ *
+ * Copyright (c) 2015
+ * Balint Cristian
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above
+ *    copyright notice, this list of conditions and the following
+ *    disclaimer in the documentation and/or other materials provided
+ *    with the distribution.
+ *  * Neither the name of the copyright holders nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *********************************************************************/
+
+#ifndef __OPENCV_HDF5_HPP__
+#define __OPENCV_HDF5_HPP__
+
+#include <vector>
+
+#include <opencv2/core.hpp>
+
+using namespace std;
+
+namespace cv
+{
+namespace hdf
+{
+
+//! @addtogroup hdf5
+//! @{
+
+
+/** @brief Hierarchical Data Format version 5 interface.
+
+Note that this module is compiled only when the HDF5 library is correctly installed.
+
+ */
+class CV_EXPORTS_W HDF5
+{
+public:
+
+    CV_WRAP enum
+    {
+      H5_UNLIMITED = -1, H5_NONE = -1, H5_GETDIMS = 100, H5_GETMAXDIMS = 101,
+    };
+
+    virtual ~HDF5() {}
+
+    /** @brief Close and release hdf5 object.
+     */
+    CV_WRAP virtual void close( ) = 0;
+
+    /** @brief Create a group.
+    @param grlabel specify the hdf5 group label.
+
+    Create a hdf5 group.
+
+    @note Groups are useful to better organise multiple datasets. It is possible to create subgroups within any group.
+    Existence of a particular group can be checked using hlexists(). In case of subgroups the label would be
+    e.g: 'Group1/SubGroup1', where SubGroup1 is within the root group Group1.
+
+    - In this example Group1 will have one subgroup labelled SubGroup1:
+    @code{.cpp}
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // create Group1 if it does not exist
+      if ( ! h5io->hlexists( "Group1" ) )
+        h5io->grcreate( "Group1" );
+      else
+        printf("Group1 already created, skipping\n" );
+      // create SubGroup1 if it does not exist
+      if ( ! h5io->hlexists( "Group1/SubGroup1" ) )
+        h5io->grcreate( "Group1/SubGroup1" );
+      else
+        printf("SubGroup1 already created, skipping\n" );
+      // release
+      h5io->close();
+    @endcode
+
+    @note When a dataset is created with dscreate() or kpcreate() it can be created right within a group by specifying
+    the full path within the label, in our example: 'Group1/SubGroup1/MyDataSet'. It is not thread safe.
+     */
+    CV_WRAP virtual void grcreate( String grlabel ) = 0;
+
+    /** @brief Check if label exists or not.
+    @param label specify the hdf5 dataset label.
+
+    Returns **true** if the dataset exists, **false** otherwise.
+
+    @note Checks if a dataset, group or other object type (hdf5 link) exists under the label name. It is thread safe.
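+
+    - A minimal sketch (the label is illustrative and assumes the group example
+      above has already run):
+    @code{.cpp}
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      if ( h5io->hlexists( "Group1/SubGroup1" ) )
+        printf("SubGroup1 is present\n" );
+      // release
+      h5io->close();
+    @endcode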
+     */
+    CV_WRAP virtual bool hlexists( String label ) const = 0;
+
+    /* @overload */
+    CV_WRAP virtual void dscreate( const int rows, const int cols, const int type,
+            String dslabel, const int compresslevel = HDF5::H5_NONE,
+            const vector<int>& dims_chunks = vector<int>() ) const = 0;
+    /** @brief Create and allocate storage for two dimensional single or multi channel dataset.
+    @param rows declare amount of rows
+    @param cols declare amount of cols
+    @param type type to be used
+    @param dslabel specify the hdf5 dataset label.
+    @param compresslevel specify the compression level 0-9 to be used, by default H5_NONE means no compression at all.
+    @param dims_chunks each array member specifies the chunking size to be used for block i/o,
+           by default NULL means none at all.
+
+    @note If the dataset already exists an exception will be thrown.
+
+    - Existence of the dataset can be checked using hlexists(), see in this example:
+    @code{.cpp}
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // create space for 100x50 CV_64FC2 matrix
+      if ( ! h5io->hlexists( "hilbert" ) )
+        h5io->dscreate( 100, 50, CV_64FC2, "hilbert" );
+      else
+        printf("DS already created, skipping\n" );
+      // release
+      h5io->close();
+    @endcode
+
+    @note Activating compression requires internal chunking. Chunking can significantly improve access
+    speed both at read and write time, especially for windowed access logic that shifts the offset inside the dataset.
+    If no custom chunking is specified, a default one will be invoked with the size of the **whole** dataset
+    as a single big chunk of data.
+
+    - See example of level 9 compression using internal default chunking:
+    @code{.cpp}
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // create level 9 compressed space for CV_64FC2 matrix
+      if ( ! h5io->hlexists( "hilbert" ) )
+        h5io->dscreate( 100, 50, CV_64FC2, "hilbert", 9 );
+      else
+        printf("DS already created, skipping\n" );
+      // release
+      h5io->close();
+    @endcode
+
+    @note A value of H5_UNLIMITED for **rows** or **cols** or both means **unlimited** data on the specified dimension,
+    thus it is possible to expand such a dataset anytime in the row, col or both directions. Presence of H5_UNLIMITED on any
+    dimension **requires** custom chunking to be defined. No default chunking will be defined in the unlimited scenario, since
+    the default size on that dimension will be zero and will grow once the dataset is written. Writing into a dataset that has
+    H5_UNLIMITED on some of its dimensions requires dsinsert(), which allows growth on unlimited dimensions, instead of dswrite(),
+    which can write only into a predefined data space.
+
+    - Example below shows no compression but unlimited dimension on cols using 100x100 internal chunking:
+    @code{.cpp}
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // create uncompressed space for CV_64FC2 matrix with unlimited cols
+      int chunks[2] = { 100, 100 };
+      h5io->dscreate( 100, cv::hdf::HDF5::H5_UNLIMITED, CV_64FC2, "hilbert", cv::hdf::HDF5::H5_NONE, chunks );
+      // release
+      h5io->close();
+    @endcode
+
+    @note It is **not** thread safe; it must be called only once at dataset creation, otherwise an exception will occur.
+    Multiple datasets inside a single hdf5 file are allowed.
+     */
+    CV_WRAP virtual void dscreate( const int rows, const int cols, const int type,
+            String dslabel, const int compresslevel = HDF5::H5_NONE, const int* dims_chunks = NULL ) const = 0;
+
+    /* @overload */
+    CV_WRAP virtual void dscreate( const vector<int>& sizes, const int type, String dslabel,
+            const int compresslevel = HDF5::H5_NONE, const vector<int>& dims_chunks = vector<int>() ) const = 0;
+    /** @brief Create and allocate storage for n-dimensional dataset, single or multichannel type.
+    @param n_dims declare number of dimensions
+    @param sizes array containing sizes for each dimensions
+    @param type type to be used
+    @param dslabel specify the hdf5 dataset label.
+    @param compresslevel specify the compression level 0-9 to be used, by default H5_NONE means no compression at all.
+    @param dims_chunks each array member specifies the chunking size to be used for block i/o,
+           by default NULL means none at all.
+    @note If the dataset already exists an exception will be thrown. Existence of the dataset can be checked
+    using hlexists().
+
+    - See example below that creates a 6 dimensional storage space:
+    @code{.cpp}
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // create space for 6 dimensional CV_64FC2 matrix
+      if ( ! h5io->hlexists( "nddata" ) )
+      {
+        int n_dims = 6;
+        int dsdims[6] = { 100, 100, 20, 10, 5, 5 };
+        h5io->dscreate( n_dims, dsdims, CV_64FC2, "nddata" );
+      }
+      else
+        printf("DS already created, skipping\n" );
+      // release
+      h5io->close();
+    @endcode
+
+    @note Activating compression requires internal chunking. Chunking can significantly improve access
+    speed both at read and write time, especially for windowed access logic that shifts the offset inside the dataset.
+    If no custom chunking is specified, a default one will be invoked with the size of the **whole** dataset
+    as a single big chunk of data.
+
+    - See example of level 0 compression (shallow) using chunking against the first
+      dimension, thus the storage will consist of 100 chunks of data:
+    @code{.cpp}
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // create space for 6 dimensional CV_64FC2 matrix
+      if ( ! h5io->hlexists( "nddata" ) )
+      {
+        int n_dims = 6;
+        int dsdims[6] = { 100, 100, 20, 10, 5, 5 };
+        int chunks[6] = {   1, 100, 20, 10, 5, 5 };
+        h5io->dscreate( n_dims, dsdims, CV_64FC2, "nddata", 0, chunks );
+      }
+      else
+        printf("DS already created, skipping\n" );
+      // release
+      h5io->close();
+    @endcode
+
+    @note A value of H5_UNLIMITED inside the **sizes** array means **unlimited** data on that dimension, thus it is
+    possible to expand such a dataset anytime in those unlimited directions. Presence of H5_UNLIMITED on any dimension
+    **requires** custom chunking to be defined. No default chunking will be defined in the unlimited scenario, since the
+    default size on that dimension will be zero and will grow once the dataset is written. Writing into a dataset that has
+    H5_UNLIMITED on some of its dimensions requires dsinsert(), which allows growth on unlimited dimensions, instead of
+    dswrite(), which can write only into a predefined data space.
+
+    - Example below shows a 3 dimensional dataset using no compression with all unlimited sizes and one unit chunking:
+    @code{.cpp}
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      int n_dims = 3;
+      int chunks[3] = { 1, 1, 1 };
+      int dsdims[3] = { cv::hdf::HDF5::H5_UNLIMITED, cv::hdf::HDF5::H5_UNLIMITED, cv::hdf::HDF5::H5_UNLIMITED };
+      h5io->dscreate( n_dims, dsdims, CV_64FC2, "nddata", cv::hdf::HDF5::H5_NONE, chunks );
+      // release
+      h5io->close();
+    @endcode
+     */
+    CV_WRAP virtual void dscreate( const int n_dims, const int* sizes, const int type,
+            String dslabel, const int compresslevel = HDF5::H5_NONE, const int* dims_chunks = NULL ) const = 0;
+
+    /** @brief Fetch dataset sizes
+    @param dslabel specify the hdf5 dataset label to be measured.
+    @param dims_flag will fetch dataset dimensions on H5_GETDIMS, and dataset maximum dimensions on H5_GETMAXDIMS.
+
+    Returns a vector containing the size of the dataset on each dimension.
+
+    @note The resulting vector size will match the number of dataset dimensions. By default H5_GETDIMS will return the
+    actual dataset dimensions. Using the H5_GETMAXDIMS flag will get the maximum allowed dimensions, which normally match the
+    actual dataset dimensions but can hold the H5_UNLIMITED value if the dataset was prepared in **unlimited** mode on
+    some of its dimensions. It can be useful to check existing dataset dimensions before overwriting it as a whole or in
+    subsets. Trying to write oversized source data into the target dataset will throw an exception.
+     */
+    CV_WRAP virtual vector<int> dsgetsize( String dslabel, int dims_flag = HDF5::H5_GETDIMS ) const = 0;
+
+    /** @brief Fetch dataset type
+    @param dslabel specify the hdf5 dataset label to be checked.
+
+    Returns the stored matrix type. This is an identifier compatible with the CvMat type system,
+    like e.g. CV_16SC5 (16-bit signed 5-channel array), and so on.
+
+    @note The result can be parsed with CV_MAT_CN() to obtain the number of channels and CV_MAT_DEPTH() to obtain the
+    native cvdata type. It is thread safe.
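+
+    - A minimal sketch combining dsgetsize() and dsgettype(), assuming a 2D
+      dataset labelled 'hilbert' was already written as in the dswrite() example:
+    @code{.cpp}
+      // open hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      std::vector<int> dims = h5io->dsgetsize( "hilbert" );
+      int type = h5io->dsgettype( "hilbert" );
+      printf( "%dx%d, %d channels, depth %d\n",
+              dims[0], dims[1], CV_MAT_CN( type ), CV_MAT_DEPTH( type ) );
+      // release
+      h5io->close();
+    @endcode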
+     */
+    CV_WRAP virtual int dsgettype( String dslabel ) const = 0;
+
+    /* @overload */
+    CV_WRAP virtual void dswrite( InputArray Array, String dslabel,
+            const vector<int>& dims_offset = vector<int>(),
+            const vector<int>& dims_counts = vector<int>() ) const = 0;
+    /** @brief Write or overwrite a Mat object into specified dataset of hdf5 file.
+    @param Array specify Mat data array to be written.
+    @param dslabel specify the target hdf5 dataset label.
+    @param dims_offset each array member specifies the offset location
+           over each dimension of the dataset from where the InputArray will be (over)written into the dataset.
+    @param dims_counts each array member specifies the amount of data over each dimension of
+           the dataset that will be written from the InputArray into the dataset.
+
+    Writes Mat object into targeted dataset.
+
+    @note If the dataset is not created and does not exist it will be created **automatically**. Only Mat is supported and
+    it must be **continuous**. It is thread safe but it is recommended that writes happen over separate non-overlapping
+    regions. Multiple datasets can be written inside a single hdf5 file.
+
+    - Example below writes a 100x100 CV_64FC2 matrix into a dataset. No dataset pre-creation required. If the routine
+    is called multiple times the dataset will just be overwritten:
+    @code{.cpp}
+      // dual channel hilbert matrix
+      cv::Mat H(100, 100, CV_64FC2);
+      for(int i = 0; i < H.rows; i++)
+        for(int j = 0; j < H.cols; j++)
+        {
+          H.at<cv::Vec2d>(i,j)[0] =  1. / (i + j + 1);
+          H.at<cv::Vec2d>(i,j)[1] = -1. / (i + j + 1);
+        }
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // write / overwrite dataset
+      h5io->dswrite( H, "hilbert" );
+      // release
+      h5io->close();
+    @endcode
+
+    - Example below writes a smaller 50x100 matrix into a 100x100 compressed space optimised by two 50x100 chunks.
+    The matrix is written twice, into the first half (0->50) and the second half (50->100) of the data space using offsets:
+    @code{.cpp}
+      // dual channel hilbert matrix
+      cv::Mat H(50, 100, CV_64FC2);
+      for(int i = 0; i < H.rows; i++)
+        for(int j = 0; j < H.cols; j++)
+        {
+          H.at<cv::Vec2d>(i,j)[0] =  1. / (i + j + 1);
+          H.at<cv::Vec2d>(i,j)[1] = -1. / (i + j + 1);
+        }
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // optimise dataset by two chunks
+      int chunks[2] = { 50, 100 };
+      // create 100x100 CV_64FC2 compressed space
+      h5io->dscreate( 100, 100, CV_64FC2, "hilbert", 9, chunks );
+      // write into first half
+      int offset1[2] = { 0, 0 };
+      h5io->dswrite( H, "hilbert", offset1 );
+      // write into second half
+      int offset2[2] = { 50, 0 };
+      h5io->dswrite( H, "hilbert", offset2 );
+      // release
+      h5io->close();
+    @endcode
+     */
+    CV_WRAP virtual void dswrite( InputArray Array, String dslabel,
+            const int* dims_offset = NULL, const int* dims_counts = NULL ) const = 0;
+
+    /* @overload */
+    CV_WRAP virtual void dsinsert( InputArray Array, String dslabel,
+            const vector<int>& dims_offset = vector<int>(),
+            const vector<int>& dims_counts = vector<int>() ) const = 0;
+    /** @brief Insert or overwrite a Mat object into specified dataset and autoexpand dataset size if **unlimited** property allows.
+    @param Array specify Mat data array to be written.
+    @param dslabel specify the target hdf5 dataset label.
+    @param dims_offset each array member specifies the offset location
+           over each dimension of the dataset from where the InputArray will be (over)written into the dataset.
+    @param dims_counts each array member specifies the amount of data over each dimension of
+           the dataset that will be written from the InputArray into the dataset.
+
+    Writes Mat object into targeted dataset and **autoexpands** dataset dimensions if allowed.
+
+    @note Unlike dswrite(), datasets are **not** created **automatically**. Only Mat is supported and it must be **continuous**.
+    If dsinsert() happens beyond the current extent of the dataset and the dataset is in **unlimited** mode on that dimension,
+    then the dataset is expanded, otherwise an exception is thrown. To create datasets with the **unlimited** property on one or
+    more dimensions see dscreate() and the optional H5_UNLIMITED flag at creation time. It is not thread safe over the same
+    dataset but multiple datasets can be merged inside a single hdf5 file.
+
+    - Example below creates **unlimited** rows x 100 cols and expands the rows 5 times with dsinsert() using a single
+    100x100 CV_64FC2 matrix over the dataset. The final size will have 5x100 rows and 100 cols, reflecting the H matrix
+    five times over the rows' span. The chunk size of 100x100 is just optimised against the H matrix size, having compression
+    disabled. If the routine is called multiple times the dataset will just be overwritten:
+    @code{.cpp}
+      // dual channel hilbert matrix
+      cv::Mat H(100, 100, CV_64FC2);
+      for(int i = 0; i < H.rows; i++)
+        for(int j = 0; j < H.cols; j++)
+        {
+          H.at<cv::Vec2d>(i,j)[0] =  1. / (i + j + 1);
+          H.at<cv::Vec2d>(i,j)[1] = -1. / (i + j + 1);
+        }
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // optimise dataset by chunks
+      int chunks[2] = { 100, 100 };
+      // create Unlimited x 100 CV_64FC2 space
+      h5io->dscreate( cv::hdf::HDF5::H5_UNLIMITED, 100, CV_64FC2, "hilbert", cv::hdf::HDF5::H5_NONE, chunks );
+      // insert five 100-row blocks
+      int offset[2] = { 0, 0 };
+      for ( int t = 0; t < 5; t++ )
+      {
+        offset[0] = 100 * t;
+        h5io->dsinsert( H, "hilbert", offset );
+      }
+      // release
+      h5io->close();
+    @endcode
+     */
+    CV_WRAP virtual void dsinsert( InputArray Array, String dslabel,
+            const int* dims_offset = NULL, const int* dims_counts = NULL ) const = 0;
+
+
+    /* @overload */
+    CV_WRAP virtual void dsread( OutputArray Array, String dslabel,
+            const vector<int>& dims_offset = vector<int>(),
+            const vector<int>& dims_counts = vector<int>() ) const = 0;
+    /** @brief Read specific dataset from hdf5 file into Mat object.
+    @param Array Mat container where data reads will be returned.
+    @param dims_offset each array member specifies the offset location over
+           each dimension from where the dataset starts to be read into the OutputArray.
+    @param dims_counts each array member specifies the amount over each dimension
+           of the dataset to read into the OutputArray.
+    @param dslabel specify the source hdf5 dataset label.
+
+    Reads out Mat object reflecting the stored dataset.
+
+    @note If the hdf5 file does not exist an exception will be thrown. Use hlexists() to check dataset presence.
+    It is thread safe.
+
+    - Example below reads a dataset:
+    @code{.cpp}
+      // open hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // blank Mat container
+      cv::Mat H;
+      // read hilbert dataset
+      h5io->dsread( H, "hilbert" );
+      // release
+      h5io->close();
+    @endcode
+
+    - Example below reads a 3x5 submatrix starting at the second row and third column:
+    @code{.cpp}
+      // open hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // blank Mat container
+      cv::Mat H;
+      int offset[2] = { 1, 2 };
+      int counts[2] = { 3, 5 };
+      // read hilbert dataset
+      h5io->dsread( H, "hilbert", offset, counts );
+      // release
+      h5io->close();
+    @endcode
+     */
+    CV_WRAP virtual void dsread( OutputArray Array, String dslabel,
+            const int* dims_offset = NULL, const int* dims_counts = NULL ) const = 0;
+
+    /** @brief Fetch keypoint dataset size
+    @param kplabel specify the hdf5 dataset label to be measured.
+    @param dims_flag will fetch dataset dimensions on H5_GETDIMS, and dataset maximum dimensions on H5_GETMAXDIMS.
+
+    Returns the size of the keypoints dataset.
+
+    @note The resulting size will match the number of keypoints. By default H5_GETDIMS will return the actual dataset
+    dimension. Using the H5_GETMAXDIMS flag will get the maximum allowed dimension, which normally matches the actual
+    dataset dimension but can hold the H5_UNLIMITED value if the dataset was prepared in **unlimited** mode. It can be
+    useful to check the existing dataset dimension before overwriting it as a whole or in subsets. Trying to write
+    oversized source data into the target dataset will throw an exception.
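+
+    - A minimal sketch, assuming a dataset labelled 'keypoints' was already
+      written as in the kpwrite() example:
+    @code{.cpp}
+      // open hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      int size = h5io->kpgetsize( "keypoints" );
+      printf( "stored keypoints: %d\n", size );
+      // release
+      h5io->close();
+    @endcode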
+     */
+    CV_WRAP virtual int kpgetsize( String kplabel, int dims_flag = HDF5::H5_GETDIMS ) const = 0;
+
+    /** @brief Create and allocate special storage for cv::KeyPoint dataset.
+    @param size declare fixed number of KeyPoints
+    @param kplabel specify the hdf5 dataset label.
+    @param compresslevel specify the compression level 0-9 to be used, by default H5_NONE means no compression at all.
+    @param chunks specify the chunking size to be used for block i/o, by default H5_NONE means none at all.
+    @note If the dataset already exists an exception will be thrown. Existence of the dataset can be checked
+    using hlexists().
+
+    - See example below that creates space for 100 keypoints in the dataset:
+    @code{.cpp}
+      // open hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      if ( ! h5io->hlexists( "keypoints" ) )
+        h5io->kpcreate( 100, "keypoints" );
+      else
+        printf("DS already created, skipping\n" );
+    @endcode
+
+    @note A value of H5_UNLIMITED for **size** means **unlimited** keypoints, thus it is possible to expand such a
+    dataset anytime by adding or inserting. Presence of H5_UNLIMITED **requires** custom chunking to be defined. No
+    default chunking will be defined in the unlimited scenario, since the default size on that dimension will be zero
+    and will grow once the dataset is written. Writing into a dataset created with H5_UNLIMITED requires kpinsert(),
+    which allows growth on the unlimited dimension, instead of kpwrite(), which can write only into a predefined
+    data space.
+
+    - See example below that creates unlimited space for keypoints with a chunking size of 100 but no compression:
+    @code{.cpp}
+      // open hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      if ( ! h5io->hlexists( "keypoints" ) )
+        h5io->kpcreate( cv::hdf::HDF5::H5_UNLIMITED, "keypoints", cv::hdf::HDF5::H5_NONE, 100 );
+      else
+        printf("DS already created, skipping\n" );
+    @endcode
+     */
+    virtual void kpcreate( const int size, String kplabel,
+            const int compresslevel = H5_NONE, const int chunks = H5_NONE ) const = 0;
+
+    /** @brief Write or overwrite list of KeyPoint into specified dataset of hdf5 file.
+    @param keypoints specify keypoints data list to be written.
+    @param kplabel specify the target hdf5 dataset label.
+    @param offset specify the offset location on the dataset from where keypoints will be (over)written into the dataset.
+    @param counts specify the amount of keypoints that will be written into the dataset.
+
+    Writes vector<KeyPoint> object into targeted dataset.
+
+    @note If the dataset is not created and does not exist it will be created **automatically**. It is thread safe but
+    it is recommended that writes happen over separate non-overlapping regions. Multiple datasets can be written
+    inside a single hdf5 file.
+
+    - Example below writes 100 keypoints into a dataset. No dataset pre-creation required. If the routine is called
+    multiple times the dataset will just be overwritten:
+    @code{.cpp}
+      // generate 100 dummy keypoints
+      std::vector<cv::KeyPoint> keypoints;
+      for(int i = 0; i < 100; i++)
+        keypoints.push_back( cv::KeyPoint(i, -i, 1, -1, 0, 0, -1) );
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // write / overwrite dataset
+      h5io->kpwrite( keypoints, "keypoints" );
+      // release
+      h5io->close();
+    @endcode
+
+    - Example below uses a smaller set of 50 keypoints and writes into a compressed space of 100 keypoints optimised by
+    10 chunks. The same keypoint set is written three times, first into the first half (0->50), then into the second half
+    (50->75), then into the remaining slots (75->99) of the data space, using the offset and count parameters to settle
+    the window for write access. If the routine is called multiple times the dataset will just be overwritten:
+    @code{.cpp}
+      // generate 50 dummy keypoints
+      std::vector<cv::KeyPoint> keypoints;
+      for(int i = 0; i < 50; i++)
+        keypoints.push_back( cv::KeyPoint(i, -i, 1, -1, 0, 0, -1) );
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // create maximum compressed space of size 100 with chunk size 10
+      h5io->kpcreate( 100, "keypoints", 9, 10 );
+      // write 50 keypoints into first half
+      h5io->kpwrite( keypoints, "keypoints", 0 );
+      // write first 25 keypoints into second half
+      h5io->kpwrite( keypoints, "keypoints", 50, 25 );
+      // write first 25 keypoints into remaining space of second half
+      h5io->kpwrite( keypoints, "keypoints", 75, 25 );
+      // release
+      h5io->close();
+    @endcode
+     */
+    virtual void kpwrite( const vector<KeyPoint> keypoints, String kplabel,
+            const int offset = H5_NONE, const int counts = H5_NONE ) const = 0;
+
+    /** @brief Insert or overwrite list of KeyPoint into specified dataset and autoexpand dataset size if **unlimited** property allows.
+    @param keypoints specify keypoints data list to be written.
+    @param kplabel specify the target hdf5 dataset label.
+    @param offset specify the offset location on the dataset from where keypoints will be (over)written into the dataset.
+    @param counts specify the amount of keypoints that will be written into the dataset.
+
+    Writes vector<KeyPoint> object into targeted dataset and **autoexpands** dataset dimension if allowed.
+
+    @note Unlike kpwrite(), datasets are **not** created **automatically**. If kpinsert() happens beyond the current size
+    of the dataset and the dataset has been created in **unlimited** mode then the dataset is expanded, otherwise an
+    exception is thrown. To create datasets with the **unlimited** property see kpcreate() and the optional H5_UNLIMITED
+    flag at creation time. It is not thread safe over the same dataset but multiple datasets can be merged inside a
+    single hdf5 file.
+
+    - Example below creates **unlimited** space for keypoints storage, and inserts a list of 10 keypoints ten times into
+    that space. The final dataset will have 100 keypoints. The chunk size of 10 is just optimised against the list of keypoints.
+    If the routine is called multiple times the dataset will just be overwritten:
+    @code{.cpp}
+      // generate 10 dummy keypoints
+      std::vector<cv::KeyPoint> keypoints;
+      for(int i = 0; i < 10; i++)
+        keypoints.push_back( cv::KeyPoint(i, -i, 1, -1, 0, 0, -1) );
+      // open / autocreate hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // create unlimited size space with chunk size of 10
+      h5io->kpcreate( cv::hdf::HDF5::H5_UNLIMITED, "keypoints", cv::hdf::HDF5::H5_NONE, 10 );
+      // insert the same 10 keypoints 10 times
+      for(int i = 0; i < 10; i++)
+        h5io->kpinsert( keypoints, "keypoints", i * 10 );
+      // release
+      h5io->close();
+    @endcode
+     */
+    virtual void kpinsert( const vector<KeyPoint> keypoints, String kplabel,
+            const int offset = H5_NONE, const int counts = H5_NONE ) const = 0;
+
+    /** @brief Read specific keypoint dataset from hdf5 file into vector<KeyPoint> object.
+    @param keypoints vector<KeyPoint> container where data reads will be returned.
+    @param kplabel specify the source hdf5 dataset label.
+    @param offset specify the offset location over the dataset from where the read starts.
+    @param counts specify the amount of keypoints from the dataset to read.
+
+    Reads out vector<KeyPoint> object reflecting the stored dataset.
+
+    @note If the hdf5 file does not exist an exception will be thrown. Use hlexists() to check dataset presence.
+    It is thread safe.
+
+    - Example below reads a dataset containing keypoints starting with the second entry:
+    @code{.cpp}
+      // open hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // blank KeyPoint container
+      std::vector<cv::KeyPoint> keypoints;
+      // read keypoints starting with the second one
+      h5io->kpread( keypoints, "keypoints", 1 );
+      // release
+      h5io->close();
+    @endcode
+
+    - Example below reads 3 keypoints starting with the second entry:
+    @code{.cpp}
+      // open hdf5 file
+      cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+      // blank KeyPoint container
+      std::vector<cv::KeyPoint> keypoints;
+      // read three keypoints starting with the second one
+      h5io->kpread( keypoints, "keypoints", 1, 3 );
+      // release
+      h5io->close();
+    @endcode
+     */
+    virtual void kpread( vector<KeyPoint>& keypoints, String kplabel,
+            const int offset = H5_NONE, const int counts = H5_NONE ) const = 0;
+
+};
+
+  /** @brief Open or create hdf5 file
+  @param HDF5Filename specify the HDF5 filename.
+
+  Returns a pointer to the hdf5 object class.
+
+  @note If the hdf5 file does not exist it will be created. Any operation except dscreate() on the object
+  will be thread safe. Multiple datasets can be created inside a single hdf5 file, and can be accessed
+  from the same hdf5 object from multiple instances as long as read or write operations are done over
+  non-overlapping regions of the dataset. A single hdf5 file can also be opened by multiple instances,
+  and reads and writes can be instantiated at the same time as long as non-overlapping regions are involved.
+  The object is released using close().
+
+  - Example below opens and then releases the file:
+  @code{.cpp}
+    // open / autocreate hdf5 file
+    cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "mytest.h5" );
+    // ...
+    // release
+    h5io->close();
+  @endcode
+
+  ![Visualization of 10x10 CV_64FC2 (Hilbert matrix) using the HDFView tool](pics/hdfview_demo.gif)
+
+  - Text dump (3x3 Hilbert matrix) of the hdf5 dataset using the **h5dump** tool:
+  @code{.txt}
+  $ h5dump test.h5
+  HDF5 "test.h5" {
+  GROUP "/" {
+     DATASET "hilbert" {
+        DATATYPE  H5T_ARRAY { [2] H5T_IEEE_F64LE }
+        DATASPACE  SIMPLE { ( 3, 3 ) / ( 3, 3 ) }
+        DATA {
+        (0,0): [ 1, -1 ], [ 0.5, -0.5 ], [ 0.333333, -0.333333 ],
+        (1,0): [ 0.5, -0.5 ], [ 0.333333, -0.333333 ], [ 0.25, -0.25 ],
+        (2,0): [ 0.333333, -0.333333 ], [ 0.25, -0.25 ], [ 0.2, -0.2 ]
+        }
+     }
+  }
+  }
+  @endcode
+   */
+  CV_EXPORTS_W Ptr<HDF5> open( String HDF5Filename );
+
+//! @}
+
+} // end namespace hdf
+} // end namespace cv
+#endif // __OPENCV_HDF5_HPP__
diff --git a/modules/hdf/src/hdf5.cpp b/modules/hdf/src/hdf5.cpp
new file mode 100644
index 000000000..6e0fb52da
--- /dev/null
+++ b/modules/hdf/src/hdf5.cpp
@@ -0,0 +1,1051 @@
+/*********************************************************************
+ * Software License Agreement (BSD License)
+ *
+ * Copyright (c) 2015
+ * Balint Cristian
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above
+ *    copyright notice, this list of conditions and the following
+ *    disclaimer in the documentation and/or other materials provided
+ *    with the distribution.
+ *  * Neither the name of the copyright holders nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *********************************************************************/ + +#include "precomp.hpp" + + + +using namespace std; + +namespace cv +{ +namespace hdf +{ + +class HDF5Impl : public HDF5 +{ +public: + + HDF5Impl( String HDF5Filename ); + + virtual ~HDF5Impl() { close(); }; + + // close and release + virtual void close( ); + + /* + * h5 generic + */ + + // check if object / link exists + virtual bool hlexists( String label ) const; + + /* + * h5 group + */ + + // create a group + virtual void grcreate( String grlabel ); + + /* + * cv::Mat + */ + + // get sizes of dataset + virtual vector dsgetsize( String dslabel, int dims_flag = H5_GETDIMS ) const; + + // get data type of dataset + virtual int dsgettype( String dslabel ) const; + + // overload dscreate() + virtual void dscreate( const int rows, const int cols, const int type, + String dslabel, const int compresslevel = H5_NONE, + const vector& dims_chunks = vector() ) const; + + // create two dimensional single or mutichannel dataset + virtual void dscreate( const int rows, const int cols, const int type, + String dslabel, const int compresslevel = H5_NONE, const int* dims_chunks = NULL ) const; + + // overload dscreate() + virtual void dscreate( const vector& sizes, const int type, String dslabel, + const int compresslevel = H5_NONE, const vector& dims_chunks = vector() ) const; + + // create n-dimensional single or mutichannel dataset + virtual void dscreate( const int n_dims, const int* sizes, const int type, + String dslabel, const int compresslevel = H5_NONE, const int* dims_chunks = NULL ) const; + + // overload dswrite() + virtual void dswrite( InputArray Array, String dslabel, + const vector& dims_offset = vector(), + const vector& dims_counts = vector() ) const; + + // write into dataset + virtual void dswrite( InputArray Array, String dslabel, + const int* dims_offset = NULL, const int* dims_counts = NULL ) const; + + // overload dsinsert() + virtual void dsinsert( InputArray Array, String dslabel, + const vector& dims_offset = vector(), + const vector& dims_counts = vector() ) const; + + // append / merge into dataset + virtual void dsinsert( InputArray Array, String dslabel, + const int* dims_offset = NULL, const int* dims_counts = NULL ) const; + + // overload dsread() + virtual void dsread( OutputArray Array, String dslabel, + const vector& dims_offset = vector(), + const vector& dims_counts = vector() ) const; + + // read from dataset + virtual void dsread( OutputArray Array, String dslabel, + const int* dims_offset = NULL, const int* dims_counts = NULL ) const; + + /* + * std::vector + */ + + // get size of keypoints dataset + virtual int kpgetsize( String kplabel, int dims_flag = H5_GETDIMS ) const; + + // create KeyPoint structure + virtual void kpcreate( const int size, String kplabel, + const int compresslevel = H5_NONE, const int chunks = H5_NONE ) const; + + // write KeyPoint structures + virtual void kpwrite( const vector keypoints, String kplabel, + const int offset = H5_NONE, const int counts = H5_NONE ) const; + + // append / merge KeyPoint structures + virtual void kpinsert( const vector keypoints, String kplabel, + const int offset = H5_NONE, const int counts = H5_NONE ) const; + + // read KeyPoint structure + virtual void kpread( vector& keypoints, String kplabel, + const int offset = H5_NONE, const int counts = H5_NONE ) const; + +private: + + // store filename + String m_hdf5_filename; + + // hdf5 file handler + hid_t m_h5_file_id; + + // translate cvType -> h5Type + inline hid_t GetH5type( int 
+
+    // translate h5Type -> cvType
+    inline int GetCVtype( hid_t h5Type ) const;
+
+};
+
+inline hid_t HDF5Impl::GetH5type( int cvType ) const
+{
+    hid_t h5Type = -1;
+
+    switch ( CV_MAT_DEPTH( cvType ) )
+    {
+      case CV_64F:
+        h5Type = H5T_NATIVE_DOUBLE;
+        break;
+      case CV_32F:
+        h5Type = H5T_NATIVE_FLOAT;
+        break;
+      case CV_8U:
+        h5Type = H5T_NATIVE_UCHAR;
+        break;
+      case CV_8S:
+        h5Type = H5T_NATIVE_CHAR;
+        break;
+      case CV_16U:
+        h5Type = H5T_NATIVE_USHORT;
+        break;
+      case CV_16S:
+        h5Type = H5T_NATIVE_SHORT;
+        break;
+      case CV_32S:
+        h5Type = H5T_NATIVE_INT;
+        break;
+      default:
+        CV_Error( Error::StsInternal, "Unknown cvType." );
+    }
+    return h5Type;
+}
+
+inline int HDF5Impl::GetCVtype( hid_t h5Type ) const
+{
+    int cvType = -1;
+
+    if ( H5Tequal( h5Type, H5T_NATIVE_DOUBLE ) )
+      cvType = CV_64F;
+    else if ( H5Tequal( h5Type, H5T_NATIVE_FLOAT ) )
+      cvType = CV_32F;
+    else if ( H5Tequal( h5Type, H5T_NATIVE_UCHAR ) )
+      cvType = CV_8U;
+    else if ( H5Tequal( h5Type, H5T_NATIVE_CHAR ) )
+      cvType = CV_8S;
+    else if ( H5Tequal( h5Type, H5T_NATIVE_USHORT ) )
+      cvType = CV_16U;
+    else if ( H5Tequal( h5Type, H5T_NATIVE_SHORT ) )
+      cvType = CV_16S;
+    else if ( H5Tequal( h5Type, H5T_NATIVE_INT ) )
+      cvType = CV_32S;
+    else
+      CV_Error( Error::StsInternal, "Unknown H5Type." );
+
+    return cvType;
+}
+
+HDF5Impl::HDF5Impl( String _hdf5_filename )
+                  : m_hdf5_filename( _hdf5_filename )
+{
+    // save old error handler
+    void *errdata;
+    H5E_auto2_t errfunc;
+    hid_t stackid = H5E_DEFAULT;
+    H5Eget_auto( stackid, &errfunc, &errdata );
+
+    // turn off error handling
+    H5Eset_auto( stackid, NULL, NULL );
+
+    // check HDF5 file presence (err suppressed)
+    htri_t check = H5Fis_hdf5( m_hdf5_filename.c_str() );
+
+    // restore previous error handler
+    H5Eset_auto( stackid, errfunc, errdata );
+
+    if ( check == 1 )
+      // open the HDF5 file
+      m_h5_file_id = H5Fopen( m_hdf5_filename.c_str(),
+                              H5F_ACC_RDWR, H5P_DEFAULT );
+    else
+      // create the HDF5 file
+      m_h5_file_id = H5Fcreate( m_hdf5_filename.c_str(),
+                       H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
+}
+
+void HDF5Impl::close()
+{
+    if ( m_h5_file_id != -1 )
+      H5Fclose( m_h5_file_id );
+    // mark closed
+    m_h5_file_id = -1;
+
+    H5close( );
+}
+
+/*
+ * h5 generic
+ */
+
+bool HDF5Impl::hlexists( String label ) const
+{
+    bool exists = false;
+
+    hid_t lid = H5Pcreate( H5P_LINK_ACCESS );
+    if ( H5Lexists(m_h5_file_id, label.c_str(), lid) == 1 )
+      exists = true;
+
+    H5Pclose(lid);
+    return exists;
+}
+
+/*
+ * h5 group
+ */
+
+void HDF5Impl::grcreate( String grlabel )
+{
+    hid_t gid = H5Gcreate( m_h5_file_id, grlabel.c_str(),
+                  H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT );
+    H5Gclose( gid );
+}
+
+/*
+ * cv::Mat
+ */
+
+vector<int> HDF5Impl::dsgetsize( String dslabel, int dims_flag ) const
+{
+    // open dataset
+    hid_t dsdata = H5Dopen( m_h5_file_id, dslabel.c_str(), H5P_DEFAULT );
+
+    // get file space
+    hid_t fspace = H5Dget_space( dsdata );
+
+    // fetch rank
+    int n_dims = H5Sget_simple_extent_ndims( fspace );
+
+    // fetch dims
+    hsize_t dsdims[n_dims];
+    if ( dims_flag == H5_GETDIMS )
+      H5Sget_simple_extent_dims( fspace, dsdims, NULL );
+    else
+      H5Sget_simple_extent_dims( fspace, NULL, dsdims );
+
+    // fill with size data
+    vector<int> SizeVect( n_dims );
+    for ( int d = 0; d < n_dims; d++ )
+      SizeVect[d] = (int) dsdims[d];
+
+    H5Dclose( dsdata );
+    H5Sclose( fspace );
+
+    return SizeVect;
+}
+
+int HDF5Impl::dsgettype( String dslabel ) const
+{
+    hid_t h5type;
+
+    // open dataset
+    hid_t dsdata = H5Dopen( m_h5_file_id, dslabel.c_str(), H5P_DEFAULT );
+
+    // get data type
+    hid_t dstype = H5Dget_type( dsdata );
+
+    int channs = 1;
+    if ( H5Tget_class( dstype ) == H5T_ARRAY )
+    {
+      // fetch channs
+      hsize_t ardims[1];
+      H5Tget_array_dims( dstype, ardims );
+      channs = (int) ardims[0];
+      // fetch depth
+      hid_t tsuper = H5Tget_super( dstype );
+      h5type = H5Tget_native_type( tsuper, H5T_DIR_ASCEND );
+      H5Tclose( tsuper );
+    }
+    else
+      h5type = H5Tget_native_type( dstype, H5T_DIR_DESCEND );
+
+    // convert to CVType
+    int cvtype = GetCVtype( h5type );
+
+    H5Tclose( dstype );
+    H5Dclose( dsdata );
+
+    return CV_MAKETYPE( cvtype, channs );
+}
+
+// overload
+void HDF5Impl::dscreate( const int rows, const int cols, const int type,
+                 String dslabel, const int compresslevel,
+                 const vector<int>& dims_chunks ) const
+{
+    CV_Assert( dims_chunks.empty() || dims_chunks.size() == 2 );
+    dscreate( rows, cols, type, dslabel, compresslevel,
+              dims_chunks.empty() ? NULL : &dims_chunks[0] );
+}
+
+void HDF5Impl::dscreate( const int rows, const int cols, const int type,
+                 String dslabel, const int compresslevel, const int* dims_chunks ) const
+{
+    // dataset dims
+    int dsizes[2] = { rows, cols };
+
+    // create the two dim array
+    dscreate( 2, dsizes, type, dslabel, compresslevel, dims_chunks );
+}
+
+// overload
+void HDF5Impl::dscreate( const vector<int>& sizes, const int type,
+                 String dslabel, const int compresslevel,
+                 const vector<int>& dims_chunks ) const
+{
+    CV_Assert( dims_chunks.empty() || dims_chunks.size() == sizes.size() );
+
+    const int n_dims = (int) sizes.size();
+    dscreate( n_dims, &sizes[0], type, dslabel, compresslevel,
+              dims_chunks.empty() ? NULL : &dims_chunks[0] );
+}
+
+void HDF5Impl::dscreate( const int n_dims, const int* sizes, const int type,
+                 String dslabel, const int compresslevel, const int* dims_chunks ) const
+{
+    // compress valid H5_NONE, 0-9
+    CV_Assert( compresslevel >= H5_NONE && compresslevel <= 9 );
+
+    if ( hlexists( dslabel ) == true )
+      CV_Error( Error::StsInternal, "Requested dataset already exists." );
+
+    int channs = CV_MAT_CN( type );
+
+    hsize_t chunks[n_dims];
+    hsize_t dsdims[n_dims];
+    hsize_t maxdim[n_dims];
+
+    // dimension space
+    for ( int d = 0; d < n_dims; d++ )
+    {
+      CV_Assert( sizes[d] >= H5_UNLIMITED );
+
+      // dataset dimension
+      if ( sizes[d] == H5_UNLIMITED )
+      {
+        CV_Assert( dims_chunks != NULL );
+
+        dsdims[d] = 0;
+        maxdim[d] = H5S_UNLIMITED;
+      }
+      else
+      {
+        dsdims[d] = sizes[d];
+        maxdim[d] = sizes[d];
+      }
+      // default chunking
+      if ( dims_chunks == NULL )
+        chunks[d] = sizes[d];
+      else
+        chunks[d] = dims_chunks[d];
+    }
+
+    // create dataset space
+    hid_t dspace = H5Screate_simple( n_dims, dsdims, maxdim );
+
+    // create data property
+    hid_t dsdcpl = H5Pcreate( H5P_DATASET_CREATE );
+
+    // set properties
+    if ( compresslevel >= 0 )
+      H5Pset_deflate( dsdcpl, compresslevel );
+
+    if ( dims_chunks != NULL || compresslevel >= 0 )
+      H5Pset_chunk( dsdcpl, n_dims, chunks );
+
+    // convert to h5 type
+    hid_t dstype = GetH5type( type );
+
+    // expand channs
+    if ( channs > 1 )
+    {
+      hsize_t adims[1] = { (hsize_t) channs };
+      dstype = H5Tarray_create( dstype, 1, adims );
+    }
+
+    // create data
+    H5Dcreate( m_h5_file_id, dslabel.c_str(), dstype,
+               dspace, H5P_DEFAULT, dsdcpl, H5P_DEFAULT );
+
+    if ( channs > 1 )
+      H5Tclose( dstype );
+
+    H5Pclose( dsdcpl );
+    H5Sclose( dspace );
+}
+
+// overload
+void HDF5Impl::dsread( OutputArray Array, String dslabel,
+                 const vector<int>& dims_offset,
+                 const vector<int>& dims_counts ) const
+{
+    dsread( Array, dslabel,
+            dims_offset.empty() ? NULL : &dims_offset[0],
+            dims_counts.empty() ? NULL : &dims_counts[0] );
+}
+
+void HDF5Impl::dsread( OutputArray Array, String dslabel,
+                 const int* dims_offset, const int* dims_counts ) const
+{
+    // only Mat support
+    CV_Assert( Array.isMat() );
+
+    hid_t h5type;
+
+    // open the HDF5 dataset
+    hid_t dsdata = H5Dopen( m_h5_file_id, dslabel.c_str(), H5P_DEFAULT );
+
+    // get data type
+    hid_t dstype = H5Dget_type( dsdata );
+
+    int channs = 1;
+    if ( H5Tget_class( dstype ) == H5T_ARRAY )
+    {
+      // fetch channs
+      hsize_t ardims[1];
+      H5Tget_array_dims( dstype, ardims );
+      channs = (int) ardims[0];
+      // fetch depth
+      hid_t tsuper = H5Tget_super( dstype );
+      h5type = H5Tget_native_type( tsuper, H5T_DIR_ASCEND );
+      H5Tclose( tsuper );
+    }
+    else
+      h5type = H5Tget_native_type( dstype, H5T_DIR_ASCEND );
+
+    int dType = GetCVtype( h5type );
+
+    // get file space
+    hid_t fspace = H5Dget_space( dsdata );
+
+    // fetch rank
+    int n_dims = H5Sget_simple_extent_ndims( fspace );
+
+    // fetch dims
+    hsize_t dsdims[n_dims];
+    H5Sget_simple_extent_dims( fspace, dsdims, NULL );
+
+    // set amount by custom offset
+    if ( dims_offset != NULL )
+    {
+      for ( int d = 0; d < n_dims; d++ )
+        dsdims[d] -= dims_offset[d];
+    }
+
+    // set custom amount of data
+    if ( dims_counts != NULL )
+    {
+      for ( int d = 0; d < n_dims; d++ )
+        dsdims[d] = dims_counts[d];
+    }
+
+    // get memory write window
+    int mxdims[n_dims];
+    hsize_t foffset[n_dims];
+    for ( int d = 0; d < n_dims; d++ )
+    {
+      foffset[d] = 0;
+      mxdims[d] = (int) dsdims[d];
+    }
+
+    // allocate persistent Mat
+    Array.create( n_dims, mxdims, CV_MAKETYPE(dType, channs) );
+
+    // get blank data space
+    hid_t dspace = H5Screate_simple( n_dims, dsdims, NULL );
+
+    // get matrix write window
+    H5Sselect_hyperslab( dspace, H5S_SELECT_SET,
+                         foffset, NULL, dsdims, NULL );
+
+    // set custom offsets
+    if ( dims_offset != NULL )
+    {
+      for ( int d = 0; d < n_dims; d++ )
+        foffset[d] = dims_offset[d];
+    }
+
+    // get a file read window
+    H5Sselect_hyperslab( fspace, H5S_SELECT_SET,
+                         foffset, NULL, dsdims, NULL );
+
+    // read from DS
+    Mat matrix = Array.getMat();
+    H5Dread( dsdata, dstype, dspace, fspace, H5P_DEFAULT, matrix.data );
+
+    H5Tclose( dstype );
+    H5Sclose( dspace );
+    H5Sclose( fspace );
+    H5Dclose( dsdata );
+}
+
+// overload
+void HDF5Impl::dswrite( InputArray Array, String dslabel,
+                 const vector<int>& dims_offset,
+                 const vector<int>& dims_counts ) const
+{
+    dswrite( Array, dslabel,
+             dims_offset.empty() ? NULL : &dims_offset[0],
+             dims_counts.empty() ? NULL : &dims_counts[0] );
+}
+
+void HDF5Impl::dswrite( InputArray Array, String dslabel,
+                 const int* dims_offset, const int* dims_counts ) const
+{
+    // only Mat support
+    CV_Assert( Array.isMat() );
+
+    Mat matrix = Array.getMat();
+
+    // memory array should be compact
+    CV_Assert( matrix.isContinuous() );
+
+    int n_dims = matrix.dims;
+    int channs = matrix.channels();
+
+    int dsizes[n_dims];
+    hsize_t dsdims[n_dims];
+    hsize_t offset[n_dims];
+    // replicate Mat dimensions
+    for ( int d = 0; d < n_dims; d++ )
+    {
+      offset[d] = 0;
+      dsizes[d] = matrix.size[d];
+      dsdims[d] = matrix.size[d];
+    }
+
+    // pre-create dataset if needed
+    if ( hlexists( dslabel ) == false )
+      dscreate( n_dims, dsizes, matrix.type(), dslabel );
+
+    // set custom amount of data
+    if ( dims_counts != NULL )
+    {
+      for ( int d = 0; d < n_dims; d++ )
+        dsdims[d] = dims_counts[d];
+    }
+
+    // open dataset
+    hid_t dsdata = H5Dopen( m_h5_file_id, dslabel.c_str(), H5P_DEFAULT );
+
+    // create input data space
+    hid_t dspace = H5Screate_simple( n_dims, dsdims, NULL );
+
+    // set custom offsets
+    if ( dims_offset != NULL )
+    {
+      for ( int d = 0; d < n_dims; d++ )
+        offset[d] = dims_offset[d];
+    }
+
+    // create offset write window space
+    hid_t fspace = H5Dget_space( dsdata );
+    H5Sselect_hyperslab( fspace, H5S_SELECT_SET,
+                         offset, NULL, dsdims, NULL );
+
+    // convert type
+    hid_t dstype = GetH5type( matrix.type() );
+
+    // expand channs
+    if ( matrix.channels() > 1 )
+    {
+      hsize_t adims[1] = { (hsize_t) channs };
+      dstype = H5Tarray_create( dstype, 1, adims );
+    }
+
+    // write into dataset
+    H5Dwrite( dsdata, dstype, dspace, fspace,
+              H5P_DEFAULT, matrix.data );
+
+    if ( matrix.channels() > 1 )
+      H5Tclose( dstype );
+
+    H5Sclose( dspace );
+    H5Sclose( fspace );
+    H5Dclose( dsdata );
+}
+
+// overload
+void HDF5Impl::dsinsert( InputArray Array, String dslabel,
+                 const vector<int>& dims_offset,
+                 const vector<int>& dims_counts ) const
+{
+    dsinsert( Array, dslabel,
+              dims_offset.empty() ? NULL : &dims_offset[0],
+              dims_counts.empty() ? NULL : &dims_counts[0] );
+}
+
+void HDF5Impl::dsinsert( InputArray Array, String dslabel,
+                 const int* dims_offset, const int* dims_counts ) const
+{
+    // only Mat support
+    CV_Assert( Array.isMat() );
+
+    // check dataset exists
+    if ( hlexists( dslabel ) == false )
+      CV_Error( Error::StsInternal, "Dataset does not exist." );
+
+    Mat matrix = Array.getMat();
+
+    // memory array should be compact
+    CV_Assert( matrix.isContinuous() );
+
+    int n_dims = matrix.dims;
+    int channs = matrix.channels();
+
+    hsize_t dsdims[n_dims];
+    hsize_t offset[n_dims];
+    // replicate Mat dimensions
+    for ( int d = 0; d < n_dims; d++ )
+    {
+      offset[d] = 0;
+      dsdims[d] = matrix.size[d];
+    }
+
+    // set custom amount of data
+    if ( dims_counts != NULL )
+    {
+      for ( int d = 0; d < n_dims; d++ )
+      {
+        CV_Assert( dims_counts[d] <= matrix.size[d] );
+        dsdims[d] = dims_counts[d];
+      }
+    }
+
+    // open dataset
+    hid_t dsdata = H5Dopen( m_h5_file_id, dslabel.c_str(), H5P_DEFAULT );
+
+    // create input data space
+    hid_t dspace = H5Screate_simple( n_dims, dsdims, NULL );
+
+    // set custom offsets
+    if ( dims_offset != NULL )
+    {
+      for ( int d = 0; d < n_dims; d++ )
+        offset[d] = dims_offset[d];
+    }
+
+    // get actual file space and dims
+    hid_t fspace = H5Dget_space( dsdata );
+    int f_dims = H5Sget_simple_extent_ndims( fspace );
+    hsize_t fsdims[f_dims];
+    H5Sget_simple_extent_dims( fspace, fsdims, NULL );
+    H5Sclose( fspace );
+
+    CV_Assert( f_dims == n_dims );
+
+    // compute new extents
+    hsize_t nwdims[n_dims];
+    for ( int d = 0; d < n_dims; d++ )
+    {
+      // init
+      nwdims[d] = 0;
+      // add offset
+      if ( dims_offset != NULL )
+        nwdims[d] += dims_offset[d];
+      // add counts or matrix size
+      if ( dims_counts != NULL )
+        nwdims[d] += dims_counts[d];
+      else
+        nwdims[d] += matrix.size[d];
+
+      // clamp back if smaller
+      if ( nwdims[d] < fsdims[d] )
+        nwdims[d] = fsdims[d];
+    }
+
+    // extend dataset
+    H5Dextend( dsdata, nwdims );
+
+    // get the extended data space
+    fspace = H5Dget_space( dsdata );
+
+    H5Sselect_hyperslab( fspace, H5S_SELECT_SET,
+                         offset, NULL, dsdims, NULL );
+
+    // convert type
+    hid_t dstype = GetH5type( matrix.type() );
+
+    // expand channs
+    if ( matrix.channels() > 1 )
+    {
+      hsize_t adims[1] = { (hsize_t) channs };
+      dstype = H5Tarray_create( dstype, 1, adims );
+    }
+
+    // write into dataset
+    H5Dwrite( dsdata, dstype, dspace, fspace,
+              H5P_DEFAULT, matrix.data );
+
+    if ( matrix.channels() > 1 )
+      H5Tclose( dstype );
+
+    H5Sclose( dspace );
+    H5Sclose( fspace );
+    H5Dclose( dsdata );
+}
+
+/*
+ * std::vector<cv::KeyPoint>
+ */
+
+int HDF5Impl::kpgetsize( String kplabel, int dims_flag ) const
+{
+    vector<int> sizes = dsgetsize( kplabel, dims_flag );
+
+    CV_Assert( sizes.size() == 1 );
+
+    return sizes[0];
+}
+
+void HDF5Impl::kpcreate( const int size, String kplabel,
+                 const int compresslevel, const int chunks ) const
+{
+    // size valid
+    CV_Assert( size >= H5_UNLIMITED );
+
+    // valid chunks
+    CV_Assert( chunks == H5_NONE || chunks > 0 );
+
+    // compress valid H5_NONE, 0-9
+    CV_Assert( compresslevel >= H5_NONE && compresslevel <= 9 );
+
+    if ( hlexists( kplabel ) == true )
+      CV_Error( Error::StsInternal, "Requested dataset already exists." );
+
+    hsize_t dchunk[1];
+    hsize_t dsdims[1];
+    hsize_t maxdim[1];
+
+    // dataset dimension
+    if ( size == H5_UNLIMITED )
+    {
+      dsdims[0] = 0;
+      maxdim[0] = H5S_UNLIMITED;
+    }
+    else
+    {
+      dsdims[0] = size;
+      maxdim[0] = size;
+    }
+
+    // default chunking
+    if ( chunks == H5_NONE )
+    {
+      if ( size == H5_UNLIMITED )
+        dchunk[0] = 1;
+      else
+        dchunk[0] = size;
+    }
+    else
+      dchunk[0] = chunks;
+
+    // dataset compound type
+    hid_t dstype = H5Tcreate( H5T_COMPOUND, sizeof( KeyPoint ) );
+    H5Tinsert( dstype, "xpos", HOFFSET( KeyPoint, pt.x ), H5T_NATIVE_FLOAT );
+    H5Tinsert( dstype, "ypos", HOFFSET( KeyPoint, pt.y ), H5T_NATIVE_FLOAT );
+    H5Tinsert( dstype, "size", HOFFSET( KeyPoint, size ), H5T_NATIVE_FLOAT );
+    H5Tinsert( dstype, "angle", HOFFSET( KeyPoint, angle ), H5T_NATIVE_FLOAT );
+    H5Tinsert( dstype, "response", HOFFSET( KeyPoint, response ), H5T_NATIVE_FLOAT );
+    H5Tinsert( dstype, "octave", HOFFSET( KeyPoint, octave ), H5T_NATIVE_INT32 );
+    H5Tinsert( dstype, "class_id", HOFFSET( KeyPoint, class_id ), H5T_NATIVE_INT32 );
+
+    // create dataset space
+    hid_t dspace = H5Screate_simple( 1, dsdims, maxdim );
+
+    // create data property
+    hid_t dsdcpl = H5Pcreate( H5P_DATASET_CREATE );
+
+    // set properties
+    if ( compresslevel >= 0 )
+      H5Pset_deflate( dsdcpl, compresslevel );
+
+    // if chunking or compression
+    if ( dchunk[0] > 0 || compresslevel >= 0 )
+      H5Pset_chunk( dsdcpl, 1, dchunk );
+
+    // create data
+    H5Dcreate( m_h5_file_id, kplabel.c_str(), dstype,
+               dspace, H5P_DEFAULT, dsdcpl, H5P_DEFAULT );
+
+    H5Tclose( dstype );
+    H5Pclose( dsdcpl );
+    H5Sclose( dspace );
+}
+
+void HDF5Impl::kpwrite( const vector<KeyPoint> keypoints, String kplabel,
+                 const int offset, const int counts ) const
+{
+    CV_Assert( keypoints.size() > 0 );
+
+    hsize_t dsddims[1];
+    hsize_t doffset[1];
+
+    // replicate vector dimension
+    doffset[0] = 0;
+    dsddims[0] = keypoints.size();
+
+    // pre-create dataset if needed
+    if ( hlexists( kplabel ) == false )
+      kpcreate( (int) dsddims[0], kplabel );
+
+    // set custom amount of data
+    if ( counts != H5_NONE )
+      dsddims[0] = counts;
+
+    // open dataset
+    hid_t dsdata = H5Dopen( m_h5_file_id, kplabel.c_str(), H5P_DEFAULT );
+
+    // create input data space
+    hid_t dspace = H5Screate_simple( 1, dsddims, NULL );
+
+    // set custom offsets
+    if ( offset != H5_NONE )
+      doffset[0] = offset;
+
+    // create offset write window space
+    hid_t fspace = H5Dget_space( dsdata );
+    H5Sselect_hyperslab( fspace, H5S_SELECT_SET,
+                         doffset, NULL, dsddims, NULL );
+
+    // memory compound type
+    hid_t mmtype = H5Tcreate( H5T_COMPOUND, sizeof( KeyPoint ) );
+    H5Tinsert( mmtype, "xpos", HOFFSET( KeyPoint, pt.x ), H5T_NATIVE_FLOAT );
+    H5Tinsert( mmtype, "ypos", HOFFSET( KeyPoint, pt.y ), H5T_NATIVE_FLOAT );
+    H5Tinsert( mmtype, "size", HOFFSET( KeyPoint, size ), H5T_NATIVE_FLOAT );
+    H5Tinsert( mmtype, "angle", HOFFSET( KeyPoint, angle ), H5T_NATIVE_FLOAT );
+    H5Tinsert( mmtype, "response", HOFFSET( KeyPoint, response ), H5T_NATIVE_FLOAT );
+    H5Tinsert( mmtype, "octave", HOFFSET( KeyPoint, octave ), H5T_NATIVE_INT32 );
+    H5Tinsert( mmtype, "class_id", HOFFSET( KeyPoint, class_id ), H5T_NATIVE_INT32 );
+
+    // write into dataset
+    H5Dwrite( dsdata, mmtype, dspace, fspace, H5P_DEFAULT, &keypoints[0] );
+
+    H5Tclose( mmtype );
+    H5Sclose( dspace );
+    H5Sclose( fspace );
+    H5Dclose( dsdata );
+}
+
+void HDF5Impl::kpinsert( const vector<KeyPoint> keypoints, String kplabel,
+                 const int offset, const int counts ) const
+{
+    CV_Assert( keypoints.size() > 0 );
+
+    // check dataset exists
+    if ( hlexists( kplabel ) == false )
+      CV_Error( Error::StsInternal, "Dataset does not exist." );
+
+    hsize_t dsddims[1];
+    hsize_t doffset[1];
+
+    // replicate vector dimension
+    doffset[0] = 0;
+    dsddims[0] = keypoints.size();
+
+    // set custom amount of data
+    if ( counts != H5_NONE )
+      dsddims[0] = counts;
+
+    // open dataset
+    hid_t dsdata = H5Dopen( m_h5_file_id, kplabel.c_str(), H5P_DEFAULT );
+
+    // create input data space
+    hid_t dspace = H5Screate_simple( 1, dsddims, NULL );
+
+    // set custom offsets
+    if ( offset != H5_NONE )
+      doffset[0] = offset;
+
+    // get actual file space and dims
+    hid_t fspace = H5Dget_space( dsdata );
+    int f_dims = H5Sget_simple_extent_ndims( fspace );
+    hsize_t fsdims[f_dims];
+    H5Sget_simple_extent_dims( fspace, fsdims, NULL );
+    H5Sclose( fspace );
+
+    CV_Assert( f_dims == 1 );
+
+    // compute new extents
+    hsize_t nwdims[1] = { 0 };
+    // add offset
+    if ( offset != H5_NONE )
+      nwdims[0] += offset;
+    // add counts or vector size
+    if ( counts != H5_NONE )
+      nwdims[0] += counts;
+    else
+      nwdims[0] += keypoints.size();
+
+    // clamp back if smaller
+    if ( nwdims[0] < fsdims[0] )
+      nwdims[0] = fsdims[0];
+
+    // extend dataset
+    H5Dextend( dsdata, nwdims );
+
+    // get the extended data space
+    fspace = H5Dget_space( dsdata );
+
+    H5Sselect_hyperslab( fspace, H5S_SELECT_SET,
+                         doffset, NULL, dsddims, NULL );
+
+    // memory compound type
+    hid_t mmtype = H5Tcreate( H5T_COMPOUND, sizeof( KeyPoint ) );
+    H5Tinsert( mmtype, "xpos", HOFFSET( KeyPoint, pt.x ), H5T_NATIVE_FLOAT );
+    H5Tinsert( mmtype, "ypos", HOFFSET( KeyPoint, pt.y ), H5T_NATIVE_FLOAT );
+    H5Tinsert( mmtype, "size", HOFFSET( KeyPoint, size ), H5T_NATIVE_FLOAT );
+    H5Tinsert( mmtype, "angle", HOFFSET( KeyPoint, angle ), H5T_NATIVE_FLOAT );
+    H5Tinsert( mmtype, "response", HOFFSET( KeyPoint, response ), H5T_NATIVE_FLOAT );
+    H5Tinsert( mmtype, "octave", HOFFSET( KeyPoint, octave ), H5T_NATIVE_INT32 );
+    H5Tinsert( mmtype, "class_id", HOFFSET( KeyPoint, class_id ), H5T_NATIVE_INT32 );
+
+    // write into dataset
+    H5Dwrite( dsdata, mmtype, dspace, fspace, H5P_DEFAULT, &keypoints[0] );
+
+    H5Tclose( mmtype );
+    H5Sclose( dspace );
+    H5Sclose( fspace );
+    H5Dclose( dsdata );
+}
+
+void HDF5Impl::kpread( vector<KeyPoint>& keypoints, String kplabel,
+                 const int offset, const int counts ) const
+{
+    CV_Assert( keypoints.size() == 0 );
+
+    // open the HDF5 dataset
+    hid_t dsdata = H5Dopen( m_h5_file_id, kplabel.c_str(), H5P_DEFAULT );
+
+    // get data type
+    hid_t dstype = H5Dget_type( dsdata );
+
+    // get file space
+    hid_t fspace = H5Dget_space( dsdata );
+
+    // fetch rank
+    int n_dims = H5Sget_simple_extent_ndims( fspace );
+
+    CV_Assert( n_dims == 1 );
+
+    // fetch dims
+    hsize_t dsddims[1];
+    H5Sget_simple_extent_dims( fspace, dsddims, NULL );
+
+    // set amount by custom offset
+    if ( offset != H5_NONE )
+      dsddims[0] -= offset;
+
+    // set custom amount of data
+    if ( counts != H5_NONE )
+      dsddims[0] = counts;
+
+    // get memory write window
+    hsize_t foffset[1] = { 0 };
+
+    // allocate keypoints vector
+    keypoints.resize( dsddims[0] );
+
+    // get blank data space
+    hid_t dspace = H5Screate_simple( 1, dsddims, NULL );
+
+    // get matrix write window
+    H5Sselect_hyperslab( dspace, H5S_SELECT_SET,
+                         foffset, NULL, dsddims, NULL );
+
+    // set custom offsets
+    if ( offset != H5_NONE )
+      foffset[0] = offset;
+
+    // get a file read window
+    H5Sselect_hyperslab( fspace, H5S_SELECT_SET,
+                         foffset, NULL, dsddims, NULL );
+
+    // read from DS
+    H5Dread( dsdata, dstype, dspace, fspace, H5P_DEFAULT, &keypoints[0] );
+
+    H5Tclose( dstype );
+    H5Sclose( dspace );
+    H5Sclose( fspace );
+    H5Dclose( dsdata );
+}
+
+CV_EXPORTS Ptr<HDF5> open( String HDF5Filename )
+{
+    return makePtr<HDF5Impl>( HDF5Filename );
+}
+
+} // end namespace hdf
+} // end namespace cv
diff --git a/modules/hdf/src/precomp.hpp b/modules/hdf/src/precomp.hpp
new file mode 100644
index 000000000..3069155f7
--- /dev/null
+++ b/modules/hdf/src/precomp.hpp
@@ -0,0 +1,43 @@
+/*********************************************************************
+ * Software License Agreement (BSD License)
+ *
+ * Copyright (c) 2015
+ * Balint Cristian
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above
+ *    copyright notice, this list of conditions and the following
+ *    disclaimer in the documentation and/or other materials provided
+ *    with the distribution.
+ *  * Neither the name of the copyright holders nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *********************************************************************/
+
+#ifndef __OPENCV_HDF_PRECOMP_H__
+#define __OPENCV_HDF_PRECOMP_H__
+
+#include "opencv2/core.hpp"
+
+#include <hdf5.h>
+
+#include "opencv2/hdf.hpp"
+
+#endif