Commit f03e415e authored by Alexander Alekhin's avatar Alexander Alekhin

Merge pull request #1493 from csukuangfj:improve-hdf

parents 81ca8dab 13508c76
......@@ -11,7 +11,7 @@
* using "kernel2" with radius 100.
*
* Both kernels are created from linear function, using
* linear interpolation (parametr ft:LINEAR).
* linear interpolation (parameter ft:LINEAR).
*/
#include "opencv2/core.hpp"
......@@ -26,7 +26,7 @@ int main(void)
// Input image
Mat I = imread("input.png");
// Kernel cretion
// Kernel creation
Mat kernel1, kernel2;
ft::createKernel(ft::LINEAR, 3, kernel1, 3);
......
......@@ -47,6 +47,9 @@ This module provides storage routines for Hierarchical Data Format objects.
Hierarchical Data Format version 5
--------------------------------------------------------
In order to use it, the HDF5 library has to be installed, which
means CMake should be able to find it using `find_package(HDF5)`.
@}
*/
......
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
/**
* @file create_groups.cpp
* @author Fangjun Kuang <csukuangfj dot at gmail dot com>
* @date December 2017
*
* @brief It demonstrates how to create HDF5 groups and subgroups.
*
* Basic steps:
* 1. Use hdf::open to create an HDF5 file
* 2. Use HDF5::hlexists to check if a group exists or not
* 3. Use HDF5::grcreate to create a group by specifying its name
* 4. Use hdf::close to close an HDF5 file after modifying it
*
*/
//! [tutorial]
#include <iostream>
#include <opencv2/core.hpp>
#include <opencv2/hdf.hpp>
using namespace cv;
int main()
{
//! [create_group]
//! [tutorial_create_file]
Ptr<hdf::HDF5> h5io = hdf::open("mytest.h5");
//! [tutorial_create_file]
//! [tutorial_create_group]
// "/" means the root group, which is always present
if (!h5io->hlexists("/Group1"))
h5io->grcreate("/Group1");
else
std::cout << "/Group1 has already been created, skip it.\n";
//! [tutorial_create_group]
//! [tutorial_create_subgroup]
// Note that Group1 must have been created above; otherwise, an exception will occur
if (!h5io->hlexists("/Group1/SubGroup1"))
h5io->grcreate("/Group1/SubGroup1");
else
std::cout << "/Group1/SubGroup1 has already been created, skip it.\n";
//! [tutorial_create_subgroup]
//! [tutorial_close_file]
h5io->close();
//! [tutorial_close_file]
//! [create_group]
return 0;
}
//! [tutorial]
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
/**
* @file create_read_write_datasets.cpp
* @author Fangjun Kuang <csukuangfj dot at gmail dot com>
* @date December 2017
*
* @brief It demonstrates how to create a dataset, how
* to write a cv::Mat to the dataset and how to
* read a cv::Mat from it.
*
*/
#ifdef __GNUC__
# pragma GCC diagnostic ignored "-Wmissing-declarations"
# if defined __clang__ || defined __APPLE__
# pragma GCC diagnostic ignored "-Wmissing-prototypes"
# pragma GCC diagnostic ignored "-Wextra"
# endif
#endif
//! [tutorial]
#include <iostream>
#include <opencv2/core.hpp>
#include <opencv2/hdf.hpp>
using namespace cv;
void write_root_group_single_channel()
{
String filename = "root_group_single_channel.h5";
String dataset_name = "/single"; // Note that it is a child of the root group /
// prepare data
Mat data;
data = (cv::Mat_<float>(2, 3) << 0, 1, 2, 3, 4, 5);
//! [tutorial_open_file]
Ptr<hdf::HDF5> h5io = hdf::open(filename);
//! [tutorial_open_file]
//! [tutorial_write_root_single_channel]
// write data to the given dataset
// the dataset "/single" is created automatically, since it is a child of the root
h5io->dswrite(data, dataset_name);
//! [tutorial_write_root_single_channel]
//! [tutorial_read_dataset]
Mat expected;
h5io->dsread(expected, dataset_name);
//! [tutorial_read_dataset]
//! [tutorial_check_result]
double diff = norm(data - expected);
CV_Assert(abs(diff) < 1e-10);
//! [tutorial_check_result]
h5io->close();
}
void write_single_channel()
{
String filename = "single_channel.h5";
String parent_name = "/data";
String dataset_name = parent_name + "/single";
// prepare data
Mat data;
data = (cv::Mat_<float>(2, 3) << 0, 1, 2, 3, 4, 5);
Ptr<hdf::HDF5> h5io = hdf::open(filename);
//! [tutorial_create_dataset]
// first we need to create the parent group
if (!h5io->hlexists(parent_name)) h5io->grcreate(parent_name);
// create the dataset if it does not exist
if (!h5io->hlexists(dataset_name)) h5io->dscreate(data.rows, data.cols, data.type(), dataset_name);
//! [tutorial_create_dataset]
// the following is the same as in the function write_root_group_single_channel() above
h5io->dswrite(data, dataset_name);
Mat expected;
h5io->dsread(expected, dataset_name);
double diff = norm(data - expected);
CV_Assert(abs(diff) < 1e-10);
h5io->close();
}
/*
 * creating, reading and writing multi-channel matrices
 * work the same way as for single-channel matrices
 */
void write_multiple_channels()
{
String filename = "two_channels.h5";
String parent_name = "/data";
String dataset_name = parent_name + "/two_channels";
// prepare data
Mat data(2, 3, CV_32SC2);
for (size_t i = 0; i < data.total()*data.channels(); i++)
((int*) data.data)[i] = (int)i;
Ptr<hdf::HDF5> h5io = hdf::open(filename);
// first we need to create the parent group
if (!h5io->hlexists(parent_name)) h5io->grcreate(parent_name);
// create the dataset if it does not exist
if (!h5io->hlexists(dataset_name)) h5io->dscreate(data.rows, data.cols, data.type(), dataset_name);
// the following is the same as in the function write_root_group_single_channel() above
h5io->dswrite(data, dataset_name);
Mat expected;
h5io->dsread(expected, dataset_name);
double diff = norm(data - expected);
CV_Assert(abs(diff) < 1e-10);
h5io->close();
}
int main()
{
write_root_group_single_channel();
write_single_channel();
write_multiple_channels();
return 0;
}
//! [tutorial]
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
/**
* @file test_hdf5.cpp
* @author Fangjun Kuang <csukuangfj dot at gmail dot com>
* @date December 2017
*
*/
#include <stdio.h> // for remove()
#include "test_precomp.hpp"
#include <vector>
using namespace cv;
struct HDF5_Test : public testing::Test
{
virtual void SetUp()
{
m_filename = "test.h5";
// 0 1 2
// 3 4 5
m_single_channel.create(2, 3, CV_32F);
for (size_t i = 0; i < m_single_channel.total(); i++)
{
((float*)m_single_channel.data)[i] = (float)i;
}
// 0 1 2 3 4 5
// 6 7 8 9 10 11
m_two_channels.create(2, 3, CV_32SC2);
for (size_t i = 0; i < m_two_channels.total()*m_two_channels.channels(); i++)
{
((int*)m_two_channels.data)[i] = (int)i;
}
}
//! Remove the hdf5 file
void reset()
{
remove(m_filename.c_str());
}
String m_filename; //!< filename for testing
Ptr<hdf::HDF5> m_hdf_io; //!< HDF5 file pointer
Mat m_single_channel; //!< single channel matrix for test
Mat m_two_channels; //!< two-channel matrix for test
};
TEST_F(HDF5_Test, create_a_single_group)
{
reset();
String group_name = "parent";
m_hdf_io = hdf::open(m_filename);
m_hdf_io->grcreate(group_name);
EXPECT_EQ(m_hdf_io->hlexists(group_name), true);
EXPECT_EQ(m_hdf_io->hlexists("child"), false);
m_hdf_io->close();
}
TEST_F(HDF5_Test, create_a_child_group)
{
reset();
String parent = "parent";
String child = parent + "/child";
m_hdf_io = hdf::open(m_filename);
m_hdf_io->grcreate(parent);
m_hdf_io->grcreate(child);
EXPECT_EQ(m_hdf_io->hlexists(parent), true);
EXPECT_EQ(m_hdf_io->hlexists(child), true);
m_hdf_io->close();
}
TEST_F(HDF5_Test, create_dataset)
{
reset();
String dataset_single_channel = "/single";
String dataset_two_channels = "/dual";
m_hdf_io = hdf::open(m_filename);
m_hdf_io->dscreate(m_single_channel.rows,
m_single_channel.cols,
m_single_channel.type(),
dataset_single_channel);
m_hdf_io->dscreate(m_two_channels.rows,
m_two_channels.cols,
m_two_channels.type(),
dataset_two_channels);
EXPECT_EQ(m_hdf_io->hlexists(dataset_single_channel), true);
EXPECT_EQ(m_hdf_io->hlexists(dataset_two_channels), true);
std::vector<int> dims;
dims = m_hdf_io->dsgetsize(dataset_single_channel, hdf::HDF5::H5_GETDIMS);
EXPECT_EQ(dims.size(), (size_t)2);
EXPECT_EQ(dims[0], m_single_channel.rows);
EXPECT_EQ(dims[1], m_single_channel.cols);
dims = m_hdf_io->dsgetsize(dataset_two_channels, hdf::HDF5::H5_GETDIMS);
EXPECT_EQ(dims.size(), (size_t)2);
EXPECT_EQ(dims[0], m_two_channels.rows);
EXPECT_EQ(dims[1], m_two_channels.cols);
int type;
type = m_hdf_io->dsgettype(dataset_single_channel);
EXPECT_EQ(type, m_single_channel.type());
type = m_hdf_io->dsgettype(dataset_two_channels);
EXPECT_EQ(type, m_two_channels.type());
m_hdf_io->close();
}
TEST_F(HDF5_Test, write_read_dataset_1)
{
reset();
String dataset_single_channel = "/single";
String dataset_two_channels = "/dual";
m_hdf_io = hdf::open(m_filename);
// since the dataset is under the root group, it is created by dswrite() automatically.
m_hdf_io->dswrite(m_single_channel, dataset_single_channel);
m_hdf_io->dswrite(m_two_channels, dataset_two_channels);
EXPECT_EQ(m_hdf_io->hlexists(dataset_single_channel), true);
EXPECT_EQ(m_hdf_io->hlexists(dataset_two_channels), true);
// read single channel matrix
Mat single;
m_hdf_io->dsread(single, dataset_single_channel);
EXPECT_EQ(single.type(), m_single_channel.type());
EXPECT_EQ(single.size(), m_single_channel.size());
EXPECT_NEAR(norm(single-m_single_channel), 0, 1e-10);
// read dual channel matrix
Mat dual;
m_hdf_io->dsread(dual, dataset_two_channels);
EXPECT_EQ(dual.type(), m_two_channels.type());
EXPECT_EQ(dual.size(), m_two_channels.size());
EXPECT_NEAR(norm(dual-m_two_channels), 0, 1e-10);
m_hdf_io->close();
}
TEST_F(HDF5_Test, write_read_dataset_2)
{
reset();
// create the dataset manually if it is not inside
// the root group
String parent = "/parent";
String dataset_single_channel = parent + "/single";
String dataset_two_channels = parent + "/dual";
m_hdf_io = hdf::open(m_filename);
m_hdf_io->grcreate(parent);
EXPECT_EQ(m_hdf_io->hlexists(parent), true);
m_hdf_io->dscreate(m_single_channel.rows,
m_single_channel.cols,
m_single_channel.type(),
dataset_single_channel);
m_hdf_io->dscreate(m_two_channels.rows,
m_two_channels.cols,
m_two_channels.type(),
dataset_two_channels);
EXPECT_EQ(m_hdf_io->hlexists(dataset_single_channel), true);
EXPECT_EQ(m_hdf_io->hlexists(dataset_two_channels), true);
m_hdf_io->dswrite(m_single_channel, dataset_single_channel);
m_hdf_io->dswrite(m_two_channels, dataset_two_channels);
EXPECT_EQ(m_hdf_io->hlexists(dataset_single_channel), true);
EXPECT_EQ(m_hdf_io->hlexists(dataset_two_channels), true);
// read single channel matrix
Mat single;
m_hdf_io->dsread(single, dataset_single_channel);
EXPECT_EQ(single.type(), m_single_channel.type());
EXPECT_EQ(single.size(), m_single_channel.size());
EXPECT_NEAR(norm(single-m_single_channel), 0, 1e-10);
// read dual channel matrix
Mat dual;
m_hdf_io->dsread(dual, dataset_two_channels);
EXPECT_EQ(dual.type(), m_two_channels.type());
EXPECT_EQ(dual.size(), m_two_channels.size());
EXPECT_NEAR(norm(dual-m_two_channels), 0, 1e-10);
m_hdf_io->close();
}
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#include "test_precomp.hpp"
CV_TEST_MAIN("cv")
\ No newline at end of file
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#ifdef __GNUC__
# pragma GCC diagnostic ignored "-Wmissing-declarations"
# if defined __clang__ || defined __APPLE__
# pragma GCC diagnostic ignored "-Wmissing-prototypes"
# pragma GCC diagnostic ignored "-Wextra"
# endif
#endif
#ifndef __OPENCV_TEST_PRECOMP_HPP__
#define __OPENCV_TEST_PRECOMP_HPP__
#include "opencv2/ts.hpp"
#include "opencv2/core.hpp"
#include "opencv2/hdf.hpp"
#endif
Creating Groups {#tutorial_hdf_create_groups}
===============================
Goal
----
This tutorial will show you:
- How to create an HDF5 file?
- How to create a group?
- How to check whether a given group exists or not?
- How to create a subgroup?
Source Code
----
The following code creates two groups: `Group1` and `SubGroup1`, where
`SubGroup1` is a child of `Group1`.
You can download the code from [here][1] or find it in the file
`modules/hdf/samples/create_groups.cpp` of the opencv_contrib source code library.
@snippet samples/create_groups.cpp tutorial
Explanation
----
First, we create an HDF5 file
@snippet samples/create_groups.cpp tutorial_create_file
If the given file does not exist, it will be created. Otherwise, it is opened for reading and writing.
Next, we create the group `Group1`
@snippet samples/create_groups.cpp tutorial_create_group
Note that we have to check whether `/Group1` exists using
the function `hlexists` before creating it. You cannot create
a group that already exists; attempting to do so will cause an error.
Then, we create the subgroup named `SubGroup1`. In order to
indicate that it is a subgroup of `Group1`, we have to
use the full group name `/Group1/SubGroup1`:
@snippet samples/create_groups.cpp tutorial_create_subgroup
Note that before creating a subgroup, we have to make sure
that its parent group exists. Otherwise, an error will occur.
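If you need a deeper hierarchy, the same check-then-create pattern can be applied level by level. Below is a minimal sketch of that idea; the path `/Group1/SubGroup1/SubSubGroup1` and the helper `create_group_recursively` are chosen for illustration and are not part of the sample, only `hlexists` and `grcreate` from above are used:
@code
#include <string>
#include <opencv2/core.hpp>
#include <opencv2/hdf.hpp>

// Create every group along an absolute path such as "/Group1/SubGroup1/SubSubGroup1".
static void create_group_recursively(const cv::Ptr<cv::hdf::HDF5>& h5io, const std::string& path)
{
    size_t pos = 1; // skip the leading '/'
    while (pos != std::string::npos)
    {
        pos = path.find('/', pos);
        // current prefix, e.g. "/Group1", then "/Group1/SubGroup1", ...
        std::string current = (pos == std::string::npos) ? path : path.substr(0, pos);
        if (!h5io->hlexists(current))
            h5io->grcreate(current);
        if (pos != std::string::npos)
            ++pos;
    }
}

int main()
{
    cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open("mytest.h5");
    create_group_recursively(h5io, "/Group1/SubGroup1/SubSubGroup1");
    h5io->close();
    return 0;
}
@endcode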
In the end, we have to close the file
@snippet samples/create_groups.cpp tutorial_close_file
Result
----
There are many tools that can be used to inspect a given HDF file, such
as HDFView and h5dump. If you are using Ubuntu, you can install
them with the following command:
@code
sudo apt-get install hdf5-tools hdfview
@endcode
There are also binaries available from the HDF Group's official website <https://support.hdfgroup.org/HDF5/Tutor/tools.html>.
The following figure shows the result visualized with the tool HDFView:
![Figure 1: Results of creating groups and subgroups](pics/create_groups.png)
The output for `h5dump` is:
@code
$ h5dump mytest.h5
HDF5 "mytest.h5" {
GROUP "/" {
GROUP "Group1" {
GROUP "SubGroup1" {
}
}
}
}
@endcode
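Besides external tools, you can also verify the result programmatically by reopening the file and querying it with `hlexists`. A minimal sketch, using the file name from the sample:
@code
#include <iostream>
#include <opencv2/core.hpp>
#include <opencv2/hdf.hpp>

int main()
{
    cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open("mytest.h5");
    // hlexists() returns true if the given group (or dataset) exists
    std::cout << "/Group1 exists: " << h5io->hlexists("/Group1") << std::endl;
    std::cout << "/Group1/SubGroup1 exists: " << h5io->hlexists("/Group1/SubGroup1") << std::endl;
    h5io->close();
    return 0;
}
@endcode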
[1]: https://github.com/opencv/opencv_contrib/tree/master/modules/hdf/samples/create_groups.cpp
Creating, Writing and Reading Datasets {#tutorial_hdf_create_read_write_datasets}
===============================
Goal
----
This tutorial shows you:
- How to create a dataset?
- How to write a `cv::Mat` to a dataset?
- How to read a `cv::Mat` from a dataset?
@note Currently, only reading and writing `cv::Mat` is supported, and the matrix has to be continuous
in memory. Support for other data types has not been implemented yet.
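If the matrix you want to write might not be continuous (for example, a sub-matrix obtained through a ROI), a simple workaround is to write a continuous copy. A minimal sketch of this idea; the file name `roi_example.h5` and the dataset name `/patch` are hypothetical:
@code
#include <opencv2/core.hpp>
#include <opencv2/hdf.hpp>

int main()
{
    cv::Mat big = cv::Mat::eye(10, 10, CV_32F);
    cv::Mat roi = big(cv::Rect(2, 2, 4, 4)); // a view into "big"; usually not continuous

    // clone() always yields a continuous copy
    cv::Mat to_write = roi.isContinuous() ? roi : roi.clone();

    cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open("roi_example.h5");
    h5io->dswrite(to_write, "/patch"); // "/patch" is under the root group, so it is created automatically
    h5io->close();
    return 0;
}
@endcode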
Source Code
----
The following code demonstrates writing a single-channel
matrix and a two-channel matrix to datasets and then reading them
back.
You can download the code from [here][1] or find it in the file
`modules/hdf/samples/create_read_write_datasets.cpp` of the opencv_contrib source code library.
@snippet samples/create_read_write_datasets.cpp tutorial
Explanation
----
The first step for creating a dataset is to open the file
@snippet samples/create_read_write_datasets.cpp tutorial_open_file
For the function `write_root_group_single_channel()`, since
the dataset name is `/single`, which is inside the root group, we can use
@snippet samples/create_read_write_datasets.cpp tutorial_write_root_single_channel
to write the data directly to the dataset without creating
it beforehand, because it is created automatically inside
`HDF5::dswrite()`.
@warning This applies only to datasets that reside inside the root group.
Of course, we can create the dataset by ourselves:
@snippet samples/create_read_write_datasets.cpp tutorial_create_dataset
To read data from a dataset, we use
@snippet samples/create_read_write_datasets.cpp tutorial_read_dataset
by specifying the name of the dataset.
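If you do not know the shape or type of a stored dataset in advance, you can query them with `dsgetsize` and `dsgettype` before reading. A minimal sketch, assuming the file and dataset created by `write_root_group_single_channel()` above:
@code
#include <iostream>
#include <vector>
#include <opencv2/core.hpp>
#include <opencv2/hdf.hpp>

int main()
{
    cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open("root_group_single_channel.h5");

    // query dimensions and element type of the dataset "/single"
    std::vector<int> dims = h5io->dsgetsize("/single", cv::hdf::HDF5::H5_GETDIMS);
    int type = h5io->dsgettype("/single");
    std::cout << "rows: " << dims[0] << ", cols: " << dims[1]
              << ", type: " << type << std::endl; // type is a cv::Mat type such as CV_32F

    cv::Mat data;
    h5io->dsread(data, "/single");
    h5io->close();
    return 0;
}
@endcode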
We can check that the data read out is exactly the data written before by using
@snippet samples/create_read_write_datasets.cpp tutorial_check_result
Results
----
Figure 1 shows the result for the file `root_group_single_channel.h5`, visualized
using the tool HDFView. The results for the datasets that are not direct children
of the root group are given in Figure 2 and Figure 3, respectively.
![Figure 1: Result for writing a single channel matrix to a dataset inside the root group](pics/root_group_single_channel.png)
![Figure 2: Result for writing a single channel matrix to a dataset not in the root group](pics/single_channel.png)
![Figure 3: Result for writing a two-channel matrix to a dataset not in the root group](pics/two_channels.png)
[1]: https://github.com/opencv/opencv_contrib/tree/master/modules/hdf/samples/create_read_write_datasets.cpp
The Hierarchical Data Format (hdf) I/O {#tutorial_table_of_content_hdf}
=====================================
Here you will learn how to read and write an HDF5 file using OpenCV.
Currently, only `cv::Mat` is supported.
Note that the HDF5 library has to be installed on your system
to use this module.
- @subpage tutorial_hdf_create_groups
*Compatibility:* \> OpenCV 3.0
*Author:* Fangjun Kuang
You will learn how to create groups and subgroups.
- @subpage tutorial_hdf_create_read_write_datasets
*Compatibility:* \> OpenCV 3.0
*Author:* Fangjun Kuang
You will learn how to create, read and write datasets.