Consider the following code:
#include <H5Cpp.h>
#include <vector>
#include <eigen3/Eigen/Dense>
#include <iostream>
double* matrix_to_array(Eigen::MatrixXd const &input){
int const NX = input.rows();
int const NY = input.cols();
double *data = new double[NX*NY];
for(std::size_t i=0; i<NX; i++){
for(std::size_t j=0; j<NY; j++){
data[j+i*NX] = input(i,j);
}
}
return data;
}
int main() {
Eigen::MatrixXd data = Eigen::MatrixXd::Random(124, 4654);
data.fill(3);
H5::H5File file("data.hdf5", H5F_ACC_TRUNC);
hsize_t dimsf[2] = {data.rows(), data.cols()};
H5::DataSpace dataspace(2, dimsf);
H5::DataSet dataset = file.createDataSet("test_data_set",
H5::PredType::NATIVE_DOUBLE,
dataspace);
auto data_arr = matrix_to_array(data);
dataset.write(data_arr, H5::PredType::NATIVE_DOUBLE);
delete[] data_arr;
}
It compiles just fine using the following CMakeLists.txt
cmake_minimum_required(VERSION 2.8)
project(test)
find_package(HDF5 REQUIRED COMPONENTS C CXX)
include_directories(${HDF5_INCLUDE_DIRS})
add_executable(hdf5 hdf5.cpp)
target_link_libraries(hdf5 ${HDF5_HL_LIBRARIES} ${HDF5_CXX_LIBRARIES} ${HDF5_LIBRARIES})
After executing I thought everything was fine, but upon running the following python code (which bscly. just prints the data row by row)
import h5py
import numpy as np
hf = h5py.File("build/data.hdf5", "r")
keys = list(hf.keys())
data_set = hf.get(keys[0])
data_set_np = np.array(data_set)
for row in data_set_np:
print(row)
I realized that the first 18000 or so entries of the matrix were properly written to the hdf5-file, while the rest was set to zero for some reason. I checked data
and data_arr
in the above C++ code, and all the entries of both matrices are set to 0, so the error must happen somewhere in the writing process to the hdf5-file... The issue is, I don't see where. What exactly am I missing?