add hdf5-mpi test; works but there are hdf5 identifiers left open...

Christian Zimmermann 2024-10-31 18:50:24 -07:00
parent 1663f7ae9a
commit 8c75c96a64
11 changed files with 191 additions and 12 deletions
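The commit message notes that some HDF5 identifiers are left open. A minimal sketch for tracking such leaks down with the stock HDF5 C API (not part of this commit; the function name reportOpenIds is made up for illustration):

#include <hdf5.h>
#include <iostream>
#include <vector>

// Print how many identifiers are still open on a file and list their types;
// call this right before H5Fclose() to spot leaked datasets, groups, etc.
void reportOpenIds(hid_t file_id)
{
    const ssize_t n = H5Fget_obj_count(file_id, H5F_OBJ_ALL);
    std::cout << "open identifiers: " << n << std::endl;
    if(n > 0){
        std::vector<hid_t> ids(static_cast<size_t>(n));
        H5Fget_obj_ids(file_id, H5F_OBJ_ALL, ids.size(), ids.data());
        for(const hid_t id: ids){
            std::cout << "  id " << id << ", type " << H5Iget_type(id) << std::endl;
        }
    }
}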

View file

@@ -23,7 +23,7 @@ if(HDF5_IS_PARALLEL)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/include)
add_subdirectory(lib)
#add_subdirectory(tests)
add_subdirectory(tests)
install(CODE "execute_process(COMMAND sed -i \"s|CXZ_H5_MPI_BUILD_MODE 1|CXZ_H5_MPI_BUILD_MODE 0|g;\" ${CMAKE_CURRENT_SOURCE_DIR}/include/h5_mpi_base.h)")
install(DIRECTORY include/ DESTINATION ${INSTALL_PATH}/include/cnorxz/hdf5-mpi)
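The install(CODE ...) step above rewrites CXZ_H5_MPI_BUILD_MODE from 1 to 0 in the installed header, distinguishing in-tree builds from installed use. A hedged sketch of how such a flag is typically consumed; the actual contents of h5_mpi_base.h are not shown in this commit, so the include paths below are assumptions:

// h5_mpi_base.h (hypothetical excerpt)
#define CXZ_H5_MPI_BUILD_MODE 1 // flipped to 0 by the sed command at install time

#if CXZ_H5_MPI_BUILD_MODE
// building the library itself: headers come from the source tree
#include "cnorxz.h"
#else
// using the installed library: headers live under the install prefix
#include "cnorxz/cnorxz.h"
#endif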

View file

@@ -0,0 +1,12 @@
// -*- C++ -*-
/**
@file opt/hdf5-mpi/include/cnorxz_hdf5_mpi.cc.h
@brief CNORXZ HDF5 MPI template sources header
Copyright (c) 2024 Christian Zimmermann. All rights reserved.
Mail: chizeta@f3l.de
**/
#include "h5_rdataset.cc.h"

View file

@@ -0,0 +1,15 @@
// -*- C++ -*-
/**
@file opt/hdf5-mpi/include/cnorxz_hdf5_mpi.h
@brief CNORXZ HDF5 MPI main header
Copyright (c) 2024 Christian Zimmermann. All rights reserved.
Mail: chizeta@f3l.de
**/
#include "h5_rfile.h"
#include "h5_rdataset.h"
#include "cnorxz_hdf5_mpi.cc.h"

View file

@@ -23,7 +23,7 @@ namespace CNORXZ
{
const hid_t tid = getTypeId(*data.data());
if(data.begin().formatIsTrivial()){
init(data.range(), tid, data.data());
dynamic_cast<Dataset*>(this)->initbase(data.range(), tid, data.data());
}
else {
CXZ_ERROR("Got array type with non-trivial format; non-contiguous data formats are not supported yet!");
@@ -39,7 +39,7 @@ namespace CNORXZ
template <typename T>
mpi::RArray<T> SRDataset<T>::read(const RangePtr& geom) const
{
RangePtr rr = mpi::rrange(mFileRange, geom);
auto rr = rangeCast<mpi::RRange<YRange,YRange>>( mpi::rrange(mFileRange, geom) );
mpi::RArray<T> out(rr);
readbase(out.data(), rr, nullptr);
return out;
@@ -48,3 +48,4 @@ namespace CNORXZ
}
}
#endif
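The read() change above replaces the plain RangePtr with an explicit cast: mpi::rrange() returns a type-erased RangePtr, while constructing the mpi::RArray needs the concrete rank-range type. A minimal restatement of the pattern, with illustrative variable names:

// type-erased result of building the rank range
RangePtr generic = mpi::rrange(mFileRange, geom);
// recover the concrete range type expected by RArray<T>
auto rr = rangeCast<mpi::RRange<YRange,YRange>>(generic);
mpi::RArray<T> out(rr);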

View file

@@ -36,6 +36,7 @@ namespace CNORXZ
@param _parent Parent content object.
*/
RDataset(const String& name, const ContentBase* _parent);
virtual ~RDataset();
virtual RDataset& initbase(const RangePtr& fileRange, hid_t type) override;
virtual RDataset& writebase(const RangePtr& writeRange, Sptr<YIndex> pos,
@@ -46,7 +47,7 @@
@param data Array containing the dataset.
*/
template <typename T>
Dataset& init(const mpi::RArray<T>& data);
RDataset& init(const mpi::RArray<T>& data);
private:
bool checkHaveParallel() const;

View file

@@ -26,6 +26,11 @@ namespace CNORXZ
}
}
RDataset::~RDataset()
{
this->close();
}
RDataset& RDataset::initbase(const RangePtr& fileRange, hid_t type)
{
RangePtr fr = fileRange;
@@ -42,6 +47,7 @@
fr = yrange(rs);
}
Dataset::initbase(fr, type);
MPI_Barrier(MPI_COMM_WORLD);
return *this;
}
@@ -49,14 +55,15 @@
{
//bool todo = true;
RangePtr dr = writeRange;
if(dr->stype() == "R"){
bool parallel = dr->stype() == "R";
if(parallel){
dr = writeRange->sub(1);
}
CXZ_ASSERT(dr->dim() == mFileRange->dim(), "dimension of data range ("
<< dr->dim() << ") different from dimension of file range ("
<< mFileRange->dim() << ")");
Vector<hsize_t> offset(mFileRange->dim());
if(dr->stype() == "R"){
if(parallel){
mpi::RIndex<YIndex,YIndex> idx(writeRange);
idx.localize();
const SizeT rat = mpi::getNumRanks() / idx.rankI()->lmax().val();
@@ -79,6 +86,7 @@
for(SizeT i = 0; i != dims.size(); ++i){
dims[i] = dr->sub(i)->size();
}
VCHECK(toString(offset));
H5Sselect_hyperslab(mFilespace, H5S_SELECT_SET, offset.data(), NULL, dims.data(), NULL);
const hid_t memspace = H5Screate_simple(dims.size(), dims.data(), NULL);
const hid_t xfer_plist_id = H5Pcreate(H5P_DATASET_XFER);
@@ -86,23 +94,25 @@
H5Dwrite(mId, mType, memspace, mFilespace, xfer_plist_id, data);
H5Pclose(xfer_plist_id);
H5Sclose(memspace);
MPI_Barrier(MPI_COMM_WORLD);
return *this;
}
void Dataset::readbase(void* dest, RangePtr readRange, Sptr<YIndex> beg) const
void RDataset::readbase(void* dest, RangePtr readRange, Sptr<YIndex> beg) const
{
RangePtr dr = readRange;
bool parallel = dr->stype() == "R";
if(not dr){
dr = mFileRange;
}
if(dr->stype() == "R"){
if(parallel){
dr = readRange->sub(1);
}
CXZ_ASSERT(dr->dim() == mFileRange->dim(), "dimension of data range ("
<< dr->dim() << ") different from dimension of file range ("
<< mFileRange->dim() << ")");
Vector<hsize_t> offset(mFileRange->dim());
if(dr->stype() == "R"){
if(parallel){
mpi::RIndex<YIndex,YIndex> idx(readRange);
idx.localize();
const SizeT rat = mpi::getNumRanks() / idx.rankI()->lmax().val();
@@ -123,7 +133,7 @@
Vector<hsize_t> dims(mFileRange->dim());
for(SizeT i = 0; i != dims.size(); ++i){
dims[i] = readRange->sub(i)->size();
dims[i] = dr->sub(i)->size();
}
H5Sselect_hyperslab(mFilespace, H5S_SELECT_SET, offset.data(), NULL, dims.data(), NULL);
const hid_t mem_space_id = H5Screate_simple(static_cast<hsize_t>(dims.size()),
@@ -136,6 +146,7 @@
<< "', errorcode :" << err);
H5Pclose(xfer_plist_id);
H5Sclose(mem_space_id);
MPI_Barrier(MPI_COMM_WORLD);
}
bool RDataset::checkHaveParallel() const
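For reference, the write path above follows the standard parallel-HDF5 hyperslab pattern. A self-contained sketch, independent of the cnorxz types and with illustrative names:

#include <hdf5.h>
#include <vector>

// Each rank writes its own block of the file dataspace; the transfer
// property list requests a collective MPI-IO operation.
void writeSlab(hid_t dset_id, hid_t filespace, hid_t type_id,
               const std::vector<hsize_t>& offset,
               const std::vector<hsize_t>& dims, const void* data)
{
    H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset.data(), NULL,
                        dims.data(), NULL);
    const hid_t memspace = H5Screate_simple(dims.size(), dims.data(), NULL);
    const hid_t xfer = H5Pcreate(H5P_DATASET_XFER);
    H5Pset_dxpl_mpio(xfer, H5FD_MPIO_COLLECTIVE);
    H5Dwrite(dset_id, type_id, memspace, filespace, xfer, data);
    // close what was opened here; the commit message notes that some
    // identifiers are still leaked elsewhere in the module
    H5Pclose(xfer);
    H5Sclose(memspace);
}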

View file

@@ -25,6 +25,7 @@ namespace CNORXZ
RFile::~RFile()
{
this->close();
if(mFAPL_id){
H5Pclose(mFAPL_id);
}
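Closing handles manually in destructors, as above, is easy to get wrong (cf. the leaked identifiers mentioned in the commit message). A small RAII guard for hid_t handles would centralize this bookkeeping; a sketch, not part of this commit:

#include <hdf5.h>

// Owns an hid_t and closes it with the matching H5*close function.
class HidGuard
{
public:
    using Closer = herr_t (*)(hid_t);
    HidGuard(hid_t id, Closer close) : mId(id), mClose(close) {}
    ~HidGuard() { if(mId >= 0){ mClose(mId); } }
    HidGuard(const HidGuard&) = delete;
    HidGuard& operator=(const HidGuard&) = delete;
    operator hid_t() const { return mId; }
private:
    hid_t mId;
    Closer mClose;
};

// usage: HidGuard fapl(H5Pcreate(H5P_FILE_ACCESS), H5Pclose);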

View file

@@ -0,0 +1,9 @@
add_definitions(-DTEST_NUMBER_FILE="${CMAKE_SOURCE_DIR}/src/tests/numbers.txt")
include_directories(${CMAKE_SOURCE_DIR}/src/tests)
include_directories(${CMAKE_SOURCE_DIR}/src/opt/mpi/tests)
add_executable(h5mpibasic h5_mpi_basic_unit_test.cc)
add_dependencies(h5mpibasic cnorxz cnorxzhdf5 test_lib)
target_link_libraries(h5mpibasic ${GTEST_BOTH_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT} ${HDF5_LIBS} ${MPI_LIBS} cnorxz cnorxzhdf5 cnorxzmpi cnorxzhdf5mpi test_lib)
set(MPI_TEST_COMMAND mpirun -n 4 h5mpibasic)
add_test(NAME h5mpibasic COMMAND ${MPI_TEST_COMMAND})

View file

@@ -0,0 +1,122 @@
// -*- C++ -*-
/**
@file opt/hdf5-mpi/tests/h5_mpi_basic_unit_test.cc
@brief cnorxz hdf5 mpi basic unit tests.
Copyright (c) 2024 Christian Zimmermann. All rights reserved.
Mail: chizeta@f3l.de
**/
#include <cstdlib>
#include <iostream>
#include <fstream>
#include <cstdio>
#include "gtest/gtest.h"
#include "cnorxz_hdf5.h"
#include "cnorxz_hdf5_mpi.h"
#include "test_numbers.h"
#include "mpi_env.h"
namespace
{
using namespace CNORXZ;
using namespace CNORXZ::hdf5;
using namespace CNORXZ::mpi;
using Test::Numbers;
typedef MIndex<CIndex,CIndex,CIndex,CIndex> C4;
class RDataset_test : public ::testing::Test
{
protected:
RDataset_test()
{
mFilename = "parallel_test_file.h5";
Vector<RangePtr> grv(4);
grv[0] = CRangeFactory(5).create();
grv[1] = CRangeFactory(14).create();
grv[2] = CRangeFactory(10).create();
grv[3] = CRangeFactory(3).create();
const RangePtr g1 = CRangeFactory(1).create();
const RangePtr g2 = CRangeFactory(2).create();
mGeom = yrange(Vector<RangePtr>{g1,g2,g2,g1});
const RangePtr gr = yrange(grv);
const RangePtr rr = rrange(gr,mGeom);
mLR = rr->sub(1);
Vector<Double> vec = Numbers::get(14,gr->size()+10);
Vector<Double> loc(mLR->size());
for(auto k = CIndex(mGeom); k.lex() != k.lmax().val(); ++k){
if(k.lex() == getRankNumber()){
std::copy(vec.begin()+k.lex()*mLR->size(), vec.begin()+(k.lex()+1)*mLR->size(), loc.begin());
}
}
auto locarr = MArray<Double>( mLR, loc );
mA = RArray<Double>( locarr, mGeom );
}
RArray<Double> mA;
RangePtr mGeom;
RangePtr mLR;
String mFilename;
};
TEST_F(RDataset_test, Write)
{
if(getRankNumber() == 0){
std::remove(mFilename.c_str());
}
MPI_Barrier(MPI_COMM_WORLD);
RFile h5f(mFilename, false);
h5f.open();
h5f.addGroup("dir");
auto dir = h5f.getGroup("dir");
dir->add("dat", [](const String& name, const ContentBase* par, const RArray<Double>& d)
{ auto o = std::make_shared<SRDataset<Double>>( name, par ); o->init(d); return o; }, mA );
h5f.close();
}
TEST_F(RDataset_test, Read)
{
RFile h5f(mFilename, false);
h5f.open();
//h5f.addGroup("dir");
auto dir = h5f.getGroup("dir");
CHECK;
MPI_Barrier(MPI_COMM_WORLD);
dir->open();
CHECK;
MPI_Barrier(MPI_COMM_WORLD);
auto dat = dir->get("dat", [](const String& name, const ContentBase* par, auto& i)
{ i->close(); auto dset = std::make_shared<SRDataset<Double>>(name, par); i = dset;
return dset; } );
CHECK;
MPI_Barrier(MPI_COMM_WORLD);
auto a = dat->read(mGeom);
CHECK;
MPI_Barrier(MPI_COMM_WORLD);
dat->close();
CHECK;
MPI_Barrier(MPI_COMM_WORLD);
dir->close();
CHECK;
MPI_Barrier(MPI_COMM_WORLD);
h5f.close();
CHECK;
MPI_Barrier(MPI_COMM_WORLD);
auto i = std::make_shared<CIndex>(mLR);
i->ifor( operation( [](Double a, Double b) { EXPECT_EQ(a,b); }, mA(i), a(i) ), NoF{} )();
}
}
int main(int argc, char** argv)
{
::testing::InitGoogleTest(&argc, argv);
::testing::AddGlobalTestEnvironment( new MPIEnv(argc, argv) );
return RUN_ALL_TESTS();
}

View file

@@ -43,7 +43,7 @@ namespace CNORXZ
decltype(auto) Group::get(const String& name, F&& f)
{
auto i = this->getIndexTo(name);
return f(*i);
return f(name,this,*i);
}
template <typename... Ts>
@@ -122,7 +122,7 @@
Vector<String> nvec({name});
mCont.extend( URangeFactory<String>( nvec ).create() );
auto ii = getIndexTo(name);
*ii = f(args...);
*ii = f(name, this, args...);
return *this;
}

View file

@@ -56,6 +56,13 @@ namespace CNORXZ
Group& Group::close()
{
if(mCont.range() != nullptr){
for(auto& x: mCont){
if(x != nullptr){
x->close();
}
}
}
if(mId != 0){
H5Gclose(mId);
}