hdf5: fix bug + hdf5-mpi: test works

This commit is contained in:
Christian Zimmermann 2024-11-02 19:09:44 -07:00
parent 8c75c96a64
commit 456706c691
7 changed files with 90 additions and 37 deletions

View file

@ -25,9 +25,8 @@ if(HDF5_IS_PARALLEL)
add_subdirectory(lib)
add_subdirectory(tests)
install(CODE "execute_process(COMMAND sed -i \"s|CXZ_H5_MPI_BUILD_MODE 1|CXZ_H5_MPI_BUILD_MODE 0|g;\" ${CMAKE_CURRENT_SOURCE_DIR}/include/h5_mpi_base.h)")
install(DIRECTORY include/ DESTINATION ${INSTALL_PATH}/include/cnorxz/hdf5-mpi)
install(CODE "execute_process(COMMAND sed -i \"s|CXZ_H5_MPI_BUILD_MODE 0|CXZ_H5_MPI_BUILD_MODE 1|g;\" ${CMAKE_CURRENT_SOURCE_DIR}/include/h5_mpi_base.h)")
install(CODE "execute_process(COMMAND sed -i \"s|CXZ_H5_MPI_BUILD_MODE 1|CXZ_H5_MPI_BUILD_MODE 0|g;\" ${INSTALL_PATH}/include/cnorxz/hdf5-mpi/h5_mpi_base.h)")
else()
message(WARNING "no parallel support in available HDF5 library")
endif()

View file

@ -36,7 +36,7 @@ namespace CNORXZ
@param _parent Parent content object.
*/
RDataset(const String& name, const ContentBase* _parent);
virtual ~RDataset();
//virtual ~RDataset();
virtual RDataset& initbase(const RangePtr& fileRange, hid_t type) override;
virtual RDataset& writebase(const RangePtr& writeRange, Sptr<YIndex> pos,

View file

@ -21,14 +21,6 @@ namespace CNORXZ
{
CXZ_ASSERT( checkHaveParallel(), "tried to open dataset in parallel mode while file"
<< parent()->filename() << " was opened in serial mode");
if(exists()){
open();
}
}
RDataset::~RDataset()
{
this->close();
}
RDataset& RDataset::initbase(const RangePtr& fileRange, hid_t type)
@ -86,7 +78,6 @@ namespace CNORXZ
for(SizeT i = 0; i != dims.size(); ++i){
dims[i] = dr->sub(i)->size();
}
VCHECK(toString(offset));
H5Sselect_hyperslab(mFilespace, H5S_SELECT_SET, offset.data(), NULL, dims.data(), NULL);
const hid_t memspace = H5Screate_simple(dims.size(), dims.data(), NULL);
const hid_t xfer_plist_id = H5Pcreate(H5P_DATASET_XFER);

View file

@ -0,0 +1,84 @@
#ifndef __cxz_hdf5_mpi_diag_utils_h__
#define __cxz_hdf5_mpi_diag_utils_h__
/** Map an HDF5 identifier to a short human-readable name of its type.
    Used by the diagnostic object listing to label open handles.
    @param obj_id HDF5 identifier to classify (queried via H5Iget_type).
    @return Name of the H5I_type_t category; "unknown" for any value
            outside the enumeration.
 */
inline String getStringObjType(hid_t obj_id)
{
    const H5I_type_t t = H5Iget_type(obj_id);
    switch(t){
    case H5I_UNINIT:
	return "uninitialized";
    case H5I_BADID:
	return "bad";
    case H5I_FILE:
	return "file";
    case H5I_GROUP:
	return "group";
    case H5I_DATATYPE:
	return "datatype";
    case H5I_DATASPACE:
	return "dataspace";
    case H5I_DATASET:
	return "dataset";
    case H5I_MAP:
	return "map";
    case H5I_ATTR:
	return "attr";
    case H5I_VFL:
	return "vfl";
    case H5I_VOL:
	return "vol";
    case H5I_GENPROP_CLS:
	return "gen prop list class";
    case H5I_GENPROP_LST:
	return "gen prop list";
    case H5I_ERROR_CLASS:
	return "err class";
    case H5I_ERROR_MSG:
	return "err msg";
    case H5I_ERROR_STACK:
	return "err stack";
    case H5I_SPACE_SEL_ITER:
	return "space sel iter";
    case H5I_EVENTSET:
	return "eventset";
    case H5I_NTYPES:
	return "ntypes";
    default:
	return "unknown"; // bugfix: was misspelled "unknonw"
    }
}
// Diagnostic helper: print, rank by rank, all HDF5 identifiers still open
// in the given file. Collective over MPI_COMM_WORLD — every rank must call
// this, otherwise the barriers below deadlock.
// @param file_id HDF5 file identifier whose open objects are listed.
// @param desc    Heading printed by rank 0 above the listing.
void list_objects(hid_t file_id, const String& desc)
{
// H5Iget_name truncates names longer than this buffer size.
constexpr SizeT maxnamesize = 256;
MPI_Barrier(MPI_COMM_WORLD);
if(getRankNumber() == 0){
std::cout << " ======= " << desc << " ======= " << std::endl;
}
// H5F_OBJ_ALL: count every open handle attached to the file
// (the file handle itself, groups, datasets, attributes, ...).
const ssize_t num = H5Fget_obj_count(file_id, H5F_OBJ_ALL);
CXZ_ASSERT(num >= 0, "error while counting objects: " << num);
Vector<hid_t> list(num);
// NOTE(review): actual count returned by H5Fget_obj_ids is not checked;
// assumed to equal num obtained just above — confirm no race with other threads.
H5Fget_obj_ids(file_id, H5F_OBJ_ALL, num, list.data());
const SizeT nranks = getNumRanks();
// Serialize the printout: rank n prints while all others wait at the
// barriers, so the listing appears in rank order.
// NOTE(review): barriers order execution, not stdout flushing; output from
// different ranks may still interleave on some MPI stdio setups — confirm.
for(SizeT n = 0; n != nranks; ++n){
MPI_Barrier(MPI_COMM_WORLD);
if(n == getRankNumber()){
std::cout << "have " << num << " objects on rank " << n << ":" << std::endl;
for(SizeT i = 0; i != static_cast<SizeT>(num); ++i){
// iname left untouched on H5Iget_name failure; buffer is uninitialized.
char iname[maxnamesize];
H5Iget_name(list[i], iname, maxnamesize);
const String itype = getStringObjType(list[i]);
std::cout << " - " << list[i] << " [" << itype << "]: " << iname << std::endl;
}
}
MPI_Barrier(MPI_COMM_WORLD);
}
// Footer printed by rank 0 only, after all ranks are done.
if(getRankNumber() == 0){
std::cout << " ==================== " << std::endl;
std::cout << std::endl;
}
MPI_Barrier(MPI_COMM_WORLD);
}
#endif

View file

@ -83,31 +83,11 @@ namespace
TEST_F(RDataset_test, Read)
{
RFile h5f(mFilename, false);
h5f.open();
//h5f.addGroup("dir");
auto dir = h5f.getGroup("dir");
CHECK;
MPI_Barrier(MPI_COMM_WORLD);
dir->open();
CHECK;
MPI_Barrier(MPI_COMM_WORLD);
auto dat = dir->get("dat", [](const String& name, const ContentBase* par, auto& i)
{ i->close(); auto dset = std::make_shared<SRDataset<Double>>(name, par); i = dset;
auto dat = h5f.open().getGroup("dir")->open().get("dat", [](const String& name, const ContentBase* par, auto& i)
{ (*i)->close(); auto dset = std::make_shared<SRDataset<Double>>(name, par); *i = dset;
return dset; } );
CHECK;
MPI_Barrier(MPI_COMM_WORLD);
auto a = dat->read(mGeom);
CHECK;
MPI_Barrier(MPI_COMM_WORLD);
dat->close();
CHECK;
MPI_Barrier(MPI_COMM_WORLD);
dir->close();
CHECK;
MPI_Barrier(MPI_COMM_WORLD);
h5f.close();
CHECK;
MPI_Barrier(MPI_COMM_WORLD);
auto i = std::make_shared<CIndex>(mLR);
i->ifor( operation( [](Double a, Double b) { EXPECT_EQ(a,b); }, mA(i), a(i) ), NoF{} )();

View file

@ -43,7 +43,7 @@ namespace CNORXZ
decltype(auto) Group::get(const String& name, F&& f)
{
auto i = this->getIndexTo(name);
return f(name,this,*i);
return f(name,this,i);
}
template <typename... Ts>

View file

@ -100,7 +100,6 @@ namespace CNORXZ
const String next = name.substr(delimpos+1);
auto g = getGroup(thisname);
g->open();
CHECK;
return g->get(next);
}
auto i = this->getIndexTo(thisname);
@ -170,8 +169,8 @@ namespace CNORXZ
{
const hid_t id = H5Dopen(loc_id, name, H5P_DEFAULT);
if(not H5Aexists(id, "CLASS")){
return false;
H5Dclose(id);
return false;
}
hid_t attrid = H5Aopen(id, "CLASS", H5P_DEFAULT);
const hid_t atype = H5Aget_type(attrid);