WIP: rrange + corresp test

Christian Zimmermann 2024-03-15 02:08:01 +01:00
parent f7c7cd3363
commit f55c38cc65
16 changed files with 241 additions and 59 deletions

View file

@ -123,6 +123,14 @@ else()
endif()
endif()
# CHECK LIBRARIES : mpi
if(DEFINED ENABLE_mpi)
set(ENABLE_mpi ${ENABLE_mpi} CACHE BOOL "enable mpi")
else()
set(ENABLE_mpi TRUE CACHE BOOL "enable mpi")
endif()
# DEFINES
add_definitions(-DVERSION="${VERSION}")

View file

@ -12,4 +12,8 @@ if(ENABLE_cereal)
add_subdirectory(opt/cereal)
endif()
if(ENABLE_mpi)
add_subdirectory(opt/mpi)
endif()
install(DIRECTORY include/ DESTINATION ${INSTALL_PATH}/include/cnorxz)

View file

@ -311,6 +311,7 @@ namespace CNORXZ
public:
typedef RangeBase RB;
typedef YIndex IndexType;
typedef typename IndexType::MetaType MetaType;
friend YRangeFactory;

View file

@ -0,0 +1,16 @@
find_package(MPI REQUIRED)
if(MPI_FOUND)
include_directories(${MPI_C_INCLUDE_DIRS})
else()
message(FATAL_ERROR "MPI not found")
endif()
message(STATUS "mpi lib = ${MPI_C_LIBRARIES}")
set(MPI_LIBS ${MPI_LIBRARIES})
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/include)
add_subdirectory(lib)
add_subdirectory(tests)
install(DIRECTORY include/ DESTINATION ${INSTALL_PATH}/include/cnorxz/mpi)

View file

@ -0,0 +1,14 @@
// -*- C++ -*-
/**
@file opt/mpi/include/cnorxz_mpi.cc.h
@brief CNORXZ MPI template sources header
Copyright (c) 2024 Christian Zimmermann. All rights reserved.
Mail: chizeta@f3l.de
**/
#include "mpi_wrappers.cc.h"
//#include "rop_types.cc.h"
#include "rrange.cc.h"

View file

@ -0,0 +1,20 @@
// -*- C++ -*-
/**
@file opt/mpi/include/cnorxz_mpi.h
@brief CNORXZ MPI main header
Copyright (c) 2024 Christian Zimmermann. All rights reserved.
Mail: chizeta@f3l.de
**/
#include "mpi_base.h"
#include "mpi_wrappers.h"
//#include "raindex.h"
//#include "rarray.h"
//#include "rop_types.h"
#include "rrange.h"
#include "typemap.h"
#include "cnorxz_mpi.cc.h"

View file

@ -20,6 +20,14 @@ namespace CNORXZ
{
namespace mpi
{
// class declarations
template <class RangeI, class RangeK>
class RRange;
template <class IndexI, class IndexK>
class RIndex;
// wrapper functions
/** Get number of THIS rank. */

View file

@ -22,9 +22,9 @@ namespace CNORXZ
template <typename T>
void BCast<T>::bcast(T& d, SizeT root)
{
static_assert( TypeMap<T>::exists, "no bcast implementation for given type" );
static_assert( Typemap<T>::exists, "no bcast implementation for given type" );
const int ret = MPI_Bcast( reinterpret_cast<void*>(&d), 1,
TypeMap<T>::value, MPI_COMM_WORLD );
Typemap<T>::value(), root, MPI_COMM_WORLD );
CXZ_ASSERT(ret == MPI_SUCCESS, "got bcast error = " << ret);
return;
}
@ -35,7 +35,7 @@ namespace CNORXZ
{
SizeT size = d.size();
const int ret = MPI_Bcast( reinterpret_cast<void*>(&size), 1,
MPI_UNSIGNED_LONG, MPI_COMM_WORLD );
MPI_UNSIGNED_LONG, root, MPI_COMM_WORLD );
CXZ_ASSERT(ret == MPI_SUCCESS, "got bcast error = " << ret);
if(size != d.size()){
d.resize(size);
@ -47,7 +47,7 @@ namespace CNORXZ
}
else {
const int ret2 = MPI_Bcast( reinterpret_cast<void*>(d.data()), size,
TypeMap<T>::value, MPI_COMM_WORLD );
Typemap<T>::value(), root, MPI_COMM_WORLD );
CXZ_ASSERT(ret2 == MPI_SUCCESS, "got bcast error = " << ret2);
}
}
@ -55,15 +55,15 @@ namespace CNORXZ
template <typename T, SizeT N>
void BCast<Arr<T,N>>::bcast(Arr<T,N>& d, SizeT root)
{
if constexpr( BCast<T,N>::special ){
if constexpr( BCast<T>::special ){
for(auto& x: d){
bcast(x, root);
}
}
else {
const int ret = MPI_Bcast( reinterpret_cast<void*>(d.data()), N,
TypeMap<T>::value, MPI_COMM_WORLD );
CXZ_ASSERT(ret == MPI_SUCCESS, "got bcast error = " << ret2);
Typemap<T>::value(), root, MPI_COMM_WORLD );
CXZ_ASSERT(ret == MPI_SUCCESS, "got bcast error = " << ret);
}
}
@ -71,25 +71,25 @@ namespace CNORXZ
{
SizeT size = d.size();
const int ret = MPI_Bcast( reinterpret_cast<void*>(&size), 1,
MPI_UNSIGNED_LONG, MPI_COMM_WORLD );
MPI_UNSIGNED_LONG, root, MPI_COMM_WORLD );
CXZ_ASSERT(ret == MPI_SUCCESS, "got bcast error = " << ret);
if(size != d.size()){
d.resize(size);
}
const int ret = MPI_Bcast( reinterpret_cast<void*>(d.data()), size,
MPI_CHAR, MPI_COMM_WORLD );
CXZ_ASSERT(ret == MPI_SUCCESS, "got bcast error = " << ret);
const int ret2 = MPI_Bcast( reinterpret_cast<void*>(d.data()), size,
MPI_CHAR, root, MPI_COMM_WORLD );
CXZ_ASSERT(ret2 == MPI_SUCCESS, "got bcast error = " << ret2);
}
template <typename... Ts>
void BCast<Tuple<Ts...>>::bcast(Tuple<Ts...>& d, SizeT root)
{
if constexpr( ( BCast<Ts>::special or ... ) ){
ifor<0,sizeof...(Ts)>( [&](auto i) { bcast( std::get<i>(d), root ); }, NoF {} );
iter<0,sizeof...(Ts)>( [&](auto i) { bcast( std::get<i>(d), root ); }, NoF {} );
}
else {
const int ret = MPI_Bcast( reinterpret_cast<void*>(&d), sizeof(d),
MPI_BYTE, MPI_COMM_WORLD );
MPI_BYTE, root, MPI_COMM_WORLD );
CXZ_ASSERT(ret == MPI_SUCCESS, "got bcast error = " << ret);
}
}
@ -97,7 +97,8 @@ namespace CNORXZ
template <typename T>
void bcast(T& d, SizeT root)
{
return BCast<T>(d, root);
BCast<T>::bcast(d, root);
return;
}
} // namespace mpi
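
For orientation, a minimal usage sketch of the bcast wrapper above; the MPI_Init/MPI_Finalize boilerplate and the mpi::getRankNumber() call are assumptions for illustration, not part of this diff:

// Sketch: broadcast a CNORXZ vector from rank 0 to all ranks (illustration only).
#include "cnorxz.h"
#include "cnorxz_mpi.h"

int main(int argc, char** argv)
{
    MPI_Init(&argc, &argv);
    using namespace CNORXZ;
    Vector<double> v(100);
    if(mpi::getRankNumber() == 0){
        // fill the data on the root rank only
        for(SizeT i = 0; i != v.size(); ++i){ v[i] = static_cast<double>(i); }
    }
    mpi::bcast(v, 0); // after this call every rank holds the root's 100 values
    MPI_Finalize();
    return 0;
}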

View file

@ -12,8 +12,7 @@
#ifndef __cxz_mpi_wrappers_h__
#define __cxz_mpi_wrappers_h__
#include "mpi.h"
#include "cnorxz.h"
#include "mpi_base.h"
#include "typemap.h"
namespace CNORXZ

View file

@ -14,6 +14,7 @@
#include "rrange.h"
#include "mpi_wrappers.h"
#include "mpi_wrappers.cc.h"
namespace CNORXZ
{
@ -27,24 +28,24 @@ namespace CNORXZ
template <class IndexI, class IndexK>
RIndex<IndexI,IndexK>::RIndex(const RIndex& in) :
mRange(in.mRange),
mI(std::make_shared<Index>(mRange->local())),
mK(std::make_shared<YIndex>(mRange->geom()))
mI(std::make_shared<IndexI>(mRange->local())),
mK(std::make_shared<IndexK>(mRange->geom()))
{
*this = in.lex();
}
template <class IndexI, class IndexK>
RIndex& RIndex<IndexI,IndexK>::operator=(const RIndex& in)
RIndex<IndexI,IndexK>& RIndex<IndexI,IndexK>::operator=(const RIndex& in)
{
mRange = in.mRange;
mI = std::make_shared<Index>(mRange->local());
mK = std::make_shared<YIndex>(mRange->geom());
mI = std::make_shared<IndexI>(mRange->local());
mK = std::make_shared<IndexK>(mRange->geom());
*this = in.lex();
return *this;
}
template <class IndexI, class IndexK>
RIndex<IndexI,IndexK>::RIndex(const RangePtr& global, SizeT lexpos = 0) :
RIndex<IndexI,IndexK>::RIndex(const RangePtr& global, SizeT lexpos) :
mRange(rangeCast<RangeType>(global)),
mI(std::make_shared<IndexI>(mRange->local())),
mK(std::make_shared<IndexK>(mRange->geom()))
@ -56,7 +57,7 @@ namespace CNORXZ
RIndex<IndexI,IndexK>& RIndex<IndexI,IndexK>::operator=(SizeT pos)
{
IB::mPos = pos; // = lex
if(lexpos >= lmax().val()){
if(pos >= lmax().val()){
IB::mPos = pmax().val();
return *this;
}
@ -155,15 +156,15 @@ namespace CNORXZ
}
template <class IndexI, class IndexK>
constexpr RIndex<IndexI,IndexK>::decltype(auto) pmax() const
constexpr decltype(auto) RIndex<IndexI,IndexK>::pmax() const
{
return mK->lmax().val() * mI->lmax().val();
return UPos(mK->lmax().val() * mI->lmax().val());
}
template <class IndexI, class IndexK>
constexpr RIndex<IndexI,IndexK>::decltype(auto) lmax() const
constexpr decltype(auto) RIndex<IndexI,IndexK>::lmax() const
{
return mK->lmax().val() * mI->lmax().val();
return UPos(mK->lmax().val() * mI->lmax().val());
}
template <class IndexI, class IndexK>
@ -173,7 +174,7 @@ namespace CNORXZ
}
template <class IndexI, class IndexK>
MetaType RIndex<IndexI,IndexK>::operator*() const
typename RIndex<IndexI,IndexK>::MetaType RIndex<IndexI,IndexK>::operator*() const
{
return meta();
}
@ -185,7 +186,7 @@ namespace CNORXZ
}
template <class IndexI, class IndexK>
Sptr<RangeType> RIndex<IndexI,IndexK>::range() const
Sptr<typename RIndex<IndexI,IndexK>::RangeType> RIndex<IndexI,IndexK>::range() const
{
return mRange;
}
@ -194,7 +195,7 @@ namespace CNORXZ
template <SizeT I>
decltype(auto) RIndex<IndexI,IndexK>::stepSize(const IndexId<I>& id) const
{
return mK->stepSize(id) * mI->lmax().val() + mI->stepSize(id);
return mK->stepSize(id) * mI->lmax() + mI->stepSize(id);
}
template <class IndexI, class IndexK>
@ -202,16 +203,20 @@ namespace CNORXZ
{
const SizeT r = mK->lex();
String o;
broadcast(r, mI->stringMeta(), &o);
o = mI->stringMeta();
bcast(o, r);
return o;
}
template <class IndexI, class IndexK>
MetaType RIndex<IndexI,IndexK>::meta() const
typename RIndex<IndexI,IndexK>::MetaType RIndex<IndexI,IndexK>::meta() const
{
const SizeT r = mK->lex();
MetaType o;
broadcast(r, mI->meta(), &o);
if constexpr(Typemap<MetaType>::exists){
o = mI->meta();
const SizeT r = mK->lex();
bcast(o, r);
}
return o;
}
@ -253,7 +258,7 @@ namespace CNORXZ
constexpr decltype(auto) RIndex<IndexI,IndexK>::ifor(const Xpr& xpr, F&& f) const
{
CXZ_ERROR("not implemented");
return 0;
return xpr;
}
template <class IndexI, class IndexK>
@ -276,7 +281,7 @@ namespace CNORXZ
}
template <class IndexI, class IndexK>
Sptr<Index> RIndex<IndexI,IndexK>::local() const
Sptr<IndexI> RIndex<IndexI,IndexK>::local() const
{
return mI;
}
@ -299,8 +304,9 @@ namespace CNORXZ
<< ") does not match number of ranks ( = " << s << ")");
if constexpr(has_static_sub<typename RangeI::IndexType>::value and
has_static_sub<typename RangeK::IndexType>::value) {
static_assert(typename RangeI::NR == typename RangeK::NR,
"ranges have to be of same dimension");
constexpr SizeT NRI = RangeI::NR;
constexpr SizeT NRK = RangeK::NR;
static_assert(NRI == NRK, "ranges have to be of same dimension");
}
else {
CXZ_ASSERT(ri->dim() == rk->dim(), "ranges have to be of same dimension, got "
@ -311,7 +317,7 @@ namespace CNORXZ
template <class RangeI, class RangeK>
void RRangeFactory<RangeI,RangeK>::make()
{
Vector<Uuid> key = { mRI->key(), mRK->key() };
Vector<Uuid> key = { mRI->id(), mRK->id() };
const auto& info = typeid(RRange<RangeI,RangeK>);
mProd = this->fromCreated(info, key);
if(mProd == nullptr) {
@ -397,7 +403,7 @@ namespace CNORXZ
}
template <class RangeI, class RangeK>
const MetaType RRange<RangeI,RangeK>::get(SizeT pos) const
const typename RRange<RangeI,RangeK>::MetaType RRange<RangeI,RangeK>::get(SizeT pos) const
{
return (this->begin()+pos)->meta();
}
@ -425,6 +431,12 @@ namespace CNORXZ
mGeom(geom)
{}
template <class RangeI, class RangeK>
Vector<Uuid> RRange<RangeI,RangeK>::key() const
{
return Vector<Uuid> { mLocal->id(), mGeom->id() };
}
} // namespace mpi
} // namespace CNORXZ

View file

@ -12,7 +12,7 @@
#ifndef __cxz_mpi_rrange_h__
#define __cxz_mpi_rrange_h__
#include "cnorxz.h"
#include "mpi_base.h"
namespace CNORXZ
{
@ -140,7 +140,7 @@ namespace CNORXZ
SizeT rank() const;
/** Get the local index on THIS rank. */
Sptr<IndexI,IndexK> local() const;
Sptr<IndexI> local() const;
//!!!
private:
@ -237,6 +237,7 @@ namespace CNORXZ
Sptr<RangeI> mLocal; /**< Local range of THIS rank. */
Sptr<RangeK> mGeom; /**< Rank geometry range. */
virtual Vector<Uuid> key() const override final;
};
/** Create RRange from global range and given rank geometry.

View file

@ -18,42 +18,42 @@ namespace CNORXZ
struct Typemap<int>
{
static constexpr bool exists = true;
static constexpr MPI_Datatype value = MPI_INT;
static MPI_Datatype value() { return MPI_INT; }
};
template <>
struct Typemap<unsigned>
{
static constexpr bool exists = true;
static constexpr MPI_Datatype value = MPI_UNSIGNED;
static MPI_Datatype value() { return MPI_UNSIGNED; }
};
template <>
struct Typemap<long int>
{
static constexpr bool exists = true;
static constexpr MPI_Datatype value = MPI_LONG;
static MPI_Datatype value() { return MPI_LONG; }
};
template <>
struct Typemap<unsigned long>
{
static constexpr bool exists = true;
static constexpr MPI_Datatype value = MPI_UNSIGNED_LONG;
static MPI_Datatype value() { return MPI_UNSIGNED_LONG; }
};
template <>
struct Typemap<double>
{
static constexpr bool exists = true;
static constexpr MPI_Datatype value = MPI_DOUBLE;
static MPI_Datatype value() { return MPI_DOUBLE; }
};
template <>
struct Typemap<float>
{
static constexpr bool exists = true;
static constexpr MPI_Datatype value = MPI_FLOAT;
static MPI_Datatype value() { return MPI_FLOAT; }
};
// further !!!
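
The switch from a constexpr data member to a value() function is presumably needed because MPI_Datatype handles such as MPI_INT are not constant expressions in every MPI implementation (Open MPI defines them as addresses of global objects). Further specializations, as hinted at by the comment above, would follow the same pattern; the char mapping below is only a sketch and not part of this commit:

// Hypothetical additional specialization (illustration only):
template <>
struct Typemap<char>
{
    static constexpr bool exists = true;
    static MPI_Datatype value() { return MPI_CHAR; }
};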

View file

@ -0,0 +1,18 @@
set(libcnorxzmpi_a_SOURCES
${CMAKE_CURRENT_SOURCE_DIR}/mpi_base.cc
${CMAKE_CURRENT_SOURCE_DIR}/rrange.cc
)
add_library(cnorxzmpi_obj OBJECT
${libcnorxzmpi_a_SOURCES}
)
set_target_properties(cnorxzmpi_obj PROPERTIES POSITION_INDEPENDENT_CODE TRUE)
add_library(cnorxzmpi SHARED
$<TARGET_OBJECTS:cnorxzmpi_obj>
)
set_target_properties(cnorxzmpi PROPERTIES POSITION_INDEPENDENT_CODE TRUE)
install(TARGETS cnorxzmpi LIBRARY DESTINATION ${INSTALL_PATH}/lib)

View file

@ -10,6 +10,7 @@
**/
#include "rrange.h"
#include "rrange.cc.h"
namespace CNORXZ
{
@ -27,14 +28,16 @@ namespace CNORXZ
"global range has to be of same dimension as geometry range, got "
<< global->dim() << " and " << geom->dim());
MArray<RangePtr> o(geom);
YIndex k = geom->begin();
YIndex ke = geom->end();
auto mu = std::make_shared<CIndex>(geom->sub());
MArray<MArray<RangePtr>> rs(geom->sub()->range());
rs(mu) = operation( [&](const SizeT i) { return rsplit(globla->sub(i), geom->sub(i)); } , xpr(mu) );
Vector elem(geom->sub()->range());
YIndex k(geom);
YIndex ke(geom, geom->size());
auto mu = std::make_shared<CIndex>(geom->sub().range());
MArray<MArray<RangePtr>> rs(geom->sub().range());
rs(mu) = operation( [&](const SizeT i) { return rsplit(global->sub(i), geom->sub(i)); } , xpr(mu) );
Vector<RangePtr> elem(geom->dim());
for(; k != ke; ++k){
mu->ifor( operation( [&](const SizeT i){ elem[i] = rs[i][k->pack()[i]]; }, xpr(mu) ) )();
mu->ifor( operation( [&](const SizeT i, const MArray<RangePtr>& el){
elem[i] = el[DIndex(k.pack()[i])];
}, xpr(mu), rs(mu) ), NoF {} )();
o[k] = YRangeFactory(elem).create();
}
return o;
@ -42,13 +45,13 @@ namespace CNORXZ
else {
CXZ_ASSERT(global->size() % geom->size() == 0,
"global range has to be dividible by geometry range, got "
<< global->size() << " and " << k->lmax().val());
<< global->size() << " and " << geom->size());
const SizeT n = global->size() / geom->size();
auto k = std::make_shared<CIndex>(geom);
auto jb = global->begin();
auto je = global->begin();
MArray<RangePtr> o(geom);
o(k) = operation( [&](const SizeT x){ jb = n*x; je = n*(x+1)-1; return jb.prange(je) } , xpr(k) );
o(k) = operation( [&](const SizeT x){ jb = n*x; je = n*(x+1)-1; return jb.prange(je); } , xpr(k) );
return o;
}
}
@ -58,14 +61,15 @@ namespace CNORXZ
const MArray<RangePtr> rs = rsplit(global, geom);
RangePtr o = nullptr;
for(auto ri = rs.begin(); ri != rs.end(); ++ri){
if(ri->lex() == getRangeNum()){
if(ri.lex() == getRankNumber()){
o = *ri;
}
}
assert(o);
auto loc = rangeCast<YRange>(global);
auto geo = rangeCast<YRange>(geom);
return RRangeFactory<YRange,YRange>(loc, geom).create();
return RRangeFactory<YRange,YRange>(loc, geo).create();
}
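
The non-YRange branch of rsplit above cuts the global range into contiguous blocks of equal length, one per rank coordinate. A minimal sketch of that arithmetic for a single dimension; blockBounds is a hypothetical helper name chosen here for illustration (assumes <utility> and the CNORXZ SizeT alias), not part of the library:

// Illustration of the per-dimension block split (n = global->size() / geom->size() above).
// Example: a sub-range of size 16 over a rank-geometry extent of 2 gives blocks [0,7] and [8,15].
inline std::pair<SizeT,SizeT> blockBounds(SizeT globalSize, SizeT geomSize, SizeT rankCoord)
{
    const SizeT n = globalSize / geomSize;       // block length; divisibility is asserted above
    return { n*rankCoord, n*(rankCoord+1) - 1 }; // first and last lexicographic position on this rank
}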

View file

@ -0,0 +1,7 @@
add_definitions(-DTEST_NUMBER_FILE="${CMAKE_SOURCE_DIR}/src/tests/numbers.txt")
include_directories(${CMAKE_SOURCE_DIR}/src/tests)
add_executable(mpirrutest rrange_unit_test.cc)
add_dependencies(mpirrutest cnorxz cnorxzmpi test_lib)
target_link_libraries(mpirrutest ${GTEST_BOTH_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT} ${MPI_LIBS} cnorxz cnorxzmpi test_lib)
add_test(NAME mpirrutest COMMAND mpirrutest)

View file

@ -0,0 +1,69 @@
// -*- C++ -*-
/**
@file opt/mpi/tests/rrange_unit_test.cc
@brief RRange unit tests.
Copyright (c) 2024 Christian Zimmermann. All rights reserved.
Mail: chizeta@f3l.de
**/
#include <cstdlib>
#include <iostream>
#include "gtest/gtest.h"
#include "cnorxz.h"
#include "cnorxz_mpi.h"
#include "test_numbers.h"
#include "rrange.cc.h"
namespace
{
using namespace CNORXZ;
using Test::Numbers;
using namespace CNORXZ::mpi;
class RRange_Test : public ::testing::Test
{
protected:
RRange_Test()
{
CXZ_ASSERT(getNumRanks() == 4, "expected 4 ranks");
Vector<Int> xs(12);
Vector<Int> ts(16);
mXRange = URangeFactory<Int>(xs).create();
mTRange = URangeFactory<Int>(ts).create();
Vector<RangePtr> rs { mTRange, mXRange, mXRange, mXRange };
mGRange = YRangeFactory(rs).create();
RangePtr g1 = CRangeFactory(1).create();
RangePtr g2 = CRangeFactory(2).create();
Vector<RangePtr> gs { g2, g1, g1, g2 };
mGeom = YRangeFactory(gs).create();
mRRange = rrange(mGRange, mGeom);
}
RangePtr mXRange;
RangePtr mTRange;
RangePtr mGRange;
RangePtr mGeom;
RangePtr mRRange;
};
TEST_F(RRange_Test, Basics)
{
EXPECT_EQ(mRRange->size(), mGRange->size());
}
}
int main(int argc, char** argv)
{
::testing::InitGoogleTest(&argc, argv);
MPI_Init(&argc, &argv);
const int ret = RUN_ALL_TESTS();
MPI_Finalize();
return ret;
}
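
Note: the fixture asserts getNumRanks() == 4, so the binary has to be started through an MPI launcher with exactly four ranks; the plain add_test(NAME mpirrutest COMMAND mpirrutest) registration above starts a single process. A typical invocation (launcher name and flags depend on the MPI installation):

mpirun -np 4 ./mpirrutest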