changed: refactor MPIPacker

To limit the overload party, we put the packing details for
specific types in separate structs.
Emit a compiler error if an unsupported type is given;
it is better to detect this at compile time than at runtime.
This commit is contained in:
Arne Morten Kvarving 2022-09-07 13:54:15 +02:00
parent 3a19ab31bd
commit b1092c1a12
4 changed files with 318 additions and 408 deletions

View File

@@ -74,7 +74,6 @@ list (APPEND MAIN_SOURCE_FILES
opm/simulators/timestepping/gatherConvergenceReport.cpp opm/simulators/timestepping/gatherConvergenceReport.cpp
opm/simulators/utils/DeferredLogger.cpp opm/simulators/utils/DeferredLogger.cpp
opm/simulators/utils/gatherDeferredLogger.cpp opm/simulators/utils/gatherDeferredLogger.cpp
opm/simulators/utils/MPIPacker.cpp
opm/simulators/utils/ParallelFileMerger.cpp opm/simulators/utils/ParallelFileMerger.cpp
opm/simulators/utils/ParallelRestart.cpp opm/simulators/utils/ParallelRestart.cpp
opm/simulators/wells/ALQState.cpp opm/simulators/wells/ALQState.cpp
@@ -146,7 +145,8 @@ if(HAVE_AMGCL)
endif() endif()
if(MPI_FOUND) if(MPI_FOUND)
list(APPEND MAIN_SOURCE_FILES opm/simulators/utils/ParallelEclipseState.cpp list(APPEND MAIN_SOURCE_FILES opm/simulators/utils/ParallelEclipseState.cpp
opm/simulators/utils/ParallelSerialization.cpp) opm/simulators/utils/ParallelSerialization.cpp
opm/simulators/utils/MPIPacker.cpp)
endif() endif()
# originally generated with the command: # originally generated with the command:

View File

@@ -107,11 +107,11 @@ public:
const_cast<T&>(data).serializeOp(*this); const_cast<T&>(data).serializeOp(*this);
} else { } else {
if (m_op == Operation::PACKSIZE) if (m_op == Operation::PACKSIZE)
m_packSize += Mpi::packSize(data, m_comm); m_packSize += Mpi::Packer::packSize(data, m_comm);
else if (m_op == Operation::PACK) else if (m_op == Operation::PACK)
Mpi::pack(data, m_buffer, m_position, m_comm); Mpi::Packer::pack(data, m_buffer, m_position, m_comm);
else if (m_op == Operation::UNPACK) else if (m_op == Operation::UNPACK)
Mpi::unpack(const_cast<T&>(data), m_buffer, m_position, m_comm); Mpi::Packer::unpack(const_cast<T&>(data), m_buffer, m_position, m_comm);
} }
} }
@@ -138,14 +138,14 @@ public:
}; };
if (m_op == Operation::PACKSIZE) { if (m_op == Operation::PACKSIZE) {
m_packSize += Mpi::packSize(data.size(), m_comm); m_packSize += Mpi::Packer::packSize(data.size(), m_comm);
handle(data); handle(data);
} else if (m_op == Operation::PACK) { } else if (m_op == Operation::PACK) {
Mpi::pack(data.size(), m_buffer, m_position, m_comm); Mpi::Packer::pack(data.size(), m_buffer, m_position, m_comm);
handle(data); handle(data);
} else if (m_op == Operation::UNPACK) { } else if (m_op == Operation::UNPACK) {
size_t size; size_t size;
Mpi::unpack(size, m_buffer, m_position, m_comm); Mpi::Packer::unpack(size, m_buffer, m_position, m_comm);
data.resize(size); data.resize(size);
handle(data); handle(data);
} }
@@ -156,8 +156,8 @@ public:
void vector(std::vector<bool>& data) void vector(std::vector<bool>& data)
{ {
if (m_op == Operation::PACKSIZE) { if (m_op == Operation::PACKSIZE) {
m_packSize += Mpi::packSize(data.size(), m_comm); m_packSize += Mpi::Packer::packSize(data.size(), m_comm);
m_packSize += data.size()*Mpi::packSize(bool(), m_comm); m_packSize += data.size()*Mpi::Packer::packSize(bool(), m_comm);
} else if (m_op == Operation::PACK) { } else if (m_op == Operation::PACK) {
(*this)(data.size()); (*this)(data.size());
for (const auto entry : data) { // Not a reference: vector<bool> range for (const auto entry : data) { // Not a reference: vector<bool> range
@@ -198,14 +198,14 @@ public:
}; };
if (m_op == Operation::PACKSIZE) { if (m_op == Operation::PACKSIZE) {
m_packSize += Mpi::packSize(data.size(), m_comm); m_packSize += Mpi::Packer::packSize(data.size(), m_comm);
handle(data); handle(data);
} else if (m_op == Operation::PACK) { } else if (m_op == Operation::PACK) {
Mpi::pack(data.size(), m_buffer, m_position, m_comm); Mpi::Packer::pack(data.size(), m_buffer, m_position, m_comm);
handle(data); handle(data);
} else if (m_op == Operation::UNPACK) { } else if (m_op == Operation::UNPACK) {
size_t size; size_t size;
Mpi::unpack(size, m_buffer, m_position, m_comm); Mpi::Packer::unpack(size, m_buffer, m_position, m_comm);
handle(data); handle(data);
} }
} }
@@ -223,14 +223,14 @@ public:
(*this)(d); (*this)(d);
}; };
if (m_op == Operation::PACKSIZE) { if (m_op == Operation::PACKSIZE) {
m_packSize += Mpi::packSize(data.index(), m_comm); m_packSize += Mpi::Packer::packSize(data.index(), m_comm);
std::visit(visitor, data); std::visit(visitor, data);
} else if (m_op == Operation::PACK) { } else if (m_op == Operation::PACK) {
Mpi::pack(data.index(), m_buffer, m_position, m_comm); Mpi::Packer::pack(data.index(), m_buffer, m_position, m_comm);
std::visit(visitor, data); std::visit(visitor, data);
} else if (m_op == Operation::UNPACK) { } else if (m_op == Operation::UNPACK) {
size_t index; size_t index;
Mpi::unpack(index, m_buffer, m_position, m_comm); Mpi::Packer::unpack(index, m_buffer, m_position, m_comm);
auto& data_mut = const_cast<std::variant<Args...>&>(data); auto& data_mut = const_cast<std::variant<Args...>&>(data);
data_mut = detail::make_variant<Args...>(index); data_mut = detail::make_variant<Args...>(index);
std::visit(visitor, data_mut); std::visit(visitor, data_mut);
@@ -244,18 +244,18 @@ public:
void optional(const std::optional<T>& data) void optional(const std::optional<T>& data)
{ {
if (m_op == Operation::PACKSIZE) { if (m_op == Operation::PACKSIZE) {
m_packSize += Mpi::packSize(data.has_value(), m_comm); m_packSize += Mpi::Packer::packSize(data.has_value(), m_comm);
if (data.has_value()) { if (data.has_value()) {
(*this)(*data); (*this)(*data);
} }
} else if (m_op == Operation::PACK) { } else if (m_op == Operation::PACK) {
Mpi::pack(data.has_value(), m_buffer, m_position, m_comm); Mpi::Packer::pack(data.has_value(), m_buffer, m_position, m_comm);
if (data.has_value()) { if (data.has_value()) {
(*this)(*data); (*this)(*data);
} }
} else if (m_op == Operation::UNPACK) { } else if (m_op == Operation::UNPACK) {
bool has; bool has;
Mpi::unpack(has, m_buffer, m_position, m_comm); Mpi::Packer::unpack(has, m_buffer, m_position, m_comm);
if (has) { if (has) {
T res; T res;
(*this)(res); (*this)(res);
@@ -306,20 +306,20 @@ public:
}; };
if (m_op == Operation::PACKSIZE) { if (m_op == Operation::PACKSIZE) {
m_packSize += Mpi::packSize(data.size(), m_comm); m_packSize += Mpi::Packer::packSize(data.size(), m_comm);
for (auto& it : data) { for (auto& it : data) {
keyHandle(it.first); keyHandle(it.first);
handle(it.second); handle(it.second);
} }
} else if (m_op == Operation::PACK) { } else if (m_op == Operation::PACK) {
Mpi::pack(data.size(), m_buffer, m_position, m_comm); Mpi::Packer::pack(data.size(), m_buffer, m_position, m_comm);
for (auto& it : data) { for (auto& it : data) {
keyHandle(it.first); keyHandle(it.first);
handle(it.second); handle(it.second);
} }
} else if (m_op == Operation::UNPACK) { } else if (m_op == Operation::UNPACK) {
size_t size; size_t size;
Mpi::unpack(size, m_buffer, m_position, m_comm); Mpi::Packer::unpack(size, m_buffer, m_position, m_comm);
for (size_t i = 0; i < size; ++i) { for (size_t i = 0; i < size; ++i) {
Key key; Key key;
keyHandle(key); keyHandle(key);
@@ -351,18 +351,18 @@ public:
}; };
if (m_op == Operation::PACKSIZE) { if (m_op == Operation::PACKSIZE) {
m_packSize += Mpi::packSize(data.size(), m_comm); m_packSize += Mpi::Packer::packSize(data.size(), m_comm);
for (auto& it : data) { for (auto& it : data) {
handle(it); handle(it);
} }
} else if (m_op == Operation::PACK) { } else if (m_op == Operation::PACK) {
Mpi::pack(data.size(), m_buffer, m_position, m_comm); Mpi::Packer::pack(data.size(), m_buffer, m_position, m_comm);
for (auto& it : data) { for (auto& it : data) {
handle(it); handle(it);
} }
} else if (m_op == Operation::UNPACK) { } else if (m_op == Operation::UNPACK) {
size_t size; size_t size;
Mpi::unpack(size, m_buffer, m_position, m_comm); Mpi::Packer::unpack(size, m_buffer, m_position, m_comm);
for (size_t i = 0; i < size; ++i) { for (size_t i = 0; i < size; ++i) {
Data entry; Data entry;
handle(entry); handle(entry);

View File

@@ -19,277 +19,118 @@
#include <config.h> #include <config.h>
#include "MPIPacker.hpp" #include "MPIPacker.hpp"
#include <bitset>
#include <cstdint> #include <cstdint>
#include <cstring>
#include <ctime> #include <ctime>
#include <string>
#include <type_traits>
#include <dune/common/parallel/mpitraits.hh> #include <opm/common/utility/TimeService.hpp>
#if HAVE_MPI
#include <ebos/eclmpiserializer.hh>
#endif
namespace Opm namespace Opm {
namespace Mpi {
namespace detail {
template<std::size_t Size>
std::size_t Packing<false,std::bitset<Size>>::
packSize(const std::bitset<Size>& data,
Parallel::MPIComm comm)
{ {
namespace Mpi return Packing<true,unsigned long long>::packSize(data.to_ullong(), comm);
{
template<class T>
std::size_t packSize(const T*, std::size_t, Opm::Parallel::MPIComm,
std::integral_constant<bool, false>)
{
OPM_THROW(std::logic_error, "Packing not (yet) supported for this non-pod type.");
} }
template<class T> template<std::size_t Size>
std::size_t packSize(const T*, std::size_t l, Opm::Parallel::MPIComm comm, void Packing<false,std::bitset<Size>>::
std::integral_constant<bool, true>) pack(const std::bitset<Size>& data,
std::vector<char>& buffer,
int& position,
Parallel::MPIComm comm)
{ {
#if HAVE_MPI Packing<true,unsigned long long>::pack(data.to_ullong(), buffer, position, comm);
int size;
MPI_Pack_size(1, Dune::MPITraits<std::size_t>::getType(), comm, &size);
std::size_t totalSize = size;
MPI_Pack_size(l, Dune::MPITraits<T>::getType(), comm, &size);
return totalSize + size;
#else
(void) comm;
return l-l;
#endif
} }
template<class T> template<std::size_t Size>
std::size_t packSize(const T* data, std::size_t l, Opm::Parallel::MPIComm comm) void Packing<false,std::bitset<Size>>::
unpack(std::bitset<Size>& data,
std::vector<char>& buffer,
int& position,
Parallel::MPIComm comm)
{ {
return packSize(data, l, comm, typename std::is_pod<T>::type()); unsigned long long d;
Packing<true,unsigned long long>::unpack(d, buffer, position, comm);
data = std::bitset<Size>(d);
} }
std::size_t packSize(const char* str, Opm::Parallel::MPIComm comm) std::size_t Packing<false,std::string>::
packSize(const std::string& data, Parallel::MPIComm comm)
{ {
#if HAVE_MPI
int size; int size;
MPI_Pack_size(1, Dune::MPITraits<std::size_t>::getType(), comm, &size); MPI_Pack_size(1, Dune::MPITraits<std::size_t>::getType(), comm, &size);
int totalSize = size; int totalSize = size;
MPI_Pack_size(strlen(str)+1, MPI_CHAR, comm, &size); MPI_Pack_size(strlen(data.c_str()), MPI_CHAR, comm, &size);
return totalSize + size; return totalSize + size;
#else
(void) str;
(void) comm;
return 0;
#endif
} }
std::size_t packSize(const std::string& str, Opm::Parallel::MPIComm comm) void Packing<false,std::string>::
pack(const std::string& data,
std::vector<char>& buffer,
int& position,
Parallel::MPIComm comm)
{ {
return packSize(str.c_str(), comm); std::size_t length = strlen(data.c_str());
}
template <class T>
struct Packing
{
};
template <std::size_t Size>
struct Packing<std::bitset<Size>>
{
static std::size_t packSize(const std::bitset<Size>& data, Opm::Parallel::MPIComm comm)
{
return Mpi::packSize(data.to_ullong(), comm);
}
static void pack(const std::bitset<Size>& data, std::vector<char>& buffer, int& position, Opm::Parallel::MPIComm comm)
{
Mpi::pack(data.to_ullong(), buffer, position, comm);
}
static void unpack(std::bitset<Size>& data, std::vector<char>& buffer, int& position, Opm::Parallel::MPIComm comm)
{
unsigned long long d;
Mpi::unpack(d, buffer, position, comm);
data = std::bitset<Size>(d);
}
};
template<std::size_t Size>
std::size_t packSize(const std::bitset<Size>& data, Opm::Parallel::MPIComm comm)
{
return Packing<std::bitset<Size>>::packSize(data, comm);
}
std::size_t packSize(const Opm::time_point&, Opm::Parallel::MPIComm comm)
{
std::time_t tp = 0;
return packSize(tp, comm);
}
////// pack routines
template<class T>
void pack(const T*, std::size_t, std::vector<char>&, int&,
Opm::Parallel::MPIComm, std::integral_constant<bool, false>)
{
OPM_THROW(std::logic_error, "Packing not (yet) supported for this non-pod type.");
}
template<class T>
void pack(const T* data, std::size_t l, std::vector<char>& buffer, int& position,
Opm::Parallel::MPIComm comm,
std::integral_constant<bool, true>)
{
#if HAVE_MPI
MPI_Pack(&l, 1, Dune::MPITraits<std::size_t>::getType(), buffer.data(),
buffer.size(), &position, comm);
MPI_Pack(data, l, Dune::MPITraits<T>::getType(), buffer.data(),
buffer.size(), &position, comm);
#else
(void) data;
(void) comm;
(void) l;
(void) buffer;
(void) position;
#endif
}
template<class T>
void pack(const T* data, std::size_t l, std::vector<char>& buffer, int& position,
Opm::Parallel::MPIComm comm)
{
pack(data, l, buffer, position, comm, typename std::is_pod<T>::type());
}
void pack(const char* str, std::vector<char>& buffer, int& position,
Opm::Parallel::MPIComm comm)
{
#if HAVE_MPI
std::size_t length = strlen(str)+1;
MPI_Pack(&length, 1, Dune::MPITraits<std::size_t>::getType(), buffer.data(), MPI_Pack(&length, 1, Dune::MPITraits<std::size_t>::getType(), buffer.data(),
buffer.size(), &position, comm); buffer.size(), &position, comm);
MPI_Pack(str, strlen(str)+1, MPI_CHAR, buffer.data(), buffer.size(), MPI_Pack(data.c_str(), length, MPI_CHAR, buffer.data(), buffer.size(),
&position, comm); &position, comm);
#else
(void) str;
(void) comm;
(void) buffer;
(void) position;
#endif
} }
void pack(const std::string& str, std::vector<char>& buffer, int& position, void Packing<false,std::string>::
unpack(std::string& data,
std::vector<char>& buffer,
int& position,
Opm::Parallel::MPIComm comm) Opm::Parallel::MPIComm comm)
{ {
pack(str.c_str(), buffer, position, comm); std::size_t length = 0;
MPI_Unpack(buffer.data(), buffer.size(), &position, &length, 1,
Dune::MPITraits<std::size_t>::getType(), comm);
std::vector<char> cStr(length+1, '\0');
MPI_Unpack(buffer.data(), buffer.size(), &position, cStr.data(), length,
MPI_CHAR, comm);
data.clear();
data.append(cStr.data());
} }
template<std::size_t Size> std::size_t Packing<false,time_point>::
void pack(const std::bitset<Size>& data, std::vector<char>& buffer, packSize(const time_point&, Opm::Parallel::MPIComm comm)
int& position, Opm::Parallel::MPIComm comm)
{ {
Packing<std::bitset<Size>>::pack(data, buffer, position, comm); return Packing<true,std::time_t>::packSize(std::time_t(), comm);
} }
void pack(const Opm::time_point& data, std::vector<char>& buffer, int& position, void Packing<false,time_point>::
Opm::Parallel::MPIComm comm) pack(const time_point& data,
std::vector<char>& buffer,
int& position,
Parallel::MPIComm comm)
{ {
pack(Opm::TimeService::to_time_t(data), buffer, position, comm); Packing<true,std::time_t>::pack(TimeService::to_time_t(data),
buffer, position, comm);
} }
void Packing<false,time_point>::
/// Mpi::unpack routines unpack(time_point& data,
std::vector<char>& buffer,
template<class T> int& position,
void unpack(T*, const std::size_t&, std::vector<char>&, int&, Parallel::MPIComm comm)
Opm::Parallel::MPIComm, std::integral_constant<bool, false>)
{ {
OPM_THROW(std::logic_error, "Packing not (yet) supported for this non-pod type."); std::time_t res;
Packing<true,std::time_t>::unpack(res, buffer, position, comm);
data = TimeService::from_time_t(res);
} }
template<class T> template struct Packing<false,std::bitset<4>>;
void unpack(T* data, const std::size_t& l, std::vector<char>& buffer, int& position,
Opm::Parallel::MPIComm comm,
std::integral_constant<bool, true>)
{
#if HAVE_MPI
MPI_Unpack(buffer.data(), buffer.size(), &position, data, l,
Dune::MPITraits<T>::getType(), comm);
#else
(void) data;
(void) comm;
(void) l;
(void) buffer;
(void) position;
#endif
} }
template<class T>
void unpack(T* data, const std::size_t& l, std::vector<char>& buffer, int& position,
Opm::Parallel::MPIComm comm)
{
unpack(data, l, buffer, position, comm, typename std::is_pod<T>::type());
}
void unpack(char* str, std::size_t length, std::vector<char>& buffer, int& position,
Opm::Parallel::MPIComm comm)
{
#if HAVE_MPI
MPI_Unpack(buffer.data(), buffer.size(), &position, const_cast<char*>(str), length, MPI_CHAR, comm);
#else
(void) str;
(void) comm;
(void) length;
(void) buffer;
(void) position;
#endif
}
void unpack(std::string& str, std::vector<char>& buffer, int& position,
Opm::Parallel::MPIComm comm)
{
std::size_t length=0;
unpack(length, buffer, position, comm);
std::vector<char> cStr(length, '\0');
unpack(cStr.data(), length, buffer, position, comm);
str.clear();
str.append(cStr.data());
}
template<std::size_t Size>
void unpack(std::bitset<Size>& data, std::vector<char>& buffer, int& position,
Opm::Parallel::MPIComm comm)
{
Packing<std::bitset<Size>>::unpack(data, buffer, position, comm);
}
void unpack([[maybe_unused]] Opm::time_point& data, std::vector<char>& buffer, int& position,
Opm::Parallel::MPIComm comm)
{
std::time_t tp;
unpack(tp, buffer, position, comm);
#if HAVE_MPI
data = Opm::TimeService::from_time_t(tp);
#endif
}
#define INSTANTIATE_PACK(...) \
template std::size_t packSize(const __VA_ARGS__& data, \
Opm::Parallel::MPIComm comm); \
template void pack(const __VA_ARGS__& data, \
std::vector<char>& buffer, int& position, \
Opm::Parallel::MPIComm comm); \
template void unpack(__VA_ARGS__& data, \
std::vector<char>& buffer, int& position, \
Opm::Parallel::MPIComm comm);
INSTANTIATE_PACK(float)
INSTANTIATE_PACK(double)
INSTANTIATE_PACK(bool)
INSTANTIATE_PACK(int)
INSTANTIATE_PACK(unsigned char)
INSTANTIATE_PACK(unsigned int)
INSTANTIATE_PACK(unsigned long int)
INSTANTIATE_PACK(unsigned long long int)
INSTANTIATE_PACK(std::bitset<4>)
#undef INSTANTIATE_PACK
} // end namespace Mpi } // end namespace Mpi
} // end namespace Opm } // end namespace Opm

View File

@@ -19,179 +19,248 @@
#ifndef MPI_SERIALIZER_HPP #ifndef MPI_SERIALIZER_HPP
#define MPI_SERIALIZER_HPP #define MPI_SERIALIZER_HPP
#include <opm/common/ErrorMacros.hpp>
#include <opm/common/utility/TimeService.hpp> #include <opm/common/utility/TimeService.hpp>
#include <opm/simulators/utils/ParallelCommunication.hpp> #include <opm/simulators/utils/ParallelCommunication.hpp>
#include <dune/common/parallel/mpitraits.hh>
#include <bitset> #include <bitset>
#include <cstddef> #include <cstddef>
#include <string> #include <string>
#include <typeinfo>
namespace Opm namespace Opm {
namespace Mpi {
namespace detail {
//! \brief Abstract struct for packing which is (partially) specialized for specific types.
template <bool pod, class T>
struct Packing
{ {
static std::size_t packSize(const T&, Parallel::MPIComm);
static void pack(const T&, std::vector<char>&, int&, Parallel::MPIComm);
static void unpack(T&, std::vector<char>&, int&, Parallel::MPIComm);
};
namespace Mpi //! \brief Packaging for pod data.
template<class T>
struct Packing<true,T>
{ {
template<class T> //! \brief Calculates the pack size for a POD.
std::size_t packSize(const T*, std::size_t, Opm::Parallel::MPIComm, //! \param data The data to pack
std::integral_constant<bool, false>); //! \param comm The communicator to use
static std::size_t packSize(const T& data, Parallel::MPIComm comm)
{
return packSize(&data, 1, comm);
}
template<class T> //! \brief Calculates the pack size for an array of POD.
std::size_t packSize(const T*, std::size_t l, Opm::Parallel::MPIComm comm, //! \param data The array to pack
std::integral_constant<bool, true>); //! \param n Length of array
//! \param comm The communicator to use
template<class T> static std::size_t packSize(const T*, std::size_t n, Parallel::MPIComm comm)
std::size_t packSize(const T* data, std::size_t l, Opm::Parallel::MPIComm comm); {
int size = 0;
template<class T> MPI_Pack_size(n, Dune::MPITraits<T>::getType(), comm, &size);
std::size_t packSize(const T&, Opm::Parallel::MPIComm,
std::integral_constant<bool, false>)
{
std::string msg = std::string{"Packing not (yet) supported for non-pod type: "} + typeid(T).name();
OPM_THROW(std::logic_error, msg);
}
template<class T>
std::size_t packSize(const T&, Opm::Parallel::MPIComm comm,
std::integral_constant<bool, true>)
{
#if HAVE_MPI
int size{};
MPI_Pack_size(1, Dune::MPITraits<T>::getType(), comm, &size);
return size; return size;
#else }
(void) comm;
return 0;
#endif
}
template<class T> //! \brief Pack a POD.
std::size_t packSize(const T& data, Opm::Parallel::MPIComm comm) //! \param data The variable to pack
{ //! \param buffer Buffer to pack into
return packSize(data, comm, typename std::is_pod<T>::type()); //! \param position Position in buffer to use
} //! \param comm The communicator to use
static void pack(const T& data,
std::vector<char>& buffer,
int& position,
Parallel::MPIComm comm)
{
pack(&data, 1, buffer, position, comm);
}
std::size_t packSize(const char* str, Opm::Parallel::MPIComm comm); //! \brief Pack an array of POD.
//! \param data The array to pack
template<std::size_t Size> //! \param n Length of array
std::size_t packSize(const std::bitset<Size>& data, Opm::Parallel::MPIComm comm); //! \param buffer Buffer to pack into
//! \param position Position in buffer to use
////// pack routines //! \param comm The communicator to use
static void pack(const T* data,
template<class T> std::size_t n,
void pack(const T*, std::size_t, std::vector<char>&, int&, std::vector<char>& buffer,
Opm::Parallel::MPIComm, std::integral_constant<bool, false>); int& position,
Parallel::MPIComm comm)
template<class T> {
void pack(const T* data, std::size_t l, std::vector<char>& buffer, int& position, MPI_Pack(data, n, Dune::MPITraits<T>::getType(), buffer.data(),
Opm::Parallel::MPIComm comm, std::integral_constant<bool, true>);
template<class T>
void pack(const T* data, std::size_t l, std::vector<char>& buffer, int& position,
Opm::Parallel::MPIComm comm);
template<class T>
void pack(const T&, std::vector<char>&, int&,
Opm::Parallel::MPIComm, std::integral_constant<bool, false>)
{
OPM_THROW(std::logic_error, "Packing not (yet) supported for this non-pod type.");
}
template<class T>
void pack(const T& data, std::vector<char>& buffer, int& position,
Opm::Parallel::MPIComm comm, std::integral_constant<bool, true>)
{
#if HAVE_MPI
MPI_Pack(&data, 1, Dune::MPITraits<T>::getType(), buffer.data(),
buffer.size(), &position, comm); buffer.size(), &position, comm);
#else }
(void) data;
(void) comm;
(void) buffer;
(void) position;
#endif
}
template<class T> //! \brief Unpack a POD.
void pack(const T& data, std::vector<char>& buffer, int& position, //! \param data The variable to unpack
Opm::Parallel::MPIComm comm) //! \param buffer Buffer to unpack from
{ //! \param position Position in buffer to use
pack(data, buffer, position, comm, typename std::is_pod<T>::type()); //! \param comm The communicator to use
} static void unpack(T& data,
std::vector<char>& buffer,
int& position,
Parallel::MPIComm comm)
{
unpack(&data, 1, buffer, position, comm);
}
void pack(const char* str, std::vector<char>& buffer, int& position, //! \brief Unpack an array of POD.
Opm::Parallel::MPIComm comm); //! \param data The array to unpack
//! \param n Length of array
template<std::size_t Size> //! \param buffer Buffer to unpack from
void pack(const std::bitset<Size>& data, std::vector<char>& buffer, int& position, //! \param position Position in buffer to use
Opm::Parallel::MPIComm comm); //! \param comm The communicator to use
static void unpack(T* data,
/// unpack routines std::size_t n,
std::vector<char>& buffer,
template<class T> int& position,
void unpack(T*, const std::size_t&, std::vector<char>&, int&, Parallel::MPIComm comm)
Opm::Parallel::MPIComm, std::integral_constant<bool, false>); {
MPI_Unpack(buffer.data(), buffer.size(), &position, data, n,
template<class T>
void unpack(T* data, const std::size_t& l, std::vector<char>& buffer, int& position,
Opm::Parallel::MPIComm comm,
std::integral_constant<bool, true>);
template<class T>
void unpack(T* data, const std::size_t& l, std::vector<char>& buffer, int& position,
Opm::Parallel::MPIComm comm);
template<class T>
void unpack(T&, std::vector<char>&, int&,
Opm::Parallel::MPIComm, std::integral_constant<bool, false>)
{
OPM_THROW(std::logic_error, "Packing not (yet) supported for this non-pod type.");
}
template<class T>
void unpack(T& data, std::vector<char>& buffer, int& position,
Opm::Parallel::MPIComm comm, std::integral_constant<bool, true>)
{
#if HAVE_MPI
MPI_Unpack(buffer.data(), buffer.size(), &position, &data, 1,
Dune::MPITraits<T>::getType(), comm); Dune::MPITraits<T>::getType(), comm);
#else }
(void) data; };
(void) comm;
(void) buffer;
(void) position;
#endif
}
//! \brief Default handling for unsupported types.
template<class T> template<class T>
void unpack(T& data, std::vector<char>& buffer, int& position, struct Packing<false,T>
Opm::Parallel::MPIComm comm)
{ {
unpack(data, buffer, position, comm, typename std::is_pod<T>::type()); static std::size_t packSize(const T&, Parallel::MPIComm)
{
static_assert(!std::is_same_v<T,T>, "Packing not supported for type");
return 0;
}
static void pack(const T&, std::vector<char>&, int&,
Parallel::MPIComm)
{
static_assert(!std::is_same_v<T,T>, "Packing not supported for type");
}
static void unpack(T&, std::vector<char>&, int&,
Parallel::MPIComm)
{
static_assert(!std::is_same_v<T,T>, "Packing not supported for type");
}
};
//! \brief Specialization for std::bitset
template <std::size_t Size>
struct Packing<false,std::bitset<Size>>
{
static std::size_t packSize(const std::bitset<Size>&, Opm::Parallel::MPIComm);
static void pack(const std::bitset<Size>&, std::vector<char>&, int&, Opm::Parallel::MPIComm);
static void unpack(std::bitset<Size>&, std::vector<char>&, int&, Opm::Parallel::MPIComm);
};
#define ADD_PACK_SPECIALIZATION(T) \
template<> \
struct Packing<false,T> \
{ \
static std::size_t packSize(const T&, Parallel::MPIComm); \
static void pack(const T&, std::vector<char>&, int&, Parallel::MPIComm); \
static void unpack(T&, std::vector<char>&, int&, Parallel::MPIComm); \
};
ADD_PACK_SPECIALIZATION(std::string)
ADD_PACK_SPECIALIZATION(time_point)
} }
void unpack(char* str, std::size_t length, std::vector<char>& buffer, int& position, //! \brief Struct handling packing of serialization for MPI communication.
Opm::Parallel::MPIComm comm); struct Packer {
//! \brief Calculates the pack size for a variable.
//! \tparam T The type of the data to be packed
//! \param data The data to pack
//! \param comm The communicator to use
template<class T>
static std::size_t packSize(const T& data, Parallel::MPIComm comm)
{
return detail::Packing<std::is_pod_v<T>,T>::packSize(data,comm);
}
template<std::size_t Size> //! \brief Calculates the pack size for an array.
void unpack(std::bitset<Size>& data, std::vector<char>& buffer, int& position, //! \tparam T The type of the data to be packed
Opm::Parallel::MPIComm comm); //! \param data The array to pack
//! \param n Length of array
//! \param comm The communicator to use
template<class T>
static std::size_t packSize(const T* data, std::size_t n, Parallel::MPIComm comm)
{
static_assert(std::is_pod_v<T>, "Array packing not supported for non-pod data");
return detail::Packing<true,T>::packSize(data,n,comm);
}
/// prototypes for complex types //! \brief Pack a variable.
//! \tparam T The type of the data to be packed
//! \param data The variable to pack
//! \param buffer Buffer to pack into
//! \param position Position in buffer to use
//! \param comm The communicator to use
template<class T>
static void pack(const T& data,
std::vector<char>& buffer,
int& position,
Parallel::MPIComm comm)
{
detail::Packing<std::is_pod_v<T>,T>::pack(data, buffer, position, comm);
}
#define ADD_PACK_PROTOTYPES(T) \ //! \brief Pack an array.
std::size_t packSize(const T& data, Opm::Parallel::MPIComm comm); \ //! \tparam T The type of the data to be packed
void pack(const T& data, std::vector<char>& buffer, int& position, \ //! \param data The array to pack
Opm::Parallel::MPIComm comm); \ //! \param n Length of array
void unpack(T& data, std::vector<char>& buffer, int& position, \ //! \param buffer Buffer to pack into
Opm::Parallel::MPIComm comm); //! \param position Position in buffer to use
//! \param comm The communicator to use
template<class T>
static void pack(const T* data,
std::size_t n,
std::vector<char>& buffer,
int& position,
Parallel::MPIComm comm)
{
static_assert(std::is_pod_v<T>, "Array packing not supported for non-pod data");
detail::Packing<true,T>::pack(data, n, buffer, position, comm);
}
ADD_PACK_PROTOTYPES(std::string) //! \brief Unpack a variable.
ADD_PACK_PROTOTYPES(time_point) //! \tparam T The type of the data to be unpacked
//! \param data The variable to unpack
//! \param buffer Buffer to unpack from
//! \param position Position in buffer to use
//! \param comm The communicator to use
template<class T>
static void unpack(T& data,
std::vector<char>& buffer,
int& position,
Parallel::MPIComm comm)
{
detail::Packing<std::is_pod_v<T>,T>::unpack(data, buffer, position, comm);
}
//! \brief Unpack an array.
//! \tparam T The type of the data to be unpacked
//! \param data The array to unpack
//! \param n Length of array
//! \param buffer Buffer to unpack from
//! \param position Position in buffer to use
//! \param comm The communicator to use
template<class T>
static void unpack(T* data,
std::size_t n,
std::vector<char>& buffer,
int& position,
Parallel::MPIComm comm)
{
static_assert(std::is_pod_v<T>, "Array packing not supported for non-pod data");
detail::Packing<true,T>::unpack(data, n, buffer, position, comm);
}
};
} // end namespace Mpi } // end namespace Mpi
} // end namespace Opm } // end namespace Opm
#endif // MPI_SERIALIZER_HPP #endif // MPI_SERIALIZER_HPP