changed: refactor MPIPacker

to limit the proliferation of overloads, we put the packing details for
specific types in separate structs.
emit a compiler error if an unsupported type is given; it is
better to detect this at compile time rather than at run time.
Arne Morten Kvarving 2022-09-07 13:54:15 +02:00
parent 3a19ab31bd
commit b1092c1a12
4 changed files with 318 additions and 408 deletions
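The refactor replaces the old free-function overload set (Mpi::packSize/pack/unpack) with a single Mpi::Packer front end that dispatches to detail::Packing specializations. A minimal round-trip sketch through the new interface follows; the main() harness and the direct use of MPI_COMM_WORLD are illustrative assumptions, not part of the commit:

// Hypothetical harness exercising the Packer introduced by this commit.
#include <opm/simulators/utils/MPIPacker.hpp>
#include <mpi.h>
#include <string>
#include <vector>

int main(int argc, char** argv)
{
    MPI_Init(&argc, &argv);
    const std::string greeting("hello");
    const double value = 4.2;

    // 1) measure: every supported type funnels through the same entry point
    std::size_t size = Opm::Mpi::Packer::packSize(greeting, MPI_COMM_WORLD)
                     + Opm::Mpi::Packer::packSize(value, MPI_COMM_WORLD);

    // 2) pack into one flat buffer, position advancing as we go
    std::vector<char> buffer(size);
    int position = 0;
    Opm::Mpi::Packer::pack(greeting, buffer, position, MPI_COMM_WORLD);
    Opm::Mpi::Packer::pack(value, buffer, position, MPI_COMM_WORLD);

    // 3) unpack in the same order
    std::string greeting2;
    double value2;
    position = 0;
    Opm::Mpi::Packer::unpack(greeting2, buffer, position, MPI_COMM_WORLD);
    Opm::Mpi::Packer::unpack(value2, buffer, position, MPI_COMM_WORLD);

    MPI_Finalize();
    return 0;
}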

CMakeLists.txt

@@ -74,7 +74,6 @@ list (APPEND MAIN_SOURCE_FILES
   opm/simulators/timestepping/gatherConvergenceReport.cpp
   opm/simulators/utils/DeferredLogger.cpp
   opm/simulators/utils/gatherDeferredLogger.cpp
-  opm/simulators/utils/MPIPacker.cpp
   opm/simulators/utils/ParallelFileMerger.cpp
   opm/simulators/utils/ParallelRestart.cpp
   opm/simulators/wells/ALQState.cpp
@@ -146,7 +145,8 @@ if(HAVE_AMGCL)
 endif()
 if(MPI_FOUND)
   list(APPEND MAIN_SOURCE_FILES opm/simulators/utils/ParallelEclipseState.cpp
-                                opm/simulators/utils/ParallelSerialization.cpp)
+                                opm/simulators/utils/ParallelSerialization.cpp
+                                opm/simulators/utils/MPIPacker.cpp)
 endif()
 # originally generated with the command:

ebos/eclmpiserializer.hh

@@ -107,11 +107,11 @@ public:
             const_cast<T&>(data).serializeOp(*this);
         } else {
             if (m_op == Operation::PACKSIZE)
-                m_packSize += Mpi::packSize(data, m_comm);
+                m_packSize += Mpi::Packer::packSize(data, m_comm);
             else if (m_op == Operation::PACK)
-                Mpi::pack(data, m_buffer, m_position, m_comm);
+                Mpi::Packer::pack(data, m_buffer, m_position, m_comm);
             else if (m_op == Operation::UNPACK)
-                Mpi::unpack(const_cast<T&>(data), m_buffer, m_position, m_comm);
+                Mpi::Packer::unpack(const_cast<T&>(data), m_buffer, m_position, m_comm);
         }
     }
@@ -138,14 +138,14 @@ public:
         };
         if (m_op == Operation::PACKSIZE) {
-            m_packSize += Mpi::packSize(data.size(), m_comm);
+            m_packSize += Mpi::Packer::packSize(data.size(), m_comm);
             handle(data);
         } else if (m_op == Operation::PACK) {
-            Mpi::pack(data.size(), m_buffer, m_position, m_comm);
+            Mpi::Packer::pack(data.size(), m_buffer, m_position, m_comm);
             handle(data);
         } else if (m_op == Operation::UNPACK) {
             size_t size;
-            Mpi::unpack(size, m_buffer, m_position, m_comm);
+            Mpi::Packer::unpack(size, m_buffer, m_position, m_comm);
             data.resize(size);
             handle(data);
         }
@@ -156,8 +156,8 @@ public:
     void vector(std::vector<bool>& data)
     {
         if (m_op == Operation::PACKSIZE) {
-            m_packSize += Mpi::packSize(data.size(), m_comm);
-            m_packSize += data.size()*Mpi::packSize(bool(), m_comm);
+            m_packSize += Mpi::Packer::packSize(data.size(), m_comm);
+            m_packSize += data.size()*Mpi::Packer::packSize(bool(), m_comm);
         } else if (m_op == Operation::PACK) {
             (*this)(data.size());
             for (const auto entry : data) { // Not a reference: vector<bool> range
@@ -198,14 +198,14 @@ public:
         };
         if (m_op == Operation::PACKSIZE) {
-            m_packSize += Mpi::packSize(data.size(), m_comm);
+            m_packSize += Mpi::Packer::packSize(data.size(), m_comm);
             handle(data);
         } else if (m_op == Operation::PACK) {
-            Mpi::pack(data.size(), m_buffer, m_position, m_comm);
+            Mpi::Packer::pack(data.size(), m_buffer, m_position, m_comm);
             handle(data);
         } else if (m_op == Operation::UNPACK) {
             size_t size;
-            Mpi::unpack(size, m_buffer, m_position, m_comm);
+            Mpi::Packer::unpack(size, m_buffer, m_position, m_comm);
             handle(data);
         }
     }
@@ -223,14 +223,14 @@ public:
             (*this)(d);
         };
         if (m_op == Operation::PACKSIZE) {
-            m_packSize += Mpi::packSize(data.index(), m_comm);
+            m_packSize += Mpi::Packer::packSize(data.index(), m_comm);
             std::visit(visitor, data);
         } else if (m_op == Operation::PACK) {
-            Mpi::pack(data.index(), m_buffer, m_position, m_comm);
+            Mpi::Packer::pack(data.index(), m_buffer, m_position, m_comm);
             std::visit(visitor, data);
         } else if (m_op == Operation::UNPACK) {
             size_t index;
-            Mpi::unpack(index, m_buffer, m_position, m_comm);
+            Mpi::Packer::unpack(index, m_buffer, m_position, m_comm);
             auto& data_mut = const_cast<std::variant<Args...>&>(data);
             data_mut = detail::make_variant<Args...>(index);
             std::visit(visitor, data_mut);
@@ -244,18 +244,18 @@ public:
     void optional(const std::optional<T>& data)
     {
         if (m_op == Operation::PACKSIZE) {
-            m_packSize += Mpi::packSize(data.has_value(), m_comm);
+            m_packSize += Mpi::Packer::packSize(data.has_value(), m_comm);
             if (data.has_value()) {
                 (*this)(*data);
             }
         } else if (m_op == Operation::PACK) {
-            Mpi::pack(data.has_value(), m_buffer, m_position, m_comm);
+            Mpi::Packer::pack(data.has_value(), m_buffer, m_position, m_comm);
             if (data.has_value()) {
                 (*this)(*data);
             }
         } else if (m_op == Operation::UNPACK) {
             bool has;
-            Mpi::unpack(has, m_buffer, m_position, m_comm);
+            Mpi::Packer::unpack(has, m_buffer, m_position, m_comm);
             if (has) {
                 T res;
                 (*this)(res);
@@ -306,20 +306,20 @@ public:
         };
         if (m_op == Operation::PACKSIZE) {
-            m_packSize += Mpi::packSize(data.size(), m_comm);
+            m_packSize += Mpi::Packer::packSize(data.size(), m_comm);
             for (auto& it : data) {
                 keyHandle(it.first);
                 handle(it.second);
             }
         } else if (m_op == Operation::PACK) {
-            Mpi::pack(data.size(), m_buffer, m_position, m_comm);
+            Mpi::Packer::pack(data.size(), m_buffer, m_position, m_comm);
             for (auto& it : data) {
                 keyHandle(it.first);
                 handle(it.second);
             }
         } else if (m_op == Operation::UNPACK) {
             size_t size;
-            Mpi::unpack(size, m_buffer, m_position, m_comm);
+            Mpi::Packer::unpack(size, m_buffer, m_position, m_comm);
             for (size_t i = 0; i < size; ++i) {
                 Key key;
                 keyHandle(key);
@@ -351,18 +351,18 @@ public:
         };
         if (m_op == Operation::PACKSIZE) {
-            m_packSize += Mpi::packSize(data.size(), m_comm);
+            m_packSize += Mpi::Packer::packSize(data.size(), m_comm);
             for (auto& it : data) {
                 handle(it);
             }
         } else if (m_op == Operation::PACK) {
-            Mpi::pack(data.size(), m_buffer, m_position, m_comm);
+            Mpi::Packer::pack(data.size(), m_buffer, m_position, m_comm);
             for (auto& it : data) {
                 handle(it);
             }
         } else if (m_op == Operation::UNPACK) {
             size_t size;
-            Mpi::unpack(size, m_buffer, m_position, m_comm);
+            Mpi::Packer::unpack(size, m_buffer, m_position, m_comm);
             for (size_t i = 0; i < size; ++i) {
                 Data entry;
                 handle(entry);
opm/simulators/utils/MPIPacker.cpp

@@ -19,277 +19,118 @@
 #include <config.h>
 #include "MPIPacker.hpp"
 #include <bitset>
 #include <cstdint>
 #include <cstring>
 #include <ctime>
 #include <string>
-#include <type_traits>
 #include <dune/common/parallel/mpitraits.hh>
-#if HAVE_MPI
-#include <ebos/eclmpiserializer.hh>
-#endif
 #include <opm/common/utility/TimeService.hpp>
-namespace Opm
-{
-namespace Mpi
-{
-template<class T>
-std::size_t packSize(const T*, std::size_t, Opm::Parallel::MPIComm,
-                     std::integral_constant<bool, false>)
-{
-    OPM_THROW(std::logic_error, "Packing not (yet) supported for this non-pod type.");
-}
-template<class T>
-std::size_t packSize(const T*, std::size_t l, Opm::Parallel::MPIComm comm,
-                     std::integral_constant<bool, true>)
-{
-#if HAVE_MPI
-    int size;
-    MPI_Pack_size(1, Dune::MPITraits<std::size_t>::getType(), comm, &size);
-    std::size_t totalSize = size;
-    MPI_Pack_size(l, Dune::MPITraits<T>::getType(), comm, &size);
-    return totalSize + size;
-#else
-    (void) comm;
-    return l-l;
-#endif
-}
-template<class T>
-std::size_t packSize(const T* data, std::size_t l, Opm::Parallel::MPIComm comm)
-{
-    return packSize(data, l, comm, typename std::is_pod<T>::type());
-}
-std::size_t packSize(const char* str, Opm::Parallel::MPIComm comm)
-{
-#if HAVE_MPI
-    int size;
-    MPI_Pack_size(1, Dune::MPITraits<std::size_t>::getType(), comm, &size);
-    int totalSize = size;
-    MPI_Pack_size(strlen(str)+1, MPI_CHAR, comm, &size);
-    return totalSize + size;
-#else
-    (void) str;
-    (void) comm;
-    return 0;
-#endif
-}
-std::size_t packSize(const std::string& str, Opm::Parallel::MPIComm comm)
-{
-    return packSize(str.c_str(), comm);
-}
-template <class T>
-struct Packing
-{
-};
-template <std::size_t Size>
-struct Packing<std::bitset<Size>>
-{
-    static std::size_t packSize(const std::bitset<Size>& data, Opm::Parallel::MPIComm comm)
-    {
-        return Mpi::packSize(data.to_ullong(), comm);
-    }
-    static void pack(const std::bitset<Size>& data, std::vector<char>& buffer, int& position, Opm::Parallel::MPIComm comm)
-    {
-        Mpi::pack(data.to_ullong(), buffer, position, comm);
-    }
-    static void unpack(std::bitset<Size>& data, std::vector<char>& buffer, int& position, Opm::Parallel::MPIComm comm)
-    {
-        unsigned long long d;
-        Mpi::unpack(d, buffer, position, comm);
-        data = std::bitset<Size>(d);
-    }
-};
-template<std::size_t Size>
-std::size_t packSize(const std::bitset<Size>& data, Opm::Parallel::MPIComm comm)
-{
-    return Packing<std::bitset<Size>>::packSize(data, comm);
-}
-std::size_t packSize(const Opm::time_point&, Opm::Parallel::MPIComm comm)
-{
-    std::time_t tp = 0;
-    return packSize(tp, comm);
-}
-////// pack routines
-template<class T>
-void pack(const T*, std::size_t, std::vector<char>&, int&,
-          Opm::Parallel::MPIComm, std::integral_constant<bool, false>)
-{
-    OPM_THROW(std::logic_error, "Packing not (yet) supported for this non-pod type.");
-}
-template<class T>
-void pack(const T* data, std::size_t l, std::vector<char>& buffer, int& position,
-          Opm::Parallel::MPIComm comm,
-          std::integral_constant<bool, true>)
-{
-#if HAVE_MPI
-    MPI_Pack(&l, 1, Dune::MPITraits<std::size_t>::getType(), buffer.data(),
-             buffer.size(), &position, comm);
-    MPI_Pack(data, l, Dune::MPITraits<T>::getType(), buffer.data(),
-             buffer.size(), &position, comm);
-#else
-    (void) data;
-    (void) comm;
-    (void) l;
-    (void) buffer;
-    (void) position;
-#endif
-}
-template<class T>
-void pack(const T* data, std::size_t l, std::vector<char>& buffer, int& position,
-          Opm::Parallel::MPIComm comm)
-{
-    pack(data, l, buffer, position, comm, typename std::is_pod<T>::type());
-}
-void pack(const char* str, std::vector<char>& buffer, int& position,
-          Opm::Parallel::MPIComm comm)
-{
-#if HAVE_MPI
-    std::size_t length = strlen(str)+1;
-    MPI_Pack(&length, 1, Dune::MPITraits<std::size_t>::getType(), buffer.data(),
-             buffer.size(), &position, comm);
-    MPI_Pack(str, strlen(str)+1, MPI_CHAR, buffer.data(), buffer.size(),
-             &position, comm);
-#else
-    (void) str;
-    (void) comm;
-    (void) buffer;
-    (void) position;
-#endif
-}
-void pack(const std::string& str, std::vector<char>& buffer, int& position,
-          Opm::Parallel::MPIComm comm)
-{
-    pack(str.c_str(), buffer, position, comm);
-}
-template<std::size_t Size>
-void pack(const std::bitset<Size>& data, std::vector<char>& buffer,
-          int& position, Opm::Parallel::MPIComm comm)
-{
-    Packing<std::bitset<Size>>::pack(data, buffer, position, comm);
-}
-void pack(const Opm::time_point& data, std::vector<char>& buffer, int& position,
-          Opm::Parallel::MPIComm comm)
-{
-    pack(Opm::TimeService::to_time_t(data), buffer, position, comm);
-}
-/// Mpi::unpack routines
-template<class T>
-void unpack(T*, const std::size_t&, std::vector<char>&, int&,
-            Opm::Parallel::MPIComm, std::integral_constant<bool, false>)
-{
-    OPM_THROW(std::logic_error, "Packing not (yet) supported for this non-pod type.");
-}
-template<class T>
-void unpack(T* data, const std::size_t& l, std::vector<char>& buffer, int& position,
-            Opm::Parallel::MPIComm comm,
-            std::integral_constant<bool, true>)
-{
-#if HAVE_MPI
-    MPI_Unpack(buffer.data(), buffer.size(), &position, data, l,
-               Dune::MPITraits<T>::getType(), comm);
-#else
-    (void) data;
-    (void) comm;
-    (void) l;
-    (void) buffer;
-    (void) position;
-#endif
-}
-template<class T>
-void unpack(T* data, const std::size_t& l, std::vector<char>& buffer, int& position,
-            Opm::Parallel::MPIComm comm)
-{
-    unpack(data, l, buffer, position, comm, typename std::is_pod<T>::type());
-}
-void unpack(char* str, std::size_t length, std::vector<char>& buffer, int& position,
-            Opm::Parallel::MPIComm comm)
-{
-#if HAVE_MPI
-    MPI_Unpack(buffer.data(), buffer.size(), &position, const_cast<char*>(str), length, MPI_CHAR, comm);
-#else
-    (void) str;
-    (void) comm;
-    (void) length;
-    (void) buffer;
-    (void) position;
-#endif
-}
-void unpack(std::string& str, std::vector<char>& buffer, int& position,
-            Opm::Parallel::MPIComm comm)
-{
-    std::size_t length=0;
-    unpack(length, buffer, position, comm);
-    std::vector<char> cStr(length, '\0');
-    unpack(cStr.data(), length, buffer, position, comm);
-    str.clear();
-    str.append(cStr.data());
-}
-template<std::size_t Size>
-void unpack(std::bitset<Size>& data, std::vector<char>& buffer, int& position,
-            Opm::Parallel::MPIComm comm)
-{
-    Packing<std::bitset<Size>>::unpack(data, buffer, position, comm);
-}
-void unpack([[maybe_unused]] Opm::time_point& data, std::vector<char>& buffer, int& position,
-            Opm::Parallel::MPIComm comm)
-{
-    std::time_t tp;
-    unpack(tp, buffer, position, comm);
-#if HAVE_MPI
-    data = Opm::TimeService::from_time_t(tp);
-#endif
-}
-#define INSTANTIATE_PACK(...) \
-template std::size_t packSize(const __VA_ARGS__& data, \
-                              Opm::Parallel::MPIComm comm); \
-template void pack(const __VA_ARGS__& data, \
-                   std::vector<char>& buffer, int& position, \
-                   Opm::Parallel::MPIComm comm); \
-template void unpack(__VA_ARGS__& data, \
-                     std::vector<char>& buffer, int& position, \
-                     Opm::Parallel::MPIComm comm);
-INSTANTIATE_PACK(float)
-INSTANTIATE_PACK(double)
-INSTANTIATE_PACK(bool)
-INSTANTIATE_PACK(int)
-INSTANTIATE_PACK(unsigned char)
-INSTANTIATE_PACK(unsigned int)
-INSTANTIATE_PACK(unsigned long int)
-INSTANTIATE_PACK(unsigned long long int)
-INSTANTIATE_PACK(std::bitset<4>)
-#undef INSTANTIATE_PACK
+namespace Opm {
+namespace Mpi {
+namespace detail {
+template<std::size_t Size>
+std::size_t Packing<false,std::bitset<Size>>::
+packSize(const std::bitset<Size>& data,
+         Parallel::MPIComm comm)
+{
+    return Packing<true,unsigned long long>::packSize(data.to_ullong(), comm);
+}
+template<std::size_t Size>
+void Packing<false,std::bitset<Size>>::
+pack(const std::bitset<Size>& data,
+     std::vector<char>& buffer,
+     int& position,
+     Parallel::MPIComm comm)
+{
+    Packing<true,unsigned long long>::pack(data.to_ullong(), buffer, position, comm);
+}
+template<std::size_t Size>
+void Packing<false,std::bitset<Size>>::
+unpack(std::bitset<Size>& data,
+       std::vector<char>& buffer,
+       int& position,
+       Parallel::MPIComm comm)
+{
+    unsigned long long d;
+    Packing<true,unsigned long long>::unpack(d, buffer, position, comm);
+    data = std::bitset<Size>(d);
+}
+std::size_t Packing<false,std::string>::
+packSize(const std::string& data, Parallel::MPIComm comm)
+{
+    int size;
+    MPI_Pack_size(1, Dune::MPITraits<std::size_t>::getType(), comm, &size);
+    int totalSize = size;
+    MPI_Pack_size(strlen(data.c_str()), MPI_CHAR, comm, &size);
+    return totalSize + size;
+}
+void Packing<false,std::string>::
+pack(const std::string& data,
+     std::vector<char>& buffer,
+     int& position,
+     Parallel::MPIComm comm)
+{
+    std::size_t length = strlen(data.c_str());
+    MPI_Pack(&length, 1, Dune::MPITraits<std::size_t>::getType(), buffer.data(),
+             buffer.size(), &position, comm);
+    MPI_Pack(data.c_str(), length, MPI_CHAR, buffer.data(), buffer.size(),
+             &position, comm);
+}
+void Packing<false,std::string>::
+unpack(std::string& data,
+       std::vector<char>& buffer,
+       int& position,
+       Opm::Parallel::MPIComm comm)
+{
+    std::size_t length = 0;
+    MPI_Unpack(buffer.data(), buffer.size(), &position, &length, 1,
+               Dune::MPITraits<std::size_t>::getType(), comm);
+    std::vector<char> cStr(length+1, '\0');
+    MPI_Unpack(buffer.data(), buffer.size(), &position, cStr.data(), length,
+               MPI_CHAR, comm);
+    data.clear();
+    data.append(cStr.data());
+}
+std::size_t Packing<false,time_point>::
+packSize(const time_point&, Opm::Parallel::MPIComm comm)
+{
+    return Packing<true,std::time_t>::packSize(std::time_t(), comm);
+}
+void Packing<false,time_point>::
+pack(const time_point& data,
+     std::vector<char>& buffer,
+     int& position,
+     Parallel::MPIComm comm)
+{
+    Packing<true,std::time_t>::pack(TimeService::to_time_t(data),
+                                    buffer, position, comm);
+}
+void Packing<false,time_point>::
+unpack(time_point& data,
+       std::vector<char>& buffer,
+       int& position,
+       Parallel::MPIComm comm)
+{
+    std::time_t res;
+    Packing<true,std::time_t>::unpack(res, buffer, position, comm);
+    data = TimeService::from_time_t(res);
+}
+template struct Packing<false,std::bitset<4>>;
+}
 } // end namespace Mpi
 } // end namespace Opm
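One consequence of moving the bitset member definitions into the .cpp is visible in the last added line: a template defined out of line must be explicitly instantiated for every width used elsewhere, which is why the file ends with template struct Packing<false,std::bitset<4>>;. A generic sketch of that idiom (hypothetical Widget type, unrelated to the commit):

// widget.hpp -- member only declared, so users can compile against it
template<int N>
struct Widget
{
    static int size();
};

// widget.cpp -- the definition lives here, hidden from users
template<int N>
int Widget<N>::size()
{
    return N;
}

// Explicit instantiation: emits Widget<4>::size() into this translation
// unit so other .cpp files can link against it. A call to
// Widget<8>::size() would compile at the call site but fail at link
// time, since that definition was never instantiated anywhere.
template struct Widget<4>;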

opm/simulators/utils/MPIPacker.hpp

@@ -19,179 +19,248 @@
 #ifndef MPI_SERIALIZER_HPP
 #define MPI_SERIALIZER_HPP
-#include <opm/common/ErrorMacros.hpp>
 #include <opm/common/utility/TimeService.hpp>
 #include <opm/simulators/utils/ParallelCommunication.hpp>
 #include <dune/common/parallel/mpitraits.hh>
 #include <bitset>
 #include <cstddef>
 #include <string>
-#include <typeinfo>
-namespace Opm
-{
-namespace Mpi
-{
-template<class T>
-std::size_t packSize(const T*, std::size_t, Opm::Parallel::MPIComm,
-                     std::integral_constant<bool, false>);
-template<class T>
-std::size_t packSize(const T*, std::size_t l, Opm::Parallel::MPIComm comm,
-                     std::integral_constant<bool, true>);
-template<class T>
-std::size_t packSize(const T* data, std::size_t l, Opm::Parallel::MPIComm comm);
-template<class T>
-std::size_t packSize(const T&, Opm::Parallel::MPIComm,
-                     std::integral_constant<bool, false>)
-{
-    std::string msg = std::string{"Packing not (yet) supported for non-pod type: "} + typeid(T).name();
-    OPM_THROW(std::logic_error, msg);
-}
-template<class T>
-std::size_t packSize(const T&, Opm::Parallel::MPIComm comm,
-                     std::integral_constant<bool, true>)
-{
-#if HAVE_MPI
-    int size{};
-    MPI_Pack_size(1, Dune::MPITraits<T>::getType(), comm, &size);
-    return size;
-#else
-    (void) comm;
-    return 0;
-#endif
-}
-template<class T>
-std::size_t packSize(const T& data, Opm::Parallel::MPIComm comm)
-{
-    return packSize(data, comm, typename std::is_pod<T>::type());
-}
-std::size_t packSize(const char* str, Opm::Parallel::MPIComm comm);
-template<std::size_t Size>
-std::size_t packSize(const std::bitset<Size>& data, Opm::Parallel::MPIComm comm);
-////// pack routines
-template<class T>
-void pack(const T*, std::size_t, std::vector<char>&, int&,
-          Opm::Parallel::MPIComm, std::integral_constant<bool, false>);
-template<class T>
-void pack(const T* data, std::size_t l, std::vector<char>& buffer, int& position,
-          Opm::Parallel::MPIComm comm, std::integral_constant<bool, true>);
-template<class T>
-void pack(const T* data, std::size_t l, std::vector<char>& buffer, int& position,
-          Opm::Parallel::MPIComm comm);
-template<class T>
-void pack(const T&, std::vector<char>&, int&,
-          Opm::Parallel::MPIComm, std::integral_constant<bool, false>)
-{
-    OPM_THROW(std::logic_error, "Packing not (yet) supported for this non-pod type.");
-}
-template<class T>
-void pack(const T& data, std::vector<char>& buffer, int& position,
-          Opm::Parallel::MPIComm comm, std::integral_constant<bool, true>)
-{
-#if HAVE_MPI
-    MPI_Pack(&data, 1, Dune::MPITraits<T>::getType(), buffer.data(),
-             buffer.size(), &position, comm);
-#else
-    (void) data;
-    (void) comm;
-    (void) buffer;
-    (void) position;
-#endif
-}
-template<class T>
-void pack(const T& data, std::vector<char>& buffer, int& position,
-          Opm::Parallel::MPIComm comm)
-{
-    pack(data, buffer, position, comm, typename std::is_pod<T>::type());
-}
-void pack(const char* str, std::vector<char>& buffer, int& position,
-          Opm::Parallel::MPIComm comm);
-template<std::size_t Size>
-void pack(const std::bitset<Size>& data, std::vector<char>& buffer, int& position,
-          Opm::Parallel::MPIComm comm);
-/// unpack routines
-template<class T>
-void unpack(T*, const std::size_t&, std::vector<char>&, int&,
-            Opm::Parallel::MPIComm, std::integral_constant<bool, false>);
-template<class T>
-void unpack(T* data, const std::size_t& l, std::vector<char>& buffer, int& position,
-            Opm::Parallel::MPIComm comm,
-            std::integral_constant<bool, true>);
-template<class T>
-void unpack(T* data, const std::size_t& l, std::vector<char>& buffer, int& position,
-            Opm::Parallel::MPIComm comm);
-template<class T>
-void unpack(T&, std::vector<char>&, int&,
-            Opm::Parallel::MPIComm, std::integral_constant<bool, false>)
-{
-    OPM_THROW(std::logic_error, "Packing not (yet) supported for this non-pod type.");
-}
-template<class T>
-void unpack(T& data, std::vector<char>& buffer, int& position,
-            Opm::Parallel::MPIComm comm, std::integral_constant<bool, true>)
-{
-#if HAVE_MPI
-    MPI_Unpack(buffer.data(), buffer.size(), &position, &data, 1,
-               Dune::MPITraits<T>::getType(), comm);
-#else
-    (void) data;
-    (void) comm;
-    (void) buffer;
-    (void) position;
-#endif
-}
-template<class T>
-void unpack(T& data, std::vector<char>& buffer, int& position,
-            Opm::Parallel::MPIComm comm)
-{
-    unpack(data, buffer, position, comm, typename std::is_pod<T>::type());
-}
-void unpack(char* str, std::size_t length, std::vector<char>& buffer, int& position,
-            Opm::Parallel::MPIComm comm);
-template<std::size_t Size>
-void unpack(std::bitset<Size>& data, std::vector<char>& buffer, int& position,
-            Opm::Parallel::MPIComm comm);
-/// prototypes for complex types
-#define ADD_PACK_PROTOTYPES(T) \
-  std::size_t packSize(const T& data, Opm::Parallel::MPIComm comm); \
-  void pack(const T& data, std::vector<char>& buffer, int& position, \
-            Opm::Parallel::MPIComm comm); \
-  void unpack(T& data, std::vector<char>& buffer, int& position, \
-              Opm::Parallel::MPIComm comm);
-ADD_PACK_PROTOTYPES(std::string)
-ADD_PACK_PROTOTYPES(time_point)
+namespace Opm {
+namespace Mpi {
+namespace detail {
+//! \brief Abstract struct for packing which is (partially) specialized for specific types.
+template <bool pod, class T>
+struct Packing
+{
+    static std::size_t packSize(const T&, Parallel::MPIComm);
+    static void pack(const T&, std::vector<char>&, int&, Parallel::MPIComm);
+    static void unpack(T&, std::vector<char>&, int&, Parallel::MPIComm);
+};
+//! \brief Packaging for pod data.
+template<class T>
+struct Packing<true,T>
+{
+    //! \brief Calculates the pack size for a POD.
+    //! \param data The data to pack
+    //! \param comm The communicator to use
+    static std::size_t packSize(const T& data, Parallel::MPIComm comm)
+    {
+        return packSize(&data, 1, comm);
+    }
+    //! \brief Calculates the pack size for an array of POD.
+    //! \param data The array to pack
+    //! \param n Length of array
+    //! \param comm The communicator to use
+    static std::size_t packSize(const T*, std::size_t n, Parallel::MPIComm comm)
+    {
+        int size = 0;
+        MPI_Pack_size(n, Dune::MPITraits<T>::getType(), comm, &size);
+        return size;
+    }
+    //! \brief Pack a POD.
+    //! \param data The variable to pack
+    //! \param buffer Buffer to pack into
+    //! \param position Position in buffer to use
+    //! \param comm The communicator to use
+    static void pack(const T& data,
+                     std::vector<char>& buffer,
+                     int& position,
+                     Parallel::MPIComm comm)
+    {
+        pack(&data, 1, buffer, position, comm);
+    }
+    //! \brief Pack an array of POD.
+    //! \param data The array to pack
+    //! \param n Length of array
+    //! \param buffer Buffer to pack into
+    //! \param position Position in buffer to use
+    //! \param comm The communicator to use
+    static void pack(const T* data,
+                     std::size_t n,
+                     std::vector<char>& buffer,
+                     int& position,
+                     Parallel::MPIComm comm)
+    {
+        MPI_Pack(data, n, Dune::MPITraits<T>::getType(), buffer.data(),
+                 buffer.size(), &position, comm);
+    }
+    //! \brief Unpack a POD.
+    //! \param data The variable to unpack
+    //! \param buffer Buffer to unpack from
+    //! \param position Position in buffer to use
+    //! \param comm The communicator to use
+    static void unpack(T& data,
+                       std::vector<char>& buffer,
+                       int& position,
+                       Parallel::MPIComm comm)
+    {
+        unpack(&data, 1, buffer, position, comm);
+    }
+    //! \brief Unpack an array of POD.
+    //! \param data The array to unpack
+    //! \param n Length of array
+    //! \param buffer Buffer to unpack from
+    //! \param position Position in buffer to use
+    //! \param comm The communicator to use
+    static void unpack(T* data,
+                       std::size_t n,
+                       std::vector<char>& buffer,
+                       int& position,
+                       Parallel::MPIComm comm)
+    {
+        MPI_Unpack(buffer.data(), buffer.size(), &position, data, n,
+                   Dune::MPITraits<T>::getType(), comm);
+    }
+};
+//! \brief Default handling for unsupported types.
+template<class T>
+struct Packing<false,T>
+{
+    static std::size_t packSize(const T&, Parallel::MPIComm)
+    {
+        static_assert(!std::is_same_v<T,T>, "Packing not supported for type");
+        return 0;
+    }
+    static void pack(const T&, std::vector<char>&, int&,
+                     Parallel::MPIComm)
+    {
+        static_assert(!std::is_same_v<T,T>, "Packing not supported for type");
+    }
+    static void unpack(T&, std::vector<char>&, int&,
+                       Parallel::MPIComm)
+    {
+        static_assert(!std::is_same_v<T,T>, "Packing not supported for type");
+    }
+};
+//! \brief Specialization for std::bitset
+template <std::size_t Size>
+struct Packing<false,std::bitset<Size>>
+{
+    static std::size_t packSize(const std::bitset<Size>&, Opm::Parallel::MPIComm);
+    static void pack(const std::bitset<Size>&, std::vector<char>&, int&, Opm::Parallel::MPIComm);
+    static void unpack(std::bitset<Size>&, std::vector<char>&, int&, Opm::Parallel::MPIComm);
+};
+#define ADD_PACK_SPECIALIZATION(T) \
+    template<> \
+    struct Packing<false,T> \
+    { \
+        static std::size_t packSize(const T&, Parallel::MPIComm); \
+        static void pack(const T&, std::vector<char>&, int&, Parallel::MPIComm); \
+        static void unpack(T&, std::vector<char>&, int&, Parallel::MPIComm); \
+    };
+ADD_PACK_SPECIALIZATION(std::string)
+ADD_PACK_SPECIALIZATION(time_point)
+}
+//! \brief Struct handling packing of serialization for MPI communication.
+struct Packer {
+    //! \brief Calculates the pack size for a variable.
+    //! \tparam T The type of the data to be packed
+    //! \param data The data to pack
+    //! \param comm The communicator to use
+    template<class T>
+    static std::size_t packSize(const T& data, Parallel::MPIComm comm)
+    {
+        return detail::Packing<std::is_pod_v<T>,T>::packSize(data,comm);
+    }
+    //! \brief Calculates the pack size for an array.
+    //! \tparam T The type of the data to be packed
+    //! \param data The array to pack
+    //! \param n Length of array
+    //! \param comm The communicator to use
+    template<class T>
+    static std::size_t packSize(const T* data, std::size_t n, Parallel::MPIComm comm)
+    {
+        static_assert(std::is_pod_v<T>, "Array packing not supported for non-pod data");
+        return detail::Packing<true,T>::packSize(data,n,comm);
+    }
+    //! \brief Pack a variable.
+    //! \tparam T The type of the data to be packed
+    //! \param data The variable to pack
+    //! \param buffer Buffer to pack into
+    //! \param position Position in buffer to use
+    //! \param comm The communicator to use
+    template<class T>
+    static void pack(const T& data,
+                     std::vector<char>& buffer,
+                     int& position,
+                     Parallel::MPIComm comm)
+    {
+        detail::Packing<std::is_pod_v<T>,T>::pack(data, buffer, position, comm);
+    }
+    //! \brief Pack an array.
+    //! \tparam T The type of the data to be packed
+    //! \param data The array to pack
+    //! \param n Length of array
+    //! \param buffer Buffer to pack into
+    //! \param position Position in buffer to use
+    //! \param comm The communicator to use
+    template<class T>
+    static void pack(const T* data,
+                     std::size_t n,
+                     std::vector<char>& buffer,
+                     int& position,
+                     Parallel::MPIComm comm)
+    {
+        static_assert(std::is_pod_v<T>, "Array packing not supported for non-pod data");
+        detail::Packing<true,T>::pack(data, n, buffer, position, comm);
+    }
+    //! \brief Unpack a variable.
+    //! \tparam T The type of the data to be unpacked
+    //! \param data The variable to unpack
+    //! \param buffer Buffer to unpack from
+    //! \param position Position in buffer to use
+    //! \param comm The communicator to use
+    template<class T>
+    static void unpack(T& data,
+                       std::vector<char>& buffer,
+                       int& position,
+                       Parallel::MPIComm comm)
+    {
+        detail::Packing<std::is_pod_v<T>,T>::unpack(data, buffer, position, comm);
+    }
+    //! \brief Unpack an array.
+    //! \tparam T The type of the data to be unpacked
+    //! \param data The array to unpack
+    //! \param n Length of array
+    //! \param buffer Buffer to unpack from
+    //! \param position Position in buffer to use
+    //! \param comm The communicator to use
+    template<class T>
+    static void unpack(T* data,
+                       std::size_t n,
+                       std::vector<char>& buffer,
+                       int& position,
+                       Parallel::MPIComm comm)
+    {
+        static_assert(std::is_pod_v<T>, "Array packing not supported for non-pod data");
+        detail::Packing<true,T>::unpack(data, n, buffer, position, comm);
+    }
+};
 } // end namespace Mpi
 } // end namespace Opm
 #endif // MPI_SERIALIZER_HPP
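And the compile-time failure promised in the commit message: for a non-POD type with no Packing specialization, dispatch lands in the primary Packing<false,T> template, whose static_assert fires as soon as Packer::pack is instantiated. A sketch with a hypothetical NotSerializable type; this snippet is expected to fail to compile, which is the point:

#include <opm/simulators/utils/MPIPacker.hpp>
#include <map>
#include <vector>

// Non-POD type with no detail::Packing specialization.
struct NotSerializable
{
    std::map<int, int> lookup;
};

void broken(Opm::Parallel::MPIComm comm)
{
    std::vector<char> buffer;
    int position = 0;
    NotSerializable n;
    // With the old free functions this compiled and only hit OPM_THROW at
    // run time; now the compiler rejects it outright:
    //   error: static assertion failed: Packing not supported for type
    Opm::Mpi::Packer::pack(n, buffer, position, comm);
}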