use eclmpiserializer for SummaryConfig broadcast

This commit is contained in:
Arne Morten Kvarving 2020-03-10 10:01:26 +01:00
parent 1e65a1421b
commit e1bf6ed4a6
4 changed files with 34 additions and 35 deletions

View File

@@ -386,7 +386,6 @@ int main(int argc, char** argv)
setupMessageLimiter(schedule->getMessageLimits(), "STDOUT_LOGGER"); setupMessageLimiter(schedule->getMessageLimits(), "STDOUT_LOGGER");
summaryConfig.reset( new Opm::SummaryConfig(*deck, *schedule, eclipseState->getTableManager(), parseContext, errorGuard)); summaryConfig.reset( new Opm::SummaryConfig(*deck, *schedule, eclipseState->getTableManager(), parseContext, errorGuard));
#ifdef HAVE_MPI #ifdef HAVE_MPI
Opm::Mpi::packAndSend(*summaryConfig, Dune::MPIHelper::getCollectiveCommunication());
Opm::Mpi::packAndSend(*schedule, Dune::MPIHelper::getCollectiveCommunication()); Opm::Mpi::packAndSend(*schedule, Dune::MPIHelper::getCollectiveCommunication());
#endif #endif
} }
@@ -395,11 +394,11 @@ int main(int argc, char** argv)
summaryConfig.reset(new Opm::SummaryConfig); summaryConfig.reset(new Opm::SummaryConfig);
schedule.reset(new Opm::Schedule); schedule.reset(new Opm::Schedule);
parState = new Opm::ParallelEclipseState; parState = new Opm::ParallelEclipseState;
Opm::Mpi::receiveAndUnpack(*summaryConfig, mpiHelper.getCollectiveCommunication());
Opm::Mpi::receiveAndUnpack(*schedule, mpiHelper.getCollectiveCommunication()); Opm::Mpi::receiveAndUnpack(*schedule, mpiHelper.getCollectiveCommunication());
eclipseState.reset(parState); eclipseState.reset(parState);
} }
Opm::EclMpiSerializer ser(mpiHelper.getCollectiveCommunication()); Opm::EclMpiSerializer ser(mpiHelper.getCollectiveCommunication());
ser.broadcast(*summaryConfig);
ser.broadcast(*parState); ser.broadcast(*parState);
#endif #endif

View File

@@ -1651,14 +1651,6 @@ std::size_t packSize(const SummaryNode& data,
packSize(data.isUserDefined(), comm); packSize(data.isUserDefined(), comm);
} }
std::size_t packSize(const SummaryConfig& data,
Dune::MPIHelper::MPICommunicator comm)
{
return packSize(data.getKwds(), comm) +
packSize(data.getShortKwds(), comm) +
packSize(data.getSmryKwds(), comm);
}
std::size_t packSize(const EquilRecord& data, std::size_t packSize(const EquilRecord& data,
Dune::MPIHelper::MPICommunicator comm) Dune::MPIHelper::MPICommunicator comm)
{ {
@@ -3328,15 +3320,6 @@ void pack(const SummaryNode& data,
pack(data.isUserDefined(), buffer, position, comm); pack(data.isUserDefined(), buffer, position, comm);
} }
void pack(const SummaryConfig& data,
std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(data.getKwds(), buffer, position, comm);
pack(data.getShortKwds(), buffer, position, comm);
pack(data.getSmryKwds(), buffer, position, comm);
}
void pack(const EquilRecord& data, void pack(const EquilRecord& data,
std::vector<char>& buffer, int& position, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm) Dune::MPIHelper::MPICommunicator comm)
@@ -5681,19 +5664,6 @@ void unpack(SummaryNode& data,
.isUserDefined(isUserDefined); .isUserDefined(isUserDefined);
} }
void unpack(SummaryConfig& data,
std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
SummaryConfig::keyword_list kwds;
std::set<std::string> shortKwds, smryKwds;
unpack(kwds, buffer, position, comm);
unpack(shortKwds, buffer, position, comm);
unpack(smryKwds, buffer, position, comm);
data = SummaryConfig(kwds, shortKwds, smryKwds);
}
void unpack(EquilRecord& data, void unpack(EquilRecord& data,
std::vector<char>& buffer, int& position, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm) Dune::MPIHelper::MPICommunicator comm)
@@ -6046,8 +6016,24 @@ INSTANTIATE_PACK_VECTOR(bool)
INSTANTIATE_PACK_VECTOR(char) INSTANTIATE_PACK_VECTOR(char)
INSTANTIATE_PACK_VECTOR(int) INSTANTIATE_PACK_VECTOR(int)
INSTANTIATE_PACK_VECTOR(std::array<double, 3>) INSTANTIATE_PACK_VECTOR(std::array<double, 3>)
INSTANTIATE_PACK_VECTOR(SummaryNode)
#undef INSTANTIATE_PACK_VECTOR #undef INSTANTIATE_PACK_VECTOR
#define INSTANTIATE_PACK_SET(...) \
template std::size_t packSize(const std::set<__VA_ARGS__>& data, \
Dune::MPIHelper::MPICommunicator comm); \
template void pack(const std::set<__VA_ARGS__>& data, \
std::vector<char>& buffer, int& position, \
Dune::MPIHelper::MPICommunicator comm); \
template void unpack(std::set<__VA_ARGS__>& data, \
std::vector<char>& buffer, int& position, \
Dune::MPIHelper::MPICommunicator comm);
INSTANTIATE_PACK_SET(std::string)
#undef INSTANTIATE_PACK_SET
#define INSTANTIATE_PACK_SHARED_PTR(...) \ #define INSTANTIATE_PACK_SHARED_PTR(...) \
template std::size_t packSize(const std::shared_ptr<__VA_ARGS__>& data, \ template std::size_t packSize(const std::shared_ptr<__VA_ARGS__>& data, \
Dune::MPIHelper::MPICommunicator comm); \ Dune::MPIHelper::MPICommunicator comm); \
@@ -6078,6 +6064,7 @@ INSTANTIATE_PACK(int)
INSTANTIATE_PACK(std::array<short,3>) INSTANTIATE_PACK(std::array<short,3>)
INSTANTIATE_PACK(std::array<bool,3>) INSTANTIATE_PACK(std::array<bool,3>)
INSTANTIATE_PACK(unsigned char) INSTANTIATE_PACK(unsigned char)
INSTANTIATE_PACK(SummaryNode)
#undef INSTANTIATE_PACK #undef INSTANTIATE_PACK
} // end namespace Mpi } // end namespace Mpi

View File

@@ -141,7 +141,6 @@ class SpiralICD;
class StandardCond; class StandardCond;
class Stone1exRecord; class Stone1exRecord;
class Stone1exTable; class Stone1exTable;
class SummaryConfig;
class SummaryNode; class SummaryNode;
class Tabdims; class Tabdims;
class TableColumn; class TableColumn;
@@ -564,7 +563,6 @@ ADD_PACK_PROTOTYPES(SpiralICD)
ADD_PACK_PROTOTYPES(std::string) ADD_PACK_PROTOTYPES(std::string)
ADD_PACK_PROTOTYPES(Stone1exRecord) ADD_PACK_PROTOTYPES(Stone1exRecord)
ADD_PACK_PROTOTYPES(Stone1exTable) ADD_PACK_PROTOTYPES(Stone1exTable)
ADD_PACK_PROTOTYPES(SummaryConfig)
ADD_PACK_PROTOTYPES(SummaryNode) ADD_PACK_PROTOTYPES(SummaryNode)
ADD_PACK_PROTOTYPES(Tabdims) ADD_PACK_PROTOTYPES(Tabdims)
ADD_PACK_PROTOTYPES(TableColumn) ADD_PACK_PROTOTYPES(TableColumn)

View File

@@ -110,6 +110,7 @@
#include <opm/parser/eclipse/EclipseState/Tables/TableSchema.hpp> #include <opm/parser/eclipse/EclipseState/Tables/TableSchema.hpp>
#include <opm/output/eclipse/RestartValue.hpp> #include <opm/output/eclipse/RestartValue.hpp>
#include <opm/simulators/utils/ParallelRestart.hpp> #include <opm/simulators/utils/ParallelRestart.hpp>
#include <ebos/eclmpiserializer.hh>
namespace { namespace {
@@ -543,6 +544,20 @@ std::tuple<T,int,int> PackUnpack(const T& in)
return std::make_tuple(out, pos1, pos2); return std::make_tuple(out, pos1, pos2);
} }
template<class T>
std::tuple<T,int,int> PackUnpack2(T& in)
{
auto comm = Dune::MPIHelper::getCollectiveCommunication();
Opm::EclMpiSerializer ser(comm);
ser.pack(in);
size_t pos1 = ser.position();
T out;
ser.unpack(out);
size_t pos2 = ser.position();
return std::make_tuple(out, pos1, pos2);
}
#define DO_CHECKS(TYPE_NAME) \ #define DO_CHECKS(TYPE_NAME) \
BOOST_CHECK_MESSAGE(std::get<1>(val2) == std::get<2>(val2), "Packed size differ from unpack size for " #TYPE_NAME); \ BOOST_CHECK_MESSAGE(std::get<1>(val2) == std::get<2>(val2), "Packed size differ from unpack size for " #TYPE_NAME); \
@@ -2189,7 +2204,7 @@ BOOST_AUTO_TEST_CASE(SummaryConfig)
.isUserDefined(true); .isUserDefined(true);
Opm::SummaryConfig val1({node}, {"test1", "test2"}, {"test3", "test4"}); Opm::SummaryConfig val1({node}, {"test1", "test2"}, {"test3", "test4"});
auto val2 = PackUnpack(val1); auto val2 = PackUnpack2(val1);
DO_CHECKS(SummaryConfig) DO_CHECKS(SummaryConfig)
#endif #endif
} }