From 68225309a17402f844916b76e096fa365cad607c Mon Sep 17 00:00:00 2001
From: Arne Morten Kvarving
Date: Wed, 4 Dec 2019 13:34:25 +0100
Subject: [PATCH 1/5] add helper templates to pack/send and receive/unpack

---
 opm/simulators/utils/ParallelRestart.hpp | 25 ++++++++++++++++++++++++
 1 file changed, 25 insertions(+)

diff --git a/opm/simulators/utils/ParallelRestart.hpp b/opm/simulators/utils/ParallelRestart.hpp
index a7f67269d..8a4c53263 100644
--- a/opm/simulators/utils/ParallelRestart.hpp
+++ b/opm/simulators/utils/ParallelRestart.hpp
@@ -736,6 +736,31 @@ ADD_PACK_PROTOTYPES(WellTracerProperties)
 ADD_PACK_PROTOTYPES(WList)
 ADD_PACK_PROTOTYPES(WListManager)

+template<class T>
+const T& packAndSend(const T& in, const auto& comm)
+{
+    if (comm.size() == 0)
+        return in;
+
+    std::size_t size = packSize(in, comm);
+    std::vector<char> buffer(size);
+    int pos = 0;
+    Mpi::pack(in, buffer, pos, comm);
+    comm.broadcast(&pos, 1, 0);
+    comm.broadcast(buffer.data(), pos, 0);
+    return in;
+}
+
+template<class T>
+void receiveAndUnpack(T& result, const auto& comm)
+{
+    int size;
+    comm.broadcast(&size, 1, 0);
+    std::vector<char> buffer(size);
+    comm.broadcast(buffer.data(), size, 0);
+    int pos = 0;
+    unpack(result, buffer, pos, comm);
+}
 } // end namespace Mpi

 RestartValue loadParallelRestart(const EclipseIO* eclIO, SummaryState& summaryState,
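For orientation: the two helpers above are meant to be called pairwise, packAndSend on the rank that owns the object and receiveAndUnpack on every other rank; the actual wiring into flow.cpp follows in patch 5 below. A minimal sketch of that calling convention (the wrapper name broadcastFromRoot is made up here, and comm is assumed to be the Dune collective communication object used elsewhere in the series):

    // Illustrative sketch only, not part of the patch.
    template<class T>
    void broadcastFromRoot(T& value, const auto& comm)
    {
        if (comm.rank() == 0)
            Opm::Mpi::packAndSend(value, comm);      // pack, broadcast byte count, broadcast payload
        else
            Opm::Mpi::receiveAndUnpack(value, comm); // receive byte count, receive payload, unpack
    }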
From 725af1442eb22482afc473c69bf31ec1567b726e Mon Sep 17 00:00:00 2001
From: Arne Morten Kvarving
Date: Mon, 6 Jan 2020 15:00:05 +0100
Subject: [PATCH 2/5] add mpi serialization for SummaryNode

---
 opm/simulators/utils/ParallelRestart.cpp | 52 ++++++++++++++++++++++++
 opm/simulators/utils/ParallelRestart.hpp |  2 +
 tests/test_ParallelRestart.cpp           | 18 ++++++++
 3 files changed, 72 insertions(+)

diff --git a/opm/simulators/utils/ParallelRestart.cpp b/opm/simulators/utils/ParallelRestart.cpp
index 1c2a25ecb..d33e035b2 100644
--- a/opm/simulators/utils/ParallelRestart.cpp
+++ b/opm/simulators/utils/ParallelRestart.cpp
@@ -63,6 +63,7 @@
 #include
 #include
 #include
+#include
 #include
 #include
 #include
@@ -1663,6 +1664,18 @@ std::size_t packSize(const Schedule& data,
            packSize(data.getWellGroupEvents(), comm);
 }

+std::size_t packSize(const SummaryNode& data,
+                     Dune::MPIHelper::MPICommunicator comm)
+{
+    return packSize(data.keyword(), comm) +
+           packSize(data.category(), comm) +
+           packSize(data.location(), comm) +
+           packSize(data.type(), comm) +
+           packSize(data.namedEntity(), comm) +
+           packSize(data.number(), comm) +
+           packSize(data.isUserDefined(), comm);
+}
+
 ////// pack routines

 template<class T>
@@ -3347,6 +3360,19 @@ void pack(const Schedule& data,
     pack(data.getWellGroupEvents(), buffer, position, comm);
 }

+void pack(const SummaryNode& data,
+          std::vector<char>& buffer, int& position,
+          Dune::MPIHelper::MPICommunicator comm)
+{
+    pack(data.keyword(), buffer, position, comm);
+    pack(data.category(), buffer, position, comm);
+    pack(data.location(), buffer, position, comm) ;
+    pack(data.type(), buffer, position, comm);
+    pack(data.namedEntity(), buffer, position, comm);
+    pack(data.number(), buffer, position, comm);
+    pack(data.isUserDefined(), buffer, position, comm);
+}
+
 /// unpack routines

 template<class T>
@@ -5763,6 +5789,32 @@ void unpack(Schedule& data, std::vector<char>& buffer, int& position,
            rftConfig, nupCol, wellGroupEvents);
 }

+void unpack(SummaryNode& data,
+            std::vector<char>& buffer, int& position,
+            Dune::MPIHelper::MPICommunicator comm)
+{
+    std::string keyword;
+    SummaryNode::Category category;
+    Location location;
+    SummaryNode::Type type;
+    std::string namedEntity;
+    int number;
+    bool isUserDefined;
+
+    unpack(keyword, buffer, position, comm);
+    unpack(category, buffer, position, comm);
+    unpack(location, buffer, position, comm) ;
+    unpack(type, buffer, position, comm);
+    unpack(namedEntity, buffer, position, comm);
+    unpack(number, buffer, position, comm);
+    unpack(isUserDefined, buffer, position, comm);
+    data = SummaryNode{keyword, category, location}
+           .parameterType(type)
+           .namedEntity(namedEntity)
+           .number(number)
+           .isUserDefined(isUserDefined);
+}
+
 #define INSTANTIATE_PACK_VECTOR(T) \
 template std::size_t packSize(const std::vector<T>& data, \
                               Dune::MPIHelper::MPICommunicator comm); \
diff --git a/opm/simulators/utils/ParallelRestart.hpp b/opm/simulators/utils/ParallelRestart.hpp
index 8a4c53263..712fcc793 100644
--- a/opm/simulators/utils/ParallelRestart.hpp
+++ b/opm/simulators/utils/ParallelRestart.hpp
@@ -125,6 +125,7 @@ class SimpleTable;
 class SkprpolyTable;
 class SkprwatTable;
 class SpiralICD;
+class SummaryNode;
 class Tabdims;
 class TableColumn;
 class TableContainer;
@@ -689,6 +690,7 @@ ADD_PACK_PROTOTYPES(SkprpolyTable)
 ADD_PACK_PROTOTYPES(SkprwatTable)
 ADD_PACK_PROTOTYPES(SpiralICD)
 ADD_PACK_PROTOTYPES(std::string)
+ADD_PACK_PROTOTYPES(SummaryNode)
 ADD_PACK_PROTOTYPES(Tabdims)
 ADD_PACK_PROTOTYPES(TableColumn)
 ADD_PACK_PROTOTYPES(TableContainer)
diff --git a/tests/test_ParallelRestart.cpp b/tests/test_ParallelRestart.cpp
index 61f2c3c61..7b42b9328 100644
--- a/tests/test_ParallelRestart.cpp
+++ b/tests/test_ParallelRestart.cpp
@@ -74,6 +74,7 @@
 #include
 #include
 #include
+#include
 #include
 #include
 #include
@@ -2340,6 +2341,23 @@ BOOST_AUTO_TEST_CASE(Schedule)
 }


+BOOST_AUTO_TEST_CASE(SummaryNode)
+{
+#ifdef HAVE_MPI
+    auto val1 = Opm::SummaryNode{"test1", Opm::SummaryNode::Category::Region,
+                                 Opm::Location{"test2", 1}}
+                .parameterType(Opm::SummaryNode::Type::Pressure)
+                .namedEntity("test3")
+                .number(2)
+                .isUserDefined(true);
+
+    auto val2 = PackUnpack(val1);
+    BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
+    BOOST_CHECK(val1 == std::get<0>(val2));
+#endif
+}
+
+
 bool init_unit_test_func()
 {
     return true;
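A note on the test: PackUnpack is a helper defined earlier in test_ParallelRestart.cpp and is not shown in these patches. Judging from the std::get<0>/<1>/<2> checks, it appears to return the round-tripped object together with the byte count reached while packing and the byte count consumed while unpacking. A hypothetical sketch of such a helper, purely as an assumption about its shape:

    // Hypothetical sketch of the PackUnpack test helper (the real one lives in
    // test_ParallelRestart.cpp and may differ): serialize `in`, deserialize it
    // again, and report both position counters so the test can compare them.
    template<class T>
    std::tuple<T, int, int> PackUnpack(const T& in)
    {
        auto comm = Dune::MPIHelper::getCommunicator();
        std::size_t size = Opm::Mpi::packSize(in, comm);   // predicted byte count
        std::vector<char> buffer(size);
        int pos1 = 0;
        Opm::Mpi::pack(in, buffer, pos1, comm);             // bytes actually written
        int pos2 = 0;
        T result;
        Opm::Mpi::unpack(result, buffer, pos2, comm);       // bytes consumed
        return std::make_tuple(result, pos1, pos2);
    }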
From b596ac6e6e9cedbb1ebc27076d1876ca432c5561 Mon Sep 17 00:00:00 2001
From: Arne Morten Kvarving
Date: Mon, 6 Jan 2020 15:26:41 +0100
Subject: [PATCH 3/5] add mpi serialization for std::set

---
 opm/simulators/utils/ParallelRestart.cpp | 41 ++++++++++++++++++++++++
 opm/simulators/utils/ParallelRestart.hpp | 15 +++++++++
 2 files changed, 56 insertions(+)

diff --git a/opm/simulators/utils/ParallelRestart.cpp b/opm/simulators/utils/ParallelRestart.cpp
index d33e035b2..8f08528a4 100644
--- a/opm/simulators/utils/ParallelRestart.cpp
+++ b/opm/simulators/utils/ParallelRestart.cpp
@@ -222,6 +222,18 @@ std::size_t packSize(const std::unordered_set<T,H,KE,A>& data,
     return totalSize;
 }

+template<class K, class C, class A>
+std::size_t packSize(const std::set<K,C,A>& data,
+                     Dune::MPIHelper::MPICommunicator comm)
+{
+    std::size_t totalSize = packSize(data.size(), comm);
+    for (const auto& entry : data)
+    {
+        totalSize += packSize(entry, comm);
+    }
+    return totalSize;
+}
+
 template<class T>
 std::size_t packSize(const OrderedMap<T>& data, Dune::MPIHelper::MPICommunicator comm)
 {
@@ -1765,6 +1777,19 @@ void pack(const std::vector<T,A>& data, std::vector<char>& buffer, int& positio
     pack(entry, buffer, position, comm);
 }

+template<class K, class C, class A>
+void pack(const std::set<K,C,A>& data,
+          std::vector<char>& buffer, int& position,
+          Dune::MPIHelper::MPICommunicator comm)
+{
+    pack(data.size(), buffer, position, comm);
+
+    for (const auto& entry : data)
+    {
+        pack(entry, buffer, position, comm);
+    }
+}
+
 template<class T, class H, class KE, class A>
 void pack(const std::unordered_set<T,H,KE,A>& data,
           std::vector<char>& buffer, int& position,
@@ -3499,6 +3524,22 @@ void unpack(std::tuple<Ts...>& data, std::vector<char>& buffer,
     unpack_tuple_entry(data, buffer, position, comm);
 }

+template<class K, class C, class A>
+void unpack(std::set<K,C,A>& data,
+            std::vector<char>& buffer, int& position,
+            Dune::MPIHelper::MPICommunicator comm)
+{
+    std::size_t size = 0;
+    unpack(size, buffer, position, comm);
+
+    for (;size>0; size--)
+    {
+        K entry;
+        unpack(entry, buffer, position, comm);
+        data.insert(entry);
+    }
+}
+
 template<class T, class H, class KE, class A>
 void unpack(std::unordered_set<T,H,KE,A>& data,
             std::vector<char>& buffer, int& position,
diff --git a/opm/simulators/utils/ParallelRestart.hpp b/opm/simulators/utils/ParallelRestart.hpp
index 712fcc793..a0cabbe30 100644
--- a/opm/simulators/utils/ParallelRestart.hpp
+++ b/opm/simulators/utils/ParallelRestart.hpp
@@ -54,6 +54,7 @@
 #include

+#include
 #include
 #include
 #include
@@ -190,6 +191,10 @@ std::size_t packSize(const std::pair<T1,T2>& data, Dune::MPIHelper::MPICommunica
 template<class T, class A>
 std::size_t packSize(const std::vector<T,A>& data, Dune::MPIHelper::MPICommunicator comm);

+template<class K, class C, class A>
+std::size_t packSize(const std::set<K,C,A>& data,
+                     Dune::MPIHelper::MPICommunicator comm);
+
 template<class T, class H, class KE, class A>
 std::size_t packSize(const std::unordered_set<T,H,KE,A>& data,
                      Dune::MPIHelper::MPICommunicator comm);
@@ -331,6 +336,11 @@ template<class... Ts>
 void pack(const std::tuple<Ts...>& data, std::vector<char>& buffer,
           int& position, Dune::MPIHelper::MPICommunicator comm);

+template<class K, class C, class A>
+void pack(const std::set<K,C,A>& data,
+          std::vector<char>& buffer, int& position,
+          Dune::MPIHelper::MPICommunicator comm);
+
 template<class T, class H, class KE, class A>
 void pack(const std::unordered_set<T,H,KE,A>& data,
           std::vector<char>& buffer, int& position,
@@ -489,6 +499,11 @@ template<class... Ts>
 void unpack(std::tuple<Ts...>& data, std::vector<char>& buffer,
             int& position, Dune::MPIHelper::MPICommunicator comm);

+template<class K, class C, class A>
+void unpack(std::set<K,C,A>& data,
+            std::vector<char>& buffer, int& position,
+            Dune::MPIHelper::MPICommunicator comm);
+
 template<class T, class H, class KE, class A>
 void unpack(std::unordered_set<T,H,KE,A>& data,
             std::vector<char>& buffer, int& position,

From fa5f026fe47fd40f92d22eeda147c888e19a8e8c Mon Sep 17 00:00:00 2001
From: Arne Morten Kvarving
Date: Mon, 6 Jan 2020 15:27:37 +0100
Subject: [PATCH 4/5] add mpi serialization for SummaryConfig

---
 opm/simulators/utils/ParallelRestart.cpp | 30 ++++++++++++++++++++++++
 opm/simulators/utils/ParallelRestart.hpp |  2 ++
 tests/test_ParallelRestart.cpp           | 18 ++++++++++++++
 3 files changed, 50 insertions(+)

diff --git a/opm/simulators/utils/ParallelRestart.cpp b/opm/simulators/utils/ParallelRestart.cpp
index 8f08528a4..daa63d6b8 100644
--- a/opm/simulators/utils/ParallelRestart.cpp
+++ b/opm/simulators/utils/ParallelRestart.cpp
@@ -1688,6 +1688,14 @@ std::size_t packSize(const SummaryNode& data,
            packSize(data.isUserDefined(), comm);
 }

+std::size_t packSize(const SummaryConfig& data,
+                     Dune::MPIHelper::MPICommunicator comm)
+{
+    return packSize(data.getKwds(), comm) +
+           packSize(data.getShortKwds(), comm) +
+           packSize(data.getSmryKwds(), comm);
+}
+
 ////// pack routines

 template<class T>
@@ -3398,6 +3406,15 @@ void pack(const SummaryNode& data,
     pack(data.isUserDefined(), buffer, position, comm);
 }

+void pack(const SummaryConfig& data,
+          std::vector<char>& buffer, int& position,
+          Dune::MPIHelper::MPICommunicator comm)
+{
+    pack(data.getKwds(), buffer, position, comm);
+    pack(data.getShortKwds(), buffer, position, comm);
+    pack(data.getSmryKwds(), buffer, position, comm);
+}
+
 /// unpack routines

 template<class T>
@@ -5856,6 +5873,19 @@ void unpack(SummaryNode& data,
            .isUserDefined(isUserDefined);
 }

+void unpack(SummaryConfig& data,
+            std::vector<char>& buffer, int& position,
+            Dune::MPIHelper::MPICommunicator comm)
+{
+    SummaryConfig::keyword_list kwds;
+    std::set<std::string> shortKwds, smryKwds;
+
+    unpack(kwds, buffer, position, comm);
+    unpack(shortKwds, buffer, position, comm);
+    unpack(smryKwds, buffer, position, comm);
+    data = SummaryConfig(kwds, shortKwds, smryKwds);
+}
+
 #define INSTANTIATE_PACK_VECTOR(T) \
 template std::size_t packSize(const std::vector<T>& data, \
                               Dune::MPIHelper::MPICommunicator comm); \
diff --git a/opm/simulators/utils/ParallelRestart.hpp b/opm/simulators/utils/ParallelRestart.hpp
index a0cabbe30..29e496e87 100644
--- a/opm/simulators/utils/ParallelRestart.hpp
+++ b/opm/simulators/utils/ParallelRestart.hpp
@@ -126,6 +126,7 @@ class SimpleTable;
 class SkprpolyTable;
 class SkprwatTable;
 class SpiralICD;
+class SummaryConfig;
 class SummaryNode;
 class Tabdims;
 class TableColumn;
@@ -705,6 +706,7 @@ ADD_PACK_PROTOTYPES(SkprpolyTable)
 ADD_PACK_PROTOTYPES(SkprwatTable)
 ADD_PACK_PROTOTYPES(SpiralICD)
 ADD_PACK_PROTOTYPES(std::string)
+ADD_PACK_PROTOTYPES(SummaryConfig)
 ADD_PACK_PROTOTYPES(SummaryNode)
 ADD_PACK_PROTOTYPES(Tabdims)
 ADD_PACK_PROTOTYPES(TableColumn)
diff --git a/tests/test_ParallelRestart.cpp b/tests/test_ParallelRestart.cpp
index 7b42b9328..6930efe59 100644
--- a/tests/test_ParallelRestart.cpp
+++ b/tests/test_ParallelRestart.cpp
@@ -2358,6 +2358,24 @@ BOOST_AUTO_TEST_CASE(SummaryNode)
 }


+BOOST_AUTO_TEST_CASE(SummaryConfig)
+{
+#ifdef HAVE_MPI
+    auto node = Opm::SummaryNode{"test1", Opm::SummaryNode::Category::Region,
+                                 Opm::Location{"test2", 1}}
+               .parameterType(Opm::SummaryNode::Type::Pressure)
+               .namedEntity("test3")
+               .number(2)
+               .isUserDefined(true);
+    Opm::SummaryConfig val1({node}, {"test1", "test2"}, {"test3", "test4"});
+
+    auto val2 = PackUnpack(val1);
+    BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
+    BOOST_CHECK(val1 == std::get<0>(val2));
+#endif
+}
+
+
 bool init_unit_test_func()
 {
     return true;
From 227ff00730079325ff1dd39a6e9f7f079f8b21e4 Mon Sep 17 00:00:00 2001
From: Arne Morten Kvarving
Date: Mon, 6 Jan 2020 10:25:59 +0100
Subject: [PATCH 5/5] avoid usage of deck on all processes setting up SummaryConfig

---
 flow/flow.cpp | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/flow/flow.cpp b/flow/flow.cpp
index 383b6c53b..3e8c199e7 100644
--- a/flow/flow.cpp
+++ b/flow/flow.cpp
@@ -347,10 +347,19 @@ int main(int argc, char** argv)
             eclipseState.reset( new Opm::EclipseState(*deck, parseContext, errorGuard ));
             schedule.reset(new Opm::Schedule(*deck, *eclipseState, parseContext, errorGuard));
-            summaryConfig.reset( new Opm::SummaryConfig(*deck, *schedule, eclipseState->getTableManager(), parseContext, errorGuard));
             if (mpiRank == 0) {
                 setupMessageLimiter(schedule->getMessageLimits(), "STDOUT_LOGGER");
+                summaryConfig.reset( new Opm::SummaryConfig(*deck, *schedule, eclipseState->getTableManager(), parseContext, errorGuard));
+#ifdef HAVE_MPI
+                Opm::Mpi::packAndSend(*summaryConfig, mpiHelper.getCollectiveCommunication());
+#endif
             }
+#ifdef HAVE_MPI
+            else {
+                summaryConfig.reset(new Opm::SummaryConfig);
+                Opm::Mpi::receiveAndUnpack(*summaryConfig, mpiHelper.getCollectiveCommunication());
+            }
+#endif

             Opm::checkConsistentArrayDimensions(*eclipseState, *schedule, parseContext, errorGuard);
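Stepping back from the individual diffs: what makes the rank-0-only construction in flow.cpp work is that every rank issues the same two broadcasts in the same order, first the byte count and then the packed payload, which is exactly what packAndSend and receiveAndUnpack do on their respective sides. A self-contained toy in plain MPI (all names and the payload string are made up for illustration) showing that handshake:

    // Toy illustration of the size-then-payload broadcast protocol.
    #include <algorithm>
    #include <string>
    #include <vector>
    #include <mpi.h>

    int main(int argc, char** argv)
    {
        MPI_Init(&argc, &argv);
        int rank = 0;
        MPI_Comm_rank(MPI_COMM_WORLD, &rank);

        std::string payload;            // stands in for a packed SummaryConfig
        int size = 0;
        if (rank == 0) {
            payload = "packed object bytes";
            size = static_cast<int>(payload.size());
        }

        // Step 1: every rank learns how many bytes follow.
        MPI_Bcast(&size, 1, MPI_INT, 0, MPI_COMM_WORLD);

        // Step 2: every rank receives the payload itself.
        std::vector<char> buffer(size);
        if (rank == 0)
            std::copy(payload.begin(), payload.end(), buffer.begin());
        MPI_Bcast(buffer.data(), size, MPI_CHAR, 0, MPI_COMM_WORLD);

        // A non-root rank would now unpack `buffer` into its local object.
        MPI_Finalize();
        return 0;
    }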