Merge pull request #5115 from akva2/move_mpi_serializer

changed: move ebos/eclmpiserializer.hh to opm/simulators/utils/MPISerializer.hpp
Bård Skaflestad, 2024-01-23 12:53:21 +01:00 (committed by GitHub)
commit 42476bedb2
10 changed files with 24 additions and 23 deletions
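
In short: the header moves out of ebos/ into opm/simulators/utils/, and the class is renamed from Opm::EclMpiSerializer to Opm::Parallel::MpiSerializer. Call sites change mechanically:

    // before this commit
    #include <ebos/eclmpiserializer.hh>
    Opm::EclMpiSerializer ser(comm);

    // after this commit
    #include <opm/simulators/utils/MPISerializer.hpp>
    Opm::Parallel::MpiSerializer ser(comm);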


@@ -241,6 +241,8 @@ if(MPI_FOUND)
       opm/simulators/utils/ParallelNLDDPartitioningZoltan.cpp
       opm/simulators/utils/ParallelSerialization.cpp
       opm/simulators/utils/SetupZoltanParams.cpp)
+  list(APPEND PUBLIC_HEADER_FILES opm/simulators/utils/MPIPacker.hpp
+                                  opm/simulators/utils/MPISerializer.hpp)
 endif()
 if(HDF5_FOUND)
   list(APPEND MAIN_SOURCE_FILES opm/simulators/utils/HDF5File.cpp)

@@ -416,7 +418,6 @@ list (APPEND PUBLIC_HEADER_FILES
   ebos/eclgenericwriter.hh
   ebos/eclgenericwriter_impl.hh
   ebos/eclmixingratecontrols.hh
-  ebos/eclmpiserializer.hh
   ebos/eclnewtonmethod.hh
   ebos/ecloutputblackoilmodule.hh
   ebos/eclpolyhedralgridvanguard.hh


@@ -26,7 +26,7 @@
 #include <ebos/eclgenericcpgridvanguard.hh>
 
 #if HAVE_MPI
-#include <ebos/eclmpiserializer.hh>
+#include <opm/simulators/utils/MPISerializer.hpp>
 #endif
 
 #include <opm/simulators/utils/ParallelEclipseState.hpp>

@@ -365,7 +365,7 @@ void EclGenericCpGridVanguard<ElementMapper,GridView,Scalar>::doCreateGrids_(Ecl
         // other processes
         if (has_numerical_aquifer && mpiSize > 1) {
             auto nnc_input = eclState.getInputNNC();
-            EclMpiSerializer ser(grid_->comm());
+            Parallel::MpiSerializer ser(grid_->comm());
             ser.broadcast(nnc_input);
             if (mpiRank > 0) {
                 eclState.setInputNNC(nnc_input);


@@ -45,7 +45,7 @@
 #include <opm/output/eclipse/Summary.hpp>
 
 #if HAVE_MPI
-#include <ebos/eclmpiserializer.hh>
+#include <opm/simulators/utils/MPISerializer.hpp>
 #endif
 
 #if HAVE_MPI

@@ -257,7 +257,7 @@ writeInit(const std::function<unsigned int(unsigned int)>& map)
 #if HAVE_MPI
     if (collectToIORank_.isParallel()) {
         const auto& comm = grid_.comm();
-        Opm::EclMpiSerializer ser(comm);
+        Opm::Parallel::MpiSerializer ser(comm);
         ser.broadcast(outputNnc_);
     }
 #endif

@@ -659,7 +659,7 @@ evalSummary(const int reportStepNum,
 #if HAVE_MPI
     if (collectToIORank_.isParallel()) {
-        EclMpiSerializer ser(grid_.comm());
+        Parallel::MpiSerializer ser(grid_.comm());
         ser.append(summaryState);
     }
 #endif


@@ -18,19 +18,19 @@
   module for the precise wording of the license and the list of
   copyright holders.
 */
-#ifndef ECL_MPI_SERIALIZER_HH
-#define ECL_MPI_SERIALIZER_HH
+#ifndef MPI_SERIALIZER_HPP
+#define MPI_SERIALIZER_HPP
 
 #include <opm/common/utility/Serializer.hpp>
 #include <opm/simulators/utils/MPIPacker.hpp>
 #include <opm/simulators/utils/ParallelCommunication.hpp>
 
-namespace Opm {
+namespace Opm::Parallel {
 
 //! \brief Class for serializing and broadcasting data using MPI.
-class EclMpiSerializer : public Serializer<Mpi::Packer> {
+class MpiSerializer : public Serializer<Mpi::Packer> {
 public:
-    EclMpiSerializer(Parallel::Communication comm)
+    MpiSerializer(Parallel::Communication comm)
         : Serializer<Mpi::Packer>(m_packer)
         , m_packer(comm)
         , m_comm(comm)
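
Only the name, namespace, and include path change; the Serializer<Mpi::Packer> interface is untouched. A minimal usage sketch, mirroring the eclBroadcast helper that appears further down in this diff (the helper name broadcastFromRoot is hypothetical):

    #include <opm/simulators/utils/MPISerializer.hpp>
    #include <opm/simulators/utils/ParallelCommunication.hpp>

    // Broadcast any object understood by Opm::Serializer from rank 0
    // to every rank in comm; all ranks must call this collectively.
    template <class T>
    void broadcastFromRoot(Opm::Parallel::Communication comm, T& data)
    {
        Opm::Parallel::MpiSerializer ser(comm); // construct on every rank
        ser.broadcast(data);                    // serialize on root, deserialize elsewhere
    }

The overload taking a leading root rank, as in ser.broadcast(0, restartValues, summaryState) below or ser.broadcast(1, d, i, d1, i1) in the tests, sends several objects from that root in one call.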


@@ -24,7 +24,7 @@
 #endif
 
 #if HAVE_MPI
-#include <ebos/eclmpiserializer.hh>
+#include <opm/simulators/utils/MPISerializer.hpp>
 #endif
 
 #include <opm/output/eclipse/EclipseIO.hpp>

@@ -51,7 +51,7 @@ RestartValue loadParallelRestart(const EclipseIO* eclIO,
         restartValues = eclIO->loadRestart(actionState, summaryState, solutionKeys, extraKeys);
     }
 
-    EclMpiSerializer ser(comm);
+    Parallel::MpiSerializer ser(comm);
     ser.broadcast(0, restartValues, summaryState);
     return restartValues;
 #else


@@ -63,7 +63,7 @@
 #include <opm/input/eclipse/Schedule/Well/WVFPEXP.hpp>
 
-#include <ebos/eclmpiserializer.hh>
+#include <opm/simulators/utils/MPISerializer.hpp>
 
 #include <dune/common/parallel/mpihelper.hh>

@@ -75,14 +75,14 @@ void eclStateBroadcast(Parallel::Communication comm, EclipseState& eclState, Sch
                        Action::State& actionState,
                        WellTestState& wtestState)
 {
-    Opm::EclMpiSerializer ser(comm);
+    Opm::Parallel::MpiSerializer ser(comm);
     ser.broadcast(0, eclState, schedule, summaryConfig, udqState, actionState, wtestState);
 }
 
 template <class T>
 void eclBroadcast(Parallel::Communication comm, T& data)
 {
-    Opm::EclMpiSerializer ser(comm);
+    Opm::Parallel::MpiSerializer ser(comm);
     ser.broadcast(data);
 }


@@ -73,7 +73,7 @@ public:
             m_distributed_fieldProps.copyTran(globalProps);
         }
 
-        EclMpiSerializer ser(comm);
+        Parallel::MpiSerializer ser(comm);
         ser.broadcast(*this);
 
         m_no_data = m_intKeys.size() + m_doubleKeys.size();


@@ -37,7 +37,7 @@
 #include <opm/simulators/linalg/bda/WellContributions.hpp>
 
 #if HAVE_MPI
-#include <ebos/eclmpiserializer.hh>
+#include <opm/simulators/utils/MPISerializer.hpp>
 #endif
 
 #include <algorithm>

@@ -1411,7 +1411,7 @@ namespace Opm {
                 group_alq_rates.resize(num_rates_to_sync);
             }
 #if HAVE_MPI
-            EclMpiSerializer ser(comm);
+            Parallel::MpiSerializer ser(comm);
             ser.broadcast(i, group_indexes, group_oil_rates,
                           group_gas_rates, group_water_rates, group_alq_rates);
 #endif


@@ -143,14 +143,14 @@
 #include <opm/input/eclipse/EclipseState/Tables/TableSchema.hpp>
 #include <opm/output/data/Aquifer.hpp>
 #include <opm/output/eclipse/RestartValue.hpp>
-#include <ebos/eclmpiserializer.hh>
+#include <opm/simulators/utils/MPISerializer.hpp>
 
 template<class T>
 std::tuple<T,int,int> PackUnpack(T& in)
 {
     const auto& comm = Dune::MPIHelper::getCommunication();
 
-    Opm::EclMpiSerializer ser(comm);
+    Opm::Parallel::MpiSerializer ser(comm);
     ser.pack(in);
     const size_t pos1 = ser.position();
     T out;


@@ -26,7 +26,7 @@
 #include <boost/test/unit_test.hpp>
 
 #include <opm/simulators/utils/MPIPacker.hpp>
-#include <ebos/eclmpiserializer.hh>
+#include <opm/simulators/utils/MPISerializer.hpp>
 
 #include <dune/common/parallel/mpihelper.hh>
 
 #include <numeric>

@@ -71,7 +71,7 @@ BOOST_AUTO_TEST_CASE(BroadCast)
     double d1 = cc.rank() == 1 ? 7.0 : 0.0;
     size_t i1 = cc.rank() == 1 ? 8 : 0;
 
-    Opm::EclMpiSerializer ser(cc);
+    Opm::Parallel::MpiSerializer ser(cc);
     ser.broadcast(1, d, i, d1, i1);
 
     for (size_t c = 0; c < 3; ++c) {
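
Note the explicit root in ser.broadcast(1, d, i, d1, i1): the values are broadcast from rank 1, so after the call every rank should observe d1 == 7.0 and i1 == 8, which the subsequent checks verify.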