changed: move ebos/eclmpiserializer.hh to opm/simulators/utils/MPISerializer.hpp

It does not use the TypeTag system, so it does not need to live under ebos/.
Arne Morten Kvarving 2024-01-23 10:51:32 +01:00
parent 7c37470bd8
commit 98c704c1c3
10 changed files with 24 additions and 23 deletions
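For downstream code the change is mechanical: only the include path, the namespace, and the class name differ, while the serializer interface inherited from Serializer<Mpi::Packer> is untouched. A minimal before/after sketch of a typical call site follows; the helper name is illustrative, and it assumes std::vector<double> is an accepted payload (the well-model hunk below broadcasts similar rate vectors).

// Old spelling, removed by this commit:
//   #include <ebos/eclmpiserializer.hh>
//   Opm::EclMpiSerializer ser(comm);
//
// New spelling:
#include <opm/simulators/utils/MPISerializer.hpp>

#include <vector>

// Single-argument broadcast, the same pattern as ser.broadcast(nnc_input)
// in the CpGrid vanguard hunk below.
void broadcastValues(Opm::Parallel::Communication comm, std::vector<double>& values)
{
    Opm::Parallel::MpiSerializer ser(comm);
    ser.broadcast(values);
}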

View File

@@ -241,6 +241,8 @@ if(MPI_FOUND)
 opm/simulators/utils/ParallelNLDDPartitioningZoltan.cpp
 opm/simulators/utils/ParallelSerialization.cpp
 opm/simulators/utils/SetupZoltanParams.cpp)
+list(APPEND PUBLIC_HEADER_FILES opm/simulators/utils/MPIPacker.hpp
+opm/simulators/utils/MPISerializer.hpp)
 endif()
 if(HDF5_FOUND)
 list(APPEND MAIN_SOURCE_FILES opm/simulators/utils/HDF5File.cpp)
@@ -416,7 +418,6 @@ list (APPEND PUBLIC_HEADER_FILES
 ebos/eclgenericwriter.hh
 ebos/eclgenericwriter_impl.hh
 ebos/eclmixingratecontrols.hh
-ebos/eclmpiserializer.hh
 ebos/eclnewtonmethod.hh
 ebos/ecloutputblackoilmodule.hh
 ebos/eclpolyhedralgridvanguard.hh

View File

@@ -26,7 +26,7 @@
 #include <ebos/eclgenericcpgridvanguard.hh>
 #if HAVE_MPI
-#include <ebos/eclmpiserializer.hh>
+#include <opm/simulators/utils/MPISerializer.hpp>
 #endif
 #include <opm/simulators/utils/ParallelEclipseState.hpp>
@@ -365,7 +365,7 @@ void EclGenericCpGridVanguard<ElementMapper,GridView,Scalar>::doCreateGrids_(Ecl
 // other processes
 if (has_numerical_aquifer && mpiSize > 1) {
 auto nnc_input = eclState.getInputNNC();
-EclMpiSerializer ser(grid_->comm());
+Parallel::MpiSerializer ser(grid_->comm());
 ser.broadcast(nnc_input);
 if (mpiRank > 0) {
 eclState.setInputNNC(nnc_input);

View File

@@ -45,7 +45,7 @@
 #include <opm/output/eclipse/Summary.hpp>
 #if HAVE_MPI
-#include <ebos/eclmpiserializer.hh>
+#include <opm/simulators/utils/MPISerializer.hpp>
 #endif
 #if HAVE_MPI
@@ -257,7 +257,7 @@ writeInit(const std::function<unsigned int(unsigned int)>& map)
 #if HAVE_MPI
 if (collectToIORank_.isParallel()) {
 const auto& comm = grid_.comm();
-Opm::EclMpiSerializer ser(comm);
+Opm::Parallel::MpiSerializer ser(comm);
 ser.broadcast(outputNnc_);
 }
 #endif
@@ -659,7 +659,7 @@ evalSummary(const int reportStepNum,
 #if HAVE_MPI
 if (collectToIORank_.isParallel()) {
-EclMpiSerializer ser(grid_.comm());
+Parallel::MpiSerializer ser(grid_.comm());
 ser.append(summaryState);
 }
 #endif

View File

@@ -18,19 +18,19 @@
 module for the precise wording of the license and the list of
 copyright holders.
 */
-#ifndef ECL_MPI_SERIALIZER_HH
-#define ECL_MPI_SERIALIZER_HH
+#ifndef MPI_SERIALIZER_HPP
+#define MPI_SERIALIZER_HPP
 #include <opm/common/utility/Serializer.hpp>
 #include <opm/simulators/utils/MPIPacker.hpp>
 #include <opm/simulators/utils/ParallelCommunication.hpp>
-namespace Opm {
+namespace Opm::Parallel {
 //! \brief Class for serializing and broadcasting data using MPI.
-class EclMpiSerializer : public Serializer<Mpi::Packer> {
+class MpiSerializer : public Serializer<Mpi::Packer> {
 public:
-EclMpiSerializer(Parallel::Communication comm)
+MpiSerializer(Parallel::Communication comm)
 : Serializer<Mpi::Packer>(m_packer)
 , m_packer(comm)
 , m_comm(comm)
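The declaration above only pairs an Mpi::Packer with a communicator; the broadcast, append, pack and position calls used throughout this commit come from the Serializer base class in opm-common. A hedged sketch of thin wrappers over the two calling patterns seen in the remaining hunks (the wrapper names are mine, not part of the commit):

#include <opm/simulators/utils/MPISerializer.hpp>

// Broadcast several objects from an explicit root rank, mirroring
// ser.broadcast(0, restartValues, summaryState) in loadParallelRestart().
template <class... Args>
void broadcastFrom(Opm::Parallel::Communication comm, int root, Args&... args)
{
    Opm::Parallel::MpiSerializer ser(comm);
    ser.broadcast(root, args...);
}

// Collect a serializable object onto the I/O rank, mirroring
// ser.append(summaryState) in evalSummary().
template <class T>
void appendOnIORank(Opm::Parallel::Communication comm, T& data)
{
    Opm::Parallel::MpiSerializer ser(comm);
    ser.append(data);
}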

View File

@@ -24,7 +24,7 @@
 #endif
 #if HAVE_MPI
-#include <ebos/eclmpiserializer.hh>
+#include <opm/simulators/utils/MPISerializer.hpp>
 #endif
 #include <opm/output/eclipse/EclipseIO.hpp>
@@ -51,7 +51,7 @@ RestartValue loadParallelRestart(const EclipseIO* eclIO,
 restartValues = eclIO->loadRestart(actionState, summaryState, solutionKeys, extraKeys);
 }
-EclMpiSerializer ser(comm);
+Parallel::MpiSerializer ser(comm);
 ser.broadcast(0, restartValues, summaryState);
 return restartValues;
 #else

View File

@@ -63,7 +63,7 @@
 #include <opm/input/eclipse/Schedule/Well/WVFPEXP.hpp>
-#include <ebos/eclmpiserializer.hh>
+#include <opm/simulators/utils/MPISerializer.hpp>
 #include <dune/common/parallel/mpihelper.hh>
@@ -75,14 +75,14 @@ void eclStateBroadcast(Parallel::Communication comm, EclipseState& eclState, Sch
 Action::State& actionState,
 WellTestState& wtestState)
 {
-Opm::EclMpiSerializer ser(comm);
+Opm::Parallel::MpiSerializer ser(comm);
 ser.broadcast(0, eclState, schedule, summaryConfig, udqState, actionState, wtestState);
 }
 template <class T>
 void eclBroadcast(Parallel::Communication comm, T& data)
 {
-Opm::EclMpiSerializer ser(comm);
+Opm::Parallel::MpiSerializer ser(comm);
 ser.broadcast(data);
 }

View File

@@ -73,7 +73,7 @@ public:
 m_distributed_fieldProps.copyTran(globalProps);
 }
-EclMpiSerializer ser(comm);
+Parallel::MpiSerializer ser(comm);
 ser.broadcast(*this);
 m_no_data = m_intKeys.size() + m_doubleKeys.size();

View File

@@ -37,7 +37,7 @@
 #include <opm/simulators/linalg/bda/WellContributions.hpp>
 #if HAVE_MPI
-#include <ebos/eclmpiserializer.hh>
+#include <opm/simulators/utils/MPISerializer.hpp>
 #endif
 #include <algorithm>
@@ -1411,7 +1411,7 @@ namespace Opm {
 group_alq_rates.resize(num_rates_to_sync);
 }
 #if HAVE_MPI
-EclMpiSerializer ser(comm);
+Parallel::MpiSerializer ser(comm);
 ser.broadcast(i, group_indexes, group_oil_rates,
 group_gas_rates, group_water_rates, group_alq_rates);
 #endif

View File

@@ -143,14 +143,14 @@
 #include <opm/input/eclipse/EclipseState/Tables/TableSchema.hpp>
 #include <opm/output/data/Aquifer.hpp>
 #include <opm/output/eclipse/RestartValue.hpp>
-#include <ebos/eclmpiserializer.hh>
+#include <opm/simulators/utils/MPISerializer.hpp>
 template<class T>
 std::tuple<T,int,int> PackUnpack(T& in)
 {
 const auto& comm = Dune::MPIHelper::getCommunication();
-Opm::EclMpiSerializer ser(comm);
+Opm::Parallel::MpiSerializer ser(comm);
 ser.pack(in);
 const size_t pos1 = ser.position();
 T out;

View File

@@ -26,7 +26,7 @@
 #include <boost/test/unit_test.hpp>
 #include <opm/simulators/utils/MPIPacker.hpp>
-#include <ebos/eclmpiserializer.hh>
+#include <opm/simulators/utils/MPISerializer.hpp>
 #include <dune/common/parallel/mpihelper.hh>
 #include <numeric>
@@ -71,7 +71,7 @@ BOOST_AUTO_TEST_CASE(BroadCast)
 double d1 = cc.rank() == 1 ? 7.0 : 0.0;
 size_t i1 = cc.rank() == 1 ? 8 : 0;
-Opm::EclMpiSerializer ser(cc);
+Opm::Parallel::MpiSerializer ser(cc);
 ser.broadcast(1, d, i, d1, i1);
 for (size_t c = 0; c < 3; ++c) {