consistently use ParallelCommunication.hpp for communication definition

This commit is contained in:
Arne Morten Kvarving
2022-08-29 14:03:52 +02:00
parent a322a3062f
commit 2c0ff6f81e
8 changed files with 55 additions and 72 deletions

View File

@@ -142,7 +142,7 @@ private:
std::shared_ptr<ParallelIndexSet> indexSet_;
std::shared_ptr<RemoteIndices> remoteIndices_;
Dune::CollectiveCommunication<MPI_Comm> communicator_;
Parallel::Communication communicator_;
mutable std::vector<double> ownerMask_;
};

View File

@@ -46,7 +46,8 @@ struct CommPolicy<double*>
namespace Opm
{
GlobalPerfContainerFactory::GlobalPerfContainerFactory(const IndexSet& local_indices, const Communication comm,
GlobalPerfContainerFactory::GlobalPerfContainerFactory(const IndexSet& local_indices,
const Parallel::Communication comm,
const int num_local_perfs)
: local_indices_(local_indices), comm_(comm)
{
@@ -182,7 +183,7 @@ int GlobalPerfContainerFactory::numGlobalPerfs() const
}
CommunicateAboveBelow::CommunicateAboveBelow([[maybe_unused]] const Communication& comm)
CommunicateAboveBelow::CommunicateAboveBelow([[maybe_unused]] const Parallel::Communication& comm)
#if HAVE_MPI
: comm_(comm), interface_(comm_)
#endif
@@ -319,7 +320,7 @@ void CommunicateAboveBelow::pushBackEclIndex([[maybe_unused]] int above,
}
void ParallelWellInfo::DestroyComm::operator()(Communication* comm)
void ParallelWellInfo::DestroyComm::operator()(Parallel::Communication* comm)
{
#if HAVE_MPI
// Only delete custom communicators.
@@ -353,13 +354,13 @@ ParallelWellInfo::ParallelWellInfo(const std::string& name,
bool hasLocalCells)
: name_(name), hasLocalCells_ (hasLocalCells),
isOwner_(true), rankWithFirstPerf_(-1),
comm_(new Communication(Dune::MPIHelper::getLocalCommunicator())),
comm_(new Parallel::Communication(Dune::MPIHelper::getLocalCommunicator())),
commAboveBelow_(new CommunicateAboveBelow(*comm_))
{}
ParallelWellInfo::ParallelWellInfo(const std::pair<std::string, bool>& well_info,
[[maybe_unused]] Communication allComm)
[[maybe_unused]] Parallel::Communication allComm)
: name_(well_info.first), hasLocalCells_(well_info.second),
rankWithFirstPerf_(-1)
{
@@ -367,9 +368,9 @@ ParallelWellInfo::ParallelWellInfo(const std::pair<std::string, bool>& well_info
MPI_Comm newComm;
int color = hasLocalCells_ ? 1 : MPI_UNDEFINED;
MPI_Comm_split(allComm, color, allComm.rank(), &newComm);
comm_.reset(new Communication(newComm));
comm_.reset(new Parallel::Communication(newComm));
#else
comm_.reset(new Communication(Dune::MPIHelper::getLocalCommunicator()));
comm_.reset(new Parallel::Communication(Dune::MPIHelper::getLocalCommunicator()));
#endif
commAboveBelow_.reset(new CommunicateAboveBelow(*comm_));
isOwner_ = (comm_->rank() == 0);

View File

@@ -51,19 +51,13 @@ public:
ownerAbove = 3,
overlapAbove = 4
};
using MPIComm = typename Dune::MPIHelper::MPICommunicator;
#if DUNE_VERSION_NEWER(DUNE_COMMON, 2, 7)
using Communication = Dune::Communication<MPIComm>;
#else
using Communication = Dune::CollectiveCommunication<MPIComm>;
#endif
using LocalIndex = Dune::ParallelLocalIndex<Attribute>;
using IndexSet = Dune::ParallelIndexSet<int,LocalIndex,50>;
#if HAVE_MPI
using RI = Dune::RemoteIndices<IndexSet>;
#endif
explicit CommunicateAboveBelow(const Communication& comm);
explicit CommunicateAboveBelow(const Parallel::Communication& comm);
/// \brief Adds information about original index of the perforations in ECL Schedule.
///
/// \warning These indices need to be pushed in the same order as they
@@ -175,8 +169,9 @@ public:
const IndexSet& getIndexSet() const;
int numLocalPerfs() const;
private:
Communication comm_;
Parallel::Communication comm_;
/// \brief Mapping of the local well index to ecl index
IndexSet current_indices_;
#if HAVE_MPI
@@ -198,19 +193,14 @@ private:
class GlobalPerfContainerFactory
{
public:
using MPIComm = typename Dune::MPIHelper::MPICommunicator;
#if DUNE_VERSION_NEWER(DUNE_COMMON, 2, 7)
using Communication = Dune::Communication<MPIComm>;
#else
using Communication = Dune::CollectiveCommunication<MPIComm>;
#endif
using IndexSet = CommunicateAboveBelow::IndexSet;
using Attribute = CommunicateAboveBelow::Attribute;
using GlobalIndex = typename IndexSet::IndexPair::GlobalIndex;
/// \brief Constructor
/// \param local_indices completely set up index set for map ecl index to local index
GlobalPerfContainerFactory(const IndexSet& local_indices, const Communication comm,
GlobalPerfContainerFactory(const IndexSet& local_indices,
const Parallel::Communication comm,
int num_local_perfs);
/// \brief Creates a container that holds values for all perforations
@@ -229,9 +219,10 @@ public:
std::size_t num_components) const;
int numGlobalPerfs() const;
private:
const IndexSet& local_indices_;
Communication comm_;
Parallel::Communication comm_;
int num_global_perfs_;
/// \brief sizes for allgatherv
std::vector<int> sizes_;
@@ -251,13 +242,6 @@ private:
class ParallelWellInfo
{
public:
using MPIComm = typename Dune::MPIHelper::MPICommunicator;
#if DUNE_VERSION_NEWER(DUNE_COMMON, 2, 7)
using Communication = Dune::Communication<MPIComm>;
#else
using Communication = Dune::CollectiveCommunication<MPIComm>;
#endif
static constexpr int INVALID_ECL_INDEX = -1;
/// \brief Constructs object using MPI_COMM_SELF
@@ -271,9 +255,9 @@ public:
/// \param allComm The communication object with all MPI ranks active in the simulation.
/// Default is the one with all ranks available.
ParallelWellInfo(const std::pair<std::string,bool>& well_info,
Communication allComm);
Parallel::Communication allComm);
const Communication& communication() const
const Parallel::Communication& communication() const
{
return *comm_;
}
@@ -399,12 +383,13 @@ public:
/// it is stored. Container is ordered via ascendings index of the perforations
/// in the ECL schedule.
const GlobalPerfContainerFactory& getGlobalPerfContainerFactory() const;
private:
/// \brief Deleter that also frees custom MPI communicators
struct DestroyComm
{
void operator()(Communication* comm);
void operator()(Parallel::Communication* comm);
};
@@ -419,7 +404,7 @@ private:
/// \brief Communication object for the well
///
/// Contains only ranks where this well will perforate local cells.
std::unique_ptr<Communication, DestroyComm> comm_;
std::unique_ptr<Parallel::Communication, DestroyComm> comm_;
/// \brief used to communicate the values for the perforation above.
std::unique_ptr<CommunicateAboveBelow> commAboveBelow_;
@@ -443,6 +428,7 @@ public:
void connectionFound(std::size_t index);
bool checkAllConnectionsFound();
private:
std::vector<std::size_t> foundConnections_;
const Well& well_;

View File

@@ -26,6 +26,8 @@
#include <opm/input/eclipse/Schedule/Schedule.hpp>
#include <opm/simulators/wells/ParallelWellInfo.hpp>
#include <opm/simulators/utils/ParallelCommunication.hpp>
#include <algorithm>
#include <cassert>
#include <numeric>
@@ -825,8 +827,8 @@ WellState::parallelWellInfo(std::size_t well_index) const
return ws.parallel_info;
}
template void WellState::updateGlobalIsGrup<ParallelWellInfo::Communication>(const ParallelWellInfo::Communication& comm);
template void WellState::communicateGroupRates<ParallelWellInfo::Communication>(const ParallelWellInfo::Communication& comm);
template void WellState::updateGlobalIsGrup<Parallel::Communication>(const Parallel::Communication& comm);
template void WellState::communicateGroupRates<Parallel::Communication>(const Parallel::Communication& comm);
} // namespace Opm