Utility to communicate the above value of a perforation of a distributed well

This adds a utility that creates a vector of all above values for
the local perforations. For distributed wells this is needed because the
perforation above might live on another processor. We use the parallel
index sets together with the global indices of the cells that are
perforated.
Markus Blatt 2020-11-27 18:24:35 +01:00
parent 73919d6136
commit 0126243f02
3 changed files with 386 additions and 3 deletions
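For orientation, a minimal sketch of the intended call sequence (the
communicator, indices, and values are invented for illustration and are
not part of the commit):

    Opm::CommunicateAbove commAbove(comm);
    commAbove.beginReset();
    commAbove.pushBackEclIndex(-1, 0); // first perforation: none above
    commAbove.pushBackEclIndex(0, 1);
    commAbove.pushBackEclIndex(1, 2);
    commAbove.endReset(); // builds the communication pattern
    std::vector<double> current = {1.0, 11.0, 21.0};
    // above[0] == 0.0 (nothing above the first perforation),
    // above[i] == current[i-1] once any remote values are gathered.
    auto above = commAbove.communicate(0.0, current.data(), current.size());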

opm/simulators/wells/ParallelWellInfo.cpp

@@ -20,8 +20,123 @@
#include <opm/simulators/wells/ParallelWellInfo.hpp>
#include <opm/common/ErrorMacros.hpp>
namespace Dune
{
#if HAVE_MPI
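// CommPolicy specialization that tells DUNE's BufferedCommunicator how
// to access a raw array of doubles: each parallel index communicates
// exactly one double (SizeOne), located at v + index.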
template<>
struct CommPolicy<double*>
{
using Type = double*;
using IndexedType = double;
using IndexedTypeFlag = Dune::SizeOne;
static const void* getAddress(const double*& v, int index)
{
return v + index;
}
static int getSize(const double*&, int)
{
return 1;
}
};
#endif
}
namespace Opm
{
CommunicateAbove::CommunicateAbove([[maybe_unused]] const Communication& comm)
#if HAVE_MPI
: comm_(comm), interface_(comm_)
#endif
{}
void CommunicateAbove::clear()
{
#if HAVE_MPI
above_indices_ = {};
current_indices_ = {};
interface_.free();
communicator_.free();
#endif
count_ = 0;
}
void CommunicateAbove::beginReset()
{
clear();
#if HAVE_MPI
if (comm_.size() > 1)
{
current_indices_.beginResize();
above_indices_.beginResize();
}
#endif
}
void CommunicateAbove::endReset()
{
#if HAVE_MPI
if (comm_.size() > 1)
{
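// Both index sets use the ECL perforation index as the global
// identifier: current_indices_ maps it for the perforation itself,
// above_indices_ for the perforation above. Matching the two sets
// tells each process where the above value of every local
// perforation lives, even if that is on another processor.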
above_indices_.endResize();
current_indices_.endResize();
remote_indices_.setIndexSets(current_indices_, above_indices_, comm_);
remote_indices_.setIncludeSelf(true);
remote_indices_.rebuild<true>();
using FromSet = Dune::EnumItem<Attribute,owner>;
using ToSet = Dune::AllSet<Attribute>;
interface_.build(remote_indices_, FromSet(), ToSet());
communicator_.build<double*>(interface_);
}
#endif
}
std::vector<double> CommunicateAbove::communicate(double first_above,
const double* current,
std::size_t size)
{
std::vector<double> above(size, first_above);
#if HAVE_MPI
if (comm_.size() > 1)
{
using Handle = Dune::OwnerOverlapCopyCommunication<int,int>::CopyGatherScatter<double*>;
auto aboveData = above.data();
// An ugly const_cast is needed: the compiler rejects passing a
// const double* where template<class Data> forward(const Data&, Data&)
// with Data = double* expects double* const& as the first argument.
communicator_.forward<Handle>(const_cast<double*>(current), aboveData);
}
else
#endif
{
if (above.size() > 1)
{
// No communication needed, just copy.
std::copy(current, current + (above.size() - 1), above.begin()+1);
}
}
return above;
}
void CommunicateAbove::pushBackEclIndex([[maybe_unused]] int above,
[[maybe_unused]] int current,
[[maybe_unused]] bool isOwner)
{
#if HAVE_MPI
if (comm_.size() > 1)
{
Attribute attr = owner;
if (!isOwner)
{
attr = overlap;
}
above_indices_.add(above, {count_, attr, true});
current_indices_.add(current, {count_++, attr, true});
}
#endif
}
void ParallelWellInfo::DestroyComm::operator()(Communication* comm)
{
@@ -46,12 +161,14 @@ ParallelWellInfo::ParallelWellInfo(const std::string& name,
bool hasLocalCells)
: name_(name), hasLocalCells_ (hasLocalCells),
isOwner_(true), rankWithFirstPerf_(-1),
comm_(new Communication(Dune::MPIHelper::getLocalCommunicator())),
commAbove_(new CommunicateAbove(*comm_))
{}
ParallelWellInfo::ParallelWellInfo(const std::pair<std::string,bool>& well_info,
[[maybe_unused]] Communication allComm)
: name_(well_info.first), hasLocalCells_(well_info.second),
rankWithFirstPerf_(-1)
{
#if HAVE_MPI
MPI_Comm newComm;
@@ -61,6 +178,7 @@ ParallelWellInfo::ParallelWellInfo(const std::pair<std::string,bool>& well_info,
#else
comm_.reset(new Communication(Dune::MPIHelper::getLocalCommunicator()));
#endif
commAbove_.reset(new CommunicateAbove(*comm_));
isOwner_ = (comm_->rank() == 0);
}
@@ -74,6 +192,42 @@ void ParallelWellInfo::communicateFirstPerforation(bool hasFirst)
rankWithFirstPerf_ = found - firstVec.begin();
}
void ParallelWellInfo::pushBackEclIndex(int above, int current)
{
commAbove_->pushBackEclIndex(above, current);
}
void ParallelWellInfo::beginReset()
{
commAbove_->beginReset();
}
void ParallelWellInfo::endReset()
{
commAbove_->endReset();
}
void ParallelWellInfo::clearCommunicateAbove()
{
commAbove_->clear();
}
std::vector<double> ParallelWellInfo::communicateAboveValues(double zero_value,
const double* current_values,
std::size_t size) const
{
return commAbove_->communicate(zero_value, current_values,
size);
}
std::vector<double> ParallelWellInfo::communicateAboveValues(double zero_value,
const std::vector<double>& current_values) const
{
return commAbove_->communicate(zero_value, current_values.data(),
current_values.size());
}
bool operator<(const ParallelWellInfo& well1, const ParallelWellInfo& well2)
{
return well1.name() < well2.name() || (! (well2.name() < well1.name()) && well1.hasLocalCells() < well2.hasLocalCells());

opm/simulators/wells/ParallelWellInfo.hpp

@@ -21,6 +21,8 @@
#define OPM_PARALLELWELLINFO_HEADER_INCLUDED
#include <dune/common/version.hh>
#include <dune/common/parallel/mpihelper.hh>
#include <dune/common/parallel/plocalindex.hh>
#include <dune/istl/owneroverlapcopy.hh>
#include <opm/parser/eclipse/EclipseState/Schedule/Well/Well.hpp>
@@ -29,6 +31,73 @@
namespace Opm
{
/// \brief Class to facilitate getting values associated with the perforation above
///
class CommunicateAbove
{
enum Attribute {
owner=1, overlap=2
};
using MPIComm = typename Dune::MPIHelper::MPICommunicator;
#if DUNE_VERSION_NEWER(DUNE_COMMON, 2, 7)
using Communication = Dune::Communication<MPIComm>;
#else
using Communication = Dune::CollectiveCommunication<MPIComm>;
#endif
using LocalIndex = Dune::ParallelLocalIndex<Attribute>;
#if HAVE_MPI
using IndexSet = Dune::ParallelIndexSet<std::size_t,LocalIndex,50>;
using RI = Dune::RemoteIndices<IndexSet>;
#endif
public:
CommunicateAbove(const Communication& comm);
/// \brief Adds information about the ecl indices of the perforations.
///
/// \warning These indices need to be pushed in the same order as they
/// appear in the ECL well specification. Use -1 if there is
/// no perforation above.
/// \param above The ECL index of the next open perforation above.
/// \param current The ECL index of the current open perforation.
void pushBackEclIndex(int above, int current, bool owner=true);
/// \brief Clear all the parallel information
void clear();
/// \brief Indicates that we will add the index information
/// \see pushBackEclIndex
void beginReset();
/// \brief Indicates that the index information is complete.
///
/// Sets up the communication structures to be used by
/// communicate()
void endReset();
/// \brief Creates an array of values for the perforation above.
/// \param first_value Value to use as the above value of the first perforation
/// \param current C-array of the values at the perforations
/// \param size The size of the C-array and the returned vector
/// \return a vector containing the values for the perforation above.
std::vector<double> communicate(double first_value,
const double* current,
std::size_t size);
private:
#if HAVE_MPI
Communication comm_;
/// \brief Mapping of the local well index to ecl index
IndexSet current_indices_;
/// \brief Mapping of the above well index to ecl index
IndexSet above_indices_;
RI remote_indices_;
Dune::Interface interface_;
Dune::BufferedCommunicator communicator_;
#endif
std::size_t count_{};
};
/// \brief Class encapsulating some information about parallel wells
///
/// e.g. It provides a communicator for well information
@@ -72,6 +141,29 @@ public:
return res;
}
/// \brief Creates an array of values for the perforation above.
/// \param zero_value Value to use as the above value of the first perforation
/// \param current C-array of the values at the perforations
/// \param size The size of the C-array and the returned vector
/// \return a vector containing the values for the perforation above.
std::vector<double> communicateAboveValues(double zero_value,
const double* current,
std::size_t size) const;
/// \brief Creates an array of values for the perforation above.
/// \param zero_value Value to use as the above value of the first perforation
/// \param current vector of current values
std::vector<double> communicateAboveValues(double zero_value,
const std::vector<double>& current) const;
/// \brief Adds information about the ecl indices of the perforations.
///
/// \warning These indices need to be pushed in the same order as they
/// appear in the ECL well specification. Use -1 if there is
/// no perforation above.
/// \param above The ECL index of the next open perforation above.
/// \param current The ECL index of the current open perforation.
void pushBackEclIndex(int above, int current);
/// \brief Name of the well.
const std::string& name() const
{
@@ -88,6 +180,16 @@ public:
return isOwner_;
}
/// \brief Indicate that we will reset the ecl index information
///
/// \see pushBackEclIndex
void beginReset();
/// \brief Indicate completion of the reset of the ecl index information
void endReset();
/// \brief Free data of communication data structures.
void clearCommunicateAbove();
private:
/// \brief Deleter that also frees custom MPI communicators
@@ -109,6 +211,9 @@ private:
///
/// Contains only ranks where this well will perforate local cells.
std::unique_ptr<Communication, DestroyComm> comm_;
/// \brief Used to communicate the values for the perforation above.
std::unique_ptr<CommunicateAbove> commAbove_;
};
/// \brief Class checking that all connections are on active cells

tests/test_parallelwellinfo.cpp

@@ -152,3 +152,127 @@ BOOST_AUTO_TEST_CASE(ParallelWellComparison)
#endif
}
BOOST_AUTO_TEST_CASE(CommunicateAboveSelf)
{
auto comm = Dune::MPIHelper::getLocalCommunicator();
Opm::CommunicateAbove commAbove{ comm };
for(std::size_t count=0; count < 2; ++count)
{
std::vector<int> eclIndex = {0, 1, 2, 3, 7, 8, 10, 11};
std::vector<double> current(eclIndex.size());
std::transform(eclIndex.begin(), eclIndex.end(), current.begin(),
[](double v){ return 1+10.0*v;});
commAbove.beginReset();
for (std::size_t i = 0; i < current.size(); ++i)
{
if (i==0)
commAbove.pushBackEclIndex(-1, eclIndex[i]);
else
commAbove.pushBackEclIndex(eclIndex[i-1], eclIndex[i]);
}
commAbove.endReset();
auto above = commAbove.communicate(-10.0, current.data(), current.size());
BOOST_TEST(above[0]==-10.0);
BOOST_TEST(above.size() == current.size());
auto a = above.begin()+1;
std::for_each(current.begin(), current.begin() + (current.size()-1),
[&a](double v){ BOOST_TEST(*(a++) == v);});
}
}
BOOST_AUTO_TEST_CASE(CommunicateAboveSelf1)
{
auto comm = Dune::MPIHelper::getLocalCommunicator();
Opm::CommunicateAbove commAbove{ comm };
for(std::size_t count=0; count < 2; ++count)
{
std::vector<int> eclIndex = {0};
std::vector<double> current(eclIndex.size());
std::transform(eclIndex.begin(), eclIndex.end(), current.begin(),
[](double v){ return 1+10.0*v;});
commAbove.beginReset();
for (std::size_t i = 0; i < current.size(); ++i)
{
if (i==0)
commAbove.pushBackEclIndex(-1, eclIndex[i]);
else
commAbove.pushBackEclIndex(eclIndex[i-1], eclIndex[i]);
}
commAbove.endReset();
auto above = commAbove.communicate(-10.0, current.data(), current.size());
BOOST_TEST(above[0]==-10.0);
BOOST_TEST(above.size() == current.size());
auto a = above.begin()+1;
std::for_each(current.begin(), current.begin() + (current.size()-1),
[&a](double v){ BOOST_TEST(*(a++) == v);});
}
}
BOOST_AUTO_TEST_CASE(CommunicateAboveParallel)
{
using MPIComm = typename Dune::MPIHelper::MPICommunicator;
#if DUNE_VERSION_NEWER(DUNE_COMMON, 2, 7)
using Communication = Dune::Communication<MPIComm>;
#else
using Communication = Dune::CollectiveCommunication<MPIComm>;
#endif
auto comm = Communication(Dune::MPIHelper::getCommunicator());
Opm::CommunicateAbove commAbove{ comm };
for(std::size_t count=0; count < 2; ++count)
{
std::vector<int> globalEclIndex = {0, 1, 2, 3, 7, 8, 10, 11};
auto oldSize = globalEclIndex.size();
std::size_t globalSize = 3 * comm.size();
auto lastIndex = globalEclIndex.back();
globalEclIndex.resize(globalSize);
if ( globalSize > oldSize)
{
++lastIndex;
for(auto entry = globalEclIndex.begin() + oldSize;
entry != globalEclIndex.end(); ++entry, ++lastIndex)
{
*entry = lastIndex;
}
}
std::vector<double> globalCurrent(globalEclIndex.size());
std::transform(globalEclIndex.begin(), globalEclIndex.end(), globalCurrent.begin(),
[](double v){ return 1+10.0*v;});
std::vector<double> current(3);
commAbove.beginReset();
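// Deal the perforations out cyclically: local perforation i sits at
// global position rank + size*i, so consecutive perforations usually
// live on different ranks and the communication is actually exercised.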
for (std::size_t i = 0; i < current.size(); ++i)
{
auto gi = comm.rank() + comm.size() * i;
if (gi==0)
{
commAbove.pushBackEclIndex(-1, globalEclIndex[gi]);
}
else
{
commAbove.pushBackEclIndex(globalEclIndex[gi-1], globalEclIndex[gi]);
}
current[i] = globalCurrent[gi];
}
commAbove.endReset();
auto above = commAbove.communicate(-10.0, current.data(), current.size());
if (comm.rank() == 0)
BOOST_TEST(above[0]==-10.0);
BOOST_TEST(above.size() == current.size());
for (std::size_t i = 0; i < current.size(); ++i)
{
auto gi = comm.rank() + comm.size() * i;
if (gi > 0)
{
BOOST_TEST(above[i]==globalCurrent[gi-1]);
}
}
}
}