Mirror of https://github.com/OPM/opm-simulators.git (synced 2025-02-25 18:55:30 -06:00)
Also distribute the centroids when loadbalancing CpGrid.
The centroids are attached to the cells as well and are now distributed during CpGrid::loadBalance. Because of this change, FieldPropsDataHandle is renamed to PropsCentroidsDataHandle.
parent 1b03b040a3
commit 4c962e61d1
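For orientation before the diff: the sequence below is a condensed sketch of how the pieces introduced in this commit fit together during CpGrid::loadBalance. It only uses calls that appear in the diff; the free-standing helper loadBalanceWithCentroids, its template parameters, and the flattened argument list are illustrative assumptions, not code from the commit (in the commit this logic lives inside the CpGrid vanguard).

#include <opm/grid/CpGrid.hpp>
#include <opm/grid/cpgrid/GridHelpers.hpp>
#include <opm/simulators/utils/ParallelEclipseState.hpp>
#include <opm/simulators/utils/PropsCentroidsDataHandle.hpp>

#include <string>
#include <tuple>
#include <vector>

// Hypothetical helper for illustration only; mirrors the calls shown in the diff.
template <class Wells, class FaceTrans>
auto loadBalanceWithCentroids(Dune::CpGrid& grid,
                              Opm::ParallelEclipseState& eclState,
                              const Dune::CartesianIndexMapper<Dune::CpGrid>& cartMapper,
                              Dune::EdgeWeightMethod edgeWeightsMethod,
                              const Wells& wells,
                              const FaceTrans& faceTrans,
                              std::vector<double>& centroids)
{
    // Only rank 0 holds the global input grid; the other ranks pass nullptr
    // and receive the centroids through the data handle instead.
    const Opm::EclipseGrid* eclGrid = nullptr;
    if (grid.comm().rank() == 0)
        eclGrid = &eclState.getInputGrid();

    // The renamed handle ships field properties *and* cell centroids with
    // every cell that changes its owner during load balancing.
    Opm::PropsCentroidsDataHandle<Dune::CpGrid> handle(grid, eclState, eclGrid,
                                                       centroids, cartMapper);

    auto result = grid.loadBalance(handle, edgeWeightsMethod, &wells, faceTrans.data());
    grid.switchToDistributedView();

    // 'centroids' now holds dimensionworld doubles per local cell; the vanguard
    // exposes it via cellCentroids(), so EclTransmissibility no longer has to
    // gather centroids over MPI itself (see the removed block further down).
    return std::get<1>(result); // defunct well names, as in the diff
}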
@@ -175,11 +175,11 @@ list (APPEND PUBLIC_HEADER_FILES
   opm/simulators/utils/ParallelFileMerger.hpp
   opm/simulators/utils/DeferredLoggingErrorHelpers.hpp
   opm/simulators/utils/DeferredLogger.hpp
-  opm/simulators/utils/FieldPropsDataHandle.hpp
   opm/simulators/utils/gatherDeferredLogger.hpp
   opm/simulators/utils/moduleVersion.hpp
   opm/simulators/utils/ParallelEclipseState.hpp
   opm/simulators/utils/ParallelRestart.hpp
+  opm/simulators/utils/PropsCentroidsDataHandle.hpp
   opm/simulators/wells/PerforationData.hpp
   opm/simulators/wells/RateConverter.hpp
   opm/simulators/wells/SimFIBODetails.hpp
@@ -533,6 +533,15 @@ public:
     std::unordered_set<std::string> defunctWellNames() const
     { return std::unordered_set<std::string>(); }
 
+    /*!
+     * \brief Get the cell centroids for a distributed grid.
+     *
+     * Currently this is only non-empty for a loadbalanced CpGrid.
+     */
+    const std::vector<double>& cellCentroids() const
+    {
+        return centroids_;
+    }
 protected:
     void callImplementationInit()
     {
@@ -610,6 +619,12 @@ private:
     Opm::SummaryConfig* eclSummaryConfig_;
 
     Dune::EdgeWeightMethod edgeWeightsMethod_;
 
+protected:
+    /*! \brief The cell centroids after loadbalance was called.
+     * Empty otherwise. Used by EclTransmissibility.
+     */
+    std::vector<double> centroids_;
 };
 
 template <class TypeTag>
@@ -34,7 +34,7 @@
 #include <opm/grid/CpGrid.hpp>
 #include <opm/grid/cpgrid/GridHelpers.hpp>
 #include <opm/simulators/utils/ParallelEclipseState.hpp>
-#include <opm/simulators/utils/FieldPropsDataHandle.hpp>
+#include <opm/simulators/utils/PropsCentroidsDataHandle.hpp>
 
 #include <dune/grid/common/mcmgmapper.hh>
 
@@ -190,7 +190,15 @@ public:
         {
             const auto wells = this->schedule().getWellsatEnd();
             auto& eclState = static_cast<ParallelEclipseState&>(this->eclState());
-            FieldPropsDataHandle<Dune::CpGrid> handle(*grid_, eclState);
+            const EclipseGrid* eclGrid = nullptr;
+
+            if (grid_->comm().rank() == 0)
+            {
+                eclGrid = &this->eclState().getInputGrid();
+            }
+
+            PropsCentroidsDataHandle<Dune::CpGrid> handle(*grid_, eclState, eclGrid, this->centroids_,
+                                                          cartesianIndexMapper());
             defunctWellNames_ = std::get<1>(grid_->loadBalance(handle, edgeWeightsMethod, &wells, faceTrans.data()));
         }
         grid_->switchToDistributedView();
@@ -149,47 +149,7 @@ public:
         for (unsigned dimIdx = 0; dimIdx < dimWorld; ++dimIdx)
             axisCentroids[dimIdx].resize(numElements);
 
-        std::vector<double> centroids;
-#if HAVE_MPI
-        size_t cells = vanguard_.grid().numCells();
-        if (global && comm.size() > 1) {
-            std::vector<size_t> sizes(comm.size());
-            if (comm.rank() == 0) {
-                const auto& eclGrid = eclState.getInputGrid();
-                comm.gather(&cells, sizes.data(), 1, 0);
-                for (int i = 1; i < comm.size(); ++i) {
-                    std::vector<int> cell_id(sizes[i]);
-                    MPI_Recv(cell_id.data(), sizes[i], MPI_INT,
-                             i, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
-                    centroids.resize(dimWorld * sizes[i]);
-                    auto cIt = centroids.begin();
-                    for (int idx : cell_id) {
-                        const auto& centroid = eclGrid.getCellCenter(idx);
-                        for (const auto& it : centroid)
-                            *cIt++ = it;
-                    }
-                    MPI_Send(centroids.data(), dimWorld * sizes[i],
-                             MPI_DOUBLE, i, 0, MPI_COMM_WORLD);
-                }
-                centroids.clear();
-            } else {
-                comm.gather(&cells, sizes.data(), 1, 0);
-                std::vector<int> cell_ids;
-                cell_ids.reserve(cells);
-                auto elemIt = gridView.template begin</*codim=*/ 0>();
-                const auto& elemEndIt = gridView.template end</*codim=*/ 0>();
-                for (; elemIt != elemEndIt; ++elemIt) {
-                    const auto& elem = *elemIt;
-                    cell_ids.push_back(cartMapper.cartesianIndex(elemMapper.index(elem)));
-                }
-                MPI_Send(cell_ids.data(), cells, MPI_INT, 0, 0, MPI_COMM_WORLD);
-                centroids.resize(cells * dimWorld);
-                MPI_Recv(centroids.data(), dimWorld * cells, MPI_DOUBLE,
-                         0, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
-            }
-        }
-#endif
+        const std::vector<double>& centroids = vanguard_.cellCentroids();
 
         auto elemIt = gridView.template begin</*codim=*/ 0>();
         const auto& elemEndIt = gridView.template end</*codim=*/ 0>();
@@ -20,7 +20,6 @@
 #define PARALLEL_ECLIPSE_STATE_HPP
 
 #include <opm/parser/eclipse/EclipseState/EclipseState.hpp>
-//#include <opm/simulators/utils/FieldPropsDataHandle.hpp>
 #include <dune/common/parallel/mpihelper.hh>
 
 namespace Opm {
@@ -41,7 +40,7 @@ public:
     friend class ParallelEclipseState; //!< Friend so props can be setup.
     //! \brief Friend to set up props
     template<class Grid>
-    friend class FieldPropsDataHandle;
+    friend class PropsCentroidsDataHandle;
 
     //! \brief Constructor.
     //! \param manager The field property manager to wrap.
@@ -106,7 +105,7 @@ protected:
 class ParallelEclipseState : public EclipseState {
     //! \brief Friend to set up props
     template<class Grid>
-    friend class FieldPropsDataHandle;
+    friend class PropsCentroidsDataHandle;
 public:
     //! \brief Default constructor.
     ParallelEclipseState();
@@ -23,8 +23,8 @@
  * \author Markus Blatt, OPM-OP AS
  */
 
-#ifndef FIELDPROPS_DATAHANDLE_HPP
-#define FIELDPROPS_DATAHANDLE_HPP
+#ifndef PROPS_CENTROIDS_DATAHANDLE_HPP
+#define PROPS_CENTROIDS_DATAHANDLE_HPP
 
 #if HAVE_MPI
 #include <opm/simulators/utils/ParallelEclipseState.hpp>
@@ -35,17 +35,18 @@
 #include <dune/common/parallel/mpihelper.hh>
 #include <unordered_map>
 #include <iostream>
 
 namespace Opm
 {
 
 /*!
- * \brief A Data handle t communicate the field properties during load balance.
+ * \brief A Data handle to communicate the field properties and cell centroids during load balance.
  * \tparam Grid The type of grid where the load balancing is happening.
  * \todo Maybe specialize this for CpGrid to save some space, later.
  */
 template<class Grid>
-class FieldPropsDataHandle
-    : public Dune::CommDataHandleIF< FieldPropsDataHandle<Grid>, double>
+class PropsCentroidsDataHandle
+    : public Dune::CommDataHandleIF< PropsCentroidsDataHandle<Grid>, double>
 {
 public:
     //! \brief the data type we send (ints are converted to double)
|
|||||||
//! \param grid The grid where the loadbalancing is happening.
|
//! \param grid The grid where the loadbalancing is happening.
|
||||||
//! \param globalProps The field properties of the global grid
|
//! \param globalProps The field properties of the global grid
|
||||||
//! \param distributedProps The distributed field properties
|
//! \param distributedProps The distributed field properties
|
||||||
FieldPropsDataHandle(const Grid& grid, ParallelEclipseState& eclState)
|
//! \param eclGridOnRoot A pointer to eclipse grid on rank zero,
|
||||||
: m_grid(grid), m_distributed_fieldProps(eclState.m_fieldProps)
|
//! nullptr otherwise.
|
||||||
|
//! \param centroids Array to store the centroids in upon destruction
|
||||||
|
//! of the object.
|
||||||
|
//! \param cartMapper The cartesian index mapper of the grid.
|
||||||
|
PropsCentroidsDataHandle(const Grid& grid, ParallelEclipseState& eclState,
|
||||||
|
const EclipseGrid* eclGridOnRoot,
|
||||||
|
std::vector<double>& centroids,
|
||||||
|
const typename Dune::CartesianIndexMapper<Grid>& cartMapper)
|
||||||
|
: m_grid(grid), m_distributed_fieldProps(eclState.m_fieldProps),
|
||||||
|
m_centroids(centroids)
|
||||||
{
|
{
|
||||||
// Scatter the keys
|
// Scatter the keys
|
||||||
const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
|
const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
|
||||||
@@ -75,7 +85,8 @@ public:
         comm.broadcast(buffer.data(), position, 0);
 
         // copy data to persistent map based on local id
-        auto noData = m_intKeys.size() + m_doubleKeys.size();
+        m_no_data = m_intKeys.size() + m_doubleKeys.size() +
+            Grid::dimensionworld;
         const auto& idSet = m_grid.localIdSet();
         const auto& gridView = m_grid.levelGridView(0);
         using ElementMapper =
@@ -87,13 +98,18 @@ public:
             const auto& id = idSet.id(element);
             auto index = elemMapper.index(element);
             auto& data = elementData_[id];
-            data.reserve(noData);
+            data.reserve(m_no_data);
 
-            for(const auto& intKey : m_intKeys)
+            for (const auto& intKey : m_intKeys)
                 data.push_back(globalProps.get_int(intKey)[index]);
 
-            for(const auto& doubleKey : m_doubleKeys)
+            for (const auto& doubleKey : m_doubleKeys)
                 data.push_back(globalProps.get_double(doubleKey)[index]);
+
+            auto cartIndex = cartMapper.cartesianIndex(index);
+            const auto& center = eclGridOnRoot->getCellCenter(cartIndex);
+            for (int dim = 0; dim < Grid::dimensionworld; ++dim)
+                data.push_back(center[dim]);
         }
     }
     else
@@ -105,12 +121,12 @@ public:
             int position{};
             Mpi::unpack(m_intKeys, buffer, position, comm);
             Mpi::unpack(m_doubleKeys, buffer, position, comm);
+            m_no_data = m_intKeys.size() + m_doubleKeys.size() +
+                Grid::dimensionworld;
         }
     }
 
-    FieldPropsDataHandle(const FieldPropsDataHandle&) = delete;
-
-    ~FieldPropsDataHandle()
+    ~PropsCentroidsDataHandle()
     {
         // distributed grid is now correctly set up.
         for(const auto& intKey : m_intKeys)
@@ -119,6 +135,8 @@ public:
         for(const auto& doubleKey : m_doubleKeys)
             m_distributed_fieldProps.m_doubleProps[doubleKey].resize(m_grid.size(0));
 
+        m_centroids.resize(m_grid.size(0) * Grid::dimensionworld);
+
         // copy data from the persistent map to the field properties
         const auto& idSet = m_grid.localIdSet();
         const auto& gridView = m_grid.levelGridView(0);
@@ -139,6 +157,11 @@ public:
 
             for(const auto& doubleKey : m_doubleKeys)
                 m_distributed_fieldProps.m_doubleProps[doubleKey][index] = data->second[counter++];
+
+            auto centroidIter = m_centroids.begin() + Grid::dimensionworld * index;
+            auto centroidIterEnd = centroidIter + Grid::dimensionworld;
+            for ( ; centroidIter != centroidIterEnd; ++centroidIter )
+                *centroidIter = data->second[counter++];
         }
     }
 
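To make the counter arithmetic in the hunks above easier to follow: each cell's message is a flat array of doubles laid out as [int properties | double properties | dimensionworld centroid coordinates], which is why m_no_data grows by Grid::dimensionworld and why the destructor reads the centroid components last. Below is a minimal standalone sketch of that packing and unpacking; it is plain C++ with no Dune/OPM dependencies, and the key counts, values, and variable names are made up for illustration.

#include <array>
#include <cassert>
#include <cstddef>
#include <vector>

int main()
{
    // Hypothetical sizes for one cell: two int keywords, three double keywords,
    // and a 3D centroid -- mirroring m_no_data = |intKeys| + |doubleKeys| + dimensionworld.
    const std::size_t numIntKeys = 2, numDoubleKeys = 3, dimWorld = 3;
    const std::size_t noData = numIntKeys + numDoubleKeys + dimWorld;

    // Gather side: everything is packed as double, centroid components last.
    std::vector<double> message;
    message.reserve(noData);
    for (std::size_t i = 0; i < numIntKeys; ++i)    message.push_back(double(10 + i)); // int props
    for (std::size_t i = 0; i < numDoubleKeys; ++i) message.push_back(0.5 * i);        // double props
    const std::array<double, 3> center{100.0, 200.0, 5.0};
    for (std::size_t dim = 0; dim < dimWorld; ++dim) message.push_back(center[dim]);   // centroid

    assert(message.size() == noData);

    // Scatter/destructor side: the properties are consumed first, then the
    // centroid goes into a flat array with dimWorld entries per local cell
    // (here a single cell with index 0).
    std::size_t counter = numIntKeys + numDoubleKeys;
    std::vector<double> centroids(dimWorld * 1);
    const std::size_t cellIndex = 0;
    for (std::size_t dim = 0; dim < dimWorld; ++dim)
        centroids[dimWorld * cellIndex + dim] = message[counter++];

    assert(centroids[0] == 100.0 && centroids[2] == 5.0);
    return 0;
}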
@@ -159,7 +182,7 @@ public:
     template<class EntityType>
     std::size_t size(const EntityType /* entity */)
     {
-        return m_intKeys.size() + m_doubleKeys.size();
+        return m_no_data;
     }
 
     template<class BufferType, class EntityType>
@@ -174,7 +197,7 @@ public:
     template<class BufferType, class EntityType>
     void scatter(BufferType& buffer, const EntityType& e, std::size_t n)
     {
-        assert(n == m_intKeys.size() + m_doubleKeys.size());
+        assert(n == m_no_data);
         auto& array = rcvdElementData_[m_grid.localIdSet().id(e)];
         array.resize(n);
         for(auto& data: array)
@@ -194,11 +217,19 @@ private:
     std::vector<std::string> m_doubleKeys;
     /// \brief The data per element as a vector mapped from the local id.
     std::unordered_map<typename LocalIdSet::IdType, std::vector<double> > elementData_;
-    /// \brief The data per element as a vector mapped from the local id.
+    /*! \brief The data received per element as a vector mapped from the local id.
+     *
+     * Needed because CpGrid is in violation of the requirement that local ids
+     * need to be persistent across grid modifications.
+     */
     std::unordered_map<typename LocalIdSet::IdType, std::vector<double> > rcvdElementData_;
+    /// \brief The cell centroids of the distributed grid.
+    std::vector<double>& m_centroids;
+    /// \brief The amount of data to send for each element
+    std::size_t m_no_data;
 };
 
 } // end namespace Opm
 #endif // HAVE_MPI
-#endif // FIELDPROPS_DATAHANDLE_HPP
+#endif // PROPS_CENTROIDS_DATAHANDLE_HPP