Merge pull request #4778 from aritorto/cellCentroid

Refactor element centroids lookup
This commit is contained in:
Markus Blatt 2023-09-15 10:02:15 +02:00 committed by GitHub
commit b209f6af77
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 40 additions and 79 deletions

View File

@ -517,7 +517,7 @@ list (APPEND PUBLIC_HEADER_FILES
opm/simulators/utils/moduleVersion.hpp
opm/simulators/utils/ParallelEclipseState.hpp
opm/simulators/utils/ParallelRestart.hpp
opm/simulators/utils/PropsCentroidsDataHandle.hpp
opm/simulators/utils/PropsDataHandle.hpp
opm/simulators/utils/SerializationPackers.hpp
opm/simulators/utils/VectorVectorDataHandle.hpp
opm/simulators/utils/PressureAverage.hpp

View File

@ -31,7 +31,7 @@
#include <opm/grid/common/GridEnums.hpp>
#include <opm/grid/common/CartesianIndexMapper.hpp>
#include <opm/grid/LookUpCellCentroid.hh>
#include <opm/input/eclipse/EclipseState/Aquifer/NumericalAquifer/NumericalAquiferCell.hpp>
#include <opm/input/eclipse/EclipseState/EclipseState.hpp>
@ -42,6 +42,8 @@
#include <opm/simulators/flow/BlackoilModelParametersEbos.hpp>
#include <array>
#include <cstddef>
#include <optional>
@ -51,6 +53,7 @@
namespace Opm {
template <class TypeTag>
class EclBaseVanguard;
template<typename Grid, typename GridView> struct LookUpCellCentroid;
}
namespace Opm::Properties {
@ -476,20 +479,18 @@ protected:
cellCentroids_(const CartMapper& cartMapper) const
{
return [this, cartMapper](int elemIdx) {
const auto& centroids = this->centroids_;
auto rank = this->gridView().comm().rank();
std::array<double,dimensionworld> centroid;
if (rank == 0) {
unsigned cartesianCellIdx = cartMapper.cartesianIndex(elemIdx);
centroid = this->eclState().getInputGrid().getCellCenter(cartesianCellIdx);
} else
{
std::copy(centroids.begin() + elemIdx * dimensionworld,
centroids.begin() + (elemIdx + 1) * dimensionworld,
centroid.begin());
}
return centroid;
};
std::array<double,dimensionworld> centroid;
if (this->gridView().comm().rank() == 0)
{
centroid = this->eclState().getInputGrid().getCellCenter(cartMapper.cartesianIndex(elemIdx));
}
else
{
LookUpCellCentroid<Grid,GridView> lookUpCellCentroid(this->gridView(), cartMapper, nullptr);
centroid = lookUpCellCentroid(elemIdx);
}
return centroid;
};
}
void callImplementationInit()
@ -506,7 +507,7 @@ protected:
{
std::size_t num_cells = asImp_().grid().leafGridView().size(0);
is_interior_.resize(num_cells);
ElementMapper elemMapper(this->gridView(), Dune::mcmgElementLayout());
for (const auto& element : elements(this->gridView()))
{
@ -578,7 +579,7 @@ private:
return zz/Scalar(corners);
}
Scalar computeCellThickness(const typename GridView::template Codim<0>::Entity& element) const
{
typedef typename Element::Geometry Geometry;
@ -609,11 +610,6 @@ private:
{ return *static_cast<const Implementation*>(this); }
protected:
/*! \brief The cell centroids after loadbalance was called.
* Empty otherwise. Used by EclTransmissibilty.
*/
std::vector<double> centroids_;
/*! \brief Mapping between cartesian and compressed cells.
* It is initialized the first time it is called
*/

View File

@ -208,8 +208,8 @@ public:
this->doLoadBalance_(this->edgeWeightsMethod(), this->ownersFirst(),
this->serialPartitioning(), this->enableDistributedWells(),
this->zoltanImbalanceTol(), this->gridView(),
this->schedule(), this->centroids_,
this->eclState(), this->parallelWells_, this->numJacobiBlocks());
this->schedule(), this->eclState(),
this->parallelWells_, this->numJacobiBlocks());
#endif
this->updateGridView_();

View File

@ -31,7 +31,7 @@
#include <opm/simulators/utils/ParallelEclipseState.hpp>
#include <opm/simulators/utils/ParallelSerialization.hpp>
#include <opm/simulators/utils/PropsCentroidsDataHandle.hpp>
#include <opm/simulators/utils/PropsDataHandle.hpp>
#include <opm/simulators/utils/SetupZoltanParams.hpp>
#include <opm/grid/cpgrid/GridHelpers.hpp>
@ -94,7 +94,6 @@ doLoadBalance_(const Dune::EdgeWeightMethod edgeWeightsMethod,
const double zoltanImbalanceTol,
const GridView& gridView,
const Schedule& schedule,
std::vector<double>& centroids,
EclipseState& eclState1,
EclGenericVanguard::ParallelWellStruct& parallelWells,
const int numJacobiBlocks)
@ -143,7 +142,7 @@ doLoadBalance_(const Dune::EdgeWeightMethod edgeWeightsMethod,
this->distributeGrid(edgeWeightsMethod, ownersFirst,
serialPartitioning, enableDistributedWells,
zoltanImbalanceTol, loadBalancerSet != 0,
faceTrans, wells, centroids,
faceTrans, wells,
eclState1, parallelWells);
}
@ -230,7 +229,6 @@ distributeGrid(const Dune::EdgeWeightMethod edgeWeightsMethod,
const bool loadBalancerSet,
const std::vector<double>& faceTrans,
const std::vector<Well>& wells,
std::vector<double>& centroids,
EclipseState& eclState1,
EclGenericVanguard::ParallelWellStruct& parallelWells)
{
@ -240,7 +238,7 @@ distributeGrid(const Dune::EdgeWeightMethod edgeWeightsMethod,
this->distributeGrid(edgeWeightsMethod, ownersFirst,
serialPartitioning, enableDistributedWells,
zoltanImbalanceTol, loadBalancerSet, faceTrans,
wells, centroids, eclState, parallelWells);
wells, eclState, parallelWells);
}
else {
const auto message = std::string {
@ -267,20 +265,14 @@ distributeGrid(const Dune::EdgeWeightMethod edgeWeightsMethod,
const bool loadBalancerSet,
const std::vector<double>& faceTrans,
const std::vector<Well>& wells,
std::vector<double>& centroids,
ParallelEclipseState* eclState,
EclGenericVanguard::ParallelWellStruct& parallelWells)
{
OPM_TIMEBLOCK(gridDistribute);
const auto isIORank = this->grid_->comm().rank() == 0;
const auto* eclGrid = isIORank
? &eclState->getInputGrid()
: nullptr;
PropsCentroidsDataHandle<Dune::CpGrid> handle {
*this->grid_, *eclState, eclGrid, centroids,
this->cartesianIndexMapper()
PropsDataHandle<Dune::CpGrid> handle {
*this->grid_, *eclState
};
const auto addCornerCells = false;

View File

@ -131,7 +131,6 @@ protected:
const double zoltanImbalanceTol,
const GridView& gridView,
const Schedule& schedule,
std::vector<double>& centroids,
EclipseState& eclState,
EclGenericVanguard::ParallelWellStruct& parallelWells,
const int numJacobiBlocks);
@ -149,7 +148,6 @@ private:
const bool loadBalancerSet,
const std::vector<double>& faceTrans,
const std::vector<Well>& wells,
std::vector<double>& centroids,
EclipseState& eclState,
EclGenericVanguard::ParallelWellStruct& parallelWells);
@ -161,7 +159,6 @@ private:
const bool loadBalancerSet,
const std::vector<double>& faceTrans,
const std::vector<Well>& wells,
std::vector<double>& centroids,
ParallelEclipseState* eclState,
EclGenericVanguard::ParallelWellStruct& parallelWells);

View File

@ -44,7 +44,7 @@ public:
friend class ParallelEclipseState; //!< Friend so props can be setup.
//! \brief Friend to set up props
template<class Grid>
friend class PropsCentroidsDataHandle;
friend class PropsDataHandle;
//! \brief Constructor.
//! \param manager The field property manager to wrap.
@ -144,7 +144,7 @@ protected:
class ParallelEclipseState : public EclipseState {
//! \brief Friend to set up props
template<class Grid>
friend class PropsCentroidsDataHandle;
friend class PropsDataHandle;
public:
//! \brief Default constructor.
ParallelEclipseState(Parallel::Communication comm);

View File

@ -1,5 +1,5 @@
/*
Copyright 2020 Equinor AS.
Copyright 2020, 2023 Equinor AS.
This file is part of the Open Porous Media project (OPM).
@ -17,14 +17,14 @@
along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
/*!
* \file FieldPropsDatahandle.hpp
* \file PropsDataHandle.hpp
* \brief File containing a data handle for communicating the FieldProperties
*
* \author Markus Blatt, OPM-OP AS
*/
#ifndef PROPS_CENTROIDS_DATAHANDLE_HPP
#define PROPS_CENTROIDS_DATAHANDLE_HPP
#ifndef PROPS_DATAHANDLE_HPP
#define PROPS_DATAHANDLE_HPP
#if HAVE_MPI
@ -43,13 +43,13 @@ namespace Opm
{
/*!
* \brief A Data handle to communicate the field properties and cell centroids during load balance.
* \brief A Data handle to communicate the field properties during load balance.
* \tparam Grid The type of grid where the load balancing is happening.
* \todo Maybe specialize this for CpGrid to save some space, later.
*/
template<class Grid>
class PropsCentroidsDataHandle
: public Dune::CommDataHandleIF< PropsCentroidsDataHandle<Grid>, double>
class PropsDataHandle
: public Dune::CommDataHandleIF< PropsDataHandle<Grid>, double>
{
public:
//! \brief the data type we send (ints are converted to double)
@ -59,18 +59,9 @@ public:
//! \param grid The grid where the loadbalancing is happening.
//! \param globalProps The field properties of the global grid
//! \param distributedProps The distributed field properties
//! \param eclGridOnRoot A pointer to eclipse grid on rank zero,
//! nullptr otherwise.
//! \param centroids Array to store the centroids in upon destruction
//! of the object.
//! \param cartMapper The cartesian index mapper of the grid.
PropsCentroidsDataHandle(const Grid& grid, ParallelEclipseState& eclState,
const EclipseGrid* eclGridOnRoot,
std::vector<double>& centroids,
const typename Dune::CartesianIndexMapper<Grid>& cartMapper)
PropsDataHandle(const Grid& grid, ParallelEclipseState& eclState)
: m_grid(grid),
m_distributed_fieldProps(eclState.m_fieldProps),
m_centroids(centroids)
m_distributed_fieldProps(eclState.m_fieldProps)
{
// Scatter the keys
const Parallel::Communication comm = m_grid.comm();
@ -85,7 +76,7 @@ public:
EclMpiSerializer ser(comm);
ser.broadcast(*this);
m_no_data = m_intKeys.size() + m_doubleKeys.size() + Grid::dimensionworld;
m_no_data = m_intKeys.size() + m_doubleKeys.size();
if (comm.rank() == 0) {
const FieldPropsManager& globalProps = eclState.globalFieldProps();
@ -118,16 +109,11 @@ public:
data.emplace_back(fieldData.data[index],
static_cast<unsigned char>(fieldData.value_status[index]));
}
auto cartIndex = cartMapper.cartesianIndex(index);
const auto& center = eclGridOnRoot->getCellCenter(cartIndex);
for (int dim = 0; dim < Grid::dimensionworld; ++dim)
data.emplace_back(center[dim], '1'); // write garbage for value_status
}
}
}
~PropsCentroidsDataHandle()
~PropsDataHandle()
{
// distributed grid is now correctly set up.
for (const auto& intKey : m_intKeys)
@ -142,8 +128,6 @@ public:
m_distributed_fieldProps.m_doubleProps[doubleKey].value_status.resize(m_grid.size(0));
}
m_centroids.resize(m_grid.size(0) * Grid::dimensionworld);
// copy data from the persistent map to the field properties
const auto& idSet = m_grid.localIdSet();
const auto& gridView = m_grid.levelGridView(0);
@ -172,11 +156,6 @@ public:
m_distributed_fieldProps.m_doubleProps[doubleKey].data[index] = pair.first;
m_distributed_fieldProps.m_doubleProps[doubleKey].value_status[index] = static_cast<value::status>(pair.second);
}
auto centroidIter = m_centroids.begin() + Grid::dimensionworld * index;
auto centroidIterEnd = centroidIter + Grid::dimensionworld;
for ( ; centroidIter != centroidIterEnd; ++centroidIter )
*centroidIter = data->second[counter++].first; // value_status discarded
}
}
@ -244,13 +223,10 @@ private:
///
/// each entry is a pair of data and value_status.
std::unordered_map<typename LocalIdSet::IdType, std::vector<std::pair<double,unsigned char> > > elementData_;
/// \brief The cell centroids of the distributed grid.
std::vector<double>& m_centroids;
/// \brief The amount of data to send for each element
std::size_t m_no_data;
};
} // end namespace Opm
#endif // HAVE_MPI
#endif // PROPS_CENTROIDS_DATAHANDLE_HPP
#endif // PROPS_DATAHANDLE_HPP