diff --git a/CMakeLists_files.cmake b/CMakeLists_files.cmake
index 328e43bfa..ac42c705a 100644
--- a/CMakeLists_files.cmake
+++ b/CMakeLists_files.cmake
@@ -175,11 +175,11 @@ list (APPEND PUBLIC_HEADER_FILES
   opm/simulators/utils/ParallelFileMerger.hpp
   opm/simulators/utils/DeferredLoggingErrorHelpers.hpp
   opm/simulators/utils/DeferredLogger.hpp
-  opm/simulators/utils/FieldPropsDataHandle.hpp
   opm/simulators/utils/gatherDeferredLogger.hpp
   opm/simulators/utils/moduleVersion.hpp
   opm/simulators/utils/ParallelEclipseState.hpp
   opm/simulators/utils/ParallelRestart.hpp
+  opm/simulators/utils/PropsCentroidsDataHandle.hpp
   opm/simulators/wells/PerforationData.hpp
   opm/simulators/wells/RateConverter.hpp
   opm/simulators/wells/SimFIBODetails.hpp
diff --git a/ebos/eclbasevanguard.hh b/ebos/eclbasevanguard.hh
index dbd16e223..fef9eae80 100644
--- a/ebos/eclbasevanguard.hh
+++ b/ebos/eclbasevanguard.hh
@@ -533,6 +533,15 @@ public:
     std::unordered_set<std::string> defunctWellNames() const
     { return std::unordered_set<std::string>(); }

+    /*!
+     * \brief Get the cell centroids for a distributed grid.
+     *
+     * Currently this is only non-empty for a loadbalanced CpGrid.
+     */
+    const std::vector<double>& cellCentroids() const
+    {
+        return centroids_;
+    }
 protected:
     void callImplementationInit()
     {
@@ -610,6 +619,12 @@ private:
     Opm::SummaryConfig* eclSummaryConfig_;

     Dune::EdgeWeightMethod edgeWeightsMethod_;
+
+protected:
+    /*! \brief The cell centroids after loadBalance() was called.
+     *         Empty otherwise. Used by EclTransmissibility.
+     */
+    std::vector<double> centroids_;
 };

 template <class TypeTag>
diff --git a/ebos/eclcpgridvanguard.hh b/ebos/eclcpgridvanguard.hh
index c0082d251..000b44f9c 100644
--- a/ebos/eclcpgridvanguard.hh
+++ b/ebos/eclcpgridvanguard.hh
@@ -34,7 +34,7 @@
 #include
 #include
 #include
-#include <opm/simulators/utils/FieldPropsDataHandle.hpp>
+#include <opm/simulators/utils/PropsCentroidsDataHandle.hpp>

 #include
@@ -190,7 +190,15 @@ public:
         {
             const auto wells = this->schedule().getWellsatEnd();
             auto& eclState = static_cast<ParallelEclipseState&>(this->eclState());
-            FieldPropsDataHandle<Dune::CpGrid> handle(*grid_, eclState);
+            const EclipseGrid* eclGrid = nullptr;
+
+            if (grid_->comm().rank() == 0)
+            {
+                eclGrid = &this->eclState().getInputGrid();
+            }
+
+            PropsCentroidsDataHandle<Dune::CpGrid> handle(*grid_, eclState, eclGrid, this->centroids_,
+                                                          cartesianIndexMapper());
             defunctWellNames_ = std::get<1>(grid_->loadBalance(handle, edgeWeightsMethod, &wells, faceTrans.data()));
         }
         grid_->switchToDistributedView();
diff --git a/ebos/ecltransmissibility.hh b/ebos/ecltransmissibility.hh
index 498d93ef9..9ebaff7e8 100644
--- a/ebos/ecltransmissibility.hh
+++ b/ebos/ecltransmissibility.hh
@@ -149,47 +149,7 @@ public:
         for (unsigned dimIdx = 0; dimIdx < dimWorld; ++dimIdx)
             axisCentroids[dimIdx].resize(numElements);

-        std::vector<double> centroids;
-#if HAVE_MPI
-        size_t cells = vanguard_.grid().numCells();
-        if (global && comm.size() > 1) {
-            std::vector<size_t> sizes(comm.size());
-            if (comm.rank() == 0) {
-                const auto& eclGrid = eclState.getInputGrid();
-                comm.gather(&cells, sizes.data(), 1, 0);
-                for (int i = 1; i < comm.size(); ++i) {
-                    std::vector<int> cell_id(sizes[i]);
-                    MPI_Recv(cell_id.data(), sizes[i], MPI_INT,
-                             i, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
-                    centroids.resize(dimWorld * sizes[i]);
-
-                    auto cIt = centroids.begin();
-                    for (int idx : cell_id) {
-                        const auto& centroid = eclGrid.getCellCenter(idx);
-                        for (const auto& it : centroid)
-                            *cIt++ = it;
-                    }
-                    MPI_Send(centroids.data(), dimWorld * sizes[i],
-                             MPI_DOUBLE, i, 0, MPI_COMM_WORLD);
-                }
-                centroids.clear();
-            } else {
-                comm.gather(&cells, sizes.data(), 1, 0);
-                std::vector<int> cell_ids;
-                cell_ids.reserve(cells);
-                auto elemIt = gridView.template begin</*codim=*/0>();
-                const auto& elemEndIt = gridView.template end</*codim=*/0>();
-                for (; elemIt != elemEndIt; ++elemIt) {
-                    const auto& elem = *elemIt;
-                    cell_ids.push_back(cartMapper.cartesianIndex(elemMapper.index(elem)));
-                }
-                MPI_Send(cell_ids.data(), cells, MPI_INT, 0, 0, MPI_COMM_WORLD);
-                centroids.resize(cells * dimWorld);
-                MPI_Recv(centroids.data(), dimWorld * cells, MPI_DOUBLE,
-                         0, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
-            }
-        }
-#endif
+        const std::vector<double>& centroids = vanguard_.cellCentroids();

         auto elemIt = gridView.template begin</*codim=*/0>();
         const auto& elemEndIt = gridView.template end</*codim=*/0>();
diff --git a/opm/simulators/utils/ParallelEclipseState.hpp b/opm/simulators/utils/ParallelEclipseState.hpp
index 7cf307301..929fae8c6 100644
--- a/opm/simulators/utils/ParallelEclipseState.hpp
+++ b/opm/simulators/utils/ParallelEclipseState.hpp
@@ -20,7 +20,6 @@
 #define PARALLEL_ECLIPSE_STATE_HPP

 #include
-//#include
 #include

 namespace Opm {
@@ -41,7 +40,7 @@ public:
     friend class ParallelEclipseState; //!< Friend so props can be setup.
     //! \brief Friend to set up props
     template<class Grid>
-    friend class FieldPropsDataHandle;
+    friend class PropsCentroidsDataHandle;

     //! \brief Constructor.
     //! \param manager The field property manager to wrap.
@@ -106,7 +105,7 @@
 class ParallelEclipseState : public EclipseState {
     //! \brief Friend to set up props
     template<class Grid>
-    friend class FieldPropsDataHandle;
+    friend class PropsCentroidsDataHandle;
 public:
     //! \brief Default constructor.
     ParallelEclipseState();
diff --git a/opm/simulators/utils/FieldPropsDataHandle.hpp b/opm/simulators/utils/PropsCentroidsDataHandle.hpp
similarity index 73%
rename from opm/simulators/utils/FieldPropsDataHandle.hpp
rename to opm/simulators/utils/PropsCentroidsDataHandle.hpp
index 25b2dfef9..08a6f370e 100644
--- a/opm/simulators/utils/FieldPropsDataHandle.hpp
+++ b/opm/simulators/utils/PropsCentroidsDataHandle.hpp
@@ -23,8 +23,8 @@
  * \author Markus Blatt, OPM-OP AS
  */

-#ifndef FIELDPROPS_DATAHANDLE_HPP
-#define FIELDPROPS_DATAHANDLE_HPP
+#ifndef PROPS_CENTROIDS_DATAHANDLE_HPP
+#define PROPS_CENTROIDS_DATAHANDLE_HPP

 #if HAVE_MPI
 #include
@@ -35,17 +35,18 @@
 #include
 #include
 #include
+
 namespace Opm
 {

 /*!
- * \brief A Data handle t communicate the field properties during load balance.
+ * \brief A Data handle to communicate the field properties and cell centroids during load balance.
  * \tparam Grid The type of grid where the load balancing is happening.
  * \todo Maybe specialize this for CpGrid to save some space, later.
  */
 template<class Grid>
-class FieldPropsDataHandle
-    : public Dune::CommDataHandleIF< FieldPropsDataHandle<Grid>, double>
+class PropsCentroidsDataHandle
+    : public Dune::CommDataHandleIF< PropsCentroidsDataHandle<Grid>, double>
 {
 public:
     //! \brief the data type we send (ints are converted to double)
@@ -55,8 +56,17 @@ public:
     //! \param grid The grid where the loadbalancing is happening.
     //! \param globalProps The field properties of the global grid
     //! \param distributedProps The distributed field properties
-    FieldPropsDataHandle(const Grid& grid, ParallelEclipseState& eclState)
-        : m_grid(grid), m_distributed_fieldProps(eclState.m_fieldProps)
+    //! \param eclGridOnRoot A pointer to the eclipse grid on rank zero,
+    //!                      nullptr otherwise.
+    //! \param centroids Array to store the centroids in upon destruction
+    //!                  of the object.
+    //! \param cartMapper The cartesian index mapper of the grid.
+    PropsCentroidsDataHandle(const Grid& grid, ParallelEclipseState& eclState,
+                             const EclipseGrid* eclGridOnRoot,
+                             std::vector<double>& centroids,
+                             const typename Dune::CartesianIndexMapper<Grid>& cartMapper)
+        : m_grid(grid), m_distributed_fieldProps(eclState.m_fieldProps),
+          m_centroids(centroids)
     {
         // Scatter the keys
         const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
@@ -75,7 +85,8 @@
             comm.broadcast(buffer.data(), position, 0);

             // copy data to persistent map based on local id
-            auto noData = m_intKeys.size() + m_doubleKeys.size();
+            m_no_data = m_intKeys.size() + m_doubleKeys.size() +
+                Grid::dimensionworld;
             const auto& idSet = m_grid.localIdSet();
             const auto& gridView = m_grid.levelGridView(0);
             using ElementMapper =
@@ -87,13 +98,18 @@
                 const auto& id = idSet.id(element);
                 auto index = elemMapper.index(element);
                 auto& data = elementData_[id];
-                data.reserve(noData);
+                data.reserve(m_no_data);

-                for(const auto& intKey : m_intKeys)
+                for (const auto& intKey : m_intKeys)
                     data.push_back(globalProps.get_int(intKey)[index]);

-                for(const auto& doubleKey : m_doubleKeys)
+                for (const auto& doubleKey : m_doubleKeys)
                     data.push_back(globalProps.get_double(doubleKey)[index]);
+
+                auto cartIndex = cartMapper.cartesianIndex(index);
+                const auto& center = eclGridOnRoot->getCellCenter(cartIndex);
+                for (int dim = 0; dim < Grid::dimensionworld; ++dim)
+                    data.push_back(center[dim]);
             }
         }
         else
@@ -105,12 +121,12 @@
             int position{};
             Mpi::unpack(m_intKeys, buffer, position, comm);
             Mpi::unpack(m_doubleKeys, buffer, position, comm);
+            m_no_data = m_intKeys.size() + m_doubleKeys.size() +
+                Grid::dimensionworld;
         }
     }

-    FieldPropsDataHandle(const FieldPropsDataHandle&) = delete;
-
-    ~FieldPropsDataHandle()
+    ~PropsCentroidsDataHandle()
     {
         // distributed grid is now correctly set up.
         for(const auto& intKey : m_intKeys)
             m_distributed_fieldProps.m_intProps[intKey].resize(m_grid.size(0));

         for(const auto& doubleKey : m_doubleKeys)
             m_distributed_fieldProps.m_doubleProps[doubleKey].resize(m_grid.size(0));

+        m_centroids.resize(m_grid.size(0) * Grid::dimensionworld);
+
         // copy data for the persistent mao to the field properties
         const auto& idSet = m_grid.localIdSet();
         const auto& gridView = m_grid.levelGridView(0);
         using ElementMapper =
@@ -139,6 +157,11 @@
             for(const auto& doubleKey : m_doubleKeys)
                 m_distributed_fieldProps.m_doubleProps[doubleKey][index] = data->second[counter++];
+
+            auto centroidIter = m_centroids.begin() + Grid::dimensionworld * index;
+            auto centroidIterEnd = centroidIter + Grid::dimensionworld;
+            for ( ; centroidIter != centroidIterEnd; ++centroidIter )
+                *centroidIter = data->second[counter++];
         }
     }
@@ -159,7 +182,7 @@
     template<class EntityType>
     std::size_t size(const EntityType /* entity */)
     {
-        return m_intKeys.size() + m_doubleKeys.size();
+        return m_no_data;
     }

     template<class BufferType, class EntityType>
@@ -174,7 +197,7 @@
     template<class BufferType, class EntityType>
     void scatter(BufferType& buffer, const EntityType& e, std::size_t n)
     {
-        assert(n == m_intKeys.size() + m_doubleKeys.size());
+        assert(n == m_no_data);
         auto& array = rcvdElementData_[m_grid.localIdSet().id(e)];
         array.resize(n);
         for(auto& data: array)
@@ -194,11 +217,19 @@
     std::vector<std::string> m_doubleKeys;
     /// \brief The data per element as a vector mapped from the local id.
     std::unordered_map > elementData_;
-    /// \brief The data per element as a vector mapped from the local id.
+    /*! \brief The data received per element as a vector mapped from the local id.
+     *
+     * Needed because CpGrid is in violation of the requirement that local ids
+     * need to be persistent across grid modifications.
+     */
     std::unordered_map > rcvdElementData_;
+    /// \brief The cell centroids of the distributed grid.
+    std::vector<double>& m_centroids;
+    /// \brief The amount of data to send for each element
+    std::size_t m_no_data;
 };

 } // end namespace Opm
 #endif // HAVE_MPI
-#endif // FIELDPROPS_DATAHANDLE_HPP
+#endif // PROPS_CENTROIDS_DATAHANDLE_HPP
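
For reference, the handle above packs one flat buffer of doubles per element in a fixed order: one value per integer property key, then one per double property key, then Grid::dimensionworld centroid coordinates. The following standalone sketch (plain C++, no Dune types; the packElement/unpackElement helpers are illustrative only and not part of this patch) shows that layout and the counter-based unpacking the destructor relies on:

    #include <cassert>
    #include <cstddef>
    #include <vector>

    // Illustrative only: mirror the per-element packing order used by
    // PropsCentroidsDataHandle (ints first, then doubles, then the centroid).
    std::vector<double> packElement(const std::vector<int>& intProps,
                                    const std::vector<double>& doubleProps,
                                    const std::vector<double>& centroid)
    {
        std::vector<double> data;
        data.reserve(intProps.size() + doubleProps.size() + centroid.size());
        for (int v : intProps)
            data.push_back(v);          // ints are converted to double
        for (double v : doubleProps)
            data.push_back(v);
        for (double c : centroid)
            data.push_back(c);
        return data;
    }

    // Illustrative only: unpack with a running counter, exactly as the
    // destructor walks data->second with counter++.
    void unpackElement(const std::vector<double>& data,
                       std::vector<int>& intProps,
                       std::vector<double>& doubleProps,
                       std::vector<double>& centroid)
    {
        std::size_t counter = 0;
        for (auto& v : intProps)
            v = static_cast<int>(data[counter++]);
        for (auto& v : doubleProps)
            v = data[counter++];
        for (auto& c : centroid)
            c = data[counter++];
        assert(counter == data.size());
    }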
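
On the consuming side, EclTransmissibility now reads centroids straight from the flat array returned by cellCentroids(): the coordinates of the element with local index elemIdx occupy slots elemIdx*dimWorld through elemIdx*dimWorld + dimWorld - 1. A small sketch of that access pattern (the helper name centroidOf is an assumption for illustration, not part of the patch):

    #include <array>
    #include <cstddef>
    #include <vector>

    // Illustrative helper: read one cell centroid from the flat centroid array.
    template <int dimWorld>
    std::array<double, dimWorld> centroidOf(const std::vector<double>& centroids,
                                            std::size_t elemIdx)
    {
        std::array<double, dimWorld> result{};
        for (int dim = 0; dim < dimWorld; ++dim)
            result[dim] = centroids[elemIdx * dimWorld + dim];
        return result;
    }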