diff --git a/CMakeLists_files.cmake b/CMakeLists_files.cmake
index d22649929..328e43bfa 100644
--- a/CMakeLists_files.cmake
+++ b/CMakeLists_files.cmake
@@ -175,6 +175,7 @@ list (APPEND PUBLIC_HEADER_FILES
   opm/simulators/utils/ParallelFileMerger.hpp
   opm/simulators/utils/DeferredLoggingErrorHelpers.hpp
   opm/simulators/utils/DeferredLogger.hpp
+  opm/simulators/utils/FieldPropsDataHandle.hpp
   opm/simulators/utils/gatherDeferredLogger.hpp
   opm/simulators/utils/moduleVersion.hpp
   opm/simulators/utils/ParallelEclipseState.hpp
diff --git a/ebos/eclcpgridvanguard.hh b/ebos/eclcpgridvanguard.hh
index baa67c333..c0082d251 100644
--- a/ebos/eclcpgridvanguard.hh
+++ b/ebos/eclcpgridvanguard.hh
@@ -34,6 +34,7 @@
 #include <opm/grid/CpGrid.hpp>
 #include <opm/grid/cpgrid/CartesianIndexMapper.hpp>
 #include <opm/simulators/utils/ParallelEclipseState.hpp>
+#include <opm/simulators/utils/FieldPropsDataHandle.hpp>
 #include <opm/grid/cpgrid/GridHelpers.hpp>
 
@@ -188,7 +189,9 @@ public:
         //distribute the grid and switch to the distributed view.
         {
             const auto wells = this->schedule().getWellsatEnd();
-            defunctWellNames_ = std::get<1>(grid_->loadBalance(edgeWeightsMethod, &wells, faceTrans.data()));
+            auto& eclState = static_cast<ParallelEclipseState&>(this->eclState());
+            FieldPropsDataHandle<Dune::CpGrid> handle(*grid_, eclState);
+            defunctWellNames_ = std::get<1>(grid_->loadBalance(handle, edgeWeightsMethod, &wells, faceTrans.data()));
         }
         grid_->switchToDistributedView();
@@ -210,14 +213,6 @@ public:
         this->updateGridView_();
 #if HAVE_MPI
         if (mpiSize > 1) {
-            std::vector<int> cartIndices;
-            cartIndices.reserve(grid_->numCells());
-            auto locElemIt = this->gridView().template begin<0>();
-            const auto& locElemEndIt = this->gridView().template end<0>();
-            for (; locElemIt != locElemEndIt; ++locElemIt) {
-                cartIndices.push_back(cartesianIndexMapper_->cartesianIndex(locElemIt->index()));
-            }
-            static_cast<ParallelEclipseState&>(this->eclState()).setupLocalProps(cartIndices);
             static_cast<ParallelEclipseState&>(this->eclState()).switchToDistributedProps();
         }
 #endif
diff --git a/opm/simulators/utils/FieldPropsDataHandle.hpp b/opm/simulators/utils/FieldPropsDataHandle.hpp
new file mode 100644
index 000000000..a8101e78f
--- /dev/null
+++ b/opm/simulators/utils/FieldPropsDataHandle.hpp
@@ -0,0 +1,201 @@
+/*
+  Copyright 2020 Equinor AS.
+
+  This file is part of the Open Porous Media project (OPM).
+
+  OPM is free software: you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation, either version 3 of the License, or
+  (at your option) any later version.
+
+  OPM is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with OPM. If not, see <http://www.gnu.org/licenses/>.
+*/
+/*!
+ * \file FieldPropsDataHandle.hpp
+ * \brief File containing a data handle for communicating the FieldProperties
+ *
+ * \author Markus Blatt, OPM-OP AS
+ */
+
+#ifndef FIELDPROPS_DATAHANDLE_HPP
+#define FIELDPROPS_DATAHANDLE_HPP
+
+#include <opm/simulators/utils/ParallelEclipseState.hpp>
+#include <opm/simulators/utils/ParallelRestart.hpp>
+#include <dune/grid/common/datahandleif.hh>
+#include <dune/grid/common/mcmgmapper.hh>
+#include <dune/grid/common/partitionset.hh>
+#include <dune/grid/common/rangegenerators.hh>
+#include <dune/common/parallel/mpihelper.hh>
+#include <cassert>
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+namespace Opm
+{
+
+/*!
+ * \brief A data handle to communicate the field properties during load balance.
+ * \tparam Grid The type of grid where the load balancing is happening.
+ * \todo Maybe specialize this for CpGrid to save some space, later.
+ */
+template<class Grid>
+class FieldPropsDataHandle
+    : public Dune::CommDataHandleIF< FieldPropsDataHandle<Grid>, double>
+{
+public:
+    //! \brief the data type we send (ints are converted to double)
+    using DataType = double;
+
+    //! \brief Constructor
+    //! \param grid The grid where the load balancing is happening.
+    //! \param eclState The eclipse state that provides the global field
+    //!                 properties on the root rank and receives the
+    //!                 distributed properties on every rank.
+    FieldPropsDataHandle(const Grid& grid, ParallelEclipseState& eclState)
+        : m_grid(grid), m_distributed_fieldProps(eclState.m_fieldProps)
+    {
+        // Scatter the keys
+        const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
+        if (comm.rank() == 0)
+        {
+            const auto& globalProps = eclState.globalFieldProps();
+            m_intKeys = globalProps.keys<int>();
+            m_doubleKeys = globalProps.keys<double>();
+            std::size_t packSize = Mpi::packSize(m_intKeys, comm) +
+                Mpi::packSize(m_doubleKeys, comm);
+            std::vector<char> buffer(packSize);
+            int position = 0;
+            Mpi::pack(m_intKeys, buffer, position, comm);
+            Mpi::pack(m_doubleKeys, buffer, position, comm);
+            comm.broadcast(&position, 1, 0);
+            comm.broadcast(buffer.data(), position, 0);
+
+            // copy data to persistent map based on local id
+            auto noData = m_intKeys.size() + m_doubleKeys.size();
+            const auto& idSet = m_grid.localIdSet();
+            const auto& gridView = m_grid.levelGridView(0);
+            using ElementMapper =
+                Dune::MultipleCodimMultipleGeomTypeMapper<typename Grid::LevelGridView>;
+            ElementMapper elemMapper(gridView, Dune::mcmgElementLayout());
+
+            for( const auto &element : elements( gridView, Dune::Partitions::interiorBorder ) )
+            {
+                const auto& id = idSet.id(element);
+                auto index = elemMapper.index(element);
+                auto& data = elementData_[id];
+                data.reserve(noData);
+
+                for(const auto& intKey : m_intKeys)
+                    data.push_back(globalProps.get_int(intKey)[index]);
+
+                for(const auto& doubleKey : m_doubleKeys)
+                    data.push_back(globalProps.get_double(doubleKey)[index]);
+            }
+        }
+        else
+        {
+            int bufferSize;
+            comm.broadcast(&bufferSize, 1, 0);
+            std::vector<char> buffer(bufferSize);
+            comm.broadcast(buffer.data(), bufferSize, 0);
+            int position{};
+            Mpi::unpack(m_intKeys, buffer, position, comm);
+            Mpi::unpack(m_doubleKeys, buffer, position, comm);
+        }
+    }
+
+    FieldPropsDataHandle(const FieldPropsDataHandle&) = delete;
+
+    ~FieldPropsDataHandle()
+    {
+        // The distributed grid is now correctly set up.
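+        // Dune::CommDataHandleIF offers no post-communication hook, so the
+        // received values are copied into the distributed property manager
+        // here, once loadBalance() has called scatter() for every element of
+        // the new partition.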
+        for(const auto& intKey : m_intKeys)
+            m_distributed_fieldProps.m_intProps[intKey].resize(m_grid.size(0));
+
+        for(const auto& doubleKey : m_doubleKeys)
+            m_distributed_fieldProps.m_doubleProps[doubleKey].resize(m_grid.size(0));
+
+        // copy data from the persistent map to the field properties
+        const auto& idSet = m_grid.localIdSet();
+        const auto& gridView = m_grid.levelGridView(0);
+        using ElementMapper =
+            Dune::MultipleCodimMultipleGeomTypeMapper<typename Grid::LevelGridView>;
+        ElementMapper elemMapper(gridView, Dune::mcmgElementLayout());
+
+        for( const auto &element : elements( gridView, Dune::Partitions::all ) )
+        {
+            std::size_t counter{};
+            const auto& id = idSet.id(element);
+            auto index = elemMapper.index(element);
+            auto data = rcvdElementData_.find(id);
+            assert(data != rcvdElementData_.end());
+
+            for(const auto& intKey : m_intKeys)
+                m_distributed_fieldProps.m_intProps[intKey][index] = static_cast<int>(data->second[counter++]);
+
+            for(const auto& doubleKey : m_doubleKeys)
+                m_distributed_fieldProps.m_doubleProps[doubleKey][index] = data->second[counter++];
+        }
+    }
+
+    bool contains(int /* dim */, int codim)
+    {
+        return codim == 0;
+    }
+
+    //! \brief The interface method was renamed from fixedsize to fixedSize in
+    //!        Dune 2.7, hence we provide both spellings.
+    bool fixedsize(int /* dim */, int /* codim */)
+    {
+        return true;
+    }
+
+    bool fixedSize(int /* dim */, int /* codim */)
+    {
+        return true;
+    }
+
+    template<class EntityType>
+    std::size_t size(const EntityType& /* entity */)
+    {
+        return m_intKeys.size() + m_doubleKeys.size();
+    }
+
+    template<class BufferType, class EntityType>
+    void gather(BufferType& buffer, const EntityType& e) const
+    {
+        auto iter = elementData_.find(m_grid.localIdSet().id(e));
+        assert(iter != elementData_.end());
+        for(const auto& data : iter->second)
+            buffer.write(data);
+    }
+
+    template<class BufferType, class EntityType>
+    void scatter(BufferType& buffer, const EntityType& e, std::size_t n)
+    {
+        assert(n == m_intKeys.size() + m_doubleKeys.size());
+        auto& array = rcvdElementData_[m_grid.localIdSet().id(e)];
+        array.resize(n);
+        for(auto& data : array)
+        {
+            buffer.read(data);
+        }
+    }
+
+private:
+    using LocalIdSet = typename Grid::LocalIdSet;
+    const Grid& m_grid;
+    //! \brief The distributed field properties for receiving
+    ParallelFieldPropsManager& m_distributed_fieldProps;
+    //! \brief The names of the keys of the integer fields.
+    std::vector<std::string> m_intKeys;
+    //! \brief The names of the keys of the double fields.
+    std::vector<std::string> m_doubleKeys;
+    /// \brief The data gathered on the sending side, one vector per element, mapped from the local id.
+    std::unordered_map<typename LocalIdSet::IdType, std::vector<double> > elementData_;
+    /// \brief The data received in scatter(), one vector per element, mapped from the local id.
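+    /// \details Filled by scatter() during load balancing and copied into the
+    ///          distributed FieldPropsManager by the destructor.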
+    std::unordered_map<typename LocalIdSet::IdType, std::vector<double> > rcvdElementData_;
+};
+
+} // end namespace Opm
+#endif // FIELDPROPS_DATAHANDLE_HPP
diff --git a/opm/simulators/utils/ParallelEclipseState.cpp b/opm/simulators/utils/ParallelEclipseState.cpp
index 70fc76523..b7db316d1 100644
--- a/opm/simulators/utils/ParallelEclipseState.cpp
+++ b/opm/simulators/utils/ParallelEclipseState.cpp
@@ -232,125 +232,4 @@ void ParallelEclipseState::switchToDistributedProps()
     m_parProps = true;
 }
 
-
-#if HAVE_MPI
-namespace {
-
-
-template<class T>
-struct GetField {
-    GetField(const FieldPropsManager& propMan) : props(propMan) {}
-    std::vector<T> getField(const std::string& key) const;
-    const FieldPropsManager& props;
-};
-
-
-template<>
-std::vector<int> GetField<int>::getField(const std::string& key) const {
-    return props.get_global_int(key);
-}
-
-
-template<>
-std::vector<double> GetField<double>::getField(const std::string& key) const {
-    return props.get_global_double(key);
-}
-
-
-template<class T>
-void extractRootProps(const std::vector<int>& localToGlobal,
-                      const std::vector<std::string>& keys,
-                      const GetField<T>& getter,
-                      std::map<std::string, std::vector<T>>& localMap)
-{
-    for (const std::string& key : keys) {
-        auto prop = getter.getField(key);
-        std::vector<T>& local = localMap[key];
-        local.reserve(localToGlobal.size());
-        for (int cell : localToGlobal) {
-            local.push_back(prop[cell]);
-        }
-    }
-}
-
-
-template<class T>
-void packProps(const std::vector<int>& l2gCell,
-               const std::vector<std::string>& keys,
-               const GetField<T>& getter,
-               std::vector<char>& buffer, int& position)
-{
-    const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
-    std::vector<T> sendData(l2gCell.size());
-    for (const std::string& key : keys) {
-        auto prop = getter.getField(key);
-        size_t idx = 0;
-        for (int cell : l2gCell)
-            sendData[idx++] = prop[cell];
-        Mpi::pack(sendData, buffer, position, comm);
-    }
-}
-
-
-}
-
-
-void ParallelEclipseState::setupLocalProps(const std::vector<int>& localToGlobal)
-{
-    const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
-    if (comm.rank() == 0) {
-        extractRootProps(localToGlobal, this->globalFieldProps().keys<int>(),
-                         GetField<int>(this->globalFieldProps()),
-                         m_fieldProps.m_intProps);
-        extractRootProps(localToGlobal, this->globalFieldProps().keys<double>(),
-                         GetField<double>(this->globalFieldProps()),
-                         m_fieldProps.m_doubleProps);
-        for (int i = 1; i < comm.size(); ++i) {
-            std::vector<int> l2gCell;
-            size_t size;
-            MPI_Recv(&size, 1, Dune::MPITraits<size_t>::getType(), i, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
-            l2gCell.resize(size);
-            MPI_Recv(l2gCell.data(), size, MPI_INT, i, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
-            size_t cells = l2gCell.size();
-            const auto& intKeys = this->globalFieldProps().keys<int>();
-            const auto& dblKeys = this->globalFieldProps().keys<double>();
-            size = Mpi::packSize(intKeys, comm) +
-                   Mpi::packSize(dblKeys, comm) +
-                   intKeys.size() * Mpi::packSize(std::vector<int>(cells), comm) +
-                   dblKeys.size() * Mpi::packSize(std::vector<double>(cells), comm);
-
-            std::vector<char> buffer(size);
-            int position = 0;
-            Mpi::pack(intKeys, buffer, position, comm);
-            Mpi::pack(dblKeys, buffer, position, comm);
-            packProps(l2gCell, intKeys, GetField<int>(this->globalFieldProps()),
-                      buffer, position);
-            packProps(l2gCell, dblKeys, GetField<double>(this->globalFieldProps()),
-                      buffer, position);
-            MPI_Send(&position, 1, MPI_INT, i, 0, MPI_COMM_WORLD);
-            MPI_Send(buffer.data(), position, MPI_CHAR, i, 0, MPI_COMM_WORLD);
-        }
-    } else {
-        size_t l2gSize = localToGlobal.size();
-        MPI_Send(&l2gSize, 1, Dune::MPITraits<size_t>::getType(), 0, 0, MPI_COMM_WORLD);
-        MPI_Send(localToGlobal.data(), localToGlobal.size(), MPI_INT, 0, 0, MPI_COMM_WORLD);
-        int size;
-        MPI_Recv(&size, 1, MPI_INT, 0, 0, MPI_COMM_WORLD,
-                 MPI_STATUS_IGNORE);
-        std::vector<char> buffer(size);
-        MPI_Recv(buffer.data(), size, MPI_CHAR, 0, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
-        std::vector<std::string> intKeys, dblKeys;
-        int position = 0;
-        Mpi::unpack(intKeys, buffer, position, comm);
-        Mpi::unpack(dblKeys, buffer, position, comm);
-        for (const std::string& key : intKeys) {
-            Mpi::unpack(m_fieldProps.m_intProps[key], buffer, position, comm);
-        }
-        for (const std::string& key : dblKeys) {
-            Mpi::unpack(m_fieldProps.m_doubleProps[key], buffer, position, comm);
-        }
-    }
-}
-#endif
-
-
 } // end namespace Opm
diff --git a/opm/simulators/utils/ParallelEclipseState.hpp b/opm/simulators/utils/ParallelEclipseState.hpp
index f41114b09..7cf307301 100644
--- a/opm/simulators/utils/ParallelEclipseState.hpp
+++ b/opm/simulators/utils/ParallelEclipseState.hpp
@@ -20,6 +20,7 @@
 #define PARALLEL_ECLIPSE_STATE_HPP
 
 #include <opm/parser/eclipse/EclipseState/EclipseState.hpp>
+//#include <opm/simulators/utils/FieldPropsDataHandle.hpp>
 #include <dune/common/parallel/mpihelper.hh>
 
 namespace Opm {
@@ -38,6 +39,9 @@ class EclMpiSerializer;
 class ParallelFieldPropsManager : public FieldPropsManager {
 public:
     friend class ParallelEclipseState; //!< Friend so props can be setup.
+    //! \brief Friend to set up props
+    template<class Grid>
+    friend class FieldPropsDataHandle;
 
     //! \brief Constructor.
     //! \param manager The field property manager to wrap.
@@ -100,6 +104,9 @@ protected:
  */
 class ParallelEclipseState : public EclipseState {
+    //! \brief Friend to set up props
+    template<class Grid>
+    friend class FieldPropsDataHandle;
 public:
     //! \brief Default constructor.
     ParallelEclipseState();
@@ -135,13 +142,6 @@ public:
     //! setupLocalProps must be called prior to this.
     void switchToDistributedProps();
 
-#if HAVE_MPI
-    //! \brief Setup local properties.
-    //! \param localToGlobal Map from local cells on calling process to global cartesian cell
-    //! \details Must be called after grid has been paritioned
-    void setupLocalProps(const std::vector<int>& localToGlobal);
-#endif
-
     //! \brief Returns a const ref to current field properties.
     const FieldPropsManager& fieldProps() const override;
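For reference, the intended call pattern of the new handle, mirroring the change
to eclcpgridvanguard.hh above. This is a minimal sketch, not part of the patch:
the helper name distributeFieldProps and the parameter types other than the grid
and the eclipse state are illustrative placeholders for the values the vanguard
computes itself.

    #include <opm/grid/CpGrid.hpp>
    #include <opm/simulators/utils/FieldPropsDataHandle.hpp>
    #include <opm/simulators/utils/ParallelEclipseState.hpp>
    #include <vector>

    template<class EdgeWeightsMethod, class WellVector>
    void distributeFieldProps(Dune::CpGrid& grid,
                              Opm::ParallelEclipseState& eclState,
                              EdgeWeightsMethod edgeWeightsMethod,
                              const WellVector& wells,
                              const std::vector<double>& faceTrans)
    {
        {
            // The constructor broadcasts the property keys and, on the root
            // rank, stages the global values per element.
            Opm::FieldPropsDataHandle<Dune::CpGrid> handle(grid, eclState);
            // loadBalance() calls gather()/scatter() while redistributing cells.
            grid.loadBalance(handle, edgeWeightsMethod, &wells, faceTrans.data());
        }   // Leaving the scope destroys the handle, which writes the received
            // values into the distributed property maps.
        grid.switchToDistributedView();
        eclState.switchToDistributedProps();
    }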