From 55b5ebd3df058bbdcd2a7e94ec8a36a7d6ad44f7 Mon Sep 17 00:00:00 2001 From: Arne Morten Kvarving Date: Tue, 25 Feb 2020 12:32:46 +0100 Subject: [PATCH 1/7] add mpi serialization for GridDims --- opm/simulators/utils/ParallelRestart.cpp | 23 +++++++++++++++++++++++ opm/simulators/utils/ParallelRestart.hpp | 2 ++ tests/test_ParallelRestart.cpp | 10 ++++++++++ 3 files changed, 35 insertions(+) diff --git a/opm/simulators/utils/ParallelRestart.cpp b/opm/simulators/utils/ParallelRestart.cpp index 24e2ac6f1..581c6bf25 100644 --- a/opm/simulators/utils/ParallelRestart.cpp +++ b/opm/simulators/utils/ParallelRestart.cpp @@ -2037,6 +2037,12 @@ std::size_t packSize(const SolventDensityTable& data, return packSize(data.getSolventDensityColumn(), comm); } +std::size_t packSize(const GridDims& data, + Dune::MPIHelper::MPICommunicator comm) +{ + return packSize(data.getNXYZ(), comm); +} + ////// pack routines template @@ -3956,6 +3962,13 @@ void pack(const SolventDensityTable& data, pack(data.getSolventDensityColumn(), buffer, position, comm); } +void pack(const GridDims& data, + std::vector& buffer, int& position, + Dune::MPIHelper::MPICommunicator comm) +{ + pack(data.getNXYZ(), buffer, position, comm); +} + /// unpack routines template @@ -6722,6 +6735,16 @@ void unpack(SolventDensityTable& data, std::vector& buffer, int& position, data = SolventDensityTable(tableValues); } +void unpack(GridDims& data, + std::vector& buffer, int& position, + Dune::MPIHelper::MPICommunicator comm) +{ + std::array NXYZ; + + unpack(NXYZ, buffer, position, comm); + data = GridDims(NXYZ); +} + #define INSTANTIATE_PACK_VECTOR(...) 
\ template std::size_t packSize(const std::vector<__VA_ARGS__>& data, \ Dune::MPIHelper::MPICommunicator comm); \ diff --git a/opm/simulators/utils/ParallelRestart.hpp b/opm/simulators/utils/ParallelRestart.hpp index a8f8cd36f..acfcc09e5 100644 --- a/opm/simulators/utils/ParallelRestart.hpp +++ b/opm/simulators/utils/ParallelRestart.hpp @@ -103,6 +103,7 @@ class FaultCollection; class FaultFace; class FoamConfig; class FoamData; +class GridDims; class InitConfig; class IOConfig; template class IOrderSet; @@ -721,6 +722,7 @@ ADD_PACK_PROTOTYPES(GConSale) ADD_PACK_PROTOTYPES(GConSale::GCONSALEGroup) ADD_PACK_PROTOTYPES(GConSump) ADD_PACK_PROTOTYPES(GConSump::GCONSUMPGroup) +ADD_PACK_PROTOTYPES(GridDims) ADD_PACK_PROTOTYPES(GuideRateConfig) ADD_PACK_PROTOTYPES(GuideRateConfig::GroupTarget) ADD_PACK_PROTOTYPES(GuideRateConfig::WellTarget) diff --git a/tests/test_ParallelRestart.cpp b/tests/test_ParallelRestart.cpp index 2c38999bd..fc2fb47e5 100644 --- a/tests/test_ParallelRestart.cpp +++ b/tests/test_ParallelRestart.cpp @@ -2520,6 +2520,16 @@ BOOST_AUTO_TEST_CASE(SolventDensityTable) } +BOOST_AUTO_TEST_CASE(GridDims) +{ +#ifdef HAVE_MPI + Opm::GridDims val1{ 1, 2, 3}; + auto val2 = PackUnpack(val1); + DO_CHECKS(GridDims) +#endif +} + + bool init_unit_test_func() { return true; From ef89d656fb09c0312917701f1a2f66361e13a39d Mon Sep 17 00:00:00 2001 From: Arne Morten Kvarving Date: Wed, 29 Jan 2020 16:46:27 +0100 Subject: [PATCH 2/7] communicate global size setting up blackoil extension boundary conditions --- ebos/eclproblem.hh | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/ebos/eclproblem.hh b/ebos/eclproblem.hh index db61bc199..9cb0f0235 100644 --- a/ebos/eclproblem.hh +++ b/ebos/eclproblem.hh @@ -2753,11 +2753,16 @@ private: const auto& simulator = this->simulator(); const auto& vanguard = simulator.vanguard(); const auto& eclState = vanguard.eclState(); + const auto& comm = vanguard.gridView().comm(); size_t numDof = 
this->model().numGridDof(); + size_t globalSize; + if (comm.rank() == 0) + globalSize = eclState.getInputGrid().getCartesianSize(); + comm.broadcast(&globalSize, 1, 0); if (enableSolvent) { - std::vector solventSaturationData(eclState.getInputGrid().getCartesianSize(), 0.0); + std::vector solventSaturationData(globalSize, 0.0); if (eclState.fieldProps().has_double("SSOL")) solventSaturationData = eclState.fieldProps().get_global_double("SSOL"); @@ -2771,7 +2776,7 @@ private: } if (enablePolymer) { - std::vector polyConcentrationData(eclState.getInputGrid().getCartesianSize(), 0.0); + std::vector polyConcentrationData(globalSize, 0.0); if (eclState.fieldProps().has_double("SPOLY")) polyConcentrationData = eclState.fieldProps().get_global_double("SPOLY"); @@ -2785,7 +2790,7 @@ private: } if (enablePolymerMolarWeight) { - std::vector polyMoleWeightData(eclState.getInputGrid().getCartesianSize(), 0.0); + std::vector polyMoleWeightData(globalSize, 0.0); if (eclState.fieldProps().has_double("SPOLYMW")) polyMoleWeightData = eclState.fieldProps().get_global_double("SPOLYMW"); polymerMoleWeight_.resize(numDof, 0.0); From ac28bbfd1573de0555b9c627c9cc18c6135cdf14 Mon Sep 17 00:00:00 2001 From: Arne Morten Kvarving Date: Wed, 15 Jan 2020 14:36:52 +0100 Subject: [PATCH 3/7] changed: avoid usage of eclipseGrid on non-root processes setting up transmissibilities obtain data from root process --- ebos/eclcpgridvanguard.hh | 2 +- ebos/eclproblem.hh | 2 +- ebos/ecltransmissibility.hh | 78 ++++++++++++++++++++++++++++++++----- 3 files changed, 70 insertions(+), 12 deletions(-) diff --git a/ebos/eclcpgridvanguard.hh b/ebos/eclcpgridvanguard.hh index 6e26ba43d..5ac5decdc 100644 --- a/ebos/eclcpgridvanguard.hh +++ b/ebos/eclcpgridvanguard.hh @@ -151,7 +151,7 @@ public: if (grid_->size(0)) { globalTrans_.reset(new EclTransmissibility(*this)); - globalTrans_->update(); + globalTrans_->update(false); } Dune::EdgeWeightMethod edgeWeightsMethod = this->edgeWeightsMethod(); diff --git 
a/ebos/eclproblem.hh b/ebos/eclproblem.hh index 9cb0f0235..a1c418f0f 100644 --- a/ebos/eclproblem.hh +++ b/ebos/eclproblem.hh @@ -815,7 +815,7 @@ public: eclState.applyModifierDeck(miniDeck); // re-compute all quantities which may possibly be affected. - transmissibilities_.update(); + transmissibilities_.update(true); referencePorosity_[1] = referencePorosity_[0]; updateReferencePorosity_(); updatePffDofData_(); diff --git a/ebos/ecltransmissibility.hh b/ebos/ecltransmissibility.hh index b8fba39a4..c9cd29c83 100644 --- a/ebos/ecltransmissibility.hh +++ b/ebos/ecltransmissibility.hh @@ -117,22 +117,23 @@ public: * either but at least it seems to be much better. */ void finishInit() - { update(); } + { update(true); } /*! * \brief Compute all transmissibilities * + * \param global If true, update is called on all processes * Also, this updates the "thermal half transmissibilities" if energy is enabled. */ - void update() + void update(bool global) { const auto& gridView = vanguard_.gridView(); const auto& cartMapper = vanguard_.cartesianIndexMapper(); const auto& eclState = vanguard_.eclState(); - const auto& eclGrid = eclState.getInputGrid(); const auto& cartDims = cartMapper.cartesianDimensions(); auto& transMult = eclState.getTransMult(); + const auto& comm = vanguard_.gridView().comm(); ElementMapper elemMapper(gridView, Dune::mcmgElementLayout()); // get the ntg values, the ntg values are modified for the cells merged with minpv @@ -148,17 +149,66 @@ public: for (unsigned dimIdx = 0; dimIdx < dimWorld; ++dimIdx) axisCentroids[dimIdx].resize(numElements); + std::vector centroids; +#if HAVE_MPI + size_t cells = vanguard_.grid().numCells(); + if (global && comm.size() > 1) { + std::vector sizes(comm.size()); + if (comm.rank() == 0) { + const auto& eclGrid = eclState.getInputGrid(); + comm.gather(&cells, sizes.data(), 1, 0); + for (int i = 1; i < comm.size(); ++i) { + std::vector cell_id(sizes[i]); + MPI_Recv(cell_id.data(), sizes[i], MPI_INT, + i, 0, 
MPI_COMM_WORLD, MPI_STATUS_IGNORE); + centroids.resize(dimWorld * sizes[i]); + + auto cIt = centroids.begin(); + for (int idx : cell_id) { + const auto& centroid = eclGrid.getCellCenter(idx); + for (const auto& it : centroid) + *cIt++ = it; + } + MPI_Send(centroids.data(), dimWorld * sizes[i], + MPI_DOUBLE, i, 0, MPI_COMM_WORLD); + } + centroids.clear(); + } else { + comm.gather(&cells, sizes.data(), 1, 0); + std::vector cell_ids; + cell_ids.reserve(cells); + auto elemIt = gridView.template begin(); + const auto& elemEndIt = gridView.template end(); + for (; elemIt != elemEndIt; ++elemIt) { + const auto& elem = *elemIt; + cell_ids.push_back(cartMapper.cartesianIndex(elemMapper.index(elem))); + } + MPI_Send(cell_ids.data(), cells, MPI_INT, 0, 0, MPI_COMM_WORLD); + centroids.resize(cells * dimWorld); + MPI_Recv(centroids.data(), dimWorld * cells, MPI_DOUBLE, + 0, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE); + } + } +#endif + auto elemIt = gridView.template begin(); const auto& elemEndIt = gridView.template end(); - for (; elemIt != elemEndIt; ++elemIt) { + size_t centroidIdx = 0; + for (; elemIt != elemEndIt; ++elemIt, ++centroidIdx) { const auto& elem = *elemIt; unsigned elemIdx = elemMapper.index(elem); // compute the axis specific "centroids" used for the transmissibilities. for // consistency with the flow simulator, we use the element centers as // computed by opm-parser's Opm::EclipseGrid class for all axes. 
- unsigned cartesianCellIdx = cartMapper.cartesianIndex(elemIdx); - const auto& centroid = eclGrid.getCellCenter(cartesianCellIdx); + const double* centroid; + if (vanguard_.gridView().comm().rank() == 0) { + const auto& eclGrid = eclState.getInputGrid(); + unsigned cartesianCellIdx = cartMapper.cartesianIndex(elemIdx); + centroid = &eclGrid.getCellCenter(cartesianCellIdx)[0]; + } else + centroid = ¢roids[centroidIdx * dimWorld]; + for (unsigned axisIdx = 0; axisIdx < dimWorld; ++axisIdx) for (unsigned dimIdx = 0; dimIdx < dimWorld; ++dimIdx) axisCentroids[axisIdx][elemIdx][dimIdx] = centroid[dimIdx]; @@ -181,6 +231,18 @@ public: thermalHalfTransBoundary_.clear(); } + // The MULTZ needs special case if the option is ALL + // Then the smallest multiplier is applied. + // Default is to apply the top and bottom multiplier + bool useSmallestMultiplier; + if (comm.rank() == 0) { + const auto& eclGrid = eclState.getInputGrid(); + useSmallestMultiplier = eclGrid.getMultzOption() == Opm::PinchMode::ModeEnum::ALL; + } + if (global && comm.size() > 1) { + comm.broadcast(&useSmallestMultiplier, 1, 0); + } + // compute the transmissibilities for all intersections elemIt = gridView.template begin(); for (; elemIt != elemEndIt; ++elemIt) { @@ -330,10 +392,6 @@ public: // apply the full face transmissibility multipliers // for the inside ... - // The MULTZ needs special case if the option is ALL - // Then the smallest multiplier is applied. 
- // Default is to apply the top and bottom multiplier - bool useSmallestMultiplier = eclGrid.getMultzOption() == Opm::PinchMode::ModeEnum::ALL; if (useSmallestMultiplier) applyAllZMultipliers_(trans, insideFaceIdx, insideCartElemIdx, outsideCartElemIdx, transMult, cartDims); else From 4904f09d4d3ad8dbe3f555a1a6e3190b93d5fb7c Mon Sep 17 00:00:00 2001 From: Arne Morten Kvarving Date: Tue, 25 Feb 2020 12:33:15 +0100 Subject: [PATCH 4/7] changed: avoid use of EclipseGrid outputting RFT data --- ebos/ecloutputblackoilmodule.hh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ebos/ecloutputblackoilmodule.hh b/ebos/ecloutputblackoilmodule.hh index 19d08b485..ac26e0cde 100644 --- a/ebos/ecloutputblackoilmodule.hh +++ b/ebos/ecloutputblackoilmodule.hh @@ -272,7 +272,7 @@ public: const size_t i = size_t(connection.getI()); const size_t j = size_t(connection.getJ()); const size_t k = size_t(connection.getK()); - const size_t index = simulator_.vanguard().eclState().getInputGrid().getGlobalIndex(i, j, k); + const size_t index = simulator_.vanguard().eclState().gridDims().getGlobalIndex(i, j, k); oilConnectionPressures_.emplace(std::make_pair(index, 0.0)); waterConnectionSaturations_.emplace(std::make_pair(index, 0.0)); @@ -840,7 +840,7 @@ public: const size_t j = size_t(connection.getJ()); const size_t k = size_t(connection.getK()); - const size_t index = simulator_.vanguard().eclState().getInputGrid().getGlobalIndex(i, j, k); + const size_t index = simulator_.vanguard().eclState().gridDims().getGlobalIndex(i, j, k); auto& connectionData = wellData.connections[count]; connectionData.index = index; count++; From 35de9fa53df5740be7c90acc6f61c068dee77f8c Mon Sep 17 00:00:00 2001 From: Arne Morten Kvarving Date: Wed, 22 Jan 2020 10:27:36 +0100 Subject: [PATCH 5/7] add ParallelEclipseState and ParallelFieldProps these are wrappers sitting on top of the EclipseState and FieldPropsManager classes. 
The former has some additional methods related to parallelism, and the latter is a parallel frontend to the FieldPropManager which only hold the process-local data (in compressed arrays). --- CMakeLists_files.cmake | 2 + opm/simulators/utils/ParallelEclipseState.cpp | 354 ++++++++++++++++++ opm/simulators/utils/ParallelEclipseState.hpp | 159 ++++++++ 3 files changed, 515 insertions(+) create mode 100644 opm/simulators/utils/ParallelEclipseState.cpp create mode 100644 opm/simulators/utils/ParallelEclipseState.hpp diff --git a/CMakeLists_files.cmake b/CMakeLists_files.cmake index 46887cc98..d22649929 100644 --- a/CMakeLists_files.cmake +++ b/CMakeLists_files.cmake @@ -37,6 +37,7 @@ list (APPEND MAIN_SOURCE_FILES opm/simulators/utils/DeferredLogger.cpp opm/simulators/utils/gatherDeferredLogger.cpp opm/simulators/utils/moduleVersion.cpp + opm/simulators/utils/ParallelEclipseState.cpp opm/simulators/utils/ParallelRestart.cpp opm/simulators/wells/VFPProdProperties.cpp opm/simulators/wells/VFPInjProperties.cpp @@ -176,6 +177,7 @@ list (APPEND PUBLIC_HEADER_FILES opm/simulators/utils/DeferredLogger.hpp opm/simulators/utils/gatherDeferredLogger.hpp opm/simulators/utils/moduleVersion.hpp + opm/simulators/utils/ParallelEclipseState.hpp opm/simulators/utils/ParallelRestart.hpp opm/simulators/wells/PerforationData.hpp opm/simulators/wells/RateConverter.hpp diff --git a/opm/simulators/utils/ParallelEclipseState.cpp b/opm/simulators/utils/ParallelEclipseState.cpp new file mode 100644 index 000000000..1549c8884 --- /dev/null +++ b/opm/simulators/utils/ParallelEclipseState.cpp @@ -0,0 +1,354 @@ +/* + Copyright 2019 Equinor AS. + + This file is part of the Open Porous Media project (OPM). + + OPM is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. 
+ + OPM is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with OPM. If not, see . +*/ +#include + +#include "ParallelEclipseState.hpp" +#include "ParallelRestart.hpp" +#include + +namespace Opm { + + +ParallelFieldPropsManager::ParallelFieldPropsManager(FieldPropsManager& manager) + : m_manager(manager) + , m_comm(Dune::MPIHelper::getCollectiveCommunication()) +{ +} + + +std::vector ParallelFieldPropsManager::actnum() const +{ + if (m_comm.rank() == 0) + return m_manager.actnum(); + + return{}; +} + + +void ParallelFieldPropsManager::reset_actnum(const std::vector& actnum) +{ + if (m_comm.rank() != 0) + OPM_THROW(std::runtime_error, "reset_actnum should only be called on root process."); + m_manager.reset_actnum(actnum); +} + + +std::vector ParallelFieldPropsManager::porv(bool global) const +{ + std::vector result; + if (m_comm.rank() == 0) + result = m_manager.porv(global); + size_t size = result.size(); + m_comm.broadcast(&size, 1, 0); + result.resize(size); + m_comm.broadcast(result.data(), size, 0); + return result; +} + + +const std::vector& ParallelFieldPropsManager::get_int(const std::string& keyword) const +{ + auto it = m_intProps.find(keyword); + if (it == m_intProps.end()) + OPM_THROW(std::runtime_error, "No integer property field: " + keyword); + + return it->second; +} + +std::vector ParallelFieldPropsManager::get_global_int(const std::string& keyword) const +{ + std::vector result; + if (m_comm.rank() == 0) + result = m_manager.get_global_int(keyword); + size_t size = result.size(); + m_comm.broadcast(&size, 1, 0); + result.resize(size); + m_comm.broadcast(result.data(), size, 0); + + return result; +} + + +const std::vector& ParallelFieldPropsManager::get_double(const std::string& keyword) const +{ + 
auto it = m_doubleProps.find(keyword); + if (it == m_doubleProps.end()) + OPM_THROW(std::runtime_error, "No double property field: " + keyword); + + return it->second; +} + + +std::vector ParallelFieldPropsManager::get_global_double(const std::string& keyword) const +{ + std::vector result; + if (m_comm.rank() == 0) + result = m_manager.get_global_double(keyword); + size_t size = result.size(); + m_comm.broadcast(&size, 1, 0); + result.resize(size); + m_comm.broadcast(result.data(), size, 0); + + return result; +} + + +bool ParallelFieldPropsManager::has_int(const std::string& keyword) const +{ + auto it = m_intProps.find(keyword); + return it != m_intProps.end(); +} + + +bool ParallelFieldPropsManager::has_double(const std::string& keyword) const +{ + auto it = m_doubleProps.find(keyword); + return it != m_doubleProps.end(); +} + + +ParallelEclipseState::ParallelEclipseState() + : m_fieldProps(field_props) +{ +} + + +ParallelEclipseState::ParallelEclipseState(const Deck& deck) + : EclipseState(deck) + , m_fieldProps(field_props) +{ +} + + +std::size_t ParallelEclipseState::packSize(EclMpiSerializer& serializer) const +{ + return serializer.packSize(m_tables) + + serializer.packSize(m_runspec) + + serializer.packSize(m_eclipseConfig) + + serializer.packSize(m_deckUnitSystem) + + serializer.packSize(m_inputNnc) + + serializer.packSize(m_inputEditNnc) + + serializer.packSize(m_gridDims) + + serializer.packSize(m_simulationConfig) + + serializer.packSize(m_transMult) + + serializer.packSize(m_faults) + + serializer.packSize(m_title); + +} + + +void ParallelEclipseState::pack(std::vector& buffer, int& position, + EclMpiSerializer& serializer) const +{ + serializer.pack(m_tables, buffer, position); + serializer.pack(m_runspec, buffer, position); + serializer.pack(m_eclipseConfig, buffer, position); + serializer.pack(m_deckUnitSystem, buffer, position); + serializer.pack(m_inputNnc, buffer, position); + serializer.pack(m_inputEditNnc, buffer, position); + 
serializer.pack(m_gridDims, buffer, position); + serializer.pack(m_simulationConfig, buffer, position); + serializer.pack(m_transMult, buffer, position); + serializer.pack(m_faults, buffer, position); + serializer.pack(m_title, buffer, position); +} + + +void ParallelEclipseState::unpack(std::vector& buffer, int& position, + EclMpiSerializer& serializer) +{ + serializer.unpack(m_tables, buffer, position); + serializer.unpack(m_runspec, buffer, position); + serializer.unpack(m_eclipseConfig, buffer, position); + serializer.unpack(m_deckUnitSystem, buffer, position); + serializer.unpack(m_inputNnc, buffer, position); + serializer.unpack(m_inputEditNnc, buffer, position); + serializer.unpack(m_gridDims, buffer, position); + serializer.unpack(m_simulationConfig, buffer, position); + serializer.unpack(m_transMult, buffer, position); + serializer.unpack(m_faults, buffer, position); + serializer.unpack(m_title, buffer, position); +} + + +const FieldPropsManager& ParallelEclipseState::fieldProps() const +{ + if (!m_parProps && Dune::MPIHelper::getCollectiveCommunication().rank() != 0) + OPM_THROW(std::runtime_error, "Attempt to access field properties on no-root process before switch to parallel properties"); + + if (!m_parProps || Dune::MPIHelper::getCollectiveCommunication().size() == 1) + return this->EclipseState::fieldProps(); + + return m_fieldProps; +} + + +const FieldPropsManager& ParallelEclipseState::globalFieldProps() const +{ + if (Dune::MPIHelper::getCollectiveCommunication().rank() != 0) + OPM_THROW(std::runtime_error, "Attempt to access global field properties on non-root process"); + return this->EclipseState::globalFieldProps(); +} + + +const EclipseGrid& ParallelEclipseState::getInputGrid() const +{ + if (Dune::MPIHelper::getCollectiveCommunication().rank() != 0) + OPM_THROW(std::runtime_error, "Attempt to access eclipse grid on non-root process"); + return this->EclipseState::getInputGrid(); +} + + +void ParallelEclipseState::switchToGlobalProps() +{ + 
m_parProps = false; +} + + +void ParallelEclipseState::switchToDistributedProps() +{ + const auto& comm = Dune::MPIHelper::getCollectiveCommunication(); + if (comm.size() == 1) // No need for the parallel frontend + return; + + m_parProps = true; +} + + +namespace { + + +template +struct GetField { + GetField(const FieldPropsManager& propMan) : props(propMan) {} + std::vector getField(const std::string& key) const; + const FieldPropsManager& props; +}; + + +template<> +std::vector GetField::getField(const std::string& key) const { + return props.get_global_int(key); +} + + +template<> +std::vector GetField::getField(const std::string& key) const { + return props.get_global_double(key); +} + + +template +void extractRootProps(const std::vector& localToGlobal, + const std::vector& keys, + const GetField& getter, + std::map>& localMap) +{ + for (const std::string& key : keys) { + auto prop = getter.getField(key); + std::vector& local = localMap[key]; + local.reserve(localToGlobal.size()); + for (int cell : localToGlobal) { + local.push_back(prop[cell]); + } + } +} + + +template +void packProps(const std::vector& l2gCell, + const std::vector& keys, + const GetField& getter, + std::vector& buffer, int& position) +{ + const auto& comm = Dune::MPIHelper::getCollectiveCommunication(); + std::vector sendData(l2gCell.size()); + for (const std::string& key : keys) { + auto prop = getter.getField(key); + size_t idx = 0; + for (int cell : l2gCell) + sendData[idx++] = prop[cell]; + Mpi::pack(sendData, buffer, position, comm); + } +} + + +} + + +void ParallelEclipseState::setupLocalProps(const std::vector& localToGlobal) +{ +#if HAVE_MPI + const auto& comm = Dune::MPIHelper::getCollectiveCommunication(); + if (comm.rank() == 0) { + extractRootProps(localToGlobal, this->globalFieldProps().keys(), + GetField(this->globalFieldProps()), + m_fieldProps.m_intProps); + extractRootProps(localToGlobal, this->globalFieldProps().keys(), + GetField(this->globalFieldProps()), + 
m_fieldProps.m_doubleProps); + for (int i = 1; i < comm.size(); ++i) { + std::vector l2gCell; + size_t size; + MPI_Recv(&size, 1, Dune::MPITraits::getType(), i, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE); + l2gCell.resize(size); + MPI_Recv(l2gCell.data(), size, MPI_INT, i, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE); + size_t cells = l2gCell.size(); + const auto& intKeys = this->globalFieldProps().keys(); + const auto& dblKeys = this->globalFieldProps().keys(); + size = Mpi::packSize(intKeys, comm) + + Mpi::packSize(dblKeys,comm) + + intKeys.size() * Mpi::packSize(std::vector(cells), comm) + + dblKeys.size() * Mpi::packSize(std::vector(cells), comm); + + std::vector buffer(size); + int position = 0; + Mpi::pack(intKeys, buffer, position, comm); + Mpi::pack(dblKeys, buffer, position, comm); + packProps(l2gCell, intKeys, GetField(this->globalFieldProps()), + buffer, position); + packProps(l2gCell, dblKeys, GetField(this->globalFieldProps()), + buffer, position); + MPI_Send(&position, 1, MPI_INT, i, 0, MPI_COMM_WORLD); + MPI_Send(buffer.data(), position, MPI_CHAR, i, 0, MPI_COMM_WORLD); + } + } else { + size_t l2gSize = localToGlobal.size(); + MPI_Send(&l2gSize, 1, Dune::MPITraits::getType(), 0, 0, MPI_COMM_WORLD); + MPI_Send(localToGlobal.data(), localToGlobal.size(), MPI_INT, 0, 0, MPI_COMM_WORLD); + int size; + MPI_Recv(&size, 1, MPI_INT, 0, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE); + std::vector buffer(size); + MPI_Recv(buffer.data(), size, MPI_CHAR, 0, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE); + std::vector intKeys, dblKeys; + int position = 0; + Mpi::unpack(intKeys, buffer, position, comm); + Mpi::unpack(dblKeys, buffer, position, comm); + for (const std::string& key : intKeys) { + Mpi::unpack(m_fieldProps.m_intProps[key], buffer, position, comm); + } + for (const std::string& key : dblKeys) { + Mpi::unpack(m_fieldProps.m_doubleProps[key], buffer, position, comm); + } + } +#endif +} + + +} // end namespace Opm diff --git a/opm/simulators/utils/ParallelEclipseState.hpp 
b/opm/simulators/utils/ParallelEclipseState.hpp new file mode 100644 index 000000000..2c1ecd63a --- /dev/null +++ b/opm/simulators/utils/ParallelEclipseState.hpp @@ -0,0 +1,159 @@ +/* + Copyright 2019 Equinor AS. + + This file is part of the Open Porous Media project (OPM). + + OPM is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + OPM is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with OPM. If not, see . +*/ +#ifndef PARALLEL_ECLIPSE_STATE_HPP +#define PARALLEL_ECLIPSE_STATE_HPP + +#include +#include + +namespace Opm { + + +class EclMpiSerializer; + +/*! \brief Parallel frontend to the field properties. + * + * \details This is a parallel frontend to the mpi-unaware + * FieldPropsManager in opm-common. It contains + * process-local field properties on each process using + * compressed indexing. +*/ + +class ParallelFieldPropsManager : public FieldPropsManager { +public: + friend class ParallelEclipseState; //!< Friend so props can be setup. + + //! \brief Constructor. + //! \param manager The field property manager to wrap. + ParallelFieldPropsManager(FieldPropsManager& manager); + + //! \brief Returns actnum vector. + //! \details If called on non-root process an empty vector is returned + std::vector actnum() const override; + + //! \brief Reset the actnum vector. + //! \details Can only be called on root process + void reset_actnum(const std::vector& actnum) override; + + //! \brief Returns the pore volume vector. + std::vector porv(bool global = false) const override; + + //! 
\brief Returns an int property using compressed indices. + //! \param keyword Name of property + const std::vector& get_int(const std::string& keyword) const override; + + //! \brief Returns a double property using compressed indices. + //! \param keyword Name of property + const std::vector& get_double(const std::string& keyword) const override; + + //! \brief Returns an int property using global cartesian indices. + //! \param keyword Name of property + //! \details The vector is broadcast from root process + std::vector get_global_int(const std::string& keyword) const override; + + //! \brief Returns a double property using global cartesian indices. + //! \param keyword Name of property + //! \details The vector is broadcast from root process + std::vector get_global_double(const std::string& keyword) const override; + + //! \brief Check if an integer property is available. + //! \param keyword Name of property + bool has_int(const std::string& keyword) const override; + + //! \brief Check if a double property is available. + //! \param keyword Name of property + bool has_double(const std::string& keyword) const override; + +protected: + std::map> m_intProps; //!< Map of integer properties in process-local compressed indices. + std::map> m_doubleProps; //!< Map of double properties in process-local compressed indices. + FieldPropsManager& m_manager; //!< Underlying field property manager (only used on root process). + Dune::CollectiveCommunication m_comm; //!< Collective communication handler. +}; + + +/*! \brief Parallel frontend to the EclipseState + * + * \details This is a parallel frontend to the mpi-unaware EclipseState in opm-common. + * It extends the eclipse state class with serialization support, and + * contains methods to switch between full global field properties, + * and distributed field properties for consumption in the simulator. 
+ * Additionally, it has a few sanity checks to ensure that the data that + * is only available on the root process is not attempted to be accessed + * on non-root processes. +*/ + +class ParallelEclipseState : public EclipseState { +public: + //! \brief Default constructor. + ParallelEclipseState(); + + //! \brief Construct from a deck instance. + //! \param deck The deck to construct from + //! \details Only called on root process + ParallelEclipseState(const Deck& deck); + + //! \brief Calculates the size of serialized data. + //! \param serializer The serializer to use + std::size_t packSize(EclMpiSerializer& serializer) const; + + //! \brief Serialize data. + //! \param buffer Buffer to write serialized data into + //! \param Position in buffer + //! \param serializer The serializer to use + void pack(std::vector& buffer, int& position, EclMpiSerializer& serializer) const; + //! \brief Deserialize data. + //! \param buffer Buffer to read serialized data from + //! \param Position in buffer + //! \param serializer The serializer to use + void unpack(std::vector& buffer, int& position, EclMpiSerializer& serializer); + + //! \brief Switch to global field properties. + //! \details Called on root process to use the global field properties + void switchToGlobalProps(); + + //! \brief Switch to distributed field properies. + //! \details Called on root process to use the distributed field properties. + //! setupLocalProps must be called prior to this. + void switchToDistributedProps(); + + //! \brief Setup local properties. + //! \param localToGlobal Map from local cells on calling process to global cartesian cell + //! \details Must be called after grid has been paritioned + void setupLocalProps(const std::vector& localToGlobal); + + //! \brief Returns a const ref to current field properties. + const FieldPropsManager& fieldProps() const override; + + //! \brief Returns a const ref to global field properties. + //! \details Can only be called on root process. 
+ const FieldPropsManager& globalFieldProps() const override; + + //! \brief Returns a const ref to the eclipse grid. + //! \details Can only be called on root process. + const EclipseGrid& getInputGrid() const override; + +private: + bool m_parProps = false; //! True to use distributed properties on root process + ParallelFieldPropsManager m_fieldProps; //!< The parallel field properties +}; + + +} // end namespace Opm +#endif // PARALLEL_ECLIPSE_STATE_HPP From 68a9d17de1e47fd73f33edeedf903964806c3894 Mon Sep 17 00:00:00 2001 From: Arne Morten Kvarving Date: Wed, 22 Jan 2020 10:17:02 +0100 Subject: [PATCH 6/7] changed: pass/store deck as a pointer and remove asserts for null-ptr. preparatory step for only having deck on root mpi process --- ebos/eclbasevanguard.hh | 2 -- flow/flow.cpp | 18 +++++++++--------- flow/flow_ebos_blackoil.cpp | 4 ++-- flow/flow_ebos_blackoil.hpp | 2 +- flow/flow_ebos_brine.cpp | 4 ++-- flow/flow_ebos_brine.hpp | 2 +- flow/flow_ebos_energy.cpp | 4 ++-- flow/flow_ebos_energy.hpp | 2 +- flow/flow_ebos_foam.cpp | 4 ++-- flow/flow_ebos_foam.hpp | 2 +- flow/flow_ebos_gasoil.cpp | 4 ++-- flow/flow_ebos_gasoil.hpp | 2 +- flow/flow_ebos_oilwater.cpp | 4 ++-- flow/flow_ebos_oilwater.hpp | 2 +- flow/flow_ebos_oilwater_polymer.cpp | 4 ++-- flow/flow_ebos_oilwater_polymer.hpp | 2 +- flow/flow_ebos_polymer.cpp | 4 ++-- flow/flow_ebos_polymer.hpp | 2 +- flow/flow_ebos_solvent.cpp | 4 ++-- flow/flow_ebos_solvent.hpp | 2 +- 20 files changed, 36 insertions(+), 38 deletions(-) diff --git a/ebos/eclbasevanguard.hh b/ebos/eclbasevanguard.hh index 9bd3fa268..d02d46d44 100644 --- a/ebos/eclbasevanguard.hh +++ b/ebos/eclbasevanguard.hh @@ -322,7 +322,6 @@ public: Opm::checkDeck(*deck_, parser, *parseContext_, *errorGuard_); } else { - assert(externalDeck_); deck_ = externalDeck_; } @@ -331,7 +330,6 @@ public: eclState_ = internalEclState_.get(); } else { - assert(externalDeck_); assert(externalEclState_); deck_ = externalDeck_; diff --git a/flow/flow.cpp 
b/flow/flow.cpp index ff6cdc37f..7f6d07605 100644 --- a/flow/flow.cpp +++ b/flow/flow.cpp @@ -410,13 +410,13 @@ int main(int argc, char** argv) // oil-gas if (phases.active( Opm::Phase::GAS )) { - Opm::flowEbosGasOilSetDeck(externalSetupTimer.elapsed(), *deck, *eclipseState, *schedule, *summaryConfig); + Opm::flowEbosGasOilSetDeck(externalSetupTimer.elapsed(), deck.get(), *eclipseState, *schedule, *summaryConfig); return Opm::flowEbosGasOilMain(argc, argv, outputCout, outputFiles); } // oil-water else if ( phases.active( Opm::Phase::WATER ) ) { - Opm::flowEbosOilWaterSetDeck(externalSetupTimer.elapsed(), *deck, *eclipseState, *schedule, *summaryConfig); + Opm::flowEbosOilWaterSetDeck(externalSetupTimer.elapsed(), deck.get(), *eclipseState, *schedule, *summaryConfig); return Opm::flowEbosOilWaterMain(argc, argv, outputCout, outputFiles); } else { @@ -444,37 +444,37 @@ int main(int argc, char** argv) } if ( phases.size() == 3 ) { // oil water polymer case - Opm::flowEbosOilWaterPolymerSetDeck(externalSetupTimer.elapsed(), *deck, *eclipseState, *schedule, *summaryConfig); + Opm::flowEbosOilWaterPolymerSetDeck(externalSetupTimer.elapsed(), deck.get(), *eclipseState, *schedule, *summaryConfig); return Opm::flowEbosOilWaterPolymerMain(argc, argv, outputCout, outputFiles); } else { - Opm::flowEbosPolymerSetDeck(externalSetupTimer.elapsed(), *deck, *eclipseState, *schedule, *summaryConfig); + Opm::flowEbosPolymerSetDeck(externalSetupTimer.elapsed(), deck.get(), *eclipseState, *schedule, *summaryConfig); return Opm::flowEbosPolymerMain(argc, argv, outputCout, outputFiles); } } // Foam case else if ( phases.active( Opm::Phase::FOAM ) ) { - Opm::flowEbosFoamSetDeck(externalSetupTimer.elapsed(), *deck, *eclipseState, *schedule, *summaryConfig); + Opm::flowEbosFoamSetDeck(externalSetupTimer.elapsed(), deck.get(), *eclipseState, *schedule, *summaryConfig); return Opm::flowEbosFoamMain(argc, argv, outputCout, outputFiles); } // Brine case else if ( phases.active( 
Opm::Phase::BRINE ) ) { - Opm::flowEbosBrineSetDeck(externalSetupTimer.elapsed(), *deck, *eclipseState, *schedule, *summaryConfig); + Opm::flowEbosBrineSetDeck(externalSetupTimer.elapsed(), deck.get(), *eclipseState, *schedule, *summaryConfig); return Opm::flowEbosBrineMain(argc, argv, outputCout, outputFiles); } // Solvent case else if ( phases.active( Opm::Phase::SOLVENT ) ) { - Opm::flowEbosSolventSetDeck(externalSetupTimer.elapsed(), *deck, *eclipseState, *schedule, *summaryConfig); + Opm::flowEbosSolventSetDeck(externalSetupTimer.elapsed(), deck.get(), *eclipseState, *schedule, *summaryConfig); return Opm::flowEbosSolventMain(argc, argv, outputCout, outputFiles); } // Energy case else if (eclipseState->getSimulationConfig().isThermal()) { - Opm::flowEbosEnergySetDeck(externalSetupTimer.elapsed(), *deck, *eclipseState, *schedule, *summaryConfig); + Opm::flowEbosEnergySetDeck(externalSetupTimer.elapsed(), deck.get(), *eclipseState, *schedule, *summaryConfig); return Opm::flowEbosEnergyMain(argc, argv, outputCout, outputFiles); } #endif // FLOW_BLACKOIL_ONLY // Blackoil case else if( phases.size() == 3 ) { - Opm::flowEbosBlackoilSetDeck(externalSetupTimer.elapsed(), *deck, *eclipseState, *schedule, *summaryConfig); + Opm::flowEbosBlackoilSetDeck(externalSetupTimer.elapsed(), deck.get(), *eclipseState, *schedule, *summaryConfig); return Opm::flowEbosBlackoilMain(argc, argv, outputCout, outputFiles); } else diff --git a/flow/flow_ebos_blackoil.cpp b/flow/flow_ebos_blackoil.cpp index 9558de347..809610682 100644 --- a/flow/flow_ebos_blackoil.cpp +++ b/flow/flow_ebos_blackoil.cpp @@ -34,13 +34,13 @@ namespace Opm { -void flowEbosBlackoilSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig) +void flowEbosBlackoilSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig) { typedef TTAG(EclFlowProblem) TypeTag; typedef GET_PROP_TYPE(TypeTag, Vanguard) Vanguard; 
Vanguard::setExternalSetupTime(setupTime); - Vanguard::setExternalDeck(&deck); + Vanguard::setExternalDeck(deck); Vanguard::setExternalEclState(&eclState); Vanguard::setExternalSchedule(&schedule); Vanguard::setExternalSummaryConfig(&summaryConfig); diff --git a/flow/flow_ebos_blackoil.hpp b/flow/flow_ebos_blackoil.hpp index 703326fc8..641d74aa2 100644 --- a/flow/flow_ebos_blackoil.hpp +++ b/flow/flow_ebos_blackoil.hpp @@ -23,7 +23,7 @@ #include namespace Opm { -void flowEbosBlackoilSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig); +void flowEbosBlackoilSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig); int flowEbosBlackoilMain(int argc, char** argv, bool outputCout, bool outputFiles); } diff --git a/flow/flow_ebos_brine.cpp b/flow/flow_ebos_brine.cpp index 484091161..c39379d49 100644 --- a/flow/flow_ebos_brine.cpp +++ b/flow/flow_ebos_brine.cpp @@ -36,13 +36,13 @@ SET_BOOL_PROP(EclFlowBrineProblem, EnableBrine, true); }} namespace Opm { -void flowEbosBrineSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig) +void flowEbosBrineSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig) { typedef TTAG(EclFlowBrineProblem) TypeTag; typedef GET_PROP_TYPE(TypeTag, Vanguard) Vanguard; Vanguard::setExternalSetupTime(setupTime); - Vanguard::setExternalDeck(&deck); + Vanguard::setExternalDeck(deck); Vanguard::setExternalEclState(&eclState); Vanguard::setExternalSchedule(&schedule); Vanguard::setExternalSummaryConfig(&summaryConfig); diff --git a/flow/flow_ebos_brine.hpp b/flow/flow_ebos_brine.hpp index 0f974f524..4b8df12ce 100644 --- a/flow/flow_ebos_brine.hpp +++ b/flow/flow_ebos_brine.hpp @@ -24,7 +24,7 @@ namespace Opm { -void flowEbosBrineSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& 
summaryConfig); +void flowEbosBrineSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig); int flowEbosBrineMain(int argc, char** argv, bool outputCout, bool outputFiles); } diff --git a/flow/flow_ebos_energy.cpp b/flow/flow_ebos_energy.cpp index 6fd2cb949..30f2506f7 100644 --- a/flow/flow_ebos_energy.cpp +++ b/flow/flow_ebos_energy.cpp @@ -36,13 +36,13 @@ SET_BOOL_PROP(EclFlowEnergyProblem, EnableEnergy, true); }} namespace Opm { -void flowEbosEnergySetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig) +void flowEbosEnergySetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig) { typedef TTAG(EclFlowEnergyProblem) TypeTag; typedef GET_PROP_TYPE(TypeTag, Vanguard) Vanguard; Vanguard::setExternalSetupTime(setupTime); - Vanguard::setExternalDeck(&deck); + Vanguard::setExternalDeck(deck); Vanguard::setExternalEclState(&eclState); Vanguard::setExternalSchedule(&schedule); Vanguard::setExternalSummaryConfig(&summaryConfig); diff --git a/flow/flow_ebos_energy.hpp b/flow/flow_ebos_energy.hpp index 58e091e78..6c02e4b35 100644 --- a/flow/flow_ebos_energy.hpp +++ b/flow/flow_ebos_energy.hpp @@ -23,7 +23,7 @@ #include namespace Opm { -void flowEbosEnergySetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig); +void flowEbosEnergySetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig); int flowEbosEnergyMain(int argc, char** argv, bool outputCout, bool outputFiles); } diff --git a/flow/flow_ebos_foam.cpp b/flow/flow_ebos_foam.cpp index 9037c5622..973617666 100644 --- a/flow/flow_ebos_foam.cpp +++ b/flow/flow_ebos_foam.cpp @@ -36,13 +36,13 @@ SET_BOOL_PROP(EclFlowFoamProblem, EnableFoam, true); }} namespace Opm { -void flowEbosFoamSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& 
schedule, SummaryConfig& summaryConfig) +void flowEbosFoamSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig) { typedef TTAG(EclFlowFoamProblem) TypeTag; typedef GET_PROP_TYPE(TypeTag, Vanguard) Vanguard; Vanguard::setExternalSetupTime(setupTime); - Vanguard::setExternalDeck(&deck); + Vanguard::setExternalDeck(deck); Vanguard::setExternalEclState(&eclState); Vanguard::setExternalSchedule(&schedule); Vanguard::setExternalSummaryConfig(&summaryConfig); diff --git a/flow/flow_ebos_foam.hpp b/flow/flow_ebos_foam.hpp index e35734743..0ad4d750a 100644 --- a/flow/flow_ebos_foam.hpp +++ b/flow/flow_ebos_foam.hpp @@ -24,7 +24,7 @@ namespace Opm { -void flowEbosFoamSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig); +void flowEbosFoamSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig); int flowEbosFoamMain(int argc, char** argv, bool outputCout, bool outputFiles); } diff --git a/flow/flow_ebos_gasoil.cpp b/flow/flow_ebos_gasoil.cpp index 32fd14f50..989f889db 100644 --- a/flow/flow_ebos_gasoil.cpp +++ b/flow/flow_ebos_gasoil.cpp @@ -60,13 +60,13 @@ public: }} namespace Opm { -void flowEbosGasOilSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig) +void flowEbosGasOilSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig) { typedef TTAG(EclFlowGasOilProblem) TypeTag; typedef GET_PROP_TYPE(TypeTag, Vanguard) Vanguard; Vanguard::setExternalSetupTime(setupTime); - Vanguard::setExternalDeck(&deck); + Vanguard::setExternalDeck(deck); Vanguard::setExternalEclState(&eclState); Vanguard::setExternalSchedule(&schedule); Vanguard::setExternalSummaryConfig(&summaryConfig); diff --git a/flow/flow_ebos_gasoil.hpp b/flow/flow_ebos_gasoil.hpp index ea5746bf4..da2c09551 100644 --- 
a/flow/flow_ebos_gasoil.hpp +++ b/flow/flow_ebos_gasoil.hpp @@ -23,7 +23,7 @@ #include namespace Opm { -void flowEbosGasOilSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig); +void flowEbosGasOilSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig); int flowEbosGasOilMain(int argc, char** argv, bool outputCout, bool outputFiles); } diff --git a/flow/flow_ebos_oilwater.cpp b/flow/flow_ebos_oilwater.cpp index fb0013750..c2d000379 100644 --- a/flow/flow_ebos_oilwater.cpp +++ b/flow/flow_ebos_oilwater.cpp @@ -60,13 +60,13 @@ public: }} namespace Opm { -void flowEbosOilWaterSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig) +void flowEbosOilWaterSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig) { typedef TTAG(EclFlowOilWaterProblem) TypeTag; typedef GET_PROP_TYPE(TypeTag, Vanguard) Vanguard; Vanguard::setExternalSetupTime(setupTime); - Vanguard::setExternalDeck(&deck); + Vanguard::setExternalDeck(deck); Vanguard::setExternalEclState(&eclState); Vanguard::setExternalSchedule(&schedule); Vanguard::setExternalSummaryConfig(&summaryConfig); diff --git a/flow/flow_ebos_oilwater.hpp b/flow/flow_ebos_oilwater.hpp index 0cf0fee6a..d93fd6782 100644 --- a/flow/flow_ebos_oilwater.hpp +++ b/flow/flow_ebos_oilwater.hpp @@ -23,7 +23,7 @@ #include namespace Opm { -void flowEbosOilWaterSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig); +void flowEbosOilWaterSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig); int flowEbosOilWaterMain(int argc, char** argv, bool outputCout, bool outputFiles); } diff --git a/flow/flow_ebos_oilwater_polymer.cpp b/flow/flow_ebos_oilwater_polymer.cpp index 19537a3a7..104af11fc 100644 --- 
a/flow/flow_ebos_oilwater_polymer.cpp +++ b/flow/flow_ebos_oilwater_polymer.cpp @@ -61,13 +61,13 @@ public: }} namespace Opm { -void flowEbosOilWaterPolymerSetDeck(double setupTime, Deck& deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig) +void flowEbosOilWaterPolymerSetDeck(double setupTime, Deck* deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig) { typedef TTAG(EclFlowOilWaterPolymerProblem) TypeTag; typedef GET_PROP_TYPE(TypeTag, Vanguard) Vanguard; Vanguard::setExternalSetupTime(setupTime); - Vanguard::setExternalDeck(&deck); + Vanguard::setExternalDeck(deck); Vanguard::setExternalEclState(&eclState); Vanguard::setExternalSchedule(&schedule); Vanguard::setExternalSummaryConfig(&summaryConfig); diff --git a/flow/flow_ebos_oilwater_polymer.hpp b/flow/flow_ebos_oilwater_polymer.hpp index 4d759c57f..9190622e1 100644 --- a/flow/flow_ebos_oilwater_polymer.hpp +++ b/flow/flow_ebos_oilwater_polymer.hpp @@ -23,7 +23,7 @@ #include namespace Opm { -void flowEbosOilWaterPolymerSetDeck(double setupTime, Deck& deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig); +void flowEbosOilWaterPolymerSetDeck(double setupTime, Deck* deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig); int flowEbosOilWaterPolymerMain(int argc, char** argv, bool outputCout, bool outputFiles); } diff --git a/flow/flow_ebos_polymer.cpp b/flow/flow_ebos_polymer.cpp index 23357e288..f03448e77 100644 --- a/flow/flow_ebos_polymer.cpp +++ b/flow/flow_ebos_polymer.cpp @@ -36,13 +36,13 @@ SET_BOOL_PROP(EclFlowPolymerProblem, EnablePolymer, true); }} namespace Opm { -void flowEbosPolymerSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig) +void flowEbosPolymerSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig) { typedef TTAG(EclFlowPolymerProblem) TypeTag; typedef 
GET_PROP_TYPE(TypeTag, Vanguard) Vanguard; Vanguard::setExternalSetupTime(setupTime); - Vanguard::setExternalDeck(&deck); + Vanguard::setExternalDeck(deck); Vanguard::setExternalEclState(&eclState); Vanguard::setExternalSchedule(&schedule); Vanguard::setExternalSummaryConfig(&summaryConfig); diff --git a/flow/flow_ebos_polymer.hpp b/flow/flow_ebos_polymer.hpp index d278cfef5..4e5e755d9 100644 --- a/flow/flow_ebos_polymer.hpp +++ b/flow/flow_ebos_polymer.hpp @@ -23,7 +23,7 @@ #include namespace Opm { -void flowEbosPolymerSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig); +void flowEbosPolymerSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig); int flowEbosPolymerMain(int argc, char** argv, bool outputCout, bool outputFiles); } diff --git a/flow/flow_ebos_solvent.cpp b/flow/flow_ebos_solvent.cpp index 5b720e9e0..0beb3c43a 100644 --- a/flow/flow_ebos_solvent.cpp +++ b/flow/flow_ebos_solvent.cpp @@ -36,13 +36,13 @@ SET_BOOL_PROP(EclFlowSolventProblem, EnableSolvent, true); }} namespace Opm { -void flowEbosSolventSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig) +void flowEbosSolventSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig) { typedef TTAG(EclFlowSolventProblem) TypeTag; typedef GET_PROP_TYPE(TypeTag, Vanguard) Vanguard; Vanguard::setExternalSetupTime(setupTime); - Vanguard::setExternalDeck(&deck); + Vanguard::setExternalDeck(deck); Vanguard::setExternalEclState(&eclState); Vanguard::setExternalSchedule(&schedule); Vanguard::setExternalSummaryConfig(&summaryConfig); diff --git a/flow/flow_ebos_solvent.hpp b/flow/flow_ebos_solvent.hpp index b9e4b2aff..95f24e7e4 100644 --- a/flow/flow_ebos_solvent.hpp +++ b/flow/flow_ebos_solvent.hpp @@ -24,7 +24,7 @@ namespace Opm { -void flowEbosSolventSetDeck(double setupTime, Deck 
&deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig); +void flowEbosSolventSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig); int flowEbosSolventMain(int argc, char** argv, bool outoutCout, bool outputFiles); } From 05c0c4b6e8c407f0d9036de7236ddfdff2a245fe Mon Sep 17 00:00:00 2001 From: Arne Morten Kvarving Date: Fri, 24 Jan 2020 12:30:22 +0100 Subject: [PATCH 7/7] Use the ParallelEclipseState and ParallelFieldPropsManager with these in place, we now only parse the deck on the root process. --- ebos/eclcpgridvanguard.hh | 13 +++++++++++- flow/flow.cpp | 30 ++++++++++++++++++++-------- opm/simulators/flow/FlowMainEbos.hpp | 8 ++++++-- 3 files changed, 40 insertions(+), 11 deletions(-) diff --git a/ebos/eclcpgridvanguard.hh b/ebos/eclcpgridvanguard.hh index 5ac5decdc..67a60ead1 100644 --- a/ebos/eclcpgridvanguard.hh +++ b/ebos/eclcpgridvanguard.hh @@ -33,6 +33,7 @@ #include #include +#include #include @@ -206,8 +207,18 @@ public: #endif cartesianIndexMapper_.reset(new CartesianIndexMapper(*grid_)); - this->updateGridView_(); + if (mpiSize > 1) { + std::vector cartIndices; + cartIndices.reserve(grid_->numCells()); + auto locElemIt = this->gridView().template begin(); + const auto& locElemEndIt = this->gridView().template end(); + for (; locElemIt != locElemEndIt; ++locElemIt) { + cartIndices.push_back(cartesianIndexMapper_->cartesianIndex(locElemIt->index())); + } + static_cast(this->eclState()).setupLocalProps(cartIndices); + static_cast(this->eclState()).switchToDistributedProps(); + } } /*! 
diff --git a/flow/flow.cpp b/flow/flow.cpp index 7f6d07605..3ebd2ca8b 100644 --- a/flow/flow.cpp +++ b/flow/flow.cpp @@ -20,6 +20,7 @@ */ #include "config.h" +#include #include @@ -38,6 +39,7 @@ #include #include #include +#include #include #include #include @@ -348,13 +350,21 @@ int main(int argc, char** argv) Opm::FlowMainEbos::printPRTHeader(outputCout); - deck.reset( new Opm::Deck( parser.parseFile(deckFilename , parseContext, errorGuard))); - Opm::MissingFeatures::checkKeywords(*deck, parseContext, errorGuard); - if ( outputCout ) - Opm::checkDeck(*deck, parser, parseContext, errorGuard); - - eclipseState.reset( new Opm::EclipseState(*deck)); +#ifdef HAVE_MPI + Opm::ParallelEclipseState* parState; +#endif if (mpiRank == 0) { + deck.reset( new Opm::Deck( parser.parseFile(deckFilename , parseContext, errorGuard))); + Opm::MissingFeatures::checkKeywords(*deck, parseContext, errorGuard); + if ( outputCout ) + Opm::checkDeck(*deck, parser, parseContext, errorGuard); + +#ifdef HAVE_MPI + parState = new Opm::ParallelEclipseState(*deck); + eclipseState.reset(parState); +#else + eclipseState.reset(new Opm::EclipseState(*deck)); +#endif /* For the time being initializing wells and groups from the restart file is not possible, but work is underways and it is @@ -382,9 +392,13 @@ else { summaryConfig.reset(new Opm::SummaryConfig); schedule.reset(new Opm::Schedule); - Opm::Mpi::receiveAndUnpack(*summaryConfig, Dune::MPIHelper::getCollectiveCommunication()); - Opm::Mpi::receiveAndUnpack(*schedule, Dune::MPIHelper::getCollectiveCommunication()); + parState = new Opm::ParallelEclipseState; + Opm::Mpi::receiveAndUnpack(*summaryConfig, mpiHelper.getCollectiveCommunication()); + Opm::Mpi::receiveAndUnpack(*schedule, mpiHelper.getCollectiveCommunication()); + eclipseState.reset(parState); } + Opm::EclMpiSerializer ser(mpiHelper.getCollectiveCommunication()); + ser.broadcast(*parState); #endif Opm::checkConsistentArrayDimensions(*eclipseState, 
*schedule, parseContext, errorGuard); diff --git a/opm/simulators/flow/FlowMainEbos.hpp b/opm/simulators/flow/FlowMainEbos.hpp index 6898e1da9..30cd0ba17 100644 --- a/opm/simulators/flow/FlowMainEbos.hpp +++ b/opm/simulators/flow/FlowMainEbos.hpp @@ -450,11 +450,15 @@ namespace Opm // Run relperm diagnostics if we have more than one phase. if (FluidSystem::numActivePhases() > 1) { RelpermDiagnostics diagnostic; - if (mpi_size_ > 1) + if (mpi_size_ > 1) { this->grid().switchToGlobalView(); + static_cast(this->eclState()).switchToGlobalProps(); + } diagnostic.diagnosis(eclState(), deck(), this->grid()); - if (mpi_size_ > 1) + if (mpi_size_ > 1) { this->grid().switchToDistributedView(); + static_cast(this->eclState()).switchToDistributedProps(); + } } }