Mirror of https://github.com/OPM/opm-simulators.git (synced 2025-02-25 18:55:30 -06:00)

Merge pull request #2311 from akva2/noecl_no_parsing

Only parse on root process

This commit is contained in commit 36761e9347.
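In broad strokes, the change makes rank 0 the only process that parses the input deck and builds the EclipseState, Schedule and SummaryConfig; all other ranks receive the result over MPI and work against a new ParallelEclipseState frontend that only holds the distributed field properties. Below is a minimal, self-contained sketch of that "parse on root, broadcast to the others" pattern. It is illustrative only: readAndParse() and the string payload stand in for the expensive deck parsing, whereas the real code serializes the OPM objects with EclMpiSerializer/ParallelRestart.

// Minimal sketch of the root-parses-then-broadcasts pattern (not the OPM API).
#include <mpi.h>
#include <algorithm>
#include <fstream>
#include <sstream>
#include <string>
#include <vector>

static std::string readAndParse(const std::string& fileName)
{
    std::ifstream in(fileName);
    std::ostringstream ss;
    ss << in.rdbuf();          // stand-in for the real deck parsing
    return ss.str();
}

int main(int argc, char** argv)
{
    MPI_Init(&argc, &argv);
    int rank = 0;
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    std::string state;
    if (rank == 0 && argc > 1)
        state = readAndParse(argv[1]);   // only the root touches the input file

    // Broadcast the size first so the other ranks can allocate a receive buffer.
    unsigned long size = state.size();
    MPI_Bcast(&size, 1, MPI_UNSIGNED_LONG, 0, MPI_COMM_WORLD);

    std::vector<char> buffer(size);
    if (rank == 0)
        std::copy(state.begin(), state.end(), buffer.begin());
    MPI_Bcast(buffer.data(), static_cast<int>(size), MPI_CHAR, 0, MPI_COMM_WORLD);

    std::string received(buffer.begin(), buffer.end()); // every rank now holds the parsed state

    MPI_Finalize();
    return 0;
}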
@@ -37,6 +37,7 @@ list (APPEND MAIN_SOURCE_FILES
  opm/simulators/utils/DeferredLogger.cpp
  opm/simulators/utils/gatherDeferredLogger.cpp
  opm/simulators/utils/moduleVersion.cpp
  opm/simulators/utils/ParallelEclipseState.cpp
  opm/simulators/utils/ParallelRestart.cpp
  opm/simulators/wells/VFPProdProperties.cpp
  opm/simulators/wells/VFPInjProperties.cpp

@@ -176,6 +177,7 @@ list (APPEND PUBLIC_HEADER_FILES
  opm/simulators/utils/DeferredLogger.hpp
  opm/simulators/utils/gatherDeferredLogger.hpp
  opm/simulators/utils/moduleVersion.hpp
  opm/simulators/utils/ParallelEclipseState.hpp
  opm/simulators/utils/ParallelRestart.hpp
  opm/simulators/wells/PerforationData.hpp
  opm/simulators/wells/RateConverter.hpp
@@ -322,7 +322,6 @@ public:
        Opm::checkDeck(*deck_, parser, *parseContext_, *errorGuard_);
    }
    else {
        assert(externalDeck_);
        deck_ = externalDeck_;
    }

@@ -331,7 +330,6 @@ public:
        eclState_ = internalEclState_.get();
    }
    else {
        assert(externalDeck_);
        assert(externalEclState_);

        deck_ = externalDeck_;
@@ -33,6 +33,7 @@

#include <opm/grid/CpGrid.hpp>
#include <opm/grid/cpgrid/GridHelpers.hpp>
#include <opm/simulators/utils/ParallelEclipseState.hpp>

#include <dune/grid/common/mcmgmapper.hh>

@@ -151,7 +152,7 @@ public:
        if (grid_->size(0))
        {
            globalTrans_.reset(new EclTransmissibility<TypeTag>(*this));
            globalTrans_->update();
            globalTrans_->update(false);
        }

        Dune::EdgeWeightMethod edgeWeightsMethod = this->edgeWeightsMethod();

@@ -206,8 +207,18 @@ public:
#endif

        cartesianIndexMapper_.reset(new CartesianIndexMapper(*grid_));

        this->updateGridView_();
        if (mpiSize > 1) {
            std::vector<int> cartIndices;
            cartIndices.reserve(grid_->numCells());
            auto locElemIt = this->gridView().template begin</*codim=*/0>();
            const auto& locElemEndIt = this->gridView().template end</*codim=*/0>();
            for (; locElemIt != locElemEndIt; ++locElemIt) {
                cartIndices.push_back(cartesianIndexMapper_->cartesianIndex(locElemIt->index()));
            }
            static_cast<ParallelEclipseState&>(this->eclState()).setupLocalProps(cartIndices);
            static_cast<ParallelEclipseState&>(this->eclState()).switchToDistributedProps();
        }
    }

    /*!
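After load balancing, each rank walks its local elements and records the global Cartesian index of every cell it owns; this list is what setupLocalProps() later uses to cut the globally indexed field properties down to rank-local arrays. A small stand-alone illustration of that mapping (not OPM code, just the idea):

// Sketch of what the per-rank "cartIndices" map is used for: every local cell i
// knows the global Cartesian index cartIndices[i], so a globally indexed field
// can be compressed down to the cells owned by this rank.
#include <vector>

std::vector<double> compressToLocal(const std::vector<double>& globalField,
                                    const std::vector<int>& cartIndices)
{
    std::vector<double> local;
    local.reserve(cartIndices.size());
    for (int globalIdx : cartIndices)
        local.push_back(globalField.at(globalIdx));  // pick out only the owned cells
    return local;
}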
@@ -272,7 +272,7 @@ public:
        const size_t i = size_t(connection.getI());
        const size_t j = size_t(connection.getJ());
        const size_t k = size_t(connection.getK());
        const size_t index = simulator_.vanguard().eclState().getInputGrid().getGlobalIndex(i, j, k);
        const size_t index = simulator_.vanguard().eclState().gridDims().getGlobalIndex(i, j, k);

        oilConnectionPressures_.emplace(std::make_pair(index, 0.0));
        waterConnectionSaturations_.emplace(std::make_pair(index, 0.0));

@@ -840,7 +840,7 @@ public:
        const size_t j = size_t(connection.getJ());
        const size_t k = size_t(connection.getK());

        const size_t index = simulator_.vanguard().eclState().getInputGrid().getGlobalIndex(i, j, k);
        const size_t index = simulator_.vanguard().eclState().gridDims().getGlobalIndex(i, j, k);
        auto& connectionData = wellData.connections[count];
        connectionData.index = index;
        count++;
@@ -815,7 +815,7 @@ public:
        eclState.applyModifierDeck(miniDeck);

        // re-compute all quantities which may possibly be affected.
        transmissibilities_.update();
        transmissibilities_.update(true);
        referencePorosity_[1] = referencePorosity_[0];
        updateReferencePorosity_();
        updatePffDofData_();

@@ -2753,11 +2753,16 @@ private:
        const auto& simulator = this->simulator();
        const auto& vanguard = simulator.vanguard();
        const auto& eclState = vanguard.eclState();
        const auto& comm = vanguard.gridView().comm();
        size_t numDof = this->model().numGridDof();

        size_t globalSize;
        if (comm.rank() == 0)
            globalSize = eclState.getInputGrid().getCartesianSize();
        comm.broadcast(&globalSize, 1, 0);

        if (enableSolvent) {
            std::vector<double> solventSaturationData(eclState.getInputGrid().getCartesianSize(), 0.0);
            std::vector<double> solventSaturationData(globalSize, 0.0);
            if (eclState.fieldProps().has_double("SSOL"))
                solventSaturationData = eclState.fieldProps().get_global_double("SSOL");

@@ -2771,7 +2776,7 @@ private:
        }

        if (enablePolymer) {
            std::vector<double> polyConcentrationData(eclState.getInputGrid().getCartesianSize(), 0.0);
            std::vector<double> polyConcentrationData(globalSize, 0.0);
            if (eclState.fieldProps().has_double("SPOLY"))
                polyConcentrationData = eclState.fieldProps().get_global_double("SPOLY");

@@ -2785,7 +2790,7 @@ private:
        }

        if (enablePolymerMolarWeight) {
            std::vector<double> polyMoleWeightData(eclState.getInputGrid().getCartesianSize(), 0.0);
            std::vector<double> polyMoleWeightData(globalSize, 0.0);
            if (eclState.fieldProps().has_double("SPOLYMW"))
                polyMoleWeightData = eclState.fieldProps().get_global_double("SPOLYMW");
            polymerMoleWeight_.resize(numDof, 0.0);
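The globalSize broadcast above, and the get_global_* and porv() methods of the new ParallelFieldPropsManager later in this commit, all follow the same two-step broadcast: the root first announces the length, then ships the payload, because only the root knows the real size. A self-contained sketch of that pattern with raw MPI (the diff itself goes through Dune's CollectiveCommunication::broadcast):

// Broadcast a vector whose size is only known on the root rank.
#include <mpi.h>
#include <vector>

std::vector<double> broadcastVector(std::vector<double> data, int root, MPI_Comm comm)
{
    int rank = 0;
    MPI_Comm_rank(comm, &rank);

    unsigned long size = (rank == root) ? data.size() : 0ul;
    MPI_Bcast(&size, 1, MPI_UNSIGNED_LONG, root, comm);   // step 1: length

    data.resize(size);                                    // non-root ranks allocate room
    MPI_Bcast(data.data(), static_cast<int>(size), MPI_DOUBLE, root, comm); // step 2: payload
    return data;                                          // identical contents on every rank
}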
@@ -117,22 +117,23 @@ public:
     * either but at least it seems to be much better.
     */
    void finishInit()
    { update(); }
    { update(true); }


    /*!
     * \brief Compute all transmissibilities
     *
     * \param global If true, update is called on all processes
     * Also, this updates the "thermal half transmissibilities" if energy is enabled.
     */
    void update()
    void update(bool global)
    {
        const auto& gridView = vanguard_.gridView();
        const auto& cartMapper = vanguard_.cartesianIndexMapper();
        const auto& eclState = vanguard_.eclState();
        const auto& eclGrid = eclState.getInputGrid();
        const auto& cartDims = cartMapper.cartesianDimensions();
        auto& transMult = eclState.getTransMult();
        const auto& comm = vanguard_.gridView().comm();
        ElementMapper elemMapper(gridView, Dune::mcmgElementLayout());

        // get the ntg values, the ntg values are modified for the cells merged with minpv

@@ -148,17 +149,66 @@ public:
        for (unsigned dimIdx = 0; dimIdx < dimWorld; ++dimIdx)
            axisCentroids[dimIdx].resize(numElements);

        std::vector<double> centroids;
#if HAVE_MPI
        size_t cells = vanguard_.grid().numCells();
        if (global && comm.size() > 1) {
            std::vector<size_t> sizes(comm.size());
            if (comm.rank() == 0) {
                const auto& eclGrid = eclState.getInputGrid();
                comm.gather(&cells, sizes.data(), 1, 0);
                for (int i = 1; i < comm.size(); ++i) {
                    std::vector<int> cell_id(sizes[i]);
                    MPI_Recv(cell_id.data(), sizes[i], MPI_INT,
                             i, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
                    centroids.resize(dimWorld * sizes[i]);

                    auto cIt = centroids.begin();
                    for (int idx : cell_id) {
                        const auto& centroid = eclGrid.getCellCenter(idx);
                        for (const auto& it : centroid)
                            *cIt++ = it;
                    }
                    MPI_Send(centroids.data(), dimWorld * sizes[i],
                             MPI_DOUBLE, i, 0, MPI_COMM_WORLD);
                }
                centroids.clear();
            } else {
                comm.gather(&cells, sizes.data(), 1, 0);
                std::vector<int> cell_ids;
                cell_ids.reserve(cells);
                auto elemIt = gridView.template begin</*codim=*/ 0>();
                const auto& elemEndIt = gridView.template end</*codim=*/ 0>();
                for (; elemIt != elemEndIt; ++elemIt) {
                    const auto& elem = *elemIt;
                    cell_ids.push_back(cartMapper.cartesianIndex(elemMapper.index(elem)));
                }
                MPI_Send(cell_ids.data(), cells, MPI_INT, 0, 0, MPI_COMM_WORLD);
                centroids.resize(cells * dimWorld);
                MPI_Recv(centroids.data(), dimWorld * cells, MPI_DOUBLE,
                         0, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
            }
        }
#endif

        auto elemIt = gridView.template begin</*codim=*/ 0>();
        const auto& elemEndIt = gridView.template end</*codim=*/ 0>();
        for (; elemIt != elemEndIt; ++elemIt) {
        size_t centroidIdx = 0;
        for (; elemIt != elemEndIt; ++elemIt, ++centroidIdx) {
            const auto& elem = *elemIt;
            unsigned elemIdx = elemMapper.index(elem);

            // compute the axis specific "centroids" used for the transmissibilities. for
            // consistency with the flow simulator, we use the element centers as
            // computed by opm-parser's Opm::EclipseGrid class for all axes.
            unsigned cartesianCellIdx = cartMapper.cartesianIndex(elemIdx);
            const auto& centroid = eclGrid.getCellCenter(cartesianCellIdx);
            const double* centroid;
            if (vanguard_.gridView().comm().rank() == 0) {
                const auto& eclGrid = eclState.getInputGrid();
                unsigned cartesianCellIdx = cartMapper.cartesianIndex(elemIdx);
                centroid = &eclGrid.getCellCenter(cartesianCellIdx)[0];
            } else
                centroid = &centroids[centroidIdx * dimWorld];

            for (unsigned axisIdx = 0; axisIdx < dimWorld; ++axisIdx)
                for (unsigned dimIdx = 0; dimIdx < dimWorld; ++dimIdx)
                    axisCentroids[axisIdx][elemIdx][dimIdx] = centroid[dimIdx];

@@ -181,6 +231,18 @@ public:
        thermalHalfTransBoundary_.clear();
    }

    // The MULTZ needs special case if the option is ALL
    // Then the smallest multiplier is applied.
    // Default is to apply the top and bottom multiplier
    bool useSmallestMultiplier;
    if (comm.rank() == 0) {
        const auto& eclGrid = eclState.getInputGrid();
        useSmallestMultiplier = eclGrid.getMultzOption() == Opm::PinchMode::ModeEnum::ALL;
    }
    if (global && comm.size() > 1) {
        comm.broadcast(&useSmallestMultiplier, 1, 0);
    }

    // compute the transmissibilities for all intersections
    elemIt = gridView.template begin</*codim=*/ 0>();
    for (; elemIt != elemEndIt; ++elemIt) {

@@ -330,10 +392,6 @@ public:
        // apply the full face transmissibility multipliers
        // for the inside ...

        // The MULTZ needs special case if the option is ALL
        // Then the smallest multiplier is applied.
        // Default is to apply the top and bottom multiplier
        bool useSmallestMultiplier = eclGrid.getMultzOption() == Opm::PinchMode::ModeEnum::ALL;
        if (useSmallestMultiplier)
            applyAllZMultipliers_(trans, insideFaceIdx, insideCartElemIdx, outsideCartElemIdx, transMult, cartDims);
        else
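In the update() hunks above, cell-center coordinates now only exist on the root process (the EclipseGrid is no longer loaded elsewhere), so each worker sends the Cartesian ids of its cells to rank 0 and receives dimWorld coordinates per cell in return. A trimmed, self-contained sketch of that request/reply exchange follows; lookupCenter() is an illustrative stand-in for EclipseGrid::getCellCenter(), and the per-rank cell counts are assumed to have been gathered beforehand, as with comm.gather() in the diff.

// Request/reply exchange of cell-center coordinates between workers and root (sketch).
#include <mpi.h>
#include <array>
#include <vector>

constexpr int dimWorld = 3;

std::array<double, dimWorld> lookupCenter(int cartIdx)
{
    // Stand-in for the root-only geometry lookup; fabricates a coordinate from the index.
    return { double(cartIdx), 0.0, 0.0 };
}

void exchangeCentroids(const std::vector<int>& myCartIds,   // worker-side input
                       const std::vector<int>& sizes,       // per-rank cell counts (root only)
                       std::vector<double>& myCentroids,    // worker-side output
                       MPI_Comm comm)
{
    int rank = 0, procs = 1;
    MPI_Comm_rank(comm, &rank);
    MPI_Comm_size(comm, &procs);

    if (rank == 0) {
        for (int p = 1; p < procs; ++p) {
            std::vector<int> ids(sizes[p]);
            MPI_Recv(ids.data(), sizes[p], MPI_INT, p, 0, comm, MPI_STATUS_IGNORE);

            std::vector<double> reply;
            reply.reserve(ids.size() * dimWorld);
            for (int id : ids)
                for (double coord : lookupCenter(id))
                    reply.push_back(coord);

            MPI_Send(reply.data(), static_cast<int>(reply.size()), MPI_DOUBLE, p, 0, comm);
        }
    } else {
        MPI_Send(myCartIds.data(), static_cast<int>(myCartIds.size()), MPI_INT, 0, 0, comm);
        myCentroids.resize(myCartIds.size() * dimWorld);
        MPI_Recv(myCentroids.data(), static_cast<int>(myCentroids.size()), MPI_DOUBLE,
                 0, 0, comm, MPI_STATUS_IGNORE);
    }
}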
@@ -20,6 +20,7 @@
 */
#include "config.h"

#include <ebos/eclmpiserializer.hh>

#include <flow/flow_ebos_blackoil.hpp>

@@ -38,6 +39,7 @@
#include <opm/simulators/flow/SimulatorFullyImplicitBlackoilEbos.hpp>
#include <opm/simulators/flow/FlowMainEbos.hpp>
#include <opm/simulators/utils/moduleVersion.hpp>
#include <opm/simulators/utils/ParallelEclipseState.hpp>
#include <opm/models/utils/propertysystem.hh>
#include <opm/models/utils/parametersystem.hh>
#include <opm/simulators/flow/MissingFeatures.hpp>

@@ -348,13 +350,21 @@ int main(int argc, char** argv)

        Opm::FlowMainEbos<PreTypeTag>::printPRTHeader(outputCout);

        deck.reset( new Opm::Deck( parser.parseFile(deckFilename , parseContext, errorGuard)));
        Opm::MissingFeatures::checkKeywords(*deck, parseContext, errorGuard);
        if ( outputCout )
            Opm::checkDeck(*deck, parser, parseContext, errorGuard);

        eclipseState.reset( new Opm::EclipseState(*deck));
#ifdef HAVE_MPI
        Opm::ParallelEclipseState* parState;
#endif
        if (mpiRank == 0) {
            deck.reset( new Opm::Deck( parser.parseFile(deckFilename , parseContext, errorGuard)));
            Opm::MissingFeatures::checkKeywords(*deck, parseContext, errorGuard);
            if ( outputCout )
                Opm::checkDeck(*deck, parser, parseContext, errorGuard);

#ifdef HAVE_MPI
            parState = new Opm::ParallelEclipseState(*deck);
            eclipseState.reset(parState);
#else
            eclipseState.reset(new Opm::EclipseState(*deck));
#endif
        /*
          For the time being initializing wells and groups from the
          restart file is not possible, but work is underways and it is

@@ -382,9 +392,13 @@ int main(int argc, char** argv)
        else {
            summaryConfig.reset(new Opm::SummaryConfig);
            schedule.reset(new Opm::Schedule);
            Opm::Mpi::receiveAndUnpack(*summaryConfig, Dune::MPIHelper::getCollectiveCommunication());
            Opm::Mpi::receiveAndUnpack(*schedule, Dune::MPIHelper::getCollectiveCommunication());
            parState = new Opm::ParallelEclipseState;
            Opm::Mpi::receiveAndUnpack(*summaryConfig, mpiHelper.getCollectiveCommunication());
            Opm::Mpi::receiveAndUnpack(*schedule, mpiHelper.getCollectiveCommunication());
            eclipseState.reset(parState);
        }
        Opm::EclMpiSerializer ser(mpiHelper.getCollectiveCommunication());
        ser.broadcast(*parState);
#endif

        Opm::checkConsistentArrayDimensions(*eclipseState, *schedule, parseContext, errorGuard);

@@ -410,13 +424,13 @@ int main(int argc, char** argv)
        // oil-gas
        if (phases.active( Opm::Phase::GAS ))
        {
            Opm::flowEbosGasOilSetDeck(externalSetupTimer.elapsed(), *deck, *eclipseState, *schedule, *summaryConfig);
            Opm::flowEbosGasOilSetDeck(externalSetupTimer.elapsed(), deck.get(), *eclipseState, *schedule, *summaryConfig);
            return Opm::flowEbosGasOilMain(argc, argv, outputCout, outputFiles);
        }
        // oil-water
        else if ( phases.active( Opm::Phase::WATER ) )
        {
            Opm::flowEbosOilWaterSetDeck(externalSetupTimer.elapsed(), *deck, *eclipseState, *schedule, *summaryConfig);
            Opm::flowEbosOilWaterSetDeck(externalSetupTimer.elapsed(), deck.get(), *eclipseState, *schedule, *summaryConfig);
            return Opm::flowEbosOilWaterMain(argc, argv, outputCout, outputFiles);
        }
        else {

@@ -444,37 +458,37 @@ int main(int argc, char** argv)
        }

        if ( phases.size() == 3 ) { // oil water polymer case
            Opm::flowEbosOilWaterPolymerSetDeck(externalSetupTimer.elapsed(), *deck, *eclipseState, *schedule, *summaryConfig);
            Opm::flowEbosOilWaterPolymerSetDeck(externalSetupTimer.elapsed(), deck.get(), *eclipseState, *schedule, *summaryConfig);
            return Opm::flowEbosOilWaterPolymerMain(argc, argv, outputCout, outputFiles);
        } else {
            Opm::flowEbosPolymerSetDeck(externalSetupTimer.elapsed(), *deck, *eclipseState, *schedule, *summaryConfig);
            Opm::flowEbosPolymerSetDeck(externalSetupTimer.elapsed(), deck.get(), *eclipseState, *schedule, *summaryConfig);
            return Opm::flowEbosPolymerMain(argc, argv, outputCout, outputFiles);
        }
        }
        // Foam case
        else if ( phases.active( Opm::Phase::FOAM ) ) {
            Opm::flowEbosFoamSetDeck(externalSetupTimer.elapsed(), *deck, *eclipseState, *schedule, *summaryConfig);
            Opm::flowEbosFoamSetDeck(externalSetupTimer.elapsed(), deck.get(), *eclipseState, *schedule, *summaryConfig);
            return Opm::flowEbosFoamMain(argc, argv, outputCout, outputFiles);
        }
        // Brine case
        else if ( phases.active( Opm::Phase::BRINE ) ) {
            Opm::flowEbosBrineSetDeck(externalSetupTimer.elapsed(), *deck, *eclipseState, *schedule, *summaryConfig);
            Opm::flowEbosBrineSetDeck(externalSetupTimer.elapsed(), deck.get(), *eclipseState, *schedule, *summaryConfig);
            return Opm::flowEbosBrineMain(argc, argv, outputCout, outputFiles);
        }
        // Solvent case
        else if ( phases.active( Opm::Phase::SOLVENT ) ) {
            Opm::flowEbosSolventSetDeck(externalSetupTimer.elapsed(), *deck, *eclipseState, *schedule, *summaryConfig);
            Opm::flowEbosSolventSetDeck(externalSetupTimer.elapsed(), deck.get(), *eclipseState, *schedule, *summaryConfig);
            return Opm::flowEbosSolventMain(argc, argv, outputCout, outputFiles);
        }
        // Energy case
        else if (eclipseState->getSimulationConfig().isThermal()) {
            Opm::flowEbosEnergySetDeck(externalSetupTimer.elapsed(), *deck, *eclipseState, *schedule, *summaryConfig);
            Opm::flowEbosEnergySetDeck(externalSetupTimer.elapsed(), deck.get(), *eclipseState, *schedule, *summaryConfig);
            return Opm::flowEbosEnergyMain(argc, argv, outputCout, outputFiles);
        }
#endif // FLOW_BLACKOIL_ONLY
        // Blackoil case
        else if( phases.size() == 3 ) {
            Opm::flowEbosBlackoilSetDeck(externalSetupTimer.elapsed(), *deck, *eclipseState, *schedule, *summaryConfig);
            Opm::flowEbosBlackoilSetDeck(externalSetupTimer.elapsed(), deck.get(), *eclipseState, *schedule, *summaryConfig);
            return Opm::flowEbosBlackoilMain(argc, argv, outputCout, outputFiles);
        }
        else
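The *SetDeck helpers called above change from taking a Deck& to a Deck* because, after this commit, only the root rank ever owns a deck; every other rank passes deck.get() == nullptr straight through to the vanguard. A tiny stand-alone illustration of that design choice (the types and setExternalDeck() below are stand-ins, not the OPM classes):

// Why a pointer instead of a reference: "no deck on this rank" must be expressible.
#include <iostream>
#include <memory>

struct Deck {};                       // stand-in for Opm::Deck

void setExternalDeck(Deck* deck)      // mirrors the new pointer-based signature
{
    if (deck == nullptr)
        std::cout << "non-root rank: no deck, state arrives via MPI broadcast\n";
    else
        std::cout << "root rank: deck available locally\n";
}

int main()
{
    std::unique_ptr<Deck> deck;       // only populated on the root rank
    bool isRoot = true;               // assumption for this example
    if (isRoot)
        deck = std::make_unique<Deck>();
    setExternalDeck(deck.get());      // nullptr on non-root ranks, as in the diff
}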
@@ -34,13 +34,13 @@

namespace Opm {

void flowEbosBlackoilSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig)
void flowEbosBlackoilSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig)
{
    typedef TTAG(EclFlowProblem) TypeTag;
    typedef GET_PROP_TYPE(TypeTag, Vanguard) Vanguard;

    Vanguard::setExternalSetupTime(setupTime);
    Vanguard::setExternalDeck(&deck);
    Vanguard::setExternalDeck(deck);
    Vanguard::setExternalEclState(&eclState);
    Vanguard::setExternalSchedule(&schedule);
    Vanguard::setExternalSummaryConfig(&summaryConfig);

@@ -23,7 +23,7 @@
#include <opm/parser/eclipse/EclipseState/SummaryConfig/SummaryConfig.hpp>

namespace Opm {
void flowEbosBlackoilSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig);
void flowEbosBlackoilSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig);
int flowEbosBlackoilMain(int argc, char** argv, bool outputCout, bool outputFiles);
}

@@ -36,13 +36,13 @@ SET_BOOL_PROP(EclFlowBrineProblem, EnableBrine, true);
}}

namespace Opm {
void flowEbosBrineSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig)
void flowEbosBrineSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig)
{
    typedef TTAG(EclFlowBrineProblem) TypeTag;
    typedef GET_PROP_TYPE(TypeTag, Vanguard) Vanguard;

    Vanguard::setExternalSetupTime(setupTime);
    Vanguard::setExternalDeck(&deck);
    Vanguard::setExternalDeck(deck);
    Vanguard::setExternalEclState(&eclState);
    Vanguard::setExternalSchedule(&schedule);
    Vanguard::setExternalSummaryConfig(&summaryConfig);

@@ -24,7 +24,7 @@

namespace Opm {
void flowEbosBrineSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig);
void flowEbosBrineSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig);
int flowEbosBrineMain(int argc, char** argv, bool outputCout, bool outputFiles);
}

@@ -36,13 +36,13 @@ SET_BOOL_PROP(EclFlowEnergyProblem, EnableEnergy, true);
}}

namespace Opm {
void flowEbosEnergySetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig)
void flowEbosEnergySetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig)
{
    typedef TTAG(EclFlowEnergyProblem) TypeTag;
    typedef GET_PROP_TYPE(TypeTag, Vanguard) Vanguard;

    Vanguard::setExternalSetupTime(setupTime);
    Vanguard::setExternalDeck(&deck);
    Vanguard::setExternalDeck(deck);
    Vanguard::setExternalEclState(&eclState);
    Vanguard::setExternalSchedule(&schedule);
    Vanguard::setExternalSummaryConfig(&summaryConfig);

@@ -23,7 +23,7 @@
#include <opm/parser/eclipse/EclipseState/SummaryConfig/SummaryConfig.hpp>

namespace Opm {
void flowEbosEnergySetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig);
void flowEbosEnergySetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig);
int flowEbosEnergyMain(int argc, char** argv, bool outputCout, bool outputFiles);
}

@@ -36,13 +36,13 @@ SET_BOOL_PROP(EclFlowFoamProblem, EnableFoam, true);
}}

namespace Opm {
void flowEbosFoamSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig)
void flowEbosFoamSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig)
{
    typedef TTAG(EclFlowFoamProblem) TypeTag;
    typedef GET_PROP_TYPE(TypeTag, Vanguard) Vanguard;

    Vanguard::setExternalSetupTime(setupTime);
    Vanguard::setExternalDeck(&deck);
    Vanguard::setExternalDeck(deck);
    Vanguard::setExternalEclState(&eclState);
    Vanguard::setExternalSchedule(&schedule);
    Vanguard::setExternalSummaryConfig(&summaryConfig);

@@ -24,7 +24,7 @@

namespace Opm {
void flowEbosFoamSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig);
void flowEbosFoamSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig);
int flowEbosFoamMain(int argc, char** argv, bool outputCout, bool outputFiles);
}

@@ -60,13 +60,13 @@ public:
}}

namespace Opm {
void flowEbosGasOilSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig)
void flowEbosGasOilSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig)
{
    typedef TTAG(EclFlowGasOilProblem) TypeTag;
    typedef GET_PROP_TYPE(TypeTag, Vanguard) Vanguard;

    Vanguard::setExternalSetupTime(setupTime);
    Vanguard::setExternalDeck(&deck);
    Vanguard::setExternalDeck(deck);
    Vanguard::setExternalEclState(&eclState);
    Vanguard::setExternalSchedule(&schedule);
    Vanguard::setExternalSummaryConfig(&summaryConfig);

@@ -23,7 +23,7 @@
#include <opm/parser/eclipse/EclipseState/SummaryConfig/SummaryConfig.hpp>

namespace Opm {
void flowEbosGasOilSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig);
void flowEbosGasOilSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig);
int flowEbosGasOilMain(int argc, char** argv, bool outputCout, bool outputFiles);
}

@@ -60,13 +60,13 @@ public:
}}

namespace Opm {
void flowEbosOilWaterSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig)
void flowEbosOilWaterSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig)
{
    typedef TTAG(EclFlowOilWaterProblem) TypeTag;
    typedef GET_PROP_TYPE(TypeTag, Vanguard) Vanguard;

    Vanguard::setExternalSetupTime(setupTime);
    Vanguard::setExternalDeck(&deck);
    Vanguard::setExternalDeck(deck);
    Vanguard::setExternalEclState(&eclState);
    Vanguard::setExternalSchedule(&schedule);
    Vanguard::setExternalSummaryConfig(&summaryConfig);

@@ -23,7 +23,7 @@
#include <opm/parser/eclipse/EclipseState/SummaryConfig/SummaryConfig.hpp>

namespace Opm {
void flowEbosOilWaterSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig);
void flowEbosOilWaterSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig);
int flowEbosOilWaterMain(int argc, char** argv, bool outputCout, bool outputFiles);
}

@@ -61,13 +61,13 @@ public:
}}

namespace Opm {
void flowEbosOilWaterPolymerSetDeck(double setupTime, Deck& deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig)
void flowEbosOilWaterPolymerSetDeck(double setupTime, Deck* deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig)
{
    typedef TTAG(EclFlowOilWaterPolymerProblem) TypeTag;
    typedef GET_PROP_TYPE(TypeTag, Vanguard) Vanguard;

    Vanguard::setExternalSetupTime(setupTime);
    Vanguard::setExternalDeck(&deck);
    Vanguard::setExternalDeck(deck);
    Vanguard::setExternalEclState(&eclState);
    Vanguard::setExternalSchedule(&schedule);
    Vanguard::setExternalSummaryConfig(&summaryConfig);

@@ -23,7 +23,7 @@
#include <opm/parser/eclipse/EclipseState/SummaryConfig/SummaryConfig.hpp>

namespace Opm {
void flowEbosOilWaterPolymerSetDeck(double setupTime, Deck& deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig);
void flowEbosOilWaterPolymerSetDeck(double setupTime, Deck* deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig);
int flowEbosOilWaterPolymerMain(int argc, char** argv, bool outputCout, bool outputFiles);
}

@@ -36,13 +36,13 @@ SET_BOOL_PROP(EclFlowPolymerProblem, EnablePolymer, true);
}}

namespace Opm {
void flowEbosPolymerSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig)
void flowEbosPolymerSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig)
{
    typedef TTAG(EclFlowPolymerProblem) TypeTag;
    typedef GET_PROP_TYPE(TypeTag, Vanguard) Vanguard;

    Vanguard::setExternalSetupTime(setupTime);
    Vanguard::setExternalDeck(&deck);
    Vanguard::setExternalDeck(deck);
    Vanguard::setExternalEclState(&eclState);
    Vanguard::setExternalSchedule(&schedule);
    Vanguard::setExternalSummaryConfig(&summaryConfig);

@@ -23,7 +23,7 @@
#include <opm/parser/eclipse/EclipseState/SummaryConfig/SummaryConfig.hpp>

namespace Opm {
void flowEbosPolymerSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig);
void flowEbosPolymerSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig);
int flowEbosPolymerMain(int argc, char** argv, bool outputCout, bool outputFiles);
}

@@ -36,13 +36,13 @@ SET_BOOL_PROP(EclFlowSolventProblem, EnableSolvent, true);
}}

namespace Opm {
void flowEbosSolventSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig)
void flowEbosSolventSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig)
{
    typedef TTAG(EclFlowSolventProblem) TypeTag;
    typedef GET_PROP_TYPE(TypeTag, Vanguard) Vanguard;

    Vanguard::setExternalSetupTime(setupTime);
    Vanguard::setExternalDeck(&deck);
    Vanguard::setExternalDeck(deck);
    Vanguard::setExternalEclState(&eclState);
    Vanguard::setExternalSchedule(&schedule);
    Vanguard::setExternalSummaryConfig(&summaryConfig);

@@ -24,7 +24,7 @@

namespace Opm {
void flowEbosSolventSetDeck(double setupTime, Deck &deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig);
void flowEbosSolventSetDeck(double setupTime, Deck *deck, EclipseState& eclState, Schedule& schedule, SummaryConfig& summaryConfig);
int flowEbosSolventMain(int argc, char** argv, bool outoutCout, bool outputFiles);
}
@@ -450,11 +450,15 @@ namespace Opm
        // Run relperm diagnostics if we have more than one phase.
        if (FluidSystem::numActivePhases() > 1) {
            RelpermDiagnostics diagnostic;
            if (mpi_size_ > 1)
            if (mpi_size_ > 1) {
                this->grid().switchToGlobalView();
                static_cast<ParallelEclipseState&>(this->eclState()).switchToGlobalProps();
            }
            diagnostic.diagnosis(eclState(), deck(), this->grid());
            if (mpi_size_ > 1)
            if (mpi_size_ > 1) {
                this->grid().switchToDistributedView();
                static_cast<ParallelEclipseState&>(this->eclState()).switchToDistributedProps();
            }
        }
    }
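The relperm diagnostics above need the global grid view and the global field properties, so the code switches to the global representation, runs the check, and switches back. A hypothetical RAII helper (not part of this PR) would make the pairing explicit and exception safe:

// Sketch of an RAII guard for the switch-to-global / switch-back-to-distributed pairing.
#include <functional>
#include <utility>

class ScopedGlobalView {
public:
    ScopedGlobalView(std::function<void()> enterGlobal, std::function<void()> backToDistributed)
        : restore_(std::move(backToDistributed))
    {
        enterGlobal();                     // e.g. switchToGlobalView() + switchToGlobalProps()
    }
    ~ScopedGlobalView() { restore_(); }    // e.g. switchToDistributedView() + switchToDistributedProps()
private:
    std::function<void()> restore_;
};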
opm/simulators/utils/ParallelEclipseState.cpp (new file, 354 lines)

@@ -0,0 +1,354 @@
/*
  Copyright 2019 Equinor AS.

  This file is part of the Open Porous Media project (OPM).

  OPM is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 3 of the License, or
  (at your option) any later version.

  OPM is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#include <config.h>

#include "ParallelEclipseState.hpp"
#include "ParallelRestart.hpp"
#include <ebos/eclmpiserializer.hh>

namespace Opm {


ParallelFieldPropsManager::ParallelFieldPropsManager(FieldPropsManager& manager)
    : m_manager(manager)
    , m_comm(Dune::MPIHelper::getCollectiveCommunication())
{
}


std::vector<int> ParallelFieldPropsManager::actnum() const
{
    if (m_comm.rank() == 0)
        return m_manager.actnum();

    return{};
}


void ParallelFieldPropsManager::reset_actnum(const std::vector<int>& actnum)
{
    if (m_comm.rank() != 0)
        OPM_THROW(std::runtime_error, "reset_actnum should only be called on root process.");
    m_manager.reset_actnum(actnum);
}


std::vector<double> ParallelFieldPropsManager::porv(bool global) const
{
    std::vector<double> result;
    if (m_comm.rank() == 0)
        result = m_manager.porv(global);
    size_t size = result.size();
    m_comm.broadcast(&size, 1, 0);
    result.resize(size);
    m_comm.broadcast(result.data(), size, 0);
    return result;
}


const std::vector<int>& ParallelFieldPropsManager::get_int(const std::string& keyword) const
{
    auto it = m_intProps.find(keyword);
    if (it == m_intProps.end())
        OPM_THROW(std::runtime_error, "No integer property field: " + keyword);

    return it->second;
}

std::vector<int> ParallelFieldPropsManager::get_global_int(const std::string& keyword) const
{
    std::vector<int> result;
    if (m_comm.rank() == 0)
        result = m_manager.get_global_int(keyword);
    size_t size = result.size();
    m_comm.broadcast(&size, 1, 0);
    result.resize(size);
    m_comm.broadcast(result.data(), size, 0);

    return result;
}


const std::vector<double>& ParallelFieldPropsManager::get_double(const std::string& keyword) const
{
    auto it = m_doubleProps.find(keyword);
    if (it == m_doubleProps.end())
        OPM_THROW(std::runtime_error, "No double property field: " + keyword);

    return it->second;
}


std::vector<double> ParallelFieldPropsManager::get_global_double(const std::string& keyword) const
{
    std::vector<double> result;
    if (m_comm.rank() == 0)
        result = m_manager.get_global_double(keyword);
    size_t size = result.size();
    m_comm.broadcast(&size, 1, 0);
    result.resize(size);
    m_comm.broadcast(result.data(), size, 0);

    return result;
}


bool ParallelFieldPropsManager::has_int(const std::string& keyword) const
{
    auto it = m_intProps.find(keyword);
    return it != m_intProps.end();
}


bool ParallelFieldPropsManager::has_double(const std::string& keyword) const
{
    auto it = m_doubleProps.find(keyword);
    return it != m_doubleProps.end();
}


ParallelEclipseState::ParallelEclipseState()
    : m_fieldProps(field_props)
{
}


ParallelEclipseState::ParallelEclipseState(const Deck& deck)
    : EclipseState(deck)
    , m_fieldProps(field_props)
{
}


std::size_t ParallelEclipseState::packSize(EclMpiSerializer& serializer) const
{
    return serializer.packSize(m_tables) +
           serializer.packSize(m_runspec) +
           serializer.packSize(m_eclipseConfig) +
           serializer.packSize(m_deckUnitSystem) +
           serializer.packSize(m_inputNnc) +
           serializer.packSize(m_inputEditNnc) +
           serializer.packSize(m_gridDims) +
           serializer.packSize(m_simulationConfig) +
           serializer.packSize(m_transMult) +
           serializer.packSize(m_faults) +
           serializer.packSize(m_title);

}


void ParallelEclipseState::pack(std::vector<char>& buffer, int& position,
                                EclMpiSerializer& serializer) const
{
    serializer.pack(m_tables, buffer, position);
    serializer.pack(m_runspec, buffer, position);
    serializer.pack(m_eclipseConfig, buffer, position);
    serializer.pack(m_deckUnitSystem, buffer, position);
    serializer.pack(m_inputNnc, buffer, position);
    serializer.pack(m_inputEditNnc, buffer, position);
    serializer.pack(m_gridDims, buffer, position);
    serializer.pack(m_simulationConfig, buffer, position);
    serializer.pack(m_transMult, buffer, position);
    serializer.pack(m_faults, buffer, position);
    serializer.pack(m_title, buffer, position);
}


void ParallelEclipseState::unpack(std::vector<char>& buffer, int& position,
                                  EclMpiSerializer& serializer)
{
    serializer.unpack(m_tables, buffer, position);
    serializer.unpack(m_runspec, buffer, position);
    serializer.unpack(m_eclipseConfig, buffer, position);
    serializer.unpack(m_deckUnitSystem, buffer, position);
    serializer.unpack(m_inputNnc, buffer, position);
    serializer.unpack(m_inputEditNnc, buffer, position);
    serializer.unpack(m_gridDims, buffer, position);
    serializer.unpack(m_simulationConfig, buffer, position);
    serializer.unpack(m_transMult, buffer, position);
    serializer.unpack(m_faults, buffer, position);
    serializer.unpack(m_title, buffer, position);
}


const FieldPropsManager& ParallelEclipseState::fieldProps() const
{
    if (!m_parProps && Dune::MPIHelper::getCollectiveCommunication().rank() != 0)
        OPM_THROW(std::runtime_error, "Attempt to access field properties on no-root process before switch to parallel properties");

    if (!m_parProps || Dune::MPIHelper::getCollectiveCommunication().size() == 1)
        return this->EclipseState::fieldProps();

    return m_fieldProps;
}


const FieldPropsManager& ParallelEclipseState::globalFieldProps() const
{
    if (Dune::MPIHelper::getCollectiveCommunication().rank() != 0)
        OPM_THROW(std::runtime_error, "Attempt to access global field properties on non-root process");
    return this->EclipseState::globalFieldProps();
}


const EclipseGrid& ParallelEclipseState::getInputGrid() const
{
    if (Dune::MPIHelper::getCollectiveCommunication().rank() != 0)
        OPM_THROW(std::runtime_error, "Attempt to access eclipse grid on non-root process");
    return this->EclipseState::getInputGrid();
}


void ParallelEclipseState::switchToGlobalProps()
{
    m_parProps = false;
}


void ParallelEclipseState::switchToDistributedProps()
{
    const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
    if (comm.size() == 1) // No need for the parallel frontend
        return;

    m_parProps = true;
}


namespace {


template<class T>
struct GetField {
    GetField(const FieldPropsManager& propMan) : props(propMan) {}
    std::vector<T> getField(const std::string& key) const;
    const FieldPropsManager& props;
};


template<>
std::vector<int> GetField<int>::getField(const std::string& key) const {
    return props.get_global_int(key);
}


template<>
std::vector<double> GetField<double>::getField(const std::string& key) const {
    return props.get_global_double(key);
}


template<class T>
void extractRootProps(const std::vector<int>& localToGlobal,
                      const std::vector<std::string>& keys,
                      const GetField<T>& getter,
                      std::map<std::string,std::vector<T>>& localMap)
{
    for (const std::string& key : keys) {
        auto prop = getter.getField(key);
        std::vector<T>& local = localMap[key];
        local.reserve(localToGlobal.size());
        for (int cell : localToGlobal) {
            local.push_back(prop[cell]);
        }
    }
}


template<class T>
void packProps(const std::vector<int>& l2gCell,
               const std::vector<std::string>& keys,
               const GetField<T>& getter,
               std::vector<char>& buffer, int& position)
{
    const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
    std::vector<T> sendData(l2gCell.size());
    for (const std::string& key : keys) {
        auto prop = getter.getField(key);
        size_t idx = 0;
        for (int cell : l2gCell)
            sendData[idx++] = prop[cell];
        Mpi::pack(sendData, buffer, position, comm);
    }
}


}


void ParallelEclipseState::setupLocalProps(const std::vector<int>& localToGlobal)
{
#if HAVE_MPI
    const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
    if (comm.rank() == 0) {
        extractRootProps(localToGlobal, this->globalFieldProps().keys<int>(),
                         GetField<int>(this->globalFieldProps()),
                         m_fieldProps.m_intProps);
        extractRootProps(localToGlobal, this->globalFieldProps().keys<double>(),
                         GetField<double>(this->globalFieldProps()),
                         m_fieldProps.m_doubleProps);
        for (int i = 1; i < comm.size(); ++i) {
            std::vector<int> l2gCell;
            size_t size;
            MPI_Recv(&size, 1, Dune::MPITraits<size_t>::getType(), i, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
            l2gCell.resize(size);
            MPI_Recv(l2gCell.data(), size, MPI_INT, i, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
            size_t cells = l2gCell.size();
            const auto& intKeys = this->globalFieldProps().keys<int>();
            const auto& dblKeys = this->globalFieldProps().keys<double>();
            size = Mpi::packSize(intKeys, comm) +
                   Mpi::packSize(dblKeys,comm) +
                   intKeys.size() * Mpi::packSize(std::vector<int>(cells), comm) +
                   dblKeys.size() * Mpi::packSize(std::vector<double>(cells), comm);

            std::vector<char> buffer(size);
            int position = 0;
            Mpi::pack(intKeys, buffer, position, comm);
            Mpi::pack(dblKeys, buffer, position, comm);
            packProps(l2gCell, intKeys, GetField<int>(this->globalFieldProps()),
                      buffer, position);
            packProps(l2gCell, dblKeys, GetField<double>(this->globalFieldProps()),
                      buffer, position);
            MPI_Send(&position, 1, MPI_INT, i, 0, MPI_COMM_WORLD);
            MPI_Send(buffer.data(), position, MPI_CHAR, i, 0, MPI_COMM_WORLD);
        }
    } else {
        size_t l2gSize = localToGlobal.size();
        MPI_Send(&l2gSize, 1, Dune::MPITraits<size_t>::getType(), 0, 0, MPI_COMM_WORLD);
        MPI_Send(localToGlobal.data(), localToGlobal.size(), MPI_INT, 0, 0, MPI_COMM_WORLD);
        int size;
        MPI_Recv(&size, 1, MPI_INT, 0, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
        std::vector<char> buffer(size);
        MPI_Recv(buffer.data(), size, MPI_CHAR, 0, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
        std::vector<std::string> intKeys, dblKeys;
        int position = 0;
        Mpi::unpack(intKeys, buffer, position, comm);
        Mpi::unpack(dblKeys, buffer, position, comm);
        for (const std::string& key : intKeys) {
            Mpi::unpack(m_fieldProps.m_intProps[key], buffer, position, comm);
        }
        for (const std::string& key : dblKeys) {
            Mpi::unpack(m_fieldProps.m_doubleProps[key], buffer, position, comm);
        }
    }
#endif
}


} // end namespace Opm
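setupLocalProps() above uses a simple two-message protocol per worker rank: the root first sends the byte count of the packed property buffer, then the buffer itself, and the worker unpacks the keys and the per-cell vectors from it. A self-contained sketch of just that length-prefixed exchange with raw MPI (illustrative, not the Opm::Mpi helpers):

// Length-prefixed char-buffer exchange between root and a worker rank (sketch).
#include <mpi.h>
#include <vector>

void sendBuffer(const std::vector<char>& buffer, int dest, MPI_Comm comm)
{
    int size = static_cast<int>(buffer.size());
    MPI_Send(&size, 1, MPI_INT, dest, 0, comm);                 // announce the length
    MPI_Send(buffer.data(), size, MPI_CHAR, dest, 0, comm);     // ship the payload
}

std::vector<char> recvBuffer(int source, MPI_Comm comm)
{
    int size = 0;
    MPI_Recv(&size, 1, MPI_INT, source, 0, comm, MPI_STATUS_IGNORE);
    std::vector<char> buffer(size);
    MPI_Recv(buffer.data(), size, MPI_CHAR, source, 0, comm, MPI_STATUS_IGNORE);
    return buffer;
}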
opm/simulators/utils/ParallelEclipseState.hpp (new file, 159 lines)

@@ -0,0 +1,159 @@
/*
  Copyright 2019 Equinor AS.

  This file is part of the Open Porous Media project (OPM).

  OPM is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 3 of the License, or
  (at your option) any later version.

  OPM is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef PARALLEL_ECLIPSE_STATE_HPP
#define PARALLEL_ECLIPSE_STATE_HPP

#include <opm/parser/eclipse/EclipseState/EclipseState.hpp>
#include <dune/common/parallel/mpihelper.hh>

namespace Opm {


class EclMpiSerializer;

/*! \brief Parallel frontend to the field properties.
 *
 * \details This is a parallel frontend to the mpi-unaware
 *          FieldPropsManager in opm-common. It contains
 *          process-local field properties on each process using
 *          compressed indexing.
 */

class ParallelFieldPropsManager : public FieldPropsManager {
public:
    friend class ParallelEclipseState; //!< Friend so props can be setup.

    //! \brief Constructor.
    //! \param manager The field property manager to wrap.
    ParallelFieldPropsManager(FieldPropsManager& manager);

    //! \brief Returns actnum vector.
    //! \details If called on non-root process an empty vector is returned
    std::vector<int> actnum() const override;

    //! \brief Reset the actnum vector.
    //! \details Can only be called on root process
    void reset_actnum(const std::vector<int>& actnum) override;

    //! \brief Returns the pore volume vector.
    std::vector<double> porv(bool global = false) const override;

    //! \brief Returns an int property using compressed indices.
    //! \param keyword Name of property
    const std::vector<int>& get_int(const std::string& keyword) const override;

    //! \brief Returns a double property using compressed indices.
    //! \param keyword Name of property
    const std::vector<double>& get_double(const std::string& keyword) const override;

    //! \brief Returns an int property using global cartesian indices.
    //! \param keyword Name of property
    //! \details The vector is broadcast from root process
    std::vector<int> get_global_int(const std::string& keyword) const override;

    //! \brief Returns a double property using global cartesian indices.
    //! \param keyword Name of property
    //! \details The vector is broadcast from root process
    std::vector<double> get_global_double(const std::string& keyword) const override;

    //! \brief Check if an integer property is available.
    //! \param keyword Name of property
    bool has_int(const std::string& keyword) const override;

    //! \brief Check if a double property is available.
    //! \param keyword Name of property
    bool has_double(const std::string& keyword) const override;

protected:
    std::map<std::string, std::vector<int>> m_intProps; //!< Map of integer properties in process-local compressed indices.
    std::map<std::string, std::vector<double>> m_doubleProps; //!< Map of double properties in process-local compressed indices.
    FieldPropsManager& m_manager; //!< Underlying field property manager (only used on root process).
    Dune::CollectiveCommunication<Dune::MPIHelper::MPICommunicator> m_comm; //!< Collective communication handler.
};


/*! \brief Parallel frontend to the EclipseState
 *
 * \details This is a parallel frontend to the mpi-unaware EclipseState in opm-common.
 *          It extends the eclipse state class with serialization support, and
 *          contains methods to switch between full global field properties,
 *          and distributed field properties for consumption in the simulator.
 *          Additionally, it has a few sanity checks to ensure that the data that
 *          is only available on the root process is not attempted to be accessed
 *          on non-root processes.
 */

class ParallelEclipseState : public EclipseState {
public:
    //! \brief Default constructor.
    ParallelEclipseState();

    //! \brief Construct from a deck instance.
    //! \param deck The deck to construct from
    //! \details Only called on root process
    ParallelEclipseState(const Deck& deck);

    //! \brief Calculates the size of serialized data.
    //! \param serializer The serializer to use
    std::size_t packSize(EclMpiSerializer& serializer) const;

    //! \brief Serialize data.
    //! \param buffer Buffer to write serialized data into
    //! \param position Position in buffer
    //! \param serializer The serializer to use
    void pack(std::vector<char>& buffer, int& position, EclMpiSerializer& serializer) const;
    //! \brief Deserialize data.
    //! \param buffer Buffer to read serialized data from
    //! \param position Position in buffer
    //! \param serializer The serializer to use
    void unpack(std::vector<char>& buffer, int& position, EclMpiSerializer& serializer);

    //! \brief Switch to global field properties.
    //! \details Called on root process to use the global field properties
    void switchToGlobalProps();

    //! \brief Switch to distributed field properties.
    //! \details Called on root process to use the distributed field properties.
    //!          setupLocalProps must be called prior to this.
    void switchToDistributedProps();

    //! \brief Setup local properties.
    //! \param localToGlobal Map from local cells on calling process to global cartesian cell
    //! \details Must be called after grid has been partitioned
    void setupLocalProps(const std::vector<int>& localToGlobal);

    //! \brief Returns a const ref to current field properties.
    const FieldPropsManager& fieldProps() const override;

    //! \brief Returns a const ref to global field properties.
    //! \details Can only be called on root process.
    const FieldPropsManager& globalFieldProps() const override;

    //! \brief Returns a const ref to the eclipse grid.
    //! \details Can only be called on root process.
    const EclipseGrid& getInputGrid() const override;

private:
    bool m_parProps = false; //! True to use distributed properties on root process
    ParallelFieldPropsManager m_fieldProps; //!< The parallel field properties
};


} // end namespace Opm
#endif // PARALLEL_ECLIPSE_STATE_HPP
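The key behavioural switch in this class is that fieldProps() refuses global access on non-root ranks until the distributed properties have been set up, and afterwards returns the rank-local copy everywhere. A compact, self-contained stand-in for that switching logic (illustrative types only, not the OPM classes):

// Sketch of the m_parProps switch implemented by ParallelEclipseState::fieldProps().
#include <stdexcept>
#include <string>

class PropsFrontend {
public:
    explicit PropsFrontend(int rank) : rank_(rank) {}

    void switchToDistributedProps() { distributed_ = true; }
    void switchToGlobalProps()      { distributed_ = false; }

    const std::string& fieldProps() const
    {
        if (!distributed_ && rank_ != 0)
            throw std::runtime_error("global properties live on the root rank only");
        return distributed_ ? localProps_ : globalProps_;
    }

private:
    int rank_;
    bool distributed_ = false;
    std::string globalProps_ = "global";  // stand-in for the full FieldPropsManager
    std::string localProps_  = "local";   // stand-in for the compressed local maps
};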
@@ -2037,6 +2037,12 @@ std::size_t packSize(const SolventDensityTable& data,
    return packSize(data.getSolventDensityColumn(), comm);
}

std::size_t packSize(const GridDims& data,
                     Dune::MPIHelper::MPICommunicator comm)
{
    return packSize(data.getNXYZ(), comm);
}

////// pack routines

template<class T>

@@ -3956,6 +3962,13 @@ void pack(const SolventDensityTable& data,
    pack(data.getSolventDensityColumn(), buffer, position, comm);
}

void pack(const GridDims& data,
          std::vector<char>& buffer, int& position,
          Dune::MPIHelper::MPICommunicator comm)
{
    pack(data.getNXYZ(), buffer, position, comm);
}

/// unpack routines

template<class T>

@@ -6722,6 +6735,16 @@ void unpack(SolventDensityTable& data, std::vector<char>& buffer, int& position,
    data = SolventDensityTable(tableValues);
}

void unpack(GridDims& data,
            std::vector<char>& buffer, int& position,
            Dune::MPIHelper::MPICommunicator comm)
{
    std::array<int,3> NXYZ;

    unpack(NXYZ, buffer, position, comm);
    data = GridDims(NXYZ);
}

#define INSTANTIATE_PACK_VECTOR(...) \
template std::size_t packSize(const std::vector<__VA_ARGS__>& data, \
                              Dune::MPIHelper::MPICommunicator comm); \
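The GridDims support added here follows the packSize/pack/unpack triple used throughout ParallelRestart: only the three grid dimensions travel over the wire, and the object is rebuilt from them on the receiving side. A self-contained sketch of the same idea written against the raw MPI packing calls that such helpers typically wrap (GridDimsLite is a stand-in, not Opm::GridDims):

// packSize/pack/unpack for a three-int "grid dimensions" value using MPI_Pack.
#include <mpi.h>
#include <array>
#include <vector>

struct GridDimsLite { std::array<int, 3> nxyz; };   // stand-in for Opm::GridDims

int packSize(const GridDimsLite&, MPI_Comm comm)
{
    int bytes = 0;
    MPI_Pack_size(3, MPI_INT, comm, &bytes);        // upper bound for three ints
    return bytes;
}

void pack(const GridDimsLite& dims, std::vector<char>& buffer, int& position, MPI_Comm comm)
{
    MPI_Pack(dims.nxyz.data(), 3, MPI_INT,
             buffer.data(), static_cast<int>(buffer.size()), &position, comm);
}

void unpack(GridDimsLite& dims, const std::vector<char>& buffer, int& position, MPI_Comm comm)
{
    MPI_Unpack(buffer.data(), static_cast<int>(buffer.size()), &position,
               dims.nxyz.data(), 3, MPI_INT, comm);
}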
@@ -103,6 +103,7 @@ class FaultCollection;
class FaultFace;
class FoamConfig;
class FoamData;
class GridDims;
class InitConfig;
class IOConfig;
template<class T> class IOrderSet;

@@ -721,6 +722,7 @@ ADD_PACK_PROTOTYPES(GConSale)
ADD_PACK_PROTOTYPES(GConSale::GCONSALEGroup)
ADD_PACK_PROTOTYPES(GConSump)
ADD_PACK_PROTOTYPES(GConSump::GCONSUMPGroup)
ADD_PACK_PROTOTYPES(GridDims)
ADD_PACK_PROTOTYPES(GuideRateConfig)
ADD_PACK_PROTOTYPES(GuideRateConfig::GroupTarget)
ADD_PACK_PROTOTYPES(GuideRateConfig::WellTarget)
@@ -2520,6 +2520,16 @@ BOOST_AUTO_TEST_CASE(SolventDensityTable)
}


BOOST_AUTO_TEST_CASE(GridDims)
{
#ifdef HAVE_MPI
    Opm::GridDims val1{ 1, 2, 3};
    auto val2 = PackUnpack(val1);
    DO_CHECKS(GridDims)
#endif
}


bool init_unit_test_func()
{
    return true;