Merge pull request #2459 from jalvestad/group_constraints_summary

Make the current Production/Injection Group Control parameters available for use in Restart
Bård Skaflestad
2020-03-19 16:54:34 +01:00
committed by GitHub
7 changed files with 106 additions and 7 deletions

View File

@@ -26,6 +26,7 @@
#include <opm/output/data/Cells.hpp>
#include <opm/output/data/Solution.hpp>
#include <opm/output/data/Wells.hpp>
#include <opm/output/data/Groups.hpp>
#include <opm/grid/common/p2pcommunicator.hh>
#include <dune/grid/utility/persistentcontainer.hh>
@@ -546,6 +547,46 @@ public:
};
class PackUnPackGroupData : public P2PCommunicatorType::DataHandleInterface
{
const Opm::data::Group& localGroupData_;
Opm::data::Group& globalGroupData_;
public:
PackUnPackGroupData(const Opm::data::Group& localGroupData,
Opm::data::Group& globalGroupData,
bool isIORank)
: localGroupData_(localGroupData)
, globalGroupData_(globalGroupData)
{
if (isIORank) {
MessageBufferType buffer;
pack(0, buffer);
// pass a dummy link to satisfy the virtual interface
int dummyLink = -1;
unpack(dummyLink, buffer);
}
}
// pack all data associated with link
void pack(int link, MessageBufferType& buffer)
{
// we should only get one link
if (link != 0)
throw std::logic_error("link in method pack is not 0 as expected");
// write all group data
localGroupData_.write(buffer);
}
// unpack all data associated with link
void unpack(int /*link*/, MessageBufferType& buffer)
{ globalGroupData_.read(buffer); }
};
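
The handle above works because Opm::data::Group, like the well data, can serialise itself to and from the message buffer through write() and read(). The real type lives in opm-common (opm/output/data/Groups.hpp); what follows is only a minimal sketch of such a container, assuming a map-like layout keyed by group name and a buffer type with templated read/write overloads:

#include <cstddef>
#include <map>
#include <string>
#include <utility>

// Hypothetical stand-ins for illustration; the actual payload in opm-common
// may be laid out quite differently.
struct GroupConstraints {
    int currentProdConstraint = 0;     // active production control mode
    int currentGasInjConstraint = 0;   // active gas-injection control mode
    int currentWaterInjConstraint = 0; // active water-injection control mode
};

struct GroupData {
    std::map<std::string, GroupConstraints> groups;

    // Serialise every group entry into the point-to-point message buffer.
    template <class MessageBufferType>
    void write(MessageBufferType& buffer) const
    {
        buffer.write(this->groups.size());
        for (const auto& entry : this->groups) {
            buffer.write(entry.first);
            buffer.write(entry.second.currentProdConstraint);
            buffer.write(entry.second.currentGasInjConstraint);
            buffer.write(entry.second.currentWaterInjConstraint);
        }
    }

    // Inverse of write(). It is called once per incoming link on the I/O
    // rank, so repeated calls merge the group entries of all ranks.
    template <class MessageBufferType>
    void read(MessageBufferType& buffer)
    {
        std::size_t numGroups = 0;
        buffer.read(numGroups);
        for (std::size_t i = 0; i < numGroups; ++i) {
            std::string name;
            GroupConstraints cgc;
            buffer.read(name);
            buffer.read(cgc.currentProdConstraint);
            buffer.read(cgc.currentGasInjConstraint);
            buffer.read(cgc.currentWaterInjConstraint);
            this->groups.emplace(std::move(name), cgc);
        }
    }
};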
class PackUnPackBlockData : public P2PCommunicatorType::DataHandleInterface
{
const std::map<std::pair<std::string, int>, double>& localBlockData_;
@@ -607,11 +648,13 @@ public:
// gather solution to rank 0 for EclipseWriter
void collect(const Opm::data::Solution& localCellData,
const std::map<std::pair<std::string, int>, double>& localBlockData,
const Opm::data::Wells& localWellData)
const Opm::data::Wells& localWellData,
const Opm::data::Group& localGroupData)
{
globalCellData_ = {};
globalBlockData_.clear();
globalWellData_.clear();
globalGroupData_.clear();
// index maps only have to be built when reordering is needed
if(!needsReordering && !isParallel())
@@ -635,6 +678,12 @@ public:
globalWellData_,
isIORank());
PackUnPackGroupData
packUnpackGroupData(localGroupData,
globalGroupData_,
isIORank());
PackUnPackBlockData
packUnpackBlockData(localBlockData,
globalBlockData_,
@@ -642,6 +691,7 @@ public:
toIORankComm_.exchange(packUnpackCellData);
toIORankComm_.exchange(packUnpackWellData);
toIORankComm_.exchange(packUnpackGroupData);
toIORankComm_.exchange(packUnpackBlockData);
@@ -661,6 +711,9 @@ public:
const Opm::data::Wells& globalWellData() const
{ return globalWellData_; }
const Opm::data::Group& globalGroupData() const
{ return globalGroupData_; }
bool isIORank() const
{ return toIORankComm_.rank() == ioRank; }
@@ -707,6 +760,7 @@ protected:
Opm::data::Solution globalCellData_;
std::map<std::pair<std::string, int>, double> globalBlockData_;
Opm::data::Wells globalWellData_;
Opm::data::Group globalGroupData_;
std::vector<int> localIdxToGlobalIdx_;
};
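
Taken together, the changes in this file extend the existing gather step: a caller now passes its per-rank group data into collect() next to the cell, block, and well data, and the I/O rank reads the merged result back through globalGroupData(). A condensed usage sketch, with object and variable names assumed for illustration:

// Hypothetical call site, mirroring the established well-data pattern.
collectToIORank.collect(localCellData, localBlockData, localWellData, localGroupData);

if (collectToIORank.isIORank()) {
    // A parallel run consumes the gathered container, which now holds the
    // groups of every rank; a serial run's local container is already complete.
    const Opm::data::Group& groupData = collectToIORank.isParallel()
        ? collectToIORank.globalGroupData()
        : localGroupData;
    // ... hand groupData on to the ECL output layer ...
}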

View File

@@ -42,6 +42,7 @@
#include <opm/output/eclipse/RestartValue.hpp>
#include <opm/output/data/Wells.hpp>
#include <opm/output/data/Groups.hpp>
#include <opm/material/common/Exceptions.hpp>
#include <opm/models/utils/propertysystem.hh>
@@ -567,6 +568,10 @@ public:
return wellDat;
}
Opm::data::Group groupData(const int /* reportStepIdx */, Opm::Schedule& /* sched */) const {
return {};
}
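
This overload is deliberately a stub: the legacy well-model path does not track group controls, so it reports an empty container and the restart output simply carries no group constraint data. For contrast, a rough sketch of what a backend that does track them might return; the Schedule accessors and the map-like data::Group payload here are assumptions for illustration, not the verbatim opm-common API:

// Hypothetical sketch only -- accessor and helper names are assumed.
Opm::data::Group groupData(const int reportStepIdx, Opm::Schedule& sched) const
{
    Opm::data::Group dw;
    for (const auto& gname : sched.groupNames(reportStepIdx)) {   // assumed accessor
        // Record each group's currently active production/injection
        // control so a RESTART file can reproduce the group state.
        dw.emplace(gname, this->currentConstraintsOf(gname));     // hypothetical helper
    }
    return dw;
}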
/*!
* \brief This method writes the complete state of all wells
* to the hard disk.

View File

@@ -32,6 +32,9 @@
#include "ecloutputblackoilmodule.hh"
#include <opm/models/blackoil/blackoilmodel.hh>
#include <opm/simulators/wells/BlackoilWellModel.hpp>
#include <opm/models/discretization/ecfv/ecfvdiscretization.hh>
#include <opm/models/io/baseoutputwriter.hh>
#include <opm/models/parallel/tasklets.hh>
@@ -39,6 +42,7 @@
#include <ebos/nncsorter.hpp>
#include <opm/output/eclipse/EclipseIO.hpp>
#include <opm/output/eclipse/RestartValue.hpp>
#include <opm/output/eclipse/Summary.hpp>
#include <opm/parser/eclipse/Units/UnitSystem.hpp>
@@ -257,6 +261,8 @@ public:
Opm::data::Wells localWellData = simulator_.problem().wellModel().wellData();
Opm::data::Group localGroupData = simulator_.problem().wellModel().groupData(reportStepNum, simulator_.vanguard().schedule());
const auto& gridView = simulator_.vanguard().gridView();
int numElements = gridView.size(/*codim=*/0);
bool log = collectToIORank_.isIORank();
@@ -273,7 +279,7 @@ public:
}
if (collectToIORank_.isParallel())
collectToIORank_.collect({}, eclOutputModule_.getBlockData(), localWellData);
collectToIORank_.collect({}, eclOutputModule_.getBlockData(), localWellData, localGroupData);
std::map<std::string, double> miscSummaryData;
std::map<std::string, std::vector<double>> regionData;
@@ -296,6 +302,7 @@ public:
miscSummaryData["TCPU"] = totalCpuTime;
const Opm::data::Wells& wellData = collectToIORank_.isParallel() ? collectToIORank_.globalWellData() : localWellData;
const Opm::data::Group& groupData = collectToIORank_.isParallel() ? collectToIORank_.globalGroupData() : localGroupData;
const std::map<std::pair<std::string, int>, double>& blockData
= collectToIORank_.isParallel()
@@ -308,6 +315,7 @@ public:
eclState,
schedule(),
wellData,
groupData,
miscSummaryData,
regionData,
blockData);
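
With groupData threaded into this call, group-level output can report the currently active control mode next to the well results. As a conceptual illustration of the consuming side (the real evaluation lives in opm-common; this reuses the hypothetical map-like layout sketched earlier):

// Hypothetical lookup of a group's active production control mode, e.g.
// for a group-level summary vector or for the restart file.
double groupControlModeValue(const GroupData& groupData, const std::string& gname)
{
    auto it = groupData.groups.find(gname);
    if (it == groupData.groups.end())
        return 0.0; // group not reported at this step
    // The active control mode is carried as an integer code.
    return static_cast<double>(it->second.currentProdConstraint);
}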
@@ -333,13 +341,14 @@ public:
void writeOutput(bool isSubStep)
{
int reportStepNum = simulator_.episodeIndex() + 1;
Scalar curTime = simulator_.time() + simulator_.timeStepSize();
Scalar nextStepSize = simulator_.problem().nextTimeStepSize();
// output using eclWriter if enabled
Opm::data::Wells localWellData = simulator_.problem().wellModel().wellData();
Opm::data::Group localGroupData = simulator_.problem().wellModel().groupData(reportStepNum, simulator_.vanguard().schedule());
int reportStepNum = simulator_.episodeIndex() + 1;
const auto& gridView = simulator_.vanguard().gridView();
int numElements = gridView.size(/*codim=*/0);
bool log = collectToIORank_.isIORank();
@@ -366,7 +375,7 @@ public:
eclOutputModule_.addRftDataToWells(localWellData, reportStepNum);
if (collectToIORank_.isParallel())
collectToIORank_.collect(localCellData, eclOutputModule_.getBlockData(), localWellData);
collectToIORank_.collect(localCellData, eclOutputModule_.getBlockData(), localWellData, localGroupData);
if (collectToIORank_.isIORank()) {