mirror of
https://github.com/OPM/opm-simulators.git
synced 2025-02-25 18:55:30 -06:00
Merge pull request #2459 from jalvestad/group_constraints_summary
Make available current Production/Injection Group Control parameters for use in Restart
This commit is contained in:
commit
fda4a29c7e
@ -26,6 +26,7 @@
|
||||
#include <opm/output/data/Cells.hpp>
|
||||
#include <opm/output/data/Solution.hpp>
|
||||
#include <opm/output/data/Wells.hpp>
|
||||
#include <opm/output/data/Groups.hpp>
|
||||
|
||||
#include <opm/grid/common/p2pcommunicator.hh>
|
||||
#include <dune/grid/utility/persistentcontainer.hh>
|
||||
@ -546,6 +547,46 @@ public:
|
||||
|
||||
};
|
||||
|
||||
class PackUnPackGroupData : public P2PCommunicatorType::DataHandleInterface
|
||||
{
|
||||
const Opm::data::Group& localGroupData_;
|
||||
Opm::data::Group& globalGroupData_;
|
||||
|
||||
public:
|
||||
PackUnPackGroupData(const Opm::data::Group& localGroupData,
|
||||
Opm::data::Group& globalGroupData,
|
||||
bool isIORank)
|
||||
: localGroupData_(localGroupData)
|
||||
, globalGroupData_(globalGroupData)
|
||||
{
|
||||
if (isIORank) {
|
||||
MessageBufferType buffer;
|
||||
pack(0, buffer);
|
||||
|
||||
// pass a dummy_link to satisfy virtual class
|
||||
int dummyLink = -1;
|
||||
unpack(dummyLink, buffer);
|
||||
}
|
||||
}
|
||||
|
||||
// pack all data associated with link
|
||||
void pack(int link, MessageBufferType& buffer)
|
||||
{
|
||||
// we should only get one link
|
||||
if (link != 0)
|
||||
throw std::logic_error("link in method pack is not 0 as expected");
|
||||
|
||||
// write all group data
|
||||
localGroupData_.write(buffer);
|
||||
}
|
||||
|
||||
// unpack all data associated with link
|
||||
void unpack(int /*link*/, MessageBufferType& buffer)
|
||||
{ globalGroupData_.read(buffer); }
|
||||
|
||||
};
|
||||
|
||||
|
||||
class PackUnPackBlockData : public P2PCommunicatorType::DataHandleInterface
|
||||
{
|
||||
const std::map<std::pair<std::string, int>, double>& localBlockData_;
|
||||
@ -607,11 +648,13 @@ public:
|
||||
// gather solution to rank 0 for EclipseWriter
|
||||
void collect(const Opm::data::Solution& localCellData,
|
||||
const std::map<std::pair<std::string, int>, double>& localBlockData,
|
||||
const Opm::data::Wells& localWellData)
|
||||
const Opm::data::Wells& localWellData,
|
||||
const Opm::data::Group& localGroupData)
|
||||
{
|
||||
globalCellData_ = {};
|
||||
globalBlockData_.clear();
|
||||
globalWellData_.clear();
|
||||
globalGroupData_.clear();
|
||||
|
||||
// index maps only have to be build when reordering is needed
|
||||
if(!needsReordering && !isParallel())
|
||||
@ -635,6 +678,12 @@ public:
|
||||
globalWellData_,
|
||||
isIORank());
|
||||
|
||||
PackUnPackGroupData
|
||||
packUnpackGroupData(localGroupData,
|
||||
globalGroupData_,
|
||||
isIORank());
|
||||
|
||||
|
||||
PackUnPackBlockData
|
||||
packUnpackBlockData(localBlockData,
|
||||
globalBlockData_,
|
||||
@ -642,6 +691,7 @@ public:
|
||||
|
||||
toIORankComm_.exchange(packUnpackCellData);
|
||||
toIORankComm_.exchange(packUnpackWellData);
|
||||
toIORankComm_.exchange(packUnpackGroupData);
|
||||
toIORankComm_.exchange(packUnpackBlockData);
|
||||
|
||||
|
||||
@ -661,6 +711,9 @@ public:
|
||||
const Opm::data::Wells& globalWellData() const
|
||||
{ return globalWellData_; }
|
||||
|
||||
const Opm::data::Group& globalGroupData() const
|
||||
{ return globalGroupData_; }
|
||||
|
||||
bool isIORank() const
|
||||
{ return toIORankComm_.rank() == ioRank; }
|
||||
|
||||
@ -707,6 +760,7 @@ protected:
|
||||
Opm::data::Solution globalCellData_;
|
||||
std::map<std::pair<std::string, int>, double> globalBlockData_;
|
||||
Opm::data::Wells globalWellData_;
|
||||
Opm::data::Group globalGroupData_;
|
||||
std::vector<int> localIdxToGlobalIdx_;
|
||||
};
|
||||
|
||||
|
@ -42,6 +42,7 @@
|
||||
#include <opm/output/eclipse/RestartValue.hpp>
|
||||
|
||||
#include <opm/output/data/Wells.hpp>
|
||||
#include <opm/output/data/Groups.hpp>
|
||||
#include <opm/material/common/Exceptions.hpp>
|
||||
|
||||
#include <opm/models/utils/propertysystem.hh>
|
||||
@ -567,6 +568,10 @@ public:
|
||||
return wellDat;
|
||||
}
|
||||
|
||||
/// Stub group-data query for models that track no group constraints.
///
/// \return an empty (default-constructed) group-data container; both
///         parameters are intentionally unused.
Opm::data::Group groupData(const int /* reportStepIdx */, Opm::Schedule& /* sched */) const {
    return {};
}
|
||||
|
||||
/*!
|
||||
* \brief This method writes the complete state of all wells
|
||||
* to the hard disk.
|
||||
|
@ -32,6 +32,9 @@
|
||||
#include "ecloutputblackoilmodule.hh"
|
||||
|
||||
#include <opm/models/blackoil/blackoilmodel.hh>
|
||||
|
||||
#include <opm/simulators/wells/BlackoilWellModel.hpp>
|
||||
|
||||
#include <opm/models/discretization/ecfv/ecfvdiscretization.hh>
|
||||
#include <opm/models/io/baseoutputwriter.hh>
|
||||
#include <opm/models/parallel/tasklets.hh>
|
||||
@ -39,6 +42,7 @@
|
||||
#include <ebos/nncsorter.hpp>
|
||||
|
||||
#include <opm/output/eclipse/EclipseIO.hpp>
|
||||
|
||||
#include <opm/output/eclipse/RestartValue.hpp>
|
||||
#include <opm/output/eclipse/Summary.hpp>
|
||||
#include <opm/parser/eclipse/Units/UnitSystem.hpp>
|
||||
@ -257,6 +261,8 @@ public:
|
||||
|
||||
Opm::data::Wells localWellData = simulator_.problem().wellModel().wellData();
|
||||
|
||||
Opm::data::Group localGroupData = simulator_.problem().wellModel().groupData(reportStepNum, simulator_.vanguard().schedule());
|
||||
|
||||
const auto& gridView = simulator_.vanguard().gridView();
|
||||
int numElements = gridView.size(/*codim=*/0);
|
||||
bool log = collectToIORank_.isIORank();
|
||||
@ -273,7 +279,7 @@ public:
|
||||
}
|
||||
|
||||
if (collectToIORank_.isParallel())
|
||||
collectToIORank_.collect({}, eclOutputModule_.getBlockData(), localWellData);
|
||||
collectToIORank_.collect({}, eclOutputModule_.getBlockData(), localWellData, localGroupData);
|
||||
|
||||
std::map<std::string, double> miscSummaryData;
|
||||
std::map<std::string, std::vector<double>> regionData;
|
||||
@ -296,6 +302,7 @@ public:
|
||||
miscSummaryData["TCPU"] = totalCpuTime;
|
||||
|
||||
const Opm::data::Wells& wellData = collectToIORank_.isParallel() ? collectToIORank_.globalWellData() : localWellData;
|
||||
const Opm::data::Group& groupData = collectToIORank_.isParallel() ? collectToIORank_.globalGroupData() : localGroupData;
|
||||
|
||||
const std::map<std::pair<std::string, int>, double>& blockData
|
||||
= collectToIORank_.isParallel()
|
||||
@ -308,6 +315,7 @@ public:
|
||||
eclState,
|
||||
schedule(),
|
||||
wellData,
|
||||
groupData,
|
||||
miscSummaryData,
|
||||
regionData,
|
||||
blockData);
|
||||
@ -333,13 +341,14 @@ public:
|
||||
|
||||
void writeOutput(bool isSubStep)
|
||||
{
|
||||
int reportStepNum = simulator_.episodeIndex() + 1;
|
||||
Scalar curTime = simulator_.time() + simulator_.timeStepSize();
|
||||
Scalar nextStepSize = simulator_.problem().nextTimeStepSize();
|
||||
|
||||
// output using eclWriter if enabled
|
||||
Opm::data::Wells localWellData = simulator_.problem().wellModel().wellData();
|
||||
Opm::data::Group localGroupData = simulator_.problem().wellModel().groupData(reportStepNum, simulator_.vanguard().schedule());
|
||||
|
||||
int reportStepNum = simulator_.episodeIndex() + 1;
|
||||
const auto& gridView = simulator_.vanguard().gridView();
|
||||
int numElements = gridView.size(/*codim=*/0);
|
||||
bool log = collectToIORank_.isIORank();
|
||||
@ -366,7 +375,7 @@ public:
|
||||
eclOutputModule_.addRftDataToWells(localWellData, reportStepNum);
|
||||
|
||||
if (collectToIORank_.isParallel())
|
||||
collectToIORank_.collect(localCellData, eclOutputModule_.getBlockData(), localWellData);
|
||||
collectToIORank_.collect(localCellData, eclOutputModule_.getBlockData(), localWellData, localGroupData);
|
||||
|
||||
|
||||
if (collectToIORank_.isIORank()) {
|
||||
|
@ -33,9 +33,12 @@
|
||||
#include <cassert>
|
||||
#include <tuple>
|
||||
|
||||
#include <opm/parser/eclipse/EclipseState/Runspec.hpp>
|
||||
|
||||
#include <opm/parser/eclipse/EclipseState/Schedule/Schedule.hpp>
|
||||
#include <opm/parser/eclipse/EclipseState/Schedule/Well/WellTestState.hpp>
|
||||
#include <opm/parser/eclipse/EclipseState/Schedule/Group/GuideRate.hpp>
|
||||
#include <opm/parser/eclipse/EclipseState/Schedule/Group/Group.hpp>
|
||||
|
||||
#include <opm/simulators/timestepping/SimulatorReport.hpp>
|
||||
#include <opm/simulators/wells/PerforationData.hpp>
|
||||
@ -183,6 +186,32 @@ namespace Opm {
|
||||
|
||||
void initFromRestartFile(const RestartValue& restartValues);
|
||||
|
||||
/// Report the currently active group-level constraints, for restart output.
///
/// \param reportStepIdx report step at which to query the schedule
/// \param sched         schedule providing group names and definitions
/// \return map of group name to the group's active production/injection
///         control modes (NONE when no group control is recorded)
Opm::data::Group groupData(const int reportStepIdx, Opm::Schedule& sched) const
{
    Opm::data::Group dw;
    // Take each name by const reference — groupNames() yields std::string
    // values; binding by value would copy every name per iteration.
    for (const std::string& gname : sched.groupNames(reportStepIdx)) {
        const auto& grup = sched.getGroup(gname, reportStepIdx);
        const auto& grup_type = grup.getGroupType();

        // Default every constraint to NONE; overwritten below whenever
        // the well state records an active group control.
        Opm::data::currentGroupConstraints cgc;
        cgc.currentProdConstraint = Opm::Group::ProductionCMode::NONE;
        cgc.currentGasInjectionConstraint = Opm::Group::InjectionCMode::NONE;
        cgc.currentWaterInjectionConstraint = Opm::Group::InjectionCMode::NONE;

        if (this->well_state_.hasProductionGroupControl(gname)) {
            cgc.currentProdConstraint = this->well_state_.currentProductionGroupControl(gname);
        }

        // Injection constraints are only meaningful for injection or
        // mixed-role groups.
        if ((grup_type == Opm::Group::GroupType::INJECTION) || (grup_type == Opm::Group::GroupType::MIXED)) {
            if (this->well_state_.hasInjectionGroupControl(Opm::Phase::WATER, gname)) {
                cgc.currentWaterInjectionConstraint = this->well_state_.currentInjectionGroupControl(Opm::Phase::WATER, gname);
            }
            if (this->well_state_.hasInjectionGroupControl(Opm::Phase::GAS, gname)) {
                cgc.currentGasInjectionConstraint = this->well_state_.currentInjectionGroupControl(Opm::Phase::GAS, gname);
            }
        }
        dw.emplace(gname, cgc);
    }
    return dw;
}
|
||||
|
||||
Opm::data::Wells wellData() const
|
||||
{ return well_state_.report(phase_usage_, Opm::UgGridHelpers::globalCell(grid())); }
|
||||
|
||||
|
@ -21,6 +21,8 @@
|
||||
#ifndef OPM_WELLGROUPHELPERS_HEADER_INCLUDED
|
||||
#define OPM_WELLGROUPHELPERS_HEADER_INCLUDED
|
||||
|
||||
#include <opm/output/data/Groups.hpp>
|
||||
|
||||
#include <vector>
|
||||
#include <opm/parser/eclipse/EclipseState/Schedule/ScheduleTypes.hpp>
|
||||
|
||||
@ -129,7 +131,6 @@ namespace Opm {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
inline void accumulateGroupEfficiencyFactor(const Group& group, const Schedule& schedule, const int reportStepIdx, double& factor) {
|
||||
factor *= group.getGroupEfficiencyFactor();
|
||||
if (group.parent() != "FIELD")
|
||||
|
@ -22,6 +22,7 @@
|
||||
|
||||
#include <opm/core/props/BlackoilPhases.hpp>
|
||||
#include <opm/output/data/Wells.hpp>
|
||||
#include <opm/parser/eclipse/EclipseState/Schedule/Schedule.hpp>
|
||||
#include <opm/parser/eclipse/EclipseState/Schedule/Well/Well.hpp>
|
||||
#include <opm/simulators/wells/PerforationData.hpp>
|
||||
|
||||
|
@ -315,11 +315,11 @@ namespace Opm
|
||||
std::vector<Well::ProducerCMode>& currentProductionControls() { return current_production_controls_; }
|
||||
const std::vector<Well::ProducerCMode>& currentProductionControls() const { return current_production_controls_; }
|
||||
|
||||
bool hasProductionGroupControl(const std::string& groupName) {
|
||||
bool hasProductionGroupControl(const std::string& groupName) const {
|
||||
return current_production_group_controls_.count(groupName) > 0;
|
||||
}
|
||||
|
||||
/// Whether an active injection group control is recorded for \p groupName
/// in the given \p phase. const-qualified so it is callable from const
/// well-model queries.
bool hasInjectionGroupControl(const Opm::Phase& phase, const std::string& groupName) const {
    return current_injection_group_controls_.count(std::make_pair(phase, groupName)) > 0;
}
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user