Further changes to report the current group control status (currentGroupControls) to the Summary output

This commit is contained in:
Jostein Alvestad 2020-02-19 13:00:35 +01:00
parent 0dcacb41d4
commit 4006319ff7
5 changed files with 92 additions and 52 deletions

View File

@ -26,6 +26,7 @@
#include <opm/output/data/Cells.hpp>
#include <opm/output/data/Solution.hpp>
#include <opm/output/data/Wells.hpp>
#include <opm/output/data/Groups.hpp>
#include <opm/grid/common/p2pcommunicator.hh>
#include <dune/grid/utility/persistentcontainer.hh>
@ -546,6 +547,46 @@ public:
};
// Data handle that gathers per-rank Opm::data::Group objects onto the
// I/O rank: each rank serializes its local group data into a message
// buffer (pack) and the I/O rank reads the buffers into the global
// container (unpack). Mirrors the sibling PackUnPackCellData /
// PackUnPackWellData handles used by collect().
class PackUnPackGroupData : public P2PCommunicatorType::DataHandleInterface
{
const Opm::data::Group& localGroupData_;   // this rank's group data (read-only source)
Opm::data::Group& globalGroupData_;        // merged result, populated on the I/O rank
public:
// \param localGroupData  group data owned by the calling rank
// \param globalGroupData container that receives the collected data
// \param isIORank        true on the rank that performs the output
PackUnPackGroupData(const Opm::data::Group& localGroupData,
Opm::data::Group& globalGroupData,
bool isIORank)
: localGroupData_(localGroupData)
, globalGroupData_(globalGroupData)
{
if (isIORank) {
// The I/O rank has no communication link to itself, so its own
// contribution is round-tripped locally: serialize, then
// immediately deserialize into the global container.
MessageBufferType buffer;
pack(0, buffer);
// pass a dummy_link to satisfy virtual class
int dummyLink = -1;
unpack(dummyLink, buffer);
}
}
// pack all data associated with link
void pack(int link, MessageBufferType& buffer)
{
// we should only get one link (each rank talks only to the I/O rank)
if (link != 0)
throw std::logic_error("link in method pack is not 0 as expected");
// write all group data
localGroupData_.write(buffer);
}
// unpack all data associated with link
// NOTE(review): link is intentionally ignored — every buffer is merged
// into the same global container; presumably Group::read appends/inserts
// rather than overwrites — confirm against Opm::data::Group.
void unpack(int /*link*/, MessageBufferType& buffer)
{ globalGroupData_.read(buffer); }
};
class PackUnPackBlockData : public P2PCommunicatorType::DataHandleInterface
{
const std::map<std::pair<std::string, int>, double>& localBlockData_;
@ -607,11 +648,13 @@ public:
// gather solution to rank 0 for EclipseWriter
void collect(const Opm::data::Solution& localCellData,
const std::map<std::pair<std::string, int>, double>& localBlockData,
const Opm::data::Wells& localWellData)
const Opm::data::Wells& localWellData,
const Opm::data::Group& localGroupData)
{
globalCellData_ = {};
globalBlockData_.clear();
globalWellData_.clear();
globalGroupData_.clear();
// index maps only have to be build when reordering is needed
if(!needsReordering && !isParallel())
@ -635,6 +678,12 @@ public:
globalWellData_,
isIORank());
PackUnPackGroupData
packUnpackGroupData(localGroupData,
globalGroupData_,
isIORank());
PackUnPackBlockData
packUnpackBlockData(localBlockData,
globalBlockData_,
@ -642,6 +691,7 @@ public:
toIORankComm_.exchange(packUnpackCellData);
toIORankComm_.exchange(packUnpackWellData);
toIORankComm_.exchange(packUnpackGroupData);
toIORankComm_.exchange(packUnpackBlockData);
@ -661,6 +711,9 @@ public:
const Opm::data::Wells& globalWellData() const
{ return globalWellData_; }
const Opm::data::Group& globalGroupData() const
{ return globalGroupData_; }
bool isIORank() const
{ return toIORankComm_.rank() == ioRank; }
@ -707,6 +760,7 @@ protected:
Opm::data::Solution globalCellData_;
std::map<std::pair<std::string, int>, double> globalBlockData_;
Opm::data::Wells globalWellData_;
Opm::data::Group globalGroupData_;
std::vector<int> localIdxToGlobalIdx_;
};

View File

@ -261,14 +261,11 @@ public:
Opm::data::Wells localWellData = simulator_.problem().wellModel().wellData();
/*const Group& fieldGroup = schedule().getGroup("FIELD", reportStepNum);
wellGroupHelpers::setCmodeGroup(fieldGroup, simulator_.vanguard().schedule(), simulator_.vanguard().summaryState(), reportStepNum, well_state_);
Opm::data::Group localGroupData = simulator_.problem().wellModel().groupData(reportStepNum, simulator_.vanguard().schedule());
for (const auto& cp_constr : localGroupData.currentProdConstraint) {
std::cout << "cp_constr.first: " << cp_constr.first << " cp_constr.second: " << static_cast<int>(cp_constr.second) << std::endl;
}*/
for (const auto& cp_constr : localGroupData) {
std::cout << "cp_constr.first: " << cp_constr.first << " cp_constr.second.currentProdConstraint: " << static_cast<int>(cp_constr.second.currentProdConstraint) << std::endl;
}
const auto& gridView = simulator_.vanguard().gridView();
int numElements = gridView.size(/*codim=*/0);
@ -286,7 +283,7 @@ public:
}
if (collectToIORank_.isParallel())
collectToIORank_.collect({}, eclOutputModule_.getBlockData(), localWellData);
collectToIORank_.collect({}, eclOutputModule_.getBlockData(), localWellData, localGroupData);
std::map<std::string, double> miscSummaryData;
std::map<std::string, std::vector<double>> regionData;
@ -309,6 +306,7 @@ public:
miscSummaryData["TCPU"] = totalCpuTime;
const Opm::data::Wells& wellData = collectToIORank_.isParallel() ? collectToIORank_.globalWellData() : localWellData;
const Opm::data::Group& groupData = collectToIORank_.isParallel() ? collectToIORank_.globalGroupData() : localGroupData;
const std::map<std::pair<std::string, int>, double>& blockData
= collectToIORank_.isParallel()
@ -321,6 +319,7 @@ public:
eclState,
schedule(),
wellData,
groupData,
miscSummaryData,
regionData,
blockData);
@ -346,13 +345,14 @@ public:
void writeOutput(bool isSubStep)
{
int reportStepNum = simulator_.episodeIndex() + 1;
Scalar curTime = simulator_.time() + simulator_.timeStepSize();
Scalar nextStepSize = simulator_.problem().nextTimeStepSize();
// output using eclWriter if enabled
Opm::data::Wells localWellData = simulator_.problem().wellModel().wellData();
Opm::data::Group localGroupData = simulator_.problem().wellModel().groupData(reportStepNum, simulator_.vanguard().schedule());
int reportStepNum = simulator_.episodeIndex() + 1;
const auto& gridView = simulator_.vanguard().gridView();
int numElements = gridView.size(/*codim=*/0);
bool log = collectToIORank_.isIORank();
@ -379,7 +379,7 @@ public:
eclOutputModule_.addRftDataToWells(localWellData, reportStepNum);
if (collectToIORank_.isParallel())
collectToIORank_.collect(localCellData, eclOutputModule_.getBlockData(), localWellData);
collectToIORank_.collect(localCellData, eclOutputModule_.getBlockData(), localWellData, localGroupData);
if (collectToIORank_.isIORank()) {
@ -389,6 +389,7 @@ public:
bool enableDoublePrecisionOutput = EWOMS_GET_PARAM(TypeTag, bool, EclOutputDoublePrecision);
const Opm::data::Solution& cellData = collectToIORank_.isParallel() ? collectToIORank_.globalCellData() : localCellData;
const Opm::data::Wells& wellData = collectToIORank_.isParallel() ? collectToIORank_.globalWellData() : localWellData;
const Opm::data::Group& groupData = collectToIORank_.isParallel() ? collectToIORank_.globalGroupData() : localGroupData;
Opm::RestartValue restartValue(cellData, wellData);
if (simConfig.useThresholdPressure())

View File

@ -33,9 +33,12 @@
#include <cassert>
#include <tuple>
#include <opm/parser/eclipse/EclipseState/Runspec.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Schedule.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Well/WellTestState.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Group/GuideRate.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Group/Group.hpp>
//#include <opm/output/data/Groups.hpp>
#include <opm/simulators/timestepping/SimulatorReport.hpp>
@ -184,30 +187,40 @@ namespace Opm {
void initFromRestartFile(const RestartValue& restartValues);
/*Opm::data::Group groupData(const int reportStepIdx, Opm::Schedule& sched)
Opm::data::Group groupData(const int reportStepIdx, Opm::Schedule& sched) const
{
Opm::data::Group dw;
std::pair<const std::string, const Opm::Group::ProductionCMode> groupPCPair;
std::pair<const std::string, const Opm::Group::InjectionCMode> groupICPair;
for (const std::string gname : sched.groupNames(reportStepIdx)) {
const auto& grup = sched.getGroup(gname, reportStepIdx);
const auto& grup_type = grup.getGroupType();
const auto& i_phase = grup.injection_phase();
Opm::data::currentGroupConstraints cgc;
cgc.currentProdConstraint = Opm::Group::ProductionCMode::NONE;
cgc.currentGasInjectionConstraint = Opm::Group::InjectionCMode::NONE;
cgc.currentWaterInjectionConstraint = Opm::Group::InjectionCMode::NONE;
if (this->well_state_.hasProductionGroupControl(gname)) {
groupPCPair = std::make_pair(gname, this->well_state_.currentProductionGroupControl(gname));
dw.currentProdConstraint.insert(groupPCPair);
cgc.currentProdConstraint = this->well_state_.currentProductionGroupControl(gname);
}
if (this->well_state_.hasInjectionGroupControl(gname)) {
groupICPair = std::make_pair(gname, this->well_state_.currentInjectionGroupControl(gname));
dw.currentInjectionConstraint.insert(groupICPair);
if ((grup_type == Opm::Group::GroupType::INJECTION) || (grup_type == Opm::Group::GroupType::MIXED)) {
if (i_phase == Opm::Phase::WATER) {
if (this->well_state_.hasInjectionGroupControl(gname)) {
cgc.currentWaterInjectionConstraint = this->well_state_.currentInjectionGroupControl(gname);
}
}
if (i_phase == Opm::Phase::GAS) {
if (this->well_state_.hasInjectionGroupControl(gname)) {
cgc.currentGasInjectionConstraint = this->well_state_.currentInjectionGroupControl(gname);
}
}
}
dw.emplace(gname, cgc);
}
return dw;
} */
}
Opm::data::Wells wellData() const
{ return well_state_.report(phase_usage_, Opm::UgGridHelpers::globalCell(grid())); }
//Opm::data::Group groupData(const int reportStepIdx) const
//{ return g_report(reportStepIdx); }
// substract Binv(D)rw from r;
void apply( BVector& r) const;

View File

@ -131,34 +131,6 @@ namespace Opm {
}
}
Opm::data::Group getActiveCmodeGroup(const Group& group, const Schedule& schedule, const SummaryState& summaryState, const int reportStepIdx, WellStateFullyImplicitBlackoil& wellState) {
Opm::data::Group cagc;
std::pair<const std::string, const Opm::Group::ProductionCMode> groupPCPair;
std::pair<const std::string, const Opm::Group::InjectionCMode> groupICPair;
for (const std::string& groupName : group.groups()) {
Opm::data::Group cagc_tmp = getActiveCmodeGroup( schedule.getGroup(groupName, reportStepIdx), schedule, summaryState, reportStepIdx, wellState);
for (const auto& item : cagc_tmp.currentInjectionConstraint) {
cagc.currentInjectionConstraint.insert(item);
}
for (const auto& item : cagc_tmp.currentProdConstraint) {
cagc.currentProdConstraint.insert(item);
}
}
if (wellState.hasInjectionGroupControl(group.name())) {
groupICPair = std::make_pair(group.name(), wellState.currentInjectionGroupControl(group.name()));
cagc.currentInjectionConstraint.insert(groupICPair);
}
if (wellState.hasProductionGroupControl(group.name())) {
groupPCPair = std::make_pair(group.name(), wellState.currentProductionGroupControl(group.name()));
cagc.currentProdConstraint.insert(groupPCPair);
}
}
inline void accumulateGroupEfficiencyFactor(const Group& group, const Schedule& schedule, const int reportStepIdx, double& factor) {
factor *= group.getGroupEfficiencyFactor();
if (group.parent() != "FIELD")

View File

@ -315,11 +315,11 @@ namespace Opm
std::vector<Well::ProducerCMode>& currentProductionControls() { return current_production_controls_; }
const std::vector<Well::ProducerCMode>& currentProductionControls() const { return current_production_controls_; }
bool hasProductionGroupControl(const std::string& groupName) {
bool hasProductionGroupControl(const std::string& groupName) const {
return current_production_group_controls_.count(groupName) > 0;
}
bool hasInjectionGroupControl(const Opm::Phase& phase, const std::string& groupName) {
bool hasInjectionGroupControl(const Opm::Phase& phase, const std::string& groupName) const {
return current_injection_group_controls_.count(std::make_pair(phase, groupName)) > 0;
}