Add struct SimulatorUpdate to transport results back to simulator

This commit is contained in:
Joakim Hove 2021-11-10 15:24:36 +01:00
parent 381e54f4a9
commit f1e768e184
6 changed files with 76 additions and 31 deletions

View File

@ -786,6 +786,7 @@ if(ENABLE_ECL_INPUT)
opm/parser/eclipse/EclipseState/Schedule/Action/Enums.hpp
opm/parser/eclipse/EclipseState/Schedule/Action/ASTNode.hpp
opm/parser/eclipse/EclipseState/Schedule/Action/PyAction.hpp
opm/parser/eclipse/EclipseState/Schedule/Action/SimulatorUpdate.hpp
opm/parser/eclipse/EclipseState/Schedule/Action/State.hpp
opm/parser/eclipse/EclipseState/Schedule/ArrayDimChecker.hpp
opm/parser/eclipse/EclipseState/Schedule/GasLiftOpt.hpp

View File

@ -0,0 +1,43 @@
/*
  Copyright 2021 Equinor ASA.

  This file is part of the Open Porous Media project (OPM).

  OPM is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 3 of the License, or
  (at your option) any later version.

  OPM is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef SIMULATOR_UPDATE_HPP
#define SIMULATOR_UPDATE_HPP

#include <string>
#include <unordered_set>

namespace Opm {

/*
  This struct is used to communicate back from Schedule::applyAction() what
  needs to be updated in the simulator when execution is returned to the
  simulator code.
*/
struct SimulatorUpdate {
    // These wells have been affected by the ACTIONX and the simulator needs to
    // reapply rates and state from the newly updated Schedule object.
    std::unordered_set<std::string> affected_wells;
};

}

#endif

View File

@ -50,6 +50,7 @@
#include <opm/parser/eclipse/EclipseState/Schedule/ScheduleDeck.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/ScheduleState.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/RPTConfig.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Action/SimulatorUpdate.hpp>
#include <opm/parser/eclipse/Python/Python.hpp>
@ -274,7 +275,7 @@ namespace Opm
bool write_rst_file(std::size_t report_step) const;
const std::map< std::string, int >& rst_keywords( size_t timestep ) const;
std::unordered_set<std::string> applyAction(std::size_t reportStep, const time_point& sim_time, const Action::ActionX& action, const Action::Result& result, const std::unordered_map<std::string, double>& wellpi);
SimulatorUpdate applyAction(std::size_t reportStep, const time_point& sim_time, const Action::ActionX& action, const Action::Result& result, const std::unordered_map<std::string, double>& wellpi);
void applyWellProdIndexScaling(const std::string& well_name, const std::size_t reportStep, const double scalingFactor);
@ -523,7 +524,7 @@ namespace Opm
const FieldPropsManager* fp,
const std::vector<std::string>& matching_wells,
bool runtime,
std::unordered_set<std::string> * affected_wells,
SimulatorUpdate * sim_update,
const std::unordered_map<std::string, double> * target_wellpi);
static std::string formatDate(std::time_t t);
@ -532,7 +533,7 @@ namespace Opm
bool must_write_rst_file(std::size_t report_step) const;
void applyEXIT(const DeckKeyword&, std::size_t currentStep);
void applyWELOPEN(const DeckKeyword&, std::size_t currentStep, const ParseContext&, ErrorGuard&, const std::vector<std::string>& matching_wells = {}, std::unordered_set<std::string> * affected_wells = nullptr);
void applyWELOPEN(const DeckKeyword&, std::size_t currentStep, const ParseContext&, ErrorGuard&, const std::vector<std::string>& matching_wells = {}, SimulatorUpdate * sim_update = nullptr);
struct HandlerContext {
const ScheduleBlock& block;
@ -540,7 +541,7 @@ namespace Opm
const std::size_t currentStep;
const std::vector<std::string>& matching_wells;
const bool actionx_mode;
std::unordered_set<std::string> * affected_wells;
SimulatorUpdate * sim_update;
const std::unordered_map<std::string, double> * target_wellpi;
const ScheduleGrid& grid;
const FieldPropsManager* fp_ptr;
@ -551,22 +552,22 @@ namespace Opm
const std::size_t currentStep_,
const std::vector<std::string>& matching_wells_,
bool actionx_mode_,
std::unordered_set<std::string> * affected_wells_,
SimulatorUpdate * sim_update_,
const std::unordered_map<std::string, double> * target_wellpi_)
: block(block_)
, keyword(keyword_)
, currentStep(currentStep_)
, matching_wells(matching_wells_)
, actionx_mode(actionx_mode_)
, affected_wells(affected_wells_)
, sim_update(sim_update_)
, target_wellpi(target_wellpi_)
, grid(grid_)
, fp_ptr(nullptr)
{}
void affected_well(const std::string& well_name) {
if (this->affected_wells)
this->affected_wells->insert(well_name);
if (this->sim_update)
this->sim_update->affected_wells.insert(well_name);
}
};

View File

@ -1233,8 +1233,8 @@ namespace {
}
}
void Schedule::handleWELOPEN (HandlerContext& handlerContext, const ParseContext& parseContext, ErrorGuard& errors) {
this->applyWELOPEN(handlerContext.keyword, handlerContext.currentStep, parseContext, errors, handlerContext.matching_wells, handlerContext.affected_wells);
void Schedule::handleWELOPEN(HandlerContext& handlerContext, const ParseContext& parseContext, ErrorGuard& errors) {
this->applyWELOPEN(handlerContext.keyword, handlerContext.currentStep, parseContext, errors, handlerContext.matching_wells, handlerContext.sim_update);
}
void Schedule::handleWELPI(HandlerContext& handlerContext, const ParseContext& parseContext, ErrorGuard& errors) {
@ -1391,8 +1391,7 @@ namespace {
well2.updateRefDepth();
this->snapshots.back().wellgroup_events().addEvent( wellName, ScheduleEvents::WELL_WELSPECS_UPDATE);
this->snapshots.back().wells.update( std::move(well2) );
if (handlerContext.affected_wells)
handlerContext.affected_wells->insert(wellName);
handlerContext.affected_well(wellName);
}
}
}

View File

@ -299,7 +299,7 @@ Schedule::Schedule(const Deck& deck, const EclipseState& es, const std::optional
const FieldPropsManager* fp,
const std::vector<std::string>& matching_wells,
bool actionx_mode,
std::unordered_set<std::string> * affected_wells,
SimulatorUpdate * sim_update,
const std::unordered_map<std::string, double> * target_wellpi) {
static const std::unordered_set<std::string> require_grid = {
@ -308,7 +308,7 @@ Schedule::Schedule(const Deck& deck, const EclipseState& es, const std::optional
};
HandlerContext handlerContext { block, keyword, grid, currentStep, matching_wells, actionx_mode, affected_wells, target_wellpi};
HandlerContext handlerContext { block, keyword, grid, currentStep, matching_wells, actionx_mode, sim_update, target_wellpi};
/*
The grid and fieldProps members create problems for reiterating the
Schedule section. We therefore single them out very clearly here.
@ -631,7 +631,7 @@ void Schedule::iterateScheduleSection(std::size_t load_start, std::size_t load_e
const ParseContext& parseContext,
ErrorGuard& errors,
const std::vector<std::string>& matching_wells,
std::unordered_set<std::string> * affected_wells) {
SimulatorUpdate * sim_update) {
auto conn_defaulted = []( const DeckRecord& rec ) {
auto defaulted = []( const DeckItem& item ) {
@ -668,8 +668,8 @@ void Schedule::iterateScheduleSection(std::size_t load_start, std::size_t load_e
OpmLog::note(msg);
} else {
this->updateWellStatus( wname, currentStep, well_status);
if (affected_wells)
affected_wells->insert(wname);
if (sim_update)
sim_update->affected_wells.insert(wname);
}
}
}
@ -696,8 +696,8 @@ void Schedule::iterateScheduleSection(std::size_t load_start, std::size_t load_e
this->snapshots[currentStep].wells.update( std::move(well) );
}
if (affected_wells)
affected_wells->insert(wname);
if (sim_update)
sim_update->affected_wells.insert(wname);
this->snapshots.back().events().addEvent( ScheduleEvents::COMPLETION_CHANGE);
}
}
@ -1247,11 +1247,11 @@ void Schedule::iterateScheduleSection(std::size_t load_start, std::size_t load_e
}
std::unordered_set<std::string> Schedule::applyAction(std::size_t reportStep, const time_point&, const Action::ActionX& action, const Action::Result& result, const std::unordered_map<std::string, double>& target_wellpi) {
SimulatorUpdate Schedule::applyAction(std::size_t reportStep, const time_point&, const Action::ActionX& action, const Action::Result& result, const std::unordered_map<std::string, double>& target_wellpi) {
const std::string prefix = "| ";
ParseContext parseContext;
ErrorGuard errors;
std::unordered_set<std::string> affected_wells;
SimulatorUpdate sim_update;
ScheduleGrid grid(this->completed_cells);
OpmLog::info("/----------------------------------------------------------------------");
@ -1271,13 +1271,13 @@ void Schedule::iterateScheduleSection(std::size_t load_start, std::size_t load_e
nullptr,
result.wells(),
true,
&affected_wells,
&sim_update,
&target_wellpi);
}
this->end_report(reportStep);
if (!affected_wells.empty()) {
if (!sim_update.affected_wells.empty()) {
this->snapshots.back().events().addEvent( ScheduleEvents::ACTIONX_WELL_EVENT );
for (const auto& well: affected_wells)
for (const auto& well: sim_update.affected_wells)
this->snapshots.back().wellgroup_events().addEvent(well, ScheduleEvents::ACTIONX_WELL_EVENT);
}
@ -1285,7 +1285,7 @@ void Schedule::iterateScheduleSection(std::size_t load_start, std::size_t load_e
iterateScheduleSection(reportStep + 1, this->m_sched_deck.size(), parseContext, errors, grid, &target_wellpi, nullptr, prefix);
OpmLog::info("\\----------------------------------------------------------------------");
return affected_wells;
return sim_update;
}

View File

@ -158,7 +158,8 @@ TSTEP
Action::Result action_result(true);
auto sim_time = TimeService::now();
const auto& action1 = sched[0].actions.get()["ACTION"];
auto affected_wells = sched.applyAction(0, sim_time, action1, action_result, {});
auto sim_update = sched.applyAction(0, sim_time, action1, action_result, {});
const auto& affected_wells = sim_update.affected_wells;
std::vector<std::string> expected_wells{"W0", "W1", "W3"};
BOOST_CHECK( std::is_permutation(affected_wells.begin(), affected_wells.end(),
expected_wells.begin(), expected_wells.end() ));
@ -1210,8 +1211,8 @@ TSTEP
Action::Result action_result(true);
const auto& affected_wells = sched.applyAction(0, TimeService::now(), action1, action_result, {});
BOOST_CHECK( affected_wells.empty() );
const auto& sim_update = sched.applyAction(0, TimeService::now(), action1, action_result, {});
BOOST_CHECK( sim_update.affected_wells.empty() );
{
const auto& glo = sched.glo(0);
BOOST_CHECK(glo.has_group("PLAT-A"));
@ -1278,9 +1279,9 @@ TSTEP
BOOST_CHECK_THROW( sched.applyAction(0, TimeService::now(), action1, action_result, {}), std::exception);
{
const auto& well = sched.getWell("PROD1", 0);
const auto& affected_wells = sched.applyAction(0, TimeService::now(), action1, action_result, {{"PROD1", well.convertDeckPI(500)}});
BOOST_CHECK_EQUAL( affected_wells.count("PROD1"), 1);
BOOST_CHECK_EQUAL( affected_wells.size(), 1);
const auto& sim_update = sched.applyAction(0, TimeService::now(), action1, action_result, {{"PROD1", well.convertDeckPI(500)}});
BOOST_CHECK_EQUAL( sim_update.affected_wells.count("PROD1"), 1);
BOOST_CHECK_EQUAL( sim_update.affected_wells.size(), 1);
}
{
const auto& target_wellpi = sched[0].target_wellpi;