diff --git a/CMakeLists_files.cmake b/CMakeLists_files.cmake
index 22979a676..e2bd95250 100644
--- a/CMakeLists_files.cmake
+++ b/CMakeLists_files.cmake
@@ -24,6 +24,7 @@
# find opm -name '*.c*' -printf '\t%p\n' | sort
list (APPEND MAIN_SOURCE_FILES
ebos/collecttoiorank.cc
+ ebos/eclactionhandler.cc
ebos/eclgenericcpgridvanguard.cc
ebos/eclgenericoutputblackoilmodule.cc
ebos/eclgenericproblem.cc
diff --git a/ebos/eclactionhandler.cc b/ebos/eclactionhandler.cc
new file mode 100644
index 000000000..c105ccf3c
--- /dev/null
+++ b/ebos/eclactionhandler.cc
@@ -0,0 +1,186 @@
+// -*- mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+// vi: set et ts=4 sw=4 sts=4:
+/*
+ This file is part of the Open Porous Media project (OPM).
+
+ OPM is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 2 of the License, or
+ (at your option) any later version.
+
+ OPM is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with OPM. If not, see <http://www.gnu.org/licenses/>.
+
+ Consult the COPYING file in the top-level source directory of this
+ module for the precise wording of the license and the list of
+ copyright holders.
+*/
+
+#include <config.h>
+
+#include <ebos/eclactionhandler.hh>
+
+#include <opm/common/OpmLog/OpmLog.hpp>
+
+#include <opm/parser/eclipse/EclipseState/EclipseState.hpp>
+#include <opm/parser/eclipse/EclipseState/Schedule/Action/ActionContext.hpp>
+#include <opm/parser/eclipse/EclipseState/Schedule/Action/ActionX.hpp>
+#include <opm/parser/eclipse/EclipseState/Schedule/Action/State.hpp>
+#include <opm/parser/eclipse/EclipseState/Schedule/Schedule.hpp>
+
+#include <opm/simulators/utils/ParallelSerialization.hpp>
+#include <opm/simulators/wells/BlackoilWellModelGeneric.hpp>
+
+#include <iomanip>
+#include <sstream>
+
+namespace Opm {
+
+EclActionHandler::EclActionHandler(EclipseState& ecl_state,
+ Schedule& schedule,
+ Action::State& actionState,
+ SummaryState& summaryState,
+ BlackoilWellModelGeneric& wellModel,
+ Parallel::Communication comm)
+ : ecl_state_(ecl_state)
+ , schedule_(schedule)
+ , actionState_(actionState)
+ , summaryState_(summaryState)
+ , wellModel_(wellModel)
+ , comm_(comm)
+{
+}
+
+void EclActionHandler::applyActions(int reportStep,
+ double sim_time,
+ const TransFunc& transUp)
+{
+ const auto& actions = schedule_[reportStep].actions();
+ if (actions.empty())
+ return;
+
+ Action::Context context( summaryState_, schedule_[reportStep].wlist_manager() );
+ auto now = TimeStampUTC( schedule_.getStartTime() ) + std::chrono::duration<double>(sim_time);
+ std::string ts;
+ {
+ std::ostringstream os;
+ os << std::setw(4) << std::to_string(now.year()) << '/'
+ << std::setw(2) << std::setfill('0') << std::to_string(now.month()) << '/'
+ << std::setw(2) << std::setfill('0') << std::to_string(now.day()) << " report:" << std::to_string(reportStep);
+
+ ts = os.str();
+ }
+
+ bool commit_wellstate = false;
+ for (const auto& pyaction : actions.pending_python(actionState_)) {
+ auto sim_update = schedule_.runPyAction(reportStep, *pyaction, actionState_,
+ ecl_state_, summaryState_);
+ this->applySimulatorUpdate(reportStep, sim_update, commit_wellstate, transUp);
+ }
+
+ auto simTime = asTimeT(now);
+ for (const auto& action : actions.pending(actionState_, simTime)) {
+ auto actionResult = action->eval(context);
+ if (actionResult) {
+ std::string wells_string;
+ const auto& matching_wells = actionResult.wells();
+ if (!matching_wells.empty()) {
+ for (std::size_t iw = 0; iw < matching_wells.size() - 1; iw++)
+ wells_string += matching_wells[iw] + ", ";
+ wells_string += matching_wells.back();
+ }
+ std::string msg = "The action: " + action->name() + " evaluated to true at " + ts + " wells: " + wells_string;
+ OpmLog::info(msg);
+
+ const auto& wellpi = this->fetchWellPI(reportStep, *action, matching_wells);
+
+ auto sim_update = schedule_.applyAction(reportStep, *action,
+ actionResult.wells(), wellpi);
+ this->applySimulatorUpdate(reportStep, sim_update, commit_wellstate, transUp);
+ actionState_.add_run(*action, simTime, std::move(actionResult));
+ } else {
+ std::string msg = "The action: " + action->name() + " evaluated to false at " + ts;
+ OpmLog::info(msg);
+ }
+ }
+ /*
+ The well state was stored in a previous object when the time step
+ completed successfully. The action processing may have modified the
+ well state, so we must commit it here to make sure it is not
+ overwritten when the next time step starts.
+ */
+ if (commit_wellstate)
+ this->wellModel_.commitWGState();
+}
+
+void EclActionHandler::applySimulatorUpdate(int report_step,
+ const SimulatorUpdate& sim_update,
+ bool& commit_wellstate,
+ const TransFunc& updateTrans)
+ {
+ this->wellModel_.updateEclWells(report_step, sim_update.affected_wells, summaryState_);
+ if (!sim_update.affected_wells.empty())
+ commit_wellstate = true;
+
+ if (sim_update.tran_update) {
+ const auto& keywords = schedule_[report_step].geo_keywords();
+ ecl_state_.apply_schedule_keywords( keywords );
+ eclBroadcast(comm_, ecl_state_.getTransMult() );
+
+ // re-compute transmissibility
+ updateTrans(true);
+ }
+ }
+
+std::unordered_map<std::string, double>
+EclActionHandler::fetchWellPI(int reportStep,
+ const Action::ActionX& action,
+ const std::vector<std::string>& matching_wells)
+{
+
+ auto wellpi_wells = action.wellpi_wells(WellMatcher(schedule_[reportStep].well_order(),
+ schedule_[reportStep].wlist_manager()),
+ matching_wells);
+
+ if (wellpi_wells.empty())
+ return {};
+
+ const auto num_wells = schedule_[reportStep].well_order().size();
+ std::vector<double> wellpi_vector(num_wells);
+ for (const auto& wname : wellpi_wells) {
+ if (this->wellModel_.hasWell(wname)) {
+ const auto& well = schedule_.getWell( wname, reportStep );
+ wellpi_vector[well.seqIndex()] = this->wellModel_.wellPI(wname);
+ }
+ }
+
+ if (comm_.size() > 1) {
+ std::vector<double> wellpi_buffer(num_wells * comm_.size());
+ comm_.gather( wellpi_vector.data(), wellpi_buffer.data(), num_wells, 0 );
+ if (comm_.rank() == 0) {
+ for (int rank=1; rank < comm_.size(); rank++) {
+ for (std::size_t well_index=0; well_index < num_wells; well_index++) {
+ const auto global_index = rank*num_wells + well_index;
+ const auto value = wellpi_buffer[global_index];
+ if (value != 0)
+ wellpi_vector[well_index] = value;
+ }
+ }
+ }
+ comm_.broadcast(wellpi_vector.data(), wellpi_vector.size(), 0);
+ }
+
+ std::unordered_map<std::string, double> wellpi;
+ for (const auto& wname : wellpi_wells) {
+ const auto& well = schedule_.getWell( wname, reportStep );
+ wellpi[wname] = wellpi_vector[ well.seqIndex() ];
+ }
+ return wellpi;
+}
+
+} // namespace Opm
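Note on the parallel well-PI exchange above: fetchWellPI() gathers one num_wells-sized block per rank into a buffer on rank 0, lets any non-zero entry override the local value, and then broadcasts the merged vector. A minimal sketch of that merge rule in plain C++, without the Dune communication calls (mergeWellPI and the sample data are hypothetical, not part of this change):

```cpp
#include <cstddef>
#include <iostream>
#include <vector>

// Hypothetical stand-in for the rank-0 merge step in fetchWellPI(): the
// gathered buffer holds num_wells entries per rank, and a non-zero entry
// from any rank overrides the local value for that well.
std::vector<double> mergeWellPI(const std::vector<double>& buffer,
                                std::size_t num_wells,
                                int num_ranks)
{
    // Start from rank 0's contribution (the first num_wells entries).
    std::vector<double> merged(buffer.begin(), buffer.begin() + num_wells);
    for (int rank = 1; rank < num_ranks; ++rank) {
        for (std::size_t well = 0; well < num_wells; ++well) {
            const double value = buffer[rank * num_wells + well];
            if (value != 0.0)
                merged[well] = value;
        }
    }
    return merged;
}

int main()
{
    // Two ranks, three wells; each rank only knows the PI of the wells it owns.
    const std::vector<double> gathered { 1.5, 0.0, 0.0,   // rank 0
                                         0.0, 2.5, 3.5 }; // rank 1
    for (double pi : mergeWellPI(gathered, 3, 2))
        std::cout << pi << '\n';                          // 1.5 2.5 3.5
}
```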
diff --git a/ebos/eclactionhandler.hh b/ebos/eclactionhandler.hh
new file mode 100644
index 000000000..f111e0d84
--- /dev/null
+++ b/ebos/eclactionhandler.hh
@@ -0,0 +1,92 @@
+// -*- mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+// vi: set et ts=4 sw=4 sts=4:
+/*
+ This file is part of the Open Porous Media project (OPM).
+
+ OPM is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 2 of the License, or
+ (at your option) any later version.
+
+ OPM is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with OPM. If not, see <http://www.gnu.org/licenses/>.
+
+ Consult the COPYING file in the top-level source directory of this
+ module for the precise wording of the license and the list of
+ copyright holders.
+*/
+
+#ifndef ECL_ACTION_HANDLER_HH
+#define ECL_ACTION_HANDLER_HH
+
+#include <opm/simulators/utils/ParallelCommunication.hpp>
+
+#include <functional>
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+namespace Opm {
+
+namespace Action {
+class ActionX;
+class State;
+}
+
+class BlackoilWellModelGeneric;
+class EclipseState;
+class Schedule;
+class SimulatorUpdate;
+class SummaryState;
+
+//! \brief Class handling Action support in the simulator.
+class EclActionHandler
+{
+public:
+ //! \brief Function handle to update transmissibilities.
+ using TransFunc = std::function<void(bool)>;
+
+ EclActionHandler(EclipseState& ecl_state,
+ Schedule& schedule,
+ Action::State& actionState,
+ SummaryState& summaryState,
+ BlackoilWellModelGeneric& wellModel,
+ Parallel::Communication comm);
+
+ void applyActions(int reportStep,
+ double sim_time,
+ const TransFunc& updateTrans);
+
+private:
+ /*
+ This function is run after applyAction has been completed in the Schedule
+ implementation. The sim_update argument should have members & flags for
+ the simulator properties which need to be updated. This functionality is
+ probably not complete.
+ */
+ void applySimulatorUpdate(int report_step,
+ const SimulatorUpdate& sim_update,
+ bool& commit_wellstate,
+ const TransFunc& updateTrans);
+
+ std::unordered_map<std::string, double>
+ fetchWellPI(int reportStep,
+ const Action::ActionX& action,
+ const std::vector<std::string>& matching_wells);
+
+ EclipseState& ecl_state_;
+ Schedule& schedule_;
+ Action::State& actionState_;
+ SummaryState& summaryState_;
+ BlackoilWellModelGeneric& wellModel_;
+ Parallel::Communication comm_;
+};
+
+} // namespace Opm
+
+#endif
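The TransFunc callback keeps the handler independent of the problem's transmissibility object: EclProblem captures transmissibilities_.update(global, gridToEquilGrid) in a lambda and hands it over (see the eclproblem.hh hunk below). A minimal sketch of that wiring with placeholder types (Transmissibilities and ActionHandlerSketch are hypothetical, only the callback pattern mirrors the change):

```cpp
#include <functional>
#include <iostream>

// Hypothetical stand-in for the problem-owned transmissibility object.
struct Transmissibilities {
    void update(bool global) { std::cout << "update(global=" << global << ")\n"; }
};

// Hypothetical stand-in for EclActionHandler; only the callback type matters.
class ActionHandlerSketch {
public:
    using TransFunc = std::function<void(bool)>;

    void applySimulatorUpdate(bool tran_update, const TransFunc& updateTrans)
    {
        if (tran_update)
            updateTrans(true); // re-compute transmissibilities, as in the real handler
    }
};

int main()
{
    Transmissibilities trans;
    ActionHandlerSketch handler;

    // The owner captures its own transmissibility object in the callback,
    // so the handler never needs to know the concrete type.
    handler.applySimulatorUpdate(true, [&trans](bool global) { trans.update(global); });
}
```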
diff --git a/ebos/eclproblem.hh b/ebos/eclproblem.hh
index 5dc6d3885..1ec574c66 100644
--- a/ebos/eclproblem.hh
+++ b/ebos/eclproblem.hh
@@ -48,6 +48,7 @@
#include "eclcpgridvanguard.hh"
#endif
+#include "eclactionhandler.hh"
#include "eclequilinitializer.hh"
#include "eclwriter.hh"
#include "ecloutputblackoilmodule.hh"
@@ -85,11 +86,7 @@
#include
#include
-#include
#include
-#include
-#include
-#include
#include
#include
@@ -781,6 +778,12 @@ public:
, aquiferModel_(simulator)
, pffDofData_(simulator.gridView(), this->elementMapper())
, tracerModel_(simulator)
+ , actionHandler_(simulator.vanguard().eclState(),
+ simulator.vanguard().schedule(),
+ simulator.vanguard().actionState(),
+ simulator.vanguard().summaryState(),
+ wellModel_,
+ simulator.vanguard().grid().comm())
{
this->model().addOutputModule(new VtkEclTracerModule(simulator));
// Tell the black-oil extensions to initialize their internal data structures
@@ -1191,8 +1194,6 @@ public:
bool isSubStep = !EWOMS_GET_PARAM(TypeTag, bool, EnableWriteAllSolutions) && !this->simulator().episodeWillBeOver();
eclWriter_->evalSummaryState(isSubStep);
- auto& schedule = simulator.vanguard().schedule();
- auto& ecl_state = simulator.vanguard().eclState();
int episodeIdx = this->episodeIndex();
// Re-ordering in case of Alugrid
@@ -1207,14 +1208,14 @@ public:
}
#endif // HAVE_DUNE_ALUGRID
- this->applyActions(episodeIdx,
- simulator.time() + simulator.timeStepSize(),
- simulator.vanguard().grid().comm(),
- ecl_state,
- schedule,
- simulator.vanguard().actionState(),
- simulator.vanguard().summaryState(),
- gridToEquilGrid);
+ std::function<void(bool)> transUp =
+ [this,gridToEquilGrid](bool global) {
+ this->transmissibilities_.update(global,gridToEquilGrid);
+ };
+
+ actionHandler_.applyActions(episodeIdx,
+ simulator.time() + simulator.timeStepSize(),
+ transUp);
// deal with "clogging" for the MICP model
if constexpr (enableMICP){
@@ -1272,142 +1273,6 @@ public:
}
- std::unordered_map<std::string, double> fetchWellPI(int reportStep,
- const Action::ActionX& action,
- const Schedule& schedule,
- const std::vector<std::string>& matching_wells) {
-
- auto wellpi_wells = action.wellpi_wells(WellMatcher(schedule[reportStep].well_order(),
- schedule[reportStep].wlist_manager()),
- matching_wells);
-
- if (wellpi_wells.empty())
- return {};
-
- const auto num_wells = schedule[reportStep].well_order().size();
- std::vector<double> wellpi_vector(num_wells);
- for (const auto& wname : wellpi_wells) {
- if (this->wellModel_.hasWell(wname)) {
- const auto& well = schedule.getWell( wname, reportStep );
- wellpi_vector[well.seqIndex()] = this->wellModel_.wellPI(wname);
- }
- }
-
- const auto& comm = this->simulator().vanguard().grid().comm();
- if (comm.size() > 1) {
- std::vector<double> wellpi_buffer(num_wells * comm.size());
- comm.gather( wellpi_vector.data(), wellpi_buffer.data(), num_wells, 0 );
- if (comm.rank() == 0) {
- for (int rank=1; rank < comm.size(); rank++) {
- for (std::size_t well_index=0; well_index < num_wells; well_index++) {
- const auto global_index = rank*num_wells + well_index;
- const auto value = wellpi_buffer[global_index];
- if (value != 0)
- wellpi_vector[well_index] = value;
- }
- }
- }
- comm.broadcast(wellpi_vector.data(), wellpi_vector.size(), 0);
- }
-
- std::unordered_map<std::string, double> wellpi;
- for (const auto& wname : wellpi_wells) {
- const auto& well = schedule.getWell( wname, reportStep );
- wellpi[wname] = wellpi_vector[ well.seqIndex() ];
- }
- return wellpi;
- }
-
-
-
- /*
- This function is run after applyAction has been completed in the Schedule
- implementation. The sim_update argument should have members & flags for
- the simulator properties which need to be updated. This functionality is
- probably not complete.
- */
- void applySimulatorUpdate(int report_step, Parallel::Communication comm, const SimulatorUpdate& sim_update, EclipseState& ecl_state, Schedule& schedule, SummaryState& summary_state, bool& commit_wellstate, const std::function<unsigned int(unsigned int)>& map = {}) {
- this->wellModel_.updateEclWells(report_step, sim_update.affected_wells, summary_state);
- if (!sim_update.affected_wells.empty())
- commit_wellstate = true;
-
- if (sim_update.tran_update) {
- const auto& keywords = schedule[report_step].geo_keywords();
- ecl_state.apply_schedule_keywords( keywords );
- eclBroadcast(comm, ecl_state.getTransMult() );
-
- // re-compute transmissibility
- transmissibilities_.update(true,map);
- }
-
- }
-
-
- void applyActions(int reportStep,
- double sim_time,
- Parallel::Communication comm,
- EclipseState& ecl_state,
- Schedule& schedule,
- Action::State& actionState,
- SummaryState& summaryState,
- const std::function<unsigned int(unsigned int)>& map = {}) {
- const auto& actions = schedule[reportStep].actions();
- if (actions.empty())
- return;
-
- Action::Context context( summaryState, schedule[reportStep].wlist_manager() );
- auto now = TimeStampUTC( schedule.getStartTime() ) + std::chrono::duration<double>(sim_time);
- std::string ts;
- {
- std::ostringstream os;
- os << std::setw(4) << std::to_string(now.year()) << '/'
- << std::setw(2) << std::setfill('0') << std::to_string(now.month()) << '/'
- << std::setw(2) << std::setfill('0') << std::to_string(now.day()) << " report:" << std::to_string(reportStep);
-
- ts = os.str();
- }
-
- bool commit_wellstate = false;
- for (const auto& pyaction : actions.pending_python(actionState)) {
- auto sim_update = schedule.runPyAction(reportStep, *pyaction, actionState, ecl_state, summaryState);
- this->applySimulatorUpdate(reportStep, comm, sim_update, ecl_state, schedule, summaryState, commit_wellstate, map);
- }
-
- auto simTime = asTimeT(now);
- for (const auto& action : actions.pending(actionState, simTime)) {
- auto actionResult = action->eval(context);
- if (actionResult) {
- std::string wells_string;
- const auto& matching_wells = actionResult.wells();
- if (!matching_wells.empty()) {
- for (std::size_t iw = 0; iw < matching_wells.size() - 1; iw++)
- wells_string += matching_wells[iw] + ", ";
- wells_string += matching_wells.back();
- }
- std::string msg = "The action: " + action->name() + " evaluated to true at " + ts + " wells: " + wells_string;
- OpmLog::info(msg);
-
- const auto& wellpi = this->fetchWellPI(reportStep, *action, schedule, matching_wells);
-
- auto sim_update = schedule.applyAction(reportStep, *action, actionResult.wells(), wellpi);
- this->applySimulatorUpdate(reportStep, comm, sim_update, ecl_state, schedule, summaryState, commit_wellstate, map);
- actionState.add_run(*action, simTime, std::move(actionResult));
- } else {
- std::string msg = "The action: " + action->name() + " evaluated to false at " + ts;
- OpmLog::info(msg);
- }
- }
- /*
- The well state has been stored in a previous object when the time step
- has completed successfully, the action process might have modified the
- well state, and to be certain that is not overwritten when starting
- the next timestep we must commit it.
- */
- if (commit_wellstate)
- this->wellModel_.commitWGState();
- }
-
-
/*!
* \copydoc FvBaseMultiPhaseProblem::intrinsicPermeability
*/
@@ -3107,6 +2972,8 @@ private:
PffGridVector pffDofData_;
TracerModel tracerModel_;
+ EclActionHandler actionHandler_;
+
std::vector<bool> freebcX_;
std::vector<bool> freebcXMinus_;
std::vector<bool> freebcY_;
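For reference, the report-time string that applyActions() logs ("YYYY/MM/DD report:N") is built with std::ostringstream, std::setw and std::setfill exactly as in the new handler. A standalone sketch of that formatting (the date and report step values are hypothetical):

```cpp
#include <iomanip>
#include <iostream>
#include <sstream>
#include <string>

int main()
{
    // Hypothetical date and report step; the formatting matches the string
    // logged by EclActionHandler::applyActions().
    const int year = 2015, month = 3, day = 7, reportStep = 12;

    std::ostringstream os;
    os << std::setw(4) << std::to_string(year) << '/'
       << std::setw(2) << std::setfill('0') << std::to_string(month) << '/'
       << std::setw(2) << std::setfill('0') << std::to_string(day)
       << " report:" << std::to_string(reportStep);

    const std::string ts = os.str();
    std::cout << ts << '\n'; // prints "2015/03/07 report:12"
}
```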