diff --git a/opm/parser/eclipse/EclipseState/Schedule/Schedule.hpp b/opm/parser/eclipse/EclipseState/Schedule/Schedule.hpp index 4199324fd..96111f43a 100644 --- a/opm/parser/eclipse/EclipseState/Schedule/Schedule.hpp +++ b/opm/parser/eclipse/EclipseState/Schedule/Schedule.hpp @@ -481,6 +481,7 @@ namespace Opm const ParseContext& parseContext, ErrorGuard& errors, bool runtime, + const std::unordered_map<std::string, double> * target_wellpi, const EclipseGrid* grid, const FieldPropsManager* fp); void addACTIONX(const Action::ActionX& action); @@ -496,6 +497,7 @@ namespace Opm const FieldPropsManager* fp, const std::vector<std::string>& matching_wells, bool runtime, + const std::unordered_map<std::string, double> * target_wellpi, std::vector<std::pair<const DeckKeyword*, std::size_t> >& rftProperties); static std::string formatDate(std::time_t t); @@ -512,6 +514,7 @@ namespace Opm const std::size_t currentStep; const std::vector<std::string>& matching_wells; const bool runtime; + const std::unordered_map<std::string, double> * target_wellpi; const EclipseGrid* grid_ptr; const FieldPropsManager* fp_ptr; @@ -519,12 +522,14 @@ namespace Opm const DeckKeyword& keyword_, const std::size_t currentStep_, const std::vector<std::string>& matching_wells_, - bool runtime_) : + bool runtime_, + const std::unordered_map<std::string, double> * target_wellpi_): block(block_), keyword(keyword_), currentStep(currentStep_), matching_wells(matching_wells_), runtime(runtime_), + target_wellpi(target_wellpi_), grid_ptr(nullptr), fp_ptr(nullptr) {} @@ -555,6 +560,7 @@ namespace Opm void handleGCONINJE(const DeckKeyword& keyword, std::size_t current_step, const ParseContext& parseContext, ErrorGuard& errors); void handleGLIFTOPT(const DeckKeyword& keyword, std::size_t report_step, const ParseContext& parseContext, ErrorGuard& errors); void handleWELPI (const DeckKeyword& keyword, std::size_t report_step, const ParseContext& parseContext, ErrorGuard& errors, const std::vector<std::string>& matching_wells = {}); + void handleWELPIRuntime(const HandlerContext&); // Normal keyword handlers -- in KeywordHandlers.cpp void handleBRANPROP (const HandlerContext&, const 
ParseContext&, ErrorGuard&); diff --git a/opm/parser/eclipse/EclipseState/Schedule/ScheduleState.hpp b/opm/parser/eclipse/EclipseState/Schedule/ScheduleState.hpp index 9ddbd49b8..36e05ec54 100644 --- a/opm/parser/eclipse/EclipseState/Schedule/ScheduleState.hpp +++ b/opm/parser/eclipse/EclipseState/Schedule/ScheduleState.hpp @@ -23,6 +23,7 @@ #include #include #include +#include <unordered_map> #include @@ -408,10 +409,10 @@ namespace Opm { map_member<int, VFPInjTable> vfpinj; map_member<std::string, Group> groups; map_member<std::string, Well> wells; - std::map<std::string, double> target_wellpi; + std::unordered_map<std::string, double> target_wellpi; - using WellPIMapType = std::map<std::string, double>; + using WellPIMapType = std::unordered_map<std::string, double>; template <class Serializer> void serializeOp(Serializer& serializer) { serializer(m_start_time); diff --git a/src/opm/parser/eclipse/EclipseState/Schedule/KeywordHandlers.cpp b/src/opm/parser/eclipse/EclipseState/Schedule/KeywordHandlers.cpp index 7d108d300..7ec925675 100644 --- a/src/opm/parser/eclipse/EclipseState/Schedule/KeywordHandlers.cpp +++ b/src/opm/parser/eclipse/EclipseState/Schedule/KeywordHandlers.cpp @@ -1117,7 +1117,38 @@ namespace { } void Schedule::handleWELPI(const HandlerContext& handlerContext, const ParseContext& parseContext, ErrorGuard& errors) { - this->handleWELPI(handlerContext.keyword, handlerContext.currentStep, parseContext, errors); + if (handlerContext.runtime) + this->handleWELPIRuntime(handlerContext); + else + this->handleWELPI(handlerContext.keyword, handlerContext.currentStep, parseContext, errors); + } + + void Schedule::handleWELPIRuntime(const HandlerContext& handlerContext) { + using WELL_NAME = ParserKeywords::WELPI::WELL_NAME; + using PI = ParserKeywords::WELPI::STEADY_STATE_PRODUCTIVITY_OR_INJECTIVITY_INDEX_VALUE; + + auto report_step = handlerContext.currentStep; + for (const auto& record : handlerContext.keyword) { + const auto well_names = this->wellNames(record.getItem<WELL_NAME>().getTrimmedString(0), + report_step, + handlerContext.matching_wells); + const auto targetPI = record.getItem<PI>().get<double>(0); + + std::vector<bool> scalingApplicable; + 
const auto& current_wellpi = *handlerContext.target_wellpi; + for (const auto& well_name : well_names) { + auto wellpi_iter = current_wellpi.find(well_name); + if (wellpi_iter == current_wellpi.end()) + throw std::logic_error(fmt::format("Missing current PI for well {}", well_name)); + + auto new_well = this->getWell(well_name, report_step); + auto scalingFactor = new_well.convertDeckPI(targetPI) / wellpi_iter->second; + new_well.updateWellProductivityIndex(); + new_well.applyWellProdIndexScaling(scalingFactor, scalingApplicable); + this->snapshots.back().wells.update( std::move(new_well) ); + this->snapshots.back().target_wellpi[well_name] = targetPI; + } + } } void Schedule::handleWELPI(const DeckKeyword& keyword, std::size_t report_step, const ParseContext& parseContext, ErrorGuard& errors, const std::vector<std::string>& matching_wells) { diff --git a/src/opm/parser/eclipse/EclipseState/Schedule/Schedule.cpp b/src/opm/parser/eclipse/EclipseState/Schedule/Schedule.cpp index c99047e9b..2432abae0 100644 --- a/src/opm/parser/eclipse/EclipseState/Schedule/Schedule.cpp +++ b/src/opm/parser/eclipse/EclipseState/Schedule/Schedule.cpp @@ -118,11 +118,11 @@ namespace { { if (rst) { auto restart_step = rst->header.restart_info().second; - this->iterateScheduleSection( 0, restart_step, parseContext, errors, false, &grid, &fp); + this->iterateScheduleSection( 0, restart_step, parseContext, errors, false, nullptr, &grid, &fp); this->load_rst(*rst, grid, fp); - this->iterateScheduleSection( restart_step, this->m_sched_deck.size(), parseContext, errors, false, &grid, &fp); + this->iterateScheduleSection( restart_step, this->m_sched_deck.size(), parseContext, errors, false, nullptr, &grid, &fp); } else - this->iterateScheduleSection( 0, this->m_sched_deck.size(), parseContext, errors, false, &grid, &fp); + this->iterateScheduleSection( 0, this->m_sched_deck.size(), parseContext, errors, false, nullptr, &grid, &fp); /* The code in the #ifdef SCHEDULE_DEBUG is an enforced integration test @@ 
-274,6 +274,7 @@ namespace { const FieldPropsManager* fp, const std::vector<std::string>& matching_wells, bool runtime, + const std::unordered_map<std::string, double> * target_wellpi, std::vector<std::pair<const DeckKeyword*, std::size_t> >& rftProperties) { static const std::unordered_set<std::string> require_grid = { @@ -282,7 +283,7 @@ namespace { }; - HandlerContext handlerContext { block, keyword, currentStep, matching_wells, runtime }; + HandlerContext handlerContext { block, keyword, currentStep, matching_wells, runtime , target_wellpi}; /* The grid and fieldProps members create problems for reiterating the @@ -353,6 +354,7 @@ void Schedule::iterateScheduleSection(std::size_t load_start, std::size_t load_e const ParseContext& parseContext , ErrorGuard& errors, bool runtime, + const std::unordered_map<std::string, double> * target_wellpi, const EclipseGrid* grid, const FieldPropsManager* fp) { @@ -472,6 +474,7 @@ void Schedule::iterateScheduleSection(std::size_t load_start, std::size_t load_e fp, {}, runtime, + target_wellpi, rftProperties); keyword_index++; } @@ -1192,7 +1195,7 @@ void Schedule::iterateScheduleSection(std::size_t load_start, std::size_t load_e } - void Schedule::applyAction(std::size_t reportStep, const std::chrono::system_clock::time_point&, const Action::ActionX& action, const Action::Result& result, const std::unordered_map<std::string, double>& ) { + void Schedule::applyAction(std::size_t reportStep, const std::chrono::system_clock::time_point&, const Action::ActionX& action, const Action::Result& result, const std::unordered_map<std::string, double>& target_wellpi) { ParseContext parseContext; ErrorGuard errors; std::vector<std::pair<const DeckKeyword*, std::size_t> > ignored_rftProperties; @@ -1211,10 +1214,11 @@ void Schedule::iterateScheduleSection(std::size_t load_start, std::size_t load_e nullptr, result.wells(), true, + &target_wellpi, ignored_rftProperties); } if (reportStep < this->m_sched_deck.size() - 1) - iterateScheduleSection(reportStep + 1, this->m_sched_deck.size(), parseContext, errors, true, nullptr, nullptr); + iterateScheduleSection(reportStep + 1, this->m_sched_deck.size(), parseContext, errors, true, 
&target_wellpi, nullptr, nullptr); //this->m_sched_deck[reportStep].push diff --git a/tests/parser/ACTIONX.cpp b/tests/parser/ACTIONX.cpp index bade8d4a7..22af3957f 100644 --- a/tests/parser/ACTIONX.cpp +++ b/tests/parser/ACTIONX.cpp @@ -1145,19 +1145,35 @@ TSTEP const auto st = SummaryState{ std::chrono::system_clock::now() }; Schedule sched = make_schedule(deck_string); const auto& action1 = sched[0].actions.get().get("A"); + double CF0; { const auto& target_wellpi = sched[0].target_wellpi; BOOST_CHECK_EQUAL( target_wellpi.count("PROD1"), 0); + + const auto& well = sched.getWell("PROD1", 0); + CF0 = well.getConnections()[0].CF(); } - std::unordered_set<std::string> required_summary; - action1.required_summary(required_summary); - BOOST_CHECK_EQUAL( required_summary.count("WWCT"), 1); + Action::Result action_result(true); - sched.applyAction(0, std::chrono::system_clock::now(), action1, action_result, {}); + BOOST_CHECK_THROW( sched.applyAction(0, std::chrono::system_clock::now(), action1, action_result, {}), std::exception); + { + const auto& well = sched.getWell("PROD1", 0); + sched.applyAction(0, std::chrono::system_clock::now(), action1, action_result, {{"PROD1", well.convertDeckPI(500)}}); + } { const auto& target_wellpi = sched[0].target_wellpi; BOOST_CHECK_EQUAL( target_wellpi.at("PROD1"), 1000); + + const auto& well = sched.getWell("PROD1", 0); + auto CF1 = well.getConnections()[0].CF(); + BOOST_CHECK_CLOSE(CF1 / CF0, 2.0, 1e-4 ); + } + + { + std::unordered_set<std::string> required_summary; + action1.required_summary(required_summary); + BOOST_CHECK_EQUAL( required_summary.count("WWCT"), 1); } }