From 3f025070fe937ac3d0bfe5273c2c7129f36038f0 Mon Sep 17 00:00:00 2001
From: Kai Bao
Date: Tue, 28 Jun 2022 14:45:04 +0200
Subject: [PATCH 1/4] fixing the interpretation of multiple WPIMULT records

During a report step, if there are multiple records with defaulted
connection and completion information for a well, only the last such
record applies to that well. A record like the following, however, is
considered NOT to have defaulted connection and completion information:

WPIMULT
'WEL' 0.8 0 0 0 /
/
---
 .../eclipse/Schedule/KeywordHandlers.cpp      | 49 ++++++++++++++++++-
 src/opm/input/eclipse/Schedule/Well/Well.cpp  |  2 +-
 .../share/keywords/000_Eclipse100/W/WPIMULT   | 15 ++++--
 3 files changed, 59 insertions(+), 7 deletions(-)

diff --git a/src/opm/input/eclipse/Schedule/KeywordHandlers.cpp b/src/opm/input/eclipse/Schedule/KeywordHandlers.cpp
index 60fa99aac..d5eb6fab6 100644
--- a/src/opm/input/eclipse/Schedule/KeywordHandlers.cpp
+++ b/src/opm/input/eclipse/Schedule/KeywordHandlers.cpp
@@ -1774,11 +1774,58 @@ Well{0} entered with disallowed 'FIELD' parent group:
     }

     void Schedule::handleWPIMULT(HandlerContext& handlerContext) {
-        for (const auto& record : handlerContext.keyword) {
+        // The third through seventh items of a WPIMULT record are integers indicating
+        // the I, J, K location and the completion number range.
+        // A defaulted item is treated as negative, and
+        // a negative input value is treated as defaulted.
+        auto defaultConCompRec = [] (const DeckRecord& rec)-> bool {
+            bool default_connections = true;
+            for (size_t i = 2; i < rec.size(); ++i) {
+                const auto& item = rec.getItem(i);
+                if (item.get<int>(0) >= 0) {
+                    default_connections = false;
+                    break;
+                }
+            }
+            return default_connections;
+        };
+
+        auto lastRecordWithDefaultConnections = [&defaultConCompRec, this, &handlerContext] (const DeckKeyword& keyword) {
+            std::unordered_map<std::string, std::size_t> last_index_default_cons_comps;
+            for (size_t i = 0; i < keyword.size(); ++i) {
+                const auto& record = keyword.getRecord(i);
+                if (defaultConCompRec(record)) {
+                    const std::string& wellNamePattern = record.getItem("WELL").getTrimmedString(0);
+                    const auto& well_names = this->wellNames(wellNamePattern, handlerContext);
+
+                    for (const auto& wname : well_names) {
+                        last_index_default_cons_comps[wname] = i;
+                    }
+                }
+            }
+            return last_index_default_cons_comps;
+        };
+
+        const auto last_index_default_cons_comps = lastRecordWithDefaultConnections(handlerContext.keyword);
+
+        for (size_t i_rec = 0; i_rec < handlerContext.keyword.size(); ++i_rec) {
+            const auto& record = handlerContext.keyword.getRecord(i_rec);
+            // whether this record has defaulted connection and completion information
+            const bool default_con_comp = defaultConCompRec(record);
+
             const std::string& wellNamePattern = record.getItem("WELL").getTrimmedString(0);
             const auto& well_names = this->wellNames(wellNamePattern, handlerContext);

             for (const auto& wname : well_names) {
+                // for records with defaulted connection and completion information, we only use the last record for that well
+                if (default_con_comp) {
+                    const auto search = last_index_default_cons_comps.find(wname);
+                    if (search != last_index_default_cons_comps.end() && i_rec < search->second) {
+                        // it is not the last record with defaulted connection and completion information for that well
+                        // we skip this record for this well
+                        continue;
+                    }
+                }
                 auto well = this->snapshots.back().wells( wname );
                 if (well.handleWPIMULT(record))
                     this->snapshots.back().wells.update( std::move(well));
diff --git a/src/opm/input/eclipse/Schedule/Well/Well.cpp
b/src/opm/input/eclipse/Schedule/Well/Well.cpp
index 7c1729d96..b01201376 100644
--- a/src/opm/input/eclipse/Schedule/Well/Well.cpp
+++ b/src/opm/input/eclipse/Schedule/Well/Well.cpp
@@ -61,7 +61,7 @@ namespace {
         if (item.defaultApplied(0))
             return true;

-        if (item.get<int>(0) == 0)
+        if (item.get<int>(0) <= 0)
             return true;

         return false;
diff --git a/src/opm/input/eclipse/share/keywords/000_Eclipse100/W/WPIMULT b/src/opm/input/eclipse/share/keywords/000_Eclipse100/W/WPIMULT
index eaeee663a..38a6b0990 100644
--- a/src/opm/input/eclipse/share/keywords/000_Eclipse100/W/WPIMULT
+++ b/src/opm/input/eclipse/share/keywords/000_Eclipse100/W/WPIMULT
@@ -15,23 +15,28 @@
     },
     {
         "name": "I",
-        "value_type": "INT"
+        "value_type": "INT",
+        "default" : -1
     },
     {
         "name": "J",
-        "value_type": "INT"
+        "value_type": "INT",
+        "default" : -1
     },
     {
         "name": "K",
-        "value_type": "INT"
+        "value_type": "INT",
+        "default" : -1
     },
     {
         "name": "FIRST",
-        "value_type": "INT"
+        "value_type": "INT",
+        "default" : -1
     },
     {
         "name": "LAST",
-        "value_type": "INT"
+        "value_type": "INT",
+        "default" : -1
     }
   ]
 }

From ffa12e5d0948ebdbe7e8a92d43111cf1e96a8e97 Mon Sep 17 00:00:00 2001
From: Kai Bao
Date: Thu, 30 Jun 2022 13:29:33 +0200
Subject: [PATCH 2/4] adding a test for WPIMULT covering the new functionality
 from this PR.

---
 tests/parser/ScheduleTests.cpp | 135 +++++++++++++++++++++++++++++++++
 1 file changed, 135 insertions(+)

diff --git a/tests/parser/ScheduleTests.cpp b/tests/parser/ScheduleTests.cpp
index 6e76c2f30..c1cfb5aff 100644
--- a/tests/parser/ScheduleTests.cpp
+++ b/tests/parser/ScheduleTests.cpp
@@ -1232,6 +1232,141 @@ COMPDAT
     BOOST_CHECK_EQUAL(sim_time1.year(), 2011);
 }

+BOOST_AUTO_TEST_CASE(createDeckWithMultipleWPIMULT) {
+    std::string input = R"(
+START             -- 0
+19 JUN 2007 /
+GRID
+PORO
+ 1000*0.1 /
+PERMX
+ 1000*1 /
+PERMY
+ 1000*0.1 /
+PERMZ
+ 1000*0.01 /
+SCHEDULE
+WELSPECS
+    'OP_1'  'OP'  9  9  1*  'OIL'  1*  1*  1*  1*  1*  1*  1* /
+/
+WELSPECS
+    'OP_2'  'OP'  8  8  1*  'OIL'  1*  1*  1*  1*  1*  1*  1* /
+/
+COMPDAT
+-- WELL   I  J  K1  K2  Sat.  CF  DIAM  KH  SKIN  ND  DIR  Ro
+    'OP_1'  9  9  1  1  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+    'OP_1'  9  9  2  2  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+    'OP_1'  9  9  3  3  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+    'OP_1'  9  9  4  4  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+/
+COMPDAT
+-- WELL   I  J  K1  K2  Sat.  CF  DIAM  KH  SKIN  ND  DIR  Ro
+    'OP_2'  8  8  1  1  'OPEN'  1*  50  2*  2*  'X'  22.100 /
+    'OP_2'  8  8  2  2  'OPEN'  1*  50  2*  2*  'X'  22.100 /
+    'OP_2'  8  8  3  3  'OPEN'  1*  50  2*  2*  'X'  22.100 /
+/
+DATES             -- 0
+ 20 JAN 2009 /
+/
+WPIMULT
+  'OP_1' 2.0 /
+  'OP_2' 3.0 /
+  'OP_1' 0.8 -1 -1 -1 / -- all connections
+  'OP_2' 7.0 /
+/
+DATES             -- 1
+ 20 JAN 2010 /
+/
+WPIMULT
+  'OP_1' 0.5 /
+/
+DATES             -- 2
+ 20 JAN 2011 /
+/
+
+COMPDAT
+-- WELL   I  J  K1  K2  Sat.  CF  DIAM  KH  SKIN  ND  DIR  Ro
+    'OP_1'  9  9  1  1  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+    'OP_1'  9  9  2  2  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+    'OP_1'  9  9  3  3  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+    'OP_1'  9  9  4  4  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+/
+
+WPIMULT
+  'OP_1' 2.0 /
+  'OP_1' 0.8 0 0 0 / -- all connections
+/
+
+DATES             -- 3
+ 20 JAN 2012 /
+/
+
+COMPDAT
+-- WELL   I  J  K1  K2  Sat.  CF  DIAM  KH  SKIN  ND  DIR  Ro
+    'OP_1'  9  9  1  1  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+    'OP_1'  9  9  2  2  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+    'OP_1'  9  9  3  3  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+    'OP_1'  9  9  4  4  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+/
+
+WPIMULT
+  'OP_1' 2.0 /
+  'OP_1' 0.8 / -- all connections
+/
+
+DATES             -- 4
+ 20 JAN 2013 /
+/
+
+COMPDAT
+-- WELL   I  J  K1  K2  Sat.
  CF  DIAM  KH  SKIN  ND  DIR  Ro
+    'OP_1'  9  9  1  1  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+    'OP_1'  9  9  2  2  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+    'OP_1'  9  9  3  3  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+    'OP_1'  9  9  4  4  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+/
+
+WPIMULT
+  'OP_1' 2.0 /
+  'OP_1' 0.8 / -- all connections
+  'OP_1' 0.50 2* 4 /
+  'OP_1' 0.10 2* 4 /
+/
+DATES             -- 5
+ 20 JAN 2014 /
+/
+END
+)";
+
+    const auto& schedule = make_schedule(input);
+    const auto& cs0 = schedule.getWell("OP_1", 0).getConnections();
+    const auto& cs1 = schedule.getWell("OP_1", 1).getConnections();
+    const auto& cs2 = schedule.getWell("OP_1", 2).getConnections();
+    const auto& cs3 = schedule.getWell("OP_1", 3).getConnections();
+    const auto& cs4 = schedule.getWell("OP_1", 4).getConnections();
+    const auto& cs5 = schedule.getWell("OP_1", 5).getConnections();
+    const auto& cs0_2 = schedule.getWell("OP_2", 0).getConnections();
+    const auto& cs1_2 = schedule.getWell("OP_2", 1).getConnections();
+    const auto& cs2_2 = schedule.getWell("OP_2", 2).getConnections();
+
+    for (size_t i = 0; i < cs1_2.size(); ++i ) {
+        BOOST_CHECK_CLOSE(cs1_2.get(i).CF() / cs0_2.get(i).CF(), 7.0, 1.e-13);
+        BOOST_CHECK_CLOSE(cs2_2.get(i).CF() / cs1_2.get(i).CF(), 1.0, 1.e-13);
+    }
+    for (size_t i = 0; i < cs1.size(); ++i ) {
+        BOOST_CHECK_CLOSE(cs1.get(i).CF() / cs0.get(i).CF(), 0.8, 1.e-13);
+        BOOST_CHECK_CLOSE(cs2.get(i).CF() / cs1.get(i).CF(), 0.5, 1.e-13);
+        BOOST_CHECK_CLOSE(cs3.get(i).CF() / cs0.get(i).CF(), 1.6, 1.e-13);
+        BOOST_CHECK_CLOSE(cs4.get(i).CF() / cs0.get(i).CF(), 0.8, 1.e-13);
+    }
+
+    for (size_t i = 0; i < 3; ++i) {
+        BOOST_CHECK_CLOSE(cs5.get(i).CF() / cs0.get(i).CF(), 0.8, 1.e-13);
+    }
+    BOOST_CHECK_CLOSE(cs5.get(3).CF() / cs0.get(3).CF(), 0.04, 1.e-13);
+}
+
+
 BOOST_AUTO_TEST_CASE(WELSPECS_WGNAME_SPACE) {
     Opm::Parser parser;
     const std::string input = R"(

From b87867d1448c48862954a2007c42d54e15a66d5a Mon Sep 17 00:00:00 2001
From: Kai Bao
Date: Fri, 1 Jul 2022 14:45:59 +0200
Subject: [PATCH 3/4] multiple WPIMULT keywords within one report step are
 handled and ScheduleTests is updated with tests

---
 opm/input/eclipse/Schedule/Schedule.hpp       | 16 +++--
 opm/input/eclipse/Schedule/Well/Well.hpp      |  1 +
 .../eclipse/Schedule/KeywordHandlers.cpp      | 58 ++++++++-----------
 src/opm/input/eclipse/Schedule/Schedule.cpp   | 24 ++++++--
 src/opm/input/eclipse/Schedule/Well/Well.cpp  | 14 ++++-
 tests/parser/ScheduleTests.cpp                | 52 ++++++++++++++++-
 6 files changed, 119 insertions(+), 46 deletions(-)

diff --git a/opm/input/eclipse/Schedule/Schedule.hpp b/opm/input/eclipse/Schedule/Schedule.hpp
index 5d740fbe7..2433568dd 100644
--- a/opm/input/eclipse/Schedule/Schedule.hpp
+++ b/opm/input/eclipse/Schedule/Schedule.hpp
@@ -504,6 +504,7 @@ namespace Opm
             ErrorGuard& errors;
             SimulatorUpdate * sim_update;
             const std::unordered_map<std::string, double> * target_wellpi;
+            std::unordered_map<std::string, double>* wellpi_global_factor;
             const ScheduleGrid& grid;

             HandlerContext(const ScheduleBlock& block_,
@@ -515,7 +516,8 @@ namespace Opm
                            const ParseContext& parseContext_,
                            ErrorGuard& errors_,
                            SimulatorUpdate * sim_update_,
-                           const std::unordered_map<std::string, double> * target_wellpi_)
+                           const std::unordered_map<std::string, double> * target_wellpi_,
+                           std::unordered_map<std::string, double>* wellpi_global_factor_ = nullptr)
                 : block(block_)
                 , keyword(keyword_)
                 , currentStep(currentStep_)
@@ -525,6 +527,7 @@ namespace Opm
                 , errors(errors_)
                 , sim_update(sim_update_)
                 , target_wellpi(target_wellpi_)
+                , wellpi_global_factor(wellpi_global_factor_)
                 , grid(grid_)
             {}

@@ -586,12 +589,14 @@ namespace Opm
         void handleKeyword(std::size_t currentStep,
                            const ScheduleBlock& block,
                            const DeckKeyword&
keyword,
-                           const ParseContext& parseContext, ErrorGuard& errors,
+                           const ParseContext& parseContext,
+                           ErrorGuard& errors,
                            const ScheduleGrid& grid,
                            const std::vector<std::string>& matching_wells,
-                           bool runtime,
-                           SimulatorUpdate * sim_update,
-                           const std::unordered_map<std::string, double> * target_wellpi);
+                           bool actionx_mode,
+                           SimulatorUpdate* sim_update,
+                           const std::unordered_map<std::string, double>* target_wellpi,
+                           std::unordered_map<std::string, double>* wellpi_global_factor = nullptr);

         void prefetch_cell_properties(const ScheduleGrid& grid, const DeckKeyword& keyword);
         void store_wgnames(const DeckKeyword& keyword);
@@ -600,6 +605,7 @@ namespace Opm
         void invalidNamePattern( const std::string& namePattern, const HandlerContext& context) const;
         static std::string formatDate(std::time_t t);
         std::string simulationDays(std::size_t currentStep) const;
+        void applyGlobalWPIMULT( const std::unordered_map<std::string, double>& factors);

         bool must_write_rst_file(std::size_t report_step) const;

diff --git a/opm/input/eclipse/Schedule/Well/Well.hpp b/opm/input/eclipse/Schedule/Well/Well.hpp
index 868e60bf1..8164fe2ad 100644
--- a/opm/input/eclipse/Schedule/Well/Well.hpp
+++ b/opm/input/eclipse/Schedule/Well/Well.hpp
@@ -626,6 +626,7 @@ public:
     bool handleWELOPENConnections(const DeckRecord& record, Connection::State status);
     bool handleCOMPLUMP(const DeckRecord& record);
     bool handleWPIMULT(const DeckRecord& record);
+    bool applyGlobalWPIMULT(double scale_factor);

     void filterConnections(const ActiveGridCells& grid);
     ProductionControls productionControls(const SummaryState& st) const;
diff --git a/src/opm/input/eclipse/Schedule/KeywordHandlers.cpp b/src/opm/input/eclipse/Schedule/KeywordHandlers.cpp
index d5eb6fab6..1abc8dcb6 100644
--- a/src/opm/input/eclipse/Schedule/KeywordHandlers.cpp
+++ b/src/opm/input/eclipse/Schedule/KeywordHandlers.cpp
@@ -1779,53 +1779,41 @@ Well{0} entered with disallowed 'FIELD' parent group:
         // the I, J, K location and the completion number range.
         // A defaulted item is treated as negative, and
         // a negative input value is treated as defaulted.
        auto defaultConCompRec = [] (const DeckRecord& rec)-> bool {
-            bool default_connections = true;
+            bool default_con_comp = true;
             for (size_t i = 2; i < rec.size(); ++i) {
                 const auto& item = rec.getItem(i);
                 if (item.get<int>(0) >= 0) {
-                    default_connections = false;
+                    default_con_comp = false;
                     break;
                 }
             }
-            return default_connections;
+            return default_con_comp;
         };

-        auto lastRecordWithDefaultConnections = [&defaultConCompRec, this, &handlerContext] (const DeckKeyword& keyword) {
-            std::unordered_map<std::string, std::size_t> last_index_default_cons_comps;
-            for (size_t i = 0; i < keyword.size(); ++i) {
-                const auto& record = keyword.getRecord(i);
-                if (defaultConCompRec(record)) {
-                    const std::string& wellNamePattern = record.getItem("WELL").getTrimmedString(0);
-                    const auto& well_names = this->wellNames(wellNamePattern, handlerContext);
-
-                    for (const auto& wname : well_names) {
-                        last_index_default_cons_comps[wname] = i;
-                    }
-                }
-            }
-            return last_index_default_cons_comps;
-        };
-
-        const auto last_index_default_cons_comps = lastRecordWithDefaultConnections(handlerContext.keyword);
-
-        for (size_t i_rec = 0; i_rec < handlerContext.keyword.size(); ++i_rec) {
-            const auto& record = handlerContext.keyword.getRecord(i_rec);
-            // whether this record has defaulted connection and completion information
-            const bool default_con_comp = defaultConCompRec(record);
-
+        for (const auto& record : handlerContext.keyword) {
             const std::string& wellNamePattern = record.getItem("WELL").getTrimmedString(0);
             const auto& well_names = this->wellNames(wellNamePattern, handlerContext);

-            for (const auto& wname : well_names) {
-                // for records with defaulted connection and completion information, we only use the last record for that well
-                if (default_con_comp) {
-                    const auto search = last_index_default_cons_comps.find(wname);
-                    if (search != last_index_default_cons_comps.end() && i_rec < search->second) {
-                        // it is not the last record with defaulted connection and completion information for that well
-                        // we skip this record for this well
-                        continue;
-                    }
+            // A record with defaulted connection and completion information is not applied immediately,
+            // because only the last such record for a well should take effect. Since several WPIMULT
+            // keywords may occur within one report step, we cannot know yet whether this record is the
+            // last one. We therefore only store the scaling factor here and defer the application
+            // until the end of the report step.
+            const bool default_con_comp = defaultConCompRec(record);
+            if (default_con_comp) {
+                auto wellpi_global_factor = handlerContext.wellpi_global_factor;
+                if (!wellpi_global_factor) {
+                    throw std::runtime_error(" wellpi_global_factor is nullptr in function handleWPIMULT ");
+                }
+                const auto scaling_factor = record.getItem("WELLPI").get<double>(0);
+                for (const auto& wname : well_names) {
+                    (*wellpi_global_factor)[wname] = scaling_factor;
+                }
+                continue;
+            }
+
+            // Records with non-defaulted connection and completion information are applied immediately.
+            for (const auto& wname : well_names) {
                 auto well = this->snapshots.back().wells( wname );
                 if (well.handleWPIMULT(record))
                     this->snapshots.back().wells.update( std::move(well));
diff --git a/src/opm/input/eclipse/Schedule/Schedule.cpp b/src/opm/input/eclipse/Schedule/Schedule.cpp
index de0e6f7d5..357eaa1ca 100644
--- a/src/opm/input/eclipse/Schedule/Schedule.cpp
+++ b/src/opm/input/eclipse/Schedule/Schedule.cpp
@@ -312,8 +312,10 @@ Schedule::Schedule(const Deck& deck, const EclipseState& es, const std::optional
                                  const ScheduleGrid& grid,
                                  const std::vector<std::string>& matching_wells,
                                  bool actionx_mode,
-                                 SimulatorUpdate * sim_update,
-                                 const std::unordered_map<std::string, double> * target_wellpi) {
+                                 SimulatorUpdate* sim_update,
+                                 const std::unordered_map<std::string, double>* target_wellpi,
+                                 std::unordered_map<std::string, double>* wellpi_global_factor)
+    {

         static const std::unordered_set<std::string> require_grid = {
             "COMPDAT",
@@ -321,7 +323,7 @@ Schedule::Schedule(const Deck& deck, const EclipseState& es, const std::optional
         };


-        HandlerContext handlerContext { block, keyword, grid, currentStep, matching_wells, actionx_mode, parseContext, errors, sim_update, target_wellpi};
+        HandlerContext handlerContext { block, keyword, grid, currentStep, matching_wells, actionx_mode, parseContext, errors, sim_update, target_wellpi, wellpi_global_factor};
         /*
           The grid and fieldProps members create problems for reiterating the
           Schedule section. We therefor single them out very clearly here.
@@ -474,6 +476,7 @@ void Schedule::iterateScheduleSection(std::size_t load_start, std::size_t load_e
             }

             this->create_next(block);
+            std::unordered_map<std::string, double> wellpi_global_factor;
             while (true) {
                 if (keyword_index == block.size())
                     break;
@@ -520,15 +523,28 @@ void Schedule::iterateScheduleSection(std::size_t load_start, std::size_t load_e
                                     {},
                                     false,
                                     nullptr,
-                                    target_wellpi);
+                                    target_wellpi,
+                                    &wellpi_global_factor);
                 keyword_index++;
             }

+            this->applyGlobalWPIMULT(wellpi_global_factor);
             this->end_report(report_step);

             if (this->must_write_rst_file(report_step)) {
                 this->restart_output.addRestartOutput(report_step);
             }
+        } // for (auto report_step = load_start
+    }
+
+    void Schedule::applyGlobalWPIMULT( const std::unordered_map<std::string, double>& wellpi_global_factor) {
+        for (const auto& elem : wellpi_global_factor) {
+            const auto& well_name = elem.first;
+            const auto factor = elem.second;
+            auto well = this->snapshots.back().wells(well_name);
+            if (well.applyGlobalWPIMULT(factor)) {
+                this->snapshots.back().wells.update(std::move(well));
+            }
         }
     }
diff --git a/src/opm/input/eclipse/Schedule/Well/Well.cpp b/src/opm/input/eclipse/Schedule/Well/Well.cpp
index b01201376..34a4e6776 100644
--- a/src/opm/input/eclipse/Schedule/Well/Well.cpp
+++ b/src/opm/input/eclipse/Schedule/Well/Well.cpp
@@ -61,7 +61,7 @@ namespace {
         if (item.defaultApplied(0))
             return true;

-        if (item.get<int>(0) <= 0)
+        if (item.get<int>(0) == 0)
             return true;

         return false;
@@ -1166,6 +1166,18 @@ bool Well::handleWPIMULT(const DeckRecord& record) {
 }


+bool Opm::Well::applyGlobalWPIMULT(const double scaling_factor)
+{
+    auto new_connections = std::make_shared<WellConnections>(this->connections->ordering(), this->headI, this->headJ);
+    for (auto c : *this->connections) {
+        c.scaleWellPi(scaling_factor);
+        new_connections->add(c);
+    }
+
+    return this->updateConnections(std::move(new_connections), false);
+}
+
+
 void Well::updateSegments(std::shared_ptr<WellSegments> segments_arg) {
     this->segments = std::move(segments_arg);
     this->updateRefDepth( this->segments->depthTopSegment() );
diff --git a/tests/parser/ScheduleTests.cpp b/tests/parser/ScheduleTests.cpp
index c1cfb5aff..7c079de7b 100644
--- a/tests/parser/ScheduleTests.cpp
+++ b/tests/parser/ScheduleTests.cpp
@@ -1294,7 +1294,7 @@ COMPDAT

 WPIMULT
   'OP_1' 2.0 /
-  'OP_1' 0.8 0 0 0 / -- all connections
+  'OP_1' 0.8 0 0 0 / -- all connections but not defaulted
 /

 DATES             -- 3
@@ -1335,6 +1335,49 @@ WPIMULT
 DATES             -- 5
  20 JAN 2014 /
 /
+COMPDAT
+-- WELL   I  J  K1  K2  Sat.  CF  DIAM  KH  SKIN  ND  DIR  Ro
+    'OP_1'  9  9  1  1  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+    'OP_1'  9  9  2  2  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+    'OP_1'  9  9  3  3  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+    'OP_1'  9  9  4  4  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+/
+
+WPIMULT
+  'OP_1' 2.0 /
+  'OP_1' 0.10 2* 4 /
+/
+WPIMULT
+  'OP_1' 0.8 / -- all connections
+  'OP_1' 0.50 2* 4 /
+/
+DATES             -- 6
+ 20 FEB 2014 /
+/
+COMPDAT
+-- WELL   I  J  K1  K2  Sat.  CF  DIAM  KH  SKIN  ND  DIR  Ro
+    'OP_1'  9  9  1  1  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+    'OP_1'  9  9  2  2  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+    'OP_1'  9  9  3  3  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+    'OP_1'  9  9  4  4  'OPEN'  1*  100  2*  2*  'X'  22.100 /
+/
+COMPDAT
+-- WELL   I  J  K1  K2  Sat.
  CF  DIAM  KH  SKIN  ND  DIR  Ro
+    'OP_2'  8  8  1  1  'OPEN'  1*  50  2*  2*  'X'  22.100 /
+    'OP_2'  8  8  2  2  'OPEN'  1*  50  2*  2*  'X'  22.100 /
+    'OP_2'  8  8  3  3  'OPEN'  1*  50  2*  2*  'X'  22.100 /
+/
+WPIMULT
+  'OP_1' 2.0 /
+  'OP_2' 3.0 /
+/
+WPIMULT
+  'OP_1' 0.8 -1 -1 -1 / -- all connections
+  'OP_2' 7.0 /
+/
+DATES             -- 7
+ 20 FEB 2014 /
+/
 END
 )";

@@ -1345,25 +1388,32 @@ END
     const auto& cs3 = schedule.getWell("OP_1", 3).getConnections();
     const auto& cs4 = schedule.getWell("OP_1", 4).getConnections();
     const auto& cs5 = schedule.getWell("OP_1", 5).getConnections();
+    const auto& cs6 = schedule.getWell("OP_1", 6).getConnections();
+    const auto& cs7 = schedule.getWell("OP_1", 7).getConnections();
     const auto& cs0_2 = schedule.getWell("OP_2", 0).getConnections();
     const auto& cs1_2 = schedule.getWell("OP_2", 1).getConnections();
     const auto& cs2_2 = schedule.getWell("OP_2", 2).getConnections();
+    const auto& cs7_2 = schedule.getWell("OP_2", 7).getConnections();

     for (size_t i = 0; i < cs1_2.size(); ++i ) {
         BOOST_CHECK_CLOSE(cs1_2.get(i).CF() / cs0_2.get(i).CF(), 7.0, 1.e-13);
         BOOST_CHECK_CLOSE(cs2_2.get(i).CF() / cs1_2.get(i).CF(), 1.0, 1.e-13);
+        BOOST_CHECK_CLOSE(cs7_2.get(i).CF() / cs0_2.get(i).CF(), 7.0, 1.e-13);
     }
     for (size_t i = 0; i < cs1.size(); ++i ) {
         BOOST_CHECK_CLOSE(cs1.get(i).CF() / cs0.get(i).CF(), 0.8, 1.e-13);
         BOOST_CHECK_CLOSE(cs2.get(i).CF() / cs1.get(i).CF(), 0.5, 1.e-13);
         BOOST_CHECK_CLOSE(cs3.get(i).CF() / cs0.get(i).CF(), 1.6, 1.e-13);
         BOOST_CHECK_CLOSE(cs4.get(i).CF() / cs0.get(i).CF(), 0.8, 1.e-13);
+        BOOST_CHECK_CLOSE(cs7.get(i).CF() / cs0.get(i).CF(), 0.8, 1.e-13);
     }

     for (size_t i = 0; i < 3; ++i) {
         BOOST_CHECK_CLOSE(cs5.get(i).CF() / cs0.get(i).CF(), 0.8, 1.e-13);
+        BOOST_CHECK_CLOSE(cs6.get(i).CF() / cs0.get(i).CF(), 0.8, 1.e-13);
     }
     BOOST_CHECK_CLOSE(cs5.get(3).CF() / cs0.get(3).CF(), 0.04, 1.e-13);
+    BOOST_CHECK_CLOSE(cs6.get(3).CF() / cs0.get(3).CF(), 0.04, 1.e-13);
 }

From 96ab0e036d9f21a632f1f7a8c3a3a097aeec0bc5 Mon Sep 17 00:00:00 2001
From: Kai Bao
Date: Wed, 6 Jul 2022 14:32:59 +0200
Subject: [PATCH 4/4] addressing review comments for PR#3067

---
 opm/input/eclipse/Schedule/Schedule.hpp       | 10 ++++----
 .../eclipse/Schedule/KeywordHandlers.cpp      | 25 ++++++++-----------
 src/opm/input/eclipse/Schedule/Schedule.cpp   | 11 ++++----
 .../share/keywords/000_Eclipse100/W/WPIMULT   | 15 ++++-------
 4 files changed, 26 insertions(+), 35 deletions(-)

diff --git a/opm/input/eclipse/Schedule/Schedule.hpp b/opm/input/eclipse/Schedule/Schedule.hpp
index 2433568dd..f695aca43 100644
--- a/opm/input/eclipse/Schedule/Schedule.hpp
+++ b/opm/input/eclipse/Schedule/Schedule.hpp
@@ -504,7 +504,7 @@ namespace Opm
             ErrorGuard& errors;
             SimulatorUpdate * sim_update;
             const std::unordered_map<std::string, double> * target_wellpi;
-            std::unordered_map<std::string, double>* wellpi_global_factor;
+            std::unordered_map<std::string, double>* wpimult_global_factor;
             const ScheduleGrid& grid;

             HandlerContext(const ScheduleBlock& block_,
@@ -517,7 +517,7 @@ namespace Opm
                            ErrorGuard& errors_,
                            SimulatorUpdate * sim_update_,
                            const std::unordered_map<std::string, double> * target_wellpi_,
-                           std::unordered_map<std::string, double>* wellpi_global_factor_ = nullptr)
+                           std::unordered_map<std::string, double>* wpimult_global_factor_)
                 : block(block_)
                 , keyword(keyword_)
                 , currentStep(currentStep_)
@@ -527,7 +527,7 @@ namespace Opm
                 , errors(errors_)
                 , sim_update(sim_update_)
                 , target_wellpi(target_wellpi_)
-                , wellpi_global_factor(wellpi_global_factor_)
+                , wpimult_global_factor(wpimult_global_factor_)
                 , grid(grid_)
             {}

@@ -596,7 +596,7 @@ namespace Opm
                            bool actionx_mode,
                            SimulatorUpdate* sim_update,
                            const std::unordered_map<std::string, double>* target_wellpi,
-                           std::unordered_map<std::string, double>*
wellpi_global_factor = nullptr);
+                           std::unordered_map<std::string, double>* wpimult_global_factor = nullptr);

         void prefetch_cell_properties(const ScheduleGrid& grid, const DeckKeyword& keyword);
         void store_wgnames(const DeckKeyword& keyword);
@@ -605,7 +605,7 @@ namespace Opm
         void invalidNamePattern( const std::string& namePattern, const HandlerContext& context) const;
         static std::string formatDate(std::time_t t);
         std::string simulationDays(std::size_t currentStep) const;
-        void applyGlobalWPIMULT( const std::unordered_map<std::string, double>& factors);
+        void applyGlobalWPIMULT( const std::unordered_map<std::string, double>& wpimult_global_factor);

         bool must_write_rst_file(std::size_t report_step) const;

diff --git a/src/opm/input/eclipse/Schedule/KeywordHandlers.cpp b/src/opm/input/eclipse/Schedule/KeywordHandlers.cpp
index 1abc8dcb6..dca365ad8 100644
--- a/src/opm/input/eclipse/Schedule/KeywordHandlers.cpp
+++ b/src/opm/input/eclipse/Schedule/KeywordHandlers.cpp
@@ -1778,16 +1778,13 @@ Well{0} entered with disallowed 'FIELD' parent group:
         // the I, J, K location and the completion number range.
         // A defaulted item is treated as negative, and
         // a negative input value is treated as defaulted.
-        auto defaultConCompRec = [] (const DeckRecord& rec)-> bool {
-            bool default_con_comp = true;
-            for (size_t i = 2; i < rec.size(); ++i) {
-                const auto& item = rec.getItem(i);
-                if (item.get<int>(0) >= 0) {
-                    default_con_comp = false;
-                    break;
-                }
-            }
-            return default_con_comp;
+        auto defaultConCompRec = [](const DeckRecord& wpimult)
+        {
+            return std::all_of(wpimult.begin() + 2, wpimult.end(),
+                               [](const DeckItem& item)
+                               {
+                                   return item.defaultApplied(0) || (item.get<int>(0) < 0);
+                               });
         };

         for (const auto& record : handlerContext.keyword) {
@@ -1801,13 +1798,13 @@ Well{0} entered with disallowed 'FIELD' parent group:
             // until the end of the report step.
            const bool default_con_comp = defaultConCompRec(record);
             if (default_con_comp) {
-                auto wellpi_global_factor = handlerContext.wellpi_global_factor;
-                if (!wellpi_global_factor) {
-                    throw std::runtime_error(" wellpi_global_factor is nullptr in function handleWPIMULT ");
+                auto wpimult_global_factor = handlerContext.wpimult_global_factor;
+                if (!wpimult_global_factor) {
+                    throw std::runtime_error(" wpimult_global_factor is nullptr in function handleWPIMULT ");
                 }
                 const auto scaling_factor = record.getItem("WELLPI").get<double>(0);
                 for (const auto& wname : well_names) {
-                    (*wellpi_global_factor)[wname] = scaling_factor;
+                    (*wpimult_global_factor)[wname] = scaling_factor;
                 }
                 continue;
             }
diff --git a/src/opm/input/eclipse/Schedule/Schedule.cpp b/src/opm/input/eclipse/Schedule/Schedule.cpp
index 357eaa1ca..bcaf5e62c 100644
--- a/src/opm/input/eclipse/Schedule/Schedule.cpp
+++ b/src/opm/input/eclipse/Schedule/Schedule.cpp
@@ -314,7 +314,7 @@ Schedule::Schedule(const Deck& deck, const EclipseState& es, const std::optional
                                  bool actionx_mode,
                                  SimulatorUpdate* sim_update,
                                  const std::unordered_map<std::string, double>* target_wellpi,
-                                 std::unordered_map<std::string, double>* wellpi_global_factor)
+                                 std::unordered_map<std::string, double>* wpimult_global_factor)
     {

         static const std::unordered_set<std::string> require_grid = {
             "COMPDAT",
@@ -323,7 +323,8 @@ Schedule::Schedule(const Deck& deck, const EclipseState& es, const std::optional
         };


-        HandlerContext handlerContext { block, keyword, grid, currentStep, matching_wells, actionx_mode, parseContext, errors, sim_update, target_wellpi, wellpi_global_factor};
+        HandlerContext handlerContext { block, keyword, grid, currentStep, matching_wells, actionx_mode, parseContext, errors, sim_update, target_wellpi,
+                                        wpimult_global_factor};
         /*
           The grid and fieldProps members create problems for reiterating the
           Schedule section. We therefor single them out very clearly here.
@@ -537,10 +538,8 @@ void Schedule::iterateScheduleSection(std::size_t load_start, std::size_t load_e
     }

-    void Schedule::applyGlobalWPIMULT( const std::unordered_map<std::string, double>& wellpi_global_factor) {
-        for (const auto& elem : wellpi_global_factor) {
-            const auto& well_name = elem.first;
-            const auto factor = elem.second;
+    void Schedule::applyGlobalWPIMULT( const std::unordered_map<std::string, double>& wpimult_global_factor) {
+        for (const auto& [well_name, factor] : wpimult_global_factor) {
             auto well = this->snapshots.back().wells(well_name);
             if (well.applyGlobalWPIMULT(factor)) {
                 this->snapshots.back().wells.update(std::move(well));
diff --git a/src/opm/input/eclipse/share/keywords/000_Eclipse100/W/WPIMULT b/src/opm/input/eclipse/share/keywords/000_Eclipse100/W/WPIMULT
index 38a6b0990..eaeee663a 100644
--- a/src/opm/input/eclipse/share/keywords/000_Eclipse100/W/WPIMULT
+++ b/src/opm/input/eclipse/share/keywords/000_Eclipse100/W/WPIMULT
@@ -15,28 +15,23 @@
     },
     {
         "name": "I",
-        "value_type": "INT",
-        "default" : -1
+        "value_type": "INT"
     },
     {
         "name": "J",
-        "value_type": "INT",
-        "default" : -1
+        "value_type": "INT"
     },
     {
         "name": "K",
-        "value_type": "INT",
-        "default" : -1
+        "value_type": "INT"
     },
     {
         "name": "FIRST",
-        "value_type": "INT",
-        "default" : -1
+        "value_type": "INT"
     },
     {
         "name": "LAST",
-        "value_type": "INT",
-        "default" : -1
+        "value_type": "INT"
     }
   ]
 }
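
Taken together, the series makes WPIMULT within one report step behave as follows: records whose connection/completion items are all defaulted (or negative) are only collected per well, with a later record for the same well overwriting an earlier one, and the surviving factor is applied once to all of the well's connections at the end of the report step; records with an explicit range are applied immediately, and their effects multiply. Below is a minimal, self-contained C++ sketch of that control flow. It is not OPM code: WpimultRecord, hasDefaultedRange and the plain std::unordered_map are invented stand-ins for DeckRecord, defaultConCompRec and the HandlerContext plumbing.

// Sketch of the "last defaulted record wins, deferred to end of report step"
// semantics. Compile with any C++17 compiler.
#include <iostream>
#include <optional>
#include <string>
#include <unordered_map>
#include <vector>

struct WpimultRecord {
    std::string well;
    double factor;
    // I, J, K, FIRST, LAST; std::nullopt models a defaulted item.
    std::vector<std::optional<int>> range;
};

// A record counts as "defaulted" when every range item is either defaulted
// or negative -- mirroring what defaultConCompRec() checks in the patch.
bool hasDefaultedRange(const WpimultRecord& rec) {
    for (const auto& item : rec.range)
        if (item.has_value() && *item >= 0)
            return false;
    return true;
}

int main() {
    // Two WPIMULT keywords in the same report step, as in report step 6 of
    // the new test: '2.0' and '0.8' are fully defaulted, 'K=4' is explicit.
    std::vector<std::vector<WpimultRecord>> keywords = {
        {{"OP_1", 2.0, {}}, {"OP_1", 0.10, {std::nullopt, std::nullopt, 4}}},
        {{"OP_1", 0.8, {}}, {"OP_1", 0.50, {std::nullopt, std::nullopt, 4}}},
    };

    // Factors from fully defaulted records are only collected here; a later
    // record for the same well overwrites an earlier one (2.0 -> 0.8).
    std::unordered_map<std::string, double> global_factor;

    for (const auto& keyword : keywords) {
        for (const auto& rec : keyword) {
            if (hasDefaultedRange(rec))
                global_factor[rec.well] = rec.factor;     // deferred
            else
                std::cout << "apply " << rec.factor << " now to matching "
                          << "connections of " << rec.well << "\n";
        }
    }

    // End of the report step: apply the surviving global factors once.
    for (const auto& [well, factor] : global_factor)
        std::cout << "apply " << factor << " to all connections of "
                  << well << "\n";
}

Running the sketch applies 0.10 and 0.50 immediately and 0.8 once at the end, so connection 4 ends up scaled by 0.10 * 0.50 * 0.8 = 0.04, matching the cs6 assertions in the test.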
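
The deferred application itself follows a copy-scale-update pattern: build a fresh connection set, scale each copied connection, then swap the new set into the well. A hedged sketch of that pattern under invented types (Connection and ConnectionSet here are stand-ins for Opm::Connection and Opm::WellConnections; only the scaleWellPi name mirrors the real call):

// Sketch of the copy-scale-update pattern used by applyGlobalWPIMULT().
#include <iostream>
#include <memory>
#include <vector>

struct Connection {
    double CF;                          // connection transmissibility factor
    void scaleWellPi(double f) { CF *= f; }
};

using ConnectionSet = std::vector<Connection>;

// Rebuild the connection set with the factor applied; the caller then swaps
// the new set in, analogous to Well::updateConnections() in the patch.
std::shared_ptr<ConnectionSet> scaled(const ConnectionSet& old, double factor) {
    auto fresh = std::make_shared<ConnectionSet>();
    for (auto c : old) {                // copy each connection by value
        c.scaleWellPi(factor);          // scale the copy only
        fresh->push_back(c);
    }
    return fresh;
}

int main() {
    ConnectionSet cons{{100.0}, {50.0}};
    auto updated = scaled(cons, 0.8);
    for (const auto& c : *updated)
        std::cout << c.CF << "\n";      // prints 80 and 40
}

Rebuilding the set instead of mutating in place keeps the well's previous snapshot untouched, which is why the real code returns whether an update happened so the caller can decide whether to store the new well object.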