diff --git a/opm/input/eclipse/Schedule/Schedule.hpp b/opm/input/eclipse/Schedule/Schedule.hpp index 26edcd3d3..778a0d47a 100644 --- a/opm/input/eclipse/Schedule/Schedule.hpp +++ b/opm/input/eclipse/Schedule/Schedule.hpp @@ -499,8 +499,11 @@ namespace Opm SimulatorUpdate * sim_update; const std::unordered_map * target_wellpi; std::unordered_map* wpimult_global_factor; + std::set *welsegs_wells, *compsegs_wells; const ScheduleGrid& grid; + /// \param welsegs_wells All wells with a WELSEGS entry for checks. + /// \param compsegs_wells All wells with a COMPSEGS entry for checks. HandlerContext(const ScheduleBlock& block_, const DeckKeyword& keyword_, const ScheduleGrid& grid_, @@ -511,7 +514,9 @@ namespace Opm ErrorGuard& errors_, SimulatorUpdate * sim_update_, const std::unordered_map * target_wellpi_, - std::unordered_map* wpimult_global_factor_) + std::unordered_map* wpimult_global_factor_, + std::set* welsegs_wells_, + std::set* compsegs_wells_) : block(block_) , keyword(keyword_) , currentStep(currentStep_) @@ -522,6 +527,8 @@ namespace Opm , sim_update(sim_update_) , target_wellpi(target_wellpi_) , wpimult_global_factor(wpimult_global_factor_) + , welsegs_wells(welsegs_wells_) + , compsegs_wells(compsegs_wells_) , grid(grid_) {} @@ -530,6 +537,20 @@ namespace Opm this->sim_update->affected_wells.insert(well_name); } + /// \brief Mark that the well occurred in a WELSEGS keyword + void welsegs_handled(const std::string& well_name) + { + if (welsegs_wells) + welsegs_wells->insert(well_name); + } + + /// \brief Mark that the well occurred in a COMPSEGS keyword + void compsegs_handled(const std::string& well_name) + { + if (compsegs_wells) + compsegs_wells->insert(well_name); + } + }; // Please update the member functions @@ -586,6 +607,8 @@ namespace Opm void addWell(const std::string& wellName, const DeckRecord& record, std::size_t timeStep, Connection::Order connection_order); void checkIfAllConnectionsIsShut(std::size_t currentStep); void end_report(std::size_t
report_step); + /// \param welsegs_wells All wells with a WELSEGS entry for checks. + /// \param compsegs_wells All wells with a COMPSEGS entry for checks. void handleKeyword(std::size_t currentStep, const ScheduleBlock& block, const DeckKeyword& keyword, @@ -596,7 +619,9 @@ namespace Opm bool actionx_mode, SimulatorUpdate* sim_update, const std::unordered_map* target_wellpi, - std::unordered_map* wpimult_global_factor = nullptr); + std::unordered_map* wpimult_global_factor = nullptr, + std::set* welsegs_wells = nullptr, + std::set* compsegs_wells = nullptr); void prefetch_cell_properties(const ScheduleGrid& grid, const DeckKeyword& keyword); void store_wgnames(const DeckKeyword& keyword); diff --git a/src/opm/input/eclipse/Schedule/KeywordHandlers.cpp b/src/opm/input/eclipse/Schedule/KeywordHandlers.cpp index 6d1440acb..dd35e6c7a 100644 --- a/src/opm/input/eclipse/Schedule/KeywordHandlers.cpp +++ b/src/opm/input/eclipse/Schedule/KeywordHandlers.cpp @@ -229,6 +229,8 @@ File {} line {}.)", wname, location.keyword, location.filename, location.lineno) if (well.handleCOMPSEGS(handlerContext.keyword, handlerContext.grid, handlerContext.parseContext, handlerContext.errors)) this->snapshots.back().wells.update( std::move(well) ); + + handlerContext.compsegs_handled(wname); } void Schedule::handleDRSDT(HandlerContext& handlerContext) { @@ -1390,6 +1392,7 @@ File {} line {}.)", wname, location.keyword, location.filename, location.lineno) auto well = this->snapshots.back().wells.get(wname); if (well.handleWELSEGS(handlerContext.keyword)) this->snapshots.back().wells.update( std::move(well) ); + handlerContext.welsegs_handled(wname); } else { const auto& location = handlerContext.keyword.location(); if (this->action_wgnames.has_well(wname)) { diff --git a/src/opm/input/eclipse/Schedule/Schedule.cpp b/src/opm/input/eclipse/Schedule/Schedule.cpp index 99f60238f..f503b70b8 100644 --- a/src/opm/input/eclipse/Schedule/Schedule.cpp +++ b/src/opm/input/eclipse/Schedule/Schedule.cpp
@@ -324,7 +324,9 @@ Schedule::Schedule(const Deck& deck, const EclipseState& es, const std::optional bool actionx_mode, SimulatorUpdate* sim_update, const std::unordered_map* target_wellpi, - std::unordered_map* wpimult_global_factor) + std::unordered_map* wpimult_global_factor, + std::set* welsegs_wells, + std::set* compsegs_wells) { static const std::unordered_set require_grid = { @@ -333,8 +335,9 @@ Schedule::Schedule(const Deck& deck, const EclipseState& es, const std::optional }; - HandlerContext handlerContext { block, keyword, grid, currentStep, matching_wells, actionx_mode, parseContext, errors, sim_update, target_wellpi, - wpimult_global_factor}; + HandlerContext handlerContext { block, keyword, grid, currentStep, matching_wells, actionx_mode, + parseContext, errors, sim_update, target_wellpi, + wpimult_global_factor, welsegs_wells, compsegs_wells}; /* The grid and fieldProps members create problems for reiterating the Schedule section. We therefor single them out very clearly here. @@ -472,7 +475,43 @@ private: return ScheduleLogger::Stream::Info; } } +} // end namespace Opm +namespace +{ +/// \brief Check whether each MS well has COMPSEGS entry and issue error if not.
+/// \param welsegs All wells with a WELSEGS entry +/// \param compsegs All wells with a COMPSEGS entry +/// \param block_location location where recent DATES/TSTEP started +/// \param last_location Last location read in this block +void check_compsegs_consistency(std::set& welsegs, std::set& compsegs, + const Opm::KeywordLocation& block_location, + const Opm::KeywordLocation& last_location) +{ + std::vector difference; + difference.reserve(welsegs.size()); + std::set_difference(welsegs.begin(), welsegs.end(), + compsegs.begin(), compsegs.end(), + std::back_inserter(difference)); + if (difference.size()) { + std::string wells = "well"; + if (difference.size()>1) { + wells.append("s"); + } + + for(const auto& missing_wells: difference) { + wells.append(" "+missing_wells); + } + auto msg = fmt::format("Missing COMPSEGS keyword for {} between {} line {} and {} line {}.", wells, + block_location.filename, block_location.lineno, + last_location.filename, last_location.lineno); + throw Opm::OpmInputError(msg, last_location); + } +} +}// end anonymous namespace + +namespace Opm +{ void Schedule::iterateScheduleSection(std::size_t load_start, std::size_t load_end, const ParseContext& parseContext, ErrorGuard& errors, @@ -529,6 +568,8 @@ void Schedule::iterateScheduleSection(std::size_t load_start, std::size_t load_e location.lineno)); } + std::set welsegs_wells, compsegs_wells; + for (auto report_step = load_start; report_step < load_end; report_step++) { std::size_t keyword_index = 0; auto& block = this->m_sched_deck[report_step]; @@ -558,13 +599,16 @@ void Schedule::iterateScheduleSection(std::size_t load_start, std::size_t load_e this->create_next(block); std::unordered_map wpimult_global_factor; + + auto last_location = block.location(); + while (true) { if (keyword_index == block.size()) break; const auto& keyword = block[keyword_index]; const auto& location = keyword.location(); - logger.location(keyword.location()); + logger.location(location); if (keyword.is()) {
Action::ActionX action(keyword, @@ -588,6 +632,7 @@ void Schedule::iterateScheduleSection(std::size_t load_start, std::size_t load_e std::string msg_fmt = fmt::format("The keyword {} is not supported in the ACTIONX block", action_keyword.name()); parseContext.handleError( ParseContext::ACTIONX_ILLEGAL_KEYWORD, msg_fmt, action_keyword.location(), errors); } + last_location = action_keyword.location(); } this->addACTIONX(action); keyword_index++; @@ -605,10 +650,14 @@ void Schedule::iterateScheduleSection(std::size_t load_start, std::size_t load_e false, nullptr, target_wellpi, - &wpimult_global_factor); + &wpimult_global_factor, + &welsegs_wells, + &compsegs_wells); + last_location = location; keyword_index++; } + check_compsegs_consistency(welsegs_wells, compsegs_wells, block.location(), last_location); this->applyGlobalWPIMULT(wpimult_global_factor); this->end_report(report_step);