Merge pull request #3067 from GitPaean/fixing_multiple_wpimult

fixing multiple WPIMULT records
Markus Blatt 2022-07-29 10:24:50 +02:00 committed by GitHub
commit 6561138236
6 changed files with 260 additions and 9 deletions

View File

@@ -504,6 +504,7 @@ namespace Opm
ErrorGuard& errors;
SimulatorUpdate * sim_update;
const std::unordered_map<std::string, double> * target_wellpi;
std::unordered_map<std::string, double>* wpimult_global_factor;
const ScheduleGrid& grid;
HandlerContext(const ScheduleBlock& block_,
@@ -515,7 +516,8 @@ namespace Opm
const ParseContext& parseContext_,
ErrorGuard& errors_,
SimulatorUpdate * sim_update_,
const std::unordered_map<std::string, double> * target_wellpi_)
const std::unordered_map<std::string, double> * target_wellpi_,
std::unordered_map<std::string, double>* wpimult_global_factor_)
: block(block_)
, keyword(keyword_)
, currentStep(currentStep_)
@@ -525,6 +527,7 @@ namespace Opm
, errors(errors_)
, sim_update(sim_update_)
, target_wellpi(target_wellpi_)
, wpimult_global_factor(wpimult_global_factor_)
, grid(grid_)
{}
@@ -586,12 +589,14 @@ namespace Opm
void handleKeyword(std::size_t currentStep,
const ScheduleBlock& block,
const DeckKeyword& keyword,
const ParseContext& parseContext, ErrorGuard& errors,
const ParseContext& parseContext,
ErrorGuard& errors,
const ScheduleGrid& grid,
const std::vector<std::string>& matching_wells,
bool runtime,
SimulatorUpdate * sim_update,
const std::unordered_map<std::string, double> * target_wellpi);
bool actionx_mode,
SimulatorUpdate* sim_update,
const std::unordered_map<std::string, double>* target_wellpi,
std::unordered_map<std::string, double>* wpimult_global_factor = nullptr);
void prefetch_cell_properties(const ScheduleGrid& grid, const DeckKeyword& keyword);
void store_wgnames(const DeckKeyword& keyword);
@@ -600,6 +605,7 @@ namespace Opm
void invalidNamePattern( const std::string& namePattern, const HandlerContext& context) const;
static std::string formatDate(std::time_t t);
std::string simulationDays(std::size_t currentStep) const;
void applyGlobalWPIMULT( const std::unordered_map<std::string, double>& wpimult_global_factor);
bool must_write_rst_file(std::size_t report_step) const;
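For orientation, here is a minimal sketch of the calling contract this header change implies: the caller owns the map, hands it down as a raw pointer, and the nullptr default covers call sites (such as ACTIONX handling) that do not collect deferred factors. Everything except the parameter name wpimult_global_factor is an assumed stand-in, not the real handleKeyword signature:

#include <string>
#include <unordered_map>

// Hypothetical stand-in for Schedule::handleKeyword(); only the final
// parameter mirrors the diff. nullptr means "do not collect factors".
void handle_keyword_sketch(const std::string& keyword,
                           std::unordered_map<std::string, double>* wpimult_global_factor = nullptr)
{
    if (keyword == "WPIMULT" && wpimult_global_factor != nullptr) {
        // A WPIMULT record with defaulted location items would be
        // recorded here instead of being applied immediately.
        (*wpimult_global_factor)["OP_1"] = 0.8;
    }
}

int main()
{
    std::unordered_map<std::string, double> factors; // owned by the caller
    handle_keyword_sketch("WPIMULT", &factors);      // collection enabled
    handle_keyword_sketch("WPIMULT");                // disabled, e.g. the ACTIONX path
}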

View File

@@ -626,6 +626,7 @@ public:
bool handleWELOPENConnections(const DeckRecord& record, Connection::State status);
bool handleCOMPLUMP(const DeckRecord& record);
bool handleWPIMULT(const DeckRecord& record);
bool applyGlobalWPIMULT(double scale_factor);
void filterConnections(const ActiveGridCells& grid);
ProductionControls productionControls(const SummaryState& st) const;

View File

@@ -1773,10 +1773,42 @@ Well{0} entered with disallowed 'FIELD' parent group:
}
void Schedule::handleWPIMULT(HandlerContext& handlerContext) {
// Items three through seven of a WPIMULT record hold numbers giving the
// I, J, K location and the completion number range.
// A defaulted item is treated as negative, and a negative input value
// is treated as defaulted.
auto defaultConCompRec = [](const DeckRecord& wpimult)
{
return std::all_of(wpimult.begin() + 2, wpimult.end(),
[](const DeckItem& item)
{
return item.defaultApplied(0) || (item.get<int>(0) < 0);
});
};
for (const auto& record : handlerContext.keyword) {
const std::string& wellNamePattern = record.getItem("WELL").getTrimmedString(0);
const auto& well_names = this->wellNames(wellNamePattern, handlerContext);
// A record with defaulted connection and completion information is not applied
// immediately, because only the last such record should take effect. Since more
// WPIMULT keywords may follow and we cannot yet know whether this record is the
// last one, we only store its scaling factor here and apply it later.
const bool default_con_comp = defaultConCompRec(record);
if (default_con_comp) {
auto wpimult_global_factor = handlerContext.wpimult_global_factor;
if (!wpimult_global_factor) {
throw std::runtime_error("wpimult_global_factor is nullptr in function handleWPIMULT");
}
const auto scaling_factor = record.getItem("WELLPI").get<double>(0);
for (const auto& wname : well_names) {
(*wpimult_global_factor)[wname] = scaling_factor;
}
continue;
}
// the record with non-defaulted connection and completion information will be applied immediately
for (const auto& wname : well_names) {
auto well = this->snapshots.back().wells( wname );
if (well.handleWPIMULT(record))
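The net effect per report step: defaulted records compete and only the last one per well survives (plain map overwrite), while fully specified records are applied immediately and therefore stack multiplicatively. A self-contained sketch of the two paths; the record layout and all names here are illustrative, not the real DeckRecord API:

#include <iostream>
#include <string>
#include <unordered_map>
#include <vector>

// Illustrative record: well name, factor, and whether the I/J/K and
// completion items (record items 3..7) were all defaulted or negative.
struct WpimultRecord {
    std::string well;
    double factor;
    bool default_con_comp;
};

int main()
{
    std::unordered_map<std::string, double> global_factor;
    double op1_cf = 100.0; // connection factor of some OP_1 connection

    const std::vector<WpimultRecord> records = {
        {"OP_1", 2.0, true},   // defaulted: recorded, later overwritten
        {"OP_1", 0.8, true},   // defaulted: last one wins
        {"OP_1", 0.5, false},  // fully specified: applied immediately
    };

    for (const auto& rec : records) {
        if (rec.default_con_comp)
            global_factor[rec.well] = rec.factor; // overwrite, do not multiply
        else
            op1_cf *= rec.factor;                 // immediate, cumulative
    }

    op1_cf *= global_factor.at("OP_1"); // deferred application at end of step
    std::cout << op1_cf << '\n';        // 100 * 0.5 * 0.8 = 40
}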

View File

@@ -313,8 +313,10 @@ Schedule::Schedule(const Deck& deck, const EclipseState& es, const std::optional
const ScheduleGrid& grid,
const std::vector<std::string>& matching_wells,
bool actionx_mode,
SimulatorUpdate * sim_update,
const std::unordered_map<std::string, double> * target_wellpi) {
SimulatorUpdate* sim_update,
const std::unordered_map<std::string, double>* target_wellpi,
std::unordered_map<std::string, double>* wpimult_global_factor)
{
static const std::unordered_set<std::string> require_grid = {
"COMPDAT",
@@ -322,7 +324,8 @@ Schedule::Schedule(const Deck& deck, const EclipseState& es, const std::optional
};
HandlerContext handlerContext { block, keyword, grid, currentStep, matching_wells, actionx_mode, parseContext, errors, sim_update, target_wellpi};
HandlerContext handlerContext { block, keyword, grid, currentStep, matching_wells, actionx_mode, parseContext, errors, sim_update, target_wellpi,
wpimult_global_factor};
/*
The grid and fieldProps members create problems for reiterating the
Schedule section. We therefore single them out very clearly here.
@@ -475,6 +478,7 @@ void Schedule::iterateScheduleSection(std::size_t load_start, std::size_t load_e
}
this->create_next(block);
std::unordered_map<std::string, double> wellpi_global_factor;
while (true) {
if (keyword_index == block.size())
break;
@@ -521,15 +525,26 @@ void Schedule::iterateScheduleSection(std::size_t load_start, std::size_t load_e
{},
false,
nullptr,
target_wellpi);
target_wellpi,
&wellpi_global_factor);
keyword_index++;
}
this->applyGlobalWPIMULT(wellpi_global_factor);
this->end_report(report_step);
if (this->must_write_rst_file(report_step)) {
this->restart_output.addRestartOutput(report_step);
}
} // for (auto report_step = load_start
}
void Schedule::applyGlobalWPIMULT( const std::unordered_map<std::string, double>& wpimult_global_factor) {
for (const auto& [well_name, factor] : wpimult_global_factor) {
auto well = this->snapshots.back().wells(well_name);
if (well.applyGlobalWPIMULT(factor)) {
this->snapshots.back().wells.update(std::move(well));
}
}
}
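Two details of this hunk are worth spelling out. The map is scoped to one report step: declared after create_next(block), filled while that step's keywords are handled, and drained exactly once by applyGlobalWPIMULT before end_report, so deferred factors cannot leak into the next step. And because wells live by value in the snapshot, the application uses a copy-modify-update pattern. A condensed sketch of both, with simplified stand-in types (nothing here is the real OPM API):

#include <string>
#include <unordered_map>
#include <utility>

struct WellSketch {
    double cf = 100.0;
    // Returns true when the well was actually modified.
    bool applyGlobalWPIMULT(double f) { cf *= f; return true; }
};

// Stand-in for one iteration of the report-step loop in
// Schedule::iterateScheduleSection().
void report_step_sketch(std::unordered_map<std::string, WellSketch>& wells)
{
    // Fresh map per report step; deferred WPIMULT factors cannot
    // leak across steps.
    std::unordered_map<std::string, double> wellpi_global_factor;

    // ... handleKeyword(..., &wellpi_global_factor) runs for every
    // keyword of the step and may record factors ...
    wellpi_global_factor["OP_1"] = 0.8; // stand-in for the handler's effect

    // Applied once, after all keywords of the step are handled.
    for (const auto& [name, factor] : wellpi_global_factor) {
        auto well = wells.at(name);        // copy, like snapshots.back().wells(name)
        if (well.applyGlobalWPIMULT(factor))
            wells[name] = std::move(well); // write back only on change
    }
}

int main()
{
    std::unordered_map<std::string, WellSketch> wells{{"OP_1", {}}};
    report_step_sketch(wells);
}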

View File

@@ -1166,6 +1166,18 @@ bool Well::handleWPIMULT(const DeckRecord& record) {
}
bool Opm::Well::applyGlobalWPIMULT(const double scaling_factor)
{
auto new_connections = std::make_shared<WellConnections>(this->connections->ordering(), this->headI, this->headJ);
for (auto c : *this->connections) {
c.scaleWellPi(scaling_factor);
new_connections->add(c);
}
return this->updateConnections(std::move(new_connections), false);
}
void Well::updateSegments(std::shared_ptr<WellSegments> segments_arg) {
this->segments = std::move(segments_arg);
this->updateRefDepth( this->segments->depthTopSegment() );
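The diff rebuilds the connection set rather than mutating it: each connection is copied by value, scaled, and added to a fresh WellConnections. The effect on the connection transmissibility factor checked by the test below is simply CF_new = factor * CF_old. A minimal numeric sketch under that assumption (whether scaleWellPi touches more than the CF is not shown here):

#include <cassert>
#include <cmath>
#include <vector>

struct ConnSketch {
    double cf;
    void scaleWellPi(double f) { cf *= f; } // assumed CF effect of scaleWellPi
};

int main()
{
    const std::vector<ConnSketch> old_conns = {{100.0}, {100.0}};
    std::vector<ConnSketch> new_conns;

    for (auto c : old_conns) {  // copy by value, as in the range-for above
        c.scaleWellPi(0.8);
        new_conns.push_back(c);
    }
    assert(std::abs(new_conns.front().cf - 80.0) < 1e-12);
}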

View File

@@ -1234,6 +1234,191 @@ COMPDAT
BOOST_CHECK_EQUAL(sim_time1.year(), 2011);
}
BOOST_AUTO_TEST_CASE(createDeckWithMultipleWPIMULT) {
std::string input = R"(
START -- 0
19 JUN 2007 /
GRID
PORO
1000*0.1 /
PERMX
1000*1 /
PERMY
1000*0.1 /
PERMZ
1000*0.01 /
SCHEDULE
WELSPECS
'OP_1' 'OP' 9 9 1* 'OIL' 1* 1* 1* 1* 1* 1* 1* /
/
WELSPECS
'OP_2' 'OP' 8 8 1* 'OIL' 1* 1* 1* 1* 1* 1* 1* /
/
COMPDAT
-- WELL I J K1 K2 Sat. CF DIAM KH SKIN ND DIR Ro
'OP_1' 9 9 1 1 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 2 2 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 3 3 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 4 4 'OPEN' 1* 100 2* 2* 'X' 22.100 /
/
COMPDAT
-- WELL I J K1 K2 Sat. CF DIAM KH SKIN ND DIR Ro
'OP_2' 8 8 1 1 'OPEN' 1* 50 2* 2* 'X' 22.100 /
'OP_2' 8 8 2 2 'OPEN' 1* 50 2* 2* 'X' 22.100 /
'OP_2' 8 8 3 3 'OPEN' 1* 50 2* 2* 'X' 22.100 /
/
DATES -- 0
20 JAN 2009 /
/
WPIMULT
'OP_1' 2.0 /
'OP_2' 3.0 /
'OP_1' 0.8 -1 -1 -1 / -- all connections
'OP_2' 7.0 /
/
DATES -- 1
20 JAN 2010 /
/
WPIMULT
'OP_1' 0.5 /
/
DATES -- 2
20 JAN 2011 /
/
COMPDAT
-- WELL I J K1 K2 Sat. CF DIAM KH SKIN ND DIR Ro
'OP_1' 9 9 1 1 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 2 2 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 3 3 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 4 4 'OPEN' 1* 100 2* 2* 'X' 22.100 /
/
WPIMULT
'OP_1' 2.0 /
'OP_1' 0.8 0 0 0 / -- all connections but not defaulted
/
DATES -- 3
20 JAN 2012 /
/
COMPDAT
-- WELL I J K1 K2 Sat. CF DIAM KH SKIN ND DIR Ro
'OP_1' 9 9 1 1 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 2 2 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 3 3 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 4 4 'OPEN' 1* 100 2* 2* 'X' 22.100 /
/
WPIMULT
'OP_1' 2.0 /
'OP_1' 0.8 / -- all connections
/
DATES -- 4
20 JAN 2013 /
/
COMPDAT
-- WELL I J K1 K2 Sat. CF DIAM KH SKIN ND DIR Ro
'OP_1' 9 9 1 1 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 2 2 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 3 3 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 4 4 'OPEN' 1* 100 2* 2* 'X' 22.100 /
/
WPIMULT
'OP_1' 2.0 /
'OP_1' 0.8 / -- all connections
'OP_1' 0.50 2* 4 /
'OP_1' 0.10 2* 4 /
/
DATES -- 5
20 JAN 2014 /
/
COMPDAT
-- WELL I J K1 K2 Sat. CF DIAM KH SKIN ND DIR Ro
'OP_1' 9 9 1 1 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 2 2 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 3 3 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 4 4 'OPEN' 1* 100 2* 2* 'X' 22.100 /
/
WPIMULT
'OP_1' 2.0 /
'OP_1' 0.10 2* 4 /
/
WPIMULT
'OP_1' 0.8 / -- all connections
'OP_1' 0.50 2* 4 /
/
DATES -- 6
20 FEB 2014 /
/
COMPDAT
-- WELL I J K1 K2 Sat. CF DIAM KH SKIN ND DIR Ro
'OP_1' 9 9 1 1 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 2 2 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 3 3 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 4 4 'OPEN' 1* 100 2* 2* 'X' 22.100 /
/
COMPDAT
-- WELL I J K1 K2 Sat. CF DIAM KH SKIN ND DIR Ro
'OP_2' 8 8 1 1 'OPEN' 1* 50 2* 2* 'X' 22.100 /
'OP_2' 8 8 2 2 'OPEN' 1* 50 2* 2* 'X' 22.100 /
'OP_2' 8 8 3 3 'OPEN' 1* 50 2* 2* 'X' 22.100 /
/
WPIMULT
'OP_1' 2.0 /
'OP_2' 3.0 /
/
WPIMULT
'OP_1' 0.8 -1 -1 -1 / -- all connections
'OP_2' 7.0 /
/
DATES -- 7
20 FEB 2014 /
/
END
)";
const auto& schedule = make_schedule(input);
const auto& cs0 = schedule.getWell("OP_1", 0).getConnections();
const auto& cs1 = schedule.getWell("OP_1", 1).getConnections();
const auto& cs2 = schedule.getWell("OP_1", 2).getConnections();
const auto& cs3 = schedule.getWell("OP_1", 3).getConnections();
const auto& cs4 = schedule.getWell("OP_1", 4).getConnections();
const auto& cs5 = schedule.getWell("OP_1", 5).getConnections();
const auto& cs6 = schedule.getWell("OP_1", 6).getConnections();
const auto& cs7 = schedule.getWell("OP_1", 7).getConnections();
const auto& cs0_2 = schedule.getWell("OP_2", 0).getConnections();
const auto& cs1_2 = schedule.getWell("OP_2", 1).getConnections();
const auto& cs2_2 = schedule.getWell("OP_2", 2).getConnections();
const auto& cs7_2 = schedule.getWell("OP_2", 7).getConnections();
for (size_t i = 0; i < cs1_2.size(); ++i ) {
BOOST_CHECK_CLOSE(cs1_2.get(i).CF() / cs0_2.get(i).CF(), 7.0, 1.e-13);
BOOST_CHECK_CLOSE(cs2_2.get(i).CF() / cs1_2.get(i).CF(), 1.0, 1.e-13);
BOOST_CHECK_CLOSE(cs7_2.get(i).CF() / cs0_2.get(i).CF(), 7.0, 1.e-13);
}
for (size_t i = 0; i < cs1.size(); ++i ) {
BOOST_CHECK_CLOSE(cs1.get(i).CF() / cs0.get(i).CF(), 0.8, 1.e-13);
BOOST_CHECK_CLOSE(cs2.get(i).CF() / cs1.get(i).CF(), 0.5, 1.e-13);
BOOST_CHECK_CLOSE(cs3.get(i).CF() / cs0.get(i).CF(), 1.6, 1.e-13);
BOOST_CHECK_CLOSE(cs4.get(i).CF() / cs0.get(i).CF(), 0.8, 1.e-13);
BOOST_CHECK_CLOSE(cs7.get(i).CF() / cs0.get(i).CF(), 0.8, 1.e-13);
}
for (size_t i = 0; i < 3; ++i) {
BOOST_CHECK_CLOSE(cs5.get(i).CF() / cs0.get(i).CF(), 0.8, 1.e-13);
BOOST_CHECK_CLOSE(cs6.get(i).CF() / cs0.get(i).CF(), 0.8, 1.e-13);
}
BOOST_CHECK_CLOSE(cs5.get(3).CF() / cs0.get(3).CF(), 0.04, 1.e-13);
BOOST_CHECK_CLOSE(cs6.get(3).CF() / cs0.get(3).CF(), 0.04, 1.e-13);
}
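The less obvious expectations follow from the last-defaulted-record-wins rule. At report step 5, the defaulted records 2.0 and 0.8 compete and only 0.8 survives as the global factor, while '0.50 2* 4' and '0.10 2* 4' target completion 4 and apply immediately and cumulatively, so connection 4 ends at 0.8 × 0.5 × 0.1 = 0.04 of its COMPDAT value and the remaining connections at 0.8. Step 6 splits the same records over two WPIMULT keywords and must give the same result. A one-line check of that arithmetic:

#include <cassert>
#include <cmath>

int main()
{
    // Steps 5 and 6: last defaulted factor (0.8) times the two immediate,
    // cumulative completion-4 factors (0.5 and 0.1).
    assert(std::abs(0.8 * 0.5 * 0.1 - 0.04) < 1e-13);
}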
BOOST_AUTO_TEST_CASE(WELSPECS_WGNAME_SPACE) {
Opm::Parser parser;
const std::string input = R"(