multiple WPIMULT keywords within one report step is handled

and ScheduleTests is updated with tests
This commit is contained in:
Kai Bao 2022-07-01 14:45:59 +02:00
parent ffa12e5d09
commit b87867d144
6 changed files with 119 additions and 46 deletions

View File

@ -504,6 +504,7 @@ namespace Opm
ErrorGuard& errors;
SimulatorUpdate * sim_update;
const std::unordered_map<std::string, double> * target_wellpi;
std::unordered_map<std::string, double>* wellpi_global_factor;
const ScheduleGrid& grid;
HandlerContext(const ScheduleBlock& block_,
@ -515,7 +516,8 @@ namespace Opm
const ParseContext& parseContext_,
ErrorGuard& errors_,
SimulatorUpdate * sim_update_,
const std::unordered_map<std::string, double> * target_wellpi_)
const std::unordered_map<std::string, double> * target_wellpi_,
std::unordered_map<std::string, double>* wellpi_global_factor_ = nullptr)
: block(block_)
, keyword(keyword_)
, currentStep(currentStep_)
@ -525,6 +527,7 @@ namespace Opm
, errors(errors_)
, sim_update(sim_update_)
, target_wellpi(target_wellpi_)
, wellpi_global_factor(wellpi_global_factor_)
, grid(grid_)
{}
@ -586,12 +589,14 @@ namespace Opm
void handleKeyword(std::size_t currentStep,
const ScheduleBlock& block,
const DeckKeyword& keyword,
const ParseContext& parseContext, ErrorGuard& errors,
const ParseContext& parseContext,
ErrorGuard& errors,
const ScheduleGrid& grid,
const std::vector<std::string>& matching_wells,
bool runtime,
SimulatorUpdate * sim_update,
const std::unordered_map<std::string, double> * target_wellpi);
bool actionx_mode,
SimulatorUpdate* sim_update,
const std::unordered_map<std::string, double>* target_wellpi,
std::unordered_map<std::string, double>* wellpi_global_factor = nullptr);
void prefetch_cell_properties(const ScheduleGrid& grid, const DeckKeyword& keyword);
void store_wgnames(const DeckKeyword& keyword);
@ -600,6 +605,7 @@ namespace Opm
void invalidNamePattern( const std::string& namePattern, const HandlerContext& context) const;
static std::string formatDate(std::time_t t);
std::string simulationDays(std::size_t currentStep) const;
void applyGlobalWPIMULT( const std::unordered_map<std::string, double>& factors);
bool must_write_rst_file(std::size_t report_step) const;

View File

@ -626,6 +626,7 @@ public:
bool handleWELOPENConnections(const DeckRecord& record, Connection::State status);
bool handleCOMPLUMP(const DeckRecord& record);
bool handleWPIMULT(const DeckRecord& record);
bool applyGlobalWPIMULT(double scale_factor);
void filterConnections(const ActiveGridCells& grid);
ProductionControls productionControls(const SummaryState& st) const;

View File

@ -1779,53 +1779,41 @@ Well{0} entered with disallowed 'FIELD' parent group:
// When defaulted, it assumes it is negative
// When inputting a negative value, it assumes it is defaulted.
auto defaultConCompRec = [] (const DeckRecord& rec)-> bool {
bool default_connections = true;
bool default_con_comp = true;
for (size_t i = 2; i < rec.size(); ++i) {
const auto& item = rec.getItem(i);
if (item.get<int>(0) >= 0) {
default_connections = false;
default_con_comp = false;
break;
}
}
return default_connections;
return default_con_comp;
};
auto lastRecordWithDefaultConnections = [&defaultConCompRec, this, &handlerContext] (const DeckKeyword& keyword) {
std::unordered_map<std::string, size_t> last_index_default_cons_comps;
for (size_t i = 0; i < keyword.size(); ++i) {
const auto& record = keyword.getRecord(i);
if (defaultConCompRec(record)) {
const std::string& wellNamePattern = record.getItem("WELL").getTrimmedString(0);
const auto& well_names = this->wellNames(wellNamePattern, handlerContext);
for (const auto& wname : well_names) {
last_index_default_cons_comps[wname] = i;
}
}
}
return last_index_default_cons_comps;
};
const auto last_index_default_cons_comps = lastRecordWithDefaultConnections(handlerContext.keyword);
for (size_t i_rec = 0; i_rec < handlerContext.keyword.size(); ++i_rec) {
const auto& record = handlerContext.keyword.getRecord(i_rec);
// whether this record has defaulted connection and completion information
const bool default_con_comp = defaultConCompRec(record);
for (const auto& record : handlerContext.keyword) {
const std::string& wellNamePattern = record.getItem("WELL").getTrimmedString(0);
const auto& well_names = this->wellNames(wellNamePattern, handlerContext);
for (const auto& wname : well_names) {
// for records with defaulted connection and completion information, we only use the last record for that well
if (default_con_comp) {
const auto search = last_index_default_cons_comps.find(wname);
if (search != last_index_default_cons_comps.end() && i_rec < search->second) {
// it is not the last record with defaulted connection and completion information for that well
// we skip this record for this well
continue;
}
// for the record has defaulted connection and completion information, we do not apply it immediately
// because we only need to apply the last record with defaulted connection and completion information
// as a result, we here only record the information of the record with defaulted connection and completion
// information without applying, because there might be multiple WPIMULT keywords here, and we do not know
// whether it is the last one.
const bool default_con_comp = defaultConCompRec(record);
if (default_con_comp) {
auto wellpi_global_factor = handlerContext.wellpi_global_factor;
if (!wellpi_global_factor) {
throw std::runtime_error(" wellpi_global_factor is nullptr in function handleWPIMULT ");
}
const auto scaling_factor = record.getItem("WELLPI").get<double>(0);
for (const auto& wname : well_names) {
(*wellpi_global_factor)[wname] = scaling_factor;
}
continue;
}
// the record with non-defaulted connection and completion information will be applied immediately
for (const auto& wname : well_names) {
auto well = this->snapshots.back().wells( wname );
if (well.handleWPIMULT(record))
this->snapshots.back().wells.update( std::move(well));

View File

@ -312,8 +312,10 @@ Schedule::Schedule(const Deck& deck, const EclipseState& es, const std::optional
const ScheduleGrid& grid,
const std::vector<std::string>& matching_wells,
bool actionx_mode,
SimulatorUpdate * sim_update,
const std::unordered_map<std::string, double> * target_wellpi) {
SimulatorUpdate* sim_update,
const std::unordered_map<std::string, double>* target_wellpi,
std::unordered_map<std::string, double>* wellpi_global_factor)
{
static const std::unordered_set<std::string> require_grid = {
"COMPDAT",
@ -321,7 +323,7 @@ Schedule::Schedule(const Deck& deck, const EclipseState& es, const std::optional
};
HandlerContext handlerContext { block, keyword, grid, currentStep, matching_wells, actionx_mode, parseContext, errors, sim_update, target_wellpi};
HandlerContext handlerContext { block, keyword, grid, currentStep, matching_wells, actionx_mode, parseContext, errors, sim_update, target_wellpi, wellpi_global_factor};
/*
The grid and fieldProps members create problems for reiterating the
Schedule section. We therefor single them out very clearly here.
@ -474,6 +476,7 @@ void Schedule::iterateScheduleSection(std::size_t load_start, std::size_t load_e
}
this->create_next(block);
std::unordered_map<std::string, double> wellpi_global_factor;
while (true) {
if (keyword_index == block.size())
break;
@ -520,15 +523,28 @@ void Schedule::iterateScheduleSection(std::size_t load_start, std::size_t load_e
{},
false,
nullptr,
target_wellpi);
target_wellpi,
&wellpi_global_factor);
keyword_index++;
}
this->applyGlobalWPIMULT(wellpi_global_factor);
this->end_report(report_step);
if (this->must_write_rst_file(report_step)) {
this->restart_output.addRestartOutput(report_step);
}
} // for (auto report_step = load_start
}
void Schedule::applyGlobalWPIMULT( const std::unordered_map<std::string, double>& wellpi_global_factor) {
    // Apply the accumulated "global" (all-connections) WPIMULT factors that
    // were collected while processing the report step: for each affected well,
    // scale its connections and store the updated well in the latest snapshot.
    for (const auto& [well_name, factor] : wellpi_global_factor) {
        auto well = this->snapshots.back().wells(well_name);
        if (well.applyGlobalWPIMULT(factor)) {
            this->snapshots.back().wells.update(std::move(well));
        }
    }
}

View File

@ -61,7 +61,7 @@ namespace {
if (item.defaultApplied(0))
return true;
if (item.get<int>(0) <= 0)
if (item.get<int>(0) == 0)
return true;
return false;
@ -1166,6 +1166,18 @@ bool Well::handleWPIMULT(const DeckRecord& record) {
}
bool Opm::Well::applyGlobalWPIMULT(const double scaling_factor)
{
auto new_connections = std::make_shared<WellConnections>(this->connections->ordering(), this->headI, this->headJ);
for (auto c : *this->connections) {
c.scaleWellPi(scaling_factor);
new_connections->add(c);
}
return this->updateConnections(std::move(new_connections), false);
}
void Well::updateSegments(std::shared_ptr<WellSegments> segments_arg) {
this->segments = std::move(segments_arg);
this->updateRefDepth( this->segments->depthTopSegment() );

View File

@ -1294,7 +1294,7 @@ COMPDAT
WPIMULT
'OP_1' 2.0 /
'OP_1' 0.8 0 0 0 / -- all connections
'OP_1' 0.8 0 0 0 / -- all connections but not defaulted
/
DATES -- 3
@ -1335,6 +1335,49 @@ WPIMULT
DATES -- 5
20 JAN 2014 /
/
COMPDAT
-- WELL I J K1 K2 Sat. CF DIAM KH SKIN ND DIR Ro
'OP_1' 9 9 1 1 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 2 2 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 3 3 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 4 4 'OPEN' 1* 100 2* 2* 'X' 22.100 /
/
WPIMULT
'OP_1' 2.0 /
'OP_1' 0.10 2* 4 /
/
WPIMULT
'OP_1' 0.8 / -- all connections
'OP_1' 0.50 2* 4 /
/
DATES -- 6
20 FEB 2014 /
/
COMPDAT
-- WELL I J K1 K2 Sat. CF DIAM KH SKIN ND DIR Ro
'OP_1' 9 9 1 1 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 2 2 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 3 3 'OPEN' 1* 100 2* 2* 'X' 22.100 /
'OP_1' 9 9 4 4 'OPEN' 1* 100 2* 2* 'X' 22.100 /
/
COMPDAT
-- WELL I J K1 K2 Sat. CF DIAM KH SKIN ND DIR Ro
'OP_2' 8 8 1 1 'OPEN' 1* 50 2* 2* 'X' 22.100 /
'OP_2' 8 8 2 2 'OPEN' 1* 50 2* 2* 'X' 22.100 /
'OP_2' 8 8 3 3 'OPEN' 1* 50 2* 2* 'X' 22.100 /
/
WPIMULT
'OP_1' 2.0 /
'OP_2' 3.0 /
/
WPIMULT
'OP_1' 0.8 -1 -1 -1 / -- all connections
'OP_2' 7.0 /
/
DATES -- 7
20 FEB 2014 /
/
END
)";
@ -1345,25 +1388,32 @@ END
const auto& cs3 = schedule.getWell("OP_1", 3).getConnections();
const auto& cs4 = schedule.getWell("OP_1", 4).getConnections();
const auto& cs5 = schedule.getWell("OP_1", 5).getConnections();
const auto& cs6 = schedule.getWell("OP_1", 6).getConnections();
const auto& cs7 = schedule.getWell("OP_1", 7).getConnections();
const auto& cs0_2 = schedule.getWell("OP_2", 0).getConnections();
const auto& cs1_2 = schedule.getWell("OP_2", 1).getConnections();
const auto& cs2_2 = schedule.getWell("OP_2", 2).getConnections();
const auto& cs7_2 = schedule.getWell("OP_2", 7).getConnections();
for (size_t i = 0; i < cs1_2.size(); ++i ) {
BOOST_CHECK_CLOSE(cs1_2.get(i).CF() / cs0_2.get(i).CF(), 7.0, 1.e-13);
BOOST_CHECK_CLOSE(cs2_2.get(i).CF() / cs1_2.get(i).CF(), 1.0, 1.e-13);
BOOST_CHECK_CLOSE(cs7_2.get(i).CF() / cs0_2.get(i).CF(), 7.0, 1.e-13);
}
for (size_t i = 0; i < cs1.size(); ++i ) {
BOOST_CHECK_CLOSE(cs1.get(i).CF() / cs0.get(i).CF(), 0.8, 1.e-13);
BOOST_CHECK_CLOSE(cs2.get(i).CF() / cs1.get(i).CF(), 0.5, 1.e-13);
BOOST_CHECK_CLOSE(cs3.get(i).CF() / cs0.get(i).CF(), 1.6, 1.e-13);
BOOST_CHECK_CLOSE(cs4.get(i).CF() / cs0.get(i).CF(), 0.8, 1.e-13);
BOOST_CHECK_CLOSE(cs7.get(i).CF() / cs0.get(i).CF(), 0.8, 1.e-13);
}
for (size_t i = 0; i < 3; ++i) {
BOOST_CHECK_CLOSE(cs5.get(i).CF() / cs0.get(i).CF(), 0.8, 1.e-13);
BOOST_CHECK_CLOSE(cs6.get(i).CF() / cs0.get(i).CF(), 0.8, 1.e-13);
}
BOOST_CHECK_CLOSE(cs5.get(3).CF() / cs0.get(3).CF(), 0.04, 1.e-13);
BOOST_CHECK_CLOSE(cs6.get(3).CF() / cs0.get(3).CF(), 0.04, 1.e-13);
}