Merge pull request #2190 from joakim-hove/schedule-tuning

Schedule tuning
Joakim Hove 2021-01-07 07:49:23 +01:00 committed by GitHub
commit 167ada2020
10 changed files with 55 additions and 31 deletions

View File

@@ -36,7 +36,6 @@
#include <opm/parser/eclipse/EclipseState/Schedule/Group/GConSale.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Group/GConSump.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/OilVaporizationProperties.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Tuning.hpp>
#include <opm/parser/eclipse/EclipseState/Util/OrderedMap.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/MessageLimits.hpp>
#include <opm/parser/eclipse/EclipseState/Runspec.hpp>
@@ -259,7 +258,6 @@ namespace Opm
bool hasGroup(const std::string& groupName, std::size_t timeStep) const;
const Group& getGroup(const std::string& groupName, std::size_t timeStep) const;
const Tuning& getTuning(std::size_t timeStep) const;
const MessageLimits& getMessageLimits() const;
void invalidNamePattern (const std::string& namePattern, std::size_t report_step, const ParseContext& parseContext, ErrorGuard& errors, const DeckKeyword& keyword) const;
const GuideRateConfig& guideRateConfig(std::size_t timeStep) const;
@@ -323,7 +321,6 @@ namespace Opm
m_oilvaporizationproperties.serializeOp(serializer);
m_events.serializeOp(serializer);
m_modifierDeck.serializeOp(serializer);
m_tuning.serializeOp(serializer);
m_messageLimits.serializeOp(serializer);
m_runspec.serializeOp(serializer);
auto splitvfpprod = splitDynMap<Map2>(vfpprod_tables);
@@ -367,7 +364,6 @@ namespace Opm
DynamicState< OilVaporizationProperties > m_oilvaporizationproperties;
Events m_events;
DynamicVector< Deck > m_modifierDeck;
DynamicState<Tuning> m_tuning;
MessageLimits m_messageLimits;
Runspec m_runspec;
VFPProdMap vfpprod_tables;
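
The hunks above drop the DynamicState<Tuning> m_tuning member and the getTuning() accessor from Schedule; the tuning settings are now carried by the per-report-step ScheduleState snapshots. A minimal sketch of how a caller migrates, assuming only the indexed access and the Tuning field names that appear elsewhere in this diff:

#include <cstddef>
#include <opm/parser/eclipse/EclipseState/Schedule/Schedule.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Tuning.hpp>

// Sketch: read the tuning settings that apply to one report step.
double initial_timestep(const Opm::Schedule& sched, std::size_t report_step) {
    // Old API (removed in this commit):
    //     const Opm::Tuning& tuning = sched.getTuning(report_step);
    // New API: tuning is a member of the per-step snapshot.
    const Opm::Tuning& tuning = sched[report_step].tuning();
    return tuning.TSINIT;    // TSINIT as exercised by the unit test below
}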

View File

@@ -25,6 +25,7 @@
#include <optional>
#include <opm/parser/eclipse/EclipseState/Schedule/Well/PAvg.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Tuning.hpp>
namespace Opm {
@@ -55,12 +56,16 @@ namespace Opm {
void pavg(PAvg pavg);
const PAvg& pavg() const;
void tuning(Tuning tuning);
const Tuning& tuning() const;
template<class Serializer>
void serializeOp(Serializer& serializer) {
serializer(m_start_time);
serializer(m_end_time);
serializer(m_pavg);
m_tuning.serializeOp(serializer);
}
private:
@@ -68,6 +73,7 @@ namespace Opm {
std::optional<std::chrono::system_clock::time_point> m_end_time;
std::shared_ptr<PAvg> m_pavg;
Tuning m_tuning;
};
}
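
The accessors added to ScheduleState follow a small idiom: the setter takes its Tuning argument by value and moves it into the member, the getter hands out a const reference, and serializeOp simply delegates to Tuning::serializeOp. A standalone sketch of the same idiom, with hypothetical stand-in types:

#include <utility>

// 'Settings' stands in for Tuning and 'StateSketch' for ScheduleState; only
// the accessor idiom from the hunk above is mirrored here.
struct Settings {
    double tsinit = 1.0;
};

class StateSketch {
public:
    // By-value parameter: callers may copy or move in; the setter moves on.
    void settings(Settings s) { m_settings = std::move(s); }
    const Settings& settings() const { return m_settings; }
private:
    Settings m_settings;
};

Taking the argument by value is what lets the keyword handlers further down hand back a modified copy with std::move without paying for a second copy.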

View File

@@ -139,7 +139,7 @@ createDoubHead(const EclipseState& es,
const auto tconv = getTimeConv(usys);
auto dh = DoubHEAD{}
.tuningParameters(sched.getTuning(lookup_step), tconv)
.tuningParameters(sched[lookup_step].tuning(), tconv)
.timeStamp (computeTimeStamp(sched, simTime))
.drsdt (sched, lookup_step, tconv)
.udq_param(rspec.udqParams())

View File

@@ -501,7 +501,7 @@ createInteHead(const EclipseState& es,
// n{isa}caqz: number of data elements per aquifer connection in {ISA}CAQ
.params_NAAQZ (1, 18, 24, 10, 7, 2, 4)
.stepParam (num_solver_steps, report_step)
.tuningParam (getTuningPars(sched.getTuning(lookup_step)))
.tuningParam (getTuningPars(sched[lookup_step].tuning()))
.liftOptParam (getLiftOptPar(sched, lookup_step))
.wellSegDimensions (getWellSegDims(rspec, sched, report_step, lookup_step))
.regionDimensions (getRegDims(tdim, rdim))

View File

@@ -745,7 +745,7 @@ namespace {
void Schedule::handleTUNING(const HandlerContext& handlerContext, const ParseContext&, ErrorGuard&) {
const auto numrecords = handlerContext.keyword.size();
Tuning tuning(m_tuning.get(handlerContext.currentStep));
auto tuning = this->snapshots.back().tuning();
if (numrecords > 0) {
const auto& record1 = handlerContext.keyword.getRecord(0);
@@ -813,7 +813,7 @@ namespace {
tuning.MXWSIT = ParserKeywords::TUNING::MXWSIT::defaultValue;
}
m_tuning.update(handlerContext.currentStep, tuning);
this->snapshots.back().tuning( std::move( tuning ));
m_events.addEvent(ScheduleEvents::TUNING_CHANGE, handlerContext.currentStep);
}
@@ -1620,8 +1620,7 @@ namespace {
}
void Schedule::handleWSEGITER(const HandlerContext& handlerContext, const ParseContext&, ErrorGuard&) {
Tuning tuning(m_tuning.get(handlerContext.currentStep));
auto tuning = this->snapshots.back().tuning();
const auto& record = handlerContext.keyword.getRecord(0);
tuning.MXWSIT = record.getItem<ParserKeywords::WSEGITER::MAX_WELL_ITERATIONS>().get<int>(0);
@@ -1629,7 +1628,7 @@ namespace {
tuning.WSEG_REDUCTION_FACTOR = record.getItem<ParserKeywords::WSEGITER::REDUCTION_FACTOR>().get<double>(0);
tuning.WSEG_INCREASE_FACTOR = record.getItem<ParserKeywords::WSEGITER::INCREASING_FACTOR>().get<double>(0);
m_tuning.update(handlerContext.currentStep, tuning);
this->snapshots.back().tuning(tuning);
m_events.addEvent(ScheduleEvents::TUNING_CHANGE, handlerContext.currentStep);
}
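
Both handleTUNING and handleWSEGITER now use a copy-modify-store round trip against the latest snapshot instead of DynamicState::get/update. A hedged sketch of the WSEGITER case, using only the field names that appear in the hunk; the ScheduleState.hpp include path is an assumption modelled on the other Schedule headers:

#include <utility>
#include <vector>
#include <opm/parser/eclipse/EclipseState/Schedule/ScheduleState.hpp>  // assumed path

// Copy the current snapshot's tuning, apply the keyword-derived values,
// then move the modified copy back into the snapshot.
void apply_wsegiter(std::vector<Opm::ScheduleState>& snapshots,
                    int max_well_iterations,
                    double reduction_factor,
                    double increase_factor) {
    auto tuning = snapshots.back().tuning();        // copy, not a reference
    tuning.MXWSIT = max_well_iterations;
    tuning.WSEG_REDUCTION_FACTOR = reduction_factor;
    tuning.WSEG_INCREASE_FACTOR = increase_factor;
    snapshots.back().tuning(std::move(tuning));     // store the modified copy
}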

View File

@@ -114,7 +114,6 @@ namespace {
m_oilvaporizationproperties( this->m_timeMap, OilVaporizationProperties(runspec.tabdims().getNumPVTTables()) ),
m_events( this->m_timeMap ),
m_modifierDeck( this->m_timeMap, Deck{} ),
m_tuning( this->m_timeMap, Tuning() ),
m_messageLimits( this->m_timeMap ),
m_runspec( runspec ),
wtest_config(this->m_timeMap, std::make_shared<WellTestConfig>() ),
@@ -269,7 +268,6 @@ namespace {
result.m_oilvaporizationproperties = {{Opm::OilVaporizationProperties::serializeObject()},1};
result.m_events = Events::serializeObject();
result.m_modifierDeck = DynamicVector<Deck>({Deck::serializeObject()});
result.m_tuning = {{Tuning::serializeObject()}, 1};
result.m_messageLimits = MessageLimits::serializeObject();
result.m_runspec = Runspec::serializeObject();
result.vfpprod_tables = {{1, {{std::make_shared<VFPProdTable>(VFPProdTable::serializeObject())}, 1}}};
@@ -454,7 +452,8 @@ private:
time_unit,
block.location().lineno));
}
this->create_next(block);
if (time_type != ScheduleTimeType::RESTART)
this->create_next(block);
while (true) {
if (keyword_index == block.size())
@@ -1324,10 +1323,6 @@ private:
}
const Tuning& Schedule::getTuning(std::size_t timeStep) const {
return this->m_tuning.get( timeStep );
}
const Deck& Schedule::getModifierDeck(std::size_t timeStep) const {
return m_modifierDeck.iget( timeStep );
}
@@ -1659,7 +1654,6 @@ private:
this->m_oilvaporizationproperties == data.m_oilvaporizationproperties &&
this->m_events == data.m_events &&
this->m_modifierDeck == data.m_modifierDeck &&
this->m_tuning == data.m_tuning &&
this->m_messageLimits == data.m_messageLimits &&
this->m_runspec == data.m_runspec &&
compareMap(this->vfpprod_tables, data.vfpprod_tables) &&
@@ -1715,6 +1709,20 @@ namespace {
{
double udq_undefined = 0;
const auto report_step = rst_state.header.report_step - 1;
auto start_time = std::chrono::system_clock::from_time_t( this->getStartTime() );
{
auto first_state = ScheduleState( start_time, start_time );
this->snapshots.push_back(first_state);
}
for (int step = 1; step < report_step; step++) {
auto state = ScheduleState( this->snapshots.back(), start_time, start_time );
this->snapshots.push_back(std::move(state));
}
{
auto restart_time = std::chrono::system_clock::from_time_t( rst_state.header.restart_info().first );
auto last_state = ScheduleState( start_time, restart_time );
this->snapshots.push_back(std::move(last_state));
}
for (const auto& rst_group : rst_state.groups) {
auto group = Group{ rst_group, this->groups.size(), static_cast<std::size_t>(report_step), udq_undefined, this->unit_system };
@@ -1772,7 +1780,7 @@ namespace {
this->addWell(well, report_step);
this->addWellToGroup(well.groupName(), well.name(), report_step);
}
m_tuning.update(report_step + 1, rst_state.tuning);
this->snapshots[report_step + 1].tuning(rst_state.tuning);
m_events.addEvent( ScheduleEvents::TUNING_CHANGE , report_step + 1);
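
The restart branch above pre-populates the snapshots vector with one ScheduleState per report step up to the restart step, seeding each new state from its predecessor, before the restart tuning is written into the step after the restart. A standalone sketch of that padding loop, with ScheduleState reduced to a hypothetical stand-in:

#include <chrono>
#include <utility>
#include <vector>

// 'State' is a stand-in for ScheduleState; only the copy-forward padding is shown.
struct State {
    std::chrono::system_clock::time_point start;
    int tuning_version = 0;    // stand-in for the Tuning payload

    explicit State(std::chrono::system_clock::time_point t)
        : start(t) {}

    State(const State& prev, std::chrono::system_clock::time_point t)
        : start(t), tuning_version(prev.tuning_version) {}
};

std::vector<State> pad_snapshots(std::chrono::system_clock::time_point start_time,
                                 int report_step) {
    std::vector<State> snapshots;
    snapshots.emplace_back(start_time);                  // initial state
    for (int step = 1; step < report_step; ++step) {
        State next(snapshots.back(), start_time);        // seed from predecessor
        snapshots.push_back(std::move(next));
    }
    return snapshots;
}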

View File

@@ -136,14 +136,19 @@ ScheduleDeck::ScheduleDeck(const Deck& deck, const std::pair<std::time_t, std::s
// 1983...
start_time = std::chrono::system_clock::from_time_t( TimeMap::mkdate(1983, 1, 1) );
}
this->m_blocks.emplace_back(KeywordLocation{}, ScheduleTimeType::START, start_time);
const auto& [restart_time, restart_offset] = restart;
this->m_restart_time = std::chrono::system_clock::from_time_t(restart_time);
this->m_restart_offset = restart_offset;
for (std::size_t it = 1; it < this->m_restart_offset; it++) {
this->m_blocks.back().end_time(start_time);
this->m_blocks.emplace_back(KeywordLocation{}, ScheduleTimeType::RESTART, start_time);
}
if (restart_offset > 0) {
for (std::size_t it = 0; it < this->m_restart_offset; it++) {
this->m_blocks.emplace_back(KeywordLocation{}, ScheduleTimeType::RESTART, start_time);
if (it < this->m_restart_offset - 1)
this->m_blocks.back().end_time(start_time);
}
} else
this->m_blocks.emplace_back(KeywordLocation{}, ScheduleTimeType::START, start_time);
ScheduleDeckContext context(this->m_restart_offset > 0, start_time);
for( const auto& keyword : SCHEDULESection(deck)) {
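
The reworked leading-block logic can be summarised as: a restarted deck opens with restart_offset RESTART blocks, with the end time pinned to the start time on all but the last, while a non-restarted deck keeps its single START block. A standalone sketch of just that branch, with the block bookkeeping reduced to a list of block-type names:

#include <cstddef>
#include <string>
#include <vector>

// Only the block types are modelled; KeywordLocation and time handling are omitted.
std::vector<std::string> leading_block_types(std::size_t restart_offset) {
    std::vector<std::string> blocks;
    if (restart_offset > 0) {
        for (std::size_t it = 0; it < restart_offset; it++)
            blocks.push_back("RESTART");    // end_time(start_time) set on all but the last
    } else
        blocks.push_back("START");
    return blocks;
}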

View File

@@ -68,6 +68,7 @@ const PAvg& ScheduleState::pavg() const {
bool ScheduleState::operator==(const ScheduleState& other) const {
return this->m_start_time == other.m_start_time &&
this->m_tuning == other.m_tuning &&
this->m_end_time == other.m_end_time;
}
@@ -77,4 +78,13 @@ ScheduleState ScheduleState::serializeObject() {
ScheduleState ts(t1, t2);
return ts;
}
void ScheduleState::tuning(Tuning tuning) {
this->m_tuning = std::move(tuning);
}
const Tuning& ScheduleState::tuning() const {
return this->m_tuning;
}
}
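
Because m_tuning now takes part in ScheduleState::operator==, two otherwise identical states compare unequal as soon as their tuning differs. A small usage sketch; the header path is an assumption modelled on the other Schedule headers, and TSINIT is the field the tests below exercise:

#include <cassert>
#include <utility>
#include <opm/parser/eclipse/EclipseState/Schedule/ScheduleState.hpp>  // assumed path

int main() {
    Opm::ScheduleState a = Opm::ScheduleState::serializeObject();
    Opm::ScheduleState b = a;
    assert(a == b);

    auto tuning = b.tuning();     // copy the current Tuning
    tuning.TSINIT += 1.0;         // perturb one field
    b.tuning(std::move(tuning));

    assert(!(a == b));            // m_tuning participates in operator== now
    return 0;
}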

View File

@@ -91,7 +91,7 @@ BOOST_AUTO_TEST_CASE(TuningTest) {
size_t timestep = 4;
BOOST_CHECK(!event.hasEvent(ScheduleEvents::TUNING_CHANGE, timestep));
const auto& tuning = schedule.getTuning(4);
const auto& tuning = schedule[4].tuning();
double TSINIT_default = tuning.TSINIT;
BOOST_CHECK_CLOSE(TSINIT_default, 1 * Metric::Time, diff);
@@ -207,10 +207,10 @@ BOOST_AUTO_TEST_CASE(TuningTest) {
/*** TIMESTEP 5***/
/********* Record 1 ***********/
{
std::size_t timestep = 5;
const auto& tuning = schedule.getTuning(timestep);
std::size_t timeStep = 5;
const auto& tuning = schedule[timeStep].tuning();
BOOST_CHECK(event.hasEvent(ScheduleEvents::TUNING_CHANGE, timestep));
BOOST_CHECK(event.hasEvent(ScheduleEvents::TUNING_CHANGE, timeStep));
double TSINIT = tuning.TSINIT;
BOOST_CHECK_CLOSE(TSINIT, 2 * Metric::Time, diff);
@@ -323,7 +323,7 @@ BOOST_AUTO_TEST_CASE(TuningTest) {
{
/********* Record 1 ***********/
std::size_t timestep = 10;
const auto& tuning = schedule.getTuning(timestep);
const auto& tuning = schedule[10].tuning();
BOOST_CHECK(event.hasEvent(ScheduleEvents::TUNING_CHANGE, timestep));
BOOST_CHECK_EQUAL(true, tuning.TMAXWC_has_value);
BOOST_CHECK_CLOSE(tuning.TMAXWC, 10.0 * Metric::Time, diff);

View File

@@ -159,7 +159,7 @@ BOOST_AUTO_TEST_CASE(Wsegiter)
const std::size_t lookup_step = 1;
const auto dh = Opm::RestartIO::DoubHEAD{}
.tuningParameters(sched.getTuning(lookup_step), tconv);
.tuningParameters(sched[lookup_step].tuning(), tconv);
const auto& v = dh.data();