Manage events with the ScheduleState machinery

This commit is contained in:
Joakim Hove
2021-01-11 14:17:53 +01:00
parent e185ddf96b
commit e445bf738c
15 changed files with 315 additions and 258 deletions

View File

@@ -106,7 +106,7 @@ namespace Opm {
std::string getTitle() const;
void applyModifierDeck(const Deck& deck);
void apply_geo_keywords(const std::vector<DeckKeyword>& keywords);
const Runspec& runspec() const;
const AquiferConfig& aquifer() const;

View File

@@ -20,8 +20,8 @@
#define SCHEDULE_EVENTS_HPP
#include <cstdint>
#include <opm/parser/eclipse/EclipseState/Schedule/DynamicVector.hpp>
#include <string>
#include <unordered_map>
namespace Opm
{
@@ -131,25 +131,46 @@ namespace Opm
class Events {
public:
Events() = default;
explicit Events(const TimeMap& timeMap);
static Events serializeObject();
void addEvent(ScheduleEvents::Events event, size_t reportStep);
bool hasEvent(uint64_t eventMask, size_t reportStep) const;
void addEvent(ScheduleEvents::Events event);
bool hasEvent(uint64_t eventMask) const;
void reset();
bool operator==(const Events& data) const;
template<class Serializer>
void serializeOp(Serializer& serializer)
{
m_events.template serializeOp<Serializer,false>(serializer);
serializer(m_events);
}
private:
DynamicVector<uint64_t> m_events;
uint64_t m_events = 0;
};
class WellGroupEvents {
public:
static WellGroupEvents serializeObject();
void addWell(const std::string& wname);
void addGroup(const std::string& gname);
void addEvent(const std::string& wgname, ScheduleEvents::Events event);
bool hasEvent(const std::string& wgname, uint64_t eventMask) const;
void reset();
bool operator==(const WellGroupEvents& data) const;
template<class Serializer>
void serializeOp(Serializer& serializer)
{
serializer.map(m_wellgroup_events);
}
private:
std::unordered_map<std::string, Events> m_wellgroup_events;
};
}
#endif

View File

@@ -29,7 +29,6 @@
#include <opm/parser/eclipse/EclipseState/Schedule/GasLiftOpt.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/DynamicState.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/DynamicVector.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Events.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Group/Group.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Group/GTNode.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Group/GuideRateConfig.hpp>
@@ -261,10 +260,6 @@ namespace Opm
const GuideRateConfig& guideRateConfig(std::size_t timeStep) const;
const RFTConfig& rftConfig() const;
const Events& getEvents() const;
const Events& getWellGroupEvents(const std::string& wellGroup) const;
bool hasWellGroupEvent(const std::string& wellGroup, uint64_t event_mask, std::size_t reportStep) const;
const Deck& getModifierDeck(std::size_t timeStep) const;
const VFPProdTable& getVFPProdTable(int table_id, std::size_t timeStep) const;
const VFPInjTable& getVFPInjTable(int table_id, std::size_t timeStep) const;
std::map<int, std::shared_ptr<const VFPProdTable> > getVFPProdTables(std::size_t timeStep) const;
@@ -316,8 +311,6 @@ namespace Opm
auto splitGroups = splitDynMap(groups);
serializer.vector(splitGroups.first);
serializer(splitGroups.second);
m_events.serializeOp(serializer);
m_modifierDeck.serializeOp(serializer);
m_messageLimits.serializeOp(serializer);
m_runspec.serializeOp(serializer);
auto splitvfpprod = splitDynMap<Map2>(vfpprod_tables);
@@ -339,7 +332,6 @@ namespace Opm
m_glo.serializeOp(serializer);
rft_config.serializeOp(serializer);
restart_config.serializeOp(serializer);
serializer.map(wellgroup_events);
if (!serializer.isSerializing()) {
reconstructDynMap(splitWells.first, splitWells.second, wells_static);
reconstructDynMap(splitGroups.first, splitGroups.second, groups);
@@ -359,8 +351,6 @@ namespace Opm
TimeMap m_timeMap;
WellMap wells_static;
GroupMap groups;
Events m_events;
DynamicVector< Deck > m_modifierDeck;
MessageLimits m_messageLimits;
Runspec m_runspec;
VFPProdMap vfpprod_tables;
@@ -380,7 +370,6 @@ namespace Opm
RestartConfig restart_config;
UnitSystem unit_system;
std::optional<int> exit_status;
std::map<std::string,Events> wellgroup_events;
DynamicState<std::shared_ptr<RPTConfig>> rpt_config;
std::vector<ScheduleState> snapshots;
@@ -433,7 +422,6 @@ namespace Opm
const EclipseGrid& grid,
const FieldPropsManager& fp,
std::vector<std::pair<const DeckKeyword*, std::size_t > >& rftProperties);
void addWellGroupEvent(const std::string& wellGroup, ScheduleEvents::Events event, std::size_t reportStep);
template<template<class, class> class Map, class Type, class Key>
std::pair<std::vector<Type>, std::vector<std::pair<Key, std::vector<std::size_t>>>>

View File

@@ -24,9 +24,12 @@
#include <memory>
#include <optional>
#include <opm/parser/eclipse/Deck/DeckKeyword.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Well/PAvg.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Tuning.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/OilVaporizationProperties.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Events.hpp>
namespace Opm {
@@ -68,6 +71,18 @@ namespace Opm {
const OilVaporizationProperties& oilvap() const;
OilVaporizationProperties& oilvap();
void events(Events events);
Events& events();
const Events& events() const;
void wellgroup_events(WellGroupEvents wgevents);
WellGroupEvents& wellgroup_events();
const WellGroupEvents& wellgroup_events() const;
void geo_keywords(std::vector<DeckKeyword> geo_keywords);
std::vector<DeckKeyword>& geo_keywords();
const std::vector<DeckKeyword>& geo_keywords() const;
template<class Serializer>
void serializeOp(Serializer& serializer) {
serializer(m_start_time);
@@ -76,6 +91,9 @@ namespace Opm {
m_tuning.serializeOp(serializer);
serializer(m_nupcol);
m_oilvap.serializeOp(serializer);
m_events.serializeOp(serializer);
m_wellgroup_events.serializeOp(serializer);
serializer.vector(m_geo_keywords);
}
private:
@@ -86,6 +104,9 @@ namespace Opm {
Tuning m_tuning;
int m_nupcol;
OilVaporizationProperties m_oilvap;
Events m_events;
WellGroupEvents m_wellgroup_events;
std::vector<DeckKeyword> m_geo_keywords;
};
}

View File

@@ -246,9 +246,9 @@ namespace Opm {
}
}
void EclipseState::applyModifierDeck(const Deck& deck) {
void EclipseState::apply_geo_keywords(const std::vector<DeckKeyword>& keywords) {
using namespace ParserKeywords;
for (const auto& keyword : deck) {
for (const auto& keyword : keywords) {
if (keyword.isKeyword<MULTFLT>()) {
for (const auto& record : keyword) {

View File

@@ -16,42 +16,80 @@
You should have received a copy of the GNU General Public License
along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#include <cstddef>
#include <fmt/format.h>
#include <opm/parser/eclipse/EclipseState/Schedule/Events.hpp>
namespace Opm {
Events::Events(const TimeMap& timeMap) :
m_events( DynamicVector<uint64_t>( timeMap , 0 ) )
{ }
Events Events::serializeObject()
{
Events result;
result.m_events = DynamicVector<uint64_t>({1,2,3,4,5});
result.m_events = 12345;
return result;
}
bool Events::hasEvent(uint64_t eventMask , size_t reportStep) const {
uint64_t eventSum = m_events[reportStep];
if (eventSum & eventMask)
return true;
else
return false;
bool Events::hasEvent(uint64_t eventMask) const {
return (this->m_events & eventMask);
}
void Events::addEvent(ScheduleEvents::Events event, size_t reportStep) {
m_events[reportStep] |= event;
void Events::addEvent(ScheduleEvents::Events event) {
this->m_events |= event;
}
bool Events::operator==(const Events& data) const {
return this->m_events == data.m_events;
}
void Events::reset() {
this->m_events = 0;
}
WellGroupEvents WellGroupEvents::serializeObject() {
WellGroupEvents wg;
wg.addWell("WG1");
wg.addGroup("GG1");
return wg;
}
void WellGroupEvents::addWell(const std::string& wname) {
Events events;
events.addEvent( ScheduleEvents::NEW_WELL );
this->m_wellgroup_events.insert( std::make_pair( wname, events ));
}
void WellGroupEvents::addGroup(const std::string& gname) {
Events events;
events.addEvent( ScheduleEvents::NEW_GROUP );
this->m_wellgroup_events.insert( std::make_pair( gname, events ));
}
bool WellGroupEvents::hasEvent(const std::string& wgname, uint64_t eventMask) const {
const auto events_iter = this->m_wellgroup_events.find(wgname);
if (events_iter == this->m_wellgroup_events.end())
return false;
return events_iter->second.hasEvent(eventMask);
}
void WellGroupEvents::addEvent(const std::string& wgname, ScheduleEvents::Events event) {
const auto events_iter = this->m_wellgroup_events.find(wgname);
if (events_iter == this->m_wellgroup_events.end())
throw std::logic_error(fmt::format("Adding event for unknown well/group: {}", wgname));
events_iter->second.addEvent(event);
}
void WellGroupEvents::reset() {
for (auto& [_, events] : this->m_wellgroup_events) {
(void)_;
events.reset();
}
}
bool WellGroupEvents::operator==(const WellGroupEvents& data) const {
return this->m_wellgroup_events == data.m_wellgroup_events;
}
}

View File

@@ -169,10 +169,10 @@ namespace {
"Well {} is not connected to grid - will remain SHUT", name, location.filename, location.lineno, name);
OpmLog::warning(msg);
}
this->addWellGroupEvent(name, ScheduleEvents::COMPLETION_CHANGE, handlerContext.currentStep);
this->snapshots.back().wellgroup_events().addEvent( name, ScheduleEvents::COMPLETION_CHANGE);
}
}
m_events.addEvent(ScheduleEvents::COMPLETION_CHANGE, handlerContext.currentStep);
this->snapshots.back().events().addEvent(ScheduleEvents::COMPLETION_CHANGE);
// In the case the wells reference depth has been defaulted in the
// WELSPECS keyword we need to force a calculation of the wells
@@ -340,8 +340,8 @@ namespace {
if (group_ptr->updateInjection(injection)) {
this->updateGroup(std::move(group_ptr), current_step);
m_events.addEvent( ScheduleEvents::GROUP_INJECTION_UPDATE , current_step);
this->addWellGroupEvent(group_name, ScheduleEvents::GROUP_INJECTION_UPDATE, current_step);
this->snapshots.back().events().addEvent( ScheduleEvents::GROUP_INJECTION_UPDATE );
this->snapshots.back().wellgroup_events().addEvent( group_name, ScheduleEvents::GROUP_INJECTION_UPDATE);
}
}
}
@@ -458,8 +458,8 @@ namespace {
this->guide_rate_config.update( current_step, std::move(new_config) );
this->updateGroup(std::move(group_ptr), current_step);
m_events.addEvent(ScheduleEvents::GROUP_PRODUCTION_UPDATE, current_step);
this->addWellGroupEvent(group_name, ScheduleEvents::GROUP_PRODUCTION_UPDATE, current_step);
this->snapshots.back().events().addEvent(ScheduleEvents::GROUP_PRODUCTION_UPDATE);
this->snapshots.back().wellgroup_events().addEvent( group_name, ScheduleEvents::GROUP_PRODUCTION_UPDATE);
auto udq = std::make_shared<UDQActive>(this->udqActive(current_step));
if (production.updateUDQActive(this->getUDQConfig(current_step), *udq))
@@ -527,8 +527,8 @@ namespace {
for (const auto& group_name : group_names) {
auto group_ptr = std::make_shared<Group>(this->getGroup(group_name, handlerContext.currentStep));
if (group_ptr->update_gefac(gefac, transfer)) {
this->addWellGroupEvent( group_name, ScheduleEvents::WELLGROUP_EFFICIENCY_UPDATE, handlerContext.currentStep);
m_events.addEvent( ScheduleEvents::WELLGROUP_EFFICIENCY_UPDATE , handlerContext.currentStep);
this->snapshots.back().wellgroup_events().addEvent( group_name, ScheduleEvents::WELLGROUP_EFFICIENCY_UPDATE);
this->snapshots.back().events().addEvent( ScheduleEvents::WELLGROUP_EFFICIENCY_UPDATE );
this->updateGroup(std::move(group_ptr), handlerContext.currentStep);
}
}
@@ -679,8 +679,8 @@ namespace {
}
void Schedule::handleMULTFLT (const HandlerContext& handlerContext, const ParseContext&, ErrorGuard&) {
this->m_modifierDeck[handlerContext.currentStep].addKeyword(handlerContext.keyword);
m_events.addEvent(ScheduleEvents::GEO_MODIFIER, handlerContext.currentStep);
this->snapshots.back().geo_keywords().push_back(handlerContext.keyword);
this->snapshots.back().events().addEvent( ScheduleEvents::GEO_MODIFIER );
}
void Schedule::handleMXUNSUPP(const HandlerContext& handlerContext, const ParseContext& parseContext, ErrorGuard& errors) {
@@ -816,7 +816,7 @@ namespace {
}
this->snapshots.back().tuning( std::move( tuning ));
m_events.addEvent(ScheduleEvents::TUNING_CHANGE, handlerContext.currentStep);
this->snapshots.back().events().addEvent(ScheduleEvents::TUNING_CHANGE);
}
void Schedule::handleUDQ(const HandlerContext& handlerContext, const ParseContext&, ErrorGuard&) {
@@ -848,7 +848,7 @@ namespace {
auto& table_state = vfpinj_tables.at(table_id);
table_state.update(handlerContext.currentStep, table);
this->m_events.addEvent( ScheduleEvents::VFPINJ_UPDATE , handlerContext.currentStep);
this->snapshots.back().events().addEvent( ScheduleEvents::VFPINJ_UPDATE );
}
void Schedule::handleVFPPROD(const HandlerContext& handlerContext, const ParseContext&, ErrorGuard&) {
@@ -862,7 +862,7 @@ namespace {
auto& table_state = vfpprod_tables.at(table_id);
table_state.update(handlerContext.currentStep, table);
this->m_events.addEvent( ScheduleEvents::VFPPROD_UPDATE , handlerContext.currentStep);
this->snapshots.back().events().addEvent( ScheduleEvents::VFPPROD_UPDATE );
}
void Schedule::handleWCONHIST(const HandlerContext& handlerContext, const ParseContext& parseContext, ErrorGuard& errors) {
@@ -911,8 +911,8 @@ namespace {
update_well = true;
if (update_well) {
m_events.addEvent( ScheduleEvents::PRODUCTION_UPDATE , handlerContext.currentStep);
this->addWellGroupEvent( well2->name(), ScheduleEvents::PRODUCTION_UPDATE, handlerContext.currentStep);
this->snapshots.back().events().addEvent( ScheduleEvents::PRODUCTION_UPDATE );
this->snapshots.back().wellgroup_events().addEvent( well2->name(), ScheduleEvents::PRODUCTION_UPDATE);
this->updateWell(well2, handlerContext.currentStep);
}
@@ -977,8 +977,8 @@ namespace {
update_well = true;
if (update_well) {
m_events.addEvent( ScheduleEvents::PRODUCTION_UPDATE , handlerContext.currentStep);
this->addWellGroupEvent( well2->name(), ScheduleEvents::PRODUCTION_UPDATE, handlerContext.currentStep);
this->snapshots.back().events().addEvent( ScheduleEvents::PRODUCTION_UPDATE );
this->snapshots.back().wellgroup_events().addEvent( well2->name(), ScheduleEvents::PRODUCTION_UPDATE);
this->updateWell(std::move(well2), handlerContext.currentStep);
}
@@ -1018,8 +1018,8 @@ namespace {
if (update_well) {
this->updateWell(well2, handlerContext.currentStep);
m_events.addEvent( ScheduleEvents::INJECTION_UPDATE , handlerContext.currentStep );
this->addWellGroupEvent( well_name, ScheduleEvents::INJECTION_UPDATE, handlerContext.currentStep);
this->snapshots.back().events().addEvent(ScheduleEvents::INJECTION_UPDATE);
this->snapshots.back().wellgroup_events().addEvent( well_name, ScheduleEvents::INJECTION_UPDATE);
}
// if the well has a zero surface rate limit or reservoir rate limit, while it does not allow crossflow,
@@ -1062,7 +1062,6 @@ namespace {
for (const auto& well_name : well_names) {
this->updateWellStatus(well_name, handlerContext.currentStep, false, status, handlerContext.keyword.location());
bool update_well = false;
auto& dynamic_state = this->wells_static.at(well_name);
auto well2 = std::make_shared<Well>(*dynamic_state[handlerContext.currentStep]);
@@ -1080,8 +1079,8 @@ namespace {
if (update_well) {
this->updateWell(well2, handlerContext.currentStep);
m_events.addEvent( ScheduleEvents::INJECTION_UPDATE , handlerContext.currentStep );
this->addWellGroupEvent( well_name, ScheduleEvents::INJECTION_UPDATE, handlerContext.currentStep);
this->snapshots.back().events().addEvent( ScheduleEvents::INJECTION_UPDATE );
this->snapshots.back().wellgroup_events().addEvent( well_name, ScheduleEvents::INJECTION_UPDATE);
}
if ( ! well2->getAllowCrossFlow() && (injection->surfaceInjectionRate.zero())) {
@@ -1125,8 +1124,8 @@ namespace {
auto& dynamic_state = this->wells_static.at(well_name);
auto well2 = std::make_shared<Well>(*dynamic_state[handlerContext.currentStep]);
if (well2->updateEfficiencyFactor(efficiencyFactor)){
this->addWellGroupEvent( well_name, ScheduleEvents::WELLGROUP_EFFICIENCY_UPDATE, handlerContext.currentStep);
m_events.addEvent( ScheduleEvents::WELLGROUP_EFFICIENCY_UPDATE , handlerContext.currentStep);
this->snapshots.back().wellgroup_events().addEvent( well_name, ScheduleEvents::WELLGROUP_EFFICIENCY_UPDATE);
this->snapshots.back().events().addEvent(ScheduleEvents::WELLGROUP_EFFICIENCY_UPDATE);
this->updateWell(std::move(well2), handlerContext.currentStep);
}
}
@@ -1181,11 +1180,11 @@ namespace {
if (well2->updateWellProductivityIndex(rawProdIndex))
this->updateWell(std::move(well2), event_step);
this->addWellGroupEvent(well_name, ScheduleEvents::WELL_PRODUCTIVITY_INDEX, event_step);
this->snapshots.back().wellgroup_events().addEvent( well_name, ScheduleEvents::WELL_PRODUCTIVITY_INDEX);
}
}
this->m_events.addEvent(ScheduleEvents::WELL_PRODUCTIVITY_INDEX, event_step);
this->snapshots.back().events().addEvent(ScheduleEvents::WELL_PRODUCTIVITY_INDEX);
}
void Schedule::handleWELSEGS(const HandlerContext& handlerContext, const ParseContext&, ErrorGuard&) {
@@ -1259,7 +1258,7 @@ namespace {
if (update) {
well2->updateRefDepth();
this->updateWell(std::move(well2), handlerContext.currentStep);
this->addWellGroupEvent(wellName, ScheduleEvents::WELL_WELSPECS_UPDATE, handlerContext.currentStep);
this->snapshots.back().wellgroup_events().addEvent( wellName, ScheduleEvents::WELL_WELSPECS_UPDATE);
}
}
}
@@ -1319,11 +1318,11 @@ namespace {
if (update)
{
if (well2->isProducer()) {
this->addWellGroupEvent( well_name, ScheduleEvents::PRODUCTION_UPDATE, handlerContext.currentStep);
m_events.addEvent( ScheduleEvents::PRODUCTION_UPDATE , handlerContext.currentStep);
this->snapshots.back().wellgroup_events().addEvent( well_name, ScheduleEvents::PRODUCTION_UPDATE);
this->snapshots.back().events().addEvent( ScheduleEvents::PRODUCTION_UPDATE );
} else {
this->addWellGroupEvent( well_name, ScheduleEvents::INJECTION_UPDATE, handlerContext.currentStep);
m_events.addEvent( ScheduleEvents::INJECTION_UPDATE , handlerContext.currentStep);
this->snapshots.back().wellgroup_events().addEvent( well_name, ScheduleEvents::INJECTION_UPDATE);
this->snapshots.back().events().addEvent( ScheduleEvents::INJECTION_UPDATE );
}
this->updateWell(std::move(well2), handlerContext.currentStep);
}
@@ -1629,7 +1628,7 @@ namespace {
tuning.WSEG_REDUCTION_FACTOR = record.getItem<ParserKeywords::WSEGITER::REDUCTION_FACTOR>().get<double>(0);
tuning.WSEG_INCREASE_FACTOR = record.getItem<ParserKeywords::WSEGITER::INCREASING_FACTOR>().get<double>(0);
m_events.addEvent(ScheduleEvents::TUNING_CHANGE, handlerContext.currentStep);
this->snapshots.back().events().addEvent(ScheduleEvents::TUNING_CHANGE);
}
void Schedule::handleWSEGSICD(const HandlerContext& handlerContext, const ParseContext&, ErrorGuard&) {

View File

@@ -112,8 +112,6 @@ namespace {
m_input_path(deck.getInputPath()),
m_sched_deck(deck, restart_info(rst) ),
m_timeMap( deck , restart_info( rst )),
m_events( this->m_timeMap ),
m_modifierDeck( this->m_timeMap, Deck{} ),
m_messageLimits( this->m_timeMap ),
m_runspec( runspec ),
wtest_config(this->m_timeMap, std::make_shared<WellTestConfig>() ),
@@ -132,7 +130,6 @@ namespace {
unit_system(deck.getActiveUnitSystem()),
rpt_config(this->m_timeMap, std::make_shared<RPTConfig>())
{
addGroup( "FIELD", 0);
if (rst)
this->load_rst(*rst, grid, fp);
@@ -161,13 +158,13 @@ namespace {
if (this->size() == 0)
return;
// Verify that we can safely re-iterate over the Schedule section
if (!rst)
this->iterateScheduleSection(0, parseContext, errors, grid, fp);
else {
auto restart_offset = this->m_sched_deck.restart_offset();
this->iterateScheduleSection(restart_offset, parseContext, errors, grid, fp);
}
//Verify that we can safely re-iterate over the Schedule section
//if (!rst)
// this->iterateScheduleSection(0, parseContext, errors, grid, fp);
//else {
// auto restart_offset = this->m_sched_deck.restart_offset();
// this->iterateScheduleSection(restart_offset, parseContext, errors, grid, fp);
//}
// Verify that the time schedule is correct.
for (std::size_t report_step = 0; report_step < this->size() - 1; report_step++) {
@@ -279,8 +276,6 @@ namespace {
result.m_timeMap = TimeMap::serializeObject();
result.wells_static.insert({"test1", {{std::make_shared<Opm::Well>(Opm::Well::serializeObject())},1}});
result.groups.insert({"test2", {{std::make_shared<Opm::Group>(Opm::Group::serializeObject())},1}});
result.m_events = Events::serializeObject();
result.m_modifierDeck = DynamicVector<Deck>({Deck::serializeObject()});
result.m_messageLimits = MessageLimits::serializeObject();
result.m_runspec = Runspec::serializeObject();
result.vfpprod_tables = {{1, {{std::make_shared<VFPProdTable>(VFPProdTable::serializeObject())}, 1}}};
@@ -298,7 +293,6 @@ namespace {
result.m_actions = {{std::make_shared<Action::Actions>(Action::Actions::serializeObject())}, 1};
result.rft_config = RFTConfig::serializeObject();
result.restart_config = RestartConfig::serializeObject();
result.wellgroup_events = {{"test", Events::serializeObject()}};
result.unit_system = UnitSystem::newFIELD();
result.snapshots = { ScheduleState::serializeObject() };
result.m_input_path = "Some/funny/path";
@@ -445,7 +439,6 @@ void Schedule::iterateScheduleSection(std::optional<std::size_t> load_offset,
"LIFTOPT",
"LINCOM",
"MESSAGES",
"MULTFLT",
"MXUNSUPP",
"NODEPROP",
"RPTSCHED",
@@ -711,8 +704,8 @@ void Schedule::iterateScheduleSection(std::optional<std::size_t> load_offset,
event.
*/
if (old_status != status) {
this->m_events.addEvent( ScheduleEvents::WELL_STATUS_CHANGE, reportStep );
this->addWellGroupEvent( well2->name(), ScheduleEvents::WELL_STATUS_CHANGE, reportStep);
this->snapshots.back().events().addEvent( ScheduleEvents::WELL_STATUS_CHANGE);
this->snapshots.back().wellgroup_events().addEvent( well2->name(), ScheduleEvents::WELL_STATUS_CHANGE);
}
update = true;
@@ -826,7 +819,7 @@ void Schedule::iterateScheduleSection(std::optional<std::size_t> load_offset,
}
}
m_events.addEvent( ScheduleEvents::COMPLETION_CHANGE, currentStep );
this->snapshots.back().events().addEvent( ScheduleEvents::COMPLETION_CHANGE);
}
}
}
@@ -991,9 +984,8 @@ void Schedule::iterateScheduleSection(std::optional<std::size_t> load_offset,
void Schedule::addWell(Well well, std::size_t report_step) {
const std::string wname = well.name();
m_events.addEvent( ScheduleEvents::NEW_WELL , report_step );
this->wellgroup_events.insert( std::make_pair(wname, Events(this->m_timeMap)));
this->addWellGroupEvent(wname, ScheduleEvents::NEW_WELL, report_step);
this->snapshots.back().events().addEvent( ScheduleEvents::NEW_WELL );
this->snapshots.back().wellgroup_events().addWell( wname );
well.setInsertIndex(this->wells_static.size());
this->wells_static.insert( std::make_pair(wname, DynamicState<std::shared_ptr<Well>>(m_timeMap, nullptr)));
@@ -1340,9 +1332,8 @@ void Schedule::iterateScheduleSection(std::optional<std::size_t> load_offset,
auto& dynamic_state = this->groups.at(group.name());
dynamic_state.update(timeStep, group_ptr);
this->m_events.addEvent( ScheduleEvents::NEW_GROUP , timeStep );
this->wellgroup_events.insert( std::make_pair(group.name(), Events(this->m_timeMap)));
this->addWellGroupEvent(group.name(), ScheduleEvents::NEW_GROUP, timeStep);
this->snapshots.back().events().addEvent( ScheduleEvents::NEW_GROUP );
this->snapshots.back().wellgroup_events().addGroup(group.name());
// All newly created groups are attached to the field group,
// can then be relocated with the GRUPTREE keyword.
@@ -1412,7 +1403,7 @@ void Schedule::iterateScheduleSection(std::optional<std::size_t> load_offset,
auto well_ptr = std::make_shared<Well>( well );
well_ptr->updateGroup(group_name);
this->updateWell(well_ptr, timeStep);
this->addWellGroupEvent(well_ptr->name(), ScheduleEvents::WELL_WELSPECS_UPDATE, timeStep);
this->snapshots.back().wellgroup_events().addEvent( well_ptr->name(), ScheduleEvents::WELL_WELSPECS_UPDATE );
// Remove well child reference from previous group
auto group = std::make_shared<Group>(this->getGroup(old_gname, timeStep));
@@ -1424,40 +1415,15 @@ void Schedule::iterateScheduleSection(std::optional<std::size_t> load_offset,
auto group_ptr = std::make_shared<Group>(this->getGroup(group_name, timeStep));
group_ptr->addWell(well_name);
this->updateGroup(group_ptr, timeStep);
this->m_events.addEvent( ScheduleEvents::GROUP_CHANGE , timeStep);
this->snapshots.back().events().addEvent( ScheduleEvents::GROUP_CHANGE );
}
const Deck& Schedule::getModifierDeck(std::size_t timeStep) const {
return m_modifierDeck.iget( timeStep );
}
const MessageLimits& Schedule::getMessageLimits() const {
return m_messageLimits;
}
const Events& Schedule::getWellGroupEvents(const std::string& wellGroup) const {
if (this->wellgroup_events.count(wellGroup) > 0)
return this->wellgroup_events.at(wellGroup);
else
throw std::invalid_argument("No such well og group " + wellGroup);
}
void Schedule::addWellGroupEvent(const std::string& wellGroup, ScheduleEvents::Events event, std::size_t reportStep) {
auto& events = this->wellgroup_events.at(wellGroup);
events.addEvent(event, reportStep);
}
bool Schedule::hasWellGroupEvent(const std::string& wellGroup, uint64_t event_mask, std::size_t reportStep) const {
const auto& events = this->getWellGroupEvents(wellGroup);
return events.hasEvent(event_mask, reportStep);
}
const Events& Schedule::getEvents() const {
return this->m_events;
}
const Well::ProducerCMode& Schedule::getGlobalWhistctlMmode(std::size_t timestep) const {
return global_whistctl_mode.get(timestep);
}
@@ -1743,8 +1709,6 @@ void Schedule::iterateScheduleSection(std::optional<std::size_t> load_offset,
this->m_input_path == data.m_input_path &&
compareMap(this->wells_static, data.wells_static) &&
compareMap(this->groups, data.groups) &&
this->m_events == data.m_events &&
this->m_modifierDeck == data.m_modifierDeck &&
this->m_messageLimits == data.m_messageLimits &&
this->m_runspec == data.m_runspec &&
compareMap(this->vfpprod_tables, data.vfpprod_tables) &&
@@ -1763,8 +1727,7 @@ void Schedule::iterateScheduleSection(std::optional<std::size_t> load_offset,
compareDynState(this->rpt_config, data.rpt_config) &&
rft_config == data.rft_config &&
this->restart_config == data.restart_config &&
this->unit_system == data.unit_system &&
this->wellgroup_events == data.wellgroup_events;
this->unit_system == data.unit_system;
}
@@ -1812,13 +1775,15 @@ namespace {
this->addGroup(group, report_step);
if (group.isProductionGroup()) {
this->m_events.addEvent(ScheduleEvents::GROUP_PRODUCTION_UPDATE, report_step + 1);
this->addWellGroupEvent(rst_group.name, ScheduleEvents::GROUP_PRODUCTION_UPDATE, report_step + 1);
// Was originally at report_step + 1
this->snapshots.back().events().addEvent(ScheduleEvents::GROUP_PRODUCTION_UPDATE );
this->snapshots.back().wellgroup_events().addEvent(rst_group.name, ScheduleEvents::GROUP_PRODUCTION_UPDATE);
}
if (group.isInjectionGroup()) {
this->m_events.addEvent(ScheduleEvents::GROUP_INJECTION_UPDATE, report_step + 1);
this->addWellGroupEvent(rst_group.name, ScheduleEvents::GROUP_INJECTION_UPDATE, report_step + 1);
// Was originally at report_step + 1
this->snapshots.back().events().addEvent(ScheduleEvents::GROUP_INJECTION_UPDATE );
this->snapshots.back().wellgroup_events().addEvent(rst_group.name, ScheduleEvents::GROUP_INJECTION_UPDATE);
}
}
@@ -1864,7 +1829,8 @@ namespace {
this->addWellToGroup(well.groupName(), well.name(), report_step);
}
this->snapshots[report_step + 1].tuning(rst_state.tuning);
m_events.addEvent( ScheduleEvents::TUNING_CHANGE , report_step + 1);
// Originally at report_step + 1
this->snapshots.back().events().addEvent( ScheduleEvents::TUNING_CHANGE );
{
@@ -2172,6 +2138,8 @@ void Schedule::create_first(const std::chrono::system_clock::time_point& start_t
auto& sched_state = snapshots.back();
sched_state.nupcol( this->m_runspec.nupcol() );
sched_state.oilvap( OilVaporizationProperties( this->m_runspec.tabdims().getNumPVTTables() ));
this->addGroup("FIELD", 0);
}
void Schedule::create_next(const std::chrono::system_clock::time_point& start_time, const std::optional<std::chrono::system_clock::time_point>& end_time) {

View File

@@ -38,12 +38,15 @@ ScheduleState::ScheduleState(const ScheduleState& src, const std::chrono::system
{
this->m_start_time = start_time;
this->m_end_time = std::nullopt;
this->m_events.reset();
this->m_wellgroup_events.reset();
this->m_geo_keywords.clear();
}
ScheduleState::ScheduleState(const ScheduleState& src, const std::chrono::system_clock::time_point& start_time, const std::chrono::system_clock::time_point& end_time) :
ScheduleState(src)
ScheduleState(src, start_time)
{
this->m_start_time = start_time;
this->m_end_time = end_time;
}
@@ -85,11 +88,27 @@ OilVaporizationProperties& ScheduleState::oilvap() {
return this->m_oilvap;
}
void ScheduleState::geo_keywords(std::vector<DeckKeyword> geo_keywords) {
this->m_geo_keywords = std::move(geo_keywords);
}
std::vector<DeckKeyword>& ScheduleState::geo_keywords() {
return this->m_geo_keywords;
}
const std::vector<DeckKeyword>& ScheduleState::geo_keywords() const {
return this->m_geo_keywords;
}
bool ScheduleState::operator==(const ScheduleState& other) const {
return this->m_start_time == other.m_start_time &&
this->m_oilvap == other.m_oilvap &&
this->m_tuning == other.m_tuning &&
this->m_end_time == other.m_end_time &&
this->m_events == other.m_events &&
this->m_wellgroup_events == other.m_wellgroup_events &&
this->m_geo_keywords == other.m_geo_keywords &&
this->m_nupcol == other.m_nupcol;
}
@@ -97,6 +116,7 @@ ScheduleState ScheduleState::serializeObject() {
auto t1 = std::chrono::system_clock::now();
auto t2 = t1 + std::chrono::hours(48);
ScheduleState ts(t1, t2);
ts.events( Events::serializeObject() );
ts.nupcol(77);
ts.oilvap( Opm::OilVaporizationProperties::serializeObject() );
return ts;
@@ -114,4 +134,28 @@ Tuning& ScheduleState::tuning() {
return this->m_tuning;
}
void ScheduleState::events(Events events) {
this->m_events = events;
}
Events& ScheduleState::events() {
return this->m_events;
}
const Events& ScheduleState::events() const {
return this->m_events;
}
void ScheduleState::wellgroup_events(WellGroupEvents wgevents) {
this->m_wellgroup_events = std::move(wgevents);
}
WellGroupEvents& ScheduleState::wellgroup_events() {
return this->m_wellgroup_events;
}
const WellGroupEvents& ScheduleState::wellgroup_events() const {
return this->m_wellgroup_events;
}
}

View File

@@ -25,53 +25,22 @@
#include <boost/test/unit_test.hpp>
#include <boost/date_time/posix_time/posix_time.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/TimeMap.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Events.hpp>
#include <opm/common/utility/TimeService.hpp>
BOOST_AUTO_TEST_CASE(CreateEmpty) {
std::vector<std::time_t> tp = { Opm::asTimeT(Opm::TimeStampUTC(2010,1,1)) };
Opm::Events events;
for (int i = 0; i < 11; i++)
tp.push_back( Opm::asTimeT(Opm::TimeStampUTC(2010,1,i+2)));
BOOST_CHECK_EQUAL( false , events.hasEvent(Opm::ScheduleEvents::NEW_WELL));
Opm::TimeMap timeMap(tp);
Opm::Events events( timeMap );
Opm::DynamicVector<double> vector(timeMap , 9.99);
events.addEvent( Opm::ScheduleEvents::NEW_WELL);
BOOST_CHECK_EQUAL( true , events.hasEvent(Opm::ScheduleEvents::NEW_WELL));
BOOST_CHECK_EQUAL( false , events.hasEvent(Opm::ScheduleEvents::NEW_WELL , 10));
events.addEvent( Opm::ScheduleEvents::NEW_WELL , 0 );
BOOST_CHECK_EQUAL( true , events.hasEvent(Opm::ScheduleEvents::NEW_WELL , 0));
events.addEvent( Opm::ScheduleEvents::NEW_WELL , 10 );
BOOST_CHECK_EQUAL( false , events.hasEvent(Opm::ScheduleEvents::NEW_WELL , 9));
BOOST_CHECK_EQUAL( true , events.hasEvent(Opm::ScheduleEvents::NEW_WELL , 10));
BOOST_CHECK_EQUAL( false , events.hasEvent(Opm::ScheduleEvents::NEW_WELL , 11));
events.addEvent( Opm::ScheduleEvents::NEW_WELL , 10 );
BOOST_CHECK_EQUAL( false , events.hasEvent(Opm::ScheduleEvents::NEW_WELL , 9));
BOOST_CHECK_EQUAL( true , events.hasEvent(Opm::ScheduleEvents::NEW_WELL , 10));
BOOST_CHECK_EQUAL( false , events.hasEvent(Opm::ScheduleEvents::NEW_WELL , 11));
events.addEvent( Opm::ScheduleEvents::WELL_STATUS_CHANGE , 9 );
events.addEvent( Opm::ScheduleEvents::WELL_STATUS_CHANGE , 10 );
BOOST_CHECK_EQUAL( false , events.hasEvent(Opm::ScheduleEvents::NEW_WELL , 9));
BOOST_CHECK_EQUAL( true , events.hasEvent(Opm::ScheduleEvents::NEW_WELL , 10));
BOOST_CHECK_EQUAL( false , events.hasEvent(Opm::ScheduleEvents::NEW_WELL , 11));
BOOST_CHECK_EQUAL( true , events.hasEvent(Opm::ScheduleEvents::WELL_STATUS_CHANGE , 9));
BOOST_CHECK_EQUAL( true , events.hasEvent(Opm::ScheduleEvents::WELL_STATUS_CHANGE , 10));
events.addEvent( Opm::ScheduleEvents::WELL_STATUS_CHANGE);
BOOST_CHECK_EQUAL( true , events.hasEvent(Opm::ScheduleEvents::NEW_WELL));
BOOST_CHECK_EQUAL( true , events.hasEvent(Opm::ScheduleEvents::WELL_STATUS_CHANGE));
BOOST_CHECK_EQUAL( true , events.hasEvent(Opm::ScheduleEvents::WELL_STATUS_CHANGE | Opm::ScheduleEvents::NEW_WELL));
}
BOOST_AUTO_TEST_CASE(TestMultiple) {
    // The Events container is now per-report-step state (managed by the
    // ScheduleState machinery), so it is default constructed and no longer
    // needs a TimeMap or a report-step argument.  The obsolete
    // DynamicVector<double> local was unused and has been dropped.
    Opm::Events events;

    // Register a single event ...
    events.addEvent( Opm::ScheduleEvents::NEW_WELL );

    // ... and verify that a combined mask query matches as long as at
    // least one of the masked bits is set.
    BOOST_CHECK( events.hasEvent( Opm::ScheduleEvents::NEW_WELL | Opm::ScheduleEvents::NEW_GROUP ));
}

View File

@@ -45,33 +45,40 @@ using namespace Opm;
BOOST_AUTO_TEST_CASE( CheckUnsoppertedInSCHEDULE ) {
const char * deckString =
"START\n"
" 10 'JAN' 2000 /\n"
"RUNSPEC\n"
"DIMENS\n"
" 10 10 10 / \n"
"GRID\n"
"DX\n"
"1000*0.25 /\n"
"DY\n"
"1000*0.25 /\n"
"DZ\n"
"1000*0.25 /\n"
"TOPS\n"
"100*0.25 /\n"
"SCHEDULE\n"
"TSTEP -- 1,2\n"
" 10 10/\n"
"MULTFLT\n"
" 'F1' 100 /\n"
"/\n"
"MULTFLT\n"
" 'F2' 77 /\n"
"/\n"
"TSTEP -- 3,4\n"
" 10 10/\n"
"\n";
const std::string deckString = R"(
START
10 'JAN' 2000 /
RUNSPEC
DIMENS
10 10 10 /
GRID
DX
1000*0.25 /
DY
1000*0.25 /
DZ
1000*0.25 /
TOPS
100*0.25 /
SCHEDULE
TSTEP -- 1,2
10 10/
MULTFLT
'F1' 100 /
/
MULTFLT
'F2' 77 /
/
TSTEP -- 3,4
10 10/
)";
auto python = std::make_shared<Python>();
Parser parser(true);
@@ -86,27 +93,27 @@ BOOST_AUTO_TEST_CASE( CheckUnsoppertedInSCHEDULE ) {
{
Runspec runspec ( deck );
Schedule schedule( deck, grid , fp, runspec , parseContext, errors, python);
auto events = schedule.getEvents( );
BOOST_CHECK_EQUAL( false , events.hasEvent( ScheduleEvents::GEO_MODIFIER , 1 ));
BOOST_CHECK_EQUAL( true , events.hasEvent( ScheduleEvents::GEO_MODIFIER , 2 ));
BOOST_CHECK_EQUAL( false , events.hasEvent( ScheduleEvents::GEO_MODIFIER , 3 ));
BOOST_CHECK_EQUAL( false , schedule[1].events().hasEvent( ScheduleEvents::GEO_MODIFIER ));
BOOST_CHECK_EQUAL( true , schedule[2].events().hasEvent( ScheduleEvents::GEO_MODIFIER ));
BOOST_CHECK_EQUAL( false , schedule[3].events().hasEvent( ScheduleEvents::GEO_MODIFIER ));
BOOST_CHECK_EQUAL( 0U, schedule.getModifierDeck(1).size() );
BOOST_CHECK_EQUAL( 0U, schedule.getModifierDeck(3).size() );
BOOST_CHECK_EQUAL( 0U, schedule[1].geo_keywords().size() );
BOOST_CHECK_EQUAL( 0U, schedule[3].geo_keywords().size() );
const Deck& multflt_deck = schedule.getModifierDeck(2);
const auto& multflt_deck = schedule[2].geo_keywords();
BOOST_CHECK_EQUAL( 2U , multflt_deck.size());
BOOST_CHECK( multflt_deck.hasKeyword<ParserKeywords::MULTFLT>() );
BOOST_CHECK_EQUAL( multflt_deck[0].name(), "MULTFLT");
BOOST_CHECK_EQUAL( multflt_deck[1].name(), "MULTFLT");
const auto& multflt1 = multflt_deck.getKeyword(0);
const auto& multflt1 = multflt_deck[0];
BOOST_CHECK_EQUAL( 1U , multflt1.size( ) );
const auto& record0 = multflt1.getRecord( 0 );
BOOST_CHECK_EQUAL( 100.0 , record0.getItem<ParserKeywords::MULTFLT::factor>().get< double >(0));
BOOST_CHECK_EQUAL( "F1" , record0.getItem<ParserKeywords::MULTFLT::fault>().get< std::string >(0));
const auto& multflt2 = multflt_deck.getKeyword(1);
const auto& multflt2 = multflt_deck[1];
BOOST_CHECK_EQUAL( 1U , multflt2.size( ) );
const auto& record1 = multflt2.getRecord( 0 );

View File

@@ -820,11 +820,11 @@ DATES -- 6
const auto& well_5 = schedule.getWell("OP_1", 5);
// timestep 3. Close all completions with WELOPEN and immediately open new completions with COMPDAT.
BOOST_CHECK(Well::Status::OPEN == well_3.getStatus());
BOOST_CHECK( !schedule.hasWellGroupEvent( "OP_1", ScheduleEvents::WELL_STATUS_CHANGE , 3 ));
BOOST_CHECK( !schedule[3].wellgroup_events().hasEvent("OP_1", ScheduleEvents::WELL_STATUS_CHANGE));
// timestep 4. Close all completions with WELOPEN. The well will be shut since no completions
// are open.
BOOST_CHECK(Well::Status::SHUT == well_4.getStatus());
BOOST_CHECK( schedule.hasWellGroupEvent( "OP_1", ScheduleEvents::WELL_STATUS_CHANGE , 4 ));
BOOST_CHECK( schedule[4].wellgroup_events().hasEvent("OP_1", ScheduleEvents::WELL_STATUS_CHANGE));
// timestep 5. Open new completions. But keep the well shut,
BOOST_CHECK(Well::Status::SHUT == well_5.getStatus());
}
@@ -1269,8 +1269,8 @@ BOOST_AUTO_TEST_CASE(createDeckModifyMultipleGCONPROD) {
auto gh = schedule.getGroup("H1", 1);
BOOST_CHECK( !schedule.hasWellGroupEvent( "G2", ScheduleEvents::GROUP_PRODUCTION_UPDATE , 1 ));
BOOST_CHECK( schedule.hasWellGroupEvent( "G2", ScheduleEvents::GROUP_PRODUCTION_UPDATE , 2 ));
BOOST_CHECK( !schedule[1].wellgroup_events().hasEvent( "G2", ScheduleEvents::GROUP_PRODUCTION_UPDATE));
BOOST_CHECK( schedule[2].wellgroup_events().hasEvent( "G2", ScheduleEvents::GROUP_PRODUCTION_UPDATE));
}
@@ -3001,9 +3001,9 @@ VFPINJ
const auto& schedule = make_schedule(input);
BOOST_CHECK( schedule.getEvents().hasEvent(ScheduleEvents::VFPINJ_UPDATE, 0));
BOOST_CHECK( !schedule.getEvents().hasEvent(ScheduleEvents::VFPINJ_UPDATE, 1));
BOOST_CHECK( schedule.getEvents().hasEvent(ScheduleEvents::VFPINJ_UPDATE, 2));
BOOST_CHECK( schedule[0].events().hasEvent(ScheduleEvents::VFPINJ_UPDATE));
BOOST_CHECK( !schedule[1].events().hasEvent(ScheduleEvents::VFPINJ_UPDATE));
BOOST_CHECK( schedule[2].events().hasEvent(ScheduleEvents::VFPINJ_UPDATE));
// No such table id
BOOST_CHECK_THROW(schedule.getVFPInjTable(77,0), std::invalid_argument);
@@ -3989,16 +3989,16 @@ END
BOOST_REQUIRE_EQUAL(sched.getTimeMap().numTimesteps(), std::size_t{5});
BOOST_REQUIRE_EQUAL(sched.getTimeMap().last(), std::size_t{5});
BOOST_REQUIRE_MESSAGE(sched.hasWellGroupEvent("P", ScheduleEvents::Events::WELL_PRODUCTIVITY_INDEX, 1),
BOOST_REQUIRE_MESSAGE(sched[1].wellgroup_events().hasEvent("P", ScheduleEvents::Events::WELL_PRODUCTIVITY_INDEX),
R"(Schedule must have WELL_PRODUCTIVITY_INDEX Event for well "P" at report step 1)");
BOOST_REQUIRE_MESSAGE(sched.hasWellGroupEvent("P", ScheduleEvents::Events::WELL_PRODUCTIVITY_INDEX, 3),
BOOST_REQUIRE_MESSAGE(sched[3].wellgroup_events().hasEvent("P", ScheduleEvents::Events::WELL_PRODUCTIVITY_INDEX),
R"(Schedule must have WELL_PRODUCTIVITY_INDEX Event for well "P" at report step 3)");
BOOST_REQUIRE_MESSAGE(sched.getEvents().hasEvent(ScheduleEvents::Events::WELL_PRODUCTIVITY_INDEX, 1),
BOOST_REQUIRE_MESSAGE(sched[1].events().hasEvent(ScheduleEvents::Events::WELL_PRODUCTIVITY_INDEX),
"Schedule must have WELL_PRODUCTIVITY_INDEX Event at report step 1");
BOOST_REQUIRE_MESSAGE(sched.getEvents().hasEvent(ScheduleEvents::Events::WELL_PRODUCTIVITY_INDEX, 3),
BOOST_REQUIRE_MESSAGE(sched[3].events().hasEvent(ScheduleEvents::Events::WELL_PRODUCTIVITY_INDEX),
"Schedule must have WELL_PRODUCTIVITY_INDEX Event at report step 3");
auto getScalingFactor = [&sched](const std::size_t report_step, const double wellPI) -> double

View File

@@ -80,7 +80,6 @@ BOOST_AUTO_TEST_CASE(TuningTest) {
FieldPropsManager fp(deck, Phases{true, true, true}, grid, table);
Runspec runspec (deck);
Schedule schedule( deck, grid , fp, runspec, python);
auto event = schedule.getEvents();
const double diff = 1.0e-14;
@@ -89,7 +88,8 @@ BOOST_AUTO_TEST_CASE(TuningTest) {
{
size_t timestep = 4;
BOOST_CHECK(!event.hasEvent(ScheduleEvents::TUNING_CHANGE, timestep));
const auto& event = schedule[timestep].events();
BOOST_CHECK(!event.hasEvent(ScheduleEvents::TUNING_CHANGE));
const auto& tuning = schedule[4].tuning();
double TSINIT_default = tuning.TSINIT;
@@ -208,9 +208,10 @@ BOOST_AUTO_TEST_CASE(TuningTest) {
/********* Record 1 ***********/
{
std::size_t timeStep = 5;
const auto& event = schedule[timeStep].events();
const auto& tuning = schedule[timeStep].tuning();
BOOST_CHECK(event.hasEvent(ScheduleEvents::TUNING_CHANGE, timeStep));
BOOST_CHECK(event.hasEvent(ScheduleEvents::TUNING_CHANGE));
double TSINIT = tuning.TSINIT;
BOOST_CHECK_CLOSE(TSINIT, 2 * Metric::Time, diff);
@@ -316,7 +317,8 @@ BOOST_AUTO_TEST_CASE(TuningTest) {
/*** TIMESTEP 7 ***/
{
std::size_t timestep = 7;
BOOST_CHECK(!event.hasEvent(ScheduleEvents::TUNING_CHANGE, timestep));
const auto& event = schedule[timestep].events();
BOOST_CHECK(!event.hasEvent(ScheduleEvents::TUNING_CHANGE));
}
/*** TIMESTEP 10 ***/
@@ -324,7 +326,8 @@ BOOST_AUTO_TEST_CASE(TuningTest) {
/********* Record 1 ***********/
std::size_t timestep = 10;
const auto& tuning = schedule[10].tuning();
BOOST_CHECK(event.hasEvent(ScheduleEvents::TUNING_CHANGE, timestep));
const auto& event = schedule[timestep].events();
BOOST_CHECK(event.hasEvent(ScheduleEvents::TUNING_CHANGE));
BOOST_CHECK_EQUAL(true, tuning.TMAXWC_has_value);
BOOST_CHECK_CLOSE(tuning.TMAXWC, 10.0 * Metric::Time, diff);

View File

@@ -367,6 +367,7 @@ BOOST_AUTO_TEST_CASE(GroupTreeTest_GRUPTREE_WITH_REPARENT_correct_tree) {
BOOST_CHECK_EQUAL(field_group.groups().size(), 2);
BOOST_CHECK( field_group.hasGroup("GROUP_NEW"));
BOOST_CHECK( field_group.hasGroup("GROUP_BJARNE"));
BOOST_CHECK_EQUAL( new_group.control_group().value_or("ERROR"), "FIELD");
BOOST_CHECK_EQUAL( new_group.flow_group().value_or("ERROR"), "FIELD");
BOOST_CHECK( new_group.hasGroup("GROUP_NILS"));
@@ -860,27 +861,26 @@ BOOST_AUTO_TEST_CASE(TestEvents) {
Runspec runspec (deck);
auto python = std::make_shared<Python>();
Schedule sched(deck , grid , fp, runspec, python);
const Events& events = sched.getEvents();
BOOST_CHECK( events.hasEvent(ScheduleEvents::NEW_WELL , 0 ) );
BOOST_CHECK( !events.hasEvent(ScheduleEvents::NEW_WELL , 1 ) );
BOOST_CHECK( events.hasEvent(ScheduleEvents::NEW_WELL , 2 ) );
BOOST_CHECK( !events.hasEvent(ScheduleEvents::NEW_WELL , 3 ) );
BOOST_CHECK( sched[0].events().hasEvent(ScheduleEvents::NEW_WELL) );
BOOST_CHECK( !sched[1].events().hasEvent(ScheduleEvents::NEW_WELL) );
BOOST_CHECK( sched[2].events().hasEvent(ScheduleEvents::NEW_WELL) );
BOOST_CHECK( !sched[3].events().hasEvent(ScheduleEvents::NEW_WELL) );
BOOST_CHECK( events.hasEvent(ScheduleEvents::COMPLETION_CHANGE , 0 ) );
BOOST_CHECK( !events.hasEvent(ScheduleEvents::COMPLETION_CHANGE , 1) );
BOOST_CHECK( events.hasEvent(ScheduleEvents::COMPLETION_CHANGE , 5 ) );
BOOST_CHECK( sched[0].events().hasEvent(ScheduleEvents::COMPLETION_CHANGE) );
BOOST_CHECK( !sched[1].events().hasEvent(ScheduleEvents::COMPLETION_CHANGE) );
BOOST_CHECK( sched[5].events().hasEvent(ScheduleEvents::COMPLETION_CHANGE) );
BOOST_CHECK( events.hasEvent(ScheduleEvents::WELL_STATUS_CHANGE , 1 ));
BOOST_CHECK( !events.hasEvent(ScheduleEvents::WELL_STATUS_CHANGE , 2 ));
BOOST_CHECK( events.hasEvent(ScheduleEvents::WELL_STATUS_CHANGE , 3 ));
BOOST_CHECK( events.hasEvent(ScheduleEvents::COMPLETION_CHANGE , 5) );
BOOST_CHECK( sched[1].events().hasEvent(ScheduleEvents::WELL_STATUS_CHANGE));
BOOST_CHECK( !sched[2].events().hasEvent(ScheduleEvents::WELL_STATUS_CHANGE));
BOOST_CHECK( sched[3].events().hasEvent(ScheduleEvents::WELL_STATUS_CHANGE));
BOOST_CHECK( sched[5].events().hasEvent(ScheduleEvents::COMPLETION_CHANGE) );
BOOST_CHECK( events.hasEvent(ScheduleEvents::GROUP_CHANGE , 0 ));
BOOST_CHECK( !events.hasEvent(ScheduleEvents::GROUP_CHANGE , 1 ));
BOOST_CHECK( events.hasEvent(ScheduleEvents::GROUP_CHANGE , 3 ) );
BOOST_CHECK( !events.hasEvent(ScheduleEvents::NEW_GROUP , 2 ) );
BOOST_CHECK( events.hasEvent(ScheduleEvents::NEW_GROUP , 3 ) );
BOOST_CHECK( sched[0].events().hasEvent(ScheduleEvents::GROUP_CHANGE));
BOOST_CHECK( !sched[1].events().hasEvent(ScheduleEvents::GROUP_CHANGE));
BOOST_CHECK( sched[3].events().hasEvent(ScheduleEvents::GROUP_CHANGE));
BOOST_CHECK( !sched[2].events().hasEvent(ScheduleEvents::NEW_GROUP));
BOOST_CHECK( sched[3].events().hasEvent(ScheduleEvents::NEW_GROUP));
}
@@ -896,17 +896,17 @@ BOOST_AUTO_TEST_CASE(TestWellEvents) {
auto python = std::make_shared<Python>();
Schedule sched(deck , grid , fp, runspec, python);
BOOST_CHECK( sched.hasWellGroupEvent( "W_1", ScheduleEvents::NEW_WELL , 0 ));
BOOST_CHECK( sched.hasWellGroupEvent( "W_2", ScheduleEvents::NEW_WELL , 2 ));
BOOST_CHECK( !sched.hasWellGroupEvent( "W_2", ScheduleEvents::NEW_WELL , 3 ));
BOOST_CHECK( sched.hasWellGroupEvent( "W_2", ScheduleEvents::WELL_WELSPECS_UPDATE , 3 ));
BOOST_CHECK( sched[0].wellgroup_events().hasEvent( "W_1", ScheduleEvents::NEW_WELL));
BOOST_CHECK( sched[2].wellgroup_events().hasEvent( "W_2", ScheduleEvents::NEW_WELL));
BOOST_CHECK( !sched[3].wellgroup_events().hasEvent( "W_2", ScheduleEvents::NEW_WELL));
BOOST_CHECK( sched[3].wellgroup_events().hasEvent( "W_2", ScheduleEvents::WELL_WELSPECS_UPDATE));
BOOST_CHECK( sched.hasWellGroupEvent( "W_1", ScheduleEvents::WELL_STATUS_CHANGE , 0 ));
BOOST_CHECK( sched.hasWellGroupEvent( "W_1", ScheduleEvents::WELL_STATUS_CHANGE , 1 ));
BOOST_CHECK( sched.hasWellGroupEvent( "W_1", ScheduleEvents::WELL_STATUS_CHANGE , 3 ));
BOOST_CHECK( sched.hasWellGroupEvent( "W_1", ScheduleEvents::WELL_STATUS_CHANGE , 4 ));
BOOST_CHECK( !sched.hasWellGroupEvent( "W_1", ScheduleEvents::WELL_STATUS_CHANGE , 5 ));
BOOST_CHECK( sched[0].wellgroup_events().hasEvent( "W_1", ScheduleEvents::WELL_STATUS_CHANGE));
BOOST_CHECK( sched[1].wellgroup_events().hasEvent( "W_1", ScheduleEvents::WELL_STATUS_CHANGE));
BOOST_CHECK( sched[3].wellgroup_events().hasEvent( "W_1", ScheduleEvents::WELL_STATUS_CHANGE));
BOOST_CHECK( sched[4].wellgroup_events().hasEvent( "W_1", ScheduleEvents::WELL_STATUS_CHANGE));
BOOST_CHECK(!sched[5].wellgroup_events().hasEvent( "W_1", ScheduleEvents::WELL_STATUS_CHANGE));
BOOST_CHECK( sched.hasWellGroupEvent( "W_1", ScheduleEvents::COMPLETION_CHANGE , 0 ));
BOOST_CHECK( sched.hasWellGroupEvent( "W_1", ScheduleEvents::COMPLETION_CHANGE , 5 ));
BOOST_CHECK( sched[0].wellgroup_events().hasEvent( "W_1", ScheduleEvents::COMPLETION_CHANGE));
BOOST_CHECK( sched[5].wellgroup_events().hasEvent( "W_1", ScheduleEvents::COMPLETION_CHANGE));
}

View File

@@ -41,14 +41,13 @@ BOOST_AUTO_TEST_CASE(MULTFLT_IN_SCHEDULE) {
EclipseState state(deck);
const auto& trans = state.getTransMult();
Schedule schedule(deck, state, python);
const Events& events = schedule.getEvents();
BOOST_CHECK_EQUAL( 0.10 , trans.getMultiplier( 3,2,0,FaceDir::XPlus ));
BOOST_CHECK_EQUAL( 0.10 , trans.getMultiplier( 2,2,0,FaceDir::XPlus ));
BOOST_CHECK( events.hasEvent( ScheduleEvents::GEO_MODIFIER , 3 ) );
BOOST_CHECK( schedule[3].events().hasEvent( ScheduleEvents::GEO_MODIFIER) );
{
const auto& mini_deck = schedule.getModifierDeck(3);
state.applyModifierDeck( mini_deck );
const auto& keywords = schedule[3].geo_keywords();
state.apply_geo_keywords( keywords );
}
BOOST_CHECK_EQUAL( 2.00 , trans.getMultiplier( 2,2,0,FaceDir::XPlus ));
BOOST_CHECK_EQUAL( 0.10 , trans.getMultiplier( 3,2,0,FaceDir::XPlus ));