mirror of https://github.com/OPM/opm-simulators.git

Merge branch 'master' into new_numerical_aquifer
@@ -518,6 +518,13 @@ add_test_compareECLFiles(CASENAME udq_in_actionx
                          REL_TOL ${rel_tol}
                          DIR udq_actionx)
 
+add_test_compareECLFiles(CASENAME co2store
+                         FILENAME CO2STORE
+                         SIMULATOR flow
+                         ABS_TOL ${abs_tol}
+                         REL_TOL ${rel_tol}
+                         DIR co2store)
+
 if (opm-common_EMBEDDED_PYTHON)
   add_test_compareECLFiles(CASENAME udq_pyaction
                            FILENAME PYACTION_WCONPROD

@@ -49,6 +49,7 @@
 #include <opm/parser/eclipse/EclipseState/Schedule/Action/State.hpp>
 #include <opm/parser/eclipse/EclipseState/Schedule/UDQ/UDQState.hpp>
 #include <opm/parser/eclipse/EclipseState/Schedule/UDQ/UDQConfig.hpp>
+#include <opm/common/utility/TimeService.hpp>
 
 #include <opm/simulators/utils/readDeck.hpp>

@@ -449,7 +450,7 @@ public:
                        std::move(parseContext_), /* initFromRestart = */ false,
                        /* checkDeck = */ enableExperiments);
 
-        this->summaryState_ = std::make_unique<Opm::SummaryState>( std::chrono::system_clock::from_time_t(this->eclSchedule_->getStartTime() ));
+        this->summaryState_ = std::make_unique<Opm::SummaryState>( Opm::TimeService::from_time_t(this->eclSchedule_->getStartTime() ));
         this->udqState_ = std::make_unique<Opm::UDQState>( this->eclSchedule_->getUDQConfig(0).params().undefinedValue() );
         this->actionState_ = std::make_unique<Opm::Action::State>() ;

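The hunk above is one instance of a merge-wide migration from std::chrono::system_clock::time_point to Opm::time_point, with all conversions routed through Opm::TimeService. A minimal sketch of the round trip, using only the from_time_t/to_time_t entry points that the hunks in this diff actually call (the sample value and main are illustrative only):

    #include <opm/common/utility/TimeService.hpp>
    #include <ctime>

    int main()
    {
        const std::time_t start = 1583020800; // hypothetical start time
        const Opm::time_point tp = Opm::TimeService::from_time_t(start);
        const std::time_t back  = Opm::TimeService::to_time_t(tp);
        return back == start ? 0 : 1;         // the round trip preserves the value
    }
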
@@ -340,14 +340,22 @@ public:
 protected:
     void createGrids_()
     {
+        const EclipseGrid * input_grid = nullptr;
+        std::vector<double> porv;
+        // At this stage the ParallelEclipseState instance is still in global
+        // view; on rank 0 we have undistributed data for the entire grid, on
+        // the other ranks the EclipseState is empty.
+        if (mpiRank == 0) {
+            input_grid = &this->eclState().getInputGrid();
+            porv = this->eclState().fieldProps().porv(true);
+        }
+
         grid_.reset(new Dune::CpGrid());
-        grid_->processEclipseFormat(mpiRank == 0 ? &this->eclState().getInputGrid()
-                                                 : nullptr,
+        grid_->processEclipseFormat(input_grid,
                                     /*isPeriodic=*/false,
                                     /*flipNormals=*/false,
                                     /*clipZ=*/false,
-                                    mpiRank == 0 ? this->eclState().fieldProps().porv(true)
-                                                 : std::vector<double>(),
+                                    porv,
                                     this->eclState().getInputNNC());
 
         // we use separate grid objects: one for the calculation of the initial condition

@@ -99,6 +99,7 @@
 #include <opm/parser/eclipse/EclipseState/Tables/RockwnodTable.hpp>
 #include <opm/parser/eclipse/EclipseState/Tables/OverburdTable.hpp>
 #include <opm/parser/eclipse/EclipseState/Tables/RocktabTable.hpp>
+#include <opm/common/utility/TimeService.hpp>
 #include <opm/material/common/ConditionalStorage.hpp>
 
 #include <dune/common/version.hh>

@@ -858,11 +859,10 @@ public:
         auto& simulator = this->simulator();
         const auto& eclState = simulator.vanguard().eclState();
         const auto& schedule = simulator.vanguard().schedule();
-        const auto& timeMap = schedule.getTimeMap();
 
         // Set the start time of the simulation
-        simulator.setStartTime(timeMap.getStartTime(/*reportStepIdx=*/0));
-        simulator.setEndTime(timeMap.getTotalTime());
+        simulator.setStartTime(schedule.getStartTime());
+        simulator.setEndTime(schedule.simTime(schedule.size() - 1));
 
         // We want the episode index to be the same as the report step index to make
         // things simpler, so we have to set the episode index to -1 because it is

@@ -968,7 +968,7 @@ public:
         // to the first "real" episode/report step
         // for restart the episode index and start is already set
         if (!initconfig.restartRequested()) {
-            simulator.startNextEpisode(timeMap.getTimeStepLength(0));
+            simulator.startNextEpisode(schedule.seconds(0));
             simulator.setEpisodeIndex(0);
         }
     }

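These two hunks, and several below, retire the Opm::TimeMap accessors in favour of Opm::Schedule. The replacements all follow one mapping, collected here in a hypothetical helper (note that schedule.size() counts schedule states, so the number of report steps is schedule.size() - 1):

    #include <opm/parser/eclipse/EclipseState/Schedule/Schedule.hpp>
    #include <cstddef>

    void timeMapToSchedule(const Opm::Schedule& schedule, std::size_t step)
    {
        auto start    = schedule.getStartTime();               // was timeMap.getStartTime(0)
        auto when     = schedule.simTime(step);                // was timeMap.getStartTime(step)
        auto elapsed  = schedule.seconds(step);                // was timeMap.getTimePassedUntil(step)
        auto length   = schedule.stepLength(step);             // was timeMap.getTimeStepLength(step)
        auto total    = schedule.seconds(schedule.size() - 1); // was timeMap.getTotalTime()
        auto numSteps = schedule.size() - 1;                   // was timeMap.numTimesteps()
        (void)start; (void)when; (void)elapsed; (void)length; (void)total; (void)numSteps;
    }
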
@@ -1027,7 +1027,6 @@ public:
         auto& eclState = simulator.vanguard().eclState();
         const auto& schedule = simulator.vanguard().schedule();
         const auto& events = schedule[episodeIdx].events();
-        const auto& timeMap = schedule.getTimeMap();
 
         if (episodeIdx >= 0 && events.hasEvent(Opm::ScheduleEvents::GEO_MODIFIER)) {
             // bring the contents of the keywords to the current state of the SCHEDULE

@@ -1052,12 +1051,12 @@ public:
         std::ostringstream ss;
         boost::posix_time::time_facet* facet = new boost::posix_time::time_facet("%d-%b-%Y");
         boost::posix_time::ptime curDateTime =
-            boost::posix_time::from_time_t(timeMap.getStartTime(episodeIdx));
+            boost::posix_time::from_time_t(schedule.simTime(episodeIdx));
         ss.imbue(std::locale(std::locale::classic(), facet));
         ss << "Report step " << episodeIdx + 1
-           << "/" << timeMap.numTimesteps()
-           << " at day " << timeMap.getTimePassedUntil(episodeIdx)/(24*3600)
-           << "/" << timeMap.getTotalTime()/(24*3600)
+           << "/" << schedule.size() - 1
+           << " at day " << schedule.seconds(episodeIdx)/(24*3600)
+           << "/" << schedule.seconds(schedule.size() - 1)/(24*3600)
            << ", date = " << curDateTime.date()
            << "\n ";
         OpmLog::info(ss.str());

@@ -1241,18 +1240,17 @@ public:
     {
         auto& simulator = this->simulator();
         auto& schedule = simulator.vanguard().schedule();
-        const auto& timeMap = schedule.getTimeMap();
 
         int episodeIdx = simulator.episodeIndex();
 
         // check if we're finished ...
-        if (episodeIdx + 1 >= static_cast<int>(timeMap.numTimesteps())) {
+        if (episodeIdx + 1 >= static_cast<int>(schedule.size() - 1)) {
             simulator.setFinished(true);
             return;
         }
 
         // .. if we're not yet done, start the next episode (report step)
-        simulator.startNextEpisode(timeMap.getTimeStepLength(episodeIdx + 1));
+        simulator.startNextEpisode(schedule.stepLength(episodeIdx + 1));
     }
 
     /*!

@@ -1381,7 +1379,7 @@ public:
 
         const auto& wellpi = this->fetchWellPI(reportStep, *action, schedule, matching_wells);
 
-        schedule.applyAction(reportStep, std::chrono::system_clock::from_time_t(simTime), *action, actionResult, wellpi);
+        schedule.applyAction(reportStep, Opm::TimeService::from_time_t(simTime), *action, actionResult, wellpi);
         actionState.add_run(*action, simTime);
 
         for ( const auto& [wname, _] : wellpi) {

@@ -2669,11 +2667,10 @@ private:
         referencePorosity_[/*timeIdx=*/0].resize(numDof);
 
         const auto& fp = eclState.fieldProps();
-        const std::vector<double> porvData = fp.porv(true);
+        const std::vector<double> porvData = fp.porv(false);
         const std::vector<int> actnumData = fp.actnum();
         for (size_t dofIdx = 0; dofIdx < numDof; ++ dofIdx) {
-            unsigned cartElemIdx = vanguard.cartesianIndex(dofIdx);
-            Scalar poreVolume = porvData[cartElemIdx];
+            Scalar poreVolume = porvData[dofIdx];
 
             // we define the porosity as the accumulated pore volume divided by the
             // geometric volume of the element. Note that -- in pathetic cases -- it can

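The switch from fp.porv(true) to fp.porv(false) changes the indexing convention along with the data: the global variant covers every cell of the full Cartesian grid and is indexed by Cartesian cell index, while the local variant covers active cells only, which is why the cartesianIndex() lookup can be dropped and porvData indexed directly by dofIdx.
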
@@ -2743,16 +2740,15 @@ private:
         auto& simulator = this->simulator();
         const auto& schedule = simulator.vanguard().schedule();
         const auto& eclState = simulator.vanguard().eclState();
-        const auto& timeMap = schedule.getTimeMap();
         const auto& initconfig = eclState.getInitConfig();
         {
             int restart_step = initconfig.getRestartStep();
 
-            simulator.setStartTime(timeMap.getStartTime(/*timeStepIdx=*/0));
-            simulator.setTime(timeMap.getTimePassedUntil(restart_step));
+            simulator.setStartTime(schedule.simTime(0));
+            simulator.setTime(schedule.seconds(restart_step));
 
             simulator.startNextEpisode(simulator.startTime() + simulator.time(),
-                                       timeMap.getTimeStepLength(restart_step));
+                                       schedule.stepLength(restart_step));
             simulator.setEpisodeIndex(restart_step);
         }
         eclWriter_->beginRestart();

@@ -605,7 +605,7 @@ public:
         if (wellTopologyChanged_(eclState, reportStepIdx))
             return true;
 
-        if (schedule.getTimeMap().numTimesteps() <= (unsigned) reportStepIdx)
+        if ((schedule.size() - 1) <= (unsigned) reportStepIdx)
             // for the "until the universe dies" episode, the wells don't change
             return false;

@@ -660,7 +660,7 @@ protected:
             return true;
         }
 
-        if (schedule.getTimeMap().numTimesteps() <= (unsigned) reportStepIdx)
+        if ((schedule.size() - 1) <= (unsigned) reportStepIdx)
             // for the "until the universe dies" episode, the wells don't change
             return false;

@@ -28,6 +28,7 @@
 #include <opm/simulators/wells/VFPProperties.hpp>
 #include <opm/simulators/wells/VFPInjProperties.hpp>
 #include <opm/simulators/wells/VFPProdProperties.hpp>
+#include <opm/common/utility/TimeService.hpp>
 
 #include <iostream>
 #include <iomanip>

@@ -52,7 +53,7 @@ struct Setup
         const Runspec runspec(deck);
         python = std::make_shared<Python>();
         schedule.reset( new Schedule(deck, *ecl_state, python));
-        summary_state.reset( new SummaryState(std::chrono::system_clock::from_time_t(schedule->getStartTime())));
+        summary_state.reset( new SummaryState(TimeService::from_time_t(schedule->getStartTime())));
     }
     const int step = 0;
     const auto& sched_state = schedule->operator[](step);

@@ -608,13 +608,12 @@ namespace Opm
     {
 
         const auto& schedule = this->schedule();
-        const auto& timeMap = schedule.getTimeMap();
         auto& ioConfig = eclState().getIOConfig();
         simtimer_ = std::make_unique<SimulatorTimer>();
 
         // initialize variables
         const auto& initConfig = eclState().getInitConfig();
-        simtimer_->init(timeMap, (size_t)initConfig.getRestartStep());
+        simtimer_->init(schedule, (size_t)initConfig.getRestartStep());
 
         if (this->output_cout_) {
             std::ostringstream oss;

@@ -231,7 +231,7 @@ public:
 
         ebosSimulator_.startNextEpisode(
             ebosSimulator_.startTime()
-            + schedule().getTimeMap().getTimePassedUntil(timer.currentStepNum()),
+            + schedule().seconds(timer.currentStepNum()),
             timer.currentStepLength());
         ebosSimulator_.setEpisodeIndex(timer.currentStepNum());
         solver->model().beginReportStep();

@@ -22,6 +22,8 @@
 #include <boost/version.hpp>
 #include <boost/property_tree/json_parser.hpp>
 
+#include <opm/common/utility/FileSystem.hpp>
+
 namespace Opm
 {

@@ -39,6 +41,9 @@ setupPropertyTree(FlowLinearSolverParameters p) // Note: copying the parameters
     // Get configuration from file.
     if (conf.size() > 5 && conf.substr(conf.size() - 5, 5) == ".json") { // the ends_with() method is not available until C++20
 #if BOOST_VERSION / 100 % 1000 > 48
+        if ( !Opm::filesystem::exists(conf) ) {
+            OPM_THROW(std::invalid_argument, "JSON file " << conf << " does not exist.");
+        }
         try {
             boost::property_tree::ptree prm;
             boost::property_tree::read_json(conf, prm);

@@ -49,16 +49,16 @@ namespace Opm
     }
 
     /// Use the SimulatorTimer as a shim around opm-parser's Opm::TimeMap
-    void SimulatorTimer::init(const TimeMap& timeMap, size_t report_step)
+    void SimulatorTimer::init(const Schedule& schedule, size_t report_step)
     {
-        total_time_ = timeMap.getTotalTime();
-        timesteps_.resize(timeMap.numTimesteps());
-        for ( size_t i = 0; i < timeMap.numTimesteps(); ++i ) {
-            timesteps_[i] = timeMap.getTimeStepLength(i);
+        total_time_ = schedule.seconds( schedule.size() - 1 );
+        timesteps_.resize(schedule.size() - 1);
+        for ( size_t i = 0; i < schedule.size() - 1; ++i ) {
+            timesteps_[i] = schedule.stepLength(i);
         }
 
         setCurrentStepNum(report_step);
-        start_date_ = boost::posix_time::from_time_t( timeMap.getStartTime(0)).date();
+        start_date_ = boost::posix_time::from_time_t(schedule.getStartTime()).date();
     }
 
     /// Whether the current step is the first step.

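With this change a SimulatorTimer is seeded directly from the Schedule. A usage sketch, assuming deck, es and python are already constructed the way the test fixtures further down this diff build them:

    // Sketch only; deck/es/python construction elided.
    Opm::Schedule schedule(deck, es, python);
    Opm::SimulatorTimer timer;
    timer.init(schedule, /*report_step=*/0); // was timer.init(timeMap, 0)
    while (!timer.done()) {
        // advance the model by timer.currentStepLength() seconds ...
        ++timer; // step to the next report step
    }
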
@@ -20,7 +20,7 @@
 #ifndef OPM_SIMULATORTIMER_HEADER_INCLUDED
 #define OPM_SIMULATORTIMER_HEADER_INCLUDED
 
-#include <opm/parser/eclipse/EclipseState/Schedule/TimeMap.hpp>
+#include <opm/parser/eclipse/EclipseState/Schedule/Schedule.hpp>
 #include <opm/simulators/timestepping/SimulatorTimerInterface.hpp>
 
 #include <iosfwd>

@@ -46,8 +46,8 @@ namespace Opm
         /// stepsize_days (default 1)
         void init(const ParameterGroup& param);
 
-        /// Use the SimulatorTimer as a shim around opm-parser's Opm::TimeMap
-        void init(const TimeMap& timeMap, size_t report_step = 0);
+        /// Use the SimulatorTimer as a shim around opm-commons Schedule class
+        void init(const Schedule& schedule, size_t report_step = 0);
 
         /// Whether the current step is the first step.
         bool initialStep() const;

@@ -49,14 +49,23 @@ void ParallelFieldPropsManager::reset_actnum(const std::vector<int>& actnum)
 
 std::vector<double> ParallelFieldPropsManager::porv(bool global) const
 {
-    std::vector<double> result;
+    std::vector<double> global_porv;
     if (m_comm.rank() == 0)
-        result = m_manager.porv(global);
-    size_t size = result.size();
+        global_porv = m_manager.porv(true);
+
+    size_t size = global_porv.size();
     m_comm.broadcast(&size, 1, 0);
-    result.resize(size);
-    m_comm.broadcast(result.data(), size, 0);
-    return result;
+    global_porv.resize(size);
+    m_comm.broadcast(global_porv.data(), size, 0);
+    if (global)
+        return global_porv;
+
+    std::vector<double> local_porv(this->m_activeSize());
+    for (int i = 0; i < m_activeSize(); ++i)
+    {
+        local_porv[i] = global_porv[this->m_local2Global(i)];
+    }
+    return local_porv;
 }

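The rewritten porv() always broadcasts the full global vector and then, for the local view, picks out this rank's entries through the local-to-global map. The same broadcast-then-localize pattern expressed against plain MPI (local2global and the pre-filled global vector stand in for the manager's data; the real code uses Dune's communicator wrapper):

    #include <mpi.h>
    #include <cstddef>
    #include <vector>

    std::vector<double> broadcastThenLocalize(const std::vector<int>& local2global,
                                              std::vector<double> global, // filled on rank 0 only
                                              bool wantGlobal)
    {
        unsigned long size = global.size(); // nonzero on rank 0 only
        MPI_Bcast(&size, 1, MPI_UNSIGNED_LONG, 0, MPI_COMM_WORLD);
        global.resize(size);                // make room on the other ranks
        MPI_Bcast(global.data(), static_cast<int>(size), MPI_DOUBLE, 0, MPI_COMM_WORLD);

        if (wantGlobal)
            return global;

        // Local view: pick this rank's entries out of the global vector.
        std::vector<double> local(local2global.size());
        for (std::size_t i = 0; i < local.size(); ++i)
            local[i] = global[local2global[i]];
        return local;
    }
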
@@ -283,7 +283,7 @@ std::size_t packSize(const RestartValue& data, Dune::MPIHelper::MPICommunicator
            + packSize(data.extra, comm);
 }
 
-std::size_t packSize(const std::chrono::system_clock::time_point&, Dune::MPIHelper::MPICommunicator comm)
+std::size_t packSize(const Opm::time_point&, Dune::MPIHelper::MPICommunicator comm)
 {
     std::time_t tp;
     return packSize(tp, comm);

@@ -576,10 +576,10 @@ void pack(const RestartValue& data, std::vector<char>& buffer, int& position,
     pack(data.extra, buffer, position, comm);
 }
 
-void pack(const std::chrono::system_clock::time_point& data, std::vector<char>& buffer, int& position,
+void pack(const Opm::time_point& data, std::vector<char>& buffer, int& position,
           Dune::MPIHelper::MPICommunicator comm)
 {
-    pack(std::chrono::system_clock::to_time_t(data), buffer, position, comm);
+    pack(Opm::TimeService::to_time_t(data), buffer, position, comm);
 }

@@ -887,12 +887,12 @@ void unpack(RestartValue& data, std::vector<char>& buffer, int& position,
     unpack(data.extra, buffer, position, comm);
 }
 
-void unpack(std::chrono::system_clock::time_point& data, std::vector<char>& buffer, int& position,
+void unpack(Opm::time_point& data, std::vector<char>& buffer, int& position,
             Dune::MPIHelper::MPICommunicator comm)
 {
     std::time_t tp;
     unpack(tp, buffer, position, comm);
-    data = std::chrono::system_clock::from_time_t(tp);
+    data = Opm::TimeService::from_time_t(tp);
 }

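Note that these three hunks leave the wire format untouched: packSize(), pack() and unpack() all still serialize a std::time_t, so only the in-memory representation changes from std::chrono::system_clock::time_point to Opm::time_point.
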
@@ -27,6 +27,7 @@
 #include <opm/output/eclipse/EclipseIO.hpp>
 #include <opm/output/eclipse/Summary.hpp>
 #include <opm/parser/eclipse/EclipseState/Schedule/Action/State.hpp>
+#include <opm/common/utility/TimeService.hpp>
 
 #include <dune/common/parallel/mpihelper.hh>

@@ -321,7 +322,7 @@ ADD_PACK_PROTOTYPES(data::WellRates)
 ADD_PACK_PROTOTYPES(RestartKey)
 ADD_PACK_PROTOTYPES(RestartValue)
 ADD_PACK_PROTOTYPES(std::string)
-ADD_PACK_PROTOTYPES(std::chrono::system_clock::time_point)
+ADD_PACK_PROTOTYPES(Opm::time_point)
 
 } // end namespace Mpi

@@ -326,9 +326,9 @@ namespace Opm {
 
         well_state_ = previous_well_state_;
         well_state_.disableGliftOptimization();
+        updateAndCommunicateGroupData();
         const int reportStepIdx = ebosSimulator_.episodeIndex();
         const double simulationTime = ebosSimulator_.time();
 
         int exception_thrown = 0;
         try {
             // test wells

@@ -403,7 +403,6 @@ namespace Opm {
         const auto& comm = ebosSimulator_.vanguard().grid().comm();
         WellGroupHelpers::updateGuideRatesForWells(schedule(), phase_usage_, reportStepIdx, simulationTime, well_state_, comm, guideRate_.get());
         try {
-            updateAndCommunicateGroupData();
             // Compute initial well solution for new wells
             for (auto& well : well_container_) {
                 const uint64_t effective_events_mask = ScheduleEvents::WELL_STATUS_CHANGE

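Taken together with the previous hunk, the updateAndCommunicateGroupData() call moves out of the later try block, where it ran after the guide-rate update, to the top of the time-step setup, so the group data is synchronized across ranks before any wells are tested.
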
@@ -2333,10 +2332,7 @@ namespace Opm {
         }
 
         auto cc = Dune::MPIHelper::getCollectiveCommunication();
-        if (cc.size() > 1) {
-            ss << " on rank " << cc.rank();
-        }
-        if (!ss.str().empty())
+        if (!ss.str().empty() && cc.rank() == 0)
             deferred_logger.info(ss.str());
 
     }

@@ -2389,10 +2385,7 @@ namespace Opm {
         }
 
         auto cc = Dune::MPIHelper::getCollectiveCommunication();
-        if (cc.size() > 1) {
-            ss << " on rank " << cc.rank();
-        }
-        if (!ss.str().empty())
+        if (!ss.str().empty() && cc.rank() == 0)
             deferred_logger.info(ss.str());

@@ -2412,14 +2405,10 @@ namespace Opm {
             ss << "Switching injection control mode for group "<< group.name()
                << " from " << Group::InjectionCMode2String(oldControl)
                << " to " << Group::InjectionCMode2String(newControl);
-            auto cc = Dune::MPIHelper::getCollectiveCommunication();
-            if (cc.size() > 1) {
-                ss << " on rank " << cc.rank();
-            }
             well_state.setCurrentInjectionGroupControl(controlPhase, group.name(), newControl);
         }
 
-        if (!ss.str().empty())
+        auto cc = Dune::MPIHelper::getCollectiveCommunication();
+        if (!ss.str().empty() && cc.rank() == 0)
             deferred_logger.info(ss.str());
 
     }

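These three hunks replace per-rank messages, each tagged " on rank N", with a single message emitted by rank 0 only. The pattern in plain MPI terms, with std::cout standing in for the deferred logger:

    #include <mpi.h>
    #include <iostream>
    #include <string>

    // Emit a message once, from rank 0, instead of once per rank.
    void logOnRankZero(const std::string& msg)
    {
        int rank = 0;
        MPI_Comm_rank(MPI_COMM_WORLD, &rank);
        if (!msg.empty() && rank == 0)
            std::cout << msg << '\n'; // stand-in for deferred_logger.info(msg)
    }
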
@@ -27,6 +27,8 @@
 #include <opm/simulators/utils/DeferredLogger.hpp>
 #include <opm/common/ErrorMacros.hpp>
 #include <opm/parser/eclipse/EclipseState/Schedule/MSW/SICD.hpp>
+#include <opm/common/OpmLog/OpmLog.hpp>
+#include <string>
 #include <dune/istl/solvers.hh>
 #if HAVE_UMFPACK
 #include <dune/istl/umfpack.hh>

@@ -64,7 +66,9 @@ namespace mswellhelpers
     for (size_t i_block = 0; i_block < y.size(); ++i_block) {
         for (size_t i_elem = 0; i_elem < y[i_block].size(); ++i_elem) {
             if (std::isinf(y[i_block][i_elem]) || std::isnan(y[i_block][i_elem]) ) {
-                OPM_THROW(Opm::NumericalIssue, "nan or inf value found after UMFPack solve due to singular matrix");
+                const std::string msg{"nan or inf value found after UMFPack solve due to singular matrix"};
+                OpmLog::debug(msg);
+                OPM_THROW_NOLOG(Opm::NumericalIssue, msg);
             }
         }
     }

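OPM_THROW logs the message before throwing; the replacement logs it at debug level and throws with OPM_THROW_NOLOG instead, so a singular-matrix failure that the caller may recover from is demoted from the error log to the debug log.
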
@@ -156,7 +160,7 @@ namespace mswellhelpers
     linsolver.apply(y, x, res);
 
     if ( !res.converged ) {
-        OPM_DEFLOG_THROW(Opm::NumericalIssue, "the invDX does not get converged! ", deferred_logger);
+        OPM_DEFLOG_THROW(Opm::NumericalIssue, "the invDX did not converge ", deferred_logger);
     }
 
     return y;

@@ -465,7 +465,8 @@ namespace Opm
 
     EvalWell getSegmentSurfaceVolume(const Simulator& ebos_simulator, const int seg_idx) const;
 
-    std::vector<Scalar> getWellResiduals(const std::vector<Scalar>& B_avg) const;
+    std::vector<Scalar> getWellResiduals(const std::vector<Scalar>& B_avg,
+                                         DeferredLogger& deferred_logger) const;
 
     void detectOscillations(const std::vector<double>& measure_history,
                             const int it, bool& oscillate, bool& stagnate) const;

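This signature change drives several hunks below: every caller of getWellResiduals() now threads its DeferredLogger through, so the nan/inf checks inside can report via OPM_DEFLOG_THROW instead of the global OPM_THROW.
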
@@ -22,7 +22,9 @@
 #include <opm/simulators/wells/MSWellHelpers.hpp>
 #include <opm/simulators/utils/DeferredLoggingErrorHelpers.hpp>
 #include <opm/parser/eclipse/EclipseState/Schedule/MSW/Valve.hpp>
+#include <opm/common/OpmLog/OpmLog.hpp>
 
+#include <string>
 #include <algorithm>
 
 namespace Opm

@@ -2718,7 +2720,7 @@ namespace Opm
 
     const int max_iter_number = param_.max_inner_iter_ms_wells_;
     const WellState well_state0 = well_state;
-    const std::vector<Scalar> residuals0 = getWellResiduals(B_avg);
+    const std::vector<Scalar> residuals0 = getWellResiduals(B_avg, deferred_logger);
     std::vector<std::vector<Scalar> > residual_history;
     std::vector<double> measure_history;
     int it = 0;

@@ -2743,7 +2745,7 @@ namespace Opm
             break;
         }
 
-        residual_history.push_back(getWellResiduals(B_avg));
+        residual_history.push_back(getWellResiduals(B_avg, deferred_logger));
         measure_history.push_back(getResidualMeasureValue(well_state, residual_history[it], deferred_logger) );
 
         bool is_oscillate = false;

@@ -3185,10 +3187,13 @@ namespace Opm
 
     const EvalWell d = 1.0 - rs * rv;
     if (d <= 0.0 || d > 1.0) {
-        OPM_THROW(Opm::NumericalIssue, "Problematic d value " << d << " obtained for well " << name()
-                  << " during convertion to surface volume with rs " << rs
-                  << ", rv " << rv << " and pressure " << seg_pressure
-                  << " obtaining d " << d);
+        std::ostringstream sstr;
+        sstr << "Problematic d value " << d << " obtained for well " << name()
+             << " during conversion to surface volume with rs " << rs
+             << ", rv " << rv << " and pressure " << seg_pressure
+             << " obtaining d " << d;
+        OpmLog::debug(sstr.str());
+        OPM_THROW_NOLOG(Opm::NumericalIssue, sstr.str());
     }
 
     if (rs > 0.0) { // rs > 0.0?

@@ -3217,7 +3222,8 @@ namespace Opm
     template<typename TypeTag>
     std::vector<typename MultisegmentWell<TypeTag>::Scalar>
     MultisegmentWell<TypeTag>::
-    getWellResiduals(const std::vector<Scalar>& B_avg) const
+    getWellResiduals(const std::vector<Scalar>& B_avg,
+                     DeferredLogger& deferred_logger) const
     {
         assert(int(B_avg.size() ) == num_components_);
         std::vector<Scalar> residuals(numWellEq + 1, 0.0);

@@ -3233,8 +3239,8 @@ namespace Opm
             }
         }
         if (std::isnan(residual) || std::isinf(residual)) {
-            OPM_THROW(Opm::NumericalIssue, "nan or inf value for residal get for well " << name()
-                      << " segment " << seg << " eq_idx " << eq_idx);
+            OPM_DEFLOG_THROW(Opm::NumericalIssue, "nan or inf value for residal get for well " << name()
+                             << " segment " << seg << " eq_idx " << eq_idx, deferred_logger);
         }
 
         if (residual > residuals[eq_idx]) {

@@ -3247,7 +3253,7 @@ namespace Opm
     {
         const double control_residual = std::abs(resWell_[0][numWellEq - 1]);
         if (std::isnan(control_residual) || std::isinf(control_residual)) {
-            OPM_THROW(Opm::NumericalIssue, "nan or inf value for control residal get for well " << name());
+            OPM_DEFLOG_THROW(Opm::NumericalIssue, "nan or inf value for control residal get for well " << name(), deferred_logger);
         }
         residuals[numWellEq] = control_residual;
     }

@@ -128,10 +128,10 @@ namespace WellGroupHelpers
     double rate = 0.0;
     for (const std::string& groupName : group.groups()) {
         const Group& groupTmp = schedule.getGroup(groupName, reportStepIdx);
-        rate += groupTmp.getGroupEfficiencyFactor()
-                * sumWellPhaseRates(rates, groupTmp, schedule, wellState, reportStepIdx, phasePos, injector);
+        rate += sumWellPhaseRates(rates, groupTmp, schedule, wellState, reportStepIdx, phasePos, injector);
     }
     const auto& end = wellState.wellMap().end();
 
     for (const std::string& wellName : group.wells()) {
         const auto& it = wellState.wellMap().find(wellName);
         if (it == end) // the well is not found

@@ -159,7 +159,8 @@ namespace WellGroupHelpers
         else
             rate -= factor * rates[wellrate_index + phasePos];
     }
-    return rate;
+    const auto& gefac = group.getGroupEfficiencyFactor();
+    return gefac * rate;
 }
 
 double sumWellRates(const Group& group,

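Before this merge the parent multiplied each child group's total by the child's efficiency factor; now every call scales its own total exactly once on return, keeping the GEFAC accounting in one place. A toy sketch of the new shape of the recursion (Node is a stand-in for the Group tree; the real code also distinguishes producers from injectors):

    #include <vector>

    struct Node {
        double efficiency;           // group efficiency factor (GEFAC)
        double ownRate;              // contribution from this group's own wells
        std::vector<Node> children;
    };

    // Each node applies its own efficiency factor exactly once, on return;
    // the parent no longer scales each child's result.
    double sumRates(const Node& g)
    {
        double rate = g.ownRate;
        for (const Node& child : g.children)
            rate += sumRates(child); // child has already scaled itself
        return g.efficiency * rate;
    }
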
@@ -193,8 +194,7 @@ namespace WellGroupHelpers
     double rate = 0.0;
     for (const std::string& groupName : group.groups()) {
         const Group& groupTmp = schedule.getGroup(groupName, reportStepIdx);
-        rate += groupTmp.getGroupEfficiencyFactor()
-                * sumSolventRates(groupTmp, schedule, wellState, reportStepIdx, injector);
+        rate += sumSolventRates(groupTmp, schedule, wellState, reportStepIdx, injector);
     }
     const auto& end = wellState.wellMap().end();
     for (const std::string& wellName : group.wells()) {

@@ -223,7 +223,8 @@ namespace WellGroupHelpers
         else
             rate -= factor * wellState.solventWellRate(well_index);
     }
-    return rate;
+    const auto& gefac = group.getGroupEfficiencyFactor();
+    return gefac * rate;
 }
 
 void updateGroupTargetReduction(const Group& group,

@@ -1016,6 +1017,9 @@ namespace WellGroupHelpers
     // will be the name of 'group'. But if we recurse, 'name' and
     // 'parent' will stay fixed while 'group' will be higher up
     // in the group tree.
+    // efficiency factor is the well efficiency factor for the first group the well is
+    // part of. Later it is the accumulated factor including the group efficiency factor
+    // of the child of group.
 
     const Group::InjectionCMode& currentGroupControl
         = wellState.currentInjectionGroupControl(injectionPhase, group.name());

@@ -1085,13 +1089,15 @@ namespace WellGroupHelpers
         name, group.name(), schedule, wellState, reportStepIdx, guideRate, target, pu, injectionPhase, true);
     double target_fraction = 1.0;
     bool constraint_broken = false;
+    double efficiencyFactorInclGroup = efficiencyFactor * group.getGroupEfficiencyFactor();
 
     switch (currentGroupControl) {
     case Group::InjectionCMode::RATE: {
         const double current_rate = rates[phasePos];
         const double target_rate = fraction
             * std::max(0.0,
-                       (groupcontrols.surface_max_rate - groupTargetReduction + current_rate * efficiencyFactor))
-            / efficiencyFactor;
+                       (groupcontrols.surface_max_rate - groupTargetReduction + current_rate * efficiencyFactorInclGroup))
+            / efficiencyFactorInclGroup;
         if (current_rate > target_rate) {
             constraint_broken = true;
             target_fraction = target_rate / current_rate;

@@ -1104,8 +1110,8 @@ namespace WellGroupHelpers
         const double target_rate = fraction
             * std::max(0.0,
                        (groupcontrols.resv_max_rate / coeff - groupTargetReduction
-                        + current_rate * efficiencyFactor))
-            / efficiencyFactor;
+                        + current_rate * efficiencyFactorInclGroup))
+            / efficiencyFactorInclGroup;
         if (current_rate > target_rate) {
             constraint_broken = true;
             target_fraction = target_rate / current_rate;

@@ -1118,8 +1124,8 @@ namespace WellGroupHelpers
         const double target_rate = fraction
             * std::max(0.0,
                        (groupcontrols.target_reinj_fraction * productionRate - groupTargetReduction
-                        + current_rate * efficiencyFactor))
-            / efficiencyFactor;
+                        + current_rate * efficiencyFactorInclGroup))
+            / efficiencyFactorInclGroup;
         if (current_rate > target_rate) {
             constraint_broken = true;
             target_fraction = target_rate / current_rate;

@@ -1145,8 +1151,8 @@ namespace WellGroupHelpers
 
         const double current_rate = rates[phasePos];
         const double target_rate = fraction
-            * std::max(0.0, (voidageRate / coeff - groupTargetReduction + current_rate * efficiencyFactor))
-            / efficiencyFactor;
+            * std::max(0.0, (voidageRate / coeff - groupTargetReduction + current_rate * efficiencyFactorInclGroup))
+            / efficiencyFactorInclGroup;
         if (current_rate > target_rate) {
             constraint_broken = true;
             target_fraction = target_rate / current_rate;

@@ -1165,7 +1171,7 @@ namespace WellGroupHelpers
 
         const double current_rate = rates[phasePos];
         const double target_rate = fraction
-            * std::max(0.0, (inj_rate - groupTargetReduction + current_rate * efficiencyFactor)) / efficiencyFactor;
+            * std::max(0.0, (inj_rate - groupTargetReduction + current_rate * efficiencyFactorInclGroup)) / efficiencyFactorInclGroup;
         if (current_rate > target_rate) {
             constraint_broken = true;
             target_fraction = target_rate / current_rate;

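All five injection-control branches above now convert between well and group rates using efficiencyFactorInclGroup, the accumulated factor multiplied by the current group's own GEFAC, mirroring the change in sumWellPhaseRates()/sumSolventRates() where each group applies its own factor once.
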
@@ -1236,6 +1242,9 @@ namespace WellGroupHelpers
     // will be the name of 'group'. But if we recurse, 'name' and
     // 'parent' will stay fixed while 'group' will be higher up
     // in the group tree.
+    // efficiencyfactor is the well efficiency factor for the first group the well is
+    // part of. Later it is the accumulated factor including the group efficiency factor
+    // of the child of group.
 
     const Group::ProductionCMode& currentGroupControl = wellState.currentProductionGroupControl(group.name());

@@ -1304,6 +1313,7 @@ namespace WellGroupHelpers
         }
     }
 
+    double efficiencyFactorInclGroup = efficiencyFactor * group.getGroupEfficiencyFactor();
     double target = orig_target;
     for (size_t ii = 0; ii < num_ancestors; ++ii) {
         if ((ii == 0) || guideRate->has(chain[ii])) {

@@ -1314,7 +1324,7 @@ namespace WellGroupHelpers
 
             // Add my reduction back at the level where it is included in the local reduction
             if (local_reduction_level == ii )
-                target += current_rate * efficiencyFactor;
+                target += current_rate * efficiencyFactorInclGroup;
         }
         if (ii < num_ancestors - 1) {
             // Not final level. Add sub-level reduction back, if

@@ -1333,7 +1343,7 @@ namespace WellGroupHelpers
             target *= localFraction(chain[ii + 1]);
         }
         // Avoid negative target rates comming from too large local reductions.
-        const double target_rate = std::max(1e-12, target / efficiencyFactor);
+        const double target_rate = std::max(1e-12, target / efficiencyFactorInclGroup);
         return std::make_pair(current_rate > target_rate, target_rate / current_rate);
     }

@@ -110,11 +110,11 @@ namespace WellGroupHelpers
     for (const std::string& groupName : group.groups()) {
         std::vector<double> thisPot(np, 0.0);
         const Group& groupTmp = schedule.getGroup(groupName, reportStepIdx);
 
+        // Note that group effiency factors for groupTmp are applied in updateGuideRateForGroups
         updateGuideRateForGroups(
             groupTmp, schedule, pu, reportStepIdx, simTime, isInjector, wellState, comm, guideRate, thisPot);
 
-        const auto gefac = groupTmp.getGroupEfficiencyFactor();
-
         // accumulate group contribution from sub group unconditionally
         if (isInjector) {
             const Phase all[] = {Phase::WATER, Phase::OIL, Phase::GAS};

@@ -129,7 +129,7 @@ namespace WellGroupHelpers
                 else
                     continue;
 
-                pot[phasePos] += gefac * thisPot[phasePos];
+                pot[phasePos] += thisPot[phasePos];
             }
         } else {
             const Group::ProductionCMode& currentGroupControl = wellState.currentProductionGroupControl(groupName);

@@ -138,7 +138,7 @@ namespace WellGroupHelpers
                 continue;
             }
             for (int phase = 0; phase < np; phase++) {
-                pot[phase] += gefac * thisPot[phase];
+                pot[phase] += thisPot[phase];
             }
         }
     }

@@ -173,6 +173,13 @@ namespace WellGroupHelpers
         }
     }
 
+    // Apply group efficiency factor for this goup
+    auto gefac = group.getGroupEfficiencyFactor();
+
+    for (int phase = 0; phase < np; phase++) {
+        pot[phase] *= gefac;
+    }
+
     double oilPot = 0.0;
     if (pu.phase_used[BlackoilPhases::Liquid])
         oilPot = pot[pu.phase_pos[BlackoilPhases::Liquid]];

@@ -2220,6 +2220,7 @@ namespace Opm
         }
     }
 
+    efficiencyFactor *= group.getGroupEfficiencyFactor();
     assert(group.hasInjectionControl(injectionPhase));
     const auto& groupcontrols = group.injectionControls(injectionPhase, summaryState);

@@ -2353,6 +2354,7 @@ namespace Opm
         }
     }
 
+    efficiencyFactor *= group.getGroupEfficiencyFactor();
     const auto& well = well_ecl_;
     const auto pu = phaseUsage();

@@ -29,6 +29,10 @@
 #include <opm/parser/eclipse/Parser/Parser.hpp>
 #include <opm/simulators/timestepping/SimulatorTimer.hpp>
 #include <opm/parser/eclipse/Units/Units.hpp>
+#include <opm/parser/eclipse/Python/Python.hpp>
+#include <opm/parser/eclipse/EclipseState/Grid/FieldPropsManager.hpp>
+#include <opm/parser/eclipse/EclipseState/Grid/EclipseGrid.hpp>
+#include <opm/parser/eclipse/EclipseState/Tables/TableManager.hpp>
 
 #include <string>
 #include <iostream>

@@ -40,14 +44,17 @@ BOOST_AUTO_TEST_CASE(CreateTimer)
     const std::string filename1 = "TESTTIMER.DATA";
     Opm::Parser parser;
     Opm::Deck parserDeck = parser.parseFile( filename1);
 
-    Opm::TimeMap timeMap( parserDeck );
+    auto python = std::make_shared<Opm::Python>();
+    Opm::EclipseGrid grid(10,10,10);
+    Opm::FieldPropsManager fp(parserDeck, Opm::Phases{true, true, true}, grid, Opm::TableManager());
+    Opm::Runspec runspec(parserDeck);
+    Opm::Schedule schedule( parserDeck, grid, fp, runspec, python );
     Opm::SimulatorTimer simtimer;
 
     boost::gregorian::date defaultStartDate( 2012, 1, 1);
     BOOST_CHECK_EQUAL( boost::posix_time::ptime(defaultStartDate), simtimer.currentDateTime() );
 
-    simtimer.init(timeMap);
+    simtimer.init(schedule);
     boost::gregorian::date startDate( 2014, 3, 26);
     BOOST_CHECK_EQUAL( boost::posix_time::ptime(startDate), simtimer.currentDateTime() );

@@ -37,6 +37,7 @@
 
 #include <opm/grid/GridManager.hpp>
 #include <opm/parser/eclipse/Units/Units.hpp>
+#include <opm/common/utility/TimeService.hpp>
 
 #include <opm/material/fluidmatrixinteractions/EclMaterialLawManager.hpp>
 #include <opm/grid/GridHelpers.hpp>

@@ -73,7 +74,7 @@ struct SetupTest {
         const Opm::Runspec runspec (deck);
         python = std::make_shared<Opm::Python>();
         schedule.reset( new Opm::Schedule(deck, *ecl_state, python));
-        summaryState.reset( new Opm::SummaryState(std::chrono::system_clock::from_time_t(schedule->getStartTime())));
+        summaryState.reset( new Opm::SummaryState(Opm::TimeService::from_time_t(schedule->getStartTime())));
     }
     current_timestep = 0;
 };

@@ -31,6 +31,7 @@
 #include <opm/parser/eclipse/Parser/ParseContext.hpp>
 #include <opm/parser/eclipse/EclipseState/Schedule/Schedule.hpp>
 #include <opm/parser/eclipse/Units/Units.hpp>
+#include <opm/common/utility/TimeService.hpp>
 
 #include <opm/grid/GridHelpers.hpp>

@@ -57,7 +58,7 @@ struct Setup
         , grid (es.getInputGrid())
         , python( std::make_shared<Opm::Python>() )
         , sched(deck, es, python)
-        , st(std::chrono::system_clock::from_time_t(sched.getStartTime()))
+        , st(Opm::TimeService::from_time_t(sched.getStartTime()))
     {
         initWellPerfData();
     }

@@ -143,6 +143,7 @@ tests[model6_msw]="flow model6 1_MSW_MODEL6"
 tests[norne_reperf]="flow norne NORNE_ATW2013_B1H_RE-PERF"
 tests[compl_smry]="flow compl_smry COMPL_SMRY"
 tests[3d_tran_operator]="flow parallel_fieldprops 3D_TRAN_OPERATOR"
+tests[co2store]="flow co2store CO2STORE"
 
 changed_tests=""