mirror of https://github.com/OPM/opm-simulators.git, synced 2025-02-11 05:15:34 -06:00

Automatic choke

This commit is contained in:
parent 0137bd566c
commit 30e1f5178c

@@ -465,6 +465,8 @@ template<class Scalar> class WellContributions;
                                  const double dt,
                                  DeferredLogger& local_deferredLogger);

    void computeWellGroupThp(DeferredLogger& local_deferredLogger);

    /// Update rank's notion of intersecting wells and their
    /// associate solution variables.
    ///

@@ -1363,7 +1363,6 @@ updateNetworkPressures(const int reportStepIdx)
    }

    for (auto& well : well_container_generic_) {

        // Producers only, since we so far only support the
        // "extended" network model (properties defined by
        // BRANPROP and NODEPROP) which only applies to producers.

@@ -1380,6 +1379,9 @@ updateNetworkPressures(const int reportStepIdx)
            if (thp_is_limit) {
                ws.thp = well->getTHPConstraint(summaryState_);
            }
            //PJPE: Set thp of wells belonging to a subsea manifold equal to the node_pressure
            if (network.node(well->wellEcl().groupName()).as_choke())
                ws.thp = new_limit;
        }
    }
}

@@ -28,6 +28,7 @@
#endif

#include <opm/grid/utility/cartesianToCompressed.hpp>
#include <opm/common/utility/numeric/RootFinders.hpp>

#include <opm/input/eclipse/Schedule/Network/Balance.hpp>
#include <opm/input/eclipse/Schedule/Network/ExtNetwork.hpp>

@@ -40,6 +41,9 @@
#include <opm/simulators/wells/ParallelPAvgDynamicSourceData.hpp>
#include <opm/simulators/wells/ParallelWBPCalculation.hpp>
#include <opm/simulators/wells/VFPProperties.hpp>
#include <opm/simulators/wells/WellBhpThpCalculator.hpp>
#include <opm/simulators/wells/WellGroupHelpers.hpp>
#include <opm/simulators/wells/TargetCalculator.hpp>

#include <opm/simulators/utils/DeferredLoggingErrorHelpers.hpp>
#include <opm/simulators/utils/MPIPacker.hpp>

@@ -1162,6 +1166,9 @@ namespace Opm {
    BlackoilWellModel<TypeTag>::
    updateWellControlsAndNetwork(const bool mandatory_network_balance, const double dt, DeferredLogger& local_deferredLogger)
    {
        // PJPE: calculate common THP for subsea manifold well group (item 3 of NODEPROP set to YES)
        computeWellGroupThp(local_deferredLogger);

        // not necessarily that we always need to update once of the network solutions
        bool do_network_update = true;
        bool well_group_control_changed = false;

@@ -1243,7 +1250,82 @@ namespace Opm {
        return {more_network_update, well_group_control_changed};
    }

    //PJPE: This function is to be used for well groups in an extended network that act as a subsea manifold.
    // The wells of such a group should have a common THP and total phase rate(s) obeying (if possible)
    // the well group constraint set by GCONPROD.
    template <typename TypeTag>
    void
    BlackoilWellModel<TypeTag>::
    computeWellGroupThp(DeferredLogger& local_deferredLogger)
    {
        const int reportStepIdx = this->ebosSimulator_.episodeIndex();
        const auto& network = schedule()[reportStepIdx].network();

        if (!network.active()) {
            return;
        }

        auto& well_state = this->wellState();
        auto& group_state = this->groupState();

        for (const std::string& nodeName : network.node_names()) {
            const bool has_choke = network.node(nodeName).as_choke();
            if (has_choke) {
                const auto& summary_state = this->ebosSimulator_.vanguard().summaryState();
                const Group& group = schedule().getGroup(nodeName, reportStepIdx);
                std::optional<Group::ProductionControls> ctrl = group.productionControls(summary_state);
                const auto cmode = ctrl->cmode;
                const auto pu = this->phase_usage_;
                //PJPE: conversion factor res<-> surface rates TODO: to be calculated
                std::vector<double> resv_coeff(pu.num_phases, 1.0);
                double gratTargetFromSales = 0.0; //PJPE: check this
                WellGroupHelpers::TargetCalculator tcalc(cmode, pu, resv_coeff,
                                                         gratTargetFromSales, nodeName, group_state,
                                                         group.has_gpmaint_control(cmode));
                double orig_target = tcalc.groupTarget(ctrl, local_deferredLogger);
                //PJPE: TODO: include efficiency factor
                auto mismatch = [&] (auto well_group_thp) {
                    double well_group_rate(0.0);
                    for (auto& well : this->well_container_) {
                        std::string well_name = well->name();
                        if (group.hasWell(well_name)) {
                            well->setDynamicThpLimit(well_group_thp);
                            well->updateWellStateWithTHPTargetProd(this->ebosSimulator_, well_state, local_deferredLogger);
                            auto& ws = well_state.well(well_name);
                            well_group_rate += -tcalc.calcModeRateFromRates(ws.surface_rates);
                        }
                    }
                    return well_group_rate - orig_target;
                };

                // PJPE: TODO what settings to take here?
                const double tolerance = 1E-4;
                const int max_iteration_solve = 100;
                int iteration = 0;
                const std::array<double, 2> range {20E5, 40E5};
                double low(range[0]);
                double high(range[1]);

                // PJPE: TODO use bisectBracket first and use bruteForceBracket as fallback option.
                // see computeBhpAtThpLimit()
                WellBhpThpCalculator::bruteForceBracket(mismatch, range, low, high, local_deferredLogger);

                // double aa = mismatch(low);
                // double bb = mismatch(high);
                // std::cout << " low = " << aa << ", high = " << bb << std::endl;

                double well_group_thp = RegulaFalsiBisection<ThrowOnError>::
                    solve(mismatch, low, high, max_iteration_solve, tolerance, iteration);

                double check = mismatch(well_group_thp);
                std::cout << " mismatch = " << check << ", well_group_thp = " << well_group_thp << std::endl;

                //PJPE: use the common THP in computeNetworkPressures
                group_state.update_well_group_thp(nodeName, well_group_thp);
            }
        }
    }
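
Conceptually, computeWellGroupThp reduces to one scalar root-finding problem per auto-choke node: find the common THP at which the summed production of the manifold wells meets the GCONPROD target. The commit brackets the root with WellBhpThpCalculator::bruteForceBracket and then hands the mismatch lambda to RegulaFalsiBisection<ThrowOnError>::solve. The sketch below shows the same pattern in isolation using plain bisection; the function name, the groupRate stand-in and the defaults are illustrative only, not OPM API.

#include <cmath>
#include <functional>
#include <stdexcept>

// Hedged sketch: solve groupRate(thp) == target for the common manifold THP by
// plain bisection. groupRate stands in for the `mismatch` evaluation in the
// commit (the summed well rates at a trial THP); none of this is OPM API.
double solveCommonThp(const std::function<double(double)>& groupRate,
                      const double target,
                      double low, double high,
                      const double tol = 1.0e-4, const int maxIt = 100)
{
    auto mismatch = [&](const double thp) { return groupRate(thp) - target; };
    double fLow = mismatch(low);
    if (fLow * mismatch(high) > 0.0)
        throw std::runtime_error("common THP is not bracketed in [low, high]");

    for (int it = 0; it < maxIt; ++it) {
        const double mid = 0.5 * (low + high);
        const double fMid = mismatch(mid);
        if (std::abs(fMid) < tol)
            return mid;
        if (fLow * fMid <= 0.0) {
            high = mid;        // root lies in the lower half
        } else {
            low = mid;         // root lies in the upper half
            fLow = fMid;
        }
    }
    return 0.5 * (low + high); // best estimate after maxIt iterations
}

Note the sign convention: OPM stores producer surface rates as negative numbers, which is why the commit accumulates -tcalc.calcModeRateFromRates(ws.surface_rates) before comparing against the (positive) group target.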

    template<typename TypeTag>
    void

@@ -2374,9 +2456,6 @@ namespace Opm {
                    deferred_logger.warning("WELL_INITIAL_SOLVE_FAILED", msg);
                }
            }
            // If we're using local well solves that include control switches, they also update
            // operability, so reset before main iterations begin
            well->resetWellOperability();
        }
        updatePrimaryVariables(deferred_logger);

@@ -101,6 +101,23 @@ GroupState<Scalar>::production_rates(const std::string& gname) const
}

//-------------------------------------------------------------------------
template<class Scalar>
void GroupState<Scalar>::
update_well_group_thp(const std::string& gname, const double& thp)
{
    this->group_thp[gname] = thp;
}

template<class Scalar>
double GroupState<Scalar>::
well_group_thp(const std::string& gname) const
{
    auto group_iter = this->group_thp.find(gname);
    if (group_iter == this->group_thp.end())
        throw std::logic_error("No such group");

    return group_iter->second;
}

template<class Scalar>
bool GroupState<Scalar>::

@@ -49,6 +49,9 @@ public:
                                const std::vector<Scalar>& rates);
    const std::vector<Scalar>& production_rates(const std::string& gname) const;

    void update_well_group_thp(const std::string& gname, const double& thp);
    double well_group_thp(const std::string& gname) const;

    bool has_production_reduction_rates(const std::string& gname) const;
    void update_production_reduction_rates(const std::string& gname,
                                           const std::vector<Scalar>& rates);

@@ -175,6 +178,7 @@ public:
        serializer(num_phases);
        serializer(m_production_rates);
        serializer(production_controls);
        serializer(group_thp);
        serializer(prod_red_rates);
        serializer(inj_red_rates);
        serializer(inj_surface_rates);

@@ -199,6 +203,7 @@ private:
    std::map<std::string, Scalar> inj_vrep_rate;
    std::map<std::string, Scalar> m_grat_sales_target;
    std::map<std::string, Scalar> m_gpmaint_target;
    std::map<std::string, double> group_thp;

    std::map<std::pair<Phase, std::string>, Group::InjectionCMode> injection_controls;
    WellContainer<GPMaint::State> gpmaint_state;

@@ -35,6 +35,7 @@
#include <opm/simulators/wells/WellHelpers.hpp>
#include <opm/simulators/wells/WellInterfaceFluidSystem.hpp>
#include <opm/simulators/wells/WellState.hpp>
#include <opm/input/eclipse/Schedule/Network/ExtNetwork.hpp>

#include <cassert>
#include <stdexcept>

@@ -162,17 +163,30 @@ assembleControlEqProd(const WellState<Scalar>& well_state,
            well_.rateConverter().calcCoeff(id, region, coeff);

        };
        WellGroupControls(well_).getGroupProductionControl(group,
                                                           well_state,
                                                           group_state,
                                                           schedule,
                                                           summaryState,
                                                           bhp,
                                                           active_rates,
                                                           rCoeff,
                                                           efficiencyFactor,
                                                           control_eq,
                                                           deferred_logger);

        bool has_choke(false);
        const std::size_t report_step = well_.currentStep();
        auto& network = schedule[report_step].network();
        if (network.active())
            has_choke = network.node(group.name()).as_choke();

        if(!has_choke) {
            WellGroupControls(well_).getGroupProductionControl(group,
                                                               well_state,
                                                               group_state,
                                                               schedule,
                                                               summaryState,
                                                               bhp,
                                                               active_rates,
                                                               rCoeff,
                                                               efficiencyFactor,
                                                               control_eq,
                                                               deferred_logger);

        } else {
            //PJPE: the group is a subsea manifold: wells are operated on a common THP
            control_eq = bhp - bhp_from_thp();
        }
        break;
    }
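
For a well whose group is flagged as_choke, the usual group-production path (guide-rate apportioning via getGroupProductionControl) is bypassed and the residual simply ties the well's BHP to the BHP implied by the common manifold THP. A schematic version of that residual is sketched below; the function and the bhpFromThp stand-in (in the commit this role is played by the bhp_from_thp callback) are illustrative, not OPM signatures.

#include <functional>

// Hedged sketch: control-equation residual for a producer under an auto-choke
// group. EvalWell stands for the AD type used by the well equations; bhpFromThp
// is a placeholder for the callback converting the common group THP to a BHP.
template <class EvalWell>
EvalWell chokeGroupControlEq(const EvalWell& bhp,
                             const std::function<EvalWell()>& bhpFromThp)
{
    // Zero residual <=> the well's BHP equals the BHP implied by the shared
    // manifold THP, so every well in the group ends up on the same THP.
    return bhp - bhpFromThp();
}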
    case Well::ProducerCMode::CMODE_UNDEFINED: {

@@ -118,6 +118,11 @@ public:
    getFloIPR(const WellState<Scalar>& well_state,
              const Well& well,
              const SummaryState& summary_state) const;
    //! \brief Find limits using brute-force solver.
    static bool bruteForceBracket(const std::function<double(const double)>& eq,
                                  const std::array<double, 2>& range,
                                  double& low, double& high,
                                  DeferredLogger& deferred_logger);
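
The commit only shows the declaration of bruteForceBracket; its body is not part of this diff. The usual idea behind such a helper is sketched below (an assumed implementation for illustration, not the committed one): sample the search range on a uniform grid and stop at the first sign change of the equation, which gives the regula falsi/bisection solver a valid starting bracket.

#include <array>
#include <functional>

// Hedged sketch of a brute-force bracketing pass: walk [range[0], range[1]] on
// a uniform grid and return the first sub-interval where eq changes sign. This
// is an assumed implementation, not the body added by the commit.
bool bruteForceBracketSketch(const std::function<double(double)>& eq,
                             const std::array<double, 2>& range,
                             double& low, double& high,
                             const int samples = 100)
{
    low = range[0];
    high = range[1];
    const double dx = (range[1] - range[0]) / samples;
    double xPrev = range[0];
    double fPrev = eq(xPrev);
    for (int i = 1; i <= samples; ++i) {
        const double x = range[0] + i * dx;
        const double f = eq(x);
        if (fPrev * f <= 0.0) {   // sign change: root bracketed
            low = xPrev;
            high = x;
            return true;
        }
        xPrev = x;
        fPrev = f;
    }
    return false;                 // no sign change found in the range
}

In computeWellGroupThp above, eq is the mismatch lambda and range is the hard-coded {20E5, 40E5} Pa interval.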

private:
    //! \brief Compute BHP from THP limit for an injector - implementation.

@@ -41,6 +41,7 @@
#include <opm/simulators/wells/TargetCalculator.hpp>
#include <opm/simulators/wells/VFPProdProperties.hpp>
#include <opm/simulators/wells/WellState.hpp>
#include <opm/simulators/wells/GroupState.hpp>

#include <algorithm>
#include <cassert>

@@ -935,7 +936,15 @@ computeNetworkPressures(const Network::ExtNetwork& network,
#endif
            } else {
                // Table number specified as 9999 in the deck, no pressure loss.
                node_pressures[node] = up_press;
                if (network.node(node).as_choke()){
                    // Node pressure is set to the common THP of the wells.
                    // The choke pressure must be non-negative, therefore the node pressure of
                    // the auto-choke node must be larger than or equal to the pressure of the uptree node of its branch.
                    const auto group_thp = group_state.well_group_thp(node);
                    node_pressures[node] = group_thp >= up_press ? group_thp : up_press;
                } else {
                    node_pressures[node] = up_press;
                }
            }
        }
    }
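
The comment above spells out the constraint behind the ternary: an automatic choke can only remove pressure, so the manifold node can never sit below its uptree node. The expression is simply a max; a hedged restatement (names local to this sketch, not OPM code):

#include <algorithm>

// Sketch: pressure assigned to an auto-choke node. group_thp is the common THP
// solved for in computeWellGroupThp, up_press the pressure of the uptree node.
double chokeNodePressure(const double group_thp, const double up_press)
{
    // A choke can only add a pressure drop downstream, never raise pressure,
    // so the node is held at least at the uptree pressure.
    return std::max(group_thp, up_press);
}

For example, with a common THP of 25 bar and an uptree pressure of 30 bar, the node is pinned at 30 bar, i.e. the choke is fully open and contributes no pressure drop.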

@@ -1401,25 +1401,36 @@ namespace Opm
        assert(well.isAvailableForGroupControl());
        const auto& group = schedule.getGroup(well.groupName(), this->currentStep());
        const Scalar efficiencyFactor = well.getEfficiencyFactor();
        Scalar scale = this->getGroupProductionTargetRate(group,
                                                          well_state,
                                                          group_state,
                                                          schedule,
                                                          summaryState,
                                                          efficiencyFactor,
                                                          deferred_logger);

        // we don't want to scale with zero and get zero rates.
        if (scale > 0) {
            for (int p = 0; p<np; ++p) {
                ws.surface_rates[p] *= scale;
        bool has_choke(false);
        const std::size_t report_step = this->currentStep();
        auto& network = schedule[report_step].network();
        if (network.active())
            has_choke = network.node(group.name()).as_choke();
        if (!has_choke) {
            Scalar scale = this->getGroupProductionTargetRate(group,
                                                              well_state,
                                                              group_state,
                                                              schedule,
                                                              summaryState,
                                                              efficiencyFactor,
                                                              deferred_logger);

            // we don't want to scale with zero and get zero rates.
            if (scale > 0) {
                for (int p = 0; p<np; ++p) {
                    ws.surface_rates[p] *= scale;
                }
                ws.trivial_target = false;
            } else {
                ws.trivial_target = true;
            }
            ws.trivial_target = false;
        } else {
            ws.trivial_target = true;
        }
            // PJPE: the group is a subsea manifold. Guide rates are to be ignored.
            // The wells of the group are to be operated on a common THP (= manifold node pressure)
        }
        break;
    }
    }
    case Well::ProducerCMode::CMODE_UNDEFINED:
    case Well::ProducerCMode::NONE:
    {