changed: move the auxiliary well module from BlackoilWellModel to WellConnectionAuxiliaryModule
This is an adapter for the non-linear solver. It is still modeled using is-a, but this can now be changed if desired.
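The new base class is a CRTP-style adapter: WellConnectionAuxiliaryModule<TypeTag, Model> implements the auxiliary-module interface expected by the non-linear solver and forwards all well-specific queries to the derived well model. A minimal, self-contained sketch of that pattern follows; the type and member names are simplified stand-ins for illustration, not the actual OPM classes.

#include <vector>

// Simplified stand-in for the framework's NeighborSet type (illustration only).
using NeighborSet = std::vector<int>;

// Analogue of BaseAuxiliaryModule<TypeTag>: the interface the solver sees.
class AuxiliaryModuleBase
{
public:
    virtual ~AuxiliaryModuleBase() = default;
    virtual unsigned numDofs() const = 0;
    virtual void addNeighbors(std::vector<NeighborSet>& neighbors) const = 0;
};

// Analogue of WellConnectionAuxiliaryModule<TypeTag, Model>: implements the
// auxiliary-module interface and delegates well-specific queries to the model.
template <class Model>
class WellConnectionAdapter : public AuxiliaryModuleBase
{
public:
    explicit WellConnectionAdapter(Model& model)
        : model_(model)
    {}

    // Wells add no extra degrees of freedom; a Schur complement is used instead.
    unsigned numDofs() const override { return 0; }

    void addNeighbors(std::vector<NeighborSet>& neighbors) const override
    {
        if (!model_.addMatrixContributions())
            return;
        // ... would augment the sparsity pattern with the model's well cells ...
        (void)neighbors;
    }

private:
    Model& model_;
};

// Analogue of BlackoilWellModel: still "is-a" auxiliary module, as the commit
// message notes, because it derives from the adapter and hands itself in.
class WellModel : public WellConnectionAdapter<WellModel>
{
public:
    WellModel()
        : WellConnectionAdapter<WellModel>(*this) // the adapter only stores the reference
    {}

    bool addMatrixContributions() const { return true; }
};

Because the coupling goes through the Model template parameter, the is-a relationship could later be replaced by composition without touching the solver-facing interface.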
@@ -24,13 +24,11 @@
#ifndef OPM_BLACKOILWELLMODEL_HEADER_INCLUDED
#define OPM_BLACKOILWELLMODEL_HEADER_INCLUDED

#include <opm/common/OpmLog/OpmLog.hpp>
#include <dune/common/fmatrix.hh>
#include <dune/istl/bcrsmatrix.hh>
#include <dune/istl/matrixmatrix.hh>

#include <cstddef>
#include <map>
#include <memory>
#include <string>
#include <vector>
#include <opm/common/OpmLog/OpmLog.hpp>

#include <opm/grid/utility/SparseTable.hpp>
@@ -39,7 +37,7 @@
#include <opm/input/eclipse/Schedule/Schedule.hpp>
#include <opm/input/eclipse/Schedule/Well/WellTestState.hpp>

#include <opm/models/discretization/common/baseauxiliarymodule.hh>
#include <opm/material/densead/Math.hpp>

#include <opm/simulators/flow/countGlobalCells.hpp>
#include <opm/simulators/flow/FlowBaseVanguard.hpp>
@@ -47,6 +45,11 @@

#include <opm/simulators/linalg/matrixblock.hh>

#include <opm/simulators/timestepping/SimulatorReport.hpp>
#include <opm/simulators/timestepping/gatherConvergenceReport.hpp>

#include <opm/simulators/utils/DeferredLogger.hpp>

#include <opm/simulators/wells/BlackoilWellModelGasLift.hpp>
#include <opm/simulators/wells/BlackoilWellModelGeneric.hpp>
#include <opm/simulators/wells/BlackoilWellModelGuideRates.hpp>
@@ -63,22 +66,18 @@
#include <opm/simulators/wells/StandardWell.hpp>
#include <opm/simulators/wells/VFPInjProperties.hpp>
#include <opm/simulators/wells/VFPProdProperties.hpp>
#include <opm/simulators/wells/WGState.hpp>
#include <opm/simulators/wells/WellConnectionAuxiliaryModule.hpp>
#include <opm/simulators/wells/WellGroupHelpers.hpp>
#include <opm/simulators/wells/WellInterface.hpp>
#include <opm/simulators/wells/WellProdIndexCalculator.hpp>
#include <opm/simulators/wells/WellState.hpp>
#include <opm/simulators/wells/WGState.hpp>

#include <opm/simulators/timestepping/SimulatorReport.hpp>
#include <opm/simulators/timestepping/gatherConvergenceReport.hpp>

#include <dune/common/fmatrix.hh>
#include <dune/istl/bcrsmatrix.hh>
#include <dune/istl/matrixmatrix.hh>

#include <opm/material/densead/Math.hpp>

#include <opm/simulators/utils/DeferredLogger.hpp>
#include <cstddef>
#include <map>
#include <memory>
#include <string>
#include <vector>

namespace Opm {
@@ -88,7 +87,7 @@ template<class Scalar> class WellContributions;

/// Class for handling the blackoil well model.
template<typename TypeTag>
class BlackoilWellModel : public BaseAuxiliaryModule<TypeTag>
class BlackoilWellModel : public WellConnectionAuxiliaryModule<TypeTag, BlackoilWellModel<TypeTag>>
, public BlackoilWellModelGeneric<GetPropType<TypeTag,
Properties::Scalar>>
{
@@ -106,8 +105,9 @@ template<class Scalar> class WellContributions;
using SparseMatrixAdapter = GetPropType<TypeTag, Properties::SparseMatrixAdapter>;
using ModelParameters = BlackoilModelParameters<Scalar>;

using WellConnectionModule = WellConnectionAuxiliaryModule<TypeTag, BlackoilWellModel<TypeTag>>;

constexpr static std::size_t pressureVarIndex = GetPropType<TypeTag, Properties::Indices>::pressureSwitchIdx;
typedef typename BaseAuxiliaryModule<TypeTag>::NeighborSet NeighborSet;

static const int numEq = Indices::numEq;
static const int solventSaturationIdx = Indices::solventSaturationIdx;
@@ -139,51 +139,6 @@ template<class Scalar> class WellContributions;
void init();
void initWellContainer(const int reportStepIdx) override;

/////////////
// <eWoms auxiliary module stuff>
/////////////
unsigned numDofs() const override
// No extra dofs are inserted for wells. (we use a Schur complement.)
{ return 0; }

void addNeighbors(std::vector<NeighborSet>& neighbors) const override;

void applyInitial() override
{}

void linearize(SparseMatrixAdapter& jacobian, GlobalEqVector& res) override;
void linearizeDomain(const Domain& domain, SparseMatrixAdapter& jacobian, GlobalEqVector& res);

void postSolve(GlobalEqVector& deltaX) override
{
recoverWellSolutionAndUpdateWellState(deltaX);
}

void postSolveDomain(GlobalEqVector& deltaX, const Domain& domain)
{
recoverWellSolutionAndUpdateWellStateDomain(deltaX, domain);
}

/////////////
// </ eWoms auxiliary module stuff>
/////////////

template <class Restarter>
void deserialize(Restarter& /* res */)
{
// TODO (?)
}

/*!
* \brief This method writes the complete state of the well
* to the harddisk.
*/
template <class Restarter>
void serialize(Restarter& /* res*/)
{
// TODO (?)
}

void beginEpisode()
{
OPM_TIMEBLOCK(beginEpsiode);
@@ -371,6 +326,22 @@ template<class Scalar> class WellContributions;
auto end() const { return well_container_.end(); }
bool empty() const { return well_container_.empty(); }

bool addMatrixContributions() const { return param_.matrix_add_well_contributions_; }

int compressedIndexForInterior(int cartesian_cell_idx) const override
{
return simulator_.vanguard().compressedIndexForInterior(cartesian_cell_idx);
}

// using the solution x to recover the solution xw for wells and applying
// xw to update Well State
void recoverWellSolutionAndUpdateWellState(const BVector& x);

// using the solution x to recover the solution xw for wells and applying
// xw to update Well State
void recoverWellSolutionAndUpdateWellStateDomain(const BVector& x,
const Domain& domain);

protected:
Simulator& simulator_;
@@ -470,14 +441,6 @@ template<class Scalar> class WellContributions;
// called at the end of a report step
void endReportStep();

// using the solution x to recover the solution xw for wells and applying
// xw to update Well State
void recoverWellSolutionAndUpdateWellState(const BVector& x);

// using the solution x to recover the solution xw for wells and applying
// xw to update Well State
void recoverWellSolutionAndUpdateWellStateDomain(const BVector& x, const Domain& domain);

// setting the well_solutions_ based on well_state.
void updatePrimaryVariables(DeferredLogger& deferred_logger);
@@ -529,10 +492,6 @@ template<class Scalar> class WellContributions;

void computeWellTemperature();

int compressedIndexForInterior(int cartesian_cell_idx) const override {
return simulator_.vanguard().compressedIndexForInterior(cartesian_cell_idx);
}

private:
BlackoilWellModel(Simulator& simulator, const PhaseUsage& pu);
@@ -543,8 +502,6 @@ template<class Scalar> class WellContributions;
// Their state is not relevant between function calls, so they can
// (and must) be mutable, as the functions using them are const.
mutable BVector x_local_;
mutable BVector res_local_;
mutable GlobalEqVector linearize_res_local_;
};
@@ -214,6 +214,8 @@ public:
return well_domain_;
}

std::vector<int> getCellsForConnections(const Well& well) const;

bool reportStepStarts() const { return report_step_starts_; }

bool shouldBalanceNetwork(const int reportStepIndex,
@@ -435,7 +437,6 @@ protected:
/// \brief get compressed index for interior cells (-1, otherwise
virtual int compressedIndexForInterior(int cartesian_cell_idx) const = 0;

std::vector<int> getCellsForConnections(const Well& well) const;
std::vector<std::vector<int>> getMaxWellConnections() const;

std::vector<std::string> getWellsForTesting(const int timeStepIdx,
@@ -57,10 +57,6 @@
#include <opm/simulators/linalg/gpubridge/WellContributions.hpp>
#endif

#if HAVE_MPI
#include <opm/simulators/utils/MPISerializer.hpp>
#endif

#include <algorithm>
#include <cassert>
#include <iomanip>
@@ -73,7 +69,8 @@ namespace Opm {
template<typename TypeTag>
BlackoilWellModel<TypeTag>::
BlackoilWellModel(Simulator& simulator, const PhaseUsage& phase_usage)
: BlackoilWellModelGeneric<Scalar>(simulator.vanguard().schedule(),
: WellConnectionModule(*this, simulator.gridView().comm())
, BlackoilWellModelGeneric<Scalar>(simulator.vanguard().schedule(),
simulator.vanguard().summaryState(),
simulator.vanguard().eclState(),
phase_usage,
@@ -190,116 +187,6 @@ namespace Opm {
}
}


template<typename TypeTag>
void
BlackoilWellModel<TypeTag>::
addNeighbors(std::vector<NeighborSet>& neighbors) const
{
if (!param_.matrix_add_well_contributions_) {
return;
}

// Create cartesian to compressed mapping
const auto& schedule_wells = this->schedule().getWellsatEnd();
auto possibleFutureConnections = this->schedule().getPossibleFutureConnections();

#if HAVE_MPI
// Communicate Map to other processes, since it is only available on rank 0
const auto& comm = this->simulator_.vanguard().grid().comm();
Parallel::MpiSerializer ser(comm);
ser.broadcast(possibleFutureConnections);
#endif
// initialize the additional cell connections introduced by wells.
for (const auto& well : schedule_wells)
{
std::vector<int> wellCells = this->getCellsForConnections(well);
// Now add the cells of the possible future connections
const auto possibleFutureConnectionSetIt = possibleFutureConnections.find(well.name());
if (possibleFutureConnectionSetIt != possibleFutureConnections.end()) {
for (auto& global_index : possibleFutureConnectionSetIt->second) {
int compressed_idx = compressedIndexForInterior(global_index);
if (compressed_idx >= 0) { // Ignore connections in inactive/remote cells.
wellCells.push_back(compressed_idx);
}
}
}
for (int cellIdx : wellCells) {
neighbors[cellIdx].insert(wellCells.begin(),
wellCells.end());
}
}
}


template<typename TypeTag>
void
BlackoilWellModel<TypeTag>::
linearize(SparseMatrixAdapter& jacobian, GlobalEqVector& res)
{
OPM_BEGIN_PARALLEL_TRY_CATCH();
for (const auto& well: well_container_) {
// Modifiy the Jacobian with explicit Schur complement
// contributions if requested.
if (param_.matrix_add_well_contributions_) {
well->addWellContributions(jacobian);
}
// Apply as Schur complement the well residual to reservoir residuals:
// r = r - duneC_^T * invDuneD_ * resWell_
// Well equations B and C uses only the perforated cells, so need to apply on local residual
const auto& cells = well->cells();
linearize_res_local_.resize(cells.size());

for (size_t i = 0; i < cells.size(); ++i) {
linearize_res_local_[i] = res[cells[i]];
}

well->apply(linearize_res_local_);

for (size_t i = 0; i < cells.size(); ++i) {
res[cells[i]] = linearize_res_local_[i];
}
}
OPM_END_PARALLEL_TRY_CATCH("BlackoilWellModel::linearize failed: ",
simulator_.gridView().comm());
}


template<typename TypeTag>
void
BlackoilWellModel<TypeTag>::
linearizeDomain(const Domain& domain, SparseMatrixAdapter& jacobian, GlobalEqVector& res)
{
// Note: no point in trying to do a parallel gathering
// try/catch here, as this function is not called in
// parallel but for each individual domain of each rank.
for (const auto& well: well_container_) {
if (this->well_domain_.at(well->name()) == domain.index) {
// Modifiy the Jacobian with explicit Schur complement
// contributions if requested.
if (param_.matrix_add_well_contributions_) {
well->addWellContributions(jacobian);
}
// Apply as Schur complement the well residual to reservoir residuals:
// r = r - duneC_^T * invDuneD_ * resWell_
// Well equations B and C uses only the perforated cells, so need to apply on local residual
const auto& cells = well->cells();
linearize_res_local_.resize(cells.size());

for (size_t i = 0; i < cells.size(); ++i) {
linearize_res_local_[i] = res[cells[i]];
}

well->apply(linearize_res_local_);

for (size_t i = 0; i < cells.size(); ++i) {
res[cells[i]] = linearize_res_local_[i];
}
}
}
}


template<typename TypeTag>
void
BlackoilWellModel<TypeTag>::
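The linearize() and linearizeDomain() bodies above apply each well's contribution to the reservoir residual as a Schur complement, r = r - duneC_^T * invDuneD_ * resWell_, restricted to that well's perforated cells. A minimal sketch of the gather/apply/scatter step, assuming only a vector type with operator[] and resize() and a well type exposing cells() and apply() (hypothetical names mirroring the calls above, not the OPM declarations):

#include <cstddef>
#include <vector>

// Gather the global residual entries for the perforated cells, let the well
// apply its Schur-complement contribution to that local copy, then scatter
// the result back into the global residual vector.
template <class Vector, class Well>
void applyWellSchurComplement(const Well& well, Vector& res, Vector& res_local)
{
    const auto& cells = well.cells();              // compressed indices of perforated cells
    res_local.resize(cells.size());

    for (std::size_t i = 0; i < cells.size(); ++i) // gather
        res_local[i] = res[cells[i]];

    well.apply(res_local);                         // res_local -= C^T * D^{-1} * r_well

    for (std::size_t i = 0; i < cells.size(); ++i) // scatter
        res[cells[i]] = res_local[i];
}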
@@ -1711,7 +1598,9 @@ namespace Opm {
template<typename TypeTag>
void
BlackoilWellModel<TypeTag>::
addWellPressureEquations(PressureMatrix& jacobian, const BVector& weights, const bool use_well_weights) const
addWellPressureEquations(PressureMatrix& jacobian,
const BVector& weights,
const bool use_well_weights) const
{
int nw = this->numLocalWellsEnd();
int rdofs = local_num_cells_;
@@ -1720,8 +1609,12 @@ namespace Opm {
jacobian[wdof][wdof] = 1.0;// better scaling ?
}

for ( const auto& well : well_container_ ) {
well->addWellPressureEquations(jacobian, weights, pressureVarIndex, use_well_weights, this->wellState());
for (const auto& well : well_container_) {
well->addWellPressureEquations(jacobian,
weights,
pressureVarIndex,
use_well_weights,
this->wellState());
}
}
@@ -1810,14 +1703,15 @@ namespace Opm {
DeferredLogger local_deferredLogger;
OPM_BEGIN_PARALLEL_TRY_CATCH();
{
for (auto& well : well_container_) {
for (const auto& well : well_container_) {
const auto& cells = well->cells();
x_local_.resize(cells.size());

for (size_t i = 0; i < cells.size(); ++i) {
x_local_[i] = x[cells[i]];
}
well->recoverWellSolutionAndUpdateWellState(simulator_, x_local_, this->wellState(), local_deferredLogger);
well->recoverWellSolutionAndUpdateWellState(simulator_, x_local_,
this->wellState(), local_deferredLogger);
}
}
OPM_END_PARALLEL_TRY_CATCH_LOG(local_deferredLogger,
@@ -23,51 +23,172 @@

#include <opm/models/discretization/common/baseauxiliarymodule.hh>

#include <vector>
#include <opm/simulators/flow/SubDomain.hpp>

namespace Dune { class CpGrid; }
#include <opm/simulators/utils/DeferredLoggingErrorHelpers.hpp>
#include <opm/simulators/utils/ParallelCommunication.hpp>

namespace Opm
{

class Schedule;

template<class TypeTag>
class WellConnectionAuxiliaryModule
: public BaseAuxiliaryModule<TypeTag>
#if HAVE_MPI
#include <opm/simulators/utils/MPISerializer.hpp>
#endif

namespace Opm {

template<class TypeTag, class Model>
class WellConnectionAuxiliaryModule : public BaseAuxiliaryModule<TypeTag>
{
using Grid = GetPropType<TypeTag, Properties::Grid>;
using GlobalEqVector = GetPropType<TypeTag, Properties::GlobalEqVector>;
using SparseMatrixAdapter = GetPropType<TypeTag, Properties::SparseMatrixAdapter>;

public:

using NeighborSet = typename
::Opm::BaseAuxiliaryModule<TypeTag>::NeighborSet;

WellConnectionAuxiliaryModule(const Schedule& schedule,
const Dune::CpGrid& grid)
using Domain = SubDomain<Grid>;

WellConnectionAuxiliaryModule(Model& model, Parallel::Communication comm)
: model_(model)
, lin_comm_(std::move(comm))
{
}

unsigned numDofs() const
unsigned numDofs() const override
{
// No extra dofs are inserted for wells.
return 0;
}

void addNeighbors(std::vector<NeighborSet>& neighbors) const
void addNeighbors(std::vector<NeighborSet>& neighbors) const override
{
if (!model_.addMatrixContributions()) {
return;
}

// Create cartesian to compressed mapping
const auto& schedule_wells = model_.schedule().getWellsatEnd();
auto possibleFutureConnections = model_.schedule().getPossibleFutureConnections();

#if HAVE_MPI
// Communicate Map to other processes, since it is only available on rank 0
Parallel::MpiSerializer ser(lin_comm_);
ser.broadcast(possibleFutureConnections);
#endif
// initialize the additional cell connections introduced by wells.
for (const auto& well : schedule_wells)
{
std::vector<int> wellCells = model_.getCellsForConnections(well);
// Now add the cells of the possible future connections
const auto possibleFutureConnectionSetIt = possibleFutureConnections.find(well.name());
if (possibleFutureConnectionSetIt != possibleFutureConnections.end()) {
for (auto& global_index : possibleFutureConnectionSetIt->second) {
int compressed_idx = model_.compressedIndexForInterior(global_index);
if (compressed_idx >= 0) { // Ignore connections in inactive/remote cells.
wellCells.push_back(compressed_idx);
}
}
}
for (int cellIdx : wellCells) {
neighbors[cellIdx].insert(wellCells.begin(),
wellCells.end());
}
}
}

void applyInitial()
void applyInitial() override
{}

void linearize(SparseMatrixAdapter& , GlobalEqVector&)
void linearize(SparseMatrixAdapter& jacobian, GlobalEqVector& res) override
{
// Linearization is done in StandardDenseWells
OPM_BEGIN_PARALLEL_TRY_CATCH();
for (const auto& well : model_) {
// Modifiy the Jacobian with explicit Schur complement
// contributions if requested.
if (model_.addMatrixContributions()) {
well->addWellContributions(jacobian);
}
// Apply as Schur complement the well residual to reservoir residuals:
// r = r - duneC_^T * invDuneD_ * resWell_
// Well equations B and C uses only the perforated cells, so need to apply on local residual
const auto& cells = well->cells();
linearize_res_local_.resize(cells.size());

for (size_t i = 0; i < cells.size(); ++i) {
linearize_res_local_[i] = res[cells[i]];
}

well->apply(linearize_res_local_);

for (size_t i = 0; i < cells.size(); ++i) {
res[cells[i]] = linearize_res_local_[i];
}
}
OPM_END_PARALLEL_TRY_CATCH("BlackoilWellModel::linearize failed: ", lin_comm_);
}

private:
void postSolve(GlobalEqVector& deltaX) override
{
model_.recoverWellSolutionAndUpdateWellState(deltaX);
}

void linearizeDomain(const Domain& domain,
SparseMatrixAdapter& jacobian,
GlobalEqVector& res)
{
// Note: no point in trying to do a parallel gathering
// try/catch here, as this function is not called in
// parallel but for each individual domain of each rank.
for (const auto& well : model_) {
if (model_.well_domain().at(well->name()) == domain.index) {
// Modifiy the Jacobian with explicit Schur complement
// contributions if requested.
if (model_.addMatrixContributions()) {
well->addWellContributions(jacobian);
}
// Apply as Schur complement the well residual to reservoir residuals:
// r = r - duneC_^T * invDuneD_ * resWell_
// Well equations B and C uses only the perforated cells, so need to apply on local residual
const auto& cells = well->cells();
linearize_res_local_.resize(cells.size());

for (size_t i = 0; i < cells.size(); ++i) {
linearize_res_local_[i] = res[cells[i]];
}

well->apply(linearize_res_local_);

for (size_t i = 0; i < cells.size(); ++i) {
res[cells[i]] = linearize_res_local_[i];
}
}
}
}

void postSolveDomain(GlobalEqVector& deltaX, const Domain& domain)
{
model_.recoverWellSolutionAndUpdateWellStateDomain(deltaX, domain);
}

template <class Restarter>
void deserialize(Restarter& /* res */)
{
// TODO (?)
}

/*!
* \brief This method writes the complete state of the well
* to the harddisk.
*/
template <class Restarter>
void serialize(Restarter& /* res*/)
{
// TODO (?)
}

private:
Model& model_;
GlobalEqVector linearize_res_local_{};
Parallel::Communication lin_comm_;
};

} // end namespace OPM
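For reference, the addNeighbors() implementation above couples every cell perforated by a well to every other cell of the same well, so each well contributes a small dense block to the Jacobian sparsity pattern. A tiny self-contained illustration of that insertion pattern (toy sizes, std::set standing in for the NeighborSet type):

#include <set>
#include <vector>

int main()
{
    // One neighbor set per (compressed) cell of a toy grid.
    std::vector<std::set<int>> neighbors(6);

    // Compressed cell indices perforated by one well.
    const std::vector<int> wellCells = {1, 3, 4};

    // Same insertion as in addNeighbors(): all-to-all coupling among the well's cells.
    for (int cellIdx : wellCells)
        neighbors[cellIdx].insert(wellCells.begin(), wellCells.end());

    // neighbors[1], neighbors[3] and neighbors[4] now all contain {1, 3, 4}.
    return 0;
}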