Add PORV argument when creating PAvgCalculator

Joakim Hove 2020-12-04 10:58:27 +01:00
parent 0102d736ac
commit 9809197b5d
8 changed files with 22 additions and 25 deletions


@@ -193,7 +193,6 @@ namespace Opm
 const TimeMap& getTimeMap() const;
-PAvgCalculatorCollection pavg_calculators(const EclipseGrid& grid, const std::unordered_set<std::string>& wells, std::size_t report_step) const;
 std::size_t numWells() const;
 std::size_t numWells(std::size_t timestep) const;
 bool hasWell(const std::string& wellName) const;


@@ -37,7 +37,7 @@ class Serializer;
 class PAvgCalculator {
 public:
-PAvgCalculator(const std::string& well, const EclipseGrid& grid, const WellConnections& connections, const PAvg& pavg);
+PAvgCalculator(const std::string& well, const EclipseGrid& grid, const std::vector<double>& porv, const WellConnections& connections, const PAvg& pavg);
 enum class WBPMode {
 WBP,


@@ -592,7 +592,7 @@ public:
 void applyWellProdIndexScaling(const double scalingFactor,
 std::vector<bool>& scalingApplicable);
 const PAvg& pavg() const;
-PAvgCalculator pavg_calculator(const EclipseGrid& grid) const;
+PAvgCalculator pavg_calculator(const EclipseGrid& grid, const std::vector<double>& porv) const;
 template<class Serializer>
 void serializeOp(Serializer& serializer)


@@ -2931,12 +2931,16 @@ internal_store(const SummaryState& st, const int report_step)
 }
 Opm::PAvgCalculatorCollection Opm::out::Summary::SummaryImplementation::wbp_calculators(std::size_t report_step) const {
+if (this->wbp_wells.empty())
+return {};
 Opm::PAvgCalculatorCollection calculators;
+const auto& porv = this->es_.get().globalFieldProps().porv(true);
 for (const auto& wname : this->wbp_wells) {
 if (this->sched_.get().hasWell(wname, report_step)) {
 const auto& well = this->sched_.get().getWell(wname, report_step);
 if (well.getStatus() == Opm::Well::Status::OPEN)
-calculators.add(well.pavg_calculator(this->grid_));
+calculators.add(well.pavg_calculator(this->grid_, porv));
 }
 }
 return calculators;


@@ -2026,13 +2026,4 @@ bool Schedule::cmp(const Schedule& sched1, const Schedule& sched2, std::size_t r
 }
-PAvgCalculatorCollection Schedule::pavg_calculators(const EclipseGrid& grid, const std::unordered_set<std::string>& wells, std::size_t report_step) const
-{
-PAvgCalculatorCollection calculators;
-for (const auto& wname : wells) {
-const auto& well = this->getWell(wname, report_step);
-calculators.add(well.pavg_calculator(grid));
-}
-return calculators;
-}
 }


@@ -54,7 +54,7 @@ const std::string& PAvgCalculator::wname() const {
 }
-PAvgCalculator::PAvgCalculator(const std::string& well, const EclipseGrid& grid, const WellConnections& connections, const PAvg& pavg) :
+PAvgCalculator::PAvgCalculator(const std::string& well, const EclipseGrid& grid, const std::vector<double>& porv, const WellConnections& connections, const PAvg& pavg) :
 well_name(well),
 m_pavg(pavg)
 {
@@ -62,10 +62,12 @@ PAvgCalculator::PAvgCalculator(const std::string& well, const EclipseGrid& grid,
 OpmLog::warning("PORV based averaging is not yet supported in WBPx");
 //throw std::logic_error("The current implementation does not yet support PORV based averaging");
+if (porv.size() != grid.getCartesianSize())
+throw std::logic_error("Should pass a GLOBAL porv vector");
 for (const auto& conn : connections) {
 if (conn.state() == ::Opm::Connection::State::OPEN || !this->m_pavg.open_connections()) {
-double porv = -1;
-Connection wp_conn(porv, conn.CF(), conn.dir(), conn.global_index());
+Connection wp_conn(porv[conn.global_index()], conn.CF(), conn.dir(), conn.global_index());
 this->add_connection(wp_conn);
 }
 }
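For orientation, a minimal sketch (not part of the commit) of how a caller is expected to feed the new porv argument. The constructor now requires the GLOBAL pore-volume vector, one entry per Cartesian cell, so that porv.size() matches grid.getCartesianSize() and each connection can be looked up via conn.global_index(); the Summary.cpp hunk above obtains it with globalFieldProps().porv(true). The helper name make_wbp_calculator and the include paths are assumptions; everything else appears in the diff.

    // Sketch only -- include paths are assumed to follow opm-common's layout
    // at the time of this commit; adjust to the actual header locations.
    #include <opm/parser/eclipse/EclipseState/EclipseState.hpp>
    #include <opm/parser/eclipse/EclipseState/Grid/EclipseGrid.hpp>
    #include <opm/parser/eclipse/EclipseState/Schedule/Well/Well.hpp>
    #include <opm/parser/eclipse/EclipseState/Schedule/Well/PAvgCalculator.hpp>

    // Hypothetical helper (not part of the commit): build a WBP calculator for one well.
    Opm::PAvgCalculator make_wbp_calculator(const Opm::EclipseState& es,
                                            const Opm::EclipseGrid&  grid,
                                            const Opm::Well&         well)
    {
        // porv(true) returns the GLOBAL pore-volume vector, i.e. one value per
        // Cartesian cell, so the size check against grid.getCartesianSize() in the
        // constructor above passes and porv can be indexed by conn.global_index().
        const auto& porv = es.globalFieldProps().porv(true);
        return well.pavg_calculator(grid, porv);
    }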


@@ -1644,8 +1644,8 @@ bool Well::operator==(const Well& data) const {
 }
-PAvgCalculator Well::pavg_calculator(const EclipseGrid& grid) const {
-return PAvgCalculator(this->name(), grid, this->getConnections(), this->m_pavg);
+PAvgCalculator Well::pavg_calculator(const EclipseGrid& grid, const std::vector<double>& porv) const {
+return PAvgCalculator(this->name(), grid, porv, this->getConnections(), this->m_pavg);
 }


@@ -195,7 +195,8 @@ END
 auto sched = Schedule{ deck, es };
 auto summary_config = SummaryConfig{deck, sched, es.getTableManager(), es.aquifer()};
 const auto& w1 = sched.getWell("P1", 0);
-auto calc = w1.pavg_calculator(grid);
+const auto& porv = es.globalFieldProps().porv(true);
+auto calc = w1.pavg_calculator(grid, porv);
 {
 const auto& index_list = calc.index_list();
@@ -234,7 +235,7 @@ END
 //----------------------------------------------------
 const auto& w5 = sched.getWell("P5", 0);
-auto calc5 = w5.pavg_calculator(grid);
+auto calc5 = w5.pavg_calculator(grid, porv);
 {
 const auto& index_list = calc5.index_list();
@@ -262,8 +263,8 @@ END
 // We emulate MPI and calc1 and calc2 are on two different processors
 {
-auto calc1 = w5.pavg_calculator(grid);
-auto calc2 = w5.pavg_calculator(grid);
+auto calc1 = w5.pavg_calculator(grid, porv);
+auto calc2 = w5.pavg_calculator(grid, porv);
 for (std::size_t k = 0; k < 3; k++) {
 calc1.add_pressure(grid.getGlobalIndex(0,0,k), 1);
 calc2.add_pressure(grid.getGlobalIndex(1,0,k), 2.0);
@@ -285,9 +286,9 @@ END
-auto calculators = sched.pavg_calculators(grid, summary_config.wbp_wells(), 0);
-calculators.add(w1.pavg_calculator(grid));
-calculators.add(w5.pavg_calculator(grid));
+PAvgCalculatorCollection calculators;
+calculators.add(w1.pavg_calculator(grid, porv));
+calculators.add(w5.pavg_calculator(grid, porv));
 BOOST_CHECK( calculators.has("P1"));
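Finally, a hedged sketch of calling the new five-argument constructor directly instead of going through Well::pavg_calculator. Here es, sched and grid are assumed to be set up as in the test above; the member functions used are the ones that appear in the hunks of this commit.

    // Sketch only: equivalent to well.pavg_calculator(grid, porv) after this commit.
    const auto& porv = es.globalFieldProps().porv(true);    // GLOBAL pore-volume vector
    const auto& well = sched.getWell("P1", 0);              // well "P1" at report step 0
    Opm::PAvgCalculator calc(well.name(),                   // well name
                             grid,                          // EclipseGrid
                             porv,                          // new PORV argument
                             well.getConnections(),         // WellConnections
                             well.pavg());                  // block-average pressure settings (PAvg)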