Mirror of https://github.com/OPM/opm-simulators.git (synced 2025-02-25 18:55:30 -06:00)
Add Parallel Calculation Support for WBPn/WPAVE
This commit adds a parallel calculation object derived from the serial PAvgCalculator class. The parallel version is aware of MPI communicators and knows how to aggregate contributions from wells that may be distributed across ranks. We also add a wrapper class, ParallelWBPCalculation, which knows how to exchange information between PAvgCalculatorCollection objects on different ranks and, in particular, how to properly prune inactive cells/connections.
parent 57532195da
commit 95d715b807
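
For context, here is a minimal sketch of the intended calling sequence, pieced together from the interfaces added below. The calculator object, controls, gravity, and reference depth are placeholders supplied by the surrounding simulator; only the call order is the point.

    #include <opm/simulators/wells/ParallelWBPCalculation.hpp>

    #include <opm/input/eclipse/Schedule/Well/PAvg.hpp>
    #include <opm/input/eclipse/Schedule/Well/PAvgCalculator.hpp>

    #include <cstddef>

    // Hypothetical driver illustrating the call order; every argument is
    // supplied by the surrounding simulator.
    double wbpReportValue(Opm::ParallelWBPCalculation& calc,
                          const std::size_t            calcIdx,
                          const Opm::PAvg&             controls,
                          const double                 gravity,
                          const double                 refDepth)
    {
        calc.defineCommunication();   // once, after all calculators exist
        calc.collectDynamicValues();  // each report step: gather + distribute sources

        calc.inferBlockAveragePressures(calcIdx, controls, gravity, refDepth);

        using WBPMode = Opm::PAvgCalculator::Result::WBPMode;
        return calc.averagePressures(calcIdx).value(WBPMode::WBP);
    }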
@@ -415,6 +415,46 @@ opm_add_test(test_parallel_wbp_sourcevalues_np4
     4
 )

 opm_add_test(test_parallel_wbp_calculation
   SOURCES
     tests/test_parallel_wbp_calculation.cpp
   LIBRARIES
     opmsimulators ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}
   CONDITION
     MPI_FOUND AND Boost_UNIT_TEST_FRAMEWORK_FOUND
   ONLY_COMPILE
 )

 opm_add_test(test_parallel_wbp_calculation_create
   EXE_NAME
     test_parallel_wbp_calculation
   CONDITION
     MPI_FOUND AND Boost_UNIT_TEST_FRAMEWORK_FOUND
   DRIVER_ARGS
     -n 2
     -b ${PROJECT_BINARY_DIR}
   TEST_ARGS
     --run_test=Create
   NO_COMPILE
   PROCESSORS
     2
 )

 opm_add_test(test_parallel_wbp_calculation_well_openconns
   EXE_NAME
     test_parallel_wbp_calculation
   CONDITION
     MPI_FOUND AND Boost_UNIT_TEST_FRAMEWORK_FOUND
   DRIVER_ARGS
     -n 2
     -b ${PROJECT_BINARY_DIR}
   TEST_ARGS
     --run_test=TopOfFormation_Well_OpenConns
   NO_COMPILE
   PROCESSORS
     2
 )

 opm_add_test(test_broadcast
   DEPENDS "opmsimulators"
   LIBRARIES opmsimulators ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}
@@ -105,7 +105,9 @@ list (APPEND MAIN_SOURCE_FILES
   opm/simulators/wells/MultisegmentWellGeneric.cpp
   opm/simulators/wells/MultisegmentWellPrimaryVariables.cpp
   opm/simulators/wells/MultisegmentWellSegments.cpp
   opm/simulators/wells/ParallelPAvgCalculator.cpp
   opm/simulators/wells/ParallelPAvgDynamicSourceData.cpp
   opm/simulators/wells/ParallelWBPCalculation.cpp
   opm/simulators/wells/ParallelWellInfo.cpp
   opm/simulators/wells/PerfData.cpp
   opm/simulators/wells/RateConverter.cpp

@@ -264,6 +266,7 @@ if(MPI_FOUND)
   list(APPEND TEST_SOURCE_FILES tests/test_parallelistlinformation.cpp
                                 tests/test_ParallelSerialization.cpp)
 endif()

 if(CUDA_FOUND)
   list(APPEND TEST_SOURCE_FILES tests/test_cusparseSolver.cpp)
   list(APPEND TEST_SOURCE_FILES tests/cuistl/test_cusparse_safe_call.cpp)

@@ -482,7 +485,9 @@ list (APPEND PUBLIC_HEADER_FILES
   opm/simulators/wells/MultisegmentWellGeneric.hpp
   opm/simulators/wells/MultisegmentWellPrimaryVariables.hpp
   opm/simulators/wells/MultisegmentWellSegments.hpp
   opm/simulators/wells/ParallelPAvgCalculator.hpp
   opm/simulators/wells/ParallelPAvgDynamicSourceData.hpp
   opm/simulators/wells/ParallelWBPCalculation.hpp
   opm/simulators/wells/ParallelWellInfo.hpp
   opm/simulators/wells/PerfData.hpp
   opm/simulators/wells/PerforationData.hpp

opm/simulators/wells/ParallelPAvgCalculator.cpp (new file, 56 lines)
@@ -0,0 +1,56 @@
/*
  Copyright 2023 Equinor ASA.

  This file is part of the Open Porous Media project (OPM).

  OPM is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 3 of the License, or
  (at your option) any later version.

  OPM is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with OPM.  If not, see <http://www.gnu.org/licenses/>.
*/

#include <config.h>

#include <opm/simulators/wells/ParallelPAvgCalculator.hpp>

#include <opm/simulators/utils/ParallelCommunication.hpp>

#include <opm/input/eclipse/EclipseState/Grid/GridDims.hpp>

#include <opm/input/eclipse/Schedule/Well/PAvgCalculator.hpp>
#include <opm/input/eclipse/Schedule/Well/WellConnections.hpp>

#include <array>
#include <functional>
#include <initializer_list>

Opm::ParallelPAvgCalculator::
ParallelPAvgCalculator(const Parallel::Communication& comm,
                       const GridDims&                cellIndexMap,
                       const WellConnections&         connections)
    : PAvgCalculator { cellIndexMap, connections }
    , comm_          { comm }
{}

void Opm::ParallelPAvgCalculator::collectGlobalContributions()
{
    auto collect = [this](Accumulator& accumulator)
    {
        auto avg = accumulator.getRunningAverages();

        this->comm_.get().sum(avg.data(), avg.size());

        accumulator.assignRunningAverages(avg);
    };

    collect(this->accumCTF_);
    collect(this->accumPV_);
}

opm/simulators/wells/ParallelPAvgCalculator.hpp (new file, 71 lines)
@@ -0,0 +1,71 @@
/*
  Copyright 2023 Equinor ASA.

  This file is part of the Open Porous Media project (OPM).

  OPM is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 3 of the License, or
  (at your option) any later version.

  OPM is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with OPM.  If not, see <http://www.gnu.org/licenses/>.
*/

#ifndef PARALLEL_PAVG_CALCULATOR_HPP
#define PARALLEL_PAVG_CALCULATOR_HPP

#include <opm/input/eclipse/Schedule/Well/PAvgCalculator.hpp>

#include <opm/simulators/utils/ParallelCommunication.hpp>

#include <functional>

namespace Opm {
    class GridDims;
    class WellConnections;
} // namespace Opm

namespace Opm {

/// Facility for deriving well-level pressure values from selected
/// block-averaging procedures.  Applicable to stopped wells which don't
/// have a flowing bottom-hole pressure.  Mainly useful for reporting.
///
/// Parallel edition.  Handles distributed wells.
class ParallelPAvgCalculator : public PAvgCalculator
{
public:
    /// Constructor
    ///
    /// \param[in] comm MPI communication object.  Typically \code
    /// ParallelWellInfo::communication() \endcode.
    ///
    /// \param[in] cellIndexMap Cell index triple map ((I,J,K) <-> global).
    ///
    /// \param[in] connections List of reservoir connections for single
    /// well.
    ParallelPAvgCalculator(const Parallel::Communication& comm,
                           const GridDims&                cellIndexMap,
                           const WellConnections&         connections);

private:
    /// MPI communication object.
    std::reference_wrapper<const Parallel::Communication> comm_;

    /// Communicate local contributions and collect global (off-rank)
    /// contributions.
    ///
    /// Reads from and writes to base class data members \c accumCTF_ and \c
    /// accumPV_.
    void collectGlobalContributions() override;
};

} // namespace Opm

#endif // PARALLEL_PAVG_CALCULATOR_HPP
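
The collectGlobalContributions() override above reduces each accumulator's running averages with a communicator-wide element-wise sum, so every rank ends up holding the global contributions. A standalone illustration of that pattern in raw MPI follows; it is not OPM code, and the two-element layout of the running averages is an assumption made purely for the example (the real layout is internal to PAvgCalculator's Accumulator).

    #include <mpi.h>

    #include <array>

    void sumRunningAverages(MPI_Comm comm)
    {
        // Partial contributions accumulated on this rank (hypothetical
        // two-element layout, e.g. weighted sum and total weight).
        std::array<double, 2> avg {};

        MPI_Allreduce(MPI_IN_PLACE, avg.data(), static_cast<int>(avg.size()),
                      MPI_DOUBLE, MPI_SUM, comm);

        // 'avg' now holds the globally summed contributions on every rank,
        // mirroring Accumulator::assignRunningAverages() in the class above.
    }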

opm/simulators/wells/ParallelWBPCalculation.cpp (new file, 343 lines)
@@ -0,0 +1,343 @@
/*
  Copyright 2023 Equinor ASA.

  This file is part of the Open Porous Media project (OPM).

  OPM is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 3 of the License, or
  (at your option) any later version.

  OPM is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with OPM.  If not, see <http://www.gnu.org/licenses/>.
*/

#include <config.h>

#include <opm/simulators/wells/ParallelWBPCalculation.hpp>

#include <opm/simulators/wells/PerforationData.hpp>
#include <opm/simulators/wells/ParallelPAvgCalculator.hpp>
#include <opm/simulators/wells/ParallelWellInfo.hpp>

#include <opm/simulators/utils/ParallelCommunication.hpp>

#include <opm/input/eclipse/Schedule/Well/PAvgCalculator.hpp>
#include <opm/input/eclipse/Schedule/Well/PAvgCalculatorCollection.hpp>
#include <opm/input/eclipse/Schedule/Well/Well.hpp>
#include <opm/input/eclipse/Schedule/Well/WellConnections.hpp>

#include <opm/grid/common/CommunicationUtils.hpp>

#include <opm/input/eclipse/EclipseState/Grid/GridDims.hpp>

#include <opm/common/ErrorMacros.hpp>

#include <algorithm>
#include <cassert>
#include <cstddef>
#include <memory>
#include <numeric>
#include <stdexcept>
#include <tuple>
#include <utility>
#include <vector>

Opm::ParallelWBPCalculation::
LocalConnSet::LocalConnSet(const std::vector<int>& localConnIdx)
    : localConnIdx_ { localConnIdx }
{}

int
Opm::ParallelWBPCalculation::
LocalConnSet::localIndex(const std::size_t connIdx) const
{
    return (connIdx >= this->localConnIdx_.size())
        ? -1
        : this->localConnIdx_[connIdx];
}

// ---------------------------------------------------------------------------

Opm::ParallelWBPCalculation::SourceData::SourceData(const Parallel::Communication& comm)
    : comm_ { comm }
{}

Opm::ParallelWBPCalculation::SourceData&
Opm::ParallelWBPCalculation::SourceData::
localIndex(GlobalToLocal localIdx)
{
    this->localIdx_ = std::move(localIdx);
    return *this;
}

Opm::ParallelWBPCalculation::SourceData&
Opm::ParallelWBPCalculation::SourceData::
evaluator(Evaluator eval)
{
    this->eval_ = std::move(eval);
    return *this;
}

Opm::ParallelWBPCalculation::SourceData&
Opm::ParallelWBPCalculation::SourceData::
evaluatorFactory(EvaluatorFactory evalFactory)
{
    this->evalFactory_ = std::move(evalFactory);
    return *this;
}

void
Opm::ParallelWBPCalculation::SourceData::
buildStructure(const std::vector<std::size_t>& sourceLocations)
{
    if (this->srcData_ == nullptr) {
        this->srcData_ = std::make_unique<ParallelPAvgDynamicSourceData>
            (this->comm_, sourceLocations, this->localIdx_);
    }
    else {
        this->srcData_->reconstruct(sourceLocations, this->localIdx_);
    }
}

void Opm::ParallelWBPCalculation::SourceData::collectDynamicValues()
{
    if (this->srcData_ == nullptr) {
        throw std::logic_error {
            "Cannot collect dynamic WBP source values "
            "prior to constructing source object"
        };
    }

    // For safety reasons, especially when this SourceData object pertains
    // to well connections and the user selects the "ALL" connection flag.
    this->srcData_->setToZero();

    if (this->eval_) {
        this->srcData_->collectLocalSources(this->eval_);
    }
    else if (this->evalFactory_) {
        this->srcData_->collectLocalSources(this->evalFactory_());
    }
    else {
        OPM_THROW(std::logic_error,
                  "Collecting WBP inputs requires an evaluation "
                  "function or an evaluation function factory");
    }

    this->srcData_->synchroniseSources();
}

std::vector<int>
Opm::ParallelWBPCalculation::SourceData::
getLocalIndex(const std::vector<std::size_t>& globalIndex) const
{
    auto localIdx = std::vector<int>(globalIndex.size());

    std::transform(globalIndex.begin(), globalIndex.end(), localIdx.begin(),
                   [this](const std::size_t globIx)
                   {
                       return this->localIdx_(globIx);
                   });

    return localIdx;
}

// ===========================================================================

Opm::ParallelWBPCalculation::ParallelWBPCalculation(const GridDims&                cellIndexMap,
                                                    const Parallel::Communication& gridComm)
    : cellIndexMap_{ cellIndexMap }
    , reservoirSrc_{ gridComm }
{}

Opm::ParallelWBPCalculation&
Opm::ParallelWBPCalculation::localCellIndex(GlobalToLocal localCellIdx)
{
    this->reservoirSrc_.localIndex(std::move(localCellIdx));
    return *this;
}

Opm::ParallelWBPCalculation&
Opm::ParallelWBPCalculation::evalCellSource(Evaluator evalCellSrc)
{
    this->reservoirSrc_.evaluator(std::move(evalCellSrc));
    return *this;
}

std::size_t
Opm::ParallelWBPCalculation::
createCalculator(const Well&             well,
                 const ParallelWellInfo& parallelWellInfo,
                 const std::vector<int>& localConnIdx,
                 EvaluatorFactory        makeWellSourceEvaluator)
{
    assert (this->wellConnSrc_.size() == this->localConnSet_.size());

    const auto ix = this->calculators_
        .setCalculator(well.seqIndex(), std::make_unique<ParallelPAvgCalculator>
                       (parallelWellInfo.communication(),
                        this->cellIndexMap_, well.getConnections()));

    assert (ix <= this->wellConnSrc_.size());

    if (ix == this->wellConnSrc_.size()) {
        // New calculator.
        this->wellConnSrc_.emplace_back(parallelWellInfo.communication());
        this->localConnSet_.emplace_back(localConnIdx);
    }
    else {
        // Update existing calculator.  Reset sources and connection sets.
        this->wellConnSrc_[ix] = SourceData { parallelWellInfo.communication() };
        this->localConnSet_[ix] = LocalConnSet { localConnIdx };
    }

    this->wellConnSrc_[ix]
        .evaluatorFactory(std::move(makeWellSourceEvaluator))
        .localIndex([ix = ix, this](const std::size_t connIdx)
        { return this->localConnSet_[ix].localIndex(connIdx); });

    return ix;
}

void Opm::ParallelWBPCalculation::defineCommunication()
{
    assert (this->calculators_.numCalculators() == this->wellConnSrc_.size());

    this->pruneInactiveWBPCells();

    this->defineReservoirCommunication();

    const auto numWells = this->calculators_.numCalculators();
    for (auto well = 0*numWells; well < numWells; ++well) {
        this->defineWellCommunication(well);
    }
}

void Opm::ParallelWBPCalculation::collectDynamicValues()
{
    this->reservoirSrc_.collectDynamicValues();

    for (auto& wellSrc : this->wellConnSrc_) {
        wellSrc.collectDynamicValues();
    }
}

void
Opm::ParallelWBPCalculation::
inferBlockAveragePressures(const std::size_t calcIndex,
                           const PAvg&       controls,
                           const double      gravity,
                           const double      refDepth)
{
    this->calculators_[calcIndex]
        .inferBlockAveragePressures(this->makeEvaluationSources(calcIndex),
                                    controls, gravity, refDepth);
}

const Opm::PAvgCalculator::Result&
Opm::ParallelWBPCalculation::averagePressures(const std::size_t calcIndex) const
{
    return this->calculators_[calcIndex].averagePressures();
}

void Opm::ParallelWBPCalculation::pruneInactiveWBPCells()
{
    if (this->reservoirSrc_.comm().size() == 1) {
        this->pruneInactiveWBPCellsSerial();
    }
    else {
        this->pruneInactiveWBPCellsParallel();
    }
}

void Opm::ParallelWBPCalculation::pruneInactiveWBPCellsSerial()
{
    this->calculators_.pruneInactiveWBPCells
        ([this](const std::vector<std::size_t>& globalWBPCellIdx)
        {
            auto isActive = std::vector<bool>(globalWBPCellIdx.size(), false);

            // Recall: localIndex() is negative if input is inactive or not on rank.
            const auto localWBPCellIdx =
                this->reservoirSrc_.getLocalIndex(globalWBPCellIdx);

            const auto nCells = isActive.size();
            for (auto cellIdx = 0*nCells; cellIdx < nCells; ++cellIdx) {
                isActive[cellIdx] = localWBPCellIdx[cellIdx] >= 0;
            }

            return isActive;
        });
}

void Opm::ParallelWBPCalculation::pruneInactiveWBPCellsParallel()
{
    this->calculators_.pruneInactiveWBPCells(
        [this](const std::vector<std::size_t>& globalWBPCellIdx)
        {
            auto isActive = std::vector<bool>(globalWBPCellIdx.size(), false);

            // AllWBPCells possibly contains repeated indices.  That's okay here.
            const auto& [allWBPCells, rankStart] =
                allGatherv(globalWBPCellIdx, this->reservoirSrc_.comm());

            // Recall: localIndex() is negative if input is inactive or not on rank.
            auto localWBPCellIdx =
                this->reservoirSrc_.getLocalIndex(allWBPCells);

            // The WBP cell is active if it has a non-negative local index on
            // one of the ranks.
            this->reservoirSrc_.comm()
                .max(localWBPCellIdx.data(), localWBPCellIdx.size());

            const auto myRank = this->reservoirSrc_.comm().rank();

            // Recall: This lambda function/callback is responsible only for the
            // range of 'allWBPCells' which applies to 'myRank'.  Consequently,
            // that's the range for which we determine the values in 'isActive'.
            // The rest of 'allWBPCells', or localWBPCellIdx for that matter, is
            // ignored and discarded when the lambda returns.
            auto cellIdx = 0*isActive.size();
            for (auto begin = localWBPCellIdx.begin() + rankStart[myRank + 0],
                      end   = localWBPCellIdx.begin() + rankStart[myRank + 1];
                 begin != end; ++begin, ++cellIdx)
            {
                isActive[cellIdx] = *begin >= 0;
            }

            return isActive;
        });
}

void Opm::ParallelWBPCalculation::defineReservoirCommunication()
{
    auto sourceCells = std::vector<std::size_t>{};

    std::tie(sourceCells, std::ignore) =
        allGatherv(this->calculators_.allWBPCells(), this->reservoirSrc_.comm());

    std::sort(sourceCells.begin(), sourceCells.end());
    auto u = std::unique(sourceCells.begin(), sourceCells.end());

    this->reservoirSrc_.buildStructure({sourceCells.begin(), u});
}

void Opm::ParallelWBPCalculation::defineWellCommunication(const std::size_t well)
{
    this->wellConnSrc_[well]
        .buildStructure(this->calculators_[well].allWellConnections());
}

Opm::PAvgCalculator::Sources
Opm::ParallelWBPCalculation::makeEvaluationSources(const WellID well) const
{
    return PAvgCalculator::Sources{}
        .wellBlocks(this->reservoirSrc_)
        .wellConns (this->wellConnSrc_[well]);
}
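
The parallel pruning above relies on allGatherv() returning the concatenated per-rank data together with a rankStart offset table in which rank r's entries occupy the half-open range [rankStart[r], rankStart[r + 1]). The following standalone sketch of that slicing convention is an illustration only, written under that assumption; the real helper lives in opm/grid/common/CommunicationUtils.hpp.

    #include <cstddef>
    #include <vector>

    // Extract the sub-range of the gathered data that originated on 'myRank',
    // mirroring the loop bounds used in pruneInactiveWBPCellsParallel().
    std::vector<std::size_t>
    sliceForRank(const std::vector<std::size_t>& all,
                 const std::vector<int>&         rankStart,
                 const int                       myRank)
    {
        return { all.begin() + rankStart[myRank + 0],
                 all.begin() + rankStart[myRank + 1] };
    }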

opm/simulators/wells/ParallelWBPCalculation.hpp (new file, 368 lines)
@@ -0,0 +1,368 @@
/*
  Copyright 2023 Equinor ASA.

  This file is part of the Open Porous Media project (OPM).

  OPM is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 3 of the License, or
  (at your option) any later version.

  OPM is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with OPM.  If not, see <http://www.gnu.org/licenses/>.
*/

#ifndef PARALLEL_WBP_CALCULATION_HPP
#define PARALLEL_WBP_CALCULATION_HPP

#include <opm/simulators/wells/ParallelPAvgDynamicSourceData.hpp>

#include <opm/simulators/utils/ParallelCommunication.hpp>

#include <opm/simulators/wells/PerforationData.hpp>

#include <opm/input/eclipse/Schedule/Well/PAvgCalculator.hpp>
#include <opm/input/eclipse/Schedule/Well/PAvgCalculatorCollection.hpp>

#include <cstddef>
#include <functional>
#include <memory>
#include <vector>

namespace Opm {
    class GridDims;
    class ParallelWellInfo;
    class PAvg;
    class Well;
}

namespace Opm {

/// Parallel facility for managing the on-rank collection and global
/// distribution of WBPn source values as well as local calculation and
/// distributed reduction of the inferred WBPn report values.
class ParallelWBPCalculation
{
public:
    /// Callback for inferring the source locations which are active on the
    /// current MPI rank.
    using GlobalToLocal = ParallelPAvgDynamicSourceData::GlobalToLocal;

    /// Callback for evaluating WBPn source terms on the current MPI rank.
    using Evaluator = ParallelPAvgDynamicSourceData::Evaluator;

    /// Callback for constructing a source term evaluation function on the
    /// current MPI rank.  Needed for deferred construction of per-well
    /// source term evaluation functions.
    using EvaluatorFactory = std::function<Evaluator()>;

    /// Constructor.
    ///
    /// \param[in] cellIndexMap Cell index triple map ((I,J,K) <-> global).
    ///
    /// \param[in] gridComm Main, grid level, global communicator.
    explicit ParallelWBPCalculation(const GridDims&                cellIndexMap,
                                    const Parallel::Communication& gridComm);

    /// Assign translation function for inferring the on-rank IDs of the
    /// known source locations.
    ///
    /// \param[in] localCellIdx Translation from global, Cartesian cell
    /// indices to local, on-rank, cell indices.
    ///
    /// \return \code *this \endcode
    ParallelWBPCalculation& localCellIndex(GlobalToLocal localCellIdx);

    /// Assign evaluation function for computing the on-rank, cell level
    /// WBPn source terms.
    ///
    /// \param[in] evalCellSrc Source term evaluation function.
    ///
    /// \return \code *this \endcode
    ParallelWBPCalculation& evalCellSource(Evaluator evalCellSrc);

    /// Create, or reassign, a WBPn calculation object for a particular
    /// well.
    ///
    /// \param[in] well Well for which to create a WBPn calculation object.
    ///
    /// \param[in] parallelWellInfo Communicator object for the ranks
    /// sharing this well.
    ///
    /// \param[in] localConnIdx Local (on-rank) connection index.  Sized
    /// according to \code well.getConnections().size() \endcode, but with
    /// a non-negative entry only for those active connections that
    /// intersect the current rank.  If \code localConnIdx[i] == j
    /// \endcode, then the \c i-th global connection is the \c j-th active
    /// connection on the current rank.  Use a negative value to identify
    /// a connection that is either not flowing or which does not
    /// intersect the current MPI rank.
    ///
    /// \param[in] makeWellSourceEvaluator Factory function to support
    /// deferred creation of an evaluation function for the per-connection
    /// WBP source terms.
    ///
    /// \return Calculator object index.  Must be used in subsequent calls
    /// to inferBlockAveragePressures() and averagePressures() for this \p
    /// well.
    std::size_t
    createCalculator(const Well&             well,
                     const ParallelWellInfo& parallelWellInfo,
                     const std::vector<int>& localConnIdx,
                     EvaluatorFactory        makeWellSourceEvaluator);

    /// Set up communication patterns for both cell and connection level
    /// source terms and partial/intermediate WBPn results.
    ///
    /// Clients must call this function once all calculation objects have
    /// been created, and strictly before the first call to member function
    /// \c collectDynamicValues().
    void defineCommunication();

    /// Collect all on-rank source term values and distribute those on-rank
    /// values to all other MPI ranks.
    ///
    /// Will call the registered source term evaluation functions for all
    /// on-rank source locations.  Once this function returns, all ranks
    /// have a full view of all cell level source term values, and all ranks
    /// which share an individual well have a full view of the
    /// per-connection source term values for that well.
    ///
    /// Will \c throw an object of type \code std::logic_error \endcode if
    /// the communication patterns have not been fully defined through a
    /// prior call to member function \c defineCommunication().
    void collectDynamicValues();

    /// Compute WBPn report values for a single well.
    ///
    /// \param[in] calcIndex Calculator object index.  Return value from a
    /// previous call to member function \c createCalculator().
    ///
    /// \param[in] controls Pressure averaging procedure controls for this
    /// well.
    ///
    /// \param[in] gravity Strength of gravity acceleration.
    ///
    /// \param[in] refDepth WBPn reference depth.  Typically \code
    /// Well::getWPaveRefDepth() \endcode.
    void inferBlockAveragePressures(const std::size_t calcIndex,
                                    const PAvg&       controls,
                                    const double      gravity,
                                    const double      refDepth);

    /// Retrieve results from most recent WBPn value calculation for
    /// specified well.
    ///
    /// \param[in] calcIndex Calculator object index.  Return value from a
    /// previous call to member function \c createCalculator().
    ///
    /// \return Result set from most recent call to member function \c
    /// inferBlockAveragePressures() for \c calcIndex.
    const PAvgCalculator::Result&
    averagePressures(const std::size_t calcIndex) const;

private:
    /// Callable wrapper for the local, per-well reservoir connections.
    class LocalConnSet
    {
    public:
        /// Constructor.
        ///
        /// \param[in] localConnIdx Local (on-rank) connection index.  Sized
        /// according to total number of connections of a well, but with a
        /// non-negative entry only for those active connections that
        /// intersect the current rank.  If \code localConnIdx[i] == j
        /// \endcode, then the \c i-th global connection is the \c j-th
        /// active connection on the current rank.  Use a negative value
        /// to identify a connection that is either not flowing or which
        /// does not intersect the current MPI rank.
        explicit LocalConnSet(const std::vector<int>& localConnIdx);

        /// Retrieve local, on-rank connection index of a well's global
        /// connection index.
        ///
        /// \param[in] connIdx Well's global connection index
        ///
        /// \return Local, on-rank, connection index of well's global index.
        /// Negative value if \p connIdx does not intersect the current
        /// rank.
        int localIndex(const std::size_t connIdx) const;

    private:
        /// Local (on-rank) connection index.  Sized according to total
        /// number of connections of a well, but with a non-negative entry
        /// only for those active connections that intersect the current
        /// rank.  If \code localConnIdx[i] == j \endcode, then the \c i-th
        /// global connection is the \c j-th active connection on the
        /// current rank.  Use a negative value to identify a connection
        /// that is either not flowing or which does not intersect the
        /// current MPI rank.
        std::vector<int> localConnIdx_{};
    };

    /// Parallel collection of individual source terms
    class SourceData
    {
    public:
        /// Conversion operator.
        ///
        /// Enables transparent usage of this object as an argument to \code
        /// PAvgCalculator::inferBlockAveragePressures() \endcode.
        operator const ParallelPAvgDynamicSourceData&() const
        {
            return *this->srcData_;
        }

        /// Constructor.
        ///
        /// \param[in] comm MPI communicator.
        explicit SourceData(const Parallel::Communication& comm);

        /// Assign translation function for inferring the on-rank IDs of the
        /// known source locations.
        ///
        /// \param[in] localIdx Translation from global indices to local,
        /// on-rank indices.
        ///
        /// \return \code *this \endcode
        SourceData& localIndex(GlobalToLocal localIdx);

        /// Assign evaluation function for computing the on-rank, WBPn
        /// source terms.
        ///
        /// \param[in] eval Source term evaluation function.
        ///
        /// \return \code *this \endcode
        SourceData& evaluator(Evaluator eval);

        /// Assign evaluation function factory for computing the on-rank,
        /// WBPn source terms.
        ///
        /// \param[in] evalFactory Factory function to support deferred
        /// creation of an evaluation function for the per-connection WBP
        /// source terms.
        ///
        /// \return \code *this \endcode
        SourceData& evaluatorFactory(EvaluatorFactory evalFactory);

        /// Construct or update wrapped source term collection to account
        /// for new set of source locations.
        ///
        /// This function is a prerequisite for subsequent calls to \c
        /// collectDynamicValues().
        ///
        /// \param[in] sourceLocations Known locations, possibly linearised
        /// global cell IDs, for which to enable collecting/reporting
        /// dynamic source data.
        void buildStructure(const std::vector<std::size_t>& sourceLocations);

        /// Collect all on-rank source term values and distribute those
        /// on-rank values to all other MPI ranks.
        ///
        /// Will call the registered source term evaluation functions for
        /// all on-rank source locations.
        ///
        /// Will \c throw an object of type \code std::logic_error \endcode
        /// if the communication pattern has not been fully defined through
        /// a prior call to member function \c buildStructure().
        void collectDynamicValues();

        /// Provide read-only access to the associated MPI communicator.
        ///
        /// \return Read-only MPI communicator.
        const Parallel::Communication& comm() const
        {
            return this->comm_.get();
        }

        /// Convert collection of global indices to local, on-rank, indices.
        ///
        /// Applies \c localIndex() to each element of the input array.
        ///
        /// \param[in] globalIndex Collection of global indices.
        ///
        /// \return Local, on-rank, indices for each index in \p
        /// globalIndex.  Negative local index value for global indices
        /// which are either not active or not on the current MPI rank.
        std::vector<int>
        getLocalIndex(const std::vector<std::size_t>& globalIndex) const;

    private:
        /// Type of wrapped object.
        using DataPtr = std::unique_ptr<ParallelPAvgDynamicSourceData>;

        /// MPI communicator for this source data object.
        std::reference_wrapper<const Parallel::Communication> comm_;

        /// Translation from global indices to local, on-rank, indices.
        GlobalToLocal localIdx_{};

        /// Source term evaluator object.  Empty if we need deferred
        /// initialisation, e.g., for well connections.
        Evaluator eval_{};

        /// Creation function for source term evaluation functions.  Empty
        /// if evaluation function has already been assigned, e.g., for
        /// reservoir cells.
        EvaluatorFactory evalFactory_{};

        /// Parallel WBPn source term object.
        DataPtr srcData_{};
    };

    /// Calculation object IDs.
    using WellID = std::vector<SourceData>::size_type;

    /// Cell index triple map ((I,J,K) <-> global).
    std::reference_wrapper<const GridDims> cellIndexMap_;

    /// Source term object for the reservoir cells.
    SourceData reservoirSrc_;

    /// Collection of WBPn calculation objects.  One object for each well on
    /// rank.
    PAvgCalculatorCollection calculators_{};

    /// Source term objects for each well on rank.
    std::vector<SourceData> wellConnSrc_{};

    /// Local connection indices for each well on rank.
    std::vector<LocalConnSet> localConnSet_{};

    /// Eliminate inactive cells from the source locations backing \c
    /// reservoirSrc_.
    void pruneInactiveWBPCells();

    /// Serial implementation of pruneInactiveWBPCells().
    void pruneInactiveWBPCellsSerial();

    /// Parallel implementation of pruneInactiveWBPCells().
    void pruneInactiveWBPCellsParallel();

    /// Define communication patterns for \c reservoirSrc_.
    void defineReservoirCommunication();

    /// Define communication patterns for source terms pertaining to the
    /// reservoir connections of a single well.
    ///
    /// \param[in] well Well for which to define communication pattern.
    void defineWellCommunication(const std::size_t well);

    /// Aggregate pertinent source terms for the WBPn calculation object of
    /// a single well.
    ///
    /// \param[in] well Well for which to aggregate the pertinent source
    /// terms.
    ///
    /// \return WBPn source terms aggregated for \p well.
    PAvgCalculator::Sources makeEvaluationSources(const WellID well) const;
};

} // namespace Opm

#endif // PARALLEL_WBP_CALCULATION_HPP
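
The EvaluatorFactory alias exists so that per-well evaluators can capture their state lazily, which is why well sources store a factory while reservoir sources store the evaluator directly. A hedged sketch of that pattern, modelled on the unit test below; the density table and the single Item assignment are made-up illustration values.

    #include <opm/simulators/wells/ParallelWBPCalculation.hpp>

    #include <opm/input/eclipse/Schedule/Well/PAvgDynamicSourceData.hpp>

    #include <utility>
    #include <vector>

    Opm::ParallelWBPCalculation::EvaluatorFactory makeConnEvaluator()
    {
        // Outer lambda: deferred construction.  Inner lambda: the actual
        // per-connection evaluator, owning its captured state.
        return []() {
            auto rho = std::vector { 0.1, 0.12, 0.14 }; // hypothetical densities

            return [rho = std::move(rho)]
                (const int connIx,
                 Opm::PAvgDynamicSourceData::SourceDataSpan<double> src)
            {
                using Item = Opm::PAvgDynamicSourceData::SourceDataSpan<double>::Item;

                src.set(Item::MixtureDensity, rho[connIx]);
            };
        };
    }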

tests/test_parallel_wbp_calculation.cpp (new file, 616 lines)
@@ -0,0 +1,616 @@
/*
  Copyright 2023 Equinor.

  This file is part of the Open Porous Media project (OPM).

  OPM is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 3 of the License, or
  (at your option) any later version.

  OPM is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with OPM.  If not, see <http://www.gnu.org/licenses/>.
*/

#include <config.h>

#define BOOST_TEST_MODULE Parallel_WBPn_Calculation

#define BOOST_TEST_NO_MAIN

#include <boost/test/unit_test.hpp>

#include <opm/simulators/wells/ParallelWBPCalculation.hpp>

#include <opm/simulators/wells/ParallelPAvgDynamicSourceData.hpp>
#include <opm/simulators/wells/ParallelWellInfo.hpp>

#include <opm/simulators/utils/ParallelCommunication.hpp>

#include <opm/grid/common/CommunicationUtils.hpp>

#include <opm/input/eclipse/EclipseState/Grid/GridDims.hpp>

#include <opm/input/eclipse/Schedule/Well/Connection.hpp>
#include <opm/input/eclipse/Schedule/Well/PAvg.hpp>
#include <opm/input/eclipse/Schedule/Well/PAvgDynamicSourceData.hpp>
#include <opm/input/eclipse/Schedule/Well/Well.hpp>
#include <opm/input/eclipse/Schedule/Well/WellConnections.hpp>

#include <opm/input/eclipse/Units/Units.hpp>

#include <dune/common/parallel/mpihelper.hh>

#include <algorithm>
#include <array>
#include <cstddef>
#include <functional>
#include <initializer_list>
#include <iostream>
#include <memory>
#include <numeric>
#include <string>
#include <string_view>
#include <vector>

namespace {

#if HAVE_MPI
    struct MPIError
    {
        MPIError(std::string_view errstr, const int ec)
            : errorstring { errstr }
            , errorcode   { ec }
        {}

        std::string errorstring;
        int errorcode;
    };

    void MPI_err_handler(MPI_Comm*, int* err_code, ...)
    {
        std::array<char, MPI_MAX_ERROR_STRING> err_string_vec{'\0'};
        auto err_length = 0;

        MPI_Error_string(*err_code, err_string_vec.data(), &err_length);

        auto err_string = std::string_view {
            err_string_vec.data(),
            static_cast<std::string_view::size_type>(err_length)
        };

        std::cerr << "An MPI Error occurred:\n  -> " << err_string << '\n';

        throw MPIError { err_string, *err_code };
    }
#endif // HAVE_MPI

    double standardGravity()
    {
        return Opm::unit::gravity;
    }

    std::size_t globIndex(const std::array<int,3>& ijk,
                          const std::array<int,3>& dims)
    {
        return ijk[0] + dims[0]*(ijk[1] + static_cast<std::size_t>(dims[1])*ijk[2]);
    }

    std::array<int,3> cellIJK(int cell,
                              const std::array<int,3>& dims)
    {
        auto ijk = std::array<int,3>{};

        ijk[0] = cell % dims[0];  cell /= dims[0];
        ijk[1] = cell % dims[1];
        ijk[2] = cell / dims[1];

        return ijk;
    }

    namespace Rank {
        namespace Top {
            int globalToLocal(const std::size_t global)
            {
                return (global >= 5 * 5 * 5)
                    ? -1
                    : static_cast<int>(global);
            }

            bool isInRange(const std::array<int,3>& ijk)
            {
                // Well block column in top half covers zero-based index
                // range
                //
                //   [ 1..3, 1..3, 2..4 ]
                //
                // of index range
                //
                //   [ 0..4, 0..4, 0..4 ]

                return (ijk[0] >= 1) && (ijk[0] <= 3)
                    && (ijk[1] >= 1) && (ijk[1] <= 3)
                    && (ijk[2] >= 2) && (ijk[2] <= 4);
            }

            std::size_t fieldIx(const std::array<int,3>& ijk)
            {
                return globIndex({ ijk[0] - 1, ijk[1] - 1, ijk[2] - 2 }, {3, 3, 3});
            }

            double fieldValue(const int cell, std::initializer_list<double> field)
            {
                const auto ijk = cellIJK(cell, { 5, 5, 5 });

                return isInRange(ijk) ? *(std::data(field) + fieldIx(ijk)) : 0.0;
            }

            // Octave: 1234 + fix(100 * rand([3, 3, 6])) -- top half
            double pressure(const int cell)
            {
                return fieldValue(cell, {
                    // K=2
                    1.302000e+03, 1.308000e+03, 1.279000e+03,
                    1.242000e+03, 1.256000e+03, 1.325000e+03,
                    1.249000e+03, 1.316000e+03, 1.287000e+03,

                    // K=3
                    1.333000e+03, 1.241000e+03, 1.278000e+03,
                    1.244000e+03, 1.330000e+03, 1.234000e+03,
                    1.311000e+03, 1.315000e+03, 1.320000e+03,

                    // K=4
                    1.242000e+03, 1.273000e+03, 1.259000e+03,
                    1.314000e+03, 1.277000e+03, 1.325000e+03,
                    1.252000e+03, 1.260000e+03, 1.248000e+03,
                });
            }

            // Octave: fix(1e6 * (123.4 + 56.7*rand([3, 3, 6]))) / 1e6 -- top half
            double porevol(const int cell)
            {
                return fieldValue(cell, {
                    // K=2
                    1.301471680e+02, 1.516572410e+02, 1.778174820e+02,
                    1.426998700e+02, 1.565846810e+02, 1.360901360e+02,
                    1.659968420e+02, 1.378638930e+02, 1.520877640e+02,

                    // K=3
                    1.630376500e+02, 1.739142140e+02, 1.777918230e+02,
                    1.544271200e+02, 1.312600050e+02, 1.318649700e+02,
                    1.380007180e+02, 1.710686680e+02, 1.378177990e+02,

                    // K=4
                    1.695699490e+02, 1.372078650e+02, 1.760892470e+02,
                    1.432440790e+02, 1.345469500e+02, 1.376364540e+02,
                    1.583297330e+02, 1.502354770e+02, 1.433390940e+02,
                });
            }

            // Octave: 0.1 + round(0.1 * rand([3, 3, 6]), 2) -- top half
            double density(const int cell)
            {
                return fieldValue(cell, {
                    // K=2
                    0.120, 0.120, 0.120,
                    0.140, 0.130, 0.190,
                    0.140, 0.120, 0.190,

                    // K=3
                    0.200, 0.140, 0.110,
                    0.130, 0.140, 0.160,
                    0.130, 0.160, 0.170,

                    // K=4
                    0.120, 0.110, 0.130,
                    0.130, 0.140, 0.150,
                    0.110, 0.130, 0.180,
                });
            }

            void cellSource(const int cell,
                            Opm::PAvgDynamicSourceData::SourceDataSpan<double> src)
            {
                using Item = Opm::PAvgDynamicSourceData::SourceDataSpan<double>::Item;

                src .set(Item::Pressure      , pressure(cell))
                    .set(Item::PoreVol       , porevol (cell))
                    .set(Item::MixtureDensity, density (cell));
            }

            std::vector<int> localConnIdx()
            {
                auto localIdx = std::vector<int>(6, -1);
                for (auto perf = 0; perf < 3; ++perf) {
                    localIdx[perf] = perf;
                }

                return localIdx;
            }

            Opm::ParallelWBPCalculation::EvaluatorFactory connSource()
            {
                return []() {
                    auto rho = std::vector { 0.1, 0.12, 0.14, };

                    return [rho = std::move(rho)]
                        (const int connIx,
                         Opm::PAvgDynamicSourceData::SourceDataSpan<double> src)
                    {
                        using Item = Opm::PAvgDynamicSourceData::SourceDataSpan<double>::Item;

                        src .set(Item::Pressure      , 1222.0)
                            .set(Item::PoreVol       , 1.25)
                            .set(Item::MixtureDensity, rho[connIx]);
                    };
                };
            }
        } // namespace Top

        namespace Bottom {
            int globalToLocal(const std::size_t global)
            {
                constexpr auto middle = 5 * 5 * 5;

                return (global < middle)
                    ? -1
                    : static_cast<int>(global - middle);
            }

            bool isInRange(const std::array<int,3>& ijk)
            {
                // Well block column in bottom half covers zero-based index
                // range
                //
                //   [ 1..3, 1..3, 0..2 ]
                //
                // of index range
                //
                //   [ 0..4, 0..4, 0..4 ]

                return (ijk[0] >= 1) && (ijk[0] <= 3)
                    && (ijk[1] >= 1) && (ijk[1] <= 3)
                    && (ijk[2] >= 0) && (ijk[2] <= 2);
            }

            std::size_t fieldIx(const std::array<int,3>& ijk)
            {
                return globIndex({ ijk[0] - 1, ijk[1] - 1, ijk[2] }, {3, 3, 3});
            }

            double fieldValue(const int cell, std::initializer_list<double> field)
            {
                const auto ijk = cellIJK(cell, { 5, 5, 5 });

                return isInRange(ijk) ? *(std::data(field) + fieldIx(ijk)) : 0.0;
            }

            // Octave: 1234 + fix(100 * rand([3, 3, 6])) -- bottom half
            double pressure(const int cell)
            {
                return fieldValue(cell, {
                    // K=5
                    1.247000e+03, 1.320000e+03, 1.291000e+03,
                    1.288000e+03, 1.248000e+03, 1.319000e+03,
                    1.296000e+03, 1.269000e+03, 1.285000e+03,

                    // K=6
                    1.274000e+03, 1.241000e+03, 1.257000e+03,
                    1.246000e+03, 1.252000e+03, 1.257000e+03,
                    1.275000e+03, 1.238000e+03, 1.324000e+03,

                    // K=7
                    1.328000e+03, 1.283000e+03, 1.282000e+03,
                    1.267000e+03, 1.324000e+03, 1.270000e+03,
                    1.245000e+03, 1.312000e+03, 1.272000e+03,
                });
            }

            // Octave: fix(1e6 * (123.4 + 56.7*rand([3, 3, 6]))) / 1e6 -- bottom half
            double porevol(const int cell)
            {
                return fieldValue(cell, {
                    // K=5
                    1.705079830e+02, 1.565844730e+02, 1.545693280e+02,
                    1.754048800e+02, 1.396070720e+02, 1.663332520e+02,
                    1.661364390e+02, 1.449712790e+02, 1.555954870e+02,

                    // K=6
                    1.277009380e+02, 1.264589710e+02, 1.534962210e+02,
                    1.675787810e+02, 1.763584050e+02, 1.307656820e+02,
                    1.556523010e+02, 1.500144490e+02, 1.240748470e+02,

                    // K=7
                    1.425148530e+02, 1.325957360e+02, 1.684359330e+02,
                    1.410458920e+02, 1.533678280e+02, 1.327922820e+02,
                    1.575323760e+02, 1.383104710e+02, 1.604862840e+02,
                });
            }

            // Octave: 0.1 + round(0.1 * rand([3, 3, 6]), 2) -- bottom half
            double density(const int cell)
            {
                return fieldValue(cell, {
                    // K=5
                    0.100, 0.190, 0.170,
                    0.150, 0.160, 0.120,
                    0.150, 0.200, 0.150,

                    // K=6
                    0.150, 0.120, 0.150,
                    0.160, 0.170, 0.140,
                    0.140, 0.200, 0.100,

                    // K=7
                    0.190, 0.190, 0.180,
                    0.110, 0.130, 0.130,
                    0.170, 0.110, 0.170,
                });
            }

            void cellSource(const int cell,
                            Opm::PAvgDynamicSourceData::SourceDataSpan<double> src)
            {
                using Item = Opm::PAvgDynamicSourceData::SourceDataSpan<double>::Item;

                src .set(Item::Pressure      , pressure(cell))
                    .set(Item::PoreVol       , porevol (cell))
                    .set(Item::MixtureDensity, density (cell));
            }

            std::vector<int> localConnIdx()
            {
                auto localIdx = std::vector<int>(6, -1);
                for (auto perf = 0; perf < 3; ++perf) {
                    localIdx[3 + perf] = perf;
                }

                return localIdx;
            }

            Opm::ParallelWBPCalculation::EvaluatorFactory connSource()
            {
                return []() {
                    auto rho = std::vector { 0.16, 0.18, 0.2, };

                    return [rho = std::move(rho)]
                        (const int connIx,
                         Opm::PAvgDynamicSourceData::SourceDataSpan<double> src)
                    {
                        using Item = Opm::PAvgDynamicSourceData::SourceDataSpan<double>::Item;

                        src .set(Item::Pressure      , 1222.0)
                            .set(Item::PoreVol       , 1.25)
                            .set(Item::MixtureDensity, rho[connIx]);
                    };
                };
            }
        } // namespace Bottom
    } // namespace Rank

    std::shared_ptr<Opm::WellConnections>
    centreConnections(const int topConn, const int numConns)
    {
        auto conns = std::vector<Opm::Connection>{};

        const auto dims = std::array { 5, 5, 10 };

        const auto i = 2;
        const auto j = 2;
        const auto kMax = std::min(dims[2] - 1, topConn + numConns);

        const auto state = std::array {
            Opm::Connection::State::OPEN,
            Opm::Connection::State::SHUT,
            Opm::Connection::State::OPEN,
        };

        for (auto k = topConn; k < kMax; ++k) {
            conns.emplace_back(i, j, k, globIndex({i, j, k}, dims), k - topConn,
                               2000 + (2*k + 1) / static_cast<double>(2),

                               // Open, Shut, Open, Open, Shut, ...
                               state[(k - topConn) % state.size()],

                               // 0.03, 0.0, 0.01, 0.02, 0.03, ...
                               ((k + 3 - topConn) % 4) / 100.0,

                               1.0, 1.0, 0.5, 0.5, 1.0, 0.0, 0,
                               Opm::Connection::Direction::Z,
                               Opm::Connection::CTFKind::DeckValue, k - topConn, false);
        }

        return std::make_shared<Opm::WellConnections>
            (Opm::Connection::Order::INPUT, i, j, conns);
    }

    Opm::Well producerWell()
    {
        auto w = Opm::Well {
            "P", "G", 0, 0, 2, 2, 2000.5,
            Opm::WellType { true, Opm::Phase::OIL }, // Oil producer
            Opm::Well::ProducerCMode::ORAT,
            Opm::Connection::Order::INPUT,
            Opm::UnitSystem::newMETRIC(),
            -3.0e+20,           // UDQ undefined
            0.0, true, true, 0,
            Opm::Well::GasInflowEquation::STD
        };

        w.updateConnections(centreConnections(2, 6), true);

        return w;
    }

    Opm::ParallelWellInfo
    parallelWellInfo(const Opm::Parallel::Communication& comm)
    {
        auto pwi = Opm::ParallelWellInfo {
            std::pair { std::string{ "P" }, true }, comm
        };

        pwi.beginReset();

        const auto numLocalPerf = 3;
        const auto perfOffset = comm.rank() * numLocalPerf;

        auto prev = Opm::ParallelWellInfo::INVALID_ECL_INDEX;
        for (auto perf = 0; perf < numLocalPerf; ++perf) {
            const auto curr = perfOffset + perf;
            pwi.pushBackEclIndex(prev, curr);
            prev = curr;
        }

        pwi.endReset();

        pwi.communicateFirstPerforation(comm.rank() == 0);

        return pwi;
    }

    void setCallbacksTop(Opm::ParallelWBPCalculation& wbpCalcService)
    {
        wbpCalcService
            .localCellIndex(&Rank::Top::globalToLocal)
            .evalCellSource(&Rank::Top::cellSource);
    }

    void setCallbacksBottom(Opm::ParallelWBPCalculation& wbpCalcService)
    {
        wbpCalcService
            .localCellIndex(&Rank::Bottom::globalToLocal)
            .evalCellSource(&Rank::Bottom::cellSource);
    }

    void setCallbacks(const int rank,
                      Opm::ParallelWBPCalculation& wbpCalcService)
    {
        if (rank == 0) {
            setCallbacksTop(wbpCalcService);
        }
        else {
            setCallbacksBottom(wbpCalcService);
        }
    }

    Opm::ParallelWBPCalculation::EvaluatorFactory connSource(const int rank)
    {
        if (rank == 0) {
            return Rank::Top::connSource();
        }
        else {
            return Rank::Bottom::connSource();
        }
    }

    std::vector<int> localConnIdx(const int rank)
    {
        if (rank == 0) {
            return Rank::Top::localConnIdx();
        }
        else {
            return Rank::Bottom::localConnIdx();
        }
    }

    bool init_unit_test_func()
    {
        return true;
    }

    struct Setup
    {
        Setup()
            : comm           { Dune::MPIHelper::getCommunicator() }
            , cellIndexMap   { 5, 5, 10 }
            , wbpCalcService { cellIndexMap, comm }
            , pwi            { parallelWellInfo(comm) }
        {
            setCallbacks(this->comm.rank(), this->wbpCalcService);

            this->wbpCalcService
                .createCalculator(producerWell(), this->pwi,
                                  localConnIdx(this->comm.rank()),
                                  connSource(this->comm.rank()));
        }

        Opm::Parallel::Communication comm;
        Opm::GridDims cellIndexMap;
        Opm::ParallelWBPCalculation wbpCalcService;
        Opm::ParallelWellInfo pwi;
    };

} // Anonymous namespace

BOOST_AUTO_TEST_CASE(Create)
{
    auto comm = Opm::Parallel::Communication {
        Dune::MPIHelper::getCommunicator()
    };

    BOOST_REQUIRE_EQUAL(comm.size(), 2);

    auto wbpCalcService = Opm::ParallelWBPCalculation {
        Opm::GridDims { 5, 5, 10 },
        comm
    };

    setCallbacks(comm.rank(), wbpCalcService);
    const auto pwi = parallelWellInfo(comm);

    const auto calcIdx = wbpCalcService
        .createCalculator(producerWell(), pwi,
                          localConnIdx(comm.rank()),
                          connSource(comm.rank()));

    BOOST_CHECK_EQUAL(calcIdx, std::size_t{0});
}

BOOST_AUTO_TEST_CASE(TopOfFormation_Well_OpenConns)
{
    // Producer connected in Z direction in cells (3,3,3), (3,3,4), (3,3,5),
    // (3,3,6), (3,3,7), and (3,3,8).  Connections (3,3,4) and (3,3,7) are
    // shut.

    Setup cse {};

    cse.wbpCalcService.defineCommunication();
    cse.wbpCalcService.collectDynamicValues();

    const auto calcIndex = std::size_t{0};
    const auto controls  = Opm::PAvg{};
    const auto gravity   = standardGravity();
    const auto refDepth  = 2002.5; // BHP reference depth.  Depth correction in layers 4..8.
    cse.wbpCalcService.inferBlockAveragePressures(calcIndex, controls, gravity, refDepth);

    const auto avgPress = cse.wbpCalcService.averagePressures(calcIndex);
    using WBPMode = Opm::PAvgCalculator::Result::WBPMode;

    BOOST_CHECK_CLOSE(avgPress.value(WBPMode::WBP) , 1254.806625666667, 1.0e-8);
    BOOST_CHECK_CLOSE(avgPress.value(WBPMode::WBP4), 1295.348292333333, 1.0e-8);
    BOOST_CHECK_CLOSE(avgPress.value(WBPMode::WBP5), 1275.077459000000, 1.0e-8);
    BOOST_CHECK_CLOSE(avgPress.value(WBPMode::WBP9), 1269.379542333333, 1.0e-8);
}

int main(int argc, char** argv)
{
    Dune::MPIHelper::instance(argc, argv);

#if HAVE_MPI
    // Register a throwing error handler to allow for debugging with
    //
    //   catch throw
    //
    // in GDB.
    MPI_Errhandler handler{};
    MPI_Comm_create_errhandler(MPI_err_handler, &handler);
    MPI_Comm_set_errhandler(MPI_COMM_WORLD, handler);
#endif // HAVE_MPI

    return boost::unit_test::unit_test_main(&init_unit_test_func, argc, argv);
}
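
As a quick sanity check of the index helpers near the top of this test, cellIJK() inverts globIndex() for every cell of the 5x5x10 model. The following standalone round-trip check is not part of the commit, just an illustration of the linearisation arithmetic:

    #include <array>
    #include <cassert>
    #include <cstddef>

    int main()
    {
        const std::array<int,3> dims { 5, 5, 10 };

        for (int c = 0; c < dims[0]*dims[1]*dims[2]; ++c) {
            // Decompose: i = c % nx, j = (c / nx) % ny, k = c / (nx*ny).
            const std::array<int,3> ijk {
                c % dims[0], (c / dims[0]) % dims[1], c / (dims[0]*dims[1])
            };

            // Recompose with globIndex()'s formula: i + nx*(j + ny*k).
            assert(static_cast<std::size_t>(c)
                   == ijk[0] + dims[0]*(ijk[1] + static_cast<std::size_t>(dims[1])*ijk[2]));
        }

        return 0;
    }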