Mirror of https://github.com/OPM/opm-simulators.git (synced 2025-02-25 18:55:30 -06:00)
Merge pull request #2794 from bska/prepare-nodepress-smry-output
Chase Group/Node Summary API Update
This commit is contained in: commit 04732f596b
@@ -556,17 +556,17 @@ public:
 
     };
 
-    class PackUnPackGroupData : public P2PCommunicatorType::DataHandleInterface
+    class PackUnPackGroupAndNetworkValues : public P2PCommunicatorType::DataHandleInterface
     {
-        const Opm::data::GroupValues& localGroupData_;
-        Opm::data::GroupValues& globalGroupData_;
+        const Opm::data::GroupAndNetworkValues& localGroupAndNetworkData_;
+        Opm::data::GroupAndNetworkValues& globalGroupAndNetworkData_;
 
     public:
-        PackUnPackGroupData(const Opm::data::GroupValues& localGroupData,
-                            Opm::data::GroupValues& globalGroupData,
-                            const bool isIORank)
-            : localGroupData_ (localGroupData)
-            , globalGroupData_(globalGroupData)
+        PackUnPackGroupAndNetworkValues(const Opm::data::GroupAndNetworkValues& localGroupAndNetworkData,
+                                        Opm::data::GroupAndNetworkValues& globalGroupAndNetworkData,
+                                        const bool isIORank)
+            : localGroupAndNetworkData_ (localGroupAndNetworkData)
+            , globalGroupAndNetworkData_(globalGroupAndNetworkData)
         {
             if (! isIORank) { return; }
@@ -588,13 +588,13 @@ public:
             };
         }
 
-            // write all group data
-            this->localGroupData_.write(buffer);
+            // write all group and network (node/branch) data
+            this->localGroupAndNetworkData_.write(buffer);
         }
 
         // unpack all data associated with link
         void unpack(int /*link*/, MessageBufferType& buffer)
-        { this->globalGroupData_.read(buffer); }
+        { this->globalGroupAndNetworkData_.read(buffer); }
     };
 
 
@@ -660,18 +660,17 @@ public:
     void collect(const Opm::data::Solution& localCellData,
                  const std::map<std::pair<std::string, int>, double>& localBlockData,
                  const Opm::data::Wells& localWellData,
-                 const Opm::data::GroupValues& localGroupData)
+                 const Opm::data::GroupAndNetworkValues& localGroupAndNetworkData)
     {
         globalCellData_ = {};
         globalBlockData_.clear();
         globalWellData_.clear();
-        globalGroupData_.clear();
+        globalGroupAndNetworkData_.clear();
 
         // index maps only have to be build when reordering is needed
         if(!needsReordering && !isParallel())
             return;
-
 
         // this also linearises the local buffers on ioRank
         PackUnPackCellData packUnpackCellData {
             localCellData,
@@ -693,9 +692,9 @@ public:
             this->isIORank()
         };
 
-        PackUnPackGroupData packUnpackGroupData {
-            localGroupData,
-            this->globalGroupData_,
+        PackUnPackGroupAndNetworkValues packUnpackGroupAndNetworkData {
+            localGroupAndNetworkData,
+            this->globalGroupAndNetworkData_,
             this->isIORank()
         };
 
@@ -707,7 +706,7 @@ public:
 
         toIORankComm_.exchange(packUnpackCellData);
         toIORankComm_.exchange(packUnpackWellData);
-        toIORankComm_.exchange(packUnpackGroupData);
+        toIORankComm_.exchange(packUnpackGroupAndNetworkData);
         toIORankComm_.exchange(packUnpackBlockData);
 
 
@@ -727,8 +726,8 @@ public:
     const Opm::data::Wells& globalWellData() const
     { return globalWellData_; }
 
-    const Opm::data::GroupValues& globalGroupData() const
-    { return globalGroupData_; }
+    const Opm::data::GroupAndNetworkValues& globalGroupAndNetworkData() const
+    { return globalGroupAndNetworkData_; }
 
     bool isIORank() const
     { return toIORankComm_.rank() == ioRank; }
@@ -775,7 +774,7 @@ protected:
     Opm::data::Solution globalCellData_;
     std::map<std::pair<std::string, int>, double> globalBlockData_;
     Opm::data::Wells globalWellData_;
-    Opm::data::GroupValues globalGroupData_;
+    Opm::data::GroupAndNetworkValues globalGroupAndNetworkData_;
     std::vector<int> localIdxToGlobalIdx_;
     /// \brief sorted list of cartesian indices present-
     ///
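Note: every hunk above follows from a single type change: the collection layer now carries one combined group-and-network object instead of a plain group map. A minimal sketch of the shape that the rest of this diff relies on (the member names groupData/nodeData appear in the pack/unpack hunks and in the unit test below; the NodeData::pressure field name is an assumption, not taken from this commit):

    // Hedged sketch, not the opm-common definition: only the layout implied by this diff.
    #include <map>
    #include <string>

    namespace sketch {
        struct GroupData { /* current constraints, guide rates, ... (data::GroupData) */ };
        struct NodeData  { double pressure = 0.0; };    // field name assumed; the test stores 123.457

        struct GroupAndNetworkValues {
            std::map<std::string, GroupData> groupData; // keyed by group name
            std::map<std::string, NodeData>  nodeData;  // keyed by network node name
        };
    }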
@@ -562,9 +562,9 @@ public:
         return wellDat;
     }
 
-    Opm::data::GroupValues
-    groupData(const int /* reportStepIdx */,
-              const Opm::Schedule& /* sched */) const
+    Opm::data::GroupAndNetworkValues
+    groupAndNetworkData(const int /* reportStepIdx */,
+                        const Opm::Schedule& /* sched */) const
     {
         return {};
     }
@@ -271,15 +271,15 @@ public:
             simulator_.setupTimer().realTimeElapsed() +
             simulator_.vanguard().externalSetupTime();
 
-        const auto localWellData = simulator_.problem().wellModel().wellData();
-        const auto localGroupData = simulator_.problem().wellModel()
-            .groupData(reportStepNum, simulator_.vanguard().schedule());
+        const auto localWellData = simulator_.problem().wellModel().wellData();
+        const auto localGroupAndNetworkData = simulator_.problem().wellModel()
+            .groupAndNetworkData(reportStepNum, simulator_.vanguard().schedule());
 
         this->prepareLocalCellData(isSubStep, reportStepNum);
 
         if (collectToIORank_.isParallel())
             collectToIORank_.collect({}, eclOutputModule_.getBlockData(),
-                                     localWellData, localGroupData);
+                                     localWellData, localGroupAndNetworkData);
 
         std::map<std::string, double> miscSummaryData;
         std::map<std::string, std::vector<double>> regionData;
@@ -306,9 +306,9 @@ public:
             ? this->collectToIORank_.globalWellData()
             : localWellData;
 
-        const auto& groupData = this->collectToIORank_.isParallel()
-            ? this->collectToIORank_.globalGroupData()
-            : localGroupData;
+        const auto& groupAndNetworkData = this->collectToIORank_.isParallel()
+            ? this->collectToIORank_.globalGroupAndNetworkData()
+            : localGroupAndNetworkData;
 
         const auto& blockData
             = this->collectToIORank_.isParallel()
@@ -321,7 +321,7 @@ public:
                          eclState,
                          schedule(),
                          wellData,
-                         groupData,
+                         groupAndNetworkData,
                          miscSummaryData,
                          regionData,
                          blockData);
@@ -358,7 +358,8 @@ public:
 
         // output using eclWriter if enabled
         auto localWellData = simulator_.problem().wellModel().wellData();
-        auto localGroupData = simulator_.problem().wellModel().groupData(reportStepNum, simulator_.vanguard().schedule());
+        auto localGroupAndNetworkData = simulator_.problem().wellModel()
+            .groupAndNetworkData(reportStepNum, simulator_.vanguard().schedule());
 
         Opm::data::Solution localCellData = {};
         if (! isSubStep) {
@@ -368,15 +369,16 @@ public:
             this->eclOutputModule_.addRftDataToWells(localWellData, reportStepNum);
         }
 
-        if (collectToIORank_.isParallel()) {
-            collectToIORank_.collect(localCellData, eclOutputModule_.getBlockData(), localWellData, localGroupData);
+        if (this->collectToIORank_.isParallel()) {
+            collectToIORank_.collect(localCellData, eclOutputModule_.getBlockData(),
+                                     localWellData, localGroupAndNetworkData);
         }
 
         if (this->collectToIORank_.isIORank()) {
             this->writeOutput(reportStepNum, isSubStep,
                               std::move(localCellData),
                               std::move(localWellData),
-                              std::move(localGroupData));
+                              std::move(localGroupAndNetworkData));
         }
     }
 
@@ -740,11 +742,11 @@ private:
             }
         }
 
-    void writeOutput(const int reportStepNum,
-                     const bool isSubStep,
-                     ::Opm::data::Solution&& localCellData,
-                     ::Opm::data::Wells&& localWellData,
-                     ::Opm::data::GroupValues&& localGroupData)
+    void writeOutput(const int reportStepNum,
+                     const bool isSubStep,
+                     ::Opm::data::Solution&& localCellData,
+                     ::Opm::data::Wells&& localWellData,
+                     ::Opm::data::GroupAndNetworkValues&& localGroupAndNetworkData)
     {
         const Scalar curTime = simulator_.time() + simulator_.timeStepSize();
         const Scalar nextStepSize = simulator_.problem().nextTimeStepSize();
@@ -757,8 +759,8 @@ private:
             isParallel ? this->collectToIORank_.globalWellData()
                        : std::move(localWellData),
 
-            isParallel ? this->collectToIORank_.globalGroupData()
-                       : std::move(localGroupData)
+            isParallel ? this->collectToIORank_.globalGroupAndNetworkData()
+                       : std::move(localGroupAndNetworkData)
         };
 
         if (simulator_.vanguard().eclState().getSimulationConfig().useThresholdPressure()) {
@@ -207,6 +207,7 @@ std::size_t packSize(const std::array<T,N>& data, Dune::MPIHelper::MPICommunicat
 HANDLE_AS_POD(data::Connection)
 HANDLE_AS_POD(data::CurrentControl)
 HANDLE_AS_POD(data::GroupConstraints)
+HANDLE_AS_POD(data::NodeData)
 HANDLE_AS_POD(data::Rates)
 HANDLE_AS_POD(data::Segment)
 
@@ -260,11 +261,10 @@ std::size_t packSize(const data::Solution& data, Dune::MPIHelper::MPICommunicato
     return packSize(static_cast<const std::map< std::string, data::CellData>&>(data), comm);
 }
 
-std::size_t packSize(const data::GroupValues& data, Dune::MPIHelper::MPICommunicator comm)
+std::size_t packSize(const data::GroupAndNetworkValues& data, Dune::MPIHelper::MPICommunicator comm)
 {
-    // Needs explicit conversion to a supported base type holding the data
-    // to prevent throwing.
-    return packSize(static_cast<const std::map<std::string, data::GroupData>&>(data), comm);
+    return packSize(data.groupData, comm)
+        + packSize(data.nodeData, comm);
 }
 
 std::size_t packSize(const data::WellRates& data, Dune::MPIHelper::MPICommunicator comm)
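The old overload reinterpreted GroupValues through its map base class; the new one simply sums the pack sizes of the two members. A toy illustration of that composite pattern, under hypothetical toy:: names and with plain byte counting instead of MPI sizing:

    #include <cstddef>
    #include <map>
    #include <string>

    namespace toy {
        // Leaf overloads: how many bytes a value occupies in the buffer.
        inline std::size_t packSize(double)               { return sizeof(double); }
        inline std::size_t packSize(const std::string& s) { return sizeof(std::size_t) + s.size(); }

        // A map costs its element count plus the size of every key/value pair.
        template <class K, class V>
        std::size_t packSize(const std::map<K, V>& m)
        {
            auto n = sizeof(std::size_t);
            for (const auto& [k, v] : m)
                n += packSize(k) + packSize(v);
            return n;
        }

        struct GroupAndNetworkValues {               // stand-in for data::GroupAndNetworkValues
            std::map<std::string, double> groupData; // stand-in for data::GroupData values
            std::map<std::string, double> nodeData;  // stand-in for data::NodeData values
        };

        // Same shape as the new overload above: sum the members' sizes.
        inline std::size_t packSize(const GroupAndNetworkValues& v)
        { return packSize(v.groupData) + packSize(v.nodeData); }
    }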
@@ -278,7 +278,7 @@ std::size_t packSize(const RestartValue& data, Dune::MPIHelper::MPICommunicator
 {
     return packSize(data.solution, comm)
         + packSize(data.wells, comm)
-        + packSize(data.groups, comm)
+        + packSize(data.grp_nwrk, comm)
         + packSize(data.extra, comm);
 }
 
@@ -552,13 +552,11 @@ void pack(const data::WellRates& data, std::vector<char>& buffer, int& position,
          buffer, position, comm);
 }
 
-void pack(const data::GroupValues& data, std::vector<char>& buffer, int& position,
+void pack(const data::GroupAndNetworkValues& data, std::vector<char>& buffer, int& position,
           Dune::MPIHelper::MPICommunicator comm)
 {
-    // Needs explicit conversion to a supported base type holding the data
-    // to prevent throwing.
-    pack(static_cast<const std::map< std::string, data::GroupData>&>(data),
-         buffer, position, comm);
+    pack(data.groupData, buffer, position, comm);
+    pack(data.nodeData, buffer, position, comm);
 }
 
 void pack(const RestartValue& data, std::vector<char>& buffer, int& position,
@@ -566,7 +564,7 @@ void pack(const RestartValue& data, std::vector<char>& buffer, int& position,
 {
     pack(data.solution, buffer, position, comm);
     pack(data.wells, buffer, position, comm);
-    pack(data.groups, buffer, position, comm);
+    pack(data.grp_nwrk, buffer, position, comm);
     pack(data.extra, buffer, position, comm);
 }
 
@@ -858,13 +856,11 @@ void unpack(data::WellRates& data, std::vector<char>& buffer, int& position,
            buffer, position, comm);
 }
 
-void unpack(data::GroupValues& data, std::vector<char>& buffer, int& position,
+void unpack(data::GroupAndNetworkValues& data, std::vector<char>& buffer, int& position,
             Dune::MPIHelper::MPICommunicator comm)
 {
-    // Needs explicit conversion to a supported base type holding the data
-    // to prevent throwing.
-    unpack(static_cast<std::map< std::string, data::GroupData>&>(data),
-           buffer, position, comm);
+    unpack(data.groupData, buffer, position, comm);
+    unpack(data.nodeData, buffer, position, comm);
 }
 
 void unpack(RestartValue& data, std::vector<char>& buffer, int& position,
@@ -872,7 +868,7 @@ void unpack(RestartValue& data, std::vector<char>& buffer, int& position,
 {
     unpack(data.solution, buffer, position, comm);
     unpack(data.wells, buffer, position, comm);
-    unpack(data.groups, buffer, position, comm);
+    unpack(data.grp_nwrk, buffer, position, comm);
     unpack(data.extra, buffer, position, comm);
 }
 
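pack() now writes groupData then nodeData, and unpack() reads them back in the same order; RestartValue likewise walks solution, wells, grp_nwrk, extra on both sides. The round trip only reconstructs the object because both directions agree on that order. A toy sketch of the invariant, with hypothetical names and plain byte copies instead of MPI buffers:

    #include <cstring>
    #include <vector>

    namespace toy_roundtrip {
        struct Value { double groupish; double nodeish; };    // stand-ins for the two members

        inline void pack(const Value& v, std::vector<char>& buf)
        {
            const auto* a = reinterpret_cast<const char*>(&v.groupish);
            buf.insert(buf.end(), a, a + sizeof v.groupish);  // member 1 first
            const auto* b = reinterpret_cast<const char*>(&v.nodeish);
            buf.insert(buf.end(), b, b + sizeof v.nodeish);   // member 2 second
        }

        inline void unpack(Value& v, const std::vector<char>& buf)
        {
            // Must mirror pack(): same members, same order, same sizes.
            std::memcpy(&v.groupish, buf.data(), sizeof v.groupish);
            std::memcpy(&v.nodeish, buf.data() + sizeof v.groupish, sizeof v.nodeish);
        }
    }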
@@ -951,8 +947,8 @@ RestartValue loadParallelRestart(const EclipseIO* eclIO, Action::State& actionSt
 #if HAVE_MPI
     data::Solution sol;
     data::Wells wells;
-    data::GroupValues groups;
-    RestartValue restartValues(sol, wells, groups);
+    data::GroupAndNetworkValues grp_nwrk;
+    RestartValue restartValues(sol, wells, grp_nwrk);
 
     if (eclIO)
     {
@@ -310,7 +310,8 @@ ADD_PACK_PROTOTYPES(data::GuideRateValue)
 ADD_PACK_PROTOTYPES(data::GroupConstraints)
 ADD_PACK_PROTOTYPES(data::GroupGuideRates)
 ADD_PACK_PROTOTYPES(data::GroupData)
-ADD_PACK_PROTOTYPES(data::GroupValues)
+ADD_PACK_PROTOTYPES(data::NodeData)
+ADD_PACK_PROTOTYPES(data::GroupAndNetworkValues)
 ADD_PACK_PROTOTYPES(data::Well)
 ADD_PACK_PROTOTYPES(data::WellRates)
 ADD_PACK_PROTOTYPES(RestartKey)
@@ -18,6 +18,9 @@
 */
 
 #include <config.h>
 
 #include <opm/simulators/utils/ParallelSerialization.hpp>
 
+#include <opm/parser/eclipse/EclipseState/EclipseState.hpp>
+#include <opm/parser/eclipse/EclipseState/Schedule/DynamicState.hpp>
+#include <opm/parser/eclipse/EclipseState/Schedule/Schedule.hpp>
@@ -191,23 +191,15 @@ namespace Opm {
 
         void initFromRestartFile(const RestartValue& restartValues);
 
-        Opm::data::GroupValues
-        groupData(const int reportStepIdx, const Opm::Schedule& sched) const
+        Opm::data::GroupAndNetworkValues
+        groupAndNetworkData(const int reportStepIdx, const Opm::Schedule& sched) const
         {
-            auto gvalues = ::Opm::data::GroupValues{};
+            auto grp_nwrk_values = ::Opm::data::GroupAndNetworkValues{};
 
-            const auto groupGuideRates =
-                calculateAllGroupGuiderates(reportStepIdx, sched);
+            this->assignGroupValues(reportStepIdx, sched,
+                                    grp_nwrk_values.groupData);
 
-            for (const auto& gname : sched.groupNames(reportStepIdx)) {
-                const auto& grup = sched.getGroup(gname, reportStepIdx);
-
-                auto& gdata = gvalues[gname];
-                this->assignGroupControl(grup, gdata);
-                this->assignGroupGuideRates(grup, groupGuideRates, gdata);
-            }
-
-            return gvalues;
+            return grp_nwrk_values;
         }
 
         Opm::data::Wells wellData() const
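For orientation, a hedged sketch of how a caller can consume the value that groupAndNetworkData() now returns. The iteration over .groupData mirrors the wellsToState() hunk further down; the function name, the reporting body, and the NodeData::pressure field are illustrative assumptions, not code from this commit:

    #include <iostream>

    // wellModel is any object exposing groupAndNetworkData(reportStepIdx, sched),
    // such as the well model changed in this diff.
    template <class WellModel, class Schedule>
    void reportGroupAndNodes(const WellModel& wellModel,
                             const Schedule&  sched,
                             const int        reportStepIdx)
    {
        const auto grp_nwrk = wellModel.groupAndNetworkData(reportStepIdx, sched);

        for (const auto& [gname, gdata] : grp_nwrk.groupData)
            std::cout << "group " << gname << '\n';          // gdata: constraints, guide rates

        for (const auto& [node, ndata] : grp_nwrk.nodeData)
            std::cout << "node " << node << " p = " << ndata.pressure << '\n';
    }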
@@ -414,7 +406,7 @@ namespace Opm {
 
         // convert well data from opm-common to well state from opm-core
         void wellsToState( const data::Wells& wells,
-                           const data::GroupValues& groupValues,
+                           const data::GroupAndNetworkValues& grpNwrkValues,
                            const PhaseUsage& phases,
                            const bool handle_ms_well,
                            WellStateFullyImplicitBlackoil& state ) const;
@@ -444,6 +436,10 @@ namespace Opm {
 
         void setWsolvent(const Group& group, const Schedule& schedule, const int reportStepIdx, double wsolvent);
 
+        void assignGroupValues(const int reportStepIdx,
+                               const Schedule& sched,
+                               std::map<std::string, data::GroupData>& gvalues) const;
+
         std::unordered_map<std::string, data::GroupGuideRates>
         calculateAllGroupGuiderates(const int reportStepIdx, const Schedule& sched) const;
 
@@ -547,7 +547,7 @@ namespace Opm {
             const size_t numCells = Opm::UgGridHelpers::numCells(grid());
             const bool handle_ms_well = (param_.use_multisegment_well_ && anyMSWellOpenLocal());
             well_state_.resize(wells_ecl_, schedule(), handle_ms_well, numCells, phaseUsage, well_perf_data_, summaryState, globalNumWells); // Resize for restart step
-            wellsToState(restartValues.wells, restartValues.groups, phaseUsage, handle_ms_well, well_state_);
+            wellsToState(restartValues.wells, restartValues.grp_nwrk, phaseUsage, handle_ms_well, well_state_);
         }
 
         previous_well_state_ = well_state_;
@@ -1602,7 +1602,7 @@ namespace Opm {
     void
     BlackoilWellModel<TypeTag>::
     wellsToState( const data::Wells& wells,
-                  const data::GroupValues& groupValues,
+                  const data::GroupAndNetworkValues& grpNwrkValues,
                   const PhaseUsage& phases,
                   const bool handle_ms_well,
                   WellStateFullyImplicitBlackoil& state) const
@@ -1699,7 +1699,7 @@ namespace Opm {
             }
         }
 
-        for (const auto& [group, value] : groupValues) {
+        for (const auto& [group, value] : grpNwrkValues.groupData) {
             const auto cpc = value.currentControl.currentProdConstraint;
             const auto cgi = value.currentControl.currentGasInjectionConstraint;
             const auto cwi = value.currentControl.currentWaterInjectionConstraint;
@@ -2456,6 +2456,25 @@ namespace Opm {
         }
     }
 
+    template <typename TypeTag>
+    void
+    BlackoilWellModel<TypeTag>::
+    assignGroupValues(const int reportStepIdx,
+                      const Schedule& sched,
+                      std::map<std::string, data::GroupData>& gvalues) const
+    {
+        const auto groupGuideRates =
+            this->calculateAllGroupGuiderates(reportStepIdx, sched);
+
+        for (const auto& gname : sched.groupNames(reportStepIdx)) {
+            const auto& grup = sched.getGroup(gname, reportStepIdx);
+
+            auto& gdata = gvalues[gname];
+            this->assignGroupControl(grup, gdata);
+            this->assignGroupGuideRates(grup, groupGuideRates, gdata);
+        }
+    }
+
     template<typename TypeTag>
     std::unordered_map<std::string, data::GroupGuideRates>
     BlackoilWellModel<TypeTag>::
@@ -250,6 +250,13 @@ Opm::data::GroupData getGroupData()
         getGroupGuideRates()
     };
 }
 
+Opm::data::NodeData getNodeData()
+{
+    return Opm::data::NodeData {
+        123.457
+    };
+}
+
 }
 
@@ -385,6 +392,14 @@ BOOST_AUTO_TEST_CASE(dataGroupData)
     DO_CHECKS(data::GroupData)
 }
 
+BOOST_AUTO_TEST_CASE(dataNodeData)
+{
+    const auto val1 = getNodeData();
+    const auto val2 = PackUnpack(val1);
+
+    DO_CHECKS(data::NodeData)
+}
+
 BOOST_AUTO_TEST_CASE(CellData)
 {
     Opm::data::CellData val1;
@@ -409,12 +424,17 @@ BOOST_AUTO_TEST_CASE(RestartValue)
     auto wells1 = Opm::data::WellRates {{
         { "test_well", getWell() },
     }};
-    auto groups1 = Opm::data::GroupValues {{
-        { "test_group1", getGroupData() },
-    }};
+    auto grp_nwrk_1 = Opm::data::GroupAndNetworkValues {
+        { // .groupData
+            { "test_group1", getGroupData() },
+        },
+        { // .nodeData
+            { "test_node1", getNodeData() },
+        }
+    };
 
     const auto val1 = Opm::RestartValue {
-        getSolution(), std::move(wells1), std::move(groups1)
+        getSolution(), std::move(wells1), std::move(grp_nwrk_1)
     };
     const auto val2 = PackUnpack(val1);
 