Mirror of https://github.com/OPM/opm-simulators.git
Merge pull request #2268 from akva2/noecl_flush_summarycfg
Avoid using global ecl state when setting up SummaryConfig on non-root processes
Commit 736f647936
@@ -347,10 +347,19 @@ int main(int argc, char** argv)
 
         eclipseState.reset( new Opm::EclipseState(*deck, parseContext, errorGuard ));
         schedule.reset(new Opm::Schedule(*deck, *eclipseState, parseContext, errorGuard));
-        summaryConfig.reset( new Opm::SummaryConfig(*deck, *schedule, eclipseState->getTableManager(), parseContext, errorGuard));
         if (mpiRank == 0) {
             setupMessageLimiter(schedule->getMessageLimits(), "STDOUT_LOGGER");
+            summaryConfig.reset( new Opm::SummaryConfig(*deck, *schedule, eclipseState->getTableManager(), parseContext, errorGuard));
+#ifdef HAVE_MPI
+            Opm::Mpi::packAndSend(*summaryConfig, mpiHelper.getCollectiveCommunication());
+#endif
         }
+#ifdef HAVE_MPI
+        else {
+            summaryConfig.reset(new Opm::SummaryConfig);
+            Opm::Mpi::receiveAndUnpack(*summaryConfig, mpiHelper.getCollectiveCommunication());
+        }
+#endif
 
         Opm::checkConsistentArrayDimensions(*eclipseState, *schedule, parseContext, errorGuard);
 
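This hunk makes rank 0 the only process that constructs the SummaryConfig from the parsed deck: the root builds it (and, with MPI, broadcasts the serialized object), while every other rank default-constructs one and fills it from the broadcast. A minimal sketch of that handshake in raw MPI, independent of the patch (the string payload merely stands in for the serialized object):

    #include <mpi.h>
    #include <string>
    #include <vector>

    // Root builds the (expensive) object; all ranks end up with its bytes.
    std::string distributeFromRoot(int rank)
    {
        std::vector<char> buffer;
        int size = 0;
        if (rank == 0) {
            std::string payload = "parsed-on-root";  // stands in for the real object
            buffer.assign(payload.begin(), payload.end());
            size = static_cast<int>(buffer.size());
        }
        MPI_Bcast(&size, 1, MPI_INT, 0, MPI_COMM_WORLD);              // length first
        buffer.resize(size);
        MPI_Bcast(buffer.data(), size, MPI_CHAR, 0, MPI_COMM_WORLD);  // payload second
        return std::string(buffer.begin(), buffer.end());
    }

The two-step broadcast, length first and payload second, is exactly the shape of the packAndSend/receiveAndUnpack pair introduced further down in this diff.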
@@ -63,6 +63,7 @@
 #include <opm/parser/eclipse/EclipseState/Schedule/Well/WListManager.hpp>
 #include <opm/parser/eclipse/EclipseState/SimulationConfig/SimulationConfig.hpp>
 #include <opm/parser/eclipse/EclipseState/SimulationConfig/ThresholdPressure.hpp>
+#include <opm/parser/eclipse/EclipseState/SummaryConfig/SummaryConfig.hpp>
 #include <opm/parser/eclipse/EclipseState/Tables/Aqudims.hpp>
 #include <opm/parser/eclipse/EclipseState/Tables/ColumnSchema.hpp>
 #include <opm/parser/eclipse/EclipseState/Tables/Eqldims.hpp>
@@ -221,6 +222,18 @@ std::size_t packSize(const std::unordered_set<T,H,KE,A>& data,
     return totalSize;
 }
 
+template<class K, class C, class A>
+std::size_t packSize(const std::set<K,C,A>& data,
+                     Dune::MPIHelper::MPICommunicator comm)
+{
+    std::size_t totalSize = packSize(data.size(), comm);
+    for (const auto& entry : data)
+    {
+        totalSize += packSize(entry, comm);
+    }
+    return totalSize;
+}
+
 template<class Key, class Value>
 std::size_t packSize(const OrderedMap<Key,Value>& data, Dune::MPIHelper::MPICommunicator comm)
 {
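The new std::set overload follows the same size-prefix convention as the unordered_set overload above it: a container's serialized footprint is the footprint of its element count plus the recursively computed footprint of each entry. A toy analogue without MPI (toySize is a hypothetical name, not part of the patch):

    #include <numeric>
    #include <set>
    #include <string>

    std::size_t toySize(std::size_t)          { return sizeof(std::size_t); }
    std::size_t toySize(const std::string& s) { return toySize(s.size()) + s.size(); }

    // A set's footprint: count prefix plus the (recursively computed) entries.
    std::size_t toySize(const std::set<std::string>& data)
    {
        return std::accumulate(data.begin(), data.end(), toySize(data.size()),
                               [](std::size_t acc, const std::string& e)
                               { return acc + toySize(e); });
    }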
@@ -1663,6 +1676,26 @@ std::size_t packSize(const Schedule& data,
            packSize(data.getWellGroupEvents(), comm);
 }
 
+std::size_t packSize(const SummaryNode& data,
+                     Dune::MPIHelper::MPICommunicator comm)
+{
+    return packSize(data.keyword(), comm) +
+           packSize(data.category(), comm) +
+           packSize(data.location(), comm) +
+           packSize(data.type(), comm) +
+           packSize(data.namedEntity(), comm) +
+           packSize(data.number(), comm) +
+           packSize(data.isUserDefined(), comm);
+}
+
+std::size_t packSize(const SummaryConfig& data,
+                     Dune::MPIHelper::MPICommunicator comm)
+{
+    return packSize(data.getKwds(), comm) +
+           packSize(data.getShortKwds(), comm) +
+           packSize(data.getSmryKwds(), comm);
+}
+
 ////// pack routines
 
 template<class T>
@@ -1752,6 +1785,19 @@ void pack(const std::vector<T, A>& data, std::vector<char>& buffer, int& position
     pack(entry, buffer, position, comm);
 }
 
+template<class K, class C, class A>
+void pack(const std::set<K,C,A>& data,
+          std::vector<char>& buffer, int& position,
+          Dune::MPIHelper::MPICommunicator comm)
+{
+    pack(data.size(), buffer, position, comm);
+
+    for (const auto& entry : data)
+    {
+        pack(entry, buffer, position, comm);
+    }
+}
+
 template<class T, class H, class KE, class A>
 void pack(const std::unordered_set<T,H,KE,A>& data,
           std::vector<char>& buffer, int& position,
@@ -3347,6 +3393,28 @@ void pack(const Schedule& data,
     pack(data.getWellGroupEvents(), buffer, position, comm);
 }
 
+void pack(const SummaryNode& data,
+          std::vector<char>& buffer, int& position,
+          Dune::MPIHelper::MPICommunicator comm)
+{
+    pack(data.keyword(), buffer, position, comm);
+    pack(data.category(), buffer, position, comm);
+    pack(data.location(), buffer, position, comm);
+    pack(data.type(), buffer, position, comm);
+    pack(data.namedEntity(), buffer, position, comm);
+    pack(data.number(), buffer, position, comm);
+    pack(data.isUserDefined(), buffer, position, comm);
+}
+
+void pack(const SummaryConfig& data,
+          std::vector<char>& buffer, int& position,
+          Dune::MPIHelper::MPICommunicator comm)
+{
+    pack(data.getKwds(), buffer, position, comm);
+    pack(data.getShortKwds(), buffer, position, comm);
+    pack(data.getSmryKwds(), buffer, position, comm);
+}
+
 /// unpack routines
 
 template<class T>
@@ -3473,6 +3541,22 @@ void unpack(std::tuple<Ts...>& data, std::vector<char>& buffer,
     unpack_tuple_entry(data, buffer, position, comm);
 }
 
+template<class K, class C, class A>
+void unpack(std::set<K,C,A>& data,
+            std::vector<char>& buffer, int& position,
+            Dune::MPIHelper::MPICommunicator comm)
+{
+    std::size_t size = 0;
+    unpack(size, buffer, position, comm);
+
+    for (; size > 0; size--)
+    {
+        K entry;
+        unpack(entry, buffer, position, comm);
+        data.insert(entry);
+    }
+}
+
 template<class T, class H, class KE, class A>
 void unpack(std::unordered_set<T,H,KE,A>& data,
             std::vector<char>& buffer, int& position,
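With packSize, pack, and unpack for std::set all in place, a round trip looks like the sketch below; it assumes the Opm::Mpi free functions from this diff are in scope via the ParallelRestart header. unpack must read in exactly the order pack wrote, with the count prefix telling the reader when to stop:

    #include <set>
    #include <string>
    #include <vector>

    void roundTrip(Dune::MPIHelper::MPICommunicator comm)
    {
        std::set<std::string> src{"WOPR", "WWIR"};
        std::vector<char> buffer(Opm::Mpi::packSize(src, comm));
        int pos = 0;
        Opm::Mpi::pack(src, buffer, pos, comm);    // writes count, then entries

        std::set<std::string> dst;
        pos = 0;
        Opm::Mpi::unpack(dst, buffer, pos, comm);  // reads them back in the same order
        // dst == src; pos == static_cast<int>(buffer.size())
    }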
@@ -5763,6 +5847,45 @@ void unpack(Schedule& data, std::vector<char>& buffer, int& position,
                 rftConfig, nupCol, wellGroupEvents);
 }
 
+void unpack(SummaryNode& data,
+            std::vector<char>& buffer, int& position,
+            Dune::MPIHelper::MPICommunicator comm)
+{
+    std::string keyword;
+    SummaryNode::Category category;
+    Location location;
+    SummaryNode::Type type;
+    std::string namedEntity;
+    int number;
+    bool isUserDefined;
+
+    unpack(keyword, buffer, position, comm);
+    unpack(category, buffer, position, comm);
+    unpack(location, buffer, position, comm);
+    unpack(type, buffer, position, comm);
+    unpack(namedEntity, buffer, position, comm);
+    unpack(number, buffer, position, comm);
+    unpack(isUserDefined, buffer, position, comm);
+    data = SummaryNode{keyword, category, location}
+           .parameterType(type)
+           .namedEntity(namedEntity)
+           .number(number)
+           .isUserDefined(isUserDefined);
+}
+
+void unpack(SummaryConfig& data,
+            std::vector<char>& buffer, int& position,
+            Dune::MPIHelper::MPICommunicator comm)
+{
+    SummaryConfig::keyword_list kwds;
+    std::set<std::string> shortKwds, smryKwds;
+
+    unpack(kwds, buffer, position, comm);
+    unpack(shortKwds, buffer, position, comm);
+    unpack(smryKwds, buffer, position, comm);
+    data = SummaryConfig(kwds, shortKwds, smryKwds);
+}
+
 #define INSTANTIATE_PACK_VECTOR(T) \
 template std::size_t packSize(const std::vector<T>& data, \
                               Dune::MPIHelper::MPICommunicator comm); \
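Note how unpack(SummaryNode&) cannot assign members directly: it reads each field into a local and then re-runs the same constructor-plus-chained-setters sequence the parser uses. The pattern in miniature (Node is an illustrative type, not OPM code):

    #include <string>

    struct Node {
        std::string keyword;
        int number = 0;
        Node& setNumber(int n) { number = n; return *this; }
    };

    // Deserialize by re-running the construction chain, as unpack(SummaryNode&) does.
    Node rebuild(const std::string& kw, int n)
    {
        return Node{kw}.setNumber(n);   // mirrors SummaryNode{...}.number(...)
    }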
@@ -54,6 +54,7 @@
 
 #include <dune/common/parallel/mpihelper.hh>
 
+#include <set>
 #include <tuple>
 #include <vector>
 #include <map>
@@ -125,6 +126,8 @@ class SimpleTable;
 class SkprpolyTable;
 class SkprwatTable;
 class SpiralICD;
+class SummaryConfig;
+class SummaryNode;
 class Tabdims;
 class TableColumn;
 class TableContainer;
@@ -189,6 +192,10 @@ std::size_t packSize(const std::pair<T1,T2>& data, Dune::MPIHelper::MPICommunicator comm);
 template<class T, class A>
 std::size_t packSize(const std::vector<T,A>& data, Dune::MPIHelper::MPICommunicator comm);
 
+template<class K, class C, class A>
+std::size_t packSize(const std::set<K,C,A>& data,
+                     Dune::MPIHelper::MPICommunicator comm);
+
 template<class T, class H, class KE, class A>
 std::size_t packSize(const std::unordered_set<T,H,KE,A>& data,
                      Dune::MPIHelper::MPICommunicator comm);
@@ -330,6 +337,11 @@ template<class... Ts>
 void pack(const std::tuple<Ts...>& data, std::vector<char>& buffer,
           int& position, Dune::MPIHelper::MPICommunicator comm);
 
+template<class K, class C, class A>
+void pack(const std::set<K,C,A>& data,
+          std::vector<char>& buffer, int& position,
+          Dune::MPIHelper::MPICommunicator comm);
+
 template<class T, class H, class KE, class A>
 void pack(const std::unordered_set<T,H,KE,A>& data,
           std::vector<char>& buffer, int& position,
@@ -488,6 +500,11 @@ template<class... Ts>
 void unpack(std::tuple<Ts...>& data, std::vector<char>& buffer,
             int& position, Dune::MPIHelper::MPICommunicator comm);
 
+template<class K, class C, class A>
+void unpack(std::set<K,C,A>& data,
+            std::vector<char>& buffer, int& position,
+            Dune::MPIHelper::MPICommunicator comm);
+
 template<class T, class H, class KE, class A>
 void unpack(std::unordered_set<T,H,KE,A>& data,
             std::vector<char>& buffer, int& position,
@@ -689,6 +706,8 @@ ADD_PACK_PROTOTYPES(SkprpolyTable)
 ADD_PACK_PROTOTYPES(SkprwatTable)
 ADD_PACK_PROTOTYPES(SpiralICD)
 ADD_PACK_PROTOTYPES(std::string)
+ADD_PACK_PROTOTYPES(SummaryConfig)
+ADD_PACK_PROTOTYPES(SummaryNode)
 ADD_PACK_PROTOTYPES(Tabdims)
 ADD_PACK_PROTOTYPES(TableColumn)
 ADD_PACK_PROTOTYPES(TableContainer)
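ADD_PACK_PROTOTYPES(SummaryConfig) and ADD_PACK_PROTOTYPES(SummaryNode) declare the non-template packSize/pack/unpack trio for the two new types. The macro is defined earlier in this header; judging from the free-function declarations above, it should expand to roughly the following (a sketch, not copied from the source):

    #define ADD_PACK_PROTOTYPES(T) \
        std::size_t packSize(const T& data, Dune::MPIHelper::MPICommunicator comm); \
        void pack(const T& data, std::vector<char>& buffer, int& position, \
                  Dune::MPIHelper::MPICommunicator comm); \
        void unpack(T& data, std::vector<char>& buffer, int& position, \
                    Dune::MPIHelper::MPICommunicator comm);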
@@ -736,6 +755,31 @@ ADD_PACK_PROTOTYPES(WellTracerProperties)
 ADD_PACK_PROTOTYPES(WList)
 ADD_PACK_PROTOTYPES(WListManager)
 
+template<class T>
+const T& packAndSend(const T& in, const auto& comm)
+{
+    if (comm.size() == 0)
+        return in;
+
+    std::size_t size = packSize(in, comm);
+    std::vector<char> buffer(size);
+    int pos = 0;
+    Mpi::pack(in, buffer, pos, comm);
+    comm.broadcast(&pos, 1, 0);
+    comm.broadcast(buffer.data(), pos, 0);
+    return in;
+}
+
+template<class T>
+void receiveAndUnpack(T& result, const auto& comm)
+{
+    int size;
+    comm.broadcast(&size, 1, 0);
+    std::vector<char> buffer(size);
+    comm.broadcast(buffer.data(), size, 0);
+    int pos = 0;
+    unpack(result, buffer, pos, comm);
+}
+
 } // end namespace Mpi
 
 RestartValue loadParallelRestart(const EclipseIO* eclIO, SummaryState& summaryState,
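packAndSend and receiveAndUnpack are two halves of one collective operation: both sides issue two broadcasts, first the packed size and then the buffer, so every rank must reach its half of the pair or the broadcasts will not line up. The guard on comm.size() lets the same code path run when there are no peers to send to. Their intended pairing is the pattern from the main() hunk at the top of this diff:

    // comm is mpiHelper.getCollectiveCommunication(); mirrors the main() hunk above
    if (mpiRank == 0)
        Opm::Mpi::packAndSend(*summaryConfig, comm);       // pack, broadcast size, broadcast bytes
    else
        Opm::Mpi::receiveAndUnpack(*summaryConfig, comm);  // receive both broadcasts, then unpack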
@@ -74,6 +74,7 @@
 #include <opm/parser/eclipse/EclipseState/Schedule/Well/WListManager.hpp>
 #include <opm/parser/eclipse/EclipseState/SimulationConfig/SimulationConfig.hpp>
 #include <opm/parser/eclipse/EclipseState/SimulationConfig/ThresholdPressure.hpp>
+#include <opm/parser/eclipse/EclipseState/SummaryConfig/SummaryConfig.hpp>
 #include <opm/parser/eclipse/EclipseState/Tables/Aqudims.hpp>
 #include <opm/parser/eclipse/EclipseState/Tables/ColumnSchema.hpp>
 #include <opm/parser/eclipse/EclipseState/Tables/Eqldims.hpp>
@@ -2340,6 +2341,41 @@ BOOST_AUTO_TEST_CASE(Schedule)
 }
 
 
+BOOST_AUTO_TEST_CASE(SummaryNode)
+{
+#ifdef HAVE_MPI
+    auto val1 = Opm::SummaryNode{"test1", Opm::SummaryNode::Category::Region,
+                                 Opm::Location{"test2", 1}}
+                .parameterType(Opm::SummaryNode::Type::Pressure)
+                .namedEntity("test3")
+                .number(2)
+                .isUserDefined(true);
+
+    auto val2 = PackUnpack(val1);
+    BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
+    BOOST_CHECK(val1 == std::get<0>(val2));
+#endif
+}
+
+
+BOOST_AUTO_TEST_CASE(SummaryConfig)
+{
+#ifdef HAVE_MPI
+    auto node = Opm::SummaryNode{"test1", Opm::SummaryNode::Category::Region,
+                                 Opm::Location{"test2", 1}}
+                .parameterType(Opm::SummaryNode::Type::Pressure)
+                .namedEntity("test3")
+                .number(2)
+                .isUserDefined(true);
+    Opm::SummaryConfig val1({node}, {"test1", "test2"}, {"test3", "test4"});
+
+    auto val2 = PackUnpack(val1);
+    BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
+    BOOST_CHECK(val1 == std::get<0>(val2));
+#endif
+}
+
+
 bool init_unit_test_func()
 {
     return true;
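Both test cases lean on the PackUnpack helper defined earlier in this test file. Inferred from how it is used here (so treat this as a sketch rather than the actual helper, including the hypothetical name PackUnpackSketch), it packs the value, unpacks it into a fresh object, and returns the result with both buffer positions; the first BOOST_CHECK then asserts that pack and unpack consumed the same number of bytes, and the second that the round trip preserved equality:

    #include <tuple>
    #include <vector>

    template<class T>
    std::tuple<T, int, int> PackUnpackSketch(const T& in,
                                             Dune::MPIHelper::MPICommunicator comm)
    {
        std::vector<char> buffer(Opm::Mpi::packSize(in, comm));
        int pos1 = 0;
        Opm::Mpi::pack(in, buffer, pos1, comm);     // serialize
        T out;                                      // assumes T is default-constructible
        int pos2 = 0;
        Opm::Mpi::unpack(out, buffer, pos2, comm);  // deserialize
        return {out, pos1, pos2};
    }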