Merge pull request #2483 from akva2/serialize_internal_schedule

Internal serialization in Schedule
Arne Morten Kvarving 2020-03-19 12:53:10 +01:00 committed by GitHub
commit 82c65f9f5d
10 changed files with 262 additions and 2776 deletions


@ -46,7 +46,8 @@ if(CUDA_FOUND)
list (APPEND MAIN_SOURCE_FILES opm/simulators/linalg/bda/cusparseSolverBackend.cu)
endif()
if(MPI_FOUND)
list(APPEND MAIN_SOURCE_FILES opm/simulators/utils/ParallelEclipseState.cpp)
list(APPEND MAIN_SOURCE_FILES opm/simulators/utils/ParallelEclipseState.cpp
opm/simulators/utils/ParallelSerialization.cpp)
endif()
# originally generated with the command:


@ -25,23 +25,29 @@
namespace Opm {
/*! \brief Class for (de-)serializing and broadcasting data in parallel.
*! \details Can be called on any class with a serializeOp member. Such classes
*! are referred to as 'complex types' in the documentation.
*/
class EclMpiSerializer {
public:
enum class Operation {
PACKSIZE,
PACK,
UNPACK
};
//! \brief Constructor.
//! \param comm The global communicator to broadcast using
explicit EclMpiSerializer(Dune::CollectiveCommunication<Dune::MPIHelper::MPICommunicator> comm) :
m_comm(comm)
{}
//! \brief (De-)serialization for simple types.
//! \details The data handled by this depends on the underlying serialization used.
//! Currently you can call this for scalars, and stl containers with scalars.
template<class T>
void operator()(const T& data)
{
if constexpr (is_shared_ptr<T>::value) {
shared_ptr(data);
if constexpr (is_ptr<T>::value) {
ptr(data);
} else if constexpr (is_pair<T>::value) {
pair(data);
} else {
if (m_op == Operation::PACKSIZE)
m_packSize += Mpi::packSize(data, m_comm);
@ -52,7 +58,11 @@ public:
}
}
template<class T>
//! \brief Handler for vectors.
//! \tparam T Type for vector elements
//! \tparam complexType Whether or not T is a complex type
//! \param data The vector to (de-)serialize
template<class T, bool complexType = true>
void vector(std::vector<T>& data)
{
auto handle = [&](auto& d)
@ -60,8 +70,10 @@ public:
for (auto& it : d) {
if constexpr (is_pair<T>::value)
pair(it);
else if constexpr (is_shared_ptr<T>::value)
shared_ptr(it);
else if constexpr (is_ptr<T>::value)
ptr(it);
else if constexpr (!complexType)
(*this)(it);
else
it.serializeOp(*this);
}
@ -81,17 +93,28 @@ public:
}
}
template<template<class Key, class Data> class Map, class Key, class Data>
void map(Map<Key, Data>& data)
//! \brief Handler for maps.
//! \tparam Map map type
//! \tparam complexType Whether or not Data in map is a complex type
//! \param data The map to (de-)serialize
template<class Map, bool complexType = true>
void map(Map& data)
{
using Key = typename Map::key_type;
using Data = typename Map::mapped_type;
auto handle = [&](auto& d)
{
if constexpr (is_vector<Data>::value)
vector(d);
else if constexpr (is_shared_ptr<Data>::value)
shared_ptr(d);
else
this->template vector<typename Data::value_type,complexType>(d);
else if constexpr (is_ptr<Data>::value)
ptr(d);
else if constexpr (is_dynamic_state<Data>::value)
d.template serializeOp<EclMpiSerializer, complexType>(*this);
else if constexpr (complexType)
d.serializeOp(*this);
else
(*this)(d);
};
if (m_op == Operation::PACKSIZE) {
@ -119,6 +142,9 @@ public:
}
}
//! \brief Call this to serialize data.
//! \tparam T Type of class to serialize
//! \param data Class to serialize
template<class T>
void pack(T& data)
{
@ -131,6 +157,9 @@ public:
data.serializeOp(*this);
}
//! \brief Call this to de-serialize data.
//! \tparam T Type of class to de-serialize
//! \param data Class to de-serialize
template<class T>
void unpack(T& data)
{
@ -139,13 +168,15 @@ public:
data.serializeOp(*this);
}
//! \brief Serialize and broadcast on root process, de-serialize on others.
//! \tparam T Type of class to broadcast
//! \param data Class to broadcast
template<class T>
void broadcast(T& data)
{
if (m_comm.size() == 1)
return;
#if HAVE_MPI
if (m_comm.rank() == 0) {
pack(data);
m_comm.broadcast(&m_position, 1, 0);
@ -156,20 +187,29 @@ public:
m_comm.broadcast(m_buffer.data(), m_packSize, 0);
unpack(data);
}
#endif
}
//! \brief Returns current position in buffer.
size_t position() const
{
return m_position;
}
//! \brief Returns true if we are currently doing a serialization operation.
bool isSerializing() const
{
return m_op != Operation::UNPACK;
}
protected:
//! \brief Enumeration of operations.
enum class Operation {
PACKSIZE, //!< Calculating serialization buffer size
PACK, //!< Performing serialization
UNPACK //!< Performing de-serialization
};
//! \brief Predicate for detecting pairs.
template<class T>
struct is_pair {
constexpr static bool value = false;
@ -180,6 +220,7 @@ protected:
constexpr static bool value = true;
};
//! \brief Predicate for detecting vectors.
template<class T>
struct is_vector {
constexpr static bool value = false;
@ -190,16 +231,36 @@ protected:
constexpr static bool value = true;
};
//! \brief Predicate for smart pointers.
template<class T>
struct is_shared_ptr {
struct is_ptr {
constexpr static bool value = false;
};
template<class T1>
struct is_shared_ptr<std::shared_ptr<T1>> {
struct is_ptr<std::shared_ptr<T1>> {
constexpr static bool value = true;
};
template<class T1>
struct is_ptr<std::unique_ptr<T1>> {
constexpr static bool value = true;
};
//! \brief Predicate for DynamicState.
template<class T>
struct is_dynamic_state {
constexpr static bool value = false;
};
template<class T1>
struct is_dynamic_state<DynamicState<T1>> {
constexpr static bool value = true;
};
//! \brief Handler for pairs.
//! \details If data is POD or a string, we pass it to the underlying serializer;
//! if not, we assume a complex type.
template<class T1, class T2>
void pair(const std::pair<T1,T2>& data)
{
@ -214,24 +275,27 @@ protected:
const_cast<T2&>(data.second).serializeOp(*this);
}
template<class T1>
void shared_ptr(const std::shared_ptr<T1>& data)
//! \brief Handler for smart pointers.
//! \details If data is POD or a string, we pass it to the underlying serializer;
//! if not, we assume a complex type.
template<template<class T> class PtrType, class T1>
void ptr(const PtrType<T1>& data)
{
bool value = data ? true : false;
(*this)(value);
if (m_op == Operation::UNPACK && value) {
const_cast<std::shared_ptr<T1>&>(data) = std::make_shared<T1>();
const_cast<PtrType<T1>&>(data).reset(new T1);
}
if (data)
data->serializeOp(*this);
}
Dune::CollectiveCommunication<Dune::MPIHelper::MPICommunicator> m_comm;
Dune::CollectiveCommunication<Dune::MPIHelper::MPICommunicator> m_comm; //!< Communicator to broadcast using
Operation m_op = Operation::PACKSIZE;
size_t m_packSize = 0;
int m_position = 0;
std::vector<char> m_buffer;
Operation m_op = Operation::PACKSIZE; //!< Current operation
size_t m_packSize = 0; //!< Required buffer size after PACKSIZE has been done
int m_position = 0; //!< Current position in buffer
std::vector<char> m_buffer; //!< Buffer for serialized data
};
}
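
For reference, a minimal sketch of the serializeOp contract this serializer relies on. 'Limits' is a made-up example type, not part of this change; it only illustrates how a class hands its members back to the serializer:

#include <memory>
#include <vector>

struct Limits {
    double rate;
    std::vector<int> steps;
    std::shared_ptr<Limits> fallback;

    // Called by EclMpiSerializer for PACKSIZE, PACK and UNPACK alike.
    template<class Serializer>
    void serializeOp(Serializer& serializer)
    {
        serializer(rate);      // scalar, handled directly by operator()
        serializer(steps);     // stl container of scalars
        serializer(fallback);  // smart pointer, operator() dispatches to ptr()
    }
};

// Broadcasting from rank 0 in an MPI build would then look like:
// Opm::EclMpiSerializer ser(Dune::MPIHelper::getCollectiveCommunication());
// Limits limits;
// ser.broadcast(limits);  // pack on root, broadcast the buffer, unpack elsewhere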


@ -57,9 +57,6 @@
#include <opm/parser/eclipse/EclipseState/checkDeck.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Schedule.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/UDQ/UDQAssign.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/UDQ/UDQActive.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/UDQ/UDQASTNode.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/UDQ/UDQConfig.hpp>
#include <opm/parser/eclipse/EclipseState/SummaryConfig/SummaryConfig.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/ArrayDimChecker.hpp>
@ -73,8 +70,8 @@
#endif
#if HAVE_MPI
#include <ebos/eclmpiserializer.hh>
#include <opm/simulators/utils/ParallelEclipseState.hpp>
#include <opm/simulators/utils/ParallelSerialization.hpp>
#endif
BEGIN_PROPERTIES
@ -392,10 +389,7 @@ int main(int argc, char** argv)
schedule.reset(new Opm::Schedule);
eclipseState.reset(new Opm::ParallelEclipseState);
}
Opm::EclMpiSerializer ser(mpiHelper.getCollectiveCommunication());
ser.broadcast(*summaryConfig);
ser.broadcast(*eclipseState);
ser.broadcast(*schedule);
Opm::eclStateBroadcast(*eclipseState, *schedule, *summaryConfig);
#endif
Opm::checkConsistentArrayDimensions(*eclipseState, *schedule, parseContext, errorGuard);


@ -38,10 +38,7 @@
#include <opm/parser/eclipse/EclipseState/EclipseState.hpp>
#include <opm/parser/eclipse/EclipseState/checkDeck.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/ArrayDimChecker.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/UDQ/UDQActive.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/UDQ/UDQAssign.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/UDQ/UDQASTNode.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/UDQ/UDQConfig.hpp>
//#include <opm/material/fluidsystems/BlackOilFluidSystemSimple.hpp>
//#include <opm/material/fluidsystems/BlackOilFluidSystemSimple.hpp>
@ -56,8 +53,8 @@
#endif
#if HAVE_MPI
#include <ebos/eclmpiserializer.hh>
#include <opm/simulators/utils/ParallelEclipseState.hpp>
#include <opm/simulators/utils/ParallelSerialization.hpp>
#endif
@ -403,10 +400,7 @@ int mainFlow(int argc, char** argv)
schedule.reset(new Opm::Schedule);
eclipseState.reset(new Opm::ParallelEclipseState);
}
Opm::EclMpiSerializer ser(mpiHelper.getCollectiveCommunication());
ser.broadcast(*summaryConfig);
ser.broadcast(*eclipseState);
ser.broadcast(*schedule);
Opm::eclStateBroadcast(*eclipseState, *schedule, *summaryConfig);
#endif
Opm::checkConsistentArrayDimensions(*eclipseState, *schedule, parseContext, errorGuard);


@ -27,8 +27,6 @@
namespace Opm {
class EclMpiSerializer;
/*! \brief Parallel frontend to the field properties.
*
* \details This is a parallel frontend to the mpi-unaware

File diff suppressed because it is too large


@ -26,16 +26,6 @@
#include <opm/output/eclipse/RestartValue.hpp>
#include <opm/output/eclipse/EclipseIO.hpp>
#include <opm/output/eclipse/Summary.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/DynamicState.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/DynamicVector.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Group/GConSale.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Group/GConSump.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Group/Group.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Group/GuideRateConfig.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/TimeMap.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Well/Well.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Well/WellTestConfig.hpp>
#include <opm/parser/eclipse/EclipseState/Util/OrderedMap.hpp>
#include <dune/common/parallel/mpihelper.hh>
@ -48,46 +38,6 @@
namespace Opm
{
namespace Action {
class Actions;
class ActionX;
class AST;
class ASTNode;
class Condition;
class Quantity;
}
class Connection;
class DeckItem;
class DeckRecord;
class Dimension;
class Events;
template<class T> class IOrderSet;
class Location;
class MessageLimits;
class MLimits;
class OilVaporizationProperties;
class RestartConfig;
class RestartSchedule;
class RFTConfig;
class Segment;
class SpiralICD;
class TimeStampUTC;
class Tuning;
class UDAValue;
class UnitSystem;
class Valve;
class VFPInjTable;
class VFPProdTable;
class WellConnections;
class WellEconProductionLimits;
class WellFoamProperties;
class WellPolymerProperties;
class WellSegments;
class WellTracerProperties;
class WList;
class WListManager;
namespace Mpi
{
template<class T>
@ -148,17 +98,9 @@ std::size_t packSize(const std::vector<bool,A>& data, Dune::MPIHelper::MPICommun
template<class... Ts>
std::size_t packSize(const std::tuple<Ts...>& data, Dune::MPIHelper::MPICommunicator comm);
template<class T>
std::size_t packSize(const std::shared_ptr<T>& data,
Dune::MPIHelper::MPICommunicator comm);
template<class T, std::size_t N>
std::size_t packSize(const std::array<T,N>& data, Dune::MPIHelper::MPICommunicator comm);
template<class T>
std::size_t packSize(const std::unique_ptr<T>& data,
Dune::MPIHelper::MPICommunicator comm);
std::size_t packSize(const char* str, Dune::MPIHelper::MPICommunicator comm);
std::size_t packSize(const std::string& str, Dune::MPIHelper::MPICommunicator comm);
@ -169,18 +111,6 @@ std::size_t packSize(const std::map<T1,T2,C,A>& data, Dune::MPIHelper::MPICommun
template<class T1, class T2, class H, class P, class A>
std::size_t packSize(const std::unordered_map<T1,T2,H,P,A>& data, Dune::MPIHelper::MPICommunicator comm);
template<class Key, class Value>
std::size_t packSize(const OrderedMap<Key,Value>& data, Dune::MPIHelper::MPICommunicator comm);
template<class T>
std::size_t packSize(const DynamicVector<T>& data, Dune::MPIHelper::MPICommunicator comm);
template<class T>
std::size_t packSize(const DynamicState<T>& data, Dune::MPIHelper::MPICommunicator comm);
template<class T>
std::size_t packSize(const IOrderSet<T>& data, Dune::MPIHelper::MPICommunicator comm);
////// pack routines
template<class T>
@ -250,18 +180,10 @@ void pack(const std::unordered_set<T,H,KE,A>& data,
std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T>
void pack(const std::shared_ptr<T>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T, size_t N>
void pack(const std::array<T,N>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T>
void pack(const std::unique_ptr<T>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T1, class T2, class C, class A>
void pack(const std::map<T1,T2,C,A>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
@ -270,22 +192,6 @@ template<class T1, class T2, class H, class P, class A>
void pack(const std::unordered_map<T1,T2,H,P,A>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class Key, class Value>
void pack(const OrderedMap<Key,Value>& data, std::vector<char>& buffer,
int& position, Dune::MPIHelper::MPICommunicator comm);
template<class T>
void pack(const DynamicState<T>& data, std::vector<char>& buffer,
int& position, Dune::MPIHelper::MPICommunicator comm);
template<class T>
void pack(const DynamicVector<T>& data, std::vector<char>& buffer,
int& position, Dune::MPIHelper::MPICommunicator comm);
template<class T>
void pack(const IOrderSet<T>& data, std::vector<char>& buffer,
int& position, Dune::MPIHelper::MPICommunicator comm);
void pack(const char* str, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
@ -359,18 +265,10 @@ void unpack(std::unordered_set<T,H,KE,A>& data,
std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T>
void unpack(std::shared_ptr<T>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T, size_t N>
void unpack(std::array<T,N>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T>
void unpack(std::unique_ptr<T>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T1, class T2, class C, class A>
void unpack(std::map<T1,T2,C,A>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
@ -379,22 +277,6 @@ template<class T1, class T2, class H, class P, class A>
void unpack(std::unordered_map<T1,T2,H,P,A>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class Key, class Value>
void unpack(OrderedMap<Key,Value>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T>
void unpack(DynamicState<T>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T>
void unpack(DynamicVector<T>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T>
void unpack(IOrderSet<T>& data, std::vector<char>& buffer,
int& position, Dune::MPIHelper::MPICommunicator comm);
void unpack(char* str, std::size_t length, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
@ -407,13 +289,6 @@ void unpack(char* str, std::size_t length, std::vector<char>& buffer, int& posit
void unpack(T& data, std::vector<char>& buffer, int& position, \
Dune::MPIHelper::MPICommunicator comm);
ADD_PACK_PROTOTYPES(Action::Actions)
ADD_PACK_PROTOTYPES(Action::ActionX)
ADD_PACK_PROTOTYPES(Action::AST)
ADD_PACK_PROTOTYPES(Action::ASTNode)
ADD_PACK_PROTOTYPES(Action::Condition)
ADD_PACK_PROTOTYPES(Action::Quantity)
ADD_PACK_PROTOTYPES(Connection)
ADD_PACK_PROTOTYPES(data::CellData)
ADD_PACK_PROTOTYPES(data::Connection)
ADD_PACK_PROTOTYPES(data::CurrentControl)
@ -422,59 +297,9 @@ ADD_PACK_PROTOTYPES(data::Segment)
ADD_PACK_PROTOTYPES(data::Solution)
ADD_PACK_PROTOTYPES(data::Well)
ADD_PACK_PROTOTYPES(data::WellRates)
ADD_PACK_PROTOTYPES(Deck)
ADD_PACK_PROTOTYPES(DeckItem)
ADD_PACK_PROTOTYPES(DeckKeyword)
ADD_PACK_PROTOTYPES(DeckRecord)
ADD_PACK_PROTOTYPES(Dimension)
ADD_PACK_PROTOTYPES(Events)
ADD_PACK_PROTOTYPES(GConSale)
ADD_PACK_PROTOTYPES(GConSale::GCONSALEGroup)
ADD_PACK_PROTOTYPES(GConSump)
ADD_PACK_PROTOTYPES(GConSump::GCONSUMPGroup)
ADD_PACK_PROTOTYPES(GuideRateConfig)
ADD_PACK_PROTOTYPES(GuideRateConfig::GroupTarget)
ADD_PACK_PROTOTYPES(GuideRateConfig::WellTarget)
ADD_PACK_PROTOTYPES(GuideRateModel)
ADD_PACK_PROTOTYPES(Group)
ADD_PACK_PROTOTYPES(Group::GroupInjectionProperties)
ADD_PACK_PROTOTYPES(Group::GroupProductionProperties)
ADD_PACK_PROTOTYPES(Location)
ADD_PACK_PROTOTYPES(MessageLimits)
ADD_PACK_PROTOTYPES(MLimits)
ADD_PACK_PROTOTYPES(OilVaporizationProperties)
ADD_PACK_PROTOTYPES(RestartConfig)
ADD_PACK_PROTOTYPES(RestartKey)
ADD_PACK_PROTOTYPES(RestartSchedule)
ADD_PACK_PROTOTYPES(RestartValue)
ADD_PACK_PROTOTYPES(RFTConfig)
ADD_PACK_PROTOTYPES(Segment)
ADD_PACK_PROTOTYPES(SpiralICD)
ADD_PACK_PROTOTYPES(std::string)
ADD_PACK_PROTOTYPES(TimeMap)
ADD_PACK_PROTOTYPES(TimeStampUTC)
ADD_PACK_PROTOTYPES(Tuning)
ADD_PACK_PROTOTYPES(UDAValue)
ADD_PACK_PROTOTYPES(UnitSystem)
ADD_PACK_PROTOTYPES(Valve)
ADD_PACK_PROTOTYPES(VFPInjTable)
ADD_PACK_PROTOTYPES(VFPProdTable)
ADD_PACK_PROTOTYPES(Well)
ADD_PACK_PROTOTYPES(WellType)
ADD_PACK_PROTOTYPES(Well::WellGuideRate)
ADD_PACK_PROTOTYPES(Well::WellInjectionProperties)
ADD_PACK_PROTOTYPES(Well::WellProductionProperties)
ADD_PACK_PROTOTYPES(WellBrineProperties)
ADD_PACK_PROTOTYPES(WellConnections)
ADD_PACK_PROTOTYPES(WellEconProductionLimits)
ADD_PACK_PROTOTYPES(WellFoamProperties)
ADD_PACK_PROTOTYPES(WellPolymerProperties)
ADD_PACK_PROTOTYPES(WellSegments)
ADD_PACK_PROTOTYPES(WellTestConfig)
ADD_PACK_PROTOTYPES(WellTestConfig::WTESTWell)
ADD_PACK_PROTOTYPES(WellTracerProperties)
ADD_PACK_PROTOTYPES(WList)
ADD_PACK_PROTOTYPES(WListManager)
} // end namespace Mpi


@ -0,0 +1,49 @@
/*
Copyright 2020 Equinor AS.
This file is part of the Open Porous Media project (OPM).
OPM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OPM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#include <config.h>
#include <opm/parser/eclipse/EclipseState/EclipseState.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/DynamicState.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Schedule.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Action/ASTNode.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/MSW/SpiralICD.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/MSW/Valve.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/UDQ/UDQActive.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/UDQ/UDQASTNode.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/UDQ/UDQConfig.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Well/WList.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Well/WListManager.hpp>
#include <opm/parser/eclipse/EclipseState/SummaryConfig/SummaryConfig.hpp>
#include <ebos/eclmpiserializer.hh>
#include <dune/common/parallel/mpihelper.hh>
namespace Opm {
void eclStateBroadcast(EclipseState& eclState, Schedule& schedule,
SummaryConfig& summaryConfig)
{
Opm::EclMpiSerializer ser(Dune::MPIHelper::getCollectiveCommunication());
ser.broadcast(eclState);
ser.broadcast(schedule);
ser.broadcast(summaryConfig);
}
}


@ -0,0 +1,38 @@
/*
Copyright 2020 Equinor AS.
This file is part of the Open Porous Media project (OPM).
OPM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OPM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef PARALLEL_SERIALIZATION_HPP
#define PARALLEL_SERIALIZATION_HPP
namespace Opm {
class EclipseState;
class Schedule;
class SummaryConfig;
/*! \brief Broadcasts an eclipse state from root node in parallel runs.
*! \param eclState EclipseState to broadcast
*! \param schedule Schedule to broadcast
*! \param summaryConfig SummaryConfig to broadcast
*/
void eclStateBroadcast(EclipseState& eclState, Schedule& schedule,
SummaryConfig& summaryConfig);
} // end namespace Opm
#endif // PARALLEL_SERIALIZATION_HPP
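
A typical call site, matching the replacement made in the two mains above; the guard is needed because eclStateBroadcast is only compiled when MPI is found:

#if HAVE_MPI
    // The root rank holds the fully parsed objects; the other ranks receive copies.
    Opm::eclStateBroadcast(*eclipseState, *schedule, *summaryConfig);
#endif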


@ -483,7 +483,8 @@ Opm::Action::ActionX getActionX()
}
Opm::AquiferCT getAquiferCT() {
Opm::AquiferCT getAquiferCT()
{
Opm::AquiferCT::AQUCT_data data;
data.aquiferID = 1;
data.inftableID = 2;
@ -504,7 +505,9 @@ Opm::AquiferCT getAquiferCT() {
return Opm::AquiferCT( { data } );
}
Opm::Aquifetp getAquifetp() {
Opm::Aquifetp getAquifetp()
{
Opm::Aquifetp::AQUFETP_data data;
data.aquiferID = 1;
@ -518,8 +521,8 @@ Opm::Aquifetp getAquifetp() {
}
Opm::Aquancon getAquancon() {
Opm::Aquancon getAquancon()
{
Opm::Aquancon::AquancCell cell(1, 100, std::make_pair(false, 0), 100, Opm::FaceDir::XPlus);
return Opm::Aquancon( std::unordered_map<int, std::vector<Opm::Aquancon::AquancCell>>{{1, {cell}}});
}
@ -866,7 +869,7 @@ BOOST_AUTO_TEST_CASE(RestartSchedule)
{
#if HAVE_MPI
Opm::RestartSchedule val1(1, 2, 3);
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(RestartSchedule)
#endif
}
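
The PackUnpack2 helper the tests now call is not shown in this diff. A plausible sketch, assuming it round-trips the value through EclMpiSerializer instead of the removed free pack()/unpack() overloads; the return type and the position bookkeeping are assumptions:

#include <tuple>

// Hypothetical helper for illustration only.
template<class T>
std::tuple<T, std::size_t, std::size_t> PackUnpack2(T& in)
{
    Opm::EclMpiSerializer ser(Dune::MPIHelper::getCollectiveCommunication());
    ser.pack(in);                            // PACKSIZE, then PACK into the internal buffer
    const std::size_t pos1 = ser.position();
    T out;
    ser.unpack(out);                         // UNPACK from the same buffer
    const std::size_t pos2 = ser.position();
    return std::make_tuple(out, pos1, pos2);
}
// DO_CHECKS would then compare the original against the unpacked copy
// and check that the two buffer positions agree.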
@ -877,7 +880,7 @@ BOOST_AUTO_TEST_CASE(TimeMap)
{
#if HAVE_MPI
Opm::TimeMap val1 = getTimeMap();
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(TimeMap)
#endif
}
@ -891,7 +894,7 @@ BOOST_AUTO_TEST_CASE(RestartConfig)
Opm::IOConfig io(true, false, true, false, false, true, "test1", true,
"test2", true, "test3", false);
Opm::RestartConfig val1(getTimeMap(), 1, true, rsched, rkw, {false, true});
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(RestartConfig)
#endif
}
@ -1274,12 +1277,12 @@ BOOST_AUTO_TEST_CASE(OilVaporizationProperties)
Opm::OilVaporizationProperties val1(VapType::VAPPARS,
1.0, 2.0, {5.0, 6.0},
{false, true}, {7.0, 8.0});
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(OilVaporizationProperties)
val1 = Opm::OilVaporizationProperties(VapType::DRDT,
1.0, 2.0, {5.0, 6.0},
{false, true}, {7.0, 8.0});
val2 = PackUnpack(val1);
val2 = PackUnpack2(val1);
DO_CHECKS(OilVaporizationProperties)
#endif
}
@ -1289,7 +1292,7 @@ BOOST_AUTO_TEST_CASE(Events)
{
#ifdef HAVE_MPI
Opm::Events val1(Opm::DynamicVector<uint64_t>({1,2,3,4,5}));
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(Events)
#endif
}
@ -1299,7 +1302,7 @@ BOOST_AUTO_TEST_CASE(MLimits)
{
#ifdef HAVE_MPI
Opm::MLimits val1{1,2,3,4,5,6,7,8,9,10,11,12};
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(MLimits)
#endif
}
@ -1310,7 +1313,7 @@ BOOST_AUTO_TEST_CASE(MessageLimits)
#ifdef HAVE_MPI
std::vector<Opm::MLimits> limits{Opm::MLimits{1,2,3,4,5,6,7,8,9,10,11,12}};
Opm::MessageLimits val1(Opm::DynamicState<Opm::MLimits>(limits,2));
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(MessageLimits)
#endif
}
@ -1320,7 +1323,7 @@ BOOST_AUTO_TEST_CASE(VFPInjTable)
{
#ifdef HAVE_MPI
Opm::VFPInjTable val1 = getVFPInjTable();
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(VFPInjTable)
#endif
}
@ -1330,7 +1333,7 @@ BOOST_AUTO_TEST_CASE(VFPProdTable)
{
#ifdef HAVE_MPI
Opm::VFPProdTable val1 = getVFPProdTable();
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(VFPProdTable)
#endif
}
@ -1341,7 +1344,7 @@ BOOST_AUTO_TEST_CASE(WTESTWell)
#ifdef HAVE_MPI
Opm::WellTestConfig::WTESTWell val1{"test", Opm::WellTestConfig::ECONOMIC,
1.0, 2, 3.0, 4};
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(WellTestConfig::WTESTWell)
#endif
}
@ -1353,7 +1356,7 @@ BOOST_AUTO_TEST_CASE(WellTestConfig)
Opm::WellTestConfig::WTESTWell tw{"test", Opm::WellTestConfig::ECONOMIC,
1.0, 2, 3.0, 4};
Opm::WellTestConfig val1({tw, tw, tw});
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(WellTestConfig)
#endif
}
@ -1363,7 +1366,7 @@ BOOST_AUTO_TEST_CASE(WellPolymerProperties)
{
#ifdef HAVE_MPI
Opm::WellPolymerProperties val1{1.0, 2.0, 3, 4, 5};
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(WellPolymerProperties)
#endif
}
@ -1373,7 +1376,7 @@ BOOST_AUTO_TEST_CASE(WellFoamProperties)
{
#ifdef HAVE_MPI
Opm::WellFoamProperties val1{1.0};
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(WellFoamProperties)
#endif
}
@ -1383,7 +1386,7 @@ BOOST_AUTO_TEST_CASE(WellTracerProperties)
{
#ifdef HAVE_MPI
Opm::WellTracerProperties val1({{"test", 1.0}, {"test2", 2.0}});
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(WellTracerProperties)
#endif
}
@ -1393,10 +1396,10 @@ BOOST_AUTO_TEST_CASE(UDAValue)
{
#ifdef HAVE_MPI
Opm::UDAValue val1("test");
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(UDAValue)
val1 = Opm::UDAValue(1.0);
val2 = PackUnpack(val1);
val2 = PackUnpack2(val1);
DO_CHECKS(UDAValue)
#endif
}
@ -1411,7 +1414,7 @@ BOOST_AUTO_TEST_CASE(Connection)
{9, 10, 11}, Opm::Connection::CTFKind::Defaulted,
12, 13.0, 14.0, true,
15, 16, 17.0);
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(Connection)
#endif
}
@ -1432,7 +1435,7 @@ BOOST_AUTO_TEST_CASE(WellInjectionProperties)
8,
Opm::InjectorType::OIL,
Opm::Well::InjectorCMode::BHP);
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(Well::WellInjectionProperties)
#endif
}
@ -1448,7 +1451,7 @@ BOOST_AUTO_TEST_CASE(WellEconProductionLimits)
6.0,
Opm::WellEconProductionLimits::EconWorkover::WELL,
7.0, 8.0, 9.0, 10.0);
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(WellEconProductionLimits)
#endif
}
@ -1458,7 +1461,7 @@ BOOST_AUTO_TEST_CASE(WellGuideRate)
{
#ifdef HAVE_MPI
Opm::Well::WellGuideRate val1{true, 1.0, Opm::Well::GuideRateTarget::COMB, 2.0};
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(Well::WellGuideRate)
#endif
}
@ -1474,7 +1477,7 @@ BOOST_AUTO_TEST_CASE(WellConnections)
12, 13.0, 14.0, true,
15, 16, 17.0);
Opm::WellConnections val1(Opm::Connection::Order::TRACK, 1, 2, {conn, conn});
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(WellConnections)
#endif
}
@ -1498,7 +1501,7 @@ BOOST_AUTO_TEST_CASE(WellProductionProperties)
true,
Opm::Well::ProducerCMode::CRAT,
Opm::Well::ProducerCMode::BHP, 11);
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(Well::WellProductionProperties)
#endif
}
@ -1509,7 +1512,7 @@ BOOST_AUTO_TEST_CASE(SpiralICD)
#ifdef HAVE_MPI
Opm::SpiralICD val1(1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8, 9.0,
Opm::ICDStatus::OPEN, 10.0);
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(SpiralICD)
#endif
}
@ -1519,7 +1522,7 @@ BOOST_AUTO_TEST_CASE(Valve)
{
#ifdef HAVE_MPI
Opm::Valve val1(1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, Opm::ICDStatus::OPEN);
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(Valve)
#endif
}
@ -1532,7 +1535,7 @@ BOOST_AUTO_TEST_CASE(Segment)
Opm::Segment::SegmentType::SICD,
std::make_shared<Opm::SpiralICD>(),
std::make_shared<Opm::Valve>());
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(Segment)
#endif
}
@ -1542,7 +1545,7 @@ BOOST_AUTO_TEST_CASE(Dimension)
{
#ifdef HAVE_MPI
Opm::Dimension val1(1.0, 2.0);
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(Dimension)
#endif
}
@ -1552,7 +1555,7 @@ BOOST_AUTO_TEST_CASE(UnitSystem)
{
#ifdef HAVE_MPI
Opm::UnitSystem val1(Opm::UnitSystem::UnitType::UNIT_TYPE_METRIC);
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(UnitSystem)
#endif
}
@ -1568,7 +1571,7 @@ BOOST_AUTO_TEST_CASE(WellSegments)
Opm::WellSegments val1(Opm::WellSegments::CompPressureDrop::HF_,
{seg, seg});
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(WellSegments)
#endif
}
@ -1578,7 +1581,7 @@ BOOST_AUTO_TEST_CASE(Well)
{
#ifdef HAVE_MPI
Opm::Well val1 = getFullWell();
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(Well)
#endif
}
@ -1595,7 +1598,7 @@ BOOST_AUTO_TEST_CASE(GroupInjectionProperties)
Opm::UDAValue(4.0),
"test1", "test2", 5};
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(Group::GroupInjectionProperties)
#endif
}
@ -1613,7 +1616,7 @@ BOOST_AUTO_TEST_CASE(GroupProductionProperties)
5.0, Opm::Group::GuideRateTarget::COMB,
6.0, 7};
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(Group::GroupProductionProperties)
#endif
}
@ -1633,7 +1636,7 @@ BOOST_AUTO_TEST_CASE(Group)
injection,
Opm::Group::GroupProductionProperties());
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(Group)
#endif
}
@ -1643,7 +1646,7 @@ BOOST_AUTO_TEST_CASE(WList)
{
#ifdef HAVE_MPI
Opm::WList val1({"test1", "test2", "test3"});
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(WList)
#endif
}
@ -1655,7 +1658,7 @@ BOOST_AUTO_TEST_CASE(WListManager)
Opm::WList wl({"test1", "test2", "test3"});
std::map<std::string,Opm::WList> data{{"test", wl}, {"test2", wl}};
Opm::WListManager val1(data);
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(WListManager)
#endif
}
@ -1808,7 +1811,7 @@ BOOST_AUTO_TEST_CASE(GuideRateModel)
{
#ifdef HAVE_MPI
Opm::GuideRateModel val1 = getGuideRateModel();
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(GuideRateModel)
#endif
}
@ -1818,7 +1821,7 @@ BOOST_AUTO_TEST_CASE(GuideRateConfigGroup)
{
#ifdef HAVE_MPI
Opm::GuideRateConfig::GroupTarget val1 = getGuideRateConfigGroup();
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(GuideRateConfig::GroupTarget)
#endif
}
@ -1828,7 +1831,7 @@ BOOST_AUTO_TEST_CASE(GuideRateConfigWell)
{
#ifdef HAVE_MPI
Opm::GuideRateConfig::WellTarget val1 = getGuideRateConfigWell();
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(GuideRateConfig::WellTarget)
#endif
}
@ -1841,7 +1844,7 @@ BOOST_AUTO_TEST_CASE(GuideRateConfig)
Opm::GuideRateConfig val1(model,
{{"test1", getGuideRateConfigWell()}},
{{"test2", getGuideRateConfigGroup()}});
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(GuideRateConfig)
#endif
}
@ -1855,7 +1858,7 @@ BOOST_AUTO_TEST_CASE(GConSaleGroup)
Opm::UDAValue(3.0),
Opm::GConSale::MaxProcedure::PLUG,
4.0, Opm::UnitSystem()};
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(GConSale::GCONSALEGroup)
#endif
}
@ -1870,7 +1873,7 @@ BOOST_AUTO_TEST_CASE(GConSale)
Opm::GConSale::MaxProcedure::PLUG,
4.0, Opm::UnitSystem()};
Opm::GConSale val1({{"test1", group}, {"test2", group}});
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(GConSale)
#endif
}
@ -1883,7 +1886,7 @@ BOOST_AUTO_TEST_CASE(GConSumpGroup)
Opm::UDAValue(2.0),
"test",
3.0, Opm::UnitSystem()};
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(GConSump::GCONSUMPGroup)
#endif
}
@ -1897,7 +1900,7 @@ BOOST_AUTO_TEST_CASE(GConSump)
"test",
3.0, Opm::UnitSystem()};
Opm::GConSump val1({{"test1", group}, {"test2", group}});
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(GConSump)
#endif
}
@ -1913,7 +1916,7 @@ BOOST_AUTO_TEST_CASE(RFTConfig)
{{"test3", 2}},
{{"test1", {{{Opm::RFTConfig::RFT::TIMESTEP, 3}}, 4}}},
{{"test2", {{{Opm::RFTConfig::PLT::REPT, 5}}, 6}}});
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(RFTConfig)
#endif
}
@ -1929,7 +1932,7 @@ BOOST_AUTO_TEST_CASE(DeckItem)
{Opm::Dimension(7.0, 8.0)},
{Opm::Dimension(10.0, 11.0)});
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(DeckItem)
#endif
}
@ -1939,7 +1942,7 @@ BOOST_AUTO_TEST_CASE(DeckRecord)
{
#ifdef HAVE_MPI
Opm::DeckRecord val1 = getDeckRecord();
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(DeckRecord)
#endif
}
@ -1949,7 +1952,7 @@ BOOST_AUTO_TEST_CASE(Location)
{
#ifdef HAVE_MPI
Opm::Location val1{"test", 1};
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(Location)
#endif
}
@ -1960,7 +1963,7 @@ BOOST_AUTO_TEST_CASE(DeckKeyword)
#ifdef HAVE_MPI
Opm::DeckKeyword val1("test", {"test",1},
{getDeckRecord(), getDeckRecord()}, true, false);
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(DeckKeyword)
#endif
}
@ -1974,7 +1977,7 @@ BOOST_AUTO_TEST_CASE(Deck)
{getDeckRecord(), getDeckRecord()}, true, false)},
Opm::UnitSystem(), unitSys.get(),
"test2", "test3", 2);
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(Deck)
#endif
}
@ -1984,7 +1987,7 @@ BOOST_AUTO_TEST_CASE(Tuning)
{
#ifdef HAVE_MPI
Opm::Tuning val1 = getTuning();
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(Tuning)
#endif
}
@ -1995,7 +1998,7 @@ BOOST_AUTO_TEST_CASE(ASTNode)
#ifdef HAVE_MPI
Opm::Action::ASTNode child(number, FuncType::field, "test3", {"test2"}, 2.0, {});
Opm::Action::ASTNode val1(number, FuncType::field, "test1", {"test2"}, 1.0, {child});
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(Action::ASTNode)
#endif
}
@ -2008,7 +2011,7 @@ BOOST_AUTO_TEST_CASE(AST)
node.reset(new Opm::Action::ASTNode(number, FuncType::field,
"test1", {"test2"}, 1.0, {}));
Opm::Action::AST val1(node);
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(Action::AST)
#endif
}
@ -2020,7 +2023,7 @@ BOOST_AUTO_TEST_CASE(Quantity)
Opm::Action::Quantity val1;
val1.quantity = "test1";
val1.args = {"test2", "test3"};
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(Action::Quantity)
#endif
}
@ -2030,7 +2033,7 @@ BOOST_AUTO_TEST_CASE(Condition)
{
#ifdef HAVE_MPI
Opm::Action::Condition val1 = getCondition();
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(Action::Condition)
#endif
}
@ -2040,7 +2043,7 @@ BOOST_AUTO_TEST_CASE(ActionX)
{
#ifdef HAVE_MPI
Opm::Action::ActionX val1 = getActionX();
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(Action::ActionX)
#endif
}
@ -2050,7 +2053,7 @@ BOOST_AUTO_TEST_CASE(Actions)
{
#ifdef HAVE_MPI
Opm::Action::Actions val1({getActionX()});
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(Action::Actions)
#endif
}
@ -2223,7 +2226,7 @@ BOOST_AUTO_TEST_CASE(WellBrineProperties)
{
#ifdef HAVE_MPI
Opm::WellBrineProperties val1{1.0};
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(WellBrineProperties)
#endif
}
@ -2320,7 +2323,7 @@ BOOST_AUTO_TEST_CASE(WellType)
{
#ifdef HAVE_MPI
Opm::WellType val1(true, Opm::Phase::OIL);
auto val2 = PackUnpack(val1);
auto val2 = PackUnpack2(val1);
DO_CHECKS(WellType)
#endif
}