Merge pull request #2260 from akva2/noecl_flush

More MPI serialization support
Arne Morten Kvarving, 2020-01-03 08:14:22 +01:00 (committed by GitHub)
commit 3a893fb149
3 changed files with 570 additions and 0 deletions
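
This commit extends the parallel restart serialization layer: it adds packSize/pack/unpack overloads for std::array and for a further batch of Schedule-related types (GuideRateModel, GuideRateConfig with its WellTarget/GroupTarget structs, GConSale, GConSump, RFTConfig, DeckItem, DeckRecord), together with round-trip unit tests. Each test measures the packed size, packs, unpacks, and checks that the resulting buffer positions and values agree. As a rough sketch of that round trip, assuming the Opm::Mpi namespace used by this layer and a PackUnpack helper shaped like the one the test assertions imply (the actual harness may differ):

#include <tuple>
#include <vector>
#include <dune/common/parallel/mpihelper.hh>

// Sketch only: PackUnpack is inferred from the test assertions below,
// not code from this commit; Opm::Mpi is assumed to be the namespace
// holding the packSize/pack/unpack overloads.
template<class T>
std::tuple<T, int, int> PackUnpack(const T& in)
{
    auto comm = Dune::MPIHelper::getCommunicator();
    std::vector<char> buffer(Opm::Mpi::packSize(in, comm)); // measure
    int pos1 = 0;
    Opm::Mpi::pack(in, buffer, pos1, comm);                 // serialize
    T out;
    int pos2 = 0;
    Opm::Mpi::unpack(out, buffer, pos2, comm);              // deserialize
    return std::make_tuple(out, pos1, pos2); // the tests check pos1 == pos2
}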

View File

@@ -31,10 +31,12 @@
#include <opm/parser/eclipse/EclipseState/IOConfig/RestartConfig.hpp>
#include <opm/parser/eclipse/EclipseState/Edit/EDITNNC.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Events.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Group/GuideRateConfig.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/MessageLimits.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/MSW/SpiralICD.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/MSW/Valve.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/OilVaporizationProperties.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/RFTConfig.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/TimeMap.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/UDQ/UDQASTNode.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/UDQ/UDQConfig.hpp>
@@ -271,6 +273,12 @@ std::size_t packSize(const std::unordered_map<T1,T2,H,P,A>& data, Dune::MPIHelpe
return totalSize;
}
template<class T, std::size_t N>
std::size_t packSize(const std::array<T,N>& data, Dune::MPIHelper::MPICommunicator comm)
{
// Assumes every element packs to the same size as data[0].
return N*packSize(data[0], comm);
}
HANDLE_AS_POD(Actdims)
HANDLE_AS_POD(Aqudims)
HANDLE_AS_POD(data::Connection)
@@ -281,6 +289,8 @@ HANDLE_AS_POD(EclHysterConfig)
HANDLE_AS_POD(Eqldims)
HANDLE_AS_POD(EquilRecord)
HANDLE_AS_POD(FoamData)
HANDLE_AS_POD(GuideRateConfig::GroupTarget)
HANDLE_AS_POD(GuideRateConfig::WellTarget)
HANDLE_AS_POD(JFunc)
HANDLE_AS_POD(MLimits)
HANDLE_AS_POD(PVTWRecord)
@@ -1384,6 +1394,92 @@ std::size_t packSize(const UDQActive& data,
packSize(data.getWgKeys(), comm);
}
std::size_t packSize(const GuideRateModel& data,
Dune::MPIHelper::MPICommunicator comm)
{
return packSize(data.timeInterval(), comm) +
packSize(data.target(), comm) +
packSize(data.coefs(), comm) +
packSize(data.allow_increase(), comm) +
packSize(data.damping_factor(), comm) +
packSize(data.free_gas(), comm) +
packSize(data.defaultModel(), comm) +
packSize(data.udaCoefs(), comm);
}
std::size_t packSize(const GuideRateConfig& data,
Dune::MPIHelper::MPICommunicator comm)
{
return packSize(data.getModel(), comm) +
packSize(data.getWells(), comm) +
packSize(data.getGroups(), comm);
}
std::size_t packSize(const GConSale::GCONSALEGroup& data,
Dune::MPIHelper::MPICommunicator comm)
{
return packSize(data.sales_target, comm) +
packSize(data.max_sales_rate, comm) +
packSize(data.min_sales_rate, comm) +
packSize(data.max_proc, comm) +
packSize(data.udq_undefined, comm) +
packSize(data.unit_system, comm);
}
std::size_t packSize(const GConSale& data,
Dune::MPIHelper::MPICommunicator comm)
{
return packSize(data.getGroups(), comm);
}
std::size_t packSize(const GConSump::GCONSUMPGroup& data,
Dune::MPIHelper::MPICommunicator comm)
{
return packSize(data.consumption_rate, comm) +
packSize(data.import_rate, comm) +
packSize(data.network_node, comm) +
packSize(data.udq_undefined, comm) +
packSize(data.unit_system, comm);
}
std::size_t packSize(const GConSump& data,
Dune::MPIHelper::MPICommunicator comm)
{
return packSize(data.getGroups(), comm);
}
std::size_t packSize(const RFTConfig& data,
Dune::MPIHelper::MPICommunicator comm)
{
return packSize(data.timeMap(), comm) +
packSize(data.wellOpenRftTime(), comm) +
packSize(data.wellOpenRftName(), comm) +
packSize(data.wellOpen(), comm) +
packSize(data.rftConfig(), comm) +
packSize(data.pltConfig(), comm);
}
std::size_t packSize(const DeckItem& data,
Dune::MPIHelper::MPICommunicator comm)
{
return packSize(data.dVal(), comm) +
packSize(data.iVal(), comm) +
packSize(data.sVal(), comm) +
packSize(data.uVal(), comm) +
packSize(data.getType(), comm) +
packSize(data.name(), comm) +
packSize(data.valueStatus(), comm) +
packSize(data.rawData(), comm) +
packSize(data.activeDimensions(), comm) +
packSize(data.defaultDimensions(), comm);
}
std::size_t packSize(const DeckRecord& data,
Dune::MPIHelper::MPICommunicator comm)
{
return packSize(data.getItems(), comm);
}
/// pack routines
template<class T>
@@ -1486,6 +1582,14 @@ void pack(const std::unordered_set<T,H,KE,A>& data,
}
}
template<class T, std::size_t N>
void pack(const std::array<T,N>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
for (const T& entry : data)
pack(entry, buffer, position, comm);
}
template<class A>
void pack(const std::vector<bool,A>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
@@ -2789,6 +2893,101 @@ void pack(const UDQActive& data,
pack(data.getWgKeys(), buffer, position, comm);
}
void pack(const GuideRateModel& data,
std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(data.timeInterval(), buffer, position, comm);
pack(data.target(), buffer, position, comm);
pack(data.coefs(), buffer, position, comm);
pack(data.allow_increase(), buffer, position, comm);
pack(data.damping_factor(), buffer, position, comm);
pack(data.free_gas(), buffer, position, comm);
pack(data.defaultModel(), buffer, position, comm);
pack(data.udaCoefs(), buffer, position, comm);
}
void pack(const GuideRateConfig& data,
std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(data.getModel(), buffer, position, comm);
pack(data.getWells(), buffer, position, comm);
pack(data.getGroups(), buffer, position, comm);
}
void pack(const GConSale::GCONSALEGroup& data,
std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(data.sales_target, buffer, position, comm);
pack(data.max_sales_rate, buffer, position, comm);
pack(data.min_sales_rate, buffer, position, comm);
pack(data.max_proc, buffer, position, comm);
pack(data.udq_undefined, buffer, position, comm);
pack(data.unit_system, buffer, position, comm);
}
void pack(const GConSale& data,
std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(data.getGroups(), buffer, position, comm);
}
void pack(const GConSump::GCONSUMPGroup& data,
std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(data.consumption_rate, buffer, position, comm);
pack(data.import_rate, buffer, position, comm);
pack(data.network_node, buffer, position, comm);
pack(data.udq_undefined, buffer, position, comm);
pack(data.unit_system, buffer, position, comm);
}
void pack(const GConSump& data,
std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(data.getGroups(), buffer, position, comm);
}
void pack(const RFTConfig& data,
std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(data.timeMap(), buffer, position, comm);
pack(data.wellOpenRftTime(), buffer, position, comm);
pack(data.wellOpenRftName(), buffer, position, comm);
pack(data.wellOpen(), buffer, position, comm);
pack(data.rftConfig(), buffer, position, comm);
pack(data.pltConfig(), buffer, position, comm);
}
void pack(const DeckItem& data,
std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(data.dVal(), buffer, position, comm);
pack(data.iVal(), buffer, position, comm);
pack(data.sVal(), buffer, position, comm);
pack(data.uVal(), buffer, position, comm);
pack(data.getType(), buffer, position, comm);
pack(data.name(), buffer, position, comm);
pack(data.valueStatus(), buffer, position, comm);
pack(data.rawData(), buffer, position, comm);
pack(data.activeDimensions(), buffer, position, comm);
pack(data.defaultDimensions(), buffer, position, comm);
}
void pack(const DeckRecord& data,
std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(data.getItems(), buffer, position, comm);
}
/// unpack routines
template<class T>
@@ -2931,6 +3130,14 @@ void unpack(std::unordered_set<T,H,KE,A>& data,
}
}
template<class T, std::size_t N>
void unpack(std::array<T,N>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
for (T& entry : data)
unpack(entry, buffer, position, comm);
}
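
Unlike the std::vector overloads, these std::array overloads serialize no length field: N is part of the type, so sender and receiver agree on it at compile time and pack/unpack simply walk the elements. A short usage sketch under the same assumptions as the PackUnpack sketch above:

#include <array>
#include <vector>
#include <dune/common/parallel/mpihelper.hh>

// Illustrative only; the Opm::Mpi namespace is assumed as above.
void roundTripArray()
{
    std::array<double, 3> in{{1.0, 2.0, 3.0}};
    auto comm = Dune::MPIHelper::getCommunicator();
    std::vector<char> buffer(Opm::Mpi::packSize(in, comm));
    int pos = 0;
    Opm::Mpi::pack(in, buffer, pos, comm);    // element-wise, no length prefix
    std::array<double, 3> out;
    pos = 0;
    Opm::Mpi::unpack(out, buffer, pos, comm); // out == in afterwards
}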
template<class Key, class Value>
void unpack(OrderedMap<Key,Value>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
@@ -4765,6 +4972,143 @@ void unpack(UDQActive& data,
data = UDQActive(inputRecords, outputRecords, udqKeys, wgKeys);
}
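
Two unpack conventions follow: types that are immutable once constructed (GuideRateModel, GuideRateConfig, GConSale, GConSump, RFTConfig, DeckItem, DeckRecord) are unpacked into locals and rebuilt through a constructor, whereas plain aggregates (GCONSALEGroup, GCONSUMPGroup) are unpacked field-by-field in place.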
void unpack(GuideRateModel& data,
std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
double timeInterval;
GuideRateModel::Target target;
std::array<double,6> coefs;
bool allow_increase, free_gas, defaultModel;
double damping_factor;
std::array<UDAValue,3> udaCoefs;
unpack(timeInterval, buffer, position, comm);
unpack(target, buffer, position, comm);
unpack(coefs, buffer, position, comm);
unpack(allow_increase, buffer, position, comm);
unpack(damping_factor, buffer, position, comm);
unpack(free_gas, buffer, position, comm);
unpack(defaultModel, buffer, position, comm);
unpack(udaCoefs, buffer, position, comm);
data = GuideRateModel(timeInterval, target, coefs, allow_increase,
damping_factor, free_gas, defaultModel, udaCoefs);
}
void unpack(GuideRateConfig& data,
std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
std::shared_ptr<GuideRateModel> model;
std::unordered_map<std::string, GuideRateConfig::WellTarget> wells;
std::unordered_map<std::string, GuideRateConfig::GroupTarget> groups;
unpack(model, buffer, position, comm);
unpack(wells, buffer, position, comm);
unpack(groups, buffer, position, comm);
data = GuideRateConfig(model, wells, groups);
}
void unpack(GConSale::GCONSALEGroup& data,
std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
unpack(data.sales_target, buffer, position, comm);
unpack(data.max_sales_rate, buffer, position, comm);
unpack(data.min_sales_rate, buffer, position, comm);
unpack(data.max_proc, buffer, position, comm);
unpack(data.udq_undefined, buffer, position, comm);
unpack(data.unit_system, buffer, position, comm);
}
void unpack(GConSale& data,
std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
std::map<std::string,GConSale::GCONSALEGroup> groups;
unpack(groups, buffer, position, comm);
data = GConSale(groups);
}
void unpack(GConSump::GCONSUMPGroup& data,
std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
unpack(data.consumption_rate, buffer, position, comm);
unpack(data.import_rate, buffer, position, comm);
unpack(data.network_node, buffer, position, comm);
unpack(data.udq_undefined, buffer, position, comm);
unpack(data.unit_system, buffer, position, comm);
}
void unpack(GConSump& data,
std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
std::map<std::string,GConSump::GCONSUMPGroup> groups;
unpack(groups, buffer, position, comm);
data = GConSump(groups);
}
void unpack(RFTConfig& data,
std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
TimeMap timeMap;
std::pair<bool, std::size_t> wellOpenRftTime;
std::unordered_set<std::string> wellOpenRftName;
std::unordered_map<std::string, std::size_t> wellOpen;
RFTConfig::RFTMap rftConfig;
RFTConfig::PLTMap pltConfig;
unpack(timeMap, buffer, position, comm);
unpack(wellOpenRftTime, buffer, position, comm);
unpack(wellOpenRftName, buffer, position, comm);
unpack(wellOpen, buffer, position, comm);
unpack(rftConfig, buffer, position, comm);
unpack(pltConfig, buffer, position, comm);
data = RFTConfig(timeMap, wellOpenRftTime, wellOpenRftName,
wellOpen, rftConfig, pltConfig);
}
void unpack(DeckItem& data,
std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
std::vector<double> dVal;
std::vector<int> iVal;
std::vector<std::string> sVal;
std::vector<UDAValue> uVal;
type_tag type;
std::string name;
std::vector<value::status> valueStatus;
bool rawData;
std::vector<Dimension> activeDimensions, defaultDimensions;
unpack(dVal, buffer, position, comm);
unpack(iVal, buffer, position, comm);
unpack(sVal, buffer, position, comm);
unpack(uVal, buffer, position, comm);
unpack(type, buffer, position, comm);
unpack(name, buffer, position, comm);
unpack(valueStatus, buffer, position, comm);
unpack(rawData, buffer, position, comm);
unpack(activeDimensions, buffer, position, comm);
unpack(defaultDimensions, buffer, position, comm);
data = DeckItem(dVal, iVal, sVal, uVal, type, name,
valueStatus, rawData, activeDimensions, defaultDimensions);
}
void unpack(DeckRecord& data,
std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
std::vector<DeckItem> items;
unpack(items, buffer, position, comm);
data = DeckRecord(std::move(items));
}
#define INSTANTIATE_PACK_VECTOR(T) \
template std::size_t packSize(const std::vector<T>& data, \
Dune::MPIHelper::MPICommunicator comm); \

View File

@@ -41,7 +41,10 @@
#include <opm/output/eclipse/Summary.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/DynamicState.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/DynamicVector.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Group/GConSale.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Group/GConSump.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Group/Group.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Group/GuideRateConfig.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/TimeMap.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/UDQ/UDQAssign.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/UDQ/UDQActive.hpp>
@@ -63,6 +66,8 @@ class Actdims;
class Aqudims;
class ColumnSchema;
class Connection;
class DeckItem;
class DeckRecord;
class DENSITYRecord;
class DensityTable;
class Dimension;
@@ -96,6 +101,7 @@ class PvtwTable;
class Regdims;
class RestartConfig;
class RestartSchedule;
class RFTConfig;
class ROCKRecord;
class RockTable;
class Rock2dTable;
@@ -184,6 +190,9 @@ template<class T>
std::size_t packSize(const std::shared_ptr<T>& data,
Dune::MPIHelper::MPICommunicator comm);
template<class T, std::size_t N>
std::size_t packSize(const std::array<T,N>& data, Dune::MPIHelper::MPICommunicator comm);
std::size_t packSize(const char* str, Dune::MPIHelper::MPICommunicator comm);
std::size_t packSize(const std::string& str, Dune::MPIHelper::MPICommunicator comm);
@@ -313,6 +322,10 @@ template<class T>
void pack(const std::shared_ptr<T>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T, std::size_t N>
void pack(const std::array<T,N>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T1, class T2, class C, class A>
void pack(const std::map<T1,T2,C,A>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
@@ -463,6 +476,10 @@ template<class T>
void unpack(std::shared_ptr<T>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T, std::size_t N>
void unpack(std::array<T,N>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T1, class T2, class C, class A>
void unpack(std::map<T1,T2,C,A>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
@@ -577,6 +594,8 @@ ADD_PACK_PROTOTYPES(data::Segment)
ADD_PACK_PROTOTYPES(data::Solution)
ADD_PACK_PROTOTYPES(data::Well)
ADD_PACK_PROTOTYPES(data::WellRates)
ADD_PACK_PROTOTYPES(DeckItem)
ADD_PACK_PROTOTYPES(DeckRecord)
ADD_PACK_PROTOTYPES(DENSITYRecord)
ADD_PACK_PROTOTYPES(DensityTable)
ADD_PACK_PROTOTYPES(Dimension)
@@ -589,6 +608,14 @@ ADD_PACK_PROTOTYPES(EquilRecord)
ADD_PACK_PROTOTYPES(Events)
ADD_PACK_PROTOTYPES(FoamConfig)
ADD_PACK_PROTOTYPES(FoamData)
ADD_PACK_PROTOTYPES(GConSale)
ADD_PACK_PROTOTYPES(GConSale::GCONSALEGroup)
ADD_PACK_PROTOTYPES(GConSump)
ADD_PACK_PROTOTYPES(GConSump::GCONSUMPGroup)
ADD_PACK_PROTOTYPES(GuideRateConfig)
ADD_PACK_PROTOTYPES(GuideRateConfig::GroupTarget)
ADD_PACK_PROTOTYPES(GuideRateConfig::WellTarget)
ADD_PACK_PROTOTYPES(GuideRateModel)
ADD_PACK_PROTOTYPES(Group)
ADD_PACK_PROTOTYPES(Group::GroupInjectionProperties)
ADD_PACK_PROTOTYPES(Group::GroupProductionProperties)
@@ -614,6 +641,7 @@ ADD_PACK_PROTOTYPES(RestartConfig)
ADD_PACK_PROTOTYPES(RestartKey)
ADD_PACK_PROTOTYPES(RestartSchedule)
ADD_PACK_PROTOTYPES(RestartValue)
ADD_PACK_PROTOTYPES(RFTConfig)
ADD_PACK_PROTOTYPES(ROCKRecord)
ADD_PACK_PROTOTYPES(RockTable)
ADD_PACK_PROTOTYPES(Rock2dTable)
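
For reference, the ADD_PACK_PROTOTYPES macro itself is defined outside this excerpt; judging from the signatures in the implementation file, each invocation presumably declares the full packSize/pack/unpack triple, roughly:

// Assumed expansion (the real macro is not part of this diff):
#define ADD_PACK_PROTOTYPES(T) \
    std::size_t packSize(const T& data, \
                         Dune::MPIHelper::MPICommunicator comm); \
    void pack(const T& data, std::vector<char>& buffer, int& position, \
              Dune::MPIHelper::MPICommunicator comm); \
    void unpack(T& data, std::vector<char>& buffer, int& position, \
                Dune::MPIHelper::MPICommunicator comm);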

View File

@@ -28,6 +28,7 @@
#include <opm/material/fluidsystems/blackoilpvt/DryGasPvt.hpp>
#include <opm/material/fluidsystems/blackoilpvt/SolventPvt.hpp>
#include <opm/material/fluidsystems/blackoilpvt/WetGasPvt.hpp>
#include <opm/parser/eclipse/Deck/DeckItem.hpp>
#include <opm/parser/eclipse/EclipseState/Runspec.hpp>
#include <opm/parser/eclipse/EclipseState/Edit/EDITNNC.hpp>
#include <opm/parser/eclipse/EclipseState/Grid/NNC.hpp>
@@ -37,11 +38,14 @@
#include <opm/parser/eclipse/EclipseState/IOConfig/IOConfig.hpp>
#include <opm/parser/eclipse/EclipseState/IOConfig/RestartConfig.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Events.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Group/GConSale.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Group/Group.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Group/GuideRateModel.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/MessageLimits.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/MSW/SpiralICD.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/MSW/Valve.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/OilVaporizationProperties.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/RFTConfig.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/TimeMap.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/UDQ/UDQActive.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/UDQ/UDQAssign.hpp>
@@ -335,9 +339,52 @@ Opm::UDQConfig getUDQConfig()
omap,
{{Opm::UDQVarType::SCALAR, 5}, {Opm::UDQVarType::WELL_VAR, 6}});
}
Opm::GuideRateModel getGuideRateModel()
{
return Opm::GuideRateModel(1.0, Opm::GuideRateModel::Target::WAT,
{2.0, 3.0, 4.0, 5.0, 6.0, 7.0},
true, 8.0, false, false,
{Opm::UDAValue(9.0),
Opm::UDAValue(10.0),
Opm::UDAValue(11.0)});
}
#endif
Opm::GuideRateConfig::GroupTarget getGuideRateConfigGroup()
{
return Opm::GuideRateConfig::GroupTarget{1.0, Opm::Group::GuideRateTarget::COMB};
}
Opm::GuideRateConfig::WellTarget getGuideRateConfigWell()
{
return Opm::GuideRateConfig::WellTarget{1.0, Opm::Well::GuideRateTarget::COMB, 2.0};
}
Opm::DeckRecord getDeckRecord()
{
Opm::DeckItem item1({1.0}, {2}, {"test3"}, {Opm::UDAValue(4)},
Opm::type_tag::string, "test5",
{Opm::value::status::deck_value},
true,
{Opm::Dimension("DimensionLess", 7.0, 8.0)},
{Opm::Dimension("Metric", 10.0, 11.0)});
Opm::DeckItem item2({1.0}, {2}, {"test3"}, {Opm::UDAValue(4)},
Opm::type_tag::string, "test6",
{Opm::value::status::deck_value},
true,
{Opm::Dimension("DimensionLess", 7.0, 8.0)},
{Opm::Dimension("Metric", 10.0, 11.0)});
return Opm::DeckRecord({item1, item2});
}
}
@@ -1831,6 +1878,157 @@ BOOST_AUTO_TEST_CASE(UDQActive)
}
BOOST_AUTO_TEST_CASE(GuideRateModel)
{
#ifdef HAVE_MPI
Opm::GuideRateModel val1 = getGuideRateModel();
auto val2 = PackUnpack(val1);
BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
BOOST_CHECK(val1 == std::get<0>(val2));
#endif
}
BOOST_AUTO_TEST_CASE(GuideRateConfigGroup)
{
#ifdef HAVE_MPI
Opm::GuideRateConfig::GroupTarget val1 = getGuideRateConfigGroup();
auto val2 = PackUnpack(val1);
BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
BOOST_CHECK(val1 == std::get<0>(val2));
#endif
}
BOOST_AUTO_TEST_CASE(GuideRateConfigWell)
{
#ifdef HAVE_MPI
Opm::GuideRateConfig::WellTarget val1 = getGuideRateConfigWell();
auto val2 = PackUnpack(val1);
BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
BOOST_CHECK(val1 == std::get<0>(val2));
#endif
}
BOOST_AUTO_TEST_CASE(GuideRateConfig)
{
#ifdef HAVE_MPI
auto model = std::make_shared<Opm::GuideRateModel>(getGuideRateModel());
Opm::GuideRateConfig val1(model,
{{"test1", getGuideRateConfigWell()}},
{{"test2", getGuideRateConfigGroup()}});
auto val2 = PackUnpack(val1);
BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
BOOST_CHECK(val1 == std::get<0>(val2));
#endif
}
BOOST_AUTO_TEST_CASE(GConSaleGroup)
{
#ifdef HAVE_MPI
Opm::GConSale::GCONSALEGroup val1{Opm::UDAValue(1.0),
Opm::UDAValue(2.0),
Opm::UDAValue(3.0),
Opm::GConSale::MaxProcedure::PLUG,
4.0, Opm::UnitSystem()};
auto val2 = PackUnpack(val1);
BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
BOOST_CHECK(val1 == std::get<0>(val2));
#endif
}
BOOST_AUTO_TEST_CASE(GConSale)
{
#ifdef HAVE_MPI
Opm::GConSale::GCONSALEGroup group{Opm::UDAValue(1.0),
Opm::UDAValue(2.0),
Opm::UDAValue(3.0),
Opm::GConSale::MaxProcedure::PLUG,
4.0, Opm::UnitSystem()};
Opm::GConSale val1({{"test1", group}, {"test2", group}});
auto val2 = PackUnpack(val1);
BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
BOOST_CHECK(val1 == std::get<0>(val2));
#endif
}
BOOST_AUTO_TEST_CASE(GConSumpGroup)
{
#ifdef HAVE_MPI
Opm::GConSump::GCONSUMPGroup val1{Opm::UDAValue(1.0),
Opm::UDAValue(2.0),
"test",
3.0, Opm::UnitSystem()};
auto val2 = PackUnpack(val1);
BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
BOOST_CHECK(val1 == std::get<0>(val2));
#endif
}
BOOST_AUTO_TEST_CASE(GConSump)
{
#ifdef HAVE_MPI
Opm::GConSump::GCONSUMPGroup group{Opm::UDAValue(1.0),
Opm::UDAValue(2.0),
"test",
3.0, Opm::UnitSystem()};
Opm::GConSump val1({{"test1", group}, {"test2", group}});
auto val2 = PackUnpack(val1);
BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
BOOST_CHECK(val1 == std::get<0>(val2));
#endif
}
BOOST_AUTO_TEST_CASE(RFTConfig)
{
#ifdef HAVE_MPI
Opm::RFTConfig val1(getTimeMap(),
{true, 1},
{"test1", "test2"},
{{"test3", 2}},
{{"test1", {{{Opm::RFTConfig::RFT::TIMESTEP, 3}}, 4}}},
{{"test2", {{{Opm::RFTConfig::PLT::REPT, 5}}, 6}}});
auto val2 = PackUnpack(val1);
BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
BOOST_CHECK(val1 == std::get<0>(val2));
#endif
}
BOOST_AUTO_TEST_CASE(DeckItem)
{
#ifdef HAVE_MPI
Opm::DeckItem val1({1.0}, {2}, {"test3"}, {Opm::UDAValue(4)},
Opm::type_tag::string, "test5",
{Opm::value::status::deck_value},
true,
{Opm::Dimension("DimensionLess", 7.0, 8.0)},
{Opm::Dimension("Metric", 10.0, 11.0)});
auto val2 = PackUnpack(val1);
BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
BOOST_CHECK(val1 == std::get<0>(val2));
#endif
}
BOOST_AUTO_TEST_CASE(DeckRecord)
{
#ifdef HAVE_MPI
Opm::DeckRecord val1 = getDeckRecord();
auto val2 = PackUnpack(val1);
BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
BOOST_CHECK(val1 == std::get<0>(val2));
#endif
}
bool init_unit_test_func()
{
return true;