Merge pull request #2234 from akva2/noecl_flush

More MPI serialization support
Arne Morten Kvarving, 2019-12-18 14:16:53 +01:00 (committed by GitHub)
commit c956b8fcc0
3 changed files with 335 additions and 3 deletions
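
The change extends the Opm::Mpi helpers with overloads for EndpointScaling, UDQParams, Runspec and the PVT tables, following the existing three-function pattern: packSize() measures, pack() serializes into a char buffer at a running position, and unpack() reconstructs the object on the receiving side. A minimal sketch of how the three compose into a broadcast; the function name and the use of raw MPI_Bcast are illustrative assumptions, not part of this commit:

#include <mpi.h>
#include <vector>
#include <dune/common/parallel/mpihelper.hh>
// ...plus the Opm headers for Runspec and for the Opm::Mpi pack helpers (omitted here).

// Illustrative only: broadcast a Runspec from 'root' to all ranks using the
// packSize/pack/unpack overloads added in this pull request. Assumes Runspec
// is default-constructible on the receiving ranks.
void broadcastRunspec(Opm::Runspec& runspec,
                      Dune::MPIHelper::MPICommunicator comm, int root)
{
    int rank;
    MPI_Comm_rank(comm, &rank);

    std::vector<char> buffer;
    if (rank == root) {
        buffer.resize(Opm::Mpi::packSize(runspec, comm)); // 1. measure
        int position = 0;
        Opm::Mpi::pack(runspec, buffer, position, comm);  // 2. serialize
    }

    unsigned long size = buffer.size();                   // 3. ship the bytes
    MPI_Bcast(&size, 1, MPI_UNSIGNED_LONG, root, comm);
    buffer.resize(size);
    MPI_Bcast(buffer.data(), static_cast<int>(size), MPI_CHAR, root, comm);

    if (rank != root) {
        int position = 0;
        Opm::Mpi::unpack(runspec, buffer, position, comm); // 4. reconstruct
    }
}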


@@ -34,6 +34,8 @@
#include <opm/parser/eclipse/EclipseState/SimulationConfig/SimulationConfig.hpp>
#include <opm/parser/eclipse/EclipseState/SimulationConfig/ThresholdPressure.hpp>
#include <opm/parser/eclipse/EclipseState/Tables/ColumnSchema.hpp>
#include <opm/parser/eclipse/EclipseState/Tables/PvtgTable.hpp>
#include <opm/parser/eclipse/EclipseState/Tables/PvtoTable.hpp>
#include <opm/parser/eclipse/EclipseState/Tables/Rock2dTable.hpp>
#include <opm/parser/eclipse/EclipseState/Tables/Rock2dtrTable.hpp>
#include <opm/parser/eclipse/EclipseState/Tables/SimpleTable.hpp>
@@ -200,14 +202,18 @@ std::size_t packSize(const std::unordered_map<T1,T2,H,P,A>& data, Dune::MPIHelpe
return totalSize;
}
HANDLE_AS_POD(Actdims)
HANDLE_AS_POD(data::Connection)
HANDLE_AS_POD(data::Rates)
HANDLE_AS_POD(data::Segment)
HANDLE_AS_POD(EclHysterConfig)
HANDLE_AS_POD(EquilRecord)
HANDLE_AS_POD(FoamData)
HANDLE_AS_POD(RestartSchedule)
HANDLE_AS_POD(Tabdims)
HANDLE_AS_POD(TimeMap::StepData)
HANDLE_AS_POD(Welldims)
HANDLE_AS_POD(WellSegmentDims)
std::size_t packSize(const data::Well& data, Dune::MPIHelper::MPICommunicator comm)
{
@@ -392,6 +398,52 @@ std::size_t packSize(const Phases& data, Dune::MPIHelper::MPICommunicator comm)
return packSize(data.getBits(), comm);
}
std::size_t packSize(const EndpointScaling& data, Dune::MPIHelper::MPICommunicator comm)
{
return packSize(data.getBits(), comm);
}
std::size_t packSize(const UDQParams& data, Dune::MPIHelper::MPICommunicator comm)
{
return packSize(data.reseed(), comm) +
packSize(data.rand_seed(), comm) +
packSize(data.range(), comm) +
packSize(data.undefinedValue(), comm) +
packSize(data.cmpEpsilon(), comm);
}
std::size_t packSize(const Runspec& data, Dune::MPIHelper::MPICommunicator comm)
{
return packSize(data.phases(), comm) +
packSize(data.tabdims(), comm) +
packSize(data.endpointScaling(), comm) +
packSize(data.wellDimensions(), comm) +
packSize(data.wellSegmentDimensions(), comm) +
packSize(data.udqParams(), comm) +
packSize(data.hysterPar(), comm) +
packSize(data.actdims(), comm);
}
std::size_t packSize(const PvtxTable& data, Dune::MPIHelper::MPICommunicator comm)
{
return packSize(data.getOuterColumnSchema(), comm) +
packSize(data.getOuterColumn(), comm) +
packSize(data.getUnderSaturatedSchema(), comm) +
packSize(data.getSaturatedSchema(), comm) +
packSize(data.getUnderSaturatedTables(), comm) +
packSize(data.getSaturatedTable(), comm);
}
std::size_t packSize(const PvtgTable& data, Dune::MPIHelper::MPICommunicator comm)
{
return packSize(static_cast<const PvtxTable&>(data), comm);
}
std::size_t packSize(const PvtoTable& data, Dune::MPIHelper::MPICommunicator comm)
{
return packSize(static_cast<const PvtxTable&>(data), comm);
}
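
All packSize() overloads above use the same convention: an aggregate's packed size is the sum of packSize() over its members, PODs go through HANDLE_AS_POD, and the derived PvtgTable/PvtoTable simply forward to their PvtxTable base. A further type would be wired up the same way; the struct below is purely illustrative, not part of OPM, and the sketch assumes it sits next to the overloads in this file:

// Hypothetical example only, mirroring the sum-of-members pattern above.
struct DemoConfig {            // not an OPM class
    int iterations;
    double tolerance;
    std::string label;
};

std::size_t packSize(const DemoConfig& data, Dune::MPIHelper::MPICommunicator comm)
{
    return packSize(data.iterations, comm) +   // scalar overloads/templates
           packSize(data.tolerance, comm) +    // provided elsewhere in this file
           packSize(data.label, comm);         // std::string overload exists
}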
////// pack routines
template<class T>
@@ -773,6 +825,58 @@ void pack(const Phases& data, std::vector<char>& buffer, int& position,
pack(data.getBits(), buffer, position, comm);
}
void pack(const EndpointScaling& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(data.getBits(), buffer, position, comm);
}
void pack(const UDQParams& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(data.reseed(), buffer, position, comm);
pack(data.rand_seed(), buffer, position, comm);
pack(data.range(), buffer, position, comm);
pack(data.undefinedValue(), buffer, position, comm);
pack(data.cmpEpsilon(), buffer, position, comm);
}
void pack(const Runspec& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(data.phases(), buffer, position, comm);
pack(data.tabdims(), buffer, position, comm);
pack(data.endpointScaling(), buffer, position, comm);
pack(data.wellDimensions(), buffer, position, comm);
pack(data.wellSegmentDimensions(), buffer, position, comm);
pack(data.udqParams(), buffer, position, comm);
pack(data.hysterPar(), buffer, position, comm);
pack(data.actdims(), buffer, position, comm);
}
void pack(const PvtxTable& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(data.getOuterColumnSchema(), buffer, position, comm);
pack(data.getOuterColumn(), buffer, position, comm);
pack(data.getUnderSaturatedSchema(), buffer, position, comm);
pack(data.getSaturatedSchema(), buffer, position, comm);
pack(data.getUnderSaturatedTables(), buffer, position, comm);
pack(data.getSaturatedTable(), buffer, position, comm);
}
void pack(const PvtgTable& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(static_cast<const PvtxTable&>(data), buffer, position, comm);
}
void pack(const PvtoTable& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(static_cast<const PvtxTable&>(data), buffer, position, comm);
}
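
Each pack() routine mirrors its packSize() counterpart field for field, appending to the shared buffer at the running position. Continuing the illustrative DemoConfig sketch from the packSize section (hypothetical, not part of the commit):

// Hypothetical example only: pack() must write members in the same order
// that packSize() counted them and unpack() will later read them.
void pack(const DemoConfig& data, std::vector<char>& buffer, int& position,
          Dune::MPIHelper::MPICommunicator comm)
{
    pack(data.iterations, buffer, position, comm);
    pack(data.tolerance, buffer, position, comm);
    pack(data.label, buffer, position, comm);
}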
/// unpack routines
template<class T>
@@ -1240,6 +1344,84 @@ void unpack(Phases& data, std::vector<char>& buffer, int& position,
data = Phases(std::bitset<NUM_PHASES_IN_ENUM>(bits));
}
void unpack(EndpointScaling& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
unsigned long bits;
unpack(bits, buffer, position, comm);
data = EndpointScaling(std::bitset<4>(bits));
}
void unpack(UDQParams& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
bool reseed;
int rand_seed;
double range, undefVal, cmp_eps;
unpack(reseed, buffer, position, comm);
unpack(rand_seed, buffer, position, comm);
unpack(range, buffer, position, comm);
unpack(undefVal, buffer, position, comm);
unpack(cmp_eps, buffer, position, comm);
data = UDQParams(reseed, rand_seed, range, undefVal, cmp_eps);
}
void unpack(Runspec& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
Phases phases;
Tabdims tabdims;
EndpointScaling endScale;
Welldims wellDims;
WellSegmentDims wsegDims;
UDQParams udqparams;
EclHysterConfig hystPar;
Actdims actdims;
unpack(phases, buffer, position, comm);
unpack(tabdims, buffer, position, comm);
unpack(endScale, buffer, position, comm);
unpack(wellDims, buffer, position, comm);
unpack(wsegDims, buffer, position, comm);
unpack(udqparams, buffer, position, comm);
unpack(hystPar, buffer, position, comm);
unpack(actdims, buffer, position, comm);
data = Runspec(phases, tabdims, endScale, wellDims, wsegDims,
udqparams, hystPar, actdims);
}
template<class PVTType>
void unpack_pvt(PVTType& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
ColumnSchema outer_schema;
TableColumn outer_column;
TableSchema undersat_schema, sat_schema;
std::vector<SimpleTable> undersat_tables;
SimpleTable sat_table;
unpack(outer_schema, buffer, position, comm);
unpack(outer_column, buffer, position, comm);
unpack(undersat_schema, buffer, position, comm);
unpack(sat_schema, buffer, position, comm);
unpack(undersat_tables, buffer, position, comm);
unpack(sat_table, buffer, position, comm);
data = PVTType(outer_schema, outer_column, undersat_schema, sat_schema,
undersat_tables, sat_table);
}
void unpack(PvtgTable& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
unpack_pvt(data, buffer, position, comm);
}
void unpack(PvtoTable& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
unpack_pvt(data, buffer, position, comm);
}
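
The unpack() routines read the fields back in packing order; types without settable members (Runspec, UDQParams, the PVT tables) are rebuilt through their constructors, as above. For the illustrative DemoConfig, whose members are public, unpacking in place is enough (hypothetical sketch, completing the triad from the earlier sections):

// Hypothetical example only: completes the packSize/pack/unpack triad
// for the DemoConfig sketch used above.
void unpack(DemoConfig& data, std::vector<char>& buffer, int& position,
            Dune::MPIHelper::MPICommunicator comm)
{
    unpack(data.iterations, buffer, position, comm);
    unpack(data.tolerance, buffer, position, comm);
    unpack(data.label, buffer, position, comm);
}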
} // end namespace Mpi
RestartValue loadParallelRestart(const EclipseIO* eclIO, SummaryState& summaryState,
const std::vector<Opm::RestartKey>& solutionKeys,


@@ -39,8 +39,11 @@
namespace Opm
{
class Actdims;
class ColumnSchema;
class EclHysterConfig;
class EDITNNC;
class EndpointScaling;
class Equil;
class EquilRecord;
class FoamConfig;
@@ -50,10 +53,13 @@ class IOConfig;
class NNC;
struct NNCdata;
class Phases;
class PvtgTable;
class PvtoTable;
class RestartConfig;
class RestartSchedule;
class Rock2dTable;
class Rock2dtrTable;
class Runspec;
class SimulationConfig;
class SimpleTable;
class Tabdims;
@@ -61,6 +67,9 @@ class TableColumn;
class TableContainer;
class TableSchema;
class ThresholdPressure;
class UDQParams;
class Welldims;
class WellSegmentDims;
namespace Mpi
{
@@ -236,6 +245,7 @@ void unpack(char* str, std::size_t length, std::vector<char>& buffer, int& posit
void unpack(T& data, std::vector<char>& buffer, int& position, \
Dune::MPIHelper::MPICommunicator comm);
ADD_PACK_PROTOTYPES(Actdims)
ADD_PACK_PROTOTYPES(ColumnSchema)
ADD_PACK_PROTOTYPES(data::CellData)
ADD_PACK_PROTOTYPES(data::Connection)
@@ -245,21 +255,26 @@ ADD_PACK_PROTOTYPES(data::Solution)
ADD_PACK_PROTOTYPES(data::Well)
ADD_PACK_PROTOTYPES(data::WellRates)
ADD_PACK_PROTOTYPES(EDITNNC)
ADD_PACK_PROTOTYPES(EndpointScaling)
ADD_PACK_PROTOTYPES(Equil)
ADD_PACK_PROTOTYPES(EquilRecord)
ADD_PACK_PROTOTYPES(FoamConfig)
ADD_PACK_PROTOTYPES(FoamData)
ADD_PACK_PROTOTYPES(EclHysterConfig)
ADD_PACK_PROTOTYPES(InitConfig)
ADD_PACK_PROTOTYPES(IOConfig)
ADD_PACK_PROTOTYPES(NNC)
ADD_PACK_PROTOTYPES(NNCdata)
ADD_PACK_PROTOTYPES(Phases)
ADD_PACK_PROTOTYPES(PvtgTable)
ADD_PACK_PROTOTYPES(PvtoTable)
ADD_PACK_PROTOTYPES(RestartConfig)
ADD_PACK_PROTOTYPES(RestartKey)
ADD_PACK_PROTOTYPES(RestartSchedule)
ADD_PACK_PROTOTYPES(RestartValue)
ADD_PACK_PROTOTYPES(Rock2dTable)
ADD_PACK_PROTOTYPES(Rock2dtrTable)
ADD_PACK_PROTOTYPES(Runspec)
ADD_PACK_PROTOTYPES(std::string)
ADD_PACK_PROTOTYPES(SimulationConfig)
ADD_PACK_PROTOTYPES(SimpleTable)
@@ -270,6 +285,9 @@ ADD_PACK_PROTOTYPES(TableSchema)
ADD_PACK_PROTOTYPES(ThresholdPressure)
ADD_PACK_PROTOTYPES(TimeMap)
ADD_PACK_PROTOTYPES(TimeMap::StepData)
ADD_PACK_PROTOTYPES(UDQParams)
ADD_PACK_PROTOTYPES(Welldims)
ADD_PACK_PROTOTYPES(WellSegmentDims)
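
ADD_PACK_PROTOTYPES(T) declares the packSize/pack/unpack triple for T; only the tail of the macro (the unpack declaration) is visible at the top of this hunk. Its full expansion plausibly looks like the sketch below, reconstructed from the function signatures in the implementation file rather than copied from the header:

// Reconstructed sketch, not verbatim from the header.
#define ADD_PACK_PROTOTYPES(T) \
    std::size_t packSize(const T& data, \
                         Dune::MPIHelper::MPICommunicator comm); \
    void pack(const T& data, std::vector<char>& buffer, int& position, \
              Dune::MPIHelper::MPICommunicator comm); \
    void unpack(T& data, std::vector<char>& buffer, int& position, \
                Dune::MPIHelper::MPICommunicator comm);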
} // end namespace Mpi
RestartValue loadParallelRestart(const EclipseIO* eclIO, SummaryState& summaryState,


@@ -37,6 +37,8 @@
#include <opm/parser/eclipse/EclipseState/SimulationConfig/SimulationConfig.hpp>
#include <opm/parser/eclipse/EclipseState/SimulationConfig/ThresholdPressure.hpp>
#include <opm/parser/eclipse/EclipseState/Tables/ColumnSchema.hpp>
#include <opm/parser/eclipse/EclipseState/Tables/PvtgTable.hpp>
#include <opm/parser/eclipse/EclipseState/Tables/PvtoTable.hpp>
#include <opm/parser/eclipse/EclipseState/Tables/Rock2dTable.hpp>
#include <opm/parser/eclipse/EclipseState/Tables/Rock2dtrTable.hpp>
#include <opm/parser/eclipse/EclipseState/Tables/SimpleTable.hpp>
@@ -168,9 +170,6 @@ Opm::FoamData getFoamData()
return Opm::FoamData(1.0, 2.0, 3.0, true, 4.0);
}
#endif
Opm::TimeMap getTimeMap()
{
return Opm::TimeMap({123},
@@ -178,6 +177,32 @@ Opm::TimeMap getTimeMap()
{{2, Opm::TimeStampUTC(456)}});
}
Opm::PvtgTable getPvtgTable()
{
return Opm::PvtgTable(Opm::ColumnSchema("test1", Opm::Table::INCREASING,
Opm::Table::DEFAULT_LINEAR),
getTableColumn(),
getTableSchema(),
getTableSchema(),
{getSimpleTable()},
getSimpleTable());
}
Opm::PvtoTable getPvtoTable()
{
return Opm::PvtoTable(Opm::ColumnSchema("test1", Opm::Table::INCREASING,
Opm::Table::DEFAULT_LINEAR),
getTableColumn(),
getTableSchema(),
getTableSchema(),
{getSimpleTable()},
getSimpleTable());
}
#endif
}
@@ -571,6 +596,113 @@ BOOST_AUTO_TEST_CASE(Tabdims)
}
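
Every new test case below follows the same round-trip pattern through the PackUnpack helper defined earlier in the test file: element 0 of the returned tuple is the unpacked copy, and elements 1 and 2 are the buffer positions after pack() and unpack(), which must agree. Inferred from those checks, the helper plausibly looks like this sketch (not the verbatim implementation):

// Plausible sketch of the PackUnpack helper; MPI_COMM_WORLD and the tuple
// layout are assumptions consistent with the checks in the test cases below.
template<class T>
std::tuple<T, int, int> PackUnpack(const T& in)
{
    Dune::MPIHelper::MPICommunicator comm = MPI_COMM_WORLD;
    std::vector<char> buffer(Opm::Mpi::packSize(in, comm));
    int pos1 = 0;
    Opm::Mpi::pack(in, buffer, pos1, comm);
    T out;                               // assumes T is default-constructible
    int pos2 = 0;
    Opm::Mpi::unpack(out, buffer, pos2, comm);
    return std::make_tuple(out, pos1, pos2);
}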
BOOST_AUTO_TEST_CASE(EndpointScaling)
{
#if HAVE_MPI
Opm::EndpointScaling val1(std::bitset<4>(13));
auto val2 = PackUnpack(val1);
BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
BOOST_CHECK(val1 == std::get<0>(val2));
#endif
}
BOOST_AUTO_TEST_CASE(Welldims)
{
#if HAVE_MPI
Opm::Welldims val1(1,2,3,4);
auto val2 = PackUnpack(val1);
BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
BOOST_CHECK(val1 == std::get<0>(val2));
#endif
}
BOOST_AUTO_TEST_CASE(WellSegmentDims)
{
#if HAVE_MPI
Opm::WellSegmentDims val1(1,2,3);
auto val2 = PackUnpack(val1);
BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
BOOST_CHECK(val1 == std::get<0>(val2));
#endif
}
BOOST_AUTO_TEST_CASE(UDQParams)
{
#if HAVE_MPI
Opm::UDQParams val1(true, 1, 2.0, 3.0, 4.0);
auto val2 = PackUnpack(val1);
BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
BOOST_CHECK(val1 == std::get<0>(val2));
#endif
}
BOOST_AUTO_TEST_CASE(EclHysterConfig)
{
#if HAVE_MPI
Opm::EclHysterConfig val1(true, 1, 2);
auto val2 = PackUnpack(val1);
BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
BOOST_CHECK(val1 == std::get<0>(val2));
#endif
}
BOOST_AUTO_TEST_CASE(Actdims)
{
#if HAVE_MPI
Opm::Actdims val1(1,2,3,4);
auto val2 = PackUnpack(val1);
BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
BOOST_CHECK(val1 == std::get<0>(val2));
#endif
}
BOOST_AUTO_TEST_CASE(Runspec)
{
#if HAVE_MPI
Opm::Runspec val1(Opm::Phases(true, true, true, false, true, false, true, false),
Opm::Tabdims(1,2,3,4,5,6),
Opm::EndpointScaling(std::bitset<4>(13)),
Opm::Welldims(1,2,3,4),
Opm::WellSegmentDims(1,2,3),
Opm::UDQParams(true, 1, 2.0, 3.0, 4.0),
Opm::EclHysterConfig(true, 1, 2),
Opm::Actdims(1,2,3,4));
auto val2 = PackUnpack(val1);
BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
BOOST_CHECK(val1 == std::get<0>(val2));
#endif
}
BOOST_AUTO_TEST_CASE(PvtgTable)
{
#if HAVE_MPI
Opm::PvtgTable val1 = getPvtgTable();
auto val2 = PackUnpack(val1);
BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
BOOST_CHECK(val1 == std::get<0>(val2));
#endif
}
BOOST_AUTO_TEST_CASE(PvtoTable)
{
#if HAVE_MPI
Opm::PvtoTable val1 = getPvtoTable();
auto val2 = PackUnpack(val1);
BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
BOOST_CHECK(val1 == std::get<0>(val2));
#endif
}
bool init_unit_test_func()
{
return true;