diff --git a/opm/simulators/utils/ParallelRestart.cpp b/opm/simulators/utils/ParallelRestart.cpp
index df9ddfe49..dce381e22 100644
--- a/opm/simulators/utils/ParallelRestart.cpp
+++ b/opm/simulators/utils/ParallelRestart.cpp
@@ -34,6 +34,8 @@
 #include
 #include
 #include
+#include <opm/parser/eclipse/EclipseState/Tables/PvtgTable.hpp>
+#include <opm/parser/eclipse/EclipseState/Tables/PvtoTable.hpp>
 #include
 #include
 #include
@@ -422,6 +424,26 @@ std::size_t packSize(const Runspec& data, Dune::MPIHelper::MPICommunicator comm)
            packSize(data.actdims(), comm);
 }
 
+std::size_t packSize(const PvtxTable& data, Dune::MPIHelper::MPICommunicator comm)
+{
+    return packSize(data.getOuterColumnSchema(), comm) +
+           packSize(data.getOuterColumn(), comm) +
+           packSize(data.getUnderSaturatedSchema(), comm) +
+           packSize(data.getSaturatedSchema(), comm) +
+           packSize(data.getUnderSaturatedTables(), comm) +
+           packSize(data.getSaturatedTable(), comm);
+}
+
+std::size_t packSize(const PvtgTable& data, Dune::MPIHelper::MPICommunicator comm)
+{
+    return packSize(static_cast<const PvtxTable&>(data), comm);
+}
+
+std::size_t packSize(const PvtoTable& data, Dune::MPIHelper::MPICommunicator comm)
+{
+    return packSize(static_cast<const PvtxTable&>(data), comm);
+}
+
 ////// pack routines
 
 template<class T>
@@ -832,6 +854,29 @@ void pack(const Runspec& data, std::vector<char>& buffer, int& position,
     pack(data.actdims(), buffer, position, comm);
 }
 
+void pack(const PvtxTable& data, std::vector<char>& buffer, int& position,
+          Dune::MPIHelper::MPICommunicator comm)
+{
+    pack(data.getOuterColumnSchema(), buffer, position, comm);
+    pack(data.getOuterColumn(), buffer, position, comm);
+    pack(data.getUnderSaturatedSchema(), buffer, position, comm);
+    pack(data.getSaturatedSchema(), buffer, position, comm);
+    pack(data.getUnderSaturatedTables(), buffer, position, comm);
+    pack(data.getSaturatedTable(), buffer, position, comm);
+}
+
+void pack(const PvtgTable& data, std::vector<char>& buffer, int& position,
+          Dune::MPIHelper::MPICommunicator comm)
+{
+    pack(static_cast<const PvtxTable&>(data), buffer, position, comm);
+}
+
+void pack(const PvtoTable& data, std::vector<char>& buffer, int& position,
+          Dune::MPIHelper::MPICommunicator comm)
+{
+    pack(static_cast<const PvtxTable&>(data), buffer, position, comm);
+}
+
 /// unpack routines
 
 template<class T>
@@ -1345,6 +1390,38 @@ void unpack(Runspec& data, std::vector<char>& buffer, int& position,
            udqparams, hystPar, actdims);
 }
 
+template<class PVTType>
+void unpack_pvt(PVTType& data, std::vector<char>& buffer, int& position,
+                Dune::MPIHelper::MPICommunicator comm)
+{
+    ColumnSchema outer_schema;
+    TableColumn outer_column;
+    TableSchema undersat_schema, sat_schema;
+    std::vector<SimpleTable> undersat_tables;
+    SimpleTable sat_table;
+    unpack(outer_schema, buffer, position, comm);
+    unpack(outer_column, buffer, position, comm);
+    unpack(undersat_schema, buffer, position, comm);
+    unpack(sat_schema, buffer, position, comm);
+    unpack(undersat_tables, buffer, position, comm);
+    unpack(sat_table, buffer, position, comm);
+    data = PVTType(outer_schema, outer_column, undersat_schema, sat_schema,
+                   undersat_tables, sat_table);
+}
+
+void unpack(PvtgTable& data, std::vector<char>& buffer, int& position,
+            Dune::MPIHelper::MPICommunicator comm)
+{
+    unpack_pvt(data, buffer, position, comm);
+}
+
+void unpack(PvtoTable& data, std::vector<char>& buffer, int& position,
+            Dune::MPIHelper::MPICommunicator comm)
+{
+    unpack_pvt(data, buffer, position, comm);
+}
+
+
 } // end namespace Mpi
 
 RestartValue loadParallelRestart(const EclipseIO* eclIO, SummaryState& summaryState,
                                  const std::vector<RestartKey>& solutionKeys,
diff --git a/opm/simulators/utils/ParallelRestart.hpp b/opm/simulators/utils/ParallelRestart.hpp
index 87a76f8fb..33487df8f 100644
--- a/opm/simulators/utils/ParallelRestart.hpp
+++ b/opm/simulators/utils/ParallelRestart.hpp
@@ -53,6 +53,8 @@ class IOConfig;
 class NNC;
 struct NNCdata;
 class Phases;
+class PvtgTable;
+class PvtoTable;
 class RestartConfig;
 class RestartSchedule;
 class Rock2dTable;
@@ -264,6 +266,8 @@ ADD_PACK_PROTOTYPES(IOConfig)
 ADD_PACK_PROTOTYPES(NNC)
 ADD_PACK_PROTOTYPES(NNCdata)
 ADD_PACK_PROTOTYPES(Phases)
+ADD_PACK_PROTOTYPES(PvtgTable)
+ADD_PACK_PROTOTYPES(PvtoTable)
 ADD_PACK_PROTOTYPES(RestartConfig)
 ADD_PACK_PROTOTYPES(RestartKey)
 ADD_PACK_PROTOTYPES(RestartSchedule)
diff --git a/tests/test_ParallelRestart.cpp b/tests/test_ParallelRestart.cpp
index c958a5f65..aec990d1e 100644
--- a/tests/test_ParallelRestart.cpp
+++ b/tests/test_ParallelRestart.cpp
@@ -37,6 +37,8 @@
 #include
 #include
 #include
+#include <opm/parser/eclipse/EclipseState/Tables/PvtgTable.hpp>
+#include <opm/parser/eclipse/EclipseState/Tables/PvtoTable.hpp>
 #include
 #include
 #include
@@ -168,9 +170,6 @@ Opm::FoamData getFoamData()
     return Opm::FoamData(1.0, 2.0, 3.0, true, 4.0);
 }
 
-#endif
-
-
 Opm::TimeMap getTimeMap()
 {
     return Opm::TimeMap({123},
@@ -178,6 +177,32 @@ Opm::TimeMap getTimeMap()
                         {{2, Opm::TimeStampUTC(456)}});
 }
 
+Opm::PvtgTable getPvtgTable()
+{
+    return Opm::PvtgTable(Opm::ColumnSchema("test1", Opm::Table::INCREASING,
+                                            Opm::Table::DEFAULT_LINEAR),
+                          getTableColumn(),
+                          getTableSchema(),
+                          getTableSchema(),
+                          {getSimpleTable()},
+                          getSimpleTable());
+}
+
+
+Opm::PvtoTable getPvtoTable()
+{
+    return Opm::PvtoTable(Opm::ColumnSchema("test1", Opm::Table::INCREASING,
+                                            Opm::Table::DEFAULT_LINEAR),
+                          getTableColumn(),
+                          getTableSchema(),
+                          getTableSchema(),
+                          {getSimpleTable()},
+                          getSimpleTable());
+}
+
+
+#endif
+
 }
@@ -656,6 +681,28 @@ BOOST_AUTO_TEST_CASE(Runspec)
 }
 
 
+BOOST_AUTO_TEST_CASE(PvtgTable)
+{
+#if HAVE_MPI
+    Opm::PvtgTable val1 = getPvtgTable();
+    auto val2 = PackUnpack(val1);
+    BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
+    BOOST_CHECK(val1 == std::get<0>(val2));
+#endif
+}
+
+
+BOOST_AUTO_TEST_CASE(PvtoTable)
+{
+#if HAVE_MPI
+    Opm::PvtoTable val1 = getPvtoTable();
+    auto val2 = PackUnpack(val1);
+    BOOST_CHECK(std::get<1>(val2) == std::get<2>(val2));
+    BOOST_CHECK(val1 == std::get<0>(val2));
+#endif
+}
+
+
 bool init_unit_test_func()
 {
     return true;