Load restart when the global grid is only available on one processor.

For this, the master (IO) rank loads the RestartValue and then
broadcasts it to the other ranks.
Markus Blatt
2019-10-10 23:06:55 +02:00
parent 7784d53198
commit 9735bdadfc
4 changed files with 900 additions and 1 deletion


@@ -0,0 +1,637 @@
/*
Copyright 2019 Equinor AS.
This file is part of the Open Porous Media project (OPM).
OPM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OPM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#include <config.h>
#if HAVE_MPI
#include <mpi.h>
#endif
#include "ParallelRestart.hpp"
#include <dune/common/parallel/mpitraits.hh>
namespace Opm
{
namespace Mpi
{
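// The packSize()/pack()/unpack() templates dispatch on std::is_pod<T>:
// POD types are handled generically through Dune::MPITraits, while
// non-POD types need one of the explicit overloads below and otherwise
// throw a std::logic_error.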
template<class T>
std::size_t packSize(const T*, std::size_t, Dune::MPIHelper::MPICommunicator,
std::integral_constant<bool, false>)
{
OPM_THROW(std::logic_error, "Packing not (yet) supported for this non-pod type.");
}
template<class T>
std::size_t packSize(const T*, std::size_t l, Dune::MPIHelper::MPICommunicator comm,
std::integral_constant<bool, true>)
{
#if HAVE_MPI
int size;
MPI_Pack_size(1, Dune::MPITraits<std::size_t>::getType(), comm, &size);
std::size_t totalSize = size;
MPI_Pack_size(l, Dune::MPITraits<T>::getType(), comm, &size);
return totalSize + size;
#else
return 0;
#endif
}
template<class T>
std::size_t packSize(const T* data, std::size_t l, Dune::MPIHelper::MPICommunicator comm)
{
return packSize(data, l, comm, typename std::is_pod<T>::type());
}
template<class T>
std::size_t packSize(const T&, Dune::MPIHelper::MPICommunicator,
std::integral_constant<bool, false>)
{
OPM_THROW(std::logic_error, "Packing not (yet) supported for this non-pod type.");
}
template<class T>
std::size_t packSize(const T&, Dune::MPIHelper::MPICommunicator comm,
std::integral_constant<bool, true>)
{
#if HAVE_MPI
int size{};
MPI_Pack_size(1, Dune::MPITraits<T>::getType(), comm, &size);
return size;
#else
return 0;
#endif
}
template<class T>
std::size_t packSize(const T& data, Dune::MPIHelper::MPICommunicator comm)
{
return packSize(data, comm, typename std::is_pod<T>::type());
}
template<class T1, class T2>
std::size_t packSize(const std::pair<T1,T2>& data, Dune::MPIHelper::MPICommunicator comm)
{
return packSize(data.first, comm) + packSize(data.second, comm);
}
template<class T>
std::size_t packSize(const std::vector<T>& data, Dune::MPIHelper::MPICommunicator comm)
{
if (std::is_pod<T>::value)
// size written automatically
return packSize(data.data(), data.size(), comm);
std::size_t size = packSize(data.size(), comm);
for (const auto& entry: data)
size += packSize(entry, comm);
return size;
}
std::size_t packSize(const char* str, Dune::MPIHelper::MPICommunicator comm)
{
#if HAVE_MPI
int size;
MPI_Pack_size(1, Dune::MPITraits<std::size_t>::getType(), comm, &size);
std::size_t totalSize = size;
MPI_Pack_size(strlen(str)+1, MPI_CHAR, comm, &size);
return totalSize + size;
#else
return 0;
#endif
}
std::size_t packSize(const std::string& str, Dune::MPIHelper::MPICommunicator comm)
{
return packSize(str.c_str(), comm);
}
template<class T1, class T2>
std::size_t packSize(const std::map<T1,T2>& data, Dune::MPIHelper::MPICommunicator comm)
{
std::size_t totalSize = packSize(data.size(), comm);
for (const auto& entry: data)
{
totalSize += packSize(entry, comm);
}
return totalSize;
}
template<class T1, class T2>
std::size_t packSize(const std::unordered_map<T1,T2>& data, Dune::MPIHelper::MPICommunicator comm)
{
std::size_t totalSize = packSize(data.size(), comm);
for (const auto& entry: data)
{
totalSize += packSize(entry, comm);
}
return totalSize;
}
std::size_t packSize(const data::Rates& data, Dune::MPIHelper::MPICommunicator comm)
{
// plain struct no pointers -> treat as pod
return packSize(data, comm, std::integral_constant<bool,true>());
}
std::size_t packSize(const data::Connection& data, Dune::MPIHelper::MPICommunicator comm)
{
// plain struct no pointers -> treat as pod
return packSize(data, comm, std::integral_constant<bool,true>());
}
std::size_t packSize(const data::Segment& data, Dune::MPIHelper::MPICommunicator comm)
{
// plain struct no pointers -> treat as pod
return packSize(data, comm, std::integral_constant<bool,true>());
}
std::size_t packSize(const data::Well& data, Dune::MPIHelper::MPICommunicator comm)
{
std::size_t size = packSize(data.rates, comm);
size += packSize(data.bhp, comm) + packSize(data.thp, comm);
size += packSize(data.temperature, comm);
size += packSize(data.control, comm);
size += packSize(data.connections, comm);
size += packSize(data.segments, comm);
return size;
}
std::size_t packSize(const data::CellData& data, Dune::MPIHelper::MPICommunicator comm)
{
return packSize(data.dim, comm) + packSize(data.data, comm) + packSize(data.target, comm);
}
std::size_t packSize(const RestartKey& data, Dune::MPIHelper::MPICommunicator comm)
{
return packSize(data.key, comm) + packSize(data.dim, comm) + packSize(data.required, comm);
}
std::size_t packSize(const data::Solution& data, Dune::MPIHelper::MPICommunicator comm)
{
// Needs explicit conversion to a supported base type holding the data
// to prevent throwing.
return packSize(static_cast<const std::map< std::string, data::CellData>&>(data), comm);
}
std::size_t packSize(const data::WellRates& data, Dune::MPIHelper::MPICommunicator comm)
{
// Needs explicit conversion to a supported base type holding the data
// to prevent throwing.
return packSize(static_cast<const std::map< std::string, data::Well>&>(data), comm);
}
std::size_t packSize(const RestartValue& data, Dune::MPIHelper::MPICommunicator comm)
{
return packSize(data.solution, comm) + packSize(data.wells, comm) + packSize(data.extra, comm);
}
////// pack routines
template<class T>
void pack(const T*, std::size_t, std::vector<char>&, int&,
Dune::MPIHelper::MPICommunicator, std::integral_constant<bool, false>)
{
OPM_THROW(std::logic_error, "Packing not (yet) supported for this non-pod type.");
}
template<class T>
void pack(const T* data, std::size_t l, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm,
std::integral_constant<bool, true>)
{
#if HAVE_MPI
MPI_Pack(&l, 1, Dune::MPITraits<std::size_t>::getType(), buffer.data(),
buffer.size(), &position, comm);
MPI_Pack(data, l, Dune::MPITraits<T>::getType(), buffer.data(),
buffer.size(), &position, comm);
#endif
}
template<class T>
void pack(const T* data, std::size_t l, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(data, l, buffer, position, comm, typename std::is_pod<T>::type());
}
template<class T>
void pack(const T&, std::vector<char>&, int&,
Dune::MPIHelper::MPICommunicator, std::integral_constant<bool, false>)
{
OPM_THROW(std::logic_error, "Packing not (yet) supported for this non-pod type.");
}
template<class T>
void pack(const T& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm, std::integral_constant<bool, true>)
{
#if HAVE_MPI
MPI_Pack(&data, 1, Dune::MPITraits<T>::getType(), buffer.data(),
buffer.size(), &position, comm);
#endif
}
template<class T>
void pack(const T& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(data, buffer, position, comm, typename std::is_pod<T>::type());
}
template<class T1, class T2>
void pack(const std::pair<T1,T2>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(data.first, buffer, position, comm);
pack(data.second, buffer, position, comm);
}
template<class T>
void pack(const std::vector<T>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
if (std::is_pod<T>::value)
{
// size written automatically
pack(data.data(), data.size(), buffer, position, comm);
return;
}
pack(data.size(), buffer, position, comm);
for (const auto& entry: data)
pack(entry, buffer, position, comm);
}
void pack(const char* str, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
#if HAVE_MPI
std::size_t length = strlen(str)+1;
MPI_Pack(&length, 1, Dune::MPITraits<std::size_t>::getType(), buffer.data(),
buffer.size(), &position, comm);
MPI_Pack(str, strlen(str)+1, MPI_CHAR, buffer.data(), buffer.size(),
&position, comm);
#endif
}
void pack(const std::string& str, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(str.c_str(), buffer, position, comm);
}
template<class T1, class T2>
void pack(const std::map<T1,T2>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(data.size(), buffer, position, comm);
for (const auto& entry: data)
{
pack(entry, buffer, position, comm);
}
}
template<class T1, class T2>
void pack(const std::unordered_map<T1,T2>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(data.size(), buffer, position, comm);
for (const auto& entry: data)
{
pack(entry, buffer, position, comm);
}
}
void pack(const data::Rates& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
// plain struct no pointers -> treat as pod
pack(data, buffer, position, comm, std::integral_constant<bool,true>());
}
void pack(const data::Connection& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
// plain struct no pointers -> treat as pod
pack(data, buffer, position, comm, std::integral_constant<bool,true>());
}
void pack(const data::Segment& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
// plain struct no pointers -> treat as pod
pack(data, buffer, position, comm, std::integral_constant<bool,true>());
}
void pack(const data::Well& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(data.rates, buffer, position, comm);
pack(data.bhp, buffer, position, comm);
pack(data.thp, buffer, position, comm);
pack(data.temperature, buffer, position, comm);
pack(data.control, buffer, position, comm);
pack(data.connections, buffer, position, comm);
pack(data.segments, buffer, position, comm);
}
void pack(const RestartKey& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(data.key, buffer, position, comm);
pack(data.dim, buffer, position, comm);
pack(data.required, buffer, position, comm);
}
void pack(const data::CellData& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(data.dim, buffer, position, comm);
pack(data.data, buffer, position, comm);
pack(data.target, buffer, position, comm);
}
void pack(const data::Solution& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
// Needs explicit conversion to a supported base type holding the data
// to prevent throwing.
pack(static_cast<const std::map< std::string, data::CellData>&>(data),
buffer, position, comm);
}
void pack(const data::WellRates& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
// Needs explicit conversion to a supported base type holding the data
// to prevent throwing.
pack(static_cast<const std::map< std::string, data::Well>&>(data),
buffer, position, comm);
}
void pack(const RestartValue& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
pack(data.solution, buffer, position, comm);
pack(data.wells, buffer, position, comm);
pack(data.extra, buffer, position, comm);
}
/// unpack routines
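// Note the asymmetry with pack(): pack(const T*, l, ...) writes the length
// followed by the data, while unpack(T*, l, ...) reads only the data. The
// caller is expected to read the length first (see the vector and string
// overloads).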
template<class T>
void unpack(T*, const std::size_t&, std::vector<char>&, int&,
Dune::MPIHelper::MPICommunicator, std::integral_constant<bool, false>)
{
OPM_THROW(std::logic_error, "Unpacking not (yet) supported for this non-pod type.");
}
template<class T>
void unpack(T* data, const std::size_t& l, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm,
std::integral_constant<bool, true>)
{
#if HAVE_MPI
MPI_Unpack(buffer.data(), buffer.size(), &position, data, l,
Dune::MPITraits<T>::getType(), comm);
#endif
}
template<class T>
void unpack(T* data, const std::size_t& l, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
unpack(data, l, buffer, position, comm, typename std::is_pod<T>::type());
}
template<class T>
void unpack(T&, std::vector<char>&, int&,
Dune::MPIHelper::MPICommunicator, std::integral_constant<bool, false>)
{
OPM_THROW(std::logic_error, "Unpacking not (yet) supported for this non-pod type.");
}
template<class T>
void unpack(T& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm, std::integral_constant<bool, true>)
{
#if HAVE_MPI
MPI_Unpack(buffer.data(), buffer.size(), &position, &data, 1,
Dune::MPITraits<T>::getType(), comm);
#endif
}
template<class T>
void unpack(T& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
unpack(data, buffer, position, comm, typename std::is_pod<T>::type());
}
template<class T1, class T2>
void unpack(std::pair<T1,T2>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
unpack(data.first, buffer, position, comm);
unpack(data.second, buffer, position, comm);
}
template<class T>
void unpack(std::vector<T>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
std::size_t length=0;
unpack(length, buffer, position, comm);
data.resize(length);
if (std::is_pod<T>::value)
{
unpack(data.data(), data.size(), buffer, position, comm);
return;
}
for (auto& entry: data)
unpack(entry, buffer, position, comm);
}
void unpack(char* str, std::size_t length, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
#if HAVE_MPI
MPI_Unpack(buffer.data(), buffer.size(), &position, str, length, MPI_CHAR, comm);
#endif
}
void unpack(std::string& str, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
std::size_t length=0;
unpack(length, buffer, position, comm);
// Use a vector as automatically managed, exception-safe storage.
std::vector<char> cStr(length, '\0');
unpack(cStr.data(), length, buffer, position, comm);
str.assign(cStr.data());
}
template<class T1, class T2>
void unpack(std::map<T1,T2>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
std::size_t size=0;
unpack(size, buffer, position, comm);
for (;size>0; size--)
{
std::pair<T1,T2> entry;
unpack(entry, buffer, position, comm);
data.insert(entry);
}
}
template<class T1, class T2>
void unpack(std::unordered_map<T1,T2>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
std::size_t size=0;
unpack(size, buffer, position, comm);
for (;size>0; size--)
{
std::pair<T1,T2> entry;
unpack(entry, buffer, position, comm);
data.insert(entry);
}
}
void unpack(data::Rates& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
// plain struct no pointers -> treat as pod
unpack(data, buffer, position, comm, std::integral_constant<bool,true>());
}
void unpack(data::Connection& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
// plain struct no pointers -> treat as pod
unpack(data, buffer, position, comm, std::integral_constant<bool,true>());
}
void unpack(data::Segment& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
// plain struct no pointers -> treat as pod
unpack(data, buffer, position, comm, std::integral_constant<bool,true>());
}
void unpack(data::Well& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
unpack(data.rates, buffer, position, comm);
unpack(data.bhp, buffer, position, comm);
unpack(data.thp, buffer, position, comm);
unpack(data.temperature, buffer, position, comm);
unpack(data.control, buffer, position, comm);
unpack(data.connections, buffer, position, comm);
unpack(data.segments, buffer, position, comm);
}
void unpack(RestartKey& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
unpack(data.key, buffer, position, comm);
unpack(data.dim, buffer, position, comm);
unpack(data.required, buffer, position, comm);
}
void unpack(data::CellData& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
unpack(data.dim, buffer, position, comm);
unpack(data.data, buffer, position, comm);
unpack(data.target, buffer, position, comm);
}
void unpack(data::Solution& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
// Needs explicit conversion to a supported base type holding the data
// to prevent throwing.
unpack(static_cast<std::map< std::string, data::CellData>&>(data),
buffer, position, comm);
}
void unpack(data::WellRates& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
// Needs explicit conversion to a supported base type holding the data
// to prevent throwing.
unpack(static_cast<std::map< std::string, data::Well>&>(data),
buffer, position, comm);
}
void unpack(RestartValue& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm)
{
unpack(data.solution, buffer, position, comm);
unpack(data.wells, buffer, position, comm);
unpack(data.extra, buffer, position, comm);
}
} // end namespace Mpi
RestartValue loadParallelRestart(const EclipseIO* eclIO, SummaryState& summaryState,
const std::vector<Opm::RestartKey>& solutionKeys,
const std::vector<Opm::RestartKey>& extraKeys,
Dune::CollectiveCommunication<Dune::MPIHelper::MPICommunicator> comm)
{
#if HAVE_MPI
data::Solution sol;
data::Wells wells;
RestartValue restartValues(sol, wells);
if (eclIO)
{
assert(comm.rank() == 0);
restartValues = eclIO->loadRestart(summaryState, solutionKeys, extraKeys);
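// Serialize the loaded restart data into one contiguous byte buffer.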
int packedSize = Mpi::packSize(restartValues, comm);
std::vector<char> buffer(packedSize);
int position=0;
Mpi::pack(restartValues, buffer, position, comm);
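// Broadcast the packed size first, then the payload itself.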
comm.broadcast(&position, 1, 0);
comm.broadcast(buffer.data(), position, 0);
}
else
{
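// The other ranks receive the size, allocate a matching buffer,
// receive the payload, and deserialize it.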
int bufferSize{};
comm.broadcast(&bufferSize, 1, 0);
std::vector<char> buffer(bufferSize);
comm.broadcast(buffer.data(), bufferSize, 0);
int position{};
Mpi::unpack(restartValues, buffer, position, comm);
}
return restartValues;
#else
return eclIO->loadRestart(summaryState, solutionKeys, extraKeys);
#endif
}
} // end namespace Opm


@@ -0,0 +1,258 @@
/*
Copyright 2019 Equinor AS.
This file is part of the Open Porous Media project (OPM).
OPM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OPM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef PARALLEL_RESTART_HPP
#define PARALLEL_RESTART_HPP
#if HAVE_MPI
#include <mpi.h>
#endif
#include <opm/output/eclipse/RestartValue.hpp>
#include <opm/output/eclipse/EclipseIO.hpp>
#include <opm/output/eclipse/Summary.hpp>
#include <dune/common/parallel/mpihelper.hh>
#include <vector>
#include <map>
#include <unordered_map>
namespace Opm
{
namespace Mpi
{
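// MPI serialization helpers: packSize() computes the buffer space a value
// needs, pack() serializes it into a char buffer, and unpack() restores it.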
template<class T>
std::size_t packSize(const T*, std::size_t, Dune::MPIHelper::MPICommunicator,
std::integral_constant<bool, false>);
template<class T>
std::size_t packSize(const T*, std::size_t l, Dune::MPIHelper::MPICommunicator comm,
std::integral_constant<bool, true>);
template<class T>
std::size_t packSize(const T* data, std::size_t l, Dune::MPIHelper::MPICommunicator comm);
template<class T>
std::size_t packSize(const T&, Dune::MPIHelper::MPICommunicator,
std::integral_constant<bool, false>);
template<class T>
std::size_t packSize(const T&, Dune::MPIHelper::MPICommunicator comm,
std::integral_constant<bool, true>);
template<class T>
std::size_t packSize(const T& data, Dune::MPIHelper::MPICommunicator comm);
template<class T1, class T2>
std::size_t packSize(const std::pair<T1,T2>& data, Dune::MPIHelper::MPICommunicator comm);
template<class T>
std::size_t packSize(const std::vector<T>& data, Dune::MPIHelper::MPICommunicator comm);
std::size_t packSize(const char* str, Dune::MPIHelper::MPICommunicator comm);
std::size_t packSize(const std::string& str, Dune::MPIHelper::MPICommunicator comm);
template<class T1, class T2>
std::size_t packSize(const std::map<T1,T2>& data, Dune::MPIHelper::MPICommunicator comm);
template<class T1, class T2>
std::size_t packSize(const std::unordered_map<T1,T2>& data, Dune::MPIHelper::MPICommunicator comm);
std::size_t packSize(const data::Rates& data, Dune::MPIHelper::MPICommunicator comm);
std::size_t packSize(const data::Connection& data, Dune::MPIHelper::MPICommunicator comm);
std::size_t packSize(const data::Segment& data, Dune::MPIHelper::MPICommunicator comm);
std::size_t packSize(const data::Well& data, Dune::MPIHelper::MPICommunicator comm);
std::size_t packSize(const data::CellData& data, Dune::MPIHelper::MPICommunicator comm);
std::size_t packSize(const RestartKey& data, Dune::MPIHelper::MPICommunicator comm);
std::size_t packSize(const data::Solution& data, Dune::MPIHelper::MPICommunicator comm);
std::size_t packSize(const data::WellRates& data, Dune::MPIHelper::MPICommunicator comm);
std::size_t packSize(const RestartValue& data, Dune::MPIHelper::MPICommunicator comm);
////// pack routines
template<class T>
void pack(const T*, std::size_t, std::vector<char>&, int&,
Dune::MPIHelper::MPICommunicator, std::integral_constant<bool, false>);
template<class T>
void pack(const T* data, std::size_t l, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm, std::integral_constant<bool, true>);
template<class T>
void pack(const T* data, std::size_t l, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T>
void pack(const T&, std::vector<char>&, int&,
Dune::MPIHelper::MPICommunicator, std::integral_constant<bool, false>);
template<class T>
void pack(const T& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm, std::integral_constant<bool, true>);
template<class T>
void pack(const T& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T1, class T2>
void pack(const std::pair<T1,T2>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T>
void pack(const std::vector<T>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
void pack(const char* str, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
void pack(const std::string& str, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T1, class T2>
void pack(const std::map<T1,T2>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T1, class T2>
void pack(const std::unordered_map<T1,T2>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
void pack(const data::Rates& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
void pack(const data::Connection& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
void pack(const data::Segment& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
void pack(const data::Well& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
void pack(const RestartKey& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
void pack(const data::CellData& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
void pack(const data::Solution& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
void pack(const data::WellRates& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
void pack(const RestartValue& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
/// unpack routines
template<class T>
void unpack(T*, const std::size_t&, std::vector<char>&, int&,
Dune::MPIHelper::MPICommunicator, std::integral_constant<bool, false>);
template<class T>
void unpack(T* data, const std::size_t& l, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm,
std::integral_constant<bool, true>);
template<class T>
void unpack(T* data, const std::size_t& l, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T>
void unpack(T&, std::vector<char>&, int&,
Dune::MPIHelper::MPICommunicator, std::integral_constant<bool, false>);
template<class T>
void unpack(T& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm, std::integral_constant<bool, true>);
template<class T>
void unpack(T& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T1, class T2>
void unpack(std::pair<T1,T2>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T>
void unpack(std::vector<T>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
void unpack(char* str, std::size_t length, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
void unpack(std::string& str, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T1, class T2>
void unpack(std::map<T1,T2>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
template<class T1, class T2>
void unpack(std::unordered_map<T1,T2>& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
void unpack(data::Rates& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
void unpack(data::Connection& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
void unpack(data::Segment& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
void unpack(data::Well& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
void unpack(RestartKey& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
void unpack(data::CellData& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
void unpack(data::Solution& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
void unpack(data::WellRates& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
void unpack(RestartValue& data, std::vector<char>& buffer, int& position,
Dune::MPIHelper::MPICommunicator comm);
} // end namespace Mpi
RestartValue loadParallelRestart(const EclipseIO* eclIO, SummaryState& summaryState,
const std::vector<Opm::RestartKey>& solutionKeys,
const std::vector<Opm::RestartKey>& extraKeys,
Dune::CollectiveCommunication<Dune::MPIHelper::MPICommunicator> comm);
} // end namespace Opm
#endif // PARALLEL_RESTART_HPP
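
For reference, packing and unpacking are symmetric, so a buffer produced by
pack() can be read back with unpack(). A minimal round-trip sketch, assuming
an MPI build (without MPI the helpers are no-ops); the function name
roundTrip and the payload are made up and not part of this commit:

    #include <cassert>
    #include <vector>
    #include <dune/common/parallel/mpihelper.hh>
    #include "ParallelRestart.hpp"

    void roundTrip(Dune::MPIHelper::MPICommunicator comm)
    {
        std::vector<double> in{1.0, 2.0, 3.0};
        // Allocate exactly the space packSize() reports, then serialize.
        std::vector<char> buffer(Opm::Mpi::packSize(in, comm));
        int position = 0;
        Opm::Mpi::pack(in, buffer, position, comm);
        // Deserialization reads the element count from the stream, so the
        // output vector needs no pre-sizing.
        std::vector<double> out;
        position = 0;
        Opm::Mpi::unpack(out, buffer, position, comm);
        assert(in == out);
    }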