/*
  Copyright 2019 Equinor AS.

  This file is part of the Open Porous Media project (OPM).

  OPM is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 3 of the License, or
  (at your option) any later version.

  OPM is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with OPM.  If not, see <http://www.gnu.org/licenses/>.
*/
#include <config.h>

#if HAVE_MPI
#include <mpi.h>
#endif

#include "ParallelRestart.hpp"

#include <dune/common/parallel/mpitraits.hh>

namespace Opm
{
namespace Mpi
{

template<class T>
std::size_t packSize(const T*, std::size_t, Dune::MPIHelper::MPICommunicator,
                     std::integral_constant<bool, false>)
{
    OPM_THROW(std::logic_error, "Packing not (yet) supported for this non-pod type.");
}

template<class T>
std::size_t packSize(const T*, std::size_t l, Dune::MPIHelper::MPICommunicator comm,
                     std::integral_constant<bool, true>)
{
#if HAVE_MPI
    // Size of the length prefix plus the size of the payload itself.
    int size;
    MPI_Pack_size(1, Dune::MPITraits<std::size_t>::getType(), comm, &size);
    std::size_t totalSize = size;
    MPI_Pack_size(l, Dune::MPITraits<T>::getType(), comm, &size);
    return totalSize + size;
#else
    (void) comm;
    return l-l;
#endif
}

template<class T>
std::size_t packSize(const T* data, std::size_t l, Dune::MPIHelper::MPICommunicator comm)
{
    return packSize(data, l, comm, typename std::is_pod<T>::type());
}

template<class T>
std::size_t packSize(const T&, Dune::MPIHelper::MPICommunicator,
                     std::integral_constant<bool, false>)
{
    OPM_THROW(std::logic_error, "Packing not (yet) supported for this non-pod type.");
}

template<class T>
std::size_t packSize(const T&, Dune::MPIHelper::MPICommunicator comm,
                     std::integral_constant<bool, true>)
{
#if HAVE_MPI
    int size{};
    MPI_Pack_size(1, Dune::MPITraits<T>::getType(), comm, &size);
    return size;
#else
    (void) comm;
    return 0;
#endif
}

template<class T>
std::size_t packSize(const T& data, Dune::MPIHelper::MPICommunicator comm)
{
    return packSize(data, comm, typename std::is_pod<T>::type());
}

template<class T1, class T2>
std::size_t packSize(const std::pair<T1,T2>& data, Dune::MPIHelper::MPICommunicator comm)
{
    return packSize(data.first, comm) + packSize(data.second, comm);
}

template<class T, class A>
std::size_t packSize(const std::vector<T,A>& data, Dune::MPIHelper::MPICommunicator comm)
{
    if (std::is_pod<T>::value)
        // size written automatically
        return packSize(data.data(), data.size(), comm);

    std::size_t size = packSize(data.size(), comm);

    for (const auto& entry: data)
        size += packSize(entry, comm);

    return size;
}

std::size_t packSize(const char* str, Dune::MPIHelper::MPICommunicator comm)
{
#if HAVE_MPI
    int size;
    MPI_Pack_size(1, Dune::MPITraits<std::size_t>::getType(), comm, &size);
    int totalSize = size;
    MPI_Pack_size(strlen(str)+1, MPI_CHAR, comm, &size);
    return totalSize + size;
#else
    (void) str;
    (void) comm;
    return 0;
#endif
}

std::size_t packSize(const std::string& str, Dune::MPIHelper::MPICommunicator comm)
{
    return packSize(str.c_str(), comm);
}

template<class T1, class T2, class C, class A>
std::size_t packSize(const std::map<T1,T2,C,A>& data, Dune::MPIHelper::MPICommunicator comm)
{
    std::size_t totalSize = packSize(data.size(), comm);
    for (const auto& entry: data)
    {
        totalSize += packSize(entry, comm);
    }
    return totalSize;
}

template<class T1, class T2, class H, class P, class A>
std::size_t packSize(const std::unordered_map<T1,T2,H,P,A>& data, Dune::MPIHelper::MPICommunicator comm)
{
    std::size_t totalSize = packSize(data.size(), comm);
    for (const auto& entry: data)
    {
        totalSize += packSize(entry, comm);
    }
    return totalSize;
}
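// The generic overloads above dispatch on std::is_pod<T>: POD data goes
// straight to MPI_Pack_size/MPI_Pack/MPI_Unpack, while any non-POD type
// without an explicit overload falls through to the throwing fallback.
// The overloads below cover the concrete restart data types: flat structs
// without allocating members (data::Rates, data::Connection, data::Segment)
// are forced onto the POD path, whereas aggregates such as data::Well and
// data::CellData are handled member by member so that their strings and
// vectors go through the container overloads.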
std::size_t packSize(const data::Rates& data, Dune::MPIHelper::MPICommunicator comm)
{
    // plain struct no pointers -> treat as pod
    return packSize(data, comm, std::integral_constant<bool,true>());
}

std::size_t packSize(const data::Connection& data, Dune::MPIHelper::MPICommunicator comm)
{
    // plain struct no pointers -> treat as pod
    return packSize(data, comm, std::integral_constant<bool,true>());
}

std::size_t packSize(const data::Segment& data, Dune::MPIHelper::MPICommunicator comm)
{
    // plain struct no pointers -> treat as pod
    return packSize(data, comm, std::integral_constant<bool,true>());
}

std::size_t packSize(const data::Well& data, Dune::MPIHelper::MPICommunicator comm)
{
    std::size_t size = packSize(data.rates, comm);
    size += packSize(data.bhp, comm) + packSize(data.thp, comm);
    size += packSize(data.temperature, comm);
    size += packSize(data.control, comm);
    size += packSize(data.connections, comm);
    size += packSize(data.segments, comm);
    return size;
}

std::size_t packSize(const data::CellData& data, Dune::MPIHelper::MPICommunicator comm)
{
    return packSize(data.dim, comm) + packSize(data.data, comm) + packSize(data.target, comm);
}

std::size_t packSize(const RestartKey& data, Dune::MPIHelper::MPICommunicator comm)
{
    return packSize(data.key, comm) + packSize(data.dim, comm) + packSize(data.required, comm);
}

std::size_t packSize(const data::Solution& data, Dune::MPIHelper::MPICommunicator comm)
{
    // Needs explicit conversion to a supported base type holding the data
    // to prevent throwing.
    return packSize(static_cast<const std::map<std::string, data::CellData>&>(data), comm);
}

std::size_t packSize(const data::WellRates& data, Dune::MPIHelper::MPICommunicator comm)
{
    // Needs explicit conversion to a supported base type holding the data
    // to prevent throwing.
    return packSize(static_cast<const std::map<std::string, data::Well>&>(data), comm);
}

std::size_t packSize(const RestartValue& data, Dune::MPIHelper::MPICommunicator comm)
{
    return packSize(data.solution, comm) + packSize(data.wells, comm) + packSize(data.extra, comm);
}

////// pack routines
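// A value is serialized in two passes, as done in loadParallelRestart() at the
// end of this file: packSize() computes the number of bytes required, the
// buffer is sized accordingly, and pack() then appends the data while
// advancing 'position'. Sketch (with 'value' standing in for any supported
// type):
//
//     std::vector<char> buffer(Mpi::packSize(value, comm));
//     int position = 0;
//     Mpi::pack(value, buffer, position, comm);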
template<class T>
void pack(const T*, std::size_t, std::vector<char>&, int&,
          Dune::MPIHelper::MPICommunicator, std::integral_constant<bool, false>)
{
    OPM_THROW(std::logic_error, "Packing not (yet) supported for this non-pod type.");
}

template<class T>
void pack(const T* data, std::size_t l, std::vector<char>& buffer, int& position,
          Dune::MPIHelper::MPICommunicator comm, std::integral_constant<bool, true>)
{
#if HAVE_MPI
    MPI_Pack(&l, 1, Dune::MPITraits<std::size_t>::getType(), buffer.data(),
             buffer.size(), &position, comm);
    MPI_Pack(data, l, Dune::MPITraits<T>::getType(), buffer.data(),
             buffer.size(), &position, comm);
#else
    (void) data;
    (void) comm;
    (void) l;
    (void) buffer;
    (void) position;
#endif
}

template<class T>
void pack(const T* data, std::size_t l, std::vector<char>& buffer, int& position,
          Dune::MPIHelper::MPICommunicator comm)
{
    pack(data, l, buffer, position, comm, typename std::is_pod<T>::type());
}

template<class T>
void pack(const T&, std::vector<char>&, int&,
          Dune::MPIHelper::MPICommunicator, std::integral_constant<bool, false>)
{
    OPM_THROW(std::logic_error, "Packing not (yet) supported for this non-pod type.");
}

template<class T>
void pack(const T& data, std::vector<char>& buffer, int& position,
          Dune::MPIHelper::MPICommunicator comm, std::integral_constant<bool, true>)
{
#if HAVE_MPI
    MPI_Pack(&data, 1, Dune::MPITraits<T>::getType(), buffer.data(),
             buffer.size(), &position, comm);
#else
    (void) data;
    (void) comm;
    (void) buffer;
    (void) position;
#endif
}

template<class T>
void pack(const T& data, std::vector<char>& buffer, int& position,
          Dune::MPIHelper::MPICommunicator comm)
{
    pack(data, buffer, position, comm, typename std::is_pod<T>::type());
}

template<class T1, class T2>
void pack(const std::pair<T1,T2>& data, std::vector<char>& buffer, int& position,
          Dune::MPIHelper::MPICommunicator comm)
{
    pack(data.first, buffer, position, comm);
    pack(data.second, buffer, position, comm);
}

template<class T, class A>
void pack(const std::vector<T,A>& data, std::vector<char>& buffer, int& position,
          Dune::MPIHelper::MPICommunicator comm)
{
    if (std::is_pod<T>::value)
    {
        // size written automatically
        pack(data.data(), data.size(), buffer, position, comm);
        return;
    }

    pack(data.size(), buffer, position, comm);

    for (const auto& entry: data)
        pack(entry, buffer, position, comm);
}

void pack(const char* str, std::vector<char>& buffer, int& position,
          Dune::MPIHelper::MPICommunicator comm)
{
#if HAVE_MPI
    std::size_t length = strlen(str)+1;
    MPI_Pack(&length, 1, Dune::MPITraits<std::size_t>::getType(), buffer.data(),
             buffer.size(), &position, comm);
    MPI_Pack(str, strlen(str)+1, MPI_CHAR, buffer.data(), buffer.size(),
             &position, comm);
#else
    (void) str;
    (void) comm;
    (void) buffer;
    (void) position;
#endif
}

void pack(const std::string& str, std::vector<char>& buffer, int& position,
          Dune::MPIHelper::MPICommunicator comm)
{
    pack(str.c_str(), buffer, position, comm);
}

template<class T1, class T2, class C, class A>
void pack(const std::map<T1,T2,C,A>& data, std::vector<char>& buffer, int& position,
          Dune::MPIHelper::MPICommunicator comm)
{
    pack(data.size(), buffer, position, comm);
    for (const auto& entry: data)
    {
        pack(entry, buffer, position, comm);
    }
}

template<class T1, class T2, class H, class P, class A>
void pack(const std::unordered_map<T1,T2,H,P,A>& data, std::vector<char>& buffer, int& position,
          Dune::MPIHelper::MPICommunicator comm)
{
    pack(data.size(), buffer, position, comm);
    for (const auto& entry: data)
    {
        pack(entry, buffer, position, comm);
    }
}

void pack(const data::Rates& data, std::vector<char>& buffer, int& position,
          Dune::MPIHelper::MPICommunicator comm)
{
    // plain struct no pointers -> treat as pod
    pack(data, buffer, position, comm, std::integral_constant<bool,true>());
}

void pack(const data::Connection& data, std::vector<char>& buffer, int& position,
          Dune::MPIHelper::MPICommunicator comm)
{
    // plain struct no pointers -> treat as pod
    pack(data, buffer, position, comm, std::integral_constant<bool,true>());
}

void pack(const data::Segment& data, std::vector<char>& buffer, int& position,
          Dune::MPIHelper::MPICommunicator comm)
{
    // plain struct no pointers -> treat as pod
    pack(data, buffer, position, comm, std::integral_constant<bool,true>());
}

void pack(const data::Well& data, std::vector<char>& buffer, int& position,
          Dune::MPIHelper::MPICommunicator comm)
{
    pack(data.rates, buffer, position, comm);
    pack(data.bhp, buffer, position, comm);
    pack(data.thp, buffer, position, comm);
    pack(data.temperature, buffer, position, comm);
    pack(data.control, buffer, position, comm);
    pack(data.connections, buffer, position, comm);
    pack(data.segments, buffer, position, comm);
}

void pack(const RestartKey& data, std::vector<char>& buffer, int& position,
          Dune::MPIHelper::MPICommunicator comm)
{
    pack(data.key, buffer, position, comm);
    pack(data.dim, buffer, position, comm);
    pack(data.required, buffer, position, comm);
}

void pack(const data::CellData& data, std::vector<char>& buffer, int& position,
          Dune::MPIHelper::MPICommunicator comm)
{
    pack(data.dim, buffer, position, comm);
    pack(data.data, buffer, position, comm);
    pack(data.target, buffer, position, comm);
}

void pack(const data::Solution& data, std::vector<char>& buffer, int& position,
          Dune::MPIHelper::MPICommunicator comm)
{
    // Needs explicit conversion to a supported base type holding the data
    // to prevent throwing.
    pack(static_cast<const std::map<std::string, data::CellData>&>(data),
         buffer, position, comm);
}

void pack(const data::WellRates& data, std::vector<char>& buffer, int& position,
          Dune::MPIHelper::MPICommunicator comm)
{
    // Needs explicit conversion to a supported base type holding the data
    // to prevent throwing.
    pack(static_cast<const std::map<std::string, data::Well>&>(data),
         buffer, position, comm);
}

void pack(const RestartValue& data, std::vector<char>& buffer, int& position,
          Dune::MPIHelper::MPICommunicator comm)
{
    pack(data.solution, buffer, position, comm);
    pack(data.wells, buffer, position, comm);
    pack(data.extra, buffer, position, comm);
}

/// unpack routines
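// Each unpack() must mirror the corresponding pack() exactly: fields are read
// back in the order they were written, and vectors, strings and maps are
// preceded by their element count (a std::size_t) so the receiver can resize
// the destination before reading the payload.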
template<class T>
void unpack(T*, const std::size_t&, std::vector<char>&, int&,
            Dune::MPIHelper::MPICommunicator, std::integral_constant<bool, false>)
{
    OPM_THROW(std::logic_error, "Packing not (yet) supported for this non-pod type.");
}

template<class T>
void unpack(T* data, const std::size_t& l, std::vector<char>& buffer, int& position,
            Dune::MPIHelper::MPICommunicator comm, std::integral_constant<bool, true>)
{
#if HAVE_MPI
    MPI_Unpack(buffer.data(), buffer.size(), &position, data, l,
               Dune::MPITraits<T>::getType(), comm);
#else
    (void) data;
    (void) comm;
    (void) l;
    (void) buffer;
    (void) position;
#endif
}

template<class T>
void unpack(T* data, const std::size_t& l, std::vector<char>& buffer, int& position,
            Dune::MPIHelper::MPICommunicator comm)
{
    unpack(data, l, buffer, position, comm, typename std::is_pod<T>::type());
}

template<class T>
void unpack(T&, std::vector<char>&, int&,
            Dune::MPIHelper::MPICommunicator, std::integral_constant<bool, false>)
{
    OPM_THROW(std::logic_error, "Packing not (yet) supported for this non-pod type.");
}

template<class T>
void unpack(T& data, std::vector<char>& buffer, int& position,
            Dune::MPIHelper::MPICommunicator comm, std::integral_constant<bool, true>)
{
#if HAVE_MPI
    MPI_Unpack(buffer.data(), buffer.size(), &position, &data, 1,
               Dune::MPITraits<T>::getType(), comm);
#else
    (void) data;
    (void) comm;
    (void) buffer;
    (void) position;
#endif
}

template<class T>
void unpack(T& data, std::vector<char>& buffer, int& position,
            Dune::MPIHelper::MPICommunicator comm)
{
    unpack(data, buffer, position, comm, typename std::is_pod<T>::type());
}

template<class T1, class T2>
void unpack(std::pair<T1,T2>& data, std::vector<char>& buffer, int& position,
            Dune::MPIHelper::MPICommunicator comm)
{
    unpack(data.first, buffer, position, comm);
    unpack(data.second, buffer, position, comm);
}

template<class T, class A>
void unpack(std::vector<T,A>& data, std::vector<char>& buffer, int& position,
            Dune::MPIHelper::MPICommunicator comm)
{
    std::size_t length = 0;
    unpack(length, buffer, position, comm);
    data.resize(length);

    if (std::is_pod<T>::value)
    {
        unpack(data.data(), data.size(), buffer, position, comm);
        return;
    }

    for (auto& entry: data)
        unpack(entry, buffer, position, comm);
}

void unpack(char* str, std::size_t length, std::vector<char>& buffer, int& position,
            Dune::MPIHelper::MPICommunicator comm)
{
#if HAVE_MPI
    MPI_Unpack(buffer.data(), buffer.size(), &position, const_cast<char*>(str), length,
               MPI_CHAR, comm);
#else
    (void) str;
    (void) comm;
    (void) length;
    (void) buffer;
    (void) position;
#endif
}

void unpack(std::string& str, std::vector<char>& buffer, int& position,
            Dune::MPIHelper::MPICommunicator comm)
{
    std::size_t length = 0;
    unpack(length, buffer, position, comm);
    std::vector<char> cStr(length, '\0');
    unpack(cStr.data(), length, buffer, position, comm);
    assert(str.empty());
    str.append(cStr.data());
}

template<class T1, class T2, class C, class A>
void unpack(std::map<T1,T2,C,A>& data, std::vector<char>& buffer, int& position,
            Dune::MPIHelper::MPICommunicator comm)
{
    std::size_t size = 0;
    unpack(size, buffer, position, comm);

    for (; size > 0; size--)
    {
        std::pair<T1,T2> entry;
        unpack(entry, buffer, position, comm);
        data.insert(entry);
    }
}

template<class T1, class T2, class H, class P, class A>
void unpack(std::unordered_map<T1,T2,H,P,A>& data, std::vector<char>& buffer, int& position,
            Dune::MPIHelper::MPICommunicator comm)
{
    std::size_t size = 0;
    unpack(size, buffer, position, comm);

    for (; size > 0; size--)
    {
        std::pair<T1,T2> entry;
        unpack(entry, buffer, position, comm);
        data.insert(entry);
    }
}

void unpack(data::Rates& data, std::vector<char>& buffer, int& position,
            Dune::MPIHelper::MPICommunicator comm)
{
    // plain struct no pointers -> treat as pod
    unpack(data, buffer, position, comm, std::integral_constant<bool,true>());
}

void unpack(data::Connection& data, std::vector<char>& buffer, int& position,
            Dune::MPIHelper::MPICommunicator comm)
{
    // plain struct no pointers -> treat as pod
    unpack(data, buffer, position, comm, std::integral_constant<bool,true>());
}

void unpack(data::Segment& data, std::vector<char>& buffer, int& position,
            Dune::MPIHelper::MPICommunicator comm)
{
    // plain struct no pointers -> treat as pod
    unpack(data, buffer, position, comm, std::integral_constant<bool,true>());
}

void unpack(data::Well& data, std::vector<char>& buffer, int& position,
            Dune::MPIHelper::MPICommunicator comm)
{
    unpack(data.rates, buffer, position, comm);
    unpack(data.bhp, buffer, position, comm);
    unpack(data.thp, buffer, position, comm);
    unpack(data.temperature, buffer, position, comm);
    unpack(data.control, buffer, position, comm);
    unpack(data.connections, buffer, position, comm);
    unpack(data.segments, buffer, position, comm);
}

void unpack(RestartKey& data, std::vector<char>& buffer, int& position,
            Dune::MPIHelper::MPICommunicator comm)
{
    unpack(data.key, buffer, position, comm);
    unpack(data.dim, buffer, position, comm);
    unpack(data.required, buffer, position, comm);
}

void unpack(data::CellData& data, std::vector<char>& buffer, int& position,
            Dune::MPIHelper::MPICommunicator comm)
{
    unpack(data.dim, buffer, position, comm);
    unpack(data.data, buffer, position, comm);
    unpack(data.target, buffer, position, comm);
}

void unpack(data::Solution& data, std::vector<char>& buffer, int& position,
            Dune::MPIHelper::MPICommunicator comm)
{
    // Needs explicit conversion to a supported base type holding the data
    // to prevent throwing.
    unpack(static_cast<std::map<std::string, data::CellData>&>(data),
           buffer, position, comm);
}

void unpack(data::WellRates& data, std::vector<char>& buffer, int& position,
            Dune::MPIHelper::MPICommunicator comm)
{
    // Needs explicit conversion to a supported base type holding the data
    // to prevent throwing.
    unpack(static_cast<std::map<std::string, data::Well>&>(data),
           buffer, position, comm);
}

void unpack(RestartValue& data, std::vector<char>& buffer, int& position,
            Dune::MPIHelper::MPICommunicator comm)
{
    unpack(data.solution, buffer, position, comm);
    unpack(data.wells, buffer, position, comm);
    unpack(data.extra, buffer, position, comm);
}

} // end namespace Mpi
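// loadParallelRestart() distributes restart data with the routines above:
// only rank 0 holds an EclipseIO object and reads the restart file; it packs
// the resulting RestartValue and broadcasts first the packed size and then
// the buffer itself. All other ranks receive both broadcasts and unpack the
// same RestartValue from the buffer.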
RestartValue loadParallelRestart(const EclipseIO* eclIO, SummaryState& summaryState,
                                 const std::vector<Opm::RestartKey>& solutionKeys,
                                 const std::vector<Opm::RestartKey>& extraKeys,
                                 Dune::CollectiveCommunication<Dune::MPIHelper::MPICommunicator> comm)
{
#if HAVE_MPI
    data::Solution sol;
    data::Wells wells;
    RestartValue restartValues(sol, wells);

    if (eclIO)
    {
        assert(comm.rank() == 0);
        restartValues = eclIO->loadRestart(summaryState, solutionKeys, extraKeys);
        int packedSize = Mpi::packSize(restartValues, comm);
        std::vector<char> buffer(packedSize);
        int position = 0;
        Mpi::pack(restartValues, buffer, position, comm);
        comm.broadcast(&position, 1, 0);
        comm.broadcast(buffer.data(), position, 0);
    }
    else
    {
        int bufferSize{};
        comm.broadcast(&bufferSize, 1, 0);
        std::vector<char> buffer(bufferSize);
        comm.broadcast(buffer.data(), bufferSize, 0);
        int position{};
        Mpi::unpack(restartValues, buffer, position, comm);
    }
    return restartValues;
#else
    (void) comm;
    return eclIO->loadRestart(summaryState, solutionKeys, extraKeys);
#endif
}

} // end namespace Opm