/*
Copyright 2019 Equinor AS.
This file is part of the Open Porous Media project (OPM).
OPM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OPM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#include <config.h>

#include <opm/simulators/utils/MPIPacker.hpp>

#include <opm/common/utility/TimeService.hpp>

#include <dune/common/parallel/mpitraits.hh>

#include <algorithm>
#include <bitset>
#include <cstddef>
#include <ctime>
#include <limits>
#include <stdexcept>
#include <string>
#include <vector>
#include <opm/simulators/flow/FIPContainer.hpp> // for FIPConfig::OutputField (header path assumed)
namespace Opm::Mpi::detail {
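
// Return the number of bytes remaining in the buffer after 'position',
// clamped to the largest value an int can hold, since MPI_Pack/MPI_Unpack
// take their buffer sizes as int.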
std::size_t mpi_buffer_size(const std::size_t bufsize, const std::size_t position)
{
if (bufsize < position) {
throw std::invalid_argument("Buffer size should never be less than position!");
}
    return std::min(bufsize - position,
                    static_cast<std::size_t>(std::numeric_limits<int>::max()));
}
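
// A std::bitset<Size> is transported through its unsigned long long
// representation and delegated to the plain-type packer.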
template<std::size_t Size>
std::size_t Packing<false,std::bitset<Size>>::
packSize(const std::bitset<Size>& data,
Parallel::MPIComm comm)
{
    return Packing<true,unsigned long long>::packSize(data.to_ullong(), comm);
}
template<std::size_t Size>
void Packing<false,std::bitset<Size>>::
pack(const std::bitset<Size>& data,
     std::vector<char>& buffer,
std::size_t& position,
Parallel::MPIComm comm)
{
    Packing<true,unsigned long long>::pack(data.to_ullong(), buffer, position, comm);
}
template<std::size_t Size>
void Packing<false,std::bitset<Size>>::
unpack(std::bitset<Size>& data,
       const std::vector<char>& buffer,
std::size_t& position,
Parallel::MPIComm comm)
{
unsigned long long d;
    Packing<true,unsigned long long>::unpack(d, buffer, position, comm);
    data = std::bitset<Size>(d);
}
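
// A std::string is packed as its length (std::size_t) followed by its raw
// characters.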
std::size_t Packing<false,std::string>::
packSize(const std::string& data, Parallel::MPIComm comm)
{
int size;
    MPI_Pack_size(1, Dune::MPITraits<std::size_t>::getType(), comm, &size);
int totalSize = size;
MPI_Pack_size(data.size(), MPI_CHAR, comm, &size);
return totalSize + size;
}
void Packing<false,std::string>::
pack(const std::string& data,
     std::vector<char>& buffer,
std::size_t& position,
Parallel::MPIComm comm)
{
std::size_t length = data.size();
int int_position = 0;
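    // MPI works with int offsets; accumulate into a local int and add it to
    // the std::size_t position afterwards.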
    MPI_Pack(&length, 1, Dune::MPITraits<std::size_t>::getType(), buffer.data()+position,
mpi_buffer_size(buffer.size(), position), &int_position, comm);
MPI_Pack(data.data(), length, MPI_CHAR, buffer.data()+position, mpi_buffer_size(buffer.size(), position),
&int_position, comm);
position += int_position;
}
void Packing<false,std::string>::
unpack(std::string& data,
       const std::vector<char>& buffer,
std::size_t& position,
Opm::Parallel::MPIComm comm)
{
std::size_t length = 0;
int int_position = 0;
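    // Recover the length first, then read the characters into a
    // NUL-terminated temporary before rebuilding the string.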
MPI_Unpack(buffer.data()+position, mpi_buffer_size(buffer.size(), position), &int_position, &length, 1,
               Dune::MPITraits<std::size_t>::getType(), comm);
    std::vector<char> cStr(length+1, '\0');
MPI_Unpack(buffer.data()+position, mpi_buffer_size(buffer.size(), position), &int_position, cStr.data(), length,
MPI_CHAR, comm);
position += int_position;
data.clear();
data.append(cStr.data(), length);
}
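
// A time_point is converted to std::time_t for transport and converted back
// on unpacking.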
std::size_t Packing<false,time_point>::
packSize(const time_point&, Opm::Parallel::MPIComm comm)
{
    return Packing<true,std::time_t>::packSize(std::time_t(), comm);
}
void Packing<false,time_point>::
pack(const time_point& data,
     std::vector<char>& buffer,
std::size_t& position,
Parallel::MPIComm comm)
{
    Packing<true,std::time_t>::pack(TimeService::to_time_t(data),
buffer, position, comm);
}
void Packing<false,time_point>::
unpack(time_point& data,
       const std::vector<char>& buffer,
std::size_t& position,
Parallel::MPIComm comm)
{
std::time_t res;
    Packing<true,std::time_t>::unpack(res, buffer, position, comm);
data = TimeService::from_time_t(res);
}
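
// Explicit instantiations for the bitset widths used elsewhere in the code
// base; the concrete sizes below are assumptions.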
template struct Packing<false,std::bitset<3>>;
template struct Packing<false,std::bitset<4>>;
template struct Packing<false,std::bitset<10>>;
constexpr int NumFip = static_cast<int>(FIPConfig::OutputField::NUM_FIP_REPORT);
template struct Packing<false,std::bitset<NumFip>>;
} // end namespace Opm::Mpi::detail