/*
Copyright 2019 Equinor AS.
This file is part of the Open Porous Media project (OPM).
OPM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OPM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#include <config.h>
#include "ParallelEclipseState.hpp"
#include "ParallelRestart.hpp"
#include <dune/common/parallel/mpihelper.hh>
#include <dune/common/parallel/mpitraits.hh>
namespace Opm {
ParallelFieldPropsManager::ParallelFieldPropsManager(FieldPropsManager& manager)
    : m_manager(manager)
    , m_comm(Dune::MPIHelper::getCollectiveCommunication())
{
}

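// Only the root process holds a fully populated FieldPropsManager;
// all other ranks return an empty ACTNUM vector.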
std::vector<int> ParallelFieldPropsManager::actnum() const
{
    if (m_comm.rank() == 0)
        return m_manager.actnum();

    return {};
}

void ParallelFieldPropsManager::reset_actnum(const std::vector<int>& actnum)
{
    if (m_comm.rank() != 0)
        OPM_THROW(std::runtime_error, "reset_actnum should only be called on root process.");
    m_manager.reset_actnum(actnum);
}

std::vector<double> ParallelFieldPropsManager::porv(bool global) const
{
    std::vector<double> result;
    if (m_comm.rank() == 0)
        result = m_manager.porv(global);
    size_t size = result.size();
    m_comm.broadcast(&size, 1, 0);
    result.resize(size);
    m_comm.broadcast(result.data(), size, 0);

    return result;
}

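// get_int/get_double only consult the locally cached, distributed property
// arrays, which are filled by ParallelEclipseState::setupLocalProps().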
const std::vector<int>& ParallelFieldPropsManager::get_int(const std::string& keyword) const
{
    auto it = m_intProps.find(keyword);
    if (it == m_intProps.end())
        OPM_THROW(std::runtime_error, "No integer property field: " + keyword);

    return it->second;
}

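// The get_global_* variants follow the same query-on-root-then-broadcast
// pattern as porv().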
std::vector<int> ParallelFieldPropsManager::get_global_int(const std::string& keyword) const
{
    std::vector<int> result;
    if (m_comm.rank() == 0)
        result = m_manager.get_global_int(keyword);
    size_t size = result.size();
    m_comm.broadcast(&size, 1, 0);
    result.resize(size);
    m_comm.broadcast(result.data(), size, 0);

    return result;
}

const std::vector<double>& ParallelFieldPropsManager::get_double(const std::string& keyword) const
{
    auto it = m_doubleProps.find(keyword);
    if (it == m_doubleProps.end())
        OPM_THROW(std::runtime_error, "No double property field: " + keyword);

    return it->second;
}

std::vector<double> ParallelFieldPropsManager::get_global_double(const std::string& keyword) const
{
    std::vector<double> result;
    if (m_comm.rank() == 0)
        result = m_manager.get_global_double(keyword);
    size_t size = result.size();
    m_comm.broadcast(&size, 1, 0);
    result.resize(size);
    m_comm.broadcast(result.data(), size, 0);

    return result;
}

bool ParallelFieldPropsManager::has_int(const std::string& keyword) const
{
    auto it = m_intProps.find(keyword);
    return it != m_intProps.end();
}

bool ParallelFieldPropsManager::has_double(const std::string& keyword) const
{
    auto it = m_doubleProps.find(keyword);
    return it != m_doubleProps.end();
}

ParallelEclipseState::ParallelEclipseState()
    : m_fieldProps(field_props)
{
}

ParallelEclipseState::ParallelEclipseState(const Deck& deck)
    : EclipseState(deck)
    , m_fieldProps(field_props)
{
}

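// packSize(), pack() and unpack() must visit exactly the same members in
// exactly the same order for the serialization to stay consistent.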
#if HAVE_MPI
std::size_t ParallelEclipseState::packSize(EclMpiSerializer& serializer) const
{
    return serializer.packSize(m_tables) +
           serializer.packSize(m_runspec) +
           serializer.packSize(m_eclipseConfig) +
           serializer.packSize(m_deckUnitSystem) +
           serializer.packSize(m_inputNnc) +
           serializer.packSize(m_inputEditNnc) +
           serializer.packSize(m_gridDims) +
           serializer.packSize(m_simulationConfig) +
           serializer.packSize(m_transMult) +
           serializer.packSize(m_faults) +
           serializer.packSize(m_title);
}

void ParallelEclipseState::pack(std::vector<char>& buffer, int& position,
                                EclMpiSerializer& serializer) const
{
    serializer.pack(m_tables, buffer, position);
    serializer.pack(m_runspec, buffer, position);
    serializer.pack(m_eclipseConfig, buffer, position);
    serializer.pack(m_deckUnitSystem, buffer, position);
    serializer.pack(m_inputNnc, buffer, position);
    serializer.pack(m_inputEditNnc, buffer, position);
    serializer.pack(m_gridDims, buffer, position);
    serializer.pack(m_simulationConfig, buffer, position);
    serializer.pack(m_transMult, buffer, position);
    serializer.pack(m_faults, buffer, position);
    serializer.pack(m_title, buffer, position);
}

void ParallelEclipseState::unpack(std::vector<char>& buffer, int& position,
                                  EclMpiSerializer& serializer)
{
    serializer.unpack(m_tables, buffer, position);
    serializer.unpack(m_runspec, buffer, position);
    serializer.unpack(m_eclipseConfig, buffer, position);
    serializer.unpack(m_deckUnitSystem, buffer, position);
    serializer.unpack(m_inputNnc, buffer, position);
    serializer.unpack(m_inputEditNnc, buffer, position);
    serializer.unpack(m_gridDims, buffer, position);
    serializer.unpack(m_simulationConfig, buffer, position);
    serializer.unpack(m_transMult, buffer, position);
    serializer.unpack(m_faults, buffer, position);
    serializer.unpack(m_title, buffer, position);
}

#endif
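// Before switchToDistributedProps() has been called, only the root process
// may access field properties; afterwards (in parallel runs) the distributed
// ParallelFieldPropsManager frontend is returned instead.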
const FieldPropsManager& ParallelEclipseState::fieldProps() const
{
    if (!m_parProps && Dune::MPIHelper::getCollectiveCommunication().rank() != 0)
        OPM_THROW(std::runtime_error, "Attempt to access field properties on non-root process before switch to parallel properties");

    if (!m_parProps || Dune::MPIHelper::getCollectiveCommunication().size() == 1)
        return this->EclipseState::fieldProps();

    return m_fieldProps;
}

const FieldPropsManager& ParallelEclipseState::globalFieldProps() const
{
    if (Dune::MPIHelper::getCollectiveCommunication().rank() != 0)
        OPM_THROW(std::runtime_error, "Attempt to access global field properties on non-root process");
    return this->EclipseState::globalFieldProps();
}

const EclipseGrid& ParallelEclipseState::getInputGrid() const
{
    if (Dune::MPIHelper::getCollectiveCommunication().rank() != 0)
        OPM_THROW(std::runtime_error, "Attempt to access eclipse grid on non-root process");
    return this->EclipseState::getInputGrid();
}

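// Toggle between the global (root-only) property storage and the distributed
// parallel frontend. In serial runs the global storage is always used.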
void ParallelEclipseState::switchToGlobalProps()
{
    m_parProps = false;
}

void ParallelEclipseState::switchToDistributedProps()
{
    const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
    if (comm.size() == 1) // No need for the parallel frontend
        return;
    m_parProps = true;
}

#if HAVE_MPI
namespace {
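// Helper that dispatches to the correct typed FieldPropsManager getter
// through explicit specialization.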
template<class T>
struct GetField {
    GetField(const FieldPropsManager& propMan) : props(propMan) {}
    std::vector<T> getField(const std::string& key) const;
    const FieldPropsManager& props;
};

template<>
std::vector<int> GetField<int>::getField(const std::string& key) const {
    return props.get_global_int(key);
}

template<>
std::vector<double> GetField<double>::getField(const std::string& key) const {
    return props.get_global_double(key);
}

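// Copy the root process' share of each global property into the local map,
// following the local-to-global cell index mapping.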
template<class T>
void extractRootProps(const std::vector<int>& localToGlobal,
                      const std::vector<std::string>& keys,
                      const GetField<T>& getter,
                      std::map<std::string, std::vector<T>>& localMap)
{
    for (const std::string& key : keys) {
        auto prop = getter.getField(key);
        std::vector<T>& local = localMap[key];
        local.reserve(localToGlobal.size());
        for (int cell : localToGlobal) {
            local.push_back(prop[cell]);
        }
    }
}

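// Pack another rank's share of each property into an MPI send buffer,
// again following that rank's local-to-global cell mapping.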
template<class T>
void packProps(const std::vector<int>& l2gCell,
               const std::vector<std::string>& keys,
               const GetField<T>& getter,
               std::vector<char>& buffer, int& position)
{
    const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
    std::vector<T> sendData(l2gCell.size());
    for (const std::string& key : keys) {
        auto prop = getter.getField(key);
        size_t idx = 0;
        for (int cell : l2gCell)
            sendData[idx++] = prop[cell];
        Mpi::pack(sendData, buffer, position, comm);
    }
}

} // anonymous namespace

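// Distributes the global field properties: the root rank extracts its own
// local values directly, then serves the other ranks one by one (receive a
// rank's local-to-global mapping, pack the key lists and value slices into a
// buffer sized via Mpi::packSize, send them back). Every other rank sends its
// mapping, receives the buffer and unpacks the keys and property arrays.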
void ParallelEclipseState::setupLocalProps(const std::vector<int>& localToGlobal)
{
    const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
    if (comm.rank() == 0) {
        extractRootProps(localToGlobal, this->globalFieldProps().keys<int>(),
                         GetField<int>(this->globalFieldProps()),
                         m_fieldProps.m_intProps);
        extractRootProps(localToGlobal, this->globalFieldProps().keys<double>(),
                         GetField<double>(this->globalFieldProps()),
                         m_fieldProps.m_doubleProps);
        for (int i = 1; i < comm.size(); ++i) {
            std::vector<int> l2gCell;
            size_t size;
            MPI_Recv(&size, 1, Dune::MPITraits<size_t>::getType(), i, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
            l2gCell.resize(size);
            MPI_Recv(l2gCell.data(), size, MPI_INT, i, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
            size_t cells = l2gCell.size();
            const auto& intKeys = this->globalFieldProps().keys<int>();
            const auto& dblKeys = this->globalFieldProps().keys<double>();
            size = Mpi::packSize(intKeys, comm) +
                   Mpi::packSize(dblKeys, comm) +
                   intKeys.size() * Mpi::packSize(std::vector<int>(cells), comm) +
                   dblKeys.size() * Mpi::packSize(std::vector<double>(cells), comm);
            std::vector<char> buffer(size);
            int position = 0;
            Mpi::pack(intKeys, buffer, position, comm);
            Mpi::pack(dblKeys, buffer, position, comm);
            packProps(l2gCell, intKeys, GetField<int>(this->globalFieldProps()),
                      buffer, position);
            packProps(l2gCell, dblKeys, GetField<double>(this->globalFieldProps()),
                      buffer, position);
            MPI_Send(&position, 1, MPI_INT, i, 0, MPI_COMM_WORLD);
            MPI_Send(buffer.data(), position, MPI_CHAR, i, 0, MPI_COMM_WORLD);
        }
    } else {
        size_t l2gSize = localToGlobal.size();
        MPI_Send(&l2gSize, 1, Dune::MPITraits<size_t>::getType(), 0, 0, MPI_COMM_WORLD);
        MPI_Send(localToGlobal.data(), localToGlobal.size(), MPI_INT, 0, 0, MPI_COMM_WORLD);
        int size;
        MPI_Recv(&size, 1, MPI_INT, 0, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
        std::vector<char> buffer(size);
        MPI_Recv(buffer.data(), size, MPI_CHAR, 0, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
        std::vector<std::string> intKeys, dblKeys;
        int position = 0;
        Mpi::unpack(intKeys, buffer, position, comm);
        Mpi::unpack(dblKeys, buffer, position, comm);
        for (const std::string& key : intKeys) {
            Mpi::unpack(m_fieldProps.m_intProps[key], buffer, position, comm);
        }
        for (const std::string& key : dblKeys) {
            Mpi::unpack(m_fieldProps.m_doubleProps[key], buffer, position, comm);
        }
    }
}

#endif
} // end namespace Opm