add ParallelEclipseState and ParallelFieldProps

These are wrappers sitting on top of the EclipseState and
FieldPropsManager classes.

The former has some additional methods related to parallelism,
and the latter is a parallel frontend to the FieldPropsManager which
only holds the process-local data (in compressed arrays).
Arne Morten Kvarving 2020-01-22 10:27:36 +01:00
parent 4904f09d4d
commit 35de9fa53d
3 changed files with 515 additions and 0 deletions
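For orientation, here is a minimal sketch of the intended call sequence, pieced together from the Doxygen comments in the new header below. The deck object and the partitionGrid() helper are placeholders for illustration and are not part of this commit:

#include <opm/simulators/utils/ParallelEclipseState.hpp>
#include <vector>

// Hypothetical driver code; 'deck' and 'partitionGrid' stand in for
// the simulator's real deck object and grid partitioning step.
Opm::ParallelEclipseState eclState(deck); // the deck is only read on the root process

// While the grid is read and partitioned, the global (root-only)
// field properties are in effect.
std::vector<int> localToGlobal = partitionGrid(eclState); // hypothetical helper

// Distribute the properties and switch every rank over to the
// process-local, compressed views.
eclState.setupLocalProps(localToGlobal);
eclState.switchToDistributedProps();

// From here on, fieldProps() returns process-local data on all ranks.
const auto& poro = eclState.fieldProps().get_double("PORO");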

CMakeLists.txt

@@ -37,6 +37,7 @@ list (APPEND MAIN_SOURCE_FILES
opm/simulators/utils/DeferredLogger.cpp
opm/simulators/utils/gatherDeferredLogger.cpp
opm/simulators/utils/moduleVersion.cpp
opm/simulators/utils/ParallelEclipseState.cpp
opm/simulators/utils/ParallelRestart.cpp
opm/simulators/wells/VFPProdProperties.cpp
opm/simulators/wells/VFPInjProperties.cpp
@@ -176,6 +177,7 @@ list (APPEND PUBLIC_HEADER_FILES
opm/simulators/utils/DeferredLogger.hpp
opm/simulators/utils/gatherDeferredLogger.hpp
opm/simulators/utils/moduleVersion.hpp
opm/simulators/utils/ParallelEclipseState.hpp
opm/simulators/utils/ParallelRestart.hpp
opm/simulators/wells/PerforationData.hpp
opm/simulators/wells/RateConverter.hpp

opm/simulators/utils/ParallelEclipseState.cpp

@@ -0,0 +1,354 @@
/*
Copyright 2019 Equinor AS.
This file is part of the Open Porous Media project (OPM).
OPM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OPM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#include <config.h>
#include "ParallelEclipseState.hpp"
#include "ParallelRestart.hpp"
#include <ebos/eclmpiserializer.hh>
namespace Opm {
ParallelFieldPropsManager::ParallelFieldPropsManager(FieldPropsManager& manager)
: m_manager(manager)
, m_comm(Dune::MPIHelper::getCollectiveCommunication())
{
}
std::vector<int> ParallelFieldPropsManager::actnum() const
{
if (m_comm.rank() == 0)
return m_manager.actnum();
return {};
}
void ParallelFieldPropsManager::reset_actnum(const std::vector<int>& actnum)
{
if (m_comm.rank() != 0)
OPM_THROW(std::runtime_error, "reset_actnum should only be called on root process.");
m_manager.reset_actnum(actnum);
}
std::vector<double> ParallelFieldPropsManager::porv(bool global) const
{
std::vector<double> result;
if (m_comm.rank() == 0)
result = m_manager.porv(global);
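// Two-step broadcast: the size goes first so that non-root ranks can
// allocate the buffer, then the data itself. Non-root ranks enter
// with an empty vector and return the result received from root.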
size_t size = result.size();
m_comm.broadcast(&size, 1, 0);
result.resize(size);
m_comm.broadcast(result.data(), size, 0);
return result;
}
const std::vector<int>& ParallelFieldPropsManager::get_int(const std::string& keyword) const
{
auto it = m_intProps.find(keyword);
if (it == m_intProps.end())
OPM_THROW(std::runtime_error, "No integer property field: " + keyword);
return it->second;
}
std::vector<int> ParallelFieldPropsManager::get_global_int(const std::string& keyword) const
{
std::vector<int> result;
if (m_comm.rank() == 0)
result = m_manager.get_global_int(keyword);
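// Same size-then-data broadcast as in porv(): only root can evaluate
// the property, all other ranks receive a copy.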
size_t size = result.size();
m_comm.broadcast(&size, 1, 0);
result.resize(size);
m_comm.broadcast(result.data(), size, 0);
return result;
}
const std::vector<double>& ParallelFieldPropsManager::get_double(const std::string& keyword) const
{
auto it = m_doubleProps.find(keyword);
if (it == m_doubleProps.end())
OPM_THROW(std::runtime_error, "No double property field: " + keyword);
return it->second;
}
std::vector<double> ParallelFieldPropsManager::get_global_double(const std::string& keyword) const
{
std::vector<double> result;
if (m_comm.rank() == 0)
result = m_manager.get_global_double(keyword);
size_t size = result.size();
m_comm.broadcast(&size, 1, 0);
result.resize(size);
m_comm.broadcast(result.data(), size, 0);
return result;
}
bool ParallelFieldPropsManager::has_int(const std::string& keyword) const
{
auto it = m_intProps.find(keyword);
return it != m_intProps.end();
}
bool ParallelFieldPropsManager::has_double(const std::string& keyword) const
{
auto it = m_doubleProps.find(keyword);
return it != m_doubleProps.end();
}
ParallelEclipseState::ParallelEclipseState()
: m_fieldProps(field_props)
{
}
ParallelEclipseState::ParallelEclipseState(const Deck& deck)
: EclipseState(deck)
, m_fieldProps(field_props)
{
}
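// packSize/pack/unpack serialize the subset of EclipseState members
// that every rank needs. The field properties themselves are
// distributed separately through setupLocalProps().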
std::size_t ParallelEclipseState::packSize(EclMpiSerializer& serializer) const
{
return serializer.packSize(m_tables) +
serializer.packSize(m_runspec) +
serializer.packSize(m_eclipseConfig) +
serializer.packSize(m_deckUnitSystem) +
serializer.packSize(m_inputNnc) +
serializer.packSize(m_inputEditNnc) +
serializer.packSize(m_gridDims) +
serializer.packSize(m_simulationConfig) +
serializer.packSize(m_transMult) +
serializer.packSize(m_faults) +
serializer.packSize(m_title);
}
void ParallelEclipseState::pack(std::vector<char>& buffer, int& position,
EclMpiSerializer& serializer) const
{
serializer.pack(m_tables, buffer, position);
serializer.pack(m_runspec, buffer, position);
serializer.pack(m_eclipseConfig, buffer, position);
serializer.pack(m_deckUnitSystem, buffer, position);
serializer.pack(m_inputNnc, buffer, position);
serializer.pack(m_inputEditNnc, buffer, position);
serializer.pack(m_gridDims, buffer, position);
serializer.pack(m_simulationConfig, buffer, position);
serializer.pack(m_transMult, buffer, position);
serializer.pack(m_faults, buffer, position);
serializer.pack(m_title, buffer, position);
}
void ParallelEclipseState::unpack(std::vector<char>& buffer, int& position,
EclMpiSerializer& serializer)
{
serializer.unpack(m_tables, buffer, position);
serializer.unpack(m_runspec, buffer, position);
serializer.unpack(m_eclipseConfig, buffer, position);
serializer.unpack(m_deckUnitSystem, buffer, position);
serializer.unpack(m_inputNnc, buffer, position);
serializer.unpack(m_inputEditNnc, buffer, position);
serializer.unpack(m_gridDims, buffer, position);
serializer.unpack(m_simulationConfig, buffer, position);
serializer.unpack(m_transMult, buffer, position);
serializer.unpack(m_faults, buffer, position);
serializer.unpack(m_title, buffer, position);
}
const FieldPropsManager& ParallelEclipseState::fieldProps() const
{
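// Before the switch to distributed properties only root holds valid
// data; in serial runs, or while still in global mode, fall back to
// the base-class (global) manager.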
if (!m_parProps && Dune::MPIHelper::getCollectiveCommunication().rank() != 0)
OPM_THROW(std::runtime_error, "Attempt to access field properties on a non-root process before switching to parallel properties");
if (!m_parProps || Dune::MPIHelper::getCollectiveCommunication().size() == 1)
return this->EclipseState::fieldProps();
return m_fieldProps;
}
const FieldPropsManager& ParallelEclipseState::globalFieldProps() const
{
if (Dune::MPIHelper::getCollectiveCommunication().rank() != 0)
OPM_THROW(std::runtime_error, "Attempt to access global field properties on non-root process");
return this->EclipseState::globalFieldProps();
}
const EclipseGrid& ParallelEclipseState::getInputGrid() const
{
if (Dune::MPIHelper::getCollectiveCommunication().rank() != 0)
OPM_THROW(std::runtime_error, "Attempt to access eclipse grid on non-root process");
return this->EclipseState::getInputGrid();
}
void ParallelEclipseState::switchToGlobalProps()
{
m_parProps = false;
}
void ParallelEclipseState::switchToDistributedProps()
{
const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
if (comm.size() == 1) // No need for the parallel frontend
return;
m_parProps = true;
}
namespace {
template<class T>
struct GetField {
GetField(const FieldPropsManager& propMan) : props(propMan) {}
std::vector<T> getField(const std::string& key) const;
const FieldPropsManager& props;
};
template<>
std::vector<int> GetField<int>::getField(const std::string& key) const {
return props.get_global_int(key);
}
template<>
std::vector<double> GetField<double>::getField(const std::string& key) const {
return props.get_global_double(key);
}
template<class T>
void extractRootProps(const std::vector<int>& localToGlobal,
const std::vector<std::string>& keys,
const GetField<T>& getter,
std::map<std::string,std::vector<T>>& localMap)
{
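// For each keyword, fetch the complete global vector (available on
// root only) and compress it down to the cells this rank owns.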
for (const std::string& key : keys) {
auto prop = getter.getField(key);
std::vector<T>& local = localMap[key];
local.reserve(localToGlobal.size());
for (int cell : localToGlobal) {
local.push_back(prop[cell]);
}
}
}
template<class T>
void packProps(const std::vector<int>& l2gCell,
const std::vector<std::string>& keys,
const GetField<T>& getter,
std::vector<char>& buffer, int& position)
{
const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
std::vector<T> sendData(l2gCell.size());
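// For each keyword, compress the global vector down to the
// destination rank's cells and append it to the MPI buffer.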
for (const std::string& key : keys) {
auto prop = getter.getField(key);
size_t idx = 0;
for (int cell : l2gCell)
sendData[idx++] = prop[cell];
Mpi::pack(sendData, buffer, position, comm);
}
}
}
void ParallelEclipseState::setupLocalProps(const std::vector<int>& localToGlobal)
{
#if HAVE_MPI
const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
if (comm.rank() == 0) {
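// Root first extracts its own local properties directly, then serves
// the other ranks one by one: receive a rank's local-to-global map,
// compress all global properties accordingly, and send back a single
// packed buffer.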
extractRootProps(localToGlobal, this->globalFieldProps().keys<int>(),
GetField<int>(this->globalFieldProps()),
m_fieldProps.m_intProps);
extractRootProps(localToGlobal, this->globalFieldProps().keys<double>(),
GetField<double>(this->globalFieldProps()),
m_fieldProps.m_doubleProps);
for (int i = 1; i < comm.size(); ++i) {
std::vector<int> l2gCell;
size_t size;
MPI_Recv(&size, 1, Dune::MPITraits<size_t>::getType(), i, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
l2gCell.resize(size);
MPI_Recv(l2gCell.data(), size, MPI_INT, i, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
size_t cells = l2gCell.size();
const auto& intKeys = this->globalFieldProps().keys<int>();
const auto& dblKeys = this->globalFieldProps().keys<double>();
size = Mpi::packSize(intKeys, comm) +
Mpi::packSize(dblKeys, comm) +
intKeys.size() * Mpi::packSize(std::vector<int>(cells), comm) +
dblKeys.size() * Mpi::packSize(std::vector<double>(cells), comm);
std::vector<char> buffer(size);
int position = 0;
Mpi::pack(intKeys, buffer, position, comm);
Mpi::pack(dblKeys, buffer, position, comm);
packProps(l2gCell, intKeys, GetField<int>(this->globalFieldProps()),
buffer, position);
packProps(l2gCell, dblKeys, GetField<double>(this->globalFieldProps()),
buffer, position);
MPI_Send(&position, 1, MPI_INT, i, 0, MPI_COMM_WORLD);
MPI_Send(buffer.data(), position, MPI_CHAR, i, 0, MPI_COMM_WORLD);
}
} else {
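// Non-root ranks send their local-to-global map to root, then receive
// the packed buffer and unpack the key lists and property arrays into
// the local maps.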
size_t l2gSize = localToGlobal.size();
MPI_Send(&l2gSize, 1, Dune::MPITraits<size_t>::getType(), 0, 0, MPI_COMM_WORLD);
MPI_Send(localToGlobal.data(), localToGlobal.size(), MPI_INT, 0, 0, MPI_COMM_WORLD);
int size;
MPI_Recv(&size, 1, MPI_INT, 0, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
std::vector<char> buffer(size);
MPI_Recv(buffer.data(), size, MPI_CHAR, 0, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
std::vector<std::string> intKeys, dblKeys;
int position = 0;
Mpi::unpack(intKeys, buffer, position, comm);
Mpi::unpack(dblKeys, buffer, position, comm);
for (const std::string& key : intKeys) {
Mpi::unpack(m_fieldProps.m_intProps[key], buffer, position, comm);
}
for (const std::string& key : dblKeys) {
Mpi::unpack(m_fieldProps.m_doubleProps[key], buffer, position, comm);
}
}
#endif
}
} // end namespace Opm

opm/simulators/utils/ParallelEclipseState.hpp

@@ -0,0 +1,159 @@
/*
Copyright 2019 Equinor AS.
This file is part of the Open Porous Media project (OPM).
OPM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OPM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef PARALLEL_ECLIPSE_STATE_HPP
#define PARALLEL_ECLIPSE_STATE_HPP
#include <opm/parser/eclipse/EclipseState/EclipseState.hpp>
#include <dune/common/parallel/mpihelper.hh>
namespace Opm {
class EclMpiSerializer;
/*! \brief Parallel frontend to the field properties.
*
* \details This is a parallel frontend to the MPI-unaware
* FieldPropsManager in opm-common. It holds the
* process-local field properties on each process,
* using compressed indexing.
*/
class ParallelFieldPropsManager : public FieldPropsManager {
public:
friend class ParallelEclipseState; //!< Friend so that the properties can be set up.
//! \brief Constructor.
//! \param manager The field property manager to wrap.
ParallelFieldPropsManager(FieldPropsManager& manager);
//! \brief Returns actnum vector.
//! \details If called on a non-root process, an empty vector is returned.
std::vector<int> actnum() const override;
//! \brief Reset the actnum vector.
//! \details Can only be called on the root process.
void reset_actnum(const std::vector<int>& actnum) override;
//! \brief Returns the pore volume vector.
std::vector<double> porv(bool global = false) const override;
//! \brief Returns an int property using compressed indices.
//! \param keyword Name of property
const std::vector<int>& get_int(const std::string& keyword) const override;
//! \brief Returns a double property using compressed indices.
//! \param keyword Name of property
const std::vector<double>& get_double(const std::string& keyword) const override;
//! \brief Returns an int property using global cartesian indices.
//! \param keyword Name of property
//! \details The vector is broadcast from root process
std::vector<int> get_global_int(const std::string& keyword) const override;
//! \brief Returns a double property using global cartesian indices.
//! \param keyword Name of property
//! \details The vector is broadcast from root process
std::vector<double> get_global_double(const std::string& keyword) const override;
//! \brief Check if an integer property is available.
//! \param keyword Name of property
bool has_int(const std::string& keyword) const override;
//! \brief Check if a double property is available.
//! \param keyword Name of property
bool has_double(const std::string& keyword) const override;
protected:
std::map<std::string, std::vector<int>> m_intProps; //!< Map of integer properties in process-local compressed indices.
std::map<std::string, std::vector<double>> m_doubleProps; //!< Map of double properties in process-local compressed indices.
FieldPropsManager& m_manager; //!< Underlying field property manager (only used on root process).
Dune::CollectiveCommunication<Dune::MPIHelper::MPICommunicator> m_comm; //!< Collective communication handler.
};
/*! \brief Parallel frontend to the EclipseState
*
* \details This is a parallel frontend to the MPI-unaware EclipseState in opm-common.
* It extends EclipseState with serialization support, and
* contains methods to switch between the full global field properties
* and distributed field properties for consumption in the simulator.
* Additionally, it has a few sanity checks to ensure that data that is
* only available on the root process is not accessed from non-root
* processes.
*/
class ParallelEclipseState : public EclipseState {
public:
//! \brief Default constructor.
ParallelEclipseState();
//! \brief Construct from a deck instance.
//! \param deck The deck to construct from
//! \details Only called on root process
ParallelEclipseState(const Deck& deck);
//! \brief Calculates the size of serialized data.
//! \param serializer The serializer to use
std::size_t packSize(EclMpiSerializer& serializer) const;
//! \brief Serialize data.
//! \param buffer Buffer to write serialized data into
//! \param position Position in buffer
//! \param serializer The serializer to use
void pack(std::vector<char>& buffer, int& position, EclMpiSerializer& serializer) const;
//! \brief Deserialize data.
//! \param buffer Buffer to read serialized data from
//! \param position Position in buffer
//! \param serializer The serializer to use
void unpack(std::vector<char>& buffer, int& position, EclMpiSerializer& serializer);
//! \brief Switch to global field properties.
//! \details Called on root process to use the global field properties
void switchToGlobalProps();
//! \brief Switch to distributed field properties.
//! \details Called on root process to use the distributed field properties.
//! setupLocalProps must be called prior to this.
void switchToDistributedProps();
//! \brief Setup local properties.
//! \param localToGlobal Map from local cells on the calling process to global cartesian cell indices
//! \details Must be called after the grid has been partitioned
void setupLocalProps(const std::vector<int>& localToGlobal);
//! \brief Returns a const ref to current field properties.
const FieldPropsManager& fieldProps() const override;
//! \brief Returns a const ref to global field properties.
//! \details Can only be called on root process.
const FieldPropsManager& globalFieldProps() const override;
//! \brief Returns a const ref to the eclipse grid.
//! \details Can only be called on root process.
const EclipseGrid& getInputGrid() const override;
private:
bool m_parProps = false; //!< True if the distributed field properties are in use.
ParallelFieldPropsManager m_fieldProps; //!< The parallel field properties
};
} // end namespace Opm
#endif // PARALLEL_ECLIPSE_STATE_HPP