Merge pull request #2373 from akva2/fix_build_no_mpi

fix build without MPI
This commit is contained in:
Arne Morten Kvarving
2020-02-28 13:04:03 +01:00
committed by GitHub
4 changed files with 11 additions and 3 deletions

View File

@@ -208,6 +208,7 @@ public:
cartesianIndexMapper_.reset(new CartesianIndexMapper(*grid_)); cartesianIndexMapper_.reset(new CartesianIndexMapper(*grid_));
this->updateGridView_(); this->updateGridView_();
#if HAVE_MPI
if (mpiSize > 1) { if (mpiSize > 1) {
std::vector<int> cartIndices; std::vector<int> cartIndices;
cartIndices.reserve(grid_->numCells()); cartIndices.reserve(grid_->numCells());
@@ -219,6 +220,7 @@ public:
static_cast<ParallelEclipseState&>(this->eclState()).setupLocalProps(cartIndices); static_cast<ParallelEclipseState&>(this->eclState()).setupLocalProps(cartIndices);
static_cast<ParallelEclipseState&>(this->eclState()).switchToDistributedProps(); static_cast<ParallelEclipseState&>(this->eclState()).switchToDistributedProps();
} }
#endif
} }
/*! /*!

View File

@@ -363,7 +363,7 @@ int main(int argc, char** argv)
parState = new Opm::ParallelEclipseState(*deck); parState = new Opm::ParallelEclipseState(*deck);
eclipseState.reset(parState); eclipseState.reset(parState);
#else #else
eclipseState.reset(new Opm::EclipseState(*deck); eclipseState.reset(new Opm::EclipseState(*deck));
#endif #endif
/* /*
For the time being initializing wells and groups from the For the time being initializing wells and groups from the

View File

@@ -136,6 +136,7 @@ ParallelEclipseState::ParallelEclipseState(const Deck& deck)
} }
#if HAVE_MPI
std::size_t ParallelEclipseState::packSize(EclMpiSerializer& serializer) const std::size_t ParallelEclipseState::packSize(EclMpiSerializer& serializer) const
{ {
return serializer.packSize(m_tables) + return serializer.packSize(m_tables) +
@@ -185,6 +186,7 @@ void ParallelEclipseState::unpack(std::vector<char>& buffer, int& position,
serializer.unpack(m_faults, buffer, position); serializer.unpack(m_faults, buffer, position);
serializer.unpack(m_title, buffer, position); serializer.unpack(m_title, buffer, position);
} }
#endif
const FieldPropsManager& ParallelEclipseState::fieldProps() const const FieldPropsManager& ParallelEclipseState::fieldProps() const
@@ -231,6 +233,7 @@ void ParallelEclipseState::switchToDistributedProps()
} }
#if HAVE_MPI
namespace { namespace {
@@ -294,7 +297,6 @@ void packProps(const std::vector<int>& l2gCell,
void ParallelEclipseState::setupLocalProps(const std::vector<int>& localToGlobal) void ParallelEclipseState::setupLocalProps(const std::vector<int>& localToGlobal)
{ {
#if HAVE_MPI
const auto& comm = Dune::MPIHelper::getCollectiveCommunication(); const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
if (comm.rank() == 0) { if (comm.rank() == 0) {
extractRootProps(localToGlobal, this->globalFieldProps().keys<int>(), extractRootProps(localToGlobal, this->globalFieldProps().keys<int>(),
@@ -347,8 +349,8 @@ void ParallelEclipseState::setupLocalProps(const std::vector<int>& localToGlobal
Mpi::unpack(m_fieldProps.m_doubleProps[key], buffer, position, comm); Mpi::unpack(m_fieldProps.m_doubleProps[key], buffer, position, comm);
} }
} }
#endif
} }
#endif
} // end namespace Opm } // end namespace Opm

View File

@@ -109,6 +109,7 @@ public:
//! \details Only called on root process //! \details Only called on root process
ParallelEclipseState(const Deck& deck); ParallelEclipseState(const Deck& deck);
#if HAVE_MPI
//! \brief Calculates the size of serialized data. //! \brief Calculates the size of serialized data.
//! \param serializer The serializer to use //! \param serializer The serializer to use
std::size_t packSize(EclMpiSerializer& serializer) const; std::size_t packSize(EclMpiSerializer& serializer) const;
@@ -123,6 +124,7 @@ public:
//! \param Position in buffer //! \param Position in buffer
//! \param serializer The serializer to use //! \param serializer The serializer to use
void unpack(std::vector<char>& buffer, int& position, EclMpiSerializer& serializer); void unpack(std::vector<char>& buffer, int& position, EclMpiSerializer& serializer);
#endif
//! \brief Switch to global field properties. //! \brief Switch to global field properties.
//! \details Called on root process to use the global field properties //! \details Called on root process to use the global field properties
@@ -133,10 +135,12 @@ public:
//! setupLocalProps must be called prior to this. //! setupLocalProps must be called prior to this.
void switchToDistributedProps(); void switchToDistributedProps();
#if HAVE_MPI
//! \brief Setup local properties. //! \brief Setup local properties.
//! \param localToGlobal Map from local cells on calling process to global cartesian cell //! \param localToGlobal Map from local cells on calling process to global cartesian cell
//! \details Must be called after grid has been partitioned //! \details Must be called after grid has been partitioned
void setupLocalProps(const std::vector<int>& localToGlobal); void setupLocalProps(const std::vector<int>& localToGlobal);
#endif
//! \brief Returns a const ref to current field properties. //! \brief Returns a const ref to current field properties.
const FieldPropsManager& fieldProps() const override; const FieldPropsManager& fieldProps() const override;