Mirror of https://github.com/OPM/opm-simulators.git, synced 2024-11-29 04:23:48 -06:00
Merge pull request #2446 from blattms/use-compressed-in-initstateequil.hh

Autocreate parallel fieldprops and use compressed ones in initstateequil.hh

This commit is contained in:
commit 5b457cbbd6
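The description is terse, so here is the gist of the change: field properties used to be fetched as global (Cartesian-sized) arrays and remapped to the process-local compressed cells by hand; after this commit the parallel field-properties manager hands out compressed arrays directly, and keywords missing locally are created automatically. A self-contained sketch of the two access patterns, using illustrative stand-in names rather than the actual OPM API:

#include <algorithm>
#include <cassert>
#include <vector>

// Old pattern: fetch the global array and remap it per cell through the
// compressed-to-Cartesian map (what UgGridHelpers::globalCell provided).
std::vector<int> regionsFromGlobal(const std::vector<int>& globalData,
                                   const std::vector<int>& globalCell)
{
    std::vector<int> regions(globalCell.size());
    for (std::size_t c = 0; c < globalCell.size(); ++c) {
        assert(static_cast<std::size_t>(globalCell[c]) < globalData.size());
        regions[c] = globalData[globalCell[c]] - 1; // 1-based -> 0-based
    }
    return regions;
}

// New pattern: the manager already returns one value per compressed cell,
// so only the 1-based to 0-based shift remains.
std::vector<int> regionsFromCompressed(const std::vector<int>& compressedData)
{
    std::vector<int> regions(compressedData.size());
    std::transform(compressedData.begin(), compressedData.end(),
                   regions.begin(), [](int n) { return n - 1; });
    return regions;
}

int main()
{
    const std::vector<int> globalData{1, 2, 2, 1, 3, 3}; // e.g. EQLNUM per Cartesian cell
    const std::vector<int> globalCell{0, 2, 4};          // this process's active cells
    const std::vector<int> compressed{1, 2, 3};          // what a compressed get_int would yield

    assert(regionsFromGlobal(globalData, globalCell) == regionsFromCompressed(compressed));
}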
@@ -40,6 +40,8 @@
 #include <dune/common/version.hh>
 
+#include <sstream>
+
 namespace Opm {
 
 template <class TypeTag>
 class EclCpGridVanguard;
@@ -189,17 +191,29 @@ public:
         //distribute the grid and switch to the distributed view.
         {
             const auto wells = this->schedule().getWellsatEnd();
-            auto& eclState = static_cast<ParallelEclipseState&>(this->eclState());
-            const EclipseGrid* eclGrid = nullptr;
 
-            if (grid_->comm().rank() == 0)
+            try
             {
-                eclGrid = &this->eclState().getInputGrid();
-            }
+                auto& eclState = dynamic_cast<ParallelEclipseState&>(this->eclState());
+                const EclipseGrid* eclGrid = nullptr;
 
-            PropsCentroidsDataHandle<Dune::CpGrid> handle(*grid_, eclState, eclGrid, this->centroids_,
-                                                          cartesianIndexMapper());
-            defunctWellNames_ = std::get<1>(grid_->loadBalance(handle, edgeWeightsMethod, &wells, faceTrans.data()));
+                if (grid_->comm().rank() == 0)
+                {
+                    eclGrid = &this->eclState().getInputGrid();
+                }
+
+                PropsCentroidsDataHandle<Dune::CpGrid> handle(*grid_, eclState, eclGrid, this->centroids_,
+                                                              cartesianIndexMapper());
+                defunctWellNames_ = std::get<1>(grid_->loadBalance(handle, edgeWeightsMethod, &wells, faceTrans.data()));
+            }
+            catch(const std::bad_cast& e)
+            {
+                std::ostringstream message;
+                message << "Parallel simulator setup is incorrect as it does not use ParallelEclipseState ("
+                        << e.what() <<")"<<std::flush;
+                OpmLog::error(message.str());
+                std::rethrow_exception(std::current_exception());
+            }
         }
         grid_->switchToDistributedView();
 
@@ -221,7 +235,21 @@ public:
         this->updateGridView_();
 #if HAVE_MPI
         if (mpiSize > 1) {
-            static_cast<ParallelEclipseState&>(this->eclState()).switchToDistributedProps();
+            try
+            {
+                auto& parallelEclState = dynamic_cast<ParallelEclipseState&>(this->eclState());
+                // reset cartesian index mapper for auto creation of field properties
+                parallelEclState.resetCartesianMapper(cartesianIndexMapper_.get());
+                parallelEclState.switchToDistributedProps();
+            }
+            catch(const std::bad_cast& e)
+            {
+                std::ostringstream message;
+                message << "Parallel simulator setup is incorrect as it does not use ParallelEclipseState ("
+                        << e.what() <<")"<<std::flush;
+                OpmLog::error(message.str());
+                std::rethrow_exception(std::current_exception());
+            }
         }
 #endif
     }
 
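Both vanguard hunks above lean on the same guard: casting the EclipseState reference with dynamic_cast turns a wrong setup into a catchable std::bad_cast, where the previous static_cast would have silently invoked undefined behaviour. A minimal standalone sketch of the pattern, using hypothetical stand-in types:

#include <iostream>
#include <typeinfo>

struct State { virtual ~State() = default; };
struct ParallelState : State { void switchToDistributedProps() {} };

void distribute(State& state)
{
    try {
        // Throws std::bad_cast if 'state' is not actually a ParallelState.
        auto& par = dynamic_cast<ParallelState&>(state);
        par.switchToDistributedProps();
    }
    catch (const std::bad_cast& e) {
        std::cerr << "Setup does not use ParallelState (" << e.what() << ")\n";
        throw; // rethrow, as the real code does via std::rethrow_exception
    }
}

int main()
{
    ParallelState good;
    distribute(good); // fine

    State bad;
    try { distribute(bad); } catch (const std::bad_cast&) { /* reported above */ }
}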
@@ -906,12 +906,8 @@ equilnum(const Opm::EclipseState& eclipseState,
         const int nc = grid.size(/*codim=*/0);
         eqlnum.resize(nc);
 
-        const auto& e = eclipseState.fieldProps().get_global_int("EQLNUM");
-        const int* gc = Opm::UgGridHelpers::globalCell(grid);
-        for (int cell = 0; cell < nc; ++cell) {
-            const int deckPos = (gc == NULL) ? cell : gc[cell];
-            eqlnum[cell] = e[deckPos] - 1;
-        }
+        const auto& e = eclipseState.fieldProps().get_int("EQLNUM");
+        std::transform(e.begin(), e.end(), eqlnum.begin(), [](int n){ return n - 1;});
     }
     return eqlnum;
 }
 
@@ -940,16 +936,8 @@ public:
     {
         //Check for presence of kw SWATINIT
         if (applySwatInit) {
-            const int nc = grid.size(/*codim=*/0);
-
             if (eclipseState.fieldProps().has_double("SWATINIT")) {
-                const std::vector<double>& swatInitEcl = eclipseState.fieldProps().get_global_double("SWATINIT");
-                const int* gc = Opm::UgGridHelpers::globalCell(grid);
-                swatInit_.resize(nc);
-                for (int c = 0; c < nc; ++c) {
-                    const int deckPos = (gc == NULL) ? c : gc[c];
-                    swatInit_[c] = swatInitEcl[deckPos];
-                }
+                swatInit_ = eclipseState.fieldProps().get_double("SWATINIT");
             }
         }
 
@@ -1101,25 +1089,20 @@ private:
     void setRegionPvtIdx(const Grid& grid, const Opm::EclipseState& eclState, const RMap& reg)
     {
         size_t numCompressed = grid.size(/*codim=*/0);
-        const auto* globalCell = Opm::UgGridHelpers::globalCell(grid);
         std::vector<int> cellPvtRegionIdx(numCompressed);
 
         //Get the PVTNUM data
-        const auto pvtnumData = eclState.fieldProps().get_global_int("PVTNUM");
-        // Convert PVTNUM data into an array of indices for compressed cells. Remember
+        const auto& pvtnumData = eclState.fieldProps().get_int("PVTNUM");
+        // Save pvt indices of regions. Remember
         // that Eclipse uses Fortran-style indices which start at 1 instead of 0, so we
         // need to subtract 1.
-        for (size_t cellIdx = 0; cellIdx < numCompressed; ++ cellIdx) {
-            size_t cartesianCellIdx = globalCell[cellIdx];
-            assert(cartesianCellIdx < pvtnumData.size());
-            size_t pvtRegionIdx = pvtnumData[cartesianCellIdx] - 1;
-            cellPvtRegionIdx[cellIdx] = pvtRegionIdx;
-        }
+        std::transform(pvtnumData.begin(), pvtnumData.end(),
+                       cellPvtRegionIdx.begin(), [](int n){ return n - 1;});
 
         for (const auto& r : reg.activeRegions()) {
             const auto& cells = reg.cells(r);
             const int cell = *(cells.begin());
-            regionPvtIdx_[r] = cellPvtRegionIdx[cell];
+            regionPvtIdx_[r] = pvtnumData[cell] - 1;
         }
     }
 
@@ -66,7 +66,19 @@ const std::vector<int>& ParallelFieldPropsManager::get_int(const std::string& ke
 {
     auto it = m_intProps.find(keyword);
     if (it == m_intProps.end())
-        OPM_THROW(std::runtime_error, "No integer property field: " + keyword);
+    {
+        // Some of the keywords might be defaulted.
+        // We will let rank 0 create them and distribute them using get_global_int
+        auto data = get_global_int(keyword);
+        auto& local_data = const_cast<std::map<std::string, std::vector<int>>&>(m_intProps)[keyword];
+        local_data.resize(m_activeSize());
+
+        for (int i = 0; i < m_activeSize(); ++i)
+        {
+            local_data[i] = data[m_local2Global(i)];
+        }
+        return local_data;
+    }
 
     return it->second;
 }
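This fallback branch is what "autocreate" in the PR title refers to: a keyword absent from the local cache is fetched through get_global_int (rank 0 builds it, every rank receives it, see the next hunk) and then cut down to the process-local compressed cells via the two callbacks installed by resetCartesianMapper. Since get_global_int is collective, a missing keyword presumably has to be requested on all ranks together. The compression step in isolation, with illustrative names:

#include <cassert>
#include <functional>
#include <vector>

// Keep only this rank's active cells out of a global (Cartesian-sized) array,
// using the activeSize/local2Global callbacks the manager stores.
std::vector<int> localize(const std::vector<int>& globalData,
                          const std::function<int()>& activeSize,
                          const std::function<int(int)>& local2Global)
{
    std::vector<int> local(activeSize());
    for (int i = 0; i < activeSize(); ++i)
        local[i] = globalData[local2Global(i)];
    return local;
}

int main()
{
    const std::vector<int> globalData{10, 11, 12, 13, 14, 15};
    const std::vector<int> myCells{1, 3, 5}; // this rank's Cartesian indices

    auto local = localize(globalData,
                          [&] { return static_cast<int>(myCells.size()); },
                          [&](int i) { return myCells[i]; });
    assert((local == std::vector<int>{11, 13, 15}));
}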
@@ -74,8 +86,26 @@ const std::vector<int>& ParallelFieldPropsManager::get_int(const std::string& ke
 std::vector<int> ParallelFieldPropsManager::get_global_int(const std::string& keyword) const
 {
     std::vector<int> result;
+    int exceptionThrown{};
 
     if (m_comm.rank() == 0)
-        result = m_manager.get_global_int(keyword);
+    {
+        try
+        {
+            result = m_manager.get_global_int(keyword);
+        }catch(std::exception& e) {
+            exceptionThrown = 1;
+            OpmLog::error("No integer property field: " + keyword + " ("+e.what()+")");
+            m_comm.broadcast(&exceptionThrown, 1, 0);
+            throw e;
+        }
+    }
+
+    m_comm.broadcast(&exceptionThrown, 1, 0);
+
+    if (exceptionThrown)
+        OPM_THROW_NOLOG(std::runtime_error, "No integer property field: " + keyword);
 
     size_t size = result.size();
     m_comm.broadcast(&size, 1, 0);
     result.resize(size);
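The exceptionThrown flag is a rank-consistency device: if rank 0 fails to build the global array, every rank has to learn about it before the size broadcast below, otherwise the non-root ranks would block forever waiting for data that never comes. A stripped-down sketch of the handshake with a single-rank stand-in communicator, just to show the control flow:

#include <stdexcept>
#include <vector>

struct Comm {               // stand-in for the Dune collective communication
    int rank() const { return 0; }
    void broadcast(int*, int, int) const {} // no-op stub
};

std::vector<int> getGlobal(const Comm& comm, bool rank0Fails)
{
    std::vector<int> result;
    int exceptionThrown = 0;

    if (comm.rank() == 0) {
        try {
            if (rank0Fails)
                throw std::runtime_error("keyword not found");
            result = {1, 2, 3};
        } catch (const std::exception&) {
            exceptionThrown = 1;
            comm.broadcast(&exceptionThrown, 1, 0); // tell the others before rethrowing
            throw;
        }
    }

    comm.broadcast(&exceptionThrown, 1, 0); // non-root ranks learn the verdict here
    if (exceptionThrown)
        throw std::runtime_error("no property field on any rank");

    return result; // followed by the size/data broadcasts in the real code
}

int main()
{
    Comm comm;
    auto ok = getGlobal(comm, false); // succeeds on every rank
    try { getGlobal(comm, true); }    // rank 0 throws; the others follow suit
    catch (const std::exception&) {}
    return ok.size() == 3 ? 0 : 1;
}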
@@ -89,7 +119,18 @@ const std::vector<double>& ParallelFieldPropsManager::get_double(const std::stri
 {
     auto it = m_doubleProps.find(keyword);
     if (it == m_doubleProps.end())
-        OPM_THROW(std::runtime_error, "No double property field: " + keyword);
+    {
+        // Some of the keywords might be defaulted.
+        // We will let rank 0 create them and distribute them using get_global_int
+        auto data = get_global_double(keyword);
+        auto& local_data = const_cast<std::map<std::string, std::vector<double>>&>(m_doubleProps)[keyword];
+        local_data.resize(m_activeSize());
+        for (int i = 0; i < m_activeSize(); ++i)
+        {
+            local_data[i] = data[m_local2Global(i)];
+        }
+        return local_data;
+    }
 
     return it->second;
 }
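A side note on both fallbacks: the autocreated vector is written back into the logically const property map through const_cast, so later lookups are served from the cache. The same idiom stripped down (illustrative class; declaring the map mutable would be the more conventional spelling):

#include <map>
#include <string>
#include <vector>

class Props {
public:
    const std::vector<int>& get(const std::string& keyword) const
    {
        auto it = m_cache.find(keyword);
        if (it == m_cache.end()) {
            // Not cached yet: create the data and remember it for next time.
            auto& slot = const_cast<std::map<std::string, std::vector<int>>&>(m_cache)[keyword];
            slot = create(keyword);
            return slot;
        }
        return it->second;
    }

private:
    static std::vector<int> create(const std::string&) { return {1, 2, 3}; }
    std::map<std::string, std::vector<int>> m_cache;
};

int main()
{
    Props props; // must itself be non-const for the const_cast to be well defined
    const auto& a = props.get("PVTNUM"); // created on first access
    const auto& b = props.get("PVTNUM"); // served from the cache
    return &a == &b ? 0 : 1;             // same storage both times
}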
@@ -98,8 +139,26 @@ const std::vector<double>& ParallelFieldPropsManager::get_double(const std::stri
 std::vector<double> ParallelFieldPropsManager::get_global_double(const std::string& keyword) const
 {
     std::vector<double> result;
+    int exceptionThrown{};
 
     if (m_comm.rank() == 0)
-        result = m_manager.get_global_double(keyword);
+    {
+        try
+        {
+            result = m_manager.get_global_double(keyword);
+        }catch(std::exception& e) {
+            exceptionThrown = 1;
+            OpmLog::error("No double property field: " + keyword + " ("+e.what()+")");
+            m_comm.broadcast(&exceptionThrown, 1, 0);
+            throw e;
+        }
+    }
+
+    m_comm.broadcast(&exceptionThrown, 1, 0);
+
+    if (exceptionThrown)
+        OPM_THROW_NOLOG(std::runtime_error, "No double property field: " + keyword);
 
     size_t size = result.size();
     m_comm.broadcast(&size, 1, 0);
     result.resize(size);
@@ -22,6 +22,8 @@
 #include <opm/parser/eclipse/EclipseState/EclipseState.hpp>
 #include <dune/common/parallel/mpihelper.hh>
 
+#include <functional>
+
 namespace Opm {
 
 
@@ -83,11 +85,25 @@ public:
     //! \param keyword Name of property
     bool has_double(const std::string& keyword) const override;
 
+    //! \brief Resets the underlying cartesian mapper
+    //! \detail This has to be the cartesian mapper of the distributed grid.
+    //! It will be used to autocreate properties not explicitly stored.
+    //! \tparam T The type of the cartesian mapper
+    //! \param mapper The cartesian mapper of the distributed grid
+    template<class T>
+    void resetCartesianMapper(const T* mapper)
+    {
+        m_activeSize = std::bind(&T::compressedSize, mapper);
+        m_local2Global = std::bind(&T::cartesianIndex, mapper,
+                                   std::placeholders::_1);
+    }
 protected:
     std::map<std::string, std::vector<int>> m_intProps; //!< Map of integer properties in process-local compressed indices.
     std::map<std::string, std::vector<double>> m_doubleProps; //!< Map of double properties in process-local compressed indices.
     FieldPropsManager& m_manager; //!< Underlying field property manager (only used on root process).
     Dune::CollectiveCommunication<Dune::MPIHelper::MPICommunicator> m_comm; //!< Collective communication handler.
+    std::function<int(void)> m_activeSize; //!< active size function of the grid
+    std::function<int(const int)> m_local2Global; //!< mapping from local to global cartesian indices
 };
 
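resetCartesianMapper stores two member functions of the mapper in plain std::function fields, which is what lets ParallelFieldPropsManager stay a non-template class while still asking the distributed grid for its compressed size and Cartesian indices. The same mechanics in isolation, with a hypothetical Mapper type:

#include <functional>
#include <iostream>

struct Mapper {
    int compressedSize() const { return 3; }
    int cartesianIndex(int compressedIdx) const { return 2 * compressedIdx; }
};

int main()
{
    Mapper mapper;

    // As in resetCartesianMapper: bind the object pointer now, call later.
    std::function<int()> activeSize = std::bind(&Mapper::compressedSize, &mapper);
    std::function<int(const int)> local2Global =
        std::bind(&Mapper::cartesianIndex, &mapper, std::placeholders::_1);

    for (int i = 0; i < activeSize(); ++i)
        std::cout << i << " -> " << local2Global(i) << '\n'; // 0 -> 0, 1 -> 2, 2 -> 4
}

Capturing lambdas would do the same job; std::bind merely matches the style of the committed code.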
@@ -141,6 +157,16 @@ public:
     //! \details Can only be called on root process.
     const EclipseGrid& getInputGrid() const override;
 
+    //! \brief Resets the underlying cartesian mapper
+    //! \detail This has to be the cartesian mapper of the distributed grid.
+    //! It will be used to autocreate properties not explicitly stored.
+    //! \tparam T The type of the cartesian mapper
+    //! \param mapper The cartesian mapper of the distributed grid
+    template<class T>
+    void resetCartesianMapper(const T* mapper)
+    {
+        m_fieldProps.resetCartesianMapper(mapper);
+    }
 private:
     bool m_parProps = false; //! True to use distributed properties on root process
     ParallelFieldPropsManager m_fieldProps; //!< The parallel field properties