Mirror of https://github.com/OPM/opm-simulators.git
Merge pull request #1336 from blattms/print-mpi-ranks
Adapt writeInitial call to use integer vectors for printing ranks
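
Note (not part of the commit message, my reading of the diffs below): a new computeCellRanks() helper gathers, for every cell of the global Cartesian grid, the MPI rank that owns it, stores the result under the key "MPI_RANK" in a std::map<std::string, std::vector<int> >, and passes that map as an additional argument to writeInitial so the rank distribution ends up in the ECLIPSE output; the legacy code path passes an empty map ({}) instead. The standalone sketch below only illustrates the shape of that payload; the grid dimensions and owner ranks are made up, and no OPM or Dune API is called.

// --- standalone sketch, not part of the patch ---
#include <iostream>
#include <map>
#include <string>
#include <vector>

int main()
{
    // Hypothetical 3x2x1 Cartesian grid; in the patch the size comes from
    // UgGridHelpers::cartDims(grid()).
    const int nx = 3, ny = 2, nz = 1;

    // One entry per global Cartesian cell, initialised to -1 ("no owner
    // recorded"), mirroring the initialisation in computeCellRanks().
    std::vector<int> ranks(nx*ny*nz, -1);
    ranks[0] = 0; ranks[1] = 0; ranks[2] = 1;   // made-up owner ranks
    ranks[3] = 1; ranks[4] = 1; ranks[5] = 0;

    // This map is what the patch threads through to writeInitial as the
    // new "integer vectors" argument.
    std::map<std::string, std::vector<int> > integerVectors;
    integerVectors.emplace("MPI_RANK", ranks);

    for (const auto& kv : integerVectors)
        std::cout << kv.first << ": " << kv.second.size() << " cells\n";
}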
@@ -767,7 +767,7 @@ namespace Opm
                                           UgGridHelpers::createEclipseGrid( grid , inputGrid ),
                                           *schedule_,
                                           *summary_config_ ));
-            eclipse_writer_->writeInitial(geoprops_->simProps(grid),
+            eclipse_writer_->writeInitial(geoprops_->simProps(grid), {},
                                           geoprops_->nonCartesianConnections());
         }
     }
@@ -481,6 +481,7 @@ namespace Opm
         // Writes to:
         // state_
         // threshold_pressures_
+        // fluidprops_ (if SWATINIT is used)
         void setupState()
         {
             const PhaseUsage pu = Opm::phaseUsageFromDeck(deck());
@@ -623,6 +624,8 @@ namespace Opm
         {
             bool output = ( output_ > OUTPUT_LOG_ONLY );
             bool output_ecl = param_.getDefault("output_ecl", true);
+            auto int_vectors = computeCellRanks(output, output_ecl);
+
             if( output && output_ecl && grid().comm().rank() == 0 )
             {
                 exportNncStructure_();
@@ -632,7 +635,7 @@ namespace Opm
                                      UgGridHelpers::createEclipseGrid( this->globalGrid() , inputGrid ),
                                      schedule(),
                                      summaryConfig()));
-                eclIO_->writeInitial(computeLegacySimProps_(), nnc_);
+                eclIO_->writeInitial(computeLegacySimProps_(), int_vectors, nnc_);
             }
         }

@@ -805,6 +808,28 @@ namespace Opm
         Scalar gravity() const
         { return ebosProblem().gravity()[2]; }

+        std::map<std::string, std::vector<int> > computeCellRanks(bool output, bool output_ecl)
+        {
+            std::map<std::string, std::vector<int> > integerVectors;
+
+            if( output && output_ecl && grid().comm().size() > 1 )
+            {
+                // Get the owner rank number for each cell
+                using ElementMapper = Dune::MultipleCodimMultipleGeomTypeMapper<GridView, Dune::MCMGElementLayout>;
+                using Handle = CellOwnerDataHandle<ElementMapper>;
+                ElementMapper globalMapper(this->globalGrid().leafGridView());
+                const auto* dims = UgGridHelpers::cartDims(grid());
+                const auto globalSize = dims[0]*dims[1]*dims[2];
+                std::vector<int> ranks(globalSize, -1);
+                Handle handle(globalMapper, ranks,
+                              this->globalGrid().globalCell());
+                this->grid().gatherData(handle);
+                integerVectors.emplace("MPI_RANK", ranks);
+            }
+
+            return integerVectors;
+        }
+
         data::Solution computeLegacySimProps_()
         {
             const int* dims = UgGridHelpers::cartDims(grid());
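
A note on the layout produced above (my reading of the hunk, not text from the patch): the ranks vector has one slot per global Cartesian cell, cells that no process reports keep the initial -1, and the usual OPM/ECLIPSE ordering with i running fastest is assumed. A hypothetical lookup helper under that assumption:

// --- standalone sketch, not part of the patch ---
#include <iostream>
#include <vector>

// Assumed Cartesian ordering: g = i + nx*(j + ny*k), with i fastest; in the
// patch the dimensions come from UgGridHelpers::cartDims(grid()).
int ownerRankAt(const std::vector<int>& ranks, const int dims[3],
                int i, int j, int k)
{
    const int g = i + dims[0]*(j + dims[1]*k); // global Cartesian index
    return ranks[g];                           // -1 if never gathered
}

int main()
{
    const int dims[3] = {2, 2, 1};           // made-up cartDims
    std::vector<int> ranks = {0, 0, 1, -1};  // made-up gathered ranks
    std::cout << ownerRankAt(ranks, dims, 0, 1, 0) << "\n"; // prints 1
}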
@@ -119,6 +119,60 @@ private:
 };

+
+/// \brief Data handle for gathering the rank that owns a cell
+template<class Mapper>
+class CellOwnerDataHandle
+{
+public:
+    using DataType = int;
+
+    CellOwnerDataHandle(const Mapper& globalMapper, std::vector<int>& globalData,
+                        const std::vector<int>& globalCell)
+        : globalMapper_(globalMapper), globalData_(globalData), globalCell_(globalCell)
+    {
+        int argc = 0;
+        char** argv = nullptr;
+        my_rank_ = Dune::MPIHelper::instance(argc,argv).rank();
+    }
+    bool fixedsize(int /*dim*/, int /*codim*/)
+    {
+        return true;
+    }
+    template<class T>
+    std::size_t size(const T& e)
+    {
+        if ( T::codimension == 0)
+        {
+            return 1;
+        }
+        else
+        {
+            OPM_THROW(std::logic_error, "Data handle can only be used for elements");
+        }
+    }
+    template<class B, class T>
+    void gather(B& buffer, const T& e)
+    {
+        buffer.write(my_rank_);
+    }
+    template<class B, class T>
+    void scatter(B& buffer, const T& e, std::size_t /* size */)
+    {
+        const auto& index = globalCell_[globalMapper_.index(e)];
+        buffer.read(globalData_[index]);
+    }
+    bool contains(int dim, int codim)
+    {
+        return codim==0;
+    }
+
+private:
+    int my_rank_;
+    const Mapper& globalMapper_;
+    std::vector<int>& globalData_;
+    const std::vector<int>& globalCell_;
+};

 #if HAVE_OPM_GRID && HAVE_MPI
 /// \brief a data handle to distribute the threshold pressures
 class ThresholdPressureDataHandle
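
For readers unfamiliar with Dune-style data handles (an explanatory aside, not part of the patch): during grid().gatherData(handle) each process calls gather() for the cells it owns and writes its rank into a message buffer; on the receiving side scatter() reads the value back and stores it at the cell's global Cartesian index, so the I/O rank ends up with the complete "MPI_RANK" vector. The toy program below imitates that round trip with a plain vector standing in for the buffer; it uses no Dune or OPM API and only shows the direction of the data flow.

// --- standalone sketch, not part of the patch ---
#include <cstddef>
#include <iostream>
#include <vector>

// Minimal stand-in for the message buffer Dune passes to gather/scatter.
struct ToyBuffer {
    std::vector<int> data;
    std::size_t pos = 0;
    void write(int v) { data.push_back(v); }
    void read(int& v) { v = data[pos++]; }
};

int main()
{
    // Two "processes": rank 0 owns global cells {0,1}, rank 1 owns {2,3}.
    const std::vector<std::vector<int>> ownedCells = {{0, 1}, {2, 3}};

    std::vector<int> globalRanks(4, -1); // result assembled on the I/O rank

    for (int rank = 0; rank < 2; ++rank) {
        ToyBuffer buffer;
        // gather(): the owning process writes its rank, one entry per cell.
        for (std::size_t c = 0; c < ownedCells[rank].size(); ++c)
            buffer.write(rank);
        // scatter(): the receiving side reads each value into the slot given
        // by the cell's global (Cartesian) index.
        for (int globalIndex : ownedCells[rank])
            buffer.read(globalRanks[globalIndex]);
    }

    for (int r : globalRanks)
        std::cout << r << ' ';   // prints: 0 0 1 1
    std::cout << '\n';
}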