Add block-jacobi partitioner option. Add block-jacobi matrix for use in OpenCL preconditioner

Rebased
Tong Dong Qiu
2022-04-21 17:18:32 +02:00
parent aba4c5f487
commit e360c00b73
23 changed files with 582 additions and 57 deletions

View File

@@ -85,6 +85,10 @@ struct EdgeWeightsMethod {
using type = UndefinedProperty;
};
template<class TypeTag, class MyTypeTag>
struct NumJacobiBlocks {
using type = UndefinedProperty;
};
template<class TypeTag, class MyTypeTag>
struct OwnerCellsFirst {
using type = UndefinedProperty;
};
@@ -133,6 +137,10 @@ struct EdgeWeightsMethod<TypeTag, TTag::EclBaseVanguard> {
static constexpr int value = 1;
};
template<class TypeTag>
struct NumJacobiBlocks<TypeTag, TTag::EclBaseVanguard> {
static constexpr int value = 0;
};
template<class TypeTag>
struct OwnerCellsFirst<TypeTag, TTag::EclBaseVanguard> {
static constexpr bool value = true;
};
@@ -211,6 +219,8 @@ public:
"When restarting: should we try to initialize wells and groups from historical SCHEDULE section.");
EWOMS_REGISTER_PARAM(TypeTag, int, EdgeWeightsMethod,
"Choose edge-weighing strategy: 0=uniform, 1=trans, 2=log(trans).");
EWOMS_REGISTER_PARAM(TypeTag, int, NumJacobiBlocks,
"Number of blocks to be created for the Block-Jacobi preconditioner.");
EWOMS_REGISTER_PARAM(TypeTag, bool, OwnerCellsFirst,
"Order cells owned by rank before ghost/overlap cells.");
EWOMS_REGISTER_PARAM(TypeTag, bool, SerialPartitioning,
@@ -235,6 +245,7 @@ public:
{
fileName_ = EWOMS_GET_PARAM(TypeTag, std::string, EclDeckFileName);
edgeWeightsMethod_ = Dune::EdgeWeightMethod(EWOMS_GET_PARAM(TypeTag, int, EdgeWeightsMethod));
numJacobiBlocks_ = EWOMS_GET_PARAM(TypeTag, int, NumJacobiBlocks);
ownersFirst_ = EWOMS_GET_PARAM(TypeTag, bool, OwnerCellsFirst);
serialPartitioning_ = EWOMS_GET_PARAM(TypeTag, bool, SerialPartitioning);
zoltanImbalanceTol_ = EWOMS_GET_PARAM(TypeTag, double, ZoltanImbalanceTol);
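
The hunks above wire a new integer run-time parameter, NumJacobiBlocks, into EclBaseVanguard: an undefined property declaration, a compile-time default of 0 (feature off), registration with a help string, and retrieval into numJacobiBlocks_ during construction. A minimal sketch of how the value is meant to be consumed, assuming a vanguard reference; the blockJacobiEnabled helper is hypothetical and not part of this commit:

    // Hypothetical helper (illustration only): the parameter defaults to 0, so the
    // block-Jacobi partitioning stays opt-in unless the user requests blocks.
    template <class Vanguard>
    bool blockJacobiEnabled(const Vanguard& vanguard)
    {
        return vanguard.numJacobiBlocks() > 0; // accessor added further down in this commit
    }

Assuming the usual eWoms mapping of registered parameter names to command-line options, the feature would presumably be enabled with something like --num-jacobi-blocks=4; that spelling is an assumption and does not appear in this diff.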

View File

@@ -136,7 +136,7 @@ public:
this->serialPartitioning(), this->enableDistributedWells(),
this->zoltanImbalanceTol(), this->gridView(),
this->schedule(), this->centroids_,
this->eclState(), this->parallelWells_);
this->eclState(), this->parallelWells_, this->numJacobiBlocks());
#endif
this->updateGridView_();
@@ -192,6 +192,7 @@ protected:
}
std::unique_ptr<TransmissibilityType> globalTrans_;
//std::vector<int> cell_part_;
};
} // namespace Opm

View File

@@ -82,12 +82,13 @@ void EclGenericCpGridVanguard<ElementMapper,GridView,Scalar>::doLoadBalance_(Dun
const Schedule& schedule,
std::vector<double>& centroids,
EclipseState& eclState1,
EclGenericVanguard::ParallelWellStruct& parallelWells)
EclGenericVanguard::ParallelWellStruct& parallelWells,
int numJacobiBlocks)
{
int mpiSize = 1;
MPI_Comm_size(grid_->comm(), &mpiSize);
if (mpiSize > 1) {
if (mpiSize > 1 || numJacobiBlocks > 0) {
// the CpGrid's loadBalance() method likes to have the transmissibilities as
// its edge weights. since this is (kind of) a layering violation and
// transmissibilities are relatively expensive to compute, we only do it if
@@ -131,53 +132,60 @@ void EclGenericCpGridVanguard<ElementMapper,GridView,Scalar>::doLoadBalance_(Dun
}
//distribute the grid and switch to the distributed view.
{
const auto wells = schedule.getWellsatEnd();
try
if (mpiSize > 1) {
{
auto& eclState = dynamic_cast<ParallelEclipseState&>(eclState1);
const EclipseGrid* eclGrid = nullptr;
const auto wells = schedule.getWellsatEnd();
if (grid_->comm().rank() == 0)
try
{
eclGrid = &eclState.getInputGrid();
}
auto& eclState = dynamic_cast<ParallelEclipseState&>(eclState1);
const EclipseGrid* eclGrid = nullptr;
PropsCentroidsDataHandle<Dune::CpGrid> handle(*grid_, eclState, eclGrid, centroids,
cartesianIndexMapper());
if (loadBalancerSet)
{
std::vector<int> parts;
if (grid_->comm().rank() == 0)
{
parts = (*externalLoadBalancer)(*grid_);
eclGrid = &eclState.getInputGrid();
}
parallelWells = std::get<1>(grid_->loadBalance(handle, parts, &wells, ownersFirst, false, 1));
}
else
{
parallelWells =
std::get<1>(grid_->loadBalance(handle, edgeWeightsMethod, &wells, serialPartitioning,
faceTrans.data(), ownersFirst, false, 1, true, zoltanImbalanceTol,
enableDistributedWells));
}
}
catch(const std::bad_cast& e)
{
std::ostringstream message;
message << "Parallel simulator setup is incorrect as it does not use ParallelEclipseState ("
<< e.what() <<")"<<std::flush;
OpmLog::error(message.str());
std::rethrow_exception(std::current_exception());
}
}
grid_->switchToDistributedView();
PropsCentroidsDataHandle<Dune::CpGrid> handle(*grid_, eclState, eclGrid, centroids,
cartesianIndexMapper());
if (loadBalancerSet)
{
std::vector<int> parts;
if (grid_->comm().rank() == 0)
{
parts = (*externalLoadBalancer)(*grid_);
}
parallelWells = std::get<1>(grid_->loadBalance(handle, parts, &wells, ownersFirst, false, 1));
}
else
{
parallelWells =
std::get<1>(grid_->loadBalance(handle, edgeWeightsMethod, &wells, serialPartitioning,
faceTrans.data(), ownersFirst, false, 1, true, zoltanImbalanceTol,
enableDistributedWells));
}
}
catch(const std::bad_cast& e)
{
std::ostringstream message;
message << "Parallel simulator setup is incorrect as it does not use ParallelEclipseState ("
<< e.what() <<")"<<std::flush;
OpmLog::error(message.str());
std::rethrow_exception(std::current_exception());
}
}
grid_->switchToDistributedView();
}
// Calling Schedule::filterConnections would remove any perforated
// cells that exist only on other ranks even in the case of distributed wells
// But we need all connections to figure out the first cell of a well (e.g. for
// pressure). Hence this is now skipped. Rank 0 had everything even before.
if (numJacobiBlocks > 0 && mpiSize == 1) {
const auto wells = schedule.getWellsatEnd();
cell_part_.resize(grid_->numCells());
cell_part_ = grid_->zoltanPartitionWithoutScatter(&wells, faceTrans.data(), numJacobiBlocks, zoltanImbalanceTol);
}
}
}
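
This is the core of the partitioner change in EclGenericCpGridVanguard::doLoadBalance_. The early return now also lets the body run in serial when numJacobiBlocks > 0, the actual load balancing and scatter stay behind if (mpiSize > 1), and a new serial-only branch calls CpGrid's zoltanPartitionWithoutScatter to split the (still undistributed) cells into numJacobiBlocks parts, storing the result in cell_part_. A hedged sketch, illustration only, of what that vector holds once doLoadBalance_ has run, with each local cell index presumably mapped to a block id in [0, numJacobiBlocks):

    // Illustration only (not in the commit): tally how many cells landed in each
    // Jacobi block after the serial Zoltan partitioning.
    std::vector<int> blockSize(numJacobiBlocks, 0);
    for (int cell = 0; cell < grid_->numCells(); ++cell)
        ++blockSize[cell_part_[cell]];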

View File

@@ -101,6 +101,10 @@ public:
*/
const CartesianIndexMapper& equilCartesianIndexMapper() const;
std::vector<int> cellPartition() const
{
return cell_part_;
}
protected:
/*!
* \brief Distribute the simulation grid over multiple processes
@@ -114,7 +118,8 @@ protected:
const GridView& gridv, const Schedule& schedule,
std::vector<double>& centroids,
EclipseState& eclState,
EclGenericVanguard::ParallelWellStruct& parallelWells);
EclGenericVanguard::ParallelWellStruct& parallelWells,
int numJacobiBlocks);
void distributeFieldProps_(EclipseState& eclState);
#endif
@@ -137,6 +142,7 @@ protected:
std::unique_ptr<CartesianIndexMapper> equilCartesianIndexMapper_;
int mpiRank;
std::vector<int> cell_part_;
};
} // namespace Opm
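
The header side of the same change: doLoadBalance_ gains the numJacobiBlocks argument, the partition vector becomes the cell_part_ member, and a public cellPartition() accessor returns it by value. A hedged sketch, not taken from this commit, of how a preconditioner-side consumer could use the accessor to keep only couplings inside a block, which is what turns the Jacobian's sparsity pattern into a block-diagonal (block-Jacobi) one:

    // Hypothetical consumer code: entries whose row and column cells live in
    // different Jacobi blocks are dropped when building the block-Jacobi matrix.
    const std::vector<int> cellToBlock = vanguard.cellPartition();
    auto keepEntry = [&cellToBlock](int row, int col) {
        return cellToBlock[row] == cellToBlock[col];
    };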

View File

@@ -249,6 +249,12 @@ public:
Dune::EdgeWeightMethod edgeWeightsMethod() const
{ return edgeWeightsMethod_; }
/*!
* \brief Number of blocks in the Block-Jacobi preconditioner.
*/
int numJacobiBlocks() const
{ return numJacobiBlocks_; }
/*!
* \brief Parameter that decide if cells owned by rank are ordered before ghost cells.
*/
@@ -323,6 +329,7 @@ protected:
std::string caseName_;
std::string fileName_;
Dune::EdgeWeightMethod edgeWeightsMethod_;
int numJacobiBlocks_;
bool ownersFirst_;
bool serialPartitioning_;
double zoltanImbalanceTol_;
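
Finally, the generic vanguard stores the parameter (numJacobiBlocks_) and exposes it through numJacobiBlocks(), mirroring the existing edgeWeightsMethod() accessor. A short sketch, assuming a caller that has both new accessors available, of how they could be combined to group cells per block; the caller and variable names are illustrative, not from this commit:

    // Hypothetical caller: one list of cell indices per Jacobi block.
    const int nBlocks = vanguard.numJacobiBlocks();
    const std::vector<int> cellToBlock = vanguard.cellPartition();
    std::vector<std::vector<int>> cellsOfBlock(nBlocks);
    for (std::size_t cell = 0; cell < cellToBlock.size(); ++cell)
        cellsOfBlock[cellToBlock[cell]].push_back(static_cast<int>(cell));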