Add the metis partitioner as a command line option as well

This commit is contained in:
Lisa Julia Nebel 2024-07-11 14:25:04 +02:00
parent c243620057
commit 653cb28bc2
9 changed files with 164 additions and 47 deletions

View File

@ -287,7 +287,7 @@ if(MPI_FOUND)
opm/simulators/utils/ParallelEclipseState.cpp
opm/simulators/utils/ParallelNLDDPartitioningZoltan.cpp
opm/simulators/utils/ParallelSerialization.cpp
opm/simulators/utils/SetupZoltanParams.cpp)
opm/simulators/utils/SetupPartitioningParams.cpp)
list(APPEND PUBLIC_HEADER_FILES opm/simulators/utils/MPIPacker.hpp
opm/simulators/utils/MPISerializer.hpp)
endif()

View File

@ -227,10 +227,11 @@ public:
}
this->doLoadBalance_(this->edgeWeightsMethod(), this->ownersFirst(),
this->serialPartitioning(), this->enableDistributedWells(),
this->zoltanImbalanceTol(), this->gridView(),
this->schedule(), this->eclState(),
this->parallelWells_, this->numJacobiBlocks());
this->partitionMethod(), this->serialPartitioning(),
this->enableDistributedWells(), this->imbalanceTol(),
this->gridView(), this->schedule(),
this->eclState(), this->parallelWells_,
this->numJacobiBlocks());
#endif
this->updateGridView_();
@ -298,6 +299,10 @@ protected:
{
return this->zoltanParams_;
}
// METIS partitioner configuration string: "default" (empty parameter map)
// or the name of a .json file parsed by setupMetisParams().
const std::string& metisParams() const override
{
    return this->metisParams_;
}
#endif
// removing some connection located in inactive grid cells

View File

@ -105,11 +105,22 @@ struct OwnerCellsFirst {
using type = UndefinedProperty;
};
template<class TypeTag, class MyTypeTag>
struct PartitionMethod {
using type = UndefinedProperty;
};
template<class TypeTag, class MyTypeTag>
struct SerialPartitioning {
using type = UndefinedProperty;
};
template<class TypeTag, class MyTypeTag>
struct ImbalanceTol {
using type = UndefinedProperty;
};
// Remove this for release 2025.04
template<class TypeTag, class MyTypeTag>
struct ZoltanImbalanceTol {
using type = UndefinedProperty;
@ -120,6 +131,11 @@ struct ZoltanParams {
using type = UndefinedProperty;
};
template<class TypeTag, class MyTypeTag>
struct MetisParams {
using type = UndefinedProperty;
};
template <class TypeTag, class MyTypeTag>
struct ExternalPartition
{
@ -175,6 +191,12 @@ template<class TypeTag>
struct OwnerCellsFirst<TypeTag, TTag::FlowBaseVanguard> {
static constexpr bool value = true;
};
template<class TypeTag>
struct PartitionMethod<TypeTag, TTag::FlowBaseVanguard> {
static constexpr int value = 1; // 0: simple, 1: Zoltan, 2: METIS, see GridEnums.hpp
};
template<class TypeTag>
struct SerialPartitioning<TypeTag, TTag::FlowBaseVanguard> {
static constexpr bool value = false;
@ -190,6 +212,16 @@ struct ZoltanParams<TypeTag,TTag::FlowBaseVanguard> {
static constexpr auto value = "graph";
};
template<class TypeTag>
struct ImbalanceTol<TypeTag, TTag::FlowBaseVanguard> {
static constexpr double value = 1.1;
};
template<class TypeTag>
struct MetisParams<TypeTag,TTag::FlowBaseVanguard> {
static constexpr auto value = "default";
};
template <class TypeTag>
struct ExternalPartition<TypeTag, TTag::FlowBaseVanguard>
{
@ -280,10 +312,13 @@ public:
Parameters::registerParam<TypeTag, Properties::OwnerCellsFirst>
("Order cells owned by rank before ghost/overlap cells.");
#if HAVE_MPI
Parameters::registerParam<TypeTag, Properties::PartitionMethod>
("Choose partitioning strategy: 0=simple, 1=Zoltan, 2=METIS.");
Parameters::registerParam<TypeTag, Properties::SerialPartitioning>
("Perform partitioning for parallel runs on a single process.");
Parameters::registerParam<TypeTag, Properties::ZoltanImbalanceTol>
("Tolerable imbalance of the loadbalancing provided by Zoltan (default: 1.1).");
("Tolerable imbalance of the loadbalancing provided by Zoltan. DEPRECATED: Use --imbalance-tol instead");
Parameters::hideParam<TypeTag, Properties::ZoltanImbalanceTol>();
Parameters::registerParam<TypeTag, Properties::ZoltanParams>
("Configuration of Zoltan partitioner. "
"Valid options are: graph, hypergraph or scotch. "
@ -292,6 +327,15 @@ public:
"See https://sandialabs.github.io/Zoltan/ug_html/ug.html "
"for available Zoltan options.");
Parameters::hideParam<TypeTag, Properties::ZoltanParams>();
Parameters::registerParam<TypeTag, Properties::ImbalanceTol>
("Tolerable imbalance of the loadbalancing (default: 1.1).");
Parameters::registerParam<TypeTag, Properties::MetisParams>
("Configuration of Metis partitioner. "
"You can request a configuration to be read "
"from a JSON file by giving the filename here, ending with '.json'. "
"See http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/manual.pdf "
"for available METIS options.");
Parameters::registerParam<TypeTag, Properties::ExternalPartition>
("Name of file from which to load an externally generated "
"partitioning of the model's active cells for MPI "
@ -324,9 +368,16 @@ public:
ownersFirst_ = Parameters::get<TypeTag, Properties::OwnerCellsFirst>();
#if HAVE_MPI
partitionMethod_ = Dune::PartitionMethod(Parameters::get<TypeTag, Properties::PartitionMethod>());
serialPartitioning_ = Parameters::get<TypeTag, Properties::SerialPartitioning>();
imbalanceTol_ = Parameters::get<TypeTag, Properties::ImbalanceTol>();
zoltanImbalanceTolSet_ = Parameters::isSet<TypeTag, Properties::ZoltanImbalanceTol>();
zoltanImbalanceTol_ = Parameters::get<TypeTag, Properties::ZoltanImbalanceTol>();
zoltanParams_ = Parameters::get<TypeTag, Properties::ZoltanParams>();
metisParams_ = Parameters::get<TypeTag, Properties::MetisParams>();
externalPartitionFile_ = Parameters::get<TypeTag, Properties::ExternalPartition>();
#endif
enableDistributedWells_ = Parameters::get<TypeTag, Properties::AllowDistributedWells>();

View File

@ -29,6 +29,8 @@
#include <dune/common/parallel/communication.hh>
#include <opm/common/OpmLog/OpmLog.hpp>
#include <opm/grid/common/GridEnums.hpp>
#include <opm/input/eclipse/Schedule/Well/WellTestState.hpp>
@ -213,6 +215,11 @@ public:
{ return ownersFirst_; }
#if HAVE_MPI
/*!
 * \brief The method used to partition the grid for parallel runs
 *        (simple, Zoltan or METIS; see GridEnums.hpp for the enum values).
 */
Dune::PartitionMethod partitionMethod() const
{ return partitionMethod_; }
/*!
* \brief Parameter that decides if partitioning for parallel runs
* should be performed on a single process only.
@ -221,10 +228,17 @@ public:
{ return serialPartitioning_; }
/*!
* \brief Parameter that sets the zoltan imbalance tolarance.
* \brief Parameter that sets the imbalance tolerance, depending on the chosen partition method
*/
double zoltanImbalanceTol() const
{ return zoltanImbalanceTol_; }
double imbalanceTol() const
{
    // Backward compatibility: if the user explicitly set the deprecated
    // --zoltan-imbalance-tol parameter, honour it and emit a deprecation
    // notice (logged on every call); otherwise use --imbalance-tol.
    if (zoltanImbalanceTolSet_) {
        OpmLog::info("The parameter --zoltan-imbalance-tol is deprecated and has been renamed to --imbalance-tol, please adjust your calls and scripts!");
        return zoltanImbalanceTol_;
    } else {
        return imbalanceTol_;
    }
}
const std::string& externalPartitionFile() const
{
@ -291,9 +305,16 @@ protected:
bool ownersFirst_;
#if HAVE_MPI
Dune::PartitionMethod partitionMethod_;
bool serialPartitioning_;
double imbalanceTol_;
bool zoltanImbalanceTolSet_;
double zoltanImbalanceTol_;
std::string zoltanParams_;
std::string metisParams_;
std::string externalPartitionFile_{};
#endif
bool enableDistributedWells_;

View File

@ -40,7 +40,7 @@
#include <opm/simulators/utils/ParallelEclipseState.hpp>
#include <opm/simulators/utils/ParallelSerialization.hpp>
#include <opm/simulators/utils/PropsDataHandle.hpp>
#include <opm/simulators/utils/SetupZoltanParams.hpp>
#include <opm/simulators/utils/SetupPartitioningParams.hpp>
#if HAVE_MPI
#include <opm/simulators/utils/MPISerializer.hpp>
@ -146,17 +146,20 @@ template<class ElementMapper, class GridView, class Scalar>
void GenericCpGridVanguard<ElementMapper, GridView, Scalar>::
doLoadBalance_(const Dune::EdgeWeightMethod edgeWeightsMethod,
const bool ownersFirst,
const Dune::PartitionMethod partitionMethod,
const bool serialPartitioning,
const bool enableDistributedWells,
const double zoltanImbalanceTol,
const double imbalanceTol,
const GridView& gridView,
const Schedule& schedule,
EclipseState& eclState1,
FlowGenericVanguard::ParallelWellStruct& parallelWells,
const int numJacobiBlocks)
{
if (!this->zoltanParams().empty())
this->grid_->setZoltanParams(setupZoltanParams(this->zoltanParams()));
if (partitionMethod == Dune::PartitionMethod::zoltan && !this->zoltanParams().empty())
this->grid_->setPartitioningParams(setupZoltanParams(this->zoltanParams()));
if (partitionMethod == Dune::PartitionMethod::metis && !this->metisParams().empty())
this->grid_->setPartitioningParams(setupMetisParams(this->metisParams()));
const auto mpiSize = this->grid_->comm().size();
@ -196,9 +199,9 @@ doLoadBalance_(const Dune::EdgeWeightMethod edgeWeightsMethod,
// Distribute the grid and switch to the distributed view.
if (mpiSize > 1) {
this->distributeGrid(edgeWeightsMethod, ownersFirst,
this->distributeGrid(edgeWeightsMethod, ownersFirst, partitionMethod,
serialPartitioning, enableDistributedWells,
zoltanImbalanceTol, loadBalancerSet != 0,
imbalanceTol, loadBalancerSet != 0,
faceTrans, wells,
eclState1, parallelWells);
}
@ -214,7 +217,7 @@ doLoadBalance_(const Dune::EdgeWeightMethod edgeWeightsMethod,
this->cell_part_ = this->grid_->
zoltanPartitionWithoutScatter(&wells, faceTrans.data(),
numJacobiBlocks,
zoltanImbalanceTol);
imbalanceTol);
}
#endif
}
@ -281,9 +284,10 @@ void
GenericCpGridVanguard<ElementMapper, GridView, Scalar>::
distributeGrid(const Dune::EdgeWeightMethod edgeWeightsMethod,
const bool ownersFirst,
const Dune::PartitionMethod partitionMethod,
const bool serialPartitioning,
const bool enableDistributedWells,
const double zoltanImbalanceTol,
const double imbalanceTol,
const bool loadBalancerSet,
const std::vector<double>& faceTrans,
const std::vector<Well>& wells,
@ -293,9 +297,9 @@ distributeGrid(const Dune::EdgeWeightMethod edgeWeightsMethod,
if (auto* eclState = dynamic_cast<ParallelEclipseState*>(&eclState1);
eclState != nullptr)
{
this->distributeGrid(edgeWeightsMethod, ownersFirst,
this->distributeGrid(edgeWeightsMethod, ownersFirst, partitionMethod,
serialPartitioning, enableDistributedWells,
zoltanImbalanceTol, loadBalancerSet, faceTrans,
imbalanceTol, loadBalancerSet, faceTrans,
wells, eclState, parallelWells);
}
else {
@ -317,9 +321,10 @@ void
GenericCpGridVanguard<ElementMapper, GridView, Scalar>::
distributeGrid(const Dune::EdgeWeightMethod edgeWeightsMethod,
const bool ownersFirst,
const Dune::PartitionMethod partitionMethod,
const bool serialPartitioning,
const bool enableDistributedWells,
const double zoltanImbalanceTol,
const double imbalanceTol,
const bool loadBalancerSet,
const std::vector<double>& faceTrans,
const std::vector<Well>& wells,
@ -340,20 +345,18 @@ distributeGrid(const Dune::EdgeWeightMethod edgeWeightsMethod,
auto parts = isIORank
? (*externalLoadBalancer)(*this->grid_)
: std::vector<int>{};
//For this case, simple partitioning is selected automatically
parallelWells =
std::get<1>(this->grid_->loadBalance(handle, parts, &wells, ownersFirst,
addCornerCells, overlapLayers));
}
else {
const auto useZoltan = true;
parallelWells =
std::get<1>(this->grid_->loadBalance(handle, edgeWeightsMethod,
&wells, serialPartitioning,
faceTrans.data(), ownersFirst,
addCornerCells, overlapLayers,
useZoltan, zoltanImbalanceTol,
partitionMethod, imbalanceTol,
enableDistributedWells));
}
}

View File

@ -150,9 +150,10 @@ protected:
#if HAVE_MPI
void doLoadBalance_(const Dune::EdgeWeightMethod edgeWeightsMethod,
const bool ownersFirst,
const Dune::PartitionMethod partitionMethod,
const bool serialPartitioning,
const bool enableDistributedWells,
const double zoltanImbalanceTol,
const double imbalanceTol,
const GridView& gridView,
const Schedule& schedule,
EclipseState& eclState,
@ -166,9 +167,10 @@ private:
void distributeGrid(const Dune::EdgeWeightMethod edgeWeightsMethod,
const bool ownersFirst,
const Dune::PartitionMethod partitionMethod,
const bool serialPartitioning,
const bool enableDistributedWells,
const double zoltanImbalanceTol,
const double imbalanceTol,
const bool loadBalancerSet,
const std::vector<double>& faceTrans,
const std::vector<Well>& wells,
@ -177,9 +179,10 @@ private:
void distributeGrid(const Dune::EdgeWeightMethod edgeWeightsMethod,
const bool ownersFirst,
const Dune::PartitionMethod partitionMethod,
const bool serialPartitioning,
const bool enableDistributedWells,
const double zoltanImbalanceTol,
const double imbalanceTol,
const bool loadBalancerSet,
const std::vector<double>& faceTrans,
const std::vector<Well>& wells,
@ -188,6 +191,7 @@ private:
protected:
virtual const std::string& zoltanParams() const = 0;
virtual const std::string& metisParams() const = 0;
#endif // HAVE_MPI

View File

@ -30,7 +30,7 @@
#include <opm/simulators/utils/DeferredLoggingErrorHelpers.hpp>
#include <opm/simulators/utils/gatherDeferredLogger.hpp>
#include <opm/simulators/utils/ParallelNLDDPartitioningZoltan.hpp>
#include <opm/simulators/utils/SetupZoltanParams.hpp>
#include <opm/simulators/utils/SetupPartitioningParams.hpp>
#include <opm/input/eclipse/Schedule/Well/Connection.hpp>
#include <opm/input/eclipse/Schedule/Well/Well.hpp>

View File

@ -18,19 +18,42 @@
*/
#include <config.h>
#include <opm/simulators/utils/SetupZoltanParams.hpp>
#include <opm/simulators/utils/SetupPartitioningParams.hpp>
#include <opm/common/ErrorMacros.hpp>
#include <opm/common/OpmLog/OpmLog.hpp>
#if BOOST_VERSION / 100 % 1000 > 48
#include <boost/property_tree/json_parser.hpp>
#include <boost/version.hpp>
#endif
#include <filesystem>
#include <iostream>
namespace Opm
{
#if BOOST_VERSION / 100 % 1000 > 48
/// Read a JSON configuration file and flatten its top-level entries into
/// a string-to-string map. Throws std::invalid_argument when the file
/// does not exist; JSON parse errors are logged and leave the tree empty,
/// so the map is filled on a best-effort basis.
void convertJSONToMap(const std::string& conf, std::map<std::string,std::string>& result)
{
    if (!std::filesystem::exists(conf)) {
        OPM_THROW(std::invalid_argument,
                  "JSON file " + conf + " does not exist.");
    }

    boost::property_tree::ptree tree;
    try {
        boost::property_tree::read_json(conf, tree);
    } catch (boost::property_tree::json_parser::json_parser_error& err) {
        // Best effort: report the parse failure but keep going with
        // whatever (possibly nothing) was read.
        OpmLog::error(err.what());
    }

    for (const auto& [key, subtree] : tree) {
        if (const auto value = subtree.get_value_optional<std::string>()) {
            result.insert_or_assign(key, *value);
        }
    }
}
#endif
std::map<std::string,std::string> setupZoltanParams(const std::string& conf)
{
@ -46,21 +69,7 @@ std::map<std::string,std::string> setupZoltanParams(const std::string& conf)
result.emplace("GRAPH_PACKAGE", "PHG");
} else if (conf.size() > 5 && conf.substr(conf.size() - 5, 5) == ".json") {
#if BOOST_VERSION / 100 % 1000 > 48
if ( !std::filesystem::exists(conf) ) {
OPM_THROW(std::invalid_argument,
"JSON file " + conf + " does not exist.");
}
boost::property_tree::ptree tree;
try {
boost::property_tree::read_json(conf, tree);
} catch (boost::property_tree::json_parser::json_parser_error& err) {
OpmLog::error(err.what());
}
for (const auto& node : tree) {
auto value = node.second.get_value_optional<std::string>();
if (value)
result.insert_or_assign(node.first, *value);
}
convertJSONToMap(conf, result);
#else
OPM_THROW(std::invalid_argument,
"--zoltan-params=file.json not supported with "
@ -76,4 +85,27 @@ std::map<std::string,std::string> setupZoltanParams(const std::string& conf)
return result;
}
/// Build the METIS partitioning-parameter map from the --metis-params
/// command-line setting. "default" yields an empty map (METIS built-in
/// defaults); a *.json filename is parsed into key/value pairs; anything
/// else is rejected with std::invalid_argument.
std::map<std::string,std::string> setupMetisParams(const std::string& conf)
{
    std::map<std::string,std::string> result{};

    if (conf == "default") {
        // Empty map -> let METIS use its built-in defaults.
        return result;
    }

    const bool isJsonFile =
        conf.size() > 5 && conf.substr(conf.size() - 5, 5) == ".json";

    if (!isJsonFile) {
        // No valid configuration option found.
        OPM_THROW(std::invalid_argument,
                  conf + " is not a valid setting for --metis-params."
                  " Please use a json file containing the METIS parameters.");
    }

#if BOOST_VERSION / 100 % 1000 > 48
    convertJSONToMap(conf, result);
    return result;
#else
    OPM_THROW(std::invalid_argument,
              "--metis-params=file.json not supported with "
              "boost version. Needs version > 1.48.");
#endif
}
} // namespace Opm

View File

@ -17,8 +17,8 @@
along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef OPM_SETUP_ZOLTAN_PARAMS_HPP
#define OPM_SETUP_ZOLTAN_PARAMS_HPP
#ifndef OPM_SETUP_PARTITIONING_PARAMS_HPP
#define OPM_SETUP_PARTITIONING_PARAMS_HPP
#include <map>
#include <string>
@ -26,7 +26,8 @@
namespace Opm {
std::map<std::string,std::string> setupZoltanParams(const std::string& conf);
std::map<std::string,std::string> setupMetisParams(const std::string& conf);
} // namespace Opm
#endif // OPM_SETUP_ZOLTAN_PARAMS_HPP
#endif // OPM_SETUP_PARTITIONING_PARAMS_HPP