Allow Python bindings to set up MPI

Allows the Python bindings to control whether MPI_Init() and
MPI_Finalize() will be called when creating an OPM::Main object.
Håkon Hægland 2024-05-01 11:04:36 +02:00
parent 9529c18235
commit a1b7d4b5dd
9 changed files with 85 additions and 10 deletions
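From the Python side, the intended call pattern looks roughly like the sketch below. It is only a usage sketch: the class, method and argument names come from the bindings added in this commit, while the deck file name is illustrative.

    from opm.simulators import BlackOilSimulator

    sim = BlackOilSimulator("SPE1CASE1.DATA")  # deck path is illustrative
    # setup_mpi() must be called before step_init(); both flags default to True.
    sim.setup_mpi(init=True, finalize=False)   # leave MPI_Finalize() to the caller
    sim.step_init()   # creates the Opm::Main object, which calls MPI_Init()
    sim.step()        # advance one report step
    sim.step_cleanup()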


@@ -49,7 +49,9 @@ Main::Main(int argc, char** argv, bool ownMPI)
}
}
Main::Main(const std::string& filename)
Main::Main(const std::string& filename, bool mpi_init, bool mpi_finalize)
: mpi_init_{mpi_init}
, mpi_finalize_{mpi_finalize}
{
setArgvArgc_(filename);
initMPI();
@@ -58,10 +60,14 @@ Main::Main(const std::string& filename)
Main::Main(const std::string& filename,
std::shared_ptr<EclipseState> eclipseState,
std::shared_ptr<Schedule> schedule,
std::shared_ptr<SummaryConfig> summaryConfig)
std::shared_ptr<SummaryConfig> summaryConfig,
bool mpi_init,
bool mpi_finalize)
: eclipseState_{std::move(eclipseState)}
, schedule_{std::move(schedule)}
, summaryConfig_{std::move(summaryConfig)}
, mpi_init_{mpi_init}
, mpi_finalize_{mpi_finalize}
{
setArgvArgc_(filename);
initMPI();
@@ -107,7 +113,7 @@ Main::~Main()
#endif // HAVE_DAMARIS
#if HAVE_MPI && !HAVE_DUNE_FEM
if (ownMPI_) {
if (ownMPI_ && this->mpi_finalize_) {
MPI_Finalize();
}
#endif
@@ -132,9 +138,15 @@ void Main::setArgvArgc_(const std::string& filename)
void Main::initMPI()
{
#if HAVE_DUNE_FEM
Dune::Fem::MPIManager::initialize(argc_, argv_);
// The instance() method already checks whether MPI has been initialized, so we
// may not need to check mpi_init_ here.
if (this->mpi_init_) {
Dune::MPIHelper::instance(argc_, argv_);
}
#elif HAVE_MPI
MPI_Init(&argc_, &argv_);
if (this->mpi_init_) {
MPI_Init(&argc_, &argv_);
}
#endif
FlowGenericVanguard::setCommunication(std::make_unique<Parallel::Communication>());


@@ -130,14 +130,16 @@ public:
Main(int argc, char** argv, bool ownMPI = true);
// This constructor can be called from Python
Main(const std::string& filename);
Main(const std::string& filename, bool mpi_init = true, bool mpi_finalize = true);
// This constructor can be called from Python when Python has
// already parsed a deck
Main(const std::string& filename,
std::shared_ptr<EclipseState> eclipseState,
std::shared_ptr<Schedule> schedule,
std::shared_ptr<SummaryConfig> summaryConfig);
std::shared_ptr<SummaryConfig> summaryConfig,
bool mpi_init = true,
bool mpi_finalize = true);
~Main();
@@ -748,6 +750,8 @@ private:
std::shared_ptr<EclipseState> eclipseState_{};
std::shared_ptr<Schedule> schedule_{};
std::shared_ptr<SummaryConfig> summaryConfig_{};
bool mpi_init_{true}; //!< True if MPI_Init should be called
bool mpi_finalize_{true}; //!< True if MPI_Finalize should be called
// To demonstrate run with non_world_comm
bool test_split_comm_ = false;
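The header comments mark the second constructor for the case where Python has already parsed the deck. A hedged sketch of that path follows; it assumes the opm.io parsing classes shipped with opm-common (Parser, EclipseState, Schedule, SummaryConfig) and the matching four-argument BlackOilSimulator constructor, so the exact import paths and constructor signatures should be checked against the installed bindings.

    from opm.io.parser import Parser
    from opm.io.ecl_state import EclipseState
    from opm.io.schedule import Schedule
    from opm.io.summary import SummaryConfig
    from opm.simulators import BlackOilSimulator

    # Parse the deck on the Python side first (module and constructor
    # names are assumptions based on the opm-common bindings).
    deck = Parser().parse("SPE1CASE1.DATA")
    state = EclipseState(deck)
    schedule = Schedule(deck, state)
    summary_config = SummaryConfig(deck, state, schedule)

    sim = BlackOilSimulator(deck, state, schedule, summary_config)
    sim.setup_mpi(init=True, finalize=True)  # the defaults, shown explicitly
    sim.step_init()  # mpi_init/mpi_finalize are forwarded to the Opm::Main constructor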


@@ -62,6 +62,7 @@ public:
const std::string &idx_name,
py::array_t<double,
py::array::c_style | py::array::forcecast> array);
void setupMpi(bool init_mpi, bool finalize_mpi);
int step();
int stepCleanup();
int stepInit();
@@ -74,6 +75,8 @@ private:
const std::string deck_filename_;
bool has_run_init_ = false;
bool has_run_cleanup_ = false;
bool mpi_init_ = true;
bool mpi_finalize_ = true;
//bool debug_ = false;
// This *must* be declared before other pointers
// to simulator objects. This in order to deinitialize


@@ -59,6 +59,10 @@
"signature": "set_primary_variable(variable: str, value: NDArray[float]) -> None",
"doc": "Set the primary variable's values for the simulation grid.\n\n:para variable: The name of the variable. Valid names are 'pressure', 'water', 'gas', and 'brine'.\n:type variable: str\n:para value: An array of primary variable values to be set. See ``get_primary_variable()`` for more information.\n:type value: NDArray[float]"
},
"setupMpi": {
"signature": "mpi_init(init: bool, finalize: bool) -> None",
"doc": "Setup MPI for parallel simulation. This method should be called before any other method.\n:param init: Whether to call ``MPI_Init()`` or not.\n:param finalize:Whether to call ``MPI_Finalize()```when the simulator object goes out of scope.\n\n:return: None"
},
"step": {
"signature": "step() -> int",
"doc": "Execute the next simulation report step.\n\n:return: Result of the simulation step."


@@ -60,7 +60,7 @@ if(OPM_ENABLE_PYTHON_TESTS)
# splitting the python tests into multiple add_test() tests instead
# of having a single "python -m unittest" test call that will run all
# the tests in the "test" sub directory.
foreach(case_name IN ITEMS basic fluidstate_variables primary_variables schedule throw)
foreach(case_name IN ITEMS basic fluidstate_variables mpi primary_variables schedule throw)
add_test(NAME python_${case_name}
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/python
COMMAND ${CMAKE_COMMAND}


@@ -171,6 +171,15 @@ setPrimaryVariable(
getFluidState().setPrimaryVariable(variable, data, size_);
}
void PyBlackOilSimulator::setupMpi(bool mpi_init, bool mpi_finalize)
{
if (this->has_run_init_) {
throw std::logic_error("setup_mpi() called after step_init()");
}
this->mpi_init_ = mpi_init;
this->mpi_finalize_ = mpi_finalize;
}
int PyBlackOilSimulator::step()
{
if (!this->has_run_init_) {
@@ -211,11 +220,17 @@ int PyBlackOilSimulator::stepInit()
this->deck_->getDataFile(),
this->eclipse_state_,
this->schedule_,
this->summary_config_
this->summary_config_,
this->mpi_init_,
this->mpi_finalize_
);
}
else {
this->main_ = std::make_unique<Opm::Main>( this->deck_filename_ );
this->main_ = std::make_unique<Opm::Main>(
this->deck_filename_,
this->mpi_init_,
this->mpi_finalize_
);
}
int exit_code = EXIT_SUCCESS;
this->flow_main_ = this->main_->initFlowBlackoil(exit_code);
@@ -305,6 +320,7 @@ void export_PyBlackOilSimulator(py::module& m)
.def("set_porosity", &PyBlackOilSimulator::setPorosity, setPorosity_docstring, py::arg("array"))
.def("set_primary_variable", &PyBlackOilSimulator::setPrimaryVariable,
py::arg("variable"), py::arg("value"))
.def("setup_mpi", &PyBlackOilSimulator::setupMpi, setupMpi_docstring, py::arg("init"), py::arg("finalize"))
.def("step", &PyBlackOilSimulator::step, step_docstring)
.def("step_cleanup", &PyBlackOilSimulator::stepCleanup, stepCleanup_docstring)
.def("step_init", &PyBlackOilSimulator::stepInit, stepInit_docstring);


@@ -71,3 +71,4 @@ class TestBasic(unittest.TestCase):
poro2 = sim.get_porosity()
self.assertAlmostEqual(poro2[0], 0.285, places=7, msg='value of porosity 2')

python/test/test_mpi.py (new file)

@@ -0,0 +1,34 @@
import os
import unittest
from pathlib import Path
from opm.simulators import BlackOilSimulator
from .pytest_common import pushd
class TestBasic(unittest.TestCase):
@classmethod
def setUpClass(cls):
# NOTE: See the comment in test_basic.py for general background on the
# Python test setup. Unlike that file, this one uses several test
# functions whose ordering matters, as explained below.
test_dir = Path(os.path.dirname(__file__))
cls.data_dir = test_dir.parent.joinpath("test_data/SPE1CASE1a")
# IMPORTANT: Tests are run alphabetically, so we need to make sure that
# the first test calls MPI_Init(). The test names therefore carry a
# numeric label like "01" in test_01_mpi_init to ensure that they are
# run in the intended order.
def test_01_mpi_init(self):
with pushd(self.data_dir):
sim = BlackOilSimulator("SPE1CASE1.DATA")
sim.setup_mpi(init=True, finalize=False)
sim.step_init() # This will create the OPM::Main() object which will call MPI_Init()
assert True
def test_02_mpi_no_init(self):
with pushd(self.data_dir):
sim = BlackOilSimulator("SPE1CASE1.DATA")
sim.setup_mpi(init=False, finalize=True)
sim.step_init() # This will create the OPM::Main() object which will not call MPI_Init()
# The fact that this test runs without error shows that the simulator
# does not call MPI_Init() a second time
assert True


@@ -20,3 +20,4 @@ class TestBasic(unittest.TestCase):
# has not been initialized
with self.assertRaises(RuntimeError):
sim.get_dt()