Mirror of https://github.com/OPM/opm-simulators.git, synced 2025-02-25 18:55:30 -06:00
Fix initialization and stopping of Damaris.
With this, flow no longer hangs on exit.
Committed by Elyes Ahmed
parent e347959d79
commit fae71df32d
@@ -170,12 +170,21 @@ public:
         EclGenericVanguard::setCommunication(nullptr);
 
 #if HAVE_DAMARIS
-        int err = damaris_finalize();
-        if (err != DAMARIS_OK ) {
-            std::cerr << "ERROR: Damaris library produced an error result for damaris_initialize()" << std::endl;
+        if (enableDamarisOutput_) {
+            int err;
+            if (isSimulationRank_) {
+                err = damaris_stop();
+                if (err != DAMARIS_OK) {
+                    std::cerr << "ERROR: Damaris library produced an error result for damaris_stop()" << std::endl;
+                }
+            }
+            err = damaris_finalize();
+            if (err != DAMARIS_OK) {
+                std::cerr << "ERROR: Damaris library produced an error result for damaris_finalize()" << std::endl;
+            }
         }
-#endif
+#endif // HAVE_DAMARIS
 
 #if HAVE_MPI && !HAVE_DUNE_FEM
         MPI_Finalize();
@@ -388,11 +397,6 @@ private:
         externalSetupTimer.start();
 
         handleVersionCmdLine_(argc_, argv_);
-#if HAVE_DUNE_FEM
-        int mpiRank = Dune::Fem::MPIManager::rank();
-#else
-        int mpiRank = EclGenericVanguard::comm().rank();
-#endif // HAVE_DUNE_FEM
 
         // we always want to use the default locale, and thus spare us the trouble
         // with incorrect locale settings.
@@ -421,11 +425,6 @@ private:
             return false; // Whether to run the simulator
         }
 
-        FileOutputMode outputMode = FileOutputMode::OUTPUT_NONE;
-        outputCout_ = false;
-        if (mpiRank == 0)
-            outputCout_ = EWOMS_GET_PARAM(PreTypeTag, bool, EnableTerminalOutput);
-
         std::string deckFilename;
         std::string outputDir;
         if ( eclipseState_ ) {
@@ -434,8 +433,40 @@ private:
         }
         else {
             deckFilename = EWOMS_GET_PARAM(PreTypeTag, std::string, EclDeckFileName);
+            outputDir = EWOMS_GET_PARAM(PreTypeTag, std::string, OutputDir);
         }
 
+#if HAVE_DAMARIS
+        enableDamarisOutput_ = EWOMS_GET_PARAM(PreTypeTag, bool, EnableDamarisOutput);
+        if (enableDamarisOutput_) {
+            // By default EnableAsyncDamarisOutput is false, so Damaris runs in FilePerCore mode:
+            // the simulation results on each node are aggregated by dedicated cores and stored
+            // asynchronously at the end of each iteration.
+            const bool enableAsyncDamarisOutput = EWOMS_GET_PARAM(PreTypeTag, bool, EnableAsyncDamarisOutput);
+            // Using the ModifyModel class to set the XML file for Damaris.
+            // If EnableAsyncDamarisOutput is enabled, all simulation results will
+            // be written into one single file for each iteration using Parallel HDF5.
+            DamarisOutput::initializeDamaris(EclGenericVanguard::comm(), EclGenericVanguard::comm().rank(), outputDir, enableAsyncDamarisOutput);
+            int is_client;
+            MPI_Comm new_comm;
+            int err = damaris_start(&is_client);
+            isSimulationRank_ = (is_client > 0);
+            if (isSimulationRank_) {
+                damaris_client_comm_get(&new_comm);
+                EclGenericVanguard::setCommunication(std::make_unique<Parallel::Communication>(new_comm));
+            } else {
+                return false;
+            }
+        }
+#endif // HAVE_DAMARIS
+
+        int mpiRank = EclGenericVanguard::comm().rank();
+        FileOutputMode outputMode = FileOutputMode::OUTPUT_NONE;
+        outputCout_ = false;
+        if (mpiRank == 0)
+            outputCout_ = EWOMS_GET_PARAM(PreTypeTag, bool, EnableTerminalOutput);
+
         if (deckFilename.empty()) {
             if (mpiRank == 0) {
                 std::cerr << "No input case given. Try '--help' for a usage description.\n";
@@ -465,9 +496,6 @@ private:
         try {
             auto python = std::make_shared<Python>();
             const bool init_from_restart_file = !EWOMS_GET_PARAM(PreTypeTag, bool, SchedRestart);
-            if (outputDir.empty())
-                outputDir = EWOMS_GET_PARAM(PreTypeTag, std::string, OutputDir);
-
             const bool allRanksDbgPrtLog = EWOMS_GET_PARAM(PreTypeTag, bool,
                                                            EnableLoggingFalloutWarning);
             outputMode = setupLogging(mpiRank,
@@ -495,27 +523,6 @@ private:
             if (output_param >= 0)
                 outputInterval = output_param;
 
-#if HAVE_DAMARIS
-            if (EWOMS_GET_PARAM(PreTypeTag, bool, EnableDamarisOutput)) {
-                // By default EnableAsyncDamarisOutput is false so FilePerCore Mode is used in Damaris so all the simulation results
-                // in each node are aggregated by dedicated cores and stored asynchronously at the end of each iteration.
-                if (EWOMS_GET_PARAM(PreTypeTag, bool, EnableAsyncDamarisOutput))
-                    enableAsyncDamarisOutput = true;
-                // Using the ModifyModel class to set the XML file for Damaris.
-                // If EnableAsyncDamarisOutput is enabled, all simulation results will
-                // be written into one single file for each iteration using Parallel HDF5.
-                DamarisOutput::initializeDamaris(EclGenericVanguard::comm(), mpiRank, outputDir, enableAsyncDamarisOutput);
-                int is_client;
-                MPI_Comm new_comm;
-                int err = damaris_start(&is_client) ;
-                isSimulationRank_ = (is_client > 0) ;
-                if (isSimulationRank_) {
-                    damaris_client_comm_get (&new_comm) ;
-                    EclGenericVanguard::setCommunication(std::make_unique<Parallel::Communication>(new_comm));
-                }
-            }
-#endif // HAVE_DAMARIS
-
             readDeck(EclGenericVanguard::comm(), deckFilename, deck_, eclipseState_, schedule_, udqState_, actionState_, wtestState_,
                      summaryConfig_, nullptr, python, std::move(parseContext),
                      init_from_restart_file, outputCout_, outputInterval);
@@ -819,8 +826,8 @@ private:
     // To demonstrate run with non_world_comm
     bool test_split_comm_ = false;
     bool isSimulationRank_ = true;
-    // To use Damaris with parallel HDF5: Asynchronous Output using a separate core
-    bool enableAsyncDamarisOutput = false;
+    bool enableDamarisOutput_ = false;
 };
 
 } // namespace Opm
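For context, here is a minimal, self-contained sketch of the Damaris client/server lifecycle that this commit implements in flow. It is an illustration only, not the repository's code: the file name "damaris_config.xml" is hypothetical (flow builds its configuration through DamarisOutput::initializeDamaris), and error handling is reduced to messages on stderr.

// Sketch: Damaris dedicated-core lifecycle, assuming Damaris and MPI are
// installed; "damaris_config.xml" is a hypothetical configuration file.
#include <Damaris.h>
#include <mpi.h>
#include <iostream>

int main(int argc, char** argv)
{
    MPI_Init(&argc, &argv);

    // All ranks initialize the library from an XML configuration.
    damaris_initialize("damaris_config.xml", MPI_COMM_WORLD);

    // Split ranks into clients (simulation) and dedicated server cores.
    // On server ranks, damaris_start() blocks in the server event loop
    // until the clients call damaris_stop().
    int is_client = 0;
    damaris_start(&is_client);

    if (is_client > 0) {
        // Client ranks run the simulation on the reduced communicator.
        MPI_Comm sim_comm;
        damaris_client_comm_get(&sim_comm);
        // ... time stepping, writing output through Damaris ...

        // Release the server cores; without this, they never return from
        // damaris_start() and the program hangs on exit -- the bug this
        // commit fixes by calling damaris_stop() on simulation ranks.
        if (damaris_stop() != DAMARIS_OK)
            std::cerr << "ERROR: damaris_stop() failed" << std::endl;
    }

    // Every rank, client or server, finalizes Damaris before MPI.
    if (damaris_finalize() != DAMARIS_OK)
        std::cerr << "ERROR: damaris_finalize() failed" << std::endl;

    MPI_Finalize();
    return 0;
}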