Changes needed to add Damaris functionality. The current state is that we output the pressure field and we use both HDF5 and parallel HDF5.

Damaris initialization is added after InitMpi but before starting the simulation. Damaris will invoke a separate core for writing in
parallel and leave the rest of the cores for the simulator. The main changes are in main, where we start Damaris, and in eclwriter, where
we use Damaris to output the PRESSURE field. To test Damaris one can use --enable-damaris-output=true, and to use parallel HDF5 one can use
--enable-async-damaris-output=true (false is the default).
This commit is contained in:
Joshua Bowden 2021-10-22 11:09:12 +02:00 committed by Elyes Ahmed
parent 7e1c63c92d
commit fa7af3540c
11 changed files with 471 additions and 7 deletions

View File

@ -31,6 +31,7 @@ option(USE_CHOW_PATEL_ILU "Use the iterative ILU by Chow and Patel?" OFF)
option(USE_CHOW_PATEL_ILU_GPU "Run iterative ILU decomposition on GPU? Requires USE_CHOW_PATEL_ILU" OFF)
option(USE_CHOW_PATEL_ILU_GPU_PARALLEL "Try to use more parallelism on the GPU during the iterative ILU decomposition? Requires USE_CHOW_PATEL_ILU_GPU" OFF)
option(BUILD_FLOW_ALU_GRID "Build flow blackoil with alu grid" OFF)
option(USE_DAMARIS_LIB "Use the Damaris library for asynchronous I/O?" OFF)
# The following was copied from CMakeLists.txt in opm-common.
# TODO: factor out the common parts in opm-common and opm-simulator as a cmake module
@ -63,7 +64,6 @@ if (OPM_ENABLE_PYTHON)
set(PYTHON_EXECUTABLE ${Python3_EXECUTABLE})
endif()
if(SIBLING_SEARCH AND NOT opm-common_DIR)
# guess the sibling dir
get_filename_component(_leaf_dir_name ${PROJECT_BINARY_DIR} NAME)
@ -113,6 +113,13 @@ include(Findopm-tests)
# with the find module
include ("${project}-prereqs")
# Only add the HAVE_DAMARIS define to config.h if the build requested Damaris support
if (USE_DAMARIS_LIB)
if (Damaris_FOUND)
set (opm-simulators_CONFIG_VAR ${opm-simulators_CONFIG_VAR} HAVE_DAMARIS)
endif()
endif()
# Make sure we are using the same compiler underneath
# NVCC as for the rest. In the case that NVCC does not support
# that compiler it will error out. Unfortunately this will only
@ -335,7 +342,8 @@ endmacro (files_hook)
macro (tests_hook)
endmacro (tests_hook)
# all setup common to the OPM library modules is done here
include (OpmLibMain)
@ -544,6 +552,38 @@ if(HAVE_FPGA)
ExternalProject_Get_Property(FPGA_library binary_dir)
target_link_libraries(opmsimulators PUBLIC ${binary_dir}/fpga_lib_alveo_u280.a)
endif()
if (USE_DAMARIS_LIB) # User requested Damaris support
if (Damaris_FOUND)
message(STATUS "The Damaris library was found: ${Damaris_VERSION} ${Damaris_DIR}")
if (Damaris_HAS_HDF5)
message(STATUS "The Damaris library has HDF5 support: ${HDF5_VERSION}")
else()
message(STATUS "The Damaris library does NOT have HDF5 support")
endif()
if (Damaris_HAS_VISIT)
message(STATUS "The Damaris library has VisIt support: ${VisIt_VERSION}")
else()
message(STATUS "The Damaris library does NOT have VisIt support")
endif()
if (Damaris_HAS_CATALYST)
message(STATUS "The Damaris library has Catalyst support: ${Catalyst_VERSION} ${Paraview_VERSION}")
else()
message(STATUS "The Damaris library does NOT have Catalyst support")
endif()
include_directories(${Damaris_INCLUDE_DIRS})
target_link_libraries(opmsimulators PUBLIC ${Damaris_LIBRARIES})
else()
message(STATUS "The Damaris library was requested but NOT found")
endif()
else() # User did not request Damaris support
if (Damaris_FOUND)
message(STATUS "The Damaris library was NOT requested but was found: ${Damaris_VERSION} ${Damaris_DIR}")
else()
message(STATUS "The Damaris library was NOT requested and was NOT found")
endif()
endif()
install(DIRECTORY doc/man1 DESTINATION ${CMAKE_INSTALL_MANDIR}
FILES_MATCHING PATTERN "*.1")

View File

@ -66,6 +66,8 @@ list (APPEND MAIN_SOURCE_FILES
opm/simulators/utils/PartiallySupportedFlowKeywords.cpp
opm/simulators/utils/readDeck.cpp
opm/simulators/utils/UnsupportedFlowKeywords.cpp
opm/simulators/utils/DamarisOutputModule.cpp
opm/simulators/utils/DamarisKeywords.cpp
opm/simulators/timestepping/AdaptiveSimulatorTimer.cpp
opm/simulators/timestepping/AdaptiveTimeSteppingEbos.cpp
opm/simulators/timestepping/TimeStepControl.cpp

View File

@ -47,6 +47,14 @@ class EclipseState;
template<class FluidSystem, class Scalar>
class EclGenericOutputBlackoilModule {
public:
// Raw pointer to the locally stored oil-phase pressure values, intended for
// direct hand-off to the Damaris writer (see EclWriter::writeOutput).
// May be null when the buffer is empty -- callers check for nullptr before use.
Scalar* getPRESSURE_ptr(void) {
return (this->oilPressure_.data()) ;
};
// Number of elements in the local oil-phase pressure buffer.
// NOTE(review): narrows std::vector::size_type to int -- safe only while the
// local element count stays below INT_MAX.
int getPRESSURE_size( void ) {
return (this->oilPressure_.size()) ;
};
// write cumulative production and injection reports to output
void outputCumLog(size_t reportStepNum,
const bool substep,

View File

@ -432,11 +432,25 @@ struct EnableEclOutput<TypeTag,TTag::EclBaseProblem> {
static constexpr bool value = true;
};
//! Enable the Damaris output by default
template<class TypeTag>
struct EnableDamarisOutput<TypeTag, TTag::EclBaseProblem> {
static constexpr bool value = false; };
//! By default, write the Damaris output using separate core
//!
//! This has only an effect if EnableVtkOutput is true
// If available, write the ECL output in a non-blocking manner
template<class TypeTag>
struct EnableAsyncEclOutput<TypeTag, TTag::EclBaseProblem> {
static constexpr bool value = true;
};
// If Damaris is available, write specific variable output in parallel
template<class TypeTag>
struct EnableAsyncDamarisOutput<TypeTag, TTag::EclBaseProblem> {
static constexpr bool value = false;
};
// Write ESMRY file for fast loading of summary data
template<class TypeTag>
struct EnableEsmry<TypeTag, TTag::EclBaseProblem> {
@ -695,6 +709,8 @@ public:
EWOMS_REGISTER_PARAM(TypeTag, bool, EnableEclOutput,
"Write binary output which is compatible with the commercial "
"Eclipse simulator");
EWOMS_REGISTER_PARAM(TypeTag, bool, EnableDamarisOutput,
"Write a specific variable using Damaris in a separate core");
EWOMS_REGISTER_PARAM(TypeTag, bool, EclOutputDoublePrecision,
"Tell the output writer to use double precision. Useful for 'perfect' restarts");
EWOMS_REGISTER_PARAM(TypeTag, unsigned, RestartWritingInterval,

View File

@ -39,8 +39,9 @@
#include <dune/grid/common/partitionset.hh>
#include <limits>
#include <stdexcept>
#if HAVE_DAMARIS
#include <opm/simulators/utils/DamarisOutputModule.hpp>
#endif
#include <string>
namespace Opm::Properties {
@ -58,10 +59,17 @@ struct EclOutputDoublePrecision {
using type = UndefinedProperty;
};
template<class TypeTag, class MyTypeTag>
struct EnableDamarisOutput {
using type = UndefinedProperty;
};
template<class TypeTag, class MyTypeTag>
struct EnableAsyncDamarisOutput {
using type = UndefinedProperty;
};
template<class TypeTag, class MyTypeTag>
struct EnableEsmry {
using type = UndefinedProperty;
};
} // namespace Opm::Properties
namespace Opm {
@ -116,6 +124,8 @@ public:
EWOMS_REGISTER_PARAM(TypeTag, bool, EnableAsyncEclOutput,
"Write the ECL-formated results in a non-blocking way (i.e., using a separate thread).");
EWOMS_REGISTER_PARAM(TypeTag, bool, EnableAsyncDamarisOutput,
"Write specific variable in parallel using Damaris (i.e., using parallel HDF5).");
EWOMS_REGISTER_PARAM(TypeTag, bool, EnableEsmry,
"Write ESMRY file for fast loading of summary data.");
}
@ -135,6 +145,10 @@ public:
EWOMS_GET_PARAM(TypeTag, bool, EnableAsyncEclOutput), EWOMS_GET_PARAM(TypeTag, bool, EnableEsmry))
, simulator_(simulator)
{
#ifdef HAVE_DAMARIS
if (enableDamarisOutput_())
this->damarisUpdate = true ;
#endif
this->eclOutputModule_ = std::make_unique<EclOutputBlackOilModule<TypeTag>>(simulator, this->wbp_index_list_, this->collectToIORank_);
this->wbp_index_list_.clear();
}
@ -280,11 +294,34 @@ public:
void writeOutput(bool isSubStep)
{
const int reportStepNum = simulator_.episodeIndex() + 1;
this->prepareLocalCellData(isSubStep, reportStepNum);
this->eclOutputModule_->outputErrorLog(simulator_.gridView().comm());
#ifdef HAVE_DAMARIS
if (EWOMS_GET_PARAM(TypeTag, bool, EnableDamarisOutput)) {
/* N.B. damarisUpdate should be set to true if at any time the model geometry changes */
if (this->damarisUpdate == true)
{
const auto& gridView = simulator_.gridView();
const int numElements = gridView.size(/*codim=*/0); // I think this might be the full model size? No, it is the local ranks model size
Opm::DamarisOutput::setupDamarisWritingPars(simulator_.vanguard().grid().comm(), numElements);
// By default we assume a static grid
this->damarisUpdate = false;
}
if (! isSubStep) {
data::Solution localCellData = {};
this->eclOutputModule_->assignToSolution(localCellData);
// Output the PRESSURE field
if (this->eclOutputModule_->getPRESSURE_ptr() != nullptr) {
damaris_write("PRESSURE", (void *) this->eclOutputModule_->getPRESSURE_ptr());
damaris_end_iteration( );
}
}
}
#else
// output using eclWriter if enabled
auto localWellData = simulator_.problem().wellModel().wellData();
auto localGroupAndNetworkData = simulator_.problem().wellModel()
@ -328,6 +365,7 @@ public:
curTime, nextStepSize,
EWOMS_GET_PARAM(TypeTag, bool, EclOutputDoublePrecision));
}
#endif
}
void beginRestart()
@ -422,6 +460,9 @@ public:
private:
static bool enableEclOutput_()
{ return EWOMS_GET_PARAM(TypeTag, bool, EnableEclOutput); }
static bool enableDamarisOutput_()
{ return EWOMS_GET_PARAM(TypeTag, bool, EnableDamarisOutput); }
const EclipseState& eclState() const
{ return simulator_.vanguard().eclState(); }
@ -503,6 +544,9 @@ private:
Simulator& simulator_;
std::unique_ptr<EclOutputBlackOilModule<TypeTag>> eclOutputModule_;
Scalar restartTimeStepSize_;
#ifdef HAVE_DAMARIS
bool damarisUpdate ; ///< Whenever this is true writeOutput() will set up Damris offsets of model fields
#endif
};
} // namespace Opm

View File

@ -44,6 +44,7 @@ set (opm-simulators_DEPS
"opm-material REQUIRED"
"opm-grid REQUIRED"
"opm-models REQUIRED"
"Damaris 1.7"
)
find_package_deps(opm-simulators)

View File

@ -74,6 +74,10 @@
#include <opm/simulators/utils/ParallelEclipseState.hpp>
#endif
#if HAVE_DAMARIS
#include <opm/simulators/utils/DamarisOutputModule.hpp>
#endif
#include <cassert>
#include <cstdlib>
#include <filesystem>
@ -165,6 +169,13 @@ public:
#endif // HAVE_MPI
EclGenericVanguard::setCommunication(nullptr);
#if HAVE_DAMARIS
int err = damaris_finalize();
if (err != DAMARIS_OK ) {
std::cerr << "ERROR: Damaris library produced an error result for damaris_initialize()" << std::endl;
}
#endif
#if HAVE_MPI && !HAVE_DUNE_FEM
MPI_Finalize();
@ -381,7 +392,7 @@ private:
int mpiRank = Dune::Fem::MPIManager::rank();
#else
int mpiRank = EclGenericVanguard::comm().rank();
#endif
#endif // HAVE_DUNE_FEM
// we always want to use the default locale, and thus spare us the trouble
// with incorrect locale settings.
@ -484,6 +495,27 @@ private:
if (output_param >= 0)
outputInterval = output_param;
#if HAVE_DAMARIS
if (EWOMS_GET_PARAM(PreTypeTag, bool, EnableDamarisOutput)) {
// By default EnableAsyncDamarisOutput is false so FilePerCore Mode is used in Damaris so all the simulation results
// in each node are aggregated by dedicated cores and stored asynchronously at the end of each iteration.
if (EWOMS_GET_PARAM(PreTypeTag, bool, EnableAsyncDamarisOutput))
enableAsyncDamarisOutput = true;
// Using the ModifyModel class to set the XML file for Damaris.
// If EnableAsyncDamarisOutput is enabled, all simulation results will
// be written into one single file for each iteration using Parallel HDF5.
DamarisOutput::initializeDamaris(EclGenericVanguard::comm(), mpiRank, outputDir, enableAsyncDamarisOutput);
int is_client;
MPI_Comm new_comm;
int err = damaris_start(&is_client) ;
isSimulationRank_ = (is_client > 0) ;
if (isSimulationRank_) {
damaris_client_comm_get (&new_comm) ;
EclGenericVanguard::setCommunication(std::make_unique<Parallel::Communication>(new_comm));
}
}
#endif // HAVE_DAMARIS
readDeck(EclGenericVanguard::comm(), deckFilename, deck_, eclipseState_, schedule_, udqState_, actionState_, wtestState_,
summaryConfig_, nullptr, python, std::move(parseContext),
init_from_restart_file, outputCout_, outputInterval);
@ -787,6 +819,8 @@ private:
// To demonstrate run with non_world_comm
bool test_split_comm_ = false;
bool isSimulationRank_ = true;
// To use Damaris with parallel HDF5: Asynchronous Output using a separate core
bool enableAsyncDamarisOutput = false;
};
} // namespace Opm

View File

@ -0,0 +1,63 @@
/*
Copyright 2021 Equinor.
This file is part of the Open Porous Media project (OPM).
OPM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OPM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#include <opm/simulators/utils/DamarisKeywords.hpp>
#include <string>
#include <map>
/*
Below are the Damaris keywords supported by Damaris, used to fill in
the built-in XML file.
The entries in the map below are substituted for the corresponding
placeholders in the XML template; only the output directory and the
FileMode are chosen by the user.
*/
namespace Opm::DamarisOutput
{
/// Build the placeholder -> replacement map used to specialise the built-in
/// Damaris XML configuration template (see initDamarisXmlFile()).
///
/// \param OutputDir                directory substituted for _PATH_REGEX_
///                                 (Damaris log location)
/// \param enableAsyncDamarisOutput true  -> "Collective" HDF5 FileMode
///                                 (one file per iteration via parallel HDF5);
///                                 false -> "FilePerCore" FileMode (default)
/// \return map from template placeholder to replacement text
std::map<std::string,std::string> DamarisKeywords(std::string OutputDir, bool enableAsyncDamarisOutput) {
    // Entries that are identical for both file modes.
    // BUGFIX: the FilePerCore branch previously listed {"_File_Mode", OutputDir},
    // duplicating the _File_Mode key (std::map keeps the first occurrence) and
    // leaving _PATH_REGEX_ unsubstituted in the generated XML.
    std::map<std::string,std::string> damaris_keywords = {
        {"_SHMEM_BUFFER_BYTES_REGEX_","536870912"},
        {"_DC_REGEX_","1"},
        {"_DN_REGEX_","0"},
        {"_MORE_VARIABLES_REGEX_",""},
        {"_PATH_REGEX_", OutputDir},
        {"_MYSTORE_OR_EMPTY_REGEX_","MyStore"},
    };
    // Only the HDF5 FileMode differs between the two configurations.
    if (enableAsyncDamarisOutput) {
        damaris_keywords["_File_Mode"] = "Collective";
    }
    else {
        damaris_keywords["_File_Mode"] = "FilePerCore";
    }
    return damaris_keywords;
}
}

View File

@ -0,0 +1,42 @@
/*
Copyright 2021 Equinor.
This file is part of the Open Porous Media project (OPM).
OPM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OPM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef OPM_DAMARISKEYWORDS_HEADER_INCLUDED
#define OPM_DAMARISKEYWORDS_HEADER_INCLUDED
#include <string>
#include <map>
/*
Declaration of the function returning the std::map of keywords supported
by Damaris. Most entries in the map are static and will not be changed;
only the FileMode and the output directory may be set by the user.
*/
namespace Opm::DamarisOutput
{
// Returns the placeholder -> replacement map used to specialise the built-in
// Damaris XML template: OutputDir fills _PATH_REGEX_ and
// enableAsyncDamarisOutput selects the HDF5 FileMode
// (true = "Collective" / parallel HDF5, false = "FilePerCore").
std::map<std::string,std::string> DamarisKeywords(std::string OutputDir, bool enableAsyncDamarisOutput);
} // namespace Opm::DamarisOutput
#endif

View File

@ -0,0 +1,173 @@
/*
Copyright 2022 SINTEF Digital, Mathematics and Cybernetics.
This file is part of the Open Porous Media project (OPM).
OPM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OPM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#include <config.h>

#include <damaris/model/ModifyModel.hpp>

#include <opm/simulators/utils/DamarisKeywords.hpp>
#include <opm/simulators/utils/DamarisOutputModule.hpp>
#include <opm/simulators/utils/ParallelCommunication.hpp>

#include <cstdint>
#include <cstdlib>
#include <iostream>
#include <limits>
#include <map>
#include <string>
#include <vector>
/*
Below is the built-in XML configuration template for Damaris.
The placeholder entries in the template are replaced with the
corresponding values from DamarisKeywords().
*/
namespace Opm::DamarisOutput
{
// Returns the built-in Damaris XML configuration template as a string.
// The _*_REGEX_ placeholders (and "_File_Mode") are later substituted with the
// values from DamarisKeywords() via damaris::model::ModifyModel in
// initializeDamaris().
std::string initDamarisXmlFile()
{
// NOTE(review): the template hardcodes the HDF5 FilesPath to "./" while the
// log uses _PATH_REGEX_ -- confirm the intended output location for the data.
std::string init_damaris = R"V0G0N(<?xml version="1.0"?>
<simulation name="opm-flow" language="c"
xmlns="http://damaris.gforge.inria.fr/damaris/model">
<architecture>
<domains count="1"/>
<dedicated cores="_DC_REGEX_" nodes="_DN_REGEX_"/>
<buffer name="buffer" size="_SHMEM_BUFFER_BYTES_REGEX_" />
<placement />
<queue name="queue" size="300" />
</architecture>
<data>
<parameter name="n_elements_total" type="int" value="1" />
<parameter name="n_elements_local" type="int" value="1" />
<parameter name="n" type="int" value="1" />
<layout name="zonal_layout_usmesh" type="double" dimensions="n_elements_local" global="n_elements_total" comment="For the field data e.g. Pressure" />
<variable name="PRESSURE" layout="zonal_layout_usmesh" type="scalar" visualizable="false" unit="Pa" centering="zonal" store="_MYSTORE_OR_EMPTY_REGEX_" />
_MORE_VARIABLES_REGEX_
</data>
<storage>
<store name="MyStore" type="HDF5">
<option key="FileMode">"_File_Mode"</option>
<option key="XDMFMode">NoIteration</option>
<option key="FilesPath">./</option>
</store>
</storage>
<actions>
</actions>
<log FileName="_PATH_REGEX_/damaris_log/exa_dbg" RotationSize="5" LogFormat="[%TimeStamp%]: %Message%" Flush="True" LogLevel="debug" />
</simulation>)V0G0N";
return init_damaris;
}
// Initialise the Damaris library for this run.
//
// The built-in XML template (initDamarisXmlFile()) is specialised with the
// substitution map from DamarisKeywords() -- i.e. the output directory and the
// HDF5 FileMode -- and rank 0 saves it as <OutputDir>/damaris_config.xml.
// damaris_initialize() is then called with either that generated file or, if
// the FLOW_DAMARIS_XML_FILE environment variable is set, the file it names.
void initializeDamaris(MPI_Comm comm, int mpiRank, std::string OutputDir, bool enableAsyncDamarisOutput)
{
    // Specialise the built-in XML template: every occurrence of a key from the
    // substitution map is replaced with its mapped value.
    damaris::model::ModifyModel xml_model = damaris::model::ModifyModel(initDamarisXmlFile());
    std::map<std::string,std::string> substitutions = DamarisKeywords(OutputDir, enableAsyncDamarisOutput);
    xml_model.RepalceWithRegEx(substitutions); // (sic) -- this is the Damaris API spelling

    const std::string generated_xml_path = OutputDir + "/damaris_config.xml";
    if (mpiRank == 0) {
        xml_model.SaveXMLStringToFile(generated_xml_path);
    }

    // An externally supplied configuration file takes precedence over the
    // generated one.
    const char* env_xml_path = getenv("FLOW_DAMARIS_XML_FILE");
    int status;
    if (env_xml_path != NULL) {
        std::cout << "INFO: initializing Damaris from environment variable FLOW_DAMARIS_XML_FILE: " << env_xml_path << std::endl;
        // NOTE(review): this branch passes MPI_COMM_WORLD rather than 'comm' -- confirm intended.
        status = damaris_initialize(env_xml_path, MPI_COMM_WORLD);
        if (status != DAMARIS_OK ) {
            std::cerr << "ERROR: damaris_initialize() error via FLOW_DAMARIS_XML_FILE=" << env_xml_path << std::endl;
        }
    }
    else {
        std::cout << "INFO: initializing Damaris using internally built file:" << generated_xml_path << std::endl;
        status = damaris_initialize(generated_xml_path.c_str(), comm);
        if (status != DAMARIS_OK ) {
            std::cerr << "ERROR: damaris_initialize() error via built file:" << std::endl << xml_model.GetConfigString();
        }
    }
}
// Publish the per-rank size and global offset of the local grid section to
// Damaris, so the Damaris servers can allocate storage and the output
// functionality (e.g. the HDF5 store) can place each rank's data at the
// correct global offset.
//
// \param comm                  communicator over the simulation (client) ranks
// \param n_elements_local_grid number of grid elements owned by this rank
//                              (the local, not the full-model, size -- it is
//                              allgathered and prefix-summed below)
void setupDamarisWritingPars(Parallel::Communication comm, const int n_elements_local_grid)
{
    int damaris_err = DAMARIS_OK;

    const int nranks = comm.size();
    const int rank = comm.rank();

    std::vector<unsigned long long> elements_rank_sizes(nranks);   // local size of every rank, gathered below
    std::vector<unsigned long long> elements_rank_offsets(nranks); // start offset of every rank's section

    const unsigned long long n_elements_local = n_elements_local_grid ;
    std::cout << "INFO (" << rank << "): n_elements_local_grid = " << n_elements_local_grid << std::endl ;

    // Gather every rank's local element count onto all ranks, then prefix-sum
    // to get each rank's offset in the concatenated (rank-ordered) global array.
    comm.allgather(&n_elements_local, 1, elements_rank_sizes.data());
    elements_rank_offsets[0] = 0ULL ;
    for (int t1 = 1 ; t1 < nranks; t1++) {
        elements_rank_offsets[t1] = elements_rank_offsets[t1-1] + elements_rank_sizes[t1-1];
    }

    // Global/total size = last rank's offset + last rank's size.
    unsigned long long n_elements_global_max = elements_rank_offsets[nranks-1] ;
    n_elements_global_max += elements_rank_sizes[nranks-1] ;
    if (rank == 0 ) {
        std::cout << "INFO (" << rank << "): n_elements_global_max = " << n_elements_global_max << std::endl ;
    }

    // Damaris parameters only support the int data type, which limits models
    // to fewer than 2^31 elements; warn if the cast below would overflow.
    if (n_elements_global_max > static_cast<unsigned long long>(std::numeric_limits<int>::max())) {
        std::cerr << "ERROR: setupDamarisWritingPars(): total element count " << n_elements_global_max
                  << " exceeds INT_MAX and cannot be passed to damaris_parameter_set()" << std::endl ;
    }

    // Set the parameters so the Damaris servers can allocate the correct
    // amount of memory for the variable.
    int temp_int = static_cast<int>(elements_rank_sizes[rank]) ;
    damaris_err = damaris_parameter_set("n_elements_local",&temp_int, sizeof(int));
    if (damaris_err != DAMARIS_OK ) {
        std::cerr << "ERROR: Damaris library produced an error result for damaris_parameter_set(n_elements_local,&temp_int, sizeof(int));" << std::endl ;
    }

    temp_int = static_cast<int>(n_elements_global_max) ;
    damaris_err = damaris_parameter_set("n_elements_total",&temp_int, sizeof(int));
    if (damaris_err != DAMARIS_OK ) {
        // BUGFIX: this message previously named "n_elements_local".
        std::cerr << "ERROR: Damaris library produced an error result for damaris_parameter_set(n_elements_total,&temp_int, sizeof(int));" << std::endl ;
    }

    // Tell Damaris this rank's offset in the global array so the output
    // functionality (e.g. HDF5Store) can position the PRESSURE data correctly.
    int64_t temp_int64_t[1] ;
    temp_int64_t[0] = static_cast<int64_t>(elements_rank_offsets[rank]) ;
    damaris_err = damaris_set_position("PRESSURE",temp_int64_t) ;
    if (damaris_err != DAMARIS_OK ) {
        std::cerr << "ERROR: Damaris library produced an error result for damaris_set_position(\"PRESSURE\",temp_int64_t);" << std::endl ;
    }
}
}

View File

@ -0,0 +1,41 @@
/*
Copyright 2022 SINTEF Digital, Mathematics and Cybernetics.
This file is part of the Open Porous Media project (OPM).
OPM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OPM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#include <string>
#include <Damaris.h>
#include <opm/simulators/utils/ParallelCommunication.hpp>
/*
Declarations for setting up Damaris: the built-in XML configuration
template and the helpers that initialize Damaris and publish the
grid-size parameters needed for writing the PRESSURE field.
*/
namespace Opm::DamarisOutput
{
// Returns the built-in Damaris XML configuration template as a string.
std::string initDamarisXmlFile();
// Initialize Damaris by filling in the XML template and storing it in the chosen directory
void initializeDamaris(MPI_Comm comm, int mpiRank, std::string OutputDir, bool enableAsyncDamarisOutput);
// Setup Damaris parameters for writing, e.g., grid sizes and per-rank offsets, to output the "PRESSURE" field
void setupDamarisWritingPars(Parallel::Communication comm, const int n_elements_local_grid);
} // namespace Opm::DamarisOutput