Final review changes from PR 4889; added an extra command line argument, --damaris-save-mesh-to-hdf, to output mesh data.

Josh Bowden 2023-12-12 21:48:17 +01:00
parent 9309f5a1bd
commit 757a96941a
9 changed files with 1326 additions and 1325 deletions

View File

@@ -50,6 +50,7 @@
#include <limits>
#include <stdexcept>
#include <string>
+#include <memory>
#include <omp.h>
@@ -66,7 +67,11 @@ struct EnableDamarisOutput {
using type = UndefinedProperty;
};
template<class TypeTag, class MyTypeTag>
-struct EnableDamarisOutputCollective {
+struct DamarisOutputHdfCollective {
using type = UndefinedProperty;
};
+template<class TypeTag, class MyTypeTag>
+struct DamarisSaveMeshToHdf {
+using type = UndefinedProperty;
+};
template<class TypeTag, class MyTypeTag>
@@ -145,11 +150,16 @@ class DamarisWriter : public EclGenericWriter<GetPropType<TypeTag, Properties::G
public:
static void registerParameters()
{
-EWOMS_REGISTER_PARAM(TypeTag, bool, EnableDamarisOutputCollective,
-"Write output via Damaris using parallel HDF5 to get single file per timestep instead of one per Damaris core.");
+EWOMS_REGISTER_PARAM(TypeTag, bool, DamarisOutputHdfCollective,
+"Write output via Damaris using parallel HDF5 to get single file and dataset per timestep instead of one per Damaris \n \
+core with multiple datasets.");
EWOMS_REGISTER_PARAM(TypeTag, bool, DamarisSaveToHdf,
"Set to false to prevent output to HDF5. Uses collective output by default or set --enable-damaris-collective=false to\n \
use file per core (file per Damaris server).");
+EWOMS_REGISTER_PARAM(TypeTag, bool, DamarisSaveMeshToHdf,
+"Saves the mesh data to the HDF5 file (1st iteration only). Will set --damaris-output-hdf-collective to false \n \
+so will use file per core (file per Damaris server) output (global sizes and offset values \n \
+of mesh variables are not being provided as yet).");
EWOMS_REGISTER_PARAM(TypeTag, std::string, DamarisPythonScript,
"Set to the path and filename of a Python script to run on Damaris server resources with access to OPM flow data.");
EWOMS_REGISTER_PARAM(TypeTag, std::string, DamarisPythonParaviewScript,
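Sketch, not part of the diff: the renamed and new flags are read back with EWOMS_GET_PARAM, as getDamarisKeywords() does later in this commit. A minimal, hypothetical retrieval (effectiveCollective is an illustrative name only):

    // Sketch only; mirrors the retrieval pattern used in getDamarisKeywords() below.
    const bool hdfCollective = EWOMS_GET_PARAM(TypeTag, bool, DamarisOutputHdfCollective);
    const bool saveMesh = EWOMS_GET_PARAM(TypeTag, bool, DamarisSaveMeshToHdf);
    // Saving the mesh currently forces file-per-core output, since global sizes
    // and offsets for the mesh variables are not provided yet.
    const bool effectiveCollective = hdfCollective && !saveMesh; // illustrative name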
@@ -322,15 +332,16 @@ private:
rank_, damaris_error_string(dam_err_) ));
}
-// This is an example of writing to the Damaris shared memory directly (i.e. not using damaris_write() to copy data there)
-// We will add the MPI rank value directly into shared memory using the DamarisVar wrapper of the C based Damaris API
-// The shared memory is given back to Damaris on object deletion - i.e. when the unique_ptr goes out of scope.
-//auto mpi_rank_var = std::make_unique<Opm::DamarisOutput::DamarisVar<int>>(
-// 1, {std::string("n_elements_local")}, std::string("MPI_RANK"), rank_)) ;
-// std::unique_ptr<Opm::DamarisOutput::DamarisVar<int>>
+// This is an example of writing to the Damaris shared memory directly (i.e. not using
+// damaris_write() to copy data there)
+// We will add the MPI rank value directly into shared memory using the DamarisVar
+// wrapper of the C based Damaris API.
+// The shared memory is given back to Damaris on object deletion - i.e. when the
+// unique_ptr goes out of scope.
std::unique_ptr<DamarisVarInt> mpi_rank_var( new DamarisVarInt(1,
{std::string("n_elements_local")},
std::string("MPI_RANK"), rank_) ) ;
std::string("MPI_RANK"), rank_) );
// N.B. we have not set any offset values, so HDF5 collective and Dask arrays cannot be used.
mpi_rank_var->setDamarisParameterAndShmem( {this->numElements_ } ) ;
// Fill the created memory area
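Sketch, not part of the diff: the pattern above condensed, with a hypothetical fill loop. DamarisVar::data() is assumed to return the typed shared-memory pointer, as its use with writeGridPoints() below suggests:

    // Sketch, using the DamarisVarInt alias for Opm::DamarisOutput::DamarisVar<int>.
    std::unique_ptr<DamarisVarInt> mpi_rank_var(new DamarisVarInt(1,
            {std::string("n_elements_local")}, std::string("MPI_RANK"), rank_));
    // Sets the Damaris size parameter and attaches a matching shared-memory region.
    mpi_rank_var->setDamarisParameterAndShmem({this->numElements_});
    // Hypothetical fill: write the MPI rank straight into the Damaris shmem block.
    int* shmem = mpi_rank_var->data();
    for (int i = 0; i < this->numElements_; ++i) {
        shmem[i] = rank_;
    }
    // When mpi_rank_var goes out of scope, the shmem is handed back to Damaris.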
@@ -406,11 +417,6 @@ private:
"damaris_set_position(\"GLOBAL_CELL_INDEX\", temp_int64_t);");
}
-//auto mpi_rank_var = std::make_unique<Opm::DamarisOutput::DamarisVar<int>>(
-// 1, {std::string("n_elements_local")}, std::string("MPI_RANK"), rank_)) ;
-// std::unique_ptr<Opm::DamarisOutput::DamarisVar<int>>
-// mpi_rank_var(new Opm::DamarisOutput::DamarisVar<int>(1, {std::string("n_elements_local")}, std::string("MPI_RANK"), rank_)) ;
std::unique_ptr<DamarisVarInt> mpi_rank_var( new DamarisVarInt(1,
{std::string("n_elements_local")},
std::string("MPI_RANK"), rank_) ) ;
@@ -442,19 +448,17 @@ private:
// <variable name="z" layout="n_coords_layout" type="scalar" visualizable="false" unit="m" script="PythonConduitTest" time-varying="false" />
// </group>
-std::unique_ptr<Opm::DamarisOutput::DamarisVar<double>> var_x(new Opm::DamarisOutput::DamarisVar<double>(1, {std::string("n_coords_local")}, std::string("coordset/coords/values/x"), rank_)) ;
+std::unique_ptr<DamarisVarDbl> var_x(new DamarisVarDbl(1, {std::string("n_coords_local")}, std::string("coordset/coords/values/x"), rank_)) ;
// N.B. We have not set any position/offset values (using DamarisVar::SetDamarisPosition).
// They are not needed for mesh data as each process has a local geometric model.
// However, HDF5 collective and Dask arrays cannot be used for this data.
var_x->setDamarisParameterAndShmem( { geomData.getNVertices() } ) ;
-std::unique_ptr<Opm::DamarisOutput::DamarisVar<double>> var_y(new Opm::DamarisOutput::DamarisVar<double>(1, {std::string("n_coords_local")}, std::string("coordset/coords/values/y"), rank_)) ;
-var_y->parameterIsSet() ;
-var_y->setPointersToDamarisShmem() ;
+std::unique_ptr<DamarisVarDbl> var_y(new DamarisVarDbl(1, {std::string("n_coords_local")}, std::string("coordset/coords/values/y"), rank_)) ;
+var_y->setDamarisParameterAndShmem( { geomData.getNVertices() } ) ;
-std::unique_ptr<Opm::DamarisOutput::DamarisVar<double>> var_z(new Opm::DamarisOutput::DamarisVar<double>(1, {std::string("n_coords_local")}, std::string("coordset/coords/values/z"), rank_)) ;
-var_z->parameterIsSet() ;
-var_z->setPointersToDamarisShmem() ;
+std::unique_ptr<DamarisVarDbl> var_z(new DamarisVarDbl(1, {std::string("n_coords_local")}, std::string("coordset/coords/values/z"), rank_)) ;
+var_z->setDamarisParameterAndShmem( { geomData.getNVertices() } ) ;
// Now we can return the memory that Damaris has allocated in shmem and use it to write the X,y,z coordinates
double itime, ftime, exec_time;
@@ -462,8 +466,8 @@ private:
-if ( geomData.writeGridPoints(*var_x,*var_y,*var_z) < 0)
-DUNE_THROW(Dune::IOError, geomData.getError() );
-//if ( geomData.writeGridPoints(var_x->data(),var_y->data(),var_z->data()) < 0)
-// DUNE_THROW(Dune::IOError, geomData.getError() );
+if ( geomData.writeGridPoints(var_x->data(),var_y->data(),var_z->data(), geomData.getNVertices()) < 0)
+DUNE_THROW(Dune::IOError, geomData.getError() );
ftime = omp_get_wtime();
exec_time = ftime - itime;
@@ -489,8 +493,7 @@ private:
std::unique_ptr<DamarisVarInt> var_offsets(new DamarisVarInt(1, {std::string("n_offsets_types_ph")}, std::string("topologies/topo/elements/offsets"), rank_)) ;
var_offsets->setDamarisParameterAndShmem({ geomData.getNCells()}) ;
std::unique_ptr<DamarisVarChar> var_types(new DamarisVarChar(1, {std::string("n_offsets_types_ph")}, std::string("topologies/topo/elements/types"), rank_)) ;
-var_types->parameterIsSet() ;
-var_types->setPointersToDamarisShmem() ;
+var_types->setDamarisParameterAndShmem({ geomData.getNCells()}) ;
// Copy the mesh data from the Dune grid
long i = 0 ;
@@ -507,6 +510,7 @@ private:
i = geomData.writeCellTypes(*var_types) ;
if ( i != geomData.getNCells())
DUNE_THROW(Dune::IOError,geomData.getError());
}
catch (std::exception& e)
{

View File

@@ -394,10 +394,14 @@ struct EnableDamarisOutput<TypeTag, TTag::EclBaseProblem> {
};
// If Damaris is available, write specific variable output in parallel
template<class TypeTag>
-struct EnableDamarisOutputCollective<TypeTag, TTag::EclBaseProblem> {
+struct DamarisOutputHdfCollective<TypeTag, TTag::EclBaseProblem> {
static constexpr bool value = true;
};
+template<class TypeTag>
+struct DamarisSaveMeshToHdf<TypeTag, TTag::EclBaseProblem> {
+static constexpr bool value = false;
+};
template<class TypeTag>
struct DamarisSaveToHdf<TypeTag, TTag::EclBaseProblem> {
static constexpr bool value = true;
};
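Sketch, not part of the diff: downstream code can override the new default with the same specialization pattern. A hypothetical example (TTag::MyProblem is illustrative; the enclosing Opm::Properties namespace is assumed):

    // Hypothetical: opt a user-defined type tag into mesh output by default.
    template<class TypeTag>
    struct DamarisSaveMeshToHdf<TypeTag, TTag::MyProblem> {
        static constexpr bool value = true;
    };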

View File

@@ -83,7 +83,7 @@ DamarisSettings::getKeywords([[maybe_unused]] const Parallel::Communication& com
const std::string& OutputDir)
{
std::string saveToHDF5_str("MyStore");
-if (! saveToDamarisHDF5 ){
+if (! saveToDamarisHDF5_ ){
saveToHDF5_str = "#";
}
@@ -96,43 +96,43 @@ DamarisSettings::getKeywords([[maybe_unused]] const Parallel::Communication& com
std::string publishToPython_str("#"); // to be changed to the name of the PyScript XML element
#ifdef HAVE_PYTHON_ENABLED
// Test if input Python file exists and set the name of the script for <variable ... script="" > )XML elements
if (pythonFilename != ""){
if (FileExists(pythonFilename, comm)) {
if (pythonFilename_ != ""){
if (FileExists(pythonFilename_, comm)) {
publishToPython_str="PythonScript"; // the name of the PyScript XML element
disablePythonXMLstart.clear();
disablePythonXMLfin.clear();
} else {
-pythonFilename.clear(); // set to empty if it does not exist
+pythonFilename_.clear(); // set to empty if it does not exist
disablePythonXMLstart = std::string("!--");
disablePythonXMLfin = std::string("--");
}
}
#else
OpmLog::info(fmt::format("INFO: Opm::DamarisOutput::DamarisKeywords() : Python is not enabled in the Damaris library. "
"The commandline --damaris-python-script={} will be set to empty string", pythonFilename));
pythonFilename.clear();
"The commandline --damaris-python-script={} will be set to empty string", pythonFilename_));
pythonFilename_.clear();
#endif
#ifdef HAVE_PARAVIEW_ENABLED
// Test if input Paraview Python file exists
if (paraviewPythonFilename != ""){
if (FileExists(paraviewPythonFilename, comm)) {
if (paraviewPythonFilename_ != ""){
if (FileExists(paraviewPythonFilename_, comm)) {
disableParaviewXMLstart.clear();
disableParaviewXMLfin.clear();
} else {
-paraviewPythonFilename.clear(); // set to empty if it does not exist
+paraviewPythonFilename_.clear(); // set to empty if it does not exist
disableParaviewXMLstart = std::string("!--");
disableParaviewXMLfin = std::string("--");
}
}
#else
OpmLog::info(fmt::format("INFO: Opm::DamarisOutput::DamarisKeywords() : Paraview is not enabled in the Damaris library. "
"The commandline --damaris-python-paraview-script={} will be set to empty string", paraviewPythonFilename));
paraviewPythonFilename.clear();
"The commandline --damaris-python-paraview-script={} will be set to empty string", paraviewPythonFilename_));
paraviewPythonFilename_.clear();
#endif
// Flag error if both scripts are enabled
-if ((pythonFilename.size() > 0) && (paraviewPythonFilename.size() > 0) )
+if ((pythonFilename_.size() > 0) && (paraviewPythonFilename_.size() > 0) )
{
// A work around of this issue is to remove the Paraview mpi4py library (use print(inspect.getfile(mpi4py)))
// and then possibly not use mpi4py in the Paraview script code. OR try to install paraview mpi4py with headers.
@@ -142,16 +142,21 @@ DamarisSettings::getKeywords([[maybe_unused]] const Parallel::Communication& com
"locally and without header files). "
"Please choose one or the other method of analysis for now. Exiting." );
}
std::string saveMeshToHDF5_str("#");
if (saveMeshToHDF5_ == true) {
enableDamarisOutputCollective_ = false ;
saveMeshToHDF5_str = "MyStore" ;
}
std::string damarisOutputCollective_str;
-if (enableDamarisOutputCollective) {
+if (enableDamarisOutputCollective_) {
damarisOutputCollective_str = "Collective";
} else {
damarisOutputCollective_str = "FilePerCore";
}
std::string simName_str;
-if (damarisSimName.empty()) {
+if (damarisSimName_.empty()) {
// Having a different simulation name is important if multiple simulations
// are running on the same node, as it is used to name the simulations shmem area
// and when one sim finishes it removes its shmem file.
@@ -169,35 +174,35 @@ DamarisSettings::getKeywords([[maybe_unused]] const Parallel::Communication& com
simName_str = "opm-flow-" + simName_str;
}
} else {
-simName_str = damarisSimName;
+simName_str = damarisSimName_;
}
-if ((nDamarisCores > 0) && (nDamarisNodes > 0))
+if ((nDamarisCores_ > 0) && (nDamarisNodes_ > 0))
{
-nDamarisNodes = 0; // Default is to use Damaris Cores
+nDamarisNodes_ = 0; // Default is to use Damaris Cores
}
std::string nDamarisCores_str;
-if ( nDamarisCores != 0 ) {
-nDamarisCores_str = std::to_string(nDamarisCores);
+if ( nDamarisCores_ != 0 ) {
+nDamarisCores_str = std::to_string(nDamarisCores_);
} else {
nDamarisCores_str = "0";
}
std::string nDamarisNodes_str;
-if ( nDamarisNodes != 0 ) {
-nDamarisNodes_str = std::to_string(nDamarisNodes);
+if ( nDamarisNodes_ != 0 ) {
+nDamarisNodes_str = std::to_string(nDamarisNodes_);
} else {
nDamarisNodes_str = "0";
}
std::string shmemSizeBytes_str;
-if (shmemSizeBytes != 0) {
-shmemSizeBytes_str = std::to_string(shmemSizeBytes);
+if (shmemSizeBytes_ != 0) {
+shmemSizeBytes_str = std::to_string(shmemSizeBytes_);
} else {
shmemSizeBytes_str = "536870912"; // 512 MB
}
-std::string logLevel_str(damarisLogLevel);
+std::string logLevel_str(damarisLogLevel_);
std::string logFlush_str("false");
if ((logLevel_str == "debug") || (logLevel_str == "trace") ) {
logFlush_str = "true";
@@ -211,8 +216,9 @@ DamarisSettings::getKeywords([[maybe_unused]] const Parallel::Communication& com
{"_MORE_VARIABLES_REGEX_", ""},
{"_PATH_REGEX_", OutputDir}, /* Do Not change the string "_PATH_REGEX_" as it is used to search for the output path */
{"_MYSTORE_OR_EMPTY_REGEX_", saveToHDF5_str},
{"_PARAVIEW_PYTHON_SCRIPT_",paraviewPythonFilename}, /* this has to be before _PYTHON_SCRIPT_ entry */
{"_PYTHON_SCRIPT_",pythonFilename}, /* if a Python script is specified then assume that we want to publish the data to Python */
{"_MYSTORE_MESH_OR_EMPTY_REGEX_", saveMeshToHDF5_str},
{"_PARAVIEW_PYTHON_SCRIPT_",paraviewPythonFilename_}, /* this has to be before _PYTHON_SCRIPT_ entry */
{"_PYTHON_SCRIPT_",pythonFilename_}, /* if a Python script is specified then assume that we want to publish the data to Python */
{"_PRESSURE_UNIT_","Pa"},
{"_MAKE_AVAILABLE_IN_PYTHON_",publishToPython_str}, /* must match <pyscript name="PythonScript" */
{"_SIM_NAME_",simName_str},
@@ -222,7 +228,7 @@ DamarisSettings::getKeywords([[maybe_unused]] const Parallel::Communication& com
{"_DISABLEPYTHONFIN_",disablePythonXMLfin},
{"_DISABLEPARAVIEWSTART_",disableParaviewXMLstart},
{"_DISABLEPARAVIEWFIN_",disableParaviewXMLfin},
{"_DASK_SCHEDULER_FILE_",damarisDaskFile},
{"_DASK_SCHEDULER_FILE_",damarisDaskFile_},
};
return damaris_keywords;
}
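Sketch, not part of the diff: the returned map drives a find/replace over the Damaris XML template. With --damaris-save-mesh-to-hdf=true the _MYSTORE_MESH_OR_EMPTY_REGEX_ placeholder becomes "MyStore" (see the XML changes at the end of this commit), otherwise "#". The consumption side, assuming ModifyModel can be constructed from the template string returned by initDamarisXmlFile():

    // Sketch; see initializeDamaris() later in this commit for the real call site.
    damaris::model::ModifyModel myMod(initDamarisXmlFile()); // constructor argument assumed
    myMod.RepalceWithRegEx(find_replace_map); // method name is spelled this way in the Damaris API
    const std::string outputDir = find_replace_map.at("_PATH_REGEX_");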

View File

@@ -49,17 +49,20 @@ bool FileExists(const std::string& filename_in,
const Parallel::Communication& comm);
struct DamarisSettings {
-bool enableDamarisOutputCollective = true;
-bool saveToDamarisHDF5 = true;
-std::string pythonFilename;
-std::string paraviewPythonFilename;
+bool enableDamarisOutputCollective_ = true;
+bool saveToDamarisHDF5_ = true;
+// if saveMeshToHDF5_ is true, it requires enableDamarisOutputCollective_ to be false
+// (until offsets are added to mesh data for collective writing)
+bool saveMeshToHDF5_ = false;
+std::string pythonFilename_;
+std::string paraviewPythonFilename_;
-std::string damarisSimName; // empty defaults to opm-sim-<magic_number>
-std::string damarisLogLevel = "info";
-std::string damarisDaskFile = "";
-int nDamarisCores = 1;
-int nDamarisNodes = 0;
-long shmemSizeBytes = 536870912; // 512 MB
+std::string damarisSimName_; // empty defaults to opm-sim-<magic_number>
+std::string damarisLogLevel_ = "info";
+std::string damarisDaskFile_ = "";
+int nDamarisCores_ = 1; // this is the number of (Damaris server) cores per node
+int nDamarisNodes_ = 0;
+long shmemSizeBytes_ = 536870912; // 512 MB
std::map<std::string, std::string>
getKeywords(const Parallel::Communication& comm,
@@ -81,16 +84,17 @@ getDamarisKeywords(const Parallel::Communication& comm, const std::string& Outpu
DamarisSettings settings;
// Get all of the Damaris keywords (except for --enable-damaris, which is used in simulators/flow/Main.hpp)
// These command line arguments are defined in ebos/damariswriter.hh and defaults are set in ebos/eclproblem_properties.hh
-settings.enableDamarisOutputCollective = EWOMS_GET_PARAM(TypeTag, bool, EnableDamarisOutputCollective);
-settings.saveToDamarisHDF5 = EWOMS_GET_PARAM(TypeTag, bool, DamarisSaveToHdf);
-settings.pythonFilename = EWOMS_GET_PARAM(TypeTag, std::string, DamarisPythonScript);
-settings.paraviewPythonFilename = EWOMS_GET_PARAM(TypeTag, std::string, DamarisPythonParaviewScript);
-settings.damarisSimName = EWOMS_GET_PARAM(TypeTag, std::string, DamarisSimName);
-settings.nDamarisCores = EWOMS_GET_PARAM(TypeTag, int, DamarisDedicatedCores);
-settings.nDamarisNodes = EWOMS_GET_PARAM(TypeTag, int, DamarisDedicatedNodes);
-settings.shmemSizeBytes = EWOMS_GET_PARAM(TypeTag, long, DamarisSharedMemorySizeBytes);
-settings.damarisLogLevel = EWOMS_GET_PARAM(TypeTag, std::string, DamarisLogLevel);
-settings.damarisDaskFile = EWOMS_GET_PARAM(TypeTag, std::string, DamarisDaskFile);
+settings.enableDamarisOutputCollective_ = EWOMS_GET_PARAM(TypeTag, bool, DamarisOutputHdfCollective);
+settings.saveMeshToHDF5_ = EWOMS_GET_PARAM(TypeTag, bool, DamarisSaveMeshToHdf);
+settings.saveToDamarisHDF5_ = EWOMS_GET_PARAM(TypeTag, bool, DamarisSaveToHdf);
+settings.pythonFilename_ = EWOMS_GET_PARAM(TypeTag, std::string, DamarisPythonScript);
+settings.paraviewPythonFilename_ = EWOMS_GET_PARAM(TypeTag, std::string, DamarisPythonParaviewScript);
+settings.damarisSimName_ = EWOMS_GET_PARAM(TypeTag, std::string, DamarisSimName);
+settings.nDamarisCores_ = EWOMS_GET_PARAM(TypeTag, int, DamarisDedicatedCores);
+settings.nDamarisNodes_ = EWOMS_GET_PARAM(TypeTag, int, DamarisDedicatedNodes);
+settings.shmemSizeBytes_ = EWOMS_GET_PARAM(TypeTag, long, DamarisSharedMemorySizeBytes);
+settings.damarisLogLevel_ = EWOMS_GET_PARAM(TypeTag, std::string, DamarisLogLevel);
+settings.damarisDaskFile_ = EWOMS_GET_PARAM(TypeTag, std::string, DamarisDaskFile);
return settings.getKeywords(comm, OutputDir);
}
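Sketch, not part of the diff: a hypothetical end-to-end wiring of the two functions in this commit. The namespaces and output path are assumptions; comm, rank and TypeTag come from the surrounding simulator setup:

    // Sketch only: build the keyword map, then initialize Damaris with it.
    const std::string outputDir = "/path/to/output"; // assumed
    const auto keywords = Opm::DamarisOutput::getDamarisKeywords<TypeTag>(comm, outputDir);
    Opm::DamarisOutput::initializeDamaris(MPI_COMM_WORLD, rank, keywords);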

View File

@@ -21,6 +21,8 @@
#define XSD_CXX11_TEMPLATE_ALIAS 1
#include <config.h>
#include <damaris/model/ModifyModel.hpp>
+#include <opm/simulators/utils/DamarisKeywords.hpp>
+#include <opm/simulators/utils/DamarisOutputModule.hpp>
@@ -37,10 +39,10 @@ std::string initDamarisXmlFile(); // Defined in initDamarisXMLFile.cpp, to avoid
/**
* Initialize Damaris by either reading a file specified by the environment variable FLOW_DAMARIS_XML_FILE or
-* by filling in th XML file and storing it in the chosen directory
+* by filling in the XML file and storing it in the chosen directory
*/
void
-initializeDamaris(MPI_Comm comm, int mpiRank, std::map<std::string, std::string>& find_replace_map )
+initializeDamaris(const MPI_Comm comm, const int mpiRank, const std::map<std::string, std::string>& find_replace_map )
{
int dam_err;
@@ -63,14 +65,16 @@ initializeDamaris(MPI_Comm comm, int mpiRank, std::map<std::string, std::string>
// std::map<std::string, std::string> find_replace_map = DamarisKeywords(outputDir, enableDamarisOutputCollective);
myMod.RepalceWithRegEx(find_replace_map);
std::string outputDir = find_replace_map["_PATH_REGEX_"];
std::string outputDir = find_replace_map.at("_PATH_REGEX_");
std::string damaris_xml_filename_str = outputDir + "/damaris_config.xml";
if (mpiRank == 0) {
myMod.SaveXMLStringToFile(damaris_xml_filename_str);
}
OpmLog::info("Initializing Damaris using internally built file: " + damaris_xml_filename_str + " (N.B. use environment variable FLOW_DAMARIS_XML_FILE to override)");
OpmLog::info("Initializing Damaris using internally built file: " + damaris_xml_filename_str +
" (N.B. use environment variable FLOW_DAMARIS_XML_FILE to override)");
dam_err = damaris_initialize(damaris_xml_filename_str.c_str(), comm);
if (dam_err != DAMARIS_OK) {
OpmLog::error(fmt::format("damariswriter::initializeDamaris() : ( rank:{}) "

View File

@@ -18,8 +18,6 @@
along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#include <config.h>
#include <opm/common/OpmLog/OpmLog.hpp>
#include <limits>
@@ -28,10 +26,6 @@
#include <fmt/format.h>
#if HAVE_MPI
#include <mpi.h>
#endif
#include <Damaris.h>
#include <opm/simulators/utils/ParallelCommunication.hpp>
@@ -54,6 +48,6 @@ namespace Opm::DamarisOutput
* 2/ Reading a file specified by the environment variable FLOW_DAMARIS_XML_FILE
*
*/
-void initializeDamaris(MPI_Comm comm, int mpiRank, std::map<std::string, std::string>& find_replace_map );
+void initializeDamaris(const MPI_Comm comm, const int mpiRank, const std::map<std::string, std::string>& find_replace_map );
} // namespace Opm::DamarisOutput

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -108,9 +108,9 @@ std::string initDamarisXmlFile()
<parameter name="n_coords_local" type="int" value="1" />
<layout name="n_coords_layout" type="double" dimensions="n_coords_local" comment="For the individual x, y and z coordinates of the mesh vertices, these values are referenced in the topologies/topo/subelements/connectivity_pg data" />
<group name="coordset/coords/values">
<variable name="x" layout="n_coords_layout" type="scalar" visualizable="false" unit="m" script="_MAKE_AVAILABLE_IN_PYTHON_" time-varying="false" store="MyStore" />
<variable name="y" layout="n_coords_layout" type="scalar" visualizable="false" unit="m" script="_MAKE_AVAILABLE_IN_PYTHON_" time-varying="false" store="MyStore" />
<variable name="z" layout="n_coords_layout" type="scalar" visualizable="false" unit="m" script="_MAKE_AVAILABLE_IN_PYTHON_" time-varying="false" store="MyStore" />
<variable name="x" layout="n_coords_layout" type="scalar" visualizable="false" unit="m" script="_MAKE_AVAILABLE_IN_PYTHON_" time-varying="false" store="_MYSTORE_MESH_OR_EMPTY_REGEX_" />
<variable name="y" layout="n_coords_layout" type="scalar" visualizable="false" unit="m" script="_MAKE_AVAILABLE_IN_PYTHON_" time-varying="false" store="_MYSTORE_MESH_OR_EMPTY_REGEX_" />
<variable name="z" layout="n_coords_layout" type="scalar" visualizable="false" unit="m" script="_MAKE_AVAILABLE_IN_PYTHON_" time-varying="false" store="_MYSTORE_MESH_OR_EMPTY_REGEX_" />
</group>
<parameter name="n_connectivity_ph" type="int" value="1" />
@@ -119,9 +119,9 @@ std::string initDamarisXmlFile()
<layout name="n_offsets_layout_ph" type="int" dimensions="n_offsets_types_ph" comment="Layout for the offsets_ph" />
<layout name="n_types_layout_ph" type="char" dimensions="n_offsets_types_ph" comment="Layout for the types_ph " />
<group name="topologies/topo/elements">
<variable name="connectivity" layout="n_connections_layout_ph" type="scalar" visualizable="false" script="_MAKE_AVAILABLE_IN_PYTHON_" time-varying="false" store="MyStore" />
<variable name="offsets" layout="n_offsets_layout_ph" type="scalar" visualizable="false" script="_MAKE_AVAILABLE_IN_PYTHON_" time-varying="false" store="MyStore" />
<variable name="types" layout="n_types_layout_ph" type="scalar" visualizable="false" script="_MAKE_AVAILABLE_IN_PYTHON_" time-varying="false" store="MyStore" />
<variable name="connectivity" layout="n_connections_layout_ph" type="scalar" visualizable="false" script="_MAKE_AVAILABLE_IN_PYTHON_" time-varying="false" store="_MYSTORE_MESH_OR_EMPTY_REGEX_" />
<variable name="offsets" layout="n_offsets_layout_ph" type="scalar" visualizable="false" script="_MAKE_AVAILABLE_IN_PYTHON_" time-varying="false" store="_MYSTORE_MESH_OR_EMPTY_REGEX_" />
<variable name="types" layout="n_types_layout_ph" type="scalar" visualizable="false" script="_MAKE_AVAILABLE_IN_PYTHON_" time-varying="false" store="_MYSTORE_MESH_OR_EMPTY_REGEX_" />
</group>
<mesh name="us_mesh" type="unstructured" topology="3" time-varying="false"