Improved in-line help comments; output MPI cell values to HDF5 only when saving mesh to HDF5; removed timing data

This commit is contained in:
Josh Bowden 2023-12-14 10:27:05 +01:00
parent e16cd9a3b3
commit 55dc80725f
5 changed files with 28 additions and 18 deletions

View File

@@ -461,16 +461,9 @@ private:
DamarisVarDbl var_z(1, {std::string("n_coords_local")}, std::string("coordset/coords/values/z"), rank_) ;
var_z.setDamarisParameterAndShmem( { geomData.getNVertices() } ) ;
// Now we can return the memory that Damaris has allocated in shmem and use it to write the X,y,z coordinates
double itime, ftime, exec_time;
itime = omp_get_wtime();
// Now we can use the shared memory area that Damaris has allocated to write the x,y,z coordinates
if ( geomData.writeGridPoints(var_x, var_y, var_z) < 0)
DUNE_THROW(Dune::IOError, geomData.getError() );
ftime = omp_get_wtime();
exec_time = ftime - itime;
// OpmLog::info("\n\nTime taken geomData.writeGridPoints(): is " + std::to_string(exec_time) ) ;
std::cout << "\n\n rank_: " << rank_ << " Time taken geomData.writeGridPoints(): is " + std::to_string(exec_time) << std::endl ;
// This is the template XML model for connectivity, offsets and types, as defined in initDamarisXmlFile.cpp which is used to
// build the internally generated Damaris XML configuration file.
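
For reference, the surrounding call sequence applies the same pattern to all three coordinate arrays. A minimal sketch assembled from this hunk; the var_x and var_y constructions (outside the hunk) are assumed to mirror the var_z line shown:

DamarisVarDbl var_x(1, {std::string("n_coords_local")}, std::string("coordset/coords/values/x"), rank_);
DamarisVarDbl var_y(1, {std::string("n_coords_local")}, std::string("coordset/coords/values/y"), rank_);
DamarisVarDbl var_z(1, {std::string("n_coords_local")}, std::string("coordset/coords/values/z"), rank_);

// Each call sets the Damaris size parameter to the local vertex count and
// obtains a matching shared-memory block from Damaris.
var_x.setDamarisParameterAndShmem({ geomData.getNVertices() });
var_y.setDamarisParameterAndShmem({ geomData.getNVertices() });
var_z.setDamarisParameterAndShmem({ geomData.getNVertices() });

// geomData fills the three shmem blocks with the x, y and z coordinates.
if (geomData.writeGridPoints(var_x, var_y, var_z) < 0)
    DUNE_THROW(Dune::IOError, geomData.getError());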

View File

@@ -387,7 +387,7 @@ struct EnableEclOutput<TypeTag,TTag::EclBaseProblem> {
static constexpr bool value = true;
};
#ifdef HAVE_DAMARIS
//! Enable the Damaris output by default
//! Disable the Damaris HDF5 output by default
template<class TypeTag>
struct EnableDamarisOutput<TypeTag, TTag::EclBaseProblem> {
static constexpr bool value = false;
@@ -397,46 +397,63 @@ template<class TypeTag>
struct DamarisOutputHdfCollective<TypeTag, TTag::EclBaseProblem> {
static constexpr bool value = true;
};
// Save the reservoir model mesh data to the HDF5 file (even if field data HDF5 output is disabled)
template<class TypeTag>
struct DamarisSaveMeshToHdf<TypeTag, TTag::EclBaseProblem> {
static constexpr bool value = false;
};
// Save the simulation field variables (currently only PRESSURE) to the HDF5 file
template<class TypeTag>
struct DamarisSaveToHdf<TypeTag, TTag::EclBaseProblem> {
static constexpr bool value = true;
};
// Specify the path and filename of a Python script to run at the end of each iteration's output
template<class TypeTag>
struct DamarisPythonScript<TypeTag, TTag::EclBaseProblem> {
static constexpr auto value = "";
};
// Specify a Paraview Catalyst in situ visualisation script (if Paraview support is enabled in Damaris)
template<class TypeTag>
struct DamarisPythonParaviewScript<TypeTag, TTag::EclBaseProblem> {
static constexpr auto value = "";
};
// Specify a unique name for the Damaris simulation (used as prefix to HDF5 filenames)
template<class TypeTag>
struct DamarisSimName<TypeTag, TTag::EclBaseProblem> {
static constexpr auto value = "";
};
// Specify the number of Damaris cores (dc) to create per node. The dc count must divide
// evenly into the number of remaining simulation ranks, e.g. with mpirun -np 16 on one node
// the following are allowed:
// 1 dc + 15 sim ranks
// or 2 dc + 14 sim ranks
// or 4 dc + 12 sim ranks
// but *not* 3 dc + 13 sim ranks (3 does not divide 13; see the check sketched after this hunk)
template<class TypeTag>
struct DamarisDedicatedCores<TypeTag, TTag::EclBaseProblem> {
static constexpr int value = 1;
};
// Specify the number of Damaris nodes to create
template<class TypeTag>
struct DamarisDedicatedNodes<TypeTag, TTag::EclBaseProblem> {
static constexpr int value = 0;
};
// Specify a name for the Damaris shared memory file (a unique name will be created by default)
template<class TypeTag>
struct DamarisSharedMemoryName<TypeTag, TTag::EclBaseProblem> {
static constexpr auto value = "";  // default name is empty; DamarisKeywords() creates a unique name if needed
};
// Specify the shared memory file size
template<class TypeTag>
struct DamarisSharedMemorySizeBytes<TypeTag, TTag::EclBaseProblem> {
static constexpr long value = 536870912; // 512 MB
};
// Specify the Damaris log level - if set to debug, the log is flushed regularly
template<class TypeTag>
struct DamarisLogLevel<TypeTag, TTag::EclBaseProblem> {
static constexpr auto value = "info";
};
// Specify the Dask JSON file that specifies the Dask scheduler etc.
template<class TypeTag>
struct DamarisDaskFile<TypeTag, TTag::EclBaseProblem> {
static constexpr auto value = "";
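
The divisibility rule above can be validated before the simulation launches. A minimal sketch under that rule; checkDamarisSplit() is a hypothetical helper for illustration, not an OPM function:

#include <stdexcept>
#include <string>

// Hypothetical helper: enforces the DamarisDedicatedCores rule above, i.e. the
// per-node dedicated-core count must divide the remaining per-node simulation
// ranks evenly.
void checkDamarisSplit(int ranksPerNode, int dedicatedCores)
{
    const int simRanks = ranksPerNode - dedicatedCores;
    if (dedicatedCores <= 0 || simRanks <= 0 || simRanks % dedicatedCores != 0)
        throw std::invalid_argument(std::to_string(dedicatedCores) + " dc + "
                                    + std::to_string(simRanks)
                                    + " sim ranks is not a valid split");
}

// With mpirun -np 16 on one node:
//   checkDamarisSplit(16, 1);  // ok:     1 dc + 15 sim ranks
//   checkDamarisSplit(16, 2);  // ok:     2 dc + 14 sim ranks
//   checkDamarisSplit(16, 4);  // ok:     4 dc + 12 sim ranks
//   checkDamarisSplit(16, 3);  // throws: 3 does not divide 13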

View File

@@ -212,7 +212,7 @@ public:
/**
* Constructor - sets private data values and does not initialise the shared memory area.
*
* N.B. These objects need a matching <variable ...> and <paramater ...> in the Damaris XML file
* N.B. These objects need matching <variable ...> and <parameter ...> entries in the Damaris XML file
*
* Two usages:
* Example XML definition:
@@ -293,7 +293,7 @@ public:
* by specifying the values for the variable's parameters.
* i.e. makes the data() pointer available and sets the size of the memory block it points to.
*
* N.B. These objects need a matching <variable ...> and <paramater ...> in the Damaris XML file
* N.B. These objects need matching <variable ...> and <parameter ...> entries in the Damaris XML file
*
* Example use:
* Example XML definition:
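
The N.B. above means every DamarisVar must line up with entries in the Damaris XML configuration. A hedged sketch of such a matching pair, embedded as a C++ raw string in the same style as initDamarisXmlFile(); the n_coords_local parameter and coordset/coords/values/x variable names appear elsewhere in this commit, while the layout name and attribute values are illustrative assumptions:

// Hypothetical XML fragment matching a DamarisVarDbl built with parameter
// "n_coords_local" and variable "coordset/coords/values/x"; the layout name
// n_coords_layout is an assumption.
const std::string xmlFragment = R"(
    <parameter name="n_coords_local" type="int" value="1" />
    <layout name="n_coords_layout" type="double" dimensions="n_coords_local" />
    <variable name="coordset/coords/values/x" layout="n_coords_layout"
              type="scalar" visualizable="false" time-varying="false" />
)";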

View File

@@ -35,11 +35,11 @@
to get the references into the vertex array and element (aka cell) types for
the sub-partition. This allows the full set of vertices to be reused for
visualisation of the various sub-partitions, at the expense of copying all
the vertices. Typically a user is interested in the interiorBoarder elements
the vertices. Typically a user is interested in the interiorBorder elements
which make use of the bulk (~80%) of the vertices. This saves having to
renumber the indexes to the vertices for the sub-partitions. The vertex data
can be retrieved as separate x, y and z arrays, or as a single array of
structures, or as a single structure of arrays based
array of structures, or as a single structure-of-arrays based array.
Example:
@@ -480,11 +480,11 @@ public:
ConnectivityVertexOrder whichOrder,
long max_size = 0) {
if (max_size < ncorners_) {
// assert(max_size >= ncorners_);
OPM_THROW(
std::runtime_error,
"Opm::GridDataOutput::writeConnectivity( T* connectivity_inout ) " +
" Input objects size (" + std::to_string(max_size) +
"Opm::GridDataOutput::writeConnectivity( T* connectivity_inout,... ) " +
" Input max_size value (" + std::to_string(max_size) +
") is not sufficient to fit the ncorners_ values (" +
std::to_string(ncorners_) + ")");
}
@@ -622,7 +622,7 @@ public:
OPM_THROW(std::runtime_error,
"Opm::GridDataOutput::writeOffsetsCells( VectType& "
"offsets_inout ) " +
" Input objects check_size (" + std::to_string(check_size) +
" Input objects size (" + std::to_string(offsets_inout.size()) +
") is not sufficient to fit the ncells_ values (" +
std::to_string(ncells_) + ")");
}
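
Both messages guard caller-supplied buffers that must hold at least ncorners_ and ncells_ elements respectively. A minimal sketch of calls sized to pass the checks; geomData is assumed to expose getNCorners()/getNCells() accessors (only getNVertices() appears in this commit) and VTK is assumed to be a ConnectivityVertexOrder enumerator:

#include <vector>

// Buffers sized from the (assumed) accessors so the size checks above pass.
std::vector<int> connectivity(geomData.getNCorners());
geomData.writeConnectivity(connectivity.data(), Opm::GridDataOutput::VTK,
                           static_cast<long>(connectivity.size()));

std::vector<int> offsets(geomData.getNCells());
geomData.writeOffsetsCells(offsets);  // throws std::runtime_error if offsets is too small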

View File

@@ -55,7 +55,7 @@ std::string initDamarisXmlFile()
<variable name="PRESSURE" layout="zonal_layout_usmesh" type="scalar" visualizable="true" mesh="us_mesh" unit="_PRESSURE_UNIT_" centering="zonal" select-file="GLOBAL_CELL_INDEX" store="_MYSTORE_OR_EMPTY_REGEX_" script="_MAKE_AVAILABLE_IN_PYTHON_" />
_MORE_VARIABLES_REGEX_
<variable name="MPI_RANK" layout="zonal_layout_usmesh_integer" type="scalar" visualizable="true" mesh="us_mesh" unit="rank" centering="zonal" store="_MYSTORE_OR_EMPTY_REGEX_" time-varying="false" select-file="GLOBAL_CELL_INDEX" script="_MAKE_AVAILABLE_IN_PYTHON_" comment="The MPI rank of each cell"/>
<variable name="MPI_RANK" layout="zonal_layout_usmesh_integer" type="scalar" visualizable="true" mesh="us_mesh" unit="rank" centering="zonal" store="_MYSTORE_MESH_OR_EMPTY_REGEX_" time-varying="false" select-file="GLOBAL_CELL_INDEX" script="_MAKE_AVAILABLE_IN_PYTHON_" comment="The MPI rank of each cell"/>
<variable name="KRNSW_GO" layout="zonal_layout_usmesh" type="scalar" visualizable="true" mesh="#" unit="" centering="zonal" time-varying="true" select-file="GLOBAL_CELL_INDEX" store="#" script="#" />
<variable name="KRNSW_OW" layout="zonal_layout_usmesh" type="scalar" visualizable="true" mesh="#" unit="" centering="zonal" time-varying="true" select-file="GLOBAL_CELL_INDEX" store="#" script="#" />