// -*- mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
// vi: set et ts=4 sw=4 sts=4:
/*
  Copyright 2022 SINTEF Digital, Mathematics and Cybernetics.
  Copyright 2023 Inria, Bretagne–Atlantique Research Center

  This file is part of the Open Porous Media project (OPM).

  OPM is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 2 of the License, or
  (at your option) any later version.

  OPM is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with OPM. If not, see <http://www.gnu.org/licenses/>.

  Consult the COPYING file in the top-level source directory of this
  module for the precise wording of the license and the list of
  copyright holders.
*/
/*!
 * \file
 *
 * \copydoc Opm::DamarisWriter
 */
#ifndef OPM_DAMARIS_WRITER_HPP
#define OPM_DAMARIS_WRITER_HPP

#include <dune/grid/common/partitionset.hh>

#include <opm/common/OpmLog/OpmLog.hpp>

#include <opm/simulators/flow/countGlobalCells.hpp>
#include <opm/simulators/flow/DamarisProperties.hpp>
#include <opm/simulators/flow/EclGenericWriter.hpp>
#include <opm/simulators/flow/FlowBaseVanguard.hpp>
#include <opm/simulators/flow/OutputBlackoilModule.hpp>

#include <opm/simulators/utils/DamarisVar.hpp>
#include <opm/simulators/utils/DeferredLoggingErrorHelpers.hpp>
#include <opm/simulators/utils/GridDataOutput.hpp>
#include <opm/simulators/utils/ParallelSerialization.hpp>

#include <fmt/format.h>

#include <algorithm>
#include <memory>
#include <numeric>
#include <string>
#include <vector>

namespace Opm {

namespace DamarisOutput {

int endIteration(int rank);
int setParameter(const char* field, int rank, int value);
int setPosition(const char* field, int rank, int64_t pos);
int write(const char* field, int rank, const void* data);
int setupWritingPars(Parallel::Communication comm,
                     const int n_elements_local_grid,
                     std::vector<unsigned long long>& elements_rank_offsets);

} // namespace DamarisOutput
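
// Note on usage: DamarisWriter::writeOutput() below uses the DamarisOutput
// helpers in the following order - setupWritingPars() is called once to set
// the per-rank sizes and global offsets, setPosition() and write() are then
// called for each published field, and endIteration() is called after the
// fields for the report step have been written.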

/*!
 * \ingroup EclBlackOilSimulator
 *
 * \brief Collects the necessary output values and passes them to the Damaris server processes.
 *
 * Currently only the PRESSURE, GLOBAL_CELL_INDEX and MPI_RANK fields are passed through.
 * This class also passes the 3D mesh information to Damaris to enable in situ visualization
 * via ParaView or Ascent, and it is designed so that variables specified through the Eclipse
 * input deck will be made available to Damaris.
 */
template <class TypeTag>
class DamarisWriter : public EclGenericWriter<GetPropType<TypeTag, Properties::Grid>,
                                              GetPropType<TypeTag, Properties::EquilGrid>,
                                              GetPropType<TypeTag, Properties::GridView>,
                                              GetPropType<TypeTag, Properties::ElementMapper>,
                                              GetPropType<TypeTag, Properties::Scalar>>
{
    using Simulator = GetPropType<TypeTag, Properties::Simulator>;
    using GridView = GetPropType<TypeTag, Properties::GridView>;
    using Grid = GetPropType<TypeTag, Properties::Grid>;
    using EquilGrid = GetPropType<TypeTag, Properties::EquilGrid>;
    using Scalar = GetPropType<TypeTag, Properties::Scalar>;
    using ElementContext = GetPropType<TypeTag, Properties::ElementContext>;
    using Element = typename GridView::template Codim<0>::Entity;
    using ElementMapper = GetPropType<TypeTag, Properties::ElementMapper>;
    using BaseType = EclGenericWriter<Grid, EquilGrid, GridView, ElementMapper, Scalar>;

    using DamarisVarInt = DamarisOutput::DamarisVar<int>;
    using DamarisVarChar = DamarisOutput::DamarisVar<char>;
    using DamarisVarDbl = DamarisOutput::DamarisVar<double>;

public:
    static void registerParameters()
    {
        Parameters::registerParam<TypeTag, Properties::DamarisOutputHdfCollective>
            ("Write output via Damaris using parallel HDF5 to "
             "get single file and dataset per timestep instead "
             "of one per Damaris core with multiple datasets.");
        Parameters::registerParam<TypeTag, Properties::DamarisSaveToHdf>
            ("Set to false to prevent output to HDF5. "
             "Uses collective output by default or "
             "set --enable-damaris-collective=false to "
             "use file per core (file per Damaris server).");
        Parameters::registerParam<TypeTag, Properties::DamarisSaveMeshToHdf>
            ("Saves the mesh data to the HDF5 file (1st iteration only). "
             "Will set --damaris-output-hdf-collective to false "
             "so will use file per core (file per Damaris server) output "
             "(global sizes and offset values of mesh variables are not being provided as yet).");
        Parameters::registerParam<TypeTag, Properties::DamarisPythonScript>
            ("Set to the path and filename of a Python script to run on "
             "Damaris server resources with access to OPM flow data.");
        Parameters::registerParam<TypeTag, Properties::DamarisPythonParaviewScript>
            ("Set to the path and filename of a Paraview Python script "
             "to run on Paraview Catalyst (1 or 2) on Damaris server "
             "resources with access to OPM flow data.");
        Parameters::registerParam<TypeTag, Properties::DamarisSimName>
            ("The name of the simulation to be used by Damaris. "
             "If empty (the default) then Damaris uses \"opm-sim-<random-number>\". "
             "This name is used for the Damaris HDF5 file name prefix. "
             "Make unique if writing to the same output directory.");
        Parameters::registerParam<TypeTag, Properties::DamarisLogLevel>
            ("The log level for the Damaris logging system (boost log based). "
             "Levels are: [trace, debug, info, warning, error, fatal]. "
             "Currently debug and info are useful.");
        Parameters::registerParam<TypeTag, Properties::DamarisDaskFile>
            ("The name of a Dask JSON configuration file (if using Dask for processing).");
        Parameters::registerParam<TypeTag, Properties::DamarisDedicatedCores>
            ("Set the number of dedicated cores (MPI processes) "
             "that should be used for Damaris processing (per node). "
             "Must divide evenly into the number of simulation ranks (client ranks).");
        Parameters::registerParam<TypeTag, Properties::DamarisDedicatedNodes>
            ("Set the number of dedicated nodes (full nodes) "
             "that should be used for Damaris processing (per simulation). "
             "Must divide evenly into the number of simulation nodes.");
        Parameters::registerParam<TypeTag, Properties::DamarisSharedMemorySizeBytes>
            ("Set the size of the shared memory buffer used for IPC "
             "between the simulation and the Damaris resources. "
             "Needs to hold all the variables published, possibly over "
             "multiple simulation iterations.");
        Parameters::registerParam<TypeTag, Properties::DamarisSharedMemoryName>
            ("The name of the shared memory area to be used by Damaris for the current simulation. "
             "If empty (the default) then Damaris uses \"opm-damaris-<random-string>\". "
             "This name should be unique if multiple simulations are running on "
             "the same node/server as it is used for the Damaris shmem name and by "
             "the Python Dask library to locate sections of variables.");
    }
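
    // Illustrative command-line sketch only (an assumption, not an authoritative
    // reference): the help texts above quote --damaris-output-hdf-collective and
    // --enable-damaris-collective; the remaining switches below are hypothetical
    // spellings derived from the same hyphenated naming convention.
    //
    //   flow CASE.DATA --enable-damaris-output=true \
    //                  --damaris-output-hdf-collective=false \
    //                  --damaris-dedicated-cores=2
    //
    // Consult the registered parameter descriptions above for the exact semantics.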

    // The Simulator object should preferably have been const - the
    // only reason that is not the case is due to the SummaryState
    // object owned deep down by the vanguard.
    DamarisWriter(Simulator& simulator)
        : BaseType(simulator.vanguard().schedule(),
                   simulator.vanguard().eclState(),
                   simulator.vanguard().summaryConfig(),
                   simulator.vanguard().grid(),
                   ((simulator.vanguard().grid().comm().rank() == 0)
                    ? &simulator.vanguard().equilGrid()
                    : nullptr),
                   simulator.vanguard().gridView(),
                   simulator.vanguard().cartesianIndexMapper(),
                   ((simulator.vanguard().grid().comm().rank() == 0)
                    ? &simulator.vanguard().equilCartesianIndexMapper()
                    : nullptr),
                   false, false)
        , simulator_(simulator)
    {
        this->damarisUpdate_ = true;

        this->rank_ = this->simulator_.vanguard().grid().comm().rank();
        this->nranks_ = this->simulator_.vanguard().grid().comm().size();

        this->elements_rank_offsets_.resize(this->nranks_);

        // Get the size of the unique vector elements (excludes the shared 'ghost' elements)
        //
        // Might possibly use
        //
        //   detail::countLocalInteriorCellsGridView(this->simulator_.gridView())
        //
        // from countGlobalCells.hpp instead of calling std::distance() directly.
        {
            const auto& gridView = this->simulator_.gridView();
            const auto& interior_elements = elements(gridView, Dune::Partitions::interior);

            this->numElements_ = std::distance(interior_elements.begin(), interior_elements.end());
        }

        if (this->nranks_ > 1) {
            auto smryCfg = (this->rank_ == 0)
                ? this->eclIO_->finalSummaryConfig()
                : SummaryConfig{};

            eclBroadcast(this->simulator_.vanguard().grid().comm(), smryCfg);

            this->damarisOutputModule_ = std::make_unique<OutputBlackOilModule<TypeTag>>
                (simulator, smryCfg, this->collectOnIORank_);
        }
        else {
            this->damarisOutputModule_ = std::make_unique<OutputBlackOilModule<TypeTag>>
                (simulator, this->eclIO_->finalSummaryConfig(), this->collectOnIORank_);
        }
    }

    /*!
     * \brief Writes localCellData through to the Damaris servers. Sets up the unstructured mesh which is passed to Damaris.
     */
    void writeOutput(data::Solution& localCellData, bool isSubStep)
    {
        OPM_TIMEBLOCK(writeOutput);
        const int reportStepNum = simulator_.episodeIndex() + 1;

        // added this as localCellData was not being written
        if (!isSubStep)
            this->damarisOutputModule_->invalidateLocalData();

        this->prepareLocalCellData(isSubStep, reportStepNum);
        this->damarisOutputModule_->outputErrorLog(simulator_.gridView().comm());

        // The damarisWriter is not outputting well or aquifer data (yet)
        auto localWellData = simulator_.problem().wellModel().wellData(); // data::Well

        if (!isSubStep)
        {
            if (localCellData.size() == 0) {
                this->damarisOutputModule_->assignToSolution(localCellData);
            }

            // add cell data to perforations for RFT output
            this->damarisOutputModule_->addRftDataToWells(localWellData, reportStepNum);

            // On the first call, and whenever the mesh or variable sizes change, damarisUpdate_ is set to true
            if (damarisUpdate_ == true) {
                // Sets the Damaris parameter values "n_elements_local" and "n_elements_total"
                // which define the sizes of the Damaris variables, per rank and globally (over all ranks).
                // Also sets the offsets to where a rank's array data sits within the global array.
                // This is useful for HDF5 output and for defining distributed arrays in Dask.
                dam_err_ = DamarisOutput::setupWritingPars(simulator_.vanguard().grid().comm(),
                                                           numElements_, elements_rank_offsets_);

                // sets data for the non-time-varying variables MPI_RANK and GLOBAL_CELL_INDEX
                this->setGlobalIndexForDamaris();

                // Set the geometry data for the mesh model.
                // This function writes the mesh data directly to Damaris shared memory using Opm::DamarisOutput::DamarisVar objects.
                this->writeDamarisGridOutput();

                // Currently we assume a static mesh grid by default (the geometry is unchanged throughout the simulation).
                // Set damarisUpdate_ to true whenever the geometry sent to Damaris needs to be updated.
                this->damarisUpdate_ = false;
            }

            if (this->damarisOutputModule_->getPRESSURE_ptr() != nullptr)
            {
                dam_err_ = DamarisOutput::setPosition("PRESSURE", rank_,
                                                      this->elements_rank_offsets_[rank_]);
                dam_err_ = DamarisOutput::write("PRESSURE", rank_,
                                                this->damarisOutputModule_->getPRESSURE_ptr());

                dam_err_ = DamarisOutput::endIteration(rank_);
            }
        } // end of !isSubStep
    }

private:
    int dam_err_;
    int rank_;
    int nranks_;
    int numElements_;   ///< size of the unique vector elements
    Simulator& simulator_;

    std::unique_ptr<OutputBlackOilModule<TypeTag>> damarisOutputModule_;
    std::vector<unsigned long long> elements_rank_offsets_;
    bool damarisUpdate_ = false;  ///< Whenever this is true writeOutput() will set up the Damaris mesh information and offsets of model fields

    static bool enableDamarisOutput_()
    {
        return EWOMS_GET_PARAM(TypeTag, bool, EnableDamarisOutput);
    }

    void setGlobalIndexForDamaris()
    {
        // GLOBAL_CELL_INDEX is used to reorder variable data when writing to disk.
        // This is enabled using select-file="GLOBAL_CELL_INDEX" in the <variable> XML tag.
        if (this->collectOnIORank_.isParallel()) {
            const std::vector<int>& local_to_global =
                this->collectOnIORank_.localIdxToGlobalIdxMapping();

            dam_err_ = DamarisOutput::write("GLOBAL_CELL_INDEX", rank_, local_to_global.data());
        } else {
            std::vector<int> local_to_global_filled;
            local_to_global_filled.resize(this->numElements_);
            std::iota(local_to_global_filled.begin(), local_to_global_filled.end(), 0);

            dam_err_ = DamarisOutput::write("GLOBAL_CELL_INDEX", rank_, local_to_global_filled.data());
        }

        // This is an example of writing to the Damaris shared memory directly (i.e. not using
        // damaris_write() to copy data there).
        // We will add the MPI rank value directly into shared memory using the DamarisVar
        // wrapper of the C based Damaris API.
        // The shared memory is given back to Damaris when the DamarisVarInt goes out of scope.
        DamarisVarInt mpi_rank_var_test(1, {"n_elements_local"}, "MPI_RANK", rank_);
        mpi_rank_var_test.setDamarisParameterAndShmem({this->numElements_});

        // Fill the created memory area
        std::fill(mpi_rank_var_test.data(), mpi_rank_var_test.data() + numElements_, rank_);
    }

    void writeDamarisGridOutput()
    {
        const auto& gridView = simulator_.gridView();
        GridDataOutput::SimMeshDataAccessor geomData(gridView, Dune::Partitions::interior);

        try {
            const bool hasPolyCells = geomData.polyhedralCellPresent();
            if (hasPolyCells) {
                OpmLog::error(fmt::format("ERROR: rank {} The DUNE geometry grid has polyhedral elements - these elements are currently not supported.", rank_));
            }

            // This is the template XML model for x,y,z coordinates defined in initDamarisXmlFile.cpp which is used to
            // build the internally generated Damaris XML configuration file.
            // <parameter name="n_coords_local" type="int" value="1" />
            // <parameter name="n_coords_global" type="int" value="1" comment="only needed if we need to write to HDF5 in Collective mode"/>
            // <layout name="n_coords_layout" type="double" dimensions="n_coords_local" comment="For the individual x, y and z coordinates of the mesh vertices" />
            // <group name="coordset/coords/values">
            //     <variable name="x" layout="n_coords_layout" type="scalar" visualizable="false" unit="m" script="PythonConduitTest" time-varying="false" />
            //     <variable name="y" layout="n_coords_layout" type="scalar" visualizable="false" unit="m" script="PythonConduitTest" time-varying="false" />
            //     <variable name="z" layout="n_coords_layout" type="scalar" visualizable="false" unit="m" script="PythonConduitTest" time-varying="false" />
            // </group>

            DamarisVarDbl var_x(1, {"n_coords_local"}, "coordset/coords/values/x", rank_);
            // N.B. We have not set any position/offset values (using DamarisVar::SetDamarisPosition).
            // They are not needed for mesh data as each process has a local geometric model.
            // However, HDF5 collective and Dask arrays cannot be used for this data.
            var_x.setDamarisParameterAndShmem({geomData.getNVertices()});

            DamarisVarDbl var_y(1, {"n_coords_local"}, "coordset/coords/values/y", rank_);
            var_y.setDamarisParameterAndShmem({geomData.getNVertices()});

            DamarisVarDbl var_z(1, {"n_coords_local"}, "coordset/coords/values/z", rank_);
            var_z.setDamarisParameterAndShmem({geomData.getNVertices()});

            // Now we can use the shared memory area that Damaris has allocated and use it to write the x,y,z coordinates
            if (geomData.writeGridPoints(var_x, var_y, var_z) < 0)
                DUNE_THROW(Dune::IOError, geomData.getError());

            // This is the template XML model for connectivity, offsets and types, as defined in initDamarisXmlFile.cpp which is used to
            // build the internally generated Damaris XML configuration file.
            // <parameter name="n_connectivity_ph" type="int" value="1" />
            // <layout name="n_connections_layout_ph" type="int" dimensions="n_connectivity_ph" comment="Layout for connectivities" />
            // <parameter name="n_offsets_types_ph" type="int" value="1" />
            // <layout name="n_offsets_layout_ph" type="int" dimensions="n_offsets_types_ph+1" comment="Layout for the offsets_ph" />
            // <layout name="n_types_layout_ph" type="char" dimensions="n_offsets_types_ph" comment="Layout for the types_ph" />
            // <group name="topologies/topo/elements">
            //     <variable name="connectivity" layout="n_connections_layout_ph" type="scalar" visualizable="false" unit="" script="PythonConduitTest" time-varying="false" />
            //     <variable name="offsets" layout="n_offsets_layout_ph" type="scalar" visualizable="false" unit="" script="PythonConduitTest" time-varying="false" />
            //     <variable name="types" layout="n_types_layout_ph" type="scalar" visualizable="false" unit="" script="PythonConduitTest" time-varying="false" />
            // </group>

            DamarisVarInt var_connectivity(1, {"n_connectivity_ph"},
                                           "topologies/topo/elements/connectivity", rank_);
            var_connectivity.setDamarisParameterAndShmem({geomData.getNCorners()});

            DamarisVarInt var_offsets(1, {"n_offsets_types_ph"},
                                      "topologies/topo/elements/offsets", rank_);
            var_offsets.setDamarisParameterAndShmem({geomData.getNCells() + 1});

            DamarisVarChar var_types(1, {"n_offsets_types_ph"},
                                     "topologies/topo/elements/types", rank_);
            var_types.setDamarisParameterAndShmem({geomData.getNCells()});

            // Copy the mesh data from the Dune grid
            long i = 0;
            GridDataOutput::ConnectivityVertexOrder vtkorder = GridDataOutput::VTK;

            i = geomData.writeConnectivity(var_connectivity, vtkorder);
            if (i != geomData.getNCorners())
                DUNE_THROW(Dune::IOError, geomData.getError());

            i = geomData.writeOffsetsCells(var_offsets);
            if (i != geomData.getNCells() + 1)
                DUNE_THROW(Dune::IOError, geomData.getError());

            i = geomData.writeCellTypes(var_types);
            if (i != geomData.getNCells())
                DUNE_THROW(Dune::IOError, geomData.getError());
        }
        catch (std::exception& e)
        {
            OpmLog::error(e.what());
        }
    }

    void prepareLocalCellData(const bool isSubStep,
                              const int reportStepNum)
    {
        OPM_TIMEBLOCK(prepareLocalCellData);
        if (damarisOutputModule_->localDataValid()) {
            return;
        }

        const auto& gridView = simulator_.vanguard().gridView();
        const int num_interior = detail::
            countLocalInteriorCellsGridView(gridView);
        const bool log = this->collectOnIORank_.isIORank();

        damarisOutputModule_->allocBuffers(num_interior, reportStepNum,
                                           isSubStep, log, /*isRestart*/ false);

        ElementContext elemCtx(simulator_);
        OPM_BEGIN_PARALLEL_TRY_CATCH();
        {
            OPM_TIMEBLOCK(prepareCellBasedData);
            for (const auto& elem : elements(gridView, Dune::Partitions::interior)) {
                elemCtx.updatePrimaryStencil(elem);
                elemCtx.updatePrimaryIntensiveQuantities(/*timeIdx=*/0);

                damarisOutputModule_->processElement(elemCtx);
            }
        }
        if (!simulator_.model().linearizer().getFlowsInfo().empty()) {
            OPM_TIMEBLOCK(prepareFlowsData);
            for (const auto& elem : elements(gridView, Dune::Partitions::interior)) {
                elemCtx.updatePrimaryStencil(elem);
                elemCtx.updatePrimaryIntensiveQuantities(/*timeIdx=*/0);
                damarisOutputModule_->processElementFlows(elemCtx);
            }
        }
        {
            OPM_TIMEBLOCK(prepareBlockData);
            for (const auto& elem : elements(gridView, Dune::Partitions::interior)) {
                elemCtx.updatePrimaryStencil(elem);
                elemCtx.updatePrimaryIntensiveQuantities(/*timeIdx=*/0);
                damarisOutputModule_->processElementBlockData(elemCtx);
            }
        }
        {
            OPM_TIMEBLOCK(prepareFluidInPlace);
#ifdef _OPENMP
#pragma omp parallel for
#endif
            for (int dofIdx = 0; dofIdx < num_interior; ++dofIdx) {
                const auto& intQuants = *(simulator_.model().cachedIntensiveQuantities(dofIdx, /*timeIdx=*/0));
                const auto totVolume = simulator_.model().dofTotalVolume(dofIdx);
                damarisOutputModule_->updateFluidInPlace(dofIdx, intQuants, totVolume);
            }
        }
        damarisOutputModule_->validateLocalData();
        OPM_END_PARALLEL_TRY_CATCH("DamarisWriter::prepareLocalCellData() failed: ", simulator_.vanguard().grid().comm());
    }
};
} // namespace Opm

#endif // OPM_DAMARIS_WRITER_HPP