/*
  Copyright 2015 IRIS AS

  This file is part of the Open Porous Media project (OPM).

  OPM is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 3 of the License, or
  (at your option) any later version.

  OPM is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef OPM_PARALLELDEBUGOUTPUT_HEADER_INCLUDED
#define OPM_PARALLELDEBUGOUTPUT_HEADER_INCLUDED

#include <map>
#include <memory>
#include <set>
#include <string>
#include <unordered_set>
#include <vector>

#include <opm/common/data/SimulationDataContainer.hpp>

#include <opm/grid/UnstructuredGrid.h>
#include <opm/core/simulator/WellState.hpp>
#include <opm/core/wells/WellsManager.hpp>

#include <opm/autodiff/WellStateFullyImplicitBlackoil.hpp>
#include <opm/autodiff/Compat.hpp>
#include <opm/core/wells/DynamicListEconLimited.hpp>

#if HAVE_OPM_GRID
#include <opm/grid/common/p2pcommunicator.hh>
#endif

namespace Opm
{

    class ParallelDebugOutputInterface
    {
    protected:
        ParallelDebugOutputInterface () {}
    public:
        virtual ~ParallelDebugOutputInterface() {}

        //! \brief gather solution to rank 0 for EclipseWriter
        //! \param localReservoirState The reservoir state
        //! \param localWellState      The well state
        //! \param localCellData       The cell data used for eclipse output
        //!                            (needs to include the cell data of localReservoirState)
        //! \param wellStateStepNumber The step number of the well state.
        virtual bool collectToIORank( const SimulationDataContainer& localReservoirState,
                                      const WellStateFullyImplicitBlackoil& localWellState,
                                      const data::Solution& localCellData,
                                      const int wellStateStepNumber ) = 0;

        virtual const SimulationDataContainer& globalReservoirState() const = 0;
        virtual const data::Solution& globalCellData() const = 0;
        virtual const WellStateFullyImplicitBlackoil& globalWellState() const = 0;
        virtual bool isIORank() const = 0;
        virtual bool isParallel() const = 0;
        virtual int numCells() const = 0;
        virtual const int* globalCell() const = 0;
    };
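
    // A minimal usage sketch (not part of this header): after each report step the
    // simulator collects the distributed solution and lets only the I/O rank write
    // output. The writer object and call names below are illustrative placeholders.
    //
    //     parallelOutput.collectToIORank( reservoirState, wellState, cellData, step );
    //     if( parallelOutput.isIORank() )
    //     {
    //         eclipseWriter.writeTimeStep( step,
    //                                      parallelOutput.globalReservoirState(),
    //                                      parallelOutput.globalWellState() );
    //     }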

    template <class GridImpl>
    class ParallelDebugOutput : public ParallelDebugOutputInterface
    {
    protected:
        const GridImpl& grid_;

        const SimulationDataContainer* globalState_;
        const WellStateFullyImplicitBlackoil* wellState_;
        const data::Solution* globalCellData_;

    public:
        ParallelDebugOutput ( const GridImpl& grid,
                              const EclipseState& /* eclipseState */,
                              const Schedule&,
                              const int,
                              const Opm::PhaseUsage& )
            : grid_( grid ) {}

        // gather solution to rank 0 for EclipseWriter
        virtual bool collectToIORank( const SimulationDataContainer& localReservoirState,
                                      const WellStateFullyImplicitBlackoil& localWellState,
                                      const data::Solution& localCellData,
                                      const int /* wellStateStepNumber */)
        {
            globalState_ = &localReservoirState;
            wellState_ = &localWellState;
            globalCellData_ = &localCellData;
            return true;
        }

        virtual const SimulationDataContainer& globalReservoirState() const { return *globalState_; }
        virtual const data::Solution& globalCellData() const
        {
            return *globalCellData_;
        }
        virtual const WellStateFullyImplicitBlackoil& globalWellState() const { return *wellState_; }
        virtual bool isIORank () const { return true; }
        virtual bool isParallel () const { return false; }
        virtual int numCells() const { return Opm::AutoDiffGrid::numCells(grid_); }
        virtual const int* globalCell() const { return Opm::AutoDiffGrid::globalCell(grid_); }
    };
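
    // Note: this general (serial) implementation does not copy any data; collectToIORank()
    // only stores pointers to the objects passed in. The caller therefore has to keep
    // localReservoirState, localWellState and localCellData alive for as long as the
    // global accessors above are used.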

#if HAVE_OPM_GRID
    template <>
    class ParallelDebugOutput< Dune::CpGrid > : public ParallelDebugOutputInterface
    {
    public:
        typedef Dune::CpGrid Grid;
        typedef typename Grid :: CollectiveCommunication CollectiveCommunication;

        // global id
        class GlobalCellIndex
        {
            int globalId_;
            int localIndex_;
            bool isInterior_;
        public:
            GlobalCellIndex() : globalId_(-1), localIndex_(-1), isInterior_(true) {}
            void setGhost() { isInterior_ = false; }

            void setId( const int globalId ) { globalId_ = globalId; }
            void setIndex( const int localIndex ) { localIndex_ = localIndex; }

            int localIndex () const { return localIndex_; }
            int id () const { return globalId_; }
            bool isInterior() const { return isInterior_; }
        };

        typedef typename Dune::PersistentContainer< Grid, GlobalCellIndex > GlobalIndexContainer;

        static const int dimension = Grid :: dimension ;

        typedef typename Grid :: LeafGridView GridView;
        typedef GridView AllGridView;

        typedef Dune :: Point2PointCommunicator< Dune :: SimpleMessageBuffer > P2PCommunicatorType;
        typedef typename P2PCommunicatorType :: MessageBufferType MessageBufferType;

        typedef std::vector< GlobalCellIndex > LocalIndexMapType;

        typedef std::vector<int> IndexMapType;
        typedef std::vector< IndexMapType > IndexMapStorageType;
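
        // The DistributeIndexMapping below is the data handle used to set up the
        // index maps on the I/O rank: every rank packs the global (Cartesian) ids of
        // its interior cells in the order given by its local index map, and the I/O
        // rank unpacks them into one IndexMapType per sending rank. Entry i of such a
        // map is the position of that rank's i-th interior cell inside the global
        // output arrays, so later cell-data exchanges can be scattered directly into
        // place.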
        class DistributeIndexMapping : public P2PCommunicatorType::DataHandleInterface
        {
        protected:
            const std::vector<int>& distributedGlobalIndex_;
            IndexMapType& localIndexMap_;
            IndexMapStorageType& indexMaps_;
            std::map< const int, const int > globalPosition_;
#ifndef NDEBUG
            std::set< int > checkPosition_;
#endif

        public:
            DistributeIndexMapping( const std::vector<int>& globalIndex,
                                    const std::vector<int>& distributedGlobalIndex,
                                    IndexMapType& localIndexMap,
                                    IndexMapStorageType& indexMaps )
            : distributedGlobalIndex_( distributedGlobalIndex ),
              localIndexMap_( localIndexMap ),
              indexMaps_( indexMaps ),
              globalPosition_()
            {
                const size_t size = globalIndex.size();
                // create mapping globalIndex --> localIndex
                for ( size_t index = 0; index < size; ++index )
                {
                    globalPosition_.insert( std::make_pair( globalIndex[ index ], index ) );
                }

                // on the I/O rank we need to create a mapping from local to global
                if( ! indexMaps_.empty() )
                {
                    // for the ioRank create a localIndex to index in global state map
                    IndexMapType& indexMap = indexMaps_.back();
                    const size_t localSize = localIndexMap_.size();
                    indexMap.resize( localSize );
                    for( size_t i=0; i<localSize; ++i )
                    {
                        const int id = distributedGlobalIndex_[ localIndexMap_[ i ] ];
                        indexMap[ i ] = globalPosition_[ id ];
#ifndef NDEBUG
                        assert( checkPosition_.find( id ) == checkPosition_.end() );
                        checkPosition_.insert( id );
#endif
                    }
                }
            }

            void pack( const int link, MessageBufferType& buffer )
            {
                // we should only get one link
                if( link != 0 ) {
                    OPM_THROW(std::logic_error,"link in method pack is not 0 as expected");
                }

                // pack all interior global cell id's
                const int size = localIndexMap_.size();
                buffer.write( size );

                for( int index = 0; index < size; ++index )
                {
                    const int globalIdx = distributedGlobalIndex_[ localIndexMap_[ index ] ];
                    buffer.write( globalIdx );
                }
            }

            void unpack( const int link, MessageBufferType& buffer )
            {
                // get index map for current link
                IndexMapType& indexMap = indexMaps_[ link ];
                assert( ! globalPosition_.empty() );

                // unpack all interior global cell id's
                int numCells = 0;
                buffer.read( numCells );
                indexMap.resize( numCells );
                for( int index = 0; index < numCells; ++index )
                {
                    int globalId = -1;
                    buffer.read( globalId );
                    assert( globalPosition_.find( globalId ) != globalPosition_.end() );
                    indexMap[ index ] = globalPosition_[ globalId ];
#ifndef NDEBUG
                    assert( checkPosition_.find( globalId ) == checkPosition_.end() );
                    checkPosition_.insert( globalId );
#endif
                }
            }
        };

        enum { ioRank = 0 };

        /// \brief Constructor
        /// \param otherGrid    The grid after loadbalance was run.
        /// \param eclipseState The eclipse file parser output
        /// \param schedule     The simulation schedule (wells, report steps).
        /// \param numPhases    The number of active phases.
        /// \param phaseUsage   Phase usage needed to convert the solution to a
        ///                     simulation data container.
        ParallelDebugOutput( const Dune::CpGrid& otherGrid,
                             const EclipseState& eclipseState,
                             const Schedule& schedule,
                             const int numPhases,
                             const Opm::PhaseUsage& phaseUsage)
            : grid_(),
              eclipseState_( eclipseState ),
              schedule_(schedule),
              globalCellData_(new data::Solution),
              isIORank_(true),
              phaseUsage_(phaseUsage)
        {
            // Switch to distributed view unconditionally for safety.
            Dune::CpGrid distributed_grid = otherGrid;

            const CollectiveCommunication& comm = otherGrid.comm();
            if( comm.size() > 1 )
            {
                std::set< int > send, recv;
                distributed_grid.switchToDistributedView();
                toIORankComm_ = distributed_grid.comm();
                isIORank_ = (distributed_grid.comm().rank() == ioRank);

                // the I/O rank receives from all other ranks
                if( isIORank() )
                {
                    // copy grid
                    grid_.reset( new Dune::CpGrid( otherGrid ) );
                    grid_->switchToGlobalView();
                    Dune::CpGrid& globalGrid = *grid_;

                    // initialize global state with correct sizes
                    globalReservoirState_.reset( new SimulationDataContainer( globalGrid.numCells(), globalGrid.numFaces(), numPhases ));

                    // copy global cartesian index
                    globalIndex_ = globalGrid.globalCell();

                    // count the global leaf cells (only used for the consistency check below)
                    unsigned int count = 0;
                    auto gridView = globalGrid.leafGridView();
                    for( auto it = gridView.begin< 0 >(),
                         end = gridView.end< 0 >(); it != end; ++it, ++count )
                    {
                    }
                    assert( count == globalIndex_.size() );

                    for(int i=0; i<comm.size(); ++i)
                    {
                        if( i != ioRank )
                        {
                            recv.insert( i );
                        }
                    }
                }
                else // all others simply send to the I/O rank
                {
                    // globalReservoirState_ will be dereferenced even if this rank is not outputting anything.
                    // To prevent dereferencing a nullptr we create an empty container.
                    globalReservoirState_.reset( new SimulationDataContainer( 0, 0, 0));
                    send.insert( ioRank );
                }

                localIndexMap_.clear();
                localIndexMap_.reserve( distributed_grid.size( 0 ) );

                unsigned int index = 0;
                auto localView = distributed_grid.leafGridView();
                for( auto it = localView.begin< 0 >(),
                     end = localView.end< 0 >(); it != end; ++it, ++index )
                {
                    const auto element = *it;
                    // only store interior elements for collection
                    if( element.partitionType() == Dune :: InteriorEntity )
                    {
                        localIndexMap_.push_back( index );
                    }
                }

                // insert send and recv linkage to communicator
                toIORankComm_.insertRequest( send, recv );

                if( isIORank() )
                {
                    // need an index map for each rank
                    indexMaps_.clear();
                    indexMaps_.resize( comm.size() );
                }

                // distribute global id's to the I/O rank for later association of dof's
                DistributeIndexMapping distIndexMapping( globalIndex_, distributed_grid.globalCell(), localIndexMap_, indexMaps_ );
                toIORankComm_.exchange( distIndexMapping );
            }
            else // serial run
            {
                // copy global cartesian index
                globalIndex_ = distributed_grid.globalCell();
            }
        }
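
        // The data handle below gathers the cell data and the well state on the I/O rank:
        // every rank packs the values of its interior cells (in localIndexMap_ order)
        // plus its local well data, and the I/O rank scatters what it receives into
        // globalCellData_ / globalWellState_ using the per-rank index maps built above.
        // Since the point-to-point exchange only transfers data between different ranks,
        // the constructor of the handle copies the I/O rank's own local data via a
        // pack()/doUnpack() round trip through a local buffer.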
        class PackUnPackSimulationDataContainer : public P2PCommunicatorType::DataHandleInterface
        {
            const data::Solution& localCellData_;
            data::Solution& globalCellData_;
            const WellStateFullyImplicitBlackoil& localWellState_;
            WellStateFullyImplicitBlackoil& globalWellState_;
            const IndexMapType& localIndexMap_;
            const IndexMapStorageType& indexMaps_;

        public:
            PackUnPackSimulationDataContainer( std::size_t numGlobalCells,
                                               const data::Solution& localCellData,
                                               data::Solution& globalCellData,
                                               const WellStateFullyImplicitBlackoil& localWellState,
                                               WellStateFullyImplicitBlackoil& globalWellState,
                                               const IndexMapType& localIndexMap,
                                               const IndexMapStorageType& indexMaps,
                                               const bool isIORank )
            : localCellData_( localCellData ),
              globalCellData_( globalCellData ),
              localWellState_( localWellState ),
              globalWellState_( globalWellState ),
              localIndexMap_( localIndexMap ),
              indexMaps_( indexMaps )
            {
                if( isIORank )
                {
                    // add missing data to global cell data
                    for (const auto& pair : localCellData_) {
                        const std::string& key = pair.first;
                        std::size_t container_size = numGlobalCells;
                        auto ret = globalCellData_.insert(key, pair.second.dim,
                                                          std::vector<double>(container_size),
                                                          pair.second.target);
                        assert(ret.second);
                        DUNE_UNUSED_PARAMETER(ret.second); // dummy op to prevent warning with -DNDEBUG
                    }

                    MessageBufferType buffer;
                    pack( 0, buffer );
                    // the last index map is the local one
                    doUnpack( indexMaps.back(), buffer );
                }
            }

            // pack all data associated with link
            void pack( const int link, MessageBufferType& buffer )
            {
                // we should only get one link
                if( link != 0 ) {
                    OPM_THROW(std::logic_error,"link in method pack is not 0 as expected");
                }

                // write all cell data registered in local state
                for (const auto& pair : localCellData_) {
                    const auto& data = pair.second.data;

                    // write all data from local data to buffer
                    write( buffer, localIndexMap_, data);
                }

                // write all data from local well state to buffer
                writeWells( buffer );
            }

            void doUnpack( const IndexMapType& indexMap, MessageBufferType& buffer )
            {
                // we loop over the local cell data as its order
                // governs the order the data got received in.
                for (auto& pair : localCellData_) {
                    const std::string& key = pair.first;
                    auto& data = globalCellData_.data(key);

                    // read all data for this key from the buffer into the global cell data
                    read( buffer, indexMap, data);
                }

                // read well data from buffer
                readWells( buffer );
            }

            // unpack all data associated with link
            void unpack( const int link, MessageBufferType& buffer )
            {
                doUnpack( indexMaps_[ link ], buffer );
            }

        protected:
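            // The two helpers below serialize/deserialize one vector of cell values.
            // write() sends vector[ localIndexMap[i]*stride + offset ] for every interior
            // cell i, and read() stores the received values at indexMap[i]*stride + offset.
            // The offset/stride arguments (currently used with their defaults 0 and 1)
            // would allow picking a single component out of an interleaved array, e.g. a
            // per-phase quantity stored cell-major as [cell0_phase0, cell0_phase1, ...]
            // could hypothetically be sent one phase at a time with stride = numPhases
            // and offset = phase.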
            template <class Vector>
            void write( MessageBufferType& buffer, const IndexMapType& localIndexMap,
                        const Vector& vector,
                        const unsigned int offset = 0, const unsigned int stride = 1 ) const
            {
                unsigned int size = localIndexMap.size();
                buffer.write( size );
                assert( vector.size() >= stride * size );
                for( unsigned int i=0; i<size; ++i )
                {
                    const unsigned int index = localIndexMap[ i ] * stride + offset;
                    assert( index < vector.size() );
                    buffer.write( vector[ index ] );
                }
            }

            template <class Vector>
            void read( MessageBufferType& buffer,
                       const IndexMapType& indexMap,
                       Vector& vector,
                       const unsigned int offset = 0, const unsigned int stride = 1 ) const
            {
                unsigned int size = 0;
                buffer.read( size );
                assert( size == indexMap.size() );
                for( unsigned int i=0; i<size; ++i )
                {
                    const unsigned int index = indexMap[ i ] * stride + offset;
                    assert( index < vector.size() );
                    buffer.read( vector[ index ] );
                }
            }

            void writeString( MessageBufferType& buffer, const std::string& s) const
            {
                const int size = s.size();
                buffer.write( size );
                for( int i=0; i<size; ++i )
                {
                    buffer.write( s[ i ] );
                }
            }

            void readString( MessageBufferType& buffer, std::string& s) const
            {
                int size = -1;
                buffer.read( size );
                s.resize( size );
                for( int i=0; i<size; ++i )
                {
                    buffer.read( s[ i ] );
                }
            }
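
            // writeWells()/readWells() below must serialize the well data in exactly the
            // same order: well name, bhp, thp, the per-phase well rates, the current
            // control, then perfRates, perfPress and perfPhaseRates for all perforations.
            // The wellMap() entries are used as [ well index, first perforation index,
            // number of perforations ]; the perforation ordering is assumed to be the
            // same in the local and the global well state.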
            void writeWells( MessageBufferType& buffer ) const
            {
                int nWells = localWellState_.wellMap().size();
                buffer.write( nWells );
                auto end = localWellState_.wellMap().end();
                for( auto it = localWellState_.wellMap().begin(); it != end; ++it )
                {
                    const std::string& name = it->first;
                    const int wellIdx = it->second[ 0 ];

                    // write well name
                    writeString( buffer, name );

                    // write well data
                    buffer.write( localWellState_.bhp()[ wellIdx ] );
                    buffer.write( localWellState_.thp()[ wellIdx ] );
                    const int wellRateIdx = wellIdx * localWellState_.numPhases();
                    for( int np=0; np<localWellState_.numPhases(); ++np )
                        buffer.write( localWellState_.wellRates()[ wellRateIdx + np ] );

                    // Write well control
                    buffer.write(localWellState_.currentControls()[ wellIdx ]);

                    // Write perfRates and perfPress. No need to figure out the index
                    // mapping there as the ordering of the perforations should
                    // be the same for global and local state.
                    const int end_con = it->second[1] + it->second[2];

                    for( int con = it->second[1]; con < end_con; ++con )
                    {
                        buffer.write( localWellState_.perfRates()[ con ] );
                    }

                    for( int con = it->second[1]; con < end_con; ++con )
                    {
                        buffer.write( localWellState_.perfPress()[ con ] );
                    }

                    // Write perfPhaseRates
                    const int np = localWellState_.perfPhaseRates().size() /
                        localWellState_.perfRates().size();

                    for( int con = it->second[1]*np; con < end_con*np; ++con )
                    {
                        buffer.write( localWellState_.perfPhaseRates()[ con ] );
                    }
                }
            }

            void readWells( MessageBufferType& buffer )
            {
                int nWells = -1;
                buffer.read( nWells );
                // unpack all wells that have been sent
                std::string name;
                for( int well = 0; well < nWells ; ++well )
                {
                    // read well name for local identification
                    readString( buffer, name );

                    // unpack values
                    auto it = globalWellState_.wellMap().find( name );
                    if( it == globalWellState_.wellMap().end() )
                    {
                        OPM_THROW(std::logic_error,"global state does not contain well " << name );
                    }
                    const int wellIdx = it->second[ 0 ];

                    buffer.read( globalWellState_.bhp()[ wellIdx ] );
                    buffer.read( globalWellState_.thp()[ wellIdx ] );
                    const int wellRateIdx = wellIdx * globalWellState_.numPhases();
                    for( int np=0; np<globalWellState_.numPhases(); ++np )
                        buffer.read( globalWellState_.wellRates()[ wellRateIdx + np ] );

                    // Read well control
                    buffer.read(globalWellState_.currentControls()[ wellIdx ]);

                    // Read perfRates and perfPress. No need to figure out the index
                    // mapping there as the ordering of the perforations should
                    // be the same for global and local state.
                    const int end_con = it->second[1] + it->second[2];

                    for( int con = it->second[1]; con < end_con; ++con )
                    {
                        buffer.read( globalWellState_.perfRates()[ con ] );
                    }

                    for( int con = it->second[1]; con < end_con; ++con )
                    {
                        buffer.read( globalWellState_.perfPress()[ con ] );
                    }

                    // Read perfPhaseRates
                    const int np = globalWellState_.perfPhaseRates().size() /
                        globalWellState_.perfRates().size();

                    for( int con = it->second[1]*np; con < end_con*np; ++con )
                    {
                        buffer.read( globalWellState_.perfPhaseRates()[ con ] );
                    }
                }
            }
        };
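
        // collectToIORank() below is collective: every rank participates in the
        // point-to-point exchange (and, in debug builds, the barrier), but only the
        // I/O rank rebuilds the global well state and cell data and gets true
        // returned, signalling that it should perform the actual output.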

        // gather solution to rank 0 for EclipseWriter
        bool collectToIORank( const SimulationDataContainer& /*localReservoirState*/,
                              const WellStateFullyImplicitBlackoil& localWellState,
                              const data::Solution& localCellData,
                              const int wellStateStepNumber )
        {
            if( isIORank() )
            {
                Dune::CpGrid& globalGrid = *grid_;
                // TODO: make a dummy DynamicListEconLimited here for NOW for compilation and development
                // TODO: NOT SURE whether it will cause problems for parallel runs
                // TODO: TO BE TESTED AND IMPROVED
                const DynamicListEconLimited dynamic_list_econ_limited;
                // Create wells and well state.
                WellsManager wells_manager(eclipseState_,
                                           schedule_,
                                           wellStateStepNumber,
                                           Opm::UgGridHelpers::numCells( globalGrid ),
                                           Opm::UgGridHelpers::globalCell( globalGrid ),
                                           Opm::UgGridHelpers::cartDims( globalGrid ),
                                           Opm::UgGridHelpers::dimensions( globalGrid ),
                                           Opm::UgGridHelpers::cell2Faces( globalGrid ),
                                           Opm::UgGridHelpers::beginFaceCentroids( globalGrid ),
                                           dynamic_list_econ_limited,
                                           false,
                                           // We need to pass the optional arguments
                                           // as we get the following error otherwise
                                           // with c++ (Debian 4.9.2-10) 4.9.2 and -std=c++11:
                                           // converting to ‘const std::unordered_set<std::basic_string<char> >’ from initializer list would use explicit constructor
                                           std::unordered_set<std::string>());

                const Wells* wells = wells_manager.c_wells();
                globalWellState_.init(wells, *globalReservoirState_, globalWellState_, phaseUsage_ );
                globalCellData_->clear();
            }

            PackUnPackSimulationDataContainer packUnpack( numCells(),
                                                          localCellData, *globalCellData_,
                                                          localWellState, globalWellState_,
                                                          localIndexMap_, indexMaps_,
                                                          isIORank() );

            //toIORankComm_.exchangeCached( packUnpack );
            toIORankComm_.exchange( packUnpack );
#ifndef NDEBUG
            // make sure every process is on the same page
            toIORankComm_.barrier();
#endif
            if( isIORank() )
            {
                // copy values from globalCellData to globalReservoirState
                const std::map<std::string, std::vector<double> > no_extra_data;
                solutionToSim(*globalCellData_, no_extra_data, phaseUsage_, *globalReservoirState_);
            }
            return isIORank();
        }

        const SimulationDataContainer& globalReservoirState() const { return *globalReservoirState_; }

        const data::Solution& globalCellData() const
        {
            return *globalCellData_;
        }

        const WellStateFullyImplicitBlackoil& globalWellState() const { return globalWellState_; }

        bool isIORank() const
        {
            return isIORank_;
        }

        bool isParallel() const
        {
            return toIORankComm_.size() > 1;
        }

        int numCells () const { return globalIndex_.size(); }
        const int* globalCell () const
        {
            assert( ! globalIndex_.empty() );
            return globalIndex_.data();
        }

    protected:
        std::unique_ptr< Dune::CpGrid > grid_;
        const EclipseState& eclipseState_;
        const Schedule& schedule_;
        P2PCommunicatorType toIORankComm_;
        IndexMapType globalIndex_;
        IndexMapType localIndexMap_;
        IndexMapStorageType indexMaps_;
        std::unique_ptr<SimulationDataContainer> globalReservoirState_;
        std::unique_ptr<data::Solution> globalCellData_;
        // this needs to be revised
        WellStateFullyImplicitBlackoil globalWellState_;
        // true if we are on the I/O rank
        bool isIORank_;
        // Phase usage needed to convert the solution to a simulation data container
        Opm::PhaseUsage phaseUsage_;
    };
#endif // #if HAVE_OPM_GRID

} // end namespace Opm

#endif