Mirror of https://github.com/OPM/opm-simulators.git (synced 2025-02-25 18:55:30 -06:00)
Setup output writer and write initial values in parallel correctly.
Before this commit, only the solution of process 0 was written. To fix this we make the equilGrid of Ebos available; it is used for the output writer. The properties written initially are now gathered from all processes first, using the new gather/scatter utility.
commit fd3e3596a9 (parent 45cee3d9f4)
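The idea behind the fix is a gather-then-write pattern: every MPI rank contributes its part of the distributed cell data, and only the I/O rank assembles and writes the global arrays. Below is a minimal, self-contained sketch of that pattern in plain MPI, added here as an illustration only; it uses none of the Dune/CpGrid machinery from the diff, and all names in it are made up for this sketch.

#include <mpi.h>
#include <cstdio>
#include <numeric>
#include <vector>

int main(int argc, char** argv)
{
    MPI_Init(&argc, &argv);
    int rank = 0, size = 1;
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    MPI_Comm_size(MPI_COMM_WORLD, &size);

    // Each rank owns a different number of "cells", one value per cell.
    std::vector<double> local(3 + rank, static_cast<double>(rank));
    int localCount = static_cast<int>(local.size());

    // The I/O rank needs the per-rank counts and displacements to size the global buffer.
    std::vector<int> counts(size), displs(size);
    MPI_Gather(&localCount, 1, MPI_INT, counts.data(), 1, MPI_INT, 0, MPI_COMM_WORLD);

    std::vector<double> global;
    if (rank == 0) {
        std::partial_sum(counts.begin(), counts.end() - 1, displs.begin() + 1);
        global.resize(std::accumulate(counts.begin(), counts.end(), 0));
    }

    // Gather all local pieces into the global array on rank 0.
    MPI_Gatherv(local.data(), localCount, MPI_DOUBLE,
                global.data(), counts.data(), displs.data(), MPI_DOUBLE,
                0, MPI_COMM_WORLD);

    if (rank == 0) // only the I/O rank writes, mirroring the output_cout_ checks below
        std::printf("writing %zu gathered values\n", global.size());

    MPI_Finalize();
    return 0;
}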
@@ -32,6 +32,7 @@
 #include <opm/autodiff/MissingFeatures.hpp>
 #include <opm/autodiff/moduleVersion.hpp>
 #include <opm/autodiff/ExtractParallelGridInformationToISTL.hpp>
+#include <opm/autodiff/RedistributeDataHandles.hpp>
 
 #include <opm/core/props/satfunc/RelpermDiagnostics.hpp>
 
@@ -52,6 +53,28 @@
 
 namespace Opm
 {
+
+    /// \brief Gather cell data to global random access iterator
+    /// \tparam ConstIter The type of constant iterator.
+    /// \tparam Iter The type of the mutable iterator.
+    /// \param grid The distributed CpGrid where loadbalance has been run.
+    /// \param local The local container from which the data should be sent.
+    /// \param global The global container to gather to.
+    /// \warning The global container has to have the correct size!
+    template<class ConstIter, class Iter>
+    void gatherCellDataToGlobalIterator(const Dune::CpGrid& grid,
+                                        const ConstIter& local_begin,
+                                        const Iter& global_begin)
+    {
+        FixedSizeIterCopyHandle<ConstIter,Iter> handle(local_begin,
+                                                       global_begin);
+        const auto& gatherScatterInf = grid.cellScatterGatherInterface();
+        Dune::VariableSizeCommunicator<> comm(grid.comm(),
+                                              gatherScatterInf);
+        comm.backward(handle);
+    }
+
+
     // The FlowMain class is the ebos based black-oil simulator.
     class FlowMainEbos
     {
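As a usage sketch for the gather helper added above (not part of the diff): gather one per-cell field of a load-balanced Dune::CpGrid into a globally sized buffer that only the I/O rank actually allocates, just as the setupOutputWriter changes further down do for the simProps. The wrapper name gatherCellField and the isIORank flag are invented for this illustration, and the headers from the include hunks plus a CpGrid header are assumed to be in scope.

// Sketch only: gather a per-cell field from all ranks into a global vector on the I/O rank.
// 'distributedGrid' is the load-balanced view, 'globalGrid' the global (equil) one.
std::vector<double> gatherCellField(const Dune::CpGrid& distributedGrid,
                                    const Dune::CpGrid& globalGrid,
                                    const std::vector<double>& localField,
                                    bool isIORank)
{
    std::vector<double> globalField;
    if (isIORank) {
        // Only the I/O rank needs the full-size target buffer
        // (see the \warning in the helper's documentation).
        globalField.resize(globalGrid.size(0));
    }
    // Every rank participates in the communication; non-I/O ranks pass an
    // empty target, exactly as the setupOutputWriter hunk below does.
    Opm::gatherCellDataToGlobalIterator(distributedGrid,
                                        localField.begin(),
                                        globalField.begin());
    return globalField;
}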
@@ -376,7 +399,7 @@ namespace Opm
             }
         }
 
-        // Create grid and property objects.
+        // Create distributed property objects.
         // Writes to:
         //   fluidprops_
         void setupGridAndProps()
@@ -541,13 +564,51 @@ namespace Opm
         {
             bool output = param_.getDefault("output", true);
             bool output_ecl = param_.getDefault("output_ecl", true);
-            const Grid& grid = this->grid();
-            if( output && output_ecl && output_cout_)
+            if( output && output_ecl )
             {
-                const EclipseGrid& inputGrid = eclState().getInputGrid();
-                eclIO_.reset(new EclipseIO(eclState(), UgGridHelpers::createEclipseGrid( grid , inputGrid )));
-                eclIO_->writeInitial(geoprops_->simProps(grid),
-                                     geoprops_->nonCartesianConnections());
+                const Grid& grid = this->globalGrid();
+
+                if( output_cout_ ){
+                    const EclipseGrid& inputGrid = eclState().getInputGrid();
+                    eclIO_.reset(new EclipseIO(eclState(), UgGridHelpers::createEclipseGrid( grid , inputGrid )));
+                }
+
+                const NNC* nnc = &geoprops_->nonCartesianConnections();
+                data::Solution globaltrans;
+
+                if ( must_distribute_ )
+                {
+                    // dirty and dangerous hack!
+                    // We rely on opmfil in GeoProps being hardcoded to true
+                    // which prevents the pinch processing from running.
+                    // Ergo the nncs are unchanged.
+                    nnc = &eclState().getInputNNC();
+
+                    // Gather the global simProps
+                    data::Solution localtrans = geoprops_->simProps(this->grid());
+                    for( const auto& localkeyval: localtrans)
+                    {
+                        auto& globalval = globaltrans[localkeyval.first].data;
+                        const auto& localval = localkeyval.second.data;
+
+                        if( output_cout_ )
+                        {
+                            globalval.resize( grid.size(0));
+                        }
+                        gatherCellDataToGlobalIterator(this->grid(), localval.begin(),
+                                                       globalval.begin());
+                    }
+                }
+                else
+                {
+                    globaltrans = geoprops_->simProps(grid);
+                }
+
+                if( output_cout_ )
+                {
+                    eclIO_->writeInitial(globaltrans,
+                                         *nnc);
+                }
             }
         }
 
@@ -561,6 +622,14 @@ namespace Opm
                                                   eclState(),
                                                   std::move( eclIO_ ),
                                                   Opm::phaseUsageFromDeck(deck())) );
+            // create output writer after grid is distributed, otherwise the parallel output
+            // won't work correctly since we need to create a mapping from the distributed to
+            // the global view
+            output_writer_.reset(new OutputWriter(grid(),
+                                                  param_,
+                                                  eclState(),
+                                                  std::move(eclIO_),
+                                                  Opm::phaseUsageFromDeck(deck())) );
         }
 
         // Run the simulator.
@@ -696,6 +765,9 @@ namespace Opm
         Grid& grid()
         { return ebosSimulator_->gridManager().grid(); }
 
+        const Grid& globalGrid()
+        { return ebosSimulator_->gridManager().equilGrid(); }
+
         Problem& ebosProblem()
         { return ebosSimulator_->problem(); }
 
@@ -24,6 +24,8 @@
 
 #include <unordered_set>
 #include <string>
+#include <type_traits>
+#include <iterator>
 
 #include <opm/core/simulator/BlackoilState.hpp>
 
@@ -52,6 +54,69 @@ distributeGridAndData( Grid& ,
     return std::unordered_set<std::string>();
 }
 
+/// \brief A handle that copies a fixed number of data items per index.
+///
+/// It works on iterators to allow for communicating C arrays.
+/// \tparam Iter1 Constant random access iterator type.
+/// \tparam Iter2 Mutable random access iterator type.
+template<class Iter1, class Iter2=Iter1>
+class FixedSizeIterCopyHandle
+{
+    typedef typename std::iterator_traits<Iter2>::value_type DataType2;
+
+public:
+    typedef typename std::iterator_traits<Iter1>::value_type DataType;
+
+    /// \brief Constructor.
+    /// \param send_begin The begin iterator for sending.
+    /// \param receive_begin The begin iterator for receiving.
+    FixedSizeIterCopyHandle(const Iter1& send_begin,
+                            const Iter2& receive_begin,
+                            std::size_t size = 1)
+        : send_(send_begin), receive_(receive_begin), size_(size)
+    {
+        static_assert(std::is_same<DataType,DataType2>::value,
+                      "Iter1 and Iter2 have to have the same value_type!");
+    }
+
+    template<class Buffer>
+    void gather(Buffer& buffer, std::size_t i)
+    {
+        for(auto index = i*size(i), end = (i+1)*size(i);
+            index < end; ++index)
+        {
+            buffer.write(send_[index]);
+        }
+    }
+
+    template<class Buffer>
+    void scatter(Buffer& buffer, std::size_t i, std::size_t s)
+    {
+        assert(s==size(i));
+
+        for(auto index = i*size(i), end = (i+1)*size(i);
+            index < end; ++index)
+        {
+            buffer.read(receive_[index]);
+        }
+    }
+
+    bool fixedsize()
+    {
+        return true;
+    }
+
+    std::size_t size(std::size_t)
+    {
+        return size_;
+    }
+private:
+    Iter1 send_;
+    Iter2 receive_;
+    std::size_t size_;
+};
+
+
 #if HAVE_OPM_GRID && HAVE_MPI
 /// \brief a data handle to distribute the threshold pressures
 class ThresholdPressureDataHandle
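To make the handle's contract concrete, here is a small, self-contained sketch (not part of the diff) that drives gather()/scatter() directly with a mock buffer instead of Dune::VariableSizeCommunicator. MockBuffer and the hard-coded numbers are inventions of this example, and the handle is assumed to be reachable through RedistributeDataHandles.hpp in namespace Opm, as the include and usage in the FlowMainEbos hunks above suggest.

#include <cassert>
#include <cstddef>
#include <deque>
#include <iostream>
#include <vector>
#include <opm/autodiff/RedistributeDataHandles.hpp> // assumed location of FixedSizeIterCopyHandle

// Stand-in for the message buffer normally supplied by Dune::VariableSizeCommunicator.
struct MockBuffer
{
    std::deque<double> data;
    void write(const double& v) { data.push_back(v); }
    void read(double& v) { v = data.front(); data.pop_front(); }
};

int main()
{
    const std::size_t entriesPerIndex = 3;
    std::vector<double> send = {0, 1, 2, 10, 11, 12}; // two indices, three values each
    std::vector<double> receive(send.size(), -1.0);

    Opm::FixedSizeIterCopyHandle<std::vector<double>::const_iterator,
                                 std::vector<double>::iterator>
        handle(send.cbegin(), receive.begin(), entriesPerIndex);

    MockBuffer buffer;
    for (std::size_t i = 0; i < 2; ++i)
        handle.gather(buffer, i);                  // sending side packs the values
    for (std::size_t i = 0; i < 2; ++i)
        handle.scatter(buffer, i, handle.size(i)); // receiving side unpacks them

    assert(receive == send);
    std::cout << "copied " << receive.size() << " values\n";
    return 0;
}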