Merge pull request #5510 from lisajulia/fix/COMPDAT-NLDD-parallel

Fix COMPDAT/NLDD in parallel runs: the schedule's possible future well connections are only available on rank 0, so they are now broadcast to all processes before being used for NLDD domain partitioning and for the well model's additional cell connections.
commit ac232197dc by Markus Blatt, 2024-08-14 11:18:28 +02:00 (committed by GitHub)
4 changed files with 38 additions and 10 deletions


@@ -945,6 +945,9 @@ private:
         const auto wells = need_wells
             ? this->model_.simulator().vanguard().schedule().getWellsatEnd()
             : std::vector<Well>{};
+        const auto& possibleFutureConnectionSet = need_wells
+            ? this->model_.simulator().vanguard().schedule().getPossibleFutureConnections()
+            : std::unordered_map<std::string, std::set<int>> {};
 
         // If defaulted parameter for number of domains, choose a reasonable default.
         constexpr int default_cells_per_domain = 1000;
@@ -953,7 +956,6 @@ private:
             ? param.num_local_domains_
             : num_cells / default_cells_per_domain;
 
-        const auto& possibleFutureConnectionSet = this->model_.simulator().vanguard().schedule().getPossibleFutureConnections();
         return ::Opm::partitionCells(param.local_domain_partition_method_,
                                      num_domains,
                                      grid.leafGridView(), wells, possibleFutureConnectionSet, zoltan_ctrl);
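
The map returned by getPossibleFutureConnections() pairs each well name with the cells that a later ACTIONX/COMPDAT keyword may still connect; building it under the same need_wells guard as the well list keeps the two partitioning inputs consistent and avoids touching the schedule when no wells are requested. A hedged illustration of the map's shape (the well names and cell indices below are made up, and the values are understood to be global cell indices):

    #include <set>
    #include <string>
    #include <unordered_map>

    // Illustrative only: keys are well names, values are global cell indices
    // of connections that a future COMPDAT may open.
    const std::unordered_map<std::string, std::set<int>> possibleFutureConnectionSet {
        {"PROD1", {1204, 1205, 1206}},
        {"INJ1",  {3410}},
    };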


@@ -37,6 +37,10 @@
 #include <opm/input/eclipse/Schedule/Well/WellConnections.hpp>
 #endif // HAVE_MPI && HAVE_ZOLTAN
 
+#if HAVE_MPI
+#include <opm/simulators/utils/MPISerializer.hpp>
+#endif
+
 #include <opm/grid/CpGrid.hpp>
 #include <opm/grid/polyhedralgrid.hh>
@@ -181,7 +185,7 @@ private:
     template <typename Comm>
     void connectWells(const Comm comm,
                       const std::vector<Opm::Well>& wells,
-                      const std::unordered_map<std::string, std::set<int>>& possibleFutureConnections,
+                      std::unordered_map<std::string, std::set<int>> possibleFutureConnections,
                       const std::unordered_map<int, int>& g2l);
 };
@@ -287,11 +291,15 @@ ZoltanPartitioner::connectElements(const GridView&
 template <typename Comm>
 void ZoltanPartitioner::connectWells(const Comm comm,
                                      const std::vector<Opm::Well>& wells,
-                                     const std::unordered_map<std::string, std::set<int>>& possibleFutureConnections,
+                                     std::unordered_map<std::string, std::set<int>> possibleFutureConnections,
                                      const std::unordered_map<int, int>& g2l)
 {
     auto distributedWells = 0;
 
+    // Communicate Map to other processes, since it is only available on rank 0
+    Opm::Parallel::MpiSerializer ser(comm);
+    ser.broadcast(possibleFutureConnections);
+
     for (const auto& well : wells) {
         auto cellIx = std::vector<int>{};
         auto otherProc = 0;
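
Changing the possibleFutureConnections parameter from a const reference to pass-by-value lets connectWells overwrite its local copy with the rank-0 contents. A minimal sketch of that broadcast step, assuming an MPI-enabled OPM build; MpiSerializer and its broadcast() call are taken from the diff above, while the free-standing helper and its name are purely illustrative:

    #include <opm/simulators/utils/MPISerializer.hpp>

    #include <set>
    #include <string>
    #include <unordered_map>

    template <class Comm>
    std::unordered_map<std::string, std::set<int>>
    broadcastFutureConnections(const Comm& comm,
                               std::unordered_map<std::string, std::set<int>> connections)
    {
        // Only rank 0 reads the schedule data; every other rank starts with an
        // empty map and receives the contents in the broadcast below.
        Opm::Parallel::MpiSerializer ser(comm);
        ser.broadcast(connections);
        return connections;
    }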


@@ -194,8 +194,14 @@ namespace Opm {
         // Create cartesian to compressed mapping
         const auto& schedule_wells = this->schedule().getWellsatEnd();
-        const auto& possibleFutureConnections = this->schedule().getPossibleFutureConnections();
+        auto possibleFutureConnections = this->schedule().getPossibleFutureConnections();
+#if HAVE_MPI
+        // Communicate Map to other processes, since it is only available on rank 0
+        const auto& comm = this->simulator_.vanguard().grid().comm();
+        Parallel::MpiSerializer ser(comm);
+        ser.broadcast(possibleFutureConnections);
+#endif
 
         // initialize the additional cell connections introduced by wells.
         for (const auto& well : schedule_wells)
         {
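
After the #if HAVE_MPI block runs, every rank holds the complete map, so the loop over schedule_wells can consult it regardless of which rank owns a given well; in a serial build there is nothing to broadcast and the guard compiles away. A hedged sketch of such a lookup (the loop body is illustrative and is not the code hidden below this hunk):

    // Illustrative consumer of the broadcast map; not the actual loop body.
    for (const auto& well : schedule_wells) {
        const auto candidates = possibleFutureConnections.find(well.name());
        if (candidates == possibleFutureConnections.end())
            continue;
        for (const int globalCell : candidates->second) {
            // Reserve an extra cell coupling here so that a later
            // ACTIONX-triggered COMPDAT can activate the connection without
            // rebuilding the sparsity pattern.
            (void) globalCell;
        }
    }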


@@ -248,3 +248,15 @@ add_test_compareSeparateECLFiles(CASENAME actionx_compdat_nldd_1_proc
                                  IGNORE_EXTRA_KW BOTH
                                  MPI_PROCS 1
                                  TEST_ARGS --nonlinear-solver=nldd --matrix-add-well-contributions=true)
+
+add_test_compareSeparateECLFiles(CASENAME actionx_compdat_nldd_8_procs
+                                 DIR1 actionx
+                                 FILENAME1 COMPDAT_SHORT
+                                 DIR2 actionx
+                                 FILENAME2 ACTIONX_COMPDAT_SHORT
+                                 SIMULATOR flow
+                                 ABS_TOL ${abs_tol}
+                                 REL_TOL ${rel_tol}
+                                 IGNORE_EXTRA_KW BOTH
+                                 MPI_PROCS 2
+                                 TEST_ARGS --nonlinear-solver=nldd --matrix-add-well-contributions=true)