ParallelWellInfo: template Scalar type

Author: Arne Morten Kvarving
Date: 2024-02-20 15:35:13 +01:00
Parent: 81189b89c7
Commit: 16f5290038
33 changed files with 316 additions and 218 deletions
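The change itself is mechanical: ParallelWellInfo (and the related CommunicateAboveBelow helper) gains a Scalar template parameter, so every call site now spells out the floating-point type explicitly, here double. Below is a minimal, self-contained sketch of the pattern; the stand-in class only mimics the shape of the real one in opm-simulators, and its member bodies are placeholders, not the actual implementation.

// Sketch only: a stand-in mimicking the shape of Opm::ParallelWellInfo<Scalar>
// after this commit. The real class in opm-simulators wraps an MPI communicator
// and perforation bookkeeping; the bodies below are placeholders.
#include <iostream>
#include <string>
#include <utility>

namespace sketch {

template<class Scalar>                              // Scalar is the new template parameter
class ParallelWellInfo {
public:
    static constexpr int INVALID_ECL_INDEX = -1;    // sentinel, as used in the tests

    ParallelWellInfo() = default;
    ParallelWellInfo(std::string name, bool owner)
        : name_(std::move(name)), owner_(owner) {}

    // Quantities that used to be hard-coded as double now use Scalar.
    Scalar broadcastFirstPerforationValue(Scalar local) const { return local; }

    const std::string& name() const { return name_; }
    bool isOwner() const { return owner_; }

private:
    std::string name_{};
    bool owner_ = true;
};

} // namespace sketch

int main()
{
    // Before this commit a call site read:   Opm::ParallelWellInfo pw("WELL1", true);
    // After it, the scalar type is explicit: Opm::ParallelWellInfo<double> pw("WELL1", true);
    sketch::ParallelWellInfo<double> pw("WELL1", true);
    std::cout << pw.name() << ' ' << pw.broadcastFirstPerforationValue(1.0) << '\n';
    return 0;
}

Pinning the parameter to double at every call site keeps behaviour identical to before; templating on Scalar presumably prepares the ground for single-precision (float) instantiations of the simulator.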

View File

@@ -149,7 +149,7 @@ TEST_FOR_TYPE_NAMED(BVec, BlockVectorWrapper)
BOOST_AUTO_TEST_CASE(SingleWellState)
{
-Opm::ParallelWellInfo dummy;
+Opm::ParallelWellInfo<double> dummy;
auto data_out = Opm::SingleWellState<double>::serializationTestObject(dummy);
Opm::Serialization::MemPacker packer;
Opm::Serializer ser(packer);
@@ -178,7 +178,7 @@ BOOST_AUTO_TEST_CASE(WellContainer)
BOOST_AUTO_TEST_CASE(WellState)
{
-Opm::ParallelWellInfo dummy;
+Opm::ParallelWellInfo<double> dummy;
auto data_out = Opm::WellState<double>::serializationTestObject(dummy);
Opm::Serialization::MemPacker packer;
Opm::Serializer ser(packer);
@@ -193,7 +193,7 @@ BOOST_AUTO_TEST_CASE(WellState)
BOOST_AUTO_TEST_CASE(WGState)
{
-Opm::ParallelWellInfo dummy;
+Opm::ParallelWellInfo<double> dummy;
auto data_out = Opm::WGState<double>::serializationTestObject(dummy);
Opm::Serialization::MemPacker packer;
Opm::Serializer ser(packer);
@@ -354,7 +354,7 @@ public:
}
private:
-ParallelWellInfo dummy;
+ParallelWellInfo<double> dummy;
};
}

View File

@@ -457,10 +457,10 @@ namespace {
return w;
}
-Opm::ParallelWellInfo
+Opm::ParallelWellInfo<double>
parallelWellInfo(const Opm::Parallel::Communication& comm)
{
-auto pwi = Opm::ParallelWellInfo {
+auto pwi = Opm::ParallelWellInfo<double> {
std::pair { std::string{ "P" }, true }, comm
};
@@ -469,7 +469,7 @@ namespace {
const auto numLocalPerf = 3;
const auto perfOffset = comm.rank() * numLocalPerf;
-auto prev = Opm::ParallelWellInfo::INVALID_ECL_INDEX;
+auto prev = Opm::ParallelWellInfo<double>::INVALID_ECL_INDEX;
for (auto perf = 0; perf < numLocalPerf; ++perf) {
const auto curr = perfOffset + perf;
pwi.pushBackEclIndex(prev, curr);
@@ -552,7 +552,7 @@ namespace {
Opm::Parallel::Communication comm;
Opm::GridDims cellIndexMap;
Opm::ParallelWBPCalculation wbpCalcService;
-Opm::ParallelWellInfo pwi;
+Opm::ParallelWellInfo<double> pwi;
};
} // Anonymous namespace

View File

@@ -97,7 +97,7 @@ std::ostream& operator<<(std::ostream& os, const std::pair<std::string, bool>& p
}
namespace Opm
{
-std::ostream& operator<<(std::ostream& os, const Opm::ParallelWellInfo& w)
+std::ostream& operator<<(std::ostream& os, const Opm::ParallelWellInfo<double>& w)
{
return os << "{" << w.name() << " "<< w.hasLocalCells() << " "<<
w.isOwner() << "}";
@@ -117,7 +117,7 @@ BOOST_AUTO_TEST_CASE(ParallelWellComparison)
else
pairs = {{"Test1", false},{"Test2", true}, {"Test1", true} };
-std::vector<Opm::ParallelWellInfo> well_info;
+std::vector<Opm::ParallelWellInfo<double>> well_info;
for (const auto& wellinfo : pairs) {
well_info.emplace_back(wellinfo, Opm::Parallel::Communication());
@@ -138,16 +138,16 @@ BOOST_AUTO_TEST_CASE(ParallelWellComparison)
BOOST_CHECK(well_info[0] != well_info[1]);
-Opm::ParallelWellInfo well0, well1;
+Opm::ParallelWellInfo<double> well0, well1;
BOOST_CHECK(well0 == well1);
#if HAVE_MPI
BOOST_CHECK(well0.communication()==helper.getLocalCommunicator());
#endif
-Opm::ParallelWellInfo well2("Test", false);
+Opm::ParallelWellInfo<double> well2("Test", false);
std::pair<std::string, bool> pwell={"Test", true};
BOOST_CHECK(well2 < pwell);
-Opm::ParallelWellInfo well3("Test", true);
+Opm::ParallelWellInfo<double> well3("Test", true);
BOOST_CHECK(! (well3 < pwell));
pwell.second = false;
BOOST_CHECK(! (well3 < pwell));
@@ -171,7 +171,7 @@ BOOST_AUTO_TEST_CASE(ParallelWellComparison)
BOOST_AUTO_TEST_CASE(CommunicateAboveBelowSelf)
{
auto comm = Dune::MPIHelper::getLocalCommunicator();
-Opm::CommunicateAboveBelow commAboveBelow{ comm };
+Opm::CommunicateAboveBelow<double> commAboveBelow{ comm };
for(std::size_t count=0; count < 2; ++count)
{
std::vector<int> eclIndex = {0, 1, 2, 3, 7 , 8, 10, 11};
@@ -206,7 +206,7 @@ BOOST_AUTO_TEST_CASE(CommunicateAboveBelowSelf)
BOOST_AUTO_TEST_CASE(CommunicateAboveBelowSelf1)
{
auto comm = Dune::MPIHelper::getLocalCommunicator();
-Opm::CommunicateAboveBelow commAboveBelow{ comm };
+Opm::CommunicateAboveBelow<double> commAboveBelow{ comm };
for(std::size_t count=0; count < 2; ++count)
{
std::vector<int> eclIndex = {0};
@@ -291,7 +291,7 @@ BOOST_AUTO_TEST_CASE(CommunicateAboveBelowParallel)
{
auto comm = Opm::Parallel::Communication(Dune::MPIHelper::getCommunicator());
-Opm::CommunicateAboveBelow commAboveBelow{ comm };
+Opm::CommunicateAboveBelow<double> commAboveBelow{ comm };
for(std::size_t count=0; count < 2; ++count)
{
auto globalEclIndex = createGlobalEclIndex(comm);
@@ -351,7 +351,7 @@ BOOST_AUTO_TEST_CASE(PartialSumself)
{
auto comm = Dune::MPIHelper::getLocalCommunicator();
-Opm::CommunicateAboveBelow commAboveBelow{ comm };
+Opm::CommunicateAboveBelow<double> commAboveBelow{ comm };
std::vector<int> eclIndex = {0, 1, 2, 3, 7 , 8, 10, 11};
std::vector<double> current(eclIndex.size());
std::transform(eclIndex.begin(), eclIndex.end(), current.begin(),
@@ -383,7 +383,7 @@ BOOST_AUTO_TEST_CASE(PartialSumParallel)
auto comm = Opm::Parallel::Communication(Dune::MPIHelper::getCommunicator());
-Opm::CommunicateAboveBelow commAboveBelow{ comm };
+Opm::CommunicateAboveBelow<double> commAboveBelow{ comm };
auto globalEclIndex = createGlobalEclIndex(comm);
std::vector<double> globalCurrent(globalEclIndex.size());
initRandomNumbers(std::begin(globalCurrent), std::end(globalCurrent),
@@ -411,7 +411,7 @@ void testGlobalPerfFactoryParallel(int num_component, bool local_consecutive = f
{
auto comm = Opm::Parallel::Communication(Dune::MPIHelper::getCommunicator());
-Opm::ParallelWellInfo wellInfo{ {"Test", true }, comm };
+Opm::ParallelWellInfo<double> wellInfo{ {"Test", true }, comm };
auto globalEclIndex = createGlobalEclIndex(comm);
std::vector<double> globalCurrent(globalEclIndex.size() * num_component);
std::vector<double> globalAdd(globalEclIndex.size() * num_component);
@@ -425,7 +425,7 @@ void testGlobalPerfFactoryParallel(int num_component, bool local_consecutive = f
local_consecutive);
// A hack to get local values to add.
-Opm::ParallelWellInfo dummy{ {"Test", true }, comm };
+Opm::ParallelWellInfo<double> dummy{ {"Test", true }, comm };
auto localAdd = populateCommAbove(dummy, comm, globalEclIndex,
globalAdd, num_component,
local_consecutive);
@@ -476,7 +476,7 @@ BOOST_AUTO_TEST_CASE(GlobalPerfFactoryParallel1)
BOOST_AUTO_TEST_CASE(EmptyWell) {
auto comm = Opm::Parallel::Communication(Dune::MPIHelper::getCommunicator());
-Opm::ParallelWellInfo pw({"WELL1", true}, comm);
+Opm::ParallelWellInfo<double> pw({"WELL1", true}, comm);
pw.communicateFirstPerforation(false);
double local_p = 1;
auto global_p = pw.broadcastFirstPerforationValue(local_p);

View File

@@ -131,7 +131,7 @@ BOOST_AUTO_TEST_CASE(TestStandardWellInput) {
pdata[c].ecl_index = c;
}
-Opm::ParallelWellInfo pinfo{well.name()};
+Opm::ParallelWellInfo<double> pinfo{well.name()};
BOOST_CHECK_THROW( StandardWell( well, pinfo, -1, param, *rateConverter, 0, 3, 3, 0, pdata), std::invalid_argument);
}
@@ -166,7 +166,7 @@ BOOST_AUTO_TEST_CASE(TestBehavoir) {
pdata[c].ecl_index = c;
}
-Opm::ParallelWellInfo pinfo{wells_ecl[w].name()};
+Opm::ParallelWellInfo<double> pinfo{wells_ecl[w].name()};
wells.emplace_back(new StandardWell(wells_ecl[w], pinfo, current_timestep, param, *rateConverter, 0, 3, 3, w, pdata) );
}
}

View File

@@ -139,7 +139,7 @@ struct Setup
namespace {
Opm::WellState<double>
buildWellState(const Setup& setup, const std::size_t timeStep,
-std::vector<Opm::ParallelWellInfo>& pinfos)
+std::vector<Opm::ParallelWellInfo<double>>& pinfos)
{
auto state = Opm::WellState<double>{setup.pu};
@@ -149,7 +149,7 @@ namespace {
auto wells = setup.sched.getWells(timeStep);
pinfos.resize(wells.size());
-std::vector<std::reference_wrapper<Opm::ParallelWellInfo>> ppinfos;
+std::vector<std::reference_wrapper<Opm::ParallelWellInfo<double>>> ppinfos;
auto pw = pinfos.begin();
for (const auto& well : wells)
@@ -259,7 +259,7 @@ BOOST_AUTO_TEST_CASE(Linearisation)
const Setup setup{ "msw.data" };
const auto tstep = std::size_t{0};
-std::vector<Opm::ParallelWellInfo> pinfos;
+std::vector<Opm::ParallelWellInfo<double>> pinfos;
const auto wstate = buildWellState(setup, tstep, pinfos);
const auto& ws = wstate.well("PROD01");
@@ -276,7 +276,7 @@ BOOST_AUTO_TEST_CASE(Pressure)
const Setup setup{ "msw.data" };
const auto tstep = std::size_t{0};
-std::vector<Opm::ParallelWellInfo> pinfos;
+std::vector<Opm::ParallelWellInfo<double>> pinfos;
auto wstate = buildWellState(setup, tstep, pinfos);
const auto& wells = setup.sched.getWells(tstep);
@@ -315,7 +315,7 @@ BOOST_AUTO_TEST_CASE(Rates)
const Setup setup{ "msw.data" };
const auto tstep = std::size_t{0};
-std::vector<Opm::ParallelWellInfo> pinfos;
+std::vector<Opm::ParallelWellInfo<double>> pinfos;
auto wstate = buildWellState(setup, tstep, pinfos);
const auto wells = setup.sched.getWells(tstep);
@@ -368,7 +368,7 @@ BOOST_AUTO_TEST_CASE(STOP_well)
*/
const Setup setup{ "wells_manager_data_wellSTOP.data" };
-std::vector<Opm::ParallelWellInfo> pinfos;
+std::vector<Opm::ParallelWellInfo<double>> pinfos;
auto wstate = buildWellState(setup, 0, pinfos);
for (std::size_t well_index = 0; well_index < setup.sched.numWells(0); well_index++) {
const auto& ws = wstate.well(well_index);
@@ -525,7 +525,7 @@ BOOST_AUTO_TEST_CASE(TESTSegmentState) {
BOOST_AUTO_TEST_CASE(TESTSegmentState2) {
const Setup setup{ "msw.data" };
-std::vector<Opm::ParallelWellInfo> pinfo;
+std::vector<Opm::ParallelWellInfo<double>> pinfo;
const auto wstate = buildWellState(setup, 0, pinfo);
const auto& well = setup.sched.getWell("PROD01", 0);
const auto& ws = wstate.well("PROD01");
@@ -581,7 +581,7 @@ BOOST_AUTO_TEST_CASE(TESTPerfData) {
BOOST_AUTO_TEST_CASE(TestSingleWellState) {
-Opm::ParallelWellInfo pinfo;
+Opm::ParallelWellInfo<double> pinfo;
std::vector<Opm::PerforationData<double>> connections = {{0,1,1,0,0},{1,1,1,0,1},{2,1,1,0,2}};
Opm::PhaseUsage pu;