do not explicitly pass the permeability to the well model anymore

this information is already part of the EclipseState. The reason why
this should IMO be avoided is that it forces an implementation detail
of the simulator (the ordering of the permeability matrices) onto the
well model. If this needs to be done for performance reasons, IMO it
would be smarter to pass an array of matrices instead of passing a
raw array of doubles. I doubt that this is necessary, though:
completing the full Norne deck takes about 0.25 seconds longer on my
machine, which is substantially less than 0.1% of the total runtime.
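
For illustration, a minimal sketch of that alternative; PermTensor and
this overload of createWellsFromSpecs() are hypothetical and not part
of this commit:

    #include <vector>

    // A full 3x3 permeability tensor of a single cell, stored row-major.
    struct PermTensor
    {
        double data[9];
    };

    // The well model would then receive one self-contained tensor per
    // cell instead of a raw pointer into an interleaved double array,
    // so it would no longer depend on the simulator's storage layout.
    void createWellsFromSpecs(const std::vector<PermTensor>& permeability
                              /* , ... remaining arguments unchanged */);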

in order to avoid code duplication, the permeability extraction
function of the RockFromDeck class is now made a public static
function and used as an implementation detail of the WellsManager.
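
For reference, the new call made inside WellsManager::init(), mirroring
the WellsManager_impl.hpp hunk further down (0.0 is the permeability
threshold):

    // WellsManager now obtains the interleaved permeability array itself
    // via the new public static helper of RockFromDeck.
    std::vector<double> interleavedPerm;
    Opm::RockFromDeck::extractInterleavedPermeability(eclipseState,
                                                      number_of_cells,
                                                      global_cell,
                                                      cart_dims,
                                                      /*perm_threshold=*/0.0,
                                                      interleavedPerm);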

finally, the permfield_valid_ attribute is removed from the
RockFromDeck class because this data was unused and not accessible via
the class' public API.
Andreas Lauser 2017-01-26 17:36:00 +01:00
parent 5e67765229
commit c5a0ea7524
12 changed files with 68 additions and 45 deletions

View File

@@ -182,7 +182,7 @@ try
// Rock and fluid init
IncompPropertiesSinglePhase props(deck, eclipseState, grid);
// Wells init.
WellsManager wells_manager(eclipseState , 0, grid, props.permeability());
WellsManager wells_manager(eclipseState , 0, grid);
std::shared_ptr<Wells> my_wells(clone_wells(wells_manager.c_wells()), destroy_wells);
setBhpWells(*my_wells);

View File

@@ -49,7 +49,7 @@ try
RockCompressibility rock_comp(eclipseState);
// Finally handle the wells
WellsManager wells(eclipseState , 0 , *grid.c_grid(), incomp_properties.permeability());
WellsManager wells(eclipseState , 0 , *grid.c_grid());
double gravity[3] = {0.0, 0.0, parameters.getDefault<double>("gravity", 0.0)};
Opm::LinearSolverFactory linsolver(parameters);

View File

@@ -74,8 +74,7 @@ namespace Opm
RockFromDeck::RockFromDeck(std::size_t number_of_cells)
: porosity_(number_of_cells, 0),
// full permeability tensor in 3D stores 9 scalars
permeability_(number_of_cells*9, 0.0),
permfield_valid_(number_of_cells, false)
permeability_(number_of_cells*9, 0.0)
{
}
@@ -84,10 +83,13 @@ namespace Opm
const int* cart_dims)
{
assignPorosity(eclState, number_of_cells, global_cell);
permfield_valid_.assign(number_of_cells, false);
const double perm_threshold = 0.0; // Maybe turn into parameter?
assignPermeability(eclState, number_of_cells, global_cell, cart_dims,
perm_threshold);
extractInterleavedPermeability(eclState,
number_of_cells,
global_cell,
cart_dims,
perm_threshold,
permeability_);
}
void RockFromDeck::assignPorosity(const Opm::EclipseState& eclState,
@@ -105,11 +107,12 @@ namespace Opm
}
}
void RockFromDeck::assignPermeability(const Opm::EclipseState& eclState,
int number_of_cells,
const int* global_cell,
const int* cartdims,
double perm_threshold)
void RockFromDeck::extractInterleavedPermeability(const Opm::EclipseState& eclState,
const int number_of_cells,
const int* global_cell,
const int* cartdims,
const double perm_threshold,
std::vector<double>& permeability)
{
const int dim = 3;
const int nc = number_of_cells;
@@ -117,7 +120,7 @@
assert(cartdims[0]*cartdims[1]*cartdims[2] > 0);
static_cast<void>(cartdims); // Squash warning in release mode.
permeability_.assign(dim * dim * nc, 0.0);
permeability.assign(dim * dim * nc, 0.0);
std::vector<PermComponent> tensor;
tensor.reserve(6);
@@ -132,7 +135,6 @@ namespace Opm
assert (! tensor.empty());
{
int off = 0;
for (int c = 0; c < nc; ++c, off += dim*dim) {
// SharedPermTensor K(dim, dim, &permeability_[off]);
int kix = 0;
@@ -146,16 +148,14 @@ namespace Opm
// values in the resulting array are the same
// in either order when viewed contiguously
// because fillTensor() enforces symmetry.
permeability_[off + (i + dim*j)] =
permeability[off + (i + dim*j)] =
tensor[kmap[kix]][c];
}
// K(i,i) = std::max(K(i,i), perm_threshold);
double& kii = permeability_[off + i*(dim + 1)];
double& kii = permeability[off + i*(dim + 1)];
kii = std::max(kii, perm_threshold);
}
permfield_valid_[c] = std::vector<unsigned char>::value_type(1);
}
}
}

View File

@@ -77,15 +77,27 @@ namespace Opm
return &permeability_[0];
}
/// Convert the permeabilities for the logically Cartesian grid in EclipseState to
/// an array of size number_of_cells*dim*dim for the compressed array.
/// \param eclState The EclipseState (processed deck) produced by the opm-parser code
/// \param number_of_cells The number of cells in the grid.
/// \param global_cell The mapping from local to global cell indices.
/// global_cell[i] is the corresponding global index of i.
/// \param cart_dims The size of the underlying Cartesian grid.
/// \param perm_threshold The lower threshold applied to the diagonal permeability entries.
/// \param permeability The result array (output argument).
static
void extractInterleavedPermeability(const Opm::EclipseState& eclState,
const int number_of_cells,
const int* global_cell,
const int* cart_dims,
const double perm_threshold,
std::vector<double>& permeability);
private:
void assignPorosity(const Opm::EclipseState& eclState,
int number_of_cells,
const int* global_cell);
void assignPermeability(const Opm::EclipseState& eclState,
int number_of_cells,
const int* global_cell,
const int* cart_dims,
double perm_threshold);
std::vector<double> porosity_;
std::vector<double> permeability_;

View File

@@ -328,8 +328,7 @@ namespace Opm
/// Construct wells from deck.
WellsManager::WellsManager(const Opm::EclipseState& eclipseState,
const size_t timeStep,
const UnstructuredGrid& grid,
const double* permeability)
const UnstructuredGrid& grid)
: w_(0), is_parallel_run_(false)
{
std::vector<double> dummy_well_potentials;
@@ -340,7 +339,7 @@ namespace Opm
UgGridHelpers::globalCell(grid), UgGridHelpers::cartDims(grid),
UgGridHelpers::dimensions(grid),
UgGridHelpers::cell2Faces(grid), UgGridHelpers::beginFaceCentroids(grid),
permeability, dummy_list_econ_limited, dummy_well_potentials,
dummy_list_econ_limited, dummy_well_potentials,
std::unordered_set<std::string>());
}

View File

@@ -91,7 +91,6 @@ namespace Opm
int dimensions,
const F2C& f2c,
FC begin_face_centroids,
const double* permeability,
const DynamicListEconLimited& list_econ_limited,
bool is_parallel_run=false,
const std::vector<double>& well_potentials=std::vector<double>(),
@@ -99,8 +98,7 @@ namespace Opm
WellsManager(const Opm::EclipseState& eclipseState,
const size_t timeStep,
const UnstructuredGrid& grid,
const double* permeability);
const UnstructuredGrid& grid);
/// Destructor.
~WellsManager();
@@ -163,7 +161,6 @@ namespace Opm
int dimensions,
const C2F& cell_to_faces,
FC begin_face_centroids,
const double* permeability,
const DynamicListEconLimited& list_econ_limited,
const std::vector<double>& well_potentials,
const std::unordered_set<std::string>& deactivated_wells);

View File

@@ -4,6 +4,7 @@
#include <opm/common/ErrorMacros.hpp>
#include <opm/common/OpmLog/OpmLog.hpp>
#include <opm/core/utility/compressedToCartesian.hpp>
#include <opm/core/props/rock/RockFromDeck.hpp>
#include <opm/parser/eclipse/EclipseState/Grid/EclipseGrid.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/CompletionSet.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/Schedule.hpp>
@@ -311,7 +312,6 @@ WellsManager(const Opm::EclipseState& eclipseState,
int dimensions,
const C2F& cell_to_faces,
FC begin_face_centroids,
const double* permeability,
const DynamicListEconLimited& list_econ_limited,
bool is_parallel_run,
const std::vector<double>& well_potentials,
@@ -320,7 +320,7 @@ WellsManager(const Opm::EclipseState& eclipseState,
{
init(eclipseState, timeStep, number_of_cells, global_cell,
cart_dims, dimensions,
cell_to_faces, begin_face_centroids, permeability, list_econ_limited, well_potentials, deactivated_wells);
cell_to_faces, begin_face_centroids, list_econ_limited, well_potentials, deactivated_wells);
}
/// Construct wells from deck.
@@ -334,7 +334,6 @@ WellsManager::init(const Opm::EclipseState& eclipseState,
int dimensions,
const C2F& cell_to_faces,
FC begin_face_centroids,
const double* permeability,
const DynamicListEconLimited& list_econ_limited,
const std::vector<double>& well_potentials,
const std::unordered_set<std::string>& deactivated_wells)
@@ -392,13 +391,22 @@ WellsManager::init(const Opm::EclipseState& eclipseState,
}
}
std::vector<double> interleavedPerm;
RockFromDeck::extractInterleavedPermeability(eclipseState,
number_of_cells,
global_cell,
cart_dims,
0.0,
interleavedPerm);
createWellsFromSpecs(wells, timeStep, cell_to_faces,
cart_dims,
begin_face_centroids,
dimensions,
dz,
well_names, well_data, well_names_to_index,
pu, cartesian_to_compressed, permeability, ntg,
pu, cartesian_to_compressed, interleavedPerm.data(), ntg,
wells_on_proc, deactivated_wells, list_econ_limited);
setupWellControls(wells, timeStep, well_names, pu, wells_on_proc, list_econ_limited);

View File

@@ -58,7 +58,7 @@ BOOST_AUTO_TEST_CASE(TestStoppedWells)
// Both wells are open in the first schedule step
{
Opm::WellsManager wellsManager(eclipseState , 0 , *gridManager.c_grid(), NULL);
Opm::WellsManager wellsManager(eclipseState , 0 , *gridManager.c_grid());
const Wells* wells = wellsManager.c_wells();
const struct WellControls* ctrls0 = wells->ctrls[0];
const struct WellControls* ctrls1 = wells->ctrls[1];
@@ -78,7 +78,7 @@ BOOST_AUTO_TEST_CASE(TestStoppedWells)
// The injector is stopped
{
Opm::WellsManager wellsManager(eclipseState , 1 , *gridManager.c_grid(), NULL);
Opm::WellsManager wellsManager(eclipseState , 1 , *gridManager.c_grid());
const Wells* wells = wellsManager.c_wells();
const struct WellControls* ctrls0 = wells->ctrls[0];
const struct WellControls* ctrls1 = wells->ctrls[1];

View File

@@ -185,19 +185,19 @@ BOOST_AUTO_TEST_CASE(New_Constructor_Works) {
Opm::GridManager gridManager(eclipseState.getInputGrid());
{
Opm::WellsManager wellsManager(eclipseState, 0, *gridManager.c_grid(), NULL);
Opm::WellsManager wellsManager(eclipseState, 0, *gridManager.c_grid());
wells_static_check(wellsManager.c_wells());
check_controls_epoch0(wellsManager.c_wells()->ctrls);
}
{
Opm::WellsManager wellsManager(eclipseState, 1, *gridManager.c_grid(), NULL);
Opm::WellsManager wellsManager(eclipseState, 1, *gridManager.c_grid());
wells_static_check(wellsManager.c_wells());
check_controls_epoch1(wellsManager.c_wells()->ctrls);
}
{
Opm::WellsManager wellsManager(eclipseState, 3, *gridManager.c_grid(), NULL);
Opm::WellsManager wellsManager(eclipseState, 3, *gridManager.c_grid());
const Wells* wells = wellsManager.c_wells();
// There is 3 wells in total in the deck at the 3rd schedule step.
@@ -220,8 +220,8 @@ BOOST_AUTO_TEST_CASE(WellsEqual) {
Opm::EclipseState eclipseState(deck, parseContext);
Opm::GridManager gridManager(eclipseState.getInputGrid());
Opm::WellsManager wellsManager0(eclipseState, 0, *gridManager.c_grid(), NULL);
Opm::WellsManager wellsManager1(eclipseState, 1, *gridManager.c_grid(), NULL);
Opm::WellsManager wellsManager0(eclipseState, 0, *gridManager.c_grid());
Opm::WellsManager wellsManager1(eclipseState, 1, *gridManager.c_grid());
BOOST_CHECK(wells_equal( wellsManager0.c_wells() , wellsManager0.c_wells(),false));
BOOST_CHECK(!wells_equal( wellsManager0.c_wells() , wellsManager1.c_wells(),false));
@@ -235,8 +235,8 @@ BOOST_AUTO_TEST_CASE(ControlsEqual) {
Opm::EclipseState eclipseState(deck, parseContext);
Opm::GridManager gridManager(eclipseState.getInputGrid());
Opm::WellsManager wellsManager0(eclipseState, 0, *gridManager.c_grid(), NULL);
Opm::WellsManager wellsManager1(eclipseState, 1, *gridManager.c_grid(), NULL);
Opm::WellsManager wellsManager0(eclipseState, 0, *gridManager.c_grid());
Opm::WellsManager wellsManager1(eclipseState, 1, *gridManager.c_grid());
BOOST_CHECK(well_controls_equal( wellsManager0.c_wells()->ctrls[0] , wellsManager0.c_wells()->ctrls[0] , false));
BOOST_CHECK(well_controls_equal( wellsManager0.c_wells()->ctrls[1] , wellsManager0.c_wells()->ctrls[1] , false));
@@ -257,7 +257,7 @@ BOOST_AUTO_TEST_CASE(WellShutOK) {
Opm::EclipseState eclipseState(deck, parseContext);
Opm::GridManager gridManager(eclipseState.getInputGrid());
Opm::WellsManager wellsManager2(eclipseState, 2, *gridManager.c_grid(), NULL);
Opm::WellsManager wellsManager2(eclipseState, 2, *gridManager.c_grid());
// Shut wells are not added to the deck. i.e number of wells should be 2-1
BOOST_CHECK(wellsManager2.c_wells()->number_of_wells == 1);
@@ -273,7 +273,7 @@ BOOST_AUTO_TEST_CASE(WellSTOPOK) {
Opm::EclipseState eclipseState(deck, parseContext);
Opm::GridManager gridManager(eclipseState.getInputGrid());
Opm::WellsManager wellsManager(eclipseState, 0, *gridManager.c_grid(), NULL);
Opm::WellsManager wellsManager(eclipseState, 0, *gridManager.c_grid());
const Wells* wells = wellsManager.c_wells();
const struct WellControls* ctrls0 = wells->ctrls[0];

View File

@@ -21,6 +21,8 @@
TOPS
100*10 /
PERMX
500*1.0 /
SCHEDULE

View File

@@ -22,6 +22,9 @@ DZV
TOPS
100*10 /
PERMX
500*1.0 /
SCHEDULE
WELSPECS

View File

@@ -26,6 +26,8 @@ TOPS
100*10
/
PERMX
100*1.0 /
SCHEDULE