Merge pull request #1138 from andlaus/refactor_well_permeability

do not explicitly pass the permeability to the well model anymore
commit 99c69a17d5 by Atgeirr Flø Rasmussen, 2017-01-27 13:15:50 +01:00, committed by GitHub
10 changed files with 66 additions and 43 deletions
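For callers, the visible change is the WellsManager constructor: the trailing permeability pointer is gone, because the manager now extracts the permeability itself from the EclipseState (see the RockFromDeck and WellsManager hunks below). A minimal before/after sketch of a call site, using the same objects the updated tests use (wellsOld, wellsNew and timeStep are placeholder names for this sketch):

    // Before: callers supplied an interleaved permeability array (or NULL).
    Opm::WellsManager wellsOld(eclipseState, timeStep, *gridManager.c_grid(), NULL);

    // After: the argument is dropped; permeability is obtained internally
    // via RockFromDeck::extractInterleavedPermeability().
    Opm::WellsManager wellsNew(eclipseState, timeStep, *gridManager.c_grid());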

View File

@@ -74,8 +74,7 @@ namespace Opm
     RockFromDeck::RockFromDeck(std::size_t number_of_cells)
         : porosity_(number_of_cells, 0),
           // full permeability tensor in 3D stores 9 scalars
-          permeability_(number_of_cells*9, 0.0),
-          permfield_valid_(number_of_cells, false)
+          permeability_(number_of_cells*9, 0.0)
     {
     }
@@ -84,10 +83,13 @@ namespace Opm
                              const int* cart_dims)
     {
         assignPorosity(eclState, number_of_cells, global_cell);
-        permfield_valid_.assign(number_of_cells, false);
         const double perm_threshold = 0.0; // Maybe turn into parameter?
-        assignPermeability(eclState, number_of_cells, global_cell, cart_dims,
-                           perm_threshold);
+        extractInterleavedPermeability(eclState,
+                                       number_of_cells,
+                                       global_cell,
+                                       cart_dims,
+                                       perm_threshold,
+                                       permeability_);
     }

     void RockFromDeck::assignPorosity(const Opm::EclipseState& eclState,
@@ -105,11 +107,12 @@ namespace Opm
         }
     }

-    void RockFromDeck::assignPermeability(const Opm::EclipseState& eclState,
-                                          int number_of_cells,
-                                          const int* global_cell,
-                                          const int* cartdims,
-                                          double perm_threshold)
+    void RockFromDeck::extractInterleavedPermeability(const Opm::EclipseState& eclState,
+                                                      const int number_of_cells,
+                                                      const int* global_cell,
+                                                      const int* cartdims,
+                                                      const double perm_threshold,
+                                                      std::vector<double>& permeability)
     {
         const int dim = 3;
         const int nc = number_of_cells;
@@ -117,7 +120,7 @@ namespace Opm
         assert(cartdims[0]*cartdims[1]*cartdims[2] > 0);
         static_cast<void>(cartdims); // Squash warning in release mode.

-        permeability_.assign(dim * dim * nc, 0.0);
+        permeability.assign(dim * dim * nc, 0.0);

         std::vector<PermComponent> tensor;
         tensor.reserve(6);
@@ -132,7 +135,6 @@ namespace Opm
         assert (! tensor.empty());

         {
             int off = 0;
             for (int c = 0; c < nc; ++c, off += dim*dim) {
-                // SharedPermTensor K(dim, dim, &permeability_[off]);
                 int kix = 0;
@@ -146,16 +148,14 @@ namespace Opm
                             // values in the resulting array are the same
                             // in either order when viewed contiguously
                             // because fillTensor() enforces symmetry.
-                            permeability_[off + (i + dim*j)] =
+                            permeability[off + (i + dim*j)] =
                                 tensor[kmap[kix]][c];
                         }

                         // K(i,i) = std::max(K(i,i), perm_threshold);
-                        double& kii = permeability_[off + i*(dim + 1)];
+                        double& kii = permeability[off + i*(dim + 1)];
                         kii = std::max(kii, perm_threshold);
                     }
-                    permfield_valid_[c] = std::vector<unsigned char>::value_type(1);
                 }
             }
         }
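Since extractInterleavedPermeability() is now a static member that writes into a caller-supplied vector, it can be used without constructing a RockFromDeck instance. A minimal sketch of a standalone call, mirroring the one WellsManager::init() makes further down in this diff (eclState, number_of_cells, global_cell and cart_dims are assumed to come from an existing grid setup):

    std::vector<double> perm;
    Opm::RockFromDeck::extractInterleavedPermeability(eclState,
                                                      number_of_cells,
                                                      global_cell,
                                                      cart_dims,
                                                      /*perm_threshold=*/0.0,
                                                      perm);
    // perm now holds number_of_cells * 9 doubles: one full, symmetric
    // 3x3 tensor per cell, with diagonal entries clamped to at least 0.0.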

View File

@@ -77,15 +77,27 @@ namespace Opm
             return &permeability_[0];
         }

+        /// Convert the permeabilities for the logically Cartesian grid in EclipseState
+        /// to an interleaved array of size number_of_cells*dim*dim for the compressed array of cells.
+        /// \param eclState The EclipseState (processed deck) produced by the opm-parser code
+        /// \param number_of_cells The number of cells in the grid.
+        /// \param global_cell The mapping from local to global cell indices.
+        ///                    global_cell[i] is the corresponding global index of i.
+        /// \param cart_dims The size of the underlying Cartesian grid.
+        /// \param perm_threshold The lower threshold applied to each diagonal entry of the permeability tensors.
+        /// \param permeability The result array.
+        static
+        void extractInterleavedPermeability(const Opm::EclipseState& eclState,
+                                            const int number_of_cells,
+                                            const int* global_cell,
+                                            const int* cart_dims,
+                                            const double perm_threshold,
+                                            std::vector<double>& permeability);

     private:
         void assignPorosity(const Opm::EclipseState& eclState,
                             int number_of_cells,
                             const int* global_cell);
-        void assignPermeability(const Opm::EclipseState& eclState,
-                                int number_of_cells,
-                                const int* global_cell,
-                                const int* cart_dims,
-                                double perm_threshold);

         std::vector<double> porosity_;
         std::vector<double> permeability_;
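In the interleaved layout documented above, entry (i,j) of cell c's tensor lives at index c*9 + i + 3*j; because fillTensor() enforces symmetry, the transposed index c*9 + j + 3*i holds the same value. A small hypothetical accessor (not part of this patch) to illustrate the indexing:

    // Hypothetical helper: read K(i,j) for cell c from an interleaved
    // permeability array as produced by extractInterleavedPermeability().
    inline double permEntry(const std::vector<double>& perm,
                            const int c, const int i, const int j)
    {
        const int dim = 3;
        return perm[c*dim*dim + (i + dim*j)];
    }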

View File

@@ -328,8 +328,7 @@ namespace Opm
     /// Construct wells from deck.
     WellsManager::WellsManager(const Opm::EclipseState& eclipseState,
                                const size_t timeStep,
-                               const UnstructuredGrid& grid,
-                               const double* permeability)
+                               const UnstructuredGrid& grid)
         : w_(0), is_parallel_run_(false)
     {
         std::vector<double> dummy_well_potentials;
@@ -340,7 +339,7 @@ namespace Opm
              UgGridHelpers::globalCell(grid), UgGridHelpers::cartDims(grid),
              UgGridHelpers::dimensions(grid),
              UgGridHelpers::cell2Faces(grid), UgGridHelpers::beginFaceCentroids(grid),
-             permeability, dummy_list_econ_limited, dummy_well_potentials,
+             dummy_list_econ_limited, dummy_well_potentials,
              std::unordered_set<std::string>());
     }

View File

@@ -91,7 +91,6 @@
                      int dimensions,
                      const F2C& f2c,
                      FC begin_face_centroids,
-                     const double* permeability,
                      const DynamicListEconLimited& list_econ_limited,
                      bool is_parallel_run=false,
                      const std::vector<double>& well_potentials=std::vector<double>(),
@@ -99,8 +98,7 @@
         WellsManager(const Opm::EclipseState& eclipseState,
                      const size_t timeStep,
-                     const UnstructuredGrid& grid,
-                     const double* permeability);
+                     const UnstructuredGrid& grid);

         /// Destructor.
         ~WellsManager();
@@ -163,7 +161,6 @@
                   int dimensions,
                   const C2F& cell_to_faces,
                   FC begin_face_centroids,
-                  const double* permeability,
                   const DynamicListEconLimited& list_econ_limited,
                   const std::vector<double>& well_potentials,
                   const std::unordered_set<std::string>& deactivated_wells);
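The generic, template overload loses the parameter as well, so out-of-tree callers of that constructor must also drop the argument. A sketch of an adjusted call; the parameter order before dimensions is inferred from the init() call in the implementation file below, and all argument variables are assumed to be set up for the grid in use:

    Opm::WellsManager wellsManager(eclipseState, timeStep,
                                   number_of_cells, global_cell, cart_dims,
                                   dimensions, f2c, begin_face_centroids,
                                   list_econ_limited);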

View File

@@ -4,6 +4,7 @@
 #include <opm/common/ErrorMacros.hpp>
 #include <opm/common/OpmLog/OpmLog.hpp>
 #include <opm/core/utility/compressedToCartesian.hpp>
+#include <opm/core/props/rock/RockFromDeck.hpp>
 #include <opm/parser/eclipse/EclipseState/Grid/EclipseGrid.hpp>
 #include <opm/parser/eclipse/EclipseState/Schedule/CompletionSet.hpp>
 #include <opm/parser/eclipse/EclipseState/Schedule/Schedule.hpp>
@@ -311,7 +312,6 @@ WellsManager(const Opm::EclipseState& eclipseState,
              int dimensions,
              const C2F& cell_to_faces,
              FC begin_face_centroids,
-             const double* permeability,
              const DynamicListEconLimited& list_econ_limited,
              bool is_parallel_run,
              const std::vector<double>& well_potentials,
@@ -320,7 +320,7 @@ WellsManager(const Opm::EclipseState& eclipseState,
 {
     init(eclipseState, timeStep, number_of_cells, global_cell,
          cart_dims, dimensions,
-         cell_to_faces, begin_face_centroids, permeability, list_econ_limited, well_potentials, deactivated_wells);
+         cell_to_faces, begin_face_centroids, list_econ_limited, well_potentials, deactivated_wells);
 }

 /// Construct wells from deck.
@@ -334,7 +334,6 @@ WellsManager::init(const Opm::EclipseState& eclipseState,
                    int dimensions,
                    const C2F& cell_to_faces,
                    FC begin_face_centroids,
-                   const double* permeability,
                    const DynamicListEconLimited& list_econ_limited,
                    const std::vector<double>& well_potentials,
                    const std::unordered_set<std::string>& deactivated_wells)
@@ -392,13 +391,22 @@ WellsManager::init(const Opm::EclipseState& eclipseState,
         }
     }

+    std::vector<double> interleavedPerm;
+    RockFromDeck::extractInterleavedPermeability(eclipseState,
+                                                 number_of_cells,
+                                                 global_cell,
+                                                 cart_dims,
+                                                 0.0,
+                                                 interleavedPerm);
+
     createWellsFromSpecs(wells, timeStep, cell_to_faces,
                          cart_dims,
                          begin_face_centroids,
                          dimensions,
                          dz,
                          well_names, well_data, well_names_to_index,
-                         pu, cartesian_to_compressed, permeability, ntg,
+                         pu, cartesian_to_compressed, interleavedPerm.data(), ntg,
                          wells_on_proc, deactivated_wells, list_econ_limited);

     setupWellControls(wells, timeStep, well_names, pu, wells_on_proc, list_econ_limited);
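Two details worth noting here: interleavedPerm lives on the stack of init(), so the raw pointer handed to createWellsFromSpecs() is only valid during that call; and the threshold passed to extractInterleavedPermeability() is hard-coded to 0.0, matching the default that RockFromDeck::init() uses in the first file of this diff.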

View File

@@ -58,7 +58,7 @@ BOOST_AUTO_TEST_CASE(TestStoppedWells)
     // Both wells are open in the first schedule step
     {
-        Opm::WellsManager wellsManager(eclipseState , 0 , *gridManager.c_grid(), NULL);
+        Opm::WellsManager wellsManager(eclipseState , 0 , *gridManager.c_grid());
         const Wells* wells = wellsManager.c_wells();
         const struct WellControls* ctrls0 = wells->ctrls[0];
         const struct WellControls* ctrls1 = wells->ctrls[1];
@@ -78,7 +78,7 @@ BOOST_AUTO_TEST_CASE(TestStoppedWells)
     // The injector is stopped
     {
-        Opm::WellsManager wellsManager(eclipseState , 1 , *gridManager.c_grid(), NULL);
+        Opm::WellsManager wellsManager(eclipseState , 1 , *gridManager.c_grid());
         const Wells* wells = wellsManager.c_wells();
         const struct WellControls* ctrls0 = wells->ctrls[0];
         const struct WellControls* ctrls1 = wells->ctrls[1];

View File

@@ -185,19 +185,19 @@ BOOST_AUTO_TEST_CASE(New_Constructor_Works) {
     Opm::GridManager gridManager(eclipseState.getInputGrid());

     {
-        Opm::WellsManager wellsManager(eclipseState, 0, *gridManager.c_grid(), NULL);
+        Opm::WellsManager wellsManager(eclipseState, 0, *gridManager.c_grid());
         wells_static_check(wellsManager.c_wells());
         check_controls_epoch0(wellsManager.c_wells()->ctrls);
     }

     {
-        Opm::WellsManager wellsManager(eclipseState, 1, *gridManager.c_grid(), NULL);
+        Opm::WellsManager wellsManager(eclipseState, 1, *gridManager.c_grid());
         wells_static_check(wellsManager.c_wells());
         check_controls_epoch1(wellsManager.c_wells()->ctrls);
     }

     {
-        Opm::WellsManager wellsManager(eclipseState, 3, *gridManager.c_grid(), NULL);
+        Opm::WellsManager wellsManager(eclipseState, 3, *gridManager.c_grid());
         const Wells* wells = wellsManager.c_wells();

         // There are 3 wells in total in the deck at the 3rd schedule step.
@@ -220,8 +220,8 @@ BOOST_AUTO_TEST_CASE(WellsEqual) {
     Opm::EclipseState eclipseState(deck, parseContext);
     Opm::GridManager gridManager(eclipseState.getInputGrid());

-    Opm::WellsManager wellsManager0(eclipseState, 0, *gridManager.c_grid(), NULL);
-    Opm::WellsManager wellsManager1(eclipseState, 1, *gridManager.c_grid(), NULL);
+    Opm::WellsManager wellsManager0(eclipseState, 0, *gridManager.c_grid());
+    Opm::WellsManager wellsManager1(eclipseState, 1, *gridManager.c_grid());

     BOOST_CHECK(wells_equal( wellsManager0.c_wells() , wellsManager0.c_wells(),false));
     BOOST_CHECK(!wells_equal( wellsManager0.c_wells() , wellsManager1.c_wells(),false));
@@ -235,8 +235,8 @@ BOOST_AUTO_TEST_CASE(ControlsEqual) {
     Opm::EclipseState eclipseState(deck, parseContext);
     Opm::GridManager gridManager(eclipseState.getInputGrid());

-    Opm::WellsManager wellsManager0(eclipseState, 0, *gridManager.c_grid(), NULL);
-    Opm::WellsManager wellsManager1(eclipseState, 1, *gridManager.c_grid(), NULL);
+    Opm::WellsManager wellsManager0(eclipseState, 0, *gridManager.c_grid());
+    Opm::WellsManager wellsManager1(eclipseState, 1, *gridManager.c_grid());

     BOOST_CHECK(well_controls_equal( wellsManager0.c_wells()->ctrls[0] , wellsManager0.c_wells()->ctrls[0] , false));
     BOOST_CHECK(well_controls_equal( wellsManager0.c_wells()->ctrls[1] , wellsManager0.c_wells()->ctrls[1] , false));
@@ -257,7 +257,7 @@ BOOST_AUTO_TEST_CASE(WellShutOK) {
     Opm::EclipseState eclipseState(deck, parseContext);
     Opm::GridManager gridManager(eclipseState.getInputGrid());

-    Opm::WellsManager wellsManager2(eclipseState, 2, *gridManager.c_grid(), NULL);
+    Opm::WellsManager wellsManager2(eclipseState, 2, *gridManager.c_grid());

     // Shut wells are not added to the deck, i.e. the number of wells should be 2 - 1.
     BOOST_CHECK(wellsManager2.c_wells()->number_of_wells == 1);
@@ -273,7 +273,7 @@ BOOST_AUTO_TEST_CASE(WellSTOPOK) {
     Opm::EclipseState eclipseState(deck, parseContext);
     Opm::GridManager gridManager(eclipseState.getInputGrid());

-    Opm::WellsManager wellsManager(eclipseState, 0, *gridManager.c_grid(), NULL);
+    Opm::WellsManager wellsManager(eclipseState, 0, *gridManager.c_grid());

     const Wells* wells = wellsManager.c_wells();
     const struct WellControls* ctrls0 = wells->ctrls[0];

View File

@@ -21,6 +21,8 @@ DZV
 TOPS
 100*10 /

+PERMX
+500*1.0 /

 SCHEDULE
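The PERMX additions to this and the two following test decks are a direct consequence of the refactor: WellsManager now always reads permeability from the EclipseState, so the decks must define one. In Eclipse deck syntax, N*v expands to N copies of v, so 500*1.0 assigns the value 1.0 to each of the deck's 500 cells.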

View File

@@ -22,6 +22,9 @@ DZV
 TOPS
 100*10 /

+PERMX
+500*1.0 /
+
 SCHEDULE
 WELSPECS

View File

@@ -26,6 +26,8 @@ TOPS
 100*10
 /

+PERMX
+100*1.0 /

 SCHEDULE