mirror of
https://github.com/OPM/ResInsight.git
synced 2024-12-29 10:21:54 -06:00
Add Python linting using black (#7276)
This commit is contained in:
parent
0ec612ae86
commit
1bacd41037
.github/workflows/python-linting.yml (vendored, new file, 23 lines)
@@ -0,0 +1,23 @@
name: Python Linting

on: [push, pull_request]

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
      - name: (Python) Use black to do linting
        run: |
          pip install black
          cd GrpcInterface
          black .
      - uses: peter-evans/create-pull-request@v3
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          commit-message: 'Python code linting changes detected by black'
          title: 'Fixes by black (Python)'
          branch: python-black-patches
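For contributors who want to reproduce the new lint check locally before the workflow runs, a minimal sketch (assuming black is installed from PyPI and the command is run from the repository root; the GrpcInterface directory matches the one linted by the workflow above):

import subprocess

# Run black in --check mode: it reports files it would reformat without
# changing them, and exits with a non-zero status if reformatting is needed.
subprocess.run(["python", "-m", "black", "--check", "GrpcInterface"], check=True)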
@@ -12,17 +12,18 @@
#
import os
import sys
sys.path.insert(0, os.path.abspath('../../'))
sys.path.insert(0, os.path.abspath("../../"))
# -- Project information -----------------------------------------------------
project = 'rips'
copyright = '2019, Ceetron Solutions AS'
author = 'Ceetron Solutions AS'
project = "rips"
copyright = "2019, Ceetron Solutions AS"
author = "Ceetron Solutions AS"
# The full version, including alpha/beta/rc tags
release = '2019.04.01'
release = "2019.04.01"
# -- General configuration ---------------------------------------------------
@@ -30,23 +31,19 @@ release = '2019.04.01'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx_markdown_builder'
]
extensions = ["sphinx.ext.autodoc", "sphinx.ext.napoleon", "sphinx_markdown_builder"]
master_doc = 'PythonRips'
master_doc = "PythonRips"
napoleon_google_docstring = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
templates_path = ["_templates"]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['build/*', 'rips.rst']
exclude_patterns = ["build/*", "rips.rst"]
# -- Options for HTML output -------------------------------------------------
@@ -54,14 +51,14 @@ exclude_patterns = ['build/*', 'rips.rst']
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
html_theme = "alabaster"
smartquotes = False
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_static_path = ["_static"]
# -- Extension configuration -------------------------------------------------
@@ -7,12 +7,12 @@
import rips
# Connect to ResInsight
resinsight = rips.Instance.find()
resinsight = rips.Instance.find()
if resinsight is not None:
# Get a list of all cases
cases = resinsight.project.cases()
print ("Got " + str(len(cases)) + " cases: ")
print("Got " + str(len(cases)) + " cases: ")
for case in cases:
print("Case id: " + str(case.id))
print("Case name: " + case.name)
@@ -26,11 +26,14 @@ if resinsight is not None:
print("Month: " + str(t.month))
if isinstance(case, rips.EclipseCase):
print ("Getting coarsening info for case: ", case.name, case.id)
print("Getting coarsening info for case: ", case.name, case.id)
coarsening_info = case.coarsening_info()
if coarsening_info:
print("Coarsening information:")
for c in coarsening_info:
print("[{}, {}, {}] - [{}, {}, {}]".format(c.min.x, c.min.y, c.min.z,
c.max.x, c.max.y, c.max.z))
print(
"[{}, {}, {}] - [{}, {}, {}]".format(
c.min.x, c.min.y, c.min.z, c.max.x, c.max.y, c.max.z
)
)
@@ -7,7 +7,7 @@
import rips
# Connect to ResInsight
resinsight = rips.Instance.find()
resinsight = rips.Instance.find()
if resinsight is not None:
# Get a list of all wells
cases = resinsight.project.cases()
@@ -24,4 +24,13 @@ if resinsight is not None:
for (tidx, timestep) in enumerate(timesteps):
status = sim_well.status(tidx)
cells = sim_well.cells(tidx)
print("timestep: " + str(tidx) + " type: " + status.well_type + " open: " + str(status.is_open) + " cells:" + str(len(cells)))
print(
"timestep: "
+ str(tidx)
+ " type: "
+ status.well_type
+ " open: "
+ str(status.is_open)
+ " cells:"
+ str(len(cells))
)
@@ -7,11 +7,11 @@
import rips
# Connect to ResInsight
resinsight = rips.Instance.find()
resinsight = rips.Instance.find()
if resinsight is not None:
# Get a list of all wells
wells = resinsight.project.well_paths()
print ("Got " + str(len(wells)) + " wells: ")
print("Got " + str(len(wells)) + " wells: ")
for well in wells:
print("Well name: " + well.name)
@@ -23,7 +23,5 @@ for wbsplot in wbsplots:
params = wbsplot.parameters()
params.user_poisson_ratio = 0.12345
params.update()
wbsplot.update()
wbsplot.update()
wbsplot.export_snapshot(export_folder=dirname)
@@ -1,21 +1,27 @@
import os
import rips
resinsight = rips.Instance.find()
resinsight = rips.Instance.find()
case_paths = []
case_paths.append("C:/Users/lindk/source/repos/ResInsight/TestModels/Case_with_10_timesteps/Real0/BRUGGE_0000.EGRID")
case_paths.append("C:/Users/lindk/source/repos/ResInsight/TestModels/Case_with_10_timesteps/Real10/BRUGGE_0010.EGRID")
case_paths.append(
"C:/Users/lindk/source/repos/ResInsight/TestModels/Case_with_10_timesteps/Real0/BRUGGE_0000.EGRID"
)
case_paths.append(
"C:/Users/lindk/source/repos/ResInsight/TestModels/Case_with_10_timesteps/Real10/BRUGGE_0010.EGRID"
)
for case_path in case_paths:
assert os.path.exists(case_path), "You need to set valid case paths for this script to work"
assert os.path.exists(
case_path
), "You need to set valid case paths for this script to work"
case_group = resinsight.project.create_grid_case_group(case_paths=case_paths)
case_group.print_object_info()
#stat_cases = caseGroup.statistics_cases()
#case_ids = []
#for stat_case in stat_cases:
# stat_cases = caseGroup.statistics_cases()
# case_ids = []
# for stat_case in stat_cases:
# stat_case.set_dynamic_properties_to_calculate(["SWAT"])
# case_ids.append(stat_case.id)
@@ -24,4 +30,3 @@ case_group.compute_statistics()
view = case_group.views()[0]
cell_result = view.cell_result()
cell_result.set_result_variable("PRESSURE_DEV")
@@ -6,7 +6,7 @@
import rips
# Connect to ResInsight
resinsight = rips.Instance.find()
resinsight = rips.Instance.find()
# Get the first case. This will fail if you haven't loaded any cases
case = resinsight.project.cases()[0]
@@ -20,7 +20,7 @@ print("Total number of reservoir cells: " + str(cell_counts.reservoir_cell_count
active_cell_infos = case.cell_info_for_active_cells()
# A simple check on the size of the cell info
assert(cell_counts.active_cell_count == len(active_cell_infos))
assert cell_counts.active_cell_count == len(active_cell_infos)
# Print information for the first active cell
print("First active cell: ")
@@ -3,15 +3,13 @@
######################################################################
import rips
resinsight = rips.Instance.find()
resinsight = rips.Instance.find()
view = resinsight.project.views()[0]
results = view.cell_result_data()
print ("Number of result values: ", len(results))
print("Number of result values: ", len(results))
newresults = []
for i in range(0, len(results)):
newresults.append(results[i] * -1.0)
view.set_cell_result_data(newresults)
@@ -26,23 +26,23 @@ view2 = view1.clone()
view1.set_time_step(time_step=2)
# Set cell result to SOIL
view1.apply_cell_result(result_type='DYNAMIC_NATIVE', result_variable='SOIL')
view1.apply_cell_result(result_type="DYNAMIC_NATIVE", result_variable="SOIL")
# Create a temporary directory which will disappear at the end of this script
# If you want to keep the files, provide a good path name instead of tmpdirname
with tempfile.TemporaryDirectory(prefix="rips") as tmpdirname:
print("Temporary folder: ", tmpdirname)
# Set export folder for snapshots and properties
resinsight.set_export_folder(export_type='SNAPSHOTS', path=tmpdirname)
resinsight.set_export_folder(export_type='PROPERTIES', path=tmpdirname)
resinsight.set_export_folder(export_type="SNAPSHOTS", path=tmpdirname)
resinsight.set_export_folder(export_type="PROPERTIES", path=tmpdirname)
# Export all snapshots
resinsight.project.export_snapshots()
assert(len(os.listdir(tmpdirname)) > 0)
assert len(os.listdir(tmpdirname)) > 0
# Export properties in the view
view1.export_property()
@@ -53,5 +53,4 @@ with tempfile.TemporaryDirectory(prefix="rips") as tmpdirname:
# Print contents of temporary folder
print(os.listdir(tmpdirname))
assert(os.path.exists(full_path))
assert os.path.exists(full_path)
@@ -19,8 +19,10 @@ print("Facies properties file path:", facies_properties_file_path)
# Create stim plan model template
fmt_collection = project.descendants(rips.StimPlanModelTemplateCollection)[0]
stim_plan_model_template = fmt_collection.new_stim_plan_model_template(elastic_properties_file_path=elastic_properties_file_path,
facies_properties_file_path=facies_properties_file_path)
stim_plan_model_template = fmt_collection.new_stim_plan_model_template(
elastic_properties_file_path=elastic_properties_file_path,
facies_properties_file_path=facies_properties_file_path,
)
stim_plan_model_template.overburden_formation = "Garn"
stim_plan_model_template.overburden_facies = "Shale"
stim_plan_model_template.underburden_formation = "Garn"
@@ -49,7 +51,9 @@ non_net_layers.update()
# Add some scaling factors
elastic_properties = stim_plan_model_template.elastic_properties()
elastic_properties.add_property_scaling(formation="Garn", facies="Calcite", property="YOUNGS_MODULUS", scale=1.44)
elastic_properties.add_property_scaling(
formation="Garn", facies="Calcite", property="YOUNGS_MODULUS", scale=1.44
)
well_name = "B-2 H"
@@ -73,20 +77,24 @@ export_folder = tempfile.gettempdir()
stim_plan_models = []
# Create and export a StimPlan model for each depth
measured_depths = [ 3200.0, 3400.0, 3600.0 ]
measured_depths = [3200.0, 3400.0, 3600.0]
for measured_depth in measured_depths:
# Create stim plan model at a give measured depth
stim_plan_model = stim_plan_model_collection.new_stim_plan_model(eclipse_case=case,
time_step=time_step,
well_path=well_path,
measured_depth=measured_depth,
stim_plan_model_template=stim_plan_model_template)
stim_plan_model = stim_plan_model_collection.new_stim_plan_model(
eclipse_case=case,
time_step=time_step,
well_path=well_path,
measured_depth=measured_depth,
stim_plan_model_template=stim_plan_model_template,
)
stim_plan_models.append(stim_plan_model)
# Make the well name safer to use as a directory path
well_name_part = well_name.replace(" ", "_")
directory_path = Path(export_folder) / '{}_{}'.format(well_name_part, int(measured_depth))
directory_path = Path(export_folder) / "{}_{}".format(
well_name_part, int(measured_depth)
)
# Create the folder
directory_path.mkdir(parents=True, exist_ok=True)
@@ -95,14 +103,17 @@ for measured_depth in measured_depths:
stim_plan_model.export_to_file(directory_path=directory_path.as_posix())
# Create a fracture mode plot
stim_plan_model_plot_collection = project.descendants(rips.StimPlanModelPlotCollection)[0]
stim_plan_model_plot = stim_plan_model_plot_collection.new_stim_plan_model_plot(stim_plan_model=stim_plan_model)
stim_plan_model_plot_collection = project.descendants(
rips.StimPlanModelPlotCollection
)[0]
stim_plan_model_plot = stim_plan_model_plot_collection.new_stim_plan_model_plot(
stim_plan_model=stim_plan_model
)
print("Exporting fracture model plot to: ", directory_path)
stim_plan_model_plot.export_snapshot(export_folder=directory_path.as_posix())
print("Setting measured depth and perforation length.")
stim_plan_models[0].measured_depth = 3300.0
stim_plan_models[0].perforation_length = 123.445
@@ -3,6 +3,7 @@ import grpc
# Load ResInsight Processing Server Client Library
import rips
# Connect to ResInsight instance
resInsight = rips.Instance.find()
@@ -24,7 +25,7 @@ params.user_ucs = 123
# Loop through all cases
for case in cases:
assert(isinstance(case, rips.GeoMechCase))
assert isinstance(case, rips.GeoMechCase)
min_res_depth, max_res_depth = case.reservoir_depth_range()
# Find a good output path
@@ -32,12 +33,18 @@ for case in cases:
folder_name = os.path.dirname(case_path)
# Import formation names
case.import_formation_names(formation_files=['D:/Projects/ResInsight-regression-test/ModelData/norne/Norne_ATW2013.lyr'])
case.import_formation_names(
formation_files=[
"D:/Projects/ResInsight-regression-test/ModelData/norne/Norne_ATW2013.lyr"
]
)
# create a folder to hold the snapshots
dirname = os.path.join(folder_name, 'snapshots')
dirname = os.path.join(folder_name, "snapshots")
print("Exporting to: " + dirname)
for well_path in well_paths[0:4]: # Loop through the first five well paths
for well_path in well_paths[0:4]:  # Loop through the first five well paths
# Create plot with parameters
wbsplot = case.create_well_bore_stability_plot(well_path=well_path.name, time_step=0, parameters=params)
wbsplot = case.create_well_bore_stability_plot(
well_path=well_path.name, time_step=0, parameters=params
)
@@ -1,5 +1,5 @@
###################################################################
# This example demonstrates the use of ResInsight exceptions
# This example demonstrates the use of ResInsight exceptions
# for proper error handling
###################################################################
@@ -7,7 +7,7 @@ import rips
import grpc
import tempfile
resinsight = rips.Instance.find()
resinsight = rips.Instance.find()
case = None
@@ -15,62 +15,69 @@ case = None
try:
case = resinsight.project.load_case("Nonsense")
except grpc.RpcError as e:
print("Expected Server Exception Received while loading case: ", e.code(), e.details())
print(
"Expected Server Exception Received while loading case: ", e.code(), e.details()
)
# Try loading well paths from a non-existing folder. We should get a grpc.RpcError exception from the server
try:
well_path_files = resinsight.project.import_well_paths(well_path_folder="NONSENSE/NONSENSE")
well_path_files = resinsight.project.import_well_paths(
well_path_folder="NONSENSE/NONSENSE"
)
except grpc.RpcError as e:
print("Expected Server Exception Received while loading wellpaths: ", e.code(), e.details())
print(
"Expected Server Exception Received while loading wellpaths: ",
e.code(),
e.details(),
)
# Try loading well paths from an existing but empty folder. We should get a warning.
try:
with tempfile.TemporaryDirectory() as tmpdirname:
well_path_files = resinsight.project.import_well_paths(well_path_folder=tmpdirname)
assert(len(well_path_files) == 0)
assert(resinsight.project.has_warnings())
well_path_files = resinsight.project.import_well_paths(
well_path_folder=tmpdirname
)
assert len(well_path_files) == 0
assert resinsight.project.has_warnings()
print("Should get warnings below")
for warning in resinsight.project.warnings():
print (warning)
print(warning)
except grpc.RpcError as e:
print("Unexpected Server Exception caught!!!", e)
case = resinsight.project.case(case_id=0)
if case is not None:
results = case.active_cell_property('STATIC_NATIVE', 'PORO', 0)
results = case.active_cell_property("STATIC_NATIVE", "PORO", 0)
active_cell_count = len(results)
# Send the results back to ResInsight inside try / except construct
try:
case.set_active_cell_property(results, 'GENERATED', 'POROAPPENDED', 0)
try:
case.set_active_cell_property(results, "GENERATED", "POROAPPENDED", 0)
print("Everything went well as expected")
except: # Match any exception, but it should not happen
except:  # Match any exception, but it should not happen
print("Ooops!")
# Add another value, so this is outside the bounds of the active cell result storage
results.append(1.0)
# This time we should get a grpc.RpcError exception, which is a server side error.
try:
case.set_active_cell_property(results, 'GENERATED', 'POROAPPENDED', 0)
try:
case.set_active_cell_property(results, "GENERATED", "POROAPPENDED", 0)
print("Everything went well??")
except grpc.RpcError as e:
print("Expected Server Exception Received: ", e)
except IndexError:
print ("Got index out of bounds error. This shouldn't happen here")
print("Got index out of bounds error. This shouldn't happen here")
# With a chunk size exactly matching the active cell count the server will not
# be able to see any error as it will successfully close the stream after receiving
# the correct number of values, even if the python client has more chunks to send
case.chunk_size = active_cell_count
try:
case.set_active_cell_property(results, 'GENERATED', 'POROAPPENDED', 0)
try:
case.set_active_cell_property(results, "GENERATED", "POROAPPENDED", 0)
print("Everything went well??")
except grpc.RpcError as e:
print("Got unexpected server exception", e, "This should not happen now")
except IndexError:
print ("Got expected index out of bounds error on client side")
print("Got expected index out of bounds error on client side")
@@ -1,7 +1,9 @@
# Import the tempfile module
import tempfile
# Load ResInsight Processing Server Client Library
import rips
# Connect to ResInsight instance
resInsight = rips.Instance.find()
@@ -13,8 +15,8 @@ export_folder = tempfile.mkdtemp()
print("Exporting to: " + export_folder)
for plot in plots:
plot.export_snapshot(export_folder=export_folder)
plot.export_snapshot(export_folder=export_folder, output_format='PDF')
if isinstance(plot, rips.WellLogPlot):
plot.export_data_as_las(export_folder=export_folder)
plot.export_data_as_ascii(export_folder=export_folder)
plot.export_snapshot(export_folder=export_folder)
plot.export_snapshot(export_folder=export_folder, output_format="PDF")
if isinstance(plot, rips.WellLogPlot):
plot.export_data_as_las(export_folder=export_folder)
plot.export_data_as_ascii(export_folder=export_folder)
@@ -12,33 +12,35 @@ cases = resinsight.project.cases()
# Set main window size
resinsight.set_main_window_size(width=800, height=500)
n = 5 # every n-th time_step for snapshot
property_list = ['SOIL', 'PRESSURE'] # list of parameter for snapshot
n = 5  # every n-th time_step for snapshot
property_list = ["SOIL", "PRESSURE"]  # list of parameter for snapshot
print ("Looping through cases")
print("Looping through cases")
for case in cases:
print("Case name: ", case.name)
print("Case id: ", case.id)
# Get grid path and its folder name
case_path = case.file_path
folder_name = os.path.dirname(case_path)
# create a folder to hold the snapshots
dirname = os.path.join(folder_name, 'snapshots')
dirname = os.path.join(folder_name, "snapshots")
if os.path.exists(dirname) is False:
os.mkdir(dirname)
print ("Exporting to folder: " + dirname)
resinsight.set_export_folder(export_type='SNAPSHOTS', path=dirname)
print("Exporting to folder: " + dirname)
resinsight.set_export_folder(export_type="SNAPSHOTS", path=dirname)
time_steps = case.time_steps()
print('Number of time_steps: ' + str(len(time_steps)))
print("Number of time_steps: " + str(len(time_steps)))
for view in case.views():
if view.is_eclipse_view():
for property in property_list:
view.apply_cell_result(result_type='DYNAMIC_NATIVE', result_variable=property)
view.apply_cell_result(
result_type="DYNAMIC_NATIVE", result_variable=property
)
for time_step in range(0, len(time_steps), 10):
view.set_time_step(time_step = time_step)
view.set_time_step(time_step=time_step)
view.export_snapshot()
@@ -4,7 +4,7 @@
import rips
resinsight = rips.Instance.find()
resinsight = rips.Instance.find()
cases = resinsight.project.cases()
print("Number of cases found: ", len(cases))
@@ -14,6 +14,3 @@ for case in cases:
print("Number of grids: ", len(grids))
for grid in grids:
print("Grid dimensions: ", grid.dimensions())
@@ -1,9 +1,12 @@
# Load ResInsight Processing Server Client Library
import rips
# Connect to ResInsight instance
resInsight = rips.Instance.find()
well_paths = resInsight.project.import_well_paths(well_path_folder='D:/Projects/ResInsight-regression-test/ModelData/norne/wellpaths')
well_paths = resInsight.project.import_well_paths(
well_path_folder="D:/Projects/ResInsight-regression-test/ModelData/norne/wellpaths"
)
if resInsight.project.has_warnings():
for warning in resInsight.project.warnings():
print(warning)
@@ -12,8 +15,12 @@ if resInsight.project.has_warnings():
for well_path in well_paths:
print("Imported from folder: " + well_path.name)
well_paths = resInsight.project.import_well_paths(well_path_files=['D:/Projects/ResInsight-regression-test/ModelData/Norne_WellPaths/E-3H.json',
'D:/Projects/ResInsight-regression-test/ModelData/Norne_WellPaths/C-1H.json'])
well_paths = resInsight.project.import_well_paths(
well_path_files=[
"D:/Projects/ResInsight-regression-test/ModelData/Norne_WellPaths/E-3H.json",
"D:/Projects/ResInsight-regression-test/ModelData/Norne_WellPaths/C-1H.json",
]
)
if resInsight.project.has_warnings():
for warning in resInsight.project.warnings():
print(warning)
@@ -23,10 +30,12 @@ for well_path in well_paths:
print("Imported from individual files: " + well_path.name)
well_path_names = resInsight.project.import_well_log_files(well_log_folder='D:/Projects/ResInsight-regression-test/ModelData/Norne_PLT_LAS')
well_path_names = resInsight.project.import_well_log_files(
well_log_folder="D:/Projects/ResInsight-regression-test/ModelData/Norne_PLT_LAS"
)
if resInsight.project.has_warnings():
for warning in resInsight.project.warnings():
print(warning)
for well_path_name in well_path_names:
print("Imported well log file for: " + well_path_name)
print("Imported well log file for: " + well_path_name)
@@ -17,22 +17,24 @@ def create_result(poro_chunks, permx_chunks):
# Return a generator object that behaves like a Python iterator
yield resultChunk
resinsight = rips.Instance.find()
resinsight = rips.Instance.find()
start = time.time()
case = resinsight.project.cases()[0]
# Get a generator for the poro results. The generator will provide a chunk each time it is iterated
poro_chunks = case.active_cell_property_async('STATIC_NATIVE', 'PORO', 0)
poro_chunks = case.active_cell_property_async("STATIC_NATIVE", "PORO", 0)
# Get a generator for the permx results. The generator will provide a chunk each time it is iterated
permx_chunks = case.active_cell_property_async('STATIC_NATIVE', 'PERMX', 0)
permx_chunks = case.active_cell_property_async("STATIC_NATIVE", "PERMX", 0)
# Send back the result with the result provided by a generator object.
# Iterating the result generator will cause the script to read from the poro and permx generators
# And return the result of each iteration
case.set_active_cell_property_async(create_result(poro_chunks, permx_chunks),
'GENERATED', 'POROPERMXAS', 0)
case.set_active_cell_property_async(
create_result(poro_chunks, permx_chunks), "GENERATED", "POROPERMXAS", 0
)
end = time.time()
print("Time elapsed: ", end - start)
print("Transferred all results back")
view = case.views()[0].apply_cell_result('GENERATED', 'POROPERMXAS')
view = case.views()[0].apply_cell_result("GENERATED", "POROPERMXAS")
@@ -7,14 +7,14 @@ import rips
import time
import grpc
resinsight = rips.Instance.find()
resinsight = rips.Instance.find()
start = time.time()
case = resinsight.project.cases()[0]
# Read poro result into list
poro_results = case.active_cell_property('STATIC_NATIVE', 'PORO', 0)
poro_results = case.active_cell_property("STATIC_NATIVE", "PORO", 0)
# Read permx result into list
permx_results = case.active_cell_property('STATIC_NATIVE', 'PERMX', 0)
permx_results = case.active_cell_property("STATIC_NATIVE", "PERMX", 0)
# Generate output result
results = []
@@ -23,7 +23,7 @@ for (poro, permx) in zip(poro_results, permx_results):
try:
# Send back output result
case.set_active_cell_property(results, 'GENERATED', 'POROPERMXSY', 0)
case.set_active_cell_property(results, "GENERATED", "POROPERMXSY", 0)
except grpc.RpcError as e:
print("Exception Received: ", e)
@@ -32,4 +32,4 @@ end = time.time()
print("Time elapsed: ", end - start)
print("Transferred all results back")
view = case.views()[0].apply_cell_result('GENERATED', 'POROPERMXSY')
view = case.views()[0].apply_cell_result("GENERATED", "POROPERMXSY")
@@ -3,9 +3,9 @@
#######################################
import rips
resinsight = rips.Instance.find()
resinsight = rips.Instance.find()
if resinsight is None:
print('ERROR: could not find ResInsight')
print("ERROR: could not find ResInsight")
else:
print('Successfully connected to ResInsight')
print("Successfully connected to ResInsight")
@@ -1,17 +1,21 @@
# Access to environment variables
import os
# Load ResInsight Processing Server Client Library
import rips
# Connect to ResInsight instance
resinsight = rips.Instance.launch()
# This requires the TestModels to be installed with ResInsight (RESINSIGHT_BUNDLE_TESTMODELS):
resinsight_exe_path = os.environ.get('RESINSIGHT_EXECUTABLE')
resinsight_exe_path = os.environ.get("RESINSIGHT_EXECUTABLE")
# Get the TestModels path from the executable path
resinsight_install_path = os.path.dirname(resinsight_exe_path)
test_models_path = os.path.join(resinsight_install_path, 'TestModels')
path_name = os.path.join(test_models_path, 'TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID')
test_models_path = os.path.join(resinsight_install_path, "TestModels")
path_name = os.path.join(
test_models_path, "TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
)
# Load an example case. Needs to be replaced with a valid path!
case = resinsight.project.load_case(path_name)
@@ -23,11 +27,11 @@ view1 = case.views()[0]
view1.set_time_step(time_step=2)
# Set cell result to SOIL
view1.apply_cell_result(result_type='DYNAMIC_NATIVE', result_variable='SOIL')
view1.apply_cell_result(result_type="DYNAMIC_NATIVE", result_variable="SOIL")
# Set export folder for snapshots and properties
resinsight.set_export_folder(export_type='SNAPSHOTS', path="e:/temp")
resinsight.set_export_folder(export_type='PROPERTIES', path="e:/temp")
resinsight.set_export_folder(export_type="SNAPSHOTS", path="e:/temp")
resinsight.set_export_folder(export_type="PROPERTIES", path="e:/temp")
# Export all snapshots
resinsight.project.export_snapshots()
@@ -1,11 +1,14 @@
# Load ResInsight Processing Server Client Library
import rips
# Launch ResInsight with last project file and a Window size of 600x1000 pixels
resinsight = rips.Instance.launch(command_line_parameters=['--last', '--size', 600, 1000])
resinsight = rips.Instance.launch(
command_line_parameters=["--last", "--size", 600, 1000]
)
# Get a list of all cases
cases = resinsight.project.cases()
print ("Got " + str(len(cases)) + " cases: ")
print("Got " + str(len(cases)) + " cases: ")
for case in cases:
print("Case name: " + case.name)
print("Case grid path: " + case.file_path)
@@ -1,17 +1,21 @@
# Access to environment variables and path tools
import os
# Load ResInsight Processing Server Client Library
import rips
# Connect to ResInsight instance
resinsight = rips.Instance.find()
# This requires the TestModels to be installed with ResInsight (RESINSIGHT_BUNDLE_TESTMODELS):
resinsight_exe_path = os.environ.get('RESINSIGHT_EXECUTABLE')
resinsight_exe_path = os.environ.get("RESINSIGHT_EXECUTABLE")
# Get the TestModels path from the executable path
resinsight_install_path = os.path.dirname(resinsight_exe_path)
test_models_path = os.path.join(resinsight_install_path, 'TestModels')
path_name = os.path.join(test_models_path, 'TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID')
test_models_path = os.path.join(resinsight_install_path, "TestModels")
path_name = os.path.join(
test_models_path, "TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
)
case = resinsight.project.load_case(path_name)
# Print out lots of information from the case object
@@ -1,5 +1,6 @@
# Load ResInsight Processing Server Client Library
import rips
# Connect to ResInsight instance
resinsight = rips.Instance.find()
# Example code
@@ -13,4 +14,4 @@ for wellpath in modeled_well_paths:
reference_point = geometry.reference_point
reference_point[0] += 100
geometry.update()
geometry.print_object_info()
geometry.print_object_info()
@@ -1,5 +1,6 @@
# Load ResInsight Processing Server Client Library
import rips
# Connect to ResInsight instance
resinsight = rips.Instance.find()
# Example code
@@ -7,5 +8,7 @@ project = resinsight.project
summary_cases = project.descendants(rips.SummaryCase)
summary_plot_collection = project.descendants(rips.SummaryPlotCollection)[0]
if len(summary_cases) > 0:
summary_plot = summary_plot_collection.new_summary_plot(summary_cases=summary_cases, address="FOP*")
if len(summary_cases) > 0:
summary_plot = summary_plot_collection.new_summary_plot(
summary_cases=summary_cases, address="FOP*"
)
@@ -1,17 +1,19 @@
# Access to environment variables and path tools
import os
# Load ResInsight Processing Server Client Library
import rips
# Connect to ResInsight instance
resinsight = rips.Instance.find()
# This requires the TestModels to be installed with ResInsight (RESINSIGHT_BUNDLE_TESTMODELS):
resinsight_exe_path = os.environ.get('RESINSIGHT_EXECUTABLE')
resinsight_exe_path = os.environ.get("RESINSIGHT_EXECUTABLE")
# Get the TestModels path from the executable path
resinsight_install_path = os.path.dirname(resinsight_exe_path)
test_models_path = os.path.join(resinsight_install_path, 'TestModels')
path_name = os.path.join(test_models_path, 'TEST10K_FLT_LGR_NNC/10KWithWellLog.rsp')
test_models_path = os.path.join(resinsight_install_path, "TestModels")
path_name = os.path.join(test_models_path, "TEST10K_FLT_LGR_NNC/10KWithWellLog.rsp")
# Open a project
resinsight.project.open(path_name)
@@ -1,9 +1,12 @@
# Load ResInsight Processing Server Client Library
import rips
# Connect to ResInsight instance
resinsight = rips.Instance.find()
# Example code
print("ResInsight version: " + resinsight.version_string())
case = resinsight.project.case(case_id=0)
case.replace(new_grid_file='C:/Users/lindkvis/Projects/ResInsight/TestModels/Case_with_10_timesteps/Real0/BRUGGE_0000.EGRID')
case.replace(
new_grid_file="C:/Users/lindkvis/Projects/ResInsight/TestModels/Case_with_10_timesteps/Real0/BRUGGE_0000.EGRID"
)
@@ -7,14 +7,12 @@
import rips
resinsight = rips.Instance.find()
resinsight = rips.Instance.find()
if resinsight is not None:
cases = resinsight.project.selected_cases()
print ("Got " + str(len(cases)) + " cases: ")
print("Got " + str(len(cases)) + " cases: ")
for case in cases:
print(case.name)
for property in case.available_properties('DYNAMIC_NATIVE'):
for property in case.available_properties("DYNAMIC_NATIVE"):
print(property)
@@ -5,11 +5,11 @@
import rips
resinsight = rips.Instance.find()
resinsight = rips.Instance.find()
if resinsight is not None:
cases = resinsight.project.cases()
print ("Got " + str(len(cases)) + " cases: ")
print("Got " + str(len(cases)) + " cases: ")
for case in cases:
print(case.name)
cells = case.selected_cells()
@@ -18,19 +18,31 @@ if resinsight is not None:
time_step_info = case.time_steps()
for (idx, cell) in enumerate(cells):
print("Selected cell: [{}, {}, {}] grid: {}".format(cell.ijk.i+1, cell.ijk.j+1, cell.ijk.k+1, cell.grid_index))
print(
"Selected cell: [{}, {}, {}] grid: {}".format(
cell.ijk.i + 1, cell.ijk.j + 1, cell.ijk.k + 1, cell.grid_index
)
)
# Get the grid and dimensions
grid = case.grids()[cell.grid_index]
dimensions = grid.dimensions()
# Map ijk to cell index
cell_index = dimensions.i * dimensions.j * cell.ijk.k + dimensions.i * cell.ijk.j + cell.ijk.i
cell_index = (
dimensions.i * dimensions.j * cell.ijk.k
+ dimensions.i * cell.ijk.j
+ cell.ijk.i
)
# Print the cell center
cell_centers = grid.cell_centers()
cell_center = cell_centers[cell_index]
print("Cell center: [{}, {}, {}]".format(cell_center.x, cell_center.y, cell_center.z))
print(
"Cell center: [{}, {}, {}]".format(
cell_center.x, cell_center.y, cell_center.z
)
)
# Print the cell corners
cell_corners = grid.cell_corners()[cell_index]
@@ -46,5 +58,11 @@ if resinsight is not None:
for (tidx, timestep) in enumerate(time_step_info):
# Read the full SOIL result for time step
soil_results = case.selected_cell_property('DYNAMIC_NATIVE', 'SOIL', tidx)
print("SOIL: {} ({}.{}.{})".format(soil_results[idx], timestep.year, timestep.month, timestep.day))
soil_results = case.selected_cell_property(
"DYNAMIC_NATIVE", "SOIL", tidx
)
print(
"SOIL: {} ({}.{}.{})".format(
soil_results[idx], timestep.year, timestep.month, timestep.day
)
)
@@ -3,7 +3,7 @@
######################################################################
import rips
resinsight = rips.Instance.find()
resinsight = rips.Instance.find()
view = resinsight.project.views()[0]
view.apply_cell_result(result_type='STATIC_NATIVE', result_variable='DX')
view.apply_cell_result(result_type="STATIC_NATIVE", result_variable="DX")
@@ -4,15 +4,18 @@
# Load ResInsight Processing Server Client Library
import rips
# Connect to ResInsight instance
resinsight = rips.Instance.find()
view = resinsight.project.view(view_id=1)
#view.apply_flow_diagnostics_cell_result(result_variable='Fraction',
# view.apply_flow_diagnostics_cell_result(result_variable='Fraction',
# selection_mode='FLOW_TR_INJ_AND_PROD')
# Example of setting individual wells. Commented out because well names are case specific.
view.apply_flow_diagnostics_cell_result(result_variable='Fraction',
selection_mode='FLOW_TR_BY_SELECTION',
injectors = ['C-1H', 'C-2H', 'F-2H'],
producers = ['B-1AH', 'B-3H', 'D-1H'])
view.apply_flow_diagnostics_cell_result(
result_variable="Fraction",
selection_mode="FLOW_TR_BY_SELECTION",
injectors=["C-1H", "C-2H", "F-2H"],
producers=["B-1AH", "B-3H", "D-1H"],
)
@@ -3,15 +3,14 @@
######################################################################
import rips
resinsight = rips.Instance.find()
resinsight = rips.Instance.find()
case = resinsight.project.case(case_id=0)
total_cell_count = case.cell_count().reservoir_cell_count
values = []
for i in range(0, total_cell_count):
values.append(i % 2 * 0.75);
values.append(i % 2 * 0.75)
print("Applying values to full grid")
case.set_grid_property(values, 'DYNAMIC_NATIVE', 'SOIL', 0)
case.set_grid_property(values, "DYNAMIC_NATIVE", "SOIL", 0)
@@ -6,32 +6,32 @@ import rips
import itertools
import time
resinsight = rips.Instance.find()
resinsight = rips.Instance.find()
start = time.time()
start = time.time()
# Get the case with case id 0
case = resinsight.project.case(case_id=0)
case = resinsight.project.case(case_id=0)
# Get a list of all time steps
timeSteps = case.time_steps()
timeSteps = case.time_steps()
averages = []
for i in range(0, len(timeSteps)):
# Get the results from time step i asynchronously
# It actually returns a generator object almost immediately
result_chunks = case.active_cell_property_async('DYNAMIC_NATIVE', 'SOIL', i)
mysum = 0.0
count = 0
result_chunks = case.active_cell_property_async("DYNAMIC_NATIVE", "SOIL", i)
mysum = 0.0
count = 0
# Loop through and append the average. each time we loop resultChunks
# We will trigger a read of the input data, meaning the script will start
# Calculating averages before the whole resultValue for this time step has been received
for chunk in result_chunks:
mysum += sum(chunk.values)
count += len(chunk.values)
for chunk in result_chunks:
mysum += sum(chunk.values)
count += len(chunk.values)
averages.append(mysum/count)
averages.append(mysum / count)
end = time.time()
print("Time elapsed: ", end - start)
print(averages)
print(averages)
@@ -5,23 +5,23 @@ import rips
import itertools
import time
resinsight = rips.Instance.find()
resinsight = rips.Instance.find()
start = time.time()
start = time.time()
# Get the case with case id 0
case = resinsight.project.case(case_id=0)
case = resinsight.project.case(case_id=0)
# Get a list of all time steps
time_steps = case.time_steps()
time_steps = case.time_steps()
averages = []
for i in range(0, len(time_steps)):
# Get a list of all the results for time step i
results = case.active_cell_property('DYNAMIC_NATIVE', 'SOIL', i)
mysum = sum(results)
averages.append(mysum/len(results))
results = case.active_cell_property("DYNAMIC_NATIVE", "SOIL", i)
mysum = sum(results)
averages.append(mysum / len(results))
end = time.time()
print("Time elapsed: ", end - start)
print(averages)
print(averages)
@@ -16,13 +16,14 @@ def create_result(soil_chunks, porv_chunks):
# Return a Python generator
yield resultChunk
resinsight = rips.Instance.find()
start = time.time()
case = resinsight.project.cases()[0]
resinsight = rips.Instance.find()
start = time.time()
case = resinsight.project.cases()[0]
timeStepInfo = case.time_steps()
# Get a generator for the porv results. The generator will provide a chunk each time it is iterated
porv_chunks = case.active_cell_property_async('STATIC_NATIVE', 'PORV', 0)
porv_chunks = case.active_cell_property_async("STATIC_NATIVE", "PORV", 0)
# Read the static result into an array, so we don't have to transfer it for each iteration
# Note we use the async method even if we synchronise here, because we need the values chunked
@@ -31,17 +32,19 @@ porv_array = []
for porv_chunk in porv_chunks:
porv_array.append(porv_chunk)
for i in range (0, len(timeStepInfo)):
for i in range(0, len(timeStepInfo)):
# Get a generator object for the SOIL property for time step i
soil_chunks = case.active_cell_property_async('DYNAMIC_NATIVE', 'SOIL', i)
soil_chunks = case.active_cell_property_async("DYNAMIC_NATIVE", "SOIL", i)
# Create the generator object for the SOIL * PORV derived result
result_generator = create_result(soil_chunks, iter(porv_array))
# Send back the result asynchronously with a generator object
case.set_active_cell_property_async(result_generator, 'GENERATED', 'SOILPORVAsync', i)
case.set_active_cell_property_async(
result_generator, "GENERATED", "SOILPORVAsync", i
)
end = time.time()
print("Time elapsed: ", end - start)
print("Transferred all results back")
view = case.views()[0].apply_cell_result('GENERATED', 'SOILPORVAsync')
view = case.views()[0].apply_cell_result("GENERATED", "SOILPORVAsync")
@@ -7,27 +7,27 @@ import time
resinsight = rips.Instance.find()
start = time.time()
case = resinsight.project.cases()[0]
case = resinsight.project.cases()[0]
# Read the full porv result
porv_results = case.active_cell_property('STATIC_NATIVE', 'PORV', 0)
porv_results = case.active_cell_property("STATIC_NATIVE", "PORV", 0)
time_step_info = case.time_steps()
for i in range (0, len(time_step_info)):
for i in range(0, len(time_step_info)):
# Read the full SOIl result for time step i
soil_results = case.active_cell_property('DYNAMIC_NATIVE', 'SOIL', i)
soil_results = case.active_cell_property("DYNAMIC_NATIVE", "SOIL", i)
# Generate the result by looping through both lists in order
results = []
for (soil, porv) in zip(soil_results, porv_results):
results.append(soil * porv)
# Send back result
case.set_active_cell_property(results, 'GENERATED', 'SOILPORVSync', i)
case.set_active_cell_property(results, "GENERATED", "SOILPORVSync", i)
end = time.time()
print("Time elapsed: ", end - start)
print("Transferred all results back")
view = case.views()[0].apply_cell_result('GENERATED', 'SOILPORVSync')
view = case.views()[0].apply_cell_result("GENERATED", "SOILPORVSync")
@@ -1,5 +1,6 @@
# Load ResInsight Processing Server Client Library
import rips
# Connect to ResInsight instance
resinsight = rips.Instance.find()
# Example code
@@ -1,5 +1,6 @@
# Load ResInsight Processing Server Client Library
import rips
# Connect to ResInsight instance
resinsight = rips.Instance.find()
print("ResInsight version: " + resinsight.version_string())
@@ -4,6 +4,7 @@
# Also clones the first view
#############################################################
import rips
# Connect to ResInsight instance
resinsight = rips.Instance.find()
@@ -2,7 +2,8 @@ name = "rips"
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'generated'))
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "generated"))
from .resinsight_classes import *
@@ -24,4 +25,4 @@ for key in class_dict():
__all__.append("Grid")
__all__.append("Instance")
__all__.sort()
__all__.sort()
File diff suppressed because it is too large
@@ -9,8 +9,14 @@ from .resinsight_classes import EclipseContourMap, GeoMechContourMap
@add_method(EclipseContourMap)
def export_to_text(self, export_file_name='', export_local_coordinates=False, undefined_value_label="NaN", exclude_undefined_values=False):
""" Export snapshot for the current view
def export_to_text(
self,
export_file_name="",
export_local_coordinates=False,
undefined_value_label="NaN",
exclude_undefined_values=False,
):
"""Export snapshot for the current view
Arguments:
export_file_name(str): The file location to store results in.
@@ -24,12 +30,20 @@ def export_to_text(self, export_file_name='', export_local_coordinates=False, un
exportLocalCoordinates=export_local_coordinates,
undefinedValueLabel=undefined_value_label,
excludeUndefinedValues=exclude_undefined_values,
viewId=self.id))
viewId=self.id,
)
)
@add_method(GeoMechContourMap)
def export_to_text(self, export_file_name='', export_local_coordinates=False, undefined_value_label="NaN", exclude_undefined_values=False):
""" Export snapshot for the current view
def export_to_text(
self,
export_file_name="",
export_local_coordinates=False,
undefined_value_label="NaN",
exclude_undefined_values=False,
):
"""Export snapshot for the current view
Arguments:
export_file_name(str): The file location to store results in.
@@ -43,4 +57,6 @@ def export_to_text(self, export_file_name='', export_local_coordinates=False, un
exportLocalCoordinates=export_local_coordinates,
undefinedValueLabel=undefined_value_label,
excludeUndefinedValues=exclude_undefined_values,
viewId=self.id))
viewId=self.id,
)
)
@@ -31,8 +31,8 @@ class Grid:
"""
case_request = Case_pb2.CaseRequest(id=self.case.id)
return self.__stub.GetDimensions(
Grid_pb2.GridRequest(case_request=case_request,
grid_index=self.index)).dimensions
Grid_pb2.GridRequest(case_request=case_request, grid_index=self.index)
).dimensions
def cell_centers_async(self):
"""The cells center for all cells in given grid async.
@@ -42,8 +42,8 @@ class Grid:
"""
case_request = Case_pb2.CaseRequest(id=self.case.id)
chunks = self.__stub.GetCellCenters(
Grid_pb2.GridRequest(case_request=case_request,
grid_index=self.index))
Grid_pb2.GridRequest(case_request=case_request, grid_index=self.index)
)
for chunk in chunks:
yield chunk
@@ -68,8 +68,8 @@ class Grid:
"""
case_request = Case_pb2.CaseRequest(id=self.case.id)
chunks = self.__stub.GetCellCorners(
Grid_pb2.GridRequest(case_request=case_request,
grid_index=self.index))
Grid_pb2.GridRequest(case_request=case_request, grid_index=self.index)
)
for chunk in chunks:
yield chunk
@@ -19,16 +19,17 @@ def create_statistics_case(self):
"""
command_reply = self._execute_command(
createStatisticsCase=Commands_pb2.CreateStatisticsCaseRequest(
caseGroupId=self.group_id))
return Case(self.channel,
command_reply.createStatisticsCaseResult.caseId)
caseGroupId=self.group_id
)
)
return Case(self.channel, command_reply.createStatisticsCaseResult.caseId)
@add_method(GridCaseGroup)
def statistics_cases(self):
"""Get a list of all statistics cases in the Grid Case Group
Returns:
Returns:
List of :class:`rips.generated.generated_classes.EclipseCase`
"""
@@ -40,7 +41,7 @@ def statistics_cases(self):
def views(self):
"""Get a list of views belonging to a grid case group
Returns:
Returns:
List of :class:`rips.generated.generated_classes.EclipseView`
"""
@@ -70,7 +71,7 @@ def view(self, view_id):
@add_method(GridCaseGroup)
def compute_statistics(self, case_ids=None):
""" Compute statistics for the given case ids
"""Compute statistics for the given case ids
Arguments:
case_ids(list of integers): List of case ids. If this is None all cases in group are included
@@ -80,4 +81,6 @@ def compute_statistics(self, case_ids=None):
case_ids = []
return self._execute_command(
computeCaseGroupStatistics=Commands_pb2.ComputeCaseGroupStatRequest(
caseIds=case_ids, caseGroupId=self.group_id))
caseIds=case_ids, caseGroupId=self.group_id
)
)
@ -35,20 +35,19 @@ class Instance:
|
||||
project (Project): Current project in ResInsight.
|
||||
Set when creating an instance and updated when opening/closing projects.
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
def __is_port_in_use(port):
|
||||
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as my_socket:
|
||||
my_socket.settimeout(0.2)
|
||||
return my_socket.connect_ex(('localhost', port)) == 0
|
||||
return my_socket.connect_ex(("localhost", port)) == 0
|
||||
|
||||
@staticmethod
|
||||
def __is_valid_port(port):
|
||||
location = "localhost:" + str(port)
|
||||
channel = grpc.insecure_channel(location,
|
||||
options=[
|
||||
('grpc.enable_http_proxy',
|
||||
False)
|
||||
])
|
||||
channel = grpc.insecure_channel(
|
||||
location, options=[("grpc.enable_http_proxy", False)]
|
||||
)
|
||||
app = App_pb2_grpc.AppStub(channel)
|
||||
try:
|
||||
app.GetVersion(Empty(), timeout=1)
|
||||
@ -57,11 +56,13 @@ class Instance:
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def launch(resinsight_executable='',
|
||||
console=False,
|
||||
launch_port=-1,
|
||||
command_line_parameters=None):
|
||||
""" Launch a new Instance of ResInsight. This requires the environment variable
|
||||
def launch(
|
||||
resinsight_executable="",
|
||||
console=False,
|
||||
launch_port=-1,
|
||||
command_line_parameters=None,
|
||||
):
|
||||
"""Launch a new Instance of ResInsight. This requires the environment variable
|
||||
RESINSIGHT_EXECUTABLE to be set or the parameter resinsight_executable to be provided.
|
||||
The RESINSIGHT_GRPC_PORT environment variable can be set to an alternative port number.
|
||||
|
||||
@ -78,18 +79,19 @@ class Instance:
|
||||
"""
|
||||
|
||||
port = 50051
|
||||
port_env = os.environ.get('RESINSIGHT_GRPC_PORT')
|
||||
port_env = os.environ.get("RESINSIGHT_GRPC_PORT")
|
||||
if port_env:
|
||||
port = int(port_env)
|
||||
if launch_port != -1:
|
||||
port = launch_port
|
||||
|
||||
if not resinsight_executable:
|
||||
resinsight_executable = os.environ.get('RESINSIGHT_EXECUTABLE')
|
||||
resinsight_executable = os.environ.get("RESINSIGHT_EXECUTABLE")
|
||||
if not resinsight_executable:
|
||||
print(
|
||||
'ERROR: Could not launch ResInsight because the environment variable'
|
||||
' RESINSIGHT_EXECUTABLE is not set')
|
||||
"ERROR: Could not launch ResInsight because the environment variable"
|
||||
" RESINSIGHT_EXECUTABLE is not set"
|
||||
)
|
||||
return None
|
||||
|
||||
print("Trying port " + str(port))
|
||||
@ -97,16 +99,15 @@ class Instance:
|
||||
port += 1
|
||||
print("Trying port " + str(port))
|
||||
|
||||
print('Port ' + str(port))
|
||||
print('Trying to launch', resinsight_executable)
|
||||
print("Port " + str(port))
|
||||
print("Trying to launch", resinsight_executable)
|
||||
|
||||
if command_line_parameters is None:
|
||||
command_line_parameters = []
|
||||
elif isinstance(command_line_parameters, str):
|
||||
command_line_parameters = [str]
|
||||
|
||||
parameters = ["ResInsight", "--server",
|
||||
str(port)] + command_line_parameters
|
||||
parameters = ["ResInsight", "--server", str(port)] + command_line_parameters
|
||||
if console:
|
||||
print("Launching as console app")
|
||||
parameters.append("--console")
|
||||
@ -123,7 +124,7 @@ class Instance:

    @staticmethod
    def find(start_port=50051, end_port=50071):
        """ Search for an existing Instance of ResInsight by testing ports.
        """Search for an existing Instance of ResInsight by testing ports.

        By default we search from port 50051 to 50071 or if the environment
        variable RESINSIGHT_GRPC_PORT is set we search
@ -133,7 +134,7 @@ class Instance:
            start_port (int): start searching from this port
            end_port (int): search up to but not including this port
        """
        port_env = os.environ.get('RESINSIGHT_GRPC_PORT')
        port_env = os.environ.get("RESINSIGHT_GRPC_PORT")
        if port_env:
            print("Got port " + port_env + " from environment")
            start_port = int(port_env)
@ -141,12 +142,17 @@ class Instance:

        for try_port in range(start_port, end_port):
            print("Trying port " + str(try_port))
            if Instance.__is_port_in_use(try_port) and Instance.__is_valid_port(try_port):
            if Instance.__is_port_in_use(try_port) and Instance.__is_valid_port(
                try_port
            ):
                return Instance(port=try_port)

        print(
            'Error: Could not find any ResInsight instances responding between ports '
            + str(start_port) + ' and ' + str(end_port))
            "Error: Could not find any ResInsight instances responding between ports "
            + str(start_port)
            + " and "
            + str(end_port)
        )
        return None
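Taken together, launch() and find() are the usual entry points for a rips session. A minimal sketch of how they might be used follows; the console flag, port range, and control flow are illustrative only and not part of this commit.

# Minimal sketch, assuming RESINSIGHT_EXECUTABLE points at a ResInsight binary;
# otherwise find() can attach to an already running "ResInsight --server" process.
import rips

resinsight = rips.Instance.launch(console=True)
if resinsight is None:
    resinsight = rips.Instance.find(start_port=50051, end_port=50071)
if resinsight is not None:
    print("Connected to ResInsight", resinsight.version_string())
    resinsight.exit()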
||||
|
||||
def __execute_command(self, **command_params):
|
||||
@ -155,15 +161,17 @@ class Instance:
|
||||
def __check_version(self):
|
||||
try:
|
||||
major_version_ok = self.major_version() == int(
|
||||
RiaVersionInfo.RESINSIGHT_MAJOR_VERSION)
|
||||
RiaVersionInfo.RESINSIGHT_MAJOR_VERSION
|
||||
)
|
||||
minor_version_ok = self.minor_version() == int(
|
||||
RiaVersionInfo.RESINSIGHT_MINOR_VERSION)
|
||||
RiaVersionInfo.RESINSIGHT_MINOR_VERSION
|
||||
)
|
||||
return True, major_version_ok and minor_version_ok
|
||||
except grpc.RpcError:
|
||||
return False, False
|
||||
|
||||
def __init__(self, port=50051, launched=False):
|
||||
""" Attempts to connect to ResInsight at aa specific port on localhost
|
||||
"""Attempts to connect to ResInsight at aa specific port on localhost
|
||||
|
||||
Args:
|
||||
port(int): port number
|
||||
@ -171,11 +179,9 @@ class Instance:
|
||||
logging.basicConfig()
|
||||
location = "localhost:" + str(port)
|
||||
|
||||
self.channel = grpc.insecure_channel(location,
|
||||
options=[
|
||||
('grpc.enable_http_proxy',
|
||||
False)
|
||||
])
|
||||
self.channel = grpc.insecure_channel(
|
||||
location, options=[("grpc.enable_http_proxy", False)]
|
||||
)
|
||||
self.launched = launched
|
||||
self.commands = Commands_pb2_grpc.CommandsStub(self.channel)
|
||||
|
||||
@ -187,7 +193,9 @@ class Instance:
|
||||
# Intercept UNAVAILABLE errors and retry on failures
|
||||
interceptors = (
|
||||
RetryOnRpcErrorClientInterceptor(
|
||||
retry_policy=ExponentialBackoffRetryPolicy(min_backoff=100, max_backoff=5000, max_num_retries=20),
|
||||
retry_policy=ExponentialBackoffRetryPolicy(
|
||||
min_backoff=100, max_backoff=5000, max_num_retries=20
|
||||
),
|
||||
status_for_retry=(grpc.StatusCode.UNAVAILABLE,),
|
||||
),
|
||||
)
|
||||
@ -219,14 +227,21 @@ class Instance:
|
||||
|
||||
if not connection_ok:
|
||||
if self.launched:
|
||||
raise Exception('Error: Could not connect to resinsight at ',
|
||||
location,
|
||||
'.', retry_policy.time_out_message())
|
||||
raise Exception('Error: Could not connect to resinsight at ', location)
|
||||
raise Exception(
|
||||
"Error: Could not connect to resinsight at ",
|
||||
location,
|
||||
".",
|
||||
retry_policy.time_out_message(),
|
||||
)
|
||||
raise Exception("Error: Could not connect to resinsight at ", location)
|
||||
if not version_ok:
|
||||
raise Exception('Error: Wrong Version of ResInsight at ', location,
|
||||
self.version_string(), " ",
|
||||
self.client_version_string())
|
||||
raise Exception(
|
||||
"Error: Wrong Version of ResInsight at ",
|
||||
location,
|
||||
self.version_string(),
|
||||
" ",
|
||||
self.client_version_string(),
|
||||
)
|
||||
|
||||
def __version_message(self):
|
||||
return self.app.GetVersion(Empty())
|
||||
@ -238,7 +253,9 @@ class Instance:
|
||||
path (str): path to directory
|
||||
|
||||
"""
|
||||
return self.__execute_command(setStartDir=Commands_pb2.FilePathRequest(path=path))
|
||||
return self.__execute_command(
|
||||
setStartDir=Commands_pb2.FilePathRequest(path=path)
|
||||
)
|
||||
|
||||
def set_export_folder(self, export_type, path, create_folder=False):
|
||||
"""
|
||||
@ -256,14 +273,17 @@ class Instance:
|
||||
|
||||
Option | Description
|
||||
--------------- | ------------
|
||||
"COMPLETIONS" |
|
||||
"COMPLETIONS" |
|
||||
"SNAPSHOTS" |
|
||||
"PROPERTIES" |
|
||||
"STATISTICS" |
|
||||
"PROPERTIES" |
|
||||
"STATISTICS" |
|
||||
|
||||
"""
|
||||
return self.__execute_command(setExportFolder=Commands_pb2.SetExportFolderRequest(
|
||||
type=export_type, path=path, createFolder=create_folder))
|
||||
return self.__execute_command(
|
||||
setExportFolder=Commands_pb2.SetExportFolderRequest(
|
||||
type=export_type, path=path, createFolder=create_folder
|
||||
)
|
||||
)
|
||||
|
||||
def set_main_window_size(self, width, height):
|
||||
"""
|
||||
@ -277,8 +297,11 @@ class Instance:
|
||||
height | Height in pixels | Integer
|
||||
|
||||
"""
|
||||
return self.__execute_command(setMainWindowSize=Commands_pb2.SetWindowSizeParams(
|
||||
width=width, height=height))
|
||||
return self.__execute_command(
|
||||
setMainWindowSize=Commands_pb2.SetWindowSizeParams(
|
||||
width=width, height=height
|
||||
)
|
||||
)
|
||||
|
||||
def set_plot_window_size(self, width, height):
|
||||
"""
|
||||
@ -291,8 +314,11 @@ class Instance:
|
||||
width | Width in pixels | Integer
|
||||
height | Height in pixels | Integer
|
||||
"""
|
||||
return self.__execute_command(setPlotWindowSize=Commands_pb2.SetWindowSizeParams(
|
||||
width=width, height=height))
|
||||
return self.__execute_command(
|
||||
setPlotWindowSize=Commands_pb2.SetWindowSizeParams(
|
||||
width=width, height=height
|
||||
)
|
||||
)
|
||||
|
||||
def major_version(self):
|
||||
"""Get an integer with the major version number"""
|
||||
@ -308,8 +334,13 @@ class Instance:
|
||||
|
||||
def version_string(self):
|
||||
"""Get a full version string, i.e. 2019.04.01"""
|
||||
return str(self.major_version()) + "." + str(
|
||||
self.minor_version()) + "." + str(self.patch_version())
|
||||
return (
|
||||
str(self.major_version())
|
||||
+ "."
|
||||
+ str(self.minor_version())
|
||||
+ "."
|
||||
+ str(self.patch_version())
|
||||
)
|
||||
|
||||
def client_version_string(self):
|
||||
"""Get a full version string, i.e. 2019.04.01"""
|
||||
@ -326,11 +357,11 @@ class Instance:
|
||||
def is_console(self):
|
||||
"""Returns true if the connected ResInsight instance is a console app"""
|
||||
return self.app.GetRuntimeInfo(
|
||||
Empty()).app_type == App_pb2.ApplicationTypeEnum.Value(
|
||||
'CONSOLE_APPLICATION')
|
||||
Empty()
|
||||
).app_type == App_pb2.ApplicationTypeEnum.Value("CONSOLE_APPLICATION")
|
||||
|
||||
def is_gui(self):
|
||||
"""Returns true if the connected ResInsight instance is a GUI app"""
|
||||
return self.app.GetRuntimeInfo(
|
||||
Empty()).app_type == App_pb2.ApplicationTypeEnum.Value(
|
||||
'GUI_APPLICATION')
|
||||
Empty()
|
||||
).app_type == App_pb2.ApplicationTypeEnum.Value("GUI_APPLICATION")
|
||||
|
@ -16,19 +16,21 @@ import PdmObject_pb2_grpc
import Commands_pb2
import Commands_pb2_grpc


def camel_to_snake(name):
    s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
    return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
    s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name)
    return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower()


def snake_to_camel(name):
    return ''.join(word.title() for word in name.split('_'))
    return "".join(word.title() for word in name.split("_"))
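For reference, the two helpers above are plain string transforms; a small sketch of their behaviour, using a made-up field name:

# Illustrative only: round-tripping a made-up name through the helpers above.
print(camel_to_snake("cellCountForGrid"))     # -> "cell_count_for_grid"
print(snake_to_camel("cell_count_for_grid"))  # -> "CellCountForGrid"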

def add_method(cls):
    def decorator(func):
        setattr(cls, func.__name__, func)
        return func  # returning func means func can still be used normally

    return decorator


@ -37,9 +39,11 @@ def add_static_method(cls):
        @wraps(func)
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs)

        setattr(cls, func.__name__, wrapper)
        # Note we are not binding func, but wrapper which accepts self but does exactly the same as func
        return func  # returning func means func can still be used normally

    return decorator
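These decorators are how the rips modules attach hand-written methods to the generated classes (see the @add_method(Project) and @add_method(PlotWindow) usages further down in this diff). A tiny sketch with a hypothetical class:

# Sketch only: Dummy is a hypothetical class, not part of rips.
class Dummy:
    pass

@add_method(Dummy)
def describe(self):
    return "instance of " + type(self).__name__

print(Dummy().describe())  # -> "instance of Dummy"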
|
||||
|
||||
@ -51,10 +55,11 @@ class PdmObjectBase:
|
||||
def _execute_command(self, **command_params):
|
||||
self.__warnings = []
|
||||
response, call = self._commands.Execute.with_call(
|
||||
Commands_pb2.CommandParams(**command_params))
|
||||
Commands_pb2.CommandParams(**command_params)
|
||||
)
|
||||
for key, value in call.trailing_metadata():
|
||||
value = value.replace(';;', '\n')
|
||||
if key == 'warning':
|
||||
value = value.replace(";;", "\n")
|
||||
if key == "warning":
|
||||
self.__warnings.append(value)
|
||||
|
||||
return response
|
||||
@ -67,26 +72,29 @@ class PdmObjectBase:
|
||||
|
||||
# Create stubs
|
||||
if self._channel:
|
||||
self._pdm_object_stub = PdmObject_pb2_grpc.PdmObjectServiceStub(self._channel)
|
||||
self._pdm_object_stub = PdmObject_pb2_grpc.PdmObjectServiceStub(
|
||||
self._channel
|
||||
)
|
||||
self._commands = Commands_pb2_grpc.CommandsStub(self._channel)
|
||||
|
||||
if pb2_object is not None:
|
||||
# Copy parameters from ResInsight
|
||||
assert(isinstance(pb2_object, PdmObject_pb2.PdmObject))
|
||||
assert isinstance(pb2_object, PdmObject_pb2.PdmObject)
|
||||
self._pb2_object = pb2_object
|
||||
for camel_keyword in self._pb2_object.parameters:
|
||||
snake_keyword = camel_to_snake(camel_keyword)
|
||||
setattr(self, snake_keyword, self.__get_grpc_value(camel_keyword))
|
||||
else:
|
||||
# Copy parameters from PdmObject defaults
|
||||
self._pb2_object = PdmObject_pb2.PdmObject(class_keyword=self.__class__.__name__)
|
||||
self._pb2_object = PdmObject_pb2.PdmObject(
|
||||
class_keyword=self.__class__.__name__
|
||||
)
|
||||
self.__copy_to_pb2()
|
||||
|
||||
def copy_from(self, object):
|
||||
"""Copy attribute values from object to self
|
||||
"""
|
||||
"""Copy attribute values from object to self"""
|
||||
for attribute in dir(object):
|
||||
if not attribute.startswith('__'):
|
||||
if not attribute.startswith("__"):
|
||||
value = getattr(object, attribute)
|
||||
# This is crucial to avoid overwriting methods
|
||||
if not callable(value):
|
||||
@ -104,7 +112,7 @@ class PdmObjectBase:
|
||||
def __copy_to_pb2(self):
|
||||
if self._pb2_object is not None:
|
||||
for snake_kw in dir(self):
|
||||
if not snake_kw.startswith('_'):
|
||||
if not snake_kw.startswith("_"):
|
||||
value = getattr(self, snake_kw)
|
||||
# This is crucial to avoid overwriting methods
|
||||
if not callable(value):
|
||||
@ -143,17 +151,23 @@ class PdmObjectBase:
|
||||
for snake_kw in dir(self):
|
||||
if not snake_kw.startswith("_") and not callable(getattr(self, snake_kw)):
|
||||
camel_kw = snake_to_camel(snake_kw)
|
||||
print(" " + snake_kw + " [" + type(getattr(self, snake_kw)).__name__ +
|
||||
"]: " + str(getattr(self, snake_kw)))
|
||||
print(
|
||||
" "
|
||||
+ snake_kw
|
||||
+ " ["
|
||||
+ type(getattr(self, snake_kw)).__name__
|
||||
+ "]: "
|
||||
+ str(getattr(self, snake_kw))
|
||||
)
|
||||
print("Object Methods:")
|
||||
for snake_kw in dir(self):
|
||||
if not snake_kw.startswith("_") and callable(getattr(self, snake_kw)):
|
||||
print(" " + snake_kw)
|
||||
|
||||
def __convert_from_grpc_value(self, value):
|
||||
if value.lower() == 'false':
|
||||
if value.lower() == "false":
|
||||
return False
|
||||
if value.lower() == 'true':
|
||||
if value.lower() == "true":
|
||||
return True
|
||||
try:
|
||||
int_val = int(value)
|
||||
@ -164,7 +178,7 @@ class PdmObjectBase:
|
||||
return float_val
|
||||
except ValueError:
|
||||
# We may have a string. Strip internal start and end quotes
|
||||
value = value.strip('\"')
|
||||
value = value.strip('"')
|
||||
if self.__islist(value):
|
||||
return self.__makelist(value)
|
||||
return value
|
||||
@ -184,7 +198,9 @@ class PdmObjectBase:
|
||||
return str(value)
|
||||
|
||||
def __get_grpc_value(self, camel_keyword):
|
||||
return self.__convert_from_grpc_value(self._pb2_object.parameters[camel_keyword])
|
||||
return self.__convert_from_grpc_value(
|
||||
self._pb2_object.parameters[camel_keyword]
|
||||
)
|
||||
|
||||
def __set_grpc_value(self, camel_keyword, value):
|
||||
self._pb2_object.parameters[camel_keyword] = self.__convert_to_grpc_value(value)
|
||||
@ -216,12 +232,15 @@ class PdmObjectBase:
|
||||
def __from_pb2_to_resinsight_classes(self, pb2_object_list, super_class_definition):
|
||||
pdm_object_list = []
|
||||
from .generated.generated_classes import class_from_keyword
|
||||
|
||||
for pb2_object in pb2_object_list:
|
||||
child_class_definition = class_from_keyword(pb2_object.class_keyword)
|
||||
if child_class_definition is None:
|
||||
child_class_definition = super_class_definition
|
||||
|
||||
pdm_object = child_class_definition(pb2_object=pb2_object, channel=self.channel())
|
||||
pdm_object = child_class_definition(
|
||||
pb2_object=pb2_object, channel=self.channel()
|
||||
)
|
||||
pdm_object_list.append(pdm_object)
|
||||
return pdm_object_list
|
||||
|
||||
@ -233,14 +252,14 @@ class PdmObjectBase:
|
||||
Returns:
|
||||
A list of PdmObjects matching the class_definition
|
||||
"""
|
||||
assert(inspect.isclass(class_definition))
|
||||
assert inspect.isclass(class_definition)
|
||||
|
||||
class_keyword = class_definition.__name__
|
||||
try:
|
||||
request = PdmObject_pb2.PdmDescendantObjectRequest(
|
||||
object=self._pb2_object, child_keyword=class_keyword)
|
||||
object_list = self._pdm_object_stub.GetDescendantPdmObjects(
|
||||
request).objects
|
||||
object=self._pb2_object, child_keyword=class_keyword
|
||||
)
|
||||
object_list = self._pdm_object_stub.GetDescendantPdmObjects(request).objects
|
||||
return self.__from_pb2_to_resinsight_classes(object_list, class_definition)
|
||||
except grpc.RpcError as e:
|
||||
if e.code() == grpc.StatusCode.NOT_FOUND:
|
||||
@ -254,8 +273,9 @@ class PdmObjectBase:
|
||||
Returns:
|
||||
A list of PdmObjects inside the child_field
|
||||
"""
|
||||
request = PdmObject_pb2.PdmChildObjectRequest(object=self._pb2_object,
|
||||
child_field=child_field)
|
||||
request = PdmObject_pb2.PdmChildObjectRequest(
|
||||
object=self._pb2_object, child_field=child_field
|
||||
)
|
||||
try:
|
||||
object_list = self._pdm_object_stub.GetChildPdmObjects(request).objects
|
||||
return self.__from_pb2_to_resinsight_classes(object_list, class_definition)
|
||||
@ -269,13 +289,14 @@ class PdmObjectBase:
|
||||
Arguments:
|
||||
class_definition[class]: A class definition matching the type of class wanted
|
||||
"""
|
||||
assert(inspect.isclass(class_definition))
|
||||
assert inspect.isclass(class_definition)
|
||||
|
||||
class_keyword = class_definition.__name__
|
||||
from .generated.generated_classes import class_from_keyword
|
||||
|
||||
request = PdmObject_pb2.PdmParentObjectRequest(
|
||||
object=self._pb2_object, parent_keyword=class_keyword)
|
||||
object=self._pb2_object, parent_keyword=class_keyword
|
||||
)
|
||||
try:
|
||||
pb2_object = self._pdm_object_stub.GetAncestorPdmObject(request)
|
||||
child_class_definition = class_from_keyword(pb2_object.class_keyword)
|
||||
@ -283,7 +304,9 @@ class PdmObjectBase:
|
||||
if child_class_definition is None:
|
||||
child_class_definition = class_definition
|
||||
|
||||
pdm_object = child_class_definition(pb2_object=pb2_object, channel=self.channel())
|
||||
pdm_object = child_class_definition(
|
||||
pb2_object=pb2_object, channel=self.channel()
|
||||
)
|
||||
return pdm_object
|
||||
except grpc.RpcError as e:
|
||||
if e.code() == grpc.StatusCode.NOT_FOUND:
|
||||
@ -291,7 +314,9 @@ class PdmObjectBase:
|
||||
raise e
|
||||
|
||||
def _call_get_method_async(self, method_name):
|
||||
request = PdmObject_pb2.PdmObjectGetterRequest(object=self._pb2_object, method=method_name)
|
||||
request = PdmObject_pb2.PdmObjectGetterRequest(
|
||||
object=self._pb2_object, method=method_name
|
||||
)
|
||||
for chunk in self._pdm_object_stub.CallPdmObjectGetter(request):
|
||||
yield chunk
|
||||
|
||||
@ -299,7 +324,7 @@ class PdmObjectBase:
|
||||
all_values = []
|
||||
generator = self._call_get_method_async(method_name)
|
||||
for chunk in generator:
|
||||
data = getattr(chunk, chunk.WhichOneof('data'))
|
||||
data = getattr(chunk, chunk.WhichOneof("data"))
|
||||
for value in data.data:
|
||||
all_values.append(value)
|
||||
return all_values
|
||||
@ -310,23 +335,38 @@ class PdmObjectBase:
|
||||
while index < len(array):
|
||||
chunk = PdmObject_pb2.PdmObjectSetterChunk()
|
||||
if index == -1:
|
||||
chunk.set_request.CopyFrom(PdmObject_pb2.PdmObjectSetterRequest(
|
||||
request=method_request, data_count=len(array)))
|
||||
chunk.set_request.CopyFrom(
|
||||
PdmObject_pb2.PdmObjectSetterRequest(
|
||||
request=method_request, data_count=len(array)
|
||||
)
|
||||
)
|
||||
index += 1
|
||||
else:
|
||||
actual_chunk_size = min(len(array) - index + 1, self.__chunk_size)
|
||||
if isinstance(array[0], float):
|
||||
chunk.CopyFrom(
|
||||
PdmObject_pb2.PdmObjectSetterChunk(doubles=PdmObject_pb2.DoubleArray(data=array[index:index +
|
||||
actual_chunk_size])))
|
||||
PdmObject_pb2.PdmObjectSetterChunk(
|
||||
doubles=PdmObject_pb2.DoubleArray(
|
||||
data=array[index : index + actual_chunk_size]
|
||||
)
|
||||
)
|
||||
)
|
||||
elif isinstance(array[0], int):
|
||||
chunk.CopyFrom(
|
||||
PdmObject_pb2.PdmObjectSetterChunk(ints=PdmObject_pb2.IntArray(data=array[index:index +
|
||||
actual_chunk_size])))
|
||||
PdmObject_pb2.PdmObjectSetterChunk(
|
||||
ints=PdmObject_pb2.IntArray(
|
||||
data=array[index : index + actual_chunk_size]
|
||||
)
|
||||
)
|
||||
)
|
||||
elif isinstance(array[0], str):
|
||||
chunk.CopyFrom(
|
||||
PdmObject_pb2.PdmObjectSetterChunk(strings=PdmObject_pb2.StringArray(data=array[index:index +
|
||||
actual_chunk_size])))
|
||||
PdmObject_pb2.PdmObjectSetterChunk(
|
||||
strings=PdmObject_pb2.StringArray(
|
||||
data=array[index : index + actual_chunk_size]
|
||||
)
|
||||
)
|
||||
)
|
||||
else:
|
||||
raise Exception("Wrong data type for set method")
|
||||
index += actual_chunk_size
|
||||
@ -337,7 +377,8 @@ class PdmObjectBase:
|
||||
|
||||
def _call_set_method(self, method_name, values):
|
||||
method_request = PdmObject_pb2.PdmObjectGetterRequest(
|
||||
object=self._pb2_object, method=method_name)
|
||||
object=self._pb2_object, method=method_name
|
||||
)
|
||||
request_iterator = self.__generate_set_method_chunks(values, method_request)
|
||||
reply = self._pdm_object_stub.CallPdmObjectSetter(request_iterator)
|
||||
if reply.accepted_value_count < len(values):
|
||||
@ -346,9 +387,12 @@ class PdmObjectBase:
|
||||
def _call_pdm_method(self, method_name, **kwargs):
|
||||
pb2_params = PdmObject_pb2.PdmObject(class_keyword=method_name)
|
||||
for key, value in kwargs.items():
|
||||
pb2_params.parameters[snake_to_camel(key)] = self.__convert_to_grpc_value(value)
|
||||
pb2_params.parameters[snake_to_camel(key)] = self.__convert_to_grpc_value(
|
||||
value
|
||||
)
|
||||
request = PdmObject_pb2.PdmObjectMethodRequest(
|
||||
object=self._pb2_object, method=method_name, params=pb2_params)
|
||||
object=self._pb2_object, method=method_name, params=pb2_params
|
||||
)
|
||||
|
||||
pb2_object = self._pdm_object_stub.CallPdmObjectMethod(request)
|
||||
|
||||
@ -358,7 +402,9 @@ class PdmObjectBase:
|
||||
if child_class_definition is None:
|
||||
return None
|
||||
|
||||
pdm_object = child_class_definition(pb2_object=pb2_object, channel=self.channel())
|
||||
pdm_object = child_class_definition(
|
||||
pb2_object=pb2_object, channel=self.channel()
|
||||
)
|
||||
return pdm_object
|
||||
|
||||
def update(self):
|
||||
@ -367,4 +413,6 @@ class PdmObjectBase:
|
||||
if self._pdm_object_stub is not None:
|
||||
self._pdm_object_stub.UpdateExistingPdmObject(self._pb2_object)
|
||||
else:
|
||||
raise Exception("Object is not connected to GRPC service so cannot update ResInsight")
|
||||
raise Exception(
|
||||
"Object is not connected to GRPC service so cannot update ResInsight"
|
||||
)
|
||||
|
@ -8,8 +8,8 @@ from .resinsight_classes import PlotWindow, Plot
|
||||
|
||||
|
||||
@add_method(PlotWindow)
|
||||
def export_snapshot(self, export_folder='', file_prefix='', output_format='PNG'):
|
||||
""" Export snapshot for the current plot
|
||||
def export_snapshot(self, export_folder="", file_prefix="", output_format="PNG"):
|
||||
"""Export snapshot for the current plot
|
||||
|
||||
Arguments:
|
||||
export_folder(str): The path to export to. By default will use the global export folder
|
||||
@ -18,8 +18,11 @@ def export_snapshot(self, export_folder='', file_prefix='', output_format='PNG')
|
||||
|
||||
"""
|
||||
return self._execute_command(
|
||||
exportSnapshots=Commands_pb2.ExportSnapshotsRequest(type='PLOTS',
|
||||
prefix=file_prefix,
|
||||
viewId=self.id,
|
||||
exportFolder=export_folder,
|
||||
plotOutputFormat=output_format))
|
||||
exportSnapshots=Commands_pb2.ExportSnapshotsRequest(
|
||||
type="PLOTS",
|
||||
prefix=file_prefix,
|
||||
viewId=self.id,
|
||||
exportFolder=export_folder,
|
||||
plotOutputFormat=output_format,
|
||||
)
|
||||
)
|
||||
|
@ -70,8 +70,9 @@ def load_case(self, path):
    Returns:
        :class:`rips.generated.generated_classes.Case`
    """
    command_reply = self._execute_command(loadCase=Commands_pb2.FilePathRequest(
        path=path))
    command_reply = self._execute_command(
        loadCase=Commands_pb2.FilePathRequest(path=path)
    )
    return self.case(command_reply.loadCaseResult.id)
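load_case() is the usual way to obtain a Case handle from a script; a short sketch follows, where the EGRID path is an example and not taken from this commit.

# Sketch, assuming a running ResInsight instance and an existing grid file.
import rips

resinsight = rips.Instance.find()
case = resinsight.project.load_case("/path/to/MY_GRID.EGRID")
print(case.name, case.id)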
||||
|
||||
|
||||
@ -125,7 +126,9 @@ def replace_source_cases(self, grid_list_file, case_group_id=0):
|
||||
"""
|
||||
return self._execute_command(
|
||||
replaceSourceCases=Commands_pb2.ReplaceSourceCasesRequest(
|
||||
gridListFile=grid_list_file, caseGroupId=case_group_id))
|
||||
gridListFile=grid_list_file, caseGroupId=case_group_id
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@add_method(Project)
|
||||
@ -139,18 +142,21 @@ def create_grid_case_group(self, case_paths):
|
||||
"""
|
||||
command_reply = self._execute_command(
|
||||
createGridCaseGroup=Commands_pb2.CreateGridCaseGroupRequest(
|
||||
casePaths=case_paths))
|
||||
return self.grid_case_group(
|
||||
command_reply.createGridCaseGroupResult.groupId)
|
||||
casePaths=case_paths
|
||||
)
|
||||
)
|
||||
return self.grid_case_group(command_reply.createGridCaseGroupResult.groupId)
|
||||
|
||||
|
||||
@add_method(Project)
|
||||
def summary_cases(self):
|
||||
"""Get a list of all summary cases in the Project
|
||||
|
||||
Returns: A list of :class:`rips.generated.resinsight_classes.SummaryCase`
|
||||
"""
|
||||
"""
|
||||
return self.descendants(SummaryCase)
|
||||
|
||||
|
||||
@add_method(Project)
|
||||
def views(self):
|
||||
"""Get a list of views belonging to a project"""
|
||||
@ -243,12 +249,14 @@ def export_multi_case_snapshots(self, grid_list_file):
|
||||
"""
|
||||
return self._execute_command(
|
||||
exportMultiCaseSnapshot=Commands_pb2.ExportMultiCaseRequest(
|
||||
gridListFile=grid_list_file))
|
||||
gridListFile=grid_list_file
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@add_method(Project)
def export_snapshots(self, snapshot_type='ALL', prefix='', plot_format='PNG'):
    """ Export all snapshots of a given type
def export_snapshots(self, snapshot_type="ALL", prefix="", plot_format="PNG"):
    """Export all snapshots of a given type

    Arguments:
        snapshot_type (str): Enum string ('ALL', 'VIEWS' or 'PLOTS')
@ -257,12 +265,18 @@ def export_snapshots(self, snapshot_type='ALL', prefix='', plot_format='PNG'):
    """
    return self._execute_command(
        exportSnapshots=Commands_pb2.ExportSnapshotsRequest(
            type=snapshot_type, prefix=prefix, caseId=-1, viewId=-1, plotOutputFormat=plot_format))
            type=snapshot_type,
            prefix=prefix,
            caseId=-1,
            viewId=-1,
            plotOutputFormat=plot_format,
        )
    )
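Together with Instance.set_export_folder() earlier in this commit, a typical snapshot export reads roughly like the sketch below; the folder and prefix are illustrative values only.

# Sketch, assuming `resinsight` is a connected rips.Instance; values are examples.
resinsight.set_export_folder(export_type="SNAPSHOTS", path="/tmp/snapshots")
resinsight.project.export_snapshots(snapshot_type="PLOTS", prefix="run1_")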
||||
|
||||
|
||||
@add_method(Project)
|
||||
def export_well_paths(self, well_paths=None, md_step_size=5.0):
|
||||
""" Export a set of well paths
|
||||
"""Export a set of well paths
|
||||
|
||||
Arguments:
|
||||
well_paths(list): List of strings of well paths. If none, export all.
|
||||
@ -272,14 +286,18 @@ def export_well_paths(self, well_paths=None, md_step_size=5.0):
|
||||
well_paths = []
|
||||
elif isinstance(well_paths, str):
|
||||
well_paths = [well_paths]
|
||||
return self._execute_command(exportWellPaths=Commands_pb2.ExportWellPathRequest(
|
||||
wellPathNames=well_paths, mdStepSize=md_step_size))
|
||||
return self._execute_command(
|
||||
exportWellPaths=Commands_pb2.ExportWellPathRequest(
|
||||
wellPathNames=well_paths, mdStepSize=md_step_size
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@add_method(Project)
|
||||
def scale_fracture_template(self, template_id, half_length, height,
|
||||
d_factor, conductivity):
|
||||
""" Scale fracture template parameters
|
||||
def scale_fracture_template(
|
||||
self, template_id, half_length, height, d_factor, conductivity
|
||||
):
|
||||
"""Scale fracture template parameters
|
||||
|
||||
Arguments:
|
||||
template_id(int): ID of fracture template
|
||||
@ -294,12 +312,14 @@ def scale_fracture_template(self, template_id, half_length, height,
|
||||
halfLength=half_length,
|
||||
height=height,
|
||||
dFactor=d_factor,
|
||||
conductivity=conductivity))
|
||||
conductivity=conductivity,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@add_method(Project)
|
||||
def set_fracture_containment(self, template_id, top_layer, base_layer):
|
||||
""" Set fracture template containment parameters
|
||||
"""Set fracture template containment parameters
|
||||
|
||||
Arguments:
|
||||
template_id(int): ID of fracture template
|
||||
@ -308,12 +328,14 @@ def set_fracture_containment(self, template_id, top_layer, base_layer):
|
||||
"""
|
||||
return self._execute_command(
|
||||
setFractureContainment=Commands_pb2.SetFracContainmentRequest(
|
||||
id=template_id, topLayer=top_layer, baseLayer=base_layer))
|
||||
id=template_id, topLayer=top_layer, baseLayer=base_layer
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@add_method(Project)
|
||||
def import_well_paths(self, well_path_files=None, well_path_folder=''):
|
||||
""" Import well paths into project
|
||||
def import_well_paths(self, well_path_files=None, well_path_folder=""):
|
||||
"""Import well paths into project
|
||||
|
||||
Arguments:
|
||||
well_path_files(list): List of file paths to import
|
||||
@ -325,8 +347,11 @@ def import_well_paths(self, well_path_files=None, well_path_folder=''):
|
||||
if well_path_files is None:
|
||||
well_path_files = []
|
||||
|
||||
res = self._execute_command(importWellPaths=Commands_pb2.ImportWellPathsRequest(wellPathFolder=well_path_folder,
|
||||
wellPathFiles=well_path_files))
|
||||
res = self._execute_command(
|
||||
importWellPaths=Commands_pb2.ImportWellPathsRequest(
|
||||
wellPathFolder=well_path_folder, wellPathFiles=well_path_files
|
||||
)
|
||||
)
|
||||
well_paths = []
|
||||
for well_path_name in res.importWellPathsResult.wellPathNames:
|
||||
well_paths.append(self.well_path_by_name(well_path_name))
|
||||
@ -358,8 +383,8 @@ def well_path_by_name(self, well_path_name):
|
||||
|
||||
|
||||
@add_method(Project)
|
||||
def import_well_log_files(self, well_log_files=None, well_log_folder=''):
|
||||
""" Import well log files into project
|
||||
def import_well_log_files(self, well_log_files=None, well_log_folder=""):
|
||||
"""Import well log files into project
|
||||
|
||||
Arguments:
|
||||
well_log_files(list): List of file paths to import
|
||||
@ -371,14 +396,17 @@ def import_well_log_files(self, well_log_files=None, well_log_folder=''):
|
||||
|
||||
if well_log_files is None:
|
||||
well_log_files = []
|
||||
res = self._execute_command(importWellLogFiles=Commands_pb2.ImportWellLogFilesRequest(wellLogFolder=well_log_folder,
|
||||
wellLogFiles=well_log_files))
|
||||
res = self._execute_command(
|
||||
importWellLogFiles=Commands_pb2.ImportWellLogFilesRequest(
|
||||
wellLogFolder=well_log_folder, wellLogFiles=well_log_files
|
||||
)
|
||||
)
|
||||
return res.importWellLogFilesResult.wellPathNames
|
||||
|
||||
|
||||
@add_method(Project)
|
||||
def import_formation_names(self, formation_files=None):
|
||||
""" Import formation names into project
|
||||
"""Import formation names into project
|
||||
|
||||
Arguments:
|
||||
formation_files(list): list of files to import
|
||||
@ -389,5 +417,8 @@ def import_formation_names(self, formation_files=None):
|
||||
elif isinstance(formation_files, str):
|
||||
formation_files = [formation_files]
|
||||
|
||||
self._execute_command(importFormationNames=Commands_pb2.ImportFormationNamesRequest(formationFiles=formation_files,
|
||||
applyToCaseId=-1))
|
||||
self._execute_command(
|
||||
importFormationNames=Commands_pb2.ImportFormationNamesRequest(
|
||||
formationFiles=formation_files, applyToCaseId=-1
|
||||
)
|
||||
)
|
||||
|
@ -30,15 +30,15 @@ def status(self, timestep):
|
||||
Parameter | Description | Type
|
||||
----------- | ------------------------------------------------------------- | -----
|
||||
well_type | Well type as string | string
|
||||
is_open | True if simulation well is open at the specified time step | bool
|
||||
is_open | True if simulation well is open at the specified time step | bool
|
||||
|
||||
Arguments:
|
||||
timestep(int): Time step index
|
||||
|
||||
"""
|
||||
sim_well_request = SimulationWell_pb2.SimulationWellRequest(case_id=self.case().id,
|
||||
well_name=self.name,
|
||||
timestep=timestep)
|
||||
sim_well_request = SimulationWell_pb2.SimulationWellRequest(
|
||||
case_id=self.case().id, well_name=self.name, timestep=timestep
|
||||
)
|
||||
return self._simulation_well_stub.GetSimulationWellStatus(sim_well_request)
|
||||
|
||||
|
||||
@ -52,7 +52,7 @@ def cells(self, timestep):
|
||||
----------- | --------------------------------------------------------- | -----
|
||||
ijk | Cell IJK location | Vec3i
|
||||
grid_index | Grid index | int
|
||||
is_open | True if connection to is open at the specified time step | bool
|
||||
is_open | True if connection to is open at the specified time step | bool
|
||||
branch_id | | int
|
||||
segment_id | | int
|
||||
|
||||
@ -63,9 +63,9 @@ def cells(self, timestep):
|
||||
List of SimulationWellCellInfo
|
||||
|
||||
"""
|
||||
sim_well_request = SimulationWell_pb2.SimulationWellRequest(case_id=self.case().id,
|
||||
well_name=self.name,
|
||||
timestep=timestep)
|
||||
sim_well_request = SimulationWell_pb2.SimulationWellRequest(
|
||||
case_id=self.case().id, well_name=self.name, timestep=timestep
|
||||
)
|
||||
return self._simulation_well_stub.GetSimulationWellCells(sim_well_request).data
|
||||
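The cells() call above returns one SimulationWellCellInfo per connection, with the fields listed in the docstring table; a brief sketch of consuming it, where the well object and time step are assumed to come from elsewhere in the script:

# Sketch, assuming `well` is a rips SimulationWell obtained from a loaded case.
for cell in well.cells(timestep=0):
    if cell.is_open:
        print("open connection in cell", cell.ijk.i, cell.ijk.j, cell.ijk.k)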
|
||||
|
||||
|
@ -3,7 +3,7 @@ import sys
|
||||
import os
|
||||
import getopt
|
||||
|
||||
sys.path.insert(1, os.path.join(sys.path[0], '../../'))
|
||||
sys.path.insert(1, os.path.join(sys.path[0], "../../"))
|
||||
import rips
|
||||
|
||||
_rips_instance = None
|
||||
@ -22,20 +22,28 @@ def initialize_test():
|
||||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption("--console", action="store_true", default=False,
|
||||
help="Run as console application")
|
||||
parser.addoption("--existing", action="store_true", default=False,
|
||||
help="Look for existing ResInsight")
|
||||
parser.addoption(
|
||||
"--console",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Run as console application",
|
||||
)
|
||||
parser.addoption(
|
||||
"--existing",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Look for existing ResInsight",
|
||||
)
|
||||
|
||||
|
||||
def pytest_configure(config):
|
||||
global _rips_instance
|
||||
console = False
|
||||
if config.getoption('--existing'):
|
||||
if config.getoption("--existing"):
|
||||
print("Looking for existing ResInsight")
|
||||
_rips_instance = rips.Instance.find()
|
||||
else:
|
||||
if config.getoption('--console'):
|
||||
if config.getoption("--console"):
|
||||
console = True
|
||||
_rips_instance = rips.Instance.launch(console=console)
|
||||
if not _rips_instance:
|
||||
@ -44,6 +52,6 @@ def pytest_configure(config):
|
||||
|
||||
|
||||
def pytest_unconfigure(config):
|
||||
if not config.getoption('--existing'):
|
||||
if not config.getoption("--existing"):
|
||||
if _rips_instance:
|
||||
_rips_instance.exit()
|
||||
|
@ -5,45 +5,47 @@ import pytest
|
||||
import grpc
|
||||
import tempfile
|
||||
|
||||
sys.path.insert(1, os.path.join(sys.path[0], '../../'))
|
||||
sys.path.insert(1, os.path.join(sys.path[0], "../../"))
|
||||
import rips
|
||||
|
||||
import dataroot
|
||||
|
||||
|
||||
def test_Launch(rips_instance, initialize_test):
|
||||
assert(rips_instance is not None)
|
||||
assert rips_instance is not None
|
||||
|
||||
|
||||
def test_EmptyProject(rips_instance, initialize_test):
|
||||
cases = rips_instance.project.cases()
|
||||
assert(len(cases) is 0)
|
||||
assert len(cases) is 0
|
||||
|
||||
|
||||
def test_OneCase(rips_instance, initialize_test):
|
||||
case = rips_instance.project.load_case(
|
||||
dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID")
|
||||
assert(case.name == "TEST10K_FLT_LGR_NNC")
|
||||
assert(case.id == 0)
|
||||
dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
|
||||
)
|
||||
assert case.name == "TEST10K_FLT_LGR_NNC"
|
||||
assert case.id == 0
|
||||
cases = rips_instance.project.cases()
|
||||
assert(len(cases) is 1)
|
||||
assert len(cases) is 1
|
||||
|
||||
|
||||
def test_BoundingBox(rips_instance, initialize_test):
|
||||
case = rips_instance.project.load_case(
|
||||
dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID")
|
||||
assert(case.name == "TEST10K_FLT_LGR_NNC")
|
||||
dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
|
||||
)
|
||||
assert case.name == "TEST10K_FLT_LGR_NNC"
|
||||
boundingbox = case.reservoir_boundingbox()
|
||||
assert(math.isclose(3382.90, boundingbox.min_x, abs_tol=1.0e-1))
|
||||
assert(math.isclose(5850.48, boundingbox.max_x, abs_tol=1.0e-1))
|
||||
assert(math.isclose(4157.45, boundingbox.min_y, abs_tol=1.0e-1))
|
||||
assert(math.isclose(7354.93, boundingbox.max_y, abs_tol=1.0e-1))
|
||||
assert(math.isclose(-4252.61, boundingbox.min_z, abs_tol=1.0e-1))
|
||||
assert(math.isclose(-4103.60, boundingbox.max_z, abs_tol=1.0e-1))
|
||||
assert math.isclose(3382.90, boundingbox.min_x, abs_tol=1.0e-1)
|
||||
assert math.isclose(5850.48, boundingbox.max_x, abs_tol=1.0e-1)
|
||||
assert math.isclose(4157.45, boundingbox.min_y, abs_tol=1.0e-1)
|
||||
assert math.isclose(7354.93, boundingbox.max_y, abs_tol=1.0e-1)
|
||||
assert math.isclose(-4252.61, boundingbox.min_z, abs_tol=1.0e-1)
|
||||
assert math.isclose(-4103.60, boundingbox.max_z, abs_tol=1.0e-1)
|
||||
|
||||
min_depth, max_depth = case.reservoir_depth_range()
|
||||
assert(math.isclose(4103.60, min_depth, abs_tol=1.0e-1))
|
||||
assert(math.isclose(4252.61, max_depth, abs_tol=1.0e-1))
|
||||
assert math.isclose(4103.60, min_depth, abs_tol=1.0e-1)
|
||||
assert math.isclose(4252.61, max_depth, abs_tol=1.0e-1)
|
||||
|
||||
|
||||
def test_MultipleCases(rips_instance, initialize_test):
|
||||
@ -59,9 +61,9 @@ def test_MultipleCases(rips_instance, initialize_test):
|
||||
rips_instance.project.load_case(path=case_path)
|
||||
|
||||
cases = rips_instance.project.cases()
|
||||
assert(len(cases) == len(case_names))
|
||||
assert len(cases) == len(case_names)
|
||||
for i, case_name in enumerate(case_names):
|
||||
assert(case_name == cases[i].name)
|
||||
assert case_name == cases[i].name
|
||||
|
||||
|
||||
def get_cell_index_with_ijk(cell_info, i, j, k):
|
||||
@ -72,48 +74,50 @@ def get_cell_index_with_ijk(cell_info, i, j, k):
|
||||
|
||||
|
||||
def check_corner(actual, expected):
|
||||
assert(math.isclose(actual.x, expected[0], abs_tol=0.1))
|
||||
assert(math.isclose(actual.y, expected[1], abs_tol=0.1))
|
||||
assert(math.isclose(actual.z, expected[2], abs_tol=0.1))
|
||||
assert math.isclose(actual.x, expected[0], abs_tol=0.1)
|
||||
assert math.isclose(actual.y, expected[1], abs_tol=0.1)
|
||||
assert math.isclose(actual.z, expected[2], abs_tol=0.1)
|
||||
|
||||
|
||||
def test_10k(rips_instance, initialize_test):
|
||||
case_path = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
|
||||
case = rips_instance.project.load_case(path=case_path)
|
||||
assert(len(case.grids()) == 2)
|
||||
assert len(case.grids()) == 2
|
||||
cell_count_info = case.cell_count()
|
||||
assert(cell_count_info.active_cell_count == 11125)
|
||||
assert(cell_count_info.reservoir_cell_count == 316224)
|
||||
assert cell_count_info.active_cell_count == 11125
|
||||
assert cell_count_info.reservoir_cell_count == 316224
|
||||
time_steps = case.time_steps()
|
||||
assert(len(time_steps) == 9)
|
||||
assert len(time_steps) == 9
|
||||
days_since_start = case.days_since_start()
|
||||
assert(len(days_since_start) == 9)
|
||||
assert len(days_since_start) == 9
|
||||
cell_info = case.cell_info_for_active_cells()
|
||||
assert(len(cell_info) == cell_count_info.active_cell_count)
|
||||
assert len(cell_info) == cell_count_info.active_cell_count
|
||||
|
||||
# Check an active cell (found in resinsight ui)
|
||||
cell_index = get_cell_index_with_ijk(cell_info, 23, 44, 19)
|
||||
assert(cell_index != -1)
|
||||
assert cell_index != -1
|
||||
|
||||
cell_centers = case.active_cell_centers()
|
||||
assert(len(cell_centers) == cell_count_info.active_cell_count)
|
||||
assert len(cell_centers) == cell_count_info.active_cell_count
|
||||
|
||||
# Check the cell center for the specific cell
|
||||
assert(math.isclose(3627.17, cell_centers[cell_index].x, abs_tol=0.1))
|
||||
assert(math.isclose(5209.75, cell_centers[cell_index].y, abs_tol=0.1))
|
||||
assert(math.isclose(4179.6, cell_centers[cell_index].z, abs_tol=0.1))
|
||||
assert math.isclose(3627.17, cell_centers[cell_index].x, abs_tol=0.1)
|
||||
assert math.isclose(5209.75, cell_centers[cell_index].y, abs_tol=0.1)
|
||||
assert math.isclose(4179.6, cell_centers[cell_index].z, abs_tol=0.1)
|
||||
|
||||
cell_corners = case.active_cell_corners()
|
||||
assert(len(cell_corners) == cell_count_info.active_cell_count)
|
||||
assert len(cell_corners) == cell_count_info.active_cell_count
|
||||
# Expected values from ResInsight UI
|
||||
expected_corners = [[3565.22, 5179.02, 4177.18],
|
||||
[3655.67, 5145.34, 4176.63],
|
||||
[3690.07, 5240.69, 4180.02],
|
||||
[3599.87, 5275.16, 4179.32],
|
||||
[3564.13, 5178.61, 4179.75],
|
||||
[3654.78, 5144.79, 4179.23],
|
||||
[3688.99, 5239.88, 4182.7],
|
||||
[3598.62, 5274.48, 4181.96]]
|
||||
expected_corners = [
|
||||
[3565.22, 5179.02, 4177.18],
|
||||
[3655.67, 5145.34, 4176.63],
|
||||
[3690.07, 5240.69, 4180.02],
|
||||
[3599.87, 5275.16, 4179.32],
|
||||
[3564.13, 5178.61, 4179.75],
|
||||
[3654.78, 5144.79, 4179.23],
|
||||
[3688.99, 5239.88, 4182.7],
|
||||
[3598.62, 5274.48, 4181.96],
|
||||
]
|
||||
check_corner(cell_corners[cell_index].c0, expected_corners[0])
|
||||
check_corner(cell_corners[cell_index].c1, expected_corners[1])
|
||||
check_corner(cell_corners[cell_index].c2, expected_corners[2])
|
||||
@ -125,53 +129,61 @@ def test_10k(rips_instance, initialize_test):
|
||||
|
||||
# No coarsening info for this case
|
||||
coarsening_info = case.coarsening_info()
|
||||
assert(len(coarsening_info) == 0)
|
||||
assert len(coarsening_info) == 0
|
||||
|
||||
|
||||
def test_PdmObject(rips_instance, initialize_test):
|
||||
case_path = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
|
||||
case = rips_instance.project.load_case(path=case_path)
|
||||
assert(case.id == 0)
|
||||
assert(case.address() is not 0)
|
||||
assert(case.__class__.__name__ == "EclipseCase")
|
||||
assert case.id == 0
|
||||
assert case.address() is not 0
|
||||
assert case.__class__.__name__ == "EclipseCase"
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.platform.startswith('linux'), reason="Brugge is currently exceptionally slow on Linux")
|
||||
@pytest.mark.skipif(
|
||||
sys.platform.startswith("linux"),
|
||||
reason="Brugge is currently exceptionally slow on Linux",
|
||||
)
|
||||
def test_brugge_0010(rips_instance, initialize_test):
|
||||
case_path = dataroot.PATH + "/Case_with_10_timesteps/Real10/BRUGGE_0010.EGRID"
|
||||
case = rips_instance.project.load_case(path=case_path)
|
||||
assert(len(case.grids()) == 1)
|
||||
assert len(case.grids()) == 1
|
||||
cellCountInfo = case.cell_count()
|
||||
assert(cellCountInfo.active_cell_count == 43374)
|
||||
assert(cellCountInfo.reservoir_cell_count == 60048)
|
||||
assert cellCountInfo.active_cell_count == 43374
|
||||
assert cellCountInfo.reservoir_cell_count == 60048
|
||||
time_steps = case.time_steps()
|
||||
assert(len(time_steps) == 11)
|
||||
assert len(time_steps) == 11
|
||||
days_since_start = case.days_since_start()
|
||||
assert(len(days_since_start) == 11)
|
||||
assert len(days_since_start) == 11
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.platform.startswith('linux'), reason="Brugge is currently exceptionally slow on Linux")
|
||||
@pytest.mark.skipif(
|
||||
sys.platform.startswith("linux"),
|
||||
reason="Brugge is currently exceptionally slow on Linux",
|
||||
)
|
||||
def test_replaceCase(rips_instance, initialize_test):
|
||||
project = rips_instance.project.open(dataroot.PATH + "/TEST10K_FLT_LGR_NNC/10KWithWellLog.rsp")
|
||||
project = rips_instance.project.open(
|
||||
dataroot.PATH + "/TEST10K_FLT_LGR_NNC/10KWithWellLog.rsp"
|
||||
)
|
||||
case_path = dataroot.PATH + "/Case_with_10_timesteps/Real0/BRUGGE_0000.EGRID"
|
||||
case = project.case(case_id=0)
|
||||
assert(case is not None)
|
||||
assert(case.name == "TEST10K_FLT_LGR_NNC")
|
||||
assert(case.id == 0)
|
||||
assert case is not None
|
||||
assert case.name == "TEST10K_FLT_LGR_NNC"
|
||||
assert case.id == 0
|
||||
cases = rips_instance.project.cases()
|
||||
assert(len(cases) is 1)
|
||||
assert len(cases) is 1
|
||||
|
||||
case.replace(new_grid_file=case_path)
|
||||
# Check that the case object has been changed
|
||||
assert(case.name == "BRUGGE_0000")
|
||||
assert(case.id == 0)
|
||||
assert case.name == "BRUGGE_0000"
|
||||
assert case.id == 0
|
||||
|
||||
cases = rips_instance.project.cases()
|
||||
assert(len(cases) is 1)
|
||||
assert len(cases) is 1
|
||||
# Check that retrieving the case object again will yield the changed object
|
||||
case = project.case(case_id=0)
|
||||
assert(case.name == "BRUGGE_0000")
|
||||
assert(case.id == 0)
|
||||
assert case.name == "BRUGGE_0000"
|
||||
assert case.id == 0
|
||||
|
||||
|
||||
def test_loadNonExistingCase(rips_instance, initialize_test):
|
||||
@ -180,26 +192,31 @@ def test_loadNonExistingCase(rips_instance, initialize_test):
|
||||
assert rips_instance.project.load_case(case_path)
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.platform.startswith('linux'), reason="Brugge is currently exceptionally slow on Linux")
|
||||
@pytest.mark.skipif(
|
||||
sys.platform.startswith("linux"),
|
||||
reason="Brugge is currently exceptionally slow on Linux",
|
||||
)
|
||||
def test_exportFlowCharacteristics(rips_instance, initialize_test):
|
||||
case_path = dataroot.PATH + "/Case_with_10_timesteps/Real0/BRUGGE_0000.EGRID"
|
||||
case = rips_instance.project.load_case(case_path)
|
||||
with tempfile.TemporaryDirectory(prefix="rips") as tmpdirname:
|
||||
print("Temporary folder: ", tmpdirname)
|
||||
file_name = tmpdirname + "/exportFlowChar.txt"
|
||||
case.export_flow_characteristics(time_steps=8, producers=[],
|
||||
injectors="I01", file_name=file_name)
|
||||
case.export_flow_characteristics(
|
||||
time_steps=8, producers=[], injectors="I01", file_name=file_name
|
||||
)
|
||||
|
||||
|
||||
def test_selected_cells(rips_instance, initialize_test):
|
||||
case = rips_instance.project.load_case(
|
||||
dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID")
|
||||
assert(case.name == "TEST10K_FLT_LGR_NNC")
|
||||
dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
|
||||
)
|
||||
assert case.name == "TEST10K_FLT_LGR_NNC"
|
||||
selected_cells = case.selected_cells()
|
||||
assert(len(selected_cells) == 0)
|
||||
assert len(selected_cells) == 0
|
||||
|
||||
time_step_info = case.time_steps()
|
||||
for (tidx, timestep) in enumerate(time_step_info):
|
||||
# Try to read for SOIL the time step (will be empty since nothing is selected)
|
||||
soil_results = case.selected_cell_property('DYNAMIC_NATIVE', 'SOIL', tidx)
|
||||
assert(len(soil_results) == 0)
|
||||
soil_results = case.selected_cell_property("DYNAMIC_NATIVE", "SOIL", tidx)
|
||||
assert len(soil_results) == 0
|
||||
|
@ -4,22 +4,30 @@ import tempfile
|
||||
import pytest
|
||||
import grpc
|
||||
|
||||
sys.path.insert(1, os.path.join(sys.path[0], '../../'))
|
||||
sys.path.insert(1, os.path.join(sys.path[0], "../../"))
|
||||
import rips
|
||||
|
||||
import dataroot
|
||||
|
||||
|
||||
def test_create_lgr_well(rips_instance, initialize_test):
|
||||
case = rips_instance.project.load_case(
|
||||
dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID")
|
||||
assert(case.name == "TEST10K_FLT_LGR_NNC")
|
||||
assert(len(case.grids()) == 2)
|
||||
dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
|
||||
)
|
||||
assert case.name == "TEST10K_FLT_LGR_NNC"
|
||||
assert len(case.grids()) == 2
|
||||
|
||||
well_files=[dataroot.PATH + "/TEST10K_FLT_LGR_NNC/wellpath_a.dev"]
|
||||
well_files = [dataroot.PATH + "/TEST10K_FLT_LGR_NNC/wellpath_a.dev"]
|
||||
rips_instance.project.import_well_paths(well_path_files=well_files)
|
||||
|
||||
time_step=5
|
||||
well_path_names=["Well Path A"]
|
||||
case.create_lgr_for_completion(time_step, well_path_names, refinement_i=2, refinement_j=3,refinement_k=1, split_type="LGR_PER_WELL")
|
||||
assert(len(case.grids()) == 3)
|
||||
|
||||
time_step = 5
|
||||
well_path_names = ["Well Path A"]
|
||||
case.create_lgr_for_completion(
|
||||
time_step,
|
||||
well_path_names,
|
||||
refinement_i=2,
|
||||
refinement_j=3,
|
||||
refinement_k=1,
|
||||
split_type="LGR_PER_WELL",
|
||||
)
|
||||
assert len(case.grids()) == 3
|
||||
|
@ -2,49 +2,51 @@ import sys
|
||||
import os
|
||||
import math
|
||||
|
||||
sys.path.insert(1, os.path.join(sys.path[0], '../../'))
|
||||
sys.path.insert(1, os.path.join(sys.path[0], "../../"))
|
||||
import rips
|
||||
|
||||
import dataroot
|
||||
|
||||
|
||||
def check_corner(actual, expected):
|
||||
assert(math.isclose(actual.x, expected[0], abs_tol=0.1))
|
||||
assert(math.isclose(actual.y, expected[1], abs_tol=0.1))
|
||||
assert(math.isclose(actual.z, expected[2], abs_tol=0.1))
|
||||
assert math.isclose(actual.x, expected[0], abs_tol=0.1)
|
||||
assert math.isclose(actual.y, expected[1], abs_tol=0.1)
|
||||
assert math.isclose(actual.z, expected[2], abs_tol=0.1)
|
||||
|
||||
|
||||
def test_10k(rips_instance, initialize_test):
|
||||
casePath = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
|
||||
case = rips_instance.project.load_case(path=casePath)
|
||||
assert(len(case.grids()) == 2)
|
||||
assert len(case.grids()) == 2
|
||||
grid = case.grid(index=0)
|
||||
dimensions = grid.dimensions()
|
||||
assert(dimensions.i == 90)
|
||||
assert(dimensions.j == 96)
|
||||
assert(dimensions.k == 36)
|
||||
assert dimensions.i == 90
|
||||
assert dimensions.j == 96
|
||||
assert dimensions.k == 36
|
||||
|
||||
cell_centers = grid.cell_centers()
|
||||
assert(len(cell_centers) == (dimensions.i * dimensions.j * dimensions.k))
|
||||
assert len(cell_centers) == (dimensions.i * dimensions.j * dimensions.k)
|
||||
|
||||
# Test a specific cell (results from ResInsight UI)
|
||||
cell_index = 168143
|
||||
assert(math.isclose(3627.17, cell_centers[cell_index].x, abs_tol=0.1))
|
||||
assert(math.isclose(5209.75, cell_centers[cell_index].y, abs_tol=0.1))
|
||||
assert(math.isclose(4179.6, cell_centers[cell_index].z, abs_tol=0.1))
|
||||
assert math.isclose(3627.17, cell_centers[cell_index].x, abs_tol=0.1)
|
||||
assert math.isclose(5209.75, cell_centers[cell_index].y, abs_tol=0.1)
|
||||
assert math.isclose(4179.6, cell_centers[cell_index].z, abs_tol=0.1)
|
||||
|
||||
cell_corners = grid.cell_corners()
|
||||
assert(len(cell_corners) == (dimensions.i * dimensions.j * dimensions.k))
|
||||
assert len(cell_corners) == (dimensions.i * dimensions.j * dimensions.k)
|
||||
|
||||
# Expected values from ResInsight UI
|
||||
expected_corners = [[3565.22, 5179.02, 4177.18],
|
||||
[3655.67, 5145.34, 4176.63],
|
||||
[3690.07, 5240.69, 4180.02],
|
||||
[3599.87, 5275.16, 4179.32],
|
||||
[3564.13, 5178.61, 4179.75],
|
||||
[3654.78, 5144.79, 4179.23],
|
||||
[3688.99, 5239.88, 4182.7],
|
||||
[3598.62, 5274.48, 4181.96]]
|
||||
expected_corners = [
|
||||
[3565.22, 5179.02, 4177.18],
|
||||
[3655.67, 5145.34, 4176.63],
|
||||
[3690.07, 5240.69, 4180.02],
|
||||
[3599.87, 5275.16, 4179.32],
|
||||
[3564.13, 5178.61, 4179.75],
|
||||
[3654.78, 5144.79, 4179.23],
|
||||
[3688.99, 5239.88, 4182.7],
|
||||
[3598.62, 5274.48, 4181.96],
|
||||
]
|
||||
check_corner(cell_corners[cell_index].c0, expected_corners[0])
|
||||
check_corner(cell_corners[cell_index].c1, expected_corners[1])
|
||||
check_corner(cell_corners[cell_index].c2, expected_corners[2])
|
||||
|
@ -5,7 +5,7 @@ import pytest
|
||||
|
||||
import rips.generated.NNCProperties_pb2 as NNCProperties_pb2
|
||||
|
||||
sys.path.insert(1, os.path.join(sys.path[0], '../../'))
|
||||
sys.path.insert(1, os.path.join(sys.path[0], "../../"))
|
||||
import rips
|
||||
|
||||
import dataroot
|
||||
@ -16,25 +16,28 @@ def test_10kSync(rips_instance, initialize_test):
|
||||
case = rips_instance.project.load_case(path=casePath)
|
||||
|
||||
properties = case.available_nnc_properties()
|
||||
assert(len(properties) == 1)
|
||||
assert len(properties) == 1
|
||||
|
||||
assert("TRAN" == properties[0].name)
|
||||
assert(NNCProperties_pb2.NNCPropertyType.Value('NNC_STATIC') == properties[0].property_type)
|
||||
assert "TRAN" == properties[0].name
|
||||
assert (
|
||||
NNCProperties_pb2.NNCPropertyType.Value("NNC_STATIC")
|
||||
== properties[0].property_type
|
||||
)
|
||||
|
||||
nnc_connections = case.nnc_connections()
|
||||
assert(len(nnc_connections) == 3627)
|
||||
assert len(nnc_connections) == 3627
|
||||
|
||||
connection = nnc_connections[0]
|
||||
assert(connection.cell1.i == 33)
|
||||
assert(connection.cell1.j == 40)
|
||||
assert(connection.cell1.k == 14)
|
||||
assert(connection.cell_grid_index1 == 0)
|
||||
assert connection.cell1.i == 33
|
||||
assert connection.cell1.j == 40
|
||||
assert connection.cell1.k == 14
|
||||
assert connection.cell_grid_index1 == 0
|
||||
|
||||
tran_vals = case.nnc_connections_static_values("TRAN")
|
||||
assert(len(tran_vals) == len(nnc_connections))
|
||||
assert len(tran_vals) == len(nnc_connections)
|
||||
|
||||
for t in tran_vals:
|
||||
assert(isinstance(t, float))
|
||||
assert isinstance(t, float)
|
||||
|
||||
# Generate some data
|
||||
new_data = []
|
||||
@ -44,9 +47,9 @@ def test_10kSync(rips_instance, initialize_test):
|
||||
property_name = "NEW_PROP"
|
||||
case.set_nnc_connections_values(new_data, property_name, 0)
|
||||
new_prop_vals = case.nnc_connections_generated_values(property_name, 0)
|
||||
assert(len(new_prop_vals) == len(new_data))
|
||||
assert len(new_prop_vals) == len(new_data)
|
||||
for i in range(0, len(new_data)):
|
||||
assert(new_data[i] == new_prop_vals[i])
|
||||
assert new_data[i] == new_prop_vals[i]
|
||||
|
||||
# Set some other data for second time step
|
||||
for i in range(0, len(new_data)):
|
||||
@ -54,9 +57,9 @@ def test_10kSync(rips_instance, initialize_test):
|
||||
|
||||
case.set_nnc_connections_values(new_data, property_name, 1)
|
||||
new_prop_vals = case.nnc_connections_generated_values(property_name, 1)
|
||||
assert(len(new_prop_vals) == len(nnc_connections))
|
||||
assert len(new_prop_vals) == len(nnc_connections)
|
||||
for i in range(0, len(new_data)):
|
||||
assert(new_data[i] == new_prop_vals[i])
|
||||
assert new_data[i] == new_prop_vals[i]
|
||||
|
||||
|
||||
def test_non_existing_dynamic_values(rips_instance, initialize_test):
|
||||
|
@ -4,31 +4,35 @@ import pytest
|
||||
import grpc
|
||||
import tempfile
|
||||
|
||||
sys.path.insert(1, os.path.join(sys.path[0], '../../'))
|
||||
sys.path.insert(1, os.path.join(sys.path[0], "../../"))
|
||||
import rips
|
||||
|
||||
import dataroot
|
||||
|
||||
|
||||
def test_loadProject(rips_instance, initialize_test):
|
||||
project = rips_instance.project.open(dataroot.PATH + "/TEST10K_FLT_LGR_NNC/10KWithWellLog.rsp")
|
||||
project = rips_instance.project.open(
|
||||
dataroot.PATH + "/TEST10K_FLT_LGR_NNC/10KWithWellLog.rsp"
|
||||
)
|
||||
case = project.cases()[0]
|
||||
assert(case is not None)
|
||||
assert(case.name == "TEST10K_FLT_LGR_NNC")
|
||||
assert(case.id == 0)
|
||||
assert case is not None
|
||||
assert case.name == "TEST10K_FLT_LGR_NNC"
|
||||
assert case.id == 0
|
||||
cases = rips_instance.project.cases()
|
||||
assert(len(cases) is 1)
|
||||
assert len(cases) is 1
|
||||
|
||||
|
||||
def test_well_log_plots(rips_instance, initialize_test):
|
||||
project = rips_instance.project.open(dataroot.PATH + "/TEST10K_FLT_LGR_NNC/10KWithWellLog.rsp")
|
||||
project = rips_instance.project.open(
|
||||
dataroot.PATH + "/TEST10K_FLT_LGR_NNC/10KWithWellLog.rsp"
|
||||
)
|
||||
plots = project.plots()
|
||||
well_log_plots = []
|
||||
for plot in plots:
|
||||
if isinstance(plot, rips.WellLogPlot):
|
||||
assert(plot.depth_type == "MEASURED_DEPTH")
|
||||
assert plot.depth_type == "MEASURED_DEPTH"
|
||||
well_log_plots.append(plot)
|
||||
assert(len(well_log_plots) == 2)
|
||||
assert len(well_log_plots) == 2
|
||||
|
||||
with tempfile.TemporaryDirectory(prefix="rips") as tmpdirname:
|
||||
for well_log_plot in well_log_plots:
|
||||
@ -40,23 +44,30 @@ def test_well_log_plots(rips_instance, initialize_test):
|
||||
files = os.listdir(tmpdirname)
|
||||
print(files)
|
||||
if rips_instance.is_gui():
|
||||
assert(len(files) == 4)
|
||||
assert len(files) == 4
|
||||
else:
|
||||
assert(len(files) == 2)
|
||||
assert len(files) == 2
|
||||
|
||||
plots2 = project.plots()
|
||||
for plot2 in plots2:
|
||||
if isinstance(plot2, rips.WellLogPlot):
|
||||
assert(plot2.depth_type == "TRUE_VERTICAL_DEPTH_RKB")
|
||||
assert plot2.depth_type == "TRUE_VERTICAL_DEPTH_RKB"
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.platform.startswith('linux'), reason="Brugge is currently exceptionally slow on Linux")
|
||||
@pytest.mark.skipif(
|
||||
sys.platform.startswith("linux"),
|
||||
reason="Brugge is currently exceptionally slow on Linux",
|
||||
)
|
||||
def test_loadGridCaseGroup(rips_instance, initialize_test):
|
||||
case_paths = []
|
||||
case_paths.append(dataroot.PATH + "/Case_with_10_timesteps/Real0/BRUGGE_0000.EGRID")
|
||||
case_paths.append(dataroot.PATH + "/Case_with_10_timesteps/Real10/BRUGGE_0010.EGRID")
|
||||
grid_case_group = rips_instance.project.create_grid_case_group(case_paths=case_paths)
|
||||
assert(grid_case_group is not None and grid_case_group.group_id == 0)
|
||||
case_paths.append(
|
||||
dataroot.PATH + "/Case_with_10_timesteps/Real10/BRUGGE_0010.EGRID"
|
||||
)
|
||||
grid_case_group = rips_instance.project.create_grid_case_group(
|
||||
case_paths=case_paths
|
||||
)
|
||||
assert grid_case_group is not None and grid_case_group.group_id == 0
|
||||
|
||||
|
||||
def test_exportSnapshots(rips_instance, initialize_test):
|
||||
@ -67,9 +78,9 @@ def test_exportSnapshots(rips_instance, initialize_test):
|
||||
rips_instance.project.load_case(case_path)
|
||||
with tempfile.TemporaryDirectory(prefix="rips") as tmpdirname:
|
||||
print("Temporary folder: ", tmpdirname)
|
||||
rips_instance.set_export_folder(export_type='SNAPSHOTS', path=tmpdirname)
|
||||
rips_instance.set_export_folder(export_type="SNAPSHOTS", path=tmpdirname)
|
||||
rips_instance.project.export_snapshots()
|
||||
print(os.listdir(tmpdirname))
|
||||
# assert(len(os.listdir(tmpdirname)) > 0)
|
||||
# assert(len(os.listdir(tmpdirname)) > 0)
|
||||
for fileName in os.listdir(tmpdirname):
|
||||
assert(os.path.splitext(fileName)[1] == '.png')
|
||||
assert os.path.splitext(fileName)[1] == ".png"
|
||||
|
@ -4,7 +4,7 @@ import grpc
import pytest
import tempfile

sys.path.insert(1, os.path.join(sys.path[0], '../../'))
sys.path.insert(1, os.path.join(sys.path[0], "../../"))
import rips

import dataroot
@ -14,57 +14,57 @@ def test_10kAsync(rips_instance, initialize_test):
casePath = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
case = rips_instance.project.load_case(path=casePath)

resultChunks = case.active_cell_property_async('DYNAMIC_NATIVE', 'SOIL', 1)
resultChunks = case.active_cell_property_async("DYNAMIC_NATIVE", "SOIL", 1)
mysum = 0.0
count = 0
for chunk in resultChunks:
mysum += sum(chunk.values)
count += len(chunk.values)
average = mysum / count
assert(mysum == pytest.approx(621.768, abs=0.001))
assert(average != pytest.approx(0.0158893, abs=0.0000001))
assert(average == pytest.approx(0.0558893, abs=0.0000001))
assert mysum == pytest.approx(621.768, abs=0.001)
assert average != pytest.approx(0.0158893, abs=0.0000001)
assert average == pytest.approx(0.0558893, abs=0.0000001)


def test_10kSync(rips_instance, initialize_test):
casePath = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
case = rips_instance.project.load_case(path=casePath)

results = case.active_cell_property('DYNAMIC_NATIVE', 'SOIL', 1)
results = case.active_cell_property("DYNAMIC_NATIVE", "SOIL", 1)
mysum = sum(results)
average = mysum / len(results)
assert(mysum == pytest.approx(621.768, abs=0.001))
assert(average != pytest.approx(0.0158893, abs=0.0000001))
assert(average == pytest.approx(0.0558893, abs=0.0000001))
assert mysum == pytest.approx(621.768, abs=0.001)
assert average != pytest.approx(0.0158893, abs=0.0000001)
assert average == pytest.approx(0.0558893, abs=0.0000001)


def test_10k_set(rips_instance, initialize_test):
casePath = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
case = rips_instance.project.load_case(path=casePath)

results = case.active_cell_property('DYNAMIC_NATIVE', 'SOIL', 1)
case.set_active_cell_property(results, 'GENERATED', 'SOIL', 1)
results = case.active_cell_property("DYNAMIC_NATIVE", "SOIL", 1)
case.set_active_cell_property(results, "GENERATED", "SOIL", 1)


def test_10k_set_out_of_bounds(rips_instance, initialize_test):
casePath = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
case = rips_instance.project.load_case(path=casePath)

results = case.active_cell_property('DYNAMIC_NATIVE', 'SOIL', 1)
results = case.active_cell_property("DYNAMIC_NATIVE", "SOIL", 1)
results.append(5.0)
with pytest.raises(grpc.RpcError):
assert case.set_active_cell_property(results, 'GENERATED', 'SOIL', 1)
assert case.set_active_cell_property(results, "GENERATED", "SOIL", 1)


def test_10k_set_out_of_bounds_client(rips_instance, initialize_test):
casePath = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
case = rips_instance.project.load_case(path=casePath)

results = case.active_cell_property('DYNAMIC_NATIVE', 'SOIL', 1)
results = case.active_cell_property("DYNAMIC_NATIVE", "SOIL", 1)
case.chunk_size = len(results)
results.append(5.0)
with pytest.raises(IndexError):
assert case.set_active_cell_property(results, 'GENERATED', 'SOIL', 1)
assert case.set_active_cell_property(results, "GENERATED", "SOIL", 1)


def createResult(poroChunks, permxChunks):
@ -78,22 +78,23 @@ def createResult(poroChunks, permxChunks):
def checkResults(poroValues, permxValues, poropermxValues):
for (poro, permx, poropermx) in zip(poroValues, permxValues, poropermxValues):
recalc = poro * permx
assert(recalc == pytest.approx(poropermx, rel=1.0e-10))
assert recalc == pytest.approx(poropermx, rel=1.0e-10)


def test_10k_PoroPermX(rips_instance, initialize_test):
casePath = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
case = rips_instance.project.load_case(path=casePath)

poroChunks = case.active_cell_property_async('STATIC_NATIVE', 'PORO', 0)
permxChunks = case.active_cell_property_async('STATIC_NATIVE', 'PERMX', 0)
poroChunks = case.active_cell_property_async("STATIC_NATIVE", "PORO", 0)
permxChunks = case.active_cell_property_async("STATIC_NATIVE", "PERMX", 0)

case.set_active_cell_property_async(createResult(
poroChunks, permxChunks), 'GENERATED', 'POROPERMXAS', 0)
case.set_active_cell_property_async(
createResult(poroChunks, permxChunks), "GENERATED", "POROPERMXAS", 0
)

poro = case.active_cell_property('STATIC_NATIVE', 'PORO', 0)
permx = case.active_cell_property('STATIC_NATIVE', 'PERMX', 0)
poroPermX = case.active_cell_property('GENERATED', 'POROPERMXAS', 0)
poro = case.active_cell_property("STATIC_NATIVE", "PORO", 0)
permx = case.active_cell_property("STATIC_NATIVE", "PERMX", 0)
poroPermX = case.active_cell_property("GENERATED", "POROPERMXAS", 0)

checkResults(poro, permx, poroPermX)

@ -103,10 +104,10 @@ def test_exportPropertyInView(rips_instance, initialize_test):
rips_instance.project.load_case(case_path)
with tempfile.TemporaryDirectory(prefix="rips") as tmpdirname:
print("Temporary folder: ", tmpdirname)
rips_instance.set_export_folder(export_type='PROPERTIES', path=tmpdirname)
rips_instance.set_export_folder(export_type="PROPERTIES", path=tmpdirname)
case = rips_instance.project.cases()[0]
view = case.views()[0]
view.export_property()
expected_file_name = case.name + "-" + str("3D_View") + "-" + "T0" + "-SOIL"
full_path = tmpdirname + "/" + expected_file_name
assert(os.path.exists(full_path))
assert os.path.exists(full_path)
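The streaming pattern used by test_10k_PoroPermX (the body of createResult is elided in this hunk) can be sketched as a generator that multiplies matching chunks and feeds the products back as a generated property. This is a minimal sketch, not the repository's implementation; it assumes, as the test suggests, that each chunk exposes a values list and that set_active_cell_property_async accepts an iterable of per-chunk value lists.

def multiply_chunks(poro_chunks, permx_chunks):
    # Yield one list of poro * permx values per pair of streamed chunks.
    for poro_chunk, permx_chunk in zip(poro_chunks, permx_chunks):
        yield [p * k for p, k in zip(poro_chunk.values, permx_chunk.values)]


def store_poro_permx(case):
    # Stream PORO and PERMX, multiply them chunk by chunk, and store the
    # product as a generated property without holding all values in memory.
    poro_chunks = case.active_cell_property_async("STATIC_NATIVE", "PORO", 0)
    permx_chunks = case.active_cell_property_async("STATIC_NATIVE", "PERMX", 0)
    case.set_active_cell_property_async(
        multiply_chunks(poro_chunks, permx_chunks), "GENERATED", "POROPERMXAS", 0
    )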
@ -1,7 +1,7 @@
import sys
import os

sys.path.insert(1, os.path.join(sys.path[0], '../../'))
sys.path.insert(1, os.path.join(sys.path[0], "../../"))
import rips

import dataroot
@ -10,32 +10,32 @@ import dataroot
def test_10k(rips_instance, initialize_test):
case_path = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
case = rips_instance.project.load_case(path=case_path)
assert(len(case.grids()) == 2)
assert len(case.grids()) == 2
cell_count_info = case.cell_count()

sim_wells = case.simulation_wells()
assert(len(sim_wells) == 3)
assert len(sim_wells) == 3

assert(sim_wells[0].name == "GI1")
assert(sim_wells[1].name == "GP1")
assert(sim_wells[2].name == "GP2")
assert sim_wells[0].name == "GI1"
assert sim_wells[1].name == "GP1"
assert sim_wells[2].name == "GP2"

timesteps = case.time_steps()

# On time step 0 all simulation wells are undefined
for sim_well in sim_wells:
status = sim_well.status(0)
assert(status.well_type == "NotDefined")
assert status.well_type == "NotDefined"

# On time step 3 all wells are producing
for sim_well in sim_wells:
status = sim_well.status(3)
assert(status.well_type == "Producer")
assert status.well_type == "Producer"

# On time step 0 all simulation wells have no cells
for sim_well in sim_wells:
cells = sim_well.cells(0)
assert(len(cells) == 0)
assert len(cells) == 0

# On the other time steps there should be cells
expected_cell_count = {}
@ -44,8 +44,14 @@ def test_10k(rips_instance, initialize_test):
expected_cell_count["GP2"] = 18
for sim_well in sim_wells:
for (tidx, timestep) in enumerate(timesteps):
if (tidx > 0):
if tidx > 0:
cells = sim_well.cells(tidx)
print("well: " + sim_well.name + " timestep: " +
str(tidx) + " cells:" + str(len(cells)))
assert(len(cells) == expected_cell_count[sim_well.name])
print(
"well: "
+ sim_well.name
+ " timestep: "
+ str(tidx)
+ " cells:"
+ str(len(cells))
)
assert len(cells) == expected_cell_count[sim_well.name]

@ -6,7 +6,7 @@ import os
import shutil
import tempfile

sys.path.insert(1, os.path.join(sys.path[0], '../../'))
sys.path.insert(1, os.path.join(sys.path[0], "../../"))
import rips

import dataroot
@ -15,58 +15,58 @@ import dataroot
def test_summary_import_and_find(rips_instance, initialize_test):
casePath = dataroot.PATH + "/flow_diagnostics_test/SIMPLE_SUMMARY2.SMSPEC"
summary_case = rips_instance.project.import_summary_case(casePath)
assert(summary_case.id == 1)
assert summary_case.id == 1

case_id = 234
found_summary_case = rips_instance.project.summary_case(case_id)
assert(found_summary_case is None)
assert found_summary_case is None

correct_case_id = 1
found_summary_case = rips_instance.project.summary_case(correct_case_id)
assert(found_summary_case is not None)
assert found_summary_case is not None

rips_instance.project.close()
correct_case_id = 1
found_summary_case = rips_instance.project.summary_case(correct_case_id)
assert(found_summary_case is None)
assert found_summary_case is None


def test_summary_data(rips_instance, initialize_test):
casePath = dataroot.PATH + "/flow_diagnostics_test/SIMPLE_SUMMARY2.SMSPEC"
summary_case = rips_instance.project.import_summary_case(casePath)
assert(summary_case.id == 1)
assert summary_case.id == 1

addresses = summary_case.available_addresses()
assert(len(addresses.values) == 343)
assert len(addresses.values) == 343

summary_data = summary_case.summary_vector_values("FOPT")
assert(len(summary_data.values) == 60)
assert len(summary_data.values) == 60


def test_summary_resample(rips_instance, initialize_test):
casePath = dataroot.PATH + "/flow_diagnostics_test/SIMPLE_SUMMARY2.SMSPEC"
summary_case = rips_instance.project.import_summary_case(casePath)
assert(summary_case.id == 1)
assert summary_case.id == 1

summary_data_sampled = summary_case.resample_values("FOPT", "NONE")
assert(len(summary_data_sampled.values) == 60)
assert(len(summary_data_sampled.time_steps) == 60)
assert len(summary_data_sampled.values) == 60
assert len(summary_data_sampled.time_steps) == 60

summary_data_sampled = summary_case.resample_values("FOPT", "DAY")
assert(len(summary_data_sampled.values) == 721)
assert(len(summary_data_sampled.time_steps) == 721)
assert len(summary_data_sampled.values) == 721
assert len(summary_data_sampled.time_steps) == 721

summary_data_sampled = summary_case.resample_values("FOPT", "MONTH")
assert(len(summary_data_sampled.values) == 24)
assert(len(summary_data_sampled.time_steps) == 24)
assert len(summary_data_sampled.values) == 24
assert len(summary_data_sampled.time_steps) == 24

summary_data_sampled = summary_case.resample_values("FOPT", "QUARTER")
assert(len(summary_data_sampled.values) == 8)
assert(len(summary_data_sampled.time_steps) == 8)
assert len(summary_data_sampled.values) == 8
assert len(summary_data_sampled.time_steps) == 8

summary_data_sampled = summary_case.resample_values("FOPT", "YEAR")
assert(len(summary_data_sampled.values) == 3)
assert(len(summary_data_sampled.time_steps) == 3)
assert len(summary_data_sampled.values) == 3
assert len(summary_data_sampled.time_steps) == 3


@contextlib.contextmanager
@ -79,14 +79,18 @@ def cd(newdir, cleanup=lambda: True):
os.chdir(prevdir)
cleanup()


@contextlib.contextmanager
def tempdir():
dirpath = tempfile.mkdtemp()

def cleanup():
shutil.rmtree(dirpath)

with cd(dirpath, cleanup):
yield dirpath


# This test ensures that missing unsmry file is handeled gracefully
def test_summary_no_unsmry(rips_instance, initialize_test):
casePathRelative = dataroot.PATH + "/flow_diagnostics_test/SIMPLE_SUMMARY2.SMSPEC"
@ -103,13 +107,12 @@ def test_summary_no_unsmry(rips_instance, initialize_test):
summary_case = rips_instance.project.import_summary_case(temp_path)

values = summary_case.summary_vector_values()
assert(len(values.values) == 1)
assert len(values.values) == 1

time_steps = summary_case.available_time_steps()
assert(len(time_steps.values) == 1)
assert len(time_steps.values) == 1

addresses = summary_case.available_addresses()
assert(len(addresses.values) == 1)
assert len(addresses.values) == 1

summary_case.resample_values()
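The resampling calls above all follow one pattern: a summary vector name plus a period string ("NONE", "DAY", "MONTH", "QUARTER", "YEAR"). A minimal sketch, assuming a connected rips_instance as in the test fixtures and a valid SMSPEC path, and using only calls shown in this diff:

def monthly_fopt(rips_instance, smspec_path):
    # Import the summary case and resample the FOPT vector to monthly values.
    summary_case = rips_instance.project.import_summary_case(smspec_path)
    resampled = summary_case.resample_values("FOPT", "MONTH")
    return list(zip(resampled.time_steps, resampled.values))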
@ -1,7 +1,7 @@
import sys
import os

sys.path.insert(1, os.path.join(sys.path[0], '../../'))
sys.path.insert(1, os.path.join(sys.path[0], "../../"))
import rips

import dataroot
@ -11,10 +11,13 @@ def test_10k(rips_instance, initialize_test):
case_root_path = dataroot.PATH + "/TEST10K_FLT_LGR_NNC"
case_path = case_root_path + "/TEST10K_FLT_LGR_NNC.EGRID"
case = rips_instance.project.load_case(path=case_path)
assert(len(case.grids()) == 2)
well_path_files = [case_root_path + "/wellpath_a.dev", case_root_path + "/wellpath_b.dev"]
assert len(case.grids()) == 2
well_path_files = [
case_root_path + "/wellpath_a.dev",
case_root_path + "/wellpath_b.dev",
]
well_path_names = rips_instance.project.import_well_paths(well_path_files)
wells = rips_instance.project.well_paths()
assert(len(wells) == 2)
assert(wells[0].name == "Well Path A")
assert(wells[1].name == "Well Path B")
assert len(wells) == 2
assert wells[0].name == "Well Path A"
assert wells[1].name == "Well Path B"
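The well path import above is driven by two calls: import_well_paths takes a list of .dev files, and well_paths returns the resulting objects. A minimal sketch, assuming a connected rips_instance and existing deviation files (the argument names are placeholders):

def import_and_list_well_paths(rips_instance, dev_files):
    # Import deviation files and return the names of the resulting well paths.
    rips_instance.project.import_well_paths(dev_files)
    return [well.name for well in rips_instance.project.well_paths()]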
@ -34,11 +34,12 @@ def apply_cell_result(self, result_type, result_variable):

@add_method(View)
def apply_flow_diagnostics_cell_result(
self,
result_variable='TOF',
selection_mode='FLOW_TR_BY_SELECTION',
injectors=None,
producers=None):
self,
result_variable="TOF",
selection_mode="FLOW_TR_BY_SELECTION",
injectors=None,
producers=None,
):
"""Apply a flow diagnostics cell result

**Parameters**::
@ -68,7 +69,7 @@ def apply_flow_diagnostics_cell_result(
cell_result.result_type = "FLOW_DIAGNOSTICS"
cell_result.result_variable = result_variable
cell_result.flow_tracer_selection_mode = selection_mode
if selection_mode == 'FLOW_TR_BY_SELECTION':
if selection_mode == "FLOW_TR_BY_SELECTION":
cell_result.selected_injector_tracers = injectors
cell_result.selected_producer_tracers = producers
cell_result.update()
@ -77,8 +78,9 @@ def apply_flow_diagnostics_cell_result(
@add_method(View)
def clone(self):
"""Clone the current view"""
view_id = self._execute_command(cloneView=Cmd.CloneViewRequest(
viewId=self.id)).createViewResult.viewId
view_id = self._execute_command(
cloneView=Cmd.CloneViewRequest(viewId=self.id)
).createViewResult.viewId
return self.case().view(view_id)


@ -86,14 +88,17 @@ def clone(self):
def set_time_step(self, time_step):
"""Set the time step for current view"""
case_id = self.case().id
return self._execute_command(setTimeStep=Cmd.SetTimeStepParams(
caseId=case_id, viewId=self.id, timeStep=time_step))
return self._execute_command(
setTimeStep=Cmd.SetTimeStepParams(
caseId=case_id, viewId=self.id, timeStep=time_step
)
)


@add_method(View)
def export_sim_well_fracture_completions(self, time_step,
simulation_well_names, file_split,
compdat_export):
def export_sim_well_fracture_completions(
self, time_step, simulation_well_names, file_split, compdat_export
):
"""Export fracture completions for simulation wells

**Parameters**::
@ -111,13 +116,13 @@ def export_sim_well_fracture_completions(self, time_step,
----------------------------------- | ------------
"UNIFIED_FILE" <b>Default Option</b>| A single file with all transmissibilities
"SPLIT_ON_WELL" | One file for each well transmissibilities
"SPLIT_ON_WELL_AND_COMPLETION_TYPE" | One file for each completion type for each well
"SPLIT_ON_WELL_AND_COMPLETION_TYPE" | One file for each completion type for each well

**Enum compdat_export**::

Option | Description
-----------------------------------------| ------------
"TRANSMISSIBILITIES"<b>Default Option</b>| Direct export of transmissibilities
"TRANSMISSIBILITIES"<b>Default Option</b>| Direct export of transmissibilities
"WPIMULT_AND_DEFAULT_CONNECTION_FACTORS" | Include export of WPIMULT

"""
@ -132,15 +137,19 @@ def export_sim_well_fracture_completions(self, time_step,
timeStep=time_step,
simulationWellNames=simulation_well_names,
fileSplit=file_split,
compdatExport=compdat_export))
compdatExport=compdat_export,
)
)


@add_method(View)
def export_visible_cells(self,
export_keyword='FLUXNUM',
visible_active_cells_value=1,
hidden_active_cells_value=0,
inactive_cells_value=0):
def export_visible_cells(
self,
export_keyword="FLUXNUM",
visible_active_cells_value=1,
hidden_active_cells_value=0,
inactive_cells_value=0,
):
"""Export special properties for all visible cells.

Arguments:
@ -158,12 +167,14 @@ def export_visible_cells(self,
exportKeyword=export_keyword,
visibleActiveCellsValue=visible_active_cells_value,
hiddenActiveCellsValue=hidden_active_cells_value,
inactiveCellsValue=inactive_cells_value))
inactiveCellsValue=inactive_cells_value,
)
)


@add_method(View)
def export_property(self, undefined_value=0.0):
""" Export the current Eclipse property from the view
"""Export the current Eclipse property from the view

Arguments:
undefined_value (double): Value to use for undefined values. Defaults to 0.0
@ -171,22 +182,22 @@ def export_property(self, undefined_value=0.0):
case_id = self.case().id
return self._execute_command(
exportPropertyInViews=Cmd.ExportPropertyInViewsRequest(
caseId=case_id,
viewIds=[self.id],
undefinedValue=undefined_value))
caseId=case_id, viewIds=[self.id], undefinedValue=undefined_value
)
)


@add_method(ViewWindow)
def case(self):
"""Get the case the view belongs to"""
mycase = self.ancestor(rips.case.Case)
assert(mycase is not None)
assert mycase is not None
return mycase


@add_method(ViewWindow)
def export_snapshot(self, prefix='', export_folder=''):
""" Export snapshot for the current view
def export_snapshot(self, prefix="", export_folder=""):
"""Export snapshot for the current view

Arguments:
prefix (str): Exported file name prefix
@ -194,8 +205,11 @@ def export_snapshot(self, prefix='', export_folder=''):
"""
case_id = self.case().id
return self._execute_command(
exportSnapshots=Cmd.ExportSnapshotsRequest(type='VIEWS',
prefix=prefix,
caseId=case_id,
viewId=self.id,
exportFolder=export_folder))
exportSnapshots=Cmd.ExportSnapshotsRequest(
type="VIEWS",
prefix=prefix,
caseId=case_id,
viewId=self.id,
exportFolder=export_folder,
)
)
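Taken together, the View methods touched in this file support a small scripting workflow: pick a view, apply a flow diagnostics result, then step through time and grab snapshots. A minimal sketch, assuming a connected rips_instance with a loaded case; the injector and producer tracer names are placeholders, not values from this repository:

def snapshot_flow_diagnostics(rips_instance, time_steps):
    # Pick the first view of the first case, apply a TOF flow diagnostics
    # result, then snapshot the view at each requested time step.
    view = rips_instance.project.cases()[0].views()[0]
    view.apply_flow_diagnostics_cell_result(
        result_variable="TOF",
        selection_mode="FLOW_TR_BY_SELECTION",
        injectors=["INJ-1"],   # placeholder tracer name
        producers=["PROD-1"],  # placeholder tracer name
    )
    for time_step in time_steps:
        view.set_time_step(time_step)
        view.export_snapshot(prefix="tof_")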
@ -10,8 +10,16 @@ from .resinsight_classes import WellLogPlot


@add_method(WellLogPlot)
def export_data_as_las(self, export_folder, file_prefix='', export_tvdrkb=False, capitalize_file_names=False, resample_interval=0.0, convert_to_standard_units=False):
""" Export LAS file(s) for the current plot
def export_data_as_las(
self,
export_folder,
file_prefix="",
export_tvdrkb=False,
capitalize_file_names=False,
resample_interval=0.0,
convert_to_standard_units=False,
):
"""Export LAS file(s) for the current plot

Arguments:
export_folder(str): The path to export to. By default will use the global export folder
@ -23,20 +31,26 @@ def export_data_as_las(self, export_folder, file_prefix='', export_tvdrkb=False,
Returns:
A list of files exported
"""
res = self._execute_command(exportWellLogPlotData=Commands_pb2.ExportWellLogPlotDataRequest(exportFormat='LAS',
viewId=self.id,
exportFolder=export_folder,
filePrefix=file_prefix,
exportTvdRkb=export_tvdrkb,
capitalizeFileNames=capitalize_file_names,
resampleInterval=resample_interval,
convertCurveUnits=convert_to_standard_units))
res = self._execute_command(
exportWellLogPlotData=Commands_pb2.ExportWellLogPlotDataRequest(
exportFormat="LAS",
viewId=self.id,
exportFolder=export_folder,
filePrefix=file_prefix,
exportTvdRkb=export_tvdrkb,
capitalizeFileNames=capitalize_file_names,
resampleInterval=resample_interval,
convertCurveUnits=convert_to_standard_units,
)
)
return res.exportWellLogPlotDataResult.exportedFiles


@add_method(WellLogPlot)
def export_data_as_ascii(self, export_folder, file_prefix='', capitalize_file_names=False):
""" Export LAS file(s) for the current plot
def export_data_as_ascii(
self, export_folder, file_prefix="", capitalize_file_names=False
):
"""Export LAS file(s) for the current plot

Arguments:
export_folder(str): The path to export to. By default will use the global export folder
@ -46,11 +60,15 @@ def export_data_as_ascii(self, export_folder, file_prefix='', capitalize_file_na
Returns:
A list of files exported
"""
res = self._execute_command(exportWellLogPlotData=Commands_pb2.ExportWellLogPlotDataRequest(exportFormat='ASCII',
viewId=self.id,
exportFolder=export_folder,
filePrefix=file_prefix,
exportTvdRkb=False,
capitalizeFileNames=capitalize_file_names,
resampleInterval=0.0))
res = self._execute_command(
exportWellLogPlotData=Commands_pb2.ExportWellLogPlotDataRequest(
exportFormat="ASCII",
viewId=self.id,
exportFolder=export_folder,
filePrefix=file_prefix,
exportTvdRkb=False,
capitalizeFileNames=capitalize_file_names,
resampleInterval=0.0,
)
)
return res.exportWellLogPlotDataResult.exportedFiles
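For completeness, the well log export entry points defined here mirror how the tests drive them: find the WellLogPlot objects on the project and call the exporter. A minimal sketch, assuming a connected rips_instance whose project contains well log plots; the output folder is a placeholder:

import rips


def export_all_well_log_plots_as_las(rips_instance, output_folder):
    # Collect every WellLogPlot in the project and export it as LAS files.
    exported = []
    for plot in rips_instance.project.plots():
        if isinstance(plot, rips.WellLogPlot):
            # export_data_as_las returns the list of files it wrote
            exported.extend(plot.export_data_as_las(export_folder=output_folder))
    return exported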