Add Python linting using black (#7276)

Magne Sjaastad 2021-01-26 20:48:01 +01:00 committed by GitHub
parent 0ec612ae86
commit 1bacd41037
61 changed files with 1331 additions and 966 deletions

.github/workflows/python-linting.yml (new file, 23 lines added)
View File

@@ -0,0 +1,23 @@
name: Python Linting

on: [push, pull_request]

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
      - name: (Python) Use black to do linting
        run: |
          pip install black
          cd GrpcInterface
          black .
      - uses: peter-evans/create-pull-request@v3
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          commit-message: 'Python code linting changes detected by black'
          title: 'Fixes by black (Python)'
          branch: python-black-patches
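
The workflow runs black as an auto-formatter rather than a check-only step: it rewrites everything under GrpcInterface and opens a pull request on the python-black-patches branch whenever changes are detected. A minimal local equivalent, assuming black has been installed with "pip install black" and the script is run from the repository root, is:

import subprocess

# Reformat the GrpcInterface directory in place, mirroring the CI step above.
subprocess.run(["black", "GrpcInterface"], check=True)
# Or only verify formatting (black exits with a non-zero code if files would change).
subprocess.run(["black", "--check", "GrpcInterface"], check=False)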

View File

@@ -12,17 +12,18 @@
 #
 import os
 import sys
-sys.path.insert(0, os.path.abspath('../../'))
+sys.path.insert(0, os.path.abspath("../../"))

 # -- Project information -----------------------------------------------------
-project = 'rips'
-copyright = '2019, Ceetron Solutions AS'
-author = 'Ceetron Solutions AS'
+project = "rips"
+copyright = "2019, Ceetron Solutions AS"
+author = "Ceetron Solutions AS"

 # The full version, including alpha/beta/rc tags
-release = '2019.04.01'
+release = "2019.04.01"

 # -- General configuration ---------------------------------------------------
@@ -30,23 +31,19 @@ release = '2019.04.01'
 # Add any Sphinx extension module names here, as strings. They can be
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
 # ones.
-extensions = [
-    'sphinx.ext.autodoc',
-    'sphinx.ext.napoleon',
-    'sphinx_markdown_builder'
-]
+extensions = ["sphinx.ext.autodoc", "sphinx.ext.napoleon", "sphinx_markdown_builder"]

-master_doc = 'PythonRips'
+master_doc = "PythonRips"

 napoleon_google_docstring = True

 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]

 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
 # This pattern also affects html_static_path and html_extra_path.
-exclude_patterns = ['build/*', 'rips.rst']
+exclude_patterns = ["build/*", "rips.rst"]

 # -- Options for HTML output -------------------------------------------------
@@ -54,14 +51,14 @@ exclude_patterns = ['build/*', 'rips.rst']
 # The theme to use for HTML and HTML Help pages. See the documentation for
 # a list of builtin themes.
 #
-html_theme = 'alabaster'
+html_theme = "alabaster"

 smartquotes = False

 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = ["_static"]

 # -- Extension configuration -------------------------------------------------

View File

@@ -7,12 +7,12 @@
 import rips

 # Connect to ResInsight
 resinsight = rips.Instance.find()
 if resinsight is not None:
     # Get a list of all cases
     cases = resinsight.project.cases()
-    print ("Got " + str(len(cases)) + " cases: ")
+    print("Got " + str(len(cases)) + " cases: ")
     for case in cases:
         print("Case id: " + str(case.id))
         print("Case name: " + case.name)
@@ -26,11 +26,14 @@ if resinsight is not None:
             print("Month: " + str(t.month))

         if isinstance(case, rips.EclipseCase):
-            print ("Getting coarsening info for case: ", case.name, case.id)
+            print("Getting coarsening info for case: ", case.name, case.id)
             coarsening_info = case.coarsening_info()
             if coarsening_info:
                 print("Coarsening information:")
                 for c in coarsening_info:
-                    print("[{}, {}, {}] - [{}, {}, {}]".format(c.min.x, c.min.y, c.min.z,
-                                                               c.max.x, c.max.y, c.max.z))
+                    print(
+                        "[{}, {}, {}] - [{}, {}, {}]".format(
+                            c.min.x, c.min.y, c.min.z, c.max.x, c.max.y, c.max.z
+                        )
+                    )

View File

@@ -7,7 +7,7 @@
 import rips

 # Connect to ResInsight
 resinsight = rips.Instance.find()
 if resinsight is not None:
     # Get a list of all wells
     cases = resinsight.project.cases()
@@ -24,4 +24,13 @@ if resinsight is not None:
             for (tidx, timestep) in enumerate(timesteps):
                 status = sim_well.status(tidx)
                 cells = sim_well.cells(tidx)
-                print("timestep: " + str(tidx) + " type: " + status.well_type + " open: " + str(status.is_open) + " cells:" + str(len(cells)))
+                print(
+                    "timestep: "
+                    + str(tidx)
+                    + " type: "
+                    + status.well_type
+                    + " open: "
+                    + str(status.is_open)
+                    + " cells:"
+                    + str(len(cells))
+                )

View File

@@ -7,11 +7,11 @@
 import rips

 # Connect to ResInsight
 resinsight = rips.Instance.find()
 if resinsight is not None:
     # Get a list of all wells
     wells = resinsight.project.well_paths()
-    print ("Got " + str(len(wells)) + " wells: ")
+    print("Got " + str(len(wells)) + " wells: ")
     for well in wells:
         print("Well name: " + well.name)

View File

@@ -25,5 +25,3 @@ for wbsplot in wbsplots:
     params.update()
     wbsplot.update()
     wbsplot.export_snapshot(export_folder=dirname)

View File

@@ -1,21 +1,27 @@
 import os
 import rips

 resinsight = rips.Instance.find()

 case_paths = []
-case_paths.append("C:/Users/lindk/source/repos/ResInsight/TestModels/Case_with_10_timesteps/Real0/BRUGGE_0000.EGRID")
-case_paths.append("C:/Users/lindk/source/repos/ResInsight/TestModels/Case_with_10_timesteps/Real10/BRUGGE_0010.EGRID")
+case_paths.append(
+    "C:/Users/lindk/source/repos/ResInsight/TestModels/Case_with_10_timesteps/Real0/BRUGGE_0000.EGRID"
+)
+case_paths.append(
+    "C:/Users/lindk/source/repos/ResInsight/TestModels/Case_with_10_timesteps/Real10/BRUGGE_0010.EGRID"
+)
 for case_path in case_paths:
-    assert os.path.exists(case_path), "You need to set valid case paths for this script to work"
+    assert os.path.exists(
+        case_path
+    ), "You need to set valid case paths for this script to work"

 case_group = resinsight.project.create_grid_case_group(case_paths=case_paths)

 case_group.print_object_info()

-#stat_cases = caseGroup.statistics_cases()
-#case_ids = []
-#for stat_case in stat_cases:
+# stat_cases = caseGroup.statistics_cases()
+# case_ids = []
+# for stat_case in stat_cases:
 #    stat_case.set_dynamic_properties_to_calculate(["SWAT"])
 #    case_ids.append(stat_case.id)
@@ -24,4 +30,3 @@ case_group.compute_statistics()
 view = case_group.views()[0]
 cell_result = view.cell_result()
 cell_result.set_result_variable("PRESSURE_DEV")

View File

@@ -6,7 +6,7 @@
 import rips

 # Connect to ResInsight
 resinsight = rips.Instance.find()

 # Get the first case. This will fail if you haven't loaded any cases
 case = resinsight.project.cases()[0]
@@ -20,7 +20,7 @@ print("Total number of reservoir cells: " + str(cell_counts.reservoir_cell_count
 active_cell_infos = case.cell_info_for_active_cells()

 # A simple check on the size of the cell info
-assert(cell_counts.active_cell_count == len(active_cell_infos))
+assert cell_counts.active_cell_count == len(active_cell_infos)

 # Print information for the first active cell
 print("First active cell: ")

View File

@@ -3,15 +3,13 @@
 ######################################################################
 import rips

 resinsight = rips.Instance.find()

 view = resinsight.project.views()[0]
 results = view.cell_result_data()
-print ("Number of result values: ", len(results))
+print("Number of result values: ", len(results))

 newresults = []
 for i in range(0, len(results)):
     newresults.append(results[i] * -1.0)
 view.set_cell_result_data(newresults)

View File

@@ -26,7 +26,7 @@ view2 = view1.clone()
 view1.set_time_step(time_step=2)

 # Set cell result to SOIL
-view1.apply_cell_result(result_type='DYNAMIC_NATIVE', result_variable='SOIL')
+view1.apply_cell_result(result_type="DYNAMIC_NATIVE", result_variable="SOIL")

 # Create a temporary directory which will disappear at the end of this script
@@ -35,13 +35,13 @@ with tempfile.TemporaryDirectory(prefix="rips") as tmpdirname:
     print("Temporary folder: ", tmpdirname)

     # Set export folder for snapshots and properties
-    resinsight.set_export_folder(export_type='SNAPSHOTS', path=tmpdirname)
-    resinsight.set_export_folder(export_type='PROPERTIES', path=tmpdirname)
+    resinsight.set_export_folder(export_type="SNAPSHOTS", path=tmpdirname)
+    resinsight.set_export_folder(export_type="PROPERTIES", path=tmpdirname)

     # Export all snapshots
     resinsight.project.export_snapshots()

-    assert(len(os.listdir(tmpdirname)) > 0)
+    assert len(os.listdir(tmpdirname)) > 0

     # Export properties in the view
     view1.export_property()
@@ -53,5 +53,4 @@ with tempfile.TemporaryDirectory(prefix="rips") as tmpdirname:
     # Print contents of temporary folder
     print(os.listdir(tmpdirname))

-    assert(os.path.exists(full_path))
+    assert os.path.exists(full_path)

View File

@@ -19,8 +19,10 @@ print("Facies properties file path:", facies_properties_file_path)

 # Create stim plan model template
 fmt_collection = project.descendants(rips.StimPlanModelTemplateCollection)[0]
-stim_plan_model_template = fmt_collection.new_stim_plan_model_template(elastic_properties_file_path=elastic_properties_file_path,
-                                                                       facies_properties_file_path=facies_properties_file_path)
+stim_plan_model_template = fmt_collection.new_stim_plan_model_template(
+    elastic_properties_file_path=elastic_properties_file_path,
+    facies_properties_file_path=facies_properties_file_path,
+)
 stim_plan_model_template.overburden_formation = "Garn"
 stim_plan_model_template.overburden_facies = "Shale"
 stim_plan_model_template.underburden_formation = "Garn"
@@ -49,7 +51,9 @@ non_net_layers.update()

 # Add some scaling factors
 elastic_properties = stim_plan_model_template.elastic_properties()
-elastic_properties.add_property_scaling(formation="Garn", facies="Calcite", property="YOUNGS_MODULUS", scale=1.44)
+elastic_properties.add_property_scaling(
+    formation="Garn", facies="Calcite", property="YOUNGS_MODULUS", scale=1.44
+)

 well_name = "B-2 H"
@@ -73,20 +77,24 @@ export_folder = tempfile.gettempdir()

 stim_plan_models = []

 # Create and export a StimPlan model for each depth
-measured_depths = [ 3200.0, 3400.0, 3600.0 ]
+measured_depths = [3200.0, 3400.0, 3600.0]
 for measured_depth in measured_depths:

     # Create stim plan model at a give measured depth
-    stim_plan_model = stim_plan_model_collection.new_stim_plan_model(eclipse_case=case,
-                                                                     time_step=time_step,
-                                                                     well_path=well_path,
-                                                                     measured_depth=measured_depth,
-                                                                     stim_plan_model_template=stim_plan_model_template)
+    stim_plan_model = stim_plan_model_collection.new_stim_plan_model(
+        eclipse_case=case,
+        time_step=time_step,
+        well_path=well_path,
+        measured_depth=measured_depth,
+        stim_plan_model_template=stim_plan_model_template,
+    )
     stim_plan_models.append(stim_plan_model)

     # Make the well name safer to use as a directory path
     well_name_part = well_name.replace(" ", "_")
-    directory_path = Path(export_folder) / '{}_{}'.format(well_name_part, int(measured_depth))
+    directory_path = Path(export_folder) / "{}_{}".format(
+        well_name_part, int(measured_depth)
+    )

     # Create the folder
     directory_path.mkdir(parents=True, exist_ok=True)
@@ -95,14 +103,17 @@ for measured_depth in measured_depths:
     stim_plan_model.export_to_file(directory_path=directory_path.as_posix())

     # Create a fracture mode plot
-    stim_plan_model_plot_collection = project.descendants(rips.StimPlanModelPlotCollection)[0]
-    stim_plan_model_plot = stim_plan_model_plot_collection.new_stim_plan_model_plot(stim_plan_model=stim_plan_model)
+    stim_plan_model_plot_collection = project.descendants(
+        rips.StimPlanModelPlotCollection
+    )[0]
+    stim_plan_model_plot = stim_plan_model_plot_collection.new_stim_plan_model_plot(
+        stim_plan_model=stim_plan_model
+    )

     print("Exporting fracture model plot to: ", directory_path)
     stim_plan_model_plot.export_snapshot(export_folder=directory_path.as_posix())

 print("Setting measured depth and perforation length.")
 stim_plan_models[0].measured_depth = 3300.0
 stim_plan_models[0].perforation_length = 123.445

View File

@@ -3,6 +3,7 @@ import grpc
 # Load ResInsight Processing Server Client Library
 import rips
+
 # Connect to ResInsight instance
 resInsight = rips.Instance.find()
@@ -24,7 +25,7 @@ params.user_ucs = 123

 # Loop through all cases
 for case in cases:
-    assert(isinstance(case, rips.GeoMechCase))
+    assert isinstance(case, rips.GeoMechCase)
     min_res_depth, max_res_depth = case.reservoir_depth_range()

     # Find a good output path
@@ -32,12 +33,18 @@ for case in cases:
     folder_name = os.path.dirname(case_path)

     # Import formation names
-    case.import_formation_names(formation_files=['D:/Projects/ResInsight-regression-test/ModelData/norne/Norne_ATW2013.lyr'])
+    case.import_formation_names(
+        formation_files=[
+            "D:/Projects/ResInsight-regression-test/ModelData/norne/Norne_ATW2013.lyr"
+        ]
+    )

     # create a folder to hold the snapshots
-    dirname = os.path.join(folder_name, 'snapshots')
+    dirname = os.path.join(folder_name, "snapshots")
     print("Exporting to: " + dirname)

     for well_path in well_paths[0:4]:  # Loop through the first five well paths
         # Create plot with parameters
-        wbsplot = case.create_well_bore_stability_plot(well_path=well_path.name, time_step=0, parameters=params)
+        wbsplot = case.create_well_bore_stability_plot(
+            well_path=well_path.name, time_step=0, parameters=params
+        )

View File

@@ -7,7 +7,7 @@ import rips
 import grpc
 import tempfile

 resinsight = rips.Instance.find()

 case = None
@@ -15,36 +15,46 @@ case = None
 try:
     case = resinsight.project.load_case("Nonsense")
 except grpc.RpcError as e:
-    print("Expected Server Exception Received while loading case: ", e.code(), e.details())
+    print(
+        "Expected Server Exception Received while loading case: ", e.code(), e.details()
+    )

 # Try loading well paths from a non-existing folder. We should get a grpc.RpcError exception from the server
 try:
-    well_path_files = resinsight.project.import_well_paths(well_path_folder="NONSENSE/NONSENSE")
+    well_path_files = resinsight.project.import_well_paths(
+        well_path_folder="NONSENSE/NONSENSE"
+    )
 except grpc.RpcError as e:
-    print("Expected Server Exception Received while loading wellpaths: ", e.code(), e.details())
+    print(
+        "Expected Server Exception Received while loading wellpaths: ",
+        e.code(),
+        e.details(),
+    )

 # Try loading well paths from an existing but empty folder. We should get a warning.
 try:
     with tempfile.TemporaryDirectory() as tmpdirname:
-        well_path_files = resinsight.project.import_well_paths(well_path_folder=tmpdirname)
-        assert(len(well_path_files) == 0)
-        assert(resinsight.project.has_warnings())
+        well_path_files = resinsight.project.import_well_paths(
+            well_path_folder=tmpdirname
+        )
+        assert len(well_path_files) == 0
+        assert resinsight.project.has_warnings()
         print("Should get warnings below")
         for warning in resinsight.project.warnings():
-            print (warning)
+            print(warning)
 except grpc.RpcError as e:
     print("Unexpected Server Exception caught!!!", e)

 case = resinsight.project.case(case_id=0)
 if case is not None:
-    results = case.active_cell_property('STATIC_NATIVE', 'PORO', 0)
+    results = case.active_cell_property("STATIC_NATIVE", "PORO", 0)
     active_cell_count = len(results)

     # Send the results back to ResInsight inside try / except construct
     try:
-        case.set_active_cell_property(results, 'GENERATED', 'POROAPPENDED', 0)
+        case.set_active_cell_property(results, "GENERATED", "POROAPPENDED", 0)
         print("Everything went well as expected")
     except:  # Match any exception, but it should not happen
         print("Ooops!")

     # Add another value, so this is outside the bounds of the active cell result storage
@@ -52,12 +62,12 @@ if case is not None:

     # This time we should get a grpc.RpcError exception, which is a server side error.
     try:
-        case.set_active_cell_property(results, 'GENERATED', 'POROAPPENDED', 0)
+        case.set_active_cell_property(results, "GENERATED", "POROAPPENDED", 0)
         print("Everything went well??")
     except grpc.RpcError as e:
         print("Expected Server Exception Received: ", e)
     except IndexError:
-        print ("Got index out of bounds error. This shouldn't happen here")
+        print("Got index out of bounds error. This shouldn't happen here")

     # With a chunk size exactly matching the active cell count the server will not
     # be able to see any error as it will successfully close the stream after receiving
@@ -65,12 +75,9 @@ if case is not None:
     case.chunk_size = active_cell_count
     try:
-        case.set_active_cell_property(results, 'GENERATED', 'POROAPPENDED', 0)
+        case.set_active_cell_property(results, "GENERATED", "POROAPPENDED", 0)
         print("Everything went well??")
     except grpc.RpcError as e:
         print("Got unexpected server exception", e, "This should not happen now")
     except IndexError:
-        print ("Got expected index out of bounds error on client side")
+        print("Got expected index out of bounds error on client side")

View File

@@ -1,7 +1,9 @@
 # Import the tempfile module
 import tempfile
+
 # Load ResInsight Processing Server Client Library
 import rips
+
 # Connect to ResInsight instance
 resInsight = rips.Instance.find()
@@ -13,8 +15,8 @@ export_folder = tempfile.mkdtemp()
 print("Exporting to: " + export_folder)

 for plot in plots:
     plot.export_snapshot(export_folder=export_folder)
-    plot.export_snapshot(export_folder=export_folder, output_format='PDF')
+    plot.export_snapshot(export_folder=export_folder, output_format="PDF")
     if isinstance(plot, rips.WellLogPlot):
         plot.export_data_as_las(export_folder=export_folder)
         plot.export_data_as_ascii(export_folder=export_folder)

View File

@@ -12,10 +12,10 @@ cases = resinsight.project.cases()
 # Set main window size
 resinsight.set_main_window_size(width=800, height=500)

 n = 5  # every n-th time_step for snapshot
-property_list = ['SOIL', 'PRESSURE']  # list of parameter for snapshot
+property_list = ["SOIL", "PRESSURE"]  # list of parameter for snapshot

-print ("Looping through cases")
+print("Looping through cases")
 for case in cases:
     print("Case name: ", case.name)
     print("Case id: ", case.id)
@@ -24,21 +24,23 @@ for case in cases:
     folder_name = os.path.dirname(case_path)

     # create a folder to hold the snapshots
-    dirname = os.path.join(folder_name, 'snapshots')
+    dirname = os.path.join(folder_name, "snapshots")

     if os.path.exists(dirname) is False:
         os.mkdir(dirname)

-    print ("Exporting to folder: " + dirname)
-    resinsight.set_export_folder(export_type='SNAPSHOTS', path=dirname)
+    print("Exporting to folder: " + dirname)
+    resinsight.set_export_folder(export_type="SNAPSHOTS", path=dirname)

     time_steps = case.time_steps()
-    print('Number of time_steps: ' + str(len(time_steps)))
+    print("Number of time_steps: " + str(len(time_steps)))

     for view in case.views():
         if view.is_eclipse_view():
             for property in property_list:
-                view.apply_cell_result(result_type='DYNAMIC_NATIVE', result_variable=property)
+                view.apply_cell_result(
+                    result_type="DYNAMIC_NATIVE", result_variable=property
+                )
                 for time_step in range(0, len(time_steps), 10):
-                    view.set_time_step(time_step = time_step)
+                    view.set_time_step(time_step=time_step)
                     view.export_snapshot()

View File

@@ -4,7 +4,7 @@
 import rips

 resinsight = rips.Instance.find()

 cases = resinsight.project.cases()
 print("Number of cases found: ", len(cases))
@@ -14,6 +14,3 @@ for case in cases:
     print("Number of grids: ", len(grids))
     for grid in grids:
         print("Grid dimensions: ", grid.dimensions())

View File

@@ -1,9 +1,12 @@
 # Load ResInsight Processing Server Client Library
 import rips
+
 # Connect to ResInsight instance
 resInsight = rips.Instance.find()

-well_paths = resInsight.project.import_well_paths(well_path_folder='D:/Projects/ResInsight-regression-test/ModelData/norne/wellpaths')
+well_paths = resInsight.project.import_well_paths(
+    well_path_folder="D:/Projects/ResInsight-regression-test/ModelData/norne/wellpaths"
+)
 if resInsight.project.has_warnings():
     for warning in resInsight.project.warnings():
         print(warning)
@@ -12,8 +15,12 @@ if resInsight.project.has_warnings():
 for well_path in well_paths:
     print("Imported from folder: " + well_path.name)

-well_paths = resInsight.project.import_well_paths(well_path_files=['D:/Projects/ResInsight-regression-test/ModelData/Norne_WellPaths/E-3H.json',
-                                                                   'D:/Projects/ResInsight-regression-test/ModelData/Norne_WellPaths/C-1H.json'])
+well_paths = resInsight.project.import_well_paths(
+    well_path_files=[
+        "D:/Projects/ResInsight-regression-test/ModelData/Norne_WellPaths/E-3H.json",
+        "D:/Projects/ResInsight-regression-test/ModelData/Norne_WellPaths/C-1H.json",
+    ]
+)
 if resInsight.project.has_warnings():
     for warning in resInsight.project.warnings():
         print(warning)
@@ -23,7 +30,9 @@ for well_path in well_paths:
     print("Imported from individual files: " + well_path.name)

-well_path_names = resInsight.project.import_well_log_files(well_log_folder='D:/Projects/ResInsight-regression-test/ModelData/Norne_PLT_LAS')
+well_path_names = resInsight.project.import_well_log_files(
+    well_log_folder="D:/Projects/ResInsight-regression-test/ModelData/Norne_PLT_LAS"
+)
 if resInsight.project.has_warnings():
     for warning in resInsight.project.warnings():
         print(warning)

View File

@@ -17,22 +17,24 @@ def create_result(poro_chunks, permx_chunks):
         # Return a generator object that behaves like a Python iterator
         yield resultChunk

+
 resinsight = rips.Instance.find()
 start = time.time()
 case = resinsight.project.cases()[0]

 # Get a generator for the poro results. The generator will provide a chunk each time it is iterated
-poro_chunks = case.active_cell_property_async('STATIC_NATIVE', 'PORO', 0)
+poro_chunks = case.active_cell_property_async("STATIC_NATIVE", "PORO", 0)
 # Get a generator for the permx results. The generator will provide a chunk each time it is iterated
-permx_chunks = case.active_cell_property_async('STATIC_NATIVE', 'PERMX', 0)
+permx_chunks = case.active_cell_property_async("STATIC_NATIVE", "PERMX", 0)

 # Send back the result with the result provided by a generator object.
 # Iterating the result generator will cause the script to read from the poro and permx generators
 # And return the result of each iteration
-case.set_active_cell_property_async(create_result(poro_chunks, permx_chunks),
-                                    'GENERATED', 'POROPERMXAS', 0)
+case.set_active_cell_property_async(
+    create_result(poro_chunks, permx_chunks), "GENERATED", "POROPERMXAS", 0
+)

 end = time.time()
 print("Time elapsed: ", end - start)
 print("Transferred all results back")

-view = case.views()[0].apply_cell_result('GENERATED', 'POROPERMXAS')
+view = case.views()[0].apply_cell_result("GENERATED", "POROPERMXAS")

View File

@@ -7,14 +7,14 @@ import rips
 import time
 import grpc

 resinsight = rips.Instance.find()

 start = time.time()

 case = resinsight.project.cases()[0]

 # Read poro result into list
-poro_results = case.active_cell_property('STATIC_NATIVE', 'PORO', 0)
+poro_results = case.active_cell_property("STATIC_NATIVE", "PORO", 0)
 # Read permx result into list
-permx_results = case.active_cell_property('STATIC_NATIVE', 'PERMX', 0)
+permx_results = case.active_cell_property("STATIC_NATIVE", "PERMX", 0)

 # Generate output result
 results = []
@@ -23,7 +23,7 @@ for (poro, permx) in zip(poro_results, permx_results):

 try:
     # Send back output result
-    case.set_active_cell_property(results, 'GENERATED', 'POROPERMXSY', 0)
+    case.set_active_cell_property(results, "GENERATED", "POROPERMXSY", 0)
 except grpc.RpcError as e:
     print("Exception Received: ", e)
@@ -32,4 +32,4 @@ end = time.time()
 print("Time elapsed: ", end - start)
 print("Transferred all results back")

-view = case.views()[0].apply_cell_result('GENERATED', 'POROPERMXSY')
+view = case.views()[0].apply_cell_result("GENERATED", "POROPERMXSY")

View File

@@ -3,9 +3,9 @@
 #######################################
 import rips

 resinsight = rips.Instance.find()
 if resinsight is None:
-    print('ERROR: could not find ResInsight')
+    print("ERROR: could not find ResInsight")
 else:
-    print('Successfully connected to ResInsight')
+    print("Successfully connected to ResInsight")

View File

@@ -1,17 +1,21 @@
 # Access to environment variables
 import os
+
 # Load ResInsight Processing Server Client Library
 import rips
+
 # Connect to ResInsight instance
 resinsight = rips.Instance.launch()

 # This requires the TestModels to be installed with ResInsight (RESINSIGHT_BUNDLE_TESTMODELS):
-resinsight_exe_path = os.environ.get('RESINSIGHT_EXECUTABLE')
+resinsight_exe_path = os.environ.get("RESINSIGHT_EXECUTABLE")

 # Get the TestModels path from the executable path
 resinsight_install_path = os.path.dirname(resinsight_exe_path)
-test_models_path = os.path.join(resinsight_install_path, 'TestModels')
-path_name = os.path.join(test_models_path, 'TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID')
+test_models_path = os.path.join(resinsight_install_path, "TestModels")
+path_name = os.path.join(
+    test_models_path, "TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
+)

 # Load an example case. Needs to be replaced with a valid path!
 case = resinsight.project.load_case(path_name)
@@ -23,11 +27,11 @@ view1 = case.views()[0]
 view1.set_time_step(time_step=2)

 # Set cell result to SOIL
-view1.apply_cell_result(result_type='DYNAMIC_NATIVE', result_variable='SOIL')
+view1.apply_cell_result(result_type="DYNAMIC_NATIVE", result_variable="SOIL")

 # Set export folder for snapshots and properties
-resinsight.set_export_folder(export_type='SNAPSHOTS', path="e:/temp")
-resinsight.set_export_folder(export_type='PROPERTIES', path="e:/temp")
+resinsight.set_export_folder(export_type="SNAPSHOTS", path="e:/temp")
+resinsight.set_export_folder(export_type="PROPERTIES", path="e:/temp")

 # Export all snapshots
 resinsight.project.export_snapshots()

View File

@@ -1,11 +1,14 @@
 # Load ResInsight Processing Server Client Library
 import rips
+
 # Launch ResInsight with last project file and a Window size of 600x1000 pixels
-resinsight = rips.Instance.launch(command_line_parameters=['--last', '--size', 600, 1000])
+resinsight = rips.Instance.launch(
+    command_line_parameters=["--last", "--size", 600, 1000]
+)

 # Get a list of all cases
 cases = resinsight.project.cases()

-print ("Got " + str(len(cases)) + " cases: ")
+print("Got " + str(len(cases)) + " cases: ")
 for case in cases:
     print("Case name: " + case.name)
     print("Case grid path: " + case.file_path)

View File

@@ -1,17 +1,21 @@
 # Access to environment variables and path tools
 import os
+
 # Load ResInsight Processing Server Client Library
 import rips
+
 # Connect to ResInsight instance
 resinsight = rips.Instance.find()

 # This requires the TestModels to be installed with ResInsight (RESINSIGHT_BUNDLE_TESTMODELS):
-resinsight_exe_path = os.environ.get('RESINSIGHT_EXECUTABLE')
+resinsight_exe_path = os.environ.get("RESINSIGHT_EXECUTABLE")

 # Get the TestModels path from the executable path
 resinsight_install_path = os.path.dirname(resinsight_exe_path)
-test_models_path = os.path.join(resinsight_install_path, 'TestModels')
-path_name = os.path.join(test_models_path, 'TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID')
+test_models_path = os.path.join(resinsight_install_path, "TestModels")
+path_name = os.path.join(
+    test_models_path, "TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
+)

 case = resinsight.project.load_case(path_name)

 # Print out lots of information from the case object

View File

@@ -1,5 +1,6 @@
 # Load ResInsight Processing Server Client Library
 import rips
+
 # Connect to ResInsight instance
 resinsight = rips.Instance.find()
 # Example code

View File

@@ -1,5 +1,6 @@
 # Load ResInsight Processing Server Client Library
 import rips
+
 # Connect to ResInsight instance
 resinsight = rips.Instance.find()
 # Example code
@@ -8,4 +9,6 @@ project = resinsight.project
 summary_cases = project.descendants(rips.SummaryCase)
 summary_plot_collection = project.descendants(rips.SummaryPlotCollection)[0]
 if len(summary_cases) > 0:
-    summary_plot = summary_plot_collection.new_summary_plot(summary_cases=summary_cases, address="FOP*")
+    summary_plot = summary_plot_collection.new_summary_plot(
+        summary_cases=summary_cases, address="FOP*"
+    )

View File

@@ -1,17 +1,19 @@
 # Access to environment variables and path tools
 import os
+
 # Load ResInsight Processing Server Client Library
 import rips
+
 # Connect to ResInsight instance
 resinsight = rips.Instance.find()

 # This requires the TestModels to be installed with ResInsight (RESINSIGHT_BUNDLE_TESTMODELS):
-resinsight_exe_path = os.environ.get('RESINSIGHT_EXECUTABLE')
+resinsight_exe_path = os.environ.get("RESINSIGHT_EXECUTABLE")

 # Get the TestModels path from the executable path
 resinsight_install_path = os.path.dirname(resinsight_exe_path)
-test_models_path = os.path.join(resinsight_install_path, 'TestModels')
-path_name = os.path.join(test_models_path, 'TEST10K_FLT_LGR_NNC/10KWithWellLog.rsp')
+test_models_path = os.path.join(resinsight_install_path, "TestModels")
+path_name = os.path.join(test_models_path, "TEST10K_FLT_LGR_NNC/10KWithWellLog.rsp")

 # Open a project
 resinsight.project.open(path_name)

View File

@@ -1,9 +1,12 @@
 # Load ResInsight Processing Server Client Library
 import rips
+
 # Connect to ResInsight instance
 resinsight = rips.Instance.find()
 # Example code
 print("ResInsight version: " + resinsight.version_string())

 case = resinsight.project.case(case_id=0)
-case.replace(new_grid_file='C:/Users/lindkvis/Projects/ResInsight/TestModels/Case_with_10_timesteps/Real0/BRUGGE_0000.EGRID')
+case.replace(
+    new_grid_file="C:/Users/lindkvis/Projects/ResInsight/TestModels/Case_with_10_timesteps/Real0/BRUGGE_0000.EGRID"
+)

View File

@@ -7,14 +7,12 @@
 import rips

 resinsight = rips.Instance.find()
 if resinsight is not None:
     cases = resinsight.project.selected_cases()

-    print ("Got " + str(len(cases)) + " cases: ")
+    print("Got " + str(len(cases)) + " cases: ")
     for case in cases:
         print(case.name)

-        for property in case.available_properties('DYNAMIC_NATIVE'):
+        for property in case.available_properties("DYNAMIC_NATIVE"):
             print(property)

View File

@@ -5,11 +5,11 @@
 import rips

 resinsight = rips.Instance.find()
 if resinsight is not None:
     cases = resinsight.project.cases()

-    print ("Got " + str(len(cases)) + " cases: ")
+    print("Got " + str(len(cases)) + " cases: ")
     for case in cases:
         print(case.name)
         cells = case.selected_cells()
@@ -18,19 +18,31 @@ if resinsight is not None:
         time_step_info = case.time_steps()

         for (idx, cell) in enumerate(cells):
-            print("Selected cell: [{}, {}, {}] grid: {}".format(cell.ijk.i+1, cell.ijk.j+1, cell.ijk.k+1, cell.grid_index))
+            print(
+                "Selected cell: [{}, {}, {}] grid: {}".format(
+                    cell.ijk.i + 1, cell.ijk.j + 1, cell.ijk.k + 1, cell.grid_index
+                )
+            )

             # Get the grid and dimensions
             grid = case.grids()[cell.grid_index]
             dimensions = grid.dimensions()

             # Map ijk to cell index
-            cell_index = dimensions.i * dimensions.j * cell.ijk.k + dimensions.i * cell.ijk.j + cell.ijk.i
+            cell_index = (
+                dimensions.i * dimensions.j * cell.ijk.k
+                + dimensions.i * cell.ijk.j
+                + cell.ijk.i
+            )

             # Print the cell center
             cell_centers = grid.cell_centers()
             cell_center = cell_centers[cell_index]
-            print("Cell center: [{}, {}, {}]".format(cell_center.x, cell_center.y, cell_center.z))
+            print(
+                "Cell center: [{}, {}, {}]".format(
+                    cell_center.x, cell_center.y, cell_center.z
+                )
+            )

             # Print the cell corners
             cell_corners = grid.cell_corners()[cell_index]
@@ -46,5 +58,11 @@ if resinsight is not None:
             for (tidx, timestep) in enumerate(time_step_info):
                 # Read the full SOIL result for time step
-                soil_results = case.selected_cell_property('DYNAMIC_NATIVE', 'SOIL', tidx)
-                print("SOIL: {} ({}.{}.{})".format(soil_results[idx], timestep.year, timestep.month, timestep.day))
+                soil_results = case.selected_cell_property(
+                    "DYNAMIC_NATIVE", "SOIL", tidx
+                )
+                print(
+                    "SOIL: {} ({}.{}.{})".format(
+                        soil_results[idx], timestep.year, timestep.month, timestep.day
+                    )
+                )

View File

@@ -3,7 +3,7 @@
 ######################################################################
 import rips

 resinsight = rips.Instance.find()

 view = resinsight.project.views()[0]
-view.apply_cell_result(result_type='STATIC_NATIVE', result_variable='DX')
+view.apply_cell_result(result_type="STATIC_NATIVE", result_variable="DX")

View File

@@ -4,15 +4,18 @@
 # Load ResInsight Processing Server Client Library
 import rips
+
 # Connect to ResInsight instance
 resinsight = rips.Instance.find()

 view = resinsight.project.view(view_id=1)
-#view.apply_flow_diagnostics_cell_result(result_variable='Fraction',
+# view.apply_flow_diagnostics_cell_result(result_variable='Fraction',
 #                                        selection_mode='FLOW_TR_INJ_AND_PROD')

 # Example of setting individual wells. Commented out because well names are case specific.
-view.apply_flow_diagnostics_cell_result(result_variable='Fraction',
-                                        selection_mode='FLOW_TR_BY_SELECTION',
-                                        injectors = ['C-1H', 'C-2H', 'F-2H'],
-                                        producers = ['B-1AH', 'B-3H', 'D-1H'])
+view.apply_flow_diagnostics_cell_result(
+    result_variable="Fraction",
+    selection_mode="FLOW_TR_BY_SELECTION",
+    injectors=["C-1H", "C-2H", "F-2H"],
+    producers=["B-1AH", "B-3H", "D-1H"],
+)

View File

@@ -3,15 +3,14 @@
 ######################################################################
 import rips

 resinsight = rips.Instance.find()

 case = resinsight.project.case(case_id=0)
 total_cell_count = case.cell_count().reservoir_cell_count

 values = []
 for i in range(0, total_cell_count):
-    values.append(i % 2 * 0.75);
+    values.append(i % 2 * 0.75)

 print("Applying values to full grid")
-case.set_grid_property(values, 'DYNAMIC_NATIVE', 'SOIL', 0)
+case.set_grid_property(values, "DYNAMIC_NATIVE", "SOIL", 0)

View File

@@ -6,31 +6,31 @@ import rips
 import itertools
 import time

 resinsight = rips.Instance.find()

 start = time.time()

 # Get the case with case id 0
 case = resinsight.project.case(case_id=0)

 # Get a list of all time steps
 timeSteps = case.time_steps()

 averages = []
 for i in range(0, len(timeSteps)):
     # Get the results from time step i asynchronously
     # It actually returns a generator object almost immediately
-    result_chunks = case.active_cell_property_async('DYNAMIC_NATIVE', 'SOIL', i)
+    result_chunks = case.active_cell_property_async("DYNAMIC_NATIVE", "SOIL", i)
     mysum = 0.0
     count = 0
     # Loop through and append the average. each time we loop resultChunks
     # We will trigger a read of the input data, meaning the script will start
     # Calculating averages before the whole resultValue for this time step has been received
     for chunk in result_chunks:
         mysum += sum(chunk.values)
         count += len(chunk.values)
-    averages.append(mysum/count)
+    averages.append(mysum / count)

 end = time.time()
 print("Time elapsed: ", end - start)

View File

@@ -5,22 +5,22 @@ import rips
 import itertools
 import time

 resinsight = rips.Instance.find()

 start = time.time()

 # Get the case with case id 0
 case = resinsight.project.case(case_id=0)

 # Get a list of all time steps
 time_steps = case.time_steps()

 averages = []
 for i in range(0, len(time_steps)):
     # Get a list of all the results for time step i
-    results = case.active_cell_property('DYNAMIC_NATIVE', 'SOIL', i)
+    results = case.active_cell_property("DYNAMIC_NATIVE", "SOIL", i)
     mysum = sum(results)
-    averages.append(mysum/len(results))
+    averages.append(mysum / len(results))

 end = time.time()
 print("Time elapsed: ", end - start)

View File

@@ -16,13 +16,14 @@ def create_result(soil_chunks, porv_chunks):
         # Return a Python generator
         yield resultChunk

+
 resinsight = rips.Instance.find()
 start = time.time()
 case = resinsight.project.cases()[0]

 timeStepInfo = case.time_steps()

 # Get a generator for the porv results. The generator will provide a chunk each time it is iterated
-porv_chunks = case.active_cell_property_async('STATIC_NATIVE', 'PORV', 0)
+porv_chunks = case.active_cell_property_async("STATIC_NATIVE", "PORV", 0)

 # Read the static result into an array, so we don't have to transfer it for each iteration
 # Note we use the async method even if we synchronise here, because we need the values chunked
@@ -31,17 +32,19 @@ porv_array = []
 for porv_chunk in porv_chunks:
     porv_array.append(porv_chunk)

-for i in range (0, len(timeStepInfo)):
+for i in range(0, len(timeStepInfo)):
     # Get a generator object for the SOIL property for time step i
-    soil_chunks = case.active_cell_property_async('DYNAMIC_NATIVE', 'SOIL', i)
+    soil_chunks = case.active_cell_property_async("DYNAMIC_NATIVE", "SOIL", i)
     # Create the generator object for the SOIL * PORV derived result
     result_generator = create_result(soil_chunks, iter(porv_array))
     # Send back the result asynchronously with a generator object
-    case.set_active_cell_property_async(result_generator, 'GENERATED', 'SOILPORVAsync', i)
+    case.set_active_cell_property_async(
+        result_generator, "GENERATED", "SOILPORVAsync", i
+    )

 end = time.time()
 print("Time elapsed: ", end - start)
 print("Transferred all results back")

-view = case.views()[0].apply_cell_result('GENERATED', 'SOILPORVAsync')
+view = case.views()[0].apply_cell_result("GENERATED", "SOILPORVAsync")

View File

@@ -7,15 +7,15 @@ import time
 resinsight = rips.Instance.find()

 start = time.time()

 case = resinsight.project.cases()[0]

 # Read the full porv result
-porv_results = case.active_cell_property('STATIC_NATIVE', 'PORV', 0)
+porv_results = case.active_cell_property("STATIC_NATIVE", "PORV", 0)
 time_step_info = case.time_steps()

-for i in range (0, len(time_step_info)):
+for i in range(0, len(time_step_info)):
     # Read the full SOIl result for time step i
-    soil_results = case.active_cell_property('DYNAMIC_NATIVE', 'SOIL', i)
+    soil_results = case.active_cell_property("DYNAMIC_NATIVE", "SOIL", i)

     # Generate the result by looping through both lists in order
     results = []
@@ -23,11 +23,11 @@ for i in range (0, len(time_step_info)):
         results.append(soil * porv)

     # Send back result
-    case.set_active_cell_property(results, 'GENERATED', 'SOILPORVSync', i)
+    case.set_active_cell_property(results, "GENERATED", "SOILPORVSync", i)

 end = time.time()

 print("Time elapsed: ", end - start)
 print("Transferred all results back")

-view = case.views()[0].apply_cell_result('GENERATED', 'SOILPORVSync')
+view = case.views()[0].apply_cell_result("GENERATED", "SOILPORVSync")

View File

@@ -1,5 +1,6 @@
 # Load ResInsight Processing Server Client Library
 import rips
+
 # Connect to ResInsight instance
 resinsight = rips.Instance.find()
 # Example code

View File

@@ -1,5 +1,6 @@
 # Load ResInsight Processing Server Client Library
 import rips
+
 # Connect to ResInsight instance
 resinsight = rips.Instance.find()
 print("ResInsight version: " + resinsight.version_string())

View File

@@ -4,6 +4,7 @@
 # Also clones the first view
 #############################################################
 import rips
+
 # Connect to ResInsight instance
 resinsight = rips.Instance.find()

View File

@@ -2,7 +2,8 @@ name = "rips"
 import os
 import sys

-sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'generated'))
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), "generated"))

 from .resinsight_classes import *

File diff suppressed because it is too large.

View File

@ -9,8 +9,14 @@ from .resinsight_classes import EclipseContourMap, GeoMechContourMap
@add_method(EclipseContourMap) @add_method(EclipseContourMap)
def export_to_text(self, export_file_name='', export_local_coordinates=False, undefined_value_label="NaN", exclude_undefined_values=False): def export_to_text(
""" Export snapshot for the current view self,
export_file_name="",
export_local_coordinates=False,
undefined_value_label="NaN",
exclude_undefined_values=False,
):
"""Export snapshot for the current view
Arguments: Arguments:
export_file_name(str): The file location to store results in. export_file_name(str): The file location to store results in.
@ -24,12 +30,20 @@ def export_to_text(self, export_file_name='', export_local_coordinates=False, un
exportLocalCoordinates=export_local_coordinates, exportLocalCoordinates=export_local_coordinates,
undefinedValueLabel=undefined_value_label, undefinedValueLabel=undefined_value_label,
excludeUndefinedValues=exclude_undefined_values, excludeUndefinedValues=exclude_undefined_values,
viewId=self.id)) viewId=self.id,
)
)
@add_method(GeoMechContourMap) @add_method(GeoMechContourMap)
def export_to_text(self, export_file_name='', export_local_coordinates=False, undefined_value_label="NaN", exclude_undefined_values=False): def export_to_text(
""" Export snapshot for the current view self,
export_file_name="",
export_local_coordinates=False,
undefined_value_label="NaN",
exclude_undefined_values=False,
):
"""Export snapshot for the current view
Arguments: Arguments:
export_file_name(str): The file location to store results in. export_file_name(str): The file location to store results in.
@ -43,4 +57,6 @@ def export_to_text(self, export_file_name='', export_local_coordinates=False, un
exportLocalCoordinates=export_local_coordinates, exportLocalCoordinates=export_local_coordinates,
undefinedValueLabel=undefined_value_label, undefinedValueLabel=undefined_value_label,
excludeUndefinedValues=exclude_undefined_values, excludeUndefinedValues=exclude_undefined_values,
viewId=self.id)) viewId=self.id,
)
)
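For reference, a minimal usage sketch of the reformatted export_to_text call, assuming a running ResInsight instance whose project already contains an Eclipse contour map; reaching the map through descendants() and the rips.EclipseContourMap top-level name are assumptions, and the output path is a placeholder.
import rips

resinsight = rips.Instance.find()  # connect to a running ResInsight instance
# Assumed lookup: contour maps reachable from the project via descendants()
contour_maps = resinsight.project.descendants(rips.EclipseContourMap)
if contour_maps:
    # Export the first contour map to a text file, skipping undefined cells
    contour_maps[0].export_to_text(
        export_file_name="/tmp/contour_map.txt",  # placeholder path
        export_local_coordinates=False,
        undefined_value_label="NaN",
        exclude_undefined_values=True,
    )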

View File

@ -31,8 +31,8 @@ class Grid:
""" """
case_request = Case_pb2.CaseRequest(id=self.case.id) case_request = Case_pb2.CaseRequest(id=self.case.id)
return self.__stub.GetDimensions( return self.__stub.GetDimensions(
Grid_pb2.GridRequest(case_request=case_request, Grid_pb2.GridRequest(case_request=case_request, grid_index=self.index)
grid_index=self.index)).dimensions ).dimensions
def cell_centers_async(self): def cell_centers_async(self):
"""The cells center for all cells in given grid async. """The cells center for all cells in given grid async.
@ -42,8 +42,8 @@ class Grid:
""" """
case_request = Case_pb2.CaseRequest(id=self.case.id) case_request = Case_pb2.CaseRequest(id=self.case.id)
chunks = self.__stub.GetCellCenters( chunks = self.__stub.GetCellCenters(
Grid_pb2.GridRequest(case_request=case_request, Grid_pb2.GridRequest(case_request=case_request, grid_index=self.index)
grid_index=self.index)) )
for chunk in chunks: for chunk in chunks:
yield chunk yield chunk
@ -68,8 +68,8 @@ class Grid:
""" """
case_request = Case_pb2.CaseRequest(id=self.case.id) case_request = Case_pb2.CaseRequest(id=self.case.id)
chunks = self.__stub.GetCellCorners( chunks = self.__stub.GetCellCorners(
Grid_pb2.GridRequest(case_request=case_request, Grid_pb2.GridRequest(case_request=case_request, grid_index=self.index)
grid_index=self.index)) )
for chunk in chunks: for chunk in chunks:
yield chunk yield chunk
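A small usage sketch for the Grid accessors touched above, assuming a running ResInsight instance and a loaded case with at least one grid; the EGRID path is a placeholder.
import rips

resinsight = rips.Instance.find()
case = resinsight.project.load_case("/path/to/CASE.EGRID")  # placeholder path

grid = case.grid(index=0)   # main grid
dims = grid.dimensions()    # GetDimensions -> i, j, k
print("Grid dimensions:", dims.i, dims.j, dims.k)

# cell_centers() collects the streamed chunks into one list
centers = grid.cell_centers()
print("Number of cell centers:", len(centers))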

View File

@ -19,9 +19,10 @@ def create_statistics_case(self):
""" """
command_reply = self._execute_command( command_reply = self._execute_command(
createStatisticsCase=Commands_pb2.CreateStatisticsCaseRequest( createStatisticsCase=Commands_pb2.CreateStatisticsCaseRequest(
caseGroupId=self.group_id)) caseGroupId=self.group_id
return Case(self.channel, )
command_reply.createStatisticsCaseResult.caseId) )
return Case(self.channel, command_reply.createStatisticsCaseResult.caseId)
@add_method(GridCaseGroup) @add_method(GridCaseGroup)
@ -70,7 +71,7 @@ def view(self, view_id):
@add_method(GridCaseGroup) @add_method(GridCaseGroup)
def compute_statistics(self, case_ids=None): def compute_statistics(self, case_ids=None):
""" Compute statistics for the given case ids """Compute statistics for the given case ids
Arguments: Arguments:
case_ids(list of integers): List of case ids. If this is None all cases in group are included case_ids(list of integers): List of case ids. If this is None all cases in group are included
@ -80,4 +81,6 @@ def compute_statistics(self, case_ids=None):
case_ids = [] case_ids = []
return self._execute_command( return self._execute_command(
computeCaseGroupStatistics=Commands_pb2.ComputeCaseGroupStatRequest( computeCaseGroupStatistics=Commands_pb2.ComputeCaseGroupStatRequest(
caseIds=case_ids, caseGroupId=self.group_id)) caseIds=case_ids, caseGroupId=self.group_id
)
)
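A hedged sketch of the grid case group workflow reformatted above, assuming a running instance and an ensemble of EGRID realisations on disk; both paths are placeholders.
import rips

resinsight = rips.Instance.find()
# Placeholder paths for two realisations of the same model
case_paths = ["/data/real0/CASE.EGRID", "/data/real1/CASE.EGRID"]
case_group = resinsight.project.create_grid_case_group(case_paths=case_paths)

# Create a statistics case, then compute statistics for the group
statistics_case = case_group.create_statistics_case()
case_group.compute_statistics()  # case_ids=None -> include every case in the group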

View File

@ -35,20 +35,19 @@ class Instance:
project (Project): Current project in ResInsight. project (Project): Current project in ResInsight.
Set when creating an instance and updated when opening/closing projects. Set when creating an instance and updated when opening/closing projects.
""" """
@staticmethod @staticmethod
def __is_port_in_use(port): def __is_port_in_use(port):
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as my_socket: with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as my_socket:
my_socket.settimeout(0.2) my_socket.settimeout(0.2)
return my_socket.connect_ex(('localhost', port)) == 0 return my_socket.connect_ex(("localhost", port)) == 0
@staticmethod @staticmethod
def __is_valid_port(port): def __is_valid_port(port):
location = "localhost:" + str(port) location = "localhost:" + str(port)
channel = grpc.insecure_channel(location, channel = grpc.insecure_channel(
options=[ location, options=[("grpc.enable_http_proxy", False)]
('grpc.enable_http_proxy', )
False)
])
app = App_pb2_grpc.AppStub(channel) app = App_pb2_grpc.AppStub(channel)
try: try:
app.GetVersion(Empty(), timeout=1) app.GetVersion(Empty(), timeout=1)
@ -57,11 +56,13 @@ class Instance:
return True return True
@staticmethod @staticmethod
def launch(resinsight_executable='', def launch(
console=False, resinsight_executable="",
launch_port=-1, console=False,
command_line_parameters=None): launch_port=-1,
""" Launch a new Instance of ResInsight. This requires the environment variable command_line_parameters=None,
):
"""Launch a new Instance of ResInsight. This requires the environment variable
RESINSIGHT_EXECUTABLE to be set or the parameter resinsight_executable to be provided. RESINSIGHT_EXECUTABLE to be set or the parameter resinsight_executable to be provided.
The RESINSIGHT_GRPC_PORT environment variable can be set to an alternative port number. The RESINSIGHT_GRPC_PORT environment variable can be set to an alternative port number.
@ -78,18 +79,19 @@ class Instance:
""" """
port = 50051 port = 50051
port_env = os.environ.get('RESINSIGHT_GRPC_PORT') port_env = os.environ.get("RESINSIGHT_GRPC_PORT")
if port_env: if port_env:
port = int(port_env) port = int(port_env)
if launch_port != -1: if launch_port != -1:
port = launch_port port = launch_port
if not resinsight_executable: if not resinsight_executable:
resinsight_executable = os.environ.get('RESINSIGHT_EXECUTABLE') resinsight_executable = os.environ.get("RESINSIGHT_EXECUTABLE")
if not resinsight_executable: if not resinsight_executable:
print( print(
'ERROR: Could not launch ResInsight because the environment variable' "ERROR: Could not launch ResInsight because the environment variable"
' RESINSIGHT_EXECUTABLE is not set') " RESINSIGHT_EXECUTABLE is not set"
)
return None return None
print("Trying port " + str(port)) print("Trying port " + str(port))
@ -97,16 +99,15 @@ class Instance:
port += 1 port += 1
print("Trying port " + str(port)) print("Trying port " + str(port))
print('Port ' + str(port)) print("Port " + str(port))
print('Trying to launch', resinsight_executable) print("Trying to launch", resinsight_executable)
if command_line_parameters is None: if command_line_parameters is None:
command_line_parameters = [] command_line_parameters = []
elif isinstance(command_line_parameters, str): elif isinstance(command_line_parameters, str):
command_line_parameters = [str] command_line_parameters = [str]
parameters = ["ResInsight", "--server", parameters = ["ResInsight", "--server", str(port)] + command_line_parameters
str(port)] + command_line_parameters
if console: if console:
print("Launching as console app") print("Launching as console app")
parameters.append("--console") parameters.append("--console")
@ -123,7 +124,7 @@ class Instance:
@staticmethod @staticmethod
def find(start_port=50051, end_port=50071): def find(start_port=50051, end_port=50071):
""" Search for an existing Instance of ResInsight by testing ports. """Search for an existing Instance of ResInsight by testing ports.
By default we search from port 50051 to 50071 or if the environment By default we search from port 50051 to 50071 or if the environment
variable RESINSIGHT_GRPC_PORT is set we search variable RESINSIGHT_GRPC_PORT is set we search
@ -133,7 +134,7 @@ class Instance:
start_port (int): start searching from this port start_port (int): start searching from this port
end_port (int): search up to but not including this port end_port (int): search up to but not including this port
""" """
port_env = os.environ.get('RESINSIGHT_GRPC_PORT') port_env = os.environ.get("RESINSIGHT_GRPC_PORT")
if port_env: if port_env:
print("Got port " + port_env + " from environment") print("Got port " + port_env + " from environment")
start_port = int(port_env) start_port = int(port_env)
@ -141,12 +142,17 @@ class Instance:
for try_port in range(start_port, end_port): for try_port in range(start_port, end_port):
print("Trying port " + str(try_port)) print("Trying port " + str(try_port))
if Instance.__is_port_in_use(try_port) and Instance.__is_valid_port(try_port): if Instance.__is_port_in_use(try_port) and Instance.__is_valid_port(
try_port
):
return Instance(port=try_port) return Instance(port=try_port)
print( print(
'Error: Could not find any ResInsight instances responding between ports ' "Error: Could not find any ResInsight instances responding between ports "
+ str(start_port) + ' and ' + str(end_port)) + str(start_port)
+ " and "
+ str(end_port)
)
return None return None
def __execute_command(self, **command_params): def __execute_command(self, **command_params):
@ -155,15 +161,17 @@ class Instance:
def __check_version(self): def __check_version(self):
try: try:
major_version_ok = self.major_version() == int( major_version_ok = self.major_version() == int(
RiaVersionInfo.RESINSIGHT_MAJOR_VERSION) RiaVersionInfo.RESINSIGHT_MAJOR_VERSION
)
minor_version_ok = self.minor_version() == int( minor_version_ok = self.minor_version() == int(
RiaVersionInfo.RESINSIGHT_MINOR_VERSION) RiaVersionInfo.RESINSIGHT_MINOR_VERSION
)
return True, major_version_ok and minor_version_ok return True, major_version_ok and minor_version_ok
except grpc.RpcError: except grpc.RpcError:
return False, False return False, False
def __init__(self, port=50051, launched=False): def __init__(self, port=50051, launched=False):
""" Attempts to connect to ResInsight at aa specific port on localhost """Attempts to connect to ResInsight at aa specific port on localhost
Args: Args:
port(int): port number port(int): port number
@ -171,11 +179,9 @@ class Instance:
logging.basicConfig() logging.basicConfig()
location = "localhost:" + str(port) location = "localhost:" + str(port)
self.channel = grpc.insecure_channel(location, self.channel = grpc.insecure_channel(
options=[ location, options=[("grpc.enable_http_proxy", False)]
('grpc.enable_http_proxy', )
False)
])
self.launched = launched self.launched = launched
self.commands = Commands_pb2_grpc.CommandsStub(self.channel) self.commands = Commands_pb2_grpc.CommandsStub(self.channel)
@ -187,7 +193,9 @@ class Instance:
# Intercept UNAVAILABLE errors and retry on failures # Intercept UNAVAILABLE errors and retry on failures
interceptors = ( interceptors = (
RetryOnRpcErrorClientInterceptor( RetryOnRpcErrorClientInterceptor(
retry_policy=ExponentialBackoffRetryPolicy(min_backoff=100, max_backoff=5000, max_num_retries=20), retry_policy=ExponentialBackoffRetryPolicy(
min_backoff=100, max_backoff=5000, max_num_retries=20
),
status_for_retry=(grpc.StatusCode.UNAVAILABLE,), status_for_retry=(grpc.StatusCode.UNAVAILABLE,),
), ),
) )
@ -219,14 +227,21 @@ class Instance:
if not connection_ok: if not connection_ok:
if self.launched: if self.launched:
raise Exception('Error: Could not connect to resinsight at ', raise Exception(
location, "Error: Could not connect to resinsight at ",
'.', retry_policy.time_out_message()) location,
raise Exception('Error: Could not connect to resinsight at ', location) ".",
retry_policy.time_out_message(),
)
raise Exception("Error: Could not connect to resinsight at ", location)
if not version_ok: if not version_ok:
raise Exception('Error: Wrong Version of ResInsight at ', location, raise Exception(
self.version_string(), " ", "Error: Wrong Version of ResInsight at ",
self.client_version_string()) location,
self.version_string(),
" ",
self.client_version_string(),
)
def __version_message(self): def __version_message(self):
return self.app.GetVersion(Empty()) return self.app.GetVersion(Empty())
@ -238,7 +253,9 @@ class Instance:
path (str): path to directory path (str): path to directory
""" """
return self.__execute_command(setStartDir=Commands_pb2.FilePathRequest(path=path)) return self.__execute_command(
setStartDir=Commands_pb2.FilePathRequest(path=path)
)
def set_export_folder(self, export_type, path, create_folder=False): def set_export_folder(self, export_type, path, create_folder=False):
""" """
@ -262,8 +279,11 @@ class Instance:
"STATISTICS" | "STATISTICS" |
""" """
return self.__execute_command(setExportFolder=Commands_pb2.SetExportFolderRequest( return self.__execute_command(
type=export_type, path=path, createFolder=create_folder)) setExportFolder=Commands_pb2.SetExportFolderRequest(
type=export_type, path=path, createFolder=create_folder
)
)
def set_main_window_size(self, width, height): def set_main_window_size(self, width, height):
""" """
@ -277,8 +297,11 @@ class Instance:
height | Height in pixels | Integer height | Height in pixels | Integer
""" """
return self.__execute_command(setMainWindowSize=Commands_pb2.SetWindowSizeParams( return self.__execute_command(
width=width, height=height)) setMainWindowSize=Commands_pb2.SetWindowSizeParams(
width=width, height=height
)
)
def set_plot_window_size(self, width, height): def set_plot_window_size(self, width, height):
""" """
@ -291,8 +314,11 @@ class Instance:
width | Width in pixels | Integer width | Width in pixels | Integer
height | Height in pixels | Integer height | Height in pixels | Integer
""" """
return self.__execute_command(setPlotWindowSize=Commands_pb2.SetWindowSizeParams( return self.__execute_command(
width=width, height=height)) setPlotWindowSize=Commands_pb2.SetWindowSizeParams(
width=width, height=height
)
)
def major_version(self): def major_version(self):
"""Get an integer with the major version number""" """Get an integer with the major version number"""
@ -308,8 +334,13 @@ class Instance:
def version_string(self): def version_string(self):
"""Get a full version string, i.e. 2019.04.01""" """Get a full version string, i.e. 2019.04.01"""
return str(self.major_version()) + "." + str( return (
self.minor_version()) + "." + str(self.patch_version()) str(self.major_version())
+ "."
+ str(self.minor_version())
+ "."
+ str(self.patch_version())
)
def client_version_string(self): def client_version_string(self):
"""Get a full version string, i.e. 2019.04.01""" """Get a full version string, i.e. 2019.04.01"""
@ -326,11 +357,11 @@ class Instance:
def is_console(self): def is_console(self):
"""Returns true if the connected ResInsight instance is a console app""" """Returns true if the connected ResInsight instance is a console app"""
return self.app.GetRuntimeInfo( return self.app.GetRuntimeInfo(
Empty()).app_type == App_pb2.ApplicationTypeEnum.Value( Empty()
'CONSOLE_APPLICATION') ).app_type == App_pb2.ApplicationTypeEnum.Value("CONSOLE_APPLICATION")
def is_gui(self): def is_gui(self):
"""Returns true if the connected ResInsight instance is a GUI app""" """Returns true if the connected ResInsight instance is a GUI app"""
return self.app.GetRuntimeInfo( return self.app.GetRuntimeInfo(
Empty()).app_type == App_pb2.ApplicationTypeEnum.Value( Empty()
'GUI_APPLICATION') ).app_type == App_pb2.ApplicationTypeEnum.Value("GUI_APPLICATION")
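To illustrate the launch/find entry points reformatted above: a minimal connection sketch, assuming ResInsight is either already running or the RESINSIGHT_EXECUTABLE environment variable points to the executable.
import rips

# Reuse a running instance if one responds on the default port range,
# otherwise launch a new console instance (requires RESINSIGHT_EXECUTABLE).
resinsight = rips.Instance.find()
if resinsight is None:
    resinsight = rips.Instance.launch(console=True)

if resinsight is not None:
    print("Connected to ResInsight " + resinsight.version_string())
    print("Console application:", resinsight.is_console())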

View File

@ -16,19 +16,21 @@ import PdmObject_pb2_grpc
import Commands_pb2 import Commands_pb2
import Commands_pb2_grpc import Commands_pb2_grpc
def camel_to_snake(name): def camel_to_snake(name):
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name) s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower() return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower()
def snake_to_camel(name): def snake_to_camel(name):
return ''.join(word.title() for word in name.split('_')) return "".join(word.title() for word in name.split("_"))
def add_method(cls): def add_method(cls):
def decorator(func): def decorator(func):
setattr(cls, func.__name__, func) setattr(cls, func.__name__, func)
return func # returning func means func can still be used normally return func # returning func means func can still be used normally
return decorator return decorator
@ -37,9 +39,11 @@ def add_static_method(cls):
@wraps(func) @wraps(func)
def wrapper(*args, **kwargs): def wrapper(*args, **kwargs):
return func(*args, **kwargs) return func(*args, **kwargs)
setattr(cls, func.__name__, wrapper) setattr(cls, func.__name__, wrapper)
# Note we are not binding func, but wrapper which accepts self but does exactly the same as func # Note we are not binding func, but wrapper which accepts self but does exactly the same as func
return func # returning func means func can still be used normally return func # returning func means func can still be used normally
return decorator return decorator
@ -51,10 +55,11 @@ class PdmObjectBase:
def _execute_command(self, **command_params): def _execute_command(self, **command_params):
self.__warnings = [] self.__warnings = []
response, call = self._commands.Execute.with_call( response, call = self._commands.Execute.with_call(
Commands_pb2.CommandParams(**command_params)) Commands_pb2.CommandParams(**command_params)
)
for key, value in call.trailing_metadata(): for key, value in call.trailing_metadata():
value = value.replace(';;', '\n') value = value.replace(";;", "\n")
if key == 'warning': if key == "warning":
self.__warnings.append(value) self.__warnings.append(value)
return response return response
@ -67,26 +72,29 @@ class PdmObjectBase:
# Create stubs # Create stubs
if self._channel: if self._channel:
self._pdm_object_stub = PdmObject_pb2_grpc.PdmObjectServiceStub(self._channel) self._pdm_object_stub = PdmObject_pb2_grpc.PdmObjectServiceStub(
self._channel
)
self._commands = Commands_pb2_grpc.CommandsStub(self._channel) self._commands = Commands_pb2_grpc.CommandsStub(self._channel)
if pb2_object is not None: if pb2_object is not None:
# Copy parameters from ResInsight # Copy parameters from ResInsight
assert(isinstance(pb2_object, PdmObject_pb2.PdmObject)) assert isinstance(pb2_object, PdmObject_pb2.PdmObject)
self._pb2_object = pb2_object self._pb2_object = pb2_object
for camel_keyword in self._pb2_object.parameters: for camel_keyword in self._pb2_object.parameters:
snake_keyword = camel_to_snake(camel_keyword) snake_keyword = camel_to_snake(camel_keyword)
setattr(self, snake_keyword, self.__get_grpc_value(camel_keyword)) setattr(self, snake_keyword, self.__get_grpc_value(camel_keyword))
else: else:
# Copy parameters from PdmObject defaults # Copy parameters from PdmObject defaults
self._pb2_object = PdmObject_pb2.PdmObject(class_keyword=self.__class__.__name__) self._pb2_object = PdmObject_pb2.PdmObject(
class_keyword=self.__class__.__name__
)
self.__copy_to_pb2() self.__copy_to_pb2()
def copy_from(self, object): def copy_from(self, object):
"""Copy attribute values from object to self """Copy attribute values from object to self"""
"""
for attribute in dir(object): for attribute in dir(object):
if not attribute.startswith('__'): if not attribute.startswith("__"):
value = getattr(object, attribute) value = getattr(object, attribute)
# This is crucial to avoid overwriting methods # This is crucial to avoid overwriting methods
if not callable(value): if not callable(value):
@ -104,7 +112,7 @@ class PdmObjectBase:
def __copy_to_pb2(self): def __copy_to_pb2(self):
if self._pb2_object is not None: if self._pb2_object is not None:
for snake_kw in dir(self): for snake_kw in dir(self):
if not snake_kw.startswith('_'): if not snake_kw.startswith("_"):
value = getattr(self, snake_kw) value = getattr(self, snake_kw)
# This is crucial to avoid overwriting methods # This is crucial to avoid overwriting methods
if not callable(value): if not callable(value):
@ -143,17 +151,23 @@ class PdmObjectBase:
for snake_kw in dir(self): for snake_kw in dir(self):
if not snake_kw.startswith("_") and not callable(getattr(self, snake_kw)): if not snake_kw.startswith("_") and not callable(getattr(self, snake_kw)):
camel_kw = snake_to_camel(snake_kw) camel_kw = snake_to_camel(snake_kw)
print(" " + snake_kw + " [" + type(getattr(self, snake_kw)).__name__ + print(
"]: " + str(getattr(self, snake_kw))) " "
+ snake_kw
+ " ["
+ type(getattr(self, snake_kw)).__name__
+ "]: "
+ str(getattr(self, snake_kw))
)
print("Object Methods:") print("Object Methods:")
for snake_kw in dir(self): for snake_kw in dir(self):
if not snake_kw.startswith("_") and callable(getattr(self, snake_kw)): if not snake_kw.startswith("_") and callable(getattr(self, snake_kw)):
print(" " + snake_kw) print(" " + snake_kw)
def __convert_from_grpc_value(self, value): def __convert_from_grpc_value(self, value):
if value.lower() == 'false': if value.lower() == "false":
return False return False
if value.lower() == 'true': if value.lower() == "true":
return True return True
try: try:
int_val = int(value) int_val = int(value)
@ -164,7 +178,7 @@ class PdmObjectBase:
return float_val return float_val
except ValueError: except ValueError:
# We may have a string. Strip internal start and end quotes # We may have a string. Strip internal start and end quotes
value = value.strip('\"') value = value.strip('"')
if self.__islist(value): if self.__islist(value):
return self.__makelist(value) return self.__makelist(value)
return value return value
@ -184,7 +198,9 @@ class PdmObjectBase:
return str(value) return str(value)
def __get_grpc_value(self, camel_keyword): def __get_grpc_value(self, camel_keyword):
return self.__convert_from_grpc_value(self._pb2_object.parameters[camel_keyword]) return self.__convert_from_grpc_value(
self._pb2_object.parameters[camel_keyword]
)
def __set_grpc_value(self, camel_keyword, value): def __set_grpc_value(self, camel_keyword, value):
self._pb2_object.parameters[camel_keyword] = self.__convert_to_grpc_value(value) self._pb2_object.parameters[camel_keyword] = self.__convert_to_grpc_value(value)
@ -216,12 +232,15 @@ class PdmObjectBase:
def __from_pb2_to_resinsight_classes(self, pb2_object_list, super_class_definition): def __from_pb2_to_resinsight_classes(self, pb2_object_list, super_class_definition):
pdm_object_list = [] pdm_object_list = []
from .generated.generated_classes import class_from_keyword from .generated.generated_classes import class_from_keyword
for pb2_object in pb2_object_list: for pb2_object in pb2_object_list:
child_class_definition = class_from_keyword(pb2_object.class_keyword) child_class_definition = class_from_keyword(pb2_object.class_keyword)
if child_class_definition is None: if child_class_definition is None:
child_class_definition = super_class_definition child_class_definition = super_class_definition
pdm_object = child_class_definition(pb2_object=pb2_object, channel=self.channel()) pdm_object = child_class_definition(
pb2_object=pb2_object, channel=self.channel()
)
pdm_object_list.append(pdm_object) pdm_object_list.append(pdm_object)
return pdm_object_list return pdm_object_list
@ -233,14 +252,14 @@ class PdmObjectBase:
Returns: Returns:
A list of PdmObjects matching the class_definition A list of PdmObjects matching the class_definition
""" """
assert(inspect.isclass(class_definition)) assert inspect.isclass(class_definition)
class_keyword = class_definition.__name__ class_keyword = class_definition.__name__
try: try:
request = PdmObject_pb2.PdmDescendantObjectRequest( request = PdmObject_pb2.PdmDescendantObjectRequest(
object=self._pb2_object, child_keyword=class_keyword) object=self._pb2_object, child_keyword=class_keyword
object_list = self._pdm_object_stub.GetDescendantPdmObjects( )
request).objects object_list = self._pdm_object_stub.GetDescendantPdmObjects(request).objects
return self.__from_pb2_to_resinsight_classes(object_list, class_definition) return self.__from_pb2_to_resinsight_classes(object_list, class_definition)
except grpc.RpcError as e: except grpc.RpcError as e:
if e.code() == grpc.StatusCode.NOT_FOUND: if e.code() == grpc.StatusCode.NOT_FOUND:
@ -254,8 +273,9 @@ class PdmObjectBase:
Returns: Returns:
A list of PdmObjects inside the child_field A list of PdmObjects inside the child_field
""" """
request = PdmObject_pb2.PdmChildObjectRequest(object=self._pb2_object, request = PdmObject_pb2.PdmChildObjectRequest(
child_field=child_field) object=self._pb2_object, child_field=child_field
)
try: try:
object_list = self._pdm_object_stub.GetChildPdmObjects(request).objects object_list = self._pdm_object_stub.GetChildPdmObjects(request).objects
return self.__from_pb2_to_resinsight_classes(object_list, class_definition) return self.__from_pb2_to_resinsight_classes(object_list, class_definition)
@ -269,13 +289,14 @@ class PdmObjectBase:
Arguments: Arguments:
class_definition[class]: A class definition matching the type of class wanted class_definition[class]: A class definition matching the type of class wanted
""" """
assert(inspect.isclass(class_definition)) assert inspect.isclass(class_definition)
class_keyword = class_definition.__name__ class_keyword = class_definition.__name__
from .generated.generated_classes import class_from_keyword from .generated.generated_classes import class_from_keyword
request = PdmObject_pb2.PdmParentObjectRequest( request = PdmObject_pb2.PdmParentObjectRequest(
object=self._pb2_object, parent_keyword=class_keyword) object=self._pb2_object, parent_keyword=class_keyword
)
try: try:
pb2_object = self._pdm_object_stub.GetAncestorPdmObject(request) pb2_object = self._pdm_object_stub.GetAncestorPdmObject(request)
child_class_definition = class_from_keyword(pb2_object.class_keyword) child_class_definition = class_from_keyword(pb2_object.class_keyword)
@ -283,7 +304,9 @@ class PdmObjectBase:
if child_class_definition is None: if child_class_definition is None:
child_class_definition = class_definition child_class_definition = class_definition
pdm_object = child_class_definition(pb2_object=pb2_object, channel=self.channel()) pdm_object = child_class_definition(
pb2_object=pb2_object, channel=self.channel()
)
return pdm_object return pdm_object
except grpc.RpcError as e: except grpc.RpcError as e:
if e.code() == grpc.StatusCode.NOT_FOUND: if e.code() == grpc.StatusCode.NOT_FOUND:
@ -291,7 +314,9 @@ class PdmObjectBase:
raise e raise e
def _call_get_method_async(self, method_name): def _call_get_method_async(self, method_name):
request = PdmObject_pb2.PdmObjectGetterRequest(object=self._pb2_object, method=method_name) request = PdmObject_pb2.PdmObjectGetterRequest(
object=self._pb2_object, method=method_name
)
for chunk in self._pdm_object_stub.CallPdmObjectGetter(request): for chunk in self._pdm_object_stub.CallPdmObjectGetter(request):
yield chunk yield chunk
@ -299,7 +324,7 @@ class PdmObjectBase:
all_values = [] all_values = []
generator = self._call_get_method_async(method_name) generator = self._call_get_method_async(method_name)
for chunk in generator: for chunk in generator:
data = getattr(chunk, chunk.WhichOneof('data')) data = getattr(chunk, chunk.WhichOneof("data"))
for value in data.data: for value in data.data:
all_values.append(value) all_values.append(value)
return all_values return all_values
@ -310,23 +335,38 @@ class PdmObjectBase:
while index < len(array): while index < len(array):
chunk = PdmObject_pb2.PdmObjectSetterChunk() chunk = PdmObject_pb2.PdmObjectSetterChunk()
if index == -1: if index == -1:
chunk.set_request.CopyFrom(PdmObject_pb2.PdmObjectSetterRequest( chunk.set_request.CopyFrom(
request=method_request, data_count=len(array))) PdmObject_pb2.PdmObjectSetterRequest(
request=method_request, data_count=len(array)
)
)
index += 1 index += 1
else: else:
actual_chunk_size = min(len(array) - index + 1, self.__chunk_size) actual_chunk_size = min(len(array) - index + 1, self.__chunk_size)
if isinstance(array[0], float): if isinstance(array[0], float):
chunk.CopyFrom( chunk.CopyFrom(
PdmObject_pb2.PdmObjectSetterChunk(doubles=PdmObject_pb2.DoubleArray(data=array[index:index + PdmObject_pb2.PdmObjectSetterChunk(
actual_chunk_size]))) doubles=PdmObject_pb2.DoubleArray(
data=array[index : index + actual_chunk_size]
)
)
)
elif isinstance(array[0], int): elif isinstance(array[0], int):
chunk.CopyFrom( chunk.CopyFrom(
PdmObject_pb2.PdmObjectSetterChunk(ints=PdmObject_pb2.IntArray(data=array[index:index + PdmObject_pb2.PdmObjectSetterChunk(
actual_chunk_size]))) ints=PdmObject_pb2.IntArray(
data=array[index : index + actual_chunk_size]
)
)
)
elif isinstance(array[0], str): elif isinstance(array[0], str):
chunk.CopyFrom( chunk.CopyFrom(
PdmObject_pb2.PdmObjectSetterChunk(strings=PdmObject_pb2.StringArray(data=array[index:index + PdmObject_pb2.PdmObjectSetterChunk(
actual_chunk_size]))) strings=PdmObject_pb2.StringArray(
data=array[index : index + actual_chunk_size]
)
)
)
else: else:
raise Exception("Wrong data type for set method") raise Exception("Wrong data type for set method")
index += actual_chunk_size index += actual_chunk_size
@ -337,7 +377,8 @@ class PdmObjectBase:
def _call_set_method(self, method_name, values): def _call_set_method(self, method_name, values):
method_request = PdmObject_pb2.PdmObjectGetterRequest( method_request = PdmObject_pb2.PdmObjectGetterRequest(
object=self._pb2_object, method=method_name) object=self._pb2_object, method=method_name
)
request_iterator = self.__generate_set_method_chunks(values, method_request) request_iterator = self.__generate_set_method_chunks(values, method_request)
reply = self._pdm_object_stub.CallPdmObjectSetter(request_iterator) reply = self._pdm_object_stub.CallPdmObjectSetter(request_iterator)
if reply.accepted_value_count < len(values): if reply.accepted_value_count < len(values):
@ -346,9 +387,12 @@ class PdmObjectBase:
def _call_pdm_method(self, method_name, **kwargs): def _call_pdm_method(self, method_name, **kwargs):
pb2_params = PdmObject_pb2.PdmObject(class_keyword=method_name) pb2_params = PdmObject_pb2.PdmObject(class_keyword=method_name)
for key, value in kwargs.items(): for key, value in kwargs.items():
pb2_params.parameters[snake_to_camel(key)] = self.__convert_to_grpc_value(value) pb2_params.parameters[snake_to_camel(key)] = self.__convert_to_grpc_value(
value
)
request = PdmObject_pb2.PdmObjectMethodRequest( request = PdmObject_pb2.PdmObjectMethodRequest(
object=self._pb2_object, method=method_name, params=pb2_params) object=self._pb2_object, method=method_name, params=pb2_params
)
pb2_object = self._pdm_object_stub.CallPdmObjectMethod(request) pb2_object = self._pdm_object_stub.CallPdmObjectMethod(request)
@ -358,7 +402,9 @@ class PdmObjectBase:
if child_class_definition is None: if child_class_definition is None:
return None return None
pdm_object = child_class_definition(pb2_object=pb2_object, channel=self.channel()) pdm_object = child_class_definition(
pb2_object=pb2_object, channel=self.channel()
)
return pdm_object return pdm_object
def update(self): def update(self):
@ -367,4 +413,6 @@ class PdmObjectBase:
if self._pdm_object_stub is not None: if self._pdm_object_stub is not None:
self._pdm_object_stub.UpdateExistingPdmObject(self._pb2_object) self._pdm_object_stub.UpdateExistingPdmObject(self._pb2_object)
else: else:
raise Exception("Object is not connected to GRPC service so cannot update ResInsight") raise Exception(
"Object is not connected to GRPC service so cannot update ResInsight"
)
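The camel/snake helpers above drive the mapping between ResInsight's camelCase parameter keywords and the snake_case Python attributes set in __init__; a standalone round-trip sketch (copies of the two helpers, for illustration only):
import re

def camel_to_snake(name):
    # Insert underscores before capitalised words, then lower-case everything
    s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name)
    return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower()

def snake_to_camel(name):
    return "".join(word.title() for word in name.split("_"))

print(camel_to_snake("ExportSnapshots"))   # -> export_snapshots
print(snake_to_camel("export_snapshots"))  # -> ExportSnapshots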

View File

@ -8,8 +8,8 @@ from .resinsight_classes import PlotWindow, Plot
@add_method(PlotWindow) @add_method(PlotWindow)
def export_snapshot(self, export_folder='', file_prefix='', output_format='PNG'): def export_snapshot(self, export_folder="", file_prefix="", output_format="PNG"):
""" Export snapshot for the current plot """Export snapshot for the current plot
Arguments: Arguments:
export_folder(str): The path to export to. By default will use the global export folder export_folder(str): The path to export to. By default will use the global export folder
@ -18,8 +18,11 @@ def export_snapshot(self, export_folder='', file_prefix='', output_format='PNG')
""" """
return self._execute_command( return self._execute_command(
exportSnapshots=Commands_pb2.ExportSnapshotsRequest(type='PLOTS', exportSnapshots=Commands_pb2.ExportSnapshotsRequest(
prefix=file_prefix, type="PLOTS",
viewId=self.id, prefix=file_prefix,
exportFolder=export_folder, viewId=self.id,
plotOutputFormat=output_format)) exportFolder=export_folder,
plotOutputFormat=output_format,
)
)
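A usage sketch for the reformatted plot snapshot export, assuming a running instance and a project that contains plots; that PlotWindow is exported at the rips top level and reachable via descendants() is an assumption, and the folder is a placeholder.
import rips

resinsight = rips.Instance.find()
# Assumed lookup of plot windows through descendants(); folder is a placeholder
plot_windows = resinsight.project.descendants(rips.PlotWindow)
for plot_window in plot_windows:
    plot_window.export_snapshot(
        export_folder="/tmp/snapshots", file_prefix="summary", output_format="PNG"
    )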

View File

@ -70,8 +70,9 @@ def load_case(self, path):
Returns: Returns:
:class:`rips.generated.generated_classes.Case` :class:`rips.generated.generated_classes.Case`
""" """
command_reply = self._execute_command(loadCase=Commands_pb2.FilePathRequest( command_reply = self._execute_command(
path=path)) loadCase=Commands_pb2.FilePathRequest(path=path)
)
return self.case(command_reply.loadCaseResult.id) return self.case(command_reply.loadCaseResult.id)
@ -125,7 +126,9 @@ def replace_source_cases(self, grid_list_file, case_group_id=0):
""" """
return self._execute_command( return self._execute_command(
replaceSourceCases=Commands_pb2.ReplaceSourceCasesRequest( replaceSourceCases=Commands_pb2.ReplaceSourceCasesRequest(
gridListFile=grid_list_file, caseGroupId=case_group_id)) gridListFile=grid_list_file, caseGroupId=case_group_id
)
)
@add_method(Project) @add_method(Project)
@ -139,9 +142,11 @@ def create_grid_case_group(self, case_paths):
""" """
command_reply = self._execute_command( command_reply = self._execute_command(
createGridCaseGroup=Commands_pb2.CreateGridCaseGroupRequest( createGridCaseGroup=Commands_pb2.CreateGridCaseGroupRequest(
casePaths=case_paths)) casePaths=case_paths
return self.grid_case_group( )
command_reply.createGridCaseGroupResult.groupId) )
return self.grid_case_group(command_reply.createGridCaseGroupResult.groupId)
@add_method(Project) @add_method(Project)
def summary_cases(self): def summary_cases(self):
@ -151,6 +156,7 @@ def summary_cases(self):
""" """
return self.descendants(SummaryCase) return self.descendants(SummaryCase)
@add_method(Project) @add_method(Project)
def views(self): def views(self):
"""Get a list of views belonging to a project""" """Get a list of views belonging to a project"""
@ -243,12 +249,14 @@ def export_multi_case_snapshots(self, grid_list_file):
""" """
return self._execute_command( return self._execute_command(
exportMultiCaseSnapshot=Commands_pb2.ExportMultiCaseRequest( exportMultiCaseSnapshot=Commands_pb2.ExportMultiCaseRequest(
gridListFile=grid_list_file)) gridListFile=grid_list_file
)
)
@add_method(Project) @add_method(Project)
def export_snapshots(self, snapshot_type='ALL', prefix='', plot_format='PNG'): def export_snapshots(self, snapshot_type="ALL", prefix="", plot_format="PNG"):
""" Export all snapshots of a given type """Export all snapshots of a given type
Arguments: Arguments:
snapshot_type (str): Enum string ('ALL', 'VIEWS' or 'PLOTS') snapshot_type (str): Enum string ('ALL', 'VIEWS' or 'PLOTS')
@ -257,12 +265,18 @@ def export_snapshots(self, snapshot_type='ALL', prefix='', plot_format='PNG'):
""" """
return self._execute_command( return self._execute_command(
exportSnapshots=Commands_pb2.ExportSnapshotsRequest( exportSnapshots=Commands_pb2.ExportSnapshotsRequest(
type=snapshot_type, prefix=prefix, caseId=-1, viewId=-1, plotOutputFormat=plot_format)) type=snapshot_type,
prefix=prefix,
caseId=-1,
viewId=-1,
plotOutputFormat=plot_format,
)
)
@add_method(Project) @add_method(Project)
def export_well_paths(self, well_paths=None, md_step_size=5.0): def export_well_paths(self, well_paths=None, md_step_size=5.0):
""" Export a set of well paths """Export a set of well paths
Arguments: Arguments:
well_paths(list): List of strings of well paths. If none, export all. well_paths(list): List of strings of well paths. If none, export all.
@ -272,14 +286,18 @@ def export_well_paths(self, well_paths=None, md_step_size=5.0):
well_paths = [] well_paths = []
elif isinstance(well_paths, str): elif isinstance(well_paths, str):
well_paths = [well_paths] well_paths = [well_paths]
return self._execute_command(exportWellPaths=Commands_pb2.ExportWellPathRequest( return self._execute_command(
wellPathNames=well_paths, mdStepSize=md_step_size)) exportWellPaths=Commands_pb2.ExportWellPathRequest(
wellPathNames=well_paths, mdStepSize=md_step_size
)
)
@add_method(Project) @add_method(Project)
def scale_fracture_template(self, template_id, half_length, height, def scale_fracture_template(
d_factor, conductivity): self, template_id, half_length, height, d_factor, conductivity
""" Scale fracture template parameters ):
"""Scale fracture template parameters
Arguments: Arguments:
template_id(int): ID of fracture template template_id(int): ID of fracture template
@ -294,12 +312,14 @@ def scale_fracture_template(self, template_id, half_length, height,
halfLength=half_length, halfLength=half_length,
height=height, height=height,
dFactor=d_factor, dFactor=d_factor,
conductivity=conductivity)) conductivity=conductivity,
)
)
@add_method(Project) @add_method(Project)
def set_fracture_containment(self, template_id, top_layer, base_layer): def set_fracture_containment(self, template_id, top_layer, base_layer):
""" Set fracture template containment parameters """Set fracture template containment parameters
Arguments: Arguments:
template_id(int): ID of fracture template template_id(int): ID of fracture template
@ -308,12 +328,14 @@ def set_fracture_containment(self, template_id, top_layer, base_layer):
""" """
return self._execute_command( return self._execute_command(
setFractureContainment=Commands_pb2.SetFracContainmentRequest( setFractureContainment=Commands_pb2.SetFracContainmentRequest(
id=template_id, topLayer=top_layer, baseLayer=base_layer)) id=template_id, topLayer=top_layer, baseLayer=base_layer
)
)
@add_method(Project) @add_method(Project)
def import_well_paths(self, well_path_files=None, well_path_folder=''): def import_well_paths(self, well_path_files=None, well_path_folder=""):
""" Import well paths into project """Import well paths into project
Arguments: Arguments:
well_path_files(list): List of file paths to import well_path_files(list): List of file paths to import
@ -325,8 +347,11 @@ def import_well_paths(self, well_path_files=None, well_path_folder=''):
if well_path_files is None: if well_path_files is None:
well_path_files = [] well_path_files = []
res = self._execute_command(importWellPaths=Commands_pb2.ImportWellPathsRequest(wellPathFolder=well_path_folder, res = self._execute_command(
wellPathFiles=well_path_files)) importWellPaths=Commands_pb2.ImportWellPathsRequest(
wellPathFolder=well_path_folder, wellPathFiles=well_path_files
)
)
well_paths = [] well_paths = []
for well_path_name in res.importWellPathsResult.wellPathNames: for well_path_name in res.importWellPathsResult.wellPathNames:
well_paths.append(self.well_path_by_name(well_path_name)) well_paths.append(self.well_path_by_name(well_path_name))
@ -358,8 +383,8 @@ def well_path_by_name(self, well_path_name):
@add_method(Project) @add_method(Project)
def import_well_log_files(self, well_log_files=None, well_log_folder=''): def import_well_log_files(self, well_log_files=None, well_log_folder=""):
""" Import well log files into project """Import well log files into project
Arguments: Arguments:
well_log_files(list): List of file paths to import well_log_files(list): List of file paths to import
@ -371,14 +396,17 @@ def import_well_log_files(self, well_log_files=None, well_log_folder=''):
if well_log_files is None: if well_log_files is None:
well_log_files = [] well_log_files = []
res = self._execute_command(importWellLogFiles=Commands_pb2.ImportWellLogFilesRequest(wellLogFolder=well_log_folder, res = self._execute_command(
wellLogFiles=well_log_files)) importWellLogFiles=Commands_pb2.ImportWellLogFilesRequest(
wellLogFolder=well_log_folder, wellLogFiles=well_log_files
)
)
return res.importWellLogFilesResult.wellPathNames return res.importWellLogFilesResult.wellPathNames
@add_method(Project) @add_method(Project)
def import_formation_names(self, formation_files=None): def import_formation_names(self, formation_files=None):
""" Import formation names into project """Import formation names into project
Arguments: Arguments:
formation_files(list): list of files to import formation_files(list): list of files to import
@ -389,5 +417,8 @@ def import_formation_names(self, formation_files=None):
elif isinstance(formation_files, str): elif isinstance(formation_files, str):
formation_files = [formation_files] formation_files = [formation_files]
self._execute_command(importFormationNames=Commands_pb2.ImportFormationNamesRequest(formationFiles=formation_files, self._execute_command(
applyToCaseId=-1)) importFormationNames=Commands_pb2.ImportFormationNamesRequest(
formationFiles=formation_files, applyToCaseId=-1
)
)
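A combined sketch of the project-level commands reformatted above, assuming a running instance, a grid file and a well path deviation file on disk; all paths and the prefix are placeholders.
import rips

resinsight = rips.Instance.find()
project = resinsight.project

# Load a case and import a deviation file (placeholder paths)
case = project.load_case("/data/CASE.EGRID")
well_paths = project.import_well_paths(well_path_files=["/data/wellpath_a.dev"])

# Export all well paths and all snapshots in the project
project.export_well_paths(md_step_size=5.0)
project.export_snapshots(snapshot_type="ALL", prefix="run1_", plot_format="PNG")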

View File

@ -36,9 +36,9 @@ def status(self, timestep):
timestep(int): Time step index timestep(int): Time step index
""" """
sim_well_request = SimulationWell_pb2.SimulationWellRequest(case_id=self.case().id, sim_well_request = SimulationWell_pb2.SimulationWellRequest(
well_name=self.name, case_id=self.case().id, well_name=self.name, timestep=timestep
timestep=timestep) )
return self._simulation_well_stub.GetSimulationWellStatus(sim_well_request) return self._simulation_well_stub.GetSimulationWellStatus(sim_well_request)
@ -63,9 +63,9 @@ def cells(self, timestep):
List of SimulationWellCellInfo List of SimulationWellCellInfo
""" """
sim_well_request = SimulationWell_pb2.SimulationWellRequest(case_id=self.case().id, sim_well_request = SimulationWell_pb2.SimulationWellRequest(
well_name=self.name, case_id=self.case().id, well_name=self.name, timestep=timestep
timestep=timestep) )
return self._simulation_well_stub.GetSimulationWellCells(sim_well_request).data return self._simulation_well_stub.GetSimulationWellCells(sim_well_request).data
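A small sketch of the simulation well accessors above, assuming a running instance and a loaded case that contains simulation wells; the case-level simulation_wells() accessor is assumed, and the path is a placeholder.
import rips

resinsight = rips.Instance.find()
case = resinsight.project.load_case("/data/CASE.EGRID")  # placeholder path

# simulation_wells() is assumed here as the case-level accessor for SimulationWell objects
for well in case.simulation_wells():
    status = well.status(timestep=0)  # well status at time step 0
    cells = well.cells(timestep=0)    # list of SimulationWellCellInfo
    print(well.name, len(cells))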

View File

@ -3,7 +3,7 @@ import sys
import os import os
import getopt import getopt
sys.path.insert(1, os.path.join(sys.path[0], '../../')) sys.path.insert(1, os.path.join(sys.path[0], "../../"))
import rips import rips
_rips_instance = None _rips_instance = None
@ -22,20 +22,28 @@ def initialize_test():
def pytest_addoption(parser): def pytest_addoption(parser):
parser.addoption("--console", action="store_true", default=False, parser.addoption(
help="Run as console application") "--console",
parser.addoption("--existing", action="store_true", default=False, action="store_true",
help="Look for existing ResInsight") default=False,
help="Run as console application",
)
parser.addoption(
"--existing",
action="store_true",
default=False,
help="Look for existing ResInsight",
)
def pytest_configure(config): def pytest_configure(config):
global _rips_instance global _rips_instance
console = False console = False
if config.getoption('--existing'): if config.getoption("--existing"):
print("Looking for existing ResInsight") print("Looking for existing ResInsight")
_rips_instance = rips.Instance.find() _rips_instance = rips.Instance.find()
else: else:
if config.getoption('--console'): if config.getoption("--console"):
console = True console = True
_rips_instance = rips.Instance.launch(console=console) _rips_instance = rips.Instance.launch(console=console)
if not _rips_instance: if not _rips_instance:
@ -44,6 +52,6 @@ def pytest_configure(config):
def pytest_unconfigure(config): def pytest_unconfigure(config):
if not config.getoption('--existing'): if not config.getoption("--existing"):
if _rips_instance: if _rips_instance:
_rips_instance.exit() _rips_instance.exit()

View File

@ -5,45 +5,47 @@ import pytest
import grpc import grpc
import tempfile import tempfile
sys.path.insert(1, os.path.join(sys.path[0], '../../')) sys.path.insert(1, os.path.join(sys.path[0], "../../"))
import rips import rips
import dataroot import dataroot
def test_Launch(rips_instance, initialize_test): def test_Launch(rips_instance, initialize_test):
assert(rips_instance is not None) assert rips_instance is not None
def test_EmptyProject(rips_instance, initialize_test): def test_EmptyProject(rips_instance, initialize_test):
cases = rips_instance.project.cases() cases = rips_instance.project.cases()
assert(len(cases) is 0) assert len(cases) is 0
def test_OneCase(rips_instance, initialize_test): def test_OneCase(rips_instance, initialize_test):
case = rips_instance.project.load_case( case = rips_instance.project.load_case(
dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID") dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
assert(case.name == "TEST10K_FLT_LGR_NNC") )
assert(case.id == 0) assert case.name == "TEST10K_FLT_LGR_NNC"
assert case.id == 0
cases = rips_instance.project.cases() cases = rips_instance.project.cases()
assert(len(cases) is 1) assert len(cases) is 1
def test_BoundingBox(rips_instance, initialize_test): def test_BoundingBox(rips_instance, initialize_test):
case = rips_instance.project.load_case( case = rips_instance.project.load_case(
dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID") dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
assert(case.name == "TEST10K_FLT_LGR_NNC") )
assert case.name == "TEST10K_FLT_LGR_NNC"
boundingbox = case.reservoir_boundingbox() boundingbox = case.reservoir_boundingbox()
assert(math.isclose(3382.90, boundingbox.min_x, abs_tol=1.0e-1)) assert math.isclose(3382.90, boundingbox.min_x, abs_tol=1.0e-1)
assert(math.isclose(5850.48, boundingbox.max_x, abs_tol=1.0e-1)) assert math.isclose(5850.48, boundingbox.max_x, abs_tol=1.0e-1)
assert(math.isclose(4157.45, boundingbox.min_y, abs_tol=1.0e-1)) assert math.isclose(4157.45, boundingbox.min_y, abs_tol=1.0e-1)
assert(math.isclose(7354.93, boundingbox.max_y, abs_tol=1.0e-1)) assert math.isclose(7354.93, boundingbox.max_y, abs_tol=1.0e-1)
assert(math.isclose(-4252.61, boundingbox.min_z, abs_tol=1.0e-1)) assert math.isclose(-4252.61, boundingbox.min_z, abs_tol=1.0e-1)
assert(math.isclose(-4103.60, boundingbox.max_z, abs_tol=1.0e-1)) assert math.isclose(-4103.60, boundingbox.max_z, abs_tol=1.0e-1)
min_depth, max_depth = case.reservoir_depth_range() min_depth, max_depth = case.reservoir_depth_range()
assert(math.isclose(4103.60, min_depth, abs_tol=1.0e-1)) assert math.isclose(4103.60, min_depth, abs_tol=1.0e-1)
assert(math.isclose(4252.61, max_depth, abs_tol=1.0e-1)) assert math.isclose(4252.61, max_depth, abs_tol=1.0e-1)
def test_MultipleCases(rips_instance, initialize_test): def test_MultipleCases(rips_instance, initialize_test):
@ -59,9 +61,9 @@ def test_MultipleCases(rips_instance, initialize_test):
rips_instance.project.load_case(path=case_path) rips_instance.project.load_case(path=case_path)
cases = rips_instance.project.cases() cases = rips_instance.project.cases()
assert(len(cases) == len(case_names)) assert len(cases) == len(case_names)
for i, case_name in enumerate(case_names): for i, case_name in enumerate(case_names):
assert(case_name == cases[i].name) assert case_name == cases[i].name
def get_cell_index_with_ijk(cell_info, i, j, k): def get_cell_index_with_ijk(cell_info, i, j, k):
@ -72,48 +74,50 @@ def get_cell_index_with_ijk(cell_info, i, j, k):
def check_corner(actual, expected): def check_corner(actual, expected):
assert(math.isclose(actual.x, expected[0], abs_tol=0.1)) assert math.isclose(actual.x, expected[0], abs_tol=0.1)
assert(math.isclose(actual.y, expected[1], abs_tol=0.1)) assert math.isclose(actual.y, expected[1], abs_tol=0.1)
assert(math.isclose(actual.z, expected[2], abs_tol=0.1)) assert math.isclose(actual.z, expected[2], abs_tol=0.1)
def test_10k(rips_instance, initialize_test): def test_10k(rips_instance, initialize_test):
case_path = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID" case_path = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
case = rips_instance.project.load_case(path=case_path) case = rips_instance.project.load_case(path=case_path)
assert(len(case.grids()) == 2) assert len(case.grids()) == 2
cell_count_info = case.cell_count() cell_count_info = case.cell_count()
assert(cell_count_info.active_cell_count == 11125) assert cell_count_info.active_cell_count == 11125
assert(cell_count_info.reservoir_cell_count == 316224) assert cell_count_info.reservoir_cell_count == 316224
time_steps = case.time_steps() time_steps = case.time_steps()
assert(len(time_steps) == 9) assert len(time_steps) == 9
days_since_start = case.days_since_start() days_since_start = case.days_since_start()
assert(len(days_since_start) == 9) assert len(days_since_start) == 9
cell_info = case.cell_info_for_active_cells() cell_info = case.cell_info_for_active_cells()
assert(len(cell_info) == cell_count_info.active_cell_count) assert len(cell_info) == cell_count_info.active_cell_count
# Check an active cell (found in resinsight ui) # Check an active cell (found in resinsight ui)
cell_index = get_cell_index_with_ijk(cell_info, 23, 44, 19) cell_index = get_cell_index_with_ijk(cell_info, 23, 44, 19)
assert(cell_index != -1) assert cell_index != -1
cell_centers = case.active_cell_centers() cell_centers = case.active_cell_centers()
assert(len(cell_centers) == cell_count_info.active_cell_count) assert len(cell_centers) == cell_count_info.active_cell_count
# Check the cell center for the specific cell # Check the cell center for the specific cell
assert(math.isclose(3627.17, cell_centers[cell_index].x, abs_tol=0.1)) assert math.isclose(3627.17, cell_centers[cell_index].x, abs_tol=0.1)
assert(math.isclose(5209.75, cell_centers[cell_index].y, abs_tol=0.1)) assert math.isclose(5209.75, cell_centers[cell_index].y, abs_tol=0.1)
assert(math.isclose(4179.6, cell_centers[cell_index].z, abs_tol=0.1)) assert math.isclose(4179.6, cell_centers[cell_index].z, abs_tol=0.1)
cell_corners = case.active_cell_corners() cell_corners = case.active_cell_corners()
assert(len(cell_corners) == cell_count_info.active_cell_count) assert len(cell_corners) == cell_count_info.active_cell_count
# Expected values from ResInsight UI # Expected values from ResInsight UI
expected_corners = [[3565.22, 5179.02, 4177.18], expected_corners = [
[3655.67, 5145.34, 4176.63], [3565.22, 5179.02, 4177.18],
[3690.07, 5240.69, 4180.02], [3655.67, 5145.34, 4176.63],
[3599.87, 5275.16, 4179.32], [3690.07, 5240.69, 4180.02],
[3564.13, 5178.61, 4179.75], [3599.87, 5275.16, 4179.32],
[3654.78, 5144.79, 4179.23], [3564.13, 5178.61, 4179.75],
[3688.99, 5239.88, 4182.7], [3654.78, 5144.79, 4179.23],
[3598.62, 5274.48, 4181.96]] [3688.99, 5239.88, 4182.7],
[3598.62, 5274.48, 4181.96],
]
check_corner(cell_corners[cell_index].c0, expected_corners[0]) check_corner(cell_corners[cell_index].c0, expected_corners[0])
check_corner(cell_corners[cell_index].c1, expected_corners[1]) check_corner(cell_corners[cell_index].c1, expected_corners[1])
check_corner(cell_corners[cell_index].c2, expected_corners[2]) check_corner(cell_corners[cell_index].c2, expected_corners[2])
@ -125,53 +129,61 @@ def test_10k(rips_instance, initialize_test):
# No coarsening info for this case # No coarsening info for this case
coarsening_info = case.coarsening_info() coarsening_info = case.coarsening_info()
assert(len(coarsening_info) == 0) assert len(coarsening_info) == 0
def test_PdmObject(rips_instance, initialize_test): def test_PdmObject(rips_instance, initialize_test):
case_path = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID" case_path = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
case = rips_instance.project.load_case(path=case_path) case = rips_instance.project.load_case(path=case_path)
assert(case.id == 0) assert case.id == 0
assert(case.address() is not 0) assert case.address() is not 0
assert(case.__class__.__name__ == "EclipseCase") assert case.__class__.__name__ == "EclipseCase"
@pytest.mark.skipif(sys.platform.startswith('linux'), reason="Brugge is currently exceptionally slow on Linux") @pytest.mark.skipif(
sys.platform.startswith("linux"),
reason="Brugge is currently exceptionally slow on Linux",
)
def test_brugge_0010(rips_instance, initialize_test): def test_brugge_0010(rips_instance, initialize_test):
case_path = dataroot.PATH + "/Case_with_10_timesteps/Real10/BRUGGE_0010.EGRID" case_path = dataroot.PATH + "/Case_with_10_timesteps/Real10/BRUGGE_0010.EGRID"
case = rips_instance.project.load_case(path=case_path) case = rips_instance.project.load_case(path=case_path)
assert(len(case.grids()) == 1) assert len(case.grids()) == 1
cellCountInfo = case.cell_count() cellCountInfo = case.cell_count()
assert(cellCountInfo.active_cell_count == 43374) assert cellCountInfo.active_cell_count == 43374
assert(cellCountInfo.reservoir_cell_count == 60048) assert cellCountInfo.reservoir_cell_count == 60048
time_steps = case.time_steps() time_steps = case.time_steps()
assert(len(time_steps) == 11) assert len(time_steps) == 11
days_since_start = case.days_since_start() days_since_start = case.days_since_start()
assert(len(days_since_start) == 11) assert len(days_since_start) == 11
@pytest.mark.skipif(sys.platform.startswith('linux'), reason="Brugge is currently exceptionally slow on Linux") @pytest.mark.skipif(
sys.platform.startswith("linux"),
reason="Brugge is currently exceptionally slow on Linux",
)
def test_replaceCase(rips_instance, initialize_test): def test_replaceCase(rips_instance, initialize_test):
project = rips_instance.project.open(dataroot.PATH + "/TEST10K_FLT_LGR_NNC/10KWithWellLog.rsp") project = rips_instance.project.open(
dataroot.PATH + "/TEST10K_FLT_LGR_NNC/10KWithWellLog.rsp"
)
case_path = dataroot.PATH + "/Case_with_10_timesteps/Real0/BRUGGE_0000.EGRID" case_path = dataroot.PATH + "/Case_with_10_timesteps/Real0/BRUGGE_0000.EGRID"
case = project.case(case_id=0) case = project.case(case_id=0)
assert(case is not None) assert case is not None
assert(case.name == "TEST10K_FLT_LGR_NNC") assert case.name == "TEST10K_FLT_LGR_NNC"
assert(case.id == 0) assert case.id == 0
cases = rips_instance.project.cases() cases = rips_instance.project.cases()
assert(len(cases) is 1) assert len(cases) is 1
case.replace(new_grid_file=case_path) case.replace(new_grid_file=case_path)
# Check that the case object has been changed # Check that the case object has been changed
assert(case.name == "BRUGGE_0000") assert case.name == "BRUGGE_0000"
assert(case.id == 0) assert case.id == 0
cases = rips_instance.project.cases() cases = rips_instance.project.cases()
assert(len(cases) is 1) assert len(cases) is 1
# Check that retrieving the case object again will yield the changed object # Check that retrieving the case object again will yield the changed object
case = project.case(case_id=0) case = project.case(case_id=0)
assert(case.name == "BRUGGE_0000") assert case.name == "BRUGGE_0000"
assert(case.id == 0) assert case.id == 0
def test_loadNonExistingCase(rips_instance, initialize_test): def test_loadNonExistingCase(rips_instance, initialize_test):
@ -180,26 +192,31 @@ def test_loadNonExistingCase(rips_instance, initialize_test):
assert rips_instance.project.load_case(case_path) assert rips_instance.project.load_case(case_path)
@pytest.mark.skipif(sys.platform.startswith('linux'), reason="Brugge is currently exceptionally slow on Linux") @pytest.mark.skipif(
sys.platform.startswith("linux"),
reason="Brugge is currently exceptionally slow on Linux",
)
def test_exportFlowCharacteristics(rips_instance, initialize_test): def test_exportFlowCharacteristics(rips_instance, initialize_test):
case_path = dataroot.PATH + "/Case_with_10_timesteps/Real0/BRUGGE_0000.EGRID" case_path = dataroot.PATH + "/Case_with_10_timesteps/Real0/BRUGGE_0000.EGRID"
case = rips_instance.project.load_case(case_path) case = rips_instance.project.load_case(case_path)
with tempfile.TemporaryDirectory(prefix="rips") as tmpdirname: with tempfile.TemporaryDirectory(prefix="rips") as tmpdirname:
print("Temporary folder: ", tmpdirname) print("Temporary folder: ", tmpdirname)
file_name = tmpdirname + "/exportFlowChar.txt" file_name = tmpdirname + "/exportFlowChar.txt"
case.export_flow_characteristics(time_steps=8, producers=[], case.export_flow_characteristics(
injectors="I01", file_name=file_name) time_steps=8, producers=[], injectors="I01", file_name=file_name
)
def test_selected_cells(rips_instance, initialize_test): def test_selected_cells(rips_instance, initialize_test):
case = rips_instance.project.load_case( case = rips_instance.project.load_case(
dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID") dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
assert(case.name == "TEST10K_FLT_LGR_NNC") )
assert case.name == "TEST10K_FLT_LGR_NNC"
selected_cells = case.selected_cells() selected_cells = case.selected_cells()
assert(len(selected_cells) == 0) assert len(selected_cells) == 0
time_step_info = case.time_steps() time_step_info = case.time_steps()
for (tidx, timestep) in enumerate(time_step_info): for (tidx, timestep) in enumerate(time_step_info):
# Try to read for SOIL the time step (will be empty since nothing is selected) # Try to read for SOIL the time step (will be empty since nothing is selected)
soil_results = case.selected_cell_property('DYNAMIC_NATIVE', 'SOIL', tidx) soil_results = case.selected_cell_property("DYNAMIC_NATIVE", "SOIL", tidx)
assert(len(soil_results) == 0) assert len(soil_results) == 0

View File

@ -4,22 +4,30 @@ import tempfile
import pytest import pytest
import grpc import grpc
sys.path.insert(1, os.path.join(sys.path[0], '../../')) sys.path.insert(1, os.path.join(sys.path[0], "../../"))
import rips import rips
import dataroot import dataroot
def test_create_lgr_well(rips_instance, initialize_test): def test_create_lgr_well(rips_instance, initialize_test):
case = rips_instance.project.load_case( case = rips_instance.project.load_case(
dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID") dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
assert(case.name == "TEST10K_FLT_LGR_NNC") )
assert(len(case.grids()) == 2) assert case.name == "TEST10K_FLT_LGR_NNC"
assert len(case.grids()) == 2
well_files=[dataroot.PATH + "/TEST10K_FLT_LGR_NNC/wellpath_a.dev"] well_files = [dataroot.PATH + "/TEST10K_FLT_LGR_NNC/wellpath_a.dev"]
rips_instance.project.import_well_paths(well_path_files=well_files) rips_instance.project.import_well_paths(well_path_files=well_files)
time_step=5 time_step = 5
well_path_names=["Well Path A"] well_path_names = ["Well Path A"]
case.create_lgr_for_completion(time_step, well_path_names, refinement_i=2, refinement_j=3,refinement_k=1, split_type="LGR_PER_WELL") case.create_lgr_for_completion(
assert(len(case.grids()) == 3) time_step,
well_path_names,
refinement_i=2,
refinement_j=3,
refinement_k=1,
split_type="LGR_PER_WELL",
)
assert len(case.grids()) == 3

View File

@ -2,49 +2,51 @@ import sys
import os import os
import math import math
sys.path.insert(1, os.path.join(sys.path[0], '../../')) sys.path.insert(1, os.path.join(sys.path[0], "../../"))
import rips import rips
import dataroot import dataroot
def check_corner(actual, expected): def check_corner(actual, expected):
assert(math.isclose(actual.x, expected[0], abs_tol=0.1)) assert math.isclose(actual.x, expected[0], abs_tol=0.1)
assert(math.isclose(actual.y, expected[1], abs_tol=0.1)) assert math.isclose(actual.y, expected[1], abs_tol=0.1)
assert(math.isclose(actual.z, expected[2], abs_tol=0.1)) assert math.isclose(actual.z, expected[2], abs_tol=0.1)
def test_10k(rips_instance, initialize_test): def test_10k(rips_instance, initialize_test):
casePath = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID" casePath = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
case = rips_instance.project.load_case(path=casePath) case = rips_instance.project.load_case(path=casePath)
assert(len(case.grids()) == 2) assert len(case.grids()) == 2
grid = case.grid(index=0) grid = case.grid(index=0)
dimensions = grid.dimensions() dimensions = grid.dimensions()
assert(dimensions.i == 90) assert dimensions.i == 90
assert(dimensions.j == 96) assert dimensions.j == 96
assert(dimensions.k == 36) assert dimensions.k == 36
cell_centers = grid.cell_centers() cell_centers = grid.cell_centers()
assert(len(cell_centers) == (dimensions.i * dimensions.j * dimensions.k)) assert len(cell_centers) == (dimensions.i * dimensions.j * dimensions.k)
# Test a specific cell (results from ResInsight UI) # Test a specific cell (results from ResInsight UI)
cell_index = 168143 cell_index = 168143
assert(math.isclose(3627.17, cell_centers[cell_index].x, abs_tol=0.1)) assert math.isclose(3627.17, cell_centers[cell_index].x, abs_tol=0.1)
assert(math.isclose(5209.75, cell_centers[cell_index].y, abs_tol=0.1)) assert math.isclose(5209.75, cell_centers[cell_index].y, abs_tol=0.1)
assert(math.isclose(4179.6, cell_centers[cell_index].z, abs_tol=0.1)) assert math.isclose(4179.6, cell_centers[cell_index].z, abs_tol=0.1)
cell_corners = grid.cell_corners() cell_corners = grid.cell_corners()
assert(len(cell_corners) == (dimensions.i * dimensions.j * dimensions.k)) assert len(cell_corners) == (dimensions.i * dimensions.j * dimensions.k)
# Expected values from ResInsight UI # Expected values from ResInsight UI
expected_corners = [[3565.22, 5179.02, 4177.18], expected_corners = [
[3655.67, 5145.34, 4176.63], [3565.22, 5179.02, 4177.18],
[3690.07, 5240.69, 4180.02], [3655.67, 5145.34, 4176.63],
[3599.87, 5275.16, 4179.32], [3690.07, 5240.69, 4180.02],
[3564.13, 5178.61, 4179.75], [3599.87, 5275.16, 4179.32],
[3654.78, 5144.79, 4179.23], [3564.13, 5178.61, 4179.75],
[3688.99, 5239.88, 4182.7], [3654.78, 5144.79, 4179.23],
[3598.62, 5274.48, 4181.96]] [3688.99, 5239.88, 4182.7],
[3598.62, 5274.48, 4181.96],
]
check_corner(cell_corners[cell_index].c0, expected_corners[0]) check_corner(cell_corners[cell_index].c0, expected_corners[0])
check_corner(cell_corners[cell_index].c1, expected_corners[1]) check_corner(cell_corners[cell_index].c1, expected_corners[1])
check_corner(cell_corners[cell_index].c2, expected_corners[2]) check_corner(cell_corners[cell_index].c2, expected_corners[2])

View File

@ -5,7 +5,7 @@ import pytest
import rips.generated.NNCProperties_pb2 as NNCProperties_pb2 import rips.generated.NNCProperties_pb2 as NNCProperties_pb2
sys.path.insert(1, os.path.join(sys.path[0], '../../')) sys.path.insert(1, os.path.join(sys.path[0], "../../"))
import rips import rips
import dataroot import dataroot
@ -16,25 +16,28 @@ def test_10kSync(rips_instance, initialize_test):
case = rips_instance.project.load_case(path=casePath) case = rips_instance.project.load_case(path=casePath)
properties = case.available_nnc_properties() properties = case.available_nnc_properties()
assert(len(properties) == 1) assert len(properties) == 1
assert("TRAN" == properties[0].name) assert "TRAN" == properties[0].name
assert(NNCProperties_pb2.NNCPropertyType.Value('NNC_STATIC') == properties[0].property_type) assert (
NNCProperties_pb2.NNCPropertyType.Value("NNC_STATIC")
== properties[0].property_type
)
nnc_connections = case.nnc_connections() nnc_connections = case.nnc_connections()
assert(len(nnc_connections) == 3627) assert len(nnc_connections) == 3627
connection = nnc_connections[0] connection = nnc_connections[0]
assert(connection.cell1.i == 33) assert connection.cell1.i == 33
assert(connection.cell1.j == 40) assert connection.cell1.j == 40
assert(connection.cell1.k == 14) assert connection.cell1.k == 14
assert(connection.cell_grid_index1 == 0) assert connection.cell_grid_index1 == 0
tran_vals = case.nnc_connections_static_values("TRAN") tran_vals = case.nnc_connections_static_values("TRAN")
assert(len(tran_vals) == len(nnc_connections)) assert len(tran_vals) == len(nnc_connections)
for t in tran_vals: for t in tran_vals:
assert(isinstance(t, float)) assert isinstance(t, float)
# Generate some data # Generate some data
new_data = [] new_data = []
@ -44,9 +47,9 @@ def test_10kSync(rips_instance, initialize_test):
property_name = "NEW_PROP" property_name = "NEW_PROP"
case.set_nnc_connections_values(new_data, property_name, 0) case.set_nnc_connections_values(new_data, property_name, 0)
new_prop_vals = case.nnc_connections_generated_values(property_name, 0) new_prop_vals = case.nnc_connections_generated_values(property_name, 0)
assert(len(new_prop_vals) == len(new_data)) assert len(new_prop_vals) == len(new_data)
for i in range(0, len(new_data)): for i in range(0, len(new_data)):
assert(new_data[i] == new_prop_vals[i]) assert new_data[i] == new_prop_vals[i]
# Set some other data for second time step # Set some other data for second time step
for i in range(0, len(new_data)): for i in range(0, len(new_data)):
@ -54,9 +57,9 @@ def test_10kSync(rips_instance, initialize_test):
case.set_nnc_connections_values(new_data, property_name, 1) case.set_nnc_connections_values(new_data, property_name, 1)
new_prop_vals = case.nnc_connections_generated_values(property_name, 1) new_prop_vals = case.nnc_connections_generated_values(property_name, 1)
assert(len(new_prop_vals) == len(nnc_connections)) assert len(new_prop_vals) == len(nnc_connections)
for i in range(0, len(new_data)): for i in range(0, len(new_data)):
assert(new_data[i] == new_prop_vals[i]) assert new_data[i] == new_prop_vals[i]
def test_non_existing_dynamic_values(rips_instance, initialize_test): def test_non_existing_dynamic_values(rips_instance, initialize_test):
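
The loop that fills new_data falls outside the hunks shown for test_10kSync. A minimal, hypothetical reconstruction of that generation step, using only the calls visible above and a placeholder value per connection:

    # Hypothetical sketch: one float per NNC connection, written back as a
    # generated property for time step 0 and read again for verification.
    new_data = [float(i) for i in range(len(nnc_connections))]
    case.set_nnc_connections_values(new_data, "NEW_PROP", 0)
    new_prop_vals = case.nnc_connections_generated_values("NEW_PROP", 0)
    assert len(new_prop_vals) == len(new_data)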

View File

@ -4,31 +4,35 @@ import pytest
import grpc import grpc
import tempfile import tempfile
sys.path.insert(1, os.path.join(sys.path[0], '../../')) sys.path.insert(1, os.path.join(sys.path[0], "../../"))
import rips import rips
import dataroot import dataroot
def test_loadProject(rips_instance, initialize_test): def test_loadProject(rips_instance, initialize_test):
project = rips_instance.project.open(dataroot.PATH + "/TEST10K_FLT_LGR_NNC/10KWithWellLog.rsp") project = rips_instance.project.open(
dataroot.PATH + "/TEST10K_FLT_LGR_NNC/10KWithWellLog.rsp"
)
case = project.cases()[0] case = project.cases()[0]
assert(case is not None) assert case is not None
assert(case.name == "TEST10K_FLT_LGR_NNC") assert case.name == "TEST10K_FLT_LGR_NNC"
assert(case.id == 0) assert case.id == 0
cases = rips_instance.project.cases() cases = rips_instance.project.cases()
assert(len(cases) is 1) assert len(cases) is 1
def test_well_log_plots(rips_instance, initialize_test): def test_well_log_plots(rips_instance, initialize_test):
project = rips_instance.project.open(dataroot.PATH + "/TEST10K_FLT_LGR_NNC/10KWithWellLog.rsp") project = rips_instance.project.open(
dataroot.PATH + "/TEST10K_FLT_LGR_NNC/10KWithWellLog.rsp"
)
plots = project.plots() plots = project.plots()
well_log_plots = [] well_log_plots = []
for plot in plots: for plot in plots:
if isinstance(plot, rips.WellLogPlot): if isinstance(plot, rips.WellLogPlot):
assert(plot.depth_type == "MEASURED_DEPTH") assert plot.depth_type == "MEASURED_DEPTH"
well_log_plots.append(plot) well_log_plots.append(plot)
assert(len(well_log_plots) == 2) assert len(well_log_plots) == 2
with tempfile.TemporaryDirectory(prefix="rips") as tmpdirname: with tempfile.TemporaryDirectory(prefix="rips") as tmpdirname:
for well_log_plot in well_log_plots: for well_log_plot in well_log_plots:
@ -40,23 +44,30 @@ def test_well_log_plots(rips_instance, initialize_test):
files = os.listdir(tmpdirname) files = os.listdir(tmpdirname)
print(files) print(files)
if rips_instance.is_gui(): if rips_instance.is_gui():
assert(len(files) == 4) assert len(files) == 4
else: else:
assert(len(files) == 2) assert len(files) == 2
plots2 = project.plots() plots2 = project.plots()
for plot2 in plots2: for plot2 in plots2:
if isinstance(plot2, rips.WellLogPlot): if isinstance(plot2, rips.WellLogPlot):
assert(plot2.depth_type == "TRUE_VERTICAL_DEPTH_RKB") assert plot2.depth_type == "TRUE_VERTICAL_DEPTH_RKB"
@pytest.mark.skipif(sys.platform.startswith('linux'), reason="Brugge is currently exceptionally slow on Linux") @pytest.mark.skipif(
sys.platform.startswith("linux"),
reason="Brugge is currently exceptionally slow on Linux",
)
def test_loadGridCaseGroup(rips_instance, initialize_test): def test_loadGridCaseGroup(rips_instance, initialize_test):
case_paths = [] case_paths = []
case_paths.append(dataroot.PATH + "/Case_with_10_timesteps/Real0/BRUGGE_0000.EGRID") case_paths.append(dataroot.PATH + "/Case_with_10_timesteps/Real0/BRUGGE_0000.EGRID")
case_paths.append(dataroot.PATH + "/Case_with_10_timesteps/Real10/BRUGGE_0010.EGRID") case_paths.append(
grid_case_group = rips_instance.project.create_grid_case_group(case_paths=case_paths) dataroot.PATH + "/Case_with_10_timesteps/Real10/BRUGGE_0010.EGRID"
assert(grid_case_group is not None and grid_case_group.group_id == 0) )
grid_case_group = rips_instance.project.create_grid_case_group(
case_paths=case_paths
)
assert grid_case_group is not None and grid_case_group.group_id == 0
def test_exportSnapshots(rips_instance, initialize_test): def test_exportSnapshots(rips_instance, initialize_test):
@ -67,9 +78,9 @@ def test_exportSnapshots(rips_instance, initialize_test):
rips_instance.project.load_case(case_path) rips_instance.project.load_case(case_path)
with tempfile.TemporaryDirectory(prefix="rips") as tmpdirname: with tempfile.TemporaryDirectory(prefix="rips") as tmpdirname:
print("Temporary folder: ", tmpdirname) print("Temporary folder: ", tmpdirname)
rips_instance.set_export_folder(export_type='SNAPSHOTS', path=tmpdirname) rips_instance.set_export_folder(export_type="SNAPSHOTS", path=tmpdirname)
rips_instance.project.export_snapshots() rips_instance.project.export_snapshots()
print(os.listdir(tmpdirname)) print(os.listdir(tmpdirname))
# assert(len(os.listdir(tmpdirname)) > 0) # assert(len(os.listdir(tmpdirname)) > 0)
for fileName in os.listdir(tmpdirname): for fileName in os.listdir(tmpdirname):
assert(os.path.splitext(fileName)[1] == '.png') assert os.path.splitext(fileName)[1] == ".png"

View File

@ -4,7 +4,7 @@ import grpc
import pytest import pytest
import tempfile import tempfile
sys.path.insert(1, os.path.join(sys.path[0], '../../')) sys.path.insert(1, os.path.join(sys.path[0], "../../"))
import rips import rips
import dataroot import dataroot
@ -14,57 +14,57 @@ def test_10kAsync(rips_instance, initialize_test):
casePath = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID" casePath = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
case = rips_instance.project.load_case(path=casePath) case = rips_instance.project.load_case(path=casePath)
resultChunks = case.active_cell_property_async('DYNAMIC_NATIVE', 'SOIL', 1) resultChunks = case.active_cell_property_async("DYNAMIC_NATIVE", "SOIL", 1)
mysum = 0.0 mysum = 0.0
count = 0 count = 0
for chunk in resultChunks: for chunk in resultChunks:
mysum += sum(chunk.values) mysum += sum(chunk.values)
count += len(chunk.values) count += len(chunk.values)
average = mysum / count average = mysum / count
assert(mysum == pytest.approx(621.768, abs=0.001)) assert mysum == pytest.approx(621.768, abs=0.001)
assert(average != pytest.approx(0.0158893, abs=0.0000001)) assert average != pytest.approx(0.0158893, abs=0.0000001)
assert(average == pytest.approx(0.0558893, abs=0.0000001)) assert average == pytest.approx(0.0558893, abs=0.0000001)
def test_10kSync(rips_instance, initialize_test): def test_10kSync(rips_instance, initialize_test):
casePath = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID" casePath = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
case = rips_instance.project.load_case(path=casePath) case = rips_instance.project.load_case(path=casePath)
results = case.active_cell_property('DYNAMIC_NATIVE', 'SOIL', 1) results = case.active_cell_property("DYNAMIC_NATIVE", "SOIL", 1)
mysum = sum(results) mysum = sum(results)
average = mysum / len(results) average = mysum / len(results)
assert(mysum == pytest.approx(621.768, abs=0.001)) assert mysum == pytest.approx(621.768, abs=0.001)
assert(average != pytest.approx(0.0158893, abs=0.0000001)) assert average != pytest.approx(0.0158893, abs=0.0000001)
assert(average == pytest.approx(0.0558893, abs=0.0000001)) assert average == pytest.approx(0.0558893, abs=0.0000001)
def test_10k_set(rips_instance, initialize_test): def test_10k_set(rips_instance, initialize_test):
casePath = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID" casePath = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
case = rips_instance.project.load_case(path=casePath) case = rips_instance.project.load_case(path=casePath)
results = case.active_cell_property('DYNAMIC_NATIVE', 'SOIL', 1) results = case.active_cell_property("DYNAMIC_NATIVE", "SOIL", 1)
case.set_active_cell_property(results, 'GENERATED', 'SOIL', 1) case.set_active_cell_property(results, "GENERATED", "SOIL", 1)
def test_10k_set_out_of_bounds(rips_instance, initialize_test): def test_10k_set_out_of_bounds(rips_instance, initialize_test):
casePath = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID" casePath = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
case = rips_instance.project.load_case(path=casePath) case = rips_instance.project.load_case(path=casePath)
results = case.active_cell_property('DYNAMIC_NATIVE', 'SOIL', 1) results = case.active_cell_property("DYNAMIC_NATIVE", "SOIL", 1)
results.append(5.0) results.append(5.0)
with pytest.raises(grpc.RpcError): with pytest.raises(grpc.RpcError):
assert case.set_active_cell_property(results, 'GENERATED', 'SOIL', 1) assert case.set_active_cell_property(results, "GENERATED", "SOIL", 1)
def test_10k_set_out_of_bounds_client(rips_instance, initialize_test): def test_10k_set_out_of_bounds_client(rips_instance, initialize_test):
casePath = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID" casePath = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
case = rips_instance.project.load_case(path=casePath) case = rips_instance.project.load_case(path=casePath)
results = case.active_cell_property('DYNAMIC_NATIVE', 'SOIL', 1) results = case.active_cell_property("DYNAMIC_NATIVE", "SOIL", 1)
case.chunk_size = len(results) case.chunk_size = len(results)
results.append(5.0) results.append(5.0)
with pytest.raises(IndexError): with pytest.raises(IndexError):
assert case.set_active_cell_property(results, 'GENERATED', 'SOIL', 1) assert case.set_active_cell_property(results, "GENERATED", "SOIL", 1)
def createResult(poroChunks, permxChunks): def createResult(poroChunks, permxChunks):
@ -78,22 +78,23 @@ def createResult(poroChunks, permxChunks):
def checkResults(poroValues, permxValues, poropermxValues): def checkResults(poroValues, permxValues, poropermxValues):
for (poro, permx, poropermx) in zip(poroValues, permxValues, poropermxValues): for (poro, permx, poropermx) in zip(poroValues, permxValues, poropermxValues):
recalc = poro * permx recalc = poro * permx
assert(recalc == pytest.approx(poropermx, rel=1.0e-10)) assert recalc == pytest.approx(poropermx, rel=1.0e-10)
def test_10k_PoroPermX(rips_instance, initialize_test): def test_10k_PoroPermX(rips_instance, initialize_test):
casePath = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID" casePath = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
case = rips_instance.project.load_case(path=casePath) case = rips_instance.project.load_case(path=casePath)
poroChunks = case.active_cell_property_async('STATIC_NATIVE', 'PORO', 0) poroChunks = case.active_cell_property_async("STATIC_NATIVE", "PORO", 0)
permxChunks = case.active_cell_property_async('STATIC_NATIVE', 'PERMX', 0) permxChunks = case.active_cell_property_async("STATIC_NATIVE", "PERMX", 0)
case.set_active_cell_property_async(createResult( case.set_active_cell_property_async(
poroChunks, permxChunks), 'GENERATED', 'POROPERMXAS', 0) createResult(poroChunks, permxChunks), "GENERATED", "POROPERMXAS", 0
)
poro = case.active_cell_property('STATIC_NATIVE', 'PORO', 0) poro = case.active_cell_property("STATIC_NATIVE", "PORO", 0)
permx = case.active_cell_property('STATIC_NATIVE', 'PERMX', 0) permx = case.active_cell_property("STATIC_NATIVE", "PERMX", 0)
poroPermX = case.active_cell_property('GENERATED', 'POROPERMXAS', 0) poroPermX = case.active_cell_property("GENERATED", "POROPERMXAS", 0)
checkResults(poro, permx, poroPermX) checkResults(poro, permx, poroPermX)
@ -103,10 +104,10 @@ def test_exportPropertyInView(rips_instance, initialize_test):
rips_instance.project.load_case(case_path) rips_instance.project.load_case(case_path)
with tempfile.TemporaryDirectory(prefix="rips") as tmpdirname: with tempfile.TemporaryDirectory(prefix="rips") as tmpdirname:
print("Temporary folder: ", tmpdirname) print("Temporary folder: ", tmpdirname)
rips_instance.set_export_folder(export_type='PROPERTIES', path=tmpdirname) rips_instance.set_export_folder(export_type="PROPERTIES", path=tmpdirname)
case = rips_instance.project.cases()[0] case = rips_instance.project.cases()[0]
view = case.views()[0] view = case.views()[0]
view.export_property() view.export_property()
expected_file_name = case.name + "-" + str("3D_View") + "-" + "T0" + "-SOIL" expected_file_name = case.name + "-" + str("3D_View") + "-" + "T0" + "-SOIL"
full_path = tmpdirname + "/" + expected_file_name full_path = tmpdirname + "/" + expected_file_name
assert(os.path.exists(full_path)) assert os.path.exists(full_path)
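
The body of createResult is not part of the hunks above. A hedged sketch of such a chunk-combining generator, assuming each streamed chunk exposes a .values sequence (as in test_10kAsync) and that set_active_cell_property_async accepts an iterable of plain value lists:

    def createResult(poroChunks, permxChunks):
        # Multiply matching PORO and PERMX values chunk by chunk so the result
        # can be streamed back without buffering the whole property in memory.
        for poroChunk, permxChunk in zip(poroChunks, permxChunks):
            yield [
                poro * permx
                for poro, permx in zip(poroChunk.values, permxChunk.values)
            ]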

View File

@ -1,7 +1,7 @@
import sys import sys
import os import os
sys.path.insert(1, os.path.join(sys.path[0], '../../')) sys.path.insert(1, os.path.join(sys.path[0], "../../"))
import rips import rips
import dataroot import dataroot
@ -10,32 +10,32 @@ import dataroot
def test_10k(rips_instance, initialize_test): def test_10k(rips_instance, initialize_test):
case_path = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID" case_path = dataroot.PATH + "/TEST10K_FLT_LGR_NNC/TEST10K_FLT_LGR_NNC.EGRID"
case = rips_instance.project.load_case(path=case_path) case = rips_instance.project.load_case(path=case_path)
assert(len(case.grids()) == 2) assert len(case.grids()) == 2
cell_count_info = case.cell_count() cell_count_info = case.cell_count()
sim_wells = case.simulation_wells() sim_wells = case.simulation_wells()
assert(len(sim_wells) == 3) assert len(sim_wells) == 3
assert(sim_wells[0].name == "GI1") assert sim_wells[0].name == "GI1"
assert(sim_wells[1].name == "GP1") assert sim_wells[1].name == "GP1"
assert(sim_wells[2].name == "GP2") assert sim_wells[2].name == "GP2"
timesteps = case.time_steps() timesteps = case.time_steps()
# On time step 0 all simulation wells are undefined # On time step 0 all simulation wells are undefined
for sim_well in sim_wells: for sim_well in sim_wells:
status = sim_well.status(0) status = sim_well.status(0)
assert(status.well_type == "NotDefined") assert status.well_type == "NotDefined"
# On time step 3 all wells are producing # On time step 3 all wells are producing
for sim_well in sim_wells: for sim_well in sim_wells:
status = sim_well.status(3) status = sim_well.status(3)
assert(status.well_type == "Producer") assert status.well_type == "Producer"
# On time step 0 all simulation wells have no cells # On time step 0 all simulation wells have no cells
for sim_well in sim_wells: for sim_well in sim_wells:
cells = sim_well.cells(0) cells = sim_well.cells(0)
assert(len(cells) == 0) assert len(cells) == 0
# On the other time steps there should be cells # On the other time steps there should be cells
expected_cell_count = {} expected_cell_count = {}
@ -44,8 +44,14 @@ def test_10k(rips_instance, initialize_test):
expected_cell_count["GP2"] = 18 expected_cell_count["GP2"] = 18
for sim_well in sim_wells: for sim_well in sim_wells:
for (tidx, timestep) in enumerate(timesteps): for (tidx, timestep) in enumerate(timesteps):
if (tidx > 0): if tidx > 0:
cells = sim_well.cells(tidx) cells = sim_well.cells(tidx)
print("well: " + sim_well.name + " timestep: " + print(
str(tidx) + " cells:" + str(len(cells))) "well: "
assert(len(cells) == expected_cell_count[sim_well.name]) + sim_well.name
+ " timestep: "
+ str(tidx)
+ " cells:"
+ str(len(cells))
)
assert len(cells) == expected_cell_count[sim_well.name]

View File

@ -6,7 +6,7 @@ import os
import shutil import shutil
import tempfile import tempfile
sys.path.insert(1, os.path.join(sys.path[0], '../../')) sys.path.insert(1, os.path.join(sys.path[0], "../../"))
import rips import rips
import dataroot import dataroot
@ -15,58 +15,58 @@ import dataroot
def test_summary_import_and_find(rips_instance, initialize_test): def test_summary_import_and_find(rips_instance, initialize_test):
casePath = dataroot.PATH + "/flow_diagnostics_test/SIMPLE_SUMMARY2.SMSPEC" casePath = dataroot.PATH + "/flow_diagnostics_test/SIMPLE_SUMMARY2.SMSPEC"
summary_case = rips_instance.project.import_summary_case(casePath) summary_case = rips_instance.project.import_summary_case(casePath)
assert(summary_case.id == 1) assert summary_case.id == 1
case_id = 234 case_id = 234
found_summary_case = rips_instance.project.summary_case(case_id) found_summary_case = rips_instance.project.summary_case(case_id)
assert(found_summary_case is None) assert found_summary_case is None
correct_case_id = 1 correct_case_id = 1
found_summary_case = rips_instance.project.summary_case(correct_case_id) found_summary_case = rips_instance.project.summary_case(correct_case_id)
assert(found_summary_case is not None) assert found_summary_case is not None
rips_instance.project.close() rips_instance.project.close()
correct_case_id = 1 correct_case_id = 1
found_summary_case = rips_instance.project.summary_case(correct_case_id) found_summary_case = rips_instance.project.summary_case(correct_case_id)
assert(found_summary_case is None) assert found_summary_case is None
def test_summary_data(rips_instance, initialize_test): def test_summary_data(rips_instance, initialize_test):
casePath = dataroot.PATH + "/flow_diagnostics_test/SIMPLE_SUMMARY2.SMSPEC" casePath = dataroot.PATH + "/flow_diagnostics_test/SIMPLE_SUMMARY2.SMSPEC"
summary_case = rips_instance.project.import_summary_case(casePath) summary_case = rips_instance.project.import_summary_case(casePath)
assert(summary_case.id == 1) assert summary_case.id == 1
addresses = summary_case.available_addresses() addresses = summary_case.available_addresses()
assert(len(addresses.values) == 343) assert len(addresses.values) == 343
summary_data = summary_case.summary_vector_values("FOPT") summary_data = summary_case.summary_vector_values("FOPT")
assert(len(summary_data.values) == 60) assert len(summary_data.values) == 60
def test_summary_resample(rips_instance, initialize_test): def test_summary_resample(rips_instance, initialize_test):
casePath = dataroot.PATH + "/flow_diagnostics_test/SIMPLE_SUMMARY2.SMSPEC" casePath = dataroot.PATH + "/flow_diagnostics_test/SIMPLE_SUMMARY2.SMSPEC"
summary_case = rips_instance.project.import_summary_case(casePath) summary_case = rips_instance.project.import_summary_case(casePath)
assert(summary_case.id == 1) assert summary_case.id == 1
summary_data_sampled = summary_case.resample_values("FOPT", "NONE") summary_data_sampled = summary_case.resample_values("FOPT", "NONE")
assert(len(summary_data_sampled.values) == 60) assert len(summary_data_sampled.values) == 60
assert(len(summary_data_sampled.time_steps) == 60) assert len(summary_data_sampled.time_steps) == 60
summary_data_sampled = summary_case.resample_values("FOPT", "DAY") summary_data_sampled = summary_case.resample_values("FOPT", "DAY")
assert(len(summary_data_sampled.values) == 721) assert len(summary_data_sampled.values) == 721
assert(len(summary_data_sampled.time_steps) == 721) assert len(summary_data_sampled.time_steps) == 721
summary_data_sampled = summary_case.resample_values("FOPT", "MONTH") summary_data_sampled = summary_case.resample_values("FOPT", "MONTH")
assert(len(summary_data_sampled.values) == 24) assert len(summary_data_sampled.values) == 24
assert(len(summary_data_sampled.time_steps) == 24) assert len(summary_data_sampled.time_steps) == 24
summary_data_sampled = summary_case.resample_values("FOPT", "QUARTER") summary_data_sampled = summary_case.resample_values("FOPT", "QUARTER")
assert(len(summary_data_sampled.values) == 8) assert len(summary_data_sampled.values) == 8
assert(len(summary_data_sampled.time_steps) == 8) assert len(summary_data_sampled.time_steps) == 8
summary_data_sampled = summary_case.resample_values("FOPT", "YEAR") summary_data_sampled = summary_case.resample_values("FOPT", "YEAR")
assert(len(summary_data_sampled.values) == 3) assert len(summary_data_sampled.values) == 3
assert(len(summary_data_sampled.time_steps) == 3) assert len(summary_data_sampled.time_steps) == 3
@contextlib.contextmanager @contextlib.contextmanager
@ -79,14 +79,18 @@ def cd(newdir, cleanup=lambda: True):
os.chdir(prevdir) os.chdir(prevdir)
cleanup() cleanup()
@contextlib.contextmanager @contextlib.contextmanager
def tempdir(): def tempdir():
dirpath = tempfile.mkdtemp() dirpath = tempfile.mkdtemp()
def cleanup(): def cleanup():
shutil.rmtree(dirpath) shutil.rmtree(dirpath)
with cd(dirpath, cleanup): with cd(dirpath, cleanup):
yield dirpath yield dirpath
# This test ensures that a missing unsmry file is handled gracefully # This test ensures that a missing unsmry file is handled gracefully
def test_summary_no_unsmry(rips_instance, initialize_test): def test_summary_no_unsmry(rips_instance, initialize_test):
casePathRelative = dataroot.PATH + "/flow_diagnostics_test/SIMPLE_SUMMARY2.SMSPEC" casePathRelative = dataroot.PATH + "/flow_diagnostics_test/SIMPLE_SUMMARY2.SMSPEC"
@ -103,13 +107,12 @@ def test_summary_no_unsmry(rips_instance, initialize_test):
summary_case = rips_instance.project.import_summary_case(temp_path) summary_case = rips_instance.project.import_summary_case(temp_path)
values = summary_case.summary_vector_values() values = summary_case.summary_vector_values()
assert(len(values.values) == 1) assert len(values.values) == 1
time_steps = summary_case.available_time_steps() time_steps = summary_case.available_time_steps()
assert(len(time_steps.values) == 1) assert len(time_steps.values) == 1
addresses = summary_case.available_addresses() addresses = summary_case.available_addresses()
assert(len(addresses.values) == 1) assert len(addresses.values) == 1
summary_case.resample_values() summary_case.resample_values()
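
The asserts above imply that resample_values returns parallel time_steps and values sequences. A hedged usage sketch pairing the two:

    # Hedged sketch: iterate the resampled FOPT vector month by month.
    resampled = summary_case.resample_values("FOPT", "MONTH")
    for time, value in zip(resampled.time_steps, resampled.values):
        print(time, value)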

View File

@ -1,7 +1,7 @@
import sys import sys
import os import os
sys.path.insert(1, os.path.join(sys.path[0], '../../')) sys.path.insert(1, os.path.join(sys.path[0], "../../"))
import rips import rips
import dataroot import dataroot
@ -11,10 +11,13 @@ def test_10k(rips_instance, initialize_test):
case_root_path = dataroot.PATH + "/TEST10K_FLT_LGR_NNC" case_root_path = dataroot.PATH + "/TEST10K_FLT_LGR_NNC"
case_path = case_root_path + "/TEST10K_FLT_LGR_NNC.EGRID" case_path = case_root_path + "/TEST10K_FLT_LGR_NNC.EGRID"
case = rips_instance.project.load_case(path=case_path) case = rips_instance.project.load_case(path=case_path)
assert(len(case.grids()) == 2) assert len(case.grids()) == 2
well_path_files = [case_root_path + "/wellpath_a.dev", case_root_path + "/wellpath_b.dev"] well_path_files = [
case_root_path + "/wellpath_a.dev",
case_root_path + "/wellpath_b.dev",
]
well_path_names = rips_instance.project.import_well_paths(well_path_files) well_path_names = rips_instance.project.import_well_paths(well_path_files)
wells = rips_instance.project.well_paths() wells = rips_instance.project.well_paths()
assert(len(wells) == 2) assert len(wells) == 2
assert(wells[0].name == "Well Path A") assert wells[0].name == "Well Path A"
assert(wells[1].name == "Well Path B") assert wells[1].name == "Well Path B"

View File

@ -34,11 +34,12 @@ def apply_cell_result(self, result_type, result_variable):
@add_method(View) @add_method(View)
def apply_flow_diagnostics_cell_result( def apply_flow_diagnostics_cell_result(
self, self,
result_variable='TOF', result_variable="TOF",
selection_mode='FLOW_TR_BY_SELECTION', selection_mode="FLOW_TR_BY_SELECTION",
injectors=None, injectors=None,
producers=None): producers=None,
):
"""Apply a flow diagnostics cell result """Apply a flow diagnostics cell result
**Parameters**:: **Parameters**::
@ -68,7 +69,7 @@ def apply_flow_diagnostics_cell_result(
cell_result.result_type = "FLOW_DIAGNOSTICS" cell_result.result_type = "FLOW_DIAGNOSTICS"
cell_result.result_variable = result_variable cell_result.result_variable = result_variable
cell_result.flow_tracer_selection_mode = selection_mode cell_result.flow_tracer_selection_mode = selection_mode
if selection_mode == 'FLOW_TR_BY_SELECTION': if selection_mode == "FLOW_TR_BY_SELECTION":
cell_result.selected_injector_tracers = injectors cell_result.selected_injector_tracers = injectors
cell_result.selected_producer_tracers = producers cell_result.selected_producer_tracers = producers
cell_result.update() cell_result.update()
@ -77,8 +78,9 @@ def apply_flow_diagnostics_cell_result(
@add_method(View) @add_method(View)
def clone(self): def clone(self):
"""Clone the current view""" """Clone the current view"""
view_id = self._execute_command(cloneView=Cmd.CloneViewRequest( view_id = self._execute_command(
viewId=self.id)).createViewResult.viewId cloneView=Cmd.CloneViewRequest(viewId=self.id)
).createViewResult.viewId
return self.case().view(view_id) return self.case().view(view_id)
@ -86,14 +88,17 @@ def clone(self):
def set_time_step(self, time_step): def set_time_step(self, time_step):
"""Set the time step for current view""" """Set the time step for current view"""
case_id = self.case().id case_id = self.case().id
return self._execute_command(setTimeStep=Cmd.SetTimeStepParams( return self._execute_command(
caseId=case_id, viewId=self.id, timeStep=time_step)) setTimeStep=Cmd.SetTimeStepParams(
caseId=case_id, viewId=self.id, timeStep=time_step
)
)
@add_method(View) @add_method(View)
def export_sim_well_fracture_completions(self, time_step, def export_sim_well_fracture_completions(
simulation_well_names, file_split, self, time_step, simulation_well_names, file_split, compdat_export
compdat_export): ):
"""Export fracture completions for simulation wells """Export fracture completions for simulation wells
**Parameters**:: **Parameters**::
@ -132,15 +137,19 @@ def export_sim_well_fracture_completions(self, time_step,
timeStep=time_step, timeStep=time_step,
simulationWellNames=simulation_well_names, simulationWellNames=simulation_well_names,
fileSplit=file_split, fileSplit=file_split,
compdatExport=compdat_export)) compdatExport=compdat_export,
)
)
@add_method(View) @add_method(View)
def export_visible_cells(self, def export_visible_cells(
export_keyword='FLUXNUM', self,
visible_active_cells_value=1, export_keyword="FLUXNUM",
hidden_active_cells_value=0, visible_active_cells_value=1,
inactive_cells_value=0): hidden_active_cells_value=0,
inactive_cells_value=0,
):
"""Export special properties for all visible cells. """Export special properties for all visible cells.
Arguments: Arguments:
@ -158,12 +167,14 @@ def export_visible_cells(self,
exportKeyword=export_keyword, exportKeyword=export_keyword,
visibleActiveCellsValue=visible_active_cells_value, visibleActiveCellsValue=visible_active_cells_value,
hiddenActiveCellsValue=hidden_active_cells_value, hiddenActiveCellsValue=hidden_active_cells_value,
inactiveCellsValue=inactive_cells_value)) inactiveCellsValue=inactive_cells_value,
)
)
@add_method(View) @add_method(View)
def export_property(self, undefined_value=0.0): def export_property(self, undefined_value=0.0):
""" Export the current Eclipse property from the view """Export the current Eclipse property from the view
Arguments: Arguments:
undefined_value (double): Value to use for undefined values. Defaults to 0.0 undefined_value (double): Value to use for undefined values. Defaults to 0.0
@ -171,22 +182,22 @@ def export_property(self, undefined_value=0.0):
case_id = self.case().id case_id = self.case().id
return self._execute_command( return self._execute_command(
exportPropertyInViews=Cmd.ExportPropertyInViewsRequest( exportPropertyInViews=Cmd.ExportPropertyInViewsRequest(
caseId=case_id, caseId=case_id, viewIds=[self.id], undefinedValue=undefined_value
viewIds=[self.id], )
undefinedValue=undefined_value)) )
@add_method(ViewWindow) @add_method(ViewWindow)
def case(self): def case(self):
"""Get the case the view belongs to""" """Get the case the view belongs to"""
mycase = self.ancestor(rips.case.Case) mycase = self.ancestor(rips.case.Case)
assert(mycase is not None) assert mycase is not None
return mycase return mycase
@add_method(ViewWindow) @add_method(ViewWindow)
def export_snapshot(self, prefix='', export_folder=''): def export_snapshot(self, prefix="", export_folder=""):
""" Export snapshot for the current view """Export snapshot for the current view
Arguments: Arguments:
prefix (str): Exported file name prefix prefix (str): Exported file name prefix
@ -194,8 +205,11 @@ def export_snapshot(self, prefix='', export_folder=''):
""" """
case_id = self.case().id case_id = self.case().id
return self._execute_command( return self._execute_command(
exportSnapshots=Cmd.ExportSnapshotsRequest(type='VIEWS', exportSnapshots=Cmd.ExportSnapshotsRequest(
prefix=prefix, type="VIEWS",
caseId=case_id, prefix=prefix,
viewId=self.id, caseId=case_id,
exportFolder=export_folder)) viewId=self.id,
exportFolder=export_folder,
)
)
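
A hedged usage sketch of the View methods reformatted above, assuming a loaded case with at least one view; the tracer names, time step and export folder are placeholders:

    view = case.views()[0]
    view.apply_flow_diagnostics_cell_result(
        result_variable="TOF",
        selection_mode="FLOW_TR_BY_SELECTION",
        injectors=["I01"],
        producers=["P01"],
    )
    view.set_time_step(time_step=3)
    view.export_snapshot(prefix="tof", export_folder="/tmp/snapshots")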

View File

@ -10,8 +10,16 @@ from .resinsight_classes import WellLogPlot
@add_method(WellLogPlot) @add_method(WellLogPlot)
def export_data_as_las(self, export_folder, file_prefix='', export_tvdrkb=False, capitalize_file_names=False, resample_interval=0.0, convert_to_standard_units=False): def export_data_as_las(
""" Export LAS file(s) for the current plot self,
export_folder,
file_prefix="",
export_tvdrkb=False,
capitalize_file_names=False,
resample_interval=0.0,
convert_to_standard_units=False,
):
"""Export LAS file(s) for the current plot
Arguments: Arguments:
export_folder(str): The path to export to. By default will use the global export folder export_folder(str): The path to export to. By default will use the global export folder
@ -23,20 +31,26 @@ def export_data_as_las(self, export_folder, file_prefix='', export_tvdrkb=False,
Returns: Returns:
A list of files exported A list of files exported
""" """
res = self._execute_command(exportWellLogPlotData=Commands_pb2.ExportWellLogPlotDataRequest(exportFormat='LAS', res = self._execute_command(
viewId=self.id, exportWellLogPlotData=Commands_pb2.ExportWellLogPlotDataRequest(
exportFolder=export_folder, exportFormat="LAS",
filePrefix=file_prefix, viewId=self.id,
exportTvdRkb=export_tvdrkb, exportFolder=export_folder,
capitalizeFileNames=capitalize_file_names, filePrefix=file_prefix,
resampleInterval=resample_interval, exportTvdRkb=export_tvdrkb,
convertCurveUnits=convert_to_standard_units)) capitalizeFileNames=capitalize_file_names,
resampleInterval=resample_interval,
convertCurveUnits=convert_to_standard_units,
)
)
return res.exportWellLogPlotDataResult.exportedFiles return res.exportWellLogPlotDataResult.exportedFiles
@add_method(WellLogPlot) @add_method(WellLogPlot)
def export_data_as_ascii(self, export_folder, file_prefix='', capitalize_file_names=False): def export_data_as_ascii(
""" Export LAS file(s) for the current plot self, export_folder, file_prefix="", capitalize_file_names=False
):
"""Export LAS file(s) for the current plot
Arguments: Arguments:
export_folder(str): The path to export to. By default will use the global export folder export_folder(str): The path to export to. By default will use the global export folder
@ -46,11 +60,15 @@ def export_data_as_ascii(self, export_folder, file_prefix='', capitalize_file_na
Returns: Returns:
A list of files exported A list of files exported
""" """
res = self._execute_command(exportWellLogPlotData=Commands_pb2.ExportWellLogPlotDataRequest(exportFormat='ASCII', res = self._execute_command(
viewId=self.id, exportWellLogPlotData=Commands_pb2.ExportWellLogPlotDataRequest(
exportFolder=export_folder, exportFormat="ASCII",
filePrefix=file_prefix, viewId=self.id,
exportTvdRkb=False, exportFolder=export_folder,
capitalizeFileNames=capitalize_file_names, filePrefix=file_prefix,
resampleInterval=0.0)) exportTvdRkb=False,
capitalizeFileNames=capitalize_file_names,
resampleInterval=0.0,
)
)
return res.exportWellLogPlotDataResult.exportedFiles return res.exportWellLogPlotDataResult.exportedFiles
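
A hedged usage sketch combining the exporters above with the plot iteration from test_well_log_plots; the destination folder is a placeholder:

    import tempfile

    with tempfile.TemporaryDirectory(prefix="rips") as tmpdirname:
        for plot in project.plots():
            if isinstance(plot, rips.WellLogPlot):
                # Export both LAS and plain ASCII versions of the plot data
                las_files = plot.export_data_as_las(
                    export_folder=tmpdirname, export_tvdrkb=True
                )
                ascii_files = plot.export_data_as_ascii(
                    export_folder=tmpdirname, capitalize_file_names=True
                )
                print(las_files, ascii_files)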