Merge pull request #1086 from stefoss23/python_conn
Python: removed properties.py, shedule.py, parser_module.py and sunbeam.py
This commit is contained in:
@@ -12,6 +12,10 @@ std::string direction( const Connection& c ) {
|
||||
return Connection::Direction2String( c.dir() );
|
||||
}
|
||||
|
||||
std::tuple<int, int, int> get_pos( const Connection& conn ) {
|
||||
return std::make_tuple(conn.getI(), conn.getJ(), conn.getK());
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
@@ -20,9 +24,10 @@ void python::common::export_Connection(py::module& module) {
|
||||
py::class_< Connection >( module, "Connection")
|
||||
.def_property_readonly("direction", &direction )
|
||||
.def_property_readonly("state", &state )
|
||||
.def_property_readonly( "I", &Connection::getI )
|
||||
.def_property_readonly( "J", &Connection::getJ )
|
||||
.def_property_readonly( "K", &Connection::getK )
|
||||
.def_property_readonly( "i", &Connection::getI )
|
||||
.def_property_readonly( "j", &Connection::getJ )
|
||||
.def_property_readonly( "k", &Connection::getK )
|
||||
.def_property_readonly( "pos", &get_pos )
|
||||
.def_property_readonly( "attached_to_segment", &Connection::attachedToSegment )
|
||||
.def_property_readonly( "center_depth", &Connection::depth)
|
||||
.def_property_readonly( "rw", &Connection::rw)
|
||||
@@ -30,7 +35,7 @@ void python::common::export_Connection(py::module& module) {
|
||||
.def_property_readonly( "number", &Connection::complnum) // This is deprecated; complnum is the "correct" property name
|
||||
.def_property_readonly( "sat_table_id", &Connection::satTableId)
|
||||
.def_property_readonly( "segment_number", &Connection::segment)
|
||||
.def_property_readonly( "CF", &Connection::CF)
|
||||
.def_property_readonly( "Kh", &Connection::Kh)
|
||||
.def_property_readonly( "cf", &Connection::CF)
|
||||
.def_property_readonly( "kh", &Connection::Kh)
|
||||
.def_property_readonly( "well_pi", &Connection::wellPi );
|
||||
}
|
||||
|
||||
@@ -4,6 +4,10 @@
|
||||
#include <opm/parser/eclipse/EclipseState/InitConfig/InitConfig.hpp>
|
||||
#include <opm/parser/eclipse/EclipseState/SummaryConfig/SummaryConfig.hpp>
|
||||
#include <opm/parser/eclipse/EclipseState/SimulationConfig/SimulationConfig.hpp>
|
||||
#include <opm/parser/eclipse/EclipseState/Tables/TableManager.hpp>
|
||||
#include <opm/parser/eclipse/EclipseState/EclipseState.hpp>
|
||||
#include <opm/parser/eclipse/EclipseState/Schedule/Schedule.hpp>
|
||||
|
||||
|
||||
#include "export.hpp"
|
||||
|
||||
@@ -15,6 +19,10 @@ void python::common::export_EclipseConfig(py::module& module)
|
||||
.def( "restart", &EclipseConfig::restart, ref_internal);
|
||||
|
||||
py::class_< SummaryConfig >( module, "SummaryConfig")
|
||||
.def(py::init([](const Deck& deck, const EclipseState& state, const Schedule& schedule) {
|
||||
return SummaryConfig( deck, schedule, state.getTableManager() );
|
||||
} ) )
|
||||
|
||||
.def( "__contains__", &SummaryConfig::hasKeyword );
|
||||
|
||||
py::class_< InitConfig >( module, "InitConfig")
|
||||
|
||||
@@ -17,6 +17,7 @@ from .libopmcommon_python import DeckKeyword
|
||||
from .libopmcommon_python import EclipseState
|
||||
from .libopmcommon_python import Schedule
|
||||
from .libopmcommon_python import OpmLog
|
||||
from .libopmcommon_python import SummaryConfig
|
||||
|
||||
|
||||
#from .schedule import Well, Connection, Schedule
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
from .config import *
|
||||
@@ -1,11 +0,0 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from opm import libopmcommon_python as lib
|
||||
from ..parser.sunbeam import delegate
|
||||
|
||||
|
||||
@delegate(lib.SummaryConfig)
|
||||
class SummaryConfig(object):
|
||||
def __repr__(self):
|
||||
return 'SummaryConfig()'
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
from opm._common import action
|
||||
from opm._common import Parser
|
||||
from opm._common import ParseContext
|
||||
|
||||
from .parser_module import parse, load_deck, load_deck_string, parse_string
|
||||
|
||||
@@ -1,188 +0,0 @@
|
||||
from __future__ import absolute_import
|
||||
import os.path
|
||||
import json
|
||||
|
||||
from opm import libopmcommon_python as lib
|
||||
from .properties import SunbeamState
|
||||
|
||||
|
||||
def _init_parse(recovery, keywords):
|
||||
context = lib.ParseContext(recovery)
|
||||
parser = lib.Parser()
|
||||
for kw in keywords:
|
||||
parser.add_keyword(json.dumps(kw))
|
||||
|
||||
return (context,parser)
|
||||
|
||||
|
||||
def parse(deck_file, recovery=[], keywords=[]):
|
||||
"""Will parse a file and create a SunbeamState object.
|
||||
|
||||
The parse function will parse a complete ECLIPSE input deck and return a
|
||||
SunbeamState instance which can be used to access all the properties of the
|
||||
Eclipse parser has internalized. Assuming the following small script has
|
||||
been executed:
|
||||
|
||||
import sunbeam
|
||||
|
||||
result = sunbeam.parse("ECLIPSE.DATA")
|
||||
|
||||
Then the main results can be found in .deck, .state and .schedule
|
||||
properties of the result object:
|
||||
|
||||
result.deck: This is the first result of the parsing process. In the Deck
|
||||
datastructure the original organisation with keywords found in the Eclipse
|
||||
datafile still remains, but the following processing has been completed:
|
||||
|
||||
o All comments have been stripped out.
|
||||
|
||||
o All include files have been loaded.
|
||||
|
||||
o All values are converted to the correct type, i.e. string, integer or
|
||||
double, and floating point values have been converted to SI units.
|
||||
|
||||
o '*' literals and values which have been omitted have been updated with
|
||||
the correct default values.
|
||||
|
||||
o The content has been basically verified; at least datatypes and the
|
||||
number of records in keywords with a fixed number of records.
|
||||
|
||||
You can always create a Deck data structure - even if your Eclipse input is
|
||||
far from complete, however this is quite coarse information - and if
|
||||
possible you are probably better off working with either the EclipseState
|
||||
object found in result.state or the Schedule object found in
|
||||
result.schedule.
|
||||
|
||||
result.state: This is a more processed result, where the different keywords
|
||||
have been assembled into higher order objects, for instance the various
|
||||
keywords which together constitute one Eclipse simulationgrid have been
|
||||
assembled into a EclipseGrid class, the PERMX keywords - along with BOX
|
||||
modifiers and such have been assembled into a properties object and the
|
||||
various table objects have been assembled into Table class.
|
||||
|
||||
|
||||
result.schedule: All the static information is assembled in the state
|
||||
property, and all the dynamic information is in the schedule property. The
|
||||
schedule property is an instance of the Schedule class from opm-parser, and
|
||||
mainly consists of well and group related information, including all rate
|
||||
information.
|
||||
|
||||
Example:
|
||||
|
||||
import sunbeam
|
||||
result = sunbeam.parse("ECLIPSE.DATA")
|
||||
|
||||
# Fetch the static properties from the result.state object:
|
||||
grid = result.state.grid
|
||||
print("The grid dimensions are: (%d,%d,%d)" % (grid.getNX(),
|
||||
grid.getNY(),
|
||||
grid.getNZ()))
|
||||
grid_properties = result.state.props()
|
||||
poro = grid_properties["PORO"]
|
||||
print("PORO[0]: %g" % poro[0])
|
||||
|
||||
|
||||
# Look at the dynamic properties:
|
||||
print("Wells: %s" % result.schedule.wells)
|
||||
|
||||
|
||||
The C++ implementation underlying opm-parser implements support for a
|
||||
large fraction of ECLIPSE properties, not all of that is exposed in Python,
|
||||
but it is quite simple to extend the Python wrapping.
|
||||
|
||||
In addition to the deck_file argument the parse() function has two optional
|
||||
arguments which can be used to alter the parsing process:
|
||||
|
||||
recovery: The specification of the ECLIPSE input format is not very strict,
|
||||
and out in the wild there are many decks which are handled correctly by
|
||||
ECLIPSE, although they seem to be in violation with the ECLIPSE input
|
||||
specification. Also there are *many* more exotic features of the ECLIPSE
|
||||
input specification which are not yet handled by the opm-parser.
|
||||
|
||||
By default the parser is quite strict, and when an unknown situation is
|
||||
encountered an exception will be raised - however for a set of recognized
|
||||
error conditions it is possible to configure the parser to ignore the
|
||||
errors. A quite common situation is for instance that an extra '/' is
|
||||
found dangling in the deck - this is probably safe to ignore:
|
||||
|
||||
result = sunbeam.parse("ECLIPSE.DATE",
|
||||
recovery = [("PARSE_RANDOM_SLASH", sunbeam.action.ignore)])
|
||||
|
||||
The full list of error modes which are recognized can be found in include
|
||||
file ParseContext.hpp in the opm-parser source. To disable errors using
|
||||
the recovery method is a slippery slope; you might very well end up
|
||||
masking real problems in your input deck - and the final error when
|
||||
things go *really astray* might be quite incomprehensible. If you can
|
||||
modify your input deck that is recommended before ignoring errors with
|
||||
the recovery mechanism.
|
||||
|
||||
|
||||
keywords: The total number of keywords supported by ECLIPSE is immense, and
|
||||
the parser code only supports a fraction of these. Using the keywords
|
||||
argument you can tell the parser about additional keywords. The keyword
|
||||
specifications should be supplied as Python dictionaries; see the
|
||||
share/keywords directories in the opm-parser source for syntax. Assuming
|
||||
you have an input deck with the keyword WECONCMF which opm-parser does
|
||||
not yet support. You could then add that to your parser in the following
|
||||
manner:
|
||||
|
||||
import sunbeam
|
||||
|
||||
weconmf = {"name" : "WECONMF",
|
||||
"sections" : ["SCHEDULE"],
|
||||
"items" : [{"name" : "well", "value_type" : "STRING"},
|
||||
{"name" : "comp_index", "value_type" : "INTEGER"},
|
||||
{"name" : "max_mole_fraction", "value_type" : "DOUBLE", "dimension" : "1"},
|
||||
{"name" : "workover", "value_type" : "STRING", "default" : "NONE"},
|
||||
{"name" : "end_flag", "value_type": "STRING", "default" : "NO"}]}
|
||||
|
||||
state = sunbeam.parse("ECLIPSE.DATA", keywords = [weconmf])
|
||||
|
||||
Adding keywords in this way will ensure that the relevant information is
|
||||
internalized in the deck, but it will not be taken into account when
|
||||
constructing the EclipseState and Schedule objects.
|
||||
|
||||
"""
|
||||
|
||||
if not os.path.isfile(deck_file):
|
||||
raise IOError("No such file: {}".format(deck_file))
|
||||
|
||||
context, parser = _init_parse(recovery, keywords)
|
||||
return SunbeamState( lib.parse(deck_file, context, parser))
|
||||
|
||||
|
||||
|
||||
def parse_string(deck_string, recovery=[], keywords=[]):
|
||||
"""Will parse a string and create SunbeamState object.
|
||||
|
||||
See function parse() for further details about return type and the recovery
|
||||
and keyword arguments.
|
||||
|
||||
"""
|
||||
context, parser = _init_parse(recovery, keywords)
|
||||
return SunbeamState(lib.parse_string(deck_string, context, parser))
|
||||
|
||||
|
||||
|
||||
def load_deck(deck_file, keywords=[], recovery=[]):
|
||||
"""
|
||||
Will parse a file and return a Deck object.
|
||||
|
||||
See function parse() for details about the keywords and recovery arguments.
|
||||
"""
|
||||
if not os.path.isfile(deck_file):
|
||||
raise IOError("No such file: {}".format(deck_file))
|
||||
|
||||
context, parser = _init_parse(recovery, keywords)
|
||||
return lib.create_deck(deck_file, context, parser)
|
||||
|
||||
|
||||
def load_deck_string(deck_string, recovery=[], keywords=[]):
|
||||
"""
|
||||
Will parse a string and return a Deck object.
|
||||
|
||||
See function parse() for details about the keywords and recovery arguments.
|
||||
"""
|
||||
context, parser = _init_parse(recovery, keywords)
|
||||
return lib.create_deck_string(deck_string, context, parser)
|
||||
|
||||
@@ -1,18 +0,0 @@
|
||||
from __future__ import absolute_import
|
||||
from os.path import isfile
|
||||
|
||||
from opm import libopmcommon_python as lib
|
||||
from .sunbeam import delegate
|
||||
|
||||
|
||||
@delegate(lib.SunbeamState)
|
||||
class SunbeamState(object):
|
||||
|
||||
@property
|
||||
def deck(self):
|
||||
return self._deck()
|
||||
|
||||
@property
|
||||
def summary_config(self):
|
||||
return self._summary_config()
|
||||
|
||||
@@ -1,44 +0,0 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
|
||||
class _delegate(object):
|
||||
def __init__(self, name, attr):
|
||||
self._name = name
|
||||
self._attr = attr
|
||||
|
||||
def __get__(self, instance, _):
|
||||
if instance is None: return self
|
||||
return getattr(self.delegate(instance), self._attr)
|
||||
|
||||
def __set__(self, instance, value):
|
||||
setattr(self.delegate(instance), self._attr, value)
|
||||
|
||||
def delegate(self, instance):
|
||||
return getattr(instance, self._name)
|
||||
|
||||
def __repr__(self):
|
||||
return '_delegate(' + repr(self._name) + ", " + repr(self._attr) + ")"
|
||||
|
||||
def delegate(delegate_cls, to = '_sun'):
|
||||
attributes = set(delegate_cls.__dict__.keys())
|
||||
|
||||
def inner(cls):
|
||||
class _property(object):
|
||||
pass
|
||||
|
||||
setattr(cls, to, _property())
|
||||
for attr in attributes - set(list(cls.__dict__.keys()) + ['__init__']):
|
||||
setattr(cls, attr, _delegate(to, attr))
|
||||
src, dst = getattr(delegate_cls, attr), getattr(cls, attr)
|
||||
setattr(dst, '__doc__', src.__doc__)
|
||||
|
||||
def new__new__(_cls, this, *args, **kwargs):
|
||||
new = super(cls, _cls).__new__(_cls)
|
||||
setattr(new, to, this) # self._sun = this
|
||||
return new
|
||||
|
||||
cls.__new__ = staticmethod(new__new__)
|
||||
|
||||
return cls
|
||||
|
||||
return inner
|
||||
@@ -1,3 +1 @@
|
||||
from opm._common import Schedule
|
||||
|
||||
from .schedule import *
|
||||
|
||||
@@ -1,36 +0,0 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from opm import libopmcommon_python as lib
|
||||
from ..parser.sunbeam import delegate
|
||||
|
||||
|
||||
|
||||
@delegate(lib.Connection)
|
||||
class Connection(object):
|
||||
|
||||
@property
|
||||
def pos(self):
|
||||
return self.I, self.J, self.K
|
||||
|
||||
def __repr__(self):
|
||||
return 'Connection(number = {})'.format(self.number)
|
||||
|
||||
# using the names flowing and closed for functions that test if a well is
|
||||
# opened or closed at some point, because we might want to use the more
|
||||
# imperative words 'open' and 'close' (or 'shut') for *changing* the status
|
||||
# later
|
||||
@staticmethod
|
||||
def flowing():
|
||||
def fn(connection): return connection.state == 'OPEN'
|
||||
return fn
|
||||
|
||||
@staticmethod
|
||||
def closed():
|
||||
def fn(connection): return connection.state == 'SHUT'
|
||||
return fn
|
||||
|
||||
@staticmethod
|
||||
def auto():
|
||||
def fn(connection): return connection.state == 'AUTO'
|
||||
return fn
|
||||
|
||||
1
python/python/opm/io/summary/__init__.py
Normal file
1
python/python/opm/io/summary/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
from opm._common import SummaryConfig
|
||||
@@ -5,8 +5,7 @@ try:
|
||||
except ImportError:
|
||||
from io import StringIO
|
||||
|
||||
|
||||
from opm.io.parser import load_deck, load_deck_string
|
||||
from opm.io.parser import Parser
|
||||
|
||||
# This is from the TimeMap.cpp implementation in opm
|
||||
ecl_month = {"JAN" : 1,
|
||||
@@ -213,11 +212,11 @@ class TimeVector(object):
|
||||
self._add_dates_block(ts)
|
||||
start_dt = datetime.datetime(start_date.year, start_date.month, start_date.day)
|
||||
if base_file:
|
||||
deck = load_deck(base_file)
|
||||
deck = Parser().parse(base_file)
|
||||
self._add_deck(deck, start_dt)
|
||||
|
||||
if base_string:
|
||||
deck = load_deck_string(base_string)
|
||||
deck = Parser().parse_string(base_string)
|
||||
self._add_deck(deck, start_dt)
|
||||
|
||||
|
||||
@@ -328,7 +327,7 @@ class TimeVector(object):
|
||||
tv.load("well.sch", date = datetime.datetime(2017, 4, 1))
|
||||
|
||||
"""
|
||||
deck = load_deck(filename)
|
||||
deck = Parser().parse(filename)
|
||||
self._add_deck(deck, date)
|
||||
|
||||
|
||||
@@ -336,7 +335,7 @@ class TimeVector(object):
|
||||
"""
|
||||
Like load() - but load from a string literal instead of file.
|
||||
"""
|
||||
deck = load_deck_string(deck_string)
|
||||
deck = Parser().parse_string(deck_string)
|
||||
self._add_deck(deck, date)
|
||||
|
||||
|
||||
|
||||
@@ -81,7 +81,6 @@ setup(
|
||||
packages=[
|
||||
'opm',
|
||||
'opm.io',
|
||||
'opm.io.config',
|
||||
'opm.io.deck',
|
||||
'opm.io.ecl_state',
|
||||
'opm.io.parser',
|
||||
|
||||
@@ -1,12 +1,24 @@
|
||||
import unittest
|
||||
import opm.io
|
||||
|
||||
from opm.io.parser import Parser
|
||||
from opm.io.ecl_state import EclipseState
|
||||
from opm.io.schedule import Schedule
|
||||
|
||||
|
||||
def flowing(connection):
|
||||
return connection.state == 'OPEN'
|
||||
|
||||
def closed(connection):
|
||||
return connection.state == 'SHUT'
|
||||
|
||||
|
||||
class TestWells(unittest.TestCase):
|
||||
|
||||
"""
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
cls.sch = opm.io.parse('tests/spe3/SPE3CASE1.DATA').schedule
|
||||
deck = Parser().parse('tests/spe3/SPE3CASE1.DATA')
|
||||
cls.state = EclipseState(deck)
|
||||
cls.sch = Schedule(deck, cls.state)
|
||||
cls.timesteps = cls.sch.timesteps
|
||||
|
||||
def test_connection_pos(self):
|
||||
@@ -19,7 +31,7 @@ class TestWells(unittest.TestCase):
|
||||
self.assertEqual(p01, (6,6,3))
|
||||
self.assertEqual(p10, (0,0,0))
|
||||
self.assertEqual(p11, (0,0,1))
|
||||
|
||||
|
||||
def test_connection_state(self):
|
||||
for timestep,_ in enumerate(self.timesteps):
|
||||
for well in self.sch.get_wells(timestep):
|
||||
@@ -27,8 +39,6 @@ class TestWells(unittest.TestCase):
|
||||
self.assertEqual("OPEN", connection.state)
|
||||
|
||||
def test_filters(self):
|
||||
flowing = opm.io.schedule.Connection.flowing()
|
||||
closed = opm.io.schedule.Connection.closed()
|
||||
connections = self.sch.get_wells(0)[0].connections()
|
||||
self.assertEqual(len(list(filter(flowing, connections))), 2)
|
||||
self.assertEqual(len(list(filter(closed, connections))), 0)
|
||||
@@ -44,7 +54,7 @@ class TestWells(unittest.TestCase):
|
||||
for well in self.sch.get_wells(timestep):
|
||||
for connection in well.connections():
|
||||
self.assertFalse(connection.attached_to_segment)
|
||||
"""
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import unittest
|
||||
from opm.io import load_deck_string
|
||||
|
||||
from opm.io.parser import Parser
|
||||
|
||||
class TestParse(unittest.TestCase):
|
||||
|
||||
@@ -27,7 +28,7 @@ FIPNUM
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
self.deck = load_deck_string(self.DECK_STRING)
|
||||
self.deck = Parser().parse_string(self.DECK_STRING)
|
||||
|
||||
def test_deck_in(self):
|
||||
map(lambda kw: self.assertIn(kw, self.deck), [
|
||||
@@ -49,7 +50,7 @@ FIPNUM
|
||||
str(self.deck['DX']).split()
|
||||
)
|
||||
self.assertEqual(
|
||||
str(load_deck_string('RUNSPEC\n\nDX\n4*0.5 /')).split(),
|
||||
str( Parser().parse_string('RUNSPEC\n\nDX\n4*0.5 /') ).split(),
|
||||
'RUNSPEC DX 0.5 0.5 0.5 0.5 /'.split()
|
||||
)
|
||||
|
||||
|
||||
@@ -57,8 +57,8 @@ FIPNUM
|
||||
self.norne_fname = os.path.abspath('../../examples/data/norne/NORNE_ATW2013.DATA')
|
||||
|
||||
def test_IOError(self):
|
||||
with self.assertRaises(IOError):
|
||||
opm.io.load_deck("file/not/found")
|
||||
with self.assertRaises(Exception):
|
||||
Parser().parse("file/not/found")
|
||||
|
||||
|
||||
def test_parser_fail_without_extension(self):
|
||||
|
||||
@@ -1,23 +1,186 @@
|
||||
import unittest
|
||||
import opm.io
|
||||
|
||||
class TestState(unittest.TestCase):
|
||||
from opm.io.parser import Parser
|
||||
|
||||
from opm.io.ecl_state import EclipseState
|
||||
from opm.io.schedule import Schedule
|
||||
from opm.io.summary import SummaryConfig
|
||||
|
||||
|
||||
class TestState2(unittest.TestCase):
|
||||
FAULTS_DECK = """
|
||||
RUNSPEC
|
||||
|
||||
DIMENS
|
||||
10 10 10 /
|
||||
GRID
|
||||
DX
|
||||
1000*0.25 /
|
||||
DY
|
||||
1000*0.25 /
|
||||
DZ
|
||||
1000*0.25 /
|
||||
TOPS
|
||||
100*0.25 /
|
||||
FAULTS
|
||||
'F1' 1 1 1 4 1 4 'X' /
|
||||
'F2' 5 5 1 4 1 4 'X-' /
|
||||
/
|
||||
MULTFLT
|
||||
'F1' 0.50 /
|
||||
'F2' 0.50 /
|
||||
/
|
||||
EDIT
|
||||
MULTFLT /
|
||||
'F2' 0.25 /
|
||||
/
|
||||
OIL
|
||||
|
||||
GAS
|
||||
|
||||
TITLE
|
||||
The title
|
||||
|
||||
START
|
||||
8 MAR 1998 /
|
||||
|
||||
PROPS
|
||||
REGIONS
|
||||
SWAT
|
||||
1000*1 /
|
||||
SATNUM
|
||||
1000*2 /
|
||||
\
|
||||
"""
|
||||
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
cls.spe3 = opm.io.parse('tests/spe3/SPE3CASE1.DATA')
|
||||
cpa = opm.io.parse('tests/data/CORNERPOINT_ACTNUM.DATA')
|
||||
|
||||
def setUpClass(cls):
|
||||
parser = Parser()
|
||||
cls.deck_cpa = parser.parse('tests/data/CORNERPOINT_ACTNUM.DATA')
|
||||
cls.cp_state = EclipseState(cls.deck_cpa)
|
||||
|
||||
cls.deck_spe3 = parser.parse('tests/spe3/SPE3CASE1.DATA')
|
||||
cls.state = EclipseState(cls.deck_spe3)
|
||||
cls.schedule = Schedule(cls.deck_spe3, cls.state)
|
||||
cls.summary_config = SummaryConfig(cls.deck_spe3, cls.state, cls.schedule)
|
||||
|
||||
def test_config(self):
|
||||
cfg = self.state.config()
|
||||
|
||||
init = cfg.init()
|
||||
self.assertTrue(init.hasEquil())
|
||||
self.assertFalse(init.restartRequested())
|
||||
self.assertEqual(0, init.getRestartStep())
|
||||
|
||||
rst = cfg.restart()
|
||||
self.assertFalse(rst.getWriteRestartFile(0))
|
||||
self.assertEqual(7, rst.getFirstRestartStep())
|
||||
|
||||
def test_repr_title(self):
|
||||
self.assertEqual('SPE 3 - CASE 1', self.state.title)
|
||||
|
||||
def test_state_nnc(self):
|
||||
self.assertFalse(self.state.has_input_nnc())
|
||||
|
||||
def test_grid(self):
|
||||
grid = self.state.grid()
|
||||
self.assertEqual(9, grid.nx)
|
||||
self.assertEqual(9, grid.ny)
|
||||
self.assertEqual(4, grid.nz)
|
||||
self.assertEqual(9*9*4, grid.nactive)
|
||||
self.assertEqual(9*9*4, grid.cartesianSize)
|
||||
g,i,j,k = 295,7,5,3
|
||||
self.assertEqual(g, grid.globalIndex(i,j,k))
|
||||
self.assertEqual((i,j,k), grid.getIJK(g))
|
||||
|
||||
def test_simulation(self):
|
||||
sim = self.state.simulation()
|
||||
self.assertFalse(sim.hasThresholdPressure())
|
||||
self.assertFalse(sim.useCPR())
|
||||
self.assertTrue(sim.hasDISGAS())
|
||||
self.assertTrue(sim.hasVAPOIL())
|
||||
|
||||
def test_tables(self):
|
||||
tables = self.state.tables()
|
||||
self.assertTrue('SGOF' in tables)
|
||||
self.assertTrue('SWOF' in tables)
|
||||
self.assertFalse('SOF' in tables)
|
||||
|
||||
ct = self.cp_state.tables()
|
||||
self.assertFalse('SGOF' in ct)
|
||||
self.assertTrue('SWOF' in ct)
|
||||
|
||||
tab = 'SWOF'
|
||||
col = 'KRW'
|
||||
self.assertAlmostEqual(0.1345, self.state.tables().evaluate(tab, 0, col, 0.5))
|
||||
self.assertAlmostEqual(0.39, self.state.tables().evaluate(tab, 0, col, 0.72))
|
||||
|
||||
with self.assertRaises(KeyError):
|
||||
self.state.tables().evaluate(tab, 0, 'NO', 1)
|
||||
|
||||
def test_faults(self):
|
||||
self.assertEquals([], self.state.faultNames())
|
||||
parser = Parser()
|
||||
faultdeck = parser.parse_string(self.FAULTS_DECK)
|
||||
faultstate = EclipseState(faultdeck)
|
||||
self.assertEqual(['F1', 'F2'], faultstate.faultNames())
|
||||
# 'F2' 5 5 1 4 1 4 'X-' / \n"
|
||||
f2 = faultstate.faultFaces('F2')
|
||||
self.assertTrue((4,0,0,'X-') in f2)
|
||||
self.assertFalse((3,0,0,'X-') in f2)
|
||||
|
||||
def test_jfunc(self):
|
||||
# jf["FLAG"] = WATER; # set in deck
|
||||
# jf["DIRECTION"] = XY; # default
|
||||
# jf["ALPHA_FACTOR"] = 0.5 # default
|
||||
# jf["BETA_FACTOR"] = 0.5 # default
|
||||
# jf["OIL_WATER"] = 21.0 # set in deck
|
||||
# jf["GAS_OIL"] = -1.0 # N/A
|
||||
|
||||
parser = Parser()
|
||||
deck = parser.parse('tests/data/JFUNC.DATA')
|
||||
js = EclipseState(deck)
|
||||
self.assertEqual('JFUNC TEST', js.title)
|
||||
jf = js.jfunc()
|
||||
print(jf)
|
||||
self.assertEqual(jf['FLAG'], 'WATER')
|
||||
self.assertEqual(jf['DIRECTION'], 'XY')
|
||||
self.assertFalse('GAS_OIL' in jf)
|
||||
self.assertTrue('OIL_WATER' in jf)
|
||||
self.assertEqual(jf['OIL_WATER'], 21.0)
|
||||
self.assertEqual(jf["ALPHA_FACTOR"], 0.5) # default
|
||||
self.assertEqual(jf["BETA_FACTOR"], 0.5) # default
|
||||
|
||||
jfunc_gas = """
|
||||
DIMENS
|
||||
10 10 10 /
|
||||
GRID
|
||||
DX
|
||||
1000*0.25 /
|
||||
DY
|
||||
1000*0.25 /
|
||||
DZ
|
||||
1000*0.25 /
|
||||
TOPS
|
||||
100*0.25 /
|
||||
JFUNC
|
||||
GAS * 13.0 0.6 0.7 Z /
|
||||
PROPS\nREGIONS
|
||||
"""
|
||||
deck2 = parser.parse_string(jfunc_gas)
|
||||
js_gas = EclipseState(deck2)
|
||||
jf = js_gas.jfunc()
|
||||
self.assertEqual(jf['FLAG'], 'GAS')
|
||||
self.assertEqual(jf['DIRECTION'], 'Z')
|
||||
self.assertTrue('GAS_OIL' in jf)
|
||||
self.assertFalse('OIL_WATER' in jf)
|
||||
self.assertEqual(jf['GAS_OIL'], 13.0)
|
||||
self.assertEqual(jf["ALPHA_FACTOR"], 0.6) # default
|
||||
self.assertEqual(jf["BETA_FACTOR"], 0.7) # default
|
||||
|
||||
def test_summary(self):
|
||||
smry = self.spe3.summary_config
|
||||
smry = self.summary_config
|
||||
self.assertTrue('SummaryConfig' in repr(smry))
|
||||
self.assertTrue('WOPR' in smry) # hasKeyword
|
||||
self.assertFalse('NONO' in smry) # hasKeyword
|
||||
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
|
||||
|
||||
@@ -1,175 +0,0 @@
|
||||
import unittest
|
||||
|
||||
from opm.io.parser import Parser
|
||||
|
||||
from opm.io.ecl_state import EclipseState
|
||||
|
||||
|
||||
class TestState2(unittest.TestCase):
|
||||
FAULTS_DECK = """
|
||||
RUNSPEC
|
||||
|
||||
DIMENS
|
||||
10 10 10 /
|
||||
GRID
|
||||
DX
|
||||
1000*0.25 /
|
||||
DY
|
||||
1000*0.25 /
|
||||
DZ
|
||||
1000*0.25 /
|
||||
TOPS
|
||||
100*0.25 /
|
||||
FAULTS
|
||||
'F1' 1 1 1 4 1 4 'X' /
|
||||
'F2' 5 5 1 4 1 4 'X-' /
|
||||
/
|
||||
MULTFLT
|
||||
'F1' 0.50 /
|
||||
'F2' 0.50 /
|
||||
/
|
||||
EDIT
|
||||
MULTFLT /
|
||||
'F2' 0.25 /
|
||||
/
|
||||
OIL
|
||||
|
||||
GAS
|
||||
|
||||
TITLE
|
||||
The title
|
||||
|
||||
START
|
||||
8 MAR 1998 /
|
||||
|
||||
PROPS
|
||||
REGIONS
|
||||
SWAT
|
||||
1000*1 /
|
||||
SATNUM
|
||||
1000*2 /
|
||||
\
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
parser = Parser()
|
||||
cls.deck_spe3 = parser.parse('tests/spe3/SPE3CASE1.DATA')
|
||||
cls.deck_cpa = parser.parse('tests/data/CORNERPOINT_ACTNUM.DATA')
|
||||
cls.state = EclipseState(cls.deck_spe3)
|
||||
cls.cp_state = EclipseState(cls.deck_cpa)
|
||||
|
||||
def test_config(self):
|
||||
cfg = self.state.config()
|
||||
|
||||
init = cfg.init()
|
||||
self.assertTrue(init.hasEquil())
|
||||
self.assertFalse(init.restartRequested())
|
||||
self.assertEqual(0, init.getRestartStep())
|
||||
|
||||
rst = cfg.restart()
|
||||
self.assertFalse(rst.getWriteRestartFile(0))
|
||||
self.assertEqual(7, rst.getFirstRestartStep())
|
||||
|
||||
def test_repr_title(self):
|
||||
self.assertEqual('SPE 3 - CASE 1', self.state.title)
|
||||
|
||||
def test_state_nnc(self):
|
||||
self.assertFalse(self.state.has_input_nnc())
|
||||
|
||||
def test_grid(self):
|
||||
grid = self.state.grid()
|
||||
self.assertEqual(9, grid.nx)
|
||||
self.assertEqual(9, grid.ny)
|
||||
self.assertEqual(4, grid.nz)
|
||||
self.assertEqual(9*9*4, grid.nactive)
|
||||
self.assertEqual(9*9*4, grid.cartesianSize)
|
||||
g,i,j,k = 295,7,5,3
|
||||
self.assertEqual(g, grid.globalIndex(i,j,k))
|
||||
self.assertEqual((i,j,k), grid.getIJK(g))
|
||||
|
||||
def test_simulation(self):
|
||||
sim = self.state.simulation()
|
||||
self.assertFalse(sim.hasThresholdPressure())
|
||||
self.assertFalse(sim.useCPR())
|
||||
self.assertTrue(sim.hasDISGAS())
|
||||
self.assertTrue(sim.hasVAPOIL())
|
||||
|
||||
def test_tables(self):
|
||||
tables = self.state.tables()
|
||||
self.assertTrue('SGOF' in tables)
|
||||
self.assertTrue('SWOF' in tables)
|
||||
self.assertFalse('SOF' in tables)
|
||||
|
||||
ct = self.cp_state.tables()
|
||||
self.assertFalse('SGOF' in ct)
|
||||
self.assertTrue('SWOF' in ct)
|
||||
|
||||
tab = 'SWOF'
|
||||
col = 'KRW'
|
||||
self.assertAlmostEqual(0.1345, self.state.tables().evaluate(tab, 0, col, 0.5))
|
||||
self.assertAlmostEqual(0.39, self.state.tables().evaluate(tab, 0, col, 0.72))
|
||||
|
||||
with self.assertRaises(KeyError):
|
||||
self.state.tables().evaluate(tab, 0, 'NO', 1)
|
||||
|
||||
def test_faults(self):
|
||||
self.assertEquals([], self.state.faultNames())
|
||||
parser = Parser()
|
||||
faultdeck = parser.parse_string(self.FAULTS_DECK)
|
||||
faultstate = EclipseState(faultdeck)
|
||||
self.assertEqual(['F1', 'F2'], faultstate.faultNames())
|
||||
# 'F2' 5 5 1 4 1 4 'X-' / \n"
|
||||
f2 = faultstate.faultFaces('F2')
|
||||
self.assertTrue((4,0,0,'X-') in f2)
|
||||
self.assertFalse((3,0,0,'X-') in f2)
|
||||
|
||||
def test_jfunc(self):
|
||||
# jf["FLAG"] = WATER; # set in deck
|
||||
# jf["DIRECTION"] = XY; # default
|
||||
# jf["ALPHA_FACTOR"] = 0.5 # default
|
||||
# jf["BETA_FACTOR"] = 0.5 # default
|
||||
# jf["OIL_WATER"] = 21.0 # set in deck
|
||||
# jf["GAS_OIL"] = -1.0 # N/A
|
||||
|
||||
parser = Parser()
|
||||
deck = parser.parse('tests/data/JFUNC.DATA')
|
||||
js = EclipseState(deck)
|
||||
self.assertEqual('JFUNC TEST', js.title)
|
||||
jf = js.jfunc()
|
||||
print(jf)
|
||||
self.assertEqual(jf['FLAG'], 'WATER')
|
||||
self.assertEqual(jf['DIRECTION'], 'XY')
|
||||
self.assertFalse('GAS_OIL' in jf)
|
||||
self.assertTrue('OIL_WATER' in jf)
|
||||
self.assertEqual(jf['OIL_WATER'], 21.0)
|
||||
self.assertEqual(jf["ALPHA_FACTOR"], 0.5) # default
|
||||
self.assertEqual(jf["BETA_FACTOR"], 0.5) # default
|
||||
|
||||
jfunc_gas = """
|
||||
DIMENS
|
||||
10 10 10 /
|
||||
GRID
|
||||
DX
|
||||
1000*0.25 /
|
||||
DY
|
||||
1000*0.25 /
|
||||
DZ
|
||||
1000*0.25 /
|
||||
TOPS
|
||||
100*0.25 /
|
||||
JFUNC
|
||||
GAS * 13.0 0.6 0.7 Z /
|
||||
PROPS\nREGIONS
|
||||
"""
|
||||
deck2 = parser.parse_string(jfunc_gas)
|
||||
js_gas = EclipseState(deck2)
|
||||
jf = js_gas.jfunc()
|
||||
self.assertEqual(jf['FLAG'], 'GAS')
|
||||
self.assertEqual(jf['DIRECTION'], 'Z')
|
||||
self.assertTrue('GAS_OIL' in jf)
|
||||
self.assertFalse('OIL_WATER' in jf)
|
||||
self.assertEqual(jf['GAS_OIL'], 13.0)
|
||||
self.assertEqual(jf["ALPHA_FACTOR"], 0.6) # default
|
||||
self.assertEqual(jf["BETA_FACTOR"], 0.7) # default
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
import unittest
|
||||
import datetime
|
||||
from opm.tools import *
|
||||
from opm.io import load_deck_string, load_deck
|
||||
|
||||
from opm.io.parser import Parser
|
||||
|
||||
from utils import tmp
|
||||
class TestTimeVector(unittest.TestCase):
|
||||
|
||||
@@ -119,7 +121,7 @@ class TestTimeVector(unittest.TestCase):
|
||||
def test_no_leading_DATES(self):
|
||||
tv = TimeVector(datetime.date(1997, 11, 6), base_file="tests/data/schedule/part1.sch")
|
||||
s = str(tv)
|
||||
d = load_deck_string(s)
|
||||
d = Parser().parse_string(s)
|
||||
kw0 = d[0]
|
||||
self.assertEqual(kw0.name, "WELSPECS")
|
||||
|
||||
|
||||
@@ -120,10 +120,9 @@ class TestWells(unittest.TestCase):
|
||||
num_steps = len( self.sch.timesteps )
|
||||
w0 = self.sch.get_wells(num_steps - 1)[0]
|
||||
c0,c1 = w0.connections()
|
||||
"""
|
||||
|
||||
self.assertEqual((6,6,2), c0.pos)
|
||||
self.assertEqual((6,6,3), c1.pos)
|
||||
"""
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
Reference in New Issue
Block a user