4 changes: 2 additions & 2 deletions python/resdata/summary/rd_cmp.py
@@ -26,13 +26,13 @@ def load_summary(self):
self.summary = Summary(self.case)

def start_time_equal(self, other):
if self.summary.getDataStartTime() == other.summary.getDataStartTime():
if self.summary.get_data_start_time() == other.summary.get_data_start_time():
return True
else:
return False

def end_time_equal(self, other):
if self.summary.getEndTime() == other.summary.getEndTime():
if self.summary.get_end_time() == other.summary.get_end_time():
return True
else:
return False
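As a quick reference, a minimal sketch of the rename as seen from calling code; the import path and the placeholder case name "CASE" are assumptions (the case name is borrowed from the docstrings further down):

    from resdata.summary import Summary  # assumed import path

    summary = Summary("CASE")              # placeholder case name
    # old camelCase accessors (now removed): getDataStartTime(), getEndTime()
    start = summary.get_data_start_time()  # first date with data
    end = summary.get_end_time()           # last loaded time step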
4 changes: 2 additions & 2 deletions python/resdata/summary/rd_npv.py
@@ -158,10 +158,10 @@ def parse_expression(self, expression):
def compile(self, expression):
parsedExpression = self.parseExpression(expression)
self.code = (
"trange = self.baseCase.timeRange(self.start, self.end, self.interval)\n"
"trange = self.baseCase.time_range(self.start, self.end, self.interval)\n"
)
for key, var in self.keyList.items():
self.code += '%s = self.baseCase.blockedProduction("%s", trange)\n' % (
self.code += '%s = self.baseCase.blocked_production("%s", trange)\n' % (
var,
key,
)
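To make the effect of the change concrete, this is roughly the snippet compile() now generates for a single entry in keyList; the key "FOPT" mapped to the variable name FOPT is a hypothetical example:

    # code string built by compile() for keyList == {"FOPT": "FOPT"} (hypothetical)
    trange = self.baseCase.time_range(self.start, self.end, self.interval)
    FOPT = self.baseCase.blocked_production("FOPT", trange)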
176 changes: 19 additions & 157 deletions python/resdata/summary/rd_sum.py
@@ -145,7 +145,6 @@ class Summary(BaseCClass):
_sim_length = ResdataPrototype("double rd_sum_get_sim_length(rd_sum)")
_get_first_day = ResdataPrototype("double rd_sum_get_first_day(rd_sum)")
_get_data_start = ResdataPrototype("rd_time_t rd_sum_get_data_start(rd_sum)")
_get_unit = ResdataPrototype("char* rd_sum_get_unit(rd_sum, char*)")
_get_restart_case = ResdataPrototype("rd_sum_ref rd_sum_get_restart_case(rd_sum)")
_get_restart_step = ResdataPrototype("int rd_sum_get_restart_step(rd_sum)")
_get_simcase = ResdataPrototype("char* rd_sum_get_case(rd_sum)")
@@ -397,23 +396,6 @@ def add_t_step(self, report_step, sim_days):
tstep = self._add_tstep(report_step, sim_seconds).setParent(parent=self)
return tstep

def get_vector(self, key, report_only=False):
"""
Will return SummaryVector according to @key.

Will raise exception KeyError if the summary object does not
have @key.
"""
warnings.warn(
"The method get_vector() has been deprecated, use numpy_vector() instead",
DeprecationWarning,
)
self.assertKeyValid(key)
if report_only:
return SummaryVector(self, key, report_only=True)
else:
return SummaryVector(self, key)

def report_index_list(self):
"""
Internal function for working with report_steps.
@@ -446,37 +428,6 @@ def groups(self, pattern=None):
"""
return self._create_group_list(pattern)

def get_values(self, key, report_only=False):
"""
Will return numpy vector of all values according to @key.

If the optional argument report_only is true only the values
corresponding to report steps are included. The method is
also available as the 'values' property of an SummaryVector
instance.
"""
warnings.warn(
"The method get_values() has been deprecated - use numpy_vector() instead.",
DeprecationWarning,
)
if self.has_key(key):
key_index = self._get_general_var_index(key)
if report_only:
index_list = self.report_index_list()
values = np.zeros(len(index_list))
for i in range(len(index_list)):
time_index = index_list[i]
values[i] = self._iiget(time_index, key_index)
else:
length = self._data_length()
values = np.zeros(length)
for i in range(length):
values[i] = self._iiget(i, key_index)

return values
else:
raise KeyError("Summary object does not have key:%s" % key)

def _make_time_vector(
self, time_index: Sequence[Union[CTime, datetime.datetime, int, datetime.date]]
) -> TimeVector:
@@ -741,7 +692,7 @@ def from_pandas(cls, case, frame, dims=None, headers=None):

for i, time in enumerate(frame.index):
days = (time - start_time).total_seconds() / 86400
t_step = rd_sum.addTStep(i + 1, days)
t_step = rd_sum.add_t_step(i + 1, days)

for var in var_list:
t_step[var] = frame.iloc[i][var]
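A small sketch of building a case through from_pandas(); the datetime index and the key "FOPT" are assumptions about the expected frame layout, inferred from the loop above:

    import datetime
    import pandas as pd
    from resdata.summary import Summary  # assumed import path

    index = [datetime.datetime(2020, 1, 1), datetime.datetime(2020, 2, 1)]
    frame = pd.DataFrame({"FOPT": [0.0, 1000.0]}, index=index)
    # each row becomes one add_t_step() call, with days measured from the first index entry
    rd_sum = Summary.from_pandas("CASE", frame)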
@@ -800,13 +751,6 @@ def first_value(self, key):

return self._get_first_value(key)

def get_last_value(self, key):
warnings.warn(
"The function get_last_value() is deprecated, use last_value() instead",
DeprecationWarning,
)
return self.last_value(key)

def get_last(self, key):
"""
Will return the last SummaryNode corresponding to @key.
@@ -869,38 +813,6 @@ def assert_key_valid(self, key):
def __iter__(self):
return iter(self.keys())

def __getitem__(self, key):
"""
Implements [] operator - @key should be a summary key.

The returned value will be a SummaryVector instance.
"""
warnings.warn(
"The method the [] operator will change behaviour in the future. It will then return a plain numpy vector. You are advised to change to use the numpy_vector() method right away",
DeprecationWarning,
)
return self.get_vector(key)

def scale_vector(self, key, scalar):
msg = """The function Summary.scale_vector has been removed. As an alternative you
are advised to fetch vector as a numpy vector and then scale that yourself:

vec = rd_sum.numpy_vector(key)
vec *= scalar

"""
raise NotImplementedError(msg)

def shift_vector(self, key, addend):
msg = """The function Summary.shift_vector has been removed. As an alternative you
are advised to fetch vector as a numpy vector and then scale that yourself:

vec = rd_sum.numpy_vector(key)
vec += scalar

"""
raise NotImplementedError(msg)

def check_sim_time(self, date):
"""
Will check if the input date is in the time span [sim_start, sim_end].
@@ -928,7 +840,7 @@ def get_interp(self, key, days=None, date=None):
Also available as method get_interp() on the SummaryVector
class.
"""
self.assertKeyValid(key)
self.assert_key_valid(key)
if days is None and date is None:
raise ValueError("Must supply either days or date")
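For reference, a sketch of the renamed lookup; the key "RPR:2" is borrowed from the solve_days() docstring below and the dates are arbitrary:

    import datetime

    # interpolate a value either at a date or at a number of simulation days
    p1 = rd_sum.get_interp("RPR:2", date=datetime.date(2005, 6, 1))
    p2 = rd_sum.get_interp("RPR:2", days=100)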

@@ -968,22 +880,22 @@ def time_range(
num, timeUnit = TimeVector.parseTimeUnit(interval)

if start is None:
start = self.getDataStartTime()
start = self.get_data_start_time()
else:
if isinstance(start, datetime.date):
start = datetime.datetime(start.year, start.month, start.day, 0, 0, 0)

if start < self.getDataStartTime():
start = self.getDataStartTime()
if start < self.get_data_start_time():
start = self.get_data_start_time()

if end is None:
end = self.getEndTime()
end = self.get_end_time()
else:
if isinstance(end, datetime.date):
end = datetime.datetime(end.year, end.month, end.day, 0, 0, 0)

if end > self.getEndTime():
end = self.getEndTime()
if end > self.get_end_time():
end = self.get_end_time()

if end < start:
raise ValueError("Invalid time interval start after end")
@@ -1031,7 +943,7 @@ def time_range(
else:
trange.append(end)

data_start = self.getDataStartTime()
data_start = self.get_data_start_time()
if trange[0] < data_start:
trange[0] = CTime(data_start)
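A sketch of the clamping behaviour above, assuming an interval string such as "1Y" is accepted by TimeVector.parseTimeUnit():

    import datetime

    # start/end outside the loaded data are clamped to
    # get_data_start_time() / get_end_time() before the range is built
    trange = rd_sum.time_range(start=datetime.date(1980, 1, 1),
                               end=datetime.date(2050, 1, 1),
                               interval="1Y")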

@@ -1093,7 +1005,7 @@ def get_interp_vector(self, key, days_list=None, date_list=None):
Also available as method get_interp_vector() on the
SummaryVector class.
"""
self.assertKeyValid(key)
self.assert_key_valid(key)
if days_list:
if date_list:
raise ValueError("Must supply either days_list or date_list")
@@ -1258,40 +1170,6 @@ def get_dates(self, report_only=False):
else:
return self.dates

@property
def mpl_dates(self):
"""
Will return a numpy vector of dates ready for matplotlib

The content of the vector are dates in matplotlib format,
i.e. floats - generated by the date2num() function at the top
of this file.
"""
warnings.warn(
"The mpl_dates property has been deprecated - use numpy_dates instead",
DeprecationWarning,
)
return self.get_mpl_dates(False)

def get_mpl_dates(self, report_only=False):
"""
Will return a numpy vector of dates ready for matplotlib

If the optional argument @report_only is set to True, only
dates values corresponding to report steps will be
included. The content of the vector are dates in matplotlib
format, i.e. floats - generated by the date2num() function at
the top of this file.
"""
warnings.warn(
"The get_mpl_dates( ) method has been deprecated - use numpy_dates instead",
DeprecationWarning,
)
if report_only:
return [date2num(dt) for dt in self.report_dates]
else:
return [date2num(dt) for dt in self.dates]

@property
def report_step(self):
"""
@@ -1387,23 +1265,23 @@ def end_date(self):

@property
def data_start(self):
return self.getDataStartTime()
return self.get_data_start_time()

@property
def end_time(self):
"""
The time of the last (loaded) time step.
"""
return self.getEndTime()
return self.get_end_time()

@property
def start_time(self):
return self.getStartTime()
return self.get_start_time()

def get_data_start_time(self):
"""The first date we have data for.

This will mostly be equal to getStartTime(), but in the case
This will mostly be equal to get_start_time(), but in the case
of restarts, where the case we have restarted from is not
found, this time will be later than the true start of the
field.
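The properties above are thin wrappers around the snake_case getters; a minimal sketch:

    assert rd_sum.start_time == rd_sum.get_start_time()
    assert rd_sum.end_time == rd_sum.get_end_time()
    # data_start may be later than start_time when the parent case of a restart is missing
    assert rd_sum.data_start == rd_sum.get_data_start_time()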
@@ -1480,7 +1358,7 @@ def first_lt(self, key, limit):
def solve_dates(self, key, value, rates_clamp_lower=True):
"""Will solve the equation vector[@key] == value for dates.

See solveDays() for further details.
See solve_days() for further details.
"""
if not key in self:
raise KeyError("Unrecognized key:%s" % key)
@@ -1498,7 +1376,7 @@ def solve_days(self, key, value, rates_clamp_lower=True):
a list of values, which can have zero, one or multiple values:

case = Summary("CASE")
days = case.solveDays("RPR:2", 200)
days = case.solve_days("RPR:2", 200)

if len(days) == 0:
print("Pressure was never equal to 200 BARSA")
@@ -1637,8 +1515,8 @@ def __repr__(self):
Summary("NORNE_ATW2013.UNSMRY", [1997-11-06 00:00:00, 2006-12-01 00:00:00], keys=3781) at 0x1609e20
"""
name = self._nicename()
s_time = self.getStartTime()
e_time = self.getEndTime()
s_time = self.get_start_time()
e_time = self.get_end_time()
num_keys = len(self.keys())
content = 'name="%s", time=[%s, %s], keys=%d' % (name, s_time, e_time, num_keys)
return self._create_repr(content)
@@ -1660,7 +1538,7 @@ def export_csv(self, filename, keys=None, date_format="%Y-%m-%d", sep=";"):
limit the keys which are exported:

rd_sum = Summary("CASE")
rd_sum.exportCSV("case.csv", keys=["W*:OP1", "W*:OP2", "F*T"])
rd_sum.export_csv("case.csv", keys=["W*:OP1", "W*:OP2", "F*T"])

Will export all well related variables for wells 'OP1' and
'OP2' and all total field vectors.
@@ -1709,19 +1587,3 @@ def resample(
"void rd_sum_init_double_frame_interp(rd_sum, rd_sum_vector, rd_time_t_vector, double*)",
bind=False,
)

monkey_the_camel(Summary, "varType", Summary.var_type, classmethod)
monkey_the_camel(Summary, "addVariable", Summary.add_variable)
monkey_the_camel(Summary, "addTStep", Summary.add_t_step)
monkey_the_camel(Summary, "assertKeyValid", Summary.assert_key_valid)
monkey_the_camel(Summary, "scaleVector", Summary.scale_vector)
monkey_the_camel(Summary, "shiftVector", Summary.shift_vector)
monkey_the_camel(Summary, "timeRange", Summary.time_range)
monkey_the_camel(Summary, "blockedProduction", Summary.blocked_production)
monkey_the_camel(Summary, "getDataStartTime", Summary.get_data_start_time)
monkey_the_camel(Summary, "getStartTime", Summary.get_start_time)
monkey_the_camel(Summary, "getEndTime", Summary.get_end_time)
monkey_the_camel(Summary, "solveDates", Summary.solve_dates)
monkey_the_camel(Summary, "solveDays", Summary.solve_days)
monkey_the_camel(Summary, "dumpCSVLine", Summary.dump_csv_line)
monkey_the_camel(Summary, "exportCSV", Summary.export_csv)