mirror of https://gitlab.com/pamhyr/pamhyr2
Results: Add additional data to SQL study save.
parent
2487bec6d6
commit
14549330b9
|
|
@ -29,8 +29,133 @@ from Model.Results.River.River import River
|
||||||
logger = logging.getLogger()
|
logger = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
class AdditionalData(SQLSubModel):
    """Extra user-imported curve attached to a solver result.

    Wraps one (x, y) data series plus its display metadata (axis types,
    legend, unit) imported into a results window, and persists it in the
    ``results_add_data`` SQL table.  Both numeric series are stored as
    big-endian IEEE-754 double BLOBs.

    NOTE(review): relies on project helpers defined elsewhere
    (``SQLSubModel``, ``Scenario.create_db_add_scenario*``,
    ``create_db_add_pamhyr_id``) — not visible in this chunk.
    """

    _sub_classes = []

    def __init__(self, id=-1, study=None, data=None):
        """Create an additional-data record.

        Args:
            id: Pamhyr id (-1 lets the base class allocate one).
            study: Owner study; assumed non-None with a current
                   scenario in ``study.status`` — TODO confirm callers.
            data: dict with keys 'type_x', 'type_y', 'legend', 'unit',
                  'x', 'y' ('x'/'y' are sequences of floats).
        """
        super(AdditionalData, self).__init__(
            id=id, status=study.status,
            owner_scenario=study.status.scenario.id
        )

        self._study = study
        self._data = data

    @property
    def data(self):
        """Raw data dict (see __init__ for the expected keys)."""
        return self._data

    @classmethod
    def _db_create(cls, execute, ext=""):
        """Create the ``results_add_data{ext}`` table.

        Returns True for a scenario-suffixed (ext != "") table,
        otherwise delegates to submodel table creation.
        """
        execute(f"""
          CREATE TABLE results_add_data{ext} (
            {cls.create_db_add_pamhyr_id()},
            result INTEGER NOT NULL,
            type_x TEXT NOT NULL,
            type_y TEXT NOT NULL,
            legend TEXT NOT NULL,
            unit TEXT NOT NULL,
            data_len INTEGER NOT NULL,
            x BLOB NOT NULL,
            y BLOB NOT NULL,
            {Scenario.create_db_add_scenario()},
            {Scenario.create_db_add_scenario_fk()},
            FOREIGN KEY(result) REFERENCES results(pamhyr_id),
            PRIMARY KEY(pamhyr_id, result, scenario)
          )
        """)

        if ext != "":
            return True

        return cls._create_submodel(execute)

    @classmethod
    def _db_update(cls, execute, version, data=None):
        """Migrate the schema: the table first appears after 0.2.1."""
        major, minor, release = version.strip().split(".")

        if major == "0" and int(minor) == 2 and int(release) <= 1:
            cls._db_create(execute)

        return cls._update_submodel(execute, version, data)

    @classmethod
    def _db_load(cls, execute, data=None):
        """Load every additional-data row of the current scenario.

        Args:
            data: context dict; must provide 'study' and 'scenario'.

        Returns:
            A (possibly empty) list of AdditionalData objects.
        """
        new = []

        study = data['study']
        scenario = data["scenario"]

        table = execute(
            "SELECT pamhyr_id, type_x, type_y, " +
            "legend, unit, data_len, x, y, " +
            "scenario " +
            "FROM results_add_data " +
            f"WHERE scenario = {scenario.id}"
        )

        if table is None:
            return new

        for row in table:
            it = iter(row)

            pid = next(it)
            type_x = next(it)
            type_y = next(it)
            legend = next(it)
            unit = next(it)
            data_len = next(it)
            bx = next(it)
            by = next(it)
            owner_scenario = next(it)  # consumed; owner set via study

            # Big-endian doubles, mirroring the packing in _db_save.
            data_format = f">{data_len}d"
            x = struct.unpack(data_format, bx)
            y = struct.unpack(data_format, by)

            # Bug fix: the previous version built this dict from
            # undefined names (tmp_dict/data_type/tmp_unit → NameError)
            # and then discarded it.  Round-trip the stored columns
            # instead and actually attach them to the loaded object.
            values = {
                'type_x': type_x,
                'type_y': type_y,
                'legend': legend,
                'unit': unit,
                'x': x, 'y': y
            }

            new.append(cls(id=pid, study=study, data=values))

        return new

    def _db_save(self, execute, data=None):
        """Insert this record; no-op if it belongs to another scenario.

        Expects data["result"] to hold the owning result's pamhyr_id.
        Returns True on (attempted) insertion, None when skipped.
        """
        if self._status.scenario.id != self._owner_scenario:
            return

        data_len = len(self._data["x"])

        # Pack both series as big-endian doubles (see _db_load).
        data_format = f">{data_len}d"
        bx = struct.pack(data_format, *self._data["x"])
        by = struct.pack(data_format, *self._data["y"])

        execute(
            "INSERT INTO " +
            "results_add_data (pamhyr_id, result, " +
            "type_x, type_y, " +
            "legend, unit, data_len, x, y, " +
            "scenario) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
            self._pamhyr_id, data["result"],
            self._data["type_x"], self._data["type_y"],
            self._data["legend"], self._data["unit"],
            data_len, bx, by, self._owner_scenario
        )

        return True
||||||
class Results(SQLSubModel):
|
class Results(SQLSubModel):
|
||||||
_sub_classes = [River]
|
_sub_classes = [River, AdditionalData]
|
||||||
|
|
||||||
def __init__(self, id=-1, study=None, solver=None,
|
def __init__(self, id=-1, study=None, solver=None,
|
||||||
repertory="", name="0"):
|
repertory="", name="0"):
|
||||||
|
|
@ -50,6 +175,7 @@ class Results(SQLSubModel):
|
||||||
# Keep results creation date
|
# Keep results creation date
|
||||||
"creation_date": datetime.now(),
|
"creation_date": datetime.now(),
|
||||||
"study_revision": study.status.version,
|
"study_revision": study.status.version,
|
||||||
|
"additional_data": [],
|
||||||
}
|
}
|
||||||
|
|
||||||
if solver is not None:
|
if solver is not None:
|
||||||
|
|
@ -206,6 +332,11 @@ class Results(SQLSubModel):
|
||||||
f"WHERE scenario = {self._owner_scenario} " +
|
f"WHERE scenario = {self._owner_scenario} " +
|
||||||
f"AND result = {pid}"
|
f"AND result = {pid}"
|
||||||
)
|
)
|
||||||
|
execute(
|
||||||
|
"DELETE FROM results_add_data " +
|
||||||
|
f"WHERE scenario = {self._owner_scenario} " +
|
||||||
|
f"AND result = {pid}"
|
||||||
|
)
|
||||||
|
|
||||||
def _db_save(self, execute, data=None):
|
def _db_save(self, execute, data=None):
|
||||||
if self._status.scenario.id != self._owner_scenario:
|
if self._status.scenario.id != self._owner_scenario:
|
||||||
|
|
@ -238,4 +369,7 @@ class Results(SQLSubModel):
|
||||||
data["result"] = self._pamhyr_id
|
data["result"] = self._pamhyr_id
|
||||||
self._river._db_save(execute, data)
|
self._river._db_save(execute, data)
|
||||||
|
|
||||||
|
for add_data in self.get("additional_data"):
|
||||||
|
add_data._db_save(execute, data)
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
|
||||||
|
|
@ -37,7 +37,7 @@ logger = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
class Study(SQLModel):
|
class Study(SQLModel):
|
||||||
_version = "0.2.1"
|
_version = "0.2.2"
|
||||||
|
|
||||||
_sub_classes = [
|
_sub_classes = [
|
||||||
Scenario,
|
Scenario,
|
||||||
|
|
|
||||||
|
|
@ -59,6 +59,8 @@ from PyQt5.QtWidgets import (
|
||||||
QSlider, QLabel, QWidget, QGridLayout, QTabBar, QInputDialog
|
QSlider, QLabel, QWidget, QGridLayout, QTabBar, QInputDialog
|
||||||
)
|
)
|
||||||
|
|
||||||
|
from Model.Results.Results import AdditionalData
|
||||||
|
|
||||||
from View.Tools.Plot.PamhyrCanvas import MplCanvas
|
from View.Tools.Plot.PamhyrCanvas import MplCanvas
|
||||||
from View.Tools.Plot.PamhyrToolbar import PamhyrPlotToolbar
|
from View.Tools.Plot.PamhyrToolbar import PamhyrPlotToolbar
|
||||||
|
|
||||||
|
|
@ -1249,7 +1251,17 @@ class ResultsWindow(PamhyrWindow):
|
||||||
|
|
||||||
x, y = self.read_csv_file_data(filename)
|
x, y = self.read_csv_file_data(filename)
|
||||||
data = self.read_csv_file_format(x, y)
|
data = self.read_csv_file_format(x, y)
|
||||||
self.read_csv_file_update_plot(data)
|
|
||||||
|
results = self._results[self._current_results[0]]
|
||||||
|
data_lst = results.get("additional_data")
|
||||||
|
data_lst.append(
|
||||||
|
AdditionalData(
|
||||||
|
study=self._study,
|
||||||
|
data=data
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
self.update_plot_additional_data()
|
||||||
|
|
||||||
def read_csv_file_data(self, filename):
|
def read_csv_file_data(self, filename):
|
||||||
sep = ","
|
sep = ","
|
||||||
|
|
@ -1262,13 +1274,15 @@ class ResultsWindow(PamhyrWindow):
|
||||||
if line[0] != "*" and line[0] != "#" and line[0] != "$":
|
if line[0] != "*" and line[0] != "#" and line[0] != "$":
|
||||||
row = line.split(sep)
|
row = line.split(sep)
|
||||||
|
|
||||||
if len(row) >= 2:
|
if len(row) < 2:
|
||||||
try:
|
continue
|
||||||
fx, fy = float(row[0]), float(row[1])
|
|
||||||
x.append(fx)
|
try:
|
||||||
y.append(fy)
|
fx, fy = float(row[0]), float(row[1])
|
||||||
except:
|
x.append(fx)
|
||||||
continue
|
y.append(fy)
|
||||||
|
except Exception as e:
|
||||||
|
continue
|
||||||
|
|
||||||
return x, y
|
return x, y
|
||||||
|
|
||||||
|
|
@ -1309,28 +1323,35 @@ class ResultsWindow(PamhyrWindow):
|
||||||
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def update_plot_additional_data(self):
    """Redraw every imported additional-data curve of the current result.

    water_elevation/time series are drawn on canvas_2 (rkc plot) and
    discharge/time series on canvas_4 (h plot); each series is also
    forwarded to every registered additional plot window.
    """
    results = self._results[self._current_results[0]]

    for add in results.get("additional_data"):
        d = add._data
        x, y = d['x'], d['y']
        legend = d['legend']
        unit = d['unit']

        if (
                d['type_x'] == 'water_elevation' and
                d['type_y'] == 'time'
        ):
            # NOTE(review): unlike the discharge branch below, the
            # returned line is not registered in plot_rkc._line —
            # confirm whether it should appear in the legend.
            line = self.canvas_2.axes.plot(
                x, y, marker="+",
                label=legend + ' ' + unit
            )
            self.plot_rkc.canvas.draw_idle()
            self.plot_rkc.update_idle()

        if d['type_x'] == 'discharge' and d['type_y'] == 'time':
            line = self.canvas_4.axes.plot(
                x, y, marker="+",
                label=legend + ' ' + unit
            )
            self.plot_h._line.append(line)
            self.plot_h.enable_legend()
            self.plot_h.canvas.draw_idle()
            self.plot_h.update_idle()

        for p in self._additional_plot:
            self._additional_plot[p].add_imported_plot(d)
|
||||||
Loading…
Reference in New Issue