mirror of https://gitlab.com/pamhyr/pamhyr2

Results: Fix save method.

parent 936bfc753a
commit 381bc37d13
@@ -30,6 +30,8 @@ logger = logging.getLogger()


 class Results(SQLSubModel):
+    _sub_classes = [River]
+
     def __init__(self, id=-1, study=None, solver=None,
                  repertory="", name="0"):
         super(Results, self).__init__(
@@ -90,7 +92,8 @@ class Results(SQLSubModel):
         execute(f"""
           CREATE TABLE results{ext} (
             {cls.create_db_add_pamhyr_id()},
-            solver TEXT NOT NULL,
+            solver_name TEXT NOT NULL,
+            solver_type TEXT NOT NULL,
             study_revision INTEGER NOT NULL,
             creation_data DATE NOT NULL,
             nb_timestamps INTEGER NOT NULL,
@@ -101,7 +104,7 @@ class Results(SQLSubModel):
           )
         """)

-        return cls._create_submodel(execute)
+        return True

     @classmethod
     def _db_update(cls, execute, version, data=None):
@@ -114,13 +117,13 @@ class Results(SQLSubModel):

     @classmethod
     def _db_load(cls, execute, data=None):
-        new = []
+        new = None

         study = data['study']
         status = data['status']
         scenario = data["scenario"]

-        values = execute(
+        table = execute(
             "SELECT pamhyr_id, solver_name, solver_type, " +
             "study_revision, creation_data, nb_timestamps, timestamps, " +
             "scenario " +
@@ -128,44 +131,49 @@ class Results(SQLSubModel):
             f"WHERE scenario = {scenario.id}"
         )

-        for v in values:
+        if len(table) > 1:
+            logger.warning("Multiple results for this scenario")
+
+        for v in table:
             it = iter(v)

             pid = next(it)
-            solver = next(it)
+            solver_name = next(it)
+            solver_type = next(it)
             revision = next(it)
             creation_date = next(it)
             nb_timestamps = next(it)
             timestamps_bytes = next(it)
             owner_scenario = next(it)

-            new_results = cls(
-                id=pid, status=status,
-                owner_scenario=owner_scenario
-            )
+            new_results = cls(study=study)
             new_results.set("solver_name", solver_name)
             new_results.set("solver_type", solver_type)
             new_results.set("study_revision", revision)
             new_results.set("creation_date", creation_date)

-            sf = ">" + ''.join(itertools.repeat("d", len(nb_timestamps)))
-            ts = struct.unpack(sf, timestamp_bytes)
+            sf = ">" + ''.join(itertools.repeat("d", nb_timestamps))
+            ts = struct.unpack(sf, timestamps_bytes)
             new_results.set("timestamps", ts)

             data["timestamps"] = ts
             new_results._river = River._db_load(execute, data)

-            new.append(new_results)
+            new = new_results

         return new

     def _db_save(self, execute, data=None):
+        if self._status.scenario.id != self._owner_scenario:
+            return
+
         execute(
-            "DELETED FROM results " +
+            "DELETE FROM results " +
             f"WHERE scenario = {self._owner_scenario}"
         )
         execute(
-            "DELETED FROM results_data " +
+            "DELETE FROM results_data " +
             f"WHERE scenario = {self._owner_scenario}"
         )

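
The loading fix above hinges on two details of Python's struct module: the count of "d" codes in the format string must be the integer nb_timestamps itself (not len() of it), and pack/unpack work on individual values, hence the *ts splat used on save. A minimal standalone sketch, standard library only, with sample values that are illustrative rather than taken from Pamhyr:

import itertools
import struct

# Timestamps are stored as big-endian doubles ("d"); sample values only.
timestamps = [0.0, 10.0, 20.0, 30.0]
sf = ">" + ''.join(itertools.repeat("d", len(timestamps)))
blob = struct.pack(sf, *timestamps)   # pack takes individual values, hence *

# On load the count comes from the nb_timestamps column, an integer, so the
# format string is rebuilt from it directly (not from len() of it).
nb_timestamps = len(timestamps)
sf = ">" + ''.join(itertools.repeat("d", nb_timestamps))
ts = struct.unpack(sf, blob)          # a tuple of floats
assert list(ts) == timestamps
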
@@ -183,11 +191,12 @@ class Results(SQLSubModel):
         execute(
             "INSERT INTO " +
             "results (pamhyr_id, solver_name, solver_type, " +
-            "study_revision, creation_data, nb_timestamps, timestamps, " +
-            "scenario) VALUES (?, ?, ?, ?, ?, ?, ?)",
+            "study_revision, creation_data, " +
+            "nb_timestamps, timestamps, " +
+            "scenario) VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
             self._pamhyr_id, solver_name, solver_type,
-            self._owner_scenario.revision, self.get("creation_data"),
-            len(ts), struct.pack(sf, ts), self._owner_scenario
+            self._status.scenario.id, self.get("creation_date"),
+            len(ts), struct.pack(sf, *ts), self._owner_scenario
         )

         data["result"] = self._pamhyr_id
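
The INSERT now binds eight values, so the VALUES clause needs eight ? placeholders. A small sqlite3 sketch with a made-up table (not the actual Pamhyr schema) showing that the placeholder count must match the number of bound parameters:

import sqlite3
import struct

db = sqlite3.connect(":memory:")
cur = db.cursor()
cur.execute("CREATE TABLE results_demo (pamhyr_id INTEGER, solver_name TEXT, "
            "nb_timestamps INTEGER, timestamps BLOB, scenario INTEGER)")

ts = (0.0, 10.0, 20.0)
blob = struct.pack(">" + "d" * len(ts), *ts)

# Five columns, five placeholders, five bound values: sqlite3 raises
# ProgrammingError when the counts do not match.
cur.execute("INSERT INTO results_demo VALUES (?, ?, ?, ?, ?)",
            (1, "demo-solver", len(ts), blob, 0))
db.commit()
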
@@ -21,6 +21,7 @@ import itertools
 from functools import reduce
 from datetime import datetime

+from Model.Scenario import Scenario
 from Model.Tools.PamhyrDB import SQLSubModel

 logger = logging.getLogger()
@@ -62,9 +63,10 @@ class Profile(SQLSubModel):
         return self._data[timestamp]

     def get_key(self, key):
-        return list(
+        res = list(
             map(lambda ts: self._data[ts][key], self._data)
         )
+        return res

     def get_ts_key(self, timestamp, key):
         if timestamp in self._data:
@@ -96,10 +98,7 @@ class Profile(SQLSubModel):
           )
         """)

-        if ext == "_tmp":
-            return True
-
-        return cls._create_submodel(execute)
+        return True

     @classmethod
     def _db_update(cls, execute, version, data=None):
@@ -156,26 +155,38 @@ class Profile(SQLSubModel):

     def get_keys(self):
         return reduce(
-            lambda acc, ts: acc.union(d[ts].keys())
+            lambda acc, ts: acc.union(self._data[ts].keys()),
+            self._data.keys(), set()
         )

     def _db_save(self, execute, data=None):
+        logger.debug("Save profile...")
         pid = self._pamhyr_id
         result = data["result"]

-        for key in self.get_keys():
-            data = self.get_key(key)
+        keys = self.get_keys()
+        logger.debug(f"{keys}...")
+        for key in keys:
+            values = self.get_key(key)
+            if any(filter(lambda x: type(x) in [tuple, list], values)):
+                logger.debug(f"{key} : {len(values)} {values[0]}")
+                continue

-            sf = ">" + ''.join(itertools.repeat("f", len(data)))
-            data_bytes = struct.pack(sf, data)
+            values = list(map(float, values))
+
+            sf = ">" + ''.join(itertools.repeat("f", len(values)))
+            data_bytes = struct.pack(sf, *values)

             execute(
                 "INSERT INTO " +
-                "results_data (pamhyr_id, result, , " +
-                "study_revision, key, len_data, data, " +
-                "scenario) VALUES (?, ?, ?, ?, ?, ?, ?)",
-                pid, result, self._owner_scenario.revision,
-                key, len(data), data_bytes,
+                "results_data (pamhyr_id, result, " +
+                "reach, section, " +
+                "key, len_data, data, " +
+                "scenario) VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
+                pid, result,
+                data["reach"].pamhyr_id,
+                self._profile.pamhyr_id,
+                key, len(values), data_bytes,
                 self._owner_scenario
             )
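
The corrected get_keys folds every per-timestamp dictionary into one set of keys via reduce. A standalone sketch of the same pattern over a plain dict of dicts (sample data is illustrative only):

from functools import reduce

# _data-style mapping: timestamp -> {key: value}; sample values only.
data = {
    0.0: {"z": 1.0, "q": 2.0},
    10.0: {"z": 1.1, "q": 2.1, "v": 0.5},
}

keys = reduce(
    lambda acc, ts: acc.union(data[ts].keys()),
    data.keys(), set()
)
assert keys == {"z", "q", "v"}
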
@@ -183,6 +194,8 @@ class Profile(SQLSubModel):


 class Reach(SQLSubModel):
+    _sub_classes = [Profile]
+
     def __init__(self, reach, study):
         super(Reach, self).__init__(
             id=-1, status=study.status,
@@ -247,12 +260,15 @@ class Reach(SQLSubModel):
         return new_reach

     def _db_save(self, execute, data=None):
+        logger.debug("Save reach...")
         for profile in self._profiles:
             data["profile"] = profile.geometry.pamhyr_id
             profile._db_save(execute, data)


 class River(SQLSubModel):
+    _sub_classes = [Reach]
+
     def __init__(self, study):
         super(River, self).__init__(
             id=-1, status=study.status,
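
The _sub_classes attributes added above presumably let the SQLSubModel machinery recurse over nested models (Profile under Reach, Reach under River, River under Results). A generic sketch of that kind of registry pattern, not the actual Pamhyr implementation:

# Hypothetical illustration of a _sub_classes style registry: each model
# lists its nested models so a create (or save) pass can recurse over them.
class SubModel:
    _sub_classes = []

    @classmethod
    def create_all(cls, execute):
        for sub in cls._sub_classes:
            sub.create_all(execute)
        execute(f"-- create table for {cls.__name__}")


class Profile(SubModel):
    _sub_classes = []


class Reach(SubModel):
    _sub_classes = [Profile]


class River(SubModel):
    _sub_classes = [Reach]


River.create_all(print)  # prints Profile, then Reach, then River
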
@@ -315,6 +331,7 @@ class River(SQLSubModel):
         return new_river

     def _db_save(self, execute, data=None):
+        logger.debug("Save river...")
         for reach in self._reachs:
-            data["reach"] = reach.geometry.pamhyr_id
+            data["reach"] = reach.geometry
             reach._db_save(execute, data)

@@ -58,6 +58,7 @@ from Model.LateralContributionsAdisTS.LateralContributionsAdisTSList \
     import LateralContributionsAdisTSList
 from Model.D90AdisTS.D90AdisTSList import D90AdisTSList
 from Model.DIFAdisTS.DIFAdisTSList import DIFAdisTSList
+from Model.Results.Results import Results

 logger = logging.getLogger()

@@ -437,6 +438,7 @@ class River(Graph):
         LateralContributionsAdisTSList,
         D90AdisTSList,
         DIFAdisTSList,
+        Results
     ]

     def __init__(self, status=None):
@@ -473,6 +475,8 @@ class River(Graph):
         self._D90AdisTS = D90AdisTSList(status=self._status)
         self._DIFAdisTS = DIFAdisTSList(status=self._status)

+        self._results = {}
+
     @classmethod
     def _db_create(cls, execute):
         cls._create_submodel(execute)
@@ -585,6 +589,9 @@ class River(Graph):

         return new

+    def _db_load_results(self, execute, data=None):
+        self._results = Results._db_load(execute, data)
+
     def _db_save(self, execute, data=None):
         self._db_save_delete_artefact(execute, data)

@@ -610,6 +617,9 @@ class River(Graph):
         objs.append(self._D90AdisTS)
         objs.append(self._DIFAdisTS)

+        if self._results is not None:
+            objs.append(self._results)
+
         self._save_submodel(execute, objs, data)
         return True

@@ -353,7 +353,10 @@ class Study(SQLModel):
             )
         )

-        data = {"status": new.status}
+        data = {
+            "study": new,
+            "status": new.status
+        }

         # Scenarios
         new.scenarios = Scenarios._db_load(
@@ -381,10 +384,11 @@ class Study(SQLModel):

         # Load river data
         new._river = River._db_load(
-            sql_exec,
-            data=data
+            sql_exec, data=data
         )

+        new._river._db_load_results(sql_exec, data=data)
+
         return new

     def _save(self, progress=None):
@@ -442,7 +446,7 @@ class Study(SQLModel):
         )
         self.commit()

-    def sql_save_request_count(self):
+    def sql_save_request_count(self, *args, **kargs):
         return self._count()

     def _count(self):

@@ -95,11 +95,15 @@ class SQLModel(SQL):
     def _save_submodel(self, objs, data=None):
         progress = data if data is not None else lambda: None

-        def fn(sql):
+        def fn(sql, *args, **kargs):
+            if "fetch_one" not in kargs:
+                kargs["fetch_one"] = False
+
+            if "commit" not in kargs:
+                kargs["commit"] = False
+
             res = self.execute(
-                sql,
-                fetch_one=False,
-                commit=False
+                sql, *args, **kargs
             )
             progress()
             return res
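
The reworked fn wrapper forwards positional SQL parameters while still defaulting fetch_one and commit to False when the caller leaves them out. A generic sketch of that pattern with a stand-in execute (dict.setdefault is an equivalent spelling of the "not in kargs" checks):

def make_wrapper(execute, progress=lambda: None):
    # Forward *args/**kargs to execute, filling in defaults only when the
    # caller did not set them.
    def fn(sql, *args, **kargs):
        kargs.setdefault("fetch_one", False)
        kargs.setdefault("commit", False)
        res = execute(sql, *args, **kargs)
        progress()
        return res
    return fn


def fake_execute(sql, *args, fetch_one=True, commit=False, **kargs):
    return (sql, args, fetch_one, commit)


fn = make_wrapper(fake_execute)
assert fn("SELECT 1", 42) == ("SELECT 1", (42,), False, False)
assert fn("SELECT 1", commit=True)[3] is True
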
@@ -117,7 +121,7 @@ class SQLModel(SQL):
     def _count(self):
         raise NotImplementedMethodeError(self, self._count)

-    def _save_count(self, objs, data={}):
+    def _save_count(self, objs, *args, data={}, **kargs):
         counter = {
             "insert": 0,
             "update": 0,
@@ -125,7 +129,7 @@ class SQLModel(SQL):
             "other": 0,
         }

-        def fn(sql):
+        def fn(sql, *args, **kargs):
             if "insert" in sql.lower():
                 counter["insert"] = counter["insert"] + 1
             elif "update" in sql.lower():

@@ -106,12 +106,12 @@ class SQL(object):
         return value

     @timer
-    def execute(self, cmd, fetch_one=True, commit=False):
-        logger.debug(f"SQL - {cmd}")
+    def execute(self, cmd, *args, fetch_one=True, commit=False, **kargs):
+        logger.debug(f"SQL - {cmd} + {', '.join(map(str, args))}")

         value = None
         try:
-            res = self._cur.execute(cmd)
+            res = self._cur.execute(cmd, args)

             if commit:
                 self._db.commit()
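
With the extended signature, callers can pass values, including the struct-packed BLOBs above, as positional parameters, and sqlite3 binds them through cursor.execute(cmd, args) instead of having them interpolated into the SQL text. A minimal sketch of that binding behaviour, independent of the Pamhyr classes:

import sqlite3

db = sqlite3.connect(":memory:")
cur = db.cursor()
cur.execute("CREATE TABLE t (id INTEGER, data BLOB)")

# Binary data cannot be spliced into the SQL text safely; it has to be bound.
blob = bytes([0, 1, 2, 255])
args = (1, blob)
cur.execute("INSERT INTO t VALUES (?, ?)", args)  # args passed as a sequence
db.commit()

row = cur.execute("SELECT data FROM t WHERE id = ?", (1,)).fetchone()
assert row[0] == blob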