# BoundaryCondition.py -- Pamhyr
# Copyright (C) 2023-2024 INRAE
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

# -*- coding: utf-8 -*-

import logging

from tools import (
    trace, timer,
    old_pamhyr_date_to_timestamp,
    date_iso_to_timestamp,
    date_dmy_to_timestamp,
)

from Model.Tools.PamhyrDB import SQLSubModel
from Model.Except import NotImplementedMethodeError
from Model.Scenario import Scenario

logger = logging.getLogger()
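
# The `data` dictionary threaded through the _db_* methods below acts as a
# shared context between BoundaryCondition and its Data rows. As used in
# this module it carries: "status" (the modification-status object),
# "tab" (the tab name the condition belongs to), "nodes" (the known river
# nodes), "lc" (the owning BoundaryCondition while saving or loading its
# rows) and "ind" (the row index while saving).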


class Data(SQLSubModel):
    _sub_classes = []

    def __init__(self, id: int = -1,
                 types=[float, float],
                 status=None):
        super(Data, self).__init__(id)
        self._status = status

        self._types = types
        self._data = []

    @classmethod
    def _db_create(cls, execute, ext=""):
        execute(f"""
          CREATE TABLE boundary_condition_data{ext}(
            {cls.create_db_add_pamhyr_id()},
            ind INTEGER NOT NULL,
            data0 TEXT NOT NULL,
            data1 TEXT NOT NULL,
            lc INTEGER,
            {Scenario.create_db_add_scenario()},
            {Scenario.create_db_add_scenario_fk()},
            FOREIGN KEY(lc) REFERENCES boundary_condition(pamhyr_id),
            PRIMARY KEY(pamhyr_id, scenario)
          )
        """)

        return cls._create_submodel(execute)
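
    # For illustration only, a stored row could look like:
    #   pamhyr_id=3, ind=0, data0='0.0', data1='12.5', lc=7, scenario=1
    # data0/data1 are kept as TEXT and converted back through the column
    # types (self._types) when the row is loaded.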

    @classmethod
    def _db_update(cls, execute, version, data=None):
        major, minor, release = version.strip().split(".")

        if major == "0" and int(minor) < 1:
            cls._db_update_to_0_1_0(execute)

        return cls._update_submodel(execute, version, data)
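
    # SQLite cannot alter primary keys or foreign keys in place, so the
    # 0.1.0 migration rebuilds the table: add the new columns, create a
    # "_tmp" copy with the new schema, copy the rows over, then drop the
    # old table and rename the copy.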

    @classmethod
    def _db_update_to_0_1_0(cls, execute):
        table = "boundary_condition_data"

        cls.update_db_add_pamhyr_id(execute, table)
        Scenario.update_db_add_scenario(execute, table)

        cls._db_create(execute, ext="_tmp")
        execute(
            f"INSERT INTO {table}_tmp " +
            "(pamhyr_id, ind, data0, data1, lc, scenario) " +
            "SELECT pamhyr_id, ind, data0, data1, lc, scenario " +
            f"FROM {table}"
        )

        execute(f"DROP TABLE {table}")
        execute(f"ALTER TABLE {table}_tmp RENAME TO {table}")

    @classmethod
    def _db_load(cls, execute, data=None):
        new = []
        lc = data["lc"]

        values = execute(
            "SELECT pamhyr_id, ind, data0, data1 " +
            "FROM boundary_condition_data " +
            f"WHERE lc = {lc._pamhyr_id} " +
            "ORDER BY ind ASC"
        )

        for v in values:
            it = iter(v)

            pid = next(it)
            ind = next(it)
            data0 = lc._types[0](next(it))
            data1 = lc._types[1](next(it))

            nd = cls(
                id=pid,
                types=lc._types,
                status=data['status']
            )
            nd._data = [data0, data1]

            new.append((ind, nd))

        return new
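
    # _db_save expects the caller to provide the owning condition in
    # data["lc"] and the row index in data["ind"] (see
    # BoundaryCondition._db_save below).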

    def _db_save(self, execute, data=None):
        pid = self._pamhyr_id
        ind = data["ind"]
        data0 = self._db_format(str(self[0]))
        data1 = self._db_format(str(self[1]))
        lc = data["lc"]

        sql = (
            "INSERT INTO " +
            "boundary_condition_data(pamhyr_id, ind, data0, data1, lc) " +
            f"VALUES ({pid}, {ind}, '{data0}', '{data1}', {lc._pamhyr_id})"
        )
        execute(sql)

    def __getitem__(self, key):
        return self._types[key](self._data[key])
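
    # Values are coerced through the column types on access. For example
    # (illustrative), with types=[float, float] and _data=["1.5", "2"],
    # d[0] returns 1.5, and d[1] = "3" stores 3.0.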

    def __setitem__(self, key, value):
        self._data[key] = self._types[key](value)


class BoundaryCondition(SQLSubModel):
    _sub_classes = [Data]

    def __init__(self, id: int = -1,
                 name: str = "", status=None):
        super(BoundaryCondition, self).__init__(id)

        self._status = status

        self._name = name
        self._type = ""
        self._node = None
        self._data = []
        self._header = []
        self._types = [float, float]

    @classmethod
    def _db_create(cls, execute, ext=""):
        execute(f"""
          CREATE TABLE boundary_condition{ext}(
            {cls.create_db_add_pamhyr_id()},
            name TEXT NOT NULL,
            type TEXT NOT NULL,
            tab TEXT NOT NULL,
            node INTEGER,
            {Scenario.create_db_add_scenario()},
            {Scenario.create_db_add_scenario_fk()},
            FOREIGN KEY(node) REFERENCES river_node(pamhyr_id),
            PRIMARY KEY(pamhyr_id, scenario)
          )
        """)

        return cls._create_submodel(execute)
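
    # The "tab" column groups conditions by the tab they belong to; the
    # values used elsewhere in this class are the ones listed by
    # compatibility(): "liquid", "solid" and "suspenssion".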

    @classmethod
    def _db_update(cls, execute, version, data=None):
        major, minor, release = version.strip().split(".")

        if major == minor == "0":
            cls._db_update_to_0_1_0(execute, data)

        return cls._update_submodel(execute, version, data)

    @classmethod
    def _db_update_to_0_1_0(cls, execute, data):
        table = "boundary_condition"

        cls.update_db_add_pamhyr_id(execute, table, data)
        Scenario.update_db_add_scenario(execute, table)

        cls._db_create(execute, ext="_tmp")

        # Copy table
        execute(
            f"INSERT INTO {table}_tmp " +
            "(pamhyr_id, name, type, tab, node, scenario) " +
            "SELECT pamhyr_id, name, type, tab, node, scenario " +
            f"FROM {table}"
        )

        execute(f"DROP TABLE {table}")
        execute(f"ALTER TABLE {table}_tmp RENAME TO {table}")

        # "nodes" is expected to map the old node ids to their new
        # pamhyr_id values
        nodes = data["nodes"]
        cls._db_update_to_0_1_0_set_node_pid(execute, nodes)

    @classmethod
    def _db_update_to_0_1_0_set_node_pid(cls, execute, nodes):
        bcs = execute(
            "SELECT pamhyr_id, node FROM boundary_condition"
        )

        for row in bcs:
            pid = row[0]
            node_id = row[1]

            execute(
                "UPDATE boundary_condition " +
                f"SET node = {nodes[node_id]} " +
                f"WHERE pamhyr_id = {pid}"
            )
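
    # The short codes stored in the "type" column map to the concrete
    # boundary condition classes: "PC" (PonctualContribution), "TZ"
    # (TimeOverZ), "TD" (TimeOverDischarge), "ZD" (ZOverDischarge) and
    # "SL" (Solid); anything else falls back to NotDefined.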

    @classmethod
    def _get_ctor_from_type(cls, t):
        from Model.BoundaryCondition.BoundaryConditionTypes import (
            NotDefined, PonctualContribution,
            TimeOverZ, TimeOverDischarge, ZOverDischarge,
            Solid,
        )

        res = NotDefined
        if t == "PC":
            res = PonctualContribution
        elif t == "TZ":
            res = TimeOverZ
        elif t == "TD":
            res = TimeOverDischarge
        elif t == "ZD":
            res = ZOverDischarge
        elif t == "SL":
            res = Solid
        return res

    @classmethod
    def _db_load(cls, execute, data=None):
        new = []
        tab = data["tab"]

        table = execute(
            "SELECT pamhyr_id, name, type, node " +
            "FROM boundary_condition " +
            f"WHERE tab = '{tab}'"
        )

        for row in table:
            t = row[2]
            ctor = cls._get_ctor_from_type(t)
            bc = ctor(
                id=row[0],
                name=row[1],
                status=data['status']
            )

            bc.node = None
            if row[3] != -1:
                bc.node = next(filter(lambda n: n.id == row[3], data["nodes"]))

            data["lc"] = bc
            bc._data = Data._db_load(execute, data=data)

            new.append(bc)

        return new
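
    # Saving is done as delete-then-insert: the condition row and its data
    # rows are removed first, then re-inserted, with each data row saved
    # through Data._db_save using data["lc"] and data["ind"].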

    def _db_save(self, execute, data=None):
        tab = data["tab"]

        execute(
            "DELETE FROM boundary_condition " +
            f"WHERE pamhyr_id = {self._pamhyr_id}"
        )
        execute(
            "DELETE FROM boundary_condition_data " +
            f"WHERE lc = {self._pamhyr_id}"
        )

        node = -1
        if self._node is not None:
            node = self._node.id

        execute(
            "INSERT INTO " +
            "boundary_condition(pamhyr_id, name, type, tab, node) " +
            "VALUES (" +
            f"{self._pamhyr_id}, '{self._db_format(self._name)}', " +
            f"'{self._db_format(self._type)}', '{tab}', {node}" +
            ")"
        )

        data["lc"] = self

        ind = 0
        for d in self._data:
            data["ind"] = ind

            d._db_save(execute, data)

            ind += 1

        return True

    def __len__(self):
        return len(self._data)

    @classmethod
    def compatibility(cls):
        return ["liquid", "solid", "suspenssion"]

    @classmethod
    def time_convert(cls, data):
        if type(data) is str:
            if data.count("-") == 2:
                return date_iso_to_timestamp(data)
            if data.count("/") == 2:
                return date_dmy_to_timestamp(data)
            if data.count(":") == 3:
                return old_pamhyr_date_to_timestamp(data)
            if data.count(":") == 2:
                return old_pamhyr_date_to_timestamp("00:" + data)
            if data.count(".") == 1:
                return round(float(data))

        return int(data)

    @property
    def name(self):
        if self._name == "":
            return f"B{self.id + 1}"

        return self._name

    @name.setter
    def name(self, name):
        self._name = name
        self._status.modified()

    @property
    def bctype(self):
        return self._type

    @property
    def node(self):
        return self._node

    @node.setter
    def node(self, node):
        self._node = node
        self._status.modified()

    def has_node(self):
        return self._node is not None

    @property
    def header(self):
        return self._header.copy()

    @property
    def data(self):
        return self._data.copy()

    def get_type_column(self, column):
        if 0 <= column < 2:
            return self._types[column]
        return None

    @property
    def _default_0(self):
        return self._types[0](0)

    @property
    def _default_1(self):
        return self._types[1](0.0)

    def is_define(self):
        return self._data is not None
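
    # new_from_data builds one (value0, value1) couple from an imported
    # row: when a header is given, imported columns are matched against
    # self._header by label; otherwise the first two fields are used.
    # Decimal commas are accepted ("," is replaced by "."), so, for
    # example (illustrative), data=["3,5", "42"] yields (3.5, 42.0) for a
    # [float, float] condition.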

    def new_from_data(self, header, data):
        new_0 = self._default_0
        new_1 = self._default_1

        if len(header) != 0:
            for i in [0, 1]:
                for j in range(len(header)):
                    if self._header[i] == header[j]:
                        if i == 0:
                            new_0 = self._types[i](data[j].replace(",", "."))
                        else:
                            new_1 = self._types[i](data[j].replace(",", "."))
        else:
            new_0 = self._types[0](data[0].replace(",", "."))
            new_1 = self._types[1](data[1].replace(",", "."))

        return (new_0, new_1)

    def add(self, index: int):
        value = (self._default_0, self._default_1)
        self._data.insert(index, value)
        self._status.modified()
        return value

    def insert(self, index: int, value):
        self._data.insert(index, value)
        self._status.modified()

    def delete_i(self, indexes):
        self._data = list(
            map(
                lambda e: e[1],
                filter(
                    lambda e: e[0] not in indexes,
                    enumerate(self.data)
                )
            )
        )
        self._status.modified()

    def delete(self, els):
        self._data = list(
            filter(
                lambda e: e not in els,
                self.data
            )
        )
        self._status.modified()

    def sort(self, _reverse=False, key=None):
        if key is None:
            self._data.sort(reverse=_reverse)
        else:
            self._data.sort(reverse=_reverse, key=key)
        self._status.modified()

    def index(self, bc):
        return self._data.index(bc)

    def get_i(self, index):
        return self.data[index]

    def get_range(self, _range):
        lst = []
        for r in _range:
            lst.append(r)
        return lst

    def _set_i_c_v(self, index, column, value):
        v = list(self._data[index])
        v[column] = self._types[column](value)
        self._data[index] = tuple(v)
        self._status.modified()

    def set_i_0(self, index: int, value):
        self._set_i_c_v(index, 0, value)

    def set_i_1(self, index: int, value):
        self._set_i_c_v(index, 1, value)
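
    # convert() builds a boundary condition of another type and copies the
    # columns whose header labels match between the two types; rows whose
    # values cannot be converted are skipped and logged.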

    @timer
    def convert(self, cls):
        new = cls(name=self.name, status=self._status)
        new.node = self.node

        for i, _ in enumerate(self.data):
            new.add(i)

        for i in [0, 1]:
            for j in [0, 1]:
                if self._header[i] == new.header[j]:
                    for ind, v in self.data:
                        try:
                            new._set_i_c_v(ind, j, v[i])
                        except Exception as e:
                            logger.info(e)

        return new

    def move_up(self, index):
        if 0 < index < len(self):
            next = index - 1
            d = self._data
            d[index], d[next] = d[next], d[index]
            self._status.modified()

    def move_down(self, index):
        if 0 <= index < len(self) - 1:
            prev = index + 1
            d = self._data
            d[index], d[prev] = d[prev], d[index]
            self._status.modified()