# ============================================================================== #
# read_table_for_open.py - AcouSed #
# Copyright (C) 2024 INRAE #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <https://www.gnu.org/licenses/>. #
# by Brahim MOUDJED #
# ============================================================================== #
# -*- coding: utf-8 -*-
import os
import sys
import sqlite3
import logging
import numpy as np
from datetime import date, time
from PyQt5.QtWidgets import QFileDialog, QApplication, QWidget, QTabWidget
import settings as stg
from settings import BS_raw_data, acoustic_data
from View.acoustic_data_tab import AcousticDataTab
from tools import trace
logger = logging.getLogger("acoused")
class ReadTableForOpen:
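"""Restore a previously saved AcouSed project.

Opens a file dialog for an .acd file (an SQLite database) and loads
the content of every table back into the global `settings` module
(imported as `stg`).
"""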
def __init__(self):
self.opened = False
self.open_file_dialog()
def open_file_dialog(self):
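"""Ask the user for an .acd file and, if one is selected, read it.

Stores the directory and file name in `stg`, moves the working
directory to the file's location and sets `self.opened` to True once
all tables have been read.
"""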
name, _ = QFileDialog.getOpenFileName(
caption="Open Acoused file",
directory="",
filter="Acoused file (*.acd)",
options=QFileDialog.DontUseNativeDialog
)
if name != "":
stg.dirname_open = os.path.dirname(name)
stg.filename_open = os.path.basename(name)
try:
os.chdir(stg.dirname_open)
except OSError as e:
logger.warning(f"chdir: {str(e)}")
self.read_table()
self.opened = True
def execute(self, query):
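"""Run a SQL query on the current cursor and return all rows."""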
return self._cur.execute(query).fetchall()
def read_table(self):
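"""Open the SQLite database and read every table of the project file."""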
stg.read_table_trigger = 1
logger.debug(f"Open '{stg.filename_open}'")
cnx = sqlite3.connect(stg.filename_open)
self._cur = cnx.cursor()
self.read_table_acoustic_file()
self.read_table_measure()
self.read_table_BS_raw_data()
self.read_table_settings()
self.read_table_sediment_file()
self.read_table_table_sediment_data()
self.read_table_table_calibration()
logger.debug(f"Reading '{stg.filename_open}' done")
self._cur.close()
cnx.close()
logger.debug(f"'{stg.filename_open}' closed")
def read_table_acoustic_file(self):
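"""Read the AcousticFile table.

One row per acoustic dataset: raw data file name, ABS name, noise
data path/file, noise method and value, and the pre-processing flag
are appended to the corresponding `stg` lists.
"""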
query0 = '''SELECT acoustic_data FROM AcousticFile'''
data0 = self.execute(query0)
logger.debug(f"data0: {data0}")
stg.acoustic_data = [x[0] for x in data0]
logger.debug(f"stg.acoustic_data: {stg.acoustic_data}")
for k in range(len(stg.acoustic_data)):
query = f'''
SELECT
acoustic_data, acoustic_file, ABS_name, path_BS_noise_data,
filename_BS_noise_data,
noise_method, noise_value, data_preprocessed
FROM AcousticFile
WHERE (acoustic_data = {k})
'''
data = self.execute(query)[0]
print("data acoustic file", data)
stg.filename_BS_raw_data.append(
str(data[1]) + '.aqa'
)
stg.path_BS_raw_data.append("")
stg.ABS_name.append(data[2])
stg.path_BS_noise_data.append(data[3])
stg.filename_BS_noise_data.append(data[4])
stg.noise_method.append(data[5])
stg.noise_value.append(data[6])
stg.data_preprocessed.append(data[7])
logger.debug("data acoustic file:")
logger.debug(f"- {stg.filename_BS_raw_data}")
logger.debug(f"- {stg.ABS_name}")
logger.debug(f"- {stg.path_BS_noise_data}")
logger.debug(f"- {stg.filename_BS_noise_data}")
logger.debug(f"- {stg.noise_method}")
logger.debug(f"- {stg.noise_value}")
logger.debug(f"- {stg.data_preprocessed}")
def read_table_measure(self):
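"""Read the Measure table.

Each acoustic dataset has one row per frequency; date, hour,
frequencies, water attenuation, kt values and the acquisition
parameters (profiles, cells, pulse length, pings, gains) are stored
in `stg`.
"""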
stg.date = [0]*len(stg.acoustic_data)
stg.hour = [0]*len(stg.acoustic_data)
for i in range(len(stg.acoustic_data)):
query = f'''
SELECT
acoustic_data, Date, Hour, frequency,
sound_attenuation, kt_read, kt_corrected, NbProfiles,
NbProfilesPerSeconds, NbCells, CellSize, PulseLength,
NbPingsPerSeconds, NbPingsAveragedPerProfile,
GainRx, GainTx
FROM Measure
WHERE (acoustic_data = {i})
'''
data = self.execute(query)
logger.debug(f"data for {i}: {data}")
stg.date[i] = date.fromisoformat(data[0][1])
stg.hour[i] = time.fromisoformat(data[0][2])
stg.freq.append(
np.array([x[3] for x in data])
)
stg.freq_text.append(
[str(x[3]*1e-6) + 'MHz' for x in data]
)
stg.water_attenuation.append(
[x[4] for x in data]
)
stg.kt_read = [x[5] for x in data]
stg.kt_corrected = [x[6] for x in data]
stg.nb_profiles.append([x[7] for x in data])
stg.nb_profiles_per_sec.append([x[8] for x in data])
stg.nb_cells.append([x[9] for x in data])
stg.cell_size.append([x[10] for x in data])
stg.pulse_length.append([x[11] for x in data])
stg.nb_pings_per_sec.append(
[x[12] for x in data]
)
stg.nb_pings_averaged_per_profile.append(
[x[13] for x in data]
)
stg.gain_rx.append([x[14] for x in data])
stg.gain_tx.append([x[15] for x in data])
logger.debug("measure:")
logger.debug(f"- stg.acoustic_data: {stg.acoustic_data}")
logger.debug(f"- stg.freq {stg.freq}")
logger.debug(f"- stg.water_attenuation {stg.water_attenuation}")
logger.debug(f"- stg.kt_read {stg.kt_read}")
logger.debug(f"- stg.kt_corrected {stg.kt_corrected}")
logger.debug(f"- stg.nb_profiles {stg.nb_profiles}")
logger.debug(f"- stg.nb_profiles_per_sec {stg.nb_profiles_per_sec}")
logger.debug(f"- stg.nb_cells {stg.nb_cells}")
logger.debug(f"- stg.cell_size {stg.cell_size}")
logger.debug(f"- stg.pulse_length {stg.pulse_length}")
logger.debug(f"- stg.nb_pings_per_sec {stg.nb_pings_per_sec}")
logger.debug(f"- stg.nb_pings_averaged_per_profile {stg.nb_pings_averaged_per_profile}")
logger.debug(f"- stg.gain_rx {stg.gain_rx}")
logger.debug(f"- stg.gain_tx {stg.gain_tx}")
logger.debug(f"- {stg.date}")
logger.debug(f"- {stg.hour}")
def read_table_BS_raw_data(self):
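"""Read the BSRawData table for each acoustic dataset.

Array columns are stored as raw float64 bytes; each helper below
decodes one group of columns with `np.frombuffer` and reshapes it
according to the number of frequencies of the dataset.
"""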
logger.debug(f"len stg.acoustic_data: {len(stg.acoustic_data)}")
for i in range(len(stg.acoustic_data)):
query = lambda values: f'''
SELECT
{", ".join(values)}
FROM BSRawData
WHERE (acoustic_data = {i})
'''
self.read_table_BS_raw_data_raw(query, i)
self.read_table_BS_raw_data_cross_section(query, i)
self.read_table_BS_raw_data_bed(query, i)
self.read_table_BS_raw_data_noise(query, i)
self.read_table_BS_raw_data_SNR(query, i)
self.read_table_BS_raw_data_rest(query, i)
self.read_table_BS_raw_data_mean(query, i)
def read_table_BS_raw_data_raw(self, query, i):
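"""Decode the raw backscatter arrays (time, depth, BS) and their
reshaped counterparts."""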
np_f64_parse = lambda d: np.frombuffer(d, dtype=np.float64)
data = self.execute(
query(
[
"time", "depth",
"BS_raw_data",
"time_reshape", "depth_reshape",
"BS_raw_data_reshape",
]
)
)[0]
it = iter(data)
time = next(it)
depth = next(it)
BS_raw_data = next(it)
time_reshape = next(it)
depth_reshape = next(it)
BS_raw_data_reshape = next(it)
stg.time.append(
np_f64_parse(time).reshape((stg.freq[i].shape[0], -1))
)
stg.depth.append(np_f64_parse(depth).reshape(
(stg.freq[i].shape[0], -1)
))
stg.BS_raw_data.append(
np_f64_parse(BS_raw_data).reshape(
(
stg.freq[i].shape[0],
stg.depth[i].shape[1],
stg.time[i].shape[1]
)
)
)
stg.time_reshape.append(
np_f64_parse(time_reshape).reshape(
(-1, stg.freq[i].shape[0])
)
)
stg.depth_reshape.append(
np_f64_parse(depth_reshape).reshape(
(-1, stg.freq[i].shape[0])
)
)
stg.BS_raw_data_reshape.append(
np_f64_parse(BS_raw_data_reshape).reshape(
(-1, stg.freq[i].shape[0])
)
)
def read_table_BS_raw_data_cross_section(self, query, i):
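"""Decode the cross-section arrays; empty blobs are restored as
empty arrays."""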
np_f64_parse = lambda d: np.frombuffer(d, dtype=np.float64)
data = self.execute(
query(
[
"time_cross_section",
"depth_cross_section",
"BS_cross_section",
]
)
)[0]
it = iter(data)
time = next(it)
depth = next(it)
BS = np_f64_parse(next(it))
if len(BS) == 0:
stg.time_cross_section.append(np.array([]))
stg.depth_cross_section.append(np.array([]))
stg.BS_cross_section.append(np.array([]))
else:
stg.time_cross_section.append(
np_f64_parse(time).reshape(
(stg.freq[i].shape[0], -1)
)
)
stg.depth_cross_section.append(
np_f64_parse(depth).reshape(
(stg.freq[i].shape[0], -1)
)
)
stg.BS_cross_section.append(
BS.reshape(
(
stg.freq[i].shape[0],
stg.depth_cross_section[i].shape[1],
stg.time_cross_section[i].shape[1]
)
)
)
def read_table_BS_raw_data_bed(self, query, i):
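"""Decode the stream-bed backscatter and the bottom arrays
(depth_bottom, val_bottom, ind_bottom)."""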
np_f64_parse = lambda d: np.frombuffer(d, dtype=np.float64)
data = self.execute(
query(
[
"BS_stream_bed",
"depth_bottom", "val_bottom", "ind_bottom",
]
)
)[0]
it = iter(data)
BS = np_f64_parse(next(it))
depth = np_f64_parse(next(it))
val = np_f64_parse(next(it))
ind = np_f64_parse(next(it))
if len(BS) == 0:
stg.BS_stream_bed.append(np.array([]))
else:
stg.BS_stream_bed.append(
BS.reshape(
(
stg.freq[i].shape[0],
stg.depth_cross_section[i].shape[1],
stg.time_cross_section[i].shape[1]
)
)
)
if len(depth) == 0:
stg.depth_bottom.append(np.array([]))
stg.val_bottom.append([])
stg.ind_bottom.append([])
else:
stg.depth_bottom.append(depth)
stg.val_bottom.append(val.tolist())
stg.ind_bottom.append(ind.tolist())
def read_table_BS_raw_data_noise(self, query, i):
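"""Decode the noise measurement arrays (time_noise, depth_noise,
BS_noise_raw_data); the averaged noise is reset to an empty array."""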
np_f64_parse = lambda d: np.frombuffer(d, dtype=np.float64)
data = self.execute(
query(
[
"time_noise", "depth_noise", "BS_noise_raw_data",
]
)
)[0]
it = iter(data)
time = next(it)
depth = next(it)
BS = np_f64_parse(next(it))
if len(BS) == 0:
stg.time_noise.append(np.array([]))
stg.depth_noise.append(np.array([]))
stg.BS_noise_raw_data.append(np.array([]))
stg.BS_noise_averaged_data.append(np.array([]))
else:
stg.time_noise.append(
np_f64_parse(time).reshape(
(stg.freq[i].shape[0], -1)
)
)
stg.depth_noise.append(
np_f64_parse(depth).reshape(
(stg.freq[i].shape[0], -1)
)
)
stg.BS_noise_raw_data.append(
BS.reshape(
(
stg.freq[i].shape[0],
stg.depth_noise[i].shape[1],
stg.time_noise[i].shape[1]
)
)
)
stg.BS_noise_averaged_data.append(np.array([]))
def read_table_BS_raw_data_SNR(self, query, i):
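"""Decode the SNR arrays; each one is reshaped to the shape of the
matching backscatter array."""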
np_f64_parse = lambda d: np.frombuffer(d, dtype=np.float64)
data = self.execute(
query(
[
"SNR_raw_data", "SNR_cross_section", "SNR_stream_bed",
]
)
)[0]
it = iter(data)
SNR_vars = [
(stg.SNR_raw_data, stg.BS_raw_data),
(stg.SNR_cross_section, stg.BS_cross_section),
(stg.SNR_stream_bed, stg.BS_stream_bed),
]
for dest, resh in SNR_vars:
SNR = np_f64_parse(next(it))
if len(SNR) == 0:
dest.append(np.array([]))
else:
dest.append(SNR.reshape(resh[i].shape))
def read_table_BS_raw_data_rest(self, query, i):
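"""Decode the pre-processed backscatter arrays (SNR-filtered and
averaged) for the raw data, the cross-section and the stream bed."""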
np_f64_parse = lambda d: np.frombuffer(d, dtype=np.float64)
data = self.execute(
query(
[
"BS_raw_data_pre_process_SNR",
"BS_raw_data_pre_process_average",
"BS_cross_section_pre_process_SNR",
"BS_cross_section_pre_process_average",
"BS_stream_bed_pre_process_SNR",
"BS_stream_bed_pre_process_average",
]
)
)[0]
BS_vars = [
(stg.BS_raw_data_pre_process_SNR, stg.BS_raw_data),
(stg.BS_raw_data_pre_process_average, stg.BS_raw_data),
(stg.BS_cross_section_pre_process_SNR, stg.BS_cross_section),
(stg.BS_cross_section_pre_process_average, stg.BS_cross_section),
(stg.BS_stream_bed_pre_process_SNR, stg.BS_stream_bed),
(stg.BS_stream_bed_pre_process_average, stg.BS_stream_bed),
]
it = iter(data)
for dest, resh in BS_vars:
BS = np_f64_parse(next(it))
if len(BS) == 0:
dest.append(np.array([]))
else:
dest.append(BS.reshape(resh[i].shape))
def read_table_BS_raw_data_mean(self, query, i):
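"""Decode the mean backscatter (BS_mean), reshaped to one profile per
frequency."""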
np_f64_parse = lambda d: np.frombuffer(d, dtype=np.float64)
data = self.execute(
query(["BS_mean"])
)[0]
BS = np_f64_parse(data[0])
if len(BS) == 0:
stg.BS_mean.append(np.array([]))
else:
stg.BS_mean.append(
BS.reshape(
(stg.freq[i].shape[0], -1)
)
)
def read_table_settings(self):
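"""Read the Settings table for each acoustic dataset: temperature,
distance to the free surface, time and range limits, bottom-detection
frequency, SNR filter value and number of cells used to average the
BS signal."""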
for s in range(len(stg.acoustic_data)):
query3 = f'''
SELECT
acoustic_data, temperature, distance_to_free_surface,
tmin_index, tmin_value, tmax_index, tmax_value,
rmin_index, rmin_value, rmax_index, rmax_value,
freq_bottom_detection_index, freq_bottom_detection_value,
SNR_filter_value, Nb_cells_to_average_BS_signal
FROM Settings
WHERE (acoustic_data = {s})
'''
data = self.execute(query3)
x = data[0]
stg.temperature = x[1]
stg.distance_from_ABS_to_free_surface.append(x[2])
stg.tmin.append((x[3], x[4]))
stg.tmax.append((x[5], x[6]))
stg.rmin.append((x[7], x[8]))
stg.rmax.append((x[9], x[10]))
stg.freq_bottom_detection.append((x[11], x[12]))
stg.SNR_filter_value.append(x[13])
stg.Nb_cells_to_average_BS_signal.append(x[14])
logger.debug(f"stg.temperature: {stg.temperature}")
logger.debug(f"stg.tmin: {stg.tmin}")
logger.debug(f"stg.tmin: {stg.tmax}")
logger.debug(f"stg.SNR_filter_value: {stg.SNR_filter_value}")
def read_table_sediment_file(self):
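"""Read the SedimentsFile table: paths, file names, grain radii and
column labels of the fine and sand sediment files."""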
query = '''
SELECT
path_fine, filename_fine, radius_grain_fine,
path_sand, filename_sand, radius_grain_sand,
time_column_label, distance_from_bank_column_label,
depth_column_label,
Ctot_fine_column_label, D50_fine_column_label,
Ctot_sand_column_label, D50_sand_column_label
FROM SedimentsFile
'''
data = self.execute(query)[0]
stg.path_fine = data[0]
stg.filename_fine = data[1]
stg.radius_grain_fine = np.array(
np.frombuffer(data[2], dtype=np.float64)
)
stg.path_sand = data[3]
stg.filename_sand = data[4]
stg.radius_grain_sand = np.array(
np.frombuffer(data[5], dtype=np.float64)
)
stg.columns_fine = (
[data[6], data[7], data[8], data[9], data[10]]
+ list(stg.radius_grain_fine)
)
stg.columns_sand = (
[data[6], data[7], data[8], data[11], data[12]]
+ list(stg.radius_grain_sand)
)
@trace
def read_table_table_sediment_data(self):
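"""Read the SedimentsData table: one row per sample, with position,
depth, time, total concentration, D50 and volume fraction spectra
for fine and sand sediments."""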
np_f64_parse = lambda d: np.frombuffer(d, dtype=np.float64)
query = '''
SELECT
sample_fine_name, sample_fine_index, distance_from_bank_fine,
depth_fine, time_fine, Ctot_fine, Ctot_fine_per_cent, D50_fine,
frac_vol_fine, frac_vol_fine_cumul,
sample_sand_name, sample_sand_index, distance_from_bank_sand,
depth_sand, time_sand, Ctot_sand, Ctot_sand_per_cent, D50_sand,
frac_vol_sand, frac_vol_sand_cumul
FROM SedimentsData
'''
data = self.execute(query)
stg.frac_vol_fine = []
stg.frac_vol_fine_cumul = []
stg.frac_vol_sand = []
stg.frac_vol_sand_cumul = []
for d in data:
stg.sample_fine.append((d[0], d[1]))
stg.distance_from_bank_fine.append(d[2])
stg.depth_fine.append(d[3])
stg.time_fine.append(d[4])
stg.Ctot_fine.append(d[5])
stg.Ctot_fine_per_cent.append(d[6])
stg.D50_fine.append(d[7])
stg.frac_vol_fine.append(
np_f64_parse(d[8])
)
stg.frac_vol_fine_cumul.append(
np_f64_parse(d[9])
)
stg.sample_sand.append((d[10], d[11]))
stg.distance_from_bank_sand.append(d[12])
stg.depth_sand.append(d[13])
stg.time_sand.append(d[14])
stg.Ctot_sand.append(d[15])
stg.Ctot_sand_per_cent.append(d[16])
stg.D50_sand.append(d[17])
stg.frac_vol_sand.append(
np_f64_parse(d[18])
)
stg.frac_vol_sand_cumul.append(
np_f64_parse(d[19])
)
stg.frac_vol_fine = np.array(stg.frac_vol_fine)
stg.frac_vol_fine_cumul = np.array(stg.frac_vol_fine_cumul)
stg.frac_vol_sand = np.array(stg.frac_vol_sand)
stg.frac_vol_sand_cumul = np.array(stg.frac_vol_sand_cumul)
logger.debug(f"fine: {stg.Ctot_fine}, sand: {stg.sample_sand}")
@trace
def read_table_table_calibration(self):
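"""Read the Calibration table and decode its float64 blobs
(range_lin_interp, M_profile_fine, ks, sv, X_exponent, alpha_s,
zeta, FCB, depth_real, lin_reg)."""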
np_f64_parse = lambda d: np.frombuffer(d, dtype=np.float64)
query = '''
SELECT
path_calibration_file,
filename_calibration_file,
range_lin_interp,
M_profile_fine,
ks, sv,
X_exponent,
alpha_s, zeta, FCB,
depth_real, lin_reg
FROM Calibration
'''
data = self.execute(query)
it = iter(data[0])
stg.path_calibration_file = next(it)
stg.filename_calibration_file = next(it)
stg.range_lin_interp = np_f64_parse(next(it))
stg.M_profile_fine = np_f64_parse(next(it))
stg.ks = np_f64_parse(next(it))
stg.sv = np_f64_parse(next(it))
stg.X_exponent = np_f64_parse(next(it))
stg.alpha_s = np_f64_parse(next(it))
stg.zeta = np_f64_parse(next(it))
stg.FCB = np_f64_parse(next(it))
stg.depth_real = np_f64_parse(next(it))
stg.lin_reg = np_f64_parse(next(it))