Merge pull request #77 from introlab/dev

Main pull for 1.1.0 release

SBriere authored Dec 4, 2023
2 parents c2f9bf8 + ea2f7ea commit 44a6c92
Showing 32 changed files with 1,390 additions and 429 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -18,3 +18,4 @@ python-3.10
/python/OpenIMU.spec
.DS_Store
*.dmg
python-3.11
13 changes: 2 additions & 11 deletions python/CMakeLists.txt
@@ -1,5 +1,5 @@
#TODO MAKE THIS GENERIC
set (PYTHON_VERSION 3.10)
set (PYTHON_VERSION 3.11)
add_subdirectory(env)

set(PYTHON_ENV_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/env/python-${PYTHON_VERSION})
@@ -112,17 +112,7 @@ message(STATUS ${python_files})

# PyInstaller
set (installer_args
#--hidden-import scipy._lib.messagestream
#--hidden-import sqlalchemy.ext.baked
--hidden-import logging.config
# --exclude-module PySide6.QtQml
# --exclude-module PySide6.QtQmlModels
# --exclude-module PySide6.QtQuick3DUtils
# --exclude-module PySide6.QtSerialPort
# --exclude-module PySide6.QtVirtualKeyboard
# --exclude-module PySide6.QtPositioning
# --exclude-module PySide6.QtWebChannel
# --exclude-module PySide6.QtWebEngineWidgets
--clean
-y
--windowed # If windowed, no console is displayed
@@ -138,6 +128,7 @@ if(WIN32)
#--hidden-import pkg_resources.py2_warn
--paths ${PYTHON_ENV_DIRECTORY}/Lib/site-packages/scipy/.libs # PyInstaller doesn't seem to find all required DLLs for SciPy...
--paths ${PYTHON_ENV_DIRECTORY}/Library/bin # PyInstaller doesn't seem to find sqlite3.dll...
--splash ../setup/splash.png # For now, only works on Windows and Linux
)

# Build this target to make a package
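
The build argument changes pair with the runtime changes in OpenIMU.py below: the commented-out hidden-import and exclude-module lines are dropped, and a --splash image is added (per the comment, PyInstaller splash screens only work on Windows and Linux). For illustration, roughly the same argument set driven from Python directly; a sketch only, since the real build goes through the CMake package target:

    import PyInstaller.__main__

    # Mirrors the installer_args list assembled above (plus the WIN32-only
    # --paths entries when building on Windows).
    PyInstaller.__main__.run([
        'OpenIMU.py',
        '--hidden-import', 'logging.config',
        '--clean',
        '-y',
        '--windowed',                       # no console window
        '--splash', '../setup/splash.png',  # Windows and Linux only
    ])
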
8 changes: 4 additions & 4 deletions python/OpenIMU.py
@@ -1,5 +1,6 @@
import sys
import platform
import faulthandler

from PySide6.QtWidgets import QApplication, QMainWindow
from PySide6.QtCore import Qt
@@ -21,17 +22,16 @@ def except_hook(cls, exception, traceback):
from libopenimu.qt.OpenIMUApp import OpenIMUApp
from libopenimu.qt.MainWindow import MainWindow

if not (getattr(sys, 'frozen', False) and hasattr(sys, '_MEIPASS')):
faulthandler.enable() # start @ the beginning

try:
# Close the splash screen, if running from a frozen package with pyinstaller
import pyi_splash
pyi_splash.close()
except ModuleNotFoundError:
pass

# Support high DPI scaling
# Must be done before starting the app
QApplication.setAttribute(Qt.AA_EnableHighDpiScaling)

# Needed for the webengine to work properly
# Initially useful for MacOS, but also seem to improve speed a little in Windows...

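
Two startup changes here: faulthandler is now enabled only when not running as a frozen bundle, and the Qt.AA_EnableHighDpiScaling block is gone, consistent with Qt 6, where high-DPI scaling is always on and that attribute is deprecated. Consolidated, with the reasoning spelled out in comments (a sketch of the pattern, not new code for the app):

    import sys
    import faulthandler

    # Crash tracebacks go to stderr; a --windowed PyInstaller build has no
    # usable stderr, so faulthandler is only enabled when running from source.
    # The frozen/_MEIPASS pair is the standard way to detect a PyInstaller bundle.
    if not (getattr(sys, 'frozen', False) and hasattr(sys, '_MEIPASS')):
        faulthandler.enable()

    # pyi_splash exists only inside a bundle built with --splash; from source
    # the import fails, and that failure is expected and ignored.
    try:
        import pyi_splash
        pyi_splash.close()
    except ModuleNotFoundError:
        pass
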
2 changes: 1 addition & 1 deletion python/alembic/versions/6b75a01d10b5_0_5_upgrade.py
@@ -20,4 +20,4 @@ def upgrade():


def downgrade():
op.remove_column('tabSensors', 'hw_id')
op.drop_column('tabSensors', 'hw_id')
23 changes: 23 additions & 0 deletions python/alembic/versions/976c9bb61bcc_added_settings_for_sensors.py
@@ -0,0 +1,23 @@
"""added settings for sensors
Revision ID: 976c9bb61bcc
Revises: 9f2446706f91
Create Date: 2023-11-30 09:18:13.301296
"""
from alembic import op
from sqlalchemy import Column, String

# revision identifiers, used by Alembic.
revision = '976c9bb61bcc'
down_revision = '9f2446706f91'
branch_labels = None
depends_on = None


def upgrade():
op.add_column('tabSensors', Column('settings', String()))


def downgrade():
op.drop_column('tabSensors', 'settings')
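
The downgrade relies on ALTER TABLE ... DROP COLUMN, which SQLite only supports natively from version 3.35. If older SQLite builds ever need to run this migration, Alembic's batch mode is the usual workaround; a sketch of that variant (not what this migration does):

    def downgrade():
        # batch_alter_table recreates the table without the column and swaps
        # it in, covering SQLite versions that lack DROP COLUMN
        with op.batch_alter_table('tabSensors') as batch_op:
            batch_op.drop_column('settings')
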
@@ -21,4 +21,4 @@ def upgrade():


def downgrade():
op.remove_column('tabProcessedData', 'params')
op.drop_column('tabProcessedData', 'params')
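
This fix and the identical one in 6b75a01d10b5 above correct the same bug: Alembic's operations API has no op.remove_column, so both downgrade paths would have raised AttributeError when run. op.drop_column is the documented counterpart to op.add_column, as in this minimal pair (table and column names hypothetical):

    from alembic import op
    from sqlalchemy import Column, String

    def upgrade():
        op.add_column('tabExample', Column('notes', String()))

    def downgrade():
        op.drop_column('tabExample', 'notes')
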
14 changes: 7 additions & 7 deletions python/env/requirements.txt
@@ -1,8 +1,8 @@
pypiwin32==223; sys_platform == 'win32'
PySide6==6.3.1
cython==0.29.30
numpy==1.23.1
scipy==1.8.1
sqlalchemy==1.4.39
alembic==1.8.1
pyinstaller==5.2
PySide6==6.6.0
cython==3.0.5
numpy==1.26.2
scipy==1.11.4
sqlalchemy==2.0.23
alembic==1.12.1
pyinstaller==5.13.2
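
All pins move together with the Python 3.10 to 3.11 bump elsewhere in this PR; pyinstaller 5.2 in particular predates Python 3.11 support, and sqlalchemy 1.4 to 2.0 is a breaking upgrade whose fallout shows up in the DBManager changes below. A quick way to confirm the active environment matches the pins (a convenience sketch, not part of the repository):

    import importlib.metadata

    # Prints the installed version of each pinned package for comparison
    # against requirements.txt.
    for package in ('PySide6', 'numpy', 'scipy', 'SQLAlchemy', 'alembic', 'pyinstaller'):
        print(package, importlib.metadata.version(package))
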
107 changes: 97 additions & 10 deletions python/libopenimu/db/DBExporter.py
@@ -17,6 +17,7 @@
import pickle
import numbers
import unicodedata
import json


class ExporterTypes:
@@ -39,7 +40,6 @@ def get_icon_for_type(exporter_type_id: int) -> QIcon:


class DBExporter(QObject):

exportProcessed = Signal()

def __init__(self, db_manager: DBManager, export_path: str, export_format: int, parent=None):
@@ -88,11 +88,11 @@ def get_base_path_for_participant(self, participant: Participant) -> str:

def get_base_path_for_recordset(self, recordset: Recordset) -> str:
return self.get_base_path_for_participant(recordset.participant) + os.sep + 'Recordsets' + os.sep + \
DBExporter.clean_string(recordset.name)
DBExporter.clean_string(recordset.name)

def get_base_path_for_processed_data(self, data: ProcessedData) -> str:
return self.get_base_path_for_participant(data.processed_data_ref[0].recordset.participant) + os.sep + \
'Processed' + os.sep + DBExporter.clean_string(data.name)
'Processed' + os.sep + DBExporter.clean_string(data.name)

def export_group(self, id_group: int):
group_name = self.tr('GROUP_None')
@@ -142,7 +142,8 @@ def export_recordset(self, id_recordset: int):

# Export infos in file
infos = {'id_recordset': recordset.id_recordset, 'name': recordset.name,
'start_time': str(recordset.start_timestamp), 'start_timestamp': recordset.start_timestamp.timestamp(),
'start_time': str(recordset.start_timestamp),
'start_timestamp': recordset.start_timestamp.timestamp(),
'end_time': str(recordset.end_timestamp), 'end_timestamp': recordset.end_timestamp.timestamp()}
if self.exportFormat == ExporterTypes.CSV:
DBExporter.dict_to_csv(rec_dir + os.sep + 'infos_Recordset.csv', infos)
@@ -159,6 +160,10 @@
self.export_sensor_data_gps(sensor, all_data, rec_dir)
elif sensor.id_sensor_type == SensorType.BEACON:
self.export_sensor_data_beacons(sensor, all_data, rec_dir)
elif sensor.id_sensor_type == SensorType.QUESTIONS:
self.export_sensor_data_questions(sensor, all_data, rec_dir)
elif sensor.id_sensor_type == SensorType.BIOMETRICS:
self.export_sensor_data_health(sensor, all_data, rec_dir)
else:
self.export_sensor_data(sensor, all_data, rec_dir)

@@ -230,7 +235,7 @@ def export_sensor_data(self, sensor: Sensor, sensors_data: list[SensorData], base_dir: str):
# Data files
filename = base_dir + os.sep + base_filename
if self.exportFormat == ExporterTypes.CSV:
filename = filename + '.CSV'
filename = filename + '.csv'
elif self.exportFormat == ExporterTypes.MATLAB:
filename = filename + '.mat'

@@ -275,7 +280,7 @@ def export_sensor_data_gps(self, sensor: Sensor, sensors_data: list[SensorData], base_dir: str):

filename = base_dir + os.sep + base_filename
if self.exportFormat == ExporterTypes.CSV:
filename = filename + '.CSV'
filename = filename + '.csv'
elif self.exportFormat == ExporterTypes.MATLAB:
filename = filename + '.mat'

@@ -308,6 +313,87 @@
'labels': header.split('\t')}},
do_compression=True, long_field_names=True)

def export_sensor_data_questions(self, sensor: Sensor, sensors_data: list[SensorData], base_dir: str):
base_filename = sensor.name.replace(' ', '_')

# Write sensor infos file
self.export_sensor_infos(sensor, base_dir, base_filename)

# Data files
filename = base_dir + os.sep + base_filename
if self.exportFormat == ExporterTypes.CSV:
filename = filename + '.csv'
elif self.exportFormat == ExporterTypes.MATLAB:
filename = filename + '.mat'

header = ['Label', 'Shown', 'Answered', 'Index', 'Value']

# Read questions data
answers_obj = np.zeros(shape=(len(sensors_data), 5), dtype='object')
index = 0
for sensor_data in sensors_data:
data = json.loads(sensor_data.data)
data['start_timestamp'] = sensor_data.timestamps.start_timestamp.timestamp()
data['end_timestamp'] = sensor_data.timestamps.end_timestamp.timestamp()
answers_obj[index][0] = data['question_id']
answers_obj[index][1] = data['start_timestamp']
answers_obj[index][2] = data['end_timestamp']
answers_obj[index][3] = ', '.join([str(item) for item in data['answer_index']])
answers_obj[index][4] = ', '.join(data['answer_text'])
index += 1

# Write values
if self.exportFormat == ExporterTypes.CSV:
np.savetxt(filename, answers_obj, delimiter="\t", header='\t'.join(header), fmt='%s')

elif self.exportFormat == ExporterTypes.MATLAB:
sio.savemat(filename, {sensor.name.replace(' ', '_'): {'values': answers_obj, 'labels': header}},
do_compression=True, long_field_names=True)

def export_sensor_data_health(self, sensor: Sensor, sensors_data: list[SensorData], base_dir: str):
result = {}
for sensor_data in sensors_data:
if not result.__contains__(sensor_data.channel.id_channel):
result[sensor_data.channel.id_channel] = []

series = sensor_data.to_time_series()
result[sensor_data.channel.id_channel].append(series)

base_filename = sensor.location + '_' + sensor.name.replace(' ', '_')

# Write sensor infos file
self.export_sensor_infos(sensor, base_dir, base_filename)

# Data files
header = ['Time', 'Value']
# One file per channel (health value)
for id_channel in result.keys():
channel = self.dbMan.get_channel(id_channel)
value_list = []
if channel:
label = channel.label.replace(' ', '_').replace('(', '').replace(')', '')
time = []
values = []

for list_item in result[id_channel]:
time.append(list_item['time'])
values.append(list_item['values'])

all_time = np.concatenate(time)
all_values = np.concatenate(values)
value_list.append(all_time)
value_list.append(all_values)

my_array = np.array(value_list)
filename = base_dir + os.sep + base_filename + '_' + label
if self.exportFormat == ExporterTypes.CSV:
np.savetxt(filename + '.csv', my_array.transpose(), delimiter="\t", header='\t'.join(header),
fmt='%.4f')
elif self.exportFormat == ExporterTypes.MATLAB:
sio.savemat(filename + '.mat', {sensor.name.replace(' ', '') + '_' + label:
{'values': my_array.transpose(), 'labels': header}},
do_compression=True, long_field_names=True)

def export_sensor_data_beacons(self, sensor: Sensor, sensors_data: list[SensorData], base_dir: str):
result = {}
for sensor_data in sensors_data:
@@ -343,16 +429,16 @@ def export_sensor_data_beacons(self, sensor: Sensor, sensors_data: list[SensorData], base_dir: str):
for data in result[id_channel]:
if beacons[beacon_id] is None:
rows = len(data['time'])
beacons[beacon_id] = np.array([data['time'], [None]*rows, [None]*rows]).transpose()
beacons[beacon_id] = np.array([data['time'], [None] * rows, [None] * rows]).transpose()

if channel.label.endswith('RSSI'):
for index, t in enumerate(data['time']):
t_index = np.where(beacons[beacon_id][:, 0] == t)
if t_index: # Time already there?
if t_index: # Time already there?
beacons[beacon_id][t_index, 1] = data['values'][index]
else:
beacons[beacon_id] = beacons[beacon_id] = np.vstack((beacons[beacon_id],
[t, data['values'][index], None]))
[t, data['values'][index], None]))

if channel.label.endswith('Power'):
for index, t in enumerate(data['time']):
@@ -379,7 +465,8 @@ def export_sensor_data_beacons(self, sensor: Sensor, sensors_data: list[SensorData], base_dir: str):
def export_sensor_infos(self, sensor: Sensor, base_dir: str, base_filename: str):
infos = {'id_sensor': sensor.id_sensor, 'name': sensor.name, 'id_sensor_type': sensor.id_sensor_type,
'location': sensor.location, 'data_rate': sensor.data_rate, 'sampling_rate': sensor.sampling_rate,
'channels': len(sensor.channels), 'hardware_id': sensor.hw_id, 'hardware_name': sensor.hw_name}
'channels': len(sensor.channels), 'hardware_id': sensor.hw_id, 'hardware_name': sensor.hw_name,
'settings': sensor.settings}
if self.exportFormat == ExporterTypes.CSV:
DBExporter.dict_to_csv(base_dir + os.sep + 'infos_' + base_filename + '.csv', infos)
if self.exportFormat == ExporterTypes.MATLAB:
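
The two new exporters follow the existing per-sensor-type pattern: export_sensor_data_questions flattens JSON answer payloads into a five-column object array, and export_sensor_data_health writes one Time/Value file per channel. The payload shape the questions exporter expects, reconstructed from the code above with hypothetical sample values:

    import json
    import numpy as np

    # One QUESTIONS payload as implied by export_sensor_data_questions; real
    # payloads come from SensorData.data and may carry additional keys.
    payload = json.dumps({'question_id': 'Q12',
                          'answer_index': [0, 2],
                          'answer_text': ['Never', 'Often']})

    data = json.loads(payload)
    header = ['Label', 'Shown', 'Answered', 'Index', 'Value']
    row = np.zeros(shape=(1, 5), dtype='object')  # object dtype mixes strings and floats
    row[0][0] = data['question_id']
    row[0][1] = 1701334693.0  # start_timestamp (hypothetical)
    row[0][2] = 1701334753.0  # end_timestamp (hypothetical)
    row[0][3] = ', '.join(str(i) for i in data['answer_index'])
    row[0][4] = ', '.join(data['answer_text'])
    np.savetxt('Questions.csv', row, delimiter='\t', header='\t'.join(header), fmt='%s')
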
27 changes: 15 additions & 12 deletions python/libopenimu/db/DBManager.py
@@ -10,15 +10,13 @@
from sqlalchemy.orm import sessionmaker
# noinspection PyProtectedMember
from sqlalchemy.engine import Engine
from sqlalchemy import event
from sqlalchemy import event, text

import os
import datetime
import numpy as np
import pickle
import sys
import warnings
import scipy.io as sio

from PySide6.QtCore import QObject, Signal

@@ -155,7 +153,8 @@ def session_add(self, store):

def compact(self):
self.clean_db()
self.engine.execute("VACUUM")
with self.engine.connect() as connection:
connection.execute(text("VACUUM"))

# GROUPS
def update_group(self, group):
@@ -255,18 +254,20 @@ def delete_participant(self, part):
# self.engine.execute("VACUUM")

#
def add_sensor(self, _id_sensor_type, _name, _hw_name, _location, _sampling_rate, _data_rate):
def add_sensor(self, _id_sensor_type, _name, _hw_name, _location, _sampling_rate, _data_rate,
_settings: str | None = None):
# Check if that sensor is already present in the database
query = self.session.query(Sensor).filter((Sensor.id_sensor_type == _id_sensor_type) &
(Sensor.location == _location) &
(Sensor.name == _name) &
(Sensor.hw_name == _hw_name) &
(Sensor.sampling_rate == _sampling_rate) &
(Sensor.data_rate) == _data_rate)
(Sensor.data_rate == _data_rate) &
(Sensor.settings == _settings))

if query.first():
# print("Sensor " + _name + " already present in DB!")
return query.first();
return query.first()

# Create object
sensor = Sensor(
@@ -275,7 +276,8 @@ def add_sensor(self, _id_sensor_type, _name, _hw_name, _location, _sampling_rate, _data_rate):
hw_name=_hw_name,
location=_location,
sampling_rate=_sampling_rate,
data_rate=_data_rate)
data_rate=_data_rate,
settings=_settings)
self.session.add(sensor)
self.commit()
return sensor
@@ -504,10 +506,11 @@ def get_all_sensor_data(self, **kwargs):
result = query.all()

# Convert to the right format
for sensor_data in result:
# print('data len:', len(sensor_data.data))
sensor_data.data = DataFormat.from_bytes(sensor_data.data, sensor_data.channel.id_data_format)

with self.session.no_autoflush:
for sensor_data in result:
# print('data len:', len(sensor_data.data))
sensor_data.data = DataFormat.from_bytes(sensor_data.data, sensor_data.channel.id_data_format)
self.session.rollback()
return result

def get_sensor_times(self, sensor: Sensor, recordset: Recordset):
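
Two behavioral notes on this file. First, SQLAlchemy 2.0 removed Engine.execute(), so compact() now issues VACUUM through an explicit connection and a text() construct; the minimal 2.0-style pattern is (database path hypothetical):

    from sqlalchemy import create_engine, text

    # SQLAlchemy 2.0 style: raw SQL goes through an explicit connection
    # and a text() construct, as in DBManager.compact() above.
    engine = create_engine('sqlite:///openimu_example.db')
    with engine.connect() as connection:
        connection.execute(text('VACUUM'))

Second, get_all_sensor_data() overwrites the raw bytes on live ORM objects with decoded arrays; with autoflush active, those in-memory edits could be flushed back to the database on a later query, so the conversion loop now runs inside session.no_autoflush and the session is rolled back afterwards to discard the pending changes.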