From f0ee488c785755630e726ba10a6621143cfcaa2a Mon Sep 17 00:00:00 2001
From: DJ2LS <75909252+DJ2LS@users.noreply.github.com>
Date: Mon, 24 Feb 2025 18:13:21 +0100
Subject: [PATCH 01/14] test with com0com
---
freedata_server/list_ports_winreg.py | 150 +++++++++++++++++++++++++++
freedata_server/serial_ports.py | 10 +-
2 files changed, 159 insertions(+), 1 deletion(-)
create mode 100644 freedata_server/list_ports_winreg.py
diff --git a/freedata_server/list_ports_winreg.py b/freedata_server/list_ports_winreg.py
new file mode 100644
index 00000000..afb78730
--- /dev/null
+++ b/freedata_server/list_ports_winreg.py
@@ -0,0 +1,150 @@
+#! python
+#
+# Enumerate serial ports on Windows, including a human-readable description
+# and hardware information, using winreg.
+#
+# Using winreg helps find virtual COM ports (e.g. com0com pairs).
+
+try:
+ # Python 3.X
+ import winreg
+except ImportError:
+ # Python 2.7 compatibility
+ try:
+ import _winreg as winreg
+ except ImportError:
+ winreg = None
+
+from serial.tools import list_ports_common
+from serial.tools import list_ports_windows
+
+SERIAL_REGISTRY_PATH = 'HARDWARE\\DEVICEMAP\\SERIALCOMM'
+
+
+def regval_to_listport(winport):
+ """Convert a windows port from registry key to pyserial's ListPortInfo.
+
+ Args:
+ winport (tuple): Windows registry value (description, device, value).
+
+ Returns:
+ listport (ListPortInfo): comport device details.
+ """
+ # Create the ListPortInfo
+ description, device, _ = winport
+ listport = list_ports_common.ListPortInfo(device)
+
+ # Format the description like other ListPortInfo
+ description = description.replace('\\Device\\', '')
+ listport.description = "{} ({})".format(description, device)
+
+ return listport
+
+
+# end regval_to_listport
+
+
+def winreg_comports():
+ """Return windows comports found in the registry.
+
+ See Also:
+ list_ports_winreg.comports(), list_ports_winreg.comports_list(),
+ list_ports_windows.comports()
+
+ Note:
+ This should include virtual comports, and it is significantly faster
+ than list_ports_windows.comports(). However, list_ports_windows has far
+ more information. comports() contains all list_ports_windows.comports()
+ and winreg_comports() that were not found from list_ports_windows.
+
+ Returns:
+ comports (list): Sorted list of ListPortInfo.
+ """
+ try:
+ # Get the Serial Coms registry
+ key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, SERIAL_REGISTRY_PATH)
+
+ # Get key info - number of values (subkeys, num_vals, last_modified)
+ num_values = winreg.QueryInfoKey(key)[1]
+
+ # Make a generator of comports
+ for i in range(num_values):
+ # get registry value for the comport
+ value = winreg.EnumValue(key, i)
+ yield regval_to_listport(value)
+
+ # Close the registry key
+ winreg.CloseKey(key)
+ except (AttributeError, WindowsError, EnvironmentError):
+ # winreg is None or there was a key error
+ pass
+
+
+# end winreg_comports
+
+
+def comports_list():
+ """Return a list of comports found from list_ports_windows and comports
+ found in the window registry.
+
+ See Also:
+ list_ports_winreg.comports(), list_ports_winreg.winreg_comports(),
+ list_ports_windows.comports()
+
+ Note:
+ This should include virtual comports. The returned list contains all
+ entries from list_ports_windows.comports() plus any winreg_comports()
+ entries that list_ports_windows did not find.
+
+ Returns:
+ comports (list): List of ListPortInfo comport details.
+ """
+ comports = list(list_ports_windows.comports())
+
+ comports[len(comports):] = [li for li in winreg_comports()
+ if li not in comports]
+
+ return comports
+
+
+# end comports_list
+
+
+def comports(include_links=False):
+ """Generator for comports found from list ports windows and comports found
+ in the windows registry.
+
+ See Also:
+ list_ports_winreg.comports_list(), list_ports_winreg.winreg_comports(),
+ list_ports_windows.comports()
+
+ Note:
+ This should include virtual comports. This method contains all
+ list_ports_windows.comports() and winreg_comports() that were not found
+ from list_ports_windows.
+
+ Yields:
+ comport (ListPortInfo): Comport details.
+
+ Returns:
+ comports (generator): Generator of ListPortInfo comports.
+ """
+ existing = []
+ for comport in list_ports_windows.comports():
+ existing.append(comport)
+ yield comport
+
+ for li in winreg_comports():
+ if li not in existing:
+ existing.append(li)
+ yield li
+
+
+# end comports
+
+
+# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+# test
+if __name__ == '__main__':
+ for port, desc, hwid in sorted(comports()):
+ print("%s: %s [%s]" % (port, desc, hwid))
\ No newline at end of file
diff --git a/freedata_server/serial_ports.py b/freedata_server/serial_ports.py
index d9cf1dfb..f874314e 100644
--- a/freedata_server/serial_ports.py
+++ b/freedata_server/serial_ports.py
@@ -1,9 +1,17 @@
import serial.tools.list_ports
+import list_ports_winreg
import helpers
+import sys
+
+
def get_ports():
serial_devices = []
- ports = serial.tools.list_ports.comports(include_links=False)
+ if sys.platform == 'win32':
+ ports = serial.tools.list_ports.comports(include_links=False)
+ else:
+ ports = list_ports_winreg.comports(include_links=False)
+
for port, desc, hwid in ports:
# calculate hex of hwid if we have unique names
crc_hwid = helpers.get_crc_16(bytes(hwid, encoding="utf-8"))
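
For reference, a minimal sketch (not part of the patch) of how the registry values enumerated by the new module map onto pyserial objects. The sample tuple below is invented for illustration; it mimics what winreg.EnumValue returns under HKLM\HARDWARE\DEVICEMAP\SERIALCOMM for a com0com device, and it only runs on a Windows machine with pyserial installed.

    from list_ports_winreg import regval_to_listport

    # (name, value, type) as winreg.EnumValue would return it; 1 == REG_SZ
    sample_value = ("\\Device\\com0com10", "COM7", 1)
    port = regval_to_listport(sample_value)
    print(port.device)       # COM7
    print(port.description)  # com0com10 (COM7)
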
From 644f34529309340be9a3d4279e988411c5bbabcf Mon Sep 17 00:00:00 2001
From: DJ2LS <75909252+DJ2LS@users.noreply.github.com>
Date: Mon, 24 Feb 2025 18:16:13 +0100
Subject: [PATCH 02/14] test with com0com
---
freedata_server/serial_ports.py | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/freedata_server/serial_ports.py b/freedata_server/serial_ports.py
index f874314e..89aa7fbc 100644
--- a/freedata_server/serial_ports.py
+++ b/freedata_server/serial_ports.py
@@ -8,9 +8,10 @@ def get_ports():
serial_devices = []
if sys.platform == 'win32':
- ports = serial.tools.list_ports.comports(include_links=False)
- else:
ports = list_ports_winreg.comports(include_links=False)
+ else:
+
+ ports = serial.tools.list_ports.comports(include_links=False)
for port, desc, hwid in ports:
# calculate hex of hwid if we have unique names
From d1c26950335ae8f1f9e71950e9eab67b8185a695 Mon Sep 17 00:00:00 2001
From: DJ2LS <75909252+DJ2LS@users.noreply.github.com>
Date: Tue, 25 Feb 2025 11:51:33 +0100
Subject: [PATCH 03/14] attempt overriding ptt port
---
.../src/components/settings_hamlib.vue | 25 +++++++++++++++++++
.../src/components/settings_serial_ptt.vue | 24 ++++++++++++++++++
2 files changed, 49 insertions(+)
diff --git a/freedata_gui/src/components/settings_hamlib.vue b/freedata_gui/src/components/settings_hamlib.vue
index 8da65c5d..a7654bde 100644
--- a/freedata_gui/src/components/settings_hamlib.vue
+++ b/freedata_gui/src/components/settings_hamlib.vue
@@ -442,6 +442,31 @@ const settings = ref({
+
+
+
+ Radio custom port
+
+
+
+
+
+
+
+
diff --git a/freedata_gui/src/components/settings_serial_ptt.vue b/freedata_gui/src/components/settings_serial_ptt.vue
index 350a4a5e..5558000e 100644
--- a/freedata_gui/src/components/settings_serial_ptt.vue
+++ b/freedata_gui/src/components/settings_serial_ptt.vue
@@ -27,6 +27,30 @@
+
+
+
+ Custom PTT port
+
+
+
+
+
+
+
From debf1d57be08d45de90a4c3997cc7e3242dd25e5 Mon Sep 17 00:00:00 2001
From: DJ2LS <75909252+DJ2LS@users.noreply.github.com>
Date: Wed, 26 Feb 2025 11:56:56 +0100
Subject: [PATCH 04/14] attempt solving tests
---
freedata_server/serial_ports.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/freedata_server/serial_ports.py b/freedata_server/serial_ports.py
index 89aa7fbc..0f1280c1 100644
--- a/freedata_server/serial_ports.py
+++ b/freedata_server/serial_ports.py
@@ -1,5 +1,4 @@
import serial.tools.list_ports
-import list_ports_winreg
import helpers
import sys
@@ -8,6 +7,7 @@ def get_ports():
serial_devices = []
if sys.platform == 'win32':
+ import list_ports_winreg
ports = list_ports_winreg.comports(include_links=False)
else:
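
Taken together, patches 01-04 leave get_ports() choosing the enumerator per platform and importing the registry-based module only on Windows. A minimal sketch of that flow, with enumerate_ports as a hypothetical stand-in for the repo's get_ports():

    import sys
    import serial.tools.list_ports

    def enumerate_ports():
        if sys.platform == 'win32':
            # Registry-based enumeration also surfaces virtual COM ports such
            # as com0com pairs; imported lazily so other platforms never load
            # the Windows-only backend.
            import list_ports_winreg
            return list(list_ports_winreg.comports(include_links=False))
        return list(serial.tools.list_ports.comports(include_links=False))

    for port in enumerate_ports():
        print(port.device, port.description, port.hwid)
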
From b59ef36296c0d2b19f9efee4dc62e1cbeb423091 Mon Sep 17 00:00:00 2001
From: DJ2LS <75909252+DJ2LS@users.noreply.github.com>
Date: Sat, 1 Mar 2025 08:35:16 +0100
Subject: [PATCH 05/14] adjustments for api, database and error handling
---
freedata_server/api/freedata.py | 33 -------
freedata_server/config.py | 21 +++--
.../message_system_db_attachments.py | 86 ++++++++++++++++++-
freedata_server/message_system_db_messages.py | 11 ++-
freedata_server/schedule_manager.py | 42 +++++++++
freedata_server/server.py | 3 +
6 files changed, 151 insertions(+), 45 deletions(-)
diff --git a/freedata_server/api/freedata.py b/freedata_server/api/freedata.py
index ab6a1294..2c5c05d8 100644
--- a/freedata_server/api/freedata.py
+++ b/freedata_server/api/freedata.py
@@ -22,39 +22,6 @@ async def get_freedata_message(message_id: str, request: Request):
return api_response(message)
-@router.post("/messages", summary="Transmit Message", tags=["FreeDATA"], responses={
- 200: {
- "description": "Message transmitted successfully.",
- "content": {
- "application/json": {
- "example": {
- "destination": "XX1XXX-6",
- "body": "Hello FreeDATA"
- }
- }
- }
- },
- 404: {
- "description": "The requested resource was not found.",
- "content": {
- "application/json": {
- "example": {
- "error": "Resource not found."
- }
- }
- }
- },
- 503: {
- "description": "Modem not running or busy.",
- "content": {
- "application/json": {
- "example": {
- "error": "Modem not running."
- }
- }
- }
- }
-})
async def post_freedata_message(request: Request):
"""
Transmit a FreeDATA message.
diff --git a/freedata_server/config.py b/freedata_server/config.py
index 08782f71..67902112 100644
--- a/freedata_server/config.py
+++ b/freedata_server/config.py
@@ -230,14 +230,17 @@ class CONFIG:
# self.log.info("[CFG] reading...")
if not self.config_exists():
return False
-
- # at first just copy the config as read from file
- result = {s: dict(self.parser.items(s)) for s in self.parser.sections()}
+ try:
+ # at first just copy the config as read from file
+ result = {s: dict(self.parser.items(s)) for s in self.parser.sections()}
- # handle the special settings
- for section in result:
- for setting in result[section]:
- result[section][setting] = self.handle_setting(
- section, setting, result[section][setting], False)
+ # handle the special settings
+ for section in result:
+ for setting in result[section]:
+ result[section][setting] = self.handle_setting(
+ section, setting, result[section][setting], False)
+ return result
+ except Exception as conferror:
+ self.log.error("[CFG] error reading config file", e=conferror)
+ return False
- return result
diff --git a/freedata_server/message_system_db_attachments.py b/freedata_server/message_system_db_attachments.py
index b582893e..526db408 100644
--- a/freedata_server/message_system_db_attachments.py
+++ b/freedata_server/message_system_db_attachments.py
@@ -70,4 +70,88 @@ class DatabaseManagerAttachments(DatabaseManager):
self.log(f"Error fetching attachment with SHA-512 hash {hash_sha512}: {e}", isWarning=True)
return None
finally:
- session.remove()
\ No newline at end of file
+ session.remove()
+
+ def delete_attachments_by_message_id(self, message_id):
+ """
+ Deletes attachment associations for a given message ID.
+
+ For each attachment linked to the message:
+ - If the attachment is linked to more than one message, only the association for this message is deleted.
+ - If the attachment is linked only to this message, both the association and the attachment record are deleted.
+
+ Parameters:
+ message_id (str): The ID of the message whose attachment associations should be deleted.
+
+ Returns:
+ bool: True if the deletion was successful, False otherwise.
+ """
+ session = self.get_thread_scoped_session()
+ try:
+ # Find all attachment associations for the given message ID.
+ links = session.query(MessageAttachment).filter_by(message_id=message_id).all()
+ if not links:
+ self.log(f"No attachments linked with message ID {message_id} found.")
+ return True
+
+ for link in links:
+ # Count how many associations exist for this attachment.
+ link_count = session.query(MessageAttachment).filter_by(attachment_id=link.attachment_id).count()
+ if link_count > 1:
+ # More than one link exists, so only remove the association.
+ session.delete(link)
+ self.log(
+ f"Deleted link for attachment '{link.attachment.name}' from message {message_id} (other links exist).")
+ else:
+ # Only one link exists, so delete both the association and the attachment.
+ session.delete(link)
+ session.delete(link.attachment)
+ self.log(f"Deleted attachment '{link.attachment.name}' from message {message_id} (only link).")
+
+ session.commit()
+ return True
+ except Exception as e:
+ session.rollback()
+ self.log(f"Error deleting attachments for message ID {message_id}: {e}", isWarning=True)
+ return False
+ finally:
+ session.remove()
+
+
+ def clean_orphaned_attachments(self):
+ """
+ Checks for orphaned attachments in the database, i.e. attachments that have no
+ MessageAttachment links to any messages. Optionally, deletes these orphaned attachments.
+
+ Parameters:
+ cleanup (bool): If True, deletes the orphaned attachments; if False, only returns them.
+
+ Returns:
+ If cleanup is False:
+ list: A list of dictionaries representing the orphaned attachments.
+ If cleanup is True:
+ dict: A summary dictionary with the count of deleted attachments.
+ """
+ session = self.get_thread_scoped_session()
+ try:
+ orphaned = []
+ # Get all attachments in the database.
+ attachments = session.query(Attachment).all()
+ for attachment in attachments:
+ # Count the number of MessageAttachment links for this attachment.
+ link_count = session.query(MessageAttachment).filter_by(attachment_id=attachment.id).count()
+ if link_count == 0:
+ orphaned.append(attachment)
+
+ for attachment in orphaned:
+ self.log(f"Deleting orphaned attachment: {attachment.name}")
+ session.delete(attachment)
+ self.log(f"Checked for orphaned attachments")
+ session.commit()
+ return {'status': 'success', 'deleted_count': len(orphaned)}
+ except Exception as e:
+ session.rollback()
+ self.log(f"Error checking orphaned attachments: {e}", isWarning=True)
+ return None
+ finally:
+ session.remove()
diff --git a/freedata_server/message_system_db_messages.py b/freedata_server/message_system_db_messages.py
index e922c806..417a04d6 100644
--- a/freedata_server/message_system_db_messages.py
+++ b/freedata_server/message_system_db_messages.py
@@ -222,23 +222,30 @@ class DatabaseManagerMessages(DatabaseManager):
return None
def delete_message(self, message_id):
+
+ # Delete attachment links associated with this message.
+ # This call will check each attachment link:
+ # - If the attachment is used by other messages, only the link is removed.
+ # - If the attachment is solely linked to this message, the attachment record is deleted.
+ self.attachments_manager.delete_attachments_by_message_id(message_id)
+
+
session = self.get_thread_scoped_session()
try:
message = session.query(P2PMessage).filter_by(id=message_id).first()
if message:
session.delete(message)
session.commit()
+
self.log(f"Deleted: {message_id}")
self.event_manager.freedata_message_db_change(message_id=message_id)
return {'status': 'success', 'message': f'Message {message_id} deleted'}
else:
return {'status': 'failure', 'message': 'Message not found'}
-
except Exception as e:
session.rollback()
self.log(f"Error deleting message with ID {message_id}: {e}", isWarning=True)
return {'status': 'failure', 'message': 'error deleting message'}
-
finally:
session.remove()
diff --git a/freedata_server/schedule_manager.py b/freedata_server/schedule_manager.py
index f8ef7ada..fc1bbeb2 100644
--- a/freedata_server/schedule_manager.py
+++ b/freedata_server/schedule_manager.py
@@ -88,6 +88,48 @@ class ScheduleManager:
print(e)
def push_to_explorer(self):
+ """
+
+ Exception in thread Thread-5 (run):
+Traceback (most recent call last):
+ File "/usr/local/Cellar/python@3.11/3.11.10/Frameworks/Python.framework/Versions/3.11/lib/python3.11/threading.py", line 1045, in _bootstrap_inner
+ self.run()
+ File "/usr/local/Cellar/python@3.11/3.11.10/Frameworks/Python.framework/Versions/3.11/lib/python3.11/threading.py", line 982, in run
+ self._target(*self._args, **self._kwargs)
+ File "/usr/local/Cellar/python@3.11/3.11.10/Frameworks/Python.framework/Versions/3.11/lib/python3.11/sched.py", line 151, in run
+ action(*argument, **kwargs)
+ File "/Users/simonlang/PycharmProjects/FreeDATA/freedata_server/schedule_manager.py", line 42, in schedule_event
+ event_function() # Execute the event function
+ ^^^^^^^^^^^^^^^^
+ File "/Users/simonlang/PycharmProjects/FreeDATA/freedata_server/schedule_manager.py", line 91, in push_to_explorer
+ self.config = self.config_manager.read()
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ File "/Users/simonlang/PycharmProjects/FreeDATA/freedata_server/config.py", line 235, in read
+ result = {s: dict(self.parser.items(s)) for s in self.parser.sections()}
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ File "/Users/simonlang/PycharmProjects/FreeDATA/freedata_server/config.py", line 235, in
+ result = {s: dict(self.parser.items(s)) for s in self.parser.sections()}
+ ^^^^^^^^^^^^^^^^^^^^
+ File "/usr/local/Cellar/python@3.11/3.11.10/Frameworks/Python.framework/Versions/3.11/lib/python3.11/configparser.py", line 875, in items
+ return [(option, value_getter(option)) for option in orig_keys]
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+2025-02-28 17:14:49 [info ] [DatabaseManagerMessages]: Updating station list with DJ2LS-0
+ File "/usr/local/Cellar/python@3.11/3.11.10/Frameworks/Python.framework/Versions/3.11/lib/python3.11/configparser.py", line 875, in
+ return [(option, value_getter(option)) for option in orig_keys]
+ ^^^^^^^^^^^^^^^^^^^^
+ File "/usr/local/Cellar/python@3.11/3.11.10/Frameworks/Python.framework/Versions/3.11/lib/python3.11/configparser.py", line 871, in
+ value_getter = lambda option: self._interpolation.before_get(self,
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ File "/usr/local/Cellar/python@3.11/3.11.10/Frameworks/Python.framework/Versions/3.11/lib/python3.11/configparser.py", line 396, in before_get
+ self._interpolate_some(parser, option, L, value, section, defaults, 1)
+ File "/usr/local/Cellar/python@3.11/3.11.10/Frameworks/Python.framework/Versions/3.11/lib/python3.11/configparser.py", line 413, in _interpolate_some
+ p = rest.find("%")
+ ^^^^^^^^^
+AttributeError: 'list' object has no attribute 'find'
+
+
+
+ """
self.config = self.config_manager.read()
if self.config['STATION']['enable_explorer'] and self.state_manager.is_modem_running:
try:
diff --git a/freedata_server/server.py b/freedata_server/server.py
index 6d332469..20bb10b3 100644
--- a/freedata_server/server.py
+++ b/freedata_server/server.py
@@ -25,6 +25,7 @@ import structlog
from message_system_db_manager import DatabaseManager
+from message_system_db_attachments import DatabaseManagerAttachments
from schedule_manager import ScheduleManager
from api.general import router as general_router
@@ -206,6 +207,8 @@ def main():
app.modem_service.put("start")
DatabaseManager(app.event_manager).initialize_default_values()
DatabaseManager(app.event_manager).database_repair_and_cleanup()
+ DatabaseManagerAttachments(app.event_manager).clean_orphaned_attachments()
+
app.wsm = websocket_manager.wsm()
app.wsm.startWorkerThreads(app)
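
A hedged usage sketch of the two new attachment helpers, assuming DatabaseManagerMessages and DatabaseManagerAttachments are both constructed with the same event_manager that server.py hands to DatabaseManager above; delete_message_and_tidy is an illustrative wrapper, not a function in the repo.

    from message_system_db_messages import DatabaseManagerMessages
    from message_system_db_attachments import DatabaseManagerAttachments

    def delete_message_and_tidy(event_manager, message_id):
        # delete_message() now removes the attachment links first; an attachment
        # record is only dropped when this message held its last link.
        result = DatabaseManagerMessages(event_manager).delete_message(message_id)

        # Optional safety net, mirroring the startup call added to server.py:
        # purge any attachments left without a single link.
        summary = DatabaseManagerAttachments(event_manager).clean_orphaned_attachments()
        return result, summary
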
From 7d55f640e7cf27ed016be8c227cc5e3767a12c4b Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Sat, 1 Mar 2025 16:06:52 +0000
Subject: [PATCH 06/14] Update nuitka requirement from <=2.6.2 to <=2.6.7
Updates the requirements on [nuitka](https://github.com/Nuitka/Nuitka) to permit the latest version.
- [Changelog](https://github.com/Nuitka/Nuitka/blob/develop/Changelog.rst)
- [Commits](https://github.com/Nuitka/Nuitka/compare/0.3.11a...2.6.7)
---
updated-dependencies:
- dependency-name: nuitka
dependency-type: direct:production
...
Signed-off-by: dependabot[bot]
---
requirements.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/requirements.txt b/requirements.txt
index 32fe5376..f4d26035 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -9,7 +9,7 @@ asyncio
chardet
colorama
ordered-set
-nuitka<=2.6.2
+nuitka<=2.6.7
pyinstaller
websocket-client
fastapi[standard]
From 5bcd67061ed08ddbcb0696759f2f1e0e63c2da40 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Sat, 1 Mar 2025 16:22:56 +0000
Subject: [PATCH 07/14] Bump globals from 15.15.0 to 16.0.0 in /freedata_gui
Bumps [globals](https://github.com/sindresorhus/globals) from 15.15.0 to 16.0.0.
- [Release notes](https://github.com/sindresorhus/globals/releases)
- [Commits](https://github.com/sindresorhus/globals/compare/v15.15.0...v16.0.0)
---
updated-dependencies:
- dependency-name: globals
dependency-type: direct:development
update-type: version-update:semver-major
...
Signed-off-by: dependabot[bot]
---
freedata_gui/package.json | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/freedata_gui/package.json b/freedata_gui/package.json
index 98062b5a..db0e7e42 100644
--- a/freedata_gui/package.json
+++ b/freedata_gui/package.json
@@ -54,7 +54,7 @@
"@vue/cli-service": "~5.0.8",
"eslint": "^8.0.0",
"eslint-plugin-vue": "^9.28.0",
- "globals": "^15.9.0"
+ "globals": "^16.0.0"
},
"eslintConfig": {
"root": true,
From 99ce766c146d5edcd0e3f774de46f92467dff205 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Sat, 1 Mar 2025 16:23:25 +0000
Subject: [PATCH 08/14] Bump pinia from 2.3.1 to 3.0.1 in /freedata_gui
Bumps [pinia](https://github.com/vuejs/pinia) from 2.3.1 to 3.0.1.
- [Release notes](https://github.com/vuejs/pinia/releases)
- [Commits](https://github.com/vuejs/pinia/commits)
---
updated-dependencies:
- dependency-name: pinia
dependency-type: direct:production
update-type: version-update:semver-major
...
Signed-off-by: dependabot[bot]
---
freedata_gui/package.json | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/freedata_gui/package.json b/freedata_gui/package.json
index 98062b5a..bbf44e88 100644
--- a/freedata_gui/package.json
+++ b/freedata_gui/package.json
@@ -37,7 +37,7 @@
"gridstack": "^11.0.1",
"js-image-compressor": "^2.0.0",
"marked": "^15.0.3",
- "pinia": "^2.1.7",
+ "pinia": "^3.0.1",
"qth-locator": "^2.1.0",
"topojson-client": "^3.1.0",
"uuid": "^11.0.2",
From 64ac6f1bfd51f2951c4894450e78df05615d766d Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Sat, 1 Mar 2025 16:23:42 +0000
Subject: [PATCH 09/14] Bump bootstrap-vue-next from 0.26.30 to 0.27.0 in
/freedata_gui
Bumps [bootstrap-vue-next](https://github.com/bootstrap-vue-next/bootstrap-vue-next/tree/HEAD/packages/bootstrap-vue-next) from 0.26.30 to 0.27.0.
- [Release notes](https://github.com/bootstrap-vue-next/bootstrap-vue-next/releases)
- [Changelog](https://github.com/bootstrap-vue-next/bootstrap-vue-next/blob/main/packages/bootstrap-vue-next/CHANGELOG.md)
- [Commits](https://github.com/bootstrap-vue-next/bootstrap-vue-next/commits/nuxt-v0.27.0/packages/bootstrap-vue-next)
---
updated-dependencies:
- dependency-name: bootstrap-vue-next
dependency-type: direct:production
update-type: version-update:semver-minor
...
Signed-off-by: dependabot[bot]
---
freedata_gui/package.json | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/freedata_gui/package.json b/freedata_gui/package.json
index 98062b5a..bec52ce6 100644
--- a/freedata_gui/package.json
+++ b/freedata_gui/package.json
@@ -28,7 +28,7 @@
"@popperjs/core": "^2.11.8",
"bootstrap": "^5.3.3",
"bootstrap-icons": "^1.11.3",
- "bootstrap-vue-next": "^0.26.7",
+ "bootstrap-vue-next": "^0.27.0",
"chart.js": "^4.4.3",
"chartjs-plugin-annotation": "^3.0.1",
"core-js": "^3.8.3",
From fe0f341f3587d200df61fc3d2c5323f9d9ef6b2b Mon Sep 17 00:00:00 2001
From: DJ2LS <75909252+DJ2LS@users.noreply.github.com>
Date: Sat, 1 Mar 2025 19:17:55 +0100
Subject: [PATCH 10/14] Update schedule_manager.py
Removed traceback
---
freedata_server/schedule_manager.py | 43 +----------------------------
1 file changed, 1 insertion(+), 42 deletions(-)
diff --git a/freedata_server/schedule_manager.py b/freedata_server/schedule_manager.py
index fc1bbeb2..406fb635 100644
--- a/freedata_server/schedule_manager.py
+++ b/freedata_server/schedule_manager.py
@@ -88,48 +88,7 @@ class ScheduleManager:
print(e)
def push_to_explorer(self):
- """
-
- Exception in thread Thread-5 (run):
-Traceback (most recent call last):
- File "/usr/local/Cellar/python@3.11/3.11.10/Frameworks/Python.framework/Versions/3.11/lib/python3.11/threading.py", line 1045, in _bootstrap_inner
- self.run()
- File "/usr/local/Cellar/python@3.11/3.11.10/Frameworks/Python.framework/Versions/3.11/lib/python3.11/threading.py", line 982, in run
- self._target(*self._args, **self._kwargs)
- File "/usr/local/Cellar/python@3.11/3.11.10/Frameworks/Python.framework/Versions/3.11/lib/python3.11/sched.py", line 151, in run
- action(*argument, **kwargs)
- File "/Users/simonlang/PycharmProjects/FreeDATA/freedata_server/schedule_manager.py", line 42, in schedule_event
- event_function() # Execute the event function
- ^^^^^^^^^^^^^^^^
- File "/Users/simonlang/PycharmProjects/FreeDATA/freedata_server/schedule_manager.py", line 91, in push_to_explorer
- self.config = self.config_manager.read()
- ^^^^^^^^^^^^^^^^^^^^^^^^^^
- File "/Users/simonlang/PycharmProjects/FreeDATA/freedata_server/config.py", line 235, in read
- result = {s: dict(self.parser.items(s)) for s in self.parser.sections()}
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- File "/Users/simonlang/PycharmProjects/FreeDATA/freedata_server/config.py", line 235, in
- result = {s: dict(self.parser.items(s)) for s in self.parser.sections()}
- ^^^^^^^^^^^^^^^^^^^^
- File "/usr/local/Cellar/python@3.11/3.11.10/Frameworks/Python.framework/Versions/3.11/lib/python3.11/configparser.py", line 875, in items
- return [(option, value_getter(option)) for option in orig_keys]
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-2025-02-28 17:14:49 [info ] [DatabaseManagerMessages]: Updating station list with DJ2LS-0
- File "/usr/local/Cellar/python@3.11/3.11.10/Frameworks/Python.framework/Versions/3.11/lib/python3.11/configparser.py", line 875, in
- return [(option, value_getter(option)) for option in orig_keys]
- ^^^^^^^^^^^^^^^^^^^^
- File "/usr/local/Cellar/python@3.11/3.11.10/Frameworks/Python.framework/Versions/3.11/lib/python3.11/configparser.py", line 871, in
- value_getter = lambda option: self._interpolation.before_get(self,
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- File "/usr/local/Cellar/python@3.11/3.11.10/Frameworks/Python.framework/Versions/3.11/lib/python3.11/configparser.py", line 396, in before_get
- self._interpolate_some(parser, option, L, value, section, defaults, 1)
- File "/usr/local/Cellar/python@3.11/3.11.10/Frameworks/Python.framework/Versions/3.11/lib/python3.11/configparser.py", line 413, in _interpolate_some
- p = rest.find("%")
- ^^^^^^^^^
-AttributeError: 'list' object has no attribute 'find'
-
-
-
- """
+
self.config = self.config_manager.read()
if self.config['STATION']['enable_explorer'] and self.state_manager.is_modem_running:
try:
From 878a6357c18d6d39aa04d6da0e53f7c8df4bab81 Mon Sep 17 00:00:00 2001
From: DJ2LS <75909252+DJ2LS@users.noreply.github.com>
Date: Sat, 1 Mar 2025 19:58:24 +0100
Subject: [PATCH 11/14] first run with adjusted FFT processing
---
freedata_server/audio.py | 214 +++++++++++++++++---------------------
freedata_server/server.py | 2 +-
2 files changed, 99 insertions(+), 117 deletions(-)
diff --git a/freedata_server/audio.py b/freedata_server/audio.py
index 53f37f3e..d1825d87 100644
--- a/freedata_server/audio.py
+++ b/freedata_server/audio.py
@@ -7,6 +7,7 @@ import structlog
import numpy as np
import queue
import helpers
+import time
log = structlog.get_logger("audio")
@@ -250,172 +251,153 @@ def normalize_audio(datalist: np.ndarray) -> np.ndarray:
return normalized_data
+# Global variables to manage channel status
+CHANNEL_BUSY_DELAY = 0 # Counter for channel busy delay
+SLOT_DELAY = [0] * 5 # Counters for delays in each slot
-RMS_COUNTER = 0
-CHANNEL_BUSY_DELAY = 0
-SLOT_DELAY = [0, 0, 0, 0, 0]
+# Constants for delay logic
+DELAY_INCREMENT = 2 # Amount to increase delay
+MAX_DELAY = 200 # Maximum allowable delay
+# Predefined FFT bin ranges (slots) used for channel busy detection.
+# The indices assume an FFT input length of 800 samples.
+SLOT_RANGES = [
+ (0, 65), # Slot 1: bins 0 to 65
+ (65, 120), # Slot 2: bins 65 to 120
+ (120, 176), # Slot 3: bins 120 to 176
+ (176, 231), # Slot 4: bins 176 to 231
+ (231, 315) # Slot 5: bins 231 to 315
+]
-def prepare_data_for_fft(data, target_length_samples=400):
+# Initialize a queue to store FFT results for visualization
+fft_queue = queue.Queue()
+
+# Variable to track the time of the last RMS calculation
+last_rms_time = 0
+
+def prepare_data_for_fft(data, target_length_samples=800):
"""
- Prepare data array for FFT by padding if necessary to match the target length.
- Center the data if it's shorter than the target length.
+ Prepare the input data for FFT by ensuring it meets the required length.
Parameters:
- - data: numpy array of np.int16, representing the input data.
- - target_length_samples: int, the target length of the data in samples.
+ - data: numpy.ndarray of type np.int16, representing the audio data.
+ - target_length_samples: int, the desired length of the data in samples.
Returns:
- - numpy array of np.int16, padded and/or centered if necessary.
+ - numpy.ndarray of type np.int16 with a length of target_length_samples.
"""
- # Calculate the current length in samples
- current_length_samples = data.size
+ # Check if the input data type is np.int16
+ if data.dtype != np.int16:
+ raise ValueError("Audio data must be of type np.int16")
- # Check if padding is needed
- if current_length_samples < target_length_samples:
- # Calculate total padding needed
- total_pad_length = target_length_samples - current_length_samples
- # Calculate padding on each side
- pad_before = total_pad_length // 2
- pad_after = total_pad_length - pad_before
- # Pad the data to center it
- data_padded = np.pad(data, (pad_before, pad_after), 'constant', constant_values=(0,))
- return data_padded
+ # If data is shorter than the target length, pad with zeros
+ if len(data) < target_length_samples:
+ return np.pad(data, (0, target_length_samples - len(data)), 'constant', constant_values=(0,))
else:
- # No padding needed, return original data
- return data
+ # If data is longer or equal to the target length, truncate it
+ return data[:target_length_samples]
+
+def calculate_rms_dbfs(data):
+ """
+ Calculate the Root Mean Square (RMS) value of the audio data and
+ convert it to dBFS (decibels relative to full scale).
+
+ Parameters:
+ - data: numpy.ndarray of type np.int16, representing the audio data.
+
+ Returns:
+ - float: RMS value in dBFS. Returns -100 if the RMS value is 0.
+ """
+ # Compute the RMS value using int32 to prevent overflow
+ rms = np.sqrt(np.mean(np.square(data, dtype=np.int32), dtype=np.float64))
+ # Convert the RMS value to dBFS
+ return 20 * np.log10(rms / 32768) if rms > 0 else -100
def calculate_fft(data, fft_queue, states) -> None:
"""
- Calculate an average signal strength of the channel to assess
- whether the channel is "busy."
+ Perform FFT calculation, update channel status, and manage the FFT queue
+ for visualization purposes.
+
+ Parameters:
+ - data: numpy.ndarray of type np.int16, representing the audio data.
+ - fft_queue: queue.Queue, stores FFT results for visualization.
+ - states: An object that holds the current state of the system.
"""
- # Initialize dbfs counter
- # rms_counter = 0
-
- # https://gist.github.com/ZWMiller/53232427efc5088007cab6feee7c6e4c
- # Fast Fourier Transform, 10*log10(abs) is to scale it to dB
- # and make sure it's not imaginary
-
- global RMS_COUNTER, CHANNEL_BUSY_DELAY
+ global CHANNEL_BUSY_DELAY, last_rms_time
try:
- data = prepare_data_for_fft(data, target_length_samples=800)
+ # Prepare the data for FFT processing
+ data = prepare_data_for_fft(data)
+
+ # Perform FFT and compute the power spectrum
fftarray = np.fft.rfft(data)
+ power_spectrum = np.abs(fftarray) ** 2
- # Set value 0 to 1 to avoid division by zero
- fftarray[fftarray == 0] = 1
- dfft = 10.0 * np.log10(abs(fftarray))
+ # Calculate the average power and set the detection threshold
+ avg_power = np.mean(power_spectrum)
+ threshold = avg_power * 20
- # get average of dfft
- avg = np.mean(dfft)
+ # Check if the system is neither transmitting nor receiving
+ not_transmitting = not states.isTransmitting()
+ not_receiving = not states.is_receiving_codec2_signal()
- # Detect signals which are higher than the
- # average + 10 (+10 smoothes the output).
- # Data higher than the average must be a signal.
- # Therefore we are setting it to 100 so it will be highlighted
- # Have to do this when we are not transmitting so our
- # own sending data will not affect this too much
- if not states.isTransmitting():
- dfft[dfft > avg + 15] = 100
+ # Compute the logarithmic power spectrum for visualization
+ dfft = 10.0 * np.log10(power_spectrum + 1e-12)
- # Calculate audio dbfs
- # https://stackoverflow.com/a/9763652
- # calculate dbfs every 50 cycles for reducing CPU load
- RMS_COUNTER += 1
- if RMS_COUNTER > 5:
- d = np.frombuffer(data, np.int16).astype(np.float32)
- # calculate RMS and then dBFS
- # https://dsp.stackexchange.com/questions/8785/how-to-compute-dbfs
- # try except for avoiding runtime errors by division/0
- try:
- rms = int(np.sqrt(np.max(d ** 2)))
- if rms == 0:
- raise ZeroDivisionError
- audio_dbfs = 20 * np.log10(rms / 32768)
- states.set("audio_dbfs", audio_dbfs)
- except Exception as e:
- states.set("audio_dbfs", -100)
+ if not_transmitting:
+ # Highlight frequency components exceeding the threshold
+ dfft[power_spectrum > threshold] = 100
- RMS_COUNTER = 0
+ # Calculate the audio RMS value in dBFS once per second
+ current_time = time.time()
+ if current_time - last_rms_time >= 1.0:
+ audio_dbfs = calculate_rms_dbfs(data)
+ states.set("audio_dbfs", audio_dbfs)
+ last_rms_time = current_time
- # Convert data to int to decrease size
+ # Convert the FFT data to integers for visualization
dfft = dfft.astype(int)
-
- # Create list of dfft
dfftlist = dfft.tolist()
- # Reduce area where the busy detection is enabled
- # We want to have this in correlation with mode bandwidth
- # TODO This is not correctly and needs to be checked for correct maths
- # dfftlist[0:1] = 10,15Hz
- # Bandwidth[Hz] / 10,15
- # narrowband = 563Hz = 56
- # wideband = 1700Hz = 167
- # 1500Hz = 148
- # 2700Hz = 266
- # 3200Hz = 315
- # Initialize slot delay counters
- DELAY_INCREMENT = 2
- MAX_DELAY = 200
-
- # Main logic
- slot = 0
- slot1 = [0, 65]
- slot2 = [65, 120]
- slot3 = [120, 176]
- slot4 = [176, 231]
- slot5 = [231, len(dfftlist)]
- slotbusy = [False, False, False, False, False]
-
- # Set to true if we should increment delay count; else false to decrement
+ # Initialize the slot busy status list
+ slotbusy = [False] * 5
addDelay = False
- for range in [slot1, slot2, slot3, slot4, slot5]:
- range_start = range[0]
- range_end = range[1]
- # define the area, we are detecting busy state
- slotdfft = dfft[range_start:range_end]
- # Check for signals higher than average by checking for "100"
- # If we have a signal, increment our channel_busy delay counter
- # so we have a smoother state toggle
- if np.sum(slotdfft[slotdfft > avg + 15]) >= 200 and not states.isTransmitting() and not states.is_receiving_codec2_signal():
+ # Evaluate each slot to determine if it exceeds the threshold
+ for slot, (range_start, range_end) in enumerate(SLOT_RANGES):
+ slot_power = np.sum(power_spectrum[range_start:range_end])
+ if slot_power > threshold and not_transmitting and not_receiving:
addDelay = True
slotbusy[slot] = True
SLOT_DELAY[slot] = min(SLOT_DELAY[slot] + DELAY_INCREMENT, MAX_DELAY)
else:
SLOT_DELAY[slot] = max(SLOT_DELAY[slot] - 1, 0)
+ slotbusy[slot] = SLOT_DELAY[slot] > 0
- if SLOT_DELAY[slot] == 0:
- slotbusy[slot] = False
- else:
- slotbusy[slot] = True
-
- # increment slot
- slot += 1
+ # Update the channel slot busy status based on slot evaluations
states.set_channel_slot_busy(slotbusy)
if addDelay:
- # Limit delay counter to a maximum of 200. The higher this value,
- # the longer we will wait until releasing state
+ # If any slot is busy, increase the channel busy delay
states.set_channel_busy_condition_traffic(True)
CHANNEL_BUSY_DELAY = min(CHANNEL_BUSY_DELAY + DELAY_INCREMENT, MAX_DELAY)
else:
- # Decrement channel busy counter if no signal has been detected.
+ # If no slots are busy, decrease the channel busy delay
CHANNEL_BUSY_DELAY = max(CHANNEL_BUSY_DELAY - 1, 0)
- # When our channel busy counter reaches 0, toggle state to False
if CHANNEL_BUSY_DELAY == 0:
states.set_channel_busy_condition_traffic(False)
- # erase queue if greater than 3
- if fft_queue.qsize() >= 1:
- fft_queue = queue.Queue()
+ # Ensure the FFT queue does not overflow
+ while not fft_queue.empty():
+ fft_queue.get()
- fft_queue.put(dfftlist[:315]) # 315 --> bandwidth 3200
+ # Add the current FFT data to the queue for visualization
+ fft_queue.put(dfftlist[:315])
except Exception as err:
print(f"[MDM] calculate_fft: Exception: {err}")
+
def terminate():
log.warning("[SHUTDOWN] terminating audio instance...")
if sd._initialized:
diff --git a/freedata_server/server.py b/freedata_server/server.py
index 20bb10b3..13874809 100644
--- a/freedata_server/server.py
+++ b/freedata_server/server.py
@@ -1,3 +1,4 @@
+
import os
import sys
# we need to add script directory to the sys path for avoiding problems with pip package
@@ -19,7 +20,6 @@ import audio
import service_manager
import state_manager
import websocket_manager
-
import event_manager
import structlog
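
The new calculate_rms_dbfs() maps int16 audio to dBFS via 20 * log10(rms / 32768), and it now runs once per second instead of every few cycles. A small self-contained check of that formula; rms_dbfs restates it outside the module, and the 1 kHz tone sampled at 8 kHz is just an example signal:

    import numpy as np

    def rms_dbfs(samples: np.ndarray) -> float:
        # RMS level of int16 audio in dBFS; -100 stands in for silence.
        rms = np.sqrt(np.mean(np.square(samples, dtype=np.int32), dtype=np.float64))
        return 20 * np.log10(rms / 32768) if rms > 0 else -100

    t = np.arange(800) / 8000.0
    tone = (32767 * np.sin(2 * np.pi * 1000.0 * t)).astype(np.int16)
    print(round(rms_dbfs(tone), 1))  # about -3.0 dBFS, since sine RMS = peak / sqrt(2)
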
From 0c1dafe6eda186abc43761e855a932750f5578c7 Mon Sep 17 00:00:00 2001
From: DJ2LS <75909252+DJ2LS@users.noreply.github.com>
Date: Sat, 1 Mar 2025 20:50:04 +0100
Subject: [PATCH 12/14] first run with adjusted FFT processing
---
freedata_server/audio.py | 82 +++++++++++++++++++++++++---------------
1 file changed, 51 insertions(+), 31 deletions(-)
diff --git a/freedata_server/audio.py b/freedata_server/audio.py
index d1825d87..f103ef7e 100644
--- a/freedata_server/audio.py
+++ b/freedata_server/audio.py
@@ -313,88 +313,108 @@ def calculate_rms_dbfs(data):
# Convert the RMS value to dBFS
return 20 * np.log10(rms / 32768) if rms > 0 else -100
-def calculate_fft(data, fft_queue, states) -> None:
- """
- Perform FFT calculation, update channel status, and manage the FFT queue
- for visualization purposes.
- Parameters:
- - data: numpy.ndarray of type np.int16, representing the audio data.
- - fft_queue: queue.Queue, stores FFT results for visualization.
- - states: An object that holds the current state of the system.
- """
+def calculate_fft(data, fft_queue, states) -> None:
global CHANNEL_BUSY_DELAY, last_rms_time
try:
- # Prepare the data for FFT processing
+ # Prepare the data for FFT processing by ensuring it meets the target length
data = prepare_data_for_fft(data)
- # Perform FFT and compute the power spectrum
+ # Compute the real FFT of the audio data
fftarray = np.fft.rfft(data)
- power_spectrum = np.abs(fftarray) ** 2
- # Calculate the average power and set the detection threshold
- avg_power = np.mean(power_spectrum)
- threshold = avg_power * 20
+ # Calculate the amplitude spectrum in decibels (dB)
+ dfft = 10.0 * np.log10(np.abs(fftarray) + 1e-12) # Adding a small constant to avoid log(0)
+
+ # Compute the average amplitude of the spectrum
+ avg_amplitude = np.mean(dfft)
+
+ # Set the threshold for significant frequency components; adjust the offset as needed
+ threshold = avg_amplitude + 13
+
+ # Identify frequency components that exceed the threshold
+ significant_frequencies = dfft > threshold
# Check if the system is neither transmitting nor receiving
not_transmitting = not states.isTransmitting()
not_receiving = not states.is_receiving_codec2_signal()
- # Compute the logarithmic power spectrum for visualization
- dfft = 10.0 * np.log10(power_spectrum + 1e-12)
-
if not_transmitting:
- # Highlight frequency components exceeding the threshold
- dfft[power_spectrum > threshold] = 100
+ # Highlight significant frequencies in the dfft array
+ dfft[significant_frequencies] = 100
- # Calculate the audio RMS value in dBFS once per second
+ # Get the current time
current_time = time.time()
+
+ # Update the RMS value every second
if current_time - last_rms_time >= 1.0:
+ # Calculate the RMS value in dBFS
audio_dbfs = calculate_rms_dbfs(data)
+
+ # Update the state with the new RMS value
states.set("audio_dbfs", audio_dbfs)
+
+ # Update the last RMS calculation time
last_rms_time = current_time
- # Convert the FFT data to integers for visualization
+ # Convert the dfft array to integers for further processing
dfft = dfft.astype(int)
+
+ # Convert the dfft array to a list for queue insertion
dfftlist = dfft.tolist()
# Initialize the slot busy status list
- slotbusy = [False] * 5
+ slotbusy = [False] * len(SLOT_RANGES)
+
+ # Flag to determine if additional delay should be added
addDelay = False
- # Evaluate each slot to determine if it exceeds the threshold
+ # Iterate over each slot range to detect activity
for slot, (range_start, range_end) in enumerate(SLOT_RANGES):
- slot_power = np.sum(power_spectrum[range_start:range_end])
- if slot_power > threshold and not_transmitting and not_receiving:
+ # Check if any frequency in the slot exceeds the threshold
+ if np.any(significant_frequencies[range_start:range_end]) and not_transmitting and not_receiving:
+ # Mark that additional delay should be added
addDelay = True
+
+ # Set the current slot as busy
slotbusy[slot] = True
+
+ # Increment the slot delay, ensuring it does not exceed the maximum
SLOT_DELAY[slot] = min(SLOT_DELAY[slot] + DELAY_INCREMENT, MAX_DELAY)
else:
+ # Decrement the slot delay, ensuring it does not go below zero
SLOT_DELAY[slot] = max(SLOT_DELAY[slot] - 1, 0)
+
+ # Set the slot busy status based on the current delay
slotbusy[slot] = SLOT_DELAY[slot] > 0
- # Update the channel slot busy status based on slot evaluations
+ # Update the state with the current slot busy statuses
states.set_channel_slot_busy(slotbusy)
if addDelay:
- # If any slot is busy, increase the channel busy delay
+ # Set the channel busy condition due to traffic
states.set_channel_busy_condition_traffic(True)
+
+ # Increment the channel busy delay, ensuring it does not exceed the maximum
CHANNEL_BUSY_DELAY = min(CHANNEL_BUSY_DELAY + DELAY_INCREMENT, MAX_DELAY)
else:
- # If no slots are busy, decrease the channel busy delay
+ # Decrement the channel busy delay, ensuring it does not go below zero
CHANNEL_BUSY_DELAY = max(CHANNEL_BUSY_DELAY - 1, 0)
+
+ # If the channel busy delay has reset, clear the busy condition
if CHANNEL_BUSY_DELAY == 0:
states.set_channel_busy_condition_traffic(False)
- # Ensure the FFT queue does not overflow
+ # Clear any existing items in the FFT queue
while not fft_queue.empty():
fft_queue.get()
- # Add the current FFT data to the queue for visualization
+ # Add the processed dfft list to the FFT queue, limited to the first 315 elements
fft_queue.put(dfftlist[:315])
except Exception as err:
+ # Log any exceptions that occur during the FFT calculation
print(f"[MDM] calculate_fft: Exception: {err}")
From 978a706ad3fcc9485f487bec6472c85982601f5d Mon Sep 17 00:00:00 2001
From: DJ2LS <75909252+DJ2LS@users.noreply.github.com>
Date: Sun, 2 Mar 2025 09:38:16 +0100
Subject: [PATCH 13/14] gui hotfix
---
freedata_gui/src/components/chat_messages_sent.vue | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/freedata_gui/src/components/chat_messages_sent.vue b/freedata_gui/src/components/chat_messages_sent.vue
index 743441c5..4b68e299 100644
--- a/freedata_gui/src/components/chat_messages_sent.vue
+++ b/freedata_gui/src/components/chat_messages_sent.vue
@@ -58,11 +58,11 @@