Merge branch 'develop' into ls-i18n

pull/903/head
DJ2LS 2025-03-02 09:48:03 +01:00 committed by GitHub
commit 66a5a33be2
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
14 changed files with 445 additions and 171 deletions

View File

@@ -28,7 +28,7 @@
"@popperjs/core": "^2.11.8",
"bootstrap": "^5.3.3",
"bootstrap-icons": "^1.11.3",
"bootstrap-vue-next": "^0.26.7",
"bootstrap-vue-next": "^0.27.0",
"chart.js": "^4.4.3",
"chartjs-plugin-annotation": "^3.0.1",
"core-js": "^3.8.3",
@@ -42,7 +42,7 @@
"install": "^0.13.0",
"js-image-compressor": "^2.0.0",
"marked": "^15.0.3",
"pinia": "^2.1.7",
"pinia": "^3.0.1",
"qth-locator": "^2.1.0",
"topojson-client": "^3.1.0",
"uuid": "^11.0.2",
@@ -59,7 +59,7 @@
"@vue/cli-service": "~5.0.8",
"eslint": "^8.0.0",
"eslint-plugin-vue": "^9.28.0",
"globals": "^15.9.0"
"globals": "^16.0.0"
},
"eslintConfig": {
"root": true,

View File

@@ -58,11 +58,10 @@
<div class="card-footer p-1 border-top-0">
<p class="text p-0 m-0 mb-1 me-1 text-end">
<span class="badge badge-primary mr-2" :class="{
'bg-danger': message.status == 'failed',
'bg-primary': message.status == 'transmitting',
'bg-secondary': message.status == 'transmitted',
'bg-secondary': message.status == 'queued',
<span class="badge mr-2" :class="{
'text-bg-danger': message.status === 'failed',
'text-bg-primary': message.status === 'transmitting',
'text-bg-secondary': message.status === 'transmitted' || message.status === 'queued'
}"
>
{{ message.status }}

View File

@@ -442,6 +442,31 @@ const settings = ref({
</select>
</div>
<!-- Radio Custom Port -->
<div class="input-group input-group-sm mb-1">
<label class="input-group-text w-50 text-wrap">
Radio custom port
<button
type="button"
class="btn btn-link p-0 ms-2"
data-bs-toggle="tooltip"
title="Override the com port of your radio if its not listed above"
>
<i class="bi bi-question-circle"></i>
</button>
</label>
<input
type="text"
class="form-control"
placeholder="settings.remote.RADIO.serial_port.port"
id="rigctldIp"
aria-label="Rigctld IP"
@change="onChange"
v-model="settings.remote.RADIO.serial_port"
/>
</div>
<!-- Serial Speed -->
<div class="input-group input-group-sm mb-1">
<label class="input-group-text w-50 text-wrap">

View File

@@ -27,6 +27,30 @@
</select>
</div>
<!-- PTT Custom Port -->
<div class="input-group input-group-sm mb-1">
<label class="input-group-text w-50 text-wrap">
Custom PTT port
<button
type="button"
class="btn btn-link p-0 ms-2"
data-bs-toggle="tooltip"
title="Override the com port of your radio if its not listed above"
>
<i class="bi bi-question-circle"></i>
</button>
</label>
<input
type="text"
class="form-control"
placeholder="settings.remote.RADIO.ptt_port.port"
id="rigctldIp"
aria-label="Rigctld IP"
@change="onChange"
v-model="settings.remote.RADIO.ptt_port"
/>
</div>
<!-- PTT via DTR Selector -->
<div class="input-group input-group-sm mb-1">
<label class="input-group-text w-50 text-wrap">

View File

@@ -22,39 +22,6 @@ async def get_freedata_message(message_id: str, request: Request):
return api_response(message)
@router.post("/messages", summary="Transmit Message", tags=["FreeDATA"], responses={
200: {
"description": "Message transmitted successfully.",
"content": {
"application/json": {
"example": {
"destination": "XX1XXX-6",
"body": "Hello FreeDATA"
}
}
}
},
404: {
"description": "The requested resource was not found.",
"content": {
"application/json": {
"example": {
"error": "Resource not found."
}
}
}
},
503: {
"description": "Modem not running or busy.",
"content": {
"application/json": {
"example": {
"error": "Modem not running."
}
}
}
}
})
async def post_freedata_message(request: Request):
"""
Transmit a FreeDATA message.
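For reference, a minimal sketch (not part of this commit) of exercising the route documented above; the host, port and router prefix are assumptions, the requests dependency is assumed to be available, and the payload reuses the example that was removed from the responses block:
import requests
API_URL = "http://127.0.0.1:5000"  # assumed FreeDATA server address
payload = {
    "destination": "XX1XXX-6",   # destination callsign, from the removed example
    "body": "Hello FreeDATA"
}
# Adjust the path if the router is mounted under a different prefix in your setup.
resp = requests.post(f"{API_URL}/freedata/messages", json=payload, timeout=10)
print(resp.status_code, resp.json())  # 200 on success, 503 if the modem is not running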

View File

@@ -7,6 +7,7 @@ import structlog
import numpy as np
import queue
import helpers
import time
log = structlog.get_logger("audio")
@@ -250,172 +251,173 @@ def normalize_audio(datalist: np.ndarray) -> np.ndarray:
return normalized_data
# Global variables to manage channel status
CHANNEL_BUSY_DELAY = 0 # Counter for channel busy delay
SLOT_DELAY = [0] * 5 # Counters for delays in each slot
RMS_COUNTER = 0
CHANNEL_BUSY_DELAY = 0
SLOT_DELAY = [0, 0, 0, 0, 0]
# Constants for delay logic
DELAY_INCREMENT = 2 # Amount to increase delay
MAX_DELAY = 200 # Maximum allowable delay
# Predefined frequency ranges (slots) for FFT analysis
# These ranges are based on an FFT length of 800 samples
SLOT_RANGES = [
(0, 65), # Slot 1: Frequency range from 0 to 65
(65, 120), # Slot 2: Frequency range from 65 to 120
(120, 176), # Slot 3: Frequency range from 120 to 176
(176, 231), # Slot 4: Frequency range from 176 to 231
(231, 315) # Slot 5: Frequency range from 231 to 315
]
def prepare_data_for_fft(data, target_length_samples=400):
# Initialize a queue to store FFT results for visualization
fft_queue = queue.Queue()
# Variable to track the time of the last RMS calculation
last_rms_time = 0
def prepare_data_for_fft(data, target_length_samples=800):
"""
Prepare data array for FFT by padding if necessary to match the target length.
Center the data if it's shorter than the target length.
Prepare the input data for FFT by ensuring it meets the required length.
Parameters:
- data: numpy array of np.int16, representing the input data.
- target_length_samples: int, the target length of the data in samples.
- data: numpy.ndarray of type np.int16, representing the audio data.
- target_length_samples: int, the desired length of the data in samples.
Returns:
- numpy array of np.int16, padded and/or centered if necessary.
- numpy.ndarray of type np.int16 with a length of target_length_samples.
"""
# Calculate the current length in samples
current_length_samples = data.size
# Check if the input data type is np.int16
if data.dtype != np.int16:
raise ValueError("Audio data must be of type np.int16")
# Check if padding is needed
if current_length_samples < target_length_samples:
# Calculate total padding needed
total_pad_length = target_length_samples - current_length_samples
# Calculate padding on each side
pad_before = total_pad_length // 2
pad_after = total_pad_length - pad_before
# Pad the data to center it
data_padded = np.pad(data, (pad_before, pad_after), 'constant', constant_values=(0,))
return data_padded
# If data is shorter than the target length, pad with zeros
if len(data) < target_length_samples:
return np.pad(data, (0, target_length_samples - len(data)), 'constant', constant_values=(0,))
else:
# No padding needed, return original data
return data
# If data is longer or equal to the target length, truncate it
return data[:target_length_samples]
def calculate_rms_dbfs(data):
"""
Calculate the Root Mean Square (RMS) value of the audio data and
convert it to dBFS (decibels relative to full scale).
Parameters:
- data: numpy.ndarray of type np.int16, representing the audio data.
Returns:
- float: RMS value in dBFS. Returns -100 if the RMS value is 0.
"""
# Compute the RMS value using int32 to prevent overflow
rms = np.sqrt(np.mean(np.square(data, dtype=np.int32), dtype=np.float64))
# Convert the RMS value to dBFS
return 20 * np.log10(rms / 32768) if rms > 0 else -100
def calculate_fft(data, fft_queue, states) -> None:
"""
Calculate an average signal strength of the channel to assess
whether the channel is "busy."
"""
# Initialize dbfs counter
# rms_counter = 0
# https://gist.github.com/ZWMiller/53232427efc5088007cab6feee7c6e4c
# Fast Fourier Transform, 10*log10(abs) is to scale it to dB
# and make sure it's not imaginary
global RMS_COUNTER, CHANNEL_BUSY_DELAY
global CHANNEL_BUSY_DELAY, last_rms_time
try:
data = prepare_data_for_fft(data, target_length_samples=800)
# Prepare the data for FFT processing by ensuring it meets the target length
data = prepare_data_for_fft(data)
# Compute the real FFT of the audio data
fftarray = np.fft.rfft(data)
# Set value 0 to 1 to avoid division by zero
fftarray[fftarray == 0] = 1
dfft = 10.0 * np.log10(abs(fftarray))
# Calculate the amplitude spectrum in decibels (dB)
dfft = 10.0 * np.log10(np.abs(fftarray) + 1e-12) # Adding a small constant to avoid log(0)
# get average of dfft
avg = np.mean(dfft)
# Compute the average amplitude of the spectrum
avg_amplitude = np.mean(dfft)
# Detect signals which are higher than the
# average + 10 (+10 smoothes the output).
# Data higher than the average must be a signal.
# Therefore we are setting it to 100 so it will be highlighted
# Have to do this when we are not transmitting so our
# own sending data will not affect this too much
if not states.isTransmitting():
dfft[dfft > avg + 15] = 100
# Set the threshold for significant frequency components; adjust the offset as needed
threshold = avg_amplitude + 13
# Calculate audio dbfs
# https://stackoverflow.com/a/9763652
# calculate dbfs every 50 cycles for reducing CPU load
RMS_COUNTER += 1
if RMS_COUNTER > 5:
d = np.frombuffer(data, np.int16).astype(np.float32)
# calculate RMS and then dBFS
# https://dsp.stackexchange.com/questions/8785/how-to-compute-dbfs
# try except for avoiding runtime errors by division/0
try:
rms = int(np.sqrt(np.max(d ** 2)))
if rms == 0:
raise ZeroDivisionError
audio_dbfs = 20 * np.log10(rms / 32768)
# Identify frequency components that exceed the threshold
significant_frequencies = dfft > threshold
# Check if the system is neither transmitting nor receiving
not_transmitting = not states.isTransmitting()
not_receiving = not states.is_receiving_codec2_signal()
if not_transmitting:
# Highlight significant frequencies in the dfft array
dfft[significant_frequencies] = 100
# Get the current time
current_time = time.time()
# Update the RMS value every second
if current_time - last_rms_time >= 1.0:
# Calculate the RMS value in dBFS
audio_dbfs = calculate_rms_dbfs(data)
# Update the state with the new RMS value
states.set("audio_dbfs", audio_dbfs)
except Exception as e:
states.set("audio_dbfs", -100)
RMS_COUNTER = 0
# Update the last RMS calculation time
last_rms_time = current_time
# Convert data to int to decrease size
# Convert the dfft array to integers for further processing
dfft = dfft.astype(int)
# Create list of dfft
# Convert the dfft array to a list for queue insertion
dfftlist = dfft.tolist()
# Reduce area where the busy detection is enabled
# We want to have this in correlation with mode bandwidth
# TODO This is not correctly and needs to be checked for correct maths
# dfftlist[0:1] = 10,15Hz
# Bandwidth[Hz] / 10,15
# narrowband = 563Hz = 56
# wideband = 1700Hz = 167
# 1500Hz = 148
# 2700Hz = 266
# 3200Hz = 315
# Initialize slot delay counters
DELAY_INCREMENT = 2
MAX_DELAY = 200
# Initialize the slot busy status list
slotbusy = [False] * len(SLOT_RANGES)
# Main logic
slot = 0
slot1 = [0, 65]
slot2 = [65, 120]
slot3 = [120, 176]
slot4 = [176, 231]
slot5 = [231, len(dfftlist)]
slotbusy = [False, False, False, False, False]
# Set to true if we should increment delay count; else false to decrement
# Flag to determine if additional delay should be added
addDelay = False
for range in [slot1, slot2, slot3, slot4, slot5]:
range_start = range[0]
range_end = range[1]
# define the area, we are detecting busy state
slotdfft = dfft[range_start:range_end]
# Check for signals higher than average by checking for "100"
# If we have a signal, increment our channel_busy delay counter
# so we have a smoother state toggle
if np.sum(slotdfft[slotdfft > avg + 15]) >= 200 and not states.isTransmitting() and not states.is_receiving_codec2_signal():
# Iterate over each slot range to detect activity
for slot, (range_start, range_end) in enumerate(SLOT_RANGES):
# Check if any frequency in the slot exceeds the threshold
if np.any(significant_frequencies[range_start:range_end]) and not_transmitting and not_receiving:
# Mark that additional delay should be added
addDelay = True
# Set the current slot as busy
slotbusy[slot] = True
# Increment the slot delay, ensuring it does not exceed the maximum
SLOT_DELAY[slot] = min(SLOT_DELAY[slot] + DELAY_INCREMENT, MAX_DELAY)
else:
# Decrement the slot delay, ensuring it does not go below zero
SLOT_DELAY[slot] = max(SLOT_DELAY[slot] - 1, 0)
if SLOT_DELAY[slot] == 0:
slotbusy[slot] = False
else:
slotbusy[slot] = True
# Set the slot busy status based on the current delay
slotbusy[slot] = SLOT_DELAY[slot] > 0
# increment slot
slot += 1
# Update the state with the current slot busy statuses
states.set_channel_slot_busy(slotbusy)
if addDelay:
# Limit delay counter to a maximum of 200. The higher this value,
# the longer we will wait until releasing state
# Set the channel busy condition due to traffic
states.set_channel_busy_condition_traffic(True)
# Increment the channel busy delay, ensuring it does not exceed the maximum
CHANNEL_BUSY_DELAY = min(CHANNEL_BUSY_DELAY + DELAY_INCREMENT, MAX_DELAY)
else:
# Decrement channel busy counter if no signal has been detected.
# Decrement the channel busy delay, ensuring it does not go below zero
CHANNEL_BUSY_DELAY = max(CHANNEL_BUSY_DELAY - 1, 0)
# When our channel busy counter reaches 0, toggle state to False
# If the channel busy delay has reset, clear the busy condition
if CHANNEL_BUSY_DELAY == 0:
states.set_channel_busy_condition_traffic(False)
# erase queue if greater than 3
if fft_queue.qsize() >= 1:
fft_queue = queue.Queue()
# Clear any existing items in the FFT queue
while not fft_queue.empty():
fft_queue.get()
fft_queue.put(dfftlist[:315]) # 315 --> bandwidth 3200
# Add the processed dfft list to the FFT queue, limited to the first 315 elements
fft_queue.put(dfftlist[:315])
except Exception as err:
# Log any exceptions that occur during the FFT calculation
print(f"[MDM] calculate_fft: Exception: {err}")
def terminate():
log.warning("[SHUTDOWN] terminating audio instance...")
if sd._initialized:
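For illustration only, a self-contained sketch (not part of this commit) of the reworked busy-slot detection above, run on synthetic int16 audio; the 800-sample FFT length, the avg + 13 threshold and the slot ranges are taken from the diff, while the 8 kHz sample rate and everything else are simplifying assumptions:
import numpy as np
SLOT_RANGES = [(0, 65), (65, 120), (120, 176), (176, 231), (231, 315)]
def busy_slots(samples, threshold_offset=13.0):
    """Return one busy flag per slot for a block of int16 audio samples."""
    # Pad or truncate to the 800-sample FFT length used in audio.py.
    data = np.zeros(800, dtype=np.int16)
    data[:min(len(samples), 800)] = samples[:800]
    # Amplitude spectrum in dB, with a small constant to avoid log(0).
    dfft = 10.0 * np.log10(np.abs(np.fft.rfft(data)) + 1e-12)
    # A bin is "significant" when it sits well above the average level.
    significant = dfft > (np.mean(dfft) + threshold_offset)
    return [bool(np.any(significant[start:end])) for start, end in SLOT_RANGES]
rate = 8000  # assumed rate; at 800 samples this gives roughly 10 Hz per FFT bin
t = np.arange(800) / rate
tone = (0.5 * 32767 * np.sin(2 * np.pi * 1000 * t)).astype(np.int16)
print(busy_slots(tone))                      # the 1 kHz tone should mark the second slot busy
print(busy_slots(np.zeros(800, np.int16)))   # silence: all slots idle
The new calculate_rms_dbfs() helper reduces to 20 * log10(rms / 32768): digital silence is reported as -100 dBFS, while a full-scale signal lands near 0 dBFS. The hysteresis around SLOT_DELAY and CHANNEL_BUSY_DELAY (increment by 2 per busy block, cap at 200, decay by 1) is what keeps the busy flags from flickering between blocks.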

View File

@@ -230,7 +230,7 @@ class CONFIG:
# self.log.info("[CFG] reading...")
if not self.config_exists():
return False
try:
# at first just copy the config as read from file
result = {s: dict(self.parser.items(s)) for s in self.parser.sections()}
@@ -239,5 +239,8 @@
for setting in result[section]:
result[section][setting] = self.handle_setting(
section, setting, result[section][setting], False)
return result
except Exception as conferror:
self.log.error("[CFG] reading logfile", e=conferror)
return False
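Not part of the commit: a minimal standalone sketch of the pattern now guarded above, where every configparser section is flattened into a nested dict and a corrupt file yields False instead of an unhandled exception (the section and key used at the end are taken from elsewhere in this diff):
import configparser
def read_config(path):
    parser = configparser.ConfigParser()
    try:
        parser.read(path)
        # Copy every section into a plain dict; the real CONFIG.read() additionally
        # runs each value through handle_setting() for type conversion.
        return {section: dict(parser.items(section)) for section in parser.sections()}
    except Exception as conferror:
        print(f"[CFG] error reading config file: {conferror}")
        return False
config = read_config("config.ini")
if config:
    print(config["STATION"]["enable_explorer"])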

View File

@@ -0,0 +1,150 @@
#! python
#
# Enumerate serial ports on Windows including a human readable description
# and hardware information using winreg.
#
# Using winreg helps find virtual comports
try:
# Python 3.X
import winreg
except ImportError:
# Python 2.7 compatibility
try:
import _winreg as winreg
except ImportError:
winreg = None
from serial.tools import list_ports_common
from serial.tools import list_ports_windows
SERIAL_REGISTRY_PATH = 'HARDWARE\\DEVICEMAP\\SERIALCOMM'
def regval_to_listport(winport):
"""Convert a windows port from registry key to pyserial's ListPortInfo.
Args:
winport (tuple): Windows registry value (description, device, value).
Returns:
listport (ListPortInfo): comport device details.
"""
# Create the ListPortInfo
description, device, _ = winport
listport = list_ports_common.ListPortInfo(device)
# Format the description like other ListPortInfo
description = description.replace('\\Device\\', '')
listport.description = "{} ({})".format(description, device)
return listport
# end regval_to_listport
def winreg_comports():
"""Return windows comports found in the registry.
See Also:
list_ports_winreg.comports(), list_ports_winreg.comports_list(),
list_ports_windows.comports()
Note:
This should include virtual comports, and it is significantly faster
than list_ports_windows.comports(). However, list_ports_windows has far
more information. comports() contains all list_ports_windows.comports()
and winreg_comports() that were not found from list_ports_windows.
Yields:
comport (ListPortInfo): comport device details found in the registry.
"""
try:
# Get the Serial Coms registry
key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, SERIAL_REGISTRY_PATH)
# Get key info - number of values (subkeys, num_vals, last_modified)
num_values = winreg.QueryInfoKey(key)[1]
# Make a generator of comports
for i in range(num_values):
# get registry value for the comport
value = winreg.EnumValue(key, i)
yield regval_to_listport(value)
# Close the registry key
winreg.CloseKey(key)
except (AttributeError, WindowsError, EnvironmentError):
# winreg is None or there was a key error
pass
# end winreg_comports
def comports_list():
"""Return a list of comports found from list_ports_windows and comports
found in the window registry.
See Also:
list_ports_winreg.comports(), list_ports_winreg.winreg_comports(),
list_ports_windows.comports()
Note:
This should include virtual comports. This method contains all
list_ports_windows.comports() and winreg_comports() that were not found
from list_ports_windows.
Returns:
comports (list): List of ListPortInfo comport details.
"""
comports = list(list_ports_windows.comports())
comports[len(comports):] = [li for li in winreg_comports()
if li not in comports]
return comports
# end comports_list
def comports(include_links=False):
"""Generator for comports found from list ports windows and comports found
in the windows registry.
See Also:
list_ports_winreg.comports_list(), list_ports_winreg.winreg_comports(),
list_ports_windows.comports()
Note:
This should include virtual comports. This method contains all
list_ports_windows.comports() and winreg_comports() that were not found
from list_ports_windows.
Yields:
comport (ListPortInfo): Comport details.
Returns:
comports (generator): Generator of ListPortInfo comports.
"""
existing = []
for comport in list_ports_windows.comports():
existing.append(comport)
yield comport
for li in winreg_comports():
if li not in existing:
existing.append(li)
yield li
# end comports
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# test
if __name__ == '__main__':
for port, desc, hwid in sorted(comports()):
print("%s: %s [%s]" % (port, desc, hwid))

View File

@@ -71,3 +71,87 @@ class DatabaseManagerAttachments(DatabaseManager):
return None
finally:
session.remove()
def delete_attachments_by_message_id(self, message_id):
"""
Deletes attachment associations for a given message ID.
For each attachment linked to the message:
- If the attachment is linked to more than one message, only the association for this message is deleted.
- If the attachment is linked only to this message, both the association and the attachment record are deleted.
Parameters:
message_id (str): The ID of the message whose attachment associations should be deleted.
Returns:
bool: True if the deletion was successful, False otherwise.
"""
session = self.get_thread_scoped_session()
try:
# Find all attachment associations for the given message ID.
links = session.query(MessageAttachment).filter_by(message_id=message_id).all()
if not links:
self.log(f"No attachments linked with message ID {message_id} found.")
return True
for link in links:
# Count how many associations exist for this attachment.
link_count = session.query(MessageAttachment).filter_by(attachment_id=link.attachment_id).count()
if link_count > 1:
# More than one link exists, so only remove the association.
session.delete(link)
self.log(
f"Deleted link for attachment '{link.attachment.name}' from message {message_id} (other links exist).")
else:
# Only one link exists, so delete both the association and the attachment.
session.delete(link)
session.delete(link.attachment)
self.log(f"Deleted attachment '{link.attachment.name}' from message {message_id} (only link).")
session.commit()
return True
except Exception as e:
session.rollback()
self.log(f"Error deleting attachments for message ID {message_id}: {e}", isWarning=True)
return False
finally:
session.remove()
def clean_orphaned_attachments(self):
"""
Checks for orphaned attachments in the database, i.e. attachments that have no
MessageAttachment links to any messages, and deletes them.
Returns:
dict: A summary dictionary with the status and the count of deleted attachments, or None on error.
"""
session = self.get_thread_scoped_session()
try:
orphaned = []
# Get all attachments in the database.
attachments = session.query(Attachment).all()
for attachment in attachments:
# Count the number of MessageAttachment links for this attachment.
link_count = session.query(MessageAttachment).filter_by(attachment_id=attachment.id).count()
if link_count == 0:
orphaned.append(attachment)
for attachment in orphaned:
self.log(f"Deleting orphaned attachment: {attachment.name}")
session.delete(attachment)
self.log(f"Checked for orphaned attachments")
session.commit()
return {'status': 'success', 'deleted_count': len(orphaned)}
except Exception as e:
session.rollback()
self.log(f"Error checking orphaned attachments: {e}", isWarning=True)
return None
finally:
session.remove()
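A rough usage sketch (not part of this commit) of the two new helpers; the constructor call mirrors the line added to main() further below, event_manager is assumed to be in scope, and the message ID is a placeholder:
from message_system_db_attachments import DatabaseManagerAttachments
attachments_manager = DatabaseManagerAttachments(event_manager)
# Remove this message's attachment links: attachments still referenced by other
# messages keep their records, attachments referenced only here are deleted entirely.
attachments_manager.delete_attachments_by_message_id("some-message-id")
# Sweep attachments that no longer have any MessageAttachment link at all,
# e.g. once at startup (see the main() change further below).
summary = attachments_manager.clean_orphaned_attachments()
print(summary)  # {'status': 'success', 'deleted_count': <n>} on success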

View File

@@ -222,23 +222,30 @@ class DatabaseManagerMessages(DatabaseManager):
return None
def delete_message(self, message_id):
# Delete attachment links associated with this message.
# This call will check each attachment link:
# - If the attachment is used by other messages, only the link is removed.
# - If the attachment is solely linked to this message, the attachment record is deleted.
self.attachments_manager.delete_attachments_by_message_id(message_id)
session = self.get_thread_scoped_session()
try:
message = session.query(P2PMessage).filter_by(id=message_id).first()
if message:
session.delete(message)
session.commit()
self.log(f"Deleted: {message_id}")
self.event_manager.freedata_message_db_change(message_id=message_id)
return {'status': 'success', 'message': f'Message {message_id} deleted'}
else:
return {'status': 'failure', 'message': 'Message not found'}
except Exception as e:
session.rollback()
self.log(f"Error deleting message with ID {message_id}: {e}", isWarning=True)
return {'status': 'failure', 'message': 'error deleting message'}
finally:
session.remove()

View File

@@ -88,6 +88,7 @@ class ScheduleManager:
print(e)
def push_to_explorer(self):
self.config = self.config_manager.read()
if self.config['STATION']['enable_explorer'] and self.state_manager.is_modem_running:
try:

View File

@@ -1,9 +1,18 @@
import serial.tools.list_ports
import helpers
import sys
def get_ports():
serial_devices = []
if sys.platform == 'win32':
import list_ports_winreg
ports = list_ports_winreg.comports(include_links=False)
else:
ports = serial.tools.list_ports.comports(include_links=False)
for port, desc, hwid in ports:
# calculate hex of hwid if we have unique names
crc_hwid = helpers.get_crc_16(bytes(hwid, encoding="utf-8"))

View File

@@ -1,3 +1,4 @@
import os
import sys
# we need to add script directory to the sys path for avoiding problems with pip package
@@ -19,12 +20,12 @@ import audio
import service_manager
import state_manager
import websocket_manager
import event_manager
import structlog
from message_system_db_manager import DatabaseManager
from message_system_db_attachments import DatabaseManagerAttachments
from schedule_manager import ScheduleManager
from api.general import router as general_router
@@ -206,6 +207,8 @@ def main():
app.modem_service.put("start")
DatabaseManager(app.event_manager).initialize_default_values()
DatabaseManager(app.event_manager).database_repair_and_cleanup()
DatabaseManagerAttachments(app.event_manager).clean_orphaned_attachments()
app.wsm = websocket_manager.wsm()
app.wsm.startWorkerThreads(app)

View File

@@ -9,7 +9,7 @@ asyncio
chardet
colorama
ordered-set
nuitka<=2.6.2
nuitka<=2.6.7
pyinstaller
websocket-client
fastapi[standard]