Performance improvements, chart improvements, and minor UI improvements

This commit is contained in:
Антон
2026-01-09 17:50:45 +03:00
parent 53a3a99309
commit f64f49a7a6
7 changed files with 337 additions and 179 deletions

View File

@@ -243,11 +243,51 @@ class OpenVPNAPI:
pass
try:
# 4. Формирование запроса
# В агрегированных таблицах нет полей rate_mbps, возвращаем 0
# 4. Request Formation
is_aggregated = target_table != 'usage_history'
if is_aggregated:
# High resolution handling for 1h and 3h ranges
is_high_res = False
interval = 0
points_count = 0
if target_table == 'usage_history':
if duration_hours <= 1.1:
is_high_res = True
interval = 30
points_count = 120
elif duration_hours <= 3.1:
is_high_res = True
interval = 60
points_count = 180
elif duration_hours <= 6.1:
is_high_res = True
interval = 120 # 2 minutes
points_count = 180
elif duration_hours <= 12.1:
is_high_res = True
interval = 300 # 5 minutes
points_count = 144
elif duration_hours <= 24.1:
is_high_res = True
interval = 900 # 15 minutes
points_count = 96
if is_high_res:
query = f'''
SELECT
datetime((strftime('%s', uh.timestamp) / {interval}) * {interval}, 'unixepoch') as timestamp,
SUM(uh.bytes_received) as bytes_received,
SUM(uh.bytes_sent) as bytes_sent,
MAX(uh.bytes_received_rate_mbps) as bytes_received_rate_mbps,
MAX(uh.bytes_sent_rate_mbps) as bytes_sent_rate_mbps
FROM usage_history uh
JOIN clients c ON uh.client_id = c.id
WHERE c.common_name = ? AND uh.timestamp BETWEEN ? AND ?
GROUP BY datetime((strftime('%s', uh.timestamp) / {interval}) * {interval}, 'unixepoch')
ORDER BY timestamp ASC
'''
elif is_aggregated:
query = f'''
SELECT
t.timestamp,
@@ -280,13 +320,44 @@ class OpenVPNAPI:
cursor.execute(query, (common_name, s_str, e_str))
columns = [column[0] for column in cursor.description]
data = [dict(zip(columns, row)) for row in cursor.fetchall()]
db_data_list = [dict(zip(columns, row)) for row in cursor.fetchall()]
final_data = db_data_list
if is_high_res:
# Zero-filling
final_data = []
db_data_map = {row['timestamp']: row for row in db_data_list}
# Align to nearest interval
ts_end = end_date.timestamp()
ts_aligned = ts_end - (ts_end % interval)
aligned_end = datetime.utcfromtimestamp(ts_aligned)
# Generate points
start_generated = aligned_end - timedelta(seconds=points_count * interval)
current = start_generated
for _ in range(points_count):
current += timedelta(seconds=interval)
ts_str = current.strftime('%Y-%m-%d %H:%M:%S')
if ts_str in db_data_map:
final_data.append(db_data_map[ts_str])
else:
final_data.append({
'timestamp': ts_str,
'bytes_received': 0,
'bytes_sent': 0,
'bytes_received_rate_mbps': 0,
'bytes_sent_rate_mbps': 0
})
return {
'data': data,
'data': final_data,
'meta': {
'resolution_used': target_table,
'record_count': len(data),
'resolution_used': target_table + ('_hires' if is_high_res else ''),
'record_count': len(final_data),
'start': s_str,
'end': e_str
}
@@ -342,64 +413,105 @@ class OpenVPNAPI:
'traffic_distribution': {'rx': 0, 'tx': 0}
}
# 1. Определяем таблицу и временную метку
target_table = 'usage_history'
# 1. Configuration
hours = 24
interval_seconds = 900 # 15 min default
target_table = 'usage_history'
if range_arg == '7d':
target_table = 'stats_hourly'
hours = 168 # 7 * 24
hours = 168
interval_seconds = 6300 # 105 min -> 96 points
target_table = 'stats_hourly'
elif range_arg == '30d':
target_table = 'stats_6h' # или stats_daily
hours = 720 # 30 * 24
hours = 720
interval_seconds = 27000 # 450 min -> 96 points
target_table = 'stats_hourly' # Fallback to hourly/raw as needed
# Fallback logic for table existence
try:
# Проверка наличия таблицы
try:
cursor.execute(f"SELECT name FROM sqlite_master WHERE type='table' AND name='{target_table}'")
if not cursor.fetchone():
target_table = 'usage_history'
except:
pass
cursor.execute(f"SELECT name FROM sqlite_master WHERE type='table' AND name='{target_table}'")
if not cursor.fetchone():
target_table = 'usage_history' # Fallback to raw if aggregated missing
except:
target_table = 'usage_history'
# 2. Глобальная история (График)
# Для агрегированных таблиц поля rate могут отсутствовать, заменяем нулями
try:
# 2. Global History (Chart)
if target_table == 'usage_history':
rate_cols = "SUM(bytes_received_rate_mbps) as total_rx_rate, SUM(bytes_sent_rate_mbps) as total_tx_rate,"
else:
rate_cols = "0 as total_rx_rate, 0 as total_tx_rate,"
# Aggregation Query
# Group by interval_seconds
query_hist = f'''
SELECT
timestamp,
SUM(bytes_received) as total_rx,
SUM(bytes_sent) as total_tx,
{rate_cols}
COUNT(DISTINCT client_id) as active_count
FROM {target_table}
WHERE timestamp >= datetime('now', '-{hours} hours')
GROUP BY timestamp
datetime((strftime('%s', timestamp) / {interval_seconds}) * {interval_seconds}, 'unixepoch') as timestamp,
SUM(total_rx) as total_rx,
SUM(total_tx) as total_tx,
MAX(total_rx_rate) as total_rx_rate,
MAX(total_tx_rate) as total_tx_rate,
MAX(active_count) as active_count
FROM (
SELECT
timestamp,
SUM(bytes_received) as total_rx,
SUM(bytes_sent) as total_tx,
{rate_cols}
COUNT(DISTINCT client_id) as active_count
FROM {target_table}
WHERE timestamp >= datetime('now', '-{hours} hours')
GROUP BY timestamp
) sub
GROUP BY datetime((strftime('%s', timestamp) / {interval_seconds}) * {interval_seconds}, 'unixepoch')
ORDER BY timestamp ASC
'''
cursor.execute(query_hist)
rows = cursor.fetchall()
if rows:
columns = [col[0] for col in cursor.description]
analytics['global_history_24h'] = [dict(zip(columns, row)) for row in rows]
# Максимум клиентов
max_clients = 0
for row in analytics['global_history_24h']:
if row['active_count'] > max_clients:
max_clients = row['active_count']
analytics['max_concurrent_24h'] = max_clients
# 3. Топ-3 самых активных клиентов (за выбранный период)
# Внимание: для топа всегда берем данные, но запрос может быть тяжелым на usage_history за месяц.
# Лучше использовать агрегаты, если период большой.
columns = [col[0] for col in cursor.description]
db_data = {row[0]: dict(zip(columns, row)) for row in rows}
# Используем ту же таблицу, что и для истории, чтобы согласовать данные
# Post-processing: Zero Fill
analytics['global_history_24h'] = []
now = datetime.utcnow()
# Round down to nearest interval
ts_now = now.timestamp()
ts_aligned = ts_now - (ts_now % interval_seconds)
now_aligned = datetime.utcfromtimestamp(ts_aligned)
# We want exactly 96 points ending at now_aligned
# Start time = now_aligned - (96 * interval)
start_time = now_aligned - timedelta(seconds=96 * interval_seconds)
current = start_time
# Generate exactly 96 points
for _ in range(96):
current += timedelta(seconds=interval_seconds)
ts_str = current.strftime('%Y-%m-%d %H:%M:%S')
if ts_str in db_data:
analytics['global_history_24h'].append(db_data[ts_str])
else:
analytics['global_history_24h'].append({
'timestamp': ts_str,
'total_rx': 0,
'total_tx': 0,
'total_rx_rate': 0,
'total_tx_rate': 0,
'active_count': 0
})
# Max Clients metric
max_clients = 0
for row in analytics['global_history_24h']:
if row.get('active_count', 0) > max_clients:
max_clients = row['active_count']
analytics['max_concurrent_24h'] = max_clients
# 3. Top Clients & 4. Traffic Distribution (Keep existing logic)
# Use same target table
query_top = f'''
SELECT
c.common_name,
@@ -417,7 +529,6 @@ class OpenVPNAPI:
top_cols = [col[0] for col in cursor.description]
analytics['top_clients_24h'] = [dict(zip(top_cols, row)) for row in cursor.fetchall()]
# 4. Распределение трафика
query_dist = f'''
SELECT
SUM(bytes_received) as rx,