2026-01-09 01:05:50 +03:00
|
|
|
|
import sqlite3
|
|
|
|
|
|
import configparser
|
|
|
|
|
|
from datetime import datetime, timedelta, timezone
|
2026-01-12 11:44:50 +03:00
|
|
|
|
from flask import Flask, jsonify, request, send_file
|
2026-01-09 01:05:50 +03:00
|
|
|
|
from flask_cors import CORS
|
|
|
|
|
|
import logging
|
|
|
|
|
|
import subprocess
|
|
|
|
|
|
import os
|
|
|
|
|
|
from pathlib import Path
|
|
|
|
|
|
import re
|
|
|
|
|
|
from db import DatabaseManager
|
2026-01-12 11:44:50 +03:00
|
|
|
|
from pki_manager import PKIManager
|
|
|
|
|
|
from config_manager import ConfigManager
|
|
|
|
|
|
from service_manager import ServiceManager
|
|
|
|
|
|
import io
|
2026-01-09 01:05:50 +03:00
|
|
|
|
|
|
|
|
|
|
# Application-wide logging configuration.
logging.basicConfig(
    format='%(asctime)s - %(levelname)s - %(message)s',
    level=logging.INFO,
)
logger = logging.getLogger(__name__)

app = Flask(__name__)
CORS(app)  # Enable CORS for all routes
|
|
|
|
|
|
|
|
|
|
|
|
class OpenVPNAPI:
|
|
|
|
|
|
def __init__(self, config_file='config.ini'):
    """Load configuration and wire up the database, PKI, config and service managers.

    :param config_file: path to the INI configuration file.
    """
    self.db_manager = DatabaseManager(config_file)
    self.config = configparser.ConfigParser()
    self.config.read(config_file)

    # Filesystem locations, with the same fallbacks the Settings UI expects.
    cfg = self.config
    self.certificates_path = cfg.get('certificates', 'certificates_path', fallback='/etc/openvpn/certs')
    self.easyrsa_path = cfg.get('pki', 'easyrsa_path', fallback='/etc/openvpn/easy-rsa')
    self.pki_path = cfg.get('pki', 'pki_path', fallback='/etc/openvpn/pki')  # Fixed default to match Settings
    self.templates_path = cfg.get('api', 'templates_path', fallback='templates')
    self.server_config_dir = cfg.get('server', 'config_dir', fallback='/etc/openvpn')
    # Specific server.conf file inside the config dir.
    self.server_config_path = cfg.get('server', 'config_path', fallback=os.path.join(self.server_config_dir, 'server.conf'))
    self.public_ip = cfg.get('openvpn_monitor', 'public_ip', fallback='')

    # File extensions treated as certificates when scanning certificates_path.
    self.cert_extensions = cfg.get('certificates', 'certificate_extensions', fallback='crt,pem,key').split(',')
    # Per-file parse cache keyed by path, invalidated via mtime.
    self._cert_cache = {}

    # Managers
    self.pki = PKIManager(self.easyrsa_path, self.pki_path)
    self.conf_mgr = ConfigManager(self.templates_path, self.server_config_dir)
    self.conf_mgr.server_conf_path = Path(self.server_config_path)  # Override with specific path
    self.service = ServiceManager('openvpn')  # Or openvpn@server for systemd multi-instance
|
2026-01-09 01:05:50 +03:00
|
|
|
|
|
|
|
|
|
|
def get_db_connection(self):
    """Return a fresh connection from the underlying DatabaseManager."""
    manager = self.db_manager
    return manager.get_connection()
|
|
|
|
|
|
|
|
|
|
|
|
# --- БЛОК РАБОТЫ С СЕРТИФИКАТАМИ (Оставлен без изменений) ---
|
|
|
|
|
|
def parse_openssl_date(self, date_str):
    """Parse an OpenSSL textual date (e.g. 'Jan  1 00:00:00 2030 GMT') into a datetime.

    OpenSSL pads single-digit days with an extra space; the tokens are
    split and re-joined so strptime always sees a consistent layout.

    :param date_str: date string as printed by ``openssl x509 -noout -text``.
    :return: parsed naive datetime, or ``datetime.min`` when the string
             cannot be parsed at all.
    """
    try:
        parts = date_str.split()
        # Fix: a malformed string with fewer than two tokens used to raise
        # IndexError here (only ValueError was handled) and crash the caller.
        if len(parts[1]) == 1:
            # Re-pad single-digit day so '%d' matches.
            parts[1] = f' {parts[1]}'
        normalized_date = ' '.join(parts)
        return datetime.strptime(normalized_date, '%b %d %H:%M:%S %Y GMT')
    except (ValueError, IndexError):
        try:
            # Fallback: let strptime consume the timezone token itself.
            return datetime.strptime(date_str, '%b %d %H:%M:%S %Y %Z')
        except ValueError:
            logger.warning(f"Could not parse date: {date_str}")
            return datetime.min
|
|
|
|
|
|
|
|
|
|
|
|
def calculate_days_remaining(self, not_after_str):
    """Return a human-readable validity string for a certificate expiry date.

    :param not_after_str: expiry date text from OpenSSL output, or 'N/A'.
    :return: "N days", "Expired (N days ago)", or 'N/A' when unknown.
    """
    if not_after_str == 'N/A':
        return 'N/A'
    try:
        expires = self.parse_openssl_date(not_after_str)
        if expires == datetime.min:
            return 'N/A'
        delta = (expires - datetime.now()).days
        if delta >= 0:
            return f"{delta} days"
        return f"Expired ({abs(delta)} days ago)"
    except Exception:
        # Any parsing/arithmetic problem degrades to the unknown marker.
        return 'N/A'
|
|
|
|
|
|
|
|
|
|
|
|
def extract_cert_info(self, cert_file):
    """Parse a certificate file via ``openssl x509`` and return its metadata.

    :param cert_file: path to the certificate file on disk.
    :return: dict with subject/issuer/date/type/state fields, or None when
             the file cannot be parsed (error is logged).
    """
    try:
        result = subprocess.run(['openssl', 'x509', '-in', cert_file, '-noout', '-text'],
                                capture_output=True, text=True, check=True)
        output = result.stdout

        data = {'file': os.path.basename(cert_file), 'file_path': cert_file, 'subject': 'N/A',
                'issuer': 'N/A', 'not_after': 'N/A', 'not_before': 'N/A', 'serial': 'N/A', 'type': 'Unknown'}

        is_ca = False
        extended_usage = ""

        for line in output.split('\n'):
            line = line.strip()
            if line.startswith('Subject:'):
                data['subject'] = line.split('Subject:', 1)[1].strip()
                cn_match = re.search(r'CN\s*=\s*([^,]+)', data['subject'])
                if cn_match:
                    data['common_name'] = cn_match.group(1).strip()
            elif 'Not After' in line:
                data['not_after'] = line.split(':', 1)[1].strip()
            elif 'Not Before' in line:
                data['not_before'] = line.split(':', 1)[1].strip()
            elif 'Serial Number:' in line:
                data['serial'] = line.split(':', 1)[1].strip()
            elif 'CA:TRUE' in line:
                is_ca = True
            elif 'TLS Web Server Authentication' in line:
                extended_usage += "Server "
            elif 'TLS Web Client Authentication' in line:
                extended_usage += "Client "

        # Determine type from basic constraints / extended key usage,
        # with a CN-based heuristic as a last resort.
        if is_ca:
            data['type'] = 'CA'
        elif 'Server' in extended_usage:
            data['type'] = 'Server'
        elif 'Client' in extended_usage:
            data['type'] = 'Client'
        elif 'server' in data.get('common_name', '').lower():
            data['type'] = 'Server'
        else:
            data['type'] = 'Client'  # Default to client if ambiguous

        # Parse the expiry once and reuse it for both the sort key and the
        # UI field (the original parsed the same string twice).
        if data['not_after'] != 'N/A':
            expires_dt = self.parse_openssl_date(data['not_after'])
            data['sort_date'] = expires_dt.isoformat()
            data['expires_iso'] = expires_dt.isoformat()
        else:
            data['sort_date'] = datetime.min.isoformat()

        if data['not_before'] != 'N/A':
            data['issued_iso'] = self.parse_openssl_date(data['not_before']).isoformat()

        data['days_remaining'] = self.calculate_days_remaining(data['not_after'])
        data['is_expired'] = 'Expired' in data['days_remaining']

        # Simple validity state for the UI.
        data['state'] = 'Expired' if data['is_expired'] else 'Valid'

        return data
    except Exception as e:
        logger.error(f"Error processing {cert_file}: {e}")
        return None
|
|
|
|
|
|
|
|
|
|
|
|
def get_certificates_info(self):
    """Scan ``certificates_path`` recursively and return parsed certificate metadata.

    Per-file results are cached and invalidated by mtime; cache entries for
    files that disappeared from disk are pruned on every call.

    :return: list of metadata dicts (see extract_cert_info); [] when the
             path does not exist.
    """
    cert_path = Path(self.certificates_path)
    if not cert_path.exists():
        return []

    cert_files = []
    for ext in self.cert_extensions:
        ext = ext.strip()
        if not ext:
            # Fix: a blank entry (e.g. a trailing comma in the config value)
            # used to produce a bogus '*.' glob pattern; skip it instead.
            continue
        cert_files.extend(cert_path.rglob(f'*.{ext}'))

    current_valid_files = set()
    cert_data = []

    for cert_file_path in cert_files:
        cert_file = str(cert_file_path)
        current_valid_files.add(cert_file)

        try:
            mtime = os.path.getmtime(cert_file)

            # Serve from cache when the file is unchanged.
            cached = self._cert_cache.get(cert_file)
            if cached and cached['mtime'] == mtime:
                cert_data.append(cached['data'])
            else:
                # Parse and update cache.
                parsed_data = self.extract_cert_info(cert_file)
                if parsed_data:
                    self._cert_cache[cert_file] = {
                        'mtime': mtime,
                        'data': parsed_data
                    }
                    cert_data.append(parsed_data)
        except OSError:
            # File vanished between globbing and stat; skip it.
            continue

    # Prune cache entries for deleted files.
    for cached_file in list(self._cert_cache.keys()):
        if cached_file not in current_valid_files:
            del self._cert_cache[cached_file]

    return cert_data
|
|
|
|
|
|
# -----------------------------------------------------------
|
|
|
|
|
|
|
|
|
|
|
|
def get_current_stats(self):
    """Get current statistics for all clients.

    Instead of reading the single latest sample ("ORDER BY timestamp DESC
    LIMIT 1"), the current rate is the MAX rate observed over the last
    30 seconds of usage_history.  This filters out spurious zeros caused by
    log-write desynchronization and reflects real channel throughput.
    (Note: earlier comments claimed a 2-minute window; the SQL actually
    uses 30 seconds, and the documentation now matches the code.)

    :return: list of per-client dicts; empty list on error.
    """
    conn = self.get_db_connection()
    cursor = conn.cursor()

    try:
        cursor.execute('''
            SELECT
                c.common_name,
                c.real_address,
                c.status,
                CASE
                    WHEN c.status = 'Active' THEN 'N/A'
                    ELSE strftime('%Y-%m-%d %H:%M:%S', c.last_activity)
                END as last_activity,
                c.total_bytes_received,
                c.total_bytes_sent,
                -- Peak download rate over the last 30 seconds
                (SELECT MAX(uh.bytes_received_rate_mbps)
                 FROM usage_history uh
                 WHERE uh.client_id = c.id
                 AND uh.timestamp >= datetime('now', '-30 seconds')) as current_recv_rate,
                -- Peak upload rate over the last 30 seconds
                (SELECT MAX(uh.bytes_sent_rate_mbps)
                 FROM usage_history uh
                 WHERE uh.client_id = c.id
                 AND uh.timestamp >= datetime('now', '-30 seconds')) as current_sent_rate,
                strftime('%Y-%m-%d %H:%M:%S', c.updated_at) as last_updated
            FROM clients c
            ORDER BY c.status DESC, c.common_name
        ''')

        columns = [column[0] for column in cursor.description]
        data = []

        for row in cursor.fetchall():
            data.append(dict(zip(columns, row)))

        return data

    except Exception as e:
        logger.error(f"Error fetching data: {e}")
        return []
    finally:
        conn.close()
|
|
|
|
|
|
|
|
|
|
|
|
def get_client_history(self, common_name, start_date=None, end_date=None, resolution='auto'):
    """Fetch usage history for a client with TSDB-style aggregation.

    Automatically picks the backing table (raw, hourly, 6h, daily) based on
    the requested period, and zero-fills short high-resolution ranges so
    charts always receive a fixed number of points.

    :param common_name: client CN to look up.
    :param start_date: datetime or 'YYYY-MM-DD HH:MM:SS' string; defaults to 24h ago.
    :param end_date: datetime or string; defaults to now (UTC).
    :param resolution: 'auto', 'raw', '5min', '15min', 'hourly', '6h' or 'daily'.
    :return: {'data': [...], 'meta': {...}} or {'data': [], 'error': ...} on failure.
    """
    conn = self.get_db_connection()
    cursor = conn.cursor()

    # 1. Establish the time window (naive UTC wall clock, matching SQLite
    # storage).  datetime.utcnow() is deprecated; this is its exact equivalent.
    if not end_date:
        end_date = datetime.now(timezone.utc).replace(tzinfo=None)
    if not start_date:
        start_date = end_date - timedelta(hours=24)  # Default window: one day

    # Accept string dates from callers; leave them untouched if malformed.
    if isinstance(start_date, str):
        try:
            start_date = datetime.strptime(start_date, '%Y-%m-%d %H:%M:%S')
        except ValueError:
            pass
    if isinstance(end_date, str):
        try:
            end_date = datetime.strptime(end_date, '%Y-%m-%d %H:%M:%S')
        except ValueError:
            pass

    duration_hours = (end_date - start_date).total_seconds() / 3600

    # 2. Map resolutions onto backing tables.
    table_map = {
        'raw': 'usage_history',
        '5min': 'stats_5min',
        '15min': 'stats_15min',
        'hourly': 'stats_hourly',
        '6h': 'stats_6h',
        'daily': 'stats_daily'
    }

    target_table = 'usage_history'

    # 3. Table selection.
    if resolution == 'auto':
        if duration_hours <= 24:
            target_table = 'usage_history'   # Raw data (one-day chart)
        elif duration_hours <= 168:          # up to 7 days
            target_table = 'stats_hourly'    # Hourly buckets
        elif duration_hours <= 2160:         # up to 3 months
            target_table = 'stats_6h'        # 6-hour buckets
        else:
            target_table = 'stats_daily'     # Daily buckets
    elif resolution in table_map:
        target_table = table_map[resolution]

    # Fall back to raw data when the aggregate table is missing (no migration).
    try:
        cursor.execute(f"SELECT name FROM sqlite_master WHERE type='table' AND name='{target_table}'")
        if not cursor.fetchone():
            logger.warning(f"Table {target_table} missing, fallback to usage_history")
            target_table = 'usage_history'
    except Exception:
        pass

    try:
        # 4. Request formation.  Table names come from the whitelist above,
        # so interpolating them into the SQL text is safe.
        is_aggregated = target_table != 'usage_history'

        # High-resolution handling: bucket raw samples for short ranges.
        is_high_res = False
        interval = 0
        points_count = 0

        if target_table == 'usage_history':
            if duration_hours <= 1.1:
                is_high_res, interval, points_count = True, 30, 120
            elif duration_hours <= 3.1:
                is_high_res, interval, points_count = True, 60, 180
            elif duration_hours <= 6.1:
                is_high_res, interval, points_count = True, 120, 180   # 2-minute buckets
            elif duration_hours <= 12.1:
                is_high_res, interval, points_count = True, 300, 144   # 5-minute buckets
            elif duration_hours <= 24.1:
                is_high_res, interval, points_count = True, 900, 96    # 15-minute buckets

        if is_high_res:
            query = f'''
                SELECT
                    datetime((strftime('%s', uh.timestamp) / {interval}) * {interval}, 'unixepoch') as timestamp,
                    SUM(uh.bytes_received) as bytes_received,
                    SUM(uh.bytes_sent) as bytes_sent,
                    MAX(uh.bytes_received_rate_mbps) as bytes_received_rate_mbps,
                    MAX(uh.bytes_sent_rate_mbps) as bytes_sent_rate_mbps
                FROM usage_history uh
                JOIN clients c ON uh.client_id = c.id
                WHERE c.common_name = ? AND uh.timestamp BETWEEN ? AND ?
                GROUP BY datetime((strftime('%s', uh.timestamp) / {interval}) * {interval}, 'unixepoch')
                ORDER BY timestamp ASC
            '''
        elif is_aggregated:
            query = f'''
                SELECT
                    t.timestamp,
                    t.bytes_received,
                    t.bytes_sent,
                    0 as bytes_received_rate_mbps,
                    0 as bytes_sent_rate_mbps
                FROM {target_table} t
                JOIN clients c ON t.client_id = c.id
                WHERE c.common_name = ? AND t.timestamp BETWEEN ? AND ?
                ORDER BY t.timestamp ASC
            '''
        else:
            query = f'''
                SELECT
                    uh.timestamp,
                    uh.bytes_received,
                    uh.bytes_sent,
                    uh.bytes_received_rate_mbps,
                    uh.bytes_sent_rate_mbps
                FROM usage_history uh
                JOIN clients c ON uh.client_id = c.id
                WHERE c.common_name = ? AND uh.timestamp BETWEEN ? AND ?
                ORDER BY uh.timestamp ASC
            '''

        s_str = start_date.strftime('%Y-%m-%d %H:%M:%S')
        e_str = end_date.strftime('%Y-%m-%d %H:%M:%S')

        cursor.execute(query, (common_name, s_str, e_str))

        columns = [column[0] for column in cursor.description]
        db_data_list = [dict(zip(columns, row)) for row in cursor.fetchall()]

        final_data = db_data_list

        if is_high_res:
            # Zero-filling: emit exactly points_count buckets, pulling DB
            # rows where present and zeros elsewhere.
            final_data = []
            db_data_map = {row['timestamp']: row for row in db_data_list}

            # Align the window end to the bucket interval.
            ts_end = end_date.timestamp()
            ts_aligned = ts_end - (ts_end % interval)
            # Non-deprecated equivalent of datetime.utcfromtimestamp().
            aligned_end = datetime.fromtimestamp(ts_aligned, tz=timezone.utc).replace(tzinfo=None)

            # Generate points backwards from the aligned end.
            start_generated = aligned_end - timedelta(seconds=points_count * interval)

            current = start_generated
            for _ in range(points_count):
                current += timedelta(seconds=interval)
                ts_str = current.strftime('%Y-%m-%d %H:%M:%S')
                ts_iso = ts_str.replace(' ', 'T') + 'Z'

                if ts_str in db_data_map:
                    item = db_data_map[ts_str].copy()
                    item['timestamp'] = ts_iso
                    final_data.append(item)
                else:
                    final_data.append({
                        'timestamp': ts_iso,
                        'bytes_received': 0,
                        'bytes_sent': 0,
                        'bytes_received_rate_mbps': 0,
                        'bytes_sent_rate_mbps': 0
                    })
        else:
            # Normalize DB timestamps to ISO-8601 UTC for the frontend.
            for item in final_data:
                if 'timestamp' in item and isinstance(item['timestamp'], str):
                    item['timestamp'] = item['timestamp'].replace(' ', 'T') + 'Z'

        return {
            'data': final_data,
            'meta': {
                'resolution_used': target_table + ('_hires' if is_high_res else ''),
                'record_count': len(final_data),
                'start': s_str,
                'end': e_str
            }
        }

    except Exception as e:
        logger.error(f"Error fetching history: {e}")
        return {'data': [], 'error': str(e)}
    finally:
        conn.close()
|
|
|
|
|
|
|
|
|
|
|
|
def get_system_stats(self):
    """Aggregate system-wide totals: client counts and transferred bytes."""
    conn = self.get_db_connection()
    cursor = conn.cursor()
    try:
        cursor.execute('''
            SELECT
                COUNT(*) as total_clients,
                SUM(CASE WHEN status = 'Active' THEN 1 ELSE 0 END) as active_clients,
                COALESCE(SUM(total_bytes_received), 0) as total_bytes_received,
                COALESCE(SUM(total_bytes_sent), 0) as total_bytes_sent
            FROM clients
        ''')
        row = cursor.fetchone()
        names = [col[0] for col in cursor.description]

        if not row:
            return {}

        stats = dict(zip(names, row))
        # Human-readable GiB totals for the dashboard.
        gib = 1024 ** 3
        stats['total_received_gb'] = round(stats['total_bytes_received'] / gib, 2)
        stats['total_sent_gb'] = round(stats['total_bytes_sent'] / gib, 2)
        return stats
    except Exception as e:
        logger.error(f"Error system stats: {e}")
        return {}
    finally:
        conn.close()
|
|
|
|
|
|
|
|
|
|
|
|
def get_analytics_data(self, range_arg='24h'):
    """Get aggregated analytics with dynamic resolution.

    :param range_arg: '24h', '7d' or '30d'.
    :return: dict with peak concurrent clients, top-3 clients by traffic, a
             fixed 96-point zero-filled global history series, and the
             overall rx/tx traffic split.
    """
    conn = self.get_db_connection()
    cursor = conn.cursor()

    analytics = {
        'max_concurrent_24h': 0,
        'top_clients_24h': [],
        'global_history_24h': [],
        'traffic_distribution': {'rx': 0, 'tx': 0}
    }

    # 1. Configuration: window size, bucket width and backing table.
    hours = 24
    interval_seconds = 900  # 15 min default -> 96 points/day
    target_table = 'usage_history'

    if range_arg == '7d':
        hours = 168
        interval_seconds = 6300  # 105 min -> 96 points
        target_table = 'stats_hourly'
    elif range_arg == '30d':
        hours = 720
        interval_seconds = 27000  # 450 min -> 96 points
        target_table = 'stats_hourly'  # Fallback to hourly/raw as needed

    # Fallback logic for table existence.  Fix: the bare `except:` clauses
    # are narrowed to `except Exception:`.
    try:
        cursor.execute(f"SELECT name FROM sqlite_master WHERE type='table' AND name='{target_table}'")
        if not cursor.fetchone():
            target_table = 'usage_history'  # Fallback to raw if aggregated missing
    except Exception:
        target_table = 'usage_history'

    try:
        # 2. Global history (chart).  Per-sample rates exist only in the raw table.
        if target_table == 'usage_history':
            rate_cols = "SUM(bytes_received_rate_mbps) as total_rx_rate, SUM(bytes_sent_rate_mbps) as total_tx_rate,"
        else:
            rate_cols = "0 as total_rx_rate, 0 as total_tx_rate,"

        # Aggregation query: bucket rows by interval_seconds.  The table name
        # comes from the hard-coded values above, so the f-string is safe.
        query_hist = f'''
            SELECT
                datetime((strftime('%s', timestamp) / {interval_seconds}) * {interval_seconds}, 'unixepoch') as timestamp,
                SUM(total_rx) as total_rx,
                SUM(total_tx) as total_tx,
                MAX(total_rx_rate) as total_rx_rate,
                MAX(total_tx_rate) as total_tx_rate,
                MAX(active_count) as active_count
            FROM (
                SELECT
                    timestamp,
                    SUM(bytes_received) as total_rx,
                    SUM(bytes_sent) as total_tx,
                    {rate_cols}
                    COUNT(DISTINCT client_id) as active_count
                FROM {target_table}
                WHERE timestamp >= datetime('now', '-{hours} hours')
                GROUP BY timestamp
            ) sub
            GROUP BY datetime((strftime('%s', timestamp) / {interval_seconds}) * {interval_seconds}, 'unixepoch')
            ORDER BY timestamp ASC
        '''

        cursor.execute(query_hist)
        rows = cursor.fetchall()
        columns = [col[0] for col in cursor.description]
        db_data = {row[0]: dict(zip(columns, row)) for row in rows}

        # Post-processing: zero-fill so the chart always has 96 points.
        analytics['global_history_24h'] = []

        now = datetime.now(timezone.utc)
        # Round down to the nearest bucket boundary.
        ts_now = now.timestamp()
        ts_aligned = ts_now - (ts_now % interval_seconds)
        # Fix: non-deprecated equivalent of datetime.utcfromtimestamp().
        now_aligned = datetime.fromtimestamp(ts_aligned, tz=timezone.utc).replace(tzinfo=None)

        # We want exactly 96 points ending at now_aligned.
        start_time = now_aligned - timedelta(seconds=96 * interval_seconds)

        current = start_time
        for _ in range(96):
            current += timedelta(seconds=interval_seconds)
            ts_str = current.strftime('%Y-%m-%d %H:%M:%S')
            ts_iso = ts_str.replace(' ', 'T') + 'Z'

            if ts_str in db_data:
                item = db_data[ts_str].copy()
                item['timestamp'] = ts_iso
                analytics['global_history_24h'].append(item)
            else:
                analytics['global_history_24h'].append({
                    'timestamp': ts_iso,
                    'total_rx': 0,
                    'total_tx': 0,
                    'total_rx_rate': 0,
                    'total_tx_rate': 0,
                    'active_count': 0
                })

        # Peak concurrent clients over the window.
        max_clients = 0
        for row in analytics['global_history_24h']:
            if row.get('active_count', 0) > max_clients:
                max_clients = row['active_count']
        analytics['max_concurrent_24h'] = max_clients

        # 3. Top clients by total traffic (same target table).
        query_top = f'''
            SELECT
                c.common_name,
                SUM(t.bytes_received) as rx,
                SUM(t.bytes_sent) as tx,
                (SUM(t.bytes_received) + SUM(t.bytes_sent)) as total_traffic
            FROM {target_table} t
            JOIN clients c ON t.client_id = c.id
            WHERE t.timestamp >= datetime('now', '-{hours} hours')
            GROUP BY c.id
            ORDER BY total_traffic DESC
            LIMIT 3
        '''
        cursor.execute(query_top)
        top_cols = [col[0] for col in cursor.description]
        analytics['top_clients_24h'] = [dict(zip(top_cols, row)) for row in cursor.fetchall()]

        # 4. Overall rx/tx traffic split.
        query_dist = f'''
            SELECT
                SUM(bytes_received) as rx,
                SUM(bytes_sent) as tx
            FROM {target_table}
            WHERE timestamp >= datetime('now', '-{hours} hours')
        '''
        cursor.execute(query_dist)
        dist_res = cursor.fetchone()
        if dist_res:
            analytics['traffic_distribution'] = {'rx': dist_res[0] or 0, 'tx': dist_res[1] or 0}

        return analytics

    except Exception as e:
        logger.error(f"Analytics error: {e}")
        return analytics
    finally:
        conn.close()
|
|
|
|
|
|
|
2026-01-12 11:44:50 +03:00
|
|
|
|
def get_active_sessions(self):
    """Return currently active sessions from the temporary active_sessions table."""
    conn = self.get_db_connection()
    conn.row_factory = sqlite3.Row
    cursor = conn.cursor()
    try:
        # Graceful degradation: the monitor may not have created the table yet.
        cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='active_sessions'")
        if cursor.fetchone() is None:
            return []

        cursor.execute('''
            SELECT
                client_id, common_name, real_address, bytes_received, bytes_sent, connected_since, last_seen
            FROM active_sessions
            ORDER BY connected_since DESC
        ''')

        sessions = []
        mb = 1024 * 1024
        for row in cursor.fetchall():
            rx = row['bytes_received']
            tx = row['bytes_sent']
            sessions.append({
                'client_id': row['client_id'],
                'common_name': row['common_name'],
                'real_address': row['real_address'],
                'bytes_received': rx,
                'bytes_sent': tx,
                'connected_since': row['connected_since'],
                'last_seen': row['last_seen'],
                # Pre-computed MB values for the UI.
                'received_mb': round((rx or 0) / mb, 2),
                'sent_mb': round((tx or 0) / mb, 2)
            })
        return sessions
    except Exception as e:
        logger.error(f"Error fetching active sessions: {e}")
        return []
    finally:
        conn.close()
|
|
|
|
|
|
|
2026-01-09 01:05:50 +03:00
|
|
|
|
# Module-level API facade shared by every route handler below.
api = OpenVPNAPI()
|
|
|
|
|
|
|
|
|
|
|
|
# --- ROUTES ---


@app.route('/api/v1/stats', methods=['GET'])
def get_stats():
    """Get current statistics for all clients"""
    try:
        clients = api.get_current_stats()

        # Attach derived, human-friendly fields for the frontend.
        formatted_data = []
        for client in clients:
            client['total_received_mb'] = round((client['total_bytes_received'] or 0) / (1024*1024), 2)
            client['total_sent_mb'] = round((client['total_bytes_sent'] or 0) / (1024*1024), 2)
            client['current_recv_rate_mbps'] = client['current_recv_rate'] or 0
            client['current_sent_rate_mbps'] = client['current_sent_rate'] or 0
            formatted_data.append(client)

        return jsonify({
            'success': True,
            'timestamp': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
            'data': formatted_data,
            'count': len(formatted_data)
        })
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/api/v1/stats/system', methods=['GET'])
def get_system_stats():
    """Get system-wide statistics"""
    try:
        payload = api.get_system_stats()
        return jsonify({
            'success': True,
            'timestamp': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
            'data': payload
        })
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/api/v1/stats/<string:common_name>', methods=['GET'])
def get_client_stats(common_name):
    """
    Get detailed stats for a client.
    Query Params:
    - range: '24h' (default), '7d', '30d', '1y' OR custom dates
    - resolution: 'auto' (default), 'raw', '5min', 'hourly', 'daily'
    """
    try:
        # Read query parameters.
        range_arg = request.args.get('range', default='24h')
        resolution = request.args.get('resolution', default='auto')

        # Use UTC because SQLite stores its data in UTC.
        end_date = datetime.now(timezone.utc)
        start_date = end_date - timedelta(hours=24)

        # Parse the requested range suffix (hours / days / years).
        if range_arg.endswith('h'):
            start_date = end_date - timedelta(hours=int(range_arg[:-1]))
        elif range_arg.endswith('d'):
            start_date = end_date - timedelta(days=int(range_arg[:-1]))
        elif range_arg.endswith('y'):
            start_date = end_date - timedelta(days=int(range_arg[:-1]) * 365)

        # Current snapshot for this client.
        all_stats = api.get_current_stats()
        client_data = next((c for c in all_stats if c['common_name'] == common_name), None)

        if client_data is None:
            return jsonify({'success': False, 'error': 'Client not found'}), 404

        # Historical series.
        history_result = api.get_client_history(
            common_name,
            start_date=start_date,
            end_date=end_date,
            resolution=resolution
        )

        response = {
            'common_name': client_data['common_name'],
            'real_address': client_data['real_address'],
            'status': client_data['status'],
            'totals': {
                'received_mb': round((client_data['total_bytes_received'] or 0) / (1024*1024), 2),
                'sent_mb': round((client_data['total_bytes_sent'] or 0) / (1024*1024), 2)
            },
            'current_rates': {
                'recv_mbps': client_data['current_recv_rate'] or 0,
                'sent_mbps': client_data['current_sent_rate'] or 0
            },
            'last_activity': client_data['last_activity'],
            'history': history_result.get('data', []),
            'meta': history_result.get('meta', {})
        }

        # A UTC timestamp keeps JS `new Date()` parsing unambiguous
        # for the frontend.
        return jsonify({
            'success': True,
            'timestamp': datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S'),
            'data': response
        })

    except Exception as e:
        logger.error(f"API Error: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/api/v1/certificates', methods=['GET'])
def get_certificates():
    """Return the certificate inventory collected by the API backend."""
    try:
        cert_info = api.get_certificates_info()
        return jsonify({'success': True, 'data': cert_info})
    except Exception as exc:
        return jsonify({'success': False, 'error': str(exc)}), 500
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/api/v1/clients', methods=['GET'])
def get_clients_list():
    """Return a minimal listing of known clients (common name + status)."""
    try:
        stats = api.get_current_stats()
        payload = [
            {'common_name': entry['common_name'], 'status': entry['status']}
            for entry in stats
        ]
        return jsonify({'success': True, 'data': payload})
    except Exception as exc:
        return jsonify({'success': False, 'error': str(exc)}), 500
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/api/v1/health', methods=['GET'])
def health_check():
    """Liveness probe: healthy iff a database connection can be opened."""
    try:
        # Open and immediately release a connection; any failure means unhealthy.
        api.get_db_connection().close()
        return jsonify({'success': True, 'status': 'healthy'})
    except Exception as exc:
        return jsonify({'success': False, 'status': 'unhealthy', 'error': str(exc)}), 500
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/api/v1/analytics', methods=['GET'])
def get_analytics():
    """Get dashboard analytics data"""
    try:
        requested = request.args.get('range', default='24h')

        # Whitelist the range argument before handing it to the data layer;
        # anything unknown silently falls back to 24h.
        allowed_ranges = ('24h', '7d', '30d')
        selected_range = requested if requested in allowed_ranges else '24h'

        payload = api.get_analytics_data(selected_range)
        return jsonify({
            'success': True,
            'timestamp': datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S'),
            'data': payload,
            'range': selected_range,
        })
    except Exception as exc:
        logger.error(f"Error in analytics endpoint: {exc}")
        return jsonify({'success': False, 'error': str(exc)}), 500
|
|
|
|
|
|
|
2026-01-12 11:44:50 +03:00
|
|
|
|
@app.route('/api/v1/sessions', methods=['GET'])
def get_sessions():
    """Get all currently active sessions (real-time)"""
    try:
        sessions = api.get_active_sessions()
        return jsonify({
            'success': True,
            'timestamp': datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S'),
            'data': sessions,
            'count': len(sessions),
        })
    except Exception as exc:
        return jsonify({'success': False, 'error': str(exc)}), 500
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# --- PKI MANAGEMENT ROUTES ---
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/api/v1/pki/init', methods=['POST'])
def init_pki():
    """Initialize the PKI environment from scratch.

    JSON body (all keys optional):
        force (bool): re-initialize even if a PKI already exists.
        vars (dict): EasyRSA variable overrides written before init
                     (e.g. EASYRSA_REQ_CN for the CA common name).

    Returns 200 on success, 400 when init_pki refuses, 500 on error.
    """
    try:
        # BUGFIX: `request.json` is None (or raises) when the request carries
        # no JSON body, which previously crashed with AttributeError -> 500.
        # Tolerate an empty body and fall back to defaults instead.
        data = request.get_json(silent=True) or {}
        force = data.get('force', False)
        pki_vars = data.get('vars', {})

        # 0. Update Vars if provided
        if pki_vars:
            api.pki.update_vars(pki_vars)

        # 1. Clean/Init PKI
        success, msg = api.pki.init_pki(force=force)
        if not success:
            return jsonify({'success': False, 'error': msg}), 400

        # 2. Build CA — use CN from vars if available, else default
        ca_cn = pki_vars.get('EASYRSA_REQ_CN', 'OpenVPN-CA')
        api.pki.build_ca(ca_cn)

        # 3. Build Server Cert
        api.pki.build_server("server")

        # 4. Gen DH parameters
        api.pki.gen_dh()

        # 5. Gen TLS-auth key inside the PKI directory
        ta_path = Path(api.pki_path) / 'ta.key'
        api.pki.gen_ta_key(ta_path)

        # 6. Gen certificate revocation list
        api.pki.gen_crl()

        # NOTE(review): steps 2-6 discard their return values, so failures
        # there are only surfaced if they raise — confirm PKIManager semantics.
        return jsonify({'success': True, 'message': 'PKI initialized successfully'})
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/api/v1/pki/validate', methods=['POST'])
def validate_pki():
    """Validate that a given path looks like a usable PKI directory.

    JSON body: {"path": "<directory>"} — required.
    Returns the validation verdict and a human-readable message.
    """
    try:
        # BUGFIX: a request without a JSON body made `request.json.get`
        # crash -> 500; now such requests get the intended clean 400.
        data = request.get_json(silent=True) or {}
        path = data.get('path')
        if not path:
            return jsonify({'success': False, 'error': 'Path required'}), 400
        success, msg = api.pki.validate_pki_path(path)
        return jsonify({'success': success, 'message': msg})
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/api/v1/pki/config', methods=['GET', 'POST'])
def handle_pki_config():
    """Get or Save PKI path configuration.

    GET  -> returns the currently configured easyrsa_path and pki_path.
    POST -> accepts {"path": ..., "easyrsa_path": ...}, heuristically
            resolves the real PKI dir and easyrsa script location, persists
            them to config.ini and rebuilds the in-memory PKIManager.
    """
    try:
        if request.method == 'GET':
            return jsonify({
                'success': True,
                'data': {
                    'easyrsa_path': api.easyrsa_path,
                    'pki_path': api.pki_path
                }
            })

        # POST
        # NOTE(review): request.json is None when no JSON body is sent;
        # that would raise here and surface as a 500 — confirm callers
        # always send JSON.
        path_str = request.json.get('path')
        if not path_str: return jsonify({'success': False, 'error': 'Path required'}), 400

        path = Path(path_str).resolve()
        if not path.exists(): return jsonify({'success': False, 'error': 'Path invalid'}), 400

        # Heuristic to determine easyrsa_path and pki_path
        # User supplied 'path' is likely the PKI directory (containing ca.crt or being empty/prepared)
        pki_path = path
        easyrsa_path = path.parent # Default assumption: script is in parent

        # 1. Search for easyrsa binary (Heuristic) — first hit wins, ordered
        # from most specific (inside the supplied path) to system defaults.
        potential_bins = [
            path / 'easyrsa', # Inside path
            path.parent / 'easyrsa', # Parent
            path.parent / 'easy-rsa' / 'easyrsa', # Sibling easy-rsa
            Path('/usr/share/easy-rsa/easyrsa'), # System
            Path('/etc/openvpn/easy-rsa/easyrsa') # System
        ]

        found_bin = None
        for bin_path in potential_bins:
            if bin_path.exists():
                easyrsa_path = bin_path.parent
                found_bin = bin_path
                break

        # Override with explicit easyrsa_path if provided — the explicit
        # value may point at either the script file or its directory.
        explicit_easyrsa = request.json.get('easyrsa_path')
        if explicit_easyrsa:
            epath = Path(explicit_easyrsa)
            if epath.is_file(): # Path to script
                easyrsa_path = epath.parent
                found_bin = epath
            elif (epath / 'easyrsa').exists(): # Path to dir
                easyrsa_path = epath
                found_bin = epath / 'easyrsa'

        if not found_bin:
            # Fallback: assume typical layout if not found yet
            pass

        # If user pointed to root (containing pki subdir), descend into it:
        # either pki/ca.crt exists, or a pki dir exists and the root itself
        # has no ca.crt (i.e. the root is not itself a PKI).
        if (path / 'pki' / 'ca.crt').exists() or ((path / 'pki').exists() and not (path / 'ca.crt').exists()):
            pki_path = path / 'pki'
            # Only adjust easyrsa_path if not explicitly set/found yet
            if not explicit_easyrsa and not found_bin and (path / 'easyrsa').exists():
                easyrsa_path = path

        # Update Config (in-memory configparser state)
        if not api.config.has_section('pki'):
            api.config.add_section('pki')

        api.config.set('pki', 'easyrsa_path', str(easyrsa_path))
        api.config.set('pki', 'pki_path', str(pki_path))

        # Write config.ini so the choice survives restarts
        with open('config.ini', 'w') as f:
            api.config.write(f)

        # Reload PKI Manager so subsequent requests use the new paths
        api.easyrsa_path = str(easyrsa_path)
        api.pki_path = str(pki_path)
        api.pki = PKIManager(api.easyrsa_path, api.pki_path)

        return jsonify({
            'success': True,
            'message': f'PKI Conf saved',
            'details': {
                'easyrsa_path': str(easyrsa_path),
                'pki_path': str(pki_path)
            }
        })

    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/api/v1/pki/client/<string:name>/config', methods=['GET'])
def get_client_config(name):
    """Generate and return a client's .ovpn configuration on the fly.

    Query args (all optional):
        server_ip: override for the remote host written into the config.
        port, proto: overrides for the remote port and protocol.

    Returns JSON with the rendered config text and a suggested filename.
    """
    try:
        # Defaults come from the active server configuration file.
        server_conf = api.conf_mgr.read_server_config()

        # Determine the public host: query arg > server config > autodetect.
        host = request.args.get('server_ip')
        if not host:
            host = server_conf.get('public_ip')
        if not host:
            try:
                import socket
                # UDP "connect" sends no traffic; it only selects the
                # outbound interface whose local address we read back.
                with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
                    s.connect(("8.8.8.8", 80))
                    host = s.getsockname()[0]
            except OSError:
                # BUGFIX: was a bare `except:` that also swallowed
                # KeyboardInterrupt/SystemExit, and the socket leaked when
                # connect() raised; the context manager now closes it.
                host = '127.0.0.1'

        extra_params = {
            'remote_host': host,
            'remote_port': request.args.get('port') or server_conf.get('port', 1194),
            'proto': request.args.get('proto') or server_conf.get('proto', 'udp')
        }

        succ_conf, conf_content = api.conf_mgr.generate_client_config(
            name, api.pki_path, server_conf, extra_params
        )

        if not succ_conf:
            return jsonify({'success': False, 'error': conf_content}), 500

        return jsonify({'success': True, 'config': conf_content, 'filename': f"{name}.ovpn"})
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/api/v1/pki/client', methods=['POST'])
def create_client():
    """Create a new client certificate and return its .ovpn config.

    JSON body:
        name (str, required): the client common name.
        server_ip, port, proto (optional): overrides for the rendered config.

    Returns the generated config so the UI can offer an immediate download.
    """
    try:
        data = request.get_json(silent=True) or {}
        name = data.get('name')
        if not name:
            return jsonify({'success': False, 'error': 'Name is required'}), 400

        # 1. Build the client certificate/key pair
        success, output = api.pki.build_client(name)
        if not success:
            return jsonify({'success': False, 'error': output}), 500

        # 2. Generate the client config. Defaults come from the active
        # server configuration; explicit body values take precedence.
        server_conf = api.conf_mgr.read_server_config()
        server_ip = data.get('server_ip') or api.public_ip or '127.0.0.1'

        extra_params = {
            'remote_host': server_ip,
            'remote_port': data.get('port') or server_conf.get('port', 1194),
            'proto': data.get('proto') or server_conf.get('proto', 'udp')
        }

        # BUGFIX: generate_client_config takes (name, pki_path, server_conf,
        # extra_params) — see get_client_config. The old call passed
        # server_ip/port/proto as loose positional args, which does not match
        # that signature and broke config generation on this endpoint.
        succ_conf, conf_content = api.conf_mgr.generate_client_config(
            name, api.pki_path, server_conf, extra_params
        )

        if not succ_conf:
            return jsonify({'success': False, 'error': conf_content}), 500

        return jsonify({'success': True, 'config': conf_content, 'filename': f"{name}.ovpn"})
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/api/v1/pki/client/<string:name>', methods=['DELETE'])
def revoke_client(name):
    """Revoke client certificate"""
    try:
        revoked, detail = api.pki.revoke_client(name)
        if not revoked:
            return jsonify({'success': False, 'error': detail}), 500
        return jsonify({'success': True, 'message': 'Client revoked'})
    except Exception as exc:
        return jsonify({'success': False, 'error': str(exc)}), 500
|
|
|
|
|
|
|
|
|
|
|
|
# --- SERVER MANAGEMENT ROUTES ---
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/api/v1/server/config', methods=['GET', 'POST'])
def manage_server_config():
    """Get or Save server.conf.

    GET  -> reads the active server config (optionally switching to a new
            file via ?path=, which is also persisted to config.ini).
    POST -> persists path/public_ip preferences, injects PKI file paths and
            regenerates the server configuration file.
    """
    try:
        if request.method == 'GET':
            # Check for path override (Reload from specific file)
            path_arg = request.args.get('path')

            if path_arg:
                # Update path preference if requested — both the in-memory
                # managers and the persisted config.ini are kept in sync.
                new_path_str = str(path_arg)
                if new_path_str != str(api.conf_mgr.server_conf_path):
                    api.server_config_path = new_path_str
                    api.conf_mgr.server_conf_path = Path(new_path_str)

                    if not api.config.has_section('server'): api.config.add_section('server')
                    api.config.set('server', 'config_path', new_path_str)
                    with open('config.ini', 'w') as f:
                        api.config.write(f)

            current_conf = api.conf_mgr.read_server_config()
            # Enriched with meta-config so the UI sees where the file lives
            # and which public IP is currently configured.
            current_conf['config_path'] = str(api.conf_mgr.server_conf_path)
            current_conf['public_ip'] = api.public_ip
            return jsonify({'success': True, 'data': current_conf})

        # POST
        params = request.json
        # Basic validation
        if not params.get('port'): return jsonify({'success': False, 'error': 'Port required'}), 400

        # Check/Update Config Path and Public IP
        new_path = params.get('config_path')
        new_ip = params.get('public_ip')

        config_updated = False
        if new_path and str(new_path) != str(api.conf_mgr.server_conf_path):
            api.server_config_path = str(new_path)
            api.conf_mgr.server_conf_path = Path(new_path)
            if not api.config.has_section('server'): api.config.add_section('server')
            api.config.set('server', 'config_path', str(new_path))
            config_updated = True

        if new_ip is not None and new_ip != api.public_ip: # Allow empty string
            api.public_ip = new_ip
            if not api.config.has_section('openvpn_monitor'): api.config.add_section('openvpn_monitor')
            api.config.set('openvpn_monitor', 'public_ip', new_ip)
            config_updated = True

        # Persist config.ini only when something actually changed
        if config_updated:
            with open('config.ini', 'w') as f:
                api.config.write(f)

        # Define certificate/key file paths relative to the configured PKI
        # directory before rendering the server config template.
        params['ca_path'] = str(Path(api.pki_path) / 'ca.crt')
        params['cert_path'] = str(Path(api.pki_path) / 'issued/server.crt')
        params['key_path'] = str(Path(api.pki_path) / 'private/server.key')
        params['dh_path'] = str(Path(api.pki_path) / 'dh.pem')
        params['ta_path'] = str(Path(api.pki_path) / 'ta.key')
        params['crl_path'] = str(Path(api.pki_path) / 'crl.pem')

        # On success, msg carries the path of the written config file.
        success, msg = api.conf_mgr.generate_server_config(params)
        if not success: return jsonify({'success': False, 'error': msg}), 500

        return jsonify({'success': True, 'path': msg})
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/api/v1/server/action', methods=['POST'])
def server_action():
    """Start/Stop/Restart OpenVPN service"""
    try:
        action = request.json.get('action')

        # Dispatch table keeps the action -> handler mapping in one place.
        handlers = {
            'start': api.service.start,
            'stop': api.service.stop,
            'restart': api.service.restart,
        }
        handler = handlers.get(action)
        if handler is None:
            return jsonify({'success': False, 'error': 'Invalid action'}), 400

        success, msg = handler()
        if not success:
            return jsonify({'success': False, 'error': msg}), 500
        return jsonify({'success': True, 'message': msg})
    except Exception as exc:
        return jsonify({'success': False, 'error': str(exc)}), 500
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/api/v1/server/status', methods=['GET'])
def server_status():
    """Get service status"""
    try:
        current = api.service.get_status()
        return jsonify({'success': True, 'status': current})
    except Exception as exc:
        return jsonify({'success': False, 'error': str(exc)}), 500
|
|
|
|
|
|
|
2026-01-09 01:05:50 +03:00
|
|
|
|
if __name__ == "__main__":
    # Bind address comes from config.ini [api]; 0.0.0.0 keeps the service
    # reachable from outside the host by default.
    host = api.config.get('api', 'host', fallback='0.0.0.0')
    port = 5001 # Use 5001 so we do not clash with anything already on 5000
    debug = api.config.getboolean('api', 'debug', fallback=False)

    logger.info(f"Starting API on {host}:{port}")
    app.run(host=host, port=port, debug=debug)
|