Added routing table support

The configuration can now define an additional routing table into which the prefixes extracted from the configured list of ASNs or FQDNs are exported.
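For illustration, a config.json carrying the new settings might look like the sketch below. All values are placeholders; "routing_table" names the target table, and "pbr_gateway" is the optional key the injection phase reads for the "via" next hop.

# Hypothetical example of the extended config.json (all values are placeholders)
import json

example_config = {
    "asns": [64496],                # ASNs whose announced prefixes are collected
    "fqdns": ["example.com"],       # FQDNs whose resolved IPs are collected
    "routing_table": "custom_vpn",  # target routing table for injected prefixes
    "pbr_gateway": "192.0.2.1"      # optional gateway used as the 'via' next hop
}

with open("config.json", "w") as f:
    json.dump(example_config, f, indent=4)

Injection itself is triggered by the new --inject flag of the run command and requires root privileges.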
2026-03-29 08:43:50 +03:00
parent c104b13dfd
commit 2396872e49


@@ -5,24 +5,223 @@ import os
import argparse
import sys
import socket
import subprocess
import logging
import ipaddress
# --- Configuration ---
CONFIG_FILE = "config.json"
DATA_FILE = "data.json"
FQDN_DATA_FILE = "fqdn_data.json"
BASE_URL = "https://stat.ripe.net/data/announced-prefixes/data.json"
LOG_FORMAT = "%(asctime)s - %(levelname)s - %(message)s"
# --- Logging Setup ---
logging.basicConfig(level=logging.INFO, format=LOG_FORMAT, stream=sys.stderr)
logger = logging.getLogger(__name__)
# --- Helper Functions ---
def load_full_config():
if not os.path.exists(CONFIG_FILE):
-return {"asns": [], "fqdns": []}
+# Create default config if missing
default_config = {"asns": [], "fqdns": [], "routing_table": "main"}
save_full_config(default_config)
return default_config
try:
with open(CONFIG_FILE, 'r') as f:
-return json.load(f)
+config = json.load(f)
# Ensure backward compatibility
if "routing_table" not in config:
config["routing_table"] = "main"
save_full_config(config)
return config
except json.JSONDecodeError:
-return {"asns": [], "fqdns": []}
+logger.error(f"Failed to decode {CONFIG_FILE}. Using defaults.")
return {"asns": [], "fqdns": [], "routing_table": "main"}
def save_full_config(config):
try:
with open(CONFIG_FILE, 'w') as f:
json.dump(config, f, indent=4)
except Exception as e:
logger.error(f"Failed to save config: {e}")
# --- Routing Manager ---
class RoutingManager:
RT_TABLES_FILE = "/etc/iproute2/rt_tables"
def __init__(self, table_name, gateway=None):
self.table_name = str(table_name)
self.gateway = gateway
self.existing_routes = set()
self.table_id = None
def ensure_table_exists(self):
"""Check if table exists, create if missing."""
if self._read_table_id():
logger.info(f"Routing table '{self.table_name}' (ID: {self.table_id}) found.")
return True
logger.warning(f"Routing table '{self.table_name}' not found. Creating...")
if self._create_table():
logger.info(f"Routing table '{self.table_name}' created successfully.")
self._read_table_id()
return True
else:
logger.error(f"Failed to create routing table '{self.table_name}'.")
return False
def _read_table_id(self):
"""Read table ID from /etc/iproute2/rt_tables."""
try:
if not os.path.exists(self.RT_TABLES_FILE):
return False
with open(self.RT_TABLES_FILE, 'r') as f:
for line in f:
line = line.strip()
if not line or line.startswith('#'):
continue
parts = line.split()
if len(parts) >= 2:
tid, tname = parts[0], parts[1]
if tname == self.table_name:
self.table_id = tid
return True
return False
except Exception as e:
logger.error(f"Error reading {self.RT_TABLES_FILE}: {e}")
return False
def _create_table(self):
"""Add new table to /etc/iproute2/rt_tables with free ID."""
try:
rt_dir = os.path.dirname(self.RT_TABLES_FILE)
if not os.path.exists(rt_dir):
os.makedirs(rt_dir, exist_ok=True)
used_ids = set()
if os.path.exists(self.RT_TABLES_FILE):
with open(self.RT_TABLES_FILE, 'r') as f:
for line in f:
line = line.strip()
if not line or line.startswith('#'):
continue
parts = line.split()
if len(parts) >= 2:
try:
used_ids.add(int(parts[0]))
except ValueError:
continue
free_id = None
for tid in range(2, 253):
if tid not in used_ids:
free_id = tid
break
if free_id is None:
logger.error("No available table IDs (2-252).")
return False
with open(self.RT_TABLES_FILE, 'a') as f:
f.write(f"{free_id}\t{self.table_name}\n")
self.table_id = str(free_id)
return True
except PermissionError:
logger.error(f"Permission denied writing to {self.RT_TABLES_FILE}. Run as root.")
return False
except Exception as e:
logger.error(f"Error creating table: {e}")
return False
def check_table_exists(self):
"""Wrapper to ensure table exists and load routes."""
if not self.ensure_table_exists():
return False
# Validate Gateway
if self.gateway:
try:
ipaddress.ip_address(self.gateway)
logger.info(f"Using gateway: {self.gateway}")
except ValueError:
logger.error(f"Invalid gateway IP address: {self.gateway}")
return False
else:
logger.warning("No gateway specified. Routes will be added without 'via'.")
# Load existing routes
try:
result = subprocess.run(
["ip", "route", "show", "table", self.table_name],
capture_output=True,
text=True,
timeout=5
)
if result.returncode == 0:
self._parse_existing_routes(result.stdout)
return True
except Exception as e:
logger.error(f"Error listing routes: {e}")
return False
def _parse_existing_routes(self, output):
"""Parse 'ip route' output to extract prefixes."""
for line in output.splitlines():
parts = line.split()
if parts:
prefix = parts[0]
if prefix != "default" and "/" in prefix:
try:
ipaddress.ip_network(prefix, strict=False)
self.existing_routes.add(prefix)
except ValueError:
continue
def add_routes(self, prefixes):
"""Add IPv4 prefixes to the table if they don't exist."""
added_count = 0
failed_count = 0
for prefix in prefixes:
try:
net = ipaddress.ip_network(prefix, strict=False)
if net.version != 4:
continue
except ValueError:
logger.warning(f"Invalid prefix format: {prefix}")
continue
if prefix in self.existing_routes:
logger.debug(f"Route {prefix} already exists in table {self.table_name}.")
continue
try:
# Correct syntax: ip route add <PREFIX> via <GATEWAY> table <TABLE_NAME>
cmd = ["ip", "route", "add", prefix]
if self.gateway:
cmd.extend(["via", self.gateway])
cmd.extend(["table", self.table_name])
subprocess.run(cmd, check=True, capture_output=True, text=True)
logger.info(f"Injected route: {prefix} via {self.gateway or 'direct'} -> table {self.table_name}")
added_count += 1
except subprocess.CalledProcessError as e:
if "File exists" in e.stderr:
logger.debug(f"Route {prefix} already exists (race condition).")
else:
logger.error(f"Failed to add route {prefix}: {e.stderr.strip()}")
failed_count += 1
except Exception as e:
logger.error(f"Unexpected error adding {prefix}: {e}")
failed_count += 1
logger.info(f"Routing injection complete: {added_count} added, {failed_count} failed.")
return failed_count == 0
class CIDRCollector:
def __init__(self):
@@ -37,20 +236,20 @@ class CIDRCollector:
if asn not in self.asns:
self.asns.append(asn)
self.save_config()
-print(f"ASN {asn} added.")
+logger.info(f"ASN {asn} added.")
else:
-print(f"ASN {asn} already in list.")
+logger.info(f"ASN {asn} already in list.")
def remove_asn(self, asn):
if asn in self.asns:
self.asns.remove(asn)
self.save_config()
-print(f"ASN {asn} removed.")
+logger.info(f"ASN {asn} removed.")
else:
-print(f"ASN {asn} not found in list.")
+logger.warning(f"ASN {asn} not found in list.")
def list_asns(self):
-print("Current ASNs:", self.asns)
+logger.info(f"Current ASNs: {self.asns}")
def fetch_prefixes(self, asn):
params = {'resource': f'AS{asn}'}
@@ -66,7 +265,7 @@ class CIDRCollector:
prefixes.append(item['prefix'])
return prefixes
except Exception as e:
-print(f"Error fetching data for AS{asn}: {e}")
+logger.error(f"Error fetching data for AS{asn}: {e}")
return None
def load_data(self):
@@ -76,6 +275,7 @@ class CIDRCollector:
with open(DATA_FILE, 'r') as f:
return json.load(f)
except json.JSONDecodeError:
logger.error(f"Failed to decode {DATA_FILE}")
return {}
def save_data(self, data):
@@ -85,13 +285,12 @@ class CIDRCollector:
def run_collection(self):
current_data = self.load_data()
updated = False
current_time = datetime.datetime.now().isoformat()
-print("Starting ASN CIDR collection...")
+logger.info("Starting ASN CIDR collection...")
for asn in self.asns:
str_asn = str(asn)
-print(f"Processing AS{asn}...")
+logger.info(f"Processing AS{asn}...")
fetched_prefixes = self.fetch_prefixes(asn)
if fetched_prefixes is None:
@@ -99,35 +298,33 @@ class CIDRCollector:
fetched_set = set(fetched_prefixes)
# Initialize if ASN not present
if str_asn not in current_data:
current_data[str_asn] = {
"last_updated": current_time,
"prefixes": sorted(list(fetched_set))
}
-print(f" - New ASN. Added {len(fetched_set)} prefixes.")
+logger.info(f" - New ASN. Added {len(fetched_set)} prefixes.")
updated = True
else:
existing_prefixes = set(current_data[str_asn].get("prefixes", []))
# Check for new prefixes
new_prefixes = fetched_set - existing_prefixes
if new_prefixes:
# Accumulate: Union of existing and new
updated_set = existing_prefixes.union(fetched_set)
current_data[str_asn]["prefixes"] = sorted(list(updated_set))
current_data[str_asn]["last_updated"] = current_time
-print(f" - Updates found. Added {len(new_prefixes)} new prefixes.")
+logger.info(f" - Updates found. Added {len(new_prefixes)} new prefixes.")
updated = True
else:
-print(" - No new prefixes found.")
+logger.debug(f" - No new prefixes for AS{asn}.")
if updated:
self.save_data(current_data)
-print("CIDR Data saved to data.json")
+logger.info("CIDR Data saved to data.json")
else:
-print("No CIDR changes to save.")
+logger.info("No CIDR changes to save.")
return current_data
class FQDNCollector:
def __init__(self):
@@ -142,34 +339,31 @@ class FQDNCollector:
if fqdn not in self.fqdns:
self.fqdns.append(fqdn)
self.save_config()
-print(f"FQDN {fqdn} added.")
+logger.info(f"FQDN {fqdn} added.")
else:
-print(f"FQDN {fqdn} already in list.")
+logger.info(f"FQDN {fqdn} already in list.")
def remove_fqdn(self, fqdn):
if fqdn in self.fqdns:
self.fqdns.remove(fqdn)
self.save_config()
-print(f"FQDN {fqdn} removed.")
+logger.info(f"FQDN {fqdn} removed.")
else:
-print(f"FQDN {fqdn} not found in list.")
+logger.warning(f"FQDN {fqdn} not found in list.")
def list_fqdns(self):
-print("Current FQDNs:", self.fqdns)
+logger.info(f"Current FQDNs: {self.fqdns}")
def resolve_fqdn(self, fqdn):
try:
# Resolve for both IPv4 (AF_INET) and IPv6 (AF_INET6)
# We use 0 for family to get both
results = socket.getaddrinfo(fqdn, None)
ips = set()
for result in results:
# result[4] is the sockaddr. For IP protocols, index 0 is the IP address string
ip_addr = result[4][0]
ips.add(ip_addr)
return list(ips)
except socket.gaierror as e:
-print(f"Error resolving {fqdn}: {e}")
+logger.error(f"Error resolving {fqdn}: {e}")
return []
def load_data(self):
@@ -190,13 +384,13 @@ class FQDNCollector:
updated = False
current_time = datetime.datetime.now().isoformat()
-print("Starting FQDN IP collection...")
+logger.info("Starting FQDN IP collection...")
for fqdn in self.fqdns:
-print(f"Processing {fqdn}...")
+logger.info(f"Processing {fqdn}...")
resolved_ips = self.resolve_fqdn(fqdn)
if not resolved_ips:
-print(f" - No IPs resolved for {fqdn}")
+logger.warning(f" - No IPs resolved for {fqdn}")
continue
fetched_set = set(resolved_ips)
@@ -206,7 +400,7 @@ class FQDNCollector:
"last_updated": current_time, "last_updated": current_time,
"ips": sorted(list(fetched_set)) "ips": sorted(list(fetched_set))
} }
print(f" - New FQDN. Added {len(fetched_set)} IPs.") logger.info(f" - New FQDN. Added {len(fetched_set)} IPs.")
updated = True updated = True
else: else:
existing_ips = set(current_data[fqdn].get("ips", [])) existing_ips = set(current_data[fqdn].get("ips", []))
@@ -216,25 +410,27 @@ class FQDNCollector:
updated_set = existing_ips.union(fetched_set)
current_data[fqdn]["ips"] = sorted(list(updated_set))
current_data[fqdn]["last_updated"] = current_time
-print(f" - Updates found. Added {len(new_ips)} new IPs.")
+logger.info(f" - Updates found. Added {len(new_ips)} new IPs.")
updated = True
else:
-print(" - No new IPs found.")
+logger.debug(" - No new IPs found.")
if updated:
self.save_data(current_data)
-print(f"FQDN Data saved to {FQDN_DATA_FILE}")
+logger.info(f"FQDN Data saved to {FQDN_DATA_FILE}")
else:
-print("No FQDN changes to save.")
+logger.info("No FQDN changes to save.")
# --- Main Logic ---
def main():
parser = argparse.ArgumentParser(description="Collector for RIPE AS CIDRs and FQDN IPs")
subparsers = parser.add_subparsers(dest="command")
-# Command: run (default)
+# Command: run
parser_run = subparsers.add_parser("run", help="Run the collection process")
-parser_run.add_argument("--mode", choices=["asn", "fqdn", "all"], default="all", help="Collection mode: asn, fqdn, or all (default)")
+parser_run.add_argument("--mode", choices=["asn", "fqdn", "all"], default="all", help="Collection mode")
parser_run.add_argument("--inject", action="store_true", help="Inject collected IPv4 prefixes into routing table")
# ASN Commands
parser_add = subparsers.add_parser("add", help="Add an ASN")
@@ -258,6 +454,7 @@ def main():
asn_collector = CIDRCollector()
fqdn_collector = FQDNCollector()
try:
if args.command == "add": if args.command == "add":
asn_collector.add_asn(args.asn) asn_collector.add_asn(args.asn)
elif args.command == "remove": elif args.command == "remove":
@@ -271,16 +468,50 @@ def main():
fqdn_collector.list_fqdns()
elif args.command == "run":
mode = args.mode
all_prefixes = []
# 1. Collection Phase
if mode in ["asn", "all"]: if mode in ["asn", "all"]:
asn_collector.run_collection() data = asn_collector.run_collection()
# Aggregate all prefixes for potential injection
for asn_data in data.values():
all_prefixes.extend(asn_data.get("prefixes", []))
if mode == "all": if mode == "all":
print("-" * 20) logger.info("-" * 20)
if mode in ["fqdn", "all"]: if mode in ["fqdn", "all"]:
fqdn_collector.run_collection() fqdn_collector.run_collection()
# 2. Injection Phase (Only if --inject is present)
if args.inject:
logger.info("Starting Routing Injection phase...")
if os.geteuid() != 0:
logger.error("Error: --inject requires root privileges (sudo).")
sys.exit(1)
config = load_full_config()
table_name = config.get("routing_table", "main")
gateway = config.get("pbr_gateway")  # Read the gateway from the config
router = RoutingManager(table_name, gateway=gateway)  # Pass the gateway to the routing manager
if router.check_table_exists():
unique_prefixes = list(set(all_prefixes))
logger.info(f"Attempting to inject {len(unique_prefixes)} unique prefixes into table '{table_name}'.")
router.add_routes(unique_prefixes)
else:
logger.error("Routing table check failed. Injection aborted.")
sys.exit(1)
else:
logger.debug("Injection skipped (--inject not provided).")
else:
parser.print_help()
except Exception as e:
logger.critical(f"Unhandled exception: {e}")
sys.exit(1)
if __name__ == "__main__":
main()