initial build
This commit is contained in:
286
cidr_collector.py
Normal file
286
cidr_collector.py
Normal file
@@ -0,0 +1,286 @@
|
||||
import json
|
||||
import requests
|
||||
import datetime
|
||||
import os
|
||||
import argparse
|
||||
import sys
|
||||
import socket
|
||||
|
||||
# Shared JSON config file holding both the ASN and FQDN watch lists.
CONFIG_FILE = "config.json"
# Accumulated announced prefixes, keyed by ASN (as a string).
DATA_FILE = "data.json"
# Accumulated resolved IP addresses, keyed by FQDN.
FQDN_DATA_FILE = "fqdn_data.json"
# RIPEstat endpoint returning the prefixes announced by a given ASN.
BASE_URL = "https://stat.ripe.net/data/announced-prefixes/data.json"
|
||||
|
||||
def load_full_config():
    """Read the shared config file.

    Returns the parsed JSON dict, or a fresh ``{"asns": [], "fqdns": []}``
    structure when the file is missing or contains invalid JSON.
    """
    if not os.path.exists(CONFIG_FILE):
        return {"asns": [], "fqdns": []}
    try:
        with open(CONFIG_FILE, 'r') as handle:
            loaded = json.load(handle)
    except json.JSONDecodeError:
        # Corrupt config: start over with an empty structure.
        return {"asns": [], "fqdns": []}
    return loaded
|
||||
|
||||
def save_full_config(config):
    """Persist *config* to CONFIG_FILE as pretty-printed JSON."""
    serialized = json.dumps(config, indent=4)
    with open(CONFIG_FILE, 'w') as handle:
        handle.write(serialized)
|
||||
|
||||
class CIDRCollector:
    """Tracks a list of ASNs and accumulates their announced prefixes.

    The ASN watch list is persisted through the shared config file;
    collected prefixes are stored in DATA_FILE keyed by the ASN as a
    string. Prefixes only ever accumulate — prefixes that disappear
    from an announcement are retained.
    """

    def __init__(self):
        self.config = load_full_config()
        self.asns = self.config.get("asns", [])

    def save_config(self):
        """Write the current ASN list back to the shared config file."""
        self.config["asns"] = self.asns
        save_full_config(self.config)

    def add_asn(self, asn):
        """Add *asn* to the watch list; no-op if already present."""
        if asn not in self.asns:
            self.asns.append(asn)
            self.save_config()
            print(f"ASN {asn} added.")
        else:
            print(f"ASN {asn} already in list.")

    def remove_asn(self, asn):
        """Remove *asn* from the watch list; no-op if absent."""
        if asn in self.asns:
            self.asns.remove(asn)
            self.save_config()
            print(f"ASN {asn} removed.")
        else:
            print(f"ASN {asn} not found in list.")

    def list_asns(self):
        """Print the currently tracked ASNs."""
        print("Current ASNs:", self.asns)

    def fetch_prefixes(self, asn):
        """Query RIPEstat for the prefixes announced by AS<asn>.

        Returns a list of prefix strings, or None on failure so callers
        can distinguish "lookup failed" from "no prefixes announced".
        """
        params = {'resource': f'AS{asn}'}
        try:
            response = requests.get(BASE_URL, params=params, timeout=10)
            response.raise_for_status()
            data = response.json()
        # Narrowed from a bare Exception: network/HTTP errors and a
        # non-JSON response body are the failure modes this call has.
        except (requests.RequestException, ValueError) as e:
            print(f"Error fetching data for AS{asn}: {e}")
            return None

        prefixes = []
        if 'data' in data and 'prefixes' in data['data']:
            for item in data['data']['prefixes']:
                if 'prefix' in item:
                    prefixes.append(item['prefix'])
        return prefixes

    def load_data(self):
        """Load accumulated prefix data; empty dict if missing/corrupt."""
        if not os.path.exists(DATA_FILE):
            return {}
        try:
            with open(DATA_FILE, 'r') as f:
                return json.load(f)
        except json.JSONDecodeError:
            return {}

    def save_data(self, data):
        """Persist accumulated prefix data as pretty-printed JSON."""
        with open(DATA_FILE, 'w') as f:
            json.dump(data, f, indent=4)

    def run_collection(self):
        """Fetch prefixes for every tracked ASN and merge into DATA_FILE.

        The data file is rewritten only when at least one ASN gained a
        new prefix (or was seen for the first time).
        """
        current_data = self.load_data()
        updated = False
        # NOTE(review): naive local timestamp; presumably fine for a
        # single-host collector — confirm if data is compared across hosts.
        current_time = datetime.datetime.now().isoformat()

        print("Starting ASN CIDR collection...")
        for asn in self.asns:
            str_asn = str(asn)  # JSON object keys must be strings
            print(f"Processing AS{asn}...")
            fetched_prefixes = self.fetch_prefixes(asn)

            if fetched_prefixes is None:
                # Lookup failed; leave any existing data untouched.
                continue

            fetched_set = set(fetched_prefixes)

            # Initialize if ASN not present
            if str_asn not in current_data:
                current_data[str_asn] = {
                    "last_updated": current_time,
                    "prefixes": sorted(fetched_set),
                }
                print(f" - New ASN. Added {len(fetched_set)} prefixes.")
                updated = True
            else:
                existing_prefixes = set(current_data[str_asn].get("prefixes", []))

                # Check for new prefixes
                new_prefixes = fetched_set - existing_prefixes

                if new_prefixes:
                    # Accumulate: union of existing and freshly fetched.
                    current_data[str_asn]["prefixes"] = sorted(
                        existing_prefixes | fetched_set
                    )
                    current_data[str_asn]["last_updated"] = current_time
                    print(f" - Updates found. Added {len(new_prefixes)} new prefixes.")
                    updated = True
                else:
                    print(" - No new prefixes found.")

        if updated:
            self.save_data(current_data)
            print("CIDR Data saved to data.json")
        else:
            print("No CIDR changes to save.")
|
||||
|
||||
class FQDNCollector:
    """Tracks a list of FQDNs and accumulates their resolved IPs.

    The FQDN watch list is persisted through the shared config file;
    resolved addresses are stored in FQDN_DATA_FILE keyed by FQDN.
    IPs only ever accumulate — addresses that stop resolving are kept.
    """

    def __init__(self):
        self.config = load_full_config()
        self.fqdns = self.config.get("fqdns", [])

    def save_config(self):
        """Write the current FQDN list back to the shared config file."""
        self.config["fqdns"] = self.fqdns
        save_full_config(self.config)

    def add_fqdn(self, fqdn):
        """Add *fqdn* to the watch list; no-op if already present."""
        if fqdn not in self.fqdns:
            self.fqdns.append(fqdn)
            self.save_config()
            print(f"FQDN {fqdn} added.")
        else:
            print(f"FQDN {fqdn} already in list.")

    def remove_fqdn(self, fqdn):
        """Remove *fqdn* from the watch list; no-op if absent."""
        if fqdn in self.fqdns:
            self.fqdns.remove(fqdn)
            self.save_config()
            print(f"FQDN {fqdn} removed.")
        else:
            print(f"FQDN {fqdn} not found in list.")

    def list_fqdns(self):
        """Print the currently tracked FQDNs."""
        print("Current FQDNs:", self.fqdns)

    def resolve_fqdn(self, fqdn):
        """Resolve *fqdn* to its unique IP addresses.

        Uses getaddrinfo's default family (AF_UNSPEC), which returns
        both IPv4 and IPv6 results. Returns a de-duplicated list of
        address strings, or [] when resolution fails.
        """
        try:
            results = socket.getaddrinfo(fqdn, None)
            ips = set()
            for result in results:
                # result[4] is the sockaddr tuple; index 0 is the
                # address string for both IPv4 and IPv6.
                ips.add(result[4][0])
            return list(ips)
        except socket.gaierror as e:
            print(f"Error resolving {fqdn}: {e}")
            return []

    def load_data(self):
        """Load accumulated FQDN data; empty dict if missing/corrupt."""
        if not os.path.exists(FQDN_DATA_FILE):
            return {}
        try:
            with open(FQDN_DATA_FILE, 'r') as f:
                return json.load(f)
        except json.JSONDecodeError:
            return {}

    def save_data(self, data):
        """Persist accumulated FQDN data as pretty-printed JSON."""
        with open(FQDN_DATA_FILE, 'w') as f:
            json.dump(data, f, indent=4)

    def run_collection(self):
        """Resolve every tracked FQDN and merge results into FQDN_DATA_FILE.

        The data file is rewritten only when at least one FQDN gained a
        new address (or was seen for the first time).
        """
        current_data = self.load_data()
        updated = False
        # NOTE(review): naive local timestamp — mirrors CIDRCollector.
        current_time = datetime.datetime.now().isoformat()

        print("Starting FQDN IP collection...")
        for fqdn in self.fqdns:
            print(f"Processing {fqdn}...")
            resolved_ips = self.resolve_fqdn(fqdn)

            if not resolved_ips:
                print(f" - No IPs resolved for {fqdn}")
                continue

            fetched_set = set(resolved_ips)

            if fqdn not in current_data:
                current_data[fqdn] = {
                    "last_updated": current_time,
                    "ips": sorted(fetched_set),
                }
                print(f" - New FQDN. Added {len(fetched_set)} IPs.")
                updated = True
            else:
                existing_ips = set(current_data[fqdn].get("ips", []))
                new_ips = fetched_set - existing_ips

                if new_ips:
                    # Accumulate: union of stored and freshly resolved.
                    current_data[fqdn]["ips"] = sorted(existing_ips | fetched_set)
                    current_data[fqdn]["last_updated"] = current_time
                    print(f" - Updates found. Added {len(new_ips)} new IPs.")
                    updated = True
                else:
                    print(" - No new IPs found.")

        if updated:
            self.save_data(current_data)
            print(f"FQDN Data saved to {FQDN_DATA_FILE}")
        else:
            print("No FQDN changes to save.")
|
||||
|
||||
|
||||
def main():
    """CLI entry point: build the argument parser and dispatch commands."""
    parser = argparse.ArgumentParser(description="Collector for RIPE AS CIDRs and FQDN IPs")
    subparsers = parser.add_subparsers(dest="command")

    # "run" performs the actual collection; --mode selects which half.
    run_parser = subparsers.add_parser("run", help="Run the collection process")
    run_parser.add_argument("--mode", choices=["asn", "fqdn", "all"], default="all", help="Collection mode: asn, fqdn, or all (default)")

    # Watch-list management for ASNs.
    add_parser = subparsers.add_parser("add", help="Add an ASN")
    add_parser.add_argument("asn", type=int, help="ASN to add")
    remove_parser = subparsers.add_parser("remove", help="Remove an ASN")
    remove_parser.add_argument("asn", type=int, help="ASN to remove")

    # Watch-list management for FQDNs.
    add_fqdn_parser = subparsers.add_parser("add-fqdn", help="Add an FQDN")
    add_fqdn_parser.add_argument("fqdn", type=str, help="FQDN to add")
    remove_fqdn_parser = subparsers.add_parser("remove-fqdn", help="Remove an FQDN")
    remove_fqdn_parser.add_argument("fqdn", type=str, help="FQDN to remove")

    # "list" shows both watch lists.
    subparsers.add_parser("list", help="List ASNs and FQDNs")

    args = parser.parse_args()

    asn_collector = CIDRCollector()
    fqdn_collector = FQDNCollector()

    command = args.command
    if command == "add":
        asn_collector.add_asn(args.asn)
    elif command == "remove":
        asn_collector.remove_asn(args.asn)
    elif command == "add-fqdn":
        fqdn_collector.add_fqdn(args.fqdn)
    elif command == "remove-fqdn":
        fqdn_collector.remove_fqdn(args.fqdn)
    elif command == "list":
        asn_collector.list_asns()
        fqdn_collector.list_fqdns()
    elif command == "run":
        if args.mode in ("asn", "all"):
            asn_collector.run_collection()
        if args.mode == "all":
            # Visual separator between the two collection phases.
            print("-" * 20)
        if args.mode in ("fqdn", "all"):
            fqdn_collector.run_collection()
    else:
        # No subcommand given (args.command is None): show usage.
        parser.print_help()
|
||||
|
||||
# Run the CLI only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
|
||||
Reference in New Issue
Block a user