You cannot select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
ffmyk-ansible/roles/install_respondd_poller/files/respondd_poller.py

148 lines
4.4 KiB
Python

#!/usr/bin/env python
import socket
import ipaddress
import threading
import time
import zlib
import json
import os.path
import sys
from wgnlpy import WireGuard
import requests
from xml.etree import ElementTree
# The poller is useless without its config file -- bail out immediately.
if not os.path.exists("/etc/respondd_poller.json"):
    print("/etc/respondd_poller.json missing")
    sys.exit(1)

# Runtime configuration, filled in from /etc/respondd_poller.json below.
interface = None   # WireGuard interface name whose peers are enumerated
prefix = None      # IPv6Network that node addresses must fall inside
yanic_addr = None  # (host, port) that incoming respondd replies are forwarded to
request = None     # raw respondd request payload sent to each node

with open("/etc/respondd_poller.json", "r") as f:
    config = json.load(f)

interface = config.get("interface")
if "prefix" in config:
    prefix = ipaddress.IPv6Network(config["prefix"])
if "yanic_addr" in config and "yanic_port" in config:
    yanic_addr = (config["yanic_addr"], int(config["yanic_port"]))
if "request" in config:
    request = config["request"].encode("ascii")

wg = WireGuard()
sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)

# Per-node liveness bookkeeping, keyed by IPv6Address:
# monotonic timestamp of the last request we sent / last response we saw.
last_request = dict()
last_response = dict()
def get_wg_peers():
    """Yield each allowed-ips network of the WireGuard peers that lies inside *prefix*."""
    peers = wg.get_interface(interface).peers
    for peer in peers.values():
        for allowed_net in peer.allowedips:
            if allowed_net.subnet_of(prefix):
                yield allowed_net
def inflate(data):
    """Decompress a raw (headerless) DEFLATE payload and return it as text.

    respondd answers are deflate-compressed without the zlib wrapper, hence
    the negative wbits argument.
    """
    decompressor = zlib.decompressobj(-zlib.MAX_WBITS)
    raw = decompressor.decompress(data) + decompressor.flush()
    return raw.decode()
def cleanup():
    """Expire nodes that have stopped answering.

    Runs forever in a worker thread. Once a minute, every node whose last
    response is older than 360 seconds is dropped from both tracking tables
    so the main loop stops polling it.

    Iterates last_response (not last_request, as an earlier revision did):
    recv() records addresses in last_response that may never appear in
    last_request, and those entries would otherwise leak forever.
    """
    while True:
        time.sleep(60)
        cutoff = time.monotonic() - 360
        # Collect first, delete afterwards -- never mutate a dict mid-iteration.
        stale = [ip for ip, seen in list(last_response.items()) if seen < cutoff]
        for ip in stale:
            # pop() with default: the two tables are not guaranteed to agree.
            last_response.pop(ip, None)
            last_request.pop(ip, None)
def recv():
    """Forward every incoming respondd datagram to yanic and record the sender.

    Runs forever in a worker thread. The datagram is forwarded verbatim
    before any parsing (yanic does its own validation); the payload is then
    decoded only to confirm it is well-formed before the sender is marked
    alive. Parse errors must not kill this thread -- it is the only
    forwarder -- so they are reported and skipped.
    """
    while True:
        data, addr = sock.recvfrom(1500)
        sock.sendto(data, yanic_addr)
        try:
            # Result intentionally discarded: this only validates the payload.
            json.loads(inflate(data))
        except (zlib.error, ValueError):
            print("received malformed respondd packet from", addr[0])
            continue
        last_response[ipaddress.IPv6Address(addr[0])] = time.monotonic()
def send(ip):
    """Best-effort: send one respondd request datagram to *ip* on port 1001.

    The sendto address host must be a plain string -- the previous
    bytearray(...) form is not a valid AF_INET6 address argument, and the
    bare except silently hid that every send failed.
    """
    try:
        sock.sendto(request, (str(ip), 1001))
    except OSError:
        print("failed to send packet to", ip)
def get_http_nodeinfo(ip):
    """Discover the neighbours of node *ip* via its status pages and track them.

    Fetches the node's /cgi-bin/status page, extracts the mesh interface
    names from the data-interface attributes, then reads the
    neighbours-nodeinfo event stream for each interface. Every neighbour
    address inside *prefix* is registered in last_request/last_response so
    the main loop starts polling it.

    Runs in a short-lived thread started by scan_wg_peers(); the request
    timeouts keep that thread's join() from blocking forever on a dead node.
    """
    now = time.monotonic()
    try:
        status = requests.get('http://[' + str(ip) + ']/cgi-bin/status',
                              timeout=10)
    except requests.RequestException:
        return
    status_tree = ElementTree.fromstring(status.content)
    # NOTE: the loop variable must not be called "interface" -- that shadows
    # the module-level WireGuard interface name used elsewhere.
    mesh_ifs = [el.attrib["data-interface"]
                for el in status_tree.findall(".//*[@data-interface]")]
    for mesh_if in mesh_ifs:
        try:
            nodeinfo = requests.get(
                'http://[' + str(ip) + ']/cgi-bin/dyn/neighbours-nodeinfo?' + mesh_if,
                timeout=10)
        except requests.RequestException:
            continue  # one unreachable interface must not abort the rest
        # The endpoint speaks server-sent events: payload lines look like
        # "data: {...json...}".
        for line in nodeinfo.content.split(b'\n'):
            if not line.startswith(b'data: {'):
                continue
            data = json.loads(line.split(b': ', maxsplit=1)[1])
            if "network" in data and "addresses" in data["network"]:
                for address in data["network"]["addresses"]:
                    if ipaddress.IPv6Network(address).subnet_of(prefix):
                        node_ip = ipaddress.IPv6Address(address)
                        if node_ip not in last_request:
                            last_request[node_ip] = now
                            last_response[node_ip] = now
def scan_wg_peers():
    """Forever: once a minute, register every WireGuard peer's node and probe it.

    For each allowed-ips network of the WireGuard interface, the node itself
    is assumed to live at the network's first host address. Newly seen nodes
    are added to the tracking tables and probed over HTTP for their
    neighbours, with at most ~10 probe threads in flight at once.
    """
    while True:
        print("scanning wg peers")
        request_threads = []
        now = time.monotonic()
        for net in get_wg_peers():
            # First host address via integer arithmetic. The previous string
            # concatenation (str(network_address) + "1") is only correct for
            # networks whose textual form ends in "::".
            ip = net.network_address + 1
            if ip not in last_request:
                last_request[ip] = now
                last_response[ip] = now
                probe = threading.Thread(target=get_http_nodeinfo, args=(ip,))
                probe.start()
                request_threads.append(probe)
                # Crude throttle: drain the batch once it grows past 10.
                if len(request_threads) > 10:
                    for thread in request_threads:
                        thread.join()
                    request_threads = []
        time.sleep(60)
# Start the worker threads: response forwarder, stale-node reaper, peer scanner.
listen_thread = threading.Thread(target=recv)
listen_thread.start()
cleanup_thread = threading.Thread(target=cleanup)
cleanup_thread.start()
scan_thread = threading.Thread(target=scan_wg_peers)
scan_thread.start()

# Main poll loop: re-request data from every known node every 15 seconds.
while True:
    now = time.monotonic()
    # Snapshot the keys: cleanup() and scan_wg_peers() mutate last_request
    # from other threads, and iterating a dict that changes size raises
    # RuntimeError.
    for ip in list(last_request):
        if now - last_request[ip] > 15:
            last_request[ip] = now
            send(ip)
    time.sleep(1)