commit 2b7e04808d3a0290e9131315bd87761cfbbb67c3
Author: andrea
Date:   Thu Oct 31 01:49:19 2024 +0800

    Upload files to "/"

diff --git a/sm_rest.py b/sm_rest.py
new file mode 100644
index 0000000..96ff62c
--- /dev/null
+++ b/sm_rest.py
@@ -0,0 +1,184 @@
+import socket
+import time
+import xml.etree.ElementTree as ET
+import threading
+import json
+import cherrypy
+
+# Configuration
+SYSTEM_MANAGER_HOST = "sysman.freeradionetwork.eu"
+SYSTEM_MANAGER_PORT = 10025
+
+# Global data storage
+data_lock = threading.Lock()
+fetched_data = {
+    'servers': []
+}
+
+def parse_client_info(client_info_str):
+    """
+    Parses the client info string and returns a dictionary of fields.
+    """
+    # Wrap the string with a root tag to make it well-formed XML
+    xml_str = f"<root>{client_info_str}</root>"
+    client_info = {}
+
+    try:
+        root = ET.fromstring(xml_str)
+        # Mapping of tag names to field names
+        tags = {
+            'ON': 'CallsignAndUser',
+            'BC': 'BandAndChannel',
+            'DS': 'Description',
+            'NN': 'Country',
+            'CT': 'CityCityPart'
+        }
+        for tag, field in tags.items():
+            element = root.find(tag)
+            if element is not None and element.text is not None:
+                client_info[field] = element.text.strip()
+            else:
+                client_info[field] = None
+    except ET.ParseError as e:
+        print(f"Error parsing client info XML: {e}")
+        print(f"Client info string: {client_info_str}")
+    return client_info
+
+def fetch_system_explorer_info():
+    global fetched_data
+    while True:
+        try:
+            temp_data = {'servers': []}
+            # Connect to the SystemManager
+            with socket.create_connection((SYSTEM_MANAGER_HOST, SYSTEM_MANAGER_PORT)) as sock:
+                # Send the 'SM' command with CRLF (\r\n)
+                sock.sendall(b'SM\r\n')
+
+                # Wrap the socket into a file-like object for easier reading
+                sock_file = sock.makefile('r', encoding='utf-8', errors='replace')
+
+                # Receive and interpret server count
+                server_count_line = sock_file.readline()
+                server_count_line = server_count_line.strip()
+
+                if not server_count_line.isdigit():
+                    print("Unexpected response:", server_count_line)
+                    continue
+
+                server_count = int(server_count_line)
+
+                # Loop through each server based on the count received
+                for server_index in range(server_count):
+                    # Read server information
+                    server_name_line = sock_file.readline()
+                    if not server_name_line:
+                        print("No more data from server when expecting server name.")
+                        break
+                    server_name = server_name_line.strip()
+
+                    server_entry = {
+                        'server_name': server_name,
+                        'nets': []
+                    }
+
+                    # Read net count for this server
+                    net_count_line = sock_file.readline()
+                    if not net_count_line:
+                        print(f"No more data from server when expecting net count for server '{server_name}'.")
+                        break
+                    net_count_line = net_count_line.strip()
+                    try:
+                        net_count = int(net_count_line)
+                    except ValueError:
+                        print(f"Error parsing net count: {net_count_line}")
+                        continue
+
+                    # Loop through each net
+                    for net_index in range(net_count):
+                        # Read net information
+                        net_name_line = sock_file.readline()
+                        if not net_name_line:
+                            print(f"No more data from server when expecting net name for server '{server_name}'.")
+                            break
+                        net_name = net_name_line.strip()
+
+                        net_entry = {
+                            'net_name': net_name,
+                            'clients': []
+                        }
+
+                        # Read client count for this net
+                        client_count_line = sock_file.readline()
+                        if not client_count_line:
+                            print(f"No more data from server when expecting client count for net '{net_name}'.")
+                            break
+                        client_count_line = client_count_line.strip()
+                        try:
+                            client_count = int(client_count_line)
+                        except ValueError:
+                            print(f"Error parsing client count: {client_count_line}")
+                            continue
+
+                        # Loop through each client
+                        for client_index in range(client_count):
+                            client_info_line = sock_file.readline()
+                            if not client_info_line:
+                                print(f"No more data from server when expecting client info for net '{net_name}'.")
+                                break
+                            client_info_line = client_info_line.strip()
+                            # Parse the client info string
+                            client_info = parse_client_info(client_info_line)
+                            net_entry['clients'].append(client_info)
+                        server_entry['nets'].append(net_entry)
+                    temp_data['servers'].append(server_entry)
+
+            # Update the global data
+            with data_lock:
+                fetched_data = temp_data
+
+            print("Data fetching complete. Sleeping for 60 seconds...")
+            time.sleep(60)  # Fetch data every minute
+
+        except Exception as e:
+            print(f"Error: {e}")
+            time.sleep(60)  # Wait before retrying
+
+class FRNAPI:
+    @cherrypy.expose
+    @cherrypy.tools.json_out()
+    def index(self):
+        with data_lock:
+            return fetched_data
+
+    @cherrypy.expose
+    @cherrypy.tools.json_out()
+    def servers(self):
+        with data_lock:
+            return fetched_data.get('servers', [])
+
+    @cherrypy.expose
+    @cherrypy.tools.json_out()
+    def server(self, server_name=None):
+        if not server_name:
+            return {'error': 'server_name parameter is required'}
+        with data_lock:
+            for server in fetched_data.get('servers', []):
+                if server['server_name'] == server_name:
+                    return server
+            return {'error': f'Server "{server_name}" not found'}
+
+    # You can add more endpoints here as needed
+
+if __name__ == "__main__":
+    # Start the data fetching thread
+    data_thread = threading.Thread(target=fetch_system_explorer_info, daemon=True)
+    data_thread.start()
+
+    # Configure CherryPy
+    cherrypy.config.update({
+        'server.socket_host': '0.0.0.0',  # Listen on all interfaces
+        'server.socket_port': 8080,  # Change the port if needed
+    })
+
+    # Start the CherryPy web server
+    cherrypy.quickstart(FRNAPI())
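
For a quick smoke test of the REST endpoints exposed above, a small client along these lines should work (a minimal sketch, assuming the script is running locally on port 8080 as configured, and that the fetch thread has already populated some data; the urllib calls are standard library, not part of the uploaded file):

    import json
    import urllib.parse
    import urllib.request

    BASE = "http://localhost:8080"  # matches the CherryPy config above

    # Full snapshot: servers -> nets -> clients
    with urllib.request.urlopen(f"{BASE}/") as resp:
        snapshot = json.load(resp)
    print(f"{len(snapshot.get('servers', []))} servers fetched")

    # List of servers only
    with urllib.request.urlopen(f"{BASE}/servers") as resp:
        servers = json.load(resp)

    # Look up a single server by name (query parameter must be URL-encoded)
    if servers:
        name = urllib.parse.quote(servers[0]["server_name"])
        with urllib.request.urlopen(f"{BASE}/server?server_name={name}") as resp:
            print(json.load(resp))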