# client.py (4.1 KB) — HTTP proxy client that tunnels web page requests
# over an AX.25/KISS packet-radio link to a remote gateway station.

    from http.server import BaseHTTPRequestHandler, HTTPServer import kiss from urllib.parse import urlparse import hashlib import time import gzip import signal call = "VA1QLE-A".encode() server = "VA1QLE".encode() #Bytes that mean stuff ESC = b'\x05' #Used for escaping bytes that appear in data ACK = b'\x06' PAGE_REQUEST = b'\x07' PAGE_RESPONSE = b'\x08' PAGE_RESPONSE_END = b'\x09' CHECKSUM = b'\x10' #Splits md5 checksum from HTML RESEND_PACKET = b'\x11' k = kiss.TCPKISS(host='localhost', port=8001) k.start() lastPathRequested = '' #Because some browsers will keep trying to connect if the first time fails. def escapeData(data): #Not very pretty return data.replace( ACK, ESC + ACK ).replace( PAGE_REQUEST, ESC + PAGE_REQUEST ).replace( PAGE_RESPONSE, ESC + PAGE_RESPONSE ).replace( PAGE_RESPONSE_END, ESC + PAGE_RESPONSE_END ).replace( CHECKSUM, ESC + CHECKSUM ).replace( RESEND_PACKET, ESC + RESEND_PACKET ) def signal_handler(signum, frame): raise TimeoutError signal.signal(signal.SIGALRM, signal_handler) def unescapeData(data): out = b'' i = 0 while i < len(data): if data[i] == ESC[0] and i + 1 < len(data): if data[i + 1] in (ACK[0], PAGE_REQUEST[0], PAGE_RESPONSE[0], PAGE_RESPONSE_END[0], CHECKSUM[0], RESEND_PACKET[0]): out += bytes([data[i + 1]]) i += 1 # Skips over the next byte else: out += bytes([data[i]]) else: out += bytes([data[i]]) i += 1 return out def sendPacket(data): k.write(call + b'>' + server + data) busyReq = False def requestPage(path): global busyReq busyReq = True global lastPathRequested if urlparse(path).netloc: if lastPathRequested != path: lastPathRequested = path sendPacket(PAGE_REQUEST + path.encode()) fullbody = b'' signal.alarm(30) try: while (not fullbody.endswith(PAGE_RESPONSE_END) or fullbody.endswith(ESC + PAGE_RESPONSE_END)): r = b'' while not PAGE_RESPONSE in r: #Keep trying until there's a page_response. Sometimes there'll be sufficient loopback #And the packet sent will be received. This prevents that from interfering. 
#TODO: Add a universal delimiter between callsigns and data #This will allow safer splitting, and will allow ignoring #Packets that aren't meant for us(If multiple people are on one frequency) x = k.read(readmode=False) r = b''.join(x)[1:] signal.alarm(0) #Split apart the data. Single packet (calls, data) = r.split(PAGE_RESPONSE, 1) (body, checksum) = data.rsplit(CHECKSUM, 1) #Splits on last delimiter (src, dest) = calls.split(b'>', 1) if dest == call: print(checksum) print(hashlib.md5(body).hexdigest().encode()) print(body) #print(body) if checksum == hashlib.md5(body).hexdigest().encode(): #Does unescape belong? TODO fullbody += body#[:-1]#body.replace(b'\n', b'') #Request next sendPacket(ACK + b'GarbageData') #Direwolf needs packets of at least 15 bytes else: sendPacket(RESEND_PACKET + b'GarbageData') print(fullbody) print(unescapeData(fullbody[:-1])) #Break apart the full multi-packet data fullbody = gzip.decompress(unescapeData(fullbody[:-1])) #Decompress. Ignore last byte - It'll be the PAGE_RESPONSE_END delimiter rspCode = int.from_bytes(fullbody[0:1], 'big') #First two bytes are the response code rspBody = fullbody[2:] #Remove bytes for code, and PAGE_RESPONSE_END at the end busyReq = False return (rspCode, rspBody) except TimeoutError: return(500, b"Timeout while connecting to packet node") else: busyReq = False return (500, b'') else: return (200, "ERROR: This is a proxy server! Don't try to connect directly!".encode()) #Change 200 to a different number class httpRequestHandler(BaseHTTPRequestHandler): #GET Request def do_GET(self): global busyReq while busyReq: time.sleep(250) (code, response) = requestPage(self.path) print(code) self.send_response(code) self.wfile.write(response) #print(requestPage("https://example.com")) httpd = HTTPServer(('127.0.0.1', 8081), httpRequestHandler) print('HTTP Server started.') httpd.serve_forever()