def attempt_udp(failover_ip, tracker_netloc):
    """Announce to a tracker over UDP and record the attempt.

    Appends a debug entry to submitted_data and returns a tuple of
    (status, interval, announce_url, latency_ms). On failure the
    interval is None and latency is 0.
    """
    printer = pprint.PrettyPrinter(width=999999, compact=True)
    announce_url = "udp://" + tracker_netloc + "/announce"
    start = time()
    attempt = {"url": announce_url, "time": int(start)}
    elapsed_ms = 0
    response = {}
    try:
        response, ip = announce_udp(announce_url)
    except RuntimeError as err:
        attempt.update({"info": [str(err)], "status": 0})
        # When resolution itself failed there is no meaningful IP to attach.
        if attempt["info"] != ["Can't resolve IP"]:
            attempt["ip"] = failover_ip
    else:
        elapsed_ms = int((time() - start) * 1000)
        attempt.update({
            "info": [redact_origin(printer.pformat(response))],
            "status": 1,
            "ip": ip,
        })
    submitted_data.appendleft(attempt)
    return attempt["status"], response.get("interval"), announce_url, elapsed_ms
def attempt_submitted(tracker):
    """Check a submitted tracker, honoring BEP34 DNS preferences when present.

    Resolves a failover IP for the hostname, then consults BEP34:
    - valid record, empty preferences -> the host denies connections; a
      failed attempt is recorded and RuntimeError is raised.
    - valid record with preferences  -> only the listed protocols/ports
      are attempted.
    - no valid record                -> every protocol is attempted.

    Returns whatever the chosen attempt helper returns.
    Raises RuntimeError when BEP34 denies the connection.
    """
    submitted_url = urlparse(tracker.url)
    try:
        failover_ip = socket.getaddrinfo(submitted_url.hostname, None)[0][4][0]
    except OSError:
        failover_ip = ""
    valid_bep_34, bep_34_info = get_bep_34(submitted_url.hostname)
    if not valid_bep_34:
        # No valid BEP34 TXT record: attempt all protocols.
        return attempt_all_protocols(submitted_url, failover_ip)
    if not bep_34_info:
        # A valid but empty BEP34 record means "do not connect".
        logger.info(
            f"Hostname denies connection via BEP34, giving up on submitted tracker {tracker.url}"
        )
        submitted_data.appendleft({
            "url": tracker.url,
            "time": int(time()),
            "status": 0,
            "ip": failover_ip,
            "info": ["Tracker denied connection according to BEP34"],
        })
        raise RuntimeError("Tracker denied connection according to BEP34")
    # The original guarded this with a redundant `elif bep_34_info:` which is
    # always true here because the empty-preferences branch raises.
    logger.info(
        f"Tracker {tracker.url} sets protocol and port preferences from BEP34: {str(bep_34_info)}"
    )
    return attempt_from_txt_prefs(submitted_url, failover_ip, bep_34_info)
def attempt_httpx(failover_ip, submitted_url, tls=True):
    """Announce to a tracker over HTTPS (or HTTP when tls=False).

    Records the attempt in submitted_data and returns a tuple of
    (status, response_dict, url, latency_ms). On failure the response
    dict is empty and latency is 0.
    """
    target_url = build_httpx_url(submitted_url, tls)
    printer = pprint.PrettyPrinter(width=999999, compact=True)
    started = time()
    attempt = {"url": target_url, "time": int(started), "ip": failover_ip}
    elapsed_ms = 0
    response = {}
    try:
        response = announce_http(target_url)
    except RuntimeError as err:
        attempt.update({"info": [redact_origin(str(err))], "status": 0})
    else:
        elapsed_ms = int((time() - started) * 1000)
        attempt.update({
            "info": [redact_origin(printer.pformat(response))],
            "status": 1,
        })
    submitted_data.appendleft(attempt)
    return attempt["status"], response, target_url, elapsed_ms
def process_new_tracker(tracker_candidate):
    """Vet a newly submitted tracker and insert it into the DB if valid.

    Rejects candidates whose IP is already tracked, whose host is already
    tracked, that fail the announce attempt, or that advertise an update
    interval outside the accepted 5-minute..3-hour window.
    """
    logger.info(f"Processing new tracker: {tracker_candidate.url}")
    tracked_ips = get_all_ips_tracked()
    if set(tracker_candidate.ip).intersection(tracked_ips):
        logger.info(
            f"Tracker {tracker_candidate.url} denied, IP of the tracker is already in the list"
        )
        return
    candidate_host = urlparse(tracker_candidate.url).hostname
    with list_lock:
        for existing in db.get_all_data():
            if existing.host == candidate_host:
                logger.info(
                    f"Tracker {tracker_candidate.url} denied, already being tracked"
                )
                return
    tracker_candidate.last_downtime = int(time())
    tracker_candidate.last_checked = int(time())
    try:
        (
            tracker_candidate.interval,
            tracker_candidate.url,
            tracker_candidate.latency,
        ) = attempt_submitted(tracker_candidate)
    except (RuntimeError, ValueError):
        return
    # Reject trackers with an update interval under 5 minutes or over 3 hours.
    if not 300 <= tracker_candidate.interval <= 10800:
        debug = submitted_data.popleft()
        original_info = debug["info"][0]
        debug.update({
            "status": 0,
            "info": [
                original_info,
                "Tracker rejected for having an interval shorter than 5 minutes or longer than 3 hours",
            ],
        })
        submitted_data.appendleft(debug)
        return
    tracker_candidate.update_ipapi_data()
    tracker_candidate.is_up()
    tracker_candidate.update_uptime()
    db.insert_new_tracker(tracker_candidate)
    logger.info(f"New tracker {tracker_candidate.url} added to newTrackon")
def scrape_submitted(tracker):
    """Try to reach a submitted tracker over UDP, then HTTPS, then HTTP.

    Each attempt appends a debug entry to submitted_data. Returns
    (latency_ms, interval, url) for the first protocol that answers.

    Raises RuntimeError when every protocol fails.
    """
    pp = pprint.PrettyPrinter(width=999999, compact=True)
    parsed = urlparse(tracker.url)
    tnetloc = parsed.netloc
    try:
        failover_ip = socket.getaddrinfo(parsed.hostname, None)[0][4][0]
    except OSError:
        failover_ip = ""
    # UDP scrape
    if parsed.port:  # If the tracker netloc has a port, try with udp
        udp_version = "udp://" + tnetloc + "/announce"
        t1 = time()
        debug_udp = {"url": udp_version, "time": int(t1)}
        try:
            # announce_udp returns (response, ip) — the original unpacked three
            # values, which raises ValueError on success. Use a distinct name so
            # the urlparse result above is not shadowed.
            udp_response, ip = announce_udp(udp_version)
            latency = int((time() - t1) * 1000)
            pretty_data = redact_origin(pp.pformat(udp_response))
            debug_udp.update({"info": pretty_data, "status": 1, "ip": ip})
            submitted_data.appendleft(debug_udp)
            return latency, udp_response["interval"], udp_version
        except RuntimeError as e:
            debug_udp.update({"info": str(e), "status": 0})
            # No meaningful IP to attach when resolution itself failed.
            if debug_udp["info"] != "Can't resolve IP":
                debug_udp["ip"] = failover_ip
            submitted_data.appendleft(debug_udp)
            logger.info(f"{udp_version} UDP failed, trying HTTPS")
    # HTTPS scrape
    if not urlparse(tracker.url).port:
        https_version = "https://" + tnetloc + ":443/announce"
    else:
        https_version = "https://" + tnetloc + "/announce"
    t1 = time()
    debug_https = {"url": https_version, "time": int(t1), "ip": failover_ip}
    try:
        response = announce_http(https_version)
        latency = int((time() - t1) * 1000)
        pretty_data = redact_origin(pp.pformat(response))
        debug_https.update({"info": pretty_data, "status": 1})
        submitted_data.appendleft(debug_https)
        return latency, response["interval"], https_version
    except RuntimeError as e:
        debug_https.update({"info": str(e), "status": 0})
        # The original had a bare string literal here (a no-op); log the
        # fallback the same way the UDP branch does.
        logger.info(f"{https_version} HTTPS not working, trying HTTP")
        submitted_data.appendleft(debug_https)
    # HTTP scrape
    if not urlparse(tracker.url).port:
        http_version = "http://" + tnetloc + ":80/announce"
    else:
        http_version = "http://" + tnetloc + "/announce"
    t1 = time()
    debug_http = {"url": http_version, "time": int(t1), "ip": failover_ip}
    try:
        response = announce_http(http_version)
        latency = int((time() - t1) * 1000)
        pretty_data = redact_origin(pp.pformat(response))
        debug_http.update({"info": pretty_data, "status": 1})
        submitted_data.appendleft(debug_http)
        return latency, response["interval"], http_version
    except RuntimeError as e:
        debug_http.update({"info": redact_origin(str(e)), "status": 0})
        submitted_data.appendleft(debug_http)
    raise RuntimeError("Tracker unreachable over UDP, HTTPS and HTTP")