def main():
    """Load the external-tunnel settings from config.json and run the inject server.

    Reads tunnel_type_external / inject_host_external / inject_port_external and
    blocks in app.server(...).run().
    """
    try:
        config_file = real_path('/config/config.json')
        # Use a context manager so the config file handle is not leaked.
        with open(config_file) as f:
            config = json.loads(f.read())
        tunnel_type = str(config['tunnel_type_external'])
        inject_host = str(config['inject_host_external'])
        inject_port = int(config['inject_port_external'])
    except KeyError:
        # Same recovery path as the sibling entry points: report the broken
        # config file instead of crashing with a raw traceback.
        app.json_error(config_file)
        return
    app.server((inject_host, inject_port), tunnel_type).run()
def main():
    """Start the inject server and the SSH clients from the local tunnel settings.

    Falls back to SOCKS5 port '1080' when the configured list is empty; with
    more than one SOCKS5 port, per-connection logging is enabled and the
    inject server is started quiet.
    """
    try:
        config_file = real_path('/config/config.json')
        # Context manager avoids leaking the config file handle.
        with open(config_file) as f:
            config = json.loads(f.read())
        tunnel_type = str(config['tunnel_type'])
        inject_host = str(config['inject_host'])
        inject_port = int(config['inject_port'])
        socks5_port_list = app.filter_array(config['socks5_port_list'])
    except KeyError:
        app.json_error(config_file)
        return
    if not socks5_port_list:
        socks5_port_list.append('1080')
    # Both flags flip together on the same condition.
    multi_port = len(socks5_port_list) > 1
    log_connecting = multi_port
    quiet = multi_port
    app.server((inject_host, inject_port), quiet=quiet).start()
    ssh_clients = app.ssh_clients((inject_host, inject_port), socks5_port_list,
                                  log_connecting=log_connecting)
    ssh_clients.accounts = app.generate_accounts(
        app.convert_hostnames(real_path('/database/accounts.json')))
    ssh_clients.start()
def main():
    """Run the inject server (internal mode, quiet) on the external host/port.

    Reads inject_host_external / inject_port_external from config.json and
    blocks in app.server(...).run().
    """
    try:
        config_file = real_path('/config/config.json')
        # Context manager avoids leaking the config file handle.
        with open(config_file) as f:
            config = json.loads(f.read())
        inject_host = str(config['inject_host_external'])
        inject_port = int(config['inject_port_external'])
    except KeyError:
        app.json_error(config_file)
        return
    app.server((inject_host, inject_port), external=False, quiet=True).run()
def main():
    """Start a silent inject server plus SSH clients from the local settings."""
    try:
        config_file = real_path('/config/config.json')
        # Use a context manager so the config file handle is not leaked.
        with open(config_file) as f:
            config = json.loads(f.read())
        tunnel_type = str(config['tunnel_type'])
        inject_host = str(config['inject_host'])
        inject_port = int(config['inject_port'])
        socks5_port = config['socks5_port']
    except KeyError:
        # Same recovery path as the sibling entry points.
        app.json_error(config_file)
        return
    app.server((inject_host, inject_port), tunnel_type, silent=True).start()
    ssh_clients = app.ssh_clients(tunnel_type, inject_host, inject_port,
                                  socks5_port)
    ssh_clients.accounts = app.generate_accounts(
        app.convert_hostnames(real_path('/database/accounts.json')))
    ssh_clients.start()
def main():
    """Interactively drive one SSH client over a fixed local tunnel.

    Uses 127.0.0.1:9080 as the inject endpoint and SOCKS5 port '2080'.
    After each (re)connect cycle an interactive prompt is shown; entering
    'exit' (or Ctrl-C at the prompt) quits.
    """
    # Context manager avoids leaking the config file handle.
    with open(real_path('/config/config.json')) as f:
        config = json.loads(f.read())
    tunnel_type = str(config['tunnel_type'])
    # Fixed local endpoints; the redundant str()/int() literal wrappers
    # from the original are dropped.
    inject_host = '127.0.0.1'
    inject_port = 9080
    socks5_port = '2080'
    app.server((inject_host, inject_port), tunnel_type).start()
    ssh_clients = app.ssh_clients(tunnel_type, inject_host, inject_port,
                                  socks5_ports=[socks5_port],
                                  http_requests_enable=False,
                                  log_connecting=False)
    ssh_clients.accounts = app.generate_accounts(
        app.convert_hostnames(real_path('/database/accounts.json')))
    while True:
        try:
            app.ssh_statistic('clear')
            threading.Thread(target=ssh_clients.ssh_client,
                             args=(ssh_clients.unique, socks5_port)).start()
            ssh_clients._connected.add(socks5_port)
            ssh_clients.unique += 1
            # Blocks until every client has disconnected.
            ssh_clients.all_disconnected_listener()
        except KeyboardInterrupt:
            pass
        finally:
            if not ssh_clients.all_disconnected():
                ssh_clients.all_disconnected_listener()
        try:
            # NOTE(review): this RLock is constructed fresh on every
            # iteration, so it never actually serializes anything; kept
            # only for behavior parity with the original.
            with threading.RLock():
                command = str(input('\n:: '))
                print()
            if app.xstrip(command) == 'exit':
                break
        except KeyboardInterrupt:
            break
def main():
    """Debug REPL driving one SSH client over a fixed local tunnel.

    Commands: 'debug' enables the debugging log for the next connection,
    'exit' quits. Uses 127.0.0.1:9080 as the inject endpoint and SOCKS5
    port '2080'.
    """
    try:
        config_file = real_path('/config/config.json')
        # Loaded only so a broken config file is surfaced; the endpoint
        # values below are fixed. Context manager avoids a handle leak.
        with open(config_file) as f:
            config = json.loads(f.read())
        inject_host = '127.0.0.1'
        inject_port = 9080
        socks5_port = '2080'
        socks5_port_list = [socks5_port]
    except KeyError:
        app.json_error(config_file)
        return
    app.server((inject_host, inject_port)).start()
    ssh_clients = app.ssh_clients((inject_host, inject_port),
                                  http_requests_enable=False,
                                  log_connecting=False,
                                  dynamic_port_forwarding=False)
    ssh_clients.accounts = app.generate_accounts(
        app.convert_hostnames(real_path('/database/accounts.json')))
    app.log('Type debug for debugging log')
    app.log('Type exit to exit')
    while True:
        try:
            ssh_clients.debug = False
            # Renamed from 'exit' so the builtin is not shadowed.
            exiting = False
            command = app.str_input('\n:: ', newline=True)
            if command == 'exit':
                exiting = True
                break
            if command == 'debug':
                ssh_clients.debug = True
            app.ssh_statistic('clear')
            threading.Thread(target=ssh_clients.ssh_client,
                             args=(ssh_clients.unique, socks5_port)).start()
            ssh_clients._connected.add(socks5_port)
            ssh_clients.unique += 1
            # Blocks until every client has disconnected.
            ssh_clients.all_disconnected_listener()
        except KeyboardInterrupt:
            pass
        finally:
            if not exiting and not ssh_clients.all_disconnected():
                ssh_clients.all_disconnected_listener()
def start(run_through_python: bool) -> None:
    """Configure the local environment, then launch the Flask app.

    When ``run_through_python`` is true (run.py), the Flask development
    server is started directly; otherwise (run.sh / ``flask run``) the
    CLI is expected to start the server itself.
    """
    base_dir = path.dirname(path.realpath(__file__))
    config: dict = env_config(base_dir)
    session_config: dict = setEnvironment(config)
    flask_app = server()
    flask_app.env = 'development'
    print(f' * Running on http://{session_config["host"]}:5000/')
    if run_through_python:
        flask_app.run(host=session_config['host'], debug=True)
def main():
    """Check every stored SSH account through the configured proxy command.

    Accounts whose hostname fails app.check_hostname are moved to a second
    pass that re-checks them with masked ('******') credentials.

    Returns False when the config is missing/has an empty proxy_command.
    """
    try:
        config_file = real_path('/config/config.json')
        # Context managers below avoid leaking the JSON file handles.
        with open(config_file) as f:
            config = json.loads(f.read())
        proxy_command = config['proxy_command']
        if str(proxy_command.strip()) == '':
            raise KeyError
    except KeyError:
        app.json_error(config_file)
        return False
    with open(real_path('/database/accounts.json')) as f:
        data_accounts = json.loads(f.read())['accounts']
    data_deleted_accounts = {}
    for name, value in data_accounts.items():
        data_deleted_accounts[name] = []
        for i in range(len(value)):
            account = data_accounts[name][i]
            if app.check_hostname(account['hostname']) == False:
                # BUGFIX: str.replace returns a new string; the original
                # call discarded the result, leaving the '#' marker in the
                # hostname. Assign it back so the deleted-accounts pass
                # sees the cleaned hostname.
                account['hostname'] = account['hostname'].replace('#', '')
                data_deleted_accounts[name].append(data_accounts[name][i])
                data_accounts[name][i] = ''
    with open(real_path('/database/authentications.json')) as f:
        json_authentications = json.loads(f.read())['authentications']
    data_authentications = []
    for authentication in json_authentications:
        # BUGFIX: the password field previously copied the *username*
        # value; use the stored password.
        data_authentications.append([{
            'username': authentication['username'],
            'password': authentication['password'],
        }])
    accounts = app.generate_accounts(
        data_accounts,
        data_authentications=random.choice(data_authentications))
    queue_accounts = queue.Queue()
    threads = 10
    app.server(('127.0.0.1', 3313), force_tunnel_type='1',
               quiet='full').start()
    for account in accounts:
        queue_accounts.put(account)
    for _ in range(threads):
        thread(queue_accounts, proxy_command).start()
    queue_accounts.join()
    print()
    deleted_accounts = app.generate_accounts(
        data_deleted_accounts,
        data_authentications=[{'username': '******', 'password': '******'}])
    queue_deleted_accounts = queue.Queue()
    for deleted_account in deleted_accounts:
        queue_deleted_accounts.put(deleted_account)
    for _ in range(threads):
        thread(queue_deleted_accounts, proxy_command).start()
    queue_deleted_accounts.join()
def main():
    """Parse CLI arguments, start the SDR read/process threads, and serve the
    Dash web UI.

    The ground-station position comes from --location, or is looked up via
    ipinfo.io when unset. Two worker threads share a queue: one reads raw
    samples from the RTL-SDR, the other demodulates/decodes them.
    """
    # Setup cli arguments
    parser = argparse.ArgumentParser(
        description='''Listen for ADS-B signals using an RTL-SDR and watch the air traffic on local Dash webserver! Default location is http://localhost:8050''')
    parser.add_argument(
        '--rtl_device', '-d', type=int, default=0, metavar='device_index',
        help='Select the RTL-SDR device index to use. Defaults to device 0.')
    parser.add_argument(
        '--location', '-l', type=float, nargs=2, metavar=('Lat', 'Lon'),
        default=(None, None),
        help='Set the latitude and longitude of your ground station; usually your current location. If unset, attempts to determine your location using your IP address.')
    parser.add_argument(
        '--TTL', '-t', type=int, default=100,
        help="Delete a tracked object if we haven't heard from it for TTL seconds. Default to 100 seconds.")
    parser.add_argument(
        '--port', '-p', type=int, default=8050,
        help='The local port to run the Dash webserver on. Default to port 8050.')
    parser.add_argument(
        '--log', type=str, default=None,
        help='Where to log information on detected ADS-B packets. Does not log if unset.')
    parser.add_argument(
        '--fix-single-bit-errors', type=str, default='No', metavar='[Y/N]',
        dest='fix_single_bit_errors',
        help='Have the decoder attempt to fix single bit errors in packets. VERY RESOURCE INTENSIVE AT THIS TIME!!!')
    args = parser.parse_args()

    # Variable initialization
    fs = 2000000          # 2MHz sampling frequency
    center_freq = 1090e6  # 1090 MHz center frequency
    gain = 49.6           # Gain
    N_samples = 2048000   # SDR samples for each chunk of data ( Approx 1.024 seconds per chunk )
    TTL = args.TTL        # How long to store ADS-B object information
    log = args.log        # Where to log packets

    # Determine ground station location using IP address or manual input
    if args.location[0] is None or args.location[1] is None:
        try:
            url = 'https://ipinfo.io'
            loc_request = requests.get(url)
            lat_lon = loc_request.json()['loc'].split(',')
            pos_ref = [float(lat_lon[0]), float(lat_lon[1])]
        except Exception:
            print(f'Error requesting location information from {url}')
            exit()
    else:
        pos_ref = [args.location[0], args.location[1]]

    # Determine whether to fix 1-bit errors: any value starting with
    # Y/y/1/T/t counts as "yes" (membership test replaces the 5-way or-chain).
    FIX_1BIT_ERRORS = args.fix_single_bit_errors[0] in ('Y', 'y', '1', 'T', 't')

    # Setup Dash server
    app.server(pos_ref, planes, packets)

    # Create a queue for communication between the reading and processing threads
    Qin = queue.Queue()

    # Setup the RTL-SDR reader
    sdr = RtlSdr(args.rtl_device)
    sdr.sample_rate = fs           # sampling rate
    sdr.center_freq = center_freq  # 1090MhZ center frequency
    sdr.gain = gain
    stop_flag = threading.Event()

    # Setup the reading and processing threads
    t_sdr_read = threading.Thread(target=sdr_read,
                                  args=(Qin, sdr, N_samples, stop_flag))
    t_signal_process = threading.Thread(
        target=signal_process,
        args=(Qin, source, stop_flag, log, pos_ref, FIX_1BIT_ERRORS))
    t_sdr_read.start()
    t_signal_process.start()

    # Run the Dash web server (blocks until it shuts down)
    app.app.run_server(port=args.port)

    # Run until the threads stop.
    # NOTE(review): active_count() includes the main thread, so this
    # condition is always true; the loop is exited via the interrupt
    # handler below — kept as in the original.
    while threading.active_count() > 0:
        try:
            time.sleep(0.1)
        except BaseException:
            # Catch KeyboardInterrupt (and anything else), signal the
            # worker threads to stop, then re-raise.
            print("Stopping threads...")
            stop_flag.set()
            raise
    exit()
from waitress import serve

from app import server

# BUGFIX: the original called `server(server)` — invoking the WSGI app with
# itself as argument — and never used the imported `serve`. Hand the app to
# waitress's `serve`, which actually runs the production WSGI server.
serve(server)