def handle_free_internet_connect(environ, start_response):
    global is_free_internet_connected
    is_free_internet_connected = True
    args = comp_proxy.configure([])
    argument_parser = argparse.ArgumentParser()
    argument_parser.add_argument('--proxy', action='append', default=[])
    args, _ = argument_parser.parse_known_args(args)
    for props in args.proxy:
        props = props.split(',')
        prop_dict = dict(p.split('=') for p in props[1:])
        fqsocks.gateways.proxy_client.add_proxies(props[0], prop_dict)
    fqsocks.gateways.proxy_client.last_refresh_started_at = 0
    gevent.spawn(fqsocks.gateways.proxy_client.init_proxies)
    start_response(httplib.OK, [('Content-Type', 'text/plain')])
    return []
# Alternate version of the handler above, wired to the fqsocks.fqsocks
# entry points instead of fqsocks.gateways.proxy_client.
def handle_free_internet_connect(environ, start_response):
    global is_free_internet_connected
    is_free_internet_connected = True
    args = comp_proxy.configure([])
    argument_parser = argparse.ArgumentParser()
    argument_parser.add_argument('--proxy', action='append', default=[])
    args, _ = argument_parser.parse_known_args(args)
    for props in args.proxy:
        props = props.split(',')
        prop_dict = dict(p.split('=') for p in props[1:])
        fqsocks.fqsocks.add_proxies(props[0], prop_dict)
    fqsocks.fqsocks.last_refresh_started_at = 0
    gevent.spawn(fqsocks.fqsocks.init_proxies)
    start_response(httplib.OK, [('Content-Type', 'text/plain')])
    return []
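# Illustrative sketch (not part of the original module): the loops in the two
# handlers above assume each --proxy value is a comma-separated string whose
# first field is the proxy type and whose remaining fields are key=value
# properties. The helper name and the sample values below are made up for
# illustration only.
def _split_proxy_value_example(value):
    props = value.split(',')
    return props[0], dict(p.split('=') for p in props[1:])

# e.g. _split_proxy_value_example('goagent,appid=demo-appid,password=demo-password')
# returns ('goagent', {'appid': 'demo-appid', 'password': 'demo-password'})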
except:
    LOGGER.exception('failed to patch ssl')
try:
    # ask any previously running instance to shut down before taking over
    response = urllib2.urlopen('http://127.0.0.1:8318/exit', '').read()
    if 'EXITING' == response:
        LOGGER.critical('!!! found previous instance, exiting !!!')
        gevent.sleep(3)
except:
    LOGGER.exception('failed to exit previous')
# register the local HTTP API handlers
httpd.HANDLERS[('GET', 'ping')] = handle_ping
httpd.HANDLERS[('POST', 'exit')] = handle_exit
httpd.HANDLERS[('POST', 'free-internet/connect')] = handle_free_internet_connect
httpd.HANDLERS[('POST', 'free-internet/disconnect')] = handle_free_internet_disconnect
httpd.HANDLERS[('GET', 'free-internet/is-connected')] = handle_free_internet_is_connected
greenlets = [gevent.spawn(httpd.serve_forever)]
try:
    tun_fd = read_tun_fd_until_ready()
    LOGGER.info('tun fd: %s' % tun_fd)
except:
    LOGGER.exception('failed to get tun fd')
    sys.exit(1)
# forward tun traffic and start the fqsocks proxy client
greenlets.append(gevent.spawn(serve_udp))
greenlets.append(gevent.spawn(redirect_tun_traffic, tun_fd))
args = [
    '--log-level', 'INFO',
    '--log-file', '/data/data/fq.router2/log/fqsocks.log',
    '--listen', '10.25.1.1:12345']
args = comp_proxy.configure(args)
greenlets.append(gevent.spawn(fqsocks.fqsocks.main, args))
for greenlet in greenlets:
    greenlet.join()
# Alternate version of the startup block above: passes --disable-access-check
# to fqsocks and does not register the free-internet/is-connected handler.
except:
    LOGGER.exception('failed to patch ssl')
try:
    response = urllib2.urlopen('http://127.0.0.1:8318/exit', '').read()
    if 'EXITING' == response:
        LOGGER.critical('!!! found previous instance, exiting !!!')
        gevent.sleep(3)
except:
    LOGGER.exception('failed to exit previous')
httpd.HANDLERS[('GET', 'ping')] = handle_ping
httpd.HANDLERS[('POST', 'exit')] = handle_exit
httpd.HANDLERS[('POST', 'free-internet/connect')] = handle_free_internet_connect
httpd.HANDLERS[('POST', 'free-internet/disconnect')] = handle_free_internet_disconnect
greenlets = [gevent.spawn(httpd.serve_forever)]
try:
    tun_fd = read_tun_fd_until_ready()
    LOGGER.info('tun fd: %s' % tun_fd)
except:
    LOGGER.exception('failed to get tun fd')
    sys.exit(1)
greenlets.append(gevent.spawn(serve_udp))
greenlets.append(gevent.spawn(redirect_tun_traffic, tun_fd))
args = [
    '--disable-access-check',
    '--log-level', 'INFO',
    '--log-file', '/data/data/fq.router2/log/fqsocks.log',
    '--listen', '10.25.1.1:12345']
args = comp_proxy.configure(args)
greenlets.append(gevent.spawn(fqsocks.fqsocks.main, args))
for greenlet in greenlets:
    greenlet.join()
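# Illustrative sketch (assumption: httpd serves the handlers registered above
# on 127.0.0.1:8318, the same address the /exit probe talks to). A caller could
# toggle free-internet mode over the local HTTP API like this; the helper name
# is made up for illustration only.
def _toggle_free_internet_example(connect=True):
    path = 'free-internet/connect' if connect else 'free-internet/disconnect'
    # POST with an empty body, mirroring the /exit request above
    return urllib2.urlopen('http://127.0.0.1:8318/' + path, '').read()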