def main():
    """The mainsy"""
    # Parse CLI options: routing strategy, listen port, and worker count.
    parser = argparse.ArgumentParser()
    parser.add_argument('--router', type=str, choices=['random', 'by_data', 'two_level'], required=True)
    parser.add_argument('PORT_NUM', type=int)
    parser.add_argument('--num-workers', type=int, default=multiprocessing.cpu_count())
    args = vars(parser.parse_args())
    # Resolve the router name to a callable that selects a worker;
    # 'random' maps to scatter(), the other names are looked up in globals().
    if 'random' == args['router']:
        args['worker_getter'] = scatter
    else:
        args['worker_getter'] = globals()[args['router']]
    globals().update(args)  # copy the args directly into globals (there are probably loads of good reasons to not do this in practice)
    # spawn the workers
    STDERR_LOCK = multiprocessing.RLock()  # serializes stderr writes across worker processes
    workers_list = list()
    for i in xrange(args['num_workers']):  # Python 2 (xrange, print >>)
        # Each worker gets its own input and output queue.
        worker_queue = multiprocessing.Queue()
        results_queue = multiprocessing.Queue()
        iworker = worker_class(worker_queue, results_queue, worker_id=i, stderr_lock=STDERR_LOCK)
        workers_list.append(iworker)
        with STDERR_LOCK:
            print >> sys.stderr, "Starting worker {}".format(i)
        iworker.start()
    # get_cardinality
    print >> sys.stderr, "Starting tcp server."
    handle = make_handle(workers_list)
    # PORT_NUM comes from globals().update(args) above.
    server = gevent.server.StreamServer(('0.0.0.0', PORT_NUM), handle)  # creates a new server
    try:
        server.serve_forever()  # start accepting new connections
    except KeyboardInterrupt:
        sys.exit(1)
def main(): print "gevperf startup!" gevent.spawn(print_stats) server = gevent.server.StreamServer(("0.0.0.0", 61000), handle) server.serve_forever() return 0
def main():
    """Entry point: load config, wire up optional syslog, serve the telnet honeypot."""
    global COMMANDS
    global syslogger
    global config
    args = get_args()
    config = HoneyConfig(args.config)
    if args.verbose:
        honey_logger.setLevel(logging.DEBUG)
    # Syslog reporting is optional: a missing syslog_* field in the config
    # raises MissingConfigField and we fall back to local logging only.
    try:
        syslogger = get_syslog_logger(config.syslog_address, config.syslog_port, config.syslog_protocol)
        if args.verbose:
            syslogger.setLevel(logging.DEBUG)
        honey_logger.info(
            "Setup syslog with parameters: IP:%s, PORT:%d, PROTOCOL:%s",
            config.syslog_address, config.syslog_port, config.syslog_protocol)
    except MissingConfigField:
        honey_logger.info(
            "Syslog reporting disabled, to enable it add its configuration to the configuration file"
        )
    COMMANDS = config.commands  # command -> response map consumed by the handler
    server = gevent.server.StreamServer((config.ip, config.port), MyTelnetHandler.streamserver_handle)
    honey_logger.info("Listening on %d...", config.port)
    server.serve_forever()
def run_server(self):
    """Serve TCP connections on the configured address, capped at 1000 greenlets."""
    greenlet_pool = gevent.pool.Pool(1000)
    bind_addr = (self.listen_addr, self.listen_port)
    stream_server = gevent.server.StreamServer(
        bind_addr, self.wrap_handle_conn, spawn=greenlet_pool)
    stream_server.serve_forever()
def main(myname, argv):
    """Serve the key/value database over gevent or eventlet.

    argv may hold a single optional db_path; otherwise _DB_PATH is used.
    """
    # Raise to INFO/DEBUG when more verbosity is needed.
    level = logging.WARNING
    logging.basicConfig(level=level, format='%(asctime)-15s %(message)s')
    if len(argv) not in [0, 1]:
        print("Usage: %s [db_path]" % myname)
        exit(1)
    db_path = _DB_PATH if not argv else argv[0]
    if USE_KYOTO:
        db_path += '.kch'
    logging.warning("Serving via %s at %s:%s [db:%s:%s][pid:%d]...",
                    "gevent" if USE_GEVENT else "eventlet",
                    _SERVER_ADDR, _SERVER_PORT,
                    'KyotoCabinet' if USE_KYOTO else 'LevelDB',
                    db_path, os.getpid())
    open_database(db_path)
    if USE_GEVENT:
        gevent.server.StreamServer(
            (_SERVER_ADDR, _SERVER_PORT), handle_request).serve_forever()
        return
    # eventlet path: accept in a loop and hand each connection to a green pool.
    listener = eventlet.listen((_SERVER_ADDR, _SERVER_PORT))
    green_pool = eventlet.GreenPool()
    while True:
        try:
            conn, peer = listener.accept()
            green_pool.spawn_n(handle_request, conn, peer)
        except (SystemExit, KeyboardInterrupt):
            break
def main(): print 'gevperf startup!' gevent.spawn(print_stats) server = gevent.server.StreamServer(('0.0.0.0', 61000), handle) server.serve_forever() return 0
def main(myname, argv):
    """Serve the key/value database over gevent or eventlet (Python 2 entry point).

    argv may contain one optional db_path; otherwise _DB_PATH is used.
    """
    level = logging.WARNING
    # level = logging.INFO
    # level = logging.DEBUG
    logging.basicConfig(level=level, format='%(asctime)-15s %(message)s')
    if len(argv) not in [0, 1]:
        print("Usage: %s [db_path]" % myname)
        exit(1)
    db_path = argv[0] if len(argv) > 0 else _DB_PATH
    if USE_KYOTO:
        db_path += '.kch'  # KyotoCabinet expects its own file extension
    # logging.warn is a deprecated alias of logging.warning — use the real name.
    logging.warning("Serving via %s at %s:%s [db:%s:%s][pid:%d]...",
                    "gevent" if USE_GEVENT else "eventlet",
                    _SERVER_ADDR, _SERVER_PORT,
                    'KyotoCabinet' if USE_KYOTO else 'LevelDB',
                    db_path, os.getpid())
    open_database(db_path)
    if USE_GEVENT:
        server = gevent.server.StreamServer(
            (_SERVER_ADDR, _SERVER_PORT), handle_request)
        server.serve_forever()
    else:
        # eventlet path: accept connections in a loop and spawn green threads.
        server = eventlet.listen((_SERVER_ADDR, _SERVER_PORT))
        pool = eventlet.GreenPool()
        while True:
            try:
                new_sock, address = server.accept()
                pool.spawn_n(handle_request, new_sock, address)
            except (SystemExit, KeyboardInterrupt):
                break
def main():
    """Start the plot server on all interfaces, port 1234, with DEBUG logging."""
    address = ('', 1234)
    plot_server = PlotServer(address)
    log.setLevel("DEBUG")
    log.info("Starting server")
    plot_server.start()
    log.info("Started server")
    plot_server.serve_forever()
def serve_forever():
    """Run the fqsocks HTTP gateway until it stops; logs start/failure/stop."""
    server = gevent.server.StreamServer((LISTEN_IP, LISTEN_PORT), handle)
    # Lazy %-style logging args instead of eager string formatting.
    LOGGER.info('started fqsocks http gateway at %s:%s', LISTEN_IP, LISTEN_PORT)
    try:
        server.serve_forever()
    except Exception:
        # Was a bare except: — narrowed so KeyboardInterrupt/SystemExit propagate.
        LOGGER.exception('failed to start http gateway')
    finally:
        LOGGER.info('http gateway stopped')
def handle(self, *args, **options):
    """Django management-command entry point: run a gevent TCP server (Python 2).

    Reads 'addr' and 'port' from the command options and serves until Ctrl-C.
    """
    addr = options.get('addr')
    port = int(options.get('port'))
    print 'Server started', datetime.datetime.now(), '\t on port', port
    try:
        server = gevent.server.StreamServer((addr, port), DjangoCommandHandler.streamserver_handle)
        server.serve_forever()  # blocks until interrupted
    except KeyboardInterrupt:
        print 'Stopped', datetime.datetime.now()
def run():
    """Configure DEBUG logging and serve on 0.0.0.0:8000 until Ctrl-C."""
    logging.basicConfig(
        level=logging.DEBUG,
        format="%(asctime)s %(levelname) 7s %(module)s: %(message)s")
    stream_server = gevent.server.StreamServer(('0.0.0.0', 8000), handler)
    try:
        stream_server.serve_forever()
    except KeyboardInterrupt:
        pass
def serve_forever():
    """Run the fqsocks TCP gateway until it stops; logs start/failure/stop."""
    server = gevent.server.StreamServer((LISTEN_IP, LISTEN_PORT), handle)
    # Lazy %-style logging args instead of eager string formatting.
    LOGGER.info('started fqsocks tcp gateway at %s:%s', LISTEN_IP, LISTEN_PORT)
    try:
        server.serve_forever()
    except Exception:
        # Was a bare except: — narrowed so KeyboardInterrupt/SystemExit propagate.
        LOGGER.exception('failed to start tcp gateway')
    finally:
        LOGGER.info('tcp gateway stopped')
def serve_dns():
    """Run the DNS datagram server on UDP port 53 until it fails or stops."""
    address = ('', 53)  # all interfaces, privileged DNS port
    server = HandlerDatagramServer(address, handle_dns)
    LOGGER.info('dns server started at %s:%s' % address)
    try:
        server.serve_forever()
    except Exception:
        # Was a bare except: — narrowed so KeyboardInterrupt/SystemExit propagate.
        LOGGER.exception('dns server failed')
    finally:
        LOGGER.info('dns server stopped')
def run(self):
    """Serve TCP connections forever; translate 'address already in use' errors."""
    server = gevent.server.StreamServer((self.address, self.port), self._handle_and_catch)
    try:
        server.serve_forever()
    except OSError as e:
        # 10048 is WSAEADDRINUSE, i.e. Windows-only. NOTE(review): on POSIX the
        # equivalent is errno.EADDRINUSE (98), which re-raises as a plain
        # OSError here — confirm whether that platform gap is intended.
        if e.errno == 10048:
            raise PortInUseError('tcp', self.address, self.port)
        else:
            raise
def start_frontend(frontend_name, frontend_config):
    """Start an HTTP frontend described by frontend_config and serve forever.

    Only the 'http' frontend type is supported. Any setup failure is fatal for
    the whole process (os._exit) — a frontend that cannot start leaves the
    service unusable.
    """
    assert 'http' == frontend_config['type']
    try:
        address = (frontend_config['host'], frontend_config['port'])
        server = gevent.server.StreamServer(address, handle_frontend_http)
        LOGGER.info('serving frontend %s on port %s:%s...' % (frontend_name, frontend_config['host'], frontend_config['port']))
    except:  # deliberately broad: any error during setup aborts the process
        LOGGER.exception('failed to start frontend %s' % frontend_name)
        os._exit(1)
    server.serve_forever()
def main():
    """rproxy entry point: set the console title on Windows, register parent
    proxies from config, start the updater thread, and serve proxy requests."""
    if os.name == 'nt':
        import ctypes
        ctypes.windll.kernel32.SetConsoleTitleW(u'rproxy v%s' % __version__)
    for name, value in conf.userconf.items('parents'):
        conf.addparentproxy(name, value)
    background_updater = Thread(target=updater)
    background_updater.daemon = True
    background_updater.start()
    ThreadingHTTPServer(conf.listen, ProxyHandler).serve_forever()
def run(self):
    """Verify the CA certificate (used to wrap HTTPS), then serve proxy requests."""
    CertUtil.check_ca()
    proxy_server = gevent.server.StreamServer(
        (self.listen_ip, self.listen_port), self.paasproxy_handler)
    self.logger.info("proxy_client listen on: %s:%d" % (self.listen_ip, self.listen_port))
    proxy_server.serve_forever()
    return
def main():
    """Bootstrap the local proxy: normalize the script path, set up logging, serve."""
    global __file__
    # Resolve __file__ to an absolute, symlink-free path so relative resources
    # load correctly, then chdir next to the script.
    __file__ = os.path.abspath(__file__)
    if os.path.islink(__file__):
        # getattr guard: os.readlink does not exist on all platforms
        __file__ = getattr(os, 'readlink', lambda x: x)(__file__)
    os.chdir(os.path.dirname(os.path.abspath(__file__)))
    logging.basicConfig(level=logging.DEBUG if common.LISTEN_DEBUGINFO else logging.INFO, format='%(levelname)s - %(asctime)s %(message)s', datefmt='[%b %d %H:%M:%S]')
    pre_start()
    sys.stderr.write(common.info())  # banner with the effective configuration
    HandlerClass = VPSProxyHandler
    server = LocalProxyServer((common.LISTEN_IP, common.LISTEN_PORT), HandlerClass)
    server.serve_forever()
def serve_forver():
    # NOTE(review): function name is misspelled ('forver') — kept, since
    # callers may reference it; rename together with its call sites.
    def handle_stream(socket, address):
        # NOTE(review): `self` is not defined in this scope — this looks like
        # it was a method originally; as written it raises NameError on the
        # first client connection. Confirm and restore the owning instance.
        stream = self.handler(socket)
        stream.run()
    # Mutually-authenticated TLSv1: clients must present a cert signed by our CA.
    server = gevent.server.StreamServer(
        ('127.0.0.1', 9000), handle_stream,
        certfile='../pki/data/certs/servercert.pem',
        keyfile='../pki/data/private/serverkey.pem',
        ca_certs='../pki/data/certs/cacert.pem',
        cert_reqs=ssl.CERT_REQUIRED,
        ssl_version=ssl.PROTOCOL_TLSv1)
    server.serve_forever()
def main():
    """Telnet honeypot entry point: configure logging/syslog, build the command
    table, and serve with a bounded custom greenlet pool."""
    global COMMANDS
    global OVERWRITE_COMMANDS
    global syslogger
    global config
    global custom_pool
    args = get_args()
    config = HoneyConfig(args.config)
    # Log to the file given with --output, otherwise to stderr.
    if args.output:
        logging.basicConfig(
            filename = args.output,
            level=logging.INFO,
            format='%(asctime)s [%(name)s] %(levelname)s %(filename)s:%(lineno)s %(message)s')
    else:
        logging.basicConfig(
            level=logging.INFO,
            format='%(asctime)s [%(name)s] %(levelname)s %(filename)s:%(lineno)s %(message)s')
    if args.verbose:
        honey_logger.setLevel(logging.DEBUG)
    # Syslog reporting is optional; missing config fields disable it.
    try:
        syslogger = get_syslog_logger(config.syslog_address, config.syslog_port, config.syslog_protocol)
        if args.verbose:
            syslogger.setLevel(logging.DEBUG)
        honey_logger.info(
            "Setup syslog with parameters: IP:%s, PORT:%d, PROTOCOL:%s",
            config.syslog_address, config.syslog_port, config.syslog_protocol)
    except MissingConfigField:
        honey_logger.info("Syslog reporting disabled, to enable it add its configuration to the configuration file")
    # Expand escape sequences (e.g. "\n") in configured responses (Python 2 only).
    COMMANDS = {cmd:resp.decode('string_escape') for (cmd, resp) in config.commands.items()}
    try:
        the_timeout = config.timeout
    except MissingConfigField:
        the_timeout = default_timeout
    try:
        OVERWRITE_COMMANDS = config.overwrite_commands
    except MissingConfigField:
        OVERWRITE_COMMANDS = {}
    socket.setdefaulttimeout(the_timeout)
    custom_pool = CustomPool.CustomPool(honey_logger, config.pool)  # bounds concurrent sessions
    server = gevent.server.StreamServer((config.ip, config.port), MyTelnetHandler.streamserver_handle, spawn=custom_pool)
    honey_logger.info("Listening on %s:%d with timeout=%d", config.ip, config.port, the_timeout)
    server.serve_forever()
def main(myname, argv):
    """Serve the key/value database over TCP.

    argv may contain one optional db_path; otherwise _DB_PATH is used.
    """
    # Pick exactly one level. The previous version assigned INFO, WARNING and
    # DEBUG in a row, leaving two dead stores — only DEBUG ever took effect.
    level = logging.DEBUG
    # level = logging.INFO
    # level = logging.WARNING
    logging.basicConfig(level=level, format='%(asctime)-15s %(message)s')
    if len(argv) not in [0, 1]:
        print("Usage: %s [db_path]" % myname)
        exit(1)
    db_path = argv[0] if len(argv) > 0 else _DB_PATH
    # logging.warn is a deprecated alias of logging.warning — use the real name.
    logging.warning("Serving at %s:%s [db:%s][pid:%d]...",
                    _SERVER_ADDR, _SERVER_PORT, db_path, os.getpid())
    open_database(db_path)
    server = gevent.server.StreamServer(
        (_SERVER_ADDR, _SERVER_PORT), handle_request)
    server.serve_forever()
def serve(listen, upstream, china_upstream, hosted_domain, hosted_at, direct, enable_china_domain, enable_hosted_domain, fallback_timeout, strategy):
    """Parse 'ip:port' address strings, build a DnsHandler, and serve DNS forever.

    listen is a single 'ip:port' string; upstream and china_upstream are
    iterables of such strings.
    """
    address = parse_ip_colon_port(listen)
    upstreams = [parse_ip_colon_port(e) for e in upstream]
    china_upstreams = [parse_ip_colon_port(e) for e in china_upstream]
    handler = DnsHandler(
        upstreams, enable_china_domain, china_upstreams, enable_hosted_domain,
        hosted_domain, hosted_at, direct, fallback_timeout, strategy)
    server = HandlerDatagramServer(address, handler)
    LOGGER.info('dns server started at %r, forwarding to %r', address, upstreams)
    try:
        server.serve_forever()
    except:  # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit
        LOGGER.exception('dns server failed')
    finally:
        LOGGER.info('dns server stopped')
def main():
    """Port-forwarder entry point.

    CLI arguments (positional): local_address local_port remote_address
    remote_port backdoor_address backdoor_port.
    """
    from sys import argv
    global remote_addr  # read elsewhere (handle_connection) as the forward target
    local_address, local_port, remote_address, remote_port, backdoor_address, backdoor_port = argv[1:]
    remote_addr = (remote_address, int(remote_port))
    # Interactive debugging console exposing this module's globals.
    backdoor = gevent.backdoor.BackdoorServer((backdoor_address, int(backdoor_port)), locals=globals())
    backdoor.start()
    listener = socket.socket()
    listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    listener.bind((local_address, int(local_port)))
    listener.listen(50)
    server = gevent.server.StreamServer(listener, handle_connection)
    server.serve_forever()
def serve(listen, upstream, china_upstream, hosted_domain, hosted_at, direct, enable_china_domain, enable_hosted_domain, fallback_timeout, strategy):
    """Resolve the listen/upstream 'ip:port' specs and run the DNS server."""
    bind_addr = parse_ip_colon_port(listen)
    primary_upstreams = [parse_ip_colon_port(item) for item in upstream]
    china_resolvers = [parse_ip_colon_port(item) for item in china_upstream]
    dns_handler = DnsHandler(primary_upstreams, enable_china_domain,
                             china_resolvers, enable_hosted_domain,
                             hosted_domain, hosted_at, direct,
                             fallback_timeout, strategy)
    dns_server = HandlerDatagramServer(bind_addr, dns_handler)
    LOGGER.info('dns server started at %r, forwarding to %r',
                bind_addr, primary_upstreams)
    try:
        dns_server.serve_forever()
    except:
        LOGGER.exception('dns server failed')
    finally:
        LOGGER.info('dns server stopped')
def main():
    """Forwarding proxy entry point; expects six positional CLI arguments:
    local addr/port, remote addr/port, backdoor addr/port."""
    from sys import argv
    global remote_addr
    (local_address, local_port, remote_address, remote_port,
     backdoor_address, backdoor_port) = argv[1:]
    remote_addr = (remote_address, int(remote_port))
    # Debug console exposing module globals.
    debug_console = gevent.backdoor.BackdoorServer(
        (backdoor_address, int(backdoor_port)), locals=globals())
    debug_console.start()
    accept_sock = socket.socket()
    accept_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    accept_sock.bind((local_address, int(local_port)))
    accept_sock.listen(50)
    gevent.server.StreamServer(accept_sock, handle_connection).serve_forever()
def main():
    """Telnet honeypot entry point: configure logging and serve on port 23."""
    global COMMANDS
    global syslogger
    global config
    args = get_args()
    config = HoneyConfig(args.config)
    # Log to --output file when given, otherwise to stderr.
    if args.output:
        logging.basicConfig(
            filename=args.output,
            level=logging.INFO,
            format=
            '%(asctime)s [%(name)s] %(levelname)s %(filename)s:%(lineno)s %(message)s'
        )
    else:
        logging.basicConfig(
            level=logging.INFO,
            format=
            '%(asctime)s [%(name)s] %(levelname)s %(filename)s:%(lineno)s %(message)s'
        )
    if args.verbose:
        honey_logger.setLevel(logging.DEBUG)
    # Syslog reporting is optional; missing config fields disable it.
    try:
        syslogger = get_syslog_logger(config.syslog_address, config.syslog_port,
                                      config.syslog_protocol)
        if args.verbose:
            syslogger.setLevel(logging.DEBUG)
        honey_logger.info(
            "Setup syslog with parameters: IP:%s, PORT:%d, PROTOCOL:%s",
            config.syslog_address, config.syslog_port, config.syslog_protocol)
    except MissingConfigField:
        honey_logger.info(
            "Syslog reporting disabled, to enable it add its configuration to the configuration file"
        )
    COMMANDS = config.commands
    # NOTE(review): the port is hard-coded to 23 even though config supplies
    # the IP — confirm whether config.port should be used instead (a sibling
    # variant of this entry point listens on config.port).
    server = gevent.server.StreamServer((config.ip, 23),
                                        MyTelnetHandler.streamserver_handle)
    honey_logger.info("Listening on %d...", 23)
    server.serve_forever()
def process_requests(self):
    """Accept TLS client connections and dispatch them; blocks forever.

    Clients must present a certificate signed by the configured CA
    (cert_reqs=CERT_REQUIRED).
    """
    binding = (rpipe.config.server.BIND_IP, rpipe.config.server.BIND_PORT)
    _logger.info("Running server: %s", binding)
    handler = _ServerConnectionHandler()
    server = gevent.server.StreamServer(
        binding, handler.handle_new_connection,
        cert_reqs=gevent.ssl.CERT_REQUIRED,
        keyfile=rpipe.config.server.KEY_FILEPATH,
        certfile=rpipe.config.server.CRT_FILEPATH,
        ca_certs=rpipe.config.server.CA_CRT_FILEPATH)
    # Wait until termination. Generally, we should already be running in
    # its own gthread.
    #
    # Since there is no cleanup and everything is based on coroutines,
    # default CTRL+BREAK and SIGTERM handling should be fine.
    server.serve_forever()
def serve(listen, upstream, china_upstream, hosted_domain, hosted_at, direct, enable_china_domain, enable_hosted_domain, fallback_timeout, strategy):
    """Start the DNS server, falling back to public resolvers when none given.

    Defaults: Google DNS / OpenDNS for general resolution; the 114DNS
    resolvers for China-domain resolution when enabled.
    """
    address = parse_ip_colon_port(listen)
    upstreams = [parse_ip_colon_port(e) for e in upstream] or \
        [('8.8.8.8', 53), ('208.67.222.222', 5353)]
    if enable_china_domain:
        china_upstreams = [parse_ip_colon_port(e) for e in china_upstream] or \
            [('114.114.114.114', 53), ('114.114.115.115', 53)]
    else:
        china_upstreams = []
    if enable_hosted_domain:
        hosted_domains = hosted_domain or HOSTED_DOMAINS()
    else:
        hosted_domains = set()
    server = DNSServer(address, upstreams, china_upstreams, hosted_domains, hosted_at, direct, fallback_timeout, strategy)
    LOGGER.info('dns server started at %r, forwarding to %r', address, upstreams)
    try:
        server.serve_forever()
    except:  # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit
        LOGGER.exception('dns server failed')
    finally:
        LOGGER.info('dns server stopped')
def run_listener(conn):
    """Serve HAPI telnet sessions on all interfaces, port 8023.

    `conn` is accepted for interface compatibility but not used here.
    """
    listener = gevent.server.StreamServer(
        ("", 8023), HAPIListener.streamserver_handle)
    listener.serve_forever()
def main(ui_hostname, ui_port, base_name, params_filename, plot_sensors,
         use_ui_server, adapt_model, save_data, n_updates_per_break,
         batch_size, learning_rate, n_min_trials, trial_start_offset,
         break_start_offset, break_stop_offset, pred_gap, incoming_port,
         load_old_data, use_new_adam_params, input_time_length,
         train_on_breaks, min_break_samples, min_trial_samples):
    """Start the online EEG prediction server: load model parameters,
    optionally enable batch-wise online adaptation, and serve incoming data."""
    setup_logging()
    assert np.little_endian, "Should be in little endian"
    train_params = None # for trainer, e.g. adam params
    if params_filename is not None:
        if params_filename == 'newest':
            # sort will already sort temporally with our time string format
            all_params_files = sorted(glob(base_name + ".*.model_params.npy"))
            assert len(all_params_files) > 0, ("Expect atleast one params file "
                "if 'newest' given as argument")
            params_filename = all_params_files[-1]
        log.info("Loading model params from {:s}".format(params_filename))
        params = np.load(params_filename)
        # Trainer (e.g. adam) state is stored next to the model params file.
        train_params_filename = params_filename.replace('model_params.npy',
            'trainer_params.npy')
        if os.path.isfile(train_params_filename):
            if use_new_adam_params:
                log.info("Loading trainer params from {:s}".format(train_params_filename))
                train_params = np.load(train_params_filename)
        else:
            log.warn("No train/adam params found, starting optimization params "
                "from scratch (model params will be loaded anyways).")
    else:
        params = np.load(base_name + '.npy')
    exp = create_experiment(base_name + '.yaml')
    # Possibly change input time length, for exmaple
    # if input time length very long during training and should be
    # shorter for online
    if input_time_length is not None:
        log.info("Change input time length to {:d}".format(input_time_length))
        set_input_window_length(exp.final_layer, input_time_length)
        # probably unnecessary, just for safety
        exp.iterator.input_time_length = input_time_length
    # Have to set for both exp final layer and actually used model
    # as exp final layer might be used for adaptation
    # maybe check this all for correctness?
    cnt_model = exp.final_layer
    set_param_values_backwards_compatible(cnt_model, params)
    prediction_model = transform_to_normal_net(cnt_model)
    set_param_values_backwards_compatible(prediction_model, params)
    data_processor = StandardizeProcessor(factor_new=1e-3)
    online_model = OnlineModel(prediction_model)
    if adapt_model:
        online_trainer = BatchWiseCntTrainer(exp, n_updates_per_break,
            batch_size, learning_rate, n_min_trials, trial_start_offset,
            break_start_offset=break_start_offset,
            break_stop_offset=break_stop_offset,
            train_param_values=train_params,
            add_breaks=train_on_breaks,
            min_break_samples=min_break_samples,
            min_trial_samples=min_trial_samples)
    else:
        log.info("Not adapting model...")
        online_trainer = NoTrainer()
    coordinator = OnlineCoordinator(data_processor, online_model,
        online_trainer, pred_gap=pred_gap)
    hostname = ''  # bind on all interfaces
    server = PredictionServer((hostname, incoming_port),
        coordinator=coordinator, ui_hostname=ui_hostname, ui_port=ui_port,
        plot_sensors=plot_sensors, use_ui_server=use_ui_server,
        save_data=save_data, model_base_name=base_name,
        adapt_model=adapt_model)
    # Compilation takes some time so initialize trainer already
    # before waiting in connection in server
    online_trainer.initialize()
    if adapt_model and load_old_data:
        online_trainer.add_data_from_today(data_processor)
    log.info("Starting server on port {:d}".format(incoming_port))
    server.start()
    log.info("Started server")
    server.serve_forever()
def main():
    """Run the telnet server on settings.TELNET_PORT (all interfaces)."""
    print("Launching TELNET server at port: %s" % settings.TELNET_PORT)
    telnet_server = gevent.server.StreamServer(
        ("", settings.TELNET_PORT), TelnetHandler.streamserver_handle)
    print("Listening...")
    telnet_server.serve_forever()
def run(self):
    """Kick off the work refresher, then serve worker connections forever."""
    self.work.start_refresher()
    worker_addr = (config.worker_host, config.worker_port)
    gevent.server.StreamServer(worker_addr, self._serve_worker).serve_forever()
def run(self):
    """Start the periodic work refresher and block serving worker requests."""
    self.work.start_refresher()
    worker_server = gevent.server.StreamServer(
        (config.worker_host, config.worker_port), self._serve_worker)
    worker_server.serve_forever()
if not data: response.close() break if xorchar: yield ''.join(chr(ord(x) ^ xorchar) for x in data) else: yield data except httplib.HTTPException: raise app = gae_application if urlfetch else paas_application if bae: application = bae.core.wsgi.WSGIApplication(app) elif sae: application = sae.create_wsgi_app(app) else: application = app if __name__ == '__main__': logging.basicConfig(level=logging.INFO, format='%(levelname)s - - %(asctime)s %(message)s', datefmt='[%b %d %H:%M:%S]') import gevent import gevent.server import gevent.wsgi import gevent.monkey gevent.monkey.patch_all(dns=gevent.version_info[0] >= 1) server = gevent.wsgi.WSGIServer(('', int(sys.argv[1])), application) logging.info('local paas_application serving at %s:%s', server.address[0], server.address[1]) server.serve_forever()
def main():
    """Serve DNS on UDP port 53 (all interfaces) with DEBUG logging."""
    logging.basicConfig(
        level=logging.DEBUG,
        format='%(levelname)s - %(asctime)s %(message)s',
        datefmt='[%b %d %H:%M:%S]')
    bind_addr = ('', 53)
    dns_server = DNSServer(bind_addr)
    logging.info('serving at %r', bind_addr)
    dns_server.serve_forever()
def listen(self):
    """Accept connections on the configured host/port and serve forever."""
    endpoint = (self.options['host'], self.options['port'])
    gevent.server.StreamServer(endpoint, self.handle_connection).serve_forever()
def main():
    """CLI for a pwrtls-encrypted pipe (Python 2): connect ('c'), accept a
    single client ('l'), or serve many clients ('s'); stdin/stdout are
    bridged to the encrypted socket in both directions."""
    parser = argparse.ArgumentParser(description='pwrcall nacl test.')
    parser.add_argument('action', help='connect/listen', choices=['connect', 'listen', 'c', 'l'])
    parser.add_argument('--state', dest='state', help='path to state file', default='pwr.state')
    parser.add_argument('--sock', dest='sock', help='where to connect / what to bind', required=True)
    parser.add_argument('--rpub', dest='rpub', help='remove public key for verification')
    args = parser.parse_args()
    state = pwrtls.state_file(args.state)
    # stdin/stdout must be non-blocking for the gevent forward loops.
    fdnonblock(sys.stdin.fileno())
    fdnonblock(sys.stdout.fileno())
    if args.rpub:
        args.rpub = args.rpub.decode('hex')  # Python 2 hex-string decode
    if args.action[0] == 'c':
        # Client mode: connect, wrap with pwrtls, pump both directions.
        ip, port = args.sock.split(':', 1)
        port = int(port)
        socket = gevent.socket.create_connection((ip, port))
        socket = pwrtls.wrap_socket(socket, **state)
        socket.do_handshake()
        print 'remote longpub', socket.remote_longpub.encode('hex')
        g1 = gevent.spawn(forward, sys.stdin, socket)
        forward(socket, sys.stdout)
        print 'server gone'
        socket.close()
    elif args.action[0] == 'l':
        # Listen mode: accept exactly one client, then bridge.
        if ':' in args.sock:
            ip, port = args.sock.split(':', 1)
        else:
            ip, port = '0.0.0.0', args.sock
        port = int(port)
        lsocket = gevent.socket.socket()
        lsocket.setsockopt(gevent.socket.SOL_SOCKET, gevent.socket.SO_REUSEADDR, 1)
        lsocket.bind((ip, port))
        lsocket.listen(1)
        socket, addr = lsocket.accept()
        lsocket.close()
        print 'new client:', addr
        socket = pwrtls.wrap_socket(socket, server_side=True, **state)
        socket.do_handshake()
        print 'remote longpub', socket.remote_longpub.encode('hex')
        g1 = gevent.spawn(forward, sys.stdin, socket)
        forward(socket, sys.stdout)
        print 'client gone', addr
        socket.close()
    elif args.action[0] == 's':
        # Server mode: StreamServer handling any number of clients.
        # NOTE(review): this branch is unreachable — argparse restricts
        # 'action' to ['connect', 'listen', 'c', 'l']; confirm whether
        # 's'/'serve' should be added to the choices.
        if ':' in args.sock:
            ip, port = args.sock.split(':', 1)
        else:
            ip, port = '0.0.0.0', args.sock
        port = int(port)
        def handle(sock, addr):
            print 'new client:', addr
            socket = pwrtls.wrap_socket(sock, server_side=True, **state)
            socket.do_handshake()
            print 'remote longpub', socket.remote_longpub.encode('hex')
            forward(socket, sys.stdout)
            print 'client gone', addr
            socket.close()
        server = gevent.server.StreamServer((ip, port), handle)
        server.serve_forever()
    return 0
def main(ui_hostname, ui_port, base_name, params_filename, plot_sensors,
         use_ui_server, adapt_model, save_data, n_updates_per_break,
         batch_size, learning_rate, n_min_trials, trial_start_offset,
         break_start_offset, break_stop_offset, pred_freq, incoming_port,
         load_old_data, use_new_adam_params, input_time_length):
    """Start the online EEG prediction server (lasagne variant): load model
    parameters, optionally enable batch-wise adaptation, and serve."""
    setup_logging()
    assert np.little_endian, "Should be in little endian"
    train_params = None # for trainer, e.g. adam params
    if params_filename is not None:
        if params_filename == 'newest':
            # sort will already sort temporally with our time string format
            all_params_files = sorted(glob(base_name + ".*.model_params.npy"))
            assert len(all_params_files) > 0, ("Expect atleast one params file "
                "if 'newest' given as argument")
            params_filename = all_params_files[-1]
        log.info("Loading model params from {:s}".format(params_filename))
        params = np.load(params_filename)
        # Trainer (e.g. adam) state lives next to the model params file.
        train_params_filename = params_filename.replace('model_params.npy',
            'trainer_params.npy')
        if os.path.isfile(train_params_filename):
            if use_new_adam_params:
                log.info("Loading trainer params from {:s}".format(train_params_filename))
                train_params = np.load(train_params_filename)
        else:
            log.warn("No train/adam params found, starting optimization params "
                "from scratch (model params will be loaded anyways).")
    else:
        params = np.load(base_name + '.npy')
    exp = create_experiment(base_name + '.yaml')
    # Possibly change input time length, for exmaple
    # if input time length very long during training and should be
    # shorter for online
    if input_time_length is not None:
        log.info("Change input time length to {:d}".format(input_time_length))
        set_input_window_length(exp.final_layer, input_time_length)
        # probably unnecessary, just for safety
        exp.iterator.input_time_length = input_time_length
    # Have to set for both exp final layer and actually used model
    # as exp final layer might be used for adaptation
    # maybe check this all for correctness?
    cnt_model = exp.final_layer
    lasagne.layers.set_all_param_values(cnt_model, params)
    prediction_model = transform_to_normal_net(cnt_model)
    lasagne.layers.set_all_param_values(prediction_model, params)
    data_processor = StandardizeProcessor(factor_new=1e-3)
    online_model = OnlineModel(prediction_model)
    if adapt_model:
        online_trainer = BatchWiseCntTrainer(exp, n_updates_per_break,
            batch_size, learning_rate, n_min_trials, trial_start_offset,
            break_start_offset=break_start_offset,
            break_stop_offset=break_stop_offset,
            train_param_values=train_params)
    else:
        log.info("Not adapting model...")
        online_trainer = NoTrainer()
    coordinator = OnlineCoordinator(data_processor, online_model,
        online_trainer, pred_freq=pred_freq)
    hostname = ''  # bind on all interfaces
    server = PredictionServer((hostname, incoming_port),
        coordinator=coordinator, ui_hostname=ui_hostname, ui_port=ui_port,
        plot_sensors=plot_sensors, use_ui_server=use_ui_server,
        save_data=save_data, model_base_name=base_name,
        adapt_model=adapt_model)
    # Compilation takes some time so initialize trainer already
    # before waiting in connection in server
    online_trainer.initialize()
    if adapt_model and load_old_data:
        online_trainer.add_data_from_today(data_processor)
    log.info("Starting server on port {:d}".format(incoming_port))
    server.start()
    log.info("Started server")
    server.serve_forever()
def start_server():
    """Bring up the fqsocks listener and serve until the process is stopped."""
    fqsocks_server = gevent.server.StreamServer((LISTEN_IP, LISTEN_PORT), handle)
    LOGGER.info('started fqsocks at %s:%s' % (LISTEN_IP, LISTEN_PORT))
    fqsocks_server.serve_forever()
def server_forever(server):
    """Block in server.serve_forever(); treat KeyboardInterrupt as a clean exit."""
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        return
def loop(self):
    """Serve telnet sessions on all interfaces, port 8025, indefinitely."""
    telnet_server = gevent.server.StreamServer(
        ("", 8025), MyTelnetHandler.streamserver_handle)
    telnet_server.serve_forever()
def main():
    """Run MyServer bound to 127.0.0.1:56789 until stopped."""
    MyServer(('127.0.0.1', 56789)).serve_forever()
def lbmain(args=None, run=True):
    """%prog [options] zookeeper_connection path

    Run a resume-based load balancer on addr.
    """
    # NOTE: this docstring doubles as the optparse usage text below, so its
    # wording is part of runtime behavior.
    if args is None:
        args = sys.argv[1:]
    elif isinstance(args, str):
        # String args means "called from a test": parse them but don't block.
        args = args.split()
        run = False
    import optparse
    parser = optparse.OptionParser(lbmain.__doc__)
    parser.add_option(
        '-a', '--address', default=':0',
        help="Address to listed on for web requests"
        )
    parser.add_option(
        '-b', '--backlog', type='int',
        help="Server backlog setting.")
    parser.add_option(
        '-d', '--backdoor', action='store_true',
        help="Run a backdoor server. Use with caution!")
    parser.add_option(
        '-e', '--disconnect-message',
        help="Path to error page to use when a request is lost due to "
        "worker disconnection"
        )
    parser.add_option(
        '-L', '--logger-configuration',
        help=
        "Read logger configuration from the given configuration file path.\n"
        "\n"
        "The configuration file must be in ZConfig logger configuration syntax."
        "\n"
        "Alternatively, you can give a Python logger level name or number."
        )
    parser.add_option('-l', '--access-logger', help='Access-log logger name.')
    parser.add_option(
        '-m', '--max-connections', type='int',
        help="Maximum number of simultanious accepted connections.")
    parser.add_option(
        '-r', '--request-classifier', default='zc.resumelb.lb:host_classifier',
        help="Request classification function (module:expr)"
        )
    parser.add_option(
        '-s', '--status-server',
        help=("Run a status server for getting pool information. "
              "The argument is a unix-domain socket path to listen on."))
    parser.add_option(
        '-t', '--socket-timeout', type='float', default=99.,
        help=('HTTP socket timeout.'))
    parser.add_option(
        '-v', '--single-version', action='store_true',
        help=('Only use a single worker version.'))
    try:
        options, args = parser.parse_args(args)
        if len(args) != 2:
            print 'Error: must supply a zookeeper connection string and path.'
            parser.parse_args(['-h'])  # prints help and raises SystemExit
        zookeeper, path = args
    except SystemExit:
        if run:
            raise
        else:
            return
    # Logger configuration may be a numeric level, a level name, or a
    # ZConfig configuration file path.
    if options.logger_configuration:
        logger_config = options.logger_configuration
        if re.match(r'\d+$', logger_config):
            logging.basicConfig(level=int(logger_config))
        elif logger_config in ('CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG'):
            logging.basicConfig(level=getattr(logging, logger_config))
        else:
            import ZConfig
            with open(logger_config) as f:
                ZConfig.configureLoggers(f.read())
    zk = zc.zk.ZooKeeper(zookeeper)
    addrs = zk.children(path+'/workers/providers')
    # Resolve the "module:expr" request-classifier spec.
    rcmod, rcexpr = options.request_classifier.split(':')
    __import__(rcmod)
    rcmod = sys.modules[rcmod]
    request_classifier = eval(rcexpr, rcmod.__dict__)
    disconnect_message = options.disconnect_message
    if disconnect_message:
        with open(disconnect_message) as f:
            disconnect_message = f.read()
    else:
        disconnect_message = zc.resumelb.lb.default_disconnect_message
    from zc.resumelb.lb import LB
    lb = LB(map(zc.parse_addr.parse_addr, ()), request_classifier,
            disconnect_message, single_version=options.single_version)
    to_send = [[]]  # one-slot mailbox read by the async watcher below
    # Set up notification of address changes.
    awatcher = gevent.get_hub().loop.async()
    @awatcher.start
    def _():
        lb.set_worker_addrs(to_send[0])
    if options.single_version:
        @addrs
        def get_addrs(a):
            # addr -> worker version mapping, read from each provider node.
            to_send[0] = dict(
                (zc.parse_addr.parse_addr(addr),
                 zk.get_properties(
                     path+'/workers/providers/'+addr).get('version')
                 )
                for addr in addrs)
            awatcher.send()
    else:
        @addrs
        def get_addrs(a):
            to_send[0] = map(zc.parse_addr.parse_addr, addrs)
            awatcher.send()
    # Set up notification of address changes.
    settings = zk.properties(path)
    swatcher = gevent.get_hub().loop.async()
    swatcher.start(lambda : lb.update_settings(settings))
    settings(lambda a: swatcher.send())
    lb.zk = zk
    lb.__zk = addrs, settings
    # Now, start a wsgi server
    addr = zc.parse_addr.parse_addr(options.address)
    if options.max_connections:
        spawn= gevent.pool.Pool(options.max_connections)
    else:
        spawn = 'default'
    if options.access_logger:
        accesslog = AccessLog(options.access_logger)
    else:
        accesslog = None
    server = WSGIServer(
        addr, lb.handle_wsgi, backlog=options.backlog,
        spawn=spawn, log=accesslog,
        socket_timeout=options.socket_timeout)
    server.start()
    registration_data = {}
    if options.backdoor:
        # Interactive debug console on an ephemeral localhost port.
        from gevent import backdoor
        bd = backdoor.BackdoorServer(('127.0.0.1', 0), locals())
        bd.start()
        registration_data['backdoor'] = '127.0.0.1:%s' % bd.server_port
    status_server = None
    if options.status_server:
        # Unix-domain socket that dumps pool backlog stats as one JSON line.
        def status(socket, addr):
            pool = lb.pool
            writer = socket.makefile('w')
            writer.write(json.dumps(
                dict(
                    backlog = pool.backlog,
                    mean_backlog = pool.mbacklog,
                    workers = [
                        (worker.__name__,
                         worker.backlog,
                         worker.mbacklog,
                         (int(worker.oldest_time)
                          if worker.oldest_time else None),
                         )
                        for worker in sorted(
                            pool.workers, key=lambda w: w.__name__)
                        ]
                    ))+'\n')
            writer.close()
            socket.close()
        status_server_address = options.status_server
        if os.path.exists(status_server_address):
            os.remove(status_server_address)
        sock = gevent.socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        sock.bind(status_server_address)
        sock.listen(5)
        status_server = gevent.server.StreamServer(sock, status)
        status_server.start()
    zk.register_server(path+'/providers', (addr[0], server.server_port),
                       **registration_data)
    def shutdown():
        zk.close()
        server.close()
        if status_server is not None:
            status_server.close()
        lb.shutdown()
    gevent.signal(signal.SIGTERM, shutdown)
    if run:
        try:
            server.serve_forever()
        finally:
            logging.getLogger(__name__+'.lbmain').info('exiting')
            zk.close()
    else:
        # Test mode: give greenlets a tick to start, then hand back handles.
        gevent.sleep(.01)
        return lb, server
def main():
    """Start a DNS server bound to all interfaces on port 53 and serve forever.

    Blocks in ``serve_forever()``; never returns under normal operation.
    """
    server = DNSServer(('', 53))
    logging.info('serving at %r', server.address)
    # NOTE: the original had a stray no-op statement here (`server.family`,
    # a bare attribute access whose result was discarded) — removed.
    server.serve_forever()
def init_cli():
    """Run the telnet CLI: listen on all interfaces at globals.TELNET_PORT.

    Blocks in ``serve_forever()``; each incoming connection is handed to
    ``MyTelnetHandler.streamserver_handle``.
    """
    bind_addr = ('', globals.TELNET_PORT)
    telnet_server = gevent.server.StreamServer(
        bind_addr, MyTelnetHandler.streamserver_handle)
    telnet_server.serve_forever()
def main(
        out_hostname, out_port, base_name, params_filename,
        plot_sensors, use_out_server, adapt_model, save_data,
        n_updates_per_break, batch_size, learning_rate,
        n_min_trials, trial_start_offset, break_start_offset,
        break_stop_offset,
        pred_gap,
        incoming_port, load_old_data, use_new_adam_params,
        input_time_length, train_on_breaks,
        min_break_samples, min_trial_samples,
        n_chans,
        cuda):
    """Load a saved model, optionally set up online adaptation, and run the
    prediction TCP server until shutdown.

    Blocks in ``server.serve_forever()``; does not return under normal
    operation. Requires a CUDA device (the GPU id is probed below).
    """
    setup_logging()
    hostname = ''
    # supplier/sender/buffer are passed as None to OnlineExperiment;
    # presumably it creates defaults internally — TODO confirm.
    supplier = None
    sender = None
    buffer = None
    # load model to correct gpu id: allocate a throwaway tensor on the
    # default CUDA device to discover which device id to map storages onto
    gpu_id = th.FloatTensor(1).cuda().get_device()
    def inner_device_mapping(storage, location):
        # map_location callback for th.load: place every storage on gpu_id
        return storage.cuda(gpu_id)
    model_name = os.path.join(base_name, 'model.pkl')
    model = th.load(model_name, map_location=inner_device_mapping)
    predictor = ModelPredictor(
        model, input_time_length=input_time_length, pred_gap=pred_gap,
        cuda=cuda)
    if adapt_model:
        # Online-adaptation path: build an optimizer + trainer around the
        # loaded model.
        loss_function = log_categorical_crossentropy
        model_loss_function = None
        model_constraint = MaxNormDefaultConstraint()
        optimizer = Adam(model.parameters(), lr=learning_rate)
        n_preds_per_input = None  # set later
        n_classes = None  # set later
        trainer = BatchCntTrainer(
            model, loss_function, model_loss_function, model_constraint,
            optimizer, input_time_length, n_preds_per_input, n_classes,
            n_updates_per_break=n_updates_per_break,
            batch_size=batch_size,
            n_min_trials=n_min_trials,
            trial_start_offset=trial_start_offset,
            break_start_offset=break_start_offset,
            break_stop_offset=break_stop_offset,
            add_breaks=train_on_breaks,
            min_break_samples=min_break_samples,
            min_trial_samples=min_trial_samples,
            cuda=cuda)
        trainer.set_n_chans(n_chans)
    else:
        trainer = NoTrainer()
    if params_filename is not None:
        if params_filename == 'newest':
            # sort will already sort temporally with our time string format
            all_params_files = sorted(
                glob(os.path.join(base_name, "*.model_params.pkl")))
            assert len(all_params_files) > 0, (
                "Expect atleast one params file "
                "if 'newest' given as argument")
            params_filename = all_params_files[-1]
        log.info("Loading model params from {:s}".format(params_filename))
        model_params = th.load(params_filename,
                               map_location=inner_device_mapping)
        model.load_state_dict(model_params)
        # Trainer (Adam) state is stored next to the model params with a
        # parallel filename; load it only when adapting and not told to
        # start optimization from fresh params.
        train_params_filename = params_filename.replace(
            'model_params.pkl', 'trainer_params.pkl')
        if os.path.isfile(train_params_filename):
            if adapt_model and use_new_adam_params:
                log.info("Loading trainer params from {:s}".format(
                    train_params_filename))
                train_params = th.load(train_params_filename,
                                       map_location=inner_device_mapping)
                optimizer.load_state_dict(train_params)
        elif adapt_model:
            log.warn("No train/adam params found, starting optimization params "
                     "from scratch (model params will be loaded anyways).")
    processor = StandardizeProcessor()
    if adapt_model and load_old_data:
        # Seed the trainer with previously recorded data from today,
        # using the processor's standardization constants
        trainer.add_data_from_today(
            factor_new=processor.factor_new, eps=processor.eps)
    online_exp = OnlineExperiment(
        supplier=supplier, buffer=buffer,
        processor=processor, predictor=predictor, trainer=trainer,
        sender=sender)
    server = PredictionServer(
        (hostname, incoming_port), online_experiment=online_exp,
        out_hostname=out_hostname, out_port=out_port,
        plot_sensors=plot_sensors,
        use_out_server=use_out_server, save_data=save_data,
        model_base_name=base_name,
        save_model_trainer_params=adapt_model)
    # Compilation takes some time so initialize trainer already
    # before waiting in connection in server
    log.info("Starting server on port {:d}".format(incoming_port))
    server.start()
    log.info("Started server")
    server.serve_forever()
# Bespoke Link to Instruments and Small Satellites (BLISS)
#
# Copyright 2018, by the California Institute of Technology. ALL RIGHTS
# RESERVED. United States Government Sponsorship acknowledged. Any
# commercial use must be negotiated with the Office of Technology Transfer
# at the California Institute of Technology.
#
# This software may be subject to U.S. export control laws. By accepting
# this software, the user agrees to comply with all applicable U.S. export
# laws and regulations. User has the responsibility to obtain export licenses,
# or other export authority as may be required before exporting such
# information to foreign countries or providing access to foreign persons.

# Mock incoming PDUs
import gevent
import gevent.server


def handle(sock, address):
    """Connection handler: emit a numbered 'Hello N' message every 2 seconds.

    Runs until the peer disconnects (at which point sendall raises and the
    serving greenlet exits).
    """
    count = 0
    while True:
        # sendall (not send) so a partial send cannot silently drop bytes;
        # encode explicitly so the payload is bytes on both Python 2 and 3.
        sock.sendall('Hello {}'.format(count).encode('utf-8'))
        count += 1
        gevent.sleep(2)


if __name__ == '__main__':
    server = gevent.server.StreamServer(('127.0.0.1', 8000), handle)
    server.serve_forever()