def decorated_function(self, *args):
    # Call the wrapped inotify-client function; on connection failure,
    # optionally (re)start the inotify server and retry exactly once.
    try:
        return function(self, *args)
    except (OSError, socket.error), err:
        autostart = self.ui.configbool('inotify', 'autostart', True)
        if err.args[0] == errno.ECONNREFUSED:
            # Stale socket left behind by a dead server: remove it so a
            # fresh server can bind to the same path.
            self.ui.warn(_('inotify-client: found dead inotify server '
                           'socket; removing it\n'))
            os.unlink(os.path.join(self.root, '.hg', 'inotify.sock'))
        if err.args[0] in (errno.ECONNREFUSED, errno.ENOENT) and autostart:
            try:
                try:
                    server.start(self.ui, self.dirstate, self.root,
                                 dict(daemon=True, daemon_pipefds=''))
                except server.AlreadyStartedException, inst:
                    # another process may have started its own
                    # inotify server while this one was starting.
                    self.ui.debug(str(inst))
            except Exception, inst:
                self.ui.warn(_('inotify-client: could not start inotify '
                               'server: %s\n') % inst)
            else:
                # Server is up: retry the original call once; on a second
                # failure only warn (the caller gets None).
                try:
                    return function(self, *args)
                except socket.error, err:
                    self.ui.warn(_('inotify-client: could not talk to new '
                                   'inotify server: %s\n') % err.args[-1])
def status(self, files, match, list_ignored, list_clean, list_unknown=True):
    # Query the inotify server for working-directory status; on a dead or
    # missing server socket, optionally autostart the server.
    try:
        if not list_ignored and not self.inotifyserver:
            result = client.query(ui, repo, files, match, False,
                                  list_clean, list_unknown)
            if result is not None:
                return result
    except socket.error, err:
        if err[0] == errno.ECONNREFUSED:
            # Stale socket from a dead server: remove so a new one can bind.
            ui.warn(_('(found dead inotify server socket; '
                      'removing it)\n'))
            os.unlink(repo.join('inotify.sock'))
        elif err[0] != errno.ENOENT:
            # Anything other than "connection refused"/"no such file" is
            # unexpected; propagate it.
            raise
        if ui.configbool('inotify', 'autostart'):
            query = None
            ui.debug(_('(starting inotify server)\n'))
            try:
                server.start(ui, repo)
                query = client.query
            except server.AlreadyStartedException, inst:
                # another process may have started its own
                # inotify server while this one was starting.
                ui.debug(str(inst))
                query = client.query
            except Exception, inst:
                ui.warn(_('could not start inotify server: '
                          '%s\n') % inst)
                ui.print_exc()
def test_maths(self):
    """Integration test for the proxy's math rules: feed two metric
    categories through the Graphite proxy and verify every expected
    computed value appears in the log file."""
    log_file_name = "integration_logs"
    # Remove a possible leftover log file.  The original called
    # os.remove() unconditionally, which raises OSError when the file
    # does not exist (e.g. on the very first run).
    if os.path.exists(log_file_name):
        os.remove(log_file_name)

    # Start Graphite Proxy
    server.start(9999, "true", "false")

    # Create python request senders to send requests to the graphite proxy
    proxy_host = "127.0.0.1"
    proxy_port = 8090
    senders = []
    for i in range(6):
        senders.append(request_sender.RequestSender(proxy_host, proxy_port, False))

    # Send requests in parallel to test the server threading capabilities
    # (each RequestSender is a different thread)
    # Math category 1
    senders[0].run("test.integration.maths.1 2 0")
    senders[1].run("test.integration.maths.1 3 0")
    senders[2].run("test.integration.maths.1 5 0")
    # Math category 2
    senders[3].run("test.integration.maths.2 2 0")
    senders[4].run("test.integration.maths.2 3 0")
    senders[5].run("test.integration.maths.2 5 0")

    # Wait for the computations
    time.sleep(5)
    # Stop the server thread to be able to quit the program properly
    server.stop()

    # Read the received messages from the logs.  'with' guarantees the
    # handle is closed (the original leaked it).
    with open(log_file_name, 'r') as logs_file:
        logs = logs_file.read()

    # Both rules are expected to produce the same set of computed values;
    # check each "Message added" line for rule 1 and rule 2.
    expected_values = ("10.0", "2.0", "5.0", "3.3",
                       "3.0", "1.5", "1.2", "100.0")
    for rule in ("1", "2"):
        for value in expected_values:
            self.assertNotEqual(
                logs.find("Message added: test.integration.maths.%s %s"
                          % (rule, value)), -1)

    # Remove created logs file
    os.remove(log_file_name)
def decorated_function(self, *args):
    # Wrapper for an inotify-client call: on connection errors, clean up a
    # dead server socket, optionally autostart the server, and retry once.
    result = None  # NOTE(review): never used; left over from an earlier revision?
    try:
        return function(self, *args)
    except (OSError, socket.error), err:
        autostart = self.ui.configbool('inotify', 'autostart', True)
        if err[0] == errno.ECONNREFUSED:
            # Stale socket from a dead server: remove it so a new server
            # can bind to the same path.
            self.ui.warn(_('(found dead inotify server socket; '
                           'removing it)\n'))
            os.unlink(self.repo.join('inotify.sock'))
        if err[0] in (errno.ECONNREFUSED, errno.ENOENT) and autostart:
            self.ui.debug(_('(starting inotify server)\n'))
            try:
                try:
                    server.start(self.ui, self.repo)
                except server.AlreadyStartedException, inst:
                    # another process may have started its own
                    # inotify server while this one was starting.
                    self.ui.debug(str(inst))
            except Exception, inst:
                self.ui.warn(_('could not start inotify server: '
                               '%s\n') % inst)
            else:
                # Server started successfully: retry the wrapped call once.
                try:
                    return function(self, *args)
                except socket.error, err:
                    self.ui.warn(_('could not talk to new inotify '
                                   'server: %s\n') % err[-1])
def main():
    """Parse command-line options, configure root logging and run the
    Plow render-node daemon until interrupted."""
    import argparse
    arg_parser = argparse.ArgumentParser(
        description='Start the Plow Render Node Daemon',
        usage='%(prog)s [opts]',
    )
    arg_parser.add_argument("-debug", action="store_true",
                            help="Print more debugging output")
    opts = arg_parser.parse_args()

    # Route everything through one stream handler with the custom format;
    # the handler passes all records, the logger level does the filtering.
    root_logger = logging.getLogger()
    handler = logging.StreamHandler()
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(RndFormatter(datefmt='%Y-%m-%d %H:%M:%S'))
    root_logger.addHandler(handler)
    root_logger.setLevel(logging.DEBUG if opts.debug else logging.INFO)

    import server
    try:
        server.start()
    except KeyboardInterrupt:
        # Ctrl-C is a normal way to stop the daemon; exit with status 2.
        sys.exit(2)
def main():
    """Validate WEB_PORT/KADEMLIA_PORT arguments, build the peer list and
    start the Kademlia node plus its web front-end.

    Command line: WEB_PORT KADEMLIA_PORT, then either KNOWN_NODE_IP
    KNOWN_NODE_PORT, a file of "ip port" lines, or nothing (no peers).
    """
    usage = ('Usage:\n%s WEB_PORT KADEMLIA_PORT '
             '{[KNOWN_NODE_IP KNOWN_NODE_PORT] or FILE}' % sys.argv[0])
    if len(sys.argv) < 3:
        errprint(usage)
        sys.exit(1)
    try:
        int(sys.argv[1])
    except ValueError:
        errprint('\nWEB_PORT must be an integer.\n')
        errprint(usage)
        sys.exit(1)
    try:
        int(sys.argv[2])
    except ValueError:
        errprint('\nKADEMLIA_PORT must be an integer.\n')
        errprint(usage)
        sys.exit(1)
    if len(sys.argv) == 5:
        # Explicit single known node on the command line.
        PEER = [(sys.argv[3], int(sys.argv[4]))]
    elif len(sys.argv) == 4:
        # Third argument is a file of "ip port" lines.  'with' closes the
        # handle even when a line is malformed (the original leaked it on
        # a parse error).
        PEER = []
        with open(sys.argv[3], 'r') as f:
            for line in f:
                peer_ip, peer_udp = line.split()
                PEER.append((peer_ip, int(peer_udp)))
    else:
        PEER = None
    cprint('PEER is %s' % str(PEER))
    node_instance = NODE(KADEMLIA_PORT=int(sys.argv[2]), PEER=PEER)
    node_instance.registerNode()
    webserver.start(getter=node_instance.searchKey,
                    poster=node_instance.publishKey,
                    web_port=int(sys.argv[1]))
def main(**kws):
    """Build the HQuery engine from the keyword options and hand it to
    the frontend web server."""
    no_session = kws.pop('no_session', False)
    query_engine = engine.HQueryEngine(kws)
    import server  # server should not be imported in backend process
    server.start(query_engine, no_session)
    # TODO "reset" -> make everything new
    # TODO put git hash of varial on webcreator (find on webcreator init)
    # TODO multiple instances: add random token to jug_file path (delete 2w olds)
    # TODO add multiple histos e.g. store histos via python, not in json
    # TODO CUTFLOW
    # TODO hint toggles (on bins vs. low, high / CUTFLOW)
    # TODO add multiple histos (toggled form)
    # TODO reloading: use ajax instead of full reload
    # TODO status from job submitter (warn when only few jobs are running)
    # TODO progress bar or (n_done / n_all) statement
    # TODO progress: sometimes it hangs until done. Why?
    # TODO first make histos for current section, send reload, then others
    # TODO lines in plots if selection is applied (improved N-1 feature)
    # TODO SGEJobSubmitter: Jobs are killed after 1 hour. Resubmit before that.
    # TODO cut efficiency / cutflow plot in map reduce
    # TODO histo_form: put width into CSS block
    # TODO separate CSS file for all hquery-related fields
    # TODO think about pulling everything through GET
    # TODO restart backend button
def run_test(tester, proxy_port, is_udp): # Create a python request receiver to receive Normal requests from the graphite proxy (emulate the Graphite Server) client_port = 9999 receiver = request_receiver.RequestReceiver() if not receiver.connect("127.0.0.1", client_port): print "Impossible to connect, abort." receiver.stop() sys.exit() # Start Graphite Proxy server.start( client_port, "false", "false" ) # Create python request senders to send requests to the graphite proxy proxy_host = "127.0.0.1" senders = [] for i in range(4): senders.append( request_sender.RequestSender( proxy_host, proxy_port, is_udp ) ) # Send requests in parallel to test the server threading capabilities (each RequestSender is a different thread) senders[0].run("Integration.test.1 0 0") senders[1].run("Integration.test.1 50 0") senders[2].run("Integration.test.2 100 0") senders[3].run("Integration.test.3 9999 0") time.sleep(2) server.stop() receiver.stop() # Stop the server thread to be able to quit the programm properly # Receive normal messages tester.assertNotEqual( receiver.getReceivedMEssage().find("Integration.test.1 0.000000 0\nIntegration.test.1 50.000000 0\nIntegration.test.2 100.000000 0\nIntegration.test.3 9999.000000 0\n"), -1 )
def decorated_function(self, *args):
    # Wrapper for an inotify-client call: on connection errors, clean up a
    # dead server socket, optionally autostart the server, and retry once.
    result = None  # NOTE(review): never used; leftover from an earlier revision?
    try:
        return function(self, *args)
    except (OSError, socket.error), err:
        autostart = self.ui.configbool('inotify', 'autostart', True)
        if err[0] == errno.ECONNREFUSED:
            # Stale socket from a dead server: remove it so a new server
            # can bind to the same path.
            self.ui.warn(
                _('(found dead inotify server socket; '
                  'removing it)\n'))
            os.unlink(self.repo.join('inotify.sock'))
        if err[0] in (errno.ECONNREFUSED, errno.ENOENT) and autostart:
            self.ui.debug(_('(starting inotify server)\n'))
            try:
                try:
                    server.start(self.ui, self.repo)
                except server.AlreadyStartedException, inst:
                    # another process may have started its own
                    # inotify server while this one was starting.
                    self.ui.debug(str(inst))
            except Exception, inst:
                self.ui.warn(
                    _('could not start inotify server: '
                      '%s\n') % inst)
            else:
                # Server started successfully: retry the wrapped call once.
                try:
                    return function(self, *args)
                except socket.error, err:
                    self.ui.warn(
                        _('could not talk to new inotify '
                          'server: %s\n') % err[-1])
def process(self): if self.listen_tcp: import server server.start(self) else: lines = self.read_lines_from_stdin() print "action=%s\n" % self.check(lines)
def start():
    """Start the robot and the server, reporting progress via
    toolkit.verbose.

    The original used a bare ``except:``, which silently swallowed every
    exception — including SystemExit and KeyboardInterrupt — and
    discarded the failure reason.  Catch only real errors and say what
    went wrong.
    """
    try:
        toolkit.verbose("Starting the system.")
        robot.start()
        server.start()
        toolkit.verbose("Succesfully started the system!")
    except Exception as err:
        toolkit.verbose("Uh-oh. An error occured at startup: %s" % err)
def async_main(start_ioloop=False): import tornado.ioloop from async import client, server server.start() client.start() if start_ioloop: tornado.ioloop.IOLoop.current().start()
def main():
    """Parse host/port options, initialize the app and start serving."""
    cli = ArgumentParser()
    cli.add_argument('-P', '--port', type=int, default=8080)
    cli.add_argument('-H', '--host', type=str, default='0.0.0.0')
    options = cli.parse_args()

    initialize()
    start(port=options.port, host=options.host)
def main():
    """Validate the port arguments, build the peer list, spawn the local
    test network and GUI helper processes, then start the Kademlia node
    plus its web front-end."""
    usage = ('Usage:\n%s WEB_PORT KADEMLIA_PORT '
             '{[KNOWN_NODE_IP KNOWN_NODE_PORT] or FILE}' % sys.argv[0])
    if len(sys.argv) < 3:
        errprint(usage)
        sys.exit(1)
    try:
        int(sys.argv[1])
    except ValueError:
        errprint('\nWEB_PORT must be an integer.\n')
        errprint(usage)
        sys.exit(1)
    try:
        int(sys.argv[2])
    except ValueError:
        errprint('\nKADEMLIA_PORT must be an integer.\n')
        errprint(usage)
        sys.exit(1)

    if len(sys.argv) == 5:
        # Explicit single known node on the command line.
        PEER = [(sys.argv[3], int(sys.argv[4]))]
    elif len(sys.argv) == 4:
        # Third argument is a file of "ip port" lines.  'with' closes the
        # handle even on a parse error (the original leaked it).
        PEER = []
        with open(sys.argv[3], 'r') as f:
            for line in f:
                peer_ip, peer_udp = line.split()
                PEER.append((peer_ip, int(peer_udp)))
    else:
        PEER = None

    # Spin up a local test network, give it time to settle, then a GUI
    # client, before registering this node.
    subprocess.Popen(
        ['python', 'examples/create_network.py', '10', '127.0.0.1'],
        stdout=subprocess.PIPE)
    time.sleep(5)
    subprocess.Popen(['python', 'gui.py', '4050', '127.0.0.1', '4000'],
                     stdout=subprocess.PIPE)

    node_instance = NODE(KADEMLIA_PORT=int(sys.argv[2]), PEER=PEER)
    node_instance.registerNode()

    # python gui.py 4000 127.0.0.1 4000 --entangled-0.1
    # create_network.py 10 127.0.0.1
    webserver.start(getter=node_instance.searchKey,
                    poster=node_instance.publishKey,
                    web_port=int(sys.argv[1]))
def browser_main():
    """Prepare the file environment, serve the UI on port 1234, then
    remove everything that was set up."""
    file_manager.setup()
    server.start(1234)
    file_manager.clean_up()
def generate(edges, js): print 'generating graph...' _add_edges(edges) if js: _write_graph_to_json() server.start() else: _draw_graph()
def main():
    """Run the world.

    This function sets up logging, connects to CloudSQL, and starts the
    API Server.  It never returns.
    """
    logging.getLogger().setLevel(logging.DEBUG)
    sql_client = cloud_sql_client.CloudSQLClient(
        cloud_sql_backend.INSTANCE, cloud_sql_backend.DATABASE)
    storage = cloud_sql_backend.CloudSQLBackend(sql_client)
    server.start(storage)
def main(media):
    """Collect mail credentials interactively, persist the username for
    the site's front-end javascript, and start the server on the chosen
    port."""
    username = input("Kullanıcı adı giriniz\nasy >> ")
    sender_email = input("Mail adresinizi giriniz\nasy >> ")
    password = input("Mail adresinizin parolasını giriniz\nasy >> ")
    toMail = input("Hedef mail adresini giriniz\nasy >> ")
    port = input("Server için port giriniz (4444,8080,9090)\nasy >> ")

    # Expose the username to the site's javascript via a small JSON file.
    with open('site/public/js/df.json', 'w') as json_file:
        json.dump({"username": username}, json_file)

    server.start(int(port), media, username, sender_email, toMail, password)
def startup():
    """Load the config file and start the listener daemon."""
    config = ConfigParser.ConfigParser()
    # Raw string: the original non-raw literal contained invalid escape
    # sequences ('\d', '\p', '\c', '\s') that only worked by accident and
    # are a SyntaxWarning/SyntaxError in modern Python.
    config.read(r'C:\dev\projects\elasticd\conf\settings.cfg')
    logging.debug("init starting up")
    p_manager = PluginManager(config)
    datastore = p_manager.get_datastore()
    registrar = Registrar(datastore)
    server.set_registrar(registrar)
    server.start()
def main():
    """Load the JSON config (from --config, defaulting to config.json
    next to this file) and start the server with it."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--config",
        type=str,
        default="",
        help="The config file to use")
    flags, _ = parser.parse_known_args()
    if not flags.config:
        flags.config = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), "config.json")
    # json.load parses the stream directly; no need for read() + loads().
    with open(flags.config) as f:
        config = json.load(f)
    server.start(config)
def main():
    """Configure logging, announce startup (locally and to Slack), then
    run the dispatcher and the HTTP server, reporting any failure both
    locally and to Slack."""
    common.setup_logger()
    logger.info(messages.SERVER_STARTING)
    if not slackapi.logger_info(messages.SERVER_STARTING):
        # logger.warn is a deprecated alias of logger.warning.
        logger.warning(messages.SLACK_POST_LOG_FAILED)
    try:
        # Start dispatcher
        dispatcher.start()
        # Start http server
        server.start()
    except Exception as ex:
        logger.error(messages.HTTP_SERVER_STOPPED.format(ex))
        if not slackapi.logger_error(messages.HTTP_SERVER_STOPPED.format(ex)):
            logger.warning(messages.SLACK_POST_LOG_FAILED)
def main():
    ''' Main function '''
    welcome()

    arg_parser = argparse.ArgumentParser()
    prepare_options_parser(arg_parser)
    args, _ = arg_parser.parse_known_args()

    # Server mode: serve requests forever, never fall through to the
    # one-shot download paths.
    if args.server:
        print(f"Starting Telluride Web Server on port {args.port}")
        server.start(BUILD, args.port)
        sys.exit(0)

    if args.page_url is None:
        print('Please pass a video page URL or "--help" for instructions\n')
        sys.exit(1)

    youtube_dl_opts = {
        'nocheckcertificate': True,
        'quiet': False,
        'restrictfilenames': True
    }

    # Metadata mode: no download — extract the info dict and dump it.
    if args.meta_only:
        youtube_dl_opts['quiet'] = True
        youtube_dl_opts['format'] = 'bestaudio/best'
        with youtube_dl.YoutubeDL(youtube_dl_opts) as ydl:
            info_dict = ydl.extract_info(args.page_url, download=False)
            print(json.dumps(info_dict, indent=2))
            sys.exit(0)

    if args.audio_only:
        print("Audio-only download.")
        youtube_dl_opts['format'] = 'bestaudio/best'
        youtube_dl_opts['postprocessors'] = [{
            'key': 'FFmpegExtractAudio',
            'preferredcodec': 'mp3',
            'preferredquality': '192',
        }]

    with youtube_dl.YoutubeDL(youtube_dl_opts) as ydl:
        ydl.download([args.page_url])
def main():
    """Validate the port arguments, build the peer list, spawn the local
    test network and GUI helper processes, then start the Kademlia node
    and its web front-end."""
    usage = ('Usage:\n%s WEB_PORT KADEMLIA_PORT '
             '{[KNOWN_NODE_IP KNOWN_NODE_PORT] or FILE}' % sys.argv[0])
    if len(sys.argv) < 3:
        errprint(usage)
        sys.exit(1)
    try:
        int(sys.argv[1])
    except ValueError:
        errprint('\nWEB_PORT must be an integer.\n')
        errprint(usage)
        sys.exit(1)
    try:
        int(sys.argv[2])
    except ValueError:
        errprint('\nKADEMLIA_PORT must be an integer.\n')
        errprint(usage)
        sys.exit(1)

    if len(sys.argv) == 5:
        # Explicit single known node on the command line.
        PEER = [(sys.argv[3], int(sys.argv[4]))]
    elif len(sys.argv) == 4:
        # Third argument is a file of "ip port" lines.  'with' closes the
        # handle even on a parse error (the original leaked it).
        PEER = []
        with open(sys.argv[3], 'r') as f:
            for line in f:
                peer_ip, peer_udp = line.split()
                PEER.append((peer_ip, int(peer_udp)))
    else:
        PEER = None

    # Spin up a local test network, give it time to settle, then a GUI
    # client, before registering this node.
    subprocess.Popen(
        ['python', 'examples/create_network.py', '10', '127.0.0.1'],
        stdout=subprocess.PIPE)
    time.sleep(5)
    subprocess.Popen(['python', 'gui.py', '4050', '127.0.0.1', '4000'],
                     stdout=subprocess.PIPE)

    node_instance = NODE(KADEMLIA_PORT=int(sys.argv[2]), PEER=PEER)
    node_instance.registerNode()

    # python gui.py 4000 127.0.0.1 4000 --entangled-0.1
    # create_network.py 10 127.0.0.1
    webserver.start(getter=node_instance.searchKey,
                    poster=node_instance.publishKey,
                    web_port=int(sys.argv[1]))
def startup(config_path=DEFAULT_SETTINGS_FILE):
    """Initialise logging, load config and plugins, optionally start the
    listener, and run the daemon loop (which never returns).

    Fixes vs. the original: ``server.set_registrar`` was passed the
    undefined name ``registrar`` (NameError) instead of ``_registrar``,
    and the listener was "started" via ``Thread.start(server.start())``,
    which calls ``server.start()`` synchronously in this thread; it now
    runs on a real Thread.
    """
    # init logging
    setup_logging()

    # load the config file and start the listener, daemon
    logging.debug("init starting up")
    config = ConfigParser.ConfigParser()
    logging.debug('reading setting from: %s' % config_path)
    config.read(config_path)

    # Load the plugin manager to get a handle to the plugins.
    _plugin_manager = PluginManager(config)
    locator = _plugin_manager.get_resource_locator()
    datastore = _plugin_manager.get_datastore()
    driver = _plugin_manager.get_driver()
    _registrar = Registrar(datastore, driver)

    # should the listener be started?
    if config.getboolean('DEFAULT', 'start_server'):
        server.set_registrar(_registrar)
        Thread(target=server.start).start()

    # start looking for backends and updating the driver
    # THIS CALL WILL NOT RETURN
    daemon.start(_registrar, locator, config)
def startup(config_path=DEFAULT_SETTINGS_FILE):
    """Load config, initialise logging from it, load plugins, optionally
    start the listener, and run the daemon loop (which never returns).

    Fixes vs. the original: ``server.set_registrar`` was passed the
    undefined name ``registrar`` (NameError) instead of ``_registrar``,
    and ``Thread.start(server.start())`` invoked ``server.start()``
    synchronously; the listener now runs on a real Thread.
    """
    # The config is loaded first because logging setup needs it.
    config = ConfigParser.ConfigParser()
    config.read(config_path)
    setup_logging(config)

    logging.debug('reading setting from: %s' % config_path)

    # Load the plugin manager to get a handle to the plugins.
    _plugin_manager = PluginManager(config)
    locator = _plugin_manager.get_resource_locator()
    datastore = _plugin_manager.get_datastore()
    driver = _plugin_manager.get_driver()
    _registrar = Registrar(datastore, driver)

    # should the listener be started?
    if config.getboolean('DEFAULT', 'start_server'):
        server.set_registrar(_registrar)
        Thread(target=server.start).start()

    # start looking for backends and updating the driver
    # THIS CALL WILL NOT RETURN
    daemon.start(_registrar, locator, config)
def main():
    """Run the server and the GUI in separate processes.

    The original passed ``server.start()`` / ``gui.run_gui()`` — the
    *results* of calling them — as Process targets, so both ran
    sequentially in the parent and the child processes did nothing.
    Pass the callables themselves, and start both before joining so the
    two actually run concurrently.
    """
    server_proc = Process(target=server.start)
    gui_proc = Process(target=gui.run_gui)
    server_proc.start()
    gui_proc.start()
    server_proc.join()
    gui_proc.join()
def do_process():
    """Handle the search form: query by activity, city and/or level,
    narrowing the query to whichever fields the user filled in."""
    print("TRAITEMENT")
    li_ville = request.forms.ville
    li_sport = request.forms.sport
    li_niv = request.forms.niveau

    conn = server.start()
    # Flattened dispatch: empty city -> by activity; empty sport -> by
    # city; empty level -> city+activity; otherwise all three.
    if li_ville == "":
        res = server.query_act(conn, li_sport)
    elif li_sport == "":
        res = server.query_city(conn, li_ville)
    elif li_niv == "":
        res = server.query_city_and_act(conn, li_ville, li_sport)
    else:
        res = server.query_city_and_act_and_niv(
            conn, li_ville, li_sport, li_niv)
    server.close(conn)

    print(res)
    return template("Template.html", {'res': res, 'nbRes': len(res)})
def server_cmd(self, world_name, cmd):
    """Apply start/stop/reboot to a world and re-render its index page."""
    # Only these three commands are exposed over HTTP.
    if cmd not in ['start', 'stop', 'reboot']:
        raise cherrypy.HTTPError(404)

    world = server.container(world_name)
    # A reboot of an existing world keeps its current port; any other
    # case allocates a fresh one.
    if world and cmd == 'reboot':
        port = server.ports(world)[0]
    else:
        port = server.next_port()

    if cmd in ('stop', 'reboot'):
        server.stop(world_name)
    if cmd in ('reboot', 'start'):
        server.start(world_name, port)
    return self.index(world_name)
def main():
    """Dispatch on getopt options: -p PORT starts the server, -v prints
    the version, -h shows usage."""
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hp:v",
                                   ["help", "port=", "version"])
    except getopt.GetoptError:
        usage()
        sys.exit(2)

    for opt, value in opts:
        if opt == "-p":
            port = int(value)
            init()
            logging.info("Server is starting...")
            #with daemon.DaemonContext():
            server.start(port)
        elif opt == "-v":
            print("Version:" + configure.get_conf()["version"])
        elif opt == "-h":
            usage()
def main():
    """Entry point for the Plow render-node daemon: parse options, wire
    up logging, and start the server."""
    import argparse
    cli = argparse.ArgumentParser(
        description="Start the Plow Render Node Daemon",
        usage="%(prog)s [opts]")
    cli.add_argument("-debug", action="store_true",
                     help="Print more debugging output")
    parsed = cli.parse_args()

    # One stream handler with the custom formatter; the root logger's
    # level decides what actually gets emitted.
    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(logging.DEBUG)
    stream_handler.setFormatter(RndFormatter(datefmt="%Y-%m-%d %H:%M:%S"))

    root = logging.getLogger()
    root.addHandler(stream_handler)
    root.setLevel(logging.DEBUG if parsed.debug else logging.INFO)

    import server
    server.start()
def main():
    """Run the stoplight daemon: start the command server, then poll its
    queue forever, feeding new jobs to the manager."""
    # Initialize logging.
    init_logging("stoplightd.log")

    # The server pushes commands into this queue.
    server_queue = Queue()
    server.start(server_queue)

    manager = Manager()
    while True:
        # Drain at most one pending command per tick; an empty queue is
        # the normal case.
        try:
            command = server_queue.get(block=False)
            if command["type"] == "add_job":
                manager.add_job(command["job_dir"])
        except Empty:
            pass
        manager.update()
        time.sleep(5)
def test_statistics( self ): # Create a python request receiver to receive Normal requests from the graphite proxy (emulate the Graphite Server) client_port = 9999 receiver = request_receiver.RequestReceiver() if not receiver.connect("127.0.0.1", client_port): print "Impossible to connect, abort." receiver.stop() sys.exit() # Start Graphite Proxy server.start( client_port, "false", "true" ) time.sleep(4) server.stop() receiver.stop() # Stop the server thread to be able to quit the programm properly # Receive normal messages messages = receiver.getReceivedMEssage() self.assertNotEqual( messages.find("stats.global_buffer.messages.max"), -1 ) self.assertNotEqual( messages.find("stats.math_buffer.messages.max"), -1 ) self.assertNotEqual( messages.find("stats.messages.created.nbr"), -1 ) self.assertNotEqual( messages.find("stats.statistics.messages.created.nbr"), -1 )
def serve_homepage():
    """Render the home page with the selectable cities, sports and
    levels fetched from the database."""
    conn = server.start()
    liste_ville = server.get_city(conn)
    liste_sport = server.get_sport(conn)
    liste_niveau = server.getNiveau(conn)
    print(liste_niveau)
    server.close(conn)
    return template("index.html", {
        'listeSport': liste_sport,
        'listeVille': liste_ville,
        'listeNiveau': liste_niveau,
    })
def startup(config_path=DEFAULT_SETTINGS_FILE):
    """Load the config file, optionally start the listener, and run the
    daemon loop (which never returns).

    Fixes vs. the original: ``server.set_registrar`` referenced the
    undefined name ``registrar`` (NameError) instead of ``_registrar``;
    ``thread.start(server.start())`` invoked ``server.start()`` inline
    (the listener now runs on its own thread via start_new_thread); and
    inline '''...''' strings used as comments (evaluated-and-discarded
    expressions) were replaced with real comments.
    """
    logging.debug('Init starting up')
    config = ConfigParser.ConfigParser()
    logging.debug('Reading setting from: %s' % config_path)
    config.read(config_path)

    # Load the plugin manager to get a handle to the plugins.
    plugin_manager = PluginManager(config)
    _locator = plugin_manager.get_resource_locator()
    _datastore = plugin_manager.get_datastore()
    _driver = plugin_manager.get_driver()
    _registrar = Registrar(_datastore, _driver)

    # Should the listener be started?
    if config.getboolean('DEFAULT', 'start_server'):
        server.set_registrar(_registrar)
        thread.start_new_thread(server.start, ())

    # Start looking for backends and updating the driver.
    # THIS CALL WILL NOT RETURN
    daemon.start(_registrar, _locator, config)
import numpy as np from multiprocessing import Queue import server from server import send_msg from PIL import Image import os from os import system as sys from keras.models import load_model import keras from replay import ExperienceReplay import random import time q = Queue() server.start(q) # the simulator saves the screen shots in to this folder # for the python script to read screen_shot_path = '../../ScreenShots' sys('rm -rf ' + screen_shot_path + '; mkdir ' + screen_shot_path) steering_angle = 0 steering_angles = [-0.45, 0.45] encoder = load_model('../../models/80-2 encoder') exp_replay = ExperienceReplay() new_model = False batch_size = 64 skip_learn = 1 if new_model:
def main():
    """Program entry point: delegate entirely to the server module."""
    server.start()
import os
from blinker import signal

# Pull-request validator bot instance shared by both webhook handlers.
jimaek = validator.PullBot()

def on_pull(data):
    # Handle a pull_request webhook payload.
    action = data.get("action", None)
    # NOTE(review): int(None) raises TypeError when "number" is absent,
    # so the None default is never actually usable — confirm the payloads
    # always carry "number".
    num = int(data.get("number", None))
    if action in ("opened", "reopened", "synchronize"):
        # print "Validating pr {number}".format(**data)
        jimaek.validate(num)
    elif action == "closed":
        jimaek.closed_pr(num)

def on_comment(data):
    # Handle an issue_comment webhook payload; only newly created
    # comments are checked.
    number = data["issue"]["number"]
    comment = data["comment"]["body"]
    user = data["comment"]["user"]["login"]
    if data.get("action", None) == "created":
        jimaek.check_comment(number, comment, user)

# Wire the handlers to the webhook signals, then start the background
# revalidation service and the HTTP server.
signal("pull_event").connect(on_pull)
signal("comment_event").connect(on_comment)
revalidation_service.start(jimaek)

# Listen port; overridable through the PORT environment variable.
DEFAULT_PORT = int(os.environ.get("PORT", 9000))
server.start(DEFAULT_PORT)
def start_server():
    """Launch the server with the module-level configuration object."""
    import server

    server.start(conf)
def TGS():
    """Run the TGS service (TGSServer handler) on port 8081."""
    server.start(TGSServer, db.TGS_NAME, SERVER, 8081)
def AS():
    """Run the authentication service (AuthenticationServer handler) on
    port 8080."""
    server.start(AuthenticationServer, db.AS_NAME, SERVER, 8080)
# Always show usage before dispatching on the command-line arguments.
print('\tUsage: program -server|-client [-n number] [-i ip adress] ')
print('')
print('where: ')
print('\t-server\t\t-\tStarting the server module')
print('\t-client\t\t-\tStarting the client module')
print('')
print('optional:')
print('\t-n number\t-\tNumber of connections (only usable in server module). If null taken from input.')
print('\t-i ip adress\t-\t IP Adress for server (only usable in client module). If null taken from input.')

if len(sys.argv) > 1:
    if sys.argv[1] == '-server':
        if len(sys.argv) > 3:
            if sys.argv[2] == '-n':
                # Validate the numeric argument properly: the original's
                # isinstance(int(x), int) was always True when int()
                # succeeded, and crashed with an uncaught ValueError on a
                # non-numeric value.
                try:
                    int(sys.argv[3])
                except ValueError:
                    helpMessage()
                else:
                    server.start(sys.argv[3])
        else:
            # No connection count supplied: let the server ask for it.
            server.start(-1)
    elif sys.argv[1] == '-client':
        if len(sys.argv) > 3:
            if sys.argv[2] == '-i':
                client.start(sys.argv[3])
            else:
                client.start('-1')
        else:
            helpMessage()
    else:
        helpMessage()
def start_service_server(port=8080):
    """Run the service app until interrupted, then shut everything down.

    :param port: TCP port to serve on (default 8080).
    """
    try:
        server.start(app, port=port)
    except KeyboardInterrupt:
        # Ctrl-C: stop the watcher first, then the server itself.
        stop_watch()
        server.stop()
import server

if __name__ == '__main__':
    import argparse

    cli = argparse.ArgumentParser('MyNote Options.')
    cli.add_argument('--static-file-path', help='...', default='./static')
    cli.add_argument('--template-file-path', help='...', default='./template')
    cli.add_argument('--notes-file-path', help='...')
    args = cli.parse_args()

    # The notes file has no sensible default; require it explicitly.
    if not args.notes_file_path:
        cli.error('You must specify the notes file path')

    server.start(args.static_file_path, args.template_file_path,
                 args.notes_file_path)
format='%(asctime)s %(message)s', datefmt='%Y-%m-%d %H:%M:%S: ', filename='websync.log', level=logging.INFO) console = logging.StreamHandler() console.setLevel(logging.INFO) logging.getLogger().addHandler(console) if __name__ == '__main__': initLogger() import sys portint=int(sys.argv[-1]) portstr=str(sys.argv[-1]) if isinstance(portint, int) and portint < 65535: app.config['BASE_DIR'] = os.path.abspath(os.path.dirname(__file__)) # API access point for MasterNode master.register("http://130.240.5.168:5002/", portstr) # Create database db.create_all() server.start(app, portint) # Remove database when server shuts down os.remove(os.path.join(app.config['BASE_DIR'], (portstr + '.db'))) master.unregister() logging.info('Exited successfully')
import subprocess
import os
import server as srv
#import robotv1.server as srv

# Run from the deployed checkout so the git commands and any relative
# paths resolve against it.
os.chdir("/home/pi/robotRelease/robotv1")

def git(*args):
    # Run a git subcommand, raising CalledProcessError on failure.
    return subprocess.check_call(['git'] + list(args))

# NOTE(review): the remote URL embeds (masked) credentials; a credential
# helper or deploy key would avoid keeping secrets in the URL.
git("remote", "set-url", "origin", "https://*****:*****@github.com/cfacon/robotv1.git")
git("pull")

# Start the freshly updated server.
srv.start()
def SSBasic():
    """Run the 'Basic' SS server variant on port 8082."""
    server.start(SSServerBasic, 'Basic', SERVER, 8082)
def server(port, location):
    """Thin wrapper delegating to cserver.start.

    Note: this function shadows any module named ``server`` in scopes
    where it is imported.
    """
    cserver.start(port, location)
def SSBad():
    """Run the 'Bad' SS server variant on port 8083."""
    server.start(SSServerBad, 'Bad', SERVER, 8083)
import psutil
import os
import time
import server
import chat

print("Loading models...")
load_begin = time.time()
model = chat.load_model()
load_elapsed = time.time() - load_begin

# Resident set size of this process, in MB.
ram_mb = psutil.Process(os.getpid()).memory_info().rss / 1000000
print("Done! Models loaded in", "{:.1f}".format(load_elapsed),
      "seconds. Using", int(ram_mb), "MB of RAM")

server.start(model)
def start(accessBase):
    """Delegate startup to the server module, forwarding its result."""
    return server.start(accessBase)
import server

if __name__ == '__main__':
    # Configure the app *before* starting it: server.start() typically
    # blocks, so the original assigned secret_key on a line that was only
    # reached after shutdown — i.e. the server ran without it.
    # set the secret key. keep this really secret:
    server.secret_key = 'A0Zr98j/3yX R~XHH!jmN]LWX/,?RT'
    server.start()
from signal import signal, SIGPIPE, SIG_DFL

# Die quietly on broken pipes (e.g. when output is piped into `head`).
signal(SIGPIPE, SIG_DFL)

import codecs

# Force UTF-8 output regardless of the terminal's locale (Python 2).
UTF8Writer = codecs.getwriter("utf8")
sys.stdout = UTF8Writer(sys.stdout)

argparser = argparse.ArgumentParser(description="Analyse some bank statements.")
argparser.add_argument("mode", choices=["parse", "append", "serve", "help"])
args = argparser.parse_args()

# Dispatch on the requested mode; each branch imports lazily so a given
# mode only pays for what it uses.
if args.mode == "help":
    argparser.print_help()
    sys.exit()

if args.mode == "parse":
    import parser
    parser.tocsv(sys.stdin)
elif args.mode == "append":
    import db
    db.append(sys.stdin)
elif args.mode == "serve":
    import server
    server.start()
def spawn(server, outQueue, startEvent):
    """Attach the IPC primitives to *server*, then launch it."""
    # The two attribute assignments are independent of each other; both
    # must simply happen before start() is called.
    server.startEvent = startEvent
    server.outQueue = outQueue
    server.start()