def run_server():
    """Start the application web server.

    Refuses to start when the data tables have not been built yet
    (detected by the absence of the 'out' directory).
    """
    try:
        # The build pipeline must have produced 'out' before serving.
        stat('out')
    except OSError:
        # Narrowed from a bare `except:` so that only "path missing /
        # unreadable" triggers the friendly message; unrelated errors
        # (e.g. KeyboardInterrupt) are no longer swallowed.
        Logger.log_error('Data tables not built yet')
        Logger.log_message('Please run ./run first')
        return
    Logger.log_message('Running application server')
    web_app = WebApp('localhost', 8000, True)
    web_app.run()
def start_standalone():
    """Configure CherryPy, mount the application tree and serve requests
    until the engine is shut down.
    """
    sys.stderr.write("Starting server...\n")
    sys.stderr.write("Python path:\n")
    sys.stderr.write(str(sys.path) + "\n")

    # --- Server-wide CherryPy configuration -------------------------------
    here = os.path.dirname(__file__)
    cherrypy.config.update(os.path.join(here, SERVER_CONFIG_FILE))
    sys.stderr.write("\tcherrypy configuration files loaded\n")

    # --- Build and mount the exposed handler tree -------------------------
    root = WebApp.Root(MOUNT_POINT, VERBOSE)
    root.retrieve = RunQuery.Root(MOUNT_POINT, VERBOSE)
    app = cherrypy.tree.mount(root, script_name=MOUNT_POINT,
                              config=os.path.join(here, CONFIG_FILE))
    sys.stderr.write("\t" + MOUNT_POINT + " mounted\n")

    # --- Start serving; block() only returns once the engine stops --------
    cherrypy.engine.start()
    sys.stderr.write("Server started. Accepting requests...\n")
    cherrypy.engine.block()
    sys.stderr.write("Server stopped.\n")
    return
def gunicorn_entry():
    """Entry point for Gunicorn.

    Reads the WebApp options from environment variables and returns an
    initialized WebApp instance. Options whose variable is unset are
    omitted so WebApp's own defaults apply.
    """
    option_env_vars = {
        "google_api_key": "GOOGLE_API_KEY",
        "pg_host": "PG_HOST",
        "pg_user": "PG_USER",
        "pg_pass": "PG_PASS",
        "pg_db_name": "PG_DB_NAME",
        "log_level": "LOG_LEVEL",
    }
    env = {}
    for option, var in option_env_vars.items():
        value = os.getenv(var)
        if value is not None:
            env[option] = value
    return WebApp(**env)
def gunicorn_entry():
    """Entry point for Gunicorn.

    Reads the WebApp options from SS_* environment variables and returns
    an initialized WebApp instance. Unset variables are omitted so
    WebApp's own defaults apply.

    :return an initialized instance of WebApp
    :rtype WebApp
    """
    option_env_vars = {
        "url_file": "SS_URL_FILE",
        "url_file_expiration": "SS_URL_EXPIRATION",
        "redis_url": "SS_REDIS_URL",
        "redis_statistics": "SS_REDIS_STATISTICS",
        "statistics_expiration": "SS_STATISTICS_EXPIRATION",
        "log_level": "SS_LOG_LEVEL",
        "log_level_modules": "SS_LOG_LEVEL_MODULES",
    }
    env = {}
    for option, var in option_env_vars.items():
        value = os.getenv(var)
        if value is not None:
            env[option] = value
    return WebApp(**env)
button.start() display = Display() ui = UiController(button, display, True, 1) #raw_input("menu up! press button...") store = SampleStore(599) singletons.samplestore = store singletons.uicontroller = ui #test # for i in range(1,15): # sample = Sample( 100+i, i ) # store.addSample( sample ) # for i in range(1,15): # sample = Sample( 100+15+i, 30-i ) # store.addSample( sample ) # webapp = WebApp( store ) #start the show! webapp.start() except KeyboardInterrupt: cleanupGpio()
const=3, help="equal to -vvv") # Parses args args = vars(parser.parse_args()) # Removes None elements args = {k: args[k] for k in args if args[k] is not None} # Parses the verbosity level if "log_level" in args: try: args["log_level"] = { 0: logging.ERROR, 1: logging.WARNING, 2: logging.INFO, 3: logging.DEBUG }[args["log_level"]] except KeyError: args["log_level"] = logging.DEBUG # Gets flask options flask_options = { "host": args.pop("host"), "port": args.pop("port"), "debug": args.pop("flask_debug") } # Starts Flask app WebApp(**args).run(**flask_options)
help="equal to -vvv") # Parses args args = vars(parser.parse_args()) # Remove None elements args = {k: args[k] for k in args if args[k] is not None} # Verbosity level map VERBOSITY_LEVELS = { 0: logging.ERROR, 1: logging.WARNING, 2: logging.INFO, 3: logging.DEBUG } # Compute verbosity level args["log_level"] = VERBOSITY_LEVELS[ args["log_level"]] if args["log_level"] < 4 else VERBOSITY_LEVELS[3] # Find flask port try: flask_port = args.pop("flask_port") except KeyError: flask_port = None # Create the web app app = WebApp(**args) # Run the web app app.run(debug=True, port=flask_port)
json.dumps( rpcutils.generateRPCResultResponse( reqParsed[rpcutils.ID], payload))) elif msg.type == aiohttp.WSMsgType.ERROR: raise rpcErrorHandler.InternalServerError( 'ws connection closed with exception %s' % ws.websocket.exception()) except rpcErrorHandler.Error as e: await ws.websocket.send_str( json.dumps( rpcutils.generateRPCResultResponse( reqParsed[rpcutils.ID] if reqParsed is not None else rpcutils.UNKNOWN_RPC_REQUEST_ID, e.jsonEncode()))) SubcriptionsHandler.removeClient(ws) return ws if __name__ == '__main__': print("Server started") app = WebApp() app.add_routes([web.post('/rpc', rpcServerHandler)]) app.add_routes([web.get('/ws', websocketServerHandler)]) for webSocket in wsutils.webSockets: webSocket() web.run_app(app, port=80)
# -*- coding:utf-8 -*-
import sys
import logging
import logging.config
import os

from webapp import WebApp

# Port used when none (or an invalid one) is given on the command line.
DEFAULT_PORT = 8181

if __name__ == '__main__':
    # The port may be supplied as the first command line argument.
    # Previously an unparseable value only printed an error and the raw
    # string was still passed to app.run(); now we fall back to the
    # default port instead.
    port = DEFAULT_PORT
    if len(sys.argv) > 1:
        try:
            port = int(sys.argv[1])
        except ValueError:
            print("command line argument, port, error; using default %d"
                  % DEFAULT_PORT)

    curr_dir = os.path.dirname(__file__)
    logging_config_file = os.path.join(curr_dir, "./config/logging.ini")
    logging.config.fileConfig(logging_config_file)
    logger = logging.getLogger('root')

    # start Klein web app
    logger.info("start web engine with port: " + str(port))
    web_app = WebApp()
    web_app.app.run('0.0.0.0', port=port)
import tornado.ioloop
import tornado.web
import torndb
from tornado.options import define, options

from webreq import handlers
from webapp import WebApp

# Command line options and their defaults.
define("port", default=1104, help="run on the given port", type=int)
define("mysql_host", default="127.0.0.1:3306", help="database host")
define("mysql_database", default="ticketdb", help="database name")
define("mysql_user", default="root", help="database user")
define("mysql_password", default="", help="database password")

if __name__ == "__main__":
    # Parse options, build the application and serve until interrupted.
    tornado.options.parse_command_line()
    application = WebApp(handlers, options)
    application.listen(options.port)
    tornado.ioloop.IOLoop.current().start()
def bitcoinWS():
    """Build the bitcoin web application with its callback route.

    Returns the configured WebApp so the caller can actually serve it.
    Previously the instance was created, routes added, and then
    silently discarded (implicit ``return None``) — dead work.
    """
    app = WebApp()
    app.add_routes([web.post(BITCOIN_CALLBACK_PATH, bitcoinCallback)])
    return app
def __init__(self, AuthClass, MBoxClass):
    """Initialize the BoboMail application on top of WebApp.

    AuthClass -- authentication backend class, stored for later use
    MBoxClass -- mailbox backend class handed to MailBox
    """
    # Base WebApp wiring: a session manager, the page template, and the
    # application title. session_db_filename / max_session_length /
    # template_template are presumably module-level configuration
    # values -- TODO confirm against the enclosing module.
    WebApp.__init__(self,
                    SessionManager(session_db_filename, max_session_length),
                    template_template, "BoboMail")
    self.Authentication = AuthClass
    # NOTE(review): MailBox/Preferences/AddressBook each receive `self`,
    # so construction order may matter -- kept exactly as written.
    self.MailBox = MailBox(self, MBoxClass)
    self.Prefs = Preferences(self)
    self.AddrBook = AddressBook(self)
class Index(View):
    """Home page: renders index.html with the first user's name."""

    def GET(self, request):
        first_user = User.filter()[0]
        return render_template("index.html", name=first_user.get('name'))


class Test(View):
    """Echo view used to exercise URL parameters."""

    def GET(self, request, my_id):
        return "test", my_id


# URL table mapping each path to its view class.
urls = [
    {'url': '/', 'view': Index},
    {'url': '/test/<my_id>', 'view': Test},
]

if __name__ == '__main__':
    app = WebApp()
    app.add_url_rule(urls)
    app.run()
if changing_color[1] == 255: break while True: changing_color[0] += 1 changing_color[1] -= 1 for i in range(count): np[i] = changing_color np.write() yield 1 if changing_color[0] == 255: break @WebApp.register('blik') def blik(): while True: for color in [red, blue, green]: color() yield 2000 def run(func): result = func() for wait in result: time.sleep_ms(wait) if __name__ == '__main__': connect() WebApp().start(default_func=storm)
def main():
    """Command line dispatcher for the corpus processing pipeline.

    The first argument selects a pipeline stage; unknown or missing
    arguments print the usage text.
    """
    args = sys.argv
    usage = '''
./run txtdump\t<Gives the text dump of corpus>
./run sanitize\t<Sanitize the text dump to remove white spaces, etc.>
./run stem\t\t<Stem the sanitized text>
./run tf\t\t<Calculate the raw term frequency>
./run df\t\t<Calculate the document frequency>
./run itfidf\t<Calculate the inverse term frequency - inverse document frequency>
./run dict\t\t<Create the psycholinguistic dictionary>
./run kff\t\t<Calculate the Kucera Francis frequency>
./run syl\t\t<Calculate the number of syllables>
./run server\t<Run the application server>
'''
    if len(args) < 2:
        Logger.log_usage(usage)
        return

    command = args[1]
    if command == 'server':
        web_app = WebApp('127.0.0.1', 5000, DEBUG)
        web_app.run()
        return
    elif command == 'txtdump':
        txt_dump = TxtDump('corpus', path.join('tmp', 'txtdump'))
        txt_dump.run()
        return
    elif command == 'sanitize':
        sanitizer = Sanitizer(path.join('tmp', 'txtdump'),
                              path.join('tmp', 'sanitized'))
        sanitizer.run()
        return
    elif command == 'stem':
        stemmer = Stemmer(path.join('tmp', 'sanitized'),
                          path.join('tmp', 'stemmed'))
        stemmer.run()
        return
    elif command == 'tf':
        # Term frequency is computed twice: on stemmed text and on the
        # merely-sanitized text (the latter gives the raw terms list).
        tf = TermFrequency(path.join('tmp', 'stemmed'),
                           path.join('data', 'tf_stemmed.csv'))
        tf.run()
        tf = TermFrequency(path.join('tmp', 'sanitized'),
                           path.join('data', 'terms_list.csv'))
        tf.run()
        return
    elif command == 'df':
        df = DocumentFrequency(path.join('tmp', 'stemmed'),
                               path.join('data', 'df_stemmed.csv'),
                               path.join('data', 'tf_stemmed.csv'))
        df.run()
        return
    elif command == 'itfidf':
        itfidf = ITFIDF(path.join('data', 'itfidf_stemmed.csv'),
                        path.join('data', 'tf_stemmed.csv'),
                        path.join('data', 'df_stemmed.csv'))
        itfidf.run()
        return
    elif command == 'dict':
        dict_creator = PsycholinguisticDbCreator(
            path.join('data', 'psycholinguistic_db'),
            path.join('data', 'psycholinguistic_db.csv'))
        dict_creator.create()
        return
    elif command == 'kff':
        kf_freq_counter = KFFrequency(
            path.join('tmp', 'stemmed'),
            path.join('data', 'kff_stemmed.csv'),
            path.join('data', 'psycholinguistic_db.csv'))
        kf_freq_counter.run()
        return
    elif command == 'syl':
        syllable_counter = SyllableCounter(
            path.join('tmp', 'stemmed'),
            path.join('data', 'syllables_stemmed.csv'),
            path.join('data', 'psycholinguistic_db.csv'))
        syllable_counter.run()
        return
    else:
        Logger.log_usage(usage)
        return
def main():
    """Create the web application and start serving on port 82."""
    server = WebApp('0.0.0.0', 82, handlers)
    server.start()
# Optionally wipe the cached dataframe store before anything else.
if args.clean and os.path.exists(dataframe_store_path):
    shutil.rmtree(dataframe_store_path)

# Retrain when explicitly requested, or when either stored artifact
# (model or dataframe store) is missing.
trainModel = (args.train
              or not os.path.exists(model_store_path)
              or not os.path.exists(dataframe_store_path))

dataset = SpotifyRecommenderDataset()

if trainModel:
    print("starting to train model...")
    train_dataloader = torch.utils.data.DataLoader(dataset,
                                                   batch_size=256,
                                                   shuffle=True)
    model = Autoencoder()
    model = train_autoencoder(model, train_dataloader,
                              verbose=args.verbose,
                              num_epochs=args.num_epochs)
    # Evaluate and attach the learned encodings without tracking grads.
    with torch.no_grad():
        print("Evaluation loss:", test(model, dataset.model_input_tensor))
        encodings = model.encode(dataset.model_input_tensor)
        dataset.add_encoding_tensor(encodings)
    torch.save(model, model_store_path)

knn = visualization.KNN(dataset)
webapp = WebApp(dataset, knn)
webapp.run()