def test__database_exists(database_exists_setup, mocker):
    m = mocker.Mock()
    db_file = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '../db/testitem.db'))
    assert os.path.exists(db_file)
    database.setup()
    m.create.assert_not_called()
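The database_exists_setup fixture used by this test is not shown. A minimal sketch of what it might look like, assuming it only pre-creates the test database file and removes it afterwards (the body here is hypothetical):

import os
import pytest


@pytest.fixture
def database_exists_setup():
    # Hypothetical: pre-create the test database file so database.setup() finds it.
    db_file = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '../db/testitem.db'))
    os.makedirs(os.path.dirname(db_file), exist_ok=True)
    open(db_file, 'a').close()
    yield db_file
    # Teardown: remove the file so later tests start from a clean slate.
    if os.path.exists(db_file):
        os.remove(db_file)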
def main():
    df = pd.read_csv(PROGRESS_FILE)
    df = df.head(2)  # DEBUG ONLY!!!

    # DB setup
    setup()

    try:
        # Create a queue to communicate with the worker threads
        queue = Queue()
        # Create 8 worker threads
        for x in range(8):
            worker = CrawlWorker(queue)
            # Setting daemon to True will let the main thread exit even though the workers are blocking
            worker.daemon = True
            worker.start()
        # Put the tasks into the queue as a tuple
        for index, row in df.iterrows():
            lat = row['lat']
            lng = row['lng']
            for p in PROVIDERS:
                logger.info(f'queueing {p}')
                queue.put((p, lat, lng))
        # Causes the main thread to wait for the queue to finish processing all the tasks
        queue.join()
    except (KeyboardInterrupt, Exception) as e:
        logger.info('Saving progress file...')
        df.to_csv(PROGRESS_FILE, index=False)
        raise e
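The CrawlWorker class is referenced but not defined in this snippet. A sketch of the usual worker-thread pattern it implies, assuming it is a Thread subclass that consumes (provider, lat, lng) tuples; the crawl logic itself is omitted:

import threading
from queue import Queue


class CrawlWorker(threading.Thread):
    """Hypothetical worker: consumes (provider, lat, lng) tuples from the queue."""

    def __init__(self, queue):
        super().__init__()
        self.queue = queue

    def run(self):
        while True:
            provider, lat, lng = self.queue.get()
            try:
                pass  # the real crawl(provider, lat, lng) call is not shown here
            finally:
                # Always mark the task done so queue.join() in main() can return.
                self.queue.task_done()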
def setupProfile():
    '''Establish a new profile and setup the tables appropriately.'''
    # instead of a save-as, could just prompt for a name in a tkinter entry then store in ~/.KindleQuotes
    options = {
        'parent': root,
        'initialdir': path,
        'title': 'Choose a Profile Name to save as: ',
        'filetypes': [('Sqlite3 DB', '.s3db')]
    }
    name = tkFileDialog.asksaveasfilename(**options)
    if not name.endswith('.s3db'):
        name = ''.join([name, '.s3db'])
    f = open(name, 'w')
    f.close()
    database.setup(name)
    profile.database = name
    # Strip the '.s3db' suffix; rstrip() would strip any trailing '.', 's', '3', 'd' or 'b' characters.
    profile.name = os.path.basename(profile.database)[:-len('.s3db')]
    profile_label['text'] = ''.join([
        profile.name.capitalize(),
        "'s Kindle Quotes, Highlights, and Bookmarks"
    ])
def setup(config):
    keybase.setup(config)
    database.setup(config.db)

    import nltk
    libs = ('punkt', 'averaged_perceptron_tagger', 'universal_tagset')
    for lib in libs:
        nltk.download(lib, quiet=True)
def runserver(debug=True):
    from database import setup
    setup()
    try:
        http_port = int(sys.argv[1])
    except (IndexError, ValueError):
        # No port argument given (or not a number): fall back to the default.
        http_port = 5000
    app.run(debug=debug, port=http_port, host='0.0.0.0')
def dropblockedword(bot, update):
    chatid = str(update.message.chat_id)
    if chatid != config.admin_chatid:
        # "You must be an admin to perform this action"
        update.message.reply_text('برای انجام این عمل باید ادمین باشید')
        return
    else:
        # "The database was dropped successfully"
        update.message.reply_text('دیتابیس با موفقیت دراپ شد')
        database.DROPTABLEblockedwords()
        database.setup()
def main(): """ """ setup() app = QtGui.QApplication(sys.argv) window = MainWindow() window.show() # window.showMaximized() sys.exit(app.exec_())
def setup(config):
    if config.sentry_dsn:
        sentry_sdk.init(config.sentry_dsn)
    database.setup(config.db)

    import nltk
    libs = ('punkt', 'averaged_perceptron_tagger', 'universal_tagset')
    for lib in libs:
        nltk.download(lib, quiet=True)

    return Bot(username=config.username, handler=Handler(config))
def main():
    setup()
    gettext_windows.setup_env()
    locale.setlocale(locale.LC_ALL, '')
    gettext.install('power_m', localedir='locale', unicode=True)
    app = QtGui.QApplication(sys.argv)
    qb = MainWindows()
    qb.show()
    sys.exit(app.exec_())
def process_search(conn, addr):
    # Handle client search requests in a loop.
    recvData = conn.recv(102400)
    if not recvData:
        res = "the seat msg is empty, please re-send \n"
        conn.send(res)
        conn.close()
        return

    err, user_info, carset_cond = parse_search_info(recvData)
    print(recvData)
    if err:
        res = "the search.xml format error, please re-send"
        conn.send(res)
        conn.close()
        return

    dbName = db.setup()
    dbCon = db.connect(dbName)

    # Check whether the username and password are correct.
    user_cond = '''user = '******' ''' % user_info[0]
    data_user = db.select_user_table(dbCon, user_cond)
    if len(data_user) == 0:
        send_search_result(conn, data_user, 'user_unvalid')
    else:
        if data_user[0][1] != user_info[1]:
            send_search_result(conn, data_user, 'password_unvalid')
        else:
            data_carseat = db.select_carseat_table(dbCon, carset_cond)
            dbCon.close()
            send_search_result(conn, data_carseat, 'none')
    conn.close()
def main(): """Test the approximate library.""" my_db = database.setup() my_db.add_task("Task 0") my_db.add_task("Task 1") my_db.add_task("Task 2") my_db.add_task("Task 3") my_db.add_task("Task 4") my_db.add_task("Task 5") my_db.add_task("Task 6") my_db.add_task("Task 7") my_db.remove_task(2) my_db.finish_task(3) my_db.finish_task(1) my_db.add_category("Memo") my_db.add_category("Email") my_db.assign_category(0, 1) my_db.assign_category(1, 1) my_db.assign_category(1, 3) for code, task in my_db.tasks.items(): print(task.id["name"]) print("\tcode:", task.id["code"]) print("\tcategories:", task.categories) for category in task.categories: print("\t\t", my_db.categories[category].id["name"]) print() print("Tasks in progress:", my_db.in_progress) print("Tasks finished:", my_db.finished)
def session():
    DB_PATH = Path("tests/test_db.sqlite")
    DB_URI = f"sqlite:///{DB_PATH}"
    SessionMaker = database.setup(DB_PATH, DB_URI)
    session = SessionMaker()

    yield session

    # Everything after `yield` is the teardown code.
    session.close()
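A short sketch of how a test might consume this generator, assuming it is registered as a pytest fixture (the decorator is not shown above) and that database.setup() returns a SQLAlchemy sessionmaker; the test body is hypothetical:

from sqlalchemy import text


def test_session_is_usable(session):
    # A trivial round trip confirms the engine and session are wired up.
    assert session.execute(text("SELECT 1")).scalar() == 1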
def main():
    db = get_db_instance()
    try:
        should_save = True
        num_of_conditions = 5
        num_of_condition_types = 3
        num_of_alerts = 300000
        number_of_listing_events = 21

        db.connect()
        conditions = [
            generate_conditions(i, num_of_conditions)
            for i in range(num_of_condition_types)
        ]
        setup(num_of_alerts, conditions)

        print("CREATING BLOOM FILTERS")
        create_alerts_bloom_filters()

        listing_events = generate_listing_events(number_of_listing_events,
                                                 conditions)

        t_start = datetime.now()
        print("starting to find matching alerts")
        get_eligible_alerts(listing_events, conditions)
        print("finished finding matching alerts")
        t_end = datetime.now()
        time_elapsed = t_end - t_start

        if should_save:
            row = [
                num_of_alerts, num_of_conditions, number_of_listing_events,
                time_elapsed
            ]
            save_results(row)
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if db.is_connected():
            db.close_connection()
            print('Database connection closed.')
def restore_db():
    settings = get_settings()
    service = MontaguService(settings)
    notifier = Notifier(settings['notify_channel'])
    try:
        ok = service.status == 'running' and service.db_volume_present
        if not ok:
            raise Exception('montagu not in a state we can restore')
        bb8_backup.restore(service)
        database.setup(service)
        if settings["add_test_user"] is True:
            add_test_users()
        notifier.post("*Restored* data from backup on `{}` :recycle:".format(
            settings['instance_name']))
    except Exception as e:
        print(e)
        try:
            notifier.post("*Failed* to restore data on `{}` :bomb:",
                          settings['instance_name'])
        except:
            raise
def server_init():
    # Create the folder that stores all the images
    if not os.path.isdir(PicDirName):
        os.mkdir(PicDirName)

    # Create a timer that makes a new image folder every 24 hours
    PicTimer = threading.Timer(0, fun_timer)  # first run
    PicTimer.start()

    # Initialise the database
    dbName = db.setup()
    dbCon = db.connect(dbName)
    if not dbCon:
        print('ERROR: %r not supported or unreachable, exit' % db)
        return
    db.init(dbCon)
    dbCon.close()
def configure_montagu(service, data_exists):
    # Do things to the database
    if data_exists:
        print("Skipping data import: 'persist_data' is set, "
              "and this is not a first-time deployment")
    else:
        data_import.do(service)
    passwords = database.setup(service)

    # Push secrets into containers
    cert_paths = get_ssl_certificate(service.settings["certificate"])
    token_keypair_paths = get_token_keypair()
    is_prod = service.settings["password_group"] == 'production'
    configure_api(service, passwords['api'], token_keypair_paths,
                  service.settings["hostname"], is_prod,
                  service.settings["orderly_web_api_url"])

    task_queue_user = "******"
    task_queue_email = "*****@*****.**"
    if service.settings["use_real_diagnostic_reports"]:
        task_queue_password = get_secret(
            "task-queue-user/{}".format(service.settings["instance_name"]),
            "password")
    else:
        task_queue_password = "******"
    print("Configuring task queue user")
    add_user(task_queue_user, task_queue_user, task_queue_email,
             task_queue_password)
    orderlyweb_cli.add_user(task_queue_email)
    perms = ["*/reports.read", "*/reports.review", "*/reports.run"]
    orderlyweb_cli.grant_permissions(task_queue_email, perms)
    configure_task_queue(service, task_queue_email, task_queue_password,
                         service.settings["orderly_web_api_url"],
                         service.settings["use_real_diagnostic_reports"],
                         service.settings["fake_smtp"])

    configure_proxy(service, cert_paths)

    if service.settings["include_guidance_reports"]:
        configure_contrib_portal(service)

    if service.settings["copy_static_files"]:
        configure_static_server(service, token_keypair_paths)
def receive_msg(conn):
    # 1. Receive the data (1024 bytes at a time, per the original note).
    # 2. Anything that cannot be sent at once stays in the buffer and is sent on the next call.
    # 3. The buffer sends at most 32768 bytes per call.
    # 4. The exact limit differs per system.
    recvData = conn.recv(102400)
    # On Linux an empty result means the peer closed the connection.
    if not recvData:
        return ErrorEmpty

    curTime = time.localtime(time.time())
    timeStr = "%04d%02d%02d.%02d%02d%02d" % (curTime.tm_year, curTime.tm_mon,
                                             curTime.tm_mday, curTime.tm_hour,
                                             curTime.tm_min, curTime.tm_sec)
    if len(recvData) > 0:
        root = ""
        print("recvData = %s" % str(recvData))
        try:
            root = ET.fromstring(recvData)
        except xml.etree.ElementTree.ParseError as e:
            print(e)
            return ErrorFormat

        value = []
        if root.tag == "identification":
            for child in root:
                value.append(child.text)
        else:
            res = ('<?xml version="1.0" encoding="utf-8"?>'
                   '<reply package=identification status=fail time=%s/>' % timeStr)
            print("reply = %s" % res)
            conn.send(res)
            return ErrorFormat

        value[6] = get_todayPath() + '/' + value[6]
        dbName = db.setup()
        dbCon = db.connect(dbName)
        db.insert_carseat_table(dbCon, tuple(value))
        dbCon.close()
def emitRfidTag(self, val):
    if len(val) > 3:
        print(f"RFID-Tag: {str(val)} ({db.getAccountName(val)}) logged in.")
        self.activeTag = val
    if self.activeTag != "":
        self.lastTag = self.activeTag
    self.qmlRfidSignal.emit(str(val))


if __name__ == "__main__":
    try:
        logging.info("Application started")
        app = QGuiApplication(sys.argv)
        engine = QQmlApplicationEngine()
        db = database.setup()
        win = MainWindow()
        engine.rootContext().setContextProperty("Coffee", win)
        engine.load(os.path.join(os.path.dirname(__file__), "main.qml"))
        backend.raspi_gpio_init()
        if not engine.rootObjects():
            sys.exit(-1)
        sys.exit(app.exec_())
    except (KeyboardInterrupt, SystemExit):
        backend.GPIO.cleanup()
        sys.exit()
""" for week_number, util_report in zip(weeks, all_util_reports): print(f"Week {week_number}") for ur in util_report: name = ur[0] percent = ur[1] print(c.FOUR_SPACES + name, f"{percent}%") print("") if __name__ == "__main__": input_path = get_input_file_path_from_args() if not input_path.is_file(): print("The given filename could not be found!") sys.exit() SessionMaker = database.setup(c.DB_PATH, c.DB_URI) session = SessionMaker() entries_list = utils.get_entries_from_input_file(input_path) database.populate_from_entries(session, entries_list) database.create_all_workers_util_reports(session) weeks = database.get_unique_weeks(session) all_util_reports = sort_all_util_reports(session, weeks) print_all_util_reports(weeks, all_util_reports)
def load_config():
    config = ConfigParser.ConfigParser()
    config.read('server_config.cfg')
    if config.sections() == []:
        print 'Failed to load config file (server_config.cfg)'
        sys.exit(1)
    config_dict = config._sections['global']
    config_dict.pop('__name__')
    return config_dict


if __name__ == '__main__':
    cfg = load_config()
    options, args = _parse_options()
    print 'hi'
    try:
        # Set up and initialise the database from the config
        db.setup(cfg['database_username'], cfg['database_password'],
                 reset_db=options.reset_db)
        # Ensure we have a pstats directory to write into.
        if not os.path.exists('pstats'):
            os.makedirs('pstats')
        start_cherrypy(cfg['server_host'], cfg['server_port'])
    except Exception, ex:
        print str(ex)
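For reference, load_config() above expects a server_config.cfg file with a single [global] section; a hypothetical example with placeholder values (only the key names are taken from the code):

# server_config.cfg (hypothetical values):
#
# [global]
# database_username = example_user
# database_password = example_password
# server_host = 0.0.0.0
# server_port = 8080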
        points += question.getPunten(specialisatie)
    return int(points)


@app.route("/bad-request", methods=["GET"])
def badrequest():
    return render_template('errors/400.html'), 400


@app.errorhandler(404)
def page_not_found(e):
    return render_template('errors/404.html'), 404


@app.errorhandler(400)
def bad_request(e):
    return render_template('errors/400.html'), 400


if __name__ == "__main__":
    # Initialization, connecting with the database, retrieving questions
    db_conn = database.setup()
    vragen_dict = database.set_ans(db_conn, database.laad_vragen(db_conn))
    questions = []
    # Converts the questions from the dictionary to objects
    for id in vragen_dict:
        questions.append(Question(vragen_dict, id))
    app.run(debug=True)
from PyQt4.QtGui import QApplication

from database import setup
from Common.ui.window import FWindow
from Common.cmain import cmain
from Common.ui.cstyle import CSS
from ui.mainwindow import MainWindow

app = QApplication(sys.argv)


def main():
    """Configure localisation and launch the main window."""
    gettext_windows.setup_env()
    locale.setlocale(locale.LC_ALL, "")
    # gettext.install('mmain', localedir='locale', unicode=True)
    gettext.install("bmain.py", localedir="locale")

    window = MainWindow()
    window.setStyleSheet(CSS.appStyle)
    setattr(FWindow, "window", window)
    window.show()
    # window.showMaximized()
    sys.exit(app.exec_())


if __name__ == "__main__":
    setup()
    if cmain():
        main()
    cherrypy.engine.wait(cherrypy.process.wspbus.states.STARTED)
    cherrypy.log('CherryPy started')
    cherrypy.engine.block()


def load_config():
    config = ConfigParser.ConfigParser()
    config.read('server_config.cfg')
    if config.sections() == []:
        print 'Failed to load config file (server_config.cfg)'
        sys.exit(1)
    config_dict = config._sections['global']
    config_dict.pop('__name__')
    return config_dict


if __name__ == '__main__':
    cfg = load_config()
    try:
        # Set up and initialise the database from the config
        db.setup(cfg['database_username'], cfg['database_password'])
        # Ensure we have a pstats directory to write into.
        if not os.path.exists('pstats'):
            os.makedirs('pstats')
        start_cherrypy(cfg['server_host'], cfg['server_port'])
    except Exception, ex:
        print str(ex)
logger.setLevel(logging.DEBUG)

# create file handler which logs even debug messages
fh = logging.FileHandler('logs\\logging_file_{}.log'.format(
    datetime.now().strftime("%Y_%m_%d_%H_%M_%S")))
fh.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
logger.addHandler(fh)

logger.info("*************************************************")
# "Iniciando operação em" = "Starting operation at"
logger.info("Iniciando operação em {}".format(datetime.now()))
logger.info("*************************************************")

error = True
db.setup()
error = aux.dump_uasgs() and error
error = aux.dump_licitacoes() and error
error = aux.dump_modalidades_licitacoes() and error
error = aux.dump_orgaos() and error
error = aux.dump_irps() and error
error = aux.dump_precos_praticados() and error
error = aux.dump_registros_preco() and error
error = aux.dump_item_licitacao() and error
error = aux.dump_classes() and error
error = aux.dump_grupos() and error
error = aux.dump_pdms() and error
error = aux.dump_materiais() and error
    options, args = parser.parse_args()
    return options, args


def load_config():
    config = ConfigParser.ConfigParser()
    config.read('server_config.cfg')
    if config.sections() == []:
        print 'Failed to load config file (server_config.cfg)'
        sys.exit(1)
    config_dict = config._sections['global']
    config_dict.pop('__name__')
    return config_dict


if __name__ == '__main__':
    cfg = load_config()
    options, args = _parse_options()
    print 'hi'
    try:
        # Set up and initialise the database from the config
        db.setup(cfg['database_username'], cfg['database_password'],
                 reset_db=options.reset_db)
        # Ensure we have a pstats directory to write into.
        if not os.path.exists('pstats'):
            os.makedirs('pstats')
        start_cherrypy(cfg['server_host'], cfg['server_port'])
    except Exception, ex:
        print str(ex)
import asyncio
import os, sqlite3, sys
import aiohttp
import re
from copy import deepcopy

RunTmp = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'RunTmp')
sys.path.append(
    os.path.join(os.path.dirname(os.path.dirname(__file__)), 'libs'))

import database
import logutils

logger = logutils.getlogger("clfetch", "S")
database.setlog(logger)
dbcon = sqlite3.connect(os.path.join(RunTmp, "sqlite3.db"))
database.setup('sqlite3', dbcon, False)

CLURL = 'http://cc.ttum.pw/'
SAVEPATH = 'E:\\HenTai\\CaoLiu'

# database.execute("drop table t_caoliu")
# database.execute("create table t_caoliu (url varchar(256) primary key, title varchar(256), status char(6))", True)
# result = database.select(["*"], [], "t_caoliu", 0)
# print(result)

asyncq = asyncio.Queue()
asyncq.qsize()

# while 1:
#     try:
#         i = q.get()
#     except asyncio.queues.QueueEmpty:
#         break
# response = urllib.request.urlopen('http://python.org/')
    cherrypy.engine.block()


def load_config():
    config = ConfigParser.ConfigParser()
    config.read('server_config.cfg')
    if config.sections() == []:
        print 'Failed to load config file (server_config.cfg)'
        sys.exit(1)
    config_dict = config._sections['global']
    config_dict.pop('__name__')
    return config_dict


if __name__ == '__main__':
    cfg = load_config()
    print 'hi'
    try:
        # Set up and initialise the database from the config
        db.setup(cfg['database_username'], cfg['database_password'])
        # Ensure we have a pstats directory to write into.
        if not os.path.exists('pstats'):
            os.makedirs('pstats')
        start_cherrypy(cfg['server_host'], cfg['server_port'])
    except Exception, ex:
        print str(ex)
__author__ = 'Dennis'
import database
import incheckzuil_ui

db = "database/reisgegevens.db"

if __name__ == "__main__":
    database.setup(db)
    incheckzuil_ui.setupUI()


def send_ovnummer(db, ov_nummer):
    # Print the stored travel data ("reizen" = trips) for the given OV card number.
    gegevens = database.get_traveldata(db, ov_nummer)
    if gegevens:
        print("\novnummer:" + str(ov_nummer))
        print("naam: " + gegevens[0][2])
        print("\nReizen:")
        for row in gegevens:
            print("ReisID: " + str(row[0]))
            print("Beginstation: " + row[3])
            print("Eindstation: " + row[4] + "\n")
    else:
        # "No data available for this OV number"
        print("Geen gegevens beschikbaar van het ovnummer: " + ov_nummer)
__author__ = 'Dennis'
import database

db = "database/reisgegevens.db"


def write_amount_travels(db):
    # Write the number of trips per OV card number ("Aantal reizen per ov-nummer").
    data = database.get_countOV(db)
    with open("rapportage/rapportage.txt", "w") as f:
        f.write("Aantal reizen per ov-nummer:" + "\n")
        for ov in data:
            f.write(str(ov[0]) + " : " + str(ov[1]) + "\n")


if __name__ == "__main__":
    database.setup(db)
    write_amount_travels(db)
def test__create_database(create_database_setup):
    db_file = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '../db/testitem.db'))
    assert not os.path.exists(db_file)
    database.setup()
    assert os.path.exists(db_file)
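The create_database_setup fixture is not shown either. A plausible sketch, assuming it only guarantees the file is absent before the test and cleans up afterwards (the body is hypothetical):

import os
import pytest


@pytest.fixture
def create_database_setup():
    db_file = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '../db/testitem.db'))
    # Hypothetical: make sure no database file exists before the test runs.
    if os.path.exists(db_file):
        os.remove(db_file)
    yield
    # Teardown: remove whatever database.setup() created.
    if os.path.exists(db_file):
        os.remove(db_file)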
    if use_category:
        query += " " + category
    if not db.flag_word(category, word):
        print "Failed to flag word."
        return False
    print "Flagged word: [" + query + "]"
    return True


if __name__ == "__main__":
    print(CATEGORIES)
    # cat0 = filter(valid_word, find_hyponyms(CATEGORIES['Food']))
    # cat1 = filter(valid_word, find_hyponyms(CATEGORIES['Drinks']))
    # print len(cat0)
    # print len(cat1)
    # # for w in cat0:
    # #     print w['word']
    # for i in range(5):
    #     print "0: " + random_word(cat0)
    #     print "1: " + random_word(cat1)
    # print find_gifs('donut')
    db.setup()
    print gifs_for_word('foodstuff', 'coffee')
    print flag_gif('foodstuff', 'coffee',
                   'https://media1.giphy.com/media/Z6vszQ8Mweukw/200w.gif')
    print gifs_for_word('foodstuff', 'coffee')
    print valid_word("platypus", "joe", category="what", is_file=True)
#!/usr/bin/env python3
from flask import Flask
from flask_restful import Api
from routes import app
from flask_cors import CORS
from socket_server import run_socket_server
import database
import random

if __name__ == '__main__':
    random.seed(3)
    api = Api(app)
    CORS(app)
    database.setup("restaurants")
    # api.add_resource(Employees_Name, '/employees/<employee_id>')
    app.run(host='0.0.0.0', port='1337', debug=True, use_reloader=False)
def main():
    database.setup()

    # Create the EventHandler and pass it your bot's token.
    updater = Updater(config.token)

    # Get the dispatcher to register handlers
    dp = updater.dispatcher

    # region Handlers
    dp.add_handler(CommandHandler("start", start))
    dp.add_handler(CommandHandler("username", make_username))
    dp.add_handler(
        CommandHandler("set", set, pass_args=True, pass_chat_data=True))
    dp.add_handler(
        CommandHandler("del", delete, pass_args=True, pass_chat_data=True))
    dp.add_handler(CommandHandler("trend", trend))
    dp.add_handler(CommandHandler("blockwords", see_blocked_words))
    dp.add_handler(CommandHandler("delete", delete_last_post))
    dp.add_handler(CommandHandler("help", help_msg))
    dp.add_handler(CommandHandler("test", test))
    dp.add_handler(telegram.ext.CallbackQueryHandler(callback=callback_likes))

    # region admin's handlers
    dp.add_handler(
        CommandHandler("blacklist", blacklist, pass_args=True,
                       pass_chat_data=True))
    dp.add_handler(
        CommandHandler("unblacklist", unblacklist, pass_args=True,
                       pass_chat_data=True))
    dp.add_handler(
        CommandHandler("blockword", add_blocked_word, pass_args=True,
                       pass_chat_data=True))
    dp.add_handler(
        CommandHandler("unblockword", delete_blocked_word, pass_args=True,
                       pass_chat_data=True))
    dp.add_handler(CommandHandler("sendtrend", sendtrend))
    dp.add_handler(CommandHandler("DroptrenD", droptrend))
    dp.add_handler(CommandHandler("DropblockedworD", dropblockedword))
    dp.add_handler(CommandHandler("reset", restart))
    dp.add_handler(CommandHandler("mostliked", most_liked))
    dp.add_handler(CommandHandler("adminhelp", admin_help))
    # endregion

    # region edit message conversation handler
    conv_handler = ConversationHandler(
        entry_points=[CommandHandler('edit', edit_last_post)],
        states={editmsg: [MessageHandler(Filters.text, editmsg)]},
        fallbacks=[CommandHandler('cancel', cancel)])
    conv_handler2 = ConversationHandler(
        entry_points=[CommandHandler('edit_caption', edit_caption)],
        states={
            editmsg_captopn: [MessageHandler(Filters.text, editmsg_captopn)]
        },
        fallbacks=[CommandHandler('cancel', cancel)])
    dp.add_handler(conv_handler)
    dp.add_handler(conv_handler2)
    dp.add_handler(MessageHandler(
        Filters.all, twiiter))  # tweets the given text to the channel
    # endregion
    # endregion handlers

    # log all errors
    dp.add_error_handler(error)

    # Start the Bot
    updater.start_polling()
    updater.idle()
combined_data = pd.merge(df_cbsa, df_zips, left_on='zip_code',
                         right_on='zip')[['lat', 'lng']]
df = combined_data if df is None else df.append(combined_data)

assert df.isnull().values.any() == False, \
    "Sanity check failed: one or more zip codes could not be mapped to a (lat, lng)"

logger.info('running total # of coords: %s' % (df.shape, ))

df['progress'] = 0
logger.info('head:\n%s' % (df.head(), ))
logger.info('types:\n%s' % (df.dtypes, ))

setup()
with dbopen() as cur:
    cur.execute("""
        CREATE TABLE IF NOT EXISTS progress
        (lat REAL, lng REAL, progress INT, UNIQUE(lat, lng) ON CONFLICT IGNORE);
    """)
with dbopen(return_conn=True) as conn:
    df.to_sql('progress', conn, if_exists='append', index=False)
    # import pdb; pdb.set_trace()
import database
import settings
import time

settings_object = settings.SetConfig()
print(settings_object.dbhost)
# time.sleep(5)
# readback = database.read_db('')
# print(readback)
database.setup()  # First time run


def firsttimesetup():
    print("thing will happen")


def promptsetup():
    # Keep asking until the user gives a recognisable yes/no answer.
    while True:
        setup_choice = input("Do you want to run setup now? y/n ")
        if setup_choice.lower() in ("yes", "y"):
            firsttimesetup()
            break
        elif setup_choice.lower() in ("no", "n"):
            print("Ok, Shutting down")
            exit(2)
        else:
            print("type yes or no")
import sqlite3
import time

import database

conn = sqlite3.connect("dns.db")
database.setup('sqlite3', conn, False)
database.execute(
    "create table keyvalue (key varchar(10) PRIMARY key, value varchar(128))")

if 0:
    # Bulk-load one million zero-padded key/value pairs, committing every 5000 rows.
    for i in xrange(1000000):
        database.insert([['key', str(i).rjust(10, '0')],
                         ['value', str(i).rjust(10, '0')]], "keyvalue", False)
        if i % 5000 == 0:
            conn.commit()
else:
    # Time individual primary-key lookups.
    for i in xrange(10000, 20000):
        t1 = time.time()
        result = database.select(["count(1)"],
                                 [["key", "=", str(i).rjust(10, '0')]],
                                 "keyvalue", 1)
        t2 = time.time()
        print(t2 - t1)
import sys

import web

import database
import app.controllers

urls = (
    '/', 'app.controllers.index.Index',
    '/upload/log', 'app.controllers.upload.Log',
    '/upload/sample', 'app.controllers.upload.Sample',
)

if __name__ == "__main__":
    database.setup()
    app = web.application(urls, globals())
    app.run()
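The database.setup() called above is project-specific and its body is not shown. As a rough illustration only, a hypothetical sqlite-backed version might look like the sketch below; the table name and schema are invented:

import sqlite3


def setup(db_path="app.db"):
    """Hypothetical sketch: create a table the upload controllers could use."""
    conn = sqlite3.connect(db_path)
    conn.execute(
        "CREATE TABLE IF NOT EXISTS samples ("
        "id INTEGER PRIMARY KEY AUTOINCREMENT, "
        "filename TEXT NOT NULL, "
        "uploaded_at TEXT DEFAULT CURRENT_TIMESTAMP)")
    conn.commit()
    conn.close()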