def main():
    """Thermostat main loop: poll temperature, log it, and drive the furnace."""
    c = conn.cursor()
    database.init(c)
    conn.commit()
    while True:
        checkSchedules(c)
        desiredTemp = getDesiredTemp()
        currTemp = readTemp()
        database.saveTemperature(c, currTemp)
        conn.commit()
        print(currTemp)
        # Only act when the web server reported a target temperature.
        if desiredTemp is not None:
            if currTemp < desiredTemp:
                turnFurnaceOn()
            # Hysteresis buffer avoids rapid on/off cycling.
            elif currTemp + settings.buff > desiredTemp:
                turnFurnaceOff()
        # sleep before the next loop
        time.sleep(settings.sleepIntervalSec)
def main():
    """Wire up the Telegram bot: handlers, module commands, then start polling."""
    print('Yes hello, this is yeen')
    database.init()
    logging.basicConfig(
        level=logging.DEBUG if config['debug_mode'] else logging.INFO)
    updater = Updater(token=config['telegram']['token'])
    dispatcher = updater.dispatcher
    # Global commands
    dispatcher.add_handler(CommandHandler('start', command_start))
    dispatcher.add_handler(CommandHandler('hello', command_hello))
    dispatcher.add_handler(CommandHandler('special', command_special))
    # Module commands: each module registers its own handlers.
    for module in (sin, blacklist_user, blacklist_chat, blacklist_global,
                   usagelog, e621):
        module.setup_dispatcher(dispatcher)
    # Let's go
    updater.start_polling(clean=config['telegram']['clean'])
    print('Idling')
    updater.idle()
def main(start_page_url, end_page_num, thread_num, counter):
    """Crawl movie listing pages from start_page_url up to page end_page_num.

    Pages whose parse raises are logged to failed_movies.txt and skipped;
    Ctrl-C aborts the thread. Completion time is appended to the log file.
    """
    # Creates a new database at "movies.db".
    database.init()
    next_page = start_page_url
    # The page number is encoded as the last URL path component.
    while int(os.path.basename(next_page)) <= end_page_num:
        print('Thread {} Entering URL:{}'.format(thread_num, next_page))
        try:
            next_page = parse_page(next_page, thread_num, counter)
        except KeyboardInterrupt:
            print('Exit by user')
            return
        except Exception as e:
            print('Thread {} facing exception:'.format(thread_num))
            print('************************************* {}: '.format(e) + next_page)
            log_fail_movie(next_page, e, thread_num)
            next_page = skip_page(next_page)
            print('Thread {} skip page'.format(thread_num))
            continue
    # Log thread completion time.
    with open('failed_movies.txt', 'a') as f:
        f.write('Thread {} complete at {}\n'.format(
            thread_num, str(datetime.datetime.now().time())))
def init(config_updates=None):
    """Delayed init so the caller can adjust `config` first.

    Updates can be passed as param here or set onto `config` upfront,
    e.g. `config.SETTING = updates.PREFIX_SETTING or updates.SETTING`.
    """
    if config_updates:
        config.from_object(config_updates)
    global assets, bootstrap, db  # , cache
    # Assets
    assets = Environment()
    # Bootstrap
    bootstrap = Bootstrap()
    # Database
    database.init(config)
    # Flask-SQLAlchemy (plain SQLAlchemy alternative would be: db = database.db)
    db = SQLAlchemy()
    database.db = db
    # Remaining subsystems all use the same delayed-init pattern.
    aggregator.init(config)
    poller.init(config)
    summarizer.init(config)
def run(self):
    """Thread body: trigger an inventory refill once per 24-hour window."""
    print("Starting " + self.name)
    # Local import keeps module load cheap. The original also imported
    # `concurrent.futures.thread`, which was never used; dropped.
    import notification
    # Make global variables available in local context.
    global running, pref
    database.init(pref.get_preference("database"))
    refill = 24 * 60 * 60   # refill period in seconds (24 h)
    sleepy_time = 5         # polling interval in seconds
    cur_time = int(calendar.timegm(time.gmtime()))
    # Align last_time to the start of the current refill window.
    last_time = cur_time - (cur_time % refill)
    while running:
        cur_time = int(calendar.timegm(time.gmtime()))
        new = last_time + refill
        if cur_time > new:
            # Window elapsed: refill and advance the window marker.
            notification.send_refill()
            control.inventory_refill()
            last_time = new
        else:
            # Not yet time; sleep briefly and re-check.
            time.sleep(sleepy_time)
    database.close()
def setUp(self):
    """Create a fresh database populated with all users and ideas."""
    with app.test_request_context():
        db.init()
        register_all_users()
        post_ideas()
def query():
    """Query endpoint with fuzzy-search support.

    JSON body fields:
        key:    column to search (an unknown key yields an empty list)
        query:  search string (required; missing -> 400)
        strict: False selects fuzzy matching
    Returns the matching papers serialized as JSON.
    """
    # Pull the fields out of the posted JSON payload.
    payload = flask.request.get_json()
    key = payload['key'] if 'key' in payload else None
    query_string = payload['query'] if 'query' in payload else None
    strict = payload['strict'] if 'strict' in payload else None
    con = get_db()
    database.init(con)
    # Fill in defaults for fields the client omitted (or sent as null).
    if key is None:
        key = "all"
    if query_string is None:
        return "Query shouldn't be None", 400
    if strict is None:
        strict = False
    # Run the lookup against the database.
    essays = database.query(con, key, query_string, strict)
    return json.dumps(essays), 200
def init():
    """Initialise the database, required folders, and the TLS server socket.

    Returns the listening SSL-wrapped server socket.
    """
    # DB
    db.init()
    # Make sure every folder the server writes to exists.
    for folder in REQUIRED_FOLDERS:
        if exists(folder):
            log('Folder "{}" exists'.format(folder))
        else:
            log('Creating folder "{}"'.format(folder))
            makedirs(folder)
    # Sockets
    soc = socket.socket()
    soc.bind(('', int(PORT)))
    soc.listen(MAX_CONNECTIONS)
    log("Listening on port: {}".format(PORT))
    # NOTE(review): ssl.wrap_socket is deprecated in favour of SSLContext;
    # kept as-is here to preserve behaviour.
    ssl_soc = ssl.wrap_socket(
        soc,
        ssl_version=ssl.PROTOCOL_TLSv1,
        cert_reqs=ssl.CERT_NONE,  # not sure if this is important
        server_side=True,
        keyfile=KEY_FILE,
        certfile=CERT_FILE)
    return ssl_soc
def backupDatabase():
    """Snapshot the database file into the backup folder, timestamped in UTC."""
    # Close first so the copy sees a consistent, flushed file.
    database.close()
    if not os.path.exists(globals.BACKUPFOLDER_NAME):
        os.makedirs(globals.BACKUPFOLDER_NAME)
    # os.path.join is portable, unlike the original manual "/" concatenation.
    backup_name = datetime.utcnow().strftime("%Y-%m-%d_%H-%M") + ".db"
    copyfile(globals.DATABASE_NAME,
             os.path.join(globals.BACKUPFOLDER_NAME, backup_name))
    # Re-open so callers can keep using the database afterwards.
    database.init()
def setUp(self):
    """Build a database seeded with users, ideas, messages and comments."""
    with app.test_request_context():
        db.init()
        register_all_users()
        post_ideas()
        send_messages()
        post_comments()
def init(): global crawler_id if len(sys.argv) < 2: print "Usage: %s [crawler id]" % sys.argv[0] sys.exit(1) signal.signal(signal.SIGINT, kill) crawler_id = int(sys.argv[1]) database.init(crawler_id)
def main():
    """Entry point: restore window geometry, open the DB, run the UI loop."""
    root = tk.Tk()
    settings.load()
    # Restore the last saved window size/position.
    root.geometry(settings.get('main_window_geometry'))
    app = MainWindow(root)
    db.init()
    root.mainloop()
def route_random():
    """Redirect to a random article (excluding the main page)."""
    database.init()
    row = database.fetch(
        "SELECT title FROM articles WHERE title!='Main Page' "
        "ORDER BY RANDOM() LIMIT 1")
    database.close()
    # `is not None` rather than `!= None` (PEP 8 identity comparison).
    if row is not None:
        title = row['title'].replace(' ', '_')
        return redirect('/' + title)
    # Empty table: fall back to the front page.
    return redirect('/')
def launch():
    """Boot the core, register plugins with the DB, and open the main window."""
    core.init()
    module_store = core.get_plugin_store()
    database.init(module_store)
    graph_renderer = GraphRenderer()
    window = LavenderWindow(graph_renderer, width=1000, height=600)
    pyglet.app.run()
def main():
    """Interactive wallet/node REPL: dispatch typed commands to `interface`."""
    database.init()
    networking.BASE_NODE.node()

    operations = {}

    def get_help():
        # Render the operation names as a comma-separated list.
        operations_string = str(list(operations.keys()))[1:-1].replace("'", "")
        return f"Possible operations are: {operations_string}"

    def loop(function_name, pause):
        # Repeatedly run an operation, printing results, sleeping `pause` s.
        function = operations[function_name]
        pause = float(pause)
        while True:
            value = function()
            if value is not None:
                print(value)
            time.sleep(pause)

    operations.update({
        'mine': interface.mine_top,
        '?': get_help,
        'help': get_help,
        'exit': exit,
        'height': interface.block_height,
        'keypair': interface.keypair,
        'loop': loop,
        'balance': interface.balance,
        'send': interface.transact,
        'address': interface.public_key,
        'peers': interface.active_peers,
        'dump': interface.private_key,
        'load': interface.load_key,
        'diff': interface.difficulty,
    })

    while True:
        try:
            user_input = input(">> ")
        except KeyboardInterrupt:
            # Ctrl-C behaves like typing "exit".
            user_input = 'exit'
        function_name, *args = user_input.split(' ')
        value = None
        try:
            function = operations[function_name.lower()]
        except KeyError:
            # Unknown command: show the help text instead.
            function = get_help
        try:
            value = function(*args)
        except TypeError:
            # Wrong arity: report how many arguments were expected.
            print(
                f"{function_name} expected "
                f"{len(inspect.signature(function).parameters)} arguments. Got "
                f"{len(args)}.")
        if value is not None:
            print(value)
def __init__(self):
    """Set up the DB, picture folders and icons, then start the timer job."""
    # Initialise database and picture folder (log message kept verbatim).
    logger.info(u'初始化数据库、图片文件夹')
    db.init()
    util.makedirs(common.BING_PIC_DIR)
    util.get_icons()
    # Exit if another instance is already running.
    util.suicider()
    # Apply today's wallpaper immediately.
    util.change_wallpaper()
    job.Timing(1, "Thread-1", 1).start()
def init():
    """Mark all tasks 'started', record today's date, and re-check started tasks."""
    today = time.strftime('%Y-%m-%d', time.localtime(time.time()))
    database.init()
    database.execute('update tasklist set status = \'started\'')
    # `with` guarantees the log file is closed even if the write fails
    # (the original left the handle open on error).
    with open('date.log', 'w') as fw:
        fw.write(today)
    tasklist = database.getlist('select name from tasklist')
    statuslist = database.getlist('select status from tasklist')
    # Walk names and statuses in lockstep instead of indexing by range().
    for name, status in zip(tasklist, statuslist):
        if status == 'started':
            checktask(name)
def main():
    """Entry point: build the main window, set its icon, open the DB, run."""
    root = tk.Tk()
    root.geometry('1280x720+200+200')
    app = MainWindow(root)
    # The .ico file is materialised only long enough for Tk to load it.
    icon.create_icon()
    root.iconbitmap('flac-librarian.ico')
    icon.delete_icon()
    database.init()
    root.mainloop()
def main():
    """Run the server from the script's own directory until `roll` finishes."""
    # Work relative to this file regardless of the caller's cwd.
    abspath = os.path.abspath(__file__)
    dname = os.path.dirname(abspath)
    os.chdir(dname)
    database.init()
    parseBlacklist()
    if not DEBUG:
        myhttp.myLogger.verbose = False
    server = startServer()
    try:
        roll()
    except Exception:
        # Close the server, then re-raise with the original traceback:
        # bare `raise` instead of the original `raise e`, which rebinds
        # the exception and adds a new frame to the traceback.
        server.close()
        raise
def __init__(self, result_type):
    """Initialise plot configuration defaults and DB-backed lookup tables."""
    self._result_type = result_type
    # Grouping defaults. The original assigned marker_groupby twice
    # ('' then 'Year'); only the effective final value is kept.
    self.plot_groupby = ''
    self.marker_groupby = 'Year'
    # Plot geometry.
    self.define_axis_length = False
    self.plot_width = cn.def_plot_width
    self.plot_height = cn.def_plot_height
    self.ypar = ''
    # Axis limits (0 means "auto" until define_axis_limits is set).
    self.define_axis_limits = False
    self.xax_min = 0
    self.xax_max = 0
    self.yax_min = 0
    self.yax_max = 0
    self.marker_average_method = ''
    self.stat_line_method = ''
    # Filters (empty string / zero means "no filter").
    self.site_filter = ''
    self.lane_filter = ''
    self.direction_filter = ''
    self.date_filter = ''
    self.month_from_filter = 0
    self.month_to_filter = []
    self.year_from_filter = 0
    self.year_to_filter = 0
    self.time_aggregation_interval = 'day'
    self.time_from_filter = 0
    self.time_to_filter = 23
    self.chart_default_fields = []
    self.moving_average_days = 0
    self._traffic_type = 1  # slow traffic or motorized traffic
    self.parameter_dic = {}
    db.init()
    # Lookup tables; only all_directions_dic is filled from the DB here.
    self.direction_dic = {}
    self.all_directions_dic = db.get_all_directions_dic()
    self.site_dic = {}
    self.vehicle_dic = {}
    self.year_list = []
    self.month_dic = {}
    # NOTE(review): site_dic is empty at this point, so its values() list
    # is empty — presumably filled in later; confirm against callers.
    self.sort_dic = {
        'weekday_from': list(cn.weekday_short_dic.values()),
        'month_from': list(cn.month_short_dic.values()),
        'year_from': list(range(2014, 2030)),
        'hour_from': list(cn.time_dic.values()),
        'site_id': list(self.site_dic.values()),
        'direction_id': [],
        'week_of_year': list(range(1, 52)),
    }
def server_init():
    """Create the picture folder, start the daily folder timer, init the DB."""
    # Folder that stores all captured pictures.
    if not os.path.isdir(PicDirName):
        os.mkdir(PicDirName)
    # Timer that creates a new picture sub-folder every 24 hours;
    # delay 0 makes it fire immediately on startup.
    PicTimer = threading.Timer(0, fun_timer)
    PicTimer.start()
    # Initialise the database.
    dbName = db.setup()
    dbCon = db.connect(dbName)
    if not dbCon:
        print('ERROR: %r not supported or unreachable, exit' % db)
        return
    db.init(dbCon)
    dbCon.close()
def test_mark_item_as_tweeted(self):
    """After marking every untweeted item, none should remain untweeted."""
    db_conn = database.init(database_file="python_mark_item_test.db")
    # Seed the table with one tweeted and two untweeted items.
    c = db_conn.cursor()
    seed = [
        ("first title", "first_url", "python", 1),
        ("second title", "second_url", "python", 0),
        ("third title", "third_url", "python", 0),
    ]
    c.executemany(
        """
        INSERT INTO items(title, url, category, is_tweeted)
        VALUES (?,?,?,?)
        """,
        seed,
    )
    # Collect the ids of all untweeted rows.
    c.execute("SELECT id FROM items WHERE is_tweeted=0")
    item_ids = [row[0] for row in c.fetchall()]
    # Mark each one of them.
    for item_id in item_ids:
        database.mark_item_as_tweeted(db_conn, item_id)
    # No untweeted rows should be left.
    c.execute("SELECT id FROM items WHERE is_tweeted=0")
    self.assertEqual(c.fetchall(), [])
def route_do_edit():
    """Persist an article edit (or create one) and redirect to the article."""
    title = form('title')
    # NOTE(review): int(form('id')) raises before the None check below if
    # the field is missing — pre-existing behaviour, left unchanged.
    id = int(form('id'))
    content = form('content')
    hpot = form('email')  # honeypot field: real users leave it empty
    # BUG FIX: the original tested `hpot is not ""` — an identity check
    # that is not guaranteed to match `hpot != ""` and raises a
    # SyntaxWarning on modern Pythons.
    if title is None or id is None or content is None or hpot != "":
        return 'Error'
    # When the wiki is locked, edits require the configured password.
    if app.config['locked']:
        if form('pass') != app.config['pass']:
            return redirect('/')
    if not database.init():
        return error(app.config['db_err_title'], app.config['db_err_msg']), 503
    if id == 0:
        # New article.
        database.query('INSERT INTO articles VALUES(NULL, ?, ?, 0)',
                       [escape(title), escape(content)])
    else:
        # Demote the current row to a revision, then insert the new text.
        database.query("UPDATE articles SET revision = 1 WHERE title=?", [title])
        database.query("INSERT INTO articles VALUES(NULL, ?, ?, 0)",
                       [escape(title), escape(content)])
    database.close()
    return redirect(url_for('route_article', title=title))
def tweet_rust():
    """Tweet the oldest untweeted Rust item, then mark it as tweeted."""
    print("[rust] Tweeting ...")
    # Initialize database.
    db_conn = database.init(config.DATABASE_FILE)
    # Authenticated Twitter client for the Rust account.
    auth = tweepy.OAuthHandler(config.RUST_CONSUMER_KEY,
                               config.RUST_CONSUMER_SECRET)
    auth.set_access_token(config.RUST_ACCESS_TOKEN,
                          config.RUST_ACCESS_TOKEN_SECRET)
    twitter = tweepy.API(auth)
    # Get the tweet data.
    print("[rust] Get the tweet data")
    item = database.get_untweeted_item(db_conn, "rust")
    if item is None:
        # Nothing left to tweet; clean up and bail out.
        print("[rust] The tweet data is None")
        db_conn.close()
        return
    # Tweet the status and record it as tweeted.
    status = "{} {} #rustlang".format(item.title, item.url)
    twitter.update_status(status=status)
    database.mark_item_as_tweeted(db_conn, item.id)
    print("[rust] Tweeted {}".format(item.url))
    db_conn.close()
def __init__(self):
    """Open the database handles used to store parsed CSV health data.

    NOTE(review): the original docstring documented `date` and `user_id`
    parameters that this constructor does not take; they were removed.
    """
    self._db_inserts, self._db_extended = database.init()
def RunMultiProcess():
    """Read init.json, verify DB access, and spawn one proxy server per port."""
    global route_table, mysession
    init_json = None
    try:
        with open('init.json', 'r') as f:
            init_json = json.load(f)
    except Exception as e:
        print("Loading Init Data Failed")
        print(e)
    else:
        database_url = init_json['DataBaseUrl']
        # Probe the database once before forking any workers.
        status, ret_scoped_session = database.init(database_url)
        if not status:
            return
        ret_scoped_session.remove()
        l = multiprocessing.Lock()
        process = []
        # One worker process per configured port.
        for port in init_json['RouteTable']:
            route_table[int(port)] = init_json['RouteTable'][port]
            p = Process(target=run_server, args=(int(port), l, database_url))
            process.append(p)
            p.start()
        for p in process:
            p.join()
def run_server( port, lock, database_url, HandlerClass=ProxyRequestHandler, ServerClass=ThreadingHTTPServer, protocol="HTTP/1.1", ): scoped_session = None try: server_address = ('', port) HandlerClass.protocol_version = protocol status, ret_scoped_session = database.init(database_url) if not status: return scoped_session = ret_scoped_session HandlerClass.ScopedSession = ret_scoped_session HandlerClass.process_lock = lock httpd = ServerClass(server_address, HandlerClass) sa = httpd.socket.getsockname() lock.acquire() print "Serving HTTP Proxy on", sa[0], "port", sa[1], "...", '\n' lock.release() httpd.serve_forever() database.session.close() except Exception as e: lock.acquire() print(e) lock.release() traceback.print_exc()
def test_get_untweeted_python_item(self):
    """get_untweeted_item must return the oldest untweeted python item."""
    db_conn = database.init(database_file="python_items_test.db")
    # Seed the table: the first row is already tweeted.
    cursor = db_conn.cursor()
    seed = [
        ("first title", "first_url", "python", 1),
        ("second title", "second_url", "python", 0),
        ("third title", "third_url", "python", 0),
    ]
    cursor.executemany(
        """
        INSERT INTO items(title, url, category, is_tweeted)
        VALUES (?,?,?,?)
        """,
        seed,
    )
    cursor.close()
    # The oldest untweeted row is "second title".
    item = database.get_untweeted_item(db_conn, "python")
    self.assertEqual(item.title, "second title")
    self.assertEqual(item.url, "second_url")
    self.assertEqual(item.category, "python")
    db_conn.close()
def __init__(self):
    """Bind the ORM engine and construct the DAOs this object uses."""
    engine = database.init()
    Base.metadata.bind = engine
    DBSession = sessionmaker(bind=engine)
    self.session = DBSession()
    # One DAO per entity, all sharing the same session.
    self.node_dao = NodeDao(self.session)
    self.watcher_dao = WatcherDao(self.session)
    self.node_watcher_dao = NodeWatcherDao(self.session)
def init(config_updates=None):
    """Delayed init so config may be updated first.

    Updates can be passed as param here or set onto `config` upfront,
    e.g. `config.SETTING = updates.PREFIX_SETTING or updates.SETTING`.
    """
    if config_updates:
        config.from_object(config_updates)
    global db, r
    # Database
    database.init(config)
    db = database.db
    # Redis connection for the task queue.
    r = redis.StrictRedis(config.RQ_REDIS_HOST,
                          config.RQ_REDIS_PORT,
                          config.RQ_REDIS_DB)
def run(self):
    """Thread body: poll the RFID reader and accept/reject scanned cards."""
    global running, pref
    print("Starting " + self.name)
    database.init(pref.get_preference("database"))
    # Serial link to the card reader (shared key, port from preferences).
    ard = arduino.Interface(b'ZxPEh7ezUDq54pRv',
                            pref.get_preference("arduino_port"))
    # Communication loop.
    while running:
        if self._scanned_card(ard.read_rfid()):
            ard.send_accept()
        else:
            ard.send_reject()
    # threads.remove(self)
    print("Exiting " + self.name)
    database.close()
def authenticate(username, password):
    """Validate a username/password pair against the database.

    Returns the string "True" on success, "False" otherwise (callers
    expect string results, not booleans).
    """
    # Reject empty credentials outright; no DB work needed.
    if username == "" or password == "":
        return "False"
    database.init("../data/database.db")
    print(username + "," + password)
    try:
        # A truthy user object means the credentials matched.
        user = database.get_user_by_login(username, password)
        return "False" if not user else "True"
    finally:
        # BUG FIX: the original placed database.close() after the returns,
        # making it unreachable; `finally` guarantees it always runs.
        database.close()
def __init__(self, *args, **kwargs):
    """Bot setup: autoembed, NEIS client, cogs, jishaku, and async DB init."""
    self.initialize_autoembed()
    super().__init__(*args, **kwargs)
    self.neis = neispy.Client(os.environ["API_KEY"])
    cogs.load(self)
    self.load_extension("jishaku")
    # database.init is a coroutine; schedule it on the bot's event loop.
    self.loop.create_task(database.init())
def init_security():
    """Initialize Flask-Security on top of the app's database."""
    # The database must exist before the security models can bind to it.
    import database
    db = database.init(app)
    import models
    # Then Flask-Security itself.
    from flask_security import Security, SQLAlchemyUserDatastore
    global user_datastore
    user_datastore = SQLAlchemyUserDatastore(db, models.User, models.Role)
    security = Security(app, user_datastore)
    return security
def route_article(title):
    """Render the current (revision 0) text of an article, or a placeholder."""
    title = title.replace('_', ' ')
    if not database.init():
        return error(app.config['db_err_title'], app.config['db_err_msg']), 503
    article = database.fetch(
        'SELECT * FROM articles WHERE title = ? AND revision = 0', [title])
    database.close()
    # `is not None` rather than `!= None` (PEP 8 identity comparison).
    if article is not None:
        return render_template('article.html', title=title,
                               content=article['content'])
    return render_template('article.html', title=title,
                           content='There is currently no text on this page')
def main_menu():
    """Run the main menu of the program until the user enters "quit"."""
    repeat = False
    numberBase = "default"
    curDB = "root"
    prompt = ("Repeat is %s.\nCurrent Default Database: %s.\n"
              "Enter a valid command, or help.")
    aliases, dbAliases = init_aliases()
    dbConfig, production = config.init_config_file()
    database = db.init(dbConfig)
    command = ""
    # Read-eval loop: parse each entry and dispatch on the command word.
    while command != "quit":
        entry = ui.get_input(
            prompt % (("on" if repeat else "off"), curDB), "lower")
        command, args = parse_entry(aliases, dbAliases, entry)
        if command == "help":
            print("\nI'm working on the help file!!")
        elif command == "repeat":
            repeat = not repeat
            print("\nRepeat is now %s." % ("on" if repeat else "off"))
        elif command == "run":
            tracker.produce(database, production)
        elif command in ("switch", "root", "global"):
            curDB = change_database(curDB, command)
        elif command == "database":
            db.basic_actions(database, curDB, *tuple(args))
        elif command == "load":
            db.load(database, *tuple(args))
        elif command == "save":
            db.save(database, dbConfig, *tuple(args))
        elif command == "wipe":
            # Destructive: require explicit confirmation before resetting.
            wipePrompt = "Are you SURE you want to erase the database? Y/N"
            if ui.get_binary_choice(wipePrompt):
                database = db.init()
        elif command != "quit":
            print("\n\"%s\" is not a recognized command." % command)
def route_edit(title):
    """Show the edit form for an article (a blank form if it doesn't exist)."""
    title = title.replace('_', ' ')
    if not database.init():
        return error(app.config['s_db_title'], app.config['s_db_msg']), 503
    article = database.fetch('SELECT * FROM articles WHERE title = ?',
                             [escape(title)])
    database.close()
    # `is not None` rather than `!= None` (PEP 8 identity comparison).
    if article is not None:
        return render_template('edit.html', title=article['title'],
                               id=article['id'], content=article['content'])
    # No such article yet: id 0 signals "create" to the save handler.
    return render_template('edit.html', title=title, id=0, content='')
def Start():
    """Boot the bot: config, auth, mods, game output pipe, and database."""
    global BOT, proc, A, config_debugmode, db
    loadConfig()
    auth.load()
    BOT = Bot(config_prefix, config_rconip, config_rcon, config_debugmode,
              player.PlayerDatabase())
    A = API()
    BOT.Startup()
    loadMods()
    proc = GameOutput(config_serversocket)
    uname = os.uname()
    db = database.init()
    # Announce the running version and host platform.
    A.say('UrTBot V%s loaded on %s (%s/%s)'
          % (__Version__, sys.platform, uname[2], uname[4]))
    loop()
def run():
    """Fetcher entry point: merge Shoutcast stations, then purge stale ones."""
    utils.init_logging()
    logger.info('Monobox fetcher starting up')
    config.init()
    database.init(config.get('common', 'database_uri'))
    # max_age bounds how old a station may be before it is purged;
    # an unparseable value is a fatal configuration error.
    max_age = config.get('fetcher', 'max_age')
    if max_age:
        timedelta = utils.str_to_timedelta(max_age)
        if timedelta is None:
            logger.error('Cannot convert configuration parameter '
                         'max_age (%s) to timedelta' % max_age)
            sys.exit(1)
    else:
        timedelta = None
    urls = config.get('fetcher', 'sc_urls').split(' ')
    listeners_min = config.getint('fetcher', 'listeners_min')
    for url in urls:
        merge_sc_stations(url, listeners_min)
    if timedelta is not None:
        purge_old_stations(timedelta)
def route_revisions(title, rev):
    """Show one revision of an article, or list all when rev == "list"."""
    title = title.replace('_', ' ')
    if not database.init():
        return error(app.config['db_err_title'], app.config['db_err_msg']), 503
    # Unknown article: bounce back to the article page.
    if not database.fetch('SELECT 1 from articles WHERE title = ?', [title]):
        return redirect('/%s' % title)
    # NOTE(review): unlike route_article, this handler never calls
    # database.close() — confirm whether that is intentional.
    if rev == "list":
        revisions = database.fetch_all(
            'SELECT * FROM articles WHERE title = ?', [title])
        return render_template('revision.html', rev="list", title=title,
                               revisions=revisions)
    elif rev != 0:
        article = database.fetch(
            'SELECT * FROM articles WHERE title = ? AND revision = ?',
            [title, rev])
        # BUG FIX: a nonexistent revision used to crash on
        # article['content']; redirect to the article instead.
        if article is None:
            return redirect('/%s' % title)
        return render_template('revision.html', rev=rev, title=title,
                               content=article['content'])
    else:
        return redirect('/%s' % title)
def Start():
    """Boot the bot: config, logging, mods, game output pipe, and database."""
    global BOT, proc, A, config_debugmode, db, config, log
    config = ConfigFile()
    thread_handler.init(config)
    loadConfig(config)
    log = debug.init(config)
    BOT = Bot(config_prefix, config_rconip, config_rcon, config_debugmode,
              config=config)
    A = API()
    # @TODO Fix this bullshit
    BOT.Startup()
    loadMods()
    proc = GameOutput(config_serversocket)
    db = database.init(config)
    uname = os.uname()
    # Announce the running version and host platform.
    A.say("UrTBot V%s loaded on %s (%s/%s)"
          % (_version_, sys.platform, uname[2], uname[4]))
    loop()
def setUp(self):
    """Point the app at a throwaway temp database and create a test client."""
    # mkstemp returns (fd, path); keep the fd so tearDown can close it.
    self.db_fd, routes.app.config['DATABASE'] = tempfile.mkstemp()
    routes.app.config['TESTING'] = True
    self.app = routes.app.test_client()
    self.db = database.init(routes.app)
def init(config_file=None):
    """Aggregator server startup: logging, configuration, then the database."""
    utils.init_logging()
    logger.info('Monobox aggregator server starting up')
    config.init(config_file)
    database.init(config.get('common', 'database_uri'))
def main():
    """FIX Gateway entry point: parse args, init DB, load plugins, run loop."""
    parser = argparse.ArgumentParser(description='FIX Gateway')
    parser.add_argument('--debug', action='store_true',
                        help='Run in debug mode')
    parser.add_argument('--config-file', type=argparse.FileType('r'),
                        help='Alternate configuration file')
    parser.add_argument('--log-config', type=argparse.FileType('w'),
                        help='Alternate logger configuration file')
    args, unknown_args = parser.parse_known_args()
    logging.config.fileConfig(logconfig_file)
    log = logging.getLogger()
    if args.debug:
        log.setLevel(logging.DEBUG)
    log.info("Starting FIX Gateway")
    config = configparser.ConfigParser()
    # Keep configparser from lower-casing option names.
    config.optionxform = str
    config.read(config_file)
    try:
        database.init(config)
    except Exception as e:
        log.error("Database failure, Exiting")
        print(e)
        # We don't want to run with a screwed-up database.
        # BUG FIX: the original had an unreachable `return` after this raise.
        raise
    log.info("Setting Initial Values")
    try:
        for item in config.items("initial"):
            database.write(item[0], item[1])
    except Exception as e:
        log.error("Problem setting initial values from configuration - {0}".format(e))
    # TODO: Add a hook here for post database creation code
    # TODO: Need to do some more thorough error checking here
    # Load every connection plugin ("conn_*") configured with load=True.
    for each in config:
        if each[:5] == "conn_":
            if config.getboolean(each, "load"):
                module = config.get(each, "module")
                try:
                    load_plugin(each[5:], module, config)
                except Exception as e:
                    logging.critical("Unable to load module - " + module +
                                     ": " + str(e))
    # TODO add a hook here for pre module run code
    for each in plugins:
        plugins[each].run()
    iteration = 0
    while True:
        try:
            job = plugin.jobQueue.get(timeout=1.0)
            if job == "QUIT":
                break
        except KeyboardInterrupt:
            # BUG FIX: log message typo "keybaord" corrected.
            log.info("Termination from keyboard received")
            break
        except queue.Empty:
            pass
        iteration += 1
        # Every four passes, check whether any plugin is still alive.
        if iteration % 4 == 0:
            running_count = 0
            for each in plugins:
                if plugins[each].is_running():
                    running_count += 1
            if running_count == 0:
                log.info("No plugins running, quitting")
                break
    for each in plugins:
        plugins[each].stop()
    log.info("FIX Gateway Exiting Normally")
# NOTE(review): this chunk begins mid-function (its `def` line is not visible
# here) and its whitespace has been flattened onto a single line, so the code
# is left untouched rather than reconstructed by guesswork. It appears to run
# MongoDB map-reduce jobs over the `wacc` collections and, when executed as a
# script, to compute gender/relationship frequency for "male".
# TODO: restore the original line structure from version control.
map = Code(open('./maps/search_info.js', 'r').read()) reduce = Code(open('./reduces/search_info.js', 'r').read()) result = db.connection.wacc.searches.map_reduce( map, reduce, out=SON([("reduce", "tmp")]) ) # Now if the key already exist the reduce function is used to merge the results. return result def gender_relationship_frequency(scope): """ . """ map = Code(open('./maps/gender_relationship_frequency.js', 'r').read()) reduce = Code(open('./reduces/frequency.js', 'r').read()) result = db.connection.wacc.stalkers.map_reduce( map, reduce, "tmp", scope=scope ) return result if __name__ == '__main__': db.init() db.connection.register([Stalker, Search, Victim]) #db.connection.wacc.drop_collection('stalkers') #db.connection.wacc.drop_collection('searches') #db.connection.wacc.drop_collection('victims') #stalker_relationship_frequency() #search_location_frequency() #gender_location_frequency() gender_relationship_frequency("male")
import database

# Reset the subscription tables and seed them with known test data.
database.init()
database.clearSubscriptions()

# (subscriber, subscribed-to, subreddit) triples to register, in order.
_TEST_SUBSCRIPTIONS = [
    ('Watchful12', 'TestBotSlave1', 'SubTestBot1'),
    ('Watchful12', 'TestBotSlave2', 'SubTestBot1'),
    ('Watchful12', 'TestBotSlave3', 'SubTestBot1'),
    ('Watchful12', 'TestBotSlave4', 'SubTestBot1'),
    ('Watchful12', 'TestBotSlave1', 'SubTestBot2'),
    ('Watchful12', 'TestBotSlave2', 'SubTestBot2'),
    ('Watchful12', 'TestBotSlave5', 'SubTestBot2'),
]
for subscriber, target, subreddit in _TEST_SUBSCRIPTIONS:
    database.addSubscription(subscriber, target, subreddit)

database.resetAllSubscriptionTimes()
database.close()
# NOTE(review): this Python 2 chunk begins mid-script — it references
# `id_pairs`, `cur`, `problem_detail_url` and `compile` bindings defined
# before the visible region — and its whitespace has been flattened onto a
# single line, so the code is left untouched rather than reconstructed by
# guesswork. It appears to crawl TopCoder problem-detail pages, re-login
# every 50 requests, scrape "Top Submission" table cells, and feed them to
# gen_solution(). TODO: restore the original line structure from version
# control.
c = 0 for c_id, p_id in id_pairs: print c_id, p_id url = problem_detail_url % (c_id, p_id) c += 1 if c % 50 == 0: topcoder.login() try: page = topcoder.get_page(url) except Exception, e: print c_id, p_id, e continue #import pdb #pdb.set_trace() p = compile('<td class="statText">Top Submission</td>[\d\D]*?</tr>') divs = p.findall(page) testcase_crawled = False for div in divs: p = compile('<td class="statText" align="right">[\d\D]*?</td>') tds = p.findall(div) for i in xrange(4): gen_solution(cur, tds[i], i, p_id) if __name__ == '__main__': topcoder = Topcoder() topcoder.login() conn, cur = init() gen_href(cur, topcoder) close(conn)
def setUp(self):
    """Initialise the test database and register every user."""
    with app.test_request_context():
        db.init()
        register_all_users()
def setUp(self):
    """Initialise a fresh test database before each test."""
    with app.test_request_context():
        db.init()
def setUp(self):
    """Initialise the test database with all users and their messages."""
    with app.test_request_context():
        db.init()
        register_all_users()
        send_messages()
from flask import Flask
from flask_restful import Api
import logging

import config
import database

# Flask application, REST API wrapper, and database wiring.
app = Flask(__name__)
api = Api(app)
app.config.from_object('config.BaseConfig')
database.init(app)

# Resource imports must come after database.init so their models can bind.
from resources.sellers import Sellers
from resources.products import Products
from resources.tokens import Tokens

# config.configure_logging_relative('logging.ini')
logging.basicConfig(
    filename="ecommerce.log",
    filemode='a',
    format='%(asctime)s,%(msecs)d %(name)s %(levelname)s %(message)s',
    datefmt='%H:%M:%S',
    level=logging.DEBUG)
logger = logging.getLogger(__name__)

logger.info("Registering resources")
api.add_resource(Sellers, '/sellers')
api.add_resource(Products, '/products')
api.add_resource(Tokens, '/token')
def init_database():
    """Handler: (re)initialise the database and report success as plain text."""
    db.init()
    return "Database Initialized"