def scload(): logging.basicConfig(level=logging.INFO, format=crawl_utils.LOGFORMAT) crawl_utils.lock_or_die() print "Populating db (one-off) with logfiles and milestones. " + \ "Running the scoresd.py daemon is preferred." db = connect_db() init_listeners(db) def proc_file(fn, filename): info("Updating db with %s" % filename) try: f = open(filename) try: fn(db, filename, f) finally: f.close() except IOError: warn("Error reading %s, skipping it." % log) cursor = db.cursor() set_active_cursor(cursor) try: if not OPT.no_load: master = create_master_reader() full_load(cursor, master) if not OPT.load_only: import pagedefs pagedefs.rebuild(cursor) finally: set_active_cursor(None) cursor.close() report_query_times() db.close()
def scload(): logging.basicConfig(level=logging.INFO, format=crawl_utils.LOGFORMAT) crawl_utils.lock_or_die() print "Populating db (one-off) with logfiles and milestones. " + \ "Running the scoresd.py daemon is preferred." db = connect_db() init_listeners(db) def proc_file(fn, filename): info("Updating db with %s" % filename) try: f = open(filename) try: fn(db, filename, f) finally: f.close() except IOError: warn("Error reading %s, skipping it." % log) cursor = db.cursor() set_active_cursor(cursor, db) try: if not OPT.no_load: master = create_master_reader() full_load(cursor, master) if not OPT.load_only: import pagedefs pagedefs.rebuild(cursor) finally: set_active_cursor(None) cursor.close() report_query_times() db.close()
# NOTE(review): this flattened line holds cleanup_listeners (calls e.cleanup(db)
# on every LISTENERS entry), create_master_reader (wraps MILESTONES/LOGS in
# MilestoneFile/Logfile readers, the latter filtered through the Blacklist,
# inside a MasterXlogReader), and a __main__ section that duplicates scload()
# but mentions the taildb.py daemon. The __main__ section is TRUNCATED here:
# it ends at a bare "finally:" with no body — the remainder (presumably
# f.close() and the load loop) is missing from this view, so it is left
# untouched rather than reconstructed by guesswork.
def cleanup_listeners(db): for e in LISTENERS: e.cleanup(db) def create_master_reader(): blacklist = Blacklist(BLACKLIST_FILE) processors = ([MilestoneFile(x) for x in MILESTONES] + [Logfile(x, blacklist) for x in LOGS]) return MasterXlogReader(processors) if __name__ == '__main__': logging.basicConfig(level=logging.INFO) crawl_utils.lock_or_die() print "Populating db (one-off) with logfiles and milestones. " + \ "Running the taildb.py daemon is preferred." load_extensions() db = connect_db() init_listeners(db) def proc_file(fn, filename): info("Updating db with %s" % filename) try: f = open(filename) try: fn(db, filename, f) finally:
# NOTE(review): this flattened line begins mid-definition — "e.initialize(db)"
# is the tail of a loop (presumably init_listeners iterating LISTENERS) whose
# header is not visible here. It then repeats cleanup_listeners,
# create_master_reader (same logic as above, different bracket spacing), and
# the same taildb.py __main__ section, again TRUNCATED at a bare "finally:".
# Both the missing head and the missing tail are outside this view, so the
# line is left byte-identical rather than reconstructed by guesswork.
e.initialize(db) def cleanup_listeners(db): for e in LISTENERS: e.cleanup(db) def create_master_reader(): blacklist = Blacklist(BLACKLIST_FILE) processors = ([ MilestoneFile(x) for x in MILESTONES ] + [ Logfile(x, blacklist) for x in LOGS ]) return MasterXlogReader(processors) if __name__ == '__main__': logging.basicConfig(level=logging.INFO) crawl_utils.lock_or_die() print "Populating db (one-off) with logfiles and milestones. " + \ "Running the taildb.py daemon is preferred." load_extensions() db = connect_db() init_listeners(db) def proc_file(fn, filename): info("Updating db with %s" % filename) try: f = open(filename) try: fn(db, filename, f) finally: