# Parse step (presumably the run_2_parse_UTC_logs module invoked by the driver
# below): process every UTC log waiting in UTC_logs/ and archive each one.
import glob
import os

import parsercode.utils as utils
import parsercode.parsefile as parsefile  # assumed module path, following the parsercode.utils convention


def main():
    utils.SetWorkingDirectory(__file__)
    utils.Log('Clearing the last draw database\n')
    # 'w' mode truncates the database file; close the handle right away.
    open('draw_database_last.txt', 'w').close()
    file_list = glob.glob(os.path.join('UTC_logs', 'UTC_log*.log'))
    for filename in file_list:
        parsefile.ProcessFile(filename, verbose=False, append_production=True)
        utils.ArchiveUTClog(filename)
# Fetch step (presumably the run_1_fetch_UTC_logs module invoked by the driver
# below): copy new UTC logs from source_dir into UTC_logs/, skipping any file
# that has already been fetched or parsed.
import glob
import os
import shutil

import parsercode.utils as utils

# source_dir is expected to be defined at module level; its value is not shown
# here (see the hypothetical config helper sketched after this script).


def main():
    utils.Log('Looking for logs in {0}\n'.format(source_dir))
    utils.SetWorkingDirectory(__file__)
    try:
        file_list = glob.glob(os.path.join(source_dir, 'UTC_log*.log'))
    except Exception:
        utils.Log('Cannot get list of files at: {0}\n'.format(source_dir))
        raise
    # Logs already fetched (UTC_logs/) or already processed (UTC_logs/parsed/).
    existing_file_list_1 = glob.glob(os.path.join('UTC_logs', 'UTC_log*.log'))
    existing_file_list_2 = glob.glob(
        os.path.join('UTC_logs', 'parsed', 'UTC_log*.log'))
    existing = existing_file_list_1 + existing_file_list_2
    existing = [os.path.basename(x) for x in existing]
    for filename in file_list:
        basename = os.path.basename(filename)
        if basename in existing:
            utils.Log('Already in UTC_logs, or parsed: {0}\n'.format(basename))
        else:
            utils.Log('Attempting copy: {0}\n'.format(filename))
            shutil.copyfile(filename, os.path.join('UTC_logs', basename))
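# source_dir above is assumed to be configured at module level; the original
# value is not shown. Purely as an illustration (hypothetical helper and file
# name, not part of the original scripts), it could be read from a one-line
# config file kept next to the script:
def get_source_dir(config_path='source_dir.txt'):
    """Return the log source directory stored in a one-line config file."""
    with open(config_path) as handle:
        return handle.read().strip()


# Hypothetical module-level usage:
# source_dir = get_source_dir()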
""" Script to generate mega-summary """ import glob import os import traceback import parsercode.utils as utils if __name__ == '__main__': utils.Log('Starting\n') try: utils.SetWorkingDirectory(__file__) utils.SumUpAllUserData('all_user_aggregates.txt') except Exception as ex: utils.Log(traceback.format_exc() + '\n') raise
# Land-count aggregation step: delegates to the aggregator module.
import parsercode.utils as utils
import parsercode.aggregator as aggregator  # assumed module path, following the parsercode.utils convention


def main():
    utils.SetWorkingDirectory(__file__)
    aggregator.run_land_counts()
# Convenience driver: fetch newly available UTC logs, then run the parse step.
import parsercode.utils as utils

import run_1_fetch_UTC_logs
import run_2_parse_UTC_logs


def main():
    utils.SetWorkingDirectory(__file__)
    run_1_fetch_UTC_logs.main()
    run_2_parse_UTC_logs.main()
    run_2_parse_UTC_logs.main()
# Aggregation step: delegates to the aggregator module's main entry point.
import parsercode.utils as utils
import parsercode.aggregator as aggregator  # assumed module path, following the parsercode.utils convention


def main():
    utils.SetWorkingDirectory(__file__)
    aggregator.main()
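# None of the run_* scripts above show their module-level entry points; they
# are presumably either imported by the convenience driver or run directly.
# Assuming they follow the same convention as the mega-summary script (log the
# start, run under a try/except that records the traceback, then re-raise), a
# guard appended to a script such as the aggregation step above might look
# like this sketch (it relies on main() and the utils import defined in that
# script):
import traceback

if __name__ == '__main__':
    utils.Log('Starting\n')
    try:
        main()
    except Exception:
        utils.Log(traceback.format_exc() + '\n')
        raise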