def reset_logger(trainsize=None):
    """Switch to the log file within the folder for experiments with
    `trainsize` snapshots of training data. If `trainsize` is None,
    switch to the log file in the base folder.
    """
    # Remove all old logging handlers.
    logger = logging.getLogger()
    for hdlr in logger.handlers[:]:
        logger.removeHandler(hdlr)

    # Get the log filename and append a newline.
    log_filename = config.log_path(trainsize)
    with open(log_filename, 'a') as lf:
        lf.write('\n')

    # Get a new logging handler to the log file.
    handler = logging.FileHandler(log_filename, 'a')
    handler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
    handler.setLevel(logging.INFO)
    logger.addHandler(handler)
    print(f"Logging to {log_filename}")

    # Log the session header.
    if hasattr(sys.modules["__main__"], "__file__"):
        _front = f"({os.path.basename(sys.modules['__main__'].__file__)})"
        _end = time.strftime('%Y-%m-%d %H:%M:%S')
        _mid = '-' * (79 - len(_front) - len(_end) - 20)
        header = f"NEW SESSION {_front} {_mid} {_end}"
    else:
        header = f"NEW SESSION {time.strftime(' %Y-%m-%d %H:%M:%S'):->61}"
    logging.info(header)

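# Usage sketch (hypothetical driver code, not part of the module): switch to
# the base log first, then to a per-trainsize log before each experiment,
# assuming `config.log_path` maps a snapshot count to a file path as
# `reset_logger` expects.
def _demo_reset_logger():
    reset_logger()  # log to the base folder
    logging.info("starting experiments")
    for trainsize in (1000, 5000, 10000):
        reset_logger(trainsize)
        logging.info("training with %d snapshots", trainsize)
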
def syncDown():
    init()
    parts = config.log_path().split('/')
    parts.pop()
    path = '/'.join(parts)
    commands = [
        'cd ' + path,
        'git pull origin ' + config.syncRepoBranch() + ' --quiet >> /dev/null',
    ]
    doCommands(commands)

def init():
    if not config.syncRepoUrl():
        print("No sync_repo_url set. Aborting.")
        exit(2)
    parts = config.log_path().split('/')
    filename = parts.pop()
    directory = '/'.join(parts)
    if not hasRepo(directory):
        createRepo(directory, filename, config.syncRepoUrl(),
                   config.syncRepoBranch())

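# A minimal sketch of the `hasRepo` helper assumed by `init` (the project's
# real implementation lives elsewhere and may differ): treat a directory as
# a repository when it contains a .git folder. `createRepo` would clone or
# initialize the repository at `directory` accordingly.
def hasRepo(directory):
    import os
    return os.path.isdir(os.path.join(directory, '.git'))
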
def syncUp():
    init()
    parts = config.log_path().split('/')
    file = parts.pop()
    path = '/'.join(parts)
    commands = [
        'cd ' + path,
        'git add ' + file,
        'git commit -m "Update WorkTimer data" >> /dev/null',
        'git push origin ' + config.syncRepoBranch() + ' --quiet >> /dev/null',
    ]
    doCommands(commands)

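# A minimal sketch of the `doCommands` helper used by syncDown and syncUp
# (the real implementation lives elsewhere in the project and may differ):
# chaining with '&&' makes the leading 'cd' apply to the git commands that
# follow in the same shell.
def doCommands(commands):
    import subprocess
    subprocess.call(' && '.join(commands), shell=True)
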
def save(days):
    data = {}
    # Serialize with date keys.
    for day in days.days:
        data[day.date.strftime("%Y-%m-%d")] = day
    data = jsonlib.dumps(data, sort_keys=True,
                         default=lambda o: jsonDefault(o), indent=4)
    if data:
        with io.open(config.log_path(), 'w', encoding='utf8') as file:
            file.write(data)

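# A minimal sketch of the `jsonDefault` fallback assumed by `save` (the real
# serializer is defined elsewhere and may handle more types): expose an
# object's attributes so Day instances serialize as plain JSON objects.
def jsonDefault(obj):
    return obj.__dict__
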
def download(project, status, start_index=0, one_step=100):
    print(project + ": " + status + " download")
    has_more = True
    dir_path = os.path.join(download_dir, project, status)
    # Log file initialization.
    logfile_name = config.log_path(project, status)
    log_obj = open(logfile_name, 'a+')
    log_obj.write('start log\n')
    try_num = 3
    while has_more:
        file_name = '%s-%s.json' % (start_index, start_index + one_step - 1)
        file_path = os.path.join(dir_path, file_name)
        try:
            if os.path.exists(file_path):
                # Already downloaded; skip to the next range.
                start_index = start_index + one_step
                continue
            changes_str = get_changes(project, start_index, status)
        except Exception:
            if try_num > 0:
                # Retry the same range up to three more times.
                try_num = try_num - 1
            else:
                # Retries exhausted: log the failure and move on.
                try_num = 3
                log_message = '%s %s %s to %s exception!' % (
                    project, status, start_index, start_index + one_step - 1)
                print(log_message)
                log_obj.write(log_message + '\n')
                start_index = start_index + one_step
            continue
        change_dict_list = simplejson.loads(changes_str)
        if len(change_dict_list) == 0:
            break
        write_file(file_path, changes_str)
        log_message = '%s %s %s to %s has downloaded!' % (
            project, status, start_index, start_index + one_step - 1)
        log_obj.write(log_message + '\n')
        print(log_message)
        # Fewer results than one step means the download is complete.
        if len(change_dict_list) < one_step:
            break
        start_index = start_index + one_step
    log_obj.write('end log\n')
    print(project + " end")
    log_obj.close()

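# Usage sketch with hypothetical values (the project name and status are
# placeholders; `download_dir`, `get_changes`, and `write_file` are module
# helpers defined elsewhere): resume a paged download of merged changes for
# one project, 100 changes per request, starting from index 0.
def _demo_download():
    download('platform/build', 'merged', start_index=0, one_step=100)
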
def log():
    subprocess.call(["open", config.log_path()])

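# The `open` command above is macOS-specific. A cross-platform sketch (not
# part of the original; `_open_log_portable` is a hypothetical name): Linux
# desktops provide `xdg-open`, and Windows has os.startfile.
def _open_log_portable():
    import os
    import subprocess
    import sys
    path = config.log_path()
    if sys.platform == 'darwin':
        subprocess.call(['open', path])
    elif sys.platform == 'win32':
        os.startfile(path)
    else:
        subprocess.call(['xdg-open', path])
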
def main():
    try:
        commands = []
        for argument in sys.argv[1:]:
            if argument.startswith('-'):
                keyAndValue = argument.split('=')
                key = keyAndValue[0].strip().lstrip('-')
                if key == 'help':
                    commands.insert(0, 'help')
                if len(keyAndValue) > 1:
                    value = keyAndValue[1].strip()
                    if value == 'false':
                        config.overrides[key] = False
                    elif value == 'true':
                        config.overrides[key] = True
                    else:
                        config.overrides[key] = value
            else:
                commands.append(argument.strip())

        daysData = storage.yaml.load(config.log_path())
        days = importer.getDays(daysData)

        if not commands:
            output.status(days)
        elif commands[0] == 'timer':
            if len(commands) > 1:
                actions.timer(days, commands[1])
            else:
                actions.timer(days)
        elif commands[0] == 'update':
            if len(commands) > 1:
                actions.change(days, commands[1])
        elif commands[0] == 'pause':
            actions.pause(days)
        elif commands[0] == 'log':
            actions.log()
        elif commands[0] == 'export':
            actions.export(days)
        elif commands[0] == 'report':
            if len(commands) > 1 and commands[1] == 'text':
                actions.export(days)
            elif len(commands) > 1 and commands[1] == 'ods':
                actions.report.export_ods(days)
            else:
                actions.report.export_excel(days)
        elif commands[0] == 'import':
            if len(commands) > 1:
                actions.add(commands[1], days)
            else:
                print("No file to import given.")
        elif commands[0] == 'sync':
            actions.syncDown()
            actions.syncUp()
        else:
            print("WorkTimer " + __version__ + " by Max Melzer (moehrenzahn.de)")
            print("")
            print("Usage: run worktimer.py to display current timer stats.")
            print("       Use param 'timer [category]' to start or stop timer")
            print("       Use param 'update [category]' to change the currently tracked category")
            print("       Use param 'pause' to start or stop pause")
            print("       Use param 'sync' to attempt synchronisation with remote repository")
            print("       Use param 'log' to open the log file in default editor")
            print("       Use param 'export' to export your log to a human-readable text file")
            print("       Use param 'report' to create a work report spreadsheet")
            print("           'report excel --xlsx_template='template.xlsx'' for Excel")
            print("           'report ods --ods_template='template.ods'' for Open Document")
            print("       Use param 'import [file]' to import a log into your existing database")
            print("")
            print("Options (see config_default.json for complete list):")
            print("    --hours_per_day=8")
            print("    --log=path/to/your/work/log")
            print("    --sync_automatically=false")
            return 2
        return 0
    except ValueError as e:
        output.notification('Critical Error', str(e))
        return 1

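# Example invocations, assuming the conventional entry point below (the
# project may wire this up differently):
#
#   python worktimer.py timer coding
#   python worktimer.py report ods --ods_template=template.ods
#   python worktimer.py --hours_per_day=6 sync
if __name__ == '__main__':
    sys.exit(main())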