Example #1
def add_item(task_str):
    create_backup(tdl_log)

    new_task = ToDoTask(task_str)
    with open(tdl_log, 'a') as writer:
        writer.write("{t_date}\t{task}".format(t_date=new_task.date, task=new_task.task))
        writer.write('\n')
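
Examples #1 and #4 assume a ToDoTask class and a create_backup helper defined elsewhere in the project. The sketch below is a minimal guess at their shape, inferred only from how they are used here: the date and task attributes come from the format calls above, the backup_dir keyword from Example #5, and the date format and backup layout are assumptions.

import os
import shutil
import time


class ToDoTask(object):
    def __init__(self, task_str):
        self.date = time.strftime("%Y-%m-%d")  # date format is an assumption
        self.task = task_str


def create_backup(log_file, backup_dir=None):
    # copy the log aside before it is modified; default_backup_dir appears in
    # Example #5, but this fallback logic is an assumption
    if backup_dir is None:
        backup_dir = default_backup_dir
    shutil.copy2(log_file, os.path.join(backup_dir, os.path.basename(log_file)))
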
Example #2
def add_to_dir_jump(targ_dir):

    create_backup(dir_jump_file_log)
    print "Adding: " + targ_dir
    with open(dir_jump_file_log, "a") as f:
        f.write(targ_dir)  # write the directory named in the "Adding" message
        f.write("\n")
Example #3
def remove_from_dir_jump(targ_pos):

    jump_list = get_jump_list()

    print "Removing: " + jump_list[targ_pos]
    # del rather than pop: apparently more efficient, and it avoids the issue
    # where the removed directory is also prevDir
    del jump_list[targ_pos]

    create_backup(dir_jump_file_log)
    write_to_dir_jump(jump_list)
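
Examples #2 and #3 also rely on get_jump_list and write_to_dir_jump, which are not shown. Since add_to_dir_jump appends one directory per line, a minimal sketch consistent with that layout (the file format itself is an assumption) might be:

def get_jump_list():
    # one directory per line, blank lines dropped; layout inferred from add_to_dir_jump
    with open(dir_jump_file_log) as f:
        return [line.strip() for line in f if line.strip()]


def write_to_dir_jump(jump_list):
    # rewrite the whole jump file from the in-memory list
    with open(dir_jump_file_log, "w") as f:
        for d in jump_list:
            f.write(d + "\n")
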
Example #4
def delete_item(number_of_item):
    create_backup(tdl_log)
    task_list = load_task_list()

    print "Removing:\n\t {num}) {date} \t {task}".format(
        num=str(number_of_item),
        date=task_list[number_of_item - 1].date,
        task=task_list[number_of_item - 1].task)

    del task_list[number_of_item - 1]  # delete by index; remove() searches by value and could hit a duplicate task

    with open(tdl_log, 'w') as writer:
        for t in task_list:
            if isinstance(t, ToDoTask):
                writer.write("{t_date}\t{task}".format(t_date=t.date, task=t.task))
                writer.write('\n')
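
delete_item depends on load_task_list, which is not shown either. Given that add_item stores each task as a date, a tab, and the task text on one line, one plausible sketch (the parsing details are assumptions) is:

def load_task_list():
    # parse each "date<TAB>task" line back into a ToDoTask
    tasks = []
    with open(tdl_log) as f:
        for line in f:
            if not line.strip():
                continue
            t_date, task_str = line.rstrip("\n").split("\t", 1)
            t = ToDoTask(task_str)
            t.date = t_date  # keep the stored date rather than today's
            tasks.append(t)
    return tasks
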
Example #5
File: tag.py Project: rcamba/util
def merge_changes_to_base_tag_file(base_tag_file_log, tag_file_changes_log):
    print "Merging tag changes log with base file"

    with open(tag_file_changes_log) as reader:
        change_lines = reader.read().split('\n')
    with open(base_tag_file_log) as reader:
        base_tag_dict = json.load(reader, object_pairs_hook=collections.OrderedDict)

    change_lines = [l for l in change_lines if len(l) > 0]
    for line in change_lines:  # the last change won't be written yet; can only validate up to all changes except the last
        parse_change_line_to_tag_dict(line, base_tag_dict)

    # noinspection PyArgumentList
    base_tag_dict = collections.OrderedDict(sorted(base_tag_dict.items(), key=lambda item: item[0]))

    # validate: the base file plus the applied changes should match the tag dict loaded from the original log
    orig_tag_dict = load_tag_dict()
    if base_tag_dict == orig_tag_dict:

        with open(base_tag_file_log, 'w') as writer:
            tag_dict_str = json.dumps(base_tag_dict, indent=2, ensure_ascii=False,
                                      encoding="utf-8", separators=(',', ': '))
            writer.write(tag_dict_str.encode("utf-8"))

        open(tag_file_changes_log, 'w').close()

        create_backup(base_tag_file_log, backup_dir=os.path.join(default_backup_dir, "tag_file"))

    else:
        error_alert("Backup contents not similar to original. Merge aborted!")
        keys_not_in_orig = [key for key in base_tag_dict if key not in orig_tag_dict]
        keys_not_in_new = [key for key in orig_tag_dict if key not in base_tag_dict]
        if keys_not_in_orig:
            print "Keys in new but not in original", keys_not_in_orig
        if keys_not_in_new:
            print "Keys in original but not in new", keys_not_in_new

        if not keys_not_in_orig and not keys_not_in_new:
            # key sets match, so the mismatch must be in the tag values
            for k in orig_tag_dict:
                if orig_tag_dict[k] != base_tag_dict[k]:
                    print k, set(orig_tag_dict[k]).symmetric_difference(set(base_tag_dict[k]))
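
The symmetric_difference printout at the end reports, for each key whose tag lists disagree, exactly which tags appear on one side only. With hypothetical data:

orig = {"song.mp3": ["rock", "favorite"]}
new = {"song.mp3": ["rock", "upbeat"]}
for k in orig:
    if orig[k] != new[k]:
        print k, set(orig[k]).symmetric_difference(set(new[k]))
# prints: song.mp3 set(['favorite', 'upbeat'])  (set ordering may vary)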