def full_backup(path, cache_path=None):
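    """Perform a full backup of the directory at `path`.

    Dump the directory state to a JSON file, compress the whole tree into
    an archive, and record a signature for every file in a fresh sigvault
    so that later incremental backups can compute deltas against it.  All
    generated files are written to `cache_path` (the system temporary
    directory by default).

    Return a dict with the backup key, the backup date, the generated
    files and their total size in bytes.
    """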
    if cache_path is None:
        cache_path = tempfile.gettempdir()

    backup_date = datetime.utcnow()
    backup_dir = Dir(path)
    backup_key = backup_dir.path.strip('/').split('/')[-1]

    backup_dir_state = DirState(backup_dir)
    state_file = backup_dir_state.to_json(cache_path,
                                          dt=backup_date,
                                          fmt='{0}.state.{1}.json')

    created_file = FileFinder.make_key('full', backup_key, backup_date)

    created_file = os.path.join(cache_path, created_file)
    backup_dir.compress_to(created_file)

    # Create a new SigVault
    sigvault_file = FileFinder.make_key('sigvault', backup_key, backup_date)
    sigvault_file = os.path.join(cache_path, sigvault_file)

    sv = sigvault.open_vault(sigvault_file, 'w', base_path=backup_dir.path)

    for f in backup_dir.iterfiles():
        sv.add(f)

    sv.close()

    files = [state_file, created_file, sigvault_file]
    files = [{'path': f, 'size': os.path.getsize(f)} for f in files]
    total = sum(f['size'] for f in files)

    return {
        'backup_key': backup_key,
        'backup_date': backup_date,
        'files': files,
        'total': total
    }
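

# Example usage of full_backup (a sketch; the paths below are placeholders):
#
#     manifest = full_backup('/home/user/documents',
#                            cache_path='/var/backups/cache')
#     # manifest['files'] lists the state file, the archive and the sigvault;
#     # manifest['total'] is their combined size in bytes.
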
def incremental_backup(path, cache_path=None):
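    """Perform an incremental backup of the directory at `path`.

    Load the most recent cached state for this backup key, diff it against
    the current directory state, archive newly created files, store deltas
    for updated files (computed against the existing sigvaults), and record
    the signatures of every created or updated file in a new sigvault.

    Return a dict with the backup key, the backup date, the generated
    files and their total size in bytes.
    """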
    if cache_path is None:
        cache_path = tempfile.gettempdir()

    files = []

    backup_date = datetime.utcnow()
    backup_dir = Dir(path)
    backup_key = backup_dir.path.strip('/').split('/')[-1]

    # TODO check if it's really the last state on the remote storage
    last_state = Dir(cache_path).get('{0}.state.*'.format(backup_key),
                                     sort_reverse=True,
                                     abspath=True)

    last_state = DirState.from_json(last_state)
    current_state = DirState(backup_dir)

    # Reader over the signatures stored by previous backups for this key
    last_sv = sigvault.SigVaultReader(cache_path, backup_key)

    diff = current_state - last_state

    state_file = current_state.to_json(cache_path,
                                       dt=backup_date,
                                       fmt='{0}.state.{1}.json')
    files.append(state_file)

    created_file = FileFinder.make_key('created', backup_key, backup_date)
    created_file = os.path.join(cache_path, created_file)
    # Store files from diff['created'] into a new archive
    created_file = process_created(created_file, diff['created'],
                                   backup_dir.path)
    if created_file:
        files.append(created_file)

    updated_file = FileFinder.make_key('updated', backup_key, backup_date)
    updated_file = os.path.join(cache_path, updated_file)

    # Compute and store delta from the list of updated files
    updated_file = process_updated(updated_file, diff['updated'],
                                   backup_dir.path, last_sv)
    if updated_file:
        files.append(updated_file)

    if diff['created'] or diff['updated']:
        sigvault_file = FileFinder.make_key('sigvault', backup_key,
                                            backup_date)

        sigvault_file = os.path.join(cache_path, sigvault_file)
        new_sv = sigvault.open_vault(sigvault_file,
                                     'w',
                                     base_path=backup_dir.path)
        for f in itertools.chain(diff['created'], diff['updated']):
            new_sv.add(f)
        new_sv.close()
        files.append(sigvault_file)

    files = [{'path': f, 'size': os.path.getsize(f)} for f in files]
    total = sum(f['size'] for f in files)

    return {
        'backup_key': backup_key,
        'backup_date': backup_date,
        'files': files,
        'total': total
    }
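

if __name__ == '__main__':
    # Minimal command-line demo (a sketch, not part of the original module):
    # run a full backup of the given directory, then an incremental one.
    # Since nothing changes in between, the incremental pass should only
    # produce a new state file.
    import sys
    import pprint

    if len(sys.argv) != 2:
        sys.exit('usage: {0} <directory>'.format(sys.argv[0]))

    pprint.pprint(full_backup(sys.argv[1]))
    pprint.pprint(incremental_backup(sys.argv[1]))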