def full_backup(path, cache_path=None):
    if cache_path is None:
        cache_path = tempfile.gettempdir()

    backup_date = datetime.utcnow()
    backup_dir = Dir(path)
    backup_key = backup_dir.path.strip('/').split('/')[-1]

    backup_dir_state = DirState(backup_dir)
    state_file = backup_dir_state.to_json(cache_path, dt=backup_date, fmt='{0}.state.{1}.json')

    created_file = FileFinder.make_key('full',
                                       backup_key,
                                       backup_date)

    created_file = os.path.join(cache_path, created_file)
    backup_dir.compress_to(created_file)

    # Create a new SigVault
    sigvault_file = FileFinder.make_key('sigvault',
                                        backup_key,
                                        backup_date)
    # keep the sigvault in the same cache directory as the rest of the backup
    sigvault_file = os.path.join(cache_path, sigvault_file)

    sv = sigvault.open_vault(sigvault_file, 'w', base_path=backup_dir.path)

    for f in backup_dir.iterfiles():
        sv.add(f)

    sv.close()

    files = [state_file, created_file, sigvault_file]
    files = [{'path': f, 'size': os.path.getsize(f)} for f in files]
    total = sum([f['size'] for f in files])

    return {'backup_key': backup_key, 'backup_date': backup_date, 'files': files, 'total': total}
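
# A minimal usage sketch, assuming the module imports are in place and the
# target directory exists; '/var/www/mysite' and '/tmp/backup-cache' are only
# illustrative paths, not part of the original example.
report = full_backup('/var/www/mysite', cache_path='/tmp/backup-cache')
print(report['backup_key'], report['backup_date'])
for entry in report['files']:
    print('{0} ({1} bytes)'.format(entry['path'], entry['size']))
print('total: {0} bytes'.format(report['total']))
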
Example #3
def get_dir_changes(directory, configfile):
    d = Dir(directory)
    dir_state_new = DirState(d)
    try:
        # Compare against the previously saved state, then persist the new one
        dir_state_old = DirState.from_json(configfile + '.json')
        dir_state_new.to_json(fmt=configfile + '.json')
        return dir_state_new - dir_state_old
    except (IOError, OSError):
        # No previous state file: write a baseline and report the directory as new
        dir_state_new.to_json(fmt=configfile + '.json')
        return 'new'
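
# Hedged usage sketch: on the first run there is no saved state, so the helper
# returns the string 'new' after writing a baseline; on later runs it returns
# the diff dict produced by subtracting the old DirState from the new one (the
# backup code above relies on its 'created' and 'updated' keys). The arguments
# below are illustrative.
changes = get_dir_changes('/var/www/mysite', 'mysite_state')
if changes == 'new':
    print('first run: baseline state written to mysite_state.json')
else:
    print('created:', changes['created'])
    print('updated:', changes['updated'])
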
Example #4
File: dirdiff.py  Project: Al12rs/dirdiffpy
import json

import xxhash

from dirtools import Dir, DirState


def _xxhash_file(filepath, blocksize=4096):
    # Hash the file in fixed-size chunks so large files are never read into memory at once
    xxhash64 = xxhash.xxh64()
    with open(filepath, 'rb') as fp:
        while True:
            data = fp.read(blocksize)
            if not data:
                break
            xxhash64.update(data)
    return xxhash64


def xxhash_file(filepath, blocksize=4096):
    # Return the hex digest so it can be stored in, and compared against, the JSON state
    return _xxhash_file(filepath, blocksize).hexdigest()


d = Dir("C:\\Modding\\WJModlists\\NOISE\\mods\\Interesting NPCs SE")
dir_state = DirState(d, None, xxhash_file)

#with open("./3dnpc_state.json", 'w') as f:
#            f.write(json.dumps(dir_state.state))

old_state = DirState(d,
                     DirState.from_json('./out/3dnpc_state.json').state,
                     xxhash_file)

diff = dir_state - old_state

with open("./out/diff.json", 'w') as f:
    f.write(json.dumps(diff))
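
# Sketch of a quick summary of the diff computed above: the result of
# subtracting two DirState objects is a plain dict (which is why it can be
# json.dumps'd), so the interesting keys can be counted before dumping it.
# Keys other than 'created' and 'updated' may vary with the dirtools version,
# hence the .get() with a default.
for key in ('created', 'updated'):
    print('{0}: {1} file(s)'.format(key, len(diff.get(key, []))))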
def incremental_backup(path, cache_path=None):
    if cache_path is None:
        cache_path = tempfile.gettempdir()

    files = []

    backup_date = datetime.utcnow()
    backup_dir = Dir(path)
    backup_key = backup_dir.path.strip('/').split('/')[-1]

    # TODO check if it's really the last state on the remote storage
    last_state = Dir(cache_path).get('{0}.state.*'.format(backup_key),
                                     sort_reverse=True,
                                     abspath=True)

    last_state = DirState.from_json(last_state)
    current_state = DirState(backup_dir)

    last_sv = sigvault.SigVaultReader(cache_path, backup_key)

    diff = current_state - last_state

    state_file = current_state.to_json(cache_path,
                                       dt=backup_date,
                                       fmt='{0}.state.{1}.json')
    files.append(state_file)

    created_file = FileFinder.make_key('created', backup_key, backup_date)
    created_file = os.path.join(cache_path, created_file)
    # Store files from diff['created'] into a new archive
    created_file = process_created(created_file, diff['created'],
                                   backup_dir.path)
    if created_file:
        files.append(created_file)

    updated_file = FileFinder.make_key('updated', backup_key, backup_date)
    updated_file = os.path.join(cache_path, updated_file)

    # Compute and store delta from the list of updated files
    updated_file = process_updated(updated_file, diff['updated'],
                                   backup_dir.path, last_sv)
    if updated_file:
        files.append(updated_file)

    if diff['created'] or diff['updated']:
        sigvault_file = FileFinder.make_key('sigvault', backup_key,
                                            backup_date)

        sigvault_file = os.path.join(cache_path, sigvault_file)
        new_sv = sigvault.open_vault(sigvault_file,
                                     'w',
                                     base_path=backup_dir.path)
        for f in itertools.chain(diff['created'], diff['updated']):
            new_sv.add(f)
        new_sv.close()
        files.append(sigvault_file)

    files = [{'path': f, 'size': os.path.getsize(f)} for f in files]
    total = sum([f['size'] for f in files])

    return {
        'backup_key': backup_key,
        'backup_date': backup_date,
        'files': files,
        'total': total
    }
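
# Hedged end-to-end sketch: a first full_backup() seeds the cache directory
# with a state file and a SigVault, after which incremental_backup() only
# archives what changed since that state. The paths and cache directory are
# illustrative, not taken from the original project.
cache = '/tmp/backup-cache'
full_backup('/var/www/mysite', cache_path=cache)
# ... the directory changes over time ...
report = incremental_backup('/var/www/mysite', cache_path=cache)
print('incremental backup stored {0} bytes across {1} file(s)'.format(
    report['total'], len(report['files'])))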