def full_backup(path, cache_path=None):
    if cache_path is None:
        cache_path = tempfile.gettempdir()

    backup_date = datetime.utcnow()
    backup_dir = Dir(path)
    backup_key = backup_dir.path.strip('/').split('/')[-1]

    backup_dir_state = DirState(backup_dir)
    state_file = backup_dir_state.to_json(cache_path,
                                          dt=backup_date,
                                          fmt='{0}.state.{1}.json')

    created_file = FileFinder.make_key('full',
                                       backup_key,
                                       backup_date)

    created_file = os.path.join(cache_path, created_file)
    backup_dir.compress_to(created_file)

    # Create a new SigVault
    sigvault_file = FileFinder.make_key('sigvault',
                                        backup_key,
                                        backup_date)
    # use the cache_path argument, not a module-level CACHE_PATH
    sigvault_file = os.path.join(cache_path, sigvault_file)

    sv = sigvault.open_vault(sigvault_file, 'w', base_path=backup_dir.path)

    for f in backup_dir.iterfiles():
        sv.add(f)

    sv.close()

    files = [state_file, created_file, sigvault_file]
    files = [{'path': f, 'size': os.path.getsize(f)} for f in files]
    total = sum(f['size'] for f in files)

    return {
        'backup_key': backup_key,
        'backup_date': backup_date,
        'files': files,
        'total': total,
    }
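
# A hedged usage sketch (the path is hypothetical): full_backup returns a
# manifest describing what it wrote into cache_path.
result = full_backup('/path/to/mydir')
for entry in result['files']:
    print(entry['path'], entry['size'])
print('total:', result['total'], 'bytes')
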
def get_full_backups(key, cache_path=None):
    if cache_path is None:
        cache_path = tempfile.gettempdir()

    _dir = Dir(cache_path)
    fulls = _dir.files('{0}.full.*'.format(key),
                       sort_reverse=True,
                       abspath=True)
    fulls = [_extract_dt_from_key(k)[1] for k in fulls]
    return fulls
Example #3
    def __init__(self, base_path='.', key=None):
        # Open every sigvault archive for the given key, newest first, and
        # keep the tarfile handles around for later signature lookups.
        self.tars = []
        _dir = Dir(base_path)
        for sv_file in _dir.files('{0}.sigvault.*.tgz'.format(key),
                                  sort_reverse=True):
            # bltn_open is tarfile's alias for the builtin open
            archive = bltn_open(os.path.join(_dir.path, sv_file), 'rb')
            tar = tarfile.open(fileobj=archive, mode='r:gz')
            self.tars.append(tar)
Example #5
def parse_tmp_dir(directory):
    """List (root, name, size, sha256) tuples for every file under tmp~."""
    directory = os.path.join(directory, 'tmp~')
    if not os.path.exists(directory):
        return []
    d = Dir(directory)
    tmp_files = []
    for root, dirs, files in d.walk():
        for filename in files:
            path = os.path.join(root, filename)
            tmp_files.append((root, filename, os.stat(path).st_size,
                              get_file_sha256_hash(path)))
    return tmp_files
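
# parse_tmp_dir relies on a get_file_sha256_hash helper that is not shown in
# this snippet; a minimal sketch, assuming it returns a hex digest string:
import hashlib

def get_file_sha256_hash(filepath, blocksize=65536):
    # Hash in fixed-size blocks so large files don't have to fit in memory.
    sha256 = hashlib.sha256()
    with open(filepath, 'rb') as fp:
        for block in iter(lambda: fp.read(blocksize), b''):
            sha256.update(block)
    return sha256.hexdigest()
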
Example #7
    def process_restore_folder(self):
        restoredir = os.path.join(self.clientdir, 'restore~')
        d = Dir(restoredir)

        for root, dirs, files in d.walk():
            for filename in files:
                if filename in self.file_dict:
                    # Recreate the original directory layout, then decrypt
                    # the file back to its original relative path.
                    source = os.path.join(root, filename)
                    target = os.path.join(self.clientdir,
                                          self.file_dict[filename])
                    if not os.path.exists(os.path.dirname(target)):
                        os.makedirs(os.path.dirname(target))
                    decrypt_file(self.key, source, target)
                    os.unlink(source)
Example #8
def parse_existing_clientdir(pwd, directory):
    """Map sha256(pwd + relative_path) -> relative_path for every file,
    skipping the '~'-suffixed work directories."""
    file_dict = {}
    curdir = os.getcwd()
    d = Dir(directory)
    for root, dirs, files in d.walk():
        for fileName in files:
            if fileName[-1] == '~' or root[-1] == '~':
                continue
            relFile = root[len(curdir) + 1:] + '/' + fileName
            # hashlib needs bytes under Python 3
            digest = hashlib.sha256((pwd + relFile).encode('utf-8')).hexdigest()
            file_dict[digest] = relFile
    return file_dict
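
# Hypothetical usage: the returned mapping is what process_restore_folder
# (Example #7) consults to translate hashed names back to original paths.
file_dict = parse_existing_clientdir('s3cret', os.getcwd())
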
Example #9
 def latest_version_ip_list(self):
     """
     Get a list of latest version packages from repository storage.
     @return:    List of dictionary items of IPs available in repository storage.
     """
     files = Dir(config_path_storage, exclude_file='').files()
     sortkeyfn = lambda s: s[1]
     tuples = []
     for repofile in files:
         if repofile.endswith(".tar"):
             f, fname = os.path.split(repofile)
             if f.startswith("pairtree_root"):
                 version = f[-5:] if f[-5:] != '' else '00001'
                 repoitem = (repofile, version)
                 tuples.append(repoitem)
     tuples.sort(key=sortkeyfn, reverse=True)
     items_grouped_by_version = []
     # groupby only groups consecutive items, hence the sort just above
     for key, valuesiter in groupby(tuples, key=sortkeyfn):
         items_grouped_by_version.append(
             dict(version=key, items=list(v[0] for v in valuesiter)))
     lastversionfiles = []
     for version_items in items_grouped_by_version:
         for item in version_items['items']:
             p, f = os.path.split(item)
             p2 = os.path.join(self.repository_storage_dir,
                               p[:p.find("/data/")])
             obj_id = self.repo_storage_client._get_id_from_dirpath(p2)
             if obj_id not in [x['id'] for x in lastversionfiles]:
                 lastversionfiles.append({
                     "id": obj_id,
                     "version": version_items['version'],
                     "path": item
                 })
     return lastversionfiles
Example #10
def parse_new_dir(directory, pwd, key):
    """Encrypt every file into directory/tmp~ under a hashed name and
    return a mapping of hashed name -> original path."""
    file_dict = {}
    curdir = os.getcwd()
    tmp_dir = os.path.join(directory, 'tmp~')
    if not os.path.exists(tmp_dir):
        os.makedirs(tmp_dir)
    d = Dir(directory)
    for root, dirs, files in d.walk():
        for filename in files:
            # Skip the '~'-suffixed work directories and files.
            if filename[-1] == '~' or root[-1] == '~':
                continue
            original_file = os.path.join(root, filename)
            rel_path = root[len(curdir) + 1:] + '/' + filename
            # hashlib needs bytes under Python 3
            new_file = hashlib.sha256((pwd + rel_path).encode('utf-8')).hexdigest()
            encrypt_file(key, original_file, os.path.join(tmp_dir, new_file))
            file_dict[new_file] = original_file
    return file_dict
Example #11
def get_full_and_incremental(key, cache_path=None):
    """ From a directory as source, iterate over states files from a full backup,
    till the end/or another full backup. The first item is actually the full backup. """
    if cache_path is None:
        cache_path = tempfile.gettempdir()

    _dir = Dir(cache_path)
    last_full = _dir.get('{0}.full.*'.format(key), sort_reverse=True, abspath=True)
    last_full_date, last_full_dt = _extract_dt_from_key(last_full)
    previous_state = FileFinder.check_key('state', key, last_full_dt)
    yield last_full, None, last_full_dt

    for s_file in _dir.files('{0}.state.*'.format(key)):
        s_str = '.'.join(s_file.split('.')[-3:-1])
        s_dt = datetime.strptime(s_str, '%Y-%m-%dT%H:%M:%S.%f')
        if s_dt > last_full_dt and not FileFinder.check_key('full', key, s_dt):
            yield s_file, previous_state, s_dt
            previous_state = s_file
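
# A hedged usage sketch ('mydir' is a hypothetical backup key): the first
# tuple is the full backup itself; each later tuple pairs a state file with
# the state that preceded it.
for archive, previous_state, dt in get_full_and_incremental('mydir'):
    print(dt, archive, previous_state)
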
Example #12
    def testDirectory(self):
        tmpdir = tempfile.mkdtemp()
        with open(os.path.join(tmpdir, 'testfile1'), 'wb') as fh:
            fh.write(os.urandom(256 << 10))
        testdir = os.path.join(tmpdir, 'testdir')
        os.mkdir(testdir)
        with open(os.path.join(testdir, 'testfile2'), 'wb') as fh:
            fh.write(os.urandom(256 << 10))

        dir_br = put_directory(self.server, tmpdir)

        dest = tempfile.mkdtemp()

        get_directory(self.server, dir_br, dest)

        # Check the two directories are equal using dirtools' Dir.hash()
        self.assertEqual(Dir(tmpdir).hash(), Dir(dest).hash())

        shutil.rmtree(tmpdir)
        shutil.rmtree(dest)
Example #14
    def on_any_event(self, event):
        LOG.debug('%s %s', event.event_type, event.src_path)

        new_ref = Dir(self.path).hash()
        snapshot = DirectorySnapshot(self.path, recursive=True)
        diff = DirectorySnapshotDiff(self.ref_snapshot, snapshot)

        # compare directory hashes to determine whether a recompile is needed
        if self.ref != new_ref:
            self.print_changes(diff)
            self.runtime.compile()
            self.ref = new_ref
            self.ref_snapshot = snapshot
Example #16
    def __init__(self, app, src, dst):
        """src is absolute and dst is relative to the app's static_folder """
        static_folder = app.static_folder
        # Add "cache busting" without flask.assets
        abs_src = os.path.join(static_folder, src)
        abs_dst = os.path.join(static_folder, dst)

        directory = Dir(abs_src)
        #####################################################
        # Make sure the destination directory is different if
        # any of the files has changed
        # This is a form of cache busting.
        #####################################################
        # - get a hash of the directory;
        # - take only the first 16 hex digits (= 64 bits)
        uniq = directory.hash()[:16]

        dst_dirtree_relpath = os.path.join(dst, uniq)
        dst_dirtree_abspath = os.path.join(static_folder, dst_dirtree_relpath)

        if not os.path.exists(dst_dirtree_abspath):
            shutil.copytree(abs_src, dst_dirtree_abspath)

        self.dst_url = dst_dirtree_relpath
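
# Hypothetical usage; CacheBustedDir is an assumed name for the class whose
# __init__ is shown above, and app is a configured Flask application:
busted = CacheBustedDir(app, src='vendor/lib', dst='cache/lib')
print(busted.dst_url)  # e.g. 'cache/lib/<16-hex-digit-hash>'
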
Example #17
def _put_directory(con, path, permanode=False):
    """ Put a directory, this function is called recursively over sub-directories. """
    # Initialization of the current directory schema.
    directory = Directory(con, path)
    # Since a Directory must point to a static set, we initialize one too.
    static_set = StaticSet(con)
    static_set_members = []
    # Don't recurse via walk() here; _put_directory already calls
    # itself on each sub-directory, so take only the top-level entry.
    root, dirs, files = next(Dir(path).walk())
    for f in files:
        static_set_members.append(con.put_file(os.path.join(root, f)))
    for d in dirs:
        static_set_members.append(
            _put_directory(con, os.path.join(root, d), permanode=False))

    static_set_br = static_set.save(static_set_members)

    # We return the directory blobRef
    return directory.save(static_set_br, permanode=permanode)
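
# testDirectory above calls a module-level put_directory(con, path); a minimal
# sketch, assuming it merely roots the recursion with a permanode:
def put_directory(con, path, permanode=True):
    return _put_directory(con, path, permanode=permanode)
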
Example #18
def _put_mutable_directory(con, path):
    """ Put a mutable directory, this function is called recursively over sub-directories. """
    # Initialization of the permanode that will hold the mutable directory
    p = con.permanode()
    p_title = os.path.basename(os.path.normpath(path))
    mutable_files = []
    root, dirs, files = next(Dir(path).walk())  # top-level entry only
    for f in files:
        file_br = con.put_file(os.path.join(root, f), permanode=True)
        mutable_files.append((f, file_br))
    for d in dirs:
        dir_name = os.path.basename(os.path.normpath(os.path.join(root, d)))
        dir_br = _put_mutable_directory(con, os.path.join(root, d))
        mutable_files.append((dir_name, dir_br))

    p_br = p.save(title=p_title)
    for f_filename, f_br in mutable_files:
        p.add_camli_path(f_filename, f_br)

    # We return the permanode blobRef
    return p_br
Example #19
import json

import xxhash
from dirtools import Dir, DirState


def _xxhash_file(filepath, blocksize=4096):
    # Stream the file through xxhash in fixed-size blocks.
    xxhash64 = xxhash.xxh64()
    with open(filepath, 'rb') as fp:
        while True:
            data = fp.read(blocksize)
            if not data:
                break
            xxhash64.update(data)
    return xxhash64


def xxhash_file(filepath, blocksize=4096):
    hash = _xxhash_file(filepath, blocksize)
    return hash.hexdigest()


d = Dir("C:\\Modding\\WJModlists\\NOISE\\mods\\Interesting NPCs SE")
dir_state = DirState(d, None, xxhash_file)

#with open("./3dnpc_state.json", 'w') as f:
#            f.write(json.dumps(dir_state.state))

old_state = DirState(d,
                     DirState.from_json('./out/3dnpc_state.json').state,
                     xxhash_file)

diff = dir_state - old_state

with open("./out/diff.json", 'w') as f:
    f.write(json.dumps(diff))
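
# A hedged note on the diff layout, based on dirtools' DirState: subtraction
# returns a plain dict with 'created', 'updated', 'deleted' and 'deleted_dirs'
# lists of relative paths, so it can be inspected directly:
for change_type in ('created', 'updated', 'deleted'):
    for rel_path in diff.get(change_type, []):
        print(change_type, rel_path)
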
Example #20
from dirtools import Dir

path = '/Users/Bart/Downloads/Crimediggers/Lara/output'

d = Dir(path, exclude_file='.gitignore')

files = d.files()
for filename in files:
    print(filename)  # print() for Python 3; the original used a Python 2 print statement
Example #21
    def __init__(self, runtime, path):
        self.runtime = runtime
        self.ref = Dir(path).hash()
        self.ref_snapshot = DirectorySnapshot(path, recursive=True)
        self.path = path
def incremental_backup(path, cache_path=None):
    if cache_path is None:
        cache_path = tempfile.gettempdir()

    files = []

    backup_date = datetime.utcnow()
    backup_dir = Dir(path)
    backup_key = backup_dir.path.strip('/').split('/')[-1]

    # TODO check if it's really the last state on the remote storage
    last_state = Dir(cache_path).get('{0}.state.*'.format(backup_key),
                                     sort_reverse=True,
                                     abspath=True)

    last_state = DirState.from_json(last_state)
    current_state = DirState(backup_dir)

    # use the cache_path argument, not a module-level CACHE_PATH
    last_sv = sigvault.SigVaultReader(cache_path, backup_key)

    diff = current_state - last_state

    state_file = current_state.to_json(cache_path,
                                       dt=backup_date,
                                       fmt='{0}.state.{1}.json')
    files.append(state_file)

    created_file = FileFinder.make_key('created', backup_key, backup_date)
    created_file = os.path.join(cache_path, created_file)
    # Store files from diff['created'] into a new archive
    created_file = process_created(created_file, diff['created'],
                                   backup_dir.path)
    if created_file:
        files.append(created_file)

    updated_file = FileFinder.make_key('updated', backup_key, backup_date)
    updated_file = os.path.join(cache_path, updated_file)

    # Compute and store delta from the list of updated files
    updated_file = process_updated(updated_file, diff['updated'],
                                   backup_dir.path, last_sv)
    if updated_file:
        files.append(updated_file)

    if diff['created'] or diff['updated']:
        sigvault_file = FileFinder.make_key('sigvault', backup_key,
                                            backup_date)

        # use the cache_path argument, not a module-level CACHE_PATH
        sigvault_file = os.path.join(cache_path, sigvault_file)
        new_sv = sigvault.open_vault(sigvault_file,
                                     'w',
                                     base_path=backup_dir.path)
        for f in itertools.chain(diff['created'], diff['updated']):
            new_sv.add(f)
        new_sv.close()
        files.append(sigvault_file)

    files = [{'path': f, 'size': os.path.getsize(f)} for f in files]
    total = sum(f['size'] for f in files)

    return {
        'backup_key': backup_key,
        'backup_date': backup_date,
        'files': files,
        'total': total
    }
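
# A hedged end-to-end sketch (the path is hypothetical): take one full
# backup, then later capture only the changes.
manifest = full_backup('/path/to/mydir')
# ... files are created or modified over time ...
delta = incremental_backup('/path/to/mydir')
print(delta['total'], 'bytes across', len(delta['files']), 'file(s)')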