def get_full_backups(key, cache_path=None):
    """Return the timestamp extracted from every full-backup file for *key*,
    newest first.

    :param key: backup key used to build the glob pattern.
    :param cache_path: directory to scan; defaults to the system temp dir.
    """
    # Fall back to the system temp directory when no cache path is supplied.
    backups_dir = Dir(tempfile.gettempdir() if cache_path is None else cache_path)
    pattern = '{0}.full.*'.format(key)
    matches = backups_dir.files(pattern, sort_reverse=True, abspath=True)
    # _extract_dt_from_key presumably returns (date_str, datetime); keep item [1].
    return [_extract_dt_from_key(name)[1] for name in matches]
Example #2
0
 def __init__(self, base_path='.', key=None):
     """Open every sigvault archive matching *key* under *base_path* and
     keep the resulting tarfile handles, newest archive first."""
     self.tars = []
     vault_dir = Dir(base_path)
     pattern = '{0}.sigvault.*.tgz'.format(key)
     for archive_name in vault_dir.files(pattern, sort_reverse=True):
         # Keep the raw file handle open; tarfile reads from it lazily.
         raw = bltn_open(os.path.join(vault_dir.path, archive_name), 'rb')
         self.tars.append(tarfile.open(fileobj=raw, mode='r:gz'))
 def __init__(self, base_path='.', key=None):
     """Collect open tarfile objects for all sigvault archives of *key*
     found under *base_path*, sorted newest first."""
     self.tars = []
     base = Dir(base_path)
     matching = base.files('{0}.sigvault.*.tgz'.format(key), sort_reverse=True)
     for name in matching:
         # The underlying stream stays open for the tarfile's lifetime.
         stream = bltn_open(os.path.join(base.path, name), 'rb')
         archive = tarfile.open(fileobj=stream, mode='r:gz')
         self.tars.append(archive)
def get_full_backups(key, cache_path=None):
    """Collect the timestamps of all full backups for *key*, newest first.

    :param key: backup key used to build the glob pattern.
    :param cache_path: directory to scan; defaults to the system temp dir.
    """
    if cache_path is None:
        cache_path = tempfile.gettempdir()

    source = Dir(cache_path)
    timestamps = []
    for full_key in source.files('{0}.full.*'.format(key),
                                 sort_reverse=True,
                                 abspath=True):
        # _extract_dt_from_key presumably yields (date_str, datetime).
        timestamps.append(_extract_dt_from_key(full_key)[1])
    return timestamps
def get_full_and_incremental(key, cache_path=None):
    """ From a directory as source, iterate over states files from a full backup,
    till the end/or another full backup. The first item is actually the full backup. """
    cache_path = tempfile.gettempdir() if cache_path is None else cache_path

    source = Dir(cache_path)
    last_full = source.get('{0}.full.*'.format(key), sort_reverse=True, abspath=True)
    _, full_dt = _extract_dt_from_key(last_full)
    prev_state = FileFinder.check_key('state', key, full_dt)
    # The full backup is emitted first and has no previous state.
    yield last_full, None, full_dt

    for state_file in source.files('{0}.state.*'.format(key)):
        # File names embed a timestamp in their last dotted components.
        stamp = '.'.join(state_file.split('.')[-3:-1])
        state_dt = datetime.strptime(stamp, '%Y-%m-%dT%H:%M:%S.%f')
        # Only yield states newer than the full backup that are not
        # themselves superseded by a later full backup.
        if state_dt > full_dt and not FileFinder.check_key('full', key, state_dt):
            yield state_file, prev_state, state_dt
            prev_state = state_file
def get_full_and_incremental(key, cache_path=None):
    """ From a directory as source, iterate over states files from a full backup,
    till the end/or another full backup. The first item is actually the full backup. """
    if cache_path is None:
        cache_path = tempfile.gettempdir()

    backup_dir = Dir(cache_path)
    newest_full = backup_dir.get('{0}.full.*'.format(key),
                                 sort_reverse=True,
                                 abspath=True)
    newest_full_date, newest_full_dt = _extract_dt_from_key(newest_full)
    previous = FileFinder.check_key('state', key, newest_full_dt)
    # First item: the full backup itself (no preceding state).
    yield newest_full, None, newest_full_dt

    for candidate in backup_dir.files('{0}.state.*'.format(key)):
        # The timestamp lives in the last dotted components of the name.
        raw_stamp = '.'.join(candidate.split('.')[-3:-1])
        candidate_dt = datetime.strptime(raw_stamp, '%Y-%m-%dT%H:%M:%S.%f')
        if candidate_dt <= newest_full_dt:
            continue
        # Skip states that coincide with a later full backup.
        if FileFinder.check_key('full', key, candidate_dt):
            continue
        yield candidate, previous, candidate_dt
        previous = candidate
from dirtools import Dir

path = '/Users/Bart/Downloads/Crimediggers/Lara/output'

# List every file under `path`, applying the exclusion rules from .gitignore.
d = Dir(path, exclude_file='.gitignore')

files = d.files()
for entry in files:
    # Fixed: the original used the Python 2 `print file` statement (a
    # SyntaxError under Python 3) and shadowed the Python 2 builtin `file`.
    print(entry)