Example #1
def deduplicate(log, *subpath, timeout=None):
    log.info('locking the deduplication db...')
    with lock.Lock(path.saviour('.duperemove.hashfile-lock'), timeout=timeout):
        log.info('deduplicating...')
        run = log.pipe_powered(subprocess.run,
                               stdout=logging.INFO, stderr=logging.WARNING)
        r = run(['duperemove',
                 '--hashfile', path.saviour('.duperemove.hashfile'),
                 '-hdr', path.saviour('_', *subpath)])
        assert r.returncode in (0, 22)  # nothing to deduplicate
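A minimal usage sketch, assuming the same fingertip helpers seen above (log.Sublogger, lock.LockTimeout) and a hypothetical 'alpine' subpath of the saviour tree; it mirrors how the caller in Example #7 invokes this function:

sublog = log.Sublogger('dedup alpine')  # hypothetical sublogger name
try:
    deduplicate(sublog, 'alpine', timeout=1)  # give up after 1s if the db is locked
except lock.LockTimeout:
    log.warning('skipped deduplication, db was locked')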
Example #2
def _deduplicate(log, db_name, resource_name, timeout=None):
    log.info(f'locking the deduplication db {db_name}...')
    hashfilesdir = path.saviour('.duperemove', 'hashfiles')
    if not os.path.exists(hashfilesdir):
        os.makedirs(hashfilesdir)
        os.system(f'chattr +C {hashfilesdir} || true')
    db_file = path.saviour('.duperemove', 'hashfiles', db_name)
    db_lock = path.saviour('.duperemove', 'locks', db_name, makedirs=True)
    with lock.Lock(db_lock, timeout=timeout):
        log.info(f'deduplicating {resource_name} ({db_name})...')
        run = log.pipe_powered(subprocess.run,
                               stdout=logging.INFO,
                               stderr=logging.WARNING)
        r = run([
            'duperemove', '--dedupe-options=nofiemap', '--io-threads=2',
            '--cpu-threads=2', '--hashfile', db_file, '-hdr',
            path.saviour('_', resource_name, 'data')
        ])
        assert r.returncode in (0, 22)  # nothing to deduplicate
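Compared to Example #1, this variant keeps one hashfile and one lock per database name, so unrelated resources no longer serialize on a single global lock. It also marks the hashfiles directory with chattr +C, which on btrfs disables copy-on-write for files created there, presumably to keep the constantly rewritten hashfile from fragmenting; the trailing || true makes that a best-effort step on filesystems without the attribute.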
Example #3
def is_fetcheable(source, url, timeout=2):
    if source == 'local':
        return os.path.exists(path.saviour(url))
    elif source != 'direct':
        url = source + '/' + url
        url = 'http://' + url if '://' not in source else url
    try:
        r = requests.head(url, allow_redirects=False, timeout=timeout)
        return r.status_code < 400
    except (requests.exceptions.BaseHTTPError, urllib3.exceptions.HTTPError,
            requests.exceptions.Timeout, OSError) as ex:
        log.warning(f'{ex}')
        return False
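A hedged sketch of the three source forms this helper distinguishes, matching how saviour_sources() entries are consumed in Example #4; the hostnames and paths below are illustrative, not taken from any real config:

# 'local': only checks for the path in the on-disk saviour tree
is_fetcheable('local', 'dl-cdn.alpinelinux.org/alpine/v3.13/main/x86')
# 'direct': probes the URL itself with a HEAD request
is_fetcheable('direct', 'https://dl-cdn.alpinelinux.org/alpine/v3.13/main/x86')
# anything else is a mirror prefix; 'http://' is prepended when it has no scheme
is_fetcheable('mirror.example.com', 'dl-cdn.alpinelinux.org/alpine/v3.13/main/x86')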
Example #4
    def fetch(self, url, out_path):
        sources = saviour_sources()
        for i, (source, cache) in enumerate(sources):
            if is_fetcheable(source, url) or i == len(sources) - 1:
                if source == 'local':
                    reflink.auto(path.saviour(url), out_path)
                    return
                sess = self._get_requests_session(direct=not cache)
                if source == 'direct':
                    surl = url
                else:
                    surl = source + '/' + url
                    surl = 'http://' + surl if '://' not in source else surl
                log.debug(f'fetching{"/caching" if cache else ""} '
                          f'{os.path.basename(url)} from {surl}')
                r = sess.get(surl)  # not raw because that punctures cache
                with open(out_path, 'wb') as f:
                    f.write(r.content)
                return
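Note the or i == len(sources) - 1 guard: the last source returned by saviour_sources() is attempted even when the HEAD probe in is_fetcheable fails, so fetch degrades to one final direct attempt rather than returning without ever writing out_path.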
Example #5
def mirror(config, *what_to_mirror, deduplicate=None):
    total_failures = []
    failures = collections.defaultdict(list)

    with open(config) as f:
        config = ruamel.yaml.YAML(typ='safe').load(f)
    if 'mirror' in config and not config['mirror']:
        log.warning('mirroring is disabled in config')
        return

    hows, whats = config['how'], config['what']
    if not what_to_mirror:
        what_to_mirror = whats.keys()
    else:
        what_to_mirror = ([
            k for k in whats.keys() if any(
                fnmatch.fnmatch(k, req) for req in what_to_mirror)
        ] + [k for k in what_to_mirror if '=' in k])

    if not what_to_mirror:
        log.error('nothing to mirror')
        return

    for resource in what_to_mirror:
        log.debug(f'processing {resource}...')

        if '=' not in resource:  # example: alpine-3.13
            resource_name, tail = resource, ''
            s = whats[resource_name]
        else:  # example: alpine-3.13=alpine/v3.13/main/x86
            resource_name, s = resource.split('=', 1)
            # FIXME UGLY: config overrides are stronger than = (more syntax?)
            # TODO: whats shouldn't be a dict, I think, just a list of strings
            if resource_name in whats:
                s = whats[resource_name]

        if s is None:
            s = resource_name
        if '/' in s:
            how_name, suffix = s.split('/', 1)
            suffix = '/' + suffix
        else:
            how_name, suffix = s, ''

        try:
            how = hows[how_name]
        except KeyError:
            log.error(f'missing how section on {how_name}')
            raise SystemExit()

        url = how['url'] + suffix
        method = how['method']
        sources = (how['sources'] if 'sources' in how else [how['url']])
        sources = [s + suffix for s in sources]
        extra_args = {
            k: v
            for k, v in how.items()
            if k not in ('url', 'sources', 'method', 'validate', 'deduplicate')
        }

        if f'method_{method}' not in globals():
            log.error(f'unsupported method {method}')
            raise SystemExit()

        meth = globals()[f'method_{method}']
        symlink = path.saviour(url.rstrip('/'))
        # usually symlink points to data, but while we're working on it,
        # it temporarily points to a consistent snapshot of it named `snap`
        data = os.path.realpath(path.saviour('_', resource_name, 'data'))
        snap = os.path.realpath(path.saviour('_', resource_name, 'snap'))
        temp = os.path.realpath(path.saviour('_', resource_name, 'temp'))
        lockfile = path.saviour('_', resource_name) + '-lock'
        assert data.startswith(os.path.realpath(path.SAVIOUR))
        assert snap.startswith(os.path.realpath(path.SAVIOUR))
        assert temp.startswith(os.path.realpath(path.SAVIOUR))

        sublog = log.Sublogger(f'{method} {resource_name}')
        sublog.info('locking...')
        with lock.Lock(lockfile):
            os.makedirs(os.path.dirname(snap), exist_ok=True)

            if os.path.exists(temp):
                sublog.info('removing stale temp...')
                _remove(temp)
            if os.path.exists(symlink):  # it's already published
                if os.path.exists(data) and not os.path.exists(snap):
                    # `data` is present and is the best we have to publish
                    sublog.info('snapshotting...')
                    reflink.always(data, temp, preserve=True)
                    os.rename(temp, snap)
                if os.path.exists(snap):
                    # link to a consistent snapshot while we work on `data`
                    _symlink(snap, symlink)

            for source in sources:
                sublog.info(f'trying {source}...')
                try:
                    meth(sublog, source, snap, data, **extra_args)
                    assert os.path.exists(data)
                    if 'validate' in how:
                        sublog.info(f'validating with {how["validate"]}...')
                        validator = globals()[f'validate_{how["validate"]}']
                        validator(sublog, source, data)
                        sublog.info('validated')
                    break
                except Exception as _:
                    traceback.print_exc()
                    failures[resource_name].append(source)
                    fingertip.util.log.warning(f'failed to mirror {source}')

            if len(failures[resource_name]) == len(sources):
                sublog.error(f'failed to mirror '
                             f'from all {len(sources)} sources')
                total_failures.append(resource_name)
                continue

            _symlink(data, symlink)
            if os.path.exists(snap):
                os.rename(snap, temp)  # move it out of the way asap
                sublog.info('removing now obsolete snapshot...')
                _remove(temp)

        how_deduplicate = how.get('deduplicate', True)
        db_name = how_deduplicate if how_deduplicate is not True else how_name
        if how_deduplicate and deduplicate is not False:
            try:
                _deduplicate(sublog, db_name, resource_name, timeout=1)
            except lock.LockTimeout:
                log.warning(f'skipped deduplication of {resource_name}, '
                            f'db {db_name} was locked')
    if total_failures:
        fingertip.util.log.error(f'failed: {", ".join(total_failures)}')
        raise FailureToMirrorError(", ".join(total_failures))
    log.info('saviour has completed mirroring')
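A minimal sketch of the configuration shape this function expects once ruamel.yaml has loaded it, reconstructed only from the keys read above; the section names, URLs and method/validator names are illustrative assumptions, not the project's shipped config:

config = {
    'mirror': True,  # optional kill switch checked at the top
    'how': {
        'alpine': {  # hypothetical 'how' section
            'url': 'https://dl-cdn.alpinelinux.org/alpine',
            'method': 'rsync',  # must resolve to a method_rsync() in this module
            'sources': ['rsync://dl-cdn.alpinelinux.org/alpine'],  # optional, defaults to [url]
            'validate': 'repomd',  # optional, resolves to validate_repomd(); hypothetical name
            'deduplicate': 'alpine',  # optional: False skips, a string names a shared hashfile db
            # any remaining key is forwarded to the method_* function as a keyword argument
        },
    },
    'what': {
        'alpine-3.13': 'alpine/v3.13/main/x86',  # '<how name>/<suffix>'
        'alpine': None,  # None: the how name defaults to the resource name itself
    },
}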
Example #6
    def __init__(self, url, *path_components, enough_to_have=None):
        if not path_components:
            path_components = [url.replace('/', '::')]
        self.url = url
        cache_path = path.downloads('git', *path_components, makedirs=True)
        self.path = temp.disappearing_dir(os.path.dirname(cache_path),
                                          path_components[-1])
        lock_working_copy_path = self.path + '-lock'
        lock_cache_path = cache_path + '-lock'
        lock.Lock.__init__(self, lock_working_copy_path)
        sources = saviour_sources()
        self.self_destruct = False
        with lock.Lock(lock_cache_path), lock.Lock(lock_working_copy_path):
            cache_is_enough = False
            if os.path.exists(cache_path):
                try:
                    cr = git.Repo(cache_path)
                    cache_is_enough = (enough_to_have
                                       and _has_rev(cr, enough_to_have))
                except git.GitError as e:
                    log.error(f'something wrong with git cache {cache_path}')
                    log.error(str(e))
                _remove(self.path)

            for i, (source, cache) in enumerate(sources):
                last_source = i == len(sources) - 1

                if cache and cache_is_enough:
                    log.info(f'not re-fetching {url} from {source} '
                             f'because {enough_to_have} '
                             'is already present in cache')
                    git.Repo.clone_from(cache_path, self.path, mirror=True)
                    break

                if source == 'local':
                    surl = path.saviour(url).replace('//', '/')  # workaround
                    if not os.path.exists(surl) and not last_source:
                        continue
                    log.info(f'cloning {url} from local saviour mirror')
                    git.Repo.clone_from(surl, self.path, mirror=True)
                    break
                elif source == 'direct':
                    surl = url
                else:
                    surl = source + '/' + url
                    surl = 'http://' + surl if '://' not in source else surl

                log.info(f'cloning {url} from {source} '
                         f'cache_exists={os.path.exists(cache_path)}...')
                try:
                    # TODO: bare clone
                    # no harm in referencing cache, even w/o cached+
                    git.Repo.clone_from(surl,
                                        self.path,
                                        mirror=True,
                                        dissociate=True,
                                        reference_if_able=cache_path)
                except git.GitError:
                    log.warning(f'could not clone {url} from {source}')
                    if last_source:
                        raise
                    continue
                break

            _remove(cache_path)
            reflink.auto(self.path, cache_path)
            git.Repo.__init__(self, self.path)
            self.remotes[0].set_url(url)
        self.self_destruct = True
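A few design points in this constructor: the resulting object is both a lock.Lock on the working copy and, once cloned, a git.Repo (both __init__ calls are made explicitly); the mirror clone is materialized in a disappearing temp directory next to the cache and only reflinked back over cache_path after a successful clone, so an interrupted clone cannot corrupt the cache; and self_destruct is only flipped to True once construction has fully succeeded, presumably gating whatever cleanup logic inspects that flag later.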
Example #7
def mirror(config, *what_to_mirror):
    total_failures = []
    failures = collections.defaultdict(list)

    with open(config) as f:
        config = ruamel.yaml.YAML(typ='safe').load(f)
    hows, whats = config['how'], config['what']
    if not what_to_mirror:
        what_to_mirror = whats.keys()
    else:
        what_to_mirror = [k for k in whats.keys()
                          if any((fnmatch.fnmatch(k, req)
                                  for req in what_to_mirror))]

    for resource_name in what_to_mirror or whats.keys():
        s = whats[resource_name]
        log.debug(f'processing {resource_name}...')

        if s is None:
            how, suffix = resource_name, ''
        elif '/' in s:
            how, suffix = s.split('/', 1)
            suffix = '/' + suffix
        else:
            how, suffix = s, ''

        try:
            how = hows[how]
        except KeyError:
            log.error(f'missing how section on {how}')
            raise SystemExit()

        url = how['url'] + suffix
        method = how['method']
        sources = (how['sources'] if 'sources' in how else [how['url']])
        sources = [s + suffix for s in sources]
        extra_args = {k: v for k, v in how.items()
                      if k not in ('url', 'sources', 'method')}

        if f'method_{method}' not in globals():
            log.error(f'unsupported method {method}')
            raise SystemExit()

        meth = globals()[f'method_{method}']
        symlink = path.saviour(url.rstrip('/'))
        # usually symlink points to data, but while we're working on it,
        # it temporarily points to a consistent snapshot of it named `snap`
        data = path.saviour('_', resource_name, 'data')
        snap = path.saviour('_', resource_name, 'snap')
        temp = path.saviour('_', resource_name, 'temp')
        lockfile = path.saviour('_', resource_name) + '-lock'
        assert data.startswith(path.SAVIOUR)
        assert snap.startswith(path.SAVIOUR)
        assert temp.startswith(path.SAVIOUR)

        sublog = log.Sublogger(f'{method} {resource_name}')
        sublog.info('locking...')
        with lock.Lock(lockfile):
            os.makedirs(os.path.dirname(snap), exist_ok=True)

            if os.path.exists(temp):
                sublog.info('removing stale temp...')
                _remove(temp)
            if os.path.exists(symlink):  # it's already published
                if os.path.exists(data) and not os.path.exists(snap):
                    # `data` is present and is the best we have to publish
                    sublog.info('snapshotting...')
                    reflink.always(data, temp, preserve=True)
                    os.rename(temp, snap)
                if os.path.exists(snap):
                    # link to a consistent snapshot while we work on `data`
                    _symlink(snap, symlink)

            for source in sources:
                sublog.info(f'trying {source}...')
                try:
                    meth(sublog, source, snap, data, **extra_args)
                    assert os.path.exists(data)
                    break
                except Exception as _:
                    traceback.print_exc()
                    failures[resource_name].append(source)
                    fingertip.util.log.warning(f'failed to mirror {source}')

            if len(failures[resource_name]) == len(sources):
                sublog.error(f'failed to mirror '
                             f'from all {len(sources)} sources')
                total_failures.append(resource_name)
                continue

            _symlink(data, symlink)
            if os.path.exists(snap):
                os.rename(snap, temp)  # move it out of the way asap
                sublog.info('removing now obsolete snapshot...')
                _remove(temp)

            try:
                deduplicate(sublog, resource_name, timeout=1)
            except lock.LockTimeout:
                log.warning('skipped deduplication, db was locked')
    if total_failures:
        fingertip.util.log.error(f'failed: {", ".join(total_failures)}')
        raise SystemExit()
    log.info('saviour has completed mirroring')
Example #8
    def __init__(self, url, *path_components, enough_to_have=None):
        assert path_components
        self.url = url
        cache_path = path.downloads('git', *path_components, makedirs=True)
        cache_exists = os.path.exists(cache_path)
        self.path = temp.disappearing_dir(os.path.dirname(cache_path),
                                          path_components[-1])
        lock_working_copy_path = self.path + '-lock'
        lock_cache_path = cache_path + '-lock'
        lock.Lock.__init__(self, lock_working_copy_path)
        update_not_needed = None
        sources = saviour_sources()
        self.self_destruct = False
        with lock.Lock(lock_cache_path), lock.Lock(lock_working_copy_path):
            _remove(self.path)

            for i, (source, cache) in enumerate(sources):
                last_source = i == len(sources) - 1

                if cache and cache_exists and update_not_needed is None:
                    cr = git.Repo(cache_path)
                    update_not_needed = enough_to_have and (
                        enough_to_have in (t.name for t in cr.tags) or
                        enough_to_have in (h.name for h in cr.heads) or
                        enough_to_have in (c.hexsha for c in cr.iter_commits())
                        # that's not all revspecs, but best-effort is fine
                    )
                    if update_not_needed:
                        log.info(f'not re-fetching {url} from {source} '
                                 f'because {enough_to_have} '
                                 'is already present in cache')
                        git.Repo.clone_from(cache_path, self.path, mirror=True)
                        break

                if source == 'local':
                    surl = path.saviour(url).replace('//', '/')  # workaround
                    if not os.path.exists(surl) and not last_source:
                        continue
                    log.info(f'cloning {url} from local saviour mirror')
                    git.Repo.clone_from(surl, self.path, mirror=True)
                    break
                elif source == 'direct':
                    surl = url
                else:
                    surl = source + '/' + url
                    surl = 'http://' + surl if '://' not in source else surl

                log.info(f'cloning {url} from {source} '
                         f'cache_exists={cache_exists}...')
                try:
                    # TODO: bare clone
                    # no harm in referencing cache, even w/o cached+
                    git.Repo.clone_from(surl, self.path, mirror=True,
                                        dissociate=True,
                                        reference_if_able=cache_path)
                except git.GitError:
                    log.warning(f'could not clone {url} from {source}')
                    if last_source:
                        raise
                    continue
                break

            _remove(cache_path)
            reflink.auto(self.path, cache_path)
            git.Repo.__init__(self, self.path)
            self.remotes[0].set_url(url)
        self.self_destruct = True