Example #1
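    # Clone out of the shared on-disk cache into a temporary working copy
    # that removes itself (temp.disappearing_dir), then initialize git.Repo
    # on top of the fresh copy.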
    def __init__(self, url, *path_components, enough_to_have=None):
        with Repo(url, *path_components, enough_to_have=enough_to_have) as r:
            cache_path = path.downloads('git', *path_components, makedirs=True)
            self.path = temp.disappearing_dir(os.path.dirname(cache_path),
                                              path_components[-1])
            self.self_destruct = False
            git.Repo.clone_from(r.path, self.path)
        git.Repo.__init__(self, self.path)
Example #2
def validate_rpm_repository(log, _unused_src, dst):
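    # Point dnf at the mirror via a throwaway .repo file and force a cache
    # refresh; with check=True, a broken or unreachable repo fails loudly.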
    repo_desc_for_mirroring = textwrap.dedent(f'''
        [repo]
        baseurl = {dst}
        name = repo
        enabled = 1
        gpgcheck = 0
    ''')
    repodir = temp.disappearing_dir()
    cachedir = temp.disappearing_dir()
    with open(os.path.join(repodir, 'whatever.repo'), 'w') as f:
        f.write(repo_desc_for_mirroring)
    run = log.pipe_powered(subprocess.run,
                           stdout=logging.DEBUG,
                           stderr=logging.WARNING)
    run([
        'dnf', '--setopt=skip_if_unavailable=0',
        f'--setopt=reposdir={repodir}', f'--setopt=cachedir={cachedir}',
        '--repoid=repo', '--refresh', 'makecache'
    ],
        check=True)
Example #3
def clone_and_load(from_path, name_hint=None):
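    # Load the pickled machine, let its clone hooks populate a fresh
    # disappearing dir, then re-pickle it there and load it from the new path.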
    log.debug(f'clone {from_path}')
    temp_path = temp.disappearing_dir(from_path, hint=name_hint)
    log.debug(f'temp = {temp_path}')
    os.makedirs(temp_path, exist_ok=True)
    with open(os.path.join(from_path, 'machine.clpickle'), 'rb') as f:
        m = cloudpickle.load(f)
    m.log = log.Sublogger('<cloning>')
    m.hooks.clone(temp_path)
    m._parent_path = os.path.realpath(from_path)
    m.path = temp_path
    with open(os.path.join(m.path, 'machine.clpickle'), 'wb') as f:
        cloudpickle.dump(m, f)
    return _load_from_path(temp_path)
Example #4
def method_reposync(log,
                    src,
                    base,
                    dst,
                    arches=['noarch', 'x86_64'],
                    source='auto',
                    metadata='download',
                    options=[],
                    excludes=[]):
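    # mirror src into dst with dnf reposync, then sort out metadata as asked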
    if source == 'auto':
        source = '/source' in src or '/SRPM' in src
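    # --exclude wants package names, so strip a trailing .rpm from file names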
    excludes = [e[:-4] if e.endswith('.rpm') else e for e in excludes]
    repo_desc_for_mirroring = textwrap.dedent(f'''
        [repo]
        baseurl = {src}
        name = repo
        enabled = 1
        gpgcheck = 0
    ''')
    repodir = temp.disappearing_dir()
    with open(os.path.join(repodir, 'whatever.repo'), 'w') as f:
        f.write(repo_desc_for_mirroring)
    run = log.pipe_powered(subprocess.run,
                           stdout=logging.INFO,
                           stderr=logging.WARNING)
    run([
        'dnf', f'--setopt=reposdir={repodir}', 'reposync', '--norepopath',
        f'--download-path={dst}', '--repoid=repo', '--delete', '--remote-time'
    ] + [f'--arch={arch}' for arch in arches] +
        (['--download-metadata'] if metadata != 'generate' else []) +
        (['--source'] if source else []) +
        (['--exclude=' + ','.join(excludes)] if excludes else []) + options,
        check=True)
    run = log.pipe_powered(
        subprocess.run,  # either too silent or too noisy =/
        stdout=logging.INFO,
        stderr=logging.INFO)
    createrepo_c_options = ['-v', '--error-exit-val', '--ignore-lock']
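    # metadata='download' keeps what reposync fetched; 'regenerate' updates it
    # in place; 'generate' rebuilds it from the synced packages alone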
    if metadata == 'regenerate':
        log.info('regenerating metadata...')
        run(['createrepo_c'] + createrepo_c_options + ['--update', dst],
            check=True)
    elif metadata == 'generate':
        log.info('generating metadata from scratch...')
        run(['createrepo_c'] + createrepo_c_options + [dst], check=True)
Example #5
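    # A machine lives in its own disappearing dir under path.MACHINES;
    # the clone hook registered below carries the log file over to clones.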
    def __init__(self, backend_name, sealed=True, expire_in='7d'):
        self.hooks = hooks.HookManager()
        os.makedirs(path.MACHINES, exist_ok=True)
        self.path = temp.disappearing_dir(path.MACHINES)
        self._parent_path = path.MACHINES
        # States: loaded -> spun_up -> spun_down -> saved/dropped
        self._state = 'spun_down'
        self._transient = False
        self._up_counter = 0
        self.sealed = sealed
        self.expiration = expiration.Expiration(expire_in)
        self.time_desync = time_desync.TimeDesync(self)
        self.backend = backend_name
        self.log = log.Sublogger(f'plugins.backend.{backend_name}',
                                 os.path.join(self.path, 'log.txt'))
        self.log.debug(f'created {backend_name}')
        self.hooks.clone.append(lambda to: reflink.auto(
            os.path.join(self.path, 'log.txt'), os.path.join(to, 'log.txt')))
Example #6
def method_reposync(log, src, base, dst,
                    arches=['noarch', 'x86_64'], source='auto', options=[]):
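    # source='auto': guess from the URL whether this is a source (SRPM) repo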
    if source == 'auto':
        source = '/source' in src or '/SRPM' in src
    repo_desc_for_mirroring = textwrap.dedent(f'''
        [repo]
        baseurl = {src}
        name = repo
        enabled = 1
        gpgcheck = 0
    ''')
    repodir = temp.disappearing_dir()
    with open(os.path.join(repodir, 'whatever.repo'), 'w') as f:
        f.write(repo_desc_for_mirroring)
    run = log.pipe_powered(subprocess.run,
                           stdout=logging.INFO, stderr=logging.WARNING)
    run(['dnf', f'--setopt=reposdir={repodir}', 'reposync',
         f'--download-path={dst}', '--norepopath',
         '--download-metadata', '--delete', '--repoid=repo'] +
        [f'--arch={arch}' for arch in arches] + options +
        (['--source'] if source else []),
        check=True)
Example #7
    def __init__(self, url, *path_components, enough_to_have=None):
        if not path_components:
            path_components = [url.replace('/', '::')]
        self.url = url
        cache_path = path.downloads('git', *path_components, makedirs=True)
        self.path = temp.disappearing_dir(os.path.dirname(cache_path),
                                          path_components[-1])
        lock_working_copy_path = self.path + '-lock'
        lock_cache_path = cache_path + '-lock'
        lock.Lock.__init__(self, lock_working_copy_path)
        sources = saviour_sources()
        self.self_destruct = False
        with lock.Lock(lock_cache_path), lock.Lock(lock_working_copy_path):
            cache_is_enough = False
            if os.path.exists(cache_path):
                try:
                    cr = git.Repo(cache_path)
                    cache_is_enough = (enough_to_have
                                       and _has_rev(cr, enough_to_have))
                except git.GitError as e:
                    log.error(f'something wrong with git cache {cache_path}')
                    log.error(str(e))
            _remove(self.path)

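            # try the sources in order; only a failure at the last one is fatal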
            for i, (source, cache) in enumerate(sources):
                last_source = i == len(sources) - 1

                if cache and cache_is_enough:
                    log.info(f'not re-fetching {url} from {source} '
                             f'because {enough_to_have} '
                             'is already present in cache')
                    git.Repo.clone_from(cache_path, self.path, mirror=True)
                    break

                if source == 'local':
                    surl = path.saviour(url).replace('//', '/')  # workaround
                    if not os.path.exists(surl) and not last_source:
                        continue
                    log.info(f'cloning {url} from local saviour mirror')
                    git.Repo.clone_from(surl, self.path, mirror=True)
                    break
                elif source == 'direct':
                    surl = url
                else:
                    surl = source + '/' + url
                    surl = 'http://' + surl if '://' not in source else surl

                log.info(f'cloning {url} from {source} '
                         f'cache_exists={os.path.exists(cache_path)}...')
                try:
                    # TODO: bare clone
                    # no harm in referencing cache, even w/o cached+
                    git.Repo.clone_from(surl,
                                        self.path,
                                        mirror=True,
                                        dissociate=True,
                                        reference_if_able=cache_path)
                except git.GitError:
                    log.warning(f'could not clone {url} from {source}')
                    if last_source:
                        raise
                    continue
                break

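            # refresh the cache from the fresh mirror clone
            # (reflink.auto presumably reflinks when the filesystem supports it)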
            _remove(cache_path)
            reflink.auto(self.path, cache_path)
            git.Repo.__init__(self, self.path)
            self.remotes[0].set_url(url)
        self.self_destruct = True
Example #8
    def __init__(self, url, *path_components, enough_to_have=None):
        assert path_components
        self.url = url
        cache_path = path.downloads('git', *path_components, makedirs=True)
        cache_exists = os.path.exists(cache_path)
        self.path = temp.disappearing_dir(os.path.dirname(cache_path),
                                          path_components[-1])
        lock_working_copy_path = self.path + '-lock'
        lock_cache_path = cache_path + '-lock'
        lock.Lock.__init__(self, lock_working_copy_path)
        update_not_needed = None
        sources = saviour_sources()
        self.self_destruct = False
        with lock.Lock(lock_cache_path), lock.Lock(lock_working_copy_path):
            _remove(self.path)

            for i, (source, cache) in enumerate(sources):
                last_source = i == len(sources) - 1

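                # a cache that already has the wanted rev spares any refetching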
                if cache and cache_exists and update_not_needed is None:
                    cr = git.Repo(cache_path)
                    update_not_needed = enough_to_have and (
                        enough_to_have in (t.name for t in cr.tags) or
                        enough_to_have in (h.name for h in cr.heads) or
                        enough_to_have in (c.hexsha for c in cr.iter_commits())
                        # that's not all revspecs, but best-effort is fine
                    )
                    if update_not_needed:
                        log.info(f'not re-fetching {url} from {source} '
                                 f'because {enough_to_have} '
                                 'is already present in cache')
                        git.Repo.clone_from(cache_path, self.path, mirror=True)
                        break

                if source == 'local':
                    surl = path.saviour(url).replace('//', '/')  # workaround
                    if not os.path.exists(surl) and not last_source:
                        continue
                    log.info(f'cloning {url} from local saviour mirror')
                    git.Repo.clone_from(surl, self.path, mirror=True)
                    break
                elif source == 'direct':
                    surl = url
                else:
                    surl = source + '/' + url
                    surl = 'http://' + surl if '://' not in source else surl

                log.info(f'cloning {url} from {source} '
                         f'cache_exists={cache_exists}...')
                try:
                    # TODO: bare clone
                    # no harm in referencing cache, even w/o cached+
                    git.Repo.clone_from(surl, self.path, mirror=True,
                                        dissociate=True,
                                        reference_if_able=cache_path)
                except git.GitError:
                    log.warning(f'could not clone {url} from {source}')
                    if last_source:
                        raise
                    continue
                break

            _remove(cache_path)
            reflink.auto(self.path, cache_path)
            git.Repo.__init__(self, self.path)
            self.remotes[0].set_url(url)
        self.self_destruct = True
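A note on the shared helper: every example above leans on temp.disappearing_dir, which hands back a temporary directory that later removes itself. Its real implementation is not shown on this page; the sketch below is only a guess at the interface the snippets rely on (the parent/hint parameters and the exit-time cleanup are assumptions).

import atexit
import shutil
import tempfile

def disappearing_dir(parent=None, hint=''):
    # make a temp dir (optionally under `parent`, with `hint` in its name)
    # and schedule best-effort removal at interpreter exit
    d = tempfile.mkdtemp(prefix=hint + '-' if hint else 'tmp-', dir=parent)
    atexit.register(shutil.rmtree, d, ignore_errors=True)
    return d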