Example #1
    def __init__(self, url, *path_components, enough_to_have=None):
        with Repo(url, *path_components, enough_to_have=enough_to_have) as r:
            cache_path = path.downloads('git', *path_components, makedirs=True)
            self.path = temp.disappearing_dir(os.path.dirname(cache_path),
                                              path_components[-1])
            self.self_destruct = False
            git.Repo.clone_from(r.path, self.path)
        git.Repo.__init__(self, self.path)
Example #2
def hack_around_unpacking(uri, headers, wrong_content):
    log.warning(f're-fetching correct content for {uri}')
    r = requests.get(uri, headers=headers, stream=True, allow_redirects=False)
    h = hashlib.sha256(wrong_content).hexdigest()
    cachefile = path.downloads('fixups', h, makedirs=True)
    if not os.path.exists(cachefile):
        with path.wip(cachefile) as wip:
            with open(wip, 'wb') as f:
                shutil.copyfileobj(r.raw, f)
    with open(cachefile, 'rb') as f:
        return f.read()
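
The function above re-downloads a resource whose previously fetched body turned out to be corrupted and caches the corrected payload under the SHA-256 of the broken one, so each breakage is repaired only once. Below is a self-contained sketch of the same pattern; the cache directory, the function name and the atomic-rename step are assumptions standing in for fingertip's path.downloads('fixups', ...) and path.wip helpers, which are not shown here.

import hashlib
import os
import shutil
import tempfile

import requests

CACHE_DIR = os.path.expanduser('~/.cache/fixups')  # stand-in for path.downloads('fixups')


def refetch_correct_content(uri, headers, wrong_content):
    os.makedirs(CACHE_DIR, exist_ok=True)
    # key the corrected download by the hash of the broken payload we already have
    cachefile = os.path.join(CACHE_DIR, hashlib.sha256(wrong_content).hexdigest())
    if not os.path.exists(cachefile):
        r = requests.get(uri, headers=headers, stream=True, allow_redirects=False)
        # download into a temporary file first so a partial body
        # never lands under the final cache name (cf. path.wip above)
        fd, wip = tempfile.mkstemp(dir=CACHE_DIR)
        with os.fdopen(fd, 'wb') as f:
            shutil.copyfileobj(r.raw, f)
        os.replace(wip, cachefile)
    with open(cachefile, 'rb') as f:
        return f.read()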
Example #3
    def _get_requests_session(self, direct=False):
        if not direct:
            cache = cachecontrol.caches.FileCache(path.downloads('cache'))
            sess = cachecontrol.CacheControl(requests.Session(), cache=cache)
        else:
            sess = requests.Session()
        for uri, kwargs in self._mocks:
            adapter = requests_mock.Adapter()
            adapter.register_uri('HEAD', uri, **kwargs)
            adapter.register_uri('GET', uri, **kwargs)
            sess.mount(uri, adapter)
        return sess
Example #4
    def _get_requests_session(self, direct=False):
        if not direct:
            kwargs = ({'filemode': 0o0660, 'dirmode': 0o0770}
                      if is_cache_group_writeable() else {})
            cache = cachecontrol.caches.FileCache(path.downloads('cache'),
                                                  **kwargs)
            sess = cachecontrol.CacheControl(requests.Session(), cache=cache)
        else:
            sess = requests.Session()
        for uri, kwargs in self._mocks.items():
            adapter = requests_mock.Adapter()
            adapter.register_uri('HEAD', uri, **kwargs)
            adapter.register_uri('GET', uri, **kwargs)
            sess.mount(uri, adapter)
        return sess
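
Both versions of _get_requests_session combine two third-party pieces: CacheControl wraps a requests session with an on-disk HTTP cache, and requests_mock adapters divert selected URIs to canned responses. A minimal standalone sketch of that wiring, with a throwaway cache directory and a made-up mock URI rather than fingertip's own values:

import cachecontrol
import cachecontrol.caches
import requests
import requests_mock

# on-disk HTTP cache, like path.downloads('cache') in the snippets above
cache = cachecontrol.caches.FileCache('/tmp/http-cache')
sess = cachecontrol.CacheControl(requests.Session(), cache=cache)

# divert one URI to a canned response instead of the network
adapter = requests_mock.Adapter()
adapter.register_uri('HEAD', 'mock://example.test/file')
adapter.register_uri('GET', 'mock://example.test/file', text='hello')
sess.mount('mock://example.test/file', adapter)

print(sess.get('mock://example.test/file').text)  # -> hello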
Example #5
# Licensed under GNU General Public License v3 or later, see COPYING.
# Copyright (c) 2019 Red Hat, Inc., see CONTRIBUTORS.

import fasteners
import git

import os
import tarfile

from fingertip.util import log, path, temp

OFFLINE = os.getenv('FINGERTIP_OFFLINE', '0') != '0'
DIR = path.downloads('git')


class Repo(git.Repo):
    def __init__(self, url, *path_components, enough_to_have=None):
        self.url = url
        self.path = os.path.join(DIR, *path_components)
        lock_path = self.path + '-lock'
        self.lock = fasteners.process_lock.InterProcessLock(lock_path)
        self.lock.acquire()
        if not os.path.exists(self.path):
            log.info(f'cloning {url}...')
            git.Repo.clone_from(url, self.path, mirror=True)  # TODO: use bare
            super().__init__(self.path)
        else:
            super().__init__(self.path)
            update_not_needed = enough_to_have and (
                enough_to_have in (t.name for t in self.tags)
                or enough_to_have in (h.name for h in self.heads)
            )
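
Stripped of the fingertip-specific helpers, the class above boils down to "keep one mirror clone per URL under a cache directory and serialize access to it with an inter-process lock". Here is a condensed, self-contained sketch of that idea using fasteners and GitPython directly; the cache directory, the function name, the URL and the tag name are illustrative assumptions, not fingertip's actual layout:

import os

import fasteners
import git

CACHE_DIR = os.path.expanduser('~/.cache/git-mirrors')  # stand-in for path.downloads('git')


def cached_mirror(url, name):
    os.makedirs(CACHE_DIR, exist_ok=True)
    target = os.path.join(CACHE_DIR, name)
    # one lock file per repository, shared between processes
    with fasteners.InterProcessLock(target + '-lock'):
        if not os.path.exists(target):
            git.Repo.clone_from(url, target, mirror=True)
        return git.Repo(target)


repo = cached_mirror('https://github.com/git/git', 'git.git')
print(any(t.name == 'v2.40.0' for t in repo.tags))  # tag name is only an example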
Example #6
    def __init__(self, url, *path_components, enough_to_have=None):
        if not path_components:
            path_components = [url.replace('/', '::')]
        self.url = url
        cache_path = path.downloads('git', *path_components, makedirs=True)
        self.path = temp.disappearing_dir(os.path.dirname(cache_path),
                                          path_components[-1])
        lock_working_copy_path = self.path + '-lock'
        lock_cache_path = cache_path + '-lock'
        lock.Lock.__init__(self, lock_working_copy_path)
        sources = saviour_sources()
        self.self_destruct = False
        with lock.Lock(lock_cache_path), lock.Lock(lock_working_copy_path):
            cache_is_enough = False
            if os.path.exists(cache_path):
                try:
                    cr = git.Repo(cache_path)
                    cache_is_enough = (enough_to_have
                                       and _has_rev(cr, enough_to_have))
                except git.GitError as e:
                    log.error(f'something wrong with git cache {cache_path}')
                    log.error(str(e))
            _remove(self.path)

            for i, (source, cache) in enumerate(sources):
                last_source = i == len(sources) - 1

                if cache and cache_is_enough:
                    log.info(f'not re-fetching {url} from {source} '
                             f'because {enough_to_have} '
                             'is already present in cache')
                    git.Repo.clone_from(cache_path, self.path, mirror=True)
                    break

                if source == 'local':
                    surl = path.saviour(url).replace('//', '/')  # workaround
                    if not os.path.exists(surl) and not last_source:
                        continue
                    log.info(f'cloning {url} from local saviour mirror')
                    git.Repo.clone_from(surl, self.path, mirror=True)
                    break
                elif source == 'direct':
                    surl = url
                else:
                    surl = source + '/' + url
                    surl = 'http://' + surl if '://' not in source else surl

                log.info(f'cloning {url} from {source} '
                         f'cache_exists={os.path.exists(cache_path)}...')
                try:
                    # TODO: bare clone
                    # no harm in referencing cache, even w/o cached+
                    git.Repo.clone_from(surl,
                                        self.path,
                                        mirror=True,
                                        dissociate=True,
                                        reference_if_able=cache_path)
                except git.GitError:
                    log.warning(f'could not clone {url} from {source}')
                    if last_source:
                        raise
                    continue
                break

            _remove(cache_path)
            reflink.auto(self.path, cache_path)
            git.Repo.__init__(self, self.path)
            self.remotes[0].set_url(url)
        self.self_destruct = True
Example #7
    def __init__(self, url, *path_components, enough_to_have=None):
        assert path_components
        self.url = url
        cache_path = path.downloads('git', *path_components, makedirs=True)
        cache_exists = os.path.exists(cache_path)
        self.path = temp.disappearing_dir(os.path.dirname(cache_path),
                                          path_components[-1])
        lock_working_copy_path = self.path + '-lock'
        lock_cache_path = cache_path + '-lock'
        lock.Lock.__init__(self, lock_working_copy_path)
        update_not_needed = None
        sources = saviour_sources()
        self.self_destruct = False
        with lock.Lock(lock_cache_path), lock.Lock(lock_working_copy_path):
            _remove(self.path)

            for i, (source, cache) in enumerate(sources):
                last_source = i == len(sources) - 1

                if cache and cache_exists and update_not_needed is None:
                    cr = git.Repo(cache_path)
                    update_not_needed = enough_to_have and (
                        enough_to_have in (t.name for t in cr.tags) or
                        enough_to_have in (h.name for h in cr.heads) or
                        enough_to_have in (c.hexsha for c in cr.iter_commits())
                        # that's not all revspecs, but best-effort is fine
                    )
                    if update_not_needed:
                        log.info(f'not re-fetching {url} from {source} '
                                 f'because {enough_to_have} '
                                 'is already present in cache')
                        git.Repo.clone_from(cache_path, self.path, mirror=True)
                        break

                if source == 'local':
                    surl = path.saviour(url).replace('//', '/')  # workaround
                    if not os.path.exists(surl) and not last_source:
                        continue
                    log.info(f'cloning {url} from local saviour mirror')
                    git.Repo.clone_from(surl, self.path, mirror=True)
                    break
                elif source == 'direct':
                    surl = url
                else:
                    surl = source + '/' + url
                    surl = 'http://' + surl if '://' not in source else surl

                log.info(f'cloning {url} from {source} '
                         f'cache_exists={cache_exists}...')
                try:
                    # TODO: bare clone
                    # no harm in referencing cache, even w/o cached+
                    git.Repo.clone_from(surl, self.path, mirror=True,
                                        dissociate=True,
                                        reference_if_able=cache_path)
                except git.GitError:
                    log.warning(f'could not clone {url} from {source}')
                    if last_source:
                        raise
                    continue
                break

            _remove(cache_path)
            reflink.auto(self.path, cache_path)
            git.Repo.__init__(self, self.path)
            self.remotes[0].set_url(url)
        self.self_destruct = True
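
Examples #6 and #7 share the same fallback strategy: walk the configured saviour sources in order, skip a source that fails to clone, and only let the error propagate when the last one fails too. The loop below is a reduced sketch of just that control flow; the mirror list and the URL rewriting are simplified placeholders, not fingertip's actual saviour_sources():

import git


def clone_with_fallback(url, dest, sources=('mirror.example.org', 'direct')):
    for i, source in enumerate(sources):
        last_source = i == len(sources) - 1
        # 'direct' means the upstream URL itself, anything else is a mirror prefix
        surl = url if source == 'direct' else f'http://{source}/{url}'
        try:
            return git.Repo.clone_from(surl, dest, mirror=True)
        except git.GitError:
            if last_source:
                raise  # no source left to try, surface the failure


# e.g. clone_with_fallback('https://github.com/git/git', '/tmp/git-mirror')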