def download_build(cls, client, build_id, destination):
    """Download RPM packages and build logs of a Copr build.

    Args:
        client: Copr client exposing a build_chroot_proxy attribute.
        build_id: ID of the build to download.
        destination: Directory to store the downloaded files in
            (one subdirectory per chroot is created).

    Returns:
        tuple: (list of downloaded RPM paths, list of downloaded log paths).
    """
    logger.info('Downloading packages and logs for build %d', build_id)
    downloaded_rpms = []
    downloaded_logs = []
    for chroot in client.build_chroot_proxy.get_list(build_id):
        base_url = chroot.result_url
        if not base_url.endswith('/'):
            base_url += '/'
        page = pyquery.PyQuery(base_url)
        page.make_links_absolute()
        for anchor in page('a[href$=\'.rpm\'], a[href$=\'.log.gz\']'):
            href = anchor.attrib['href']
            filename = os.path.basename(urllib.parse.urlsplit(href).path)
            target_dir = os.path.join(destination, chroot.name)
            os.makedirs(target_dir, exist_ok=True)
            target = os.path.join(target_dir, filename)
            if filename.endswith('.src.rpm'):
                # source RPMs are intentionally not downloaded
                continue
            DownloadHelper.download_file(href, target)
            if filename.endswith('.rpm'):
                downloaded_rpms.append(target)
            elif filename.endswith('.log.gz'):
                # the gzipped log is only renamed to .log, not decompressed
                renamed = target.replace('.log.gz', '.log')
                os.rename(target, renamed)
                downloaded_logs.append(renamed)
    return downloaded_rpms, downloaded_logs
 def test_download_existing_file(self, url, content):
     """Check that an existing remote file is downloaded and its content matches."""
     local_file = 'local_file'
     DownloadHelper.download_file(url, local_file)
     assert os.path.isfile(local_file)
     with open(local_file) as downloaded:
         first_line = downloaded.readline()
     assert first_line.strip() == content
Example #3
0
 def download_build(cls, client, build_id, destination):
     """Download RPM packages and build logs of a Copr build.

     Args:
         client: Copr client exposing get_build_details().
         build_id: ID of the build to download.
         destination: Directory to store the downloaded files in.

     Returns:
         tuple: (list of downloaded RPM paths, list of extracted log paths).

     Raises:
         RebaseHelperError: If build details cannot be retrieved or a log
             archive cannot be extracted.
     """
     logger.info('Downloading packages and logs for build %d', build_id)
     try:
         result = client.get_build_details(build_id)
     except copr.client.exceptions.CoprRequestException as e:
         raise RebaseHelperError(
             'Failed to get copr build details for {}: {}'.format(
                 build_id, str(e)))
     rpms = []
     logs = []
     # dict.items() instead of six.iteritems(): drops the six dependency
     # and matches the other download_build variant in this file
     for _, url in result.data['results_by_chroot'].items():
         url = url if url.endswith('/') else url + '/'
         d = pyquery.PyQuery(url)
         d.make_links_absolute()
         for a in d('a[href$=\'.rpm\'], a[href$=\'.log.gz\']'):
             fn = os.path.basename(
                 urllib.parse.urlsplit(a.attrib['href']).path)
             dest = os.path.join(destination, fn)
             if fn.endswith('.src.rpm'):
                 # skip source RPM
                 continue
             DownloadHelper.download_file(a.attrib['href'], dest)
             if fn.endswith('.rpm'):
                 rpms.append(dest)
             elif fn.endswith('.log.gz'):
                 extracted = dest.replace('.log.gz', '.log')
                 try:
                     with gzip.open(dest, 'rb') as archive:
                         with open(extracted, 'wb') as f:
                             f.write(archive.read())
                 except (IOError, EOFError):
                     raise RebaseHelperError(
                         'Failed to extract {}'.format(dest))
                 logs.append(extracted)
     return rpms, logs
 def download_build(cls, client, build_id, destination):
     """Download RPMs and gunzipped logs of a Copr build into *destination*.

     Args:
         client: Copr client exposing get_build_details().
         build_id: ID of the build to download.
         destination: Directory to store the downloaded files in.

     Returns:
         tuple: (list of downloaded RPM paths, list of extracted log paths).

     Raises:
         RebaseHelperError: If build details cannot be retrieved or a
             downloaded log archive cannot be extracted.
     """
     logger.info('Downloading packages and logs for build %d', build_id)
     try:
         details = client.get_build_details(build_id)
     except copr.client.exceptions.CoprRequestException as e:
         raise RebaseHelperError(
             'Failed to get copr build details for {}: {}'.format(build_id, str(e)))
     downloaded_rpms = []
     downloaded_logs = []
     for chroot_url in details.data['results_by_chroot'].values():
         if not chroot_url.endswith('/'):
             chroot_url += '/'
         page = pyquery.PyQuery(chroot_url)
         page.make_links_absolute()
         for anchor in page('a[href$=\'.rpm\'], a[href$=\'.log.gz\']'):
             href = anchor.attrib['href']
             filename = os.path.basename(urllib.parse.urlsplit(href).path)
             if filename.endswith('.src.rpm'):
                 # source RPMs are intentionally skipped
                 continue
             local_path = os.path.join(destination, filename)
             DownloadHelper.download_file(href, local_path)
             if filename.endswith('.rpm'):
                 downloaded_rpms.append(local_path)
             elif filename.endswith('.log.gz'):
                 # decompress the log so callers get a plain-text .log file
                 log_path = local_path.replace('.log.gz', '.log')
                 try:
                     with gzip.open(local_path, 'rb') as archive:
                         with open(log_path, 'wb') as out:
                             out.write(archive.read())
                 except (IOError, EOFError):
                     raise RebaseHelperError(
                         'Failed to extract {}'.format(local_path))
                 downloaded_logs.append(log_path)
     return downloaded_rpms, downloaded_logs
Example #5
0
    def download_remote_sources(self):
        """
        Method that iterates over all sources and downloads ones, which contain URL instead of just a file.

        :return: None
        """
        try:
            # best effort: old sources may not be present in the lookaside cache
            LookasideCacheHelper.download(self.lookaside_cache_preset, os.path.dirname(self.path), self.header.name,
                                          self.sources_location)
        except LookasideCacheError as e:
            logger.verbose("Downloading sources from lookaside cache failed. "
                           "Reason: %s.", str(e))

        for source in self.sources:
            # only sources carrying a URL scheme are remote
            if not urllib.parse.urlparse(source).scheme:
                continue
            local_file = os.path.join(self.sources_location, os.path.basename(source))
            if os.path.isfile(local_file):
                # already downloaded earlier
                continue
            logger.verbose("File '%s' doesn't exist locally, downloading it.", local_file)
            try:
                DownloadHelper.download_file(source, local_file)
            except DownloadError as e:
                raise RebaseHelperError("Failed to download file from URL {}. "
                                        "Reason: '{}'. ".format(source, str(e))) from e
Example #6
0
    def _get_initial_patches(self) -> Dict[str, List[PatchObject]]:
        """Returns a dict of patches from a spec file"""
        patches_applied = []
        patches_not_used = []
        # sources whose type flag equals 2 are treated as patches here
        patches_list = [p for p in self.spc.sources if p[2] == 2]
        strip_options = self._get_patch_strip_options(patches_list)

        for patch, num, _ in patches_list:
            is_url = bool(urllib.parse.urlparse(patch).scheme)
            filename = os.path.basename(patch) if is_url else patch
            patch_path = os.path.join(self.sources_location, filename)
            if not os.path.exists(patch_path):
                if is_url:
                    logger.info('Patch%s is remote, trying to download the patch', num)
                    try:
                        # NOTE(review): the file is downloaded to 'filename'
                        # (relative to the CWD) while existence is checked at
                        # 'patch_path' in sources_location — confirm this is
                        # intentional
                        DownloadHelper.download_file(patch, filename)
                    except DownloadError:
                        logger.error('Could not download remote patch %s', patch)
                        continue
                else:
                    logger.error('Patch %s does not exist', filename)
                    continue
            patch_num = num
            # patches with a known strip option count as applied
            if patch_num in strip_options:
                patches_applied.append(PatchObject(patch_path, patch_num, strip_options[patch_num]))
            else:
                patches_not_used.append(PatchObject(patch_path, patch_num, None))
        # keep applied patches ordered by their index
        patches_applied = sorted(patches_applied, key=lambda x: x.index)
        return {"applied": patches_applied, "not_applied": patches_not_used}
 def test_download_existing_file(self, url, content):
     """Download an existing remote file and verify its first line matches."""
     local_file = 'local_file'
     DownloadHelper.download_file(url, local_file)
     # the file must exist and its first line must equal the expected content
     assert os.path.isfile(local_file)
     with open(local_file) as fh:
         line = fh.readline()
     assert line.strip() == content
 def _download_source(cls,
                      tool,
                      url,
                      package,
                      filename,
                      hashtype,
                      hsh,
                      target=None):
     """Download a single source file from a lookaside cache.

     Args:
         tool: Lookaside tool flavor; 'fedpkg' uses the hash-type-aware
             URL layout, anything else uses the legacy layout.
         url: Base URL of the lookaside cache.
         package: Package name.
         filename: Name of the source file in the cache.
         hashtype: Name of the hash algorithm used by the cache.
         hsh: Expected hash of the file.
         target: Local path to store the file; defaults to the basename
             of filename.

     Raises:
         LookasideCacheError: If the download fails.
     """
     if target is None:
         target = os.path.basename(filename)
     if os.path.exists(target):
         if cls._hash(target, hashtype) == hsh:
             # already downloaded and intact, nothing to do
             return
         # stale or corrupted copy; remove it and download again
         os.unlink(target)
     if tool == 'fedpkg':
         url = '{0}/{1}/{2}/{3}/{4}/{2}'.format(url, package, filename,
                                                hashtype, hsh)
     else:
         url = '{0}/{1}/{2}/{3}/{2}'.format(url, package, filename, hsh)
     try:
         DownloadHelper.download_file(url, target)
     except DownloadError as e:
         # str() instead of six.text_type(): Python 3 only; chain the
         # original exception for easier debugging
         raise LookasideCacheError(str(e)) from e
    def download_task_results(cls, session, tasklist, destination):
        """Downloads packages and logs of finished Koji tasks.

        Args:
            session (koji.ClientSession): Active Koji session instance.
            tasklist (list): List of task IDs.
            destination (str): Path where to download files to.

        Returns:
            tuple: List of downloaded RPMs and list of downloaded logs.

        Raises:
            DownloadError: If download failed.

        """
        rpms = []
        logs = []
        for task_id in tasklist:
            logger.info('Downloading packages and logs for task %s', task_id)
            task = session.getTaskInfo(task_id, request=True)
            # FREE/OPEN states mean the task has not finished yet - skip it
            if task['state'] in [koji.TASK_STATES['FREE'], koji.TASK_STATES['OPEN']]:
                logger.info('Task %s is still running!', task_id)
                continue
            elif task['state'] != koji.TASK_STATES['CLOSED']:
                # not successful, but still processed below (logs may be useful)
                logger.info('Task %s did not complete successfully!', task_id)
            if task['method'] == 'buildArch':
                tasks = [task]
            elif task['method'] == 'build':
                # a build task wraps per-arch buildArch subtasks; collect those
                opts = dict(parent=task_id, method='buildArch', decode=True,
                            state=[koji.TASK_STATES['CLOSED'], koji.TASK_STATES['FAILED']])
                tasks = session.listTasks(opts=opts)
            else:
                logger.info('Task %s is not a build or buildArch task!', task_id)
                continue
            for task in tasks:
                base_path = koji.pathinfo.taskrelpath(task['id'])
                output = session.listTaskOutput(task['id'])
                for filename in output:
                    local_path = os.path.join(destination, filename)
                    download = False
                    fn, ext = os.path.splitext(filename)
                    if ext == '.rpm':
                        # RPMs of unsuccessful subtasks are skipped
                        if task['state'] != koji.TASK_STATES['CLOSED']:
                            continue
                        if local_path not in rpms:
                            nevra = RpmHelper.split_nevra(fn)
                            # FIXME: multiple arches
                            download = nevra['arch'] in ['noarch', 'x86_64']
                            if download:
                                rpms.append(local_path)
                    else:
                        # anything that is not an RPM is treated as a log
                        if local_path not in logs:
                            download = True
                            logs.append(local_path)
                    if download:
                        logger.info('Downloading file %s', filename)
                        url = '/'.join([session.opts['topurl'], 'work', base_path, filename])
                        DownloadHelper.download_file(url, local_path)
        return rpms, logs
    def download_build(cls, session, build_id, destination, arches):
        """Downloads RPMs and logs of a Koji build.

        Args:
            session (koji.ClientSession): Active Koji session instance.
            build_id (str): Koji build ID.
            destination (str): Path where to download files to.
            arches (list): List of architectures to be downloaded.

        Returns:
            tuple: List of downloaded RPMs and list of downloaded logs.

        Raises:
            DownloadError: If download failed.

        """
        build = session.getBuild(build_id)
        rpms = []
        logs = []
        os.makedirs(destination, exist_ok=True)
        for package in session.listRPMs(buildID=build_id):
            if package['arch'] not in arches:
                continue
            rpm_name = '.'.join([package['nvr'], package['arch'], 'rpm'])
            rpm_path = os.path.join(destination, rpm_name)
            if rpm_path not in rpms:
                rpm_url = '/'.join([
                    session.opts['topurl'],
                    'packages',
                    build['package_name'],
                    build['version'],
                    build['release'],
                    package['arch'],
                    rpm_name])
                DownloadHelper.download_file(rpm_url, rpm_path)
                rpms.append(rpm_path)
            if package['arch'] == 'src':
                # No logs for SRPM in koji
                continue
            for logname in ['build.log', 'root.log', 'state.log']:
                log_path = os.path.join(destination, logname)
                if log_path in logs:
                    continue
                log_url = '/'.join([
                    session.opts['topurl'],
                    'packages',
                    build['package_name'],
                    build['version'],
                    build['release'],
                    'data',
                    'logs',
                    package['arch'],
                    logname])
                DownloadHelper.download_file(log_url, log_path)
                logs.append(log_path)
        return rpms, logs
Example #11
0
 def download_remote_sources(self):
     """
     Download the sources from the URL in the configuration (if the path in
     the configuration match to the URL basename from SourceX) or from the one
     from SourceX in specfile.
     """
     # First fetch every source listed in packit.yaml -> sources
     sources_dir = Path(self.specfile.sources_location)
     for source in self.package_config.sources:
         logger.info(f"Downloading source {source.path!r}.")
         DownloadHelper.download_file(
             source.url,
             str(sources_dir.joinpath(source.path)),
         )
     # then let the spec file fetch its own remote sources
     self.specfile.download_remote_sources()
Example #12
0
    def download_build(cls, session, build_id, destination, arches):
        """Downloads RPMs and logs of a Koji build.

        Args:
            session (koji.ClientSession): Active Koji session instance.
            build_id (str): Koji build ID.
            destination (str): Path where to download files to.
            arches (list): List of architectures to be downloaded.

        Returns:
            tuple: List of downloaded RPMs and list of downloaded logs.

        Raises:
            DownloadError: If download failed.

        """
        build = session.getBuild(build_id)
        packages = session.listRPMs(buildID=build_id)
        rpms = []
        logs = []
        os.makedirs(destination, exist_ok=True)
        # common URL prefix shared by all files of this build
        build_prefix = '/'.join([
            session.opts['topurl'], 'packages', build['package_name'],
            build['version'], build['release']
        ])
        for pkg in packages:
            arch = pkg['arch']
            if arch not in arches:
                continue
            filename = '.'.join([pkg['nvr'], arch, 'rpm'])
            local_path = os.path.join(destination, filename)
            if local_path not in rpms:
                DownloadHelper.download_file(
                    '/'.join([build_prefix, arch, filename]), local_path)
                rpms.append(local_path)
            if arch == 'src':
                # No logs for SRPM in koji
                continue
            for logname in ['build.log', 'root.log', 'state.log']:
                local_path = os.path.join(destination, logname)
                if local_path not in logs:
                    DownloadHelper.download_file(
                        '/'.join([build_prefix, 'data', 'logs', arch, logname]),
                        local_path)
                    logs.append(local_path)
        return rpms, logs
    def test_keyboard_interrupt_situation(self, monkeypatch):
        """
        Test that the local file is deleted in case KeyboardInterrupt is raised during the download
        """
        known_url = 'https://ftp.isc.org/isc/bind9/9.10.4-P1/srcid'
        local_file = os.path.basename(known_url)

        def raise_interrupt():
            raise KeyboardInterrupt

        # patch a function that is called inside the actual download section
        # so that KeyboardInterrupt fires mid-download
        monkeypatch.setattr('time.time', raise_interrupt)

        with pytest.raises(KeyboardInterrupt):
            DownloadHelper.download_file(known_url, local_file)

        assert not os.path.exists(local_file)
Example #14
0
    def test_keyboard_interrupt_situation(self, monkeypatch):
        """
        Test that the local file is deleted in case KeyboardInterrupt is raised during the download
        """
        url = 'https://ftp.isc.org/isc/bind9/9.10.4-P1/srcid'
        target = os.path.basename(url)

        def boom():
            raise KeyboardInterrupt

        # patch a function that is invoked during the actual download so the
        # interrupt is raised mid-transfer
        monkeypatch.setattr('time.time', boom)

        with pytest.raises(KeyboardInterrupt):
            DownloadHelper.download_file(url, target)

        assert not os.path.exists(target)
    def download_build(cls, session, build_id, destination, arches):
        """Downloads RPMs and logs of a Koji build.

        Args:
            session (koji.ClientSession): Active Koji session instance.
            build_id (str): Koji build ID.
            destination (str): Path where to download files to.
            arches (list): List of architectures to be downloaded.

        Returns:
            tuple: List of downloaded RPMs and list of downloaded logs.

        Raises:
            DownloadError: If download failed.

        """
        build = session.getBuild(build_id)
        pathinfo = koji.PathInfo(topdir=session.opts['topurl'])
        rpms: List[str] = []
        logs: List[str] = []
        os.makedirs(destination, exist_ok=True)
        build_url = pathinfo.build(build)
        for pkg in session.listBuildRPMs(build_id):
            if pkg['arch'] not in arches:
                continue
            relative = pathinfo.rpm(pkg)
            target = os.path.join(destination, os.path.basename(relative))
            if target in rpms:
                continue
            DownloadHelper.download_file(build_url + '/' + relative, target)
            rpms.append(target)
        for logfile in session.getBuildLogs(build_id):
            # 'dir' holds the arch the log belongs to; filter on it
            if logfile['dir'] not in arches:
                continue
            target = os.path.join(destination, logfile['name'])
            if target in logs:
                continue
            DownloadHelper.download_file(pathinfo.topdir + '/' + logfile['path'],
                                         target)
            logs.append(target)
        return rpms, logs
Example #16
0
 def download_remote_sources(self):
     """
     Download the sources from the URL in the configuration (if the path in
     the configuration match to the URL basename from SourceX) or from the one
     from SourceX in specfile.
     """
     # Fetch all sources defined in packit.yaml -> sources
     for source in self.package_config.sources:
         source_path = self.specfile.sourcedir.joinpath(source.path)
         if not source_path.is_file():
             logger.info(f"Downloading source {source.path!r}.")
             DownloadHelper.download_file(
                 source.url,
                 str(source_path),
             )
     # Try to download sources defined in "sources" file from Fedora lookaside cache
     try:
         LookasideCacheHelper.download(
             "fedpkg",
             self.specfile.path.parent,
             self.specfile.expanded_name,
             self.specfile.sourcedir,
         )
     except LookasideCacheError as e:
         # best effort - sources may simply not be in the lookaside cache
         logger.debug(
             f"Downloading sources from lookaside cache failed: {e}.")
     # Fetch all remote sources defined in the spec file
     with self.specfile.sources() as sources, self.specfile.patches(
     ) as patches:
         for source in sources + patches:
             if source.remote:
                 source_path = self.specfile.sourcedir.joinpath(
                     source.expanded_filename)
                 # only download files not already present locally
                 if not source_path.is_file():
                     logger.info(f"Downloading source {source.filename!r}.")
                     DownloadHelper.download_file(
                         source.expanded_location,
                         str(source_path),
                     )
Example #17
0
    def download_task_results(cls, session, tasklist, destination):
        """Downloads packages and logs of finished Koji tasks.

        Args:
            session (koji.ClientSession): Active Koji session instance.
            tasklist (list): List of task IDs.
            destination (str): Path where to download files to.

        Returns:
            tuple: List of downloaded RPMs and list of downloaded logs.

        Raises:
            DownloadError: If download failed.

        """
        rpms = []
        logs = []
        for task_id in tasklist:
            logger.info('Downloading packages and logs for task %s', task_id)
            task = session.getTaskInfo(task_id, request=True)
            # FREE/OPEN states mean the task has not finished yet - skip it
            if task['state'] in [
                    koji.TASK_STATES['FREE'], koji.TASK_STATES['OPEN']
            ]:
                logger.info('Task %s is still running!', task_id)
                continue
            elif task['state'] != koji.TASK_STATES['CLOSED']:
                # not successful, but still processed below (logs may be useful)
                logger.info('Task %s did not complete successfully!', task_id)
            if task['method'] == 'buildArch':
                tasks = [task]
            elif task['method'] == 'build':
                # a build task wraps per-arch buildArch subtasks; collect those
                opts = dict(parent=task_id,
                            method='buildArch',
                            decode=True,
                            state=[
                                koji.TASK_STATES['CLOSED'],
                                koji.TASK_STATES['FAILED']
                            ])
                tasks = session.listTasks(opts=opts)
            else:
                logger.info('Task %s is not a build or buildArch task!',
                            task_id)
                continue
            for task in tasks:
                base_path = koji.pathinfo.taskrelpath(task['id'])
                output = session.listTaskOutput(task['id'])
                for filename in output:
                    local_path = os.path.join(destination, filename)
                    download = False
                    fn, ext = os.path.splitext(filename)
                    if ext == '.rpm':
                        # RPMs of unsuccessful subtasks are skipped
                        if task['state'] != koji.TASK_STATES['CLOSED']:
                            continue
                        if local_path not in rpms:
                            nevra = RpmHelper.split_nevra(fn)
                            # FIXME: multiple arches
                            download = nevra['arch'] in ['noarch', 'x86_64']
                            if download:
                                rpms.append(local_path)
                    else:
                        # anything that is not an RPM is treated as a log
                        if local_path not in logs:
                            download = True
                            logs.append(local_path)
                    if download:
                        logger.info('Downloading file %s', filename)
                        url = '/'.join([
                            session.opts['topurl'], 'work', base_path, filename
                        ])
                        DownloadHelper.download_file(url, local_path)
        return rpms, logs
 def test_download_non_existing_file(self, url):
     """Test downloading NON existing file"""
     target = 'local_file'
     # the download must fail and must not leave a partial file behind
     with pytest.raises(DownloadError):
         DownloadHelper.download_file(url, target)
     assert not os.path.isfile(target)
Example #19
0
 def test_download_non_existing_file(self, url):
     """Downloading a missing remote file must raise and create nothing."""
     destination = 'local_file'
     with pytest.raises(DownloadError):
         DownloadHelper.download_file(url, destination)
     # no local file may be left over after the failed download
     assert not os.path.isfile(destination)