Example #1
0
    def fork_repository(self, connection, repository, destination,
                        source=False, locale=False):
        """Fork the repository into *destination* as a valid empty clone.

        Copies the repository descriptor, rewrites its url to point at
        the destination and writes the initial meta files (Release plus
        empty Packages and Packages.gz indexes).

        :param connection: the connection object (not used here)
        :param repository: the repository to clone
        :param destination: the base url of the clone
        :param source: reserved, sources are not copied yet
        :param locale: reserved, locales are not copied yet
        :return: the cloned repository object
        """
        # TODO(download gpk)
        # TODO(sources and locales)
        clone = copy.copy(repository)
        clone.url = utils.localize_repo_url(destination, repository.url)
        packages_path = utils.get_path_from_url(
            self._get_url_of_metafile(clone, "Packages")
        )
        release_path = utils.get_path_from_url(
            self._get_url_of_metafile(clone, "Release")
        )
        self.logger.info(
            "clone repository %s to %s", repository, clone.url
        )
        utils.ensure_dir_exist(os.path.dirname(release_path))

        # Describe the clone in a Release meta file.
        release_meta = deb822.Release()
        for field, value in (
                ("Origin", repository.origin),
                ("Label", repository.origin),
                ("Archive", repository.name[0]),
                ("Component", repository.name[1]),
                ("Architecture", _ARCHITECTURES[repository.architecture])):
            release_meta[field] = value
        with open(release_path, "wb") as stream:
            release_meta.dump(stream)

        # Touch empty package indexes so the clone is a usable repo.
        open(packages_path, "ab").close()
        gzip.open(packages_path + ".gz", "ab").close()
        return clone
Example #2
0
 def add_packages(self, connection, repository, packages):
     """Write the Packages/Packages.gz indexes including *packages*.

     Packages already indexed in the repository are merged into the
     given collection first, so the call is additive.

     :param connection: the connection used to read existing metadata
     :param repository: the target repository
     :param packages: a set-like collection of packages (must support
         ``add``) to include in the rebuilt index
     """
     basedir = utils.get_path_from_url(repository.url)
     index_file = utils.get_path_from_url(
         self._get_url_of_metafile(repository, "Packages")
     )
     utils.ensure_dir_exist(os.path.dirname(index_file))
     index_gz = index_file + ".gz"
     count = 0
     # load existing packages
     self.get_packages(connection, repository, packages.add)
     with open(index_file, "wb") as fd1:
         with closing(gzip.open(index_gz, "wb")) as fd2:
             # every record goes to both the plain and the gzip index
             writer = utils.composite_writer(fd1, fd2)
             for pkg in packages:
                 filename = os.path.join(basedir, pkg.filename)
                 # read the control stanza from the .deb file itself
                 with closing(debfile.DebFile(filename)) as deb:
                     debcontrol = deb.debcontrol()
                 debcontrol.setdefault("Origin", repository.origin)
                 debcontrol["Size"] = str(pkg.filesize)
                 debcontrol["Filename"] = pkg.filename
                 # pair checksum field names with the package's values
                 for k, v in six.moves.zip(_CHECKSUM_METHODS, pkg.checksum):
                     debcontrol[k] = v
                 writer(debcontrol.dump())
                 writer("\n")
                 count += 1
     self.logger.info("saved %d packages in %s", count, repository)
     self._update_suite_index(repository)
 def add_packages(self, connection, repository, packages):
     """Rebuild the repository index so that it contains *packages*.

     Existing packages of the repository are merged into the given
     collection before the index files are rewritten.

     :param connection: the connection used to read existing metadata
     :param repository: the target repository
     :param packages: a set-like collection of packages to index
     """
     root = utils.get_path_from_url(repository.url)
     packages_index = utils.get_path_from_url(
         self._get_url_of_metafile(repository, "Packages"))
     utils.ensure_dir_exist(os.path.dirname(packages_index))
     gz_index = packages_index + ".gz"
     saved = 0
     # merge previously indexed packages into the new set
     self.get_packages(connection, repository, packages.add)
     with open(packages_index, "wb") as plain:
         with closing(gzip.open(gz_index, "wb")) as compressed:
             emit = utils.composite_writer(plain, compressed)
             for package in packages:
                 package_path = os.path.join(root, package.filename)
                 with closing(debfile.DebFile(package_path)) as deb:
                     control = deb.debcontrol()
                 control.setdefault("Origin", repository.origin)
                 control["Size"] = str(package.filesize)
                 control["Filename"] = package.filename
                 for key, value in six.moves.zip(_CHECKSUM_METHODS,
                                                 package.checksum):
                     control[key] = value
                 emit(control.dump())
                 emit("\n")
                 saved += 1
     self.logger.info("saved %d packages in %s", saved, repository)
     self._update_suite_index(repository)
Example #4
0
 def fork_repository(self, connection, repository, destination,
                     source=False, locale=False):
     """Clone *repository* into *destination* with an empty index.

     :param connection: the connection object
     :param repository: the repository to clone
     :param destination: the base path of the clone
     :param source: reserved, sources are not copied yet
     :param locale: reserved, locales are not copied yet
     :return: the cloned repository object
     """
     # TODO(download gpk)
     # TODO(sources and locales)
     clone = copy.copy(repository)
     clone.url = utils.normalize_repository_url(destination)
     utils.ensure_dir_exist(destination)
     # indexing an empty package set (re)creates the meta files
     self.add_packages(connection, clone, set())
     return clone
Example #5
0
 def fork_repository(self, connection, repository, destination, options):
     """Clone *repository* into *destination*, keeping its groups data.

     :param connection: the connection object
     :param repository: the repository to clone
     :param destination: the base path of the clone
     :param options: fork options (not used here)
     :return: the cloned repository object
     """
     # TODO(download gpk)
     # TODO(sources and locales)
     fork = copy.copy(repository)
     fork.url = utils.normalize_repository_url(destination)
     utils.ensure_dir_exist(destination)
     # carry the groups metadata over to the fork
     groups = self._load_groups(connection, repository)
     self._rebuild_repository(connection, fork, set(), groups)
     return fork
Example #6
0
 def fork_repository(self, connection, repository, destination, options):
     """Create an empty fork of *repository* rooted at *destination*.

     :param connection: the connection object
     :param repository: the repository to clone
     :param destination: the base path of the clone
     :param options: fork options (not used here)
     :return: the cloned repository object
     """
     # TODO(download gpk)
     # TODO(sources and locales)
     new_repo = copy.copy(repository)
     new_repo.url = utils.normalize_repository_url(destination)
     utils.ensure_dir_exist(destination)
     # rebuild with no packages but the original groups metadata
     self._rebuild_repository(
         connection, new_repo, set(),
         self._load_groups(connection, repository)
     )
     return new_repo
Example #7
0
 def create_repository(self, repository_data, arch):
     """Build a Repository object and ensure its directory exists.

     :param repository_data: dict with 'name', 'uri' and optional
         'origin' keys describing the repository
     :param arch: the repository architecture
     :return: the new Repository object
     """
     url = utils.normalize_repository_url(repository_data["uri"])
     repository = Repository(
         name=repository_data['name'],
         url=url,
         architecture=arch,
         origin=repository_data.get('origin'),
     )
     utils.ensure_dir_exist(utils.get_path_from_url(url))
     return repository
Example #8
0
 def create_repository(self, connection, repository_data, arch):
     """Create a Repository object and materialise it on disk.

     :param connection: the connection object
     :param repository_data: dict with 'name', 'uri' and optional
         'path'/'origin' keys describing the repository
     :param arch: the repository architecture
     :return: the new Repository object
     """
     repository = Repository(
         name=repository_data['name'],
         url=utils.normalize_repository_url(repository_data["uri"]),
         architecture=arch,
         path=repository_data.get('path'),
         origin=repository_data.get('origin'),
     )
     utils.ensure_dir_exist(utils.get_path_from_url(repository.url))
     # an empty rebuild writes the initial repository metadata
     self._rebuild_repository(connection, repository, None, None)
     return repository
Example #9
0
 def create_repository(self, connection, repository_data, arch):
     """Create and initialise a repository from its description.

     :param connection: the connection object
     :param repository_data: dict with 'name', 'uri' and optional
         'path'/'origin' keys describing the repository
     :param arch: the repository architecture
     :return: the new Repository object
     """
     normalized_url = utils.normalize_repository_url(repository_data["uri"])
     repository = Repository(name=repository_data['name'],
                             url=normalized_url,
                             architecture=arch,
                             path=repository_data.get('path'),
                             origin=repository_data.get('origin'))
     utils.ensure_dir_exist(utils.get_path_from_url(normalized_url))
     # rebuild with no packages to write the initial metadata
     self._rebuild_repository(connection, repository, None, None)
     return repository
Example #10
0
 def fork_repository(self, connection, repository, destination,
                     source=False, locale=False):
     """Clone *repository* into *destination* as an empty repository.

     :param connection: the connection object
     :param repository: the repository to clone
     :param destination: the base url of the clone
     :param source: reserved, sources are not copied yet
     :param locale: reserved, locales are not copied yet
     :return: the cloned repository object
     """
     # TODO(download gpk)
     # TODO(sources and locales)
     fork = copy.copy(repository)
     fork.url = utils.localize_repo_url(destination, repository.url)
     self.logger.info(
         "clone repository %s to %s", repository, fork.url
     )
     utils.ensure_dir_exist(fork.url)
     # rebuilding with no packages creates the empty index files
     self.rebuild_repository(fork, set())
     return fork
    def build_packages(self, sources, output_dir):
        """Build new package(s) from the given source descriptions.

        :param sources: the descriptions of packages to build
        :param output_dir: the directory that receives the new packages
        :return: the list of names of packages which were built
        """
        target_dir = os.path.abspath(output_dir)
        utils.ensure_dir_exist(target_dir)
        built = []
        # the controller reports each built package via the callback
        for description in sources:
            self.controller.build_packages(
                description, target_dir, built.append
            )
        return built
    def _create_repository_structure(self, repository):
        """Create the on-disk skeleton of a repository.

        Writes the Release meta file and empty Packages/Packages.gz
        indexes, creating the directory tree as needed.

        :param repository: the repository object whose structure is
            created
        """
        packages_file = utils.get_path_from_url(
            self._get_url_of_metafile(repository, "Packages"))
        release_file = utils.get_path_from_url(
            self._get_url_of_metafile(repository, "Release"))
        utils.ensure_dir_exist(os.path.dirname(release_file))

        release = deb822.Release()
        release["Origin"] = repository.origin
        release["Label"] = repository.origin
        release["Archive"] = repository.section[0]
        release["Component"] = repository.section[1]
        release["Architecture"] = _ARCHITECTURES[repository.architecture]
        with open(release_file, "wb") as fd:
            release.dump(fd)

        # touch empty index files; context managers guarantee the
        # handles are closed even if an error occurs in between
        with open(packages_file, "ab"):
            pass
        with gzip.open(packages_file + ".gz", "ab"):
            pass
Example #13
0
    def _create_repository_structure(self, repository):
        """Write the initial Release file and empty Packages indexes.

        :param repository: the repository object whose structure is
            created
        """
        packages_path = utils.get_path_from_url(
            self._get_url_of_metafile(repository, "Packages")
        )
        release_path = utils.get_path_from_url(
            self._get_url_of_metafile(repository, "Release")
        )
        utils.ensure_dir_exist(os.path.dirname(release_path))

        # Describe the repository in a Release meta file.
        meta = deb822.Release()
        for field, value in (
                ("Origin", repository.origin),
                ("Label", repository.origin),
                ("Archive", repository.section[0]),
                ("Component", repository.section[1]),
                ("Architecture", _ARCHITECTURES[repository.architecture])):
            meta[field] = value
        with open(release_path, "wb") as stream:
            meta.dump(stream)

        # Touch the empty package indexes.
        open(packages_path, "ab").close()
        gzip.open(packages_path + ".gz", "ab").close()
Example #14
0
    def retrieve(self, url, filename, **attributes):
        """Downloads remote file.

        Supports resuming: if a partial local copy exists and is
        smaller than the expected size, copying restarts from the
        current end of the file.

        :param url: the remote file`s url
        :param filename: the target filename on local filesystem
        :param attributes: the file attributes, like size, hashsum, etc.
        :return: the count of actually copied bytes
        """
        offset = 0
        try:
            stats = os.stat(filename)
            expected_size = attributes.get('size', -1)
            if expected_size == stats.st_size:
                # TODO(check hashsum)
                # the file is already complete, nothing to copy
                return 0

            # a smaller file means a partial download: resume from its end
            if stats.st_size < expected_size:
                offset = stats.st_size
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
            # the file does not exist yet - make sure its directory does
            ensure_dir_exist(os.path.dirname(filename))

        logger.info("download: %s from the offset: %d", url, offset)

        # NOTE(review): the fd is opened without O_APPEND, so
        # _copy_stream presumably seeks to `offset` itself - confirm.
        fd = os.open(filename, os.O_CREAT | os.O_WRONLY)
        try:
            return self._copy_stream(fd, url, offset)
        except RangeError:
            # server refused the byte-range request; if we were not
            # resuming there is nothing else to try
            if offset == 0:
                raise
            logger.warning(
                "Failed to resume download, starts from the beginning: %s",
                url
            )
            return self._copy_stream(fd, url, 0)
        finally:
            os.fsync(fd)
            os.close(fd)
Example #15
0
 def test_ensure_dir_exist(self, os):
     """Check ensure_dir_exist against a mocked os module.

     The side_effect items are consumed strictly in call order:
     success, EEXIST (treated as success), EACCES (re-raised) and
     ValueError (propagated unchanged).
     """
     os.makedirs.side_effect = [
         True,
         OSError(utils.errno.EEXIST, ""),
         OSError(utils.errno.EACCES, ""),
         ValueError()
     ]
     # plain successful creation
     utils.ensure_dir_exist("/nonexisted")
     os.makedirs.assert_called_with("/nonexisted")
     # an already existing directory is not an error
     utils.ensure_dir_exist("/existed")
     os.makedirs.assert_called_with("/existed")
     # any other OSError must propagate
     with self.assertRaises(OSError):
         utils.ensure_dir_exist("/private")
     # non-OSError exceptions must not be swallowed either
     with self.assertRaises(ValueError):
         utils.ensure_dir_exist(1)
 def test_ensure_dir_exist(self, os):
     """ensure_dir_exist tolerates EEXIST but propagates other errors."""
     outcomes = [
         True,
         OSError(utils.errno.EEXIST, ""),
         OSError(utils.errno.EACCES, ""),
         ValueError(),
     ]
     os.makedirs.side_effect = outcomes
     # both a successful creation and EEXIST count as success
     for path in ("/nonexisted", "/existed"):
         utils.ensure_dir_exist(path)
         os.makedirs.assert_called_with(path)
     # any other error must reach the caller unchanged
     with self.assertRaises(OSError):
         utils.ensure_dir_exist("/private")
     with self.assertRaises(ValueError):
         utils.ensure_dir_exist(1)
Example #17
0
    def build_packages(self, data, cache, output_dir, consumer):
        """Build rpm packages described by *data* into *output_dir*.

        :param data: the build description; reads 'src' and the
            'rpm' section ('spec', optional 'options')
        :param cache: maps resource ids to local file paths
        :param output_dir: base directory for the SRPM/RPM results
        :param consumer: callback invoked with each built rpm path
        """
        src = cache[data['src']]
        spec = cache[data['rpm']['spec']]
        options = data['rpm'].get('options', {})

        with utils.create_tmp_dir() as tmpdir:
            # build the source rpm into the temporary directory first
            self._buildsrpm(
                resultdir=tmpdir, spec=spec, sources=src, **options
            )
            srpms_dir = os.path.join(output_dir, 'SRPM')
            utils.ensure_dir_exist(srpms_dir)
            # NOTE(review): iglob is lazy - the directory is scanned
            # only when _rebuild iterates it, not here; confirm intended
            srpms = glob.iglob(os.path.join(srpms_dir, '*.src.rpm'))
            rpms_dir = os.path.join(output_dir, 'RPM')
            utils.ensure_dir_exist(rpms_dir)
            self._rebuild(srpms, resultdir=tmpdir, **options)

            # rebuild commands rebuilds source rpm too
            # notify only about last version
            for rpm in utils.move_files(tmpdir, srpms_dir, '*.src.rpm'):
                consumer(rpm)

            for rpm in utils.move_files(tmpdir, rpms_dir, '*.rpm'):
                consumer(rpm)
Example #18
0
    def build_packages(self, data, cache, output_dir, consumer):
        """Build rpm packages from the cached sources and spec file.

        :param data: the build description; reads 'src' and the
            'rpm' section ('spec', optional 'options')
        :param cache: maps resource ids to local file paths
        :param output_dir: base directory for the SRPM/RPM results
        :param consumer: callback invoked with each built rpm path
        """
        src = cache[data['src']]
        spec = cache[data['rpm']['spec']]
        options = data['rpm'].get('options', {})

        with utils.create_tmp_dir() as tmpdir:
            # the source rpm is built first, into the temporary dir
            self._buildsrpm(resultdir=tmpdir,
                            spec=spec,
                            sources=src,
                            **options)
            srpms_dir = os.path.join(output_dir, 'SRPM')
            utils.ensure_dir_exist(srpms_dir)
            # NOTE(review): lazy iterator; _rebuild consumes it later,
            # after ensure_dir_exist - confirm that ordering is intended
            srpms = glob.iglob(os.path.join(srpms_dir, '*.src.rpm'))
            rpms_dir = os.path.join(output_dir, 'RPM')
            utils.ensure_dir_exist(rpms_dir)
            self._rebuild(srpms, resultdir=tmpdir, **options)

            # rebuild commands rebuilds source rpm too
            # notify only about last version
            for rpm in utils.move_files(tmpdir, srpms_dir, '*.src.rpm'):
                consumer(rpm)

            for rpm in utils.move_files(tmpdir, rpms_dir, '*.rpm'):
                consumer(rpm)