def fork_repository(self, connection, repository, destination, source=False, locale=False):
    """Creates an empty clone of a deb repository under *destination*.

    Writes a fresh Release file plus empty Packages/Packages.gz indexes;
    the package files themselves are not copied here.

    :param connection: the connection manager (not used here)
    :param repository: the origin repository
    :param destination: the target base folder
    :param source: reserved, see TODO below
    :param locale: reserved, see TODO below
    :return: the cloned repository object
    """
    # TODO(download gpk)
    # TODO(sources and locales)
    clone = copy.copy(repository)
    clone.url = utils.localize_repo_url(destination, repository.url)
    packages_path = utils.get_path_from_url(
        self._get_url_of_metafile(clone, "Packages")
    )
    release_path = utils.get_path_from_url(
        self._get_url_of_metafile(clone, "Release")
    )
    self.logger.info(
        "clone repository %s to %s", repository, clone.url
    )
    utils.ensure_dir_exist(os.path.dirname(release_path))

    release = deb822.Release()
    release["Origin"] = repository.origin
    release["Label"] = repository.origin
    # NOTE(review): reads repository.name here while the
    # _create_repository_structure variants read repository.section —
    # presumably both are (suite, component) pairs; confirm.
    release["Archive"] = repository.name[0]
    release["Component"] = repository.name[1]
    release["Architecture"] = _ARCHITECTURES[repository.architecture]
    with open(release_path, "wb") as fd:
        release.dump(fd)
    # touch empty indexes so the clone is immediately consumable
    open(packages_path, "ab").close()
    gzip.open(packages_path + ".gz", "ab").close()
    return clone
def add_packages(self, connection, repository, packages):
    """Publishes packages into the deb repository.

    Merges the packages already indexed into *packages*, then rewrites
    the Packages and Packages.gz indexes and refreshes the suite
    Release file.

    :param connection: the connection manager
    :param repository: the target repository
    :param packages: the set of packages to publish
    """
    repo_dir = utils.get_path_from_url(repository.url)
    index_file = utils.get_path_from_url(
        self._get_url_of_metafile(repository, "Packages"))
    utils.ensure_dir_exist(os.path.dirname(index_file))
    index_gz = index_file + ".gz"
    saved = 0
    # load existing packages
    self.get_packages(connection, repository, packages.add)
    with open(index_file, "wb") as plain_fd:
        with closing(gzip.open(index_gz, "wb")) as gz_fd:
            # every write goes to both the plain and the gzipped index
            writer = utils.composite_writer(plain_fd, gz_fd)
            for package in packages:
                deb_path = os.path.join(repo_dir, package.filename)
                with closing(debfile.DebFile(deb_path)) as deb:
                    control = deb.debcontrol()
                control.setdefault("Origin", repository.origin)
                control["Size"] = str(package.filesize)
                control["Filename"] = package.filename
                for method, digest in six.moves.zip(
                        _CHECKSUM_METHODS, package.checksum):
                    control[method] = digest
                writer(control.dump())
                writer("\n")
                saved += 1
    self.logger.info("saved %d packages in %s", saved, repository)
    self._update_suite_index(repository)
def add_packages(self, connection, repository, packages):
    """Adds packages to the deb repository and rebuilds its indexes.

    :param connection: the connection manager
    :param repository: the target repository
    :param packages: the set of packages to publish
    """
    basedir = utils.get_path_from_url(repository.url)
    index_file = utils.get_path_from_url(
        self._get_url_of_metafile(repository, "Packages")
    )
    utils.ensure_dir_exist(os.path.dirname(index_file))
    count = 0
    # load existing packages
    self.get_packages(connection, repository, packages.add)
    with open(index_file, "wb") as fd1, \
            closing(gzip.open(index_file + ".gz", "wb")) as fd2:
        # write each control stanza to both indexes at once
        write = utils.composite_writer(fd1, fd2)
        for pkg in packages:
            with closing(
                    debfile.DebFile(
                        os.path.join(basedir, pkg.filename))) as deb:
                debcontrol = deb.debcontrol()
            debcontrol.setdefault("Origin", repository.origin)
            debcontrol["Size"] = str(pkg.filesize)
            debcontrol["Filename"] = pkg.filename
            for k, v in six.moves.zip(_CHECKSUM_METHODS, pkg.checksum):
                debcontrol[k] = v
            write(debcontrol.dump())
            write("\n")
            count += 1
    self.logger.info("saved %d packages in %s", count, repository)
    self._update_suite_index(repository)
def test_get_path_from_url(self):
    """Checks URL-to-path conversion, including remote-URL handling."""
    expectations = [
        ("/a/f.txt", ("/a/f.txt",)),
        ("/a/f.txt", ("file:///a/f.txt?size=1",)),
        ("/f.txt", ("http://host/f.txt", False)),
    ]
    self._check_cases(
        self.assertEqual, expectations, utils.get_path_from_url
    )
    # a remote URL is rejected unless the check is disabled above
    with self.assertRaises(ValueError):
        utils.get_path_from_url("http:///a/f.txt")
def _add_to_cache(self, url, cache):
    """Makes *url* available locally and records its path in *cache*.

    Remote URLs are downloaded into the context cache directory;
    local ones are referenced in place.
    """
    local_path = utils.get_path_from_url(url, ensure_file=False)
    if not utils.is_local(url):
        local_path = os.path.join(
            self.context.cache_dir,
            utils.get_filename_from_uri(local_path)
        )
        self.context.connection.retrieve(url, local_path)
    cache[url] = local_path
def rebuild_repository(self, repository, packages):
    """Overrides method of superclass.

    Regenerates the yum metadata of the repository directory via
    createrepo.

    :param repository: the repository to rebuild
    :param packages: unused here; createrepo scans the directory itself
    :raises RuntimeError: if createrepo fails to build the metadata
    """
    basepath = utils.get_path_from_url(repository.url)
    self.logger.info("rebuild repository in %s", basepath)
    md_config = createrepo.MetaDataConfig()
    try:
        md_config.workers = multiprocessing.cpu_count()
        md_config.directory = str(basepath)
        md_config.update = True
        mdgen = createrepo.MetaDataGenerator(
            config_obj=md_config, callback=CreaterepoCallBack(self.logger)
        )
        mdgen.doPkgMetadata()
        mdgen.doRepoMetadata()
        mdgen.doFinalMove()
    except createrepo.MDError as e:
        err_msg = six.text_type(e)
        self.logger.exception(
            "failed to create yum repository in %s: %s", basepath, err_msg
        )
        # remove the temporary files createrepo left behind
        shutil.rmtree(
            os.path.join(md_config.outputdir, md_config.tempdir),
            ignore_errors=True
        )
        # BUG FIX: err_msg used to be formatted into the "in {0}"
        # placeholder where a path is expected; report path and cause.
        raise RuntimeError(
            "Failed to create yum repository in {0}: {1}"
            .format(basepath, err_msg))
def load_package_from_file(self, repository, filename):
    """Builds a Package object from a .deb file inside the repository.

    :param repository: the repository owning the file
    :param filename: the package path relative to the repository URL
    :return: a Package populated from the deb control stanza
    """
    local_path = utils.get_path_from_url(repository.url + filename)
    # one-shot generator: size and checksums for the single file
    _, size, checksum = next(iter(
        utils.get_size_and_checksum_for_files(
            [local_path], _checksum_collector
        )
    ))
    with closing(debfile.DebFile(local_path)) as deb:
        control = deb822.Packages(
            deb.control.get_content(debfile.CONTROL_FILE)
        )
    return Package(
        repository=repository,
        name=control["package"],
        version=Version(control['version']),
        # prefer the declared size; fall back to the on-disk size
        filesize=int(control.get('size', size)),
        filename=filename,
        checksum=FileChecksum(*checksum),
        mandatory=self._is_mandatory(control),
        requires=self._get_relations(
            control, "depends", "pre-depends", "recommends"
        ),
        provides=self._get_relations(control, "provides"),
        obsoletes=[],
        group=control.get('section'),
    )
def _update_suite_index(self, repository):
    """Updates the Release file in the suite.

    Takes an exclusive lock on the file, merges in the repository's
    component and metafile checksums, then rewrites it in place.
    """
    suite_dir = os.path.join(
        utils.get_path_from_url(repository.url),
        "dists", repository.section[0]
    )
    release_path = os.path.join(suite_dir, "Release")
    self.logger.info(
        "added repository suite release file: %s", release_path
    )
    # a+b: create the file if missing, keep it readable for the merge
    with open(release_path, "a+b") as release_fd:
        fcntl.flock(release_fd.fileno(), fcntl.LOCK_EX)
        try:
            release_fd.seek(0)
            release = deb822.Release(release_fd)
            self._add_to_release(release, repository)
            for method in _CHECKSUM_METHODS:
                release.setdefault(method, [])
            self._add_files_to_release(
                release, suite_dir, self._get_metafiles(repository)
            )
            # drop the old content and dump the merged release
            release_fd.truncate(0)
            release.dump(release_fd)
        finally:
            fcntl.flock(release_fd.fileno(), fcntl.LOCK_UN)
def _add_package(self, repository, src_url, consumer):
    """Downloads a package into the repository and reports it.

    :param repository: the destination repository
    :param src_url: the URL the package is fetched from
    :param consumer: callback that receives the loaded Package
    """
    relative = self.driver.get_relative_path(
        repository, utils.get_filename_from_uri(src_url))
    destination = utils.get_path_from_url(
        urljoin(repository.url, relative))
    self.context.connection.retrieve(src_url, destination)
    consumer(self.driver.load_package_from_file(repository, relative))
def get_repository(self, connection, url, arch, consumer):
    """Reports the single repository located at *url* to *consumer*.

    :param connection: the connection manager (not used here)
    :param url: the repository location
    :param arch: the repository architecture
    :param consumer: callback that receives the Repository
    """
    consumer(Repository(
        # the repository name is derived from the URL path
        name=utils.get_path_from_url(url, False),
        url=url + "/",
        architecture=arch,
        origin=""
    ))
def _add_to_cache(self, url, cache):
    """Resolves *url* to a local file and stores the mapping in *cache*.

    A non-local URL is fetched into the context's cache directory first.
    """
    path = utils.get_path_from_url(url, ensure_file=False)
    if utils.is_local(url):
        cache[url] = path
        return
    path = os.path.join(
        self.context.cache_dir, utils.get_filename_from_uri(path)
    )
    self.context.connection.retrieve(url, path)
    cache[url] = path
def _get_metafiles(self, repository):
    """Gets the sequence of metafiles for repository.

    Lazily yields the local path of each repository metafile.
    """
    return (
        utils.get_path_from_url(
            self._get_url_of_metafile(repository, metafile)
        )
        for metafile in _REPOSITORY_FILES
    )
def test_get_path_from_url(self):
    """Verifies conversion of URLs to local filesystem paths."""
    # a plain path passes through unchanged
    self.assertEqual(
        "/a/f.txt", utils.get_path_from_url("/a/f.txt")
    )
    # a file:// URL maps to its path; the query string is stripped
    self.assertEqual(
        "/a/f.txt", utils.get_path_from_url("file:///a/f.txt?size=1")
    )
    # a remote URL is rejected by default ...
    with self.assertRaises(ValueError):
        utils.get_path_from_url("http:///a/f.txt")
    # ... but accepted when the local-only check is disabled
    self.assertEqual(
        "/f.txt", utils.get_path_from_url("http://host/f.txt", False)
    )
def _create_repository_structure(self, repository):
    """Lays out the on-disk skeleton of a deb repository.

    Writes the Release file and creates empty Packages and
    Packages.gz indexes.
    """
    packages_path = utils.get_path_from_url(
        self._get_url_of_metafile(repository, "Packages"))
    release_path = utils.get_path_from_url(
        self._get_url_of_metafile(repository, "Release"))
    utils.ensure_dir_exist(os.path.dirname(release_path))

    release = deb822.Release()
    release["Origin"] = repository.origin
    release["Label"] = repository.origin
    # section is expected to be a (suite, component) pair
    release["Archive"] = repository.section[0]
    release["Component"] = repository.section[1]
    release["Architecture"] = _ARCHITECTURES[repository.architecture]
    with open(release_path, "wb") as release_fd:
        release.dump(release_fd)
    # touch empty indexes so the repository is immediately readable
    open(packages_path, "ab").close()
    gzip.open(packages_path + ".gz", "ab").close()
def create_repository(self, repository_data, arch):
    """Builds a Repository object and ensures its directory exists.

    :param repository_data: mapping with 'name', 'uri' and optionally
        'origin'
    :param arch: the repository architecture
    :return: the new Repository
    """
    repo = Repository(
        name=repository_data['name'],
        url=utils.normalize_repository_url(repository_data["uri"]),
        architecture=arch,
        origin=repository_data.get('origin')
    )
    utils.ensure_dir_exist(utils.get_path_from_url(repo.url))
    return repo
def _add_package(self, repository, src_url, consumer):
    """Fetches one package into the repository and hands it to consumer.

    :param repository: the destination repository
    :param src_url: the source URL of the package file
    :param consumer: callback that receives the loaded Package
    """
    dst_path = self.driver.get_relative_path(
        repository, utils.get_filename_from_uri(src_url)
    )
    self.context.connection.retrieve(
        src_url,
        utils.get_path_from_url(urljoin(repository.url, dst_path))
    )
    consumer(
        self.driver.load_package_from_file(repository, dst_path)
    )
def create_repository(self, connection, repository_data, arch):
    """Creates a repository and generates its initial (empty) metadata.

    :param connection: the connection manager
    :param repository_data: mapping with 'name', 'uri' and optionally
        'path' and 'origin'
    :param arch: the repository architecture
    :return: the new Repository
    """
    repo = Repository(
        name=repository_data['name'],
        url=utils.normalize_repository_url(repository_data["uri"]),
        architecture=arch,
        path=repository_data.get('path'),
        origin=repository_data.get('origin')
    )
    utils.ensure_dir_exist(utils.get_path_from_url(repo.url))
    # no packages yet: this only writes the metadata skeleton
    self._rebuild_repository(connection, repo, None, None)
    return repo
def create_repository(self, connection, repository_data, arch):
    """Builds a Repository, prepares its folder and empty metadata.

    :param connection: the connection manager
    :param repository_data: mapping with 'name', 'uri' and optionally
        'path' and 'origin'
    :param arch: the repository architecture
    :return: the new Repository
    """
    url = utils.normalize_repository_url(repository_data["uri"])
    repository = Repository(
        name=repository_data['name'],
        url=url,
        architecture=arch,
        path=repository_data.get('path'),
        origin=repository_data.get('origin')
    )
    utils.ensure_dir_exist(utils.get_path_from_url(url))
    # generate the metadata skeleton without any package entries
    self._rebuild_repository(connection, repository, None, None)
    return repository
def _create_repository_structure(self, repository):
    """Creates the minimal on-disk layout of a deb repository.

    Produces a Release file and empty Packages/Packages.gz indexes.
    """
    packages_file = utils.get_path_from_url(
        self._get_url_of_metafile(repository, "Packages")
    )
    release_file = utils.get_path_from_url(
        self._get_url_of_metafile(repository, "Release")
    )
    utils.ensure_dir_exist(os.path.dirname(release_file))

    meta = deb822.Release()
    meta["Origin"] = repository.origin
    meta["Label"] = repository.origin
    # section holds the (suite, component) pair
    meta["Archive"] = repository.section[0]
    meta["Component"] = repository.section[1]
    meta["Architecture"] = _ARCHITECTURES[repository.architecture]
    with open(release_file, "wb") as fd:
        meta.dump(fd)
    # create the empty indexes (plain and gzipped)
    open(packages_file, "ab").close()
    gzip.open(packages_file + ".gz", "ab").close()
def _rebuild_repository(self, conn, repo, packages, groupstree=None):
    """Regenerates the yum metadata of the repository directory.

    :param conn: the connection manager
    :param repo: the repository to rebuild
    :param packages: the packages to index; None means a fresh build
        with no package entries
    :param groupstree: optional ElementTree with the groups (comps) data
    :raises RuntimeError: if createrepo fails to build the metadata
    """
    basepath = utils.get_path_from_url(repo.url)
    self.logger.info("rebuild repository in %s", basepath)
    md_config = createrepo.MetaDataConfig()
    mdfile_path = os.path.join(
        basepath, md_config.finaldir, md_config.repomdfile
    )
    # incremental update only if metadata already exists on disk
    update = packages is not None and os.path.exists(mdfile_path)
    groupsfile = None
    if groupstree is None and update:
        # The createrepo lose the groups information on update
        # to prevent this set group info manually
        groupstree = self._load_groups(conn, repo)
    if groupstree is not None:
        groupsfile = os.path.join(tempfile.gettempdir(), "groups.xml")
        with open(groupsfile, "w") as fd:
            groupstree.write(fd)
    try:
        md_config.workers = multiprocessing.cpu_count()
        md_config.directory = str(basepath)
        md_config.groupfile = groupsfile
        md_config.update = update
        if not packages:
            # only generate meta-files, without packages info
            md_config.excludes = ["*"]
        mdgen = createrepo.MetaDataGenerator(
            config_obj=md_config,
            callback=CreaterepoCallBack(self.logger)
        )
        mdgen.doPkgMetadata()
        mdgen.doRepoMetadata()
        mdgen.doFinalMove()
    except createrepo.MDError as e:
        err_msg = six.text_type(e)
        self.logger.exception(
            "failed to create yum repository in %s: %s", basepath, err_msg
        )
        shutil.rmtree(
            os.path.join(md_config.outputdir, md_config.tempdir),
            ignore_errors=True
        )
        # BUG FIX: err_msg used to be formatted into the "in {0}"
        # placeholder where a path is expected; report path and cause.
        raise RuntimeError(
            "Failed to create yum repository in {0}: {1}"
            .format(basepath, err_msg))
    finally:
        if groupsfile is not None:
            os.unlink(groupsfile)
def fork_repository(self, repository, destination, options):
    """Creates copy of repositories.

    :param repository: the origin repository
    :param destination: the target folder
    :param options: The options, see RepositoryCopyOptions
    :return: the mapping origin to cloned repository.
    """
    # the cut first '/', because otherwise path.join uses it as is
    relative = (
        repository.path or
        utils.get_path_from_url(repository.url, False)[1:]
    )
    new_path = os.path.join(destination, relative)
    logger.info("cloning repository '%s' to '%s'", repository, new_path)
    return self.driver.fork_repository(
        self.context.connection, repository, new_path, options
    )
def fork_repository(self, repository, destination, source, locale):
    """Creates copy of repositories.

    :param repository: the origin repository
    :param destination: the target folder
    :param source: If True, the source packages will be copied too.
    :param locale: If True, the localisation will be copied too.
    :return: the mapping origin to cloned repository.
    """
    # the cut first '/', because otherwise path.join uses it as is
    suffix = (
        repository.path or
        utils.get_path_from_url(repository.url, False)[1:]
    )
    new_path = os.path.join(destination, suffix)
    logger.info("cloning repository '%s' to '%s'", repository, new_path)
    return self.driver.fork_repository(
        self.context.connection, repository, new_path, source, locale
    )
def fork_repository(self, repository, destination, source, locale):
    """Creates copy of repositories.

    :param repository: the origin repository
    :param destination: the target folder
    :param source: If True, the source packages will be copied too.
    :param locale: If True, the localisation will be copied too.
    :return: the mapping origin to cloned repository.
    """
    # BUG FIX: get_path_from_url returns an absolute path, and
    # os.path.join discards *destination* when a later component is
    # absolute — strip the leading '/' (as the sibling variants do).
    new_path = os.path.join(
        destination,
        repository.path or
        utils.get_path_from_url(repository.url, False)[1:]
    )
    logger.info("cloning repository '%s' to '%s'", repository, new_path)
    return self.driver.fork_repository(
        self.context.connection, repository, new_path, source, locale
    )
def fork_repository(self, repository, destination, options):
    """Creates copy of repositories.

    :param repository: the origin repository
    :param destination: the target folder
    :param options: The options, see RepositoryCopyOptions
    :return: the mapping origin to cloned repository.
    """
    if repository.path:
        tail = repository.path
    else:
        # the cut first '/', because otherwise path.join uses it as is
        tail = utils.get_path_from_url(repository.url, False)[1:]
    new_path = os.path.join(destination, tail)
    logger.info("cloning repository '%s' to '%s'", repository, new_path)
    return self.driver.fork_repository(
        self.context.connection, repository, new_path, options
    )
def _copy_package(self, target, package, observer):
    """Copies one package file into the target repository.

    A package without a repository is treated as a local file; a
    package already in the target is skipped. The observer, if any,
    is told how many bytes were transferred.
    """
    if package.repository is None:
        # standalone file: filename is already a full source location
        src_url = package.filename
        dst_path = self.driver.get_relative_path(
            target, utils.get_filename_from_uri(package.filename))
    elif target.url != package.repository.url:
        src_url = urljoin(package.repository.url, package.filename)
        dst_path = package.filename
    else:
        # already in place, nothing to copy
        return

    bytes_copied = self.context.connection.retrieve(
        src_url,
        utils.get_path_from_url(urljoin(target.url, dst_path)),
        size=package.filesize)
    if package.filesize <= 0:
        # backfill the size from the actual transfer
        package.filesize = bytes_copied
    if observer:
        observer(bytes_copied)
def _rebuild_repository(self, conn, repo, packages, groupstree=None):
    """Rebuilds the yum metadata for the repository directory.

    :param conn: the connection manager
    :param repo: the repository to rebuild
    :param packages: the packages to index; None means a fresh build
        with no package entries
    :param groupstree: optional ElementTree with the groups (comps) data
    :raises RuntimeError: if createrepo fails to build the metadata
    """
    basepath = utils.get_path_from_url(repo.url)
    self.logger.info("rebuild repository in %s", basepath)
    md_config = createrepo.MetaDataConfig()
    mdfile_path = os.path.join(basepath,
                               md_config.finaldir,
                               md_config.repomdfile)
    # incremental update only if metadata already exists on disk
    update = packages is not None and os.path.exists(mdfile_path)
    groupsfile = None
    if groupstree is None and update:
        # The createrepo lose the groups information on update
        # to prevent this set group info manually
        groupstree = self._load_groups(conn, repo)
    if groupstree is not None:
        groupsfile = os.path.join(tempfile.gettempdir(), "groups.xml")
        with open(groupsfile, "w") as fd:
            groupstree.write(fd)
    try:
        # NOTE(review): pinned to a single worker here while another
        # variant uses multiprocessing.cpu_count() — confirm intended.
        md_config.workers = 1
        md_config.directory = str(basepath)
        md_config.groupfile = groupsfile
        md_config.update = update
        if not packages:
            # only generate meta-files, without packages info
            md_config.excludes = ["*"]
        mdgen = createrepo.MetaDataGenerator(config_obj=md_config,
                                             callback=CreaterepoCallBack(
                                                 self.logger))
        mdgen.doPkgMetadata()
        mdgen.doRepoMetadata()
        mdgen.doFinalMove()
    except createrepo.MDError as e:
        err_msg = six.text_type(e)
        self.logger.exception("failed to create yum repository in %s: %s",
                              basepath, err_msg)
        shutil.rmtree(os.path.join(md_config.outputdir, md_config.tempdir),
                      ignore_errors=True)
        # BUG FIX: err_msg used to be formatted into the "in {0}"
        # placeholder where a path is expected; report path and cause.
        raise RuntimeError(
            "Failed to create yum repository in {0}: {1}"
            .format(basepath, err_msg))
    finally:
        if groupsfile is not None:
            os.unlink(groupsfile)
def load_package_from_file(self, repository, filepath):
    """Builds a Package object from an rpm file inside the repository.

    :param repository: the repository owning the file
    :param filepath: the package path relative to the repository URL
    :return: a Package populated from the rpm header
    """
    fullpath = utils.get_path_from_url(repository.url + filepath)
    # single-file generator: size and checksums in one pass
    _, size, checksum = next(iter(
        utils.get_size_and_checksum_for_files(
            [fullpath], _checksum_collector
        )
    ))
    pkg = createrepo.yumbased.YumLocalPackage(filename=fullpath)
    hdr = pkg.returnLocalHeader()
    return Package(
        repository=repository,
        name=hdr["name"],
        version=PackageVersion(
            hdr['epoch'], hdr['version'], hdr['release']
        ),
        filesize=int(hdr['size']),
        filename=filepath,
        checksum=FileChecksum(*checksum),
        mandatory=False,
        requires=self._parse_package_relations(pkg.requires),
        obsoletes=self._parse_package_relations(pkg.obsoletes),
        provides=self._parse_package_relations(pkg.provides),
    )
def _copy_package(self, target, package, observer):
    """Transfers a single package file into the target repository.

    Skips the copy when the package is already in the target; fills in
    the package size from the transfer when it was unknown.
    """
    if package.repository is None:
        # local file: the filename already is the source location
        src_url = package.filename
        dst_path = self.driver.get_relative_path(
            target, utils.get_filename_from_uri(package.filename)
        )
    else:
        if target.url == package.repository.url:
            # nothing to do, it is already in the target repository
            return
        src_url = urljoin(package.repository.url, package.filename)
        dst_path = package.filename

    bytes_copied = self.context.connection.retrieve(
        src_url,
        utils.get_path_from_url(urljoin(target.url, dst_path)),
        size=package.filesize
    )
    if package.filesize <= 0:
        package.filesize = bytes_copied
    if observer:
        observer(bytes_copied)
def load_package_from_file(self, repository, filepath):
    """Loads an rpm package description from a file on disk.

    :param repository: the repository owning the file
    :param filepath: the package path relative to the repository URL
    :return: a Package populated from the rpm header
    """
    fullpath = utils.get_path_from_url(repository.url + filepath)
    _, size, checksum = next(iter(
        utils.get_size_and_checksum_for_files(
            [fullpath], _checksum_collector
        )
    ))
    local_pkg = createrepo.yumbased.YumLocalPackage(filename=fullpath)
    header = local_pkg.returnLocalHeader()
    return Package(
        repository=repository,
        name=header["name"],
        version=RpmPackageVersion(
            header['epoch'], header['version'], header['release']
        ),
        filesize=int(header['size']),
        filename=filepath,
        checksum=FileChecksum(*checksum),
        mandatory=False,
        requires=self._parse_package_relations(local_pkg.requires),
        obsoletes=self._parse_package_relations(local_pkg.obsoletes),
        provides=self._get_provides_from_rpm(local_pkg),
        group=header["group"],
    )
def _update_suite_index(self, repository):
    """Updates the Release file in the suite.

    Holds an exclusive flock while merging the repository's data and
    metafile checksums into the suite-level Release file.
    """
    base = os.path.join(
        utils.get_path_from_url(repository.url),
        "dists",
        repository.section[0]
    )
    release_path = os.path.join(base, "Release")
    self.logger.info(
        "added repository suite release file: %s", release_path
    )
    # a+b creates the file on first use and allows reading it back
    with open(release_path, "a+b") as fd:
        fcntl.flock(fd.fileno(), fcntl.LOCK_EX)
        try:
            fd.seek(0)
            release = deb822.Release(fd)
            self._add_to_release(release, repository)
            for checksum_name in _CHECKSUM_METHODS:
                release.setdefault(checksum_name, [])
            self._add_files_to_release(
                release, base, self._get_metafiles(repository)
            )
            # replace the previous content with the merged release
            fd.truncate(0)
            release.dump(fd)
        finally:
            fcntl.flock(fd.fileno(), fcntl.LOCK_UN)
def load_package_from_file(self, repository, filename):
    """Reads a .deb file and returns its Package description.

    :param repository: the repository owning the file
    :param filename: the package path relative to the repository URL
    :return: a Package populated from the deb control stanza
    """
    filepath = utils.get_path_from_url(repository.url + filename)
    _, size, checksum = next(iter(
        utils.get_size_and_checksum_for_files(
            [filepath], _checksum_collector
        )
    ))
    with closing(debfile.DebFile(filepath)) as deb:
        stanza = deb822.Packages(
            deb.control.get_content(debfile.CONTROL_FILE)
        )
    return Package(
        repository=repository,
        name=stanza["package"],
        version=Version(stanza['version']),
        # declared size wins; on-disk size is the fallback
        filesize=int(stanza.get('size', size)),
        filename=filename,
        checksum=FileChecksum(*checksum),
        mandatory=self._is_mandatory(stanza),
        requires=self._get_relations(
            stanza, "depends", "pre-depends", "recommends"
        ),
        provides=self._get_relations(stanza, "provides"),
        obsoletes=[],
        group=stanza.get('section'),
    )
def _get_metafiles(self, repository):
    """Gets the sequence of metafiles for repository.

    Returns a lazy generator of local metafile paths.
    """
    return (
        utils.get_path_from_url(
            self._get_url_of_metafile(repository, name)
        )
        for name in _REPOSITORY_FILES
    )