def process_main(self, item=None):
    """Parse the Release file of every selected release and queue downloads.

    For each release selected on the parent sync step this:
      * builds an ``AptRepoMeta`` from the previously downloaded Release file,
      * gets/creates the ``DebRelease`` and ``DebComponent`` units and
        associates them with the repo (and shields them from orphan cleanup),
      * creates download requests for the relevant ``Packages`` files,
        filtered by the configured components/architectures,
      * hands the combined download list to the download step.

    :param item: unused; present for the PluginStep interface.
    """
    releases = self.parent.releases
    components = self.parent.components
    architectures = self.parent.architectures
    dl_reqs = []
    for release in releases:
        self.verify_release(release)
        # generate repo_metas for Releases
        # NOTE(review): the file handle passed here is never explicitly
        # closed; presumably AptRepoMeta consumes it eagerly — confirm.
        self.parent.apt_repo_meta[release] = repometa = aptrepo.AptRepoMeta(
            release=open(self.parent.release_files[release], "rb"),
            upstream_url=self.parent.feed_urls[release])
        # get release unit
        codename = repometa.codename
        suite = repometa.release.get('suite')
        rel_unit = self.parent.release_units[release] = models.DebRelease.\
            get_or_create_and_associate(self.parent.repo, codename, suite)
        # Prevent this unit from being cleaned up
        try:
            self.parent.deb_releases_to_check.remove(rel_unit)
        except ValueError:
            pass
        # get release component units; a components value of None means
        # "sync everything", otherwise match on the last path segment
        for component in repometa.components:
            if components is None or component.split(
                    '/')[-1] in components:
                comp_unit = self.parent.component_units[release][component] = \
                    models.DebComponent.get_or_create_and_associate(
                        self.parent.repo, rel_unit, component)
                self.parent.component_packages[release][component] = []
                # Prevent this unit from being cleaned up
                try:
                    self.parent.deb_comps_to_check.remove(comp_unit)
                except ValueError:
                    pass
        # generate download requests for all relevant packages files
        rel_dl_reqs = repometa.create_Packages_download_requests(
            self.get_working_dir())
        # Filter the rel_dl_reqs by selected components and architectures
        if components:
            rel_dl_reqs = [
                dlr for dlr in rel_dl_reqs
                if dlr.data['component'].split('/')[-1] in components]
        if architectures:
            rel_dl_reqs = [
                dlr for dlr in rel_dl_reqs
                if dlr.data['architecture'] in architectures]
        # remember the URLs we intend to fetch for this release
        self.parent.packages_urls[release] = {
            dlr.url for dlr in rel_dl_reqs}
        dl_reqs.extend(rel_dl_reqs)
    # hand everything to the download step in its own request type
    self.parent.step_download_Packages._downloads = [
        DownloadRequest(dlr.url, dlr.destination, data=dlr.data)
        for dlr in dl_reqs]
def process_main(self, item=None):
    """Parse the downloaded Release file and queue Packages downloads.

    Builds an ``AptRepoMeta`` from the parent step's Release file, stores
    it on the parent, enforces that the feed exposes exactly one
    component/architecture combination, and then hands the ``Packages``
    download requests to the download step.

    :param item: unused; present for the PluginStep interface.
    :raises PulpCodedTaskFailedException: if the remote repository does
        not contain exactly one component/architecture.
    """
    repo_metadata = aptrepo.AptRepoMeta(
        release=open(self.parent.release_file, "rb"),
        upstream_url=self.parent.feed_url)
    self.parent.apt_repo_meta = repo_metadata
    comp_arch_list = list(repo_metadata.iter_component_arch_binaries())
    comp_count = len(comp_arch_list)
    if comp_count != 1:
        raise PulpCodedTaskFailedException(
            DEBSYNC001, repo_id=self.get_repo().repo_obj.repo_id,
            feed_url=self.parent.feed_url,
            comp_count=comp_count)
    package_requests = repo_metadata.create_Packages_download_requests(
        self.get_working_dir())
    self.parent.step_download_Packages._downloads = [
        DownloadRequest(req.url, req.destination, data=req.data)
        for req in package_requests]
def process_main(self, unit=None):
    """Publish all units into a single fixed release/component layout.

    Creates an apt repository in the working directory using the class's
    ``Codename`` and ``Component`` for a hard-coded 'amd64' architecture,
    symlinking every unit's stored file into place and signing per the
    repo's GPG configuration.

    :param unit: unused; present for the PluginStep interface.
    """
    architecture = 'amd64'
    gpg_options = configuration.get_gpg_sign_options(
        self.get_repo(), self.get_config())
    package_paths = [
        pkg.storage_path for pkg in self.parent.publish_units.units]
    metadata = aptrepo.AptRepoMeta(
        codename=self.Codename,
        components=[self.Component],
        architectures=[architecture])
    apt_repository = aptrepo.AptRepo(
        self.get_working_dir(),
        repo_name=self.get_repo().id,
        metadata=metadata,
        gpg_sign_options=gpg_options)
    apt_repository.create(
        package_paths,
        component=self.Component,
        architecture=architecture,
        with_symlinks=True)
def process_main(self, item=None):
    """Publish packages grouped by release, component and architecture.

    For every release unit, groups the component's packages by
    architecture (folding architecture 'all' packages into every concrete
    architecture) and writes a signed apt repository tree. Additionally
    publishes "generic" releases covering all packages in one component:
    'stable/main' when no release units exist (old style repository), and
    'default/all' when the publish config requests a default release.

    :param item: unused; present for the PluginStep interface.
    """
    units = self.parent.publish_units.units
    comp_units = self.parent.publish_components.units
    release_units = self.parent.publish_releases.units
    sign_options = configuration.get_gpg_sign_options(
        self.get_repo(), self.get_config())
    for release_unit in release_units:
        codename = release_unit.codename
        rel_components = [
            comp for comp in comp_units if comp.release == codename]
        architectures = set()
        comp_arch_units = {}
        for component_unit in rel_components:
            # group units by architecture (all, amd64, armeb, ...)
            arch_units = defaultdict(list)
            for unit in [unit for unit in units
                         if unit.id in component_unit.packages]:
                arch_units[unit.architecture].append(unit)
            # architecture 'all' is special; append it to all other
            # architectures
            all_units = arch_units.pop('all', [])
            for arch in arch_units:
                arch_units[arch].extend(all_units)
                architectures.add(arch)
            comp_arch_units[component_unit.name] = arch_units
        repometa = aptrepo.AptRepoMeta(
            codename=codename,
            components=[comp.name for comp in rel_components],
            architectures=list(architectures),
        )
        # TODO Get the suite to work in debpkgr
        # repometa.release.setdefault('Suite', suite)
        arepo = aptrepo.AptRepo(self.get_working_dir(),
                                repo_name=self.get_repo().id,
                                metadata=repometa,
                                gpg_sign_options=sign_options)
        for component in comp_arch_units:
            # .items() instead of .iteritems(): identical on Python 2,
            # forward-compatible with Python 3
            for architecture, ca_units in comp_arch_units[component].items():
                filenames = [unit.storage_path for unit in ca_units]
                arepo.create(filenames, component=component,
                             architecture=architecture, with_symlinks=True)
    # Prepare generic releases containing all packages in one component
    generic_release_names = []
    # In case, no release_units were available (old style repository),
    # publish as 'stable/main'
    if not release_units:
        generic_release_names.append(('stable', 'main'))
    # create a special release with one component to include all packets
    if self.get_config().get(constants.PUBLISH_DEFAULT_RELEASE_KEYWORD,
                             False):
        generic_release_names.append(('default', 'all'))
    # only do this, iff necessary
    if generic_release_names:
        # collect all package units and
        # group units by architecture (all, amd64, armeb, ...)
        architectures = set()
        arch_units = defaultdict(list)
        for unit in units:
            arch_units[unit.architecture].append(unit)
        # architecture 'all' is special; append it to all other
        # architectures
        all_units = arch_units.pop('all', [])
        for arch in arch_units:
            arch_units[arch].extend(all_units)
            architectures.add(arch)
        for codename, component_name in generic_release_names:
            repo_meta = aptrepo.AptRepoMeta(
                codename=codename,
                components=[component_name],
                architectures=list(architectures),
            )
            arepo = aptrepo.AptRepo(self.get_working_dir(),
                                    repo_name=self.get_repo().id,
                                    metadata=repo_meta,
                                    gpg_sign_options=sign_options)
            # .items() instead of .iteritems(): identical on Python 2,
            # forward-compatible with Python 3
            for architecture, a_units in arch_units.items():
                filenames = [unit.storage_path for unit in a_units]
                arepo.create(filenames, component=component_name,
                             architecture=architecture, with_symlinks=True)