async def info_for_run(self, run_id, suite_name, package):
    with tempfile.TemporaryDirectory() as td:
        await self.artifact_manager.retrieve_artifacts(
            run_id, td, timeout=DEFAULT_GCS_TIMEOUT
        )
        p = subprocess.Popen(
            ["dpkg-scanpackages", td],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        for para in Packages.iter_paragraphs(p.stdout):
            para["Filename"] = os.path.join(
                suite_name,
                "pkg",
                package,
                run_id,
                os.path.basename(para["Filename"]),
            )
            yield bytes(para)
            yield b"\n"
        for line in p.stderr.readlines():
            if line.startswith(b'dpkg-scanpackages: '):
                line = line[len(b'dpkg-scanpackages: '):]
            if line.startswith(b'info: '):
                logging.debug('%s', line.rstrip(b'\n').decode())
            elif line.startswith(b'warning: '):
                logging.warning('%s', line.rstrip(b'\n').decode())
            elif line.startswith(b'error: '):
                logging.error('%s', line.rstrip(b'\n').decode())
            else:
                logging.info(
                    'dpkg-scanpackages error: %s',
                    line.rstrip(b'\n').decode())
        await asyncio.sleep(0)

def fetch_debian_packages(release: str, suites: list | None = None,
                          mirror: str = 'https://deb.debian.org/debian'):
    debian_packages = defaultdict(list)
    if suites is None:
        suites = ['main']
    for suite in suites:
        repo_url = f'{mirror}/dists/{release}/{suite}/binary-amd64/Packages.gz'
        remote_packages = requests.get(repo_url)
        for pkg in Packages.iter_paragraphs(gzip.decompress(remote_packages.content),
                                            use_apt_pkg=False):
            debian_packages[pkg['Package']].append(pkg['Version'])
    return debian_packages

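# Hedged usage sketch (not part of the original snippet): it assumes the excerpt's
# own dependencies are in scope, i.e. `from collections import defaultdict`,
# `import gzip`, `import requests`, and `from debian.deb822 import Packages`.
# The release name and suite list below are illustrative values only.
if __name__ == '__main__':
    versions = fetch_debian_packages('bookworm', suites=['main', 'contrib'])
    # Each key is a binary package name; each value is the list of versions
    # seen across the requested suites.
    print(versions.get('bash'))
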
def read_modules(package, core=False):
    # type: (str, bool) -> List[UMC_Module]
    """
    Read |UMC| module definition from :file:`debian/<package>.umc-modules`.

    :param package: Name of the package.
    :param core: Import as core-module, e.g. the ones shipped with |UDM| itself.
    :returns: List of |UMC| module definitions.
    """
    modules = []  # type: List[UMC_Module]

    file_umc_module = os.path.join('debian/', package + '.umc-modules')
    file_control = os.path.join('debian/control')
    if not os.path.isfile(file_umc_module):
        return modules

    provides = []
    with io.open(file_control, 'r', encoding='utf-8') as fd_control:
        with warnings.catch_warnings():  # debian/deb822.py:982: UserWarning: cannot parse package relationship "${python3:Depends}", returning it raw
            for pkg in Packages.iter_paragraphs(fd_control):
                if pkg.get('Package') == package:
                    provides = [p[0]['name'] for p in pkg.relations['provides']]
                    break

    with open(file_umc_module, 'rb') as fd_umc:
        for item in Deb822.iter_paragraphs(fd_umc):
            item = dict((k, [v]) for k, v in item.items())  # simulate dh_ucs.parseRfc822 behaviour
            # required fields
            if not core:
                for required in (MODULE, PYTHON, DEFINITION, JAVASCRIPT):
                    if not item.get(required):
                        raise Error('UMC module definition incomplete. key %s missing' % (required,))
            # single values
            item['package'] = package
            item['provides'] = provides
            module = UMC_Module(item)
            if core:
                if module.module_name != 'umc-core' or not module.xml_categories:
                    raise Error('Module definition does not match core module')
            modules.append(module)
    return modules

def parse_debrepo_arch(basedir: str) -> list:
    """
    Parse a single architecture directory of a Debian repository.

    Expected structure:
      * Packages
      * Release (stripped)
    """
    result_data = []
    with open(os.path.join(basedir, 'Packages')) as f:
        while True:
            p = dict(Packages(f))
            if p == {}:
                break
            result_data.append(p)
    return result_data

def parse_packages_file_content(file_content):
    packages = []
    for para in Packages.iter_paragraphs(StringIO(file_content)):
        args = {}
        for key, value in para.iteritems():
            key = key.lower()
            if key == 'md5sum':
                key = 'md5'
            elif key == 'package':
                key = 'name'
            elif key == 'size':
                value = int(value)
            if key in FetchedPackage._equality_attributes:
                args[key] = value
        packages.append(FetchedPackage(**args))
    return packages

def section(self, arch, name):
    if name not in self.components:
        raise ValueError("Invalid components name '%s'" % name)
    if arch == 'source':
        fname = '%s/source/Source' % name
    else:
        fname = '%s/binary-%s/Packages' % (name, arch)
    for suf in ('.gz', '.xz', '.bz2', ''):
        fullname = fname + suf
        if fullname not in self._top:
            continue
        with self._top.getfile(fullname) as F:
            info = {}
            for pkg in Packages.iter_paragraphs(F, use_apt_pkg=True):
                info[pkg['Package']] = pkg
            return info
    raise RuntimeError("Package listing not available with any known suffix")

def find_debs(archive, suite, component, arch, source, version):
    url = find_dsc(archive, suite, component, source, version)
    if url[:7] == "http://":
        dsc = Dsc2(StringIO(requests.get(url).content))
    else:
        dsc = Dsc2(filename=url)

    components = [component]
    for line in dsc['Package-List']:
        if "/" in line['section']:
            component, _ = line['section'].split("/")
            if component not in components:
                components.append(component)

    filenames = []
    for component in components:
        url = "{archive}/dists/{suite}/{component}/binary-{arch}/Packages.gz".format(
            archive=archive,
            suite=suite,
            component=component,
            arch=arch,
        )
        if url[:7] == "http://":
            packages = GzipFile(fileobj=StringIO(requests.get(url).content))
        else:
            packages = GzipFile(filename=url)
        for entry in Packages.iter_paragraphs(packages):
            name = entry['Source'] if 'Source' in entry else entry['Package']
            if (name == source and entry['Version'] == version) or \
                    (name == "%s (%s)" % (source, version)):
                filenames.append(entry['Filename'])

    if filenames == []:
        raise Exception("Damnit, no such packages?")

    ret = []
    for filename in filenames:
        url = "{archive}/{filename}".format(
            archive=archive,
            filename=filename,
        )
        ret.append(url)
    return ret

def _compress(self, filename):
    """
    Compress given file.

    :param str filename: The name of the file to compress.
    """
    self.log.debug('Compressing %s ...', filename)
    sorter = subprocess.Popen(
        ('apt-sortpkgs', filename + '.tmp'),
        stdout=subprocess.PIPE,
    )
    tee = subprocess.Popen(
        ('tee', filename),
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
    )
    gzip = subprocess.Popen(
        ('gzip',),
        stdin=tee.stdout,
        stdout=open(filename + '.gz', 'wb'),
    )
    tee.stdout.close()

    prev = None
    for pkg in Packages.iter_paragraphs(sorter.stdout):
        if prev:
            if prev["Package"] != pkg["Package"]:
                tee.stdin.write("%s\n" % prev)
            elif prev["Filename"].startswith('../') and not pkg["Filename"].startswith('../'):
                pass
            elif not prev["Filename"].startswith('../') and pkg["Filename"].startswith('../'):
                continue
            elif apt_pkg.version_compare(prev["Version"], pkg["Version"]) >= 0:
                continue
        prev = pkg
    if prev:
        tee.stdin.write("%s\n" % prev)
    tee.stdin.close()

    rc_sorter, rc_tee, rc_gzip = sorter.wait(), tee.wait(), gzip.wait()
    self.log.debug('sorter=%d tee=%d gzip=%d', rc_sorter, rc_tee, rc_gzip)
    os.remove(filename + '.tmp')

def extract_deb_packages(data, url):
    """ Extract package metadata from debian Packages file """
    extracted = extract(data, url)
    package_re = re.compile(b'^Package: ', re.M)
    plen = len(package_re.findall(extracted))
    packages = set()
    if plen > 0:
        ptext = 'Extracting packages: '
        progress_info_s.send(sender=None, ptext=ptext, plen=plen)
        bio = BytesIO(extracted)
        for i, stanza in enumerate(Packages.iter_paragraphs(bio)):
            # https://github.com/furlongm/patchman/issues/55
            if 'version' not in stanza:
                continue
            fullversion = Version(stanza['version'])
            arch = stanza['architecture']
            name = stanza['package']
            epoch = fullversion._BaseVersion__epoch
            if epoch is None:
                epoch = ''
            version = fullversion._BaseVersion__upstream_version
            release = fullversion._BaseVersion__debian_revision
            if release is None:
                release = ''
            progress_update_s.send(sender=None, index=i + 1)
            package = PackageString(name=name,
                                    epoch=epoch,
                                    version=version,
                                    release=release,
                                    arch=arch,
                                    packagetype='D')
            packages.add(package)
    else:
        info_message.send(sender=None, text='No packages found in repo')
    return packages

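# Hedged alternative sketch (not part of the original code): the version class
# in debian.debian_support exposes public accessors, so the name-mangled
# _BaseVersion__* lookups above can usually be replaced with them. The helper
# name below is purely illustrative.
def split_debian_version(raw_version):
    v = Version(raw_version)
    # epoch and debian_revision may be None for versions like "1.2.3"
    return (v.epoch or '', v.upstream_version, v.debian_revision or '')
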
def _set_description(spec, fcontrol, fchangelog):
    # Get packages info
    packages = Packages.iter_paragraphs(fcontrol)
    info = utils.get_package_info(packages)
    if not utils.validate_packages(info['packages']):
        _set_spec_status(spec.id, -2)
        spec.add_log(_('Package rejected (incomplete package)'))
        return {'status': 'fail', 'code': 406, 'msg': _('Incomplete package')}

    name = info['name']
    priority = info['priority']
    section = info['section']

    # The package must have a name
    if name is None:
        _set_spec_status(spec.id, -2)
        spec.add_log(_('Package rejected (package name not found)'))
        return {'status': 'fail', 'code': 406, 'msg': _('Package name not found')}

    # Check if this package is registered
    try:
        pkg = RepoPackage.objects.get(name=name)
    except RepoPackage.DoesNotExist:
        _set_spec_status(spec.id, -2)
        spec.add_log(_('Package rejected (unregistered package: %(name)s)') %
                     {'name': name})
        return {'status': 'fail', 'code': 406,
                'msg': _('Unregistered package: %(name)s') % {'name': name}}

    # Check if this package is registered for the selected distribution
    try:
        pkgdist = pkg.packagedistribution_set.get(distribution=spec.distribution.repo)
    except PackageDistribution.DoesNotExist:
        _set_spec_status(spec.id, -2)
        spec.add_log(_('Package rejected (not available in %(name)s distribution)') %
                     {'name': spec.distribution.repo.name})
        return {'status': 'fail', 'code': 406,
                'msg': _('Unregistered package: %(name)s') % {'name': name}}

    # Get package version
    c = Changelog(fchangelog)
    version = str(c.version)
    dist = c.distributions.split('-')[0]
    target_dist = spec.distribution.repo.name.split('-')[0]
    if dist != target_dist:
        _set_spec_status(spec.id, -2)
        spec.add_log(_('Package rejected (distribution mismatch: %(dist)s)') %
                     {'dist': dist})
        return {'status': 'fail', 'code': 406,
                'msg': _('Distribution mismatch: %(dist)s') % {'dist': dist}}

    # Get last changelog content
    last_changelog = None
    if len(c._blocks) > 0:
        last_changelog = str(c._blocks[0]).replace('\n\n', '\n').strip()

    # Save packages info
    spec.package = pkg
    spec.version = version
    spec.changelog = last_changelog
    spec.priority = priority
    spec.section = section
    spec.save()
    utils.store_package_info(spec, info)

    return {'status': 'ok', 'package': name, 'version': version}

from debian.deb822 import Release, Packages
import requests

BASE_REPO = "https://dl.bintray.com/vamega/personal-debian-server/"
distribution = 'stretch'

release_file_url = BASE_REPO + 'dists/' + distribution + '/Release'
resp = requests.get(release_file_url)

if resp.status_code == 200:
    release = Release(resp.content)
    items = [l for l in release['SHA256'] if l['name'].endswith('Packages')]
    print(items)

    useful_keys = [
        'Package',
        'Version',
        'Filename',
        'SHA1',
        'SHA256',
        'Size',
    ]

    for item in items:
        package_url = BASE_REPO + 'dists/' + distribution + '/' + item['name']
        resp = requests.get(package_url)
        if resp.status_code == 200:
            for package in Packages.iter_paragraphs(resp.content):
                subsection = {k: package[k] for k in useful_keys}
                print(subsection)

def update_dists_files(self):
    """
    Rewrite from
    | /var/lib/univention-repository/mirror/
    |   4.0/
    |     maintained/
    | >>>   4.0-1/                          <<<
    |         amd64/
    |           Packages    (relative to <<<)
    |           *.deb
    to
    | dists/
    |   ucs401/
    |     main/
    |       binary-amd64/
    |         Packages      (relative to >>>)
    |         Release
    |       debian-installer/
    |         binary-amd64/
    |           Packages    (relative to >>>)
    |           Release

    >> from sys import stderr
    >> logging.basicConfig(stream=stderr, level=logging.DEBUG)
    >> m = UniventionMirror(False)
    >> m.update_dists_files()
    """
    # iterate over all local repositories
    repos = self.list_local_repositories(
        start=self.version_start,
        end=self.version_end,
        unmaintained=False)
    for outdir, version, is_maintained in repos:
        self.log.info('Processing %s...', version)
        start_version = UCS_Version((version.major, 0, 0))
        dist = 'univention' if version.major < 4 else 'ucs%(major)d%(minor)d%(patchlevel)d' % version
        archs = []
        for arch in self.architectures:
            prev = [
                (dir2, os.path.join(dir2, arch2, 'Packages'))
                for (dir2, ver2, maint2) in repos
                for arch2 in (arch, 'all')
                if (start_version <= ver2 <= version and
                    os.path.exists(os.path.join(dir2, arch2, 'Packages')))
            ]
            if not prev:
                self.log.warn('No file "Packages" found for %s', arch)
                continue
            prev.reverse()
            archs.append(arch)

            main_name = os.path.join(outdir, 'dists', dist, 'main', 'binary-%s' % arch, 'Packages')
            inst_name = os.path.join(outdir, 'dists', dist, 'main', 'debian-installer', 'binary-%s' % arch, 'Packages')
            self.log.debug('Generating %s and %s ...', main_name, inst_name)
            makedirs(os.path.dirname(main_name))
            makedirs(os.path.dirname(inst_name))
            main = open(main_name + '.tmp', 'w')
            inst = open(inst_name + '.tmp', 'w')
            try:
                for dir2, src_name in prev:
                    self.log.debug('Appending %s ...', src_name)
                    indir = os.path.dirname(dir2)
                    with open(src_name, 'r') as src:
                        for pkg in Packages.iter_paragraphs(src):
                            abs_deb = os.path.join(indir, pkg['Filename'])
                            pkg['Filename'] = os.path.relpath(abs_deb, outdir)
                            dst = inst if pkg['Section'] == 'debian-installer' else main
                            pkg.dump(dst)
                            dst.write('\n')
            finally:
                main.close()
                inst.close()
            self._compress(main_name)
            self._compress(inst_name)

            rel_name = os.path.join(outdir, 'dists', dist, 'main', 'binary-%s' % arch, 'Release')
            self.log.debug('Generating %s ...', rel_name)
            with open(rel_name, 'w') as rel:
                print >> rel, 'Archive: stable'
                print >> rel, 'Origin: Univention'
                print >> rel, 'Label: Univention'
                print >> rel, 'Version: %(major)d.%(minor)d.%(patchlevel)d' % version
                print >> rel, 'Component: main'
                print >> rel, 'Architecture: %s' % (arch,)

        if archs:
            self._release(outdir, dist, archs, version)

for source_stanza in Sources.iter_paragraphs(
    sequence=gzip_reader,
    encoding='utf-8',
):
    source = Source(
        source_stanza['package'],
        Version(source_stanza['version']),
        stanza=source_stanza,
    )
    sources.setdefault(source.name, {})[source.version] = source

for f in runtime_package_lists:
    test.diag('Examining runtime %s...' % f)
    with GzipFile(f, 'rb') as gzip_reader:
        for binary_stanza in Packages.iter_paragraphs(
            sequence=gzip_reader,
            encoding='utf-8',
        ):
            binary = Binary(binary_stanza, binary_version_marker='+srt')
            if binary.name not in packages_txt_binary_sources:
                test.not_ok(
                    'Runtime %s contains %s, not listed in packages.txt'
                    % (f, binary.name))
            elif packages_txt_binary_sources[binary.name] != binary.source:
                test.not_ok(
                    'packages.txt thinks %s is built by source %s, '
                    'but %s_%s_%s was built by %s_%s' % (
                        binary.name,
                        packages_txt_binary_sources[binary.name],
                        binary.name, binary.version, binary.arch,
                        binary.source, binary.source_version))

def fetch_packages(self):
    fp = self.fetch_indexed_file('Packages')
    return Packages.iter_paragraphs(fp, use_apt_pkg=False)

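# Hedged usage sketch (not part of the original snippet): `repo` stands in for
# whatever object implements fetch_indexed_file()/fetch_packages() above; the
# keys read below are standard Packages-index field names.
for stanza in repo.fetch_packages():
    print(stanza.get('Package'), stanza.get('Version'), stanza.get('Filename'))
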
for source_stanza in Sources.iter_paragraphs(
    sequence=gzip_reader,
    encoding='utf-8',
):
    source = Source(
        source_stanza['package'],
        Version(source_stanza['version']),
        stanza=source_stanza,
    )
    sources.setdefault(source.name, {})[source.version] = source

for f in runtime_package_lists:
    test.diag('Examining runtime %s...' % f)
    with GzipFile(f, 'rb') as gzip_reader:
        for binary_stanza in Packages.iter_paragraphs(
            sequence=gzip_reader,
            encoding='utf-8',
        ):
            binary = Binary(binary_stanza, binary_version_marker='+srt')
            if (
                binary.source not in sources
                or binary.source_version not in sources[binary.source]
            ):
                test.not_ok(
                    'source package %s_%s for %s not found'
                    % (binary.source, binary.source_version, binary.name))
            else:
                test.ok('%s source package in Sources' % binary.name)

#!venv/bin/python
import json
import subprocess

from debian.deb822 import Packages
from elasticsearch import Elasticsearch

res = dict()
for pkg in Packages.iter_paragraphs(open('packages')):
    pkgdict = dict(pkg)
    res[pkgdict["Package"]] = pkgdict

json.dump(res, open('Packages.json', 'w'))

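# Hedged sketch (not part of the original script): the Elasticsearch import above
# is unused; if the intent was to index each stanza, something along these lines
# would do it. The server URL and index name are assumptions.
# es = Elasticsearch('http://localhost:9200')
# for name, pkgdict in res.items():
#     es.index(index='debian-packages', id=name, document=pkgdict)
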