def get_maintainer(self, package):
    """Return the split (name, address) of the maintainer of *package*.

    Resolution order: pseudo-package overrides first, then the binary
    package's candidate version in the apt cache, then the newest
    matching source record (after stripping an optional "src:" prefix).

    Raises:
        NewDataSource.DataError: if no maintainer can be determined.
    """
    # Pseudo-packages override everything else.
    maintainer = pseudo_packages.get_maintainer(package)
    if maintainer is not None:
        return split_address(maintainer)
    # Try the binary package records of the candidate version.
    if package in self.cache:
        candidate = self.depcache.get_candidate_ver(self.cache[package])
        if candidate is not None:
            records = apt_pkg.PackageRecords(self.cache)
            if records.lookup(candidate.file_list[0]):
                return split_address(records.maintainer)
            # delete record iterator
            del records
    # Fall back to source records; accept an explicit "src:" prefix.
    if package.startswith("src:"):
        package = package[4:]
    records = apt_pkg.SourceRecords()
    if records.lookup(package):
        version = records.version
        maintainer = records.maintainer
        # BUG FIX: lookup() returns a boolean, never None, so the old
        # "while records.lookup(package) is not None" condition was
        # always true and could spin forever once records ran out.
        # Iterate the remaining records, keeping the highest version.
        while records.lookup(package):
            if apt_pkg.version_compare(records.version, version) > 0:
                version = records.version
                maintainer = records.maintainer
        return split_address(maintainer)
    raise NewDataSource.DataError(
        "Unable to get maintainer for {}.".format(package)
    )
def apt_get_source_package(name: str) -> Deb822:
    """Fetch the metadata record for source package *name* via apt.

    Args:
      name: Name of the source package

    Returns:
      A `Deb822` object for the newest available version

    Raises:
      NoAptSources: if no 'deb-src' URIs are configured
      NoSuchPackage: if no source record matches *name*
    """
    import apt_pkg

    apt_pkg.init()
    try:
        records = apt_pkg.SourceRecords()
    except apt_pkg.Error as e:
        if e.args[0] == (
                "E:You must put some 'deb-src' URIs in your sources.list"):
            raise NoAptSources()
        raise

    # Collect the raw record text for every available version.
    candidates: Dict[str, Deb822] = {}
    while records.lookup(name):
        candidates[records.version] = records.record  # type: ignore

    if not candidates:
        raise NoSuchPackage(name)

    # Try the latest version
    latest = sorted(candidates, key=Version)[-1]
    return Deb822(candidates[latest])
def fetch_source(self, destdir="", progress=None, unpack=True):
    """Get the source code of a package.

    The parameter *destdir* specifies the directory where the source
    will be fetched to.

    The parameter *progress* may refer to an apt_pkg.AcquireProgress()
    object. If not specified or None,
    apt.progress.text.AcquireProgress() is used.

    The parameter *unpack* describes whether the source should be
    unpacked (``True``) or not (``False``). By default, it is unpacked.

    If *unpack* is ``True``, the path to the extracted directory is
    returned. Otherwise, the path to the .dsc file is returned.
    """
    src = apt_pkg.SourceRecords()
    acq = apt_pkg.Acquire(progress or apt.progress.text.AcquireProgress())

    dsc = None
    record = self._records
    # Binary packages may be built from a differently-named source.
    source_name = record.source_pkg or self.package.shortname
    source_version = record.source_ver or self._cand.ver_str

    # Advance the record cursor until the record for exactly this
    # source version is current (or the records are exhausted).
    source_lookup = src.lookup(source_name)
    while source_lookup and source_version != src.version:
        source_lookup = src.lookup(source_name)
    if not source_lookup:
        raise ValueError("No source for %r" % self)

    files = list()
    for md5, size, path, type_ in src.files:
        base = os.path.basename(path)
        destfile = os.path.join(destdir, base)
        if type_ == 'dsc':
            # Remember the .dsc location for unpacking / the return value.
            dsc = destfile
        # Skip downloads whose size and md5 already match on disk.
        if _file_is_same(destfile, size, md5):
            print('Ignoring already existing file: %s' % destfile)
            continue
        files.append(apt_pkg.AcquireFile(acq, src.index.archive_uri(path),
                     md5, size, base, destfile=destfile))

    # Run all queued downloads, then verify every item completed.
    acq.run()

    for item in acq.items:
        if item.status != item.STAT_DONE:
            raise FetchError("The item %r could not be fetched: %s" %
                             (item.destfile, item.error_text))

    if unpack:
        # Extract into "<package>-<upstream-version>" under destdir.
        outdir = src.package + '-' + apt_pkg.upstream_version(src.version)
        outdir = os.path.join(destdir, outdir)
        subprocess.check_call(["dpkg-source", "-x", dsc, outdir])
        return os.path.abspath(outdir)
    else:
        return os.path.abspath(dsc)
def get_binaries_for_source_pkg(srcname):
    """Return every binary package name built from the given source.

    :param srcname: The source package name.
    :return: A set of binary package names.
    """
    records = apt_pkg.SourceRecords()
    names = set()
    # One record per available version of the source package; merge
    # the binary lists of all matching records.
    while records.lookup(srcname):
        names.update(records.binaries)
    return names
async def check(self, external_data):
    """Check a Debian repo for a newer version of *external_data*.

    Reads repo coordinates from checker_data, queries either the source
    records or the binary cache, and records any newer version found.
    """
    assert self.should_check(external_data)

    LOG.debug("Checking %s", external_data.filename)
    package_name = external_data.checker_data["package-name"]
    root = external_data.checker_data["root"]
    dist = external_data.checker_data["dist"]
    component = external_data.checker_data.get("component", "")
    src_pkg = external_data.checker_data.get("source", False)

    # A "dist" not ending in "/" needs a component to form a valid
    # pool URL; bail out (without error) when neither is usable.
    if not component and not dist.endswith("/"):
        LOG.warning(
            '%s is missing Debian repo "component"; for an '
            'exact URL, "dist" must end with /',
            package_name,
        )
        return

    arch = self._translate_arch(external_data.arches[0])
    with self._load_repo(root, dist, component, arch, src_pkg) as cache:
        if src_pkg:
            src_record = apt_pkg.SourceRecords()
            source_version, source_files = None, None
            # Walk all records for the package; the last match wins.
            # NOTE(review): assumes the last record is the newest —
            # confirm against the repo's record ordering.
            while src_record.lookup(package_name):
                source_version, source_files = src_record.version, src_record.files
            if not source_version:
                raise ValueError(f"No source package {package_name}")
            # Only the upstream tarball entry is of interest here.
            source_file = next(f for f in source_files if f.type == "tar")
            new_version = ExternalFile(
                urllib.parse.urljoin(root.rstrip("/") + "/", source_file.path),
                # hashes.find("sha256") stringifies as "SHA256:<hex>";
                # keep only the hex digest.
                str(source_file.hashes.find("sha256")).split(":")[1],
                source_file.size,
                re.sub(r"^\d+:", "", source_version),  # Strip epoch if present
                timestamp=None,
            )
        else:
            package = cache[package_name]
            candidate = package.candidate
            new_version = ExternalFile(
                candidate.uri,
                candidate.sha256,
                candidate.size,
                candidate.version,
                timestamp=_get_timestamp_for_candidate(candidate),
            )
        external_data.set_new_version(new_version)
def main():
    # Smoke test (Python 2): attempt a source-record lookup for every
    # binary package in the cache, printing progress as a percentage.
    apt_pkg.init()
    cache = apt_pkg.Cache()
    i = 0
    print "Running PkgSrcRecords test on all packages:"
    for x in cache.Packages:
        i += 1
        # A fresh SourceRecords object per package — presumably to
        # reset the lookup cursor each iteration; confirm intent.
        src = apt_pkg.SourceRecords()
        if src.Lookup(x.Name):
            #print src.Package
            pass
        # Trailing comma suppresses the newline (Python 2), so "\r"
        # keeps the progress counter on a single line.
        print "\r%i/%i=%.3f%% " % (i, cache.PackageCount, (float(i) / float(cache.PackageCount) * 100)),
def test_source_records_smoke(self):
    """Step through all source records, type-checking each file entry
    both via tuple unpacking and via the named getters."""
    src = apt_pkg.SourceRecords()
    while src.step():
        for f in src.files:
            # unpacking as a tuple works as before
            md5, size, path, type_ = f
            self.assertTrue(isinstance(md5, str))
            self.assertTrue(isinstance(size, long))
            self.assertTrue(isinstance(path, str))
            self.assertTrue(isinstance(type_, str))
            # access using getters
            self.assertTrue(isinstance(f.hashes, apt_pkg.HashStringList))
            self.assertTrue(isinstance(f.size, long))
            self.assertTrue(isinstance(f.path, str))
            self.assertTrue(isinstance(f.type, str))
def look_up(self, name, url):
    """Resolve a git URL for a Debian package from its Dgit field.

    *name* may be "package" or "package/version".  The Dgit control
    field is split on spaces; item 2 is the git tag and item 3 the
    repository location.

    Raises:
        DependencyNotPresent: if apt_pkg is not installed.
        urlutils.InvalidURL: if no usable Dgit data is found.
    """
    if "/" in name:
        (name, version) = name.split("/", 1)
    else:
        version = None
    try:
        import apt_pkg
    except ImportError as e:
        raise DependencyNotPresent('apt_pkg', e)
    apt_pkg.init()

    sources = apt_pkg.SourceRecords()

    # Map each available version to its space-split Dgit field.
    urls = {}
    while sources.lookup(name):
        control = Deb822(sources.record)
        pkg_version = control["Version"]
        try:
            urls[pkg_version] = control["Dgit"].split(' ')
        except KeyError:
            # Record without a Dgit field — skip it.
            pass
    if len(urls) == 0:
        raise urlutils.InvalidURL(path=url, extra='no URLs found')
    if version is None:
        # Try the latest version
        version = sorted(urls, key=Version)[-1]
    if version not in urls:
        raise urlutils.InvalidURL(
            path=url, extra='version %s not found' % version)
    # BUG FIX: the code below indexes items [2] (tag) and [3] (URL),
    # so at least four items are required; the old guard ("< 3") let a
    # three-item field through and crashed with IndexError.
    if len(urls[version]) < 4:
        raise urlutils.InvalidURL(
            path=url,
            extra='dgit header does not have location information')
    url = urlutils.join_segment_parameters(
        urls[version][3],
        {"tag": urlutils.quote(urls[version][2], '')})
    note("Resolved package URL from Debian package %s/%s: %s",
         name, version, url)
    return url
def test_source_records_smoke(self):
    """Verify every field of the single dh-autoreconf source record
    against the known fixture values (tuple access and getters)."""
    src = apt_pkg.SourceRecords()
    self.assertTrue(src.step())
    self.assertEqual(src.maintainer, "Julian Andres Klode <*****@*****.**>")  # nopep8
    self.assertEqual(src.binaries, ["dh-autoreconf"])
    self.assertEqual(src.package, "dh-autoreconf")
    self.assertEqual(2, len(src.files))

    # unpacking as a tuple works as before
    md5, size, path, type_ = f = src.files[0]
    self.assertEqual(md5, "6576a28fe1918ce10bd31543ba545901")
    self.assertEqual(size, 1578)
    self.assertEqual(path, "dh-autoreconf_16.dsc")
    self.assertEqual(type_, "dsc")

    # access using getters
    self.assertTrue(isinstance(f.hashes, apt_pkg.HashStringList))
    self.assertEqual(str(f.hashes[0]), "SHA512:4b1a3299f2a8b01b0c75db97fd16cb39919949c74d19ea6cf28e1bbd4891d3515b3e2b90b96a64df665cebf6d95409e704e670909ae91fcfe92409ee1339bffc")  # nopep8
    self.assertEqual(str(f.hashes[1]), "Checksum-FileSize:1578")
    self.assertEqual(str(f.hashes[2]), "SHA256:1c1b2ab5f1ae5496bd50dbb3c30e9b7d181a06c8d02ee8d7e9c35ed6f2a69b5f")  # nopep8
    self.assertEqual(str(f.hashes[3]), "SHA1:c9bf7a920013021dad5fbd898dfd5a79c7a150f9")  # nopep8
    self.assertEqual(str(f.hashes[4]), "MD5Sum:6576a28fe1918ce10bd31543ba545901")  # nopep8
    self.assertEqual(f.size, 1578)
    self.assertEqual(f.path, "dh-autoreconf_16.dsc")
    self.assertEqual(f.type, "dsc")

    # unpacking as a tuple works as before
    md5, size, path, type_ = f = src.files[1]
    self.assertEqual(md5, "302c8bf43db02412e3f2197fd0f2ee0f")
    self.assertEqual(size, 7372)
    self.assertEqual(path, "dh-autoreconf_16.tar.xz")
    self.assertEqual(type_, "tar")

    # access using getters
    self.assertTrue(isinstance(f.hashes, apt_pkg.HashStringList))
    self.assertEqual(str(f.hashes[0]), "SHA512:10448dd179ec12bf4310a9a514110a85f56e51893aa36a97ac3a6f8d7ce99d099e62cfdb78e271e2d94431e8832da0f643de821b6643b80e3f0b0f5d682cf9a9")  # nopep8
    self.assertEqual(str(f.hashes[1]), "Checksum-FileSize:7372")  # nopep8
    self.assertEqual(str(f.hashes[2]), "SHA256:5c6a6a362907327bec77a867ff3fd0eceba8015d1b881b48275aff7e4ce0f629")  # nopep8
    self.assertEqual(str(f.hashes[3]), "SHA1:58459600164398ad6807ddd877a6f814c799c62c")  # nopep8
    self.assertEqual(str(f.hashes[4]), "MD5Sum:302c8bf43db02412e3f2197fd0f2ee0f")  # nopep8
    self.assertEqual(f.size, 7372)
    self.assertEqual(f.path, "dh-autoreconf_16.tar.xz")
    self.assertEqual(f.type, "tar")

    # Exactly one record in the fixture.
    self.assertFalse(src.step())
def select_apt_packages(package_names, maintainer):
    """Return source package names passing the given filters.

    *package_names*, when non-empty, restricts results to those names;
    *maintainer*, when non-empty, restricts results to packages whose
    maintainer email is in that collection.  Either filter may be
    falsy, in which case it is skipped.
    """
    import apt_pkg

    apt_pkg.init()
    records = apt_pkg.SourceRecords()
    selected = []
    while records.step():
        if package_names and records.package not in package_names:
            continue
        if maintainer:
            _fullname, email = parseaddr(records.maintainer)
            if email not in maintainer:
                continue
        selected.append(records.package)
    return selected
def fetch_tarballs(self, package, upstream_version, target_dir,
                   _apt_pkg=None, components=None):
    """Fetch the upstream orig tarball(s) for *package*/*upstream_version*
    into *target_dir* using apt's source records.

    *_apt_pkg* allows injecting a stand-in apt_pkg module for testing.
    Returns the list of fetched tarball paths, or raises
    PackageVersionNotPresent when apt cannot provide them.
    """
    if _apt_pkg is None:
        try:
            import apt_pkg
        except ImportError as e:
            raise DependencyNotPresent('apt_pkg', e)
    else:
        apt_pkg = _apt_pkg
    apt_pkg.init()

    # Handle the case where the apt.sources file contains no source
    # URIs (LP:375897)
    try:
        sources = apt_pkg.SourceRecords()
    except SystemError:
        raise PackageVersionNotPresent(package, upstream_version, self)

    sources.restart()
    note("Using apt to look for the upstream tarball.")
    # One iteration per available record for *package*; any record
    # whose file list contains matching orig tarballs is a candidate.
    while sources.lookup(package):
        filenames = []
        for (checksum, size, filename, filekind) in sources.files:
            if filekind != "tar":
                continue
            filename = os.path.basename(filename)
            # Only "<package>_<version>.orig*" tarballs are wanted.
            if filename.startswith(
                    "%s_%s.orig" % (package, upstream_version)):
                filenames.append(filename)
        if filenames:
            if self._run_apt_source(package, sources.version, target_dir):
                return [
                    os.path.join(target_dir, filename)
                    for filename in filenames
                ]
            else:
                # Keep looking — another record may be downloadable.
                note("apt found %s/%s but could not download.",
                     package, sources.version)
    note("apt could not find %s/%s.", package, upstream_version)
    raise PackageVersionNotPresent(package, upstream_version, self)
def look_up(self, name, url):
    """Resolve a VCS URL for a Debian package from apt source records.

    *name* may be "package" or "package/version".  Raises InvalidURL
    when the package, the requested version, or a VCS URL cannot be
    found; raises DependencyNotPresent when apt_pkg is missing.
    """
    name, sep, rest = name.partition("/")
    version = rest if sep else None
    try:
        import apt_pkg
    except ImportError as e:
        raise DependencyNotPresent('apt_pkg', e)
    apt_pkg.init()

    records = apt_pkg.SourceRecords()
    # Gather the raw control record text for each available version.
    candidates = {}
    while records.lookup(name):
        candidates[records.version] = records.record
    if not candidates:
        raise urlutils.InvalidURL(path=url, extra='package not found')
    if version is None:
        # No explicit version requested: pick the newest one.
        version = sorted(candidates, key=Version)[-1]
    if version not in candidates:
        raise urlutils.InvalidURL(
            path=url, extra='version %s not found' % version)
    control = Deb822(candidates[version])
    try:
        vcs, url = source_package_vcs_url(control)
    except KeyError:
        note("Retrieving Vcs locating from %s Debian version %s",
             name, version)
        raise urlutils.InvalidURL(path=url, extra='no VCS URL found')
    note("Resolved package URL from Debian package %s/%s: %s",
         name, version, url)
    return url
def get_source_package(name):
    """Get source package metadata.

    Args:
      name: Name of the source package

    Returns:
      A `Deb822` object for the newest available version

    Raises:
      NoSuchPackage: if no source record matches *name*
    """
    apt_pkg.init()
    records = apt_pkg.SourceRecords()

    # One entry per available version: version string -> raw record.
    versions = {}
    while records.lookup(name):
        versions[records.version] = records.record

    if not versions:
        raise NoSuchPackage(name)

    # Try the latest version
    latest = sorted(versions, key=Version)[-1]
    return Deb822(versions[latest])
def _count(self):
    """Tally how often each package name appears in the build-depends
    of all source records under self.rootdir.

    Returns a dict mapping package name to occurrence count.  Raises
    NoAptSources when no 'deb-src' URIs are configured.
    """
    import apt_pkg

    apt_pkg.init()
    apt_pkg.config.set("Dir", self.rootdir)
    try:
        records = apt_pkg.SourceRecords()
    except apt_pkg.Error as e:
        if (e.args[0] ==
                "E:You must put some 'deb-src' URIs in your sources.list"):
            raise NoAptSources()
        raise

    records.restart()
    tally = {}
    while records.step():
        try:
            # build_depends maps dependency kind -> list of OR-groups,
            # each OR-group a list of (name, version, op) tuples.
            for dep_group in records.build_depends.values():
                for alternatives in dep_group:
                    for alternative in alternatives:
                        pkg = alternative[0]
                        tally[pkg] = tally.get(pkg, 0) + 1
        except AttributeError:
            # Record without build-depends information.
            pass
    return tally
#!/usr/bin/python import apt_pkg apt_pkg.init() #cache = apt_pkg.Cache() #sources = apt_pkg.SourceRecords(cache) sources = apt_pkg.SourceRecords() sources.Restart() while sources.Lookup('hello'): print sources.Package, sources.Version, sources.Maintainer, \ sources.Section, `sources.Binaries` print sources.Files print sources.Index.ArchiveURI(sources.Files[0][2])
def get_changelog(self, uri=None, cancel_lock=None):
    """
    Download the changelog of the package and return it as unicode
    string.

    The parameter *uri* refers to the uri of the changelog file. It
    may contain multiple named variables which will be substitued.
    These variables are (src_section, prefix, src_pkg, src_ver). An
    example is the Ubuntu changelog::

        "http://changelogs.ubuntu.com/changelogs/pool" \\
            "/%(src_section)s/%(prefix)s/%(src_pkg)s" \\
            "/%(src_pkg)s_%(src_ver)s/changelog"

    The parameter *cancel_lock* refers to an instance of
    threading.Lock, which if set, prevents the download.
    """
    # Return a cached changelog if available
    if self._changelog != u"":
        return self._changelog
    if uri is None:
        # NOTE(review): this branch is a no-op; presumably a missing
        # candidate should abort here — confirm intended behavior.
        if not self.candidate:
            pass
        if self.candidate.origins[0].origin == "Debian":
            uri = "http://packages.debian.org/changelogs/pool" \
                  "/%(src_section)s/%(prefix)s/%(src_pkg)s" \
                  "/%(src_pkg)s_%(src_ver)s/changelog"
        elif self.candidate.origins[0].origin == "Ubuntu":
            uri = "http://changelogs.ubuntu.com/changelogs/pool" \
                  "/%(src_section)s/%(prefix)s/%(src_pkg)s" \
                  "/%(src_pkg)s_%(src_ver)s/changelog"
        else:
            res = _("The list of changes is not available")
            return res if isinstance(res, unicode) else res.decode("utf-8")

    # get the src package name
    src_pkg = self.candidate.source_name

    # assume "main" section
    src_section = "main"
    # use the section of the candidate as a starting point
    section = self.candidate.section

    # get the source version
    src_ver = self.candidate.source_version

    try:
        # try to get the source version of the pkg, this differs
        # for some (e.g. libnspr4 on ubuntu)
        # this feature only works if the correct deb-src are in the
        # sources.list otherwise we fall back to the binary version number
        src_records = apt_pkg.SourceRecords()
    except SystemError:
        pass
    else:
        while src_records.lookup(src_pkg):
            if not src_records.version:
                continue
            if self.candidate.source_version == src_records.version:
                # Direct match, use it and do not do more lookups.
                src_ver = src_records.version
                section = src_records.section
                break
            if apt_pkg.version_compare(src_records.version, src_ver) > 0:
                # The version is higher, it seems to match.
                src_ver = src_records.version
                section = src_records.section

    # A section like "universe/libs" carries the component as prefix.
    section_split = section.split("/", 1)
    if len(section_split) > 1:
        src_section = section_split[0]
    del section_split

    # lib is handled special: pool prefix is "lib<first letter>".
    prefix = src_pkg[0]
    if src_pkg.startswith("lib"):
        prefix = "lib" + src_pkg[3]

    # strip epoch ("1:2.0" -> "2.0") — pool paths never carry epochs
    src_ver_split = src_ver.split(":", 1)
    if len(src_ver_split) > 1:
        src_ver = "".join(src_ver_split[1:])
    del src_ver_split

    uri = uri % {"src_section": src_section,
                 "prefix": prefix,
                 "src_pkg": src_pkg,
                 "src_ver": src_ver}

    timeout = socket.getdefaulttimeout()

    # FIXME: when python2.4 vanishes from the archive,
    #        merge this into a single try..finally block (pep 341)
    try:
        try:
            # Set a timeout for the changelog download
            socket.setdefaulttimeout(2)

            # Check if the download was canceled
            if cancel_lock and cancel_lock.isSet():
                return u""
            # FIXME: python3.2: Should be closed manually
            changelog_file = urllib2.urlopen(uri)
            # do only get the lines that are new
            changelog = u""
            regexp = "^%s \((.*)\)(.*)$" % (re.escape(src_pkg))
            while True:
                # Check if the download was canceled
                if cancel_lock and cancel_lock.isSet():
                    return u""
                # Read changelog line by line
                line_raw = changelog_file.readline()
                if not line_raw:
                    break
                # The changelog is encoded in utf-8, but since there isn't
                # any http header, urllib2 seems to treat it as ascii
                line = line_raw.decode("utf-8")

                #print line.encode('utf-8')

                match = re.match(regexp, line)
                if match:
                    # strip epoch from installed version
                    # and from changelog too
                    installed = getattr(self.installed, 'version', None)
                    if installed and ":" in installed:
                        installed = installed.split(":", 1)[1]
                    changelog_ver = match.group(1)
                    if changelog_ver and ":" in changelog_ver:
                        changelog_ver = changelog_ver.split(":", 1)[1]

                    # Stop once we reach an entry that is not newer
                    # than the installed version.
                    if (installed and apt_pkg.version_compare(
                            changelog_ver, installed) <= 0):
                        break
                # EOF (shouldn't really happen)
                changelog += line

            # Print an error if we failed to extract a changelog
            if len(changelog) == 0:
                changelog = _("The list of changes is not available")
                if not isinstance(changelog, unicode):
                    changelog = changelog.decode("utf-8")
            self._changelog = changelog

        except urllib2.HTTPError:
            res = _("The list of changes is not available yet.\n\n"
                    "Please use http://launchpad.net/ubuntu/+source/%s/"
                    "%s/+changelog\n"
                    "until the changes become available or try again "
                    "later.") % (src_pkg, src_ver)
            return res if isinstance(res, unicode) else res.decode("utf-8")
        except (IOError, httplib.BadStatusLine):
            res = _("Failed to download the list of changes. \nPlease "
                    "check your Internet connection.")
            return res if isinstance(res, unicode) else res.decode("utf-8")
    finally:
        socket.setdefaulttimeout(timeout)
    return self._changelog
def look_up(self, name, url):
    """Resolve a VCS URL for a Debian package from its (X-)Vcs-* fields.

    *name* may be "package" or "package/version".  Scans apt's source
    records for Vcs control fields and returns a URL for a VCS breezy
    can access, preferring Bzr, then Svn, Git and Hg.

    Raises:
        errors.InvalidURL: if the package, the requested version, or a
            supported VCS cannot be found.
    """
    if "/" in name:
        (name, version) = name.split("/", 1)
    else:
        version = None
    apt_pkg.init()

    # Older versions of apt_pkg don't have SourceRecords,
    # newer versions give a deprecation warning when using
    # GetPkgSrcRecords.
    try:
        sources = apt_pkg.SourceRecords()
    except AttributeError:
        sources = apt_pkg.GetPkgSrcRecords()

    urls = {}
    lookup = getattr(sources, 'lookup', None) or sources.Lookup
    while lookup(name):
        record = getattr(sources, 'record', None) or sources.Record
        # Parse the raw control record: remember the Version field and
        # collect every (X-)Vcs-<kind> field under that version.
        for l in record.splitlines():
            if ": " not in l:
                continue
            (field, value) = l.strip("\n").split(": ", 1)
            if field == "Version":
                pkg_version = value
            elif field.startswith("X-Vcs-") or field.startswith("Vcs-"):
                vcs = field.split("-")[-1]
                urls.setdefault(pkg_version, {})[vcs] = value
    if len(urls) == 0:
        raise errors.InvalidURL(path=url, extra='no URLs found')
    if version is None:
        # Try the latest version
        from functools import cmp_to_key
        version_compare = getattr(
            apt_pkg, 'version_compare',
            getattr(apt_pkg, 'VersionCompare', None))
        # BUG FIX: Python 3's sorted() has no cmp= argument, and the
        # old code selected index [0] — the *oldest* version — despite
        # intending the latest.  Sort ascending and take the last.
        version = sorted(urls, key=cmp_to_key(version_compare))[-1]
    if version not in urls:
        raise errors.InvalidURL(
            path=url, extra='version %s not found' % version)
    note("Retrieving Vcs locating from %s Debian version %s",
         name, version)
    if "Bzr" in urls[version]:
        return urls[version]["Bzr"]
    if "Svn" in urls[version]:
        # The imports below only probe plugin availability.
        try:
            from .. import svn
        except ImportError:
            note("This package uses subversion. If you would like to "
                 "access it with bzr then please install brz-svn "
                 "and re-run the command.")
        else:
            return urls[version]["Svn"]
    if "Git" in urls[version]:
        try:
            from .. import git
        except ImportError:
            note("This package uses git. If you would like to "
                 "access it with bzr then please install brz-git "
                 "and re-run the command.")
        else:
            from breezy import urlutils
            url = urls[version]["Git"]
            # A "url -b branch" suffix selects a specific branch.
            if ' -b ' in url:
                (url, branch) = url.split(' -b ', 1)
                url = urlutils.join_segment_parameters(
                    url, {'branch': branch})
            return url
    if "Hg" in urls[version]:
        try:
            from .. import hg
        except ImportError:
            note("This package uses hg. If you would like to "
                 "access it with bzr then please install brz-hg"
                 "and re-run the command.")
        else:
            return urls[version]["Hg"]
    raise errors.InvalidURL(
        path=url,
        extra='unsupported VCSes %r found' % urls[version].keys())
file, index = version.FileList.pop(0) records.Lookup((file, index)) if records.SourcePkg != "": srcpkg = records.SourcePkg else: srcpkg = pkg.Name return srcpkg # main apt_pkg.init() cache = apt_pkg.Cache() depcache = apt_pkg.DepCache(cache) depcache.Init() records = apt_pkg.PackageRecords(cache) srcrecords = apt_pkg.SourceRecords() # base package that we use for build-depends calculation if len(sys.argv) < 2: print "need a package name as argument" sys.exit(1) try: pkg = base = cache[sys.argv[1]] except KeyError: print "No package %s found" % sys.argv[1] sys.exit(1) all_build_depends = set() # get the build depdends for the package itself srcpkg_name = get_source_pkg(base, records, depcache) print "srcpkg_name: %s " % srcpkg_name
async def check(self, external_data: ExternalBase):
    """Check a Debian repo for a newer version of *external_data*.

    Reads repo coordinates from checker_data, queries either the
    source records or the binary cache, and records any newer version
    found on the data object.
    """
    assert self.should_check(external_data)

    LOG.debug("Checking %s", external_data.filename)
    package_name = external_data.checker_data["package-name"]
    root = external_data.checker_data["root"]
    dist = external_data.checker_data["dist"]
    component = external_data.checker_data.get("component", "")
    src_pkg = external_data.checker_data.get("source", False)

    # A "dist" not ending in "/" needs a component to form a valid
    # pool URL; bail out (without error) when neither is usable.
    if not component and not dist.endswith("/"):
        LOG.warning(
            '%s is missing Debian repo "component"; for an '
            'exact URL, "dist" must end with /',
            package_name,
        )
        return

    arch = self._translate_arch(external_data.arches[0])
    cache: apt.Cache
    with self._load_repo(root, dist, component, arch, src_pkg) as cache:
        if src_pkg:
            src_record = apt_pkg.SourceRecords()
            source_version, source_files = None, None
            # Walk all records for the package; the last match wins.
            # NOTE(review): assumes the last record is the newest —
            # confirm against the repo's record ordering.
            while src_record.lookup(package_name):
                source_version, source_files = src_record.version, src_record.files
            if not source_version:
                raise ValueError(f"No source package {package_name}")
            assert source_files is not None
            # Only the upstream tarball entry is of interest here.
            source_file = next(f for f in source_files if f.type == "tar")
            src_url = urllib.parse.urljoin(
                root.rstrip("/") + "/", source_file.path)
            new_version = ExternalFile(
                url=src_url,
                checksum=read_deb_hashes(source_file.hashes),
                size=source_file.size,
                # Strip epoch if present
                version=re.sub(r"^\d+:", "", source_version),
                timestamp=await get_timestamp_from_url(src_url, self.session),
            )
        else:
            package = cache[package_name]
            candidate = package.candidate
            assert candidate is not None
            assert candidate.uri is not None
            new_version = ExternalFile(
                url=candidate.uri,
                # FIXME: apt.package.Version.{md5,sha1,sha256} can raise an exception
                # if given hash isn't set, while sha512 isn't accessible at all.
                # Raw hashes are handy, but accessible only through protected property.
                checksum=read_deb_hashes(candidate._records.hashes),
                size=candidate.size,
                version=candidate.version,
                timestamp=await self._get_timestamp_for_candidate(candidate),
            )
        external_data.set_new_version(new_version)
def _load(self):
    """Regenerates the fake configuration and loads the packages caches.

    Builds a synthetic sources.list whose entries point at locally
    cached index files (symlinked into apt's lists directory under a
    fake "http://apt-p2p" URI), then loads the apt caches from it.
    Returns True on success, False when no index files are available.
    """
    if self.loaded:
        return True

    # Modify the default configuration to create the fake one.
    apt_pkg.init_system()
    # Wipe and recreate the lists directory (plus its "partial"
    # subdirectory, which apt requires for in-progress downloads).
    self.cache_dir.preauthChild(
        self.apt_config['Dir::State']).preauthChild(
            self.apt_config['Dir::State::Lists']).remove()
    self.cache_dir.preauthChild(
        self.apt_config['Dir::State']).preauthChild(
            self.apt_config['Dir::State::Lists']).child(
                'partial').makedirs()
    sources_file = self.cache_dir.preauthChild(
        self.apt_config['Dir::Etc']).preauthChild(
            self.apt_config['Dir::Etc::sourcelist'])
    sources = sources_file.open('w')
    sources_count = 0
    deb_src_added = False
    self.packages.check_files()
    self.indexrecords = {}

    # Create an entry in sources.list for each needed index file
    for f in self.packages:
        # we should probably clear old entries from self.packages and
        # take into account the recorded mtime as optimization
        file = self.packages[f]
        if f.split('/')[-1] == "Release":
            self.addRelease(f, file)
        # Fabricate a flat-repo URI so apt can address this index.
        fake_uri = 'http://apt-p2p' + f
        fake_dirname = '/'.join(fake_uri.split('/')[:-1])
        if f.endswith('Sources'):
            # A Sources index means source records will be available.
            deb_src_added = True
            source_line = 'deb-src ' + fake_dirname + '/ /'
        else:
            source_line = 'deb ' + fake_dirname + '/ /'
        listpath = self.cache_dir.preauthChild(
            self.apt_config['Dir::State']).preauthChild(
                self.apt_config['Dir::State::Lists']).child(
                    apt_pkg.uri_to_filename(fake_uri))
        sources.write(source_line + '\n')
        log.msg("Sources line: " + source_line)
        sources_count = sources_count + 1

        if listpath.exists():
            #we should empty the directory instead
            listpath.remove()
        # Expose the cached index file where apt expects the list file.
        os.symlink(file.path, listpath.path)
    sources.close()

    if sources_count == 0:
        log.msg("No Packages files available for %s backend" %
                (self.cache_dir.path))
        return False

    log.msg("Loading Packages database for " + self.cache_dir.path)
    for key, value in self.apt_config.items():
        apt_pkg.config[key] = value

    self.cache = apt_pkg.Cache(OpProgress())
    self.records = apt_pkg.PackageRecords(self.cache)
    if deb_src_added:
        self.srcrecords = apt_pkg.SourceRecords()
    else:
        self.srcrecords = None

    self.loaded = True
    return True