def load_package_index(self):
    packages_file = os.path.join(self._repo_dir, "Packages.gz")

    if not os.path.exists(packages_file):
        return {}, ""

    # In raw mode the reader exposes the decompressed stream as a single
    # pseudo-entry. Initialize with bytes, since read_data() returns bytes.
    buf = b""

    with ArchiveFileReader(packages_file, raw=True) as archive:
        for entry in archive:
            buf = archive.read_data()
    #end with

    h = hashlib.sha256()
    h.update(buf)

    text = buf.decode("utf-8")
    index = {}

    # Package stanzas are separated by one or more blank lines.
    for entry in re.split(r"\n\n+", text):
        meta_data = DebianPackageMetaData(entry)

        try:
            name = meta_data["Package"]
            version = meta_data["Version"]
        except KeyError:
            continue

        index.setdefault(name, {})[version] = meta_data
    #end for

    return index, h.hexdigest()
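# Usage sketch (hypothetical): assuming `repo` is an instance of the class
# owning load_package_index(), with `_repo_dir` pointing at a repository
# directory that contains a Packages.gz:
#
#     index, checksum = repo.load_package_index()
#
#     # `index` maps package name -> version -> DebianPackageMetaData.
#     for name, versions in index.items():
#         for version in versions:
#             print(name, version)
#
#     # `checksum` is the SHA-256 hex digest of the uncompressed index; it
#     # can be compared against a stored digest to detect index changes.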
def meta_data(self, debug_pkg=False):
    dep_type_2_str = {
        "requires":  "Depends",
        "provides":  "Provides",
        "conflicts": "Conflicts",
        "replaces":  "Replaces"
    }

    meta = DebianPackageMetaData()

    meta["Package"] = self.name + "-dbg" if debug_pkg else self.name
    meta["Version"] = self.version
    meta["Source"] = self.source
    meta["Architecture"] = self.architecture
    meta["Maintainer"] = self.maintainer

    if debug_pkg:
        meta["Section"] = "debug"
        meta["Depends"] = "%s (= %s)" % (self.name, self.version)
        meta["Description"] = "debug symbols for ELF binaries in "\
            "package '%s'" % self.name
    else:
        meta["Section"] = self.section

        for dep_type in ["requires", "provides", "conflicts", "replaces"]:
            relations = self.relations.get(dep_type)

            if not (relations and relations.list):
                continue

            meta[dep_type_2_str[dep_type]] = str(relations)
        #end for

        meta["Description"] = self.description.summary()

        full_description = self.description.full_description()
        if full_description:
            meta["Description"] += "\n" + full_description
        #end if
    #end if

    return meta
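# Usage sketch (hypothetical): given a package object `pkg` providing the
# attributes referenced above (name, version, relations, description, ...),
# and assuming DebianPackageMetaData can be rendered back into a stanza
# via str():
#
#     regular_stanza = str(pkg.meta_data())
#     debug_stanza   = str(pkg.meta_data(debug_pkg=True))
#
# The debug variant depends on the exact version of its parent package
# (e.g. "Depends: foo (= 1.2.3-1)"), so the two are upgraded in lockstep.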
def extract_control_data(self, filename):
    meta_data = None

    with TemporaryDirectory() as tmpdir:
        with ArchiveFileReader(filename) as archive:
            for entry in archive:
                if not entry.pathname.startswith("control.tar."):
                    continue

                # Copy the control tarball out of the .deb in chunks.
                data_file = os.path.join(tmpdir, entry.pathname)

                with open(data_file, "wb+") as outfile:
                    while True:
                        buf = archive.read_data(4096)
                        if not buf:
                            break
                        outfile.write(buf)
                    #end while
                #end with

                # The pool path is the package's location relative to the
                # repository root.
                pool_path = re.sub(
                    r"^" + re.escape(self._repo_dir) + r"/*", "", filename
                )

                meta_data = DebianPackageMetaData(
                    self._extract_control_data(data_file))
                meta_data["Filename"] = pool_path
                break
            #end for
        #end with
    #end with

    # Guard against malformed packages; previously a missing control
    # archive member would surface as a TypeError on the line below.
    if meta_data is None:
        raise ValueError(
            '"{}" does not contain a control.tar archive member.'
            .format(filename))
    #end if

    meta_data["SHA256"] = self._file_sha256_sum(filename)
    meta_data["Size"] = os.path.getsize(filename)

    return meta_data
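# Usage sketch (hypothetical): feeding the control data of a freshly built
# .deb into a repository index; `repo` is assumed to own both
# extract_control_data() and load_package_index() as defined above, and the
# path below is made up for illustration:
#
#     meta = repo.extract_control_data("pool/main/f/foo/foo_1.0_amd64.deb")
#
#     index, _ = repo.load_package_index()
#     index.setdefault(meta["Package"], {})[meta["Version"]] = meta
#
# "Filename", "SHA256" and "Size" are exactly the per-package fields apt
# needs in a binary index stanza to fetch and verify the file from the pool.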
def _parse_package_list(self, what=SOURCE | BINARY):  # noqa:
    pkg_types = []

    if what & self.SOURCE:
        pkg_types.append("source")
        self.source.clear()
    if what & self.BINARY:
        pkg_types.extend(["binary-{}".format(self.arch), "binary-all"])
        self.binary.clear()

    LOGGER.info("(re)loading package cache, please hold on.")

    for component, base_url in self.sources_list:
        for pocket in self.pockets:
            for type_ in pkg_types:
                if type_ == "source":
                    meta_gz = "Sources.gz"
                    cache = self.source
                else:
                    meta_gz = "Packages.gz"
                    cache = self.binary
                #end if

                meta_file = os.path.join(self._cache_dir, "dists",
                    self.release, component, pocket, type_, meta_gz)

                if not os.path.exists(meta_file):
                    continue

                with ArchiveFileReader(meta_file, raw=True) as archive:
                    try:
                        next(iter(archive))
                    except StopIteration:
                        # The archive is empty.
                        continue

                    buf = archive\
                        .read_data()\
                        .decode("utf-8")

                    # Everything before /dists/ is the base from which
                    # pool/ paths are resolved.
                    pool_base = re.match(
                        r"^(?P<pool_base>https?://.*?)/dists/.*$",
                        base_url).group("pool_base")

                    for chunk in re.split(r"\n\n+", buf):
                        chunk = chunk.strip()
                        if not chunk:
                            continue

                        meta_data = DebianPackageMetaData(
                            chunk, base_url=pool_base)

                        pkg_name = meta_data["Package"]
                        pkg_version = meta_data["Version"]

                        cache\
                            .setdefault(pkg_name, DebianPackageDict())\
                            .setdefault(pkg_version, meta_data)
                    #end for
                #end with
            #end for
        #end for
    #end for

    return (self.source, self.binary)
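# Usage sketch (hypothetical): assuming `cache` is an instance of the
# surrounding cache class, with SOURCE and BINARY bit flags as used above:
#
#     sources, binaries = cache._parse_package_list(what=cache.BINARY)
#
#     pkg_versions = binaries.get("zlib1g")
#     if pkg_versions:
#         for version, meta_data in pkg_versions.items():
#             print(version, meta_data["Filename"])
#
# Passing only one of the flags re-reads just that half of the cache; the
# other dictionary is returned as-is.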
def parse_control_file(self, debian_control_file):
    """
    Parses the control file and creates a list of DebianPackage instances
    stored in self.packages.
    """
    LOGGER.info("parsing metadata from {}".format(debian_control_file))

    with open(debian_control_file, "r", encoding="utf-8") as f:
        content = f.read()

    content = content.strip()
    content = re.sub(r"^\s*\n", r"\n", content, flags=re.M)

    blocks = re.split(r"\n\n+", content)

    # The first non-empty block is the source stanza.
    while True:
        source_meta = DebianPackageMetaData(string=blocks.pop(0))
        if source_meta:
            break
    #end while

    for i in range(0, len(blocks)):
        metadata = DebianPackageMetaData(string=blocks[i])

        if not metadata:
            continue

        tmp_arch_list = metadata\
            .get("Architecture", "any")\
            .split()

        arch_list = []

        for arch in tmp_arch_list:
            if arch.endswith("-any"):
                if arch == "linux-any":
                    arch_list.append("any")
                else:
                    continue
            elif arch.startswith("any-"):
                # Wildcard OS, specific CPU: keep only the CPU part,
                # e.g. "any-amd64" -> "amd64".
                cpu = arch.split("-", 1)[1]
                arch_list.append(cpu)
            else:
                arch_list.append(arch)
        #end for

        skip = True
        for arch in ["all", "any", self.arch]:
            if arch in arch_list:
                skip = False
                break
        #end for

        if skip:
            continue

        package_name = metadata["Package"]

        skip = False
        for suffix in ["-dbg", "-di", "-udeb"]:
            if package_name.endswith(suffix):
                skip = True
                break
        #end for

        if skip:
            continue

        pkg = DebianPackage(
            self._cache,
            package_name,
            version=self.version.full,
            release=self.release,
            arch=self.arch,
            work_dir=self.work_dir
        )

        pkg.metadata["Description"] = metadata.get("Description", "")
        pkg.metadata["Section"] = metadata.get(
            "Section", source_meta["Section"])

        self.packages.append(pkg)
    #end for
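# Usage sketch (hypothetical): `builder` stands for the object that owns
# parse_control_file(), with `arch`, `release`, `version` etc. already set:
#
#     builder.parse_control_file("debian/control")
#
#     for pkg in builder.packages:
#         print(pkg.name, pkg.metadata["Section"])
#
# Stanzas whose Architecture field matches neither "all", "any" nor the
# build architecture are dropped, as are -dbg/-di/-udeb packages.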
def _load_package_list(self, suite, base_url, component, type_,
        update=False, inrelease=None):
    if not inrelease:
        inrelease = self._load_inrelease_file(suite, base_url, update=update)

    cache_dir = os.path.join(self._cache_dir, "dists", self.release,
        suite, component, type_)

    if not os.path.isdir(cache_dir):
        os.makedirs(cache_dir)

    source_url = None

    # Use whichever compressed index the InRelease file actually lists.
    for ext in [".gz", ".xz"]:
        if type_ == "source":
            filename = "Sources" + ext
            source = f"{component}/source/{filename}"
            target = os.path.join(cache_dir, filename)
            cache = self.source
        else:
            filename = "Packages" + ext
            source = f"{component}/{type_}/{filename}"
            target = os.path.join(cache_dir, filename)
            cache = self.binary
        #end if

        try:
            sha256sum = inrelease.hash_for_filename(source)
            source_url = "{}/{}".format(
                base_url, inrelease.by_hash_path(source))
        except KeyError:
            continue
        else:
            break
    #end for

    if not source_url:
        raise DebianPackageCache.Error(
            'unable to locate index file for "{}" in "{}" '
            'suite'.format(type_, suite))
    #end if

    if not os.path.islink(target):
        old_tag = ""
    else:
        old_tag = os.path.basename(os.readlink(target))

    new_tag = None

    if update:
        try:
            downloader = Downloader()

            if not os.path.isdir(cache_dir):
                os.makedirs(cache_dir)

            new_tag = downloader.tag(source_url)

            if old_tag != new_tag:
                downloader.download_named_tag(
                    source_url, target, new_tag, permissions=0o0644)

                if old_tag:
                    try:
                        os.unlink(os.path.join(cache_dir, old_tag))
                    except OSError:
                        pass
                #end if
            #end if
        except Exception as e:
            raise DebianPackageCache.Error(
                'failed to download "{}": {}'.format(source_url, str(e)))
        #end try
    #end if

    try:
        # Verify the cached index against the hash from the InRelease file.
        digest = hashlib.sha256()

        with open(target, "rb") as f:
            for chunk in iter(lambda: f.read(4096), b""):
                digest.update(chunk)
        #end with

        if digest.hexdigest() != sha256sum:
            raise BoltError('wrong hash for "{}".'.format(target))

        with ArchiveFileReader(target, raw=True) as archive:
            try:
                next(iter(archive))
            except StopIteration:
                # The archive is empty.
                buf = ""
            else:
                buf = archive.read_data().decode("utf-8")

            pool_base = re.match(
                r"^(?P<pool_base>https?://.*?)/dists/.*$",
                base_url).group("pool_base")

            for chunk in re.split(r"\n\n+", buf):
                chunk = chunk.strip()
                if not chunk:
                    continue

                meta_data = DebianPackageMetaData(chunk, base_url=pool_base)

                meta_data["Suite"] = suite
                meta_data["Component"] = component

                pkg_name = meta_data["Package"]
                pkg_version = meta_data["Version"]

                cache\
                    .setdefault(pkg_name, DebianPackageDict())\
                    .setdefault(pkg_version, meta_data)
            #end for
        #end with
    except Exception as e:
        # Remove the corrupt index and its tag file before re-raising.
        files_to_delete = [
            target,
            os.path.join(os.path.dirname(target), new_tag or old_tag)
        ]

        for filename in files_to_delete:
            try:
                os.unlink(filename)
            except OSError:
                pass
        #end for

        raise DebianPackageCache.Error(
            'failed to load "{}": {}'.format(target, str(e)))
    #end try
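# Usage sketch (hypothetical): refreshing one binary index; `cache` is
# assumed to be a DebianPackageCache instance. Note that base_url must point
# below dists/, since the pool base is recovered from it by the regex above;
# the concrete values here are made up for illustration:
#
#     cache._load_package_list(
#         suite="stable",
#         base_url="http://deb.debian.org/debian/dists/stable",
#         component="main",
#         type_="binary-amd64",
#         update=True
#     )
#
# On a hash mismatch or download failure, the stale index and its tag file
# are deleted and DebianPackageCache.Error is raised; on success the parsed
# stanzas land in cache.binary keyed by package name and version.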