# Example 1
    def _binary_deb_list_contents_impl(self, filename, tmpdir):
        """
        Extract the embedded data tarball from the Debian binary package
        `filename` into `tmpdir` and return a content listing.

        Returns a list of [path, type, mode, uname, gname] entries where
        type is one of stat.S_IFDIR, stat.S_IFLNK, stat.S_IFREG.

        Raises BoltError if the package contains no data tarball or if an
        archive entry has an unsupported type.
        """
        data_tarball = None

        # Locate the "data.tar.*" member and copy it out in chunks.
        with ArchiveFileReader(filename) as archive:
            for entry in archive:
                if entry.pathname.startswith("data.tar"):
                    data_tarball = os.path.join(tmpdir, entry.pathname)

                    with open(data_tarball, "wb+") as f:
                        for chunk in iter(lambda: archive.read_data(4096),
                                          b""):
                            f.write(chunk)
                        #end for
                    #end with

                    break
                #end if
            #end for
        #end with

        if not data_tarball:
            # Report the package file itself; data_tarball is None on this
            # path (the original formatted the None into the message).
            raise BoltError("binary package %s contains no data." %
                            filename)

        contents = []

        # parse data file entries and build content listing
        with ArchiveFileReader(data_tarball) as archive:
            for entry in archive:
                entry_path = self.fix_path(entry.pathname)

                # Skip implicit directories and doc/l10n/menu paths, which
                # are not part of the tracked content listing.
                if entry.is_directory and self.is_path_implicit(entry_path):
                    continue
                if self.is_doc_path(entry_path):
                    continue
                if self.is_l10n_path(entry_path):
                    continue
                if self.is_menu_path(entry_path):
                    continue

                if entry.is_directory:
                    entry_type = stat.S_IFDIR
                elif entry.is_symbolic_link:
                    entry_type = stat.S_IFLNK
                elif entry.is_file or entry.is_hardlink:
                    entry_type = stat.S_IFREG
                else:
                    # entry_type is unassigned in this branch; referencing
                    # it (as the original did) raised NameError instead of
                    # the intended BoltError.
                    raise BoltError("type of '%s' unknown" % entry_path)

                contents.append([
                    entry_path, entry_type, entry.mode, entry.uname,
                    entry.gname
                ])
            #end for
        #end with

        return contents
    def load_package_index(self):
        """
        Load and parse the repository's Packages.gz index.

        Returns a tuple (index, checksum): index maps package name to a
        dict of version -> DebianPackageMetaData, and checksum is the
        SHA-256 hex digest of the decompressed index. Returns ({}, "")
        when no Packages.gz exists.
        """
        packages_file = os.path.join(self._repo_dir, "Packages.gz")

        if not os.path.exists(packages_file):
            return {}, ""

        # Must be bytes: hashlib's update() rejects str. The original
        # initialized this to "" and crashed on an empty archive.
        buf = b""

        with ArchiveFileReader(packages_file, raw=True) as archive:
            for entry in archive:
                buf = archive.read_data()

        h = hashlib.sha256()
        h.update(buf)

        text = buf.decode("utf-8")
        index = {}

        # Index stanzas are separated by blank lines.
        for entry in re.split(r"\n\n+", text, flags=re.MULTILINE):
            meta_data = DebianPackageMetaData(entry)

            try:
                name    = meta_data["Package"]
                version = meta_data["Version"]
            except KeyError:
                # Incomplete stanza (e.g. trailing whitespace block).
                continue

            index.setdefault(name, {})[version] = meta_data
        #end for

        return index, h.hexdigest()
    def unpack(self, source_dir=".", source_cache=None):
        """
        Fetch and unpack every source archive listed in self.sources into
        source_dir (optionally below a per-source subdirectory).

        Raises PackagingError when an archive cannot be retrieved.
        Returns self to allow chaining.
        """
        for source, upstream_source, subdir, sha256sum in self.sources:
            archive_file = self._retrieve_archive_file(
                source,
                upstream_source,
                sha256sum,
                source_cache=source_cache,
            )

            if not (archive_file and os.path.isfile(archive_file)):
                raise PackagingError(
                    "source archive for '%s' not found." % source)

            target_dir = os.path.normpath(source_dir + os.sep + subdir)
            os.makedirs(target_dir, exist_ok=True)

            LOGGER.info("unpacking {}".format(archive_file))

            match = re.match(r"^(.*?\.debdiff)\.(?:gz|xz|bz2)$",
                             os.path.basename(archive_file))

            if not match:
                # Regular source tarball: unpack, dropping the top-level
                # directory.
                with ArchiveFileReader(archive_file) as archive:
                    archive.unpack_to_disk(base_dir=target_dir,
                                           strip_components=1)
                continue

            # Compressed debdiff: decompress it to the target directory
            # under its uncompressed name.
            with ArchiveFileReader(archive_file, raw=True) as archive:
                try:
                    next(iter(archive))
                except StopIteration:
                    continue

                outfile = os.path.join(target_dir, match.group(1))
                with open(outfile, "wb+") as f:
                    while True:
                        chunk = archive.read_data(4096)
                        if not chunk:
                            break
                        f.write(chunk)

        return self
    def _extract_control_data(self, filename):
        """
        Return the stripped contents of the "control" member of the given
        control tarball, with continuation lines removed. Returns None
        implicitly when the archive has no "control" entry.
        """
        with ArchiveFileReader(filename) as archive:
            for entry in archive:
                if entry.pathname != "control":
                    continue

                raw = archive.read_data().decode("utf-8")

                # Drop folded/continuation lines (those that begin with
                # whitespace) so only the top-level fields remain.
                flattened = re.sub(
                    r"^\s+.*?$\n?", "", raw, flags=re.MULTILINE)

                return flattened.strip()
    def extract_control_data(self, filename):
        """
        Extract control metadata from the Debian binary package `filename`.

        Returns a DebianPackageMetaData instance with "Filename", "SHA256"
        and "Size" fields filled in.

        Raises BoltError if the package contains no control tarball.
        """
        meta_data = None

        with TemporaryDirectory() as tmpdir:
            with ArchiveFileReader(filename) as archive:
                for entry in archive:
                    if not entry.pathname.startswith("control.tar."):
                        continue

                    # Copy the control tarball out in chunks.
                    data_file = os.path.join(tmpdir, entry.pathname)

                    with open(data_file, "wb+") as outfile:
                        while True:
                            buf = archive.read_data(4096)
                            if not buf:
                                break
                            outfile.write(buf)
                        #end while
                    #end with

                    # Path of the package relative to the repository root.
                    pool_path = re.sub(
                        r"^" + re.escape(self._repo_dir) + r"/*",
                        "",
                        filename
                    )

                    meta_data = DebianPackageMetaData(
                        self._extract_control_data(data_file))

                    meta_data["Filename"] = pool_path

                    break
                #end for
            #end with
        #end with

        if meta_data is None:
            # The original fell through and crashed with an AttributeError
            # on the subscript below when no control tarball was found.
            raise BoltError(
                "binary package %s contains no control data." % filename)

        meta_data["SHA256"] = self._file_sha256_sum(filename)
        meta_data["Size"]   = os.path.getsize(filename)

        return meta_data
# Example 6
    def _parse_package_list(self, what=SOURCE | BINARY):  # noqa:
        """
        Re-read the cached on-disk index files and rebuild the in-memory
        source and/or binary package caches, as selected by the `what`
        bitmask. Returns the (source, binary) cache tuple.
        """
        pkg_types = []

        if what & self.SOURCE:
            pkg_types.append("source")
            self.source.clear()
        if what & self.BINARY:
            pkg_types.extend(["binary-{}".format(self.arch), "binary-all"])
            self.binary.clear()

        LOGGER.info("(re)loading package cache, please hold on.")

        for component, base_url in self.sources_list:
            for pocket in self.pockets:
                for type_ in pkg_types:
                    is_source = (type_ == "source")
                    meta_gz = "Sources.gz" if is_source else "Packages.gz"
                    cache = self.source if is_source else self.binary

                    meta_file = os.path.join(self._cache_dir, "dists",
                                             self.release, component, pocket,
                                             type_, meta_gz)

                    if not os.path.exists(meta_file):
                        continue

                    with ArchiveFileReader(meta_file, raw=True) as archive:
                        try:
                            next(iter(archive))
                        except StopIteration:
                            # The archive is empty.
                            continue

                        index_text = archive.read_data().decode("utf-8")

                        # The pool base is everything before "/dists/".
                        pool_base = re.match(
                            r"^(?P<pool_base>https?://.*?)/dists/.*$",
                            base_url).group("pool_base")

                        # Index stanzas are separated by blank lines.
                        for record in re.split(r"\n\n+",
                                               index_text,
                                               flags=re.MULTILINE):
                            record = record.strip()
                            if not record:
                                continue

                            pkg_meta = DebianPackageMetaData(
                                record, base_url=pool_base)

                            name = pkg_meta["Package"]
                            version = pkg_meta["Version"]

                            versions = cache.setdefault(
                                name, DebianPackageDict())
                            versions.setdefault(version, pkg_meta)

        return (self.source, self.binary)
# Example 7
    def unpack(self):
        """
        Unpacks the components that make up this source package in the right
        order. Does not apply Quilt patches. This method expects to find the
        downloaded files in self.work_dir and unpacks them in place.
        """
        # Unpack orig tarball

        orig_tarball, \
        orig_components, \
        deb_tarball, \
        deb_patches, \
        debdiff_gz = self._guess_file_components()

        pkg_name, pkg_version, _ = \
            self._orig_tarball_split_name(orig_tarball)

        archive_source_path = os.path.join(self.work_dir, orig_tarball)

        # Everything unpacks under "<name>-<version>" in the work dir.
        outdir = os.path.join(self.work_dir,
                              "{}-{}".format(pkg_name, pkg_version))
        os.makedirs(outdir, exist_ok=True)

        LOGGER.info("unpacking Debian package sources to {}".format(outdir))

        # Check if contents are contained in a folder

        strip_components = 1

        # First pass: decide whether every entry lives under one top-level
        # directory (then strip it) or not (then strip nothing).
        with ArchiveFileReader(archive_source_path) as archive:
            header = archive.next_entry()
            if not header.is_directory:
                strip_components = 0
            else:
                prefix = header.pathname

                # If the leading directory is not named after the package,
                # verify all remaining entries still share it before
                # committing to strip_components=1.
                if not prefix.startswith(pkg_name):
                    for header in archive:
                        if not header.pathname.startswith(prefix):
                            strip_components = 0
                            break
                    #end for
                #end if
            #end if
        #end with

        # Unpack upstream orig tarball

        with ArchiveFileReader(archive_source_path) as archive:
            archive.unpack_to_disk(outdir, strip_components=strip_components)

        # Unpack orig components

        # Each additional orig component tarball gets its own subdirectory
        # inside outdir, named after the component.
        for comp_filename in orig_components:
            comp_name = self._comp_name_from_comp_filename(comp_filename)
            archive_source_path = os.path.join(self.work_dir, comp_filename)
            comp_dir = os.path.join(outdir, comp_name)
            os.makedirs(comp_dir, exist_ok=True)

            with ArchiveFileReader(archive_source_path) as archive:
                archive.unpack_to_disk(comp_dir, strip_components=1)
        #end for

        # Unpack debdiff.gz

        if debdiff_gz:
            archive_source_path = os.path.join(self.work_dir, debdiff_gz)
            debian_dir = os.path.join(outdir, "debian")
            os.makedirs(debian_dir, exist_ok=True)

            patch_cmd = ["patch", "-d", outdir, "-p1", "-st"]

            # Stream the decompressed diff straight into `patch`'s stdin.
            # NOTE(review): the patch exit status is not checked, so a
            # failed hunk goes unnoticed here — confirm this is intended.
            with ArchiveFileReader(archive_source_path, raw=True) as archive:
                try:
                    next(iter(archive))
                    proc = subprocess.Popen(patch_cmd, stdin=subprocess.PIPE)
                    for chunk in iter(lambda: archive.read_data(4096), b""):
                        proc.stdin.write(chunk)
                    proc.stdin.close()
                    proc.wait()
                except StopIteration:
                    # Empty archive: nothing to apply.
                    pass
            #end with
        #end if

        # Unpack Debian changes tarball

        if deb_tarball:
            archive_source_path = os.path.join(self.work_dir, deb_tarball)

            # Unpacked on top of the tree; no components are stripped.
            with ArchiveFileReader(archive_source_path) as archive:
                archive.unpack_to_disk(outdir)
        #end if

        return self
    def _load_package_list(self,
                           suite,
                           base_url,
                           component,
                           type_,
                           update=False,
                           inrelease=None):
        """
        Locate, optionally download, verify and parse the package index for
        the given suite/component/type_, merging its entries into the
        source or binary cache.

        Raises DebianPackageCache.Error when the index cannot be located,
        downloaded or parsed; corrupt downloads are deleted before the
        error is raised.
        """
        if not inrelease:
            inrelease = self._load_inrelease_file(suite,
                                                  base_url,
                                                  update=update)

        cache_dir = os.path.join(self._cache_dir, "dists", self.release, suite,
                                 component, type_)

        if not os.path.isdir(cache_dir):
            os.makedirs(cache_dir)

        source_url = None

        # Probe the InRelease file for a .gz or .xz index; the first
        # extension with a recorded hash wins.
        for ext in [".gz", ".xz"]:
            if type_ == "source":
                filename = "Sources" + ext
                # NOTE(review): "(unknown)" looks like a placeholder where
                # the index filename belongs — verify against upstream.
                source = f"{component}/source/(unknown)"
                target = os.path.join(cache_dir, filename)
                cache = self.source
            else:
                filename = "Packages" + ext
                source = f"{component}/{type_}/(unknown)"
                target = os.path.join(cache_dir, filename)
                cache = self.binary
            #end if

            try:
                sha256sum = inrelease.hash_for_filename(source)
                source_url = "{}/{}".format(base_url,
                                            inrelease.by_hash_path(source))
            except KeyError:
                continue
            else:
                break
        #end for

        if not source_url:
            raise DebianPackageCache.Error(
                'unable to locate index file for "{}" in "{}" '
                'suite'.format(type_, suite))
        #end if

        # `target` is a symlink into the by-hash store; its link target's
        # basename doubles as the cached download tag.
        if not os.path.islink(target):
            old_tag = ""
        else:
            old_tag = os.path.basename(os.readlink(target))

        new_tag = None

        if update:
            try:
                downloader = Downloader()

                if not os.path.isdir(cache_dir):
                    os.makedirs(cache_dir)

                # Only re-download when the remote tag changed; drop the
                # previously tagged file afterwards (best effort).
                new_tag = downloader.tag(source_url)
                if old_tag != new_tag:
                    downloader.download_named_tag(source_url,
                                                  target,
                                                  new_tag,
                                                  permissions=0o0644)
                    if old_tag:
                        try:
                            os.unlink(os.path.join(cache_dir, old_tag))
                        except OSError:
                            pass
                    #end if
                #end if
            except Exception as e:
                raise DebianPackageCache.Error(
                    'failed to download "{}": {}'.format(source_url, str(e)))
            #end try
        #end if

        try:
            # Verify the on-disk index against the InRelease hash before
            # trusting its contents.
            digest = hashlib.sha256()

            with open(target, "rb") as f:
                for chunk in iter(lambda: f.read(4096), b""):
                    digest.update(chunk)

            if digest.hexdigest() != sha256sum:
                raise BoltError('wrong hash for "{}".'.format(target))

            with ArchiveFileReader(target, raw=True) as archive:
                try:
                    next(iter(archive))
                except StopIteration:
                    # Empty archive: treat as an empty index.
                    buf = ""
                else:
                    buf = archive.read_data().decode("utf-8")

                # The pool base is everything before "/dists/".
                pool_base = re.match(r"^(?P<pool_base>https?://.*?)/dists/.*$",
                                     base_url).group("pool_base")

                # Index stanzas are separated by blank lines.
                for chunk in re.split(r"\n\n+", buf, flags=re.MULTILINE):
                    chunk = chunk.strip()
                    if not chunk:
                        continue

                    meta_data = DebianPackageMetaData(chunk,
                                                      base_url=pool_base)

                    meta_data["Suite"] = suite
                    meta_data["Component"] = component

                    pkg_name = meta_data["Package"]
                    pkg_version = meta_data["Version"]

                    # setdefault keeps an already-cached version in place.
                    cache\
                        .setdefault(pkg_name, DebianPackageDict())\
                        .setdefault(pkg_version, meta_data)
                #end for
            #end with
        except Exception as e:
            # Remove the (possibly corrupt) index and its tagged file so
            # the next run starts from a clean slate.
            files_to_delete = [
                target,
                os.path.join(os.path.dirname(target), new_tag or old_tag)
            ]

            for filename in files_to_delete:
                try:
                    os.unlink(filename)
                except OSError:
                    pass
            #end for

            raise DebianPackageCache.Error('failed to load "{}": {}'.format(
                target, str(e)))