Example #1
    def _writeSuiteArchOrSource(self, distroseries, pocket, component,
                                file_stub, arch_name, arch_path,
                                all_series_files):
        """Write out a Release file for an architecture or source."""
        # XXX kiko 2006-08-24: Untested method.

        suite = distroseries.getSuite(pocket)
        self.log.debug("Writing Release file for %s/%s/%s" % (
            suite, component, arch_path))

        # Now, grab the actual (non-di) files inside each of
        # the suite's architectures
        file_stub = os.path.join(component, arch_path, file_stub)

        all_series_files.update(get_suffixed_indices(file_stub))
        all_series_files.add(os.path.join(component, arch_path, "Release"))

        release_file = Release()
        release_file["Archive"] = suite
        release_file["Version"] = distroseries.version
        release_file["Component"] = component
        release_file["Origin"] = self._getOrigin()
        release_file["Label"] = self._getLabel()
        release_file["Architecture"] = arch_name

        with open(os.path.join(self._config.distsroot, suite,
                               component, arch_path, "Release"), "w") as f:
            release_file.dump(f, "utf-8")
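The get_suffixed_indices() helper used above is not defined in this snippet; a minimal sketch, assuming it only expands an index stub into the set of compressed variants apt clients look for:

def get_suffixed_indices(path):
    # Hypothetical stand-in: the plain index plus its usual compressed forms.
    return {path + suffix for suffix in ("", ".gz", ".bz2", ".xz")}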
Example #2
from debian.deb822 import Release, Packages
import requests

BASE_REPO = "https://dl.bintray.com/vamega/personal-debian-server/"
distribution = 'stretch'
release_file_url = BASE_REPO + 'dists/' + distribution + '/Release'

resp = requests.get(release_file_url)

items = []
if resp.status_code == 200:
    release = Release(resp.content)
    # Keep only the per-architecture Packages indices listed in the Release file.
    items = [entry for entry in release['SHA256'] if entry['name'].endswith('Packages')]
    print(items)

useful_keys = [
    'Package',
    'Version',
    'Filename',
    'SHA1',
    'SHA256',
    'Size',
]

for item in items:
    package_url = BASE_REPO + 'dists/' + distribution + '/' + item['name']
    resp = requests.get(package_url)
    if resp.status_code == 200:
        for package in Packages.iter_paragraphs(resp.content):
            subsection = {k: package[k] for k in useful_keys}
            print(subsection)
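The Release file already records a SHA256 checksum and size for every index, so each Packages download in the loop above can be verified before it is parsed. A minimal sketch (verify_index is not part of the original snippet):

import hashlib

def verify_index(content, release_entry):
    # Compare the downloaded bytes against the 'sha256' and 'size' values
    # recorded for this index in the Release file.
    return (hashlib.sha256(content).hexdigest() == release_entry['sha256']
            and len(content) == int(release_entry['size']))

# e.g. inside the loop above, after a successful download:
#     if not verify_index(resp.content, item):
#         continue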
Example #3
    def _writeSuite(self, distroseries, pocket):
        """Write out the Release files for the provided suite."""
        # XXX: kiko 2006-08-24: Untested method.

        # As we generate file lists for apt-ftparchive we record which
        # distroseries and pockets we need to generate Release files for.
        # We store this in release_files_needed and consume the information
        # when writeReleaseFiles is called.
        if (distroseries.name, pocket) not in self.release_files_needed:
            # If we don't need to generate a Release file for this series
            # and pocket, don't!
            return

        all_components = [
            comp.name for comp in
            self.archive.getComponentsForSeries(distroseries)]
        all_architectures = [
            a.architecturetag for a in distroseries.enabled_architectures]
        all_files = set()
        for component in all_components:
            self._writeSuiteSource(
                distroseries, pocket, component, all_files)
            for architecture in all_architectures:
                self._writeSuiteArch(
                    distroseries, pocket, component, architecture, all_files)
            self._writeSuiteI18n(
                distroseries, pocket, component, all_files)

        drsummary = "%s %s " % (self.distro.displayname,
                                distroseries.displayname)
        if pocket == PackagePublishingPocket.RELEASE:
            drsummary += distroseries.version
        else:
            drsummary += pocket.name.capitalize()

        suite = distroseries.getSuite(pocket)
        release_file = Release()
        release_file["Origin"] = self._getOrigin()
        release_file["Label"] = self._getLabel()
        release_file["Suite"] = suite
        release_file["Version"] = distroseries.version
        release_file["Codename"] = distroseries.name
        release_file["Date"] = datetime.utcnow().strftime(
            "%a, %d %b %Y %k:%M:%S UTC")
        release_file["Architectures"] = " ".join(sorted(all_architectures))
        release_file["Components"] = " ".join(
            reorder_components(all_components))
        release_file["Description"] = drsummary
        if (pocket == PackagePublishingPocket.BACKPORTS and
            distroseries.backports_not_automatic):
            release_file["NotAutomatic"] = "yes"
            release_file["ButAutomaticUpgrades"] = "yes"

        for filename in sorted(all_files, key=os.path.dirname):
            entry = self._readIndexFileContents(suite, filename)
            if entry is None:
                continue
            release_file.setdefault("MD5Sum", []).append({
                "md5sum": hashlib.md5(entry).hexdigest(),
                "name": filename,
                "size": len(entry)})
            release_file.setdefault("SHA1", []).append({
                "sha1": hashlib.sha1(entry).hexdigest(),
                "name": filename,
                "size": len(entry)})
            release_file.setdefault("SHA256", []).append({
                "sha256": hashlib.sha256(entry).hexdigest(),
                "name": filename,
                "size": len(entry)})

        self._writeReleaseFile(suite, release_file)
        all_files.add("Release")

        # Skip signature if the archive signing key is undefined.
        if self.archive.signing_key is None:
            self.log.debug("No signing key available, skipping signature.")
            return

        # Sign the repository.
        archive_signer = IArchiveSigningKey(self.archive)
        archive_signer.signRepository(suite)
        all_files.add("Release.gpg")

        # Make sure all the timestamps match, to make it easier to insert
        # caching headers on mirrors.
        self._syncTimestamps(suite, all_files)
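The _syncTimestamps() method called at the end is not shown here; a plausible sketch, assuming it simply gives every listed file the same modification time via os.utime:

    def _syncTimestamps(self, suite, all_files):
        # Hypothetical sketch: stamp every file in the suite directory with
        # the newest mtime found, so mirrors serve consistent caching headers.
        location = os.path.join(self._config.distsroot, suite)
        paths = [os.path.join(location, name) for name in all_files]
        paths = [path for path in paths if os.path.exists(path)]
        latest = max(os.stat(path).st_mtime for path in paths)
        for path in paths:
            os.utime(path, (latest, latest))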
Example #4
async def write_suite_files(
    base_path, db, package_info_provider, suite, components, arches, origin, gpg_context
):

    stamp = mktime(datetime.utcnow().timetuple())

    r = Release()
    r["Origin"] = origin
    r["Label"] = suite.debian_build.archive_description
    r["Codename"] = suite.name
    r["Suite"] = suite.name
    r["Date"] = formatdate(timeval=stamp, localtime=False, usegmt=True)
    r["NotAutomatic"] = "yes"
    r["ButAutomaticUpgrades"] = "yes"
    r["Architectures"] = " ".join(arches)
    r["Components"] = " ".join(components)
    r["Description"] = "Generated by the Debian Janitor"

    for component in components:
        logger.debug('Publishing component %s/%s', suite.name, component)
        component_dir = component
        os.makedirs(os.path.join(base_path, component_dir), exist_ok=True)
        for arch in arches:
            arch_dir = os.path.join(component_dir, "binary-%s" % arch)
            os.makedirs(os.path.join(base_path, arch_dir), exist_ok=True)
            br = Release()
            br["Origin"] = origin
            br["Label"] = suite.debian_build.archive_description
            br["Archive"] = suite.name
            br["Architecture"] = arch
            br["Component"] = component
            bp = os.path.join(arch_dir, "Release")
            with open(os.path.join(base_path, bp), "wb") as f:
                # Write the per-architecture Release built above, not the
                # still-incomplete suite-level one.
                br.dump(f)
            add_file_info(r, base_path, bp)

            packages_path = os.path.join(component, "binary-%s" % arch, "Packages")
            SUFFIXES = {
                "": open,
                ".gz": gzip.GzipFile,
                ".bz2": bz2.BZ2File,
            }
            with ExitStack() as es:
                fs = []
                for suffix, fn in SUFFIXES.items():
                    fs.append(
                        es.enter_context(
                            fn(os.path.join(base_path, packages_path + suffix), "wb")
                        )
                    )
                async for chunk in get_packages(
                        db, package_info_provider, suite.name, component, arch):
                    for f in fs:
                        f.write(chunk)
            for suffix in SUFFIXES:
                add_file_info(r, base_path, packages_path + suffix)
            await asyncio.sleep(0)
        await asyncio.sleep(0)

    logger.debug('Writing Release file for %s', suite.name)
    with open(os.path.join(base_path, "Release"), "wb") as f:
        r.dump(f)

    logger.debug('Writing Release.gpg file for %s', suite.name)
    data = gpg.Data(r.dump())
    with open(os.path.join(base_path, "Release.gpg"), "wb") as f:
        signature, result = gpg_context.sign(data, mode=gpg_mode.DETACH)
        f.write(signature)

    logger.debug('Writing InRelease file for %s', suite.name)
    data = gpg.Data(r.dump())
    with open(os.path.join(base_path, "InRelease"), "wb") as f:
        signature, result = gpg_context.sign(data, mode=gpg_mode.CLEAR)
        f.write(signature)
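The add_file_info() helper used throughout is not defined in this snippet; a plausible sketch, assuming it appends size and checksum entries for a path to the Release object's checksum lists (the same shape Example #3 builds by hand):

import hashlib
import os

def add_file_info(release, base_path, path):
    # Hypothetical helper: record size plus MD5/SHA1/SHA256 digests for `path`
    # (relative to base_path) in the Release mapping's multivalued fields.
    with open(os.path.join(base_path, path), "rb") as f:
        contents = f.read()
    for field, hasher in (("MD5Sum", hashlib.md5),
                          ("SHA1", hashlib.sha1),
                          ("SHA256", hashlib.sha256)):
        release.setdefault(field, []).append({
            field.lower(): hasher(contents).hexdigest(),
            "name": path,
            "size": len(contents)})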