Example #1
def clean_byhash(now_date, session):
    cnf = Config()

    Logger.log(["Cleaning out unused by-hash files..."])

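    # A hashfile row becomes eligible for deletion once
    # unreferenced + stayofexecution < CURRENT_TIMESTAMP, i.e. once the
    # archive's stayofexecution grace period has elapsed.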
    q = session.execute("""
        DELETE FROM hashfile h
        USING suite s, archive a
        WHERE s.id = h.suite_id
          AND a.id = s.archive_id
          AND h.unreferenced + a.stayofexecution < CURRENT_TIMESTAMP
        RETURNING a.path, s.suite_name, h.path""")
    count = q.rowcount

    if not Options["No-Action"]:
        for base, suite, path in q:
            suite_suffix = utils.suite_suffix(suite)
            filename = os.path.join(base, 'dists', suite, suite_suffix, path)
            try:
                os.unlink(filename)
            except OSError as exc:
                if exc.errno != errno.ENOENT:
                    raise
                Logger.log(
                    ['database referred to non-existing file', filename])
            else:
                Logger.log(['delete hashfile', suite, path])
        session.commit()

    if count > 0:
        Logger.log(["total", count])
Example #2
    def suite_path(self):
        """
        Absolute path to the suite-specific files.
        """
        suite_suffix = utils.suite_suffix(self.suite.suite_name)

        return os.path.join(self.suite.archive.path, 'dists',
                            self.suite.suite_name, suite_suffix)
Example #3
    def suite_release_path(self):
        """
        Absolute path where Release files are physically stored.
        This should be a path that sorts after the dists/ directory.
        """
        cnf = Config()
        suite_suffix = utils.suite_suffix(self.suite.suite_name)

        return os.path.join(self.suite.archive.path, 'zzz-dists',
                            self.suite.suite_name, suite_suffix)
Example #4
    def generate_release_files(self):
        """
        Generate Release files for self.suite.
        """

        suite = self.suite
        session = object_session(suite)

        architectures = get_suite_architectures(suite.suite_name, skipall=True, skipsrc=True, session=session)

        # attribs is a tuple of (Release field, database attribute) pairs
        # used to fill in the simple header fields
        attribs = (('Origin',      'origin'),
                   ('Label',       'label'),
                   ('Suite',       'release_suite_output'),
                   ('Version',     'version'),
                   ('Codename',    'codename'),
                   ('Changelogs',  'changelog_url'),
                   )

        # A "Sub" Release file has slightly different fields
        subattribs = (('Archive',  'suite_name'),
                      ('Origin',   'origin'),
                      ('Label',    'label'),
                      ('Version',  'version'))

        # Boolean attributes: if the value in the database is true, write "yes" into the Release file
        boolattrs = (('NotAutomatic',         'notautomatic'),
                     ('ButAutomaticUpgrades', 'butautomaticupgrades'),
                     ('Acquire-By-Hash',      'byhash'),
                     )

        cnf = Config()
        cnf_suite_suffix = cnf.get("Dinstall::SuiteSuffix", "").rstrip("/")

        suite_suffix = utils.suite_suffix(suite.suite_name)

        self.create_output_directories()
        self.create_release_symlinks()

        outfile = os.path.join(self.suite_release_path(), "Release")
        out = open(outfile + ".new", "w")
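        # The Release file is written to "<outfile>.new" and only renamed
        # into place once all fields and checksums have been written, so
        # readers never see a partially written file.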

        for key, dbfield in attribs:
            # Hack to skip NULL Version fields as we used to do this
            # We should probably just always ignore anything which is None
            if key in ("Version", "Changelogs") and getattr(suite, dbfield) is None:
                continue

            out.write("%s: %s\n" % (key, getattr(suite, dbfield)))

        out.write("Date: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()))))

        if suite.validtime:
            validtime = float(suite.validtime)
            out.write("Valid-Until: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time() + validtime))))

        for key, dbfield in boolattrs:
            if getattr(suite, dbfield, False):
                out.write("%s: yes\n" % (key))

        out.write("Architectures: %s\n" % (" ".join([a.arch_string for a in architectures])))

        components = [c.component_name for c in suite.components]

        out.write("Components: %s\n" % (" ".join(components)))

        # For exact compatibility with old g-r, write out Description here instead
        # of with the rest of the DB fields above
        if suite.description is not None:
            out.write("Description: %s\n" % suite.description)

        for comp in components:
            for dirpath, dirnames, filenames in os.walk(os.path.join(self.suite_path(), comp), topdown=True):
                if not re_gensubrelease.match(dirpath):
                    continue

                subfile = os.path.join(dirpath, "Release")
                subrel = open(subfile + '.new', "w")

                for key, dbfield in subattribs:
                    if getattr(suite, dbfield) is not None:
                        subrel.write("%s: %s\n" % (key, getattr(suite, dbfield)))

                for key, dbfield in boolattrs:
                    if getattr(suite, dbfield, False):
                        subrel.write("%s: yes\n" % (key))

                subrel.write("Component: %s%s\n" % (suite_suffix, comp))

                # Urgh, but until we have all the suite/component/arch stuff in the DB,
                # this'll have to do
                arch = os.path.split(dirpath)[-1]
                if arch.startswith('binary-'):
                    arch = arch[7:]

                subrel.write("Architecture: %s\n" % (arch))
                subrel.close()

                os.rename(subfile + '.new', subfile)

        # Now that we have done the groundwork, we can add the files with
        # their checksums to the main Release file
        oldcwd = os.getcwd()

        os.chdir(self.suite_path())

        hashes = [x for x in RELEASE_HASHES if x.db_name in suite.checksums]

        fileinfo = {}
        fileinfo_byhash = {}

        uncompnotseen = {}

        for dirpath, dirnames, filenames in os.walk(".", followlinks=True, topdown=True):
            # SuiteSuffix deprecation:
            # components on security-master are updates/{main,contrib,non-free}, but
            # we want dists/${suite}/main.  Until we can rename the components,
            # we cheat by having an updates -> . symlink.  This should not be visited.
            if cnf_suite_suffix:
                path = os.path.join(dirpath, cnf_suite_suffix)
                try:
                    target = os.readlink(path)
                    if target == ".":
                        dirnames.remove(cnf_suite_suffix)
                except (OSError, ValueError):
                    pass
            for entry in filenames:
                if dirpath == '.' and entry in ["Release", "Release.gpg", "InRelease"]:
                    continue

                # Strip the leading "./" from os.walk() paths; note that
                # lstrip('./') would strip a character set, not a prefix
                filename = os.path.normpath(os.path.join(dirpath, entry))

                if re_includeinrelease_byhash.match(entry):
                    fileinfo[filename] = fileinfo_byhash[filename] = {}
                elif re_includeinrelease_plain.match(entry):
                    fileinfo[filename] = {}
                # Skip things we don't want to include
                else:
                    continue

                # Read as bytes: both the checksum functions and the recorded
                # length refer to the raw file content
                with open(filename, 'rb') as fd:
                    contents = fd.read()

                # If we find a file for which we have a compressed version and
                # haven't yet seen the uncompressed one, store the possibility
                # for future use
                if entry.endswith(".gz") and filename[:-3] not in uncompnotseen:
                    uncompnotseen[filename[:-3]] = (gzip.GzipFile, filename)
                elif entry.endswith(".bz2") and filename[:-4] not in uncompnotseen:
                    uncompnotseen[filename[:-4]] = (bz2.BZ2File, filename)
                elif entry.endswith(".xz") and filename[:-3] not in uncompnotseen:
                    uncompnotseen[filename[:-3]] = (XzFile, filename)

                fileinfo[filename]['len'] = len(contents)

                for hf in hashes:
                    fileinfo[filename][hf.release_field] = hf.func(contents)

        for filename, comp in uncompnotseen.items():
            # If we've already seen the uncompressed file, we don't
            # need to do anything again
            if filename in fileinfo:
                continue

            fileinfo[filename] = {}

            # File handler is comp[0], filename of compressed file is comp[1]
            contents = comp[0](comp[1], 'r').read()

            fileinfo[filename]['len'] = len(contents)

            for hf in hashes:
                fileinfo[filename][hf.release_field] = hf.func(contents)

        for field in sorted(h.release_field for h in hashes):
            out.write('%s:\n' % field)
            for filename in sorted(fileinfo.keys()):
                out.write(" %s %8d %s\n" % (fileinfo[filename][field], fileinfo[filename]['len'], filename))

        out.close()
        os.rename(outfile + '.new', outfile)

        self._update_hashfile_table(session, fileinfo_byhash, hashes)
        self._make_byhash_links(fileinfo_byhash, hashes)
        self._make_byhash_base_symlink(fileinfo_byhash, hashes)

        sign_release_dir(suite, os.path.dirname(outfile))

        os.chdir(oldcwd)

        return
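XzFile (used for .xz indices above) is not part of these excerpts. Assuming
it merely gives xz files the same constructor-plus-read() interface as
gzip.GzipFile and bz2.BZ2File, a minimal stand-in could be:

import lzma

class XzFile:
    # Hypothetical stand-in: open the file via the lzma module; mode 'r'
    # means binary here, matching GzipFile/BZ2File semantics.
    def __init__(self, filename, mode='r'):
        self._fh = lzma.open(filename, mode)

    def read(self):
        return self._fh.read()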
Example #5
def main():
    global Cnf, Options, Logger

    os.umask(0o002)

    Cnf = utils.get_conf()
    Arguments = [('h', "help", "Generate-Index-Diffs::Options::Help"),
                 ('a', 'archive', 'Generate-Index-Diffs::Options::Archive', 'hasArg'),
                 ('c', None, "Generate-Index-Diffs::Options::CanonicalPath", "hasArg"),
                 ('p', "patchname", "Generate-Index-Diffs::Options::PatchName", "hasArg"),
                 ('d', "tmpdir", "Generate-Index-Diffs::Options::TempDir", "hasArg"),
                 ('m', "maxdiffs", "Generate-Index-Diffs::Options::MaxDiffs", "hasArg"),
                 ('n', "no-act", "Generate-Index-Diffs::Options::NoAct"),
                 ('v', "verbose", "Generate-Index-Diffs::Options::Verbose"),
                 ]
    suites = apt_pkg.parse_commandline(Cnf, Arguments, sys.argv)
    Options = Cnf.subtree("Generate-Index-Diffs::Options")
    if "Help" in Options:
        usage()

    maxdiffs = Options.get("MaxDiffs::Default", "56")
    maxpackages = Options.get("MaxDiffs::Packages", maxdiffs)
    maxcontents = Options.get("MaxDiffs::Contents", maxdiffs)
    maxsources = Options.get("MaxDiffs::Sources", maxdiffs)

    # can only be set via config at the moment
    max_parallel = int(Options.get("MaxParallel", "8"))

    if "PatchName" not in Options:
        format = "%Y-%m-%d-%H%M.%S"
        Options["PatchName"] = time.strftime(format)

    session = DBConn().session()
    pending_tasks = []

    if not suites:
        query = session.query(Suite.suite_name)
        if Options.get('Archive'):
            archives = utils.split_args(Options['Archive'])
            query = query.join(Suite.archive).filter(Archive.archive_name.in_(archives))
        suites = [s.suite_name for s in query]

    for suitename in suites:
        print("Processing: " + suitename)

        suiteobj = get_suite(suitename.lower(), session=session)

        # Use the canonical version of the suite name
        suite = suiteobj.suite_name

        if suiteobj.untouchable:
            print("Skipping: " + suite + " (untouchable)")
            continue

        skip_all = True
        if suiteobj.separate_contents_architecture_all or suiteobj.separate_packages_architecture_all:
            skip_all = False

        architectures = get_suite_architectures(suite, skipall=skip_all, session=session)
        components = [c.component_name for c in session.query(Component.component_name)]

        suite_suffix = utils.suite_suffix(suitename)
        if components and suite_suffix:
            longsuite = suite + "/" + suite_suffix
        else:
            longsuite = suite

        merged_pdiffs = suiteobj.merged_pdiffs

        tree = os.path.join(suiteobj.archive.path, 'dists', longsuite)

        # See if there are Translations which might need a new pdiff
        cwd = os.getcwd()
        for component in components:
            workpath = os.path.join(tree, component, "i18n")
            if os.path.isdir(workpath):
                os.chdir(workpath)
                for dirpath, dirnames, filenames in os.walk(".", followlinks=True, topdown=True):
                    for entry in filenames:
                        if not re_includeinpdiff.match(entry):
                            continue
                        (fname, fext) = os.path.splitext(entry)
                        processfile = os.path.join(workpath, fname)
                        storename = "%s/%s_%s_%s" % (Options["TempDir"], suite, component, fname)
                        coroutine = genchanges(Options, processfile + ".diff", storename, processfile, maxdiffs, merged_pdiffs)
                        pending_tasks.append(coroutine)
        os.chdir(cwd)

        for archobj in architectures:
            architecture = archobj.arch_string

            if architecture == "source":
                longarch = architecture
                packages = "Sources"
                maxsuite = maxsources
            else:
                longarch = "binary-%s" % architecture
                packages = "Packages"
                maxsuite = maxpackages

            for component in components:
                # Process Contents
                file = "%s/%s/Contents-%s" % (tree, component, architecture)

                storename = "%s/%s_%s_contents_%s" % (Options["TempDir"], suite, component, architecture)
                coroutine = genchanges(Options, file + ".diff", storename, file, maxcontents, merged_pdiffs)
                pending_tasks.append(coroutine)

                file = "%s/%s/%s/%s" % (tree, component, longarch, packages)
                storename = "%s/%s_%s_%s" % (Options["TempDir"], suite, component, architecture)
                coroutine = genchanges(Options, file + ".diff", storename, file, maxsuite, merged_pdiffs)
                pending_tasks.append(coroutine)

    asyncio.run(process_pdiff_tasks(pending_tasks, max_parallel))
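process_pdiff_tasks() is not shown in this excerpt. Assuming it only has to
run the collected genchanges coroutines with bounded concurrency, a minimal
sketch is:

import asyncio

async def process_pdiff_tasks(pending_tasks, max_parallel):
    # Hypothetical sketch: a semaphore caps how many genchanges coroutines
    # run at once; gather() waits until all of them have finished.
    semaphore = asyncio.Semaphore(max_parallel)

    async def bounded(coroutine):
        async with semaphore:
            return await coroutine

    await asyncio.gather(*(bounded(task) for task in pending_tasks))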