Example #1
    def import_package_files(self,
                             suite: str,
                             component: str,
                             fnames: List[str],
                             ignore_signature: bool = False,
                             add_overrides: bool = True) -> bool:

        # run dak import command.
        args = ['import']
        if ignore_signature:
            args.append('-s')
        if add_overrides:
            args.append('-a')

        args.append(suite)
        args.append(component)
        args.extend(fnames)

        ret, out = self._run_dak(args, check=False)

        if ret != 0:
            raise Exception('Unable to import package files \'{}\': {}'.format(
                ' '.join(fnames), out))

        log.info('Imported \'{}\' into \'{}/{}\'.'.format(
            ' '.join([os.path.basename(f) for f in fnames]), suite, component))
        return True
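
These examples call a private helper, `self._run_dak`, whose implementation is not shown. A minimal sketch of what such a helper could look like, assuming it wraps `subprocess.run` and returns the exit status together with the captured output (the name `run_command` and the sample `dak import` invocation are illustrative, not the project's actual code):

import subprocess
from typing import List, Optional, Tuple

def run_command(args: List[str], input_data: Optional[str] = None,
                check: bool = True) -> Tuple[int, str]:
    # run the command, optionally feeding data to stdin, and capture all output
    proc = subprocess.run(args, input=input_data, capture_output=True, text=True)
    if check and proc.returncode != 0:
        raise RuntimeError('Command {} failed: {}'.format(args, proc.stderr))
    return proc.returncode, proc.stdout + proc.stderr

# hypothetical call mirroring the import above:
# ret, out = run_command(['dak', 'import', '-s', '-a', 'unstable', 'main', 'pkg.changes'], check=False)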
Example #2
    def _run_migration_internal(self, session, suites_from: List[ArchiveSuite],
                                suite_to: ArchiveSuite):

        mi_wspace = self._get_migrate_workspace(suites_from, suite_to)
        britney_conf = os.path.join(mi_wspace, 'britney.conf')
        if not os.path.isfile(britney_conf):
            log.warning(
                'No Britney config for migration run "{}" - maybe the configuration was not yet updated?'
                .format(self._get_migration_name(suites_from, suite_to)))
            return None

        log.info('Migration run for "{}"'.format(
            self._get_migration_name(suites_from, suite_to)))
        # ensure prerequisites are met and Britney is fed with all the data it needs
        self._prepare_source_data(session, mi_wspace, suites_from, suite_to)
        self._create_faux_packages(session, mi_wspace, suites_from, suite_to)
        self._collect_urgencies(mi_wspace)
        self._setup_dates(mi_wspace)
        self._setup_various(mi_wspace, suites_from, suite_to)

        # execute the migration tester
        self._britney.run(mi_wspace, britney_conf)

        # tell dak to import the new data (overriding the target suite)
        dak = DakBridge()
        heidi_result = self._postprocess_heidi_file(mi_wspace)
        ret = dak.set_suite_to_britney_result(suite_to.name, heidi_result)
        if not ret:
            return None

        res = self._retrieve_excuses(session, mi_wspace, suites_from, suite_to)
        return res
Example #3
    def _setup_dates(self, mi_wspace: str):
        dates_policy_file = os.path.join(mi_wspace, 'state',
                                         'age-policy-dates')
        if os.path.isfile(dates_policy_file):
            return

        log.info('Writing dates policy file.')
        # just make an empty file for now
        with open(dates_policy_file, 'w') as f:
            f.write('\n')
Example #4
    def remove_package(self, package_name: str, suite_name: str) -> bool:
        ''' Remove a package from a specified suite. '''

        log.info('Removing \'{}\' from \'{}\''.format(package_name,
                                                      suite_name))

        # actually remove a package
        args = [
            'rm', '-m', 'RID: Removed from Debian', '-C', 'janitor@dak', '-s',
            suite_name, package_name
        ]

        ret, out = self._run_dak(args, 'y\n', check=False)

        if ret != 0:
            raise Exception(
                'Unable to remove package \'{}\' from \'{}\': {}'.format(
                    package_name, suite_name, out))

        return True
Example #5
    def _collect_urgencies(self, mi_wspace: str):

        urgencies = ''
        for subdir, dirs, files in os.walk(
                self._lconf.archive_urgencies_export_dir):
            for fbasename in files:
                fname = os.path.join(subdir, fbasename)
                if not os.path.isfile(fname):
                    continue
                if not fbasename.startswith('install-urgencies'):
                    continue

                log.debug('Reading urgencies from {}'.format(fname))
                with open(fname, 'r') as f:
                    urgencies = urgencies + f.read()

        log.info('Writing urgency policy file.')
        urgency_policy_file = os.path.join(mi_wspace, 'state',
                                           'age-policy-urgencies')
        with open(urgency_policy_file, 'w') as f:
            f.write(urgencies)
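
For comparison, the same scan can be written more compactly with `pathlib`; a self-contained sketch (not the project's code), with `sorted()` added so the concatenation order is deterministic, which `os.walk` does not guarantee:

from pathlib import Path

def collect_urgencies(export_dir: str) -> str:
    # concatenate every install-urgencies* file below the export directory
    parts = []
    for path in sorted(Path(export_dir).rglob('install-urgencies*')):
        if path.is_file():
            parts.append(path.read_text())
    return ''.join(parts)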
Example #6
    def set_suite_to_britney_result(self, suite_name: str,
                                    heidi_file: str) -> bool:
        '''
        Import a Britney result (HeidiResult file) into the dak database.
        This will *override* all existing package information in the target suite.
        Use this command with great care!
        '''

        # do some sanity checks
        if not os.path.isfile(heidi_file):
            log.warning(
                'Britney result not imported: File "{}" does not exist.'.
                format(heidi_file))
            return False

        # an empty file might cause us to delete the whole repository contents.
        # this is a safeguard against that.
        heidi_data = None
        with open(heidi_file, 'r') as f:
            heidi_data = f.read().strip()
        if not heidi_data:
            log.warning(
                'Stopped Britney result import: File "{}" is empty.'.format(
                    heidi_file))
            return True

        log.info('Importing Britney result from {}'.format(heidi_file))

        # run dak control-suite command.
        args = ['control-suite']
        args.extend(['--set', suite_name, '--britney'])
        ret, out = self._run_dak(args, input_data=heidi_data, check=False)

        if ret != 0:
            raise Exception('Unable to apply Britney result to "{}": {}'.format(
                suite_name, out))

        log.info('Updated packages in "{}" based on Britney result.'.format(
            suite_name))
        return True
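
Routed through the `run_command` helper sketched under Example #1, the control-suite call above would look roughly like this (the suite name and data are illustrative; the Heidi lines are assumed to be 'package version architecture' entries):

# hypothetical invocation, reusing run_command() from the Example #1 sketch
heidi_data = 'hello 2.10-2 amd64\nhello 2.10-2 source\n'
ret, out = run_command(['dak', 'control-suite', '--set', 'landing', '--britney'],
                       input_data=heidi_data, check=False)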
Example #7
    def _setup_various(self, mi_wspace: str, suites_source: List[ArchiveSuite],
                       suite_target: ArchiveSuite):
        # set up some random files which we do not use at all currently
        for suite in suites_source:
            rcbugs_policy_file_u = os.path.join(
                mi_wspace, 'state', 'rc-bugs-{}'.format(suite.name))
            if not os.path.isfile(rcbugs_policy_file_u):
                log.info('Writing RC bugs policy file (source).')
                # just make an empty file for now
                with open(rcbugs_policy_file_u, 'w') as f:
                    f.write('')

        rcbugs_policy_file_t = os.path.join(
            mi_wspace, 'state', 'rc-bugs-{}'.format(suite_target.name))
        if not os.path.isfile(rcbugs_policy_file_t):
            log.info('Writing RC bugs policy file (target).')
            # just make an empty file for now
            with open(rcbugs_policy_file_t, 'w') as f:
                f.write('')

        # there is no support for Piuparts yet, but Britney crashes without these files
        piuparts_dummy_json = (
            '{"_id": "Piuparts Package Test Results Summary", '
            '"_version": "1.0", '
            '"packages": {}}\n')
        for suite in suites_source:
            piuparts_file_u = os.path.join(
                mi_wspace, 'state',
                'piuparts-summary-{}.json'.format(suite.name))
            if not os.path.isfile(piuparts_file_u):
                log.info('Writing Piuparts summary file (source).')
                # write dummy data for now
                with open(piuparts_file_u, 'w') as f:
                    f.write(piuparts_dummy_json)

        piuparts_file_t = os.path.join(
            mi_wspace, 'state',
            'piuparts-summary-{}.json'.format(suite_target.name))
        if not os.path.isfile(piuparts_file_t):
            log.info('Writing Piuparts summary file (target).')
            # write dummy data for now
            with open(piuparts_file_t, 'w') as f:
                f.write(piuparts_dummy_json)
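
Rather than hand-writing the JSON string, the same dummy document could be built with `json.dumps`, which avoids quoting mistakes (an equivalent sketch, not the project's code):

import json

piuparts_dummy_json = json.dumps({
    '_id': 'Piuparts Package Test Results Summary',
    '_version': '1.0',
    'packages': {},
}) + '\n'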
Example #8
    def update_config(self):
        '''
        Update the Britney configuration and refresh
        the local copy of Britney.
        '''

        log.info('Updating configuration')

        with session_scope() as session:
            for mentry in session.query(SpearsMigrationEntry).all():

                si_res = self._suites_from_migration_entry(session, mentry)
                if si_res['error']:
                    continue
                suites_from = si_res['from']
                suite_to = si_res['to']
                assert len(suites_from) >= 1

                log.info('Refreshing Britney config for "{}"'.format(
                    self._get_migration_name(suites_from, suite_to)))
                mi_wspace = self._get_migrate_workspace(suites_from, suite_to)
                bc = BritneyConfig(mi_wspace)
                bc.set_archive_paths(
                    self._get_source_suite_dists_dir(mi_wspace, suites_from),
                    os.path.join(self._lconf.archive_root_dir, 'dists',
                                 suite_to.name))
                bc.set_components([c.name for c in suite_to.components])
                bc.set_architectures([a.name for a in suite_to.architectures])
                bc.set_delays(mentry.delays)

                hints = session.query(SpearsHint).filter(
                    SpearsHint.migration_id ==
                    mentry.make_migration_id()).all()
                bc.set_hints(hints)

                bc.save()

        log.info('Updating Britney')
        self._britney.update_dist()

        return True
Example #9
def install_trusted_keyfile(options):
    ''' Install a public key to trust a client node. '''
    from shutil import copyfile

    if not options.name:
        print('No name for this public key / client given!')
        sys.exit(1)

    source_keyfile = options.keyfile
    if not source_keyfile:
        print('No public key file given!')
        sys.exit(1)

    if not os.path.isfile(source_keyfile):
        print('Public key file "{}" was not found.'.format(source_keyfile))
        sys.exit(1)

    pub_key = None
    sec_key = None
    try:
        pub_key, sec_key = zmq.auth.load_certificate(source_keyfile)
    except ValueError:
        pass
    if not pub_key:
        log.info('The given keyfile does not contain a public ZCurve key!')
    if sec_key:
        print('')
        print(
            '/!\\ The current file contains a secret ZCurve key. This file should never leave the client machine it is installed on.'
        )
        print('')

    _, verify_key = keyfile_read_verify_key(source_keyfile)
    if not verify_key:
        log.info('The given keyfile does not contain a verification key!')
    if not verify_key and not pub_key:
        log.error(
            'The keyfile contains neither a public encryption key nor a verification key. Cannot continue.'
        )
        sys.exit(4)

    _, sign_key = keyfile_read_signing_key(source_keyfile)
    if sign_key:
        print('')
        print(
            '/!\\ The current file contains a secret signing key. This file should never leave the client machine it is installed on.'
        )
        print('')

    lconf = LocalConfig()
    target_keyfile = os.path.join(lconf.trusted_curve_keys_dir,
                                  '{}.pub.key'.format(options.name))
    if os.path.isfile(target_keyfile) and not options.force:
        print(
            'We already trust a key for "{}" on this machine. You can override the existing one by specifying "--force".'
            .format(options.name))
        sys.exit(2)

    try:
        copyfile(source_keyfile, target_keyfile)
    except Exception as e:
        print('Failed to install new public key as {}: {}'.format(
            target_keyfile, str(e)))
        sys.exit(3)
    print('Installed as {}'.format(target_keyfile))
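
A self-contained sketch of the pyzmq certificate API this function builds on: `zmq.auth.create_certificates` writes a public `.key` file plus a `.key_secret` file, and `zmq.auth.load_certificate` returns a `(public, secret)` tuple where `secret` is `None` for public-only files:

import tempfile

import zmq.auth

key_dir = tempfile.mkdtemp()
public_file, secret_file = zmq.auth.create_certificates(key_dir, 'client')

pub_key, sec_key = zmq.auth.load_certificate(public_file)
assert pub_key is not None and sec_key is None  # safe to distribute

pub_key, sec_key = zmq.auth.load_certificate(secret_file)
assert sec_key is not None  # must never leave the client machine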
Example #10
    def autosync(self, session, sync_conf, remove_cruft: bool = True):
        ''' Synchronize all packages that are newer '''

        self._synced_source_pkgs = []
        active_src_pkgs = []  # source packages which should have their binary packages updated
        res_issues = []

        target_suite = session.query(ArchiveSuite) \
                              .filter(ArchiveSuite.name == self._target_suite_name).one()
        sync_conf = session.query(SynchrotronConfig) \
                           .join(SynchrotronConfig.destination_suite) \
                           .join(SynchrotronConfig.source) \
                           .filter(ArchiveSuite.name == self._target_suite_name,
                                   SynchrotronSource.suite_name == self._source_suite_name).one_or_none()

        for component in target_suite.components:
            dest_pkg_map = self._get_target_source_packages(component.name)

            # The source package list contains many different versions; some source package
            # versions are explicitly kept for GPL-compatibility.
            # Sometimes a binary package migrates into another suite, dragging a newer source package
            # that it was built against with itself into the target suite.
            # These packages then have a source with a high version number, but might not have any
            # binaries due to them migrating later.
            # We need to handle that case when doing binary syncs (TODO: and maybe safeguard against it
            # when doing source-only syncs too?). That's why we don't filter out the newest packages in
            # binary-sync-mode.
            if sync_conf.sync_binaries:
                src_pkg_range = self._source_repo.source_packages(ArchiveSuite(self._source_suite_name), component)
            else:
                src_pkg_range = self._get_repo_source_package_map(self._source_repo,
                                                                  self._source_suite_name,
                                                                  component).values()

            for spkg in src_pkg_range:
                # ignore blacklisted packages in automatic sync
                if spkg.name in self._sync_blacklist:
                    continue

                dpkg = dest_pkg_map.get(spkg.name)
                if dpkg:
                    if version_compare(dpkg.version, spkg.version) >= 0:
                        log.debug('Skipped sync of {}: Target version \'{}\' is equal to or newer than source version \'{}\'.'
                                  .format(spkg.name, dpkg.version, spkg.version))
                        continue

                    # check if we have a modified target package,
                    # indicated via its Debian revision, e.g. "1.0-0tanglu1"
                    if self._distro_tag in version_revision(dpkg.version):
                        log.info('Not syncing {}/{}: Destination has modifications (found {}).'
                                 .format(spkg.name, spkg.version, dpkg.version))

                        # add information that this package needs to be merged to the issue list
                        issue = SynchrotronIssue()
                        issue.package_name = spkg.name
                        issue.source_version = spkg.version
                        issue.target_version = dpkg.version
                        issue.kind = SynchrotronIssueKind.MERGE_REQUIRED

                        res_issues.append(issue)
                        continue

                # sync source package
                # the source package must always be known to dak first
                ret = self._import_source_package(spkg, component.name)
                if not ret:
                    return False, []

                # a new source package is always active and needs its binary packages synced, in
                # case we do binary syncs.
                active_src_pkgs.append(spkg)

            # all packages in the target distribution are considered active, as long as they don't
            # have modifications.
            for spkg in dest_pkg_map.values():
                if self._distro_tag in version_revision(spkg.version):
                    active_src_pkgs.append(spkg)

            # import binaries as well. We test for binary updates for all available active source packages,
            # as binNMUs might have happened in the source distribution.
            # (an active package in this context is any source package which doesn't have modifications in the
            # target distribution)
            ret = self._import_binaries_for_source(sync_conf, target_suite, component.name, active_src_pkgs)
            if not ret:
                return False, []

        # test for cruft packages
        target_pkg_index = {}
        for component in target_suite.components:
            dest_pkg_map = self._get_repo_source_package_map(self._target_repo,
                                                             target_suite.name,
                                                             component.name)
            for pkgname, pkg in dest_pkg_map.items():
                target_pkg_index[pkgname] = pkg

        # check which packages are present in the target, but not in the source suite
        for component in target_suite.components:
            src_pkg_map = self._get_repo_source_package_map(self._source_repo,
                                                            self._source_suite_name,
                                                            component.name)
            for pkgname in src_pkg_map.keys():
                target_pkg_index.pop(pkgname, None)

        # remove cruft packages
        if remove_cruft:
            for pkgname, dpkg in target_pkg_index.items():
                dpkg_ver_revision = version_revision(dpkg.version, False)
                # native packages are never removed
                if not dpkg_ver_revision:
                    continue

                # check if the package is introduced as new in the distro, in which case we won't remove it
                if dpkg_ver_revision.startswith('0' + self._distro_tag):
                    continue

                # if this package was modified in the target distro, we will also not remove it, but flag it
                # as "potential cruft" for someone to look at.
                if self._distro_tag in dpkg_ver_revision:
                    issue = SynchrotronIssue()
                    issue.kind = SynchrotronIssueKind.MAYBE_CRUFT
                    issue.source_suite = self._source_suite_name
                    issue.target_suite = self._target_suite_name
                    issue.package_name = dpkg.name
                    issue.source_version = None
                    issue.target_version = dpkg.version

                    res_issues.append(issue)
                    continue

                # check if we can remove this package without breaking stuff
                if self._dak.package_is_removable(dpkg.name, target_suite.name):
                    # try to remove the package
                    try:
                        self._dak.remove_package(dpkg.name, target_suite.name)
                    except Exception as e:
                        issue = SynchrotronIssue()
                        issue.kind = SynchrotronIssueKind.REMOVAL_FAILED
                        issue.source_suite = self._source_suite_name
                        issue.target_suite = self._target_suite_name
                        issue.package_name = dpkg.name
                        issue.source_version = None
                        issue.target_version = dpkg.version
                        issue.details = str(e)

                        res_issues.append(issue)
                else:
                    # looks like we can not remove this
                    issue = SynchrotronIssue()
                    issue.kind = SynchrotronIssueKind.REMOVAL_FAILED
                    issue.source_suite = self._source_suite_name
                    issue.target_suite = self._target_suite_name
                    issue.package_name = dpkg.name
                    issue.source_version = None
                    issue.target_version = dpkg.version
                    issue.details = 'This package can not be removed without breaking other packages. It needs manual removal.'

                    res_issues.append(issue)

        self._publish_synced_spkg_events(sync_conf.source.os_name,
                                         sync_conf.source.suite_name,
                                         sync_conf.destination_suite.name,
                                         False)

        return True, res_issues
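
The version handling above leans on two helpers, `version_compare` and `version_revision`. A sketch of their assumed semantics, using python-apt for the comparison; note that the project's `version_revision` accepts an extra argument (seen in the cruft check above) which this simplified version omits:

import apt_pkg

apt_pkg.init_system()

def version_revision(version: str) -> str:
    # the Debian revision is the part after the last hyphen;
    # native versions (no hyphen) have no revision
    return version.rsplit('-', 1)[1] if '-' in version else ''

assert apt_pkg.version_compare('1.0-1', '1.0-2') < 0   # target is older than source
assert apt_pkg.version_compare('2.0-1', '1.9-3') > 0   # target is newer than source
assert version_revision('1.0-0tanglu1') == '0tanglu1'  # distro-modified revision
assert version_revision('20210101') == ''              # native package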
Example #11
    def sync_packages(self, component: str, pkgnames: List[str], force: bool = False):
        self._synced_source_pkgs = []

        with session_scope() as session:
            sync_conf = session.query(SynchrotronConfig) \
                               .join(SynchrotronConfig.destination_suite) \
                               .join(SynchrotronConfig.source) \
                               .filter(ArchiveSuite.name == self._target_suite_name,
                                       SynchrotronSource.suite_name == self._source_suite_name).one_or_none()
            if not sync_conf:
                log.error('Unable to find a sync config for this source/destination combination.')
                return False

            if not sync_conf.sync_enabled:
                log.error('Can not synchronize package: Synchronization is disabled for this configuration.')
                return False

            target_suite = session.query(ArchiveSuite) \
                                  .filter(ArchiveSuite.name == self._target_suite_name).one()

            dest_pkg_map = self._get_target_source_packages(component)
            src_pkg_map = self._get_repo_source_package_map(self._source_repo,
                                                            self._source_suite_name,
                                                            component)

            for pkgname in pkgnames:
                spkg = src_pkg_map.get(pkgname)
                dpkg = dest_pkg_map.get(pkgname)

                if not spkg:
                    log.info('Can not sync {}: Does not exist in source.'.format(pkgname))
                    continue
                if pkgname in self._sync_blacklist:
                    log.info('Can not sync {}: The package is blacklisted.'.format(pkgname))
                    continue

                if dpkg:
                    if version_compare(dpkg.version, spkg.version) >= 0:
                        if force:
                            log.warning('{}: Target version \'{}\' is equal to or newer than source version \'{}\'.'
                                        .format(pkgname, dpkg.version, spkg.version))
                        else:
                            log.info('Can not sync {}: Target version \'{}\' is equal to or newer than source version \'{}\'.'
                                     .format(pkgname, dpkg.version, spkg.version))
                            continue

                    if not force:
                        if self._distro_tag in version_revision(dpkg.version):
                            log.error('Not syncing {}/{}: Destination has modifications (found {}).'
                                      .format(spkg.name, spkg.version, dpkg.version))
                            continue

                # sync source package
                # the source package must always be known to dak first
                ret = self._import_source_package(spkg, component)
                if not ret:
                    return False

            ret = self._import_binaries_for_source(sync_conf, target_suite, component, self._synced_source_pkgs, force)

            # TODO: Analyze the input, fetch the packages from the source distribution and
            # import them into the target in their correct order.
            # Then apply the correct, synced override from the source distro.

            self._publish_synced_spkg_events(sync_conf.source.os_name,
                                             sync_conf.source.suite_name,
                                             sync_conf.destination_suite.name,
                                             force)
            return ret
Example #12
def update_appstream_data(session, local_repo, repo, suite, component, arch):
    '''
    Import AppStream metadata about software components and associate it with the
    binary packages the data belongs to.
    '''

    if arch.name == 'all':
        # arch:all has no AppStream components, those are always associated with an architecture
        # and are included in arch-specific files (even if the package they belong to is arch:all)
        return

    arch_all = session.query(ArchiveArchitecture) \
                      .filter(ArchiveArchitecture.name == 'all').one()

    yaml_fname = local_repo.index_file(
        suite,
        os.path.join(component.name, 'dep11',
                     'Components-{}.yml.xz'.format(arch.name)))
    if not yaml_fname:
        return

    cidmap_fname = local_repo.index_file(
        suite,
        os.path.join(component.name, 'dep11',
                     'CID-Index-{}.json.gz'.format(arch.name)),
        check=False)
    if not cidmap_fname:
        return

    with gzip.open(cidmap_fname, 'rb') as f:
        cid_map = json.loads(f.read())
    with lzma.open(yaml_fname, 'r') as f:
        yaml_data = str(f.read(), 'utf-8')

    mdata = AppStream.Metadata()
    mdata.set_locale('ALL')
    mdata.set_format_style(AppStream.FormatStyle.COLLECTION)
    mdata.set_parse_flags(AppStream.ParseFlags.IGNORE_MEDIABASEURL)

    mdata.parse(yaml_data, AppStream.FormatKind.YAML)
    cpts = mdata.get_components()
    if len(cpts) == 0:
        return

    log.debug('Found {} software components in {}/{}'.format(
        len(cpts), suite.name, component.name))

    # create context for direct serialization to collection XML chunk
    context = AppStream.Context()
    context.set_locale('ALL')
    context.set_style(AppStream.FormatStyle.COLLECTION)

    for cpt in cpts:
        cpt.set_active_locale('C')

        pkgname = cpt.get_pkgname()
        if not pkgname:
            # we skip these for now, web-apps have no package assigned - we might need a better way to map
            # those to their packages, likely with an improved appstream-generator integration
            log.debug(
                'Found DEP-11 component without package name in {}/{}: {}'.
                format(suite.name, component.name, cpt.get_id()))
            continue

        # fetch package this component belongs to
        bin_pkg = session.query(BinaryPackage) \
            .filter(BinaryPackage.name == pkgname) \
            .filter(BinaryPackage.repo_id == repo.id) \
            .filter(BinaryPackage.architecture_id.in_((arch.id, arch_all.id))) \
            .filter(BinaryPackage.component_id == component.id) \
            .filter(BinaryPackage.suites.any(ArchiveSuite.id == suite.id)) \
            .order_by(BinaryPackage.version.desc()).first()

        if not bin_pkg:
            log.info('Found orphaned DEP-11 component in {}/{}: {}'.format(
                suite.name, component.name, cpt.get_id()))
            continue

        dcpt = SoftwareComponent()
        dcpt.kind = int(cpt.get_kind())
        dcpt.cid = cpt.get_id()

        dcpt.xml = cpt.to_xml_data(context)

        dcpt.gcid = cid_map.get(dcpt.cid)
        if not dcpt.gcid:
            log.info('Found DEP-11 component without GCID in {}/{}: {}'.format(
                suite.name, component.name, cpt.get_id()))

        # create UUID for this component (based on GCID or XML data)
        dcpt.update_uuid()

        existing_dcpt = session.query(SoftwareComponent) \
            .filter(SoftwareComponent.uuid == dcpt.uuid).one_or_none()
        if existing_dcpt:
            if bin_pkg in existing_dcpt.bin_packages:
                continue  # the binary package is already registered with this component
            existing_dcpt.bin_packages.append(bin_pkg)
            continue  # we already have this component, no need to add it again

        # add new software component to database
        dcpt.name = cpt.get_name()
        dcpt.summary = cpt.get_summary()
        dcpt.description = cpt.get_description()

        for icon in cpt.get_icons():
            if icon.get_kind() == AppStream.IconKind.CACHED:
                dcpt.icon_name = icon.get_name()
                break

        dcpt.project_license = cpt.get_project_license()
        dcpt.developer_name = cpt.get_developer_name()

        # test for free software
        dcpt.is_free = False
        if not dcpt.project_license:
            # We have no license set.
            # If we are in the 'main' component, we
            # assume we have free software
            if bin_pkg.component.name == 'main':
                dcpt.is_free = True
        else:
            # have AppStream test the SPDX license expression for free software
            dcpt.is_free = AppStream.license_is_free_license(
                dcpt.project_license)

        dcpt.categories = []
        for cat in cpt.get_categories():
            dcpt.categories.append(cat)

        dcpt.bin_packages = [bin_pkg]

        session.add(dcpt)
        log.debug('Added new software component \'{}\' to database'.format(
            dcpt.cid))
    session.commit()
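
A self-contained sketch of the compressed-index handling above: the CID index is gzip-compressed JSON and the DEP-11 component data is xz-compressed YAML (all file names and contents below are dummies):

import gzip
import json
import lzma
import os
import tempfile

tmpdir = tempfile.mkdtemp()
cidmap_fname = os.path.join(tmpdir, 'CID-Index-amd64.json.gz')
yaml_fname = os.path.join(tmpdir, 'Components-amd64.yml.xz')

with gzip.open(cidmap_fname, 'wb') as f:
    f.write(json.dumps({'org.example.app': 'dummy-gcid'}).encode('utf-8'))
with lzma.open(yaml_fname, 'w') as f:
    f.write(b"---\nFile: DEP-11\nVersion: '0.12'\n")

# read both back the same way update_appstream_data does
with gzip.open(cidmap_fname, 'rb') as f:
    cid_map = json.loads(f.read())
with lzma.open(yaml_fname, 'r') as f:
    yaml_data = str(f.read(), 'utf-8')

assert cid_map['org.example.app'] == 'dummy-gcid'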
Example #13
    def _create_faux_packages(self, session, mi_wspace: str,
                              suites_source: List[ArchiveSuite],
                              suite_target: ArchiveSuite):
        '''
        If we have a partial source and target suite, we need to let Britney know about the
        parent packages somehow.
        At the moment, we simply abuse the FauxPackages system for that.
        '''

        # we don't support more than one source suite for this feature at the moment
        if len(suites_source) > 1:
            log.info(
                'Not auto-generating faux packages: Multiple suites set as sources.'
            )
            return

        suite_source = suites_source[0]

        if suite_source.parent and suite_target.parent:
            log.info(
                'Creating faux-packages to aid resolving of partial suites.')
        else:
            log.info(
                'Not auto-generating faux packages: Source or target suite has no parent, generation is unnecessary.'
            )
            return

        existing_pkg_arch_set = set()
        log.debug(
            'Creating index of valid packages that do not need a faux package.'
        )

        # we need repository information to only generate faux packages if a package doesn't exist
        # in our source suite(s) already
        repo = self._get_local_repo(session)

        for suite in suites_source:
            esuite = session.query(ArchiveSuite) \
                            .options(joinedload(ArchiveSuite.components)) \
                            .options(joinedload(ArchiveSuite.architectures)) \
                            .filter(ArchiveSuite.id == suite.id).one()
            session.expunge(esuite)  # we don't want packages accidentally added to the database here
            for component in esuite.components:
                for arch in esuite.architectures:
                    aname = arch.name
                    for bpkg in repo.binary_packages(esuite, component, arch):
                        existing_pkg_arch_set.add(aname + ':' + bpkg.name)
                    for spkg in repo.source_packages(esuite, component):
                        existing_pkg_arch_set.add(aname + ':' + spkg.name)

        archive_root_dir = self._lconf.archive_root_dir
        fauxpkg_fname = os.path.join(mi_wspace, 'input', 'faux-packages')

        log.debug('Generating faux packages list')
        fauxpkg_data = {}
        for component in suite_target.parent.components:

            for installer_dir in ['', 'debian-installer']:
                for arch in suite_target.parent.architectures:
                    pfile = os.path.join(archive_root_dir, 'dists',
                                         suite_target.parent.name,
                                         component.name, installer_dir,
                                         'binary-{}'.format(arch.name),
                                         'Packages.xz')
                    if not os.path.isfile(pfile):
                        continue

                    log.debug('Reading data for faux packages list: {}'.format(
                        pfile))

                    with TagFile(pfile) as tf:
                        for e in tf:
                            pkgname = e['Package']
                            pkgversion = e['Version']
                            pkgarch = e['Architecture']

                            pkid = '{}-{}-{}'.format(pkgname, pkgversion,
                                                     pkgarch)
                            if pkid in fauxpkg_data:
                                continue
                            pkgname_arch = pkgarch + ':' + pkgname
                            if pkgname_arch in existing_pkg_arch_set:
                                continue
                            provides = e.get('Provides', '')

                            data = 'Package: {}\nVersion: {}'.format(
                                pkgname, pkgversion)
                            if pkgarch and pkgarch != 'all':
                                data = data + '\nArchitecture: {}'.format(
                                    pkgarch)
                            if provides:
                                data = data + '\nProvides: {}'.format(provides)
                            if component.name != 'main':
                                data = data + '\nComponent: {}'.format(
                                    component.name)

                            fauxpkg_data[pkid] = data

                            # FIXME: We shouldn't have to special-case this :any case,
                            # rather Britney should do the right thing and recognize this
                            # notation for faux-packages. But until that is fixed
                            # properly and since a dependency on python3:any is so common, we
                            # will work around this issue
                            if pkgname == 'python3':
                                pkid = '{}-{}-{}'.format(
                                    'python3:any', pkgversion, pkgarch)
                                if pkid in fauxpkg_data:
                                    continue
                                fauxpkg_data[pkid] = data.replace(
                                    'Package: python3\n',
                                    'Package: python3:any\n')

        with open(fauxpkg_fname, 'w') as f:
            for segment in fauxpkg_data.values():
                f.write(segment + '\n\n')
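
A sketch of the `TagFile` parsing used above, assuming `TagFile` is python-apt's `apt_pkg.TagFile` (the import is not shown in these examples); the stanza is dummy data:

import tempfile

import apt_pkg

stanza = (
    'Package: example\n'
    'Version: 1.0-1\n'
    'Architecture: amd64\n'
    'Provides: example-alias\n'
)
with tempfile.NamedTemporaryFile('w', suffix='.packages', delete=False) as f:
    f.write(stanza)
    pfile = f.name

with apt_pkg.TagFile(pfile) as tf:
    for e in tf:
        # stanza fields are accessed like a mapping, with .get() for optional ones
        print(e['Package'], e['Version'], e.get('Provides', ''))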
Example #14
def command_autosync(options):
    ''' Automatically synchronize packages '''

    with session_scope() as session:
        sync_sources = session.query(SynchrotronSource).all()
        autosyncs = session.query(SynchrotronConfig) \
                           .filter(SynchrotronConfig.sync_enabled == True) \
                           .filter(SynchrotronConfig.sync_auto_enabled == True).all()  # noqa: E712

        for autosync in autosyncs:
            log.info('Synchronizing packages from {}/{} with {}'.format(
                autosync.source.os_name, autosync.source.suite_name,
                autosync.destination_suite.name))

            emitter = EventEmitter(LkModule.SYNCHROTRON)

            engine = SyncEngine(autosync.destination_suite.name,
                                autosync.source.suite_name)
            ret, issue_data = engine.autosync(session, autosync,
                                              autosync.auto_cruft_remove)
            if not ret:
                sys.exit(2)

            existing_sync_issues = {}
            for ssource in sync_sources:
                all_issues = session.query(SynchrotronIssue) \
                                    .filter(SynchrotronIssue.source_suite == ssource.suite_name,
                                            SynchrotronIssue.target_suite == autosync.destination_suite.name,
                                            SynchrotronIssue.config_id == autosync.id) \
                                    .all()
                for eissue in all_issues:
                    eid = '{}-{}-{}:{}'.format(eissue.package_name,
                                               eissue.source_version,
                                               eissue.target_version,
                                               str(eissue.kind))
                    existing_sync_issues[eid] = eissue

            for info in issue_data:
                eid = '{}-{}-{}:{}'.format(info.package_name,
                                           info.source_version,
                                           info.target_version, str(info.kind))
                issue = existing_sync_issues.pop(eid, None)
                if issue:
                    # the issue already exists, so we just update it
                    new_issue = False
                else:
                    new_issue = True
                    issue = info
                    issue.config = autosync

                if new_issue:
                    session.add(issue)

                    data = {
                        'name': issue.package_name,
                        'src_os': autosync.source.os_name,
                        'suite_src': issue.source_suite,
                        'suite_dest': issue.target_suite,
                        'version_src': issue.source_version,
                        'version_dest': issue.target_version,
                        'kind': str(issue.kind)
                    }

                    emitter.submit_event('new-autosync-issue', data)

            for eissue in existing_sync_issues.values():
                session.delete(eissue)

                data = {
                    'name': eissue.package_name,
                    'src_os': autosync.source.os_name,
                    'suite_src': eissue.source_suite,
                    'suite_dest': eissue.target_suite,
                    'version_src': eissue.source_version,
                    'version_dest': eissue.target_version,
                    'kind': str(eissue.kind)
                }

                emitter.submit_event('resolved-autosync-issue', data)
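
The new/resolved bookkeeping above follows a simple reconcile pattern: index the existing issues by a composite key, pop every key that is still reported, and treat whatever remains in the index as resolved. A minimal self-contained sketch:

existing = {'foo-1.0-2.0:merge': 'issue-a', 'bar-1.1-2.1:merge': 'issue-b'}
incoming = ['foo-1.0-2.0:merge', 'baz-0.9-1.0:merge']

new_ids = []
for eid in incoming:
    if existing.pop(eid, None) is None:
        new_ids.append(eid)      # unseen before -> 'new-autosync-issue'
resolved_ids = list(existing)    # left over -> 'resolved-autosync-issue'

assert new_ids == ['baz-0.9-1.0:merge']
assert resolved_ids == ['bar-1.1-2.1:merge']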
Example #15
def command_autosync(options):
    ''' Automatically synchronize packages '''

    with session_scope() as session:
        autosyncs = session.query(SynchrotronConfig).filter(SynchrotronConfig.sync_enabled == True) \
            .filter(SynchrotronConfig.sync_auto_enabled == True).all()  # noqa: E712

        bconf, sconf = get_sync_config()
        blacklist_pkgnames = get_package_blacklist()  # the blacklist is global for now

        for autosync in autosyncs:
            incoming_suite = get_suiteinfo_for_suite(autosync.destination_suite)
            sconf.syncBinaries = autosync.sync_binaries
            sconf.source.defaultSuite = autosync.source.suite_name
            sconf.source.repoUrl = autosync.source.repo_url

            log.info('Synchronizing packages from {}/{} with {}'.format(autosync.source.os_name, autosync.source.suite_name,
                                                                        autosync.destination_suite.name))

            emitter = EventEmitter(LkModule.SYNCHROTRON)

            engine = SyncEngine(bconf, sconf, incoming_suite)
            engine.setBlacklist(blacklist_pkgnames)

            ret, issue_data = engine.autosync()
            publish_synced_spkg_events(engine,
                                       autosync.source.os_name,
                                       autosync.source.suite_name,
                                       autosync.destination_suite.name,
                                       emitter=emitter)
            if not ret:
                sys.exit(2)

            for ssuite in sconf.source.suites:
                session.query(SynchrotronIssue) \
                    .filter(SynchrotronIssue.source_suite == ssuite.name,
                            SynchrotronIssue.target_suite == incoming_suite.name,
                            SynchrotronIssue.config_id == autosync.id) \
                    .delete()

            for info in issue_data:
                issue = SynchrotronIssue()
                issue.config = autosync
                issue.kind = SynchrotronIssueKind(info.kind)
                issue.package_name = info.packageName
                issue.source_suite = info.sourceSuite
                issue.target_suite = info.targetSuite
                issue.source_version = info.sourceVersion
                issue.target_version = info.targetVersion
                issue.details = info.details
                session.add(issue)

                data = {'name': issue.package_name,
                        'src_os': autosync.source.os_name,
                        'src_suite': issue.source_suite,
                        'dest_suite': issue.target_suite,
                        'src_version': issue.source_version,
                        'dest_version': issue.target_version,
                        'kind': str(issue.kind)}

                emitter.submit_event('autosync-issue', data)
Example #16
def command_autosync(options):
    ''' Automatically synchronize packages '''

    with session_scope() as session:
        autosyncs = session.query(SynchrotronConfig).filter(SynchrotronConfig.sync_enabled == True) \
            .filter(SynchrotronConfig.sync_auto_enabled == True).all()  # noqa: E712

        bconf, sconf = get_sync_config()
        blacklist_pkgnames = get_package_blacklist()  # the blacklist is global for now

        for autosync in autosyncs:
            incoming_suite = get_suiteinfo_for_suite(
                autosync.destination_suite)
            sconf.syncBinaries = autosync.sync_binaries
            sconf.source.defaultSuite = autosync.source.suite_name
            sconf.source.repoUrl = autosync.source.repo_url

            log.info('Synchronizing packages from {}/{} with {}'.format(
                autosync.source.os_name, autosync.source.suite_name,
                autosync.destination_suite.name))

            emitter = EventEmitter(LkModule.SYNCHROTRON)

            engine = SyncEngine(bconf, sconf, incoming_suite)
            engine.setBlacklist(blacklist_pkgnames)

            ret, issue_data = engine.autosync()
            publish_synced_spkg_events(engine,
                                       autosync.source.os_name,
                                       autosync.source.suite_name,
                                       autosync.destination_suite.name,
                                       emitter=emitter)
            if not ret:
                sys.exit(2)

            existing_sync_issues = {}
            for ssuite in sconf.source.suites:
                all_issues = session.query(SynchrotronIssue) \
                                    .filter(SynchrotronIssue.source_suite == ssuite.name,
                                            SynchrotronIssue.target_suite == incoming_suite.name,
                                            SynchrotronIssue.config_id == autosync.id) \
                                    .all()
                for eissue in all_issues:
                    eid = '{}-{}-{}:{}'.format(eissue.package_name,
                                               eissue.source_version,
                                               eissue.target_version,
                                               str(eissue.kind))
                    existing_sync_issues[eid] = eissue

            for info in issue_data:
                issue_kind = SynchrotronIssueKind(info.kind)
                eid = '{}-{}-{}:{}'.format(info.packageName,
                                           info.sourceVersion,
                                           info.targetVersion, str(issue_kind))
                issue = existing_sync_issues.pop(eid, None)
                if issue:
                    # the issue already exists, so we just update it
                    new_issue = False
                else:
                    new_issue = True
                    issue = SynchrotronIssue()
                    issue.config = autosync
                    issue.package_name = info.packageName
                    issue.source_version = info.sourceVersion
                    issue.target_version = info.targetVersion
                    issue.kind = issue_kind

                issue.source_suite = info.sourceSuite
                issue.target_suite = info.targetSuite

                issue.details = info.details

                if new_issue:
                    session.add(issue)

                    data = {
                        'name': issue.package_name,
                        'src_os': autosync.source.os_name,
                        'suite_src': issue.source_suite,
                        'suite_dest': issue.target_suite,
                        'version_src': issue.source_version,
                        'version_dest': issue.target_version,
                        'kind': str(issue.kind)
                    }

                    emitter.submit_event('new-autosync-issue', data)

            for eissue in existing_sync_issues.values():
                session.delete(eissue)

                data = {
                    'name': eissue.package_name,
                    'src_os': autosync.source.os_name,
                    'suite_src': eissue.source_suite,
                    'suite_dest': eissue.target_suite,
                    'version_src': eissue.source_version,
                    'version_dest': eissue.target_version,
                    'kind': str(eissue.kind)
                }

                emitter.submit_event('resolved-autosync-issue', data)