Example 1
def install_service_cert(options):
    ''' Install a private key for a specific service '''
    from shutil import copyfile

    service = '' if not options.service else options.service.lower()
    if service != 'lighthouse':
        print('The "service" option is not "lighthouse". Currently, keys can only be installed for the Lighthouse module.')
        sys.exit(1)

    source_keyfile = options.keyfile
    if not source_keyfile:
        print('No private key file given!')
        sys.exit(1)

    if not os.path.isfile(source_keyfile):
        print('Private key file "{}" was not found.'.format(source_keyfile))
        sys.exit(1)

    _, sec_key = zmq.auth.load_certificate(source_keyfile)
    if not sec_key:
        print('The given keyfile does not contain a secret key!')
        sys.exit(1)

    lconf = LocalConfig()
    target_keyfile = lconf.zcurve_secret_keyfile_for_module(service)
    if os.path.isfile(target_keyfile) and not options.force:
        print('We already have a secret key for this service on the current machine. You can override the existing one by specifying "--force".')
        sys.exit(2)

    try:
        copyfile(source_keyfile, target_keyfile)
    except Exception as e:
        print('Failed to install new secret key as {}: {}'.format(target_keyfile, str(e)))
        sys.exit(3)
    print('Installed private key as {}'.format(target_keyfile))
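A minimal invocation sketch for the function above, assuming it is importable and using an argparse-style Namespace; the attribute values are illustrative:

from argparse import Namespace

# hypothetical options object; the attributes mirror what install_service_cert reads
opts = Namespace(service='lighthouse',
                 keyfile='/tmp/lighthouse.key_secret',
                 force=False)
install_service_cert(opts)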
Example 2
class BaseConfig:

    PROJECT = 'Laniakea Software View'
    BUG_REPORT_URL = 'https://github.com/lkorigin/laniakea/issues'

    OS_NAME = config_get_project_name()

    LOG_STORAGE_URL = '/raw/logs'  # web URL where raw logs are stored by Rubicon
    APPSTREAM_MEDIA_URL = LocalConfig().archive_appstream_media_url
    ARCHIVE_URL = LocalConfig().archive_url

    #
    # Caching behavior
    #
    CACHE_TYPE = 'simple'
    CACHE_DEFAULT_TIMEOUT = 300

    # Get the app root path; flask.root_path could be used as well.
    # ../../config.py
    PROJECT_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))

    SECRET_KEY = os.urandom(16)

    DEBUG = False
    TESTING = False

    LOG_FOLDER = os.path.join(INSTANCE_FOLDER_PATH, 'logs')

    THEME = 'default'
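Configuration classes like this are normally handed to Flask via app.config.from_object(), which copies the uppercase attributes into app.config; a minimal sketch:

from flask import Flask

app = Flask(__name__)
app.config.from_object(BaseConfig)
assert app.config['CACHE_DEFAULT_TIMEOUT'] == 300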
Example 3
def localconfig(samplesdir):
    '''
    Retrieve a Laniakea LocalConfig object which is set
    up for testing.
    '''
    import json

    test_aux_data_dir = os.path.join('/tmp', 'test-lkaux')
    if os.path.isdir(test_aux_data_dir):
        from shutil import rmtree
        rmtree(test_aux_data_dir)
    os.makedirs(test_aux_data_dir)

    config_tmpl_fname = os.path.join(samplesdir, 'config', 'base-config.json')
    with open(config_tmpl_fname, 'r') as f:
        config_json = json.load(f)

    config_json['CurveKeysDir'] = os.path.join(test_aux_data_dir, 'keys',
                                               'curve')
    config_json['Archive']['path'] = os.path.join(samplesdir, 'samplerepo',
                                                  'dummy')

    config_fname = os.path.join(test_aux_data_dir, 'base-config.json')
    with open(config_fname, 'w') as f:
        json.dump(config_json, f)

    conf = LocalConfig(config_fname)  # constructing LocalConfig also registers it as the singleton instance
    assert conf.cache_dir == '/var/tmp/laniakea'
    assert conf.workspace == '/tmp/test-lkws/'

    assert conf.database_url == 'postgresql://*****:*****@localhost:5432/laniakea_unittest'
    assert conf.lighthouse.endpoints_jobs == ['tcp://*:5570']
    assert conf.lighthouse.endpoints_submit == ['tcp://*:5571']
    assert conf.lighthouse.endpoints_publish == ['tcp://*:5572']
    assert conf.lighthouse.servers_jobs == ['tcp://localhost:5570']
    assert conf.lighthouse.servers_submit == ['tcp://localhost:5571']
    assert conf.lighthouse.servers_publish == ['tcp://localhost:5572']

    # Check injected sample certificate directory
    assert conf.secret_curve_keyfile_for_module('test').startswith(
        '/tmp/test-lkaux/keys/curve/secret/')
    os.makedirs(conf._curve_keys_basedir, exist_ok=True)

    # add the trusted keyring with test keys
    conf._trusted_gpg_keyrings = []
    conf._trusted_gpg_keyrings.append(
        os.path.join(samplesdir, 'gpg', 'keyrings', 'keyring.gpg'))
    conf._trusted_gpg_keyrings.append(
        os.path.join(samplesdir, 'gpg', 'keyrings', 'other-keyring.gpg'))

    # set our GPG secret keyring dir
    conf._secret_gpg_home_dir = os.path.join(samplesdir, 'gpg', 'home')

    return conf
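Registered with @pytest.fixture, the helper above is consumed by naming it as a test parameter; a short sketch (the test name is illustrative, and samplesdir is assumed to be a fixture as well):

def test_curve_key_location(localconfig):
    # the fixture returns the fully prepared LocalConfig singleton
    keyfile = localconfig.secret_curve_keyfile_for_module('test')
    assert keyfile.startswith('/tmp/test-lkaux/keys/curve/secret/')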
Example 4
    def __init__(self, endpoint, pub_queue):
        self._server = None
        self._ctx = zmq.Context.instance()

        lconf = LocalConfig()
        self._trusted_keys_dir = lconf.trusted_curve_keys_dir + '/'
        self._server_private_key = lconf.secret_curve_keyfile_for_module(
            LkModule.LIGHTHOUSE)

        self._jobs_endpoint = endpoint
        self._worker = JobWorker(pub_queue)
Example 5
    def __init__(self, verbose=False):
        self._server = None
        self._ctx = zmq.Context.instance()

        if verbose:
            log.basicConfig(level=log.DEBUG, format="[%(levelname)s] %(message)s")

        lconf = LocalConfig()
        self._trusted_keys_dir = lconf.zcurve_trusted_certs_dir + '/'
        self._server_private_key = lconf.zcurve_secret_keyfile_for_module(LkModule.LIGHTHOUSE)

        self._jobs_endpoint = lconf.lighthouse_endpoint
        self._worker = JobWorker()
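The secret keyfile resolved here is what pyzmq needs to run a CURVE-encrypted server socket; a minimal sketch of that setup (the keyfile path and endpoint are illustrative):

import zmq
import zmq.auth

ctx = zmq.Context.instance()
public_key, secret_key = zmq.auth.load_certificate('/etc/laniakea/keys/curve/lighthouse.key_secret')

sock = ctx.socket(zmq.ROUTER)
sock.curve_secretkey = secret_key
sock.curve_publickey = public_key
sock.curve_server = True  # must be set before bind() for CURVE to be active
sock.bind('tcp://*:5570')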
Example 6
def get_sync_config():
    import laniakea.native
    from laniakea.native import SyncSourceSuite, create_native_baseconfig

    lconf = LocalConfig()
    bconf = create_native_baseconfig()

    with session_scope() as session:
        sync_sources = session.query(SynchrotronSource).all()

        # FIXME: SynchrotronConfig needs adjustments in the D code to work
        # better with the new "multiple autosync tasks" model.
        # Maybe when doing this there's a good opportunity to rewrite some of
        # the D code in Python...
        sconf = laniakea.native.SynchrotronConfig()
        sconf.sourceName = sync_sources[0].os_name
        sconf.syncBinaries = False
        sconf.sourceKeyrings = lconf.synchrotron_sourcekeyrings

        sconf.source.defaultSuite = None
        sconf.source.repoUrl = sync_sources[0].repo_url

        source_suites = []
        for sd in sync_sources:
            sssuite = SyncSourceSuite()
            sssuite.name = sd.suite_name
            sssuite.architectures = sd.architectures
            sssuite.components = sd.components

            source_suites.append(sssuite)
        sconf.source.suites = source_suites

    return bconf, sconf
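session_scope() used above is the common SQLAlchemy transactional-scope context manager; a sketch of how it is typically defined (session_factory is assumed to be a configured sessionmaker):

from contextlib import contextmanager

@contextmanager
def session_scope():
    # standard SQLAlchemy pattern: commit on success, roll back on failure
    session = session_factory()
    try:
        yield session
        session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()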
Example 7
    def __init__(self, local_config=None):
        if not local_config:
            local_config = LocalConfig()
        self._lconf = local_config
        self._loaded = False

        # try to load default configuration
        self.load()
Example 8
    def __init__(self):
        self._flatpak_exe = shutil.which('flatpak')
        self._ostree_exe = shutil.which('ostree')
        self._lconf = LocalConfig()

        if not self._flatpak_exe:
            raise Exception('Unable to find the "flatpak" binary, cannot modify Flatpak repositories.')
        if not self._ostree_exe:
            raise Exception('Unable to find the "ostree" binary, cannot modify Flatpak repositories.')
Example 9
    def __init__(self, event_pub_queue):
        self._lconf = LocalConfig()
        self._arch_indep_affinity = config_get_value(LkModule.ARIADNE, 'indep_arch_affinity')
        self._event_pub_queue = event_pub_queue

        with session_scope() as session:
            # FIXME: We need much better ways to select the right suite to synchronize with
            incoming_suite = session.query(ArchiveSuite) \
                .filter(ArchiveSuite.accept_uploads == True) \
                .order_by(ArchiveSuite.name) \
                .first()  # noqa: E712
            self._default_incoming_suite_name = incoming_suite.name
Example 10
def run(args):
    if len(args) == 0:
        print('Need a subcommand to proceed!')
        sys.exit(1)

    parser = create_parser()
    args = parser.parse_args(args)
    check_print_version(args)
    check_verbose(args)
    if args.config_fname:
        LocalConfig(args.config_fname)

    args.func(args)
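run() relies on create_parser() binding a handler to each subcommand, so that args.func(args) dispatches correctly; a sketch of that standard argparse pattern (command, option, and handler names are illustrative):

import argparse

def create_parser():
    parser = argparse.ArgumentParser(description='Laniakea CLI')
    parser.add_argument('--config', dest='config_fname', default=None)
    subparsers = parser.add_subparsers()

    sp = subparsers.add_parser('install-service-key')
    sp.add_argument('keyfile')
    # set_defaults() is what makes args.func(args) work in run()
    sp.set_defaults(func=lambda args: print('would install', args.keyfile))
    return parser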
Example 11
def create_native_baseconfig():
    from laniakea import LocalConfig

    bconf = BaseConfig()

    bconf.projectName = config_get_project_name()
    bconf.archive.distroTag = config_get_distro_tag()

    lconf = LocalConfig()
    bconf.cacheDir = lconf.cache_dir
    bconf.workspace = lconf.workspace
    bconf.archive.rootPath = lconf.archive_root_dir

    return bconf
Example 12
def install_service_keyfile(options):
    ''' Install a private key for a specific service '''
    from shutil import copyfile

    service = '' if not options.service else options.service.lower()
    if not service:
        print('The "service" option must not be empty')
        sys.exit(1)

    source_keyfile = options.keyfile
    if not source_keyfile:
        print('No private key file given!')
        sys.exit(1)

    if not os.path.isfile(source_keyfile):
        print('Private key file "{}" was not found.'.format(source_keyfile))
        sys.exit(1)

    _, sec_key = zmq.auth.load_certificate(source_keyfile)
    if not sec_key:
        print('The given keyfile does not contain a secret ZCurve key!')
        sys.exit(1)

    lconf = LocalConfig()
    target_keyfile = lconf.secret_curve_keyfile_for_module(service)
    if os.path.isfile(target_keyfile) and not options.force:
        print(
            'We already have a secret key for this service on the current machine. You can override the existing one by specifying "--force".'
        )
        sys.exit(2)

    try:
        copyfile(source_keyfile, target_keyfile)
    except Exception as e:
        print('Failed to install new secret key as {}: {}'.format(
            target_keyfile, str(e)))
        sys.exit(3)
    print('Installed private key as {}'.format(target_keyfile))
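The keyfile being installed would usually have been generated with pyzmq's certificate helper; a minimal generation sketch (the directory and service name are illustrative):

import os
import zmq.auth

# the target directory must exist; create_certificates() then writes
# '<name>.key' (public only) and '<name>.key_secret' (public + secret)
os.makedirs('/tmp/keys', exist_ok=True)
public_path, secret_path = zmq.auth.create_certificates('/tmp/keys', 'my-service')
print('Pass this to install_service_keyfile:', secret_path)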
Example 13
    def __init__(self, endpoints, pub_queue):
        from laniakea import LocalConfig, LkModule
        from laniakea.msgstream.signing import NACL_ED25519, decode_signing_key_base64, \
            keyfile_read_signing_key

        lconf = LocalConfig()
        self._sockets = []
        self._endpoints = endpoints
        self._ctx = zmq.Context.instance()
        self._pub_queue = pub_queue

        # load our own signing key, so we can sign outgoing messages
        keyfile = lconf.secret_curve_keyfile_for_module(LkModule.LIGHTHOUSE)

        self._signer_id = None
        self._signing_key = None
        if os.path.isfile(keyfile):
            self._signer_id, self._signing_key = keyfile_read_signing_key(keyfile)

        if not self._signing_key:
            log.warning('Cannot sign outgoing messages: No valid signing key found for this module.')
        elif isinstance(self._signing_key, str):
            self._signing_key = decode_signing_key_base64(NACL_ED25519, self._signing_key)
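NACL_ED25519 and decode_signing_key_base64 suggest the signing key is an ed25519 key; a hedged sketch of signing a payload with plain PyNaCl (Laniakea's own msgstream helpers may wrap this differently):

from nacl.encoding import Base64Encoder
from nacl.signing import SigningKey

signing_key = SigningKey.generate()  # stand-in for the key loaded from the module keyfile
signed = signing_key.sign(b'{"tag": "job-status"}')
signature_b64 = Base64Encoder.encode(signed.signature)  # detached signature, base64-encoded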
Example 14
    def __init__(self, endpoint, pub_queue):
        from glob import glob
        from laniakea import LocalConfig
        from laniakea.msgstream import keyfile_read_verify_key

        lconf = LocalConfig()
        self._socket = None
        self._ctx = zmq.Context.instance()

        self._pub_queue = pub_queue
        self._endpoint = endpoint

        # Load all the keys that we trust to receive messages from
        # TODO: Implement auto-reloading of valid keys list if directory changes
        self._trusted_keys = {}
        for keyfname in glob(os.path.join(lconf.trusted_curve_keys_dir, '*')):
            signer_id, verify_key = keyfile_read_verify_key(keyfname)
            if signer_id and verify_key:
                self._trusted_keys[signer_id] = verify_key
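The trusted keys collected above would be used to verify signatures on incoming messages; a sketch assuming the dictionary maps signer IDs to nacl.signing.VerifyKey objects:

from nacl.exceptions import BadSignatureError

def is_trusted_message(trusted_keys, signer_id, message: bytes, signature: bytes) -> bool:
    # reject messages from unknown signers or with invalid signatures
    verify_key = trusted_keys.get(signer_id)
    if not verify_key:
        return False
    try:
        verify_key.verify(message, signature)
        return True
    except BadSignatureError:
        return False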
Example 15
def create_native_baseconfig():
    from laniakea import LocalConfig

    session = session_factory()
    bconf = BaseConfig()

    bconf.projectName = config_get_project_name()
    bconf.archive.distroTag = config_get_distro_tag()

    dev_suite = session.query(ArchiveSuite) \
        .filter(ArchiveSuite.devel_target == True).one()  # noqa: E712

    bconf.archive.develSuite = dev_suite.name

    lconf = LocalConfig()
    bconf.cacheDir = lconf.cache_dir
    bconf.workspace = lconf.workspace
    bconf.archive.rootPath = lconf.archive_root_dir

    return bconf
Example 16
    def __init__(self):
        from laniakea.db import config_get_project_name

        # default to system germinator (usually /usr/bin/germinate)
        self._germinate_exe = 'germinate'

        self._lconf = LocalConfig()
        self._project_name = config_get_project_name()

        self._metapackage_git_url = config_get_value(LkModule.PLANTER,
                                                     'git_seeds_url')

        workspace = os.path.join(self._lconf.workspace, 'planter')
        os.makedirs(workspace, exist_ok=True)

        # meta package / seed source directory
        self._meta_src_dir = os.path.join(workspace, 'meta')

        # output dir
        self._results_base_dir = os.path.join(workspace, 'results')
Example 17
def add_flatpak_repo(options):
    print_header('Add new Flatpak repository')
    lconf = LocalConfig()
    fputil = FlatpakUtil()

    with session_scope() as session:
        repo_name = input_str('Machine-readable name of this repository (e.g. "{}")'.format(config_get_distro_tag()))
        repo_name = repo_name.replace(' ', '_')

        repo = FlatpakRepository(repo_name)
        repo_path = os.path.join(lconf.archive_flatpak_root_dir, repo.name)
        if os.path.isdir(repo_path):
            print_error_exit('Repository path at "{}" already exists, can not continue!'.format(repo_path))
            return
        repo.default_branch = 'stable'

        collection_id = None
        while True:
            collection_id = input_str('Set collection-id (a globally unique reverse DNS value to identify the collection of Flatpaks in this repository)')
            if len(collection_id.split('.')) < 3:
                print_note('Please enter a rDNS ID!')
            else:
                break
        repo.collection_id = collection_id

        repo.title = input_str('Human-readable repository title')
        repo.comment = input_str('Short description / tagline of this repository')
        repo.description = input_str('Longer repository description', allow_empty=True)

        repo.url_homepage = input_str('Homepage URL of this repository', allow_empty=True)
        repo.url_icon = input_str('URL of a repository icon', allow_empty=True)

        repo.gpg_key_id = input_str('GPG key ID of the key used to sign this repository')

        repo.allowed_branches = input_list('List of permitted branch names in this repository', allow_empty=True)
        if 'stable' not in repo.allowed_branches:
            repo.allowed_branches.append('stable')

        fputil.init_repo(repo, repo_path)
        session.add(repo)
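input_str and input_list are Laniakea console helpers; a hypothetical stand-in showing the behavior the code above relies on (prompting until a value is given unless allow_empty is set):

def input_str(prompt, allow_empty=False):
    # hypothetical stand-in for Laniakea's interactive prompt helper
    while True:
        value = input('{}: '.format(prompt)).strip()
        if value or allow_empty:
            return value
        print('A value is required.')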
Example 18
    def __init__(self, target_suite_name: str, source_suite_name: str):
        self._lconf = LocalConfig()
        self._dak = DakBridge()

        # FIXME: Don't hardcode this!
        repo_name = 'master'

        # the repository of the distribution we import stuff into
        self._target_repo = Repository(self._lconf.archive_root_dir, repo_name)
        self._target_repo.set_trusted(True)

        self._target_suite_name = target_suite_name
        self._source_suite_name = source_suite_name
        self._distro_tag = config_get_distro_tag()
        self._synced_source_pkgs: list[SourcePackage] = []

        with session_scope() as session:
            sync_source = session.query(SynchrotronSource) \
                                 .filter(SynchrotronSource.suite_name == self._source_suite_name).one()

            # FIXME: Synchrotron needs adjustments to work
            # better with the new "multiple autosync tasks" model.
            # This code will need to be revised for that
            # (currently it is just a 1:1 translation from D code)

            # the repository of the distribution we use to sync stuff from
            self._source_repo = Repository(
                sync_source.repo_url, sync_source.os_name,
                self._lconf.synchrotron_sourcekeyrings)

        # we trust everything by default
        self._imports_trusted = True

        with session_scope() as session:
            # each query row is a single-element tuple, hence the "value," unpacking
            self._sync_blacklist = {value for value, in session.query(SyncBlacklistEntry.pkgname)}
Example 19
def import_suite_packages(suite_name):
    # FIXME: Don't hardcode the "master" repository here, fully implement
    # the "multiple repositories" feature
    repo_name = 'master'

    session = session_factory()
    suite = session.query(ArchiveSuite) \
        .filter(ArchiveSuite.name == suite_name).one()
    repo = session.query(ArchiveRepository) \
        .filter(ArchiveRepository.name == repo_name).one()

    lconf = LocalConfig()
    local_repo = Repository(lconf.archive_root_dir, lconf.cache_dir, repo_name,
                            [])

    for component in suite.components:

        # fetch all source packages for the given repository
        # FIXME: Urgh... We need to do this better, this is not efficient.
        existing_spkgs = dict()
        all_existing_src_packages = session.query(SourcePackage) \
            .options(joinedload(SourcePackage.suites)) \
            .filter(SourcePackage.repo_id == repo.id) \
            .filter(SourcePackage.component_id == component.id).all()
        for e_spkg in all_existing_src_packages:
            existing_spkgs[e_spkg.uuid] = e_spkg

        for spi in local_repo.getSourcePackages(suite.name, component.name):
            spkg = SourcePackage()
            spkg.name = spi.name
            spkg.version = spi.ver
            spkg.repo = repo
            # we can generate the uuid from name/version/repo-name now
            spkg.update_uuid()

            db_spkg = existing_spkgs.pop(spkg.uuid, None)
            if db_spkg:
                if suite in db_spkg.suites:
                    continue  # the source package is already registered with this suite
                db_spkg.suites.append(suite)
                continue

            # if we are here, the source package is completely new and is only in one suite
            spkg.suites = [suite]
            spkg.component = component
            spkg.architectures = spi.architectures
            spkg.standards_version = spi.standardsVersion
            spkg.format_version = spi.format
            spkg.homepage = spi.homepage
            spkg.vcs_browser = spi.vcsBrowser
            spkg.maintainer = spi.maintainer
            spkg.uploaders = spi.uploaders
            spkg.build_depends = spi.buildDepends
            spkg.directory = spi.directory

            binaries = []
            for b in spi.binaries:
                binfo = PackageInfo()
                binfo.deb_type = b.debType
                binfo.name = b.name
                binfo.version = b.ver
                binaries.append(binfo)
            spkg.binaries = binaries

            for fi in spi.files:
                f = ArchiveFile()
                f.fname = fi.fname
                f.size = fi.size
                f.sha256sum = fi.sha256sum
                spkg.files.append(f)

            session.add(spkg)

        for old_spkg in existing_spkgs.values():
            if suite in old_spkg.suites:
                old_spkg.suites.remove(suite)
            if len(old_spkg.suites) <= 0:
                for f in old_spkg.files:
                    session.delete(f)
                session.delete(old_spkg)

        # commit the source package changes already
        session.commit()

        for arch in suite.architectures:

            # Get all binary packages for the given architecture
            # FIXME: Urgh... We need to do this better, this is not efficient.
            existing_bpkgs = dict()
            for e_bpkg in session.query(BinaryPackage) \
                    .options(joinedload(BinaryPackage.suites)) \
                    .filter(BinaryPackage.repo_id == repo.id) \
                    .filter(BinaryPackage.component_id == component.id) \
                    .filter(BinaryPackage.architecture_id == arch.id).all():
                existing_bpkgs[e_bpkg.uuid] = e_bpkg

            # add information about regular binary packages
            existing_bpkgs = _register_binary_packages(
                session, repo, suite, component, arch, existing_bpkgs,
                local_repo.getBinaryPackages(suite.name, component.name,
                                             arch.name))
            session.commit()

            # add information about debian-installer packages
            existing_bpkgs = _register_binary_packages(
                session, repo, suite, component, arch, existing_bpkgs,
                local_repo.getInstallerPackages(suite.name, component.name,
                                                arch.name))
            session.commit()

            for old_bpkg in existing_bpkgs.values():
                if suite in old_bpkg.suites:
                    old_bpkg.suites.remove(suite)
                if len(old_bpkg.suites) <= 0:
                    session.delete(old_bpkg.pkg_file)
                    session.delete(old_bpkg)
            session.commit()

            # import new AppStream component metadata
            import_appstream_data(session, local_repo, repo, suite, component,
                                  arch)

    # delete orphaned AppStream metadata
    for cpt in session.query(SoftwareComponent).filter(
            ~SoftwareComponent.bin_packages.any()).all():
        session.delete(cpt)
    session.commit()
Example 20
def install_trusted_keyfile(options):
    ''' Install a public key to trust a client node. '''
    from shutil import copyfile

    if not options.name:
        print('No name for this public key / client given!')
        sys.exit(1)

    source_keyfile = options.keyfile
    if not source_keyfile:
        print('No public key file given!')
        sys.exit(1)

    if not os.path.isfile(source_keyfile):
        print('Public key file "{}" was not found.'.format(source_keyfile))
        sys.exit(1)

    pub_key = None
    sec_key = None
    try:
        pub_key, sec_key = zmq.auth.load_certificate(source_keyfile)
    except ValueError:
        pass
    if not pub_key:
        log.info('The given keyfile does not contain a public ZCurve key!')
    if sec_key:
        print('')
        print(
            '/!\\ The current file contains a secret ZCurve key. This file should never leave the client machine it is installed on.'
        )
        print('')

    _, verify_key = keyfile_read_verify_key(source_keyfile)
    if not verify_key:
        log.info('The given keyfile does not contain a verification key!')
    if not verify_key and not pub_key:
        log.error(
            'The keyfile contains neither a public encryption key nor a verification key. Cannot continue.'
        )
        sys.exit(4)

    _, sign_key = keyfile_read_signing_key(source_keyfile)
    if sign_key:
        print('')
        print(
            '/!\\ The current file contains a secret signing key. This file should never leave the client machine it is installed on.'
        )
        print('')

    lconf = LocalConfig()
    target_keyfile = os.path.join(lconf.trusted_curve_keys_dir,
                                  '{}.pub.key'.format(options.name))
    if os.path.isfile(target_keyfile) and not options.force:
        print(
            'We already trust a key for "{}" on this machine. You can override the existing one by specifying "--force".'
            .format(options.name))
        sys.exit(2)

    try:
        copyfile(source_keyfile, target_keyfile)
    except Exception as e:
        print('Failed to install new public key as {}: {}'.format(
            target_keyfile, str(e)))
        sys.exit(3)
    print('Installed as {}'.format(target_keyfile))
Example 21
def import_suite_packages(suite_name):
    # FIXME: Don't hardcode the "master" repository here, fully implement
    # the "multiple repositories" feature
    repo_name = 'master'

    session = session_factory()
    suite = session.query(ArchiveSuite) \
        .filter(ArchiveSuite.name == suite_name).one()
    repo = session.query(ArchiveRepository) \
        .filter(ArchiveRepository.name == repo_name).one()

    lconf = LocalConfig()
    local_repo = Repository(lconf.archive_root_dir,
                            repo.name,
                            trusted_keyrings=[],
                            entity=repo)

    # we unconditionally trust the local repository - for now
    local_repo.set_trusted(True)

    # event emitted for message passing
    emitter = EventEmitter(LkModule.ARCHIVE)

    for component in suite.components:

        # fetch all source packages for the given repository
        # FIXME: Urgh... Can this be more efficient?
        existing_spkgs = dict()
        all_existing_src_packages = session.query(SourcePackage) \
            .options(joinedload(SourcePackage.suites)) \
            .filter(SourcePackage.repo_id == repo.id) \
            .filter(SourcePackage.component_id == component.id).all()
        for e_spkg in all_existing_src_packages:
            existing_spkgs[e_spkg.uuid] = e_spkg

        for spkg in local_repo.source_packages(suite, component):
            db_spkg = existing_spkgs.pop(spkg.uuid, None)
            if db_spkg:
                session.expunge(spkg)
                if suite in db_spkg.suites:
                    continue  # the source package is already registered with this suite
                db_spkg.suites.append(suite)
                _emit_package_event(emitter,
                                    'source-package-published-in-suite', spkg,
                                    {'suite_new': suite.name})
                continue

            session.add(spkg)
            _emit_package_event(emitter, 'source-package-published', spkg)

        for old_spkg in existing_spkgs.values():
            if suite in old_spkg.suites:
                old_spkg.suites.remove(suite)
                _emit_package_event(emitter, 'source-package-suite-removed',
                                    old_spkg, {'suite_old': suite.name})
            if len(old_spkg.suites) <= 0:
                for f in old_spkg.files:
                    session.delete(f)
                session.delete(old_spkg)
                _emit_package_event(emitter, 'removed-source-package',
                                    old_spkg)

        # commit the source package changes already
        session.commit()

        for arch in suite.architectures:

            # Get all binary packages UUID/suite-id combinations for the given architecture and suite
            # FIXME: Urgh... Can this be more efficient?
            bpkg_b = Bundle('bin_package', BinaryPackage.uuid)
            suite_b = Bundle('archive_suite', ArchiveSuite.id)
            existing_bpkgs = dict()
            for e_bpkg, suite_i in session.query(bpkg_b, suite_b) \
                    .filter(BinaryPackage.repo_id == repo.id) \
                    .filter(BinaryPackage.component_id == component.id) \
                    .filter(BinaryPackage.architecture_id == arch.id).join(BinaryPackage.suites):
                sl = existing_bpkgs.get(e_bpkg.uuid)
                if not sl:
                    # if there is just one suite, we may get a scalar here
                    existing_bpkgs[e_bpkg.uuid] = [suite_i.id]
                else:
                    sl.append(suite_i.id)

            # add information about regular binary packages
            existing_bpkgs = _register_binary_packages(
                session, repo, suite, component, arch, existing_bpkgs,
                local_repo.binary_packages(suite, component, arch), emitter)

            # add information about debian-installer packages
            existing_bpkgs = _register_binary_packages(
                session, repo, suite, component, arch, existing_bpkgs,
                local_repo.installer_packages(suite, component, arch), emitter)
            session.commit()

            for old_bpkg_uuid, suites in existing_bpkgs.items():
                suites_count = len(suites)
                if suite.id in suites:
                    rc = session.query(binpkg_suite_assoc_table) \
                                .filter(binpkg_suite_assoc_table.c.suite_id == suite.id) \
                                .filter(binpkg_suite_assoc_table.c.bin_package_uuid == old_bpkg_uuid) \
                                .delete(synchronize_session=False)
                    if rc > 0:
                        suites_count -= 1
                if suites_count <= 0:
                    # delete the old package, we don't need it anymore if it is in no suites
                    session.query(ArchiveFile) \
                        .filter(ArchiveFile.binpkg_id == old_bpkg_uuid).delete()
                    session.query(BinaryPackage) \
                        .filter(BinaryPackage.uuid == old_bpkg_uuid).delete()

                    # NOTE: We do not emit messages for removed binary packages, as they are usually
                    # deleted with their source package (unless we have an arch-specific removal) and we
                    # don't want to spam messages which may be uninteresting to current Laniakea modules.

            session.commit()

            # import new AppStream component metadata / delete old components
            update_appstream_data(session, local_repo, repo, suite, component,
                                  arch)

    # delete orphaned AppStream metadata
    for cpt in session.query(SoftwareComponent).filter(
            ~SoftwareComponent.bin_packages.any()).all():
        session.delete(cpt)
    session.commit()