Exemplo n.º 1
0
def add_image_recipe(options):
    '''Interactively create a new ISO/IMG build recipe and store it.

    Prompts the user for all recipe fields, persists the new recipe in the
    database and announces a 'recipe-created' event on the message stream.
    '''
    print_header('Add new ISO/IMG image build recipe')

    with session_scope() as session:
        recipe = ImageBuildRecipe()

        # basic metadata, asked in order
        recipe.distribution = input_str(
            'Name of the distribution to build the image for')
        recipe.suite = input_str('Name of the suite to build the image for')
        recipe.flavor = input_str('Flavor to build')
        recipe.architectures = input_list('List of architectures to build for')

        # keep asking until a known image kind was entered
        known_kinds = {'iso': ImageKind.ISO, 'img': ImageKind.IMG}
        while True:
            kind_str = input_str(
                'Type of image that we are building (iso/img)').lower()
            kind = known_kinds.get(kind_str)
            if kind is not None:
                recipe.kind = kind
                break
            print_note('The selected image kind is unknown.')

        recipe.git_url = input_str(
            'Git repository URL containing the image build configuration')
        recipe.result_move_to = input_str(
            'Place to move the build result to (placeholders like %{DATE} are allowed)'
        )

        # ensure we have a name
        recipe.regenerate_name()

        # persist the new recipe
        session.add(recipe)
        session.commit()

        # announce the new recipe to interested modules
        emitter = EventEmitter(LkModule.ADMINCLI)
        emitter.submit_event_for_mod(
            LkModule.ISOTOPE,
            'recipe-created',
            {'name': recipe.name,
             'kind': kind_str,
             'architectures': recipe.architectures,
             'distribution': recipe.distribution,
             'suite': recipe.suite,
             'flavor': recipe.flavor})

        print_done('Created recipe with name: {}'.format(recipe.name))
Exemplo n.º 2
0
    def _publish_synced_spkg_events(self, src_os, src_suite, dest_suite, forced=False, emitter=None):
        ''' Submit events for the synced source packages to the message stream '''
        # reuse the caller-supplied emitter when available, otherwise make one
        if not emitter:
            emitter = EventEmitter(LkModule.SYNCHROTRON)

        for pkg in self._synced_source_pkgs:
            emitter.submit_event('src-package-imported',
                                 {'name': pkg.name,
                                  'version': pkg.version,
                                  'src_os': src_os,
                                  'suite_src': src_suite,
                                  'suite_dest': dest_suite,
                                  'forced': forced})
Exemplo n.º 3
0
def publish_synced_spkg_events(engine, src_os, src_suite, dest_suite, forced=False, emitter=None):
    '''Submit events for the synced source packages to the message stream.

    Emits one 'src-package-synced' event per package reported by the engine.

    :param engine: Sync engine; queried via getSyncedSourcePackages().
    :param src_os: Name of the OS packages were synced from.
    :param src_suite: Name of the suite packages were synced from.
    :param dest_suite: Name of the suite packages were synced into.
    :param forced: Whether this sync was forced.
    :param emitter: Optional EventEmitter to reuse; a new SYNCHROTRON emitter
                    is created only if none was given.
    '''
    # explicit None check so a caller-provided (possibly falsy) emitter
    # object is never silently replaced
    if emitter is None:
        emitter = EventEmitter(LkModule.SYNCHROTRON)

    spkgs = engine.getSyncedSourcePackages()
    for spkg in spkgs:
        data = {'name': spkg.name,
                'version': spkg.ver,
                'src_os': src_os,
                'src_suite': src_suite,
                'dest_suite': dest_suite,
                'forced': forced}

        emitter.submit_event('src-package-synced', data)
Exemplo n.º 4
0
def trigger_image_build(options):
    '''Schedule OS image build jobs for the recipe named on the command line.

    Creates one Job per architecture listed in the recipe, announces each job
    via a 'build-job-added' event, and exits with status 2 if no recipe with
    the given name exists.
    '''
    recipe_name = options.trigger_build

    with session_scope() as session:
        recipe: Optional[ImageBuildRecipe] = session.query(ImageBuildRecipe) \
                                                    .filter(ImageBuildRecipe.name == recipe_name).one_or_none()

        if not recipe:
            print_note(
                'Recipe with name "{}" was not found!'.format(recipe_name))
            sys.exit(2)

        emitter = EventEmitter(LkModule.ADMINCLI)

        job_count = 0
        for arch in recipe.architectures:
            job = Job()
            job.module = LkModule.ISOTOPE
            job.kind = JobKind.OS_IMAGE_BUILD
            job.trigger = recipe.uuid
            # jobs run on the recipe's designated host architecture; if that
            # differs from the target arch, record the target in the job data
            job.architecture = recipe.host_architecture
            if job.architecture != arch:
                job.data = {'architecture': arch}

            session.add(job)
            session.commit()  # to generate an uuid for this job to announce

            job_count += 1

            # announce the event
            ev_data = {
                'name': recipe.name,
                'architecture': arch,
                'format': str(recipe.format),
                'distribution': recipe.distribution,
                'suite': recipe.suite,
                'environment': recipe.environment,
                'style': recipe.style,
                'job_id': str(job.uuid)
            }
            emitter.submit_event_for_mod(LkModule.ISOTOPE, 'build-job-added',
                                         ev_data)

        session.commit()
        print_done('Scheduled {} job(s) for {}.'.format(
            job_count, recipe.name))
Exemplo n.º 5
0
def import_files_from(conf, incoming_dir):
    '''
    Import files from an untrusted incoming source.

    IMPORTANT: We assume that the uploader can not edit their files post-upload.
    If they could, we would be vulnerable to timing attacks here.
    '''

    emitter = EventEmitter(LkModule.RUBICON)
    for fname in glob(os.path.join(incoming_dir, '*.dud')):
        dud = Dud(fname)

        # reject the upload right away if its signature does not check out
        try:
            dud.validate(keyrings=conf.trusted_gpg_keyrings)
        except Exception as e:
            reject_upload(conf, dud,
                          'Signature validation failed: {}'.format(str(e)),
                          emitter)
            continue

        # signature was fine, accept the upload
        accept_upload(conf, dud, emitter)
Exemplo n.º 6
0
def import_suite_packages(suite_name):
    '''Synchronize database records with the on-disk archive of a suite.

    Scans the local repository for the named suite and updates the
    SourcePackage/BinaryPackage rows (and their suite associations) to match,
    emitting archive events for published/removed source packages, refreshing
    AppStream data per architecture, and finally pruning orphaned AppStream
    components.

    :param suite_name: Name of the ArchiveSuite to import.
    '''
    # FIXME: Don't hardcode the "master" repository here, fully implement
    # the "multiple repositories" feature
    repo_name = 'master'

    session = session_factory()
    suite = session.query(ArchiveSuite) \
        .filter(ArchiveSuite.name == suite_name).one()
    repo = session.query(ArchiveRepository) \
        .filter(ArchiveRepository.name == repo_name).one()

    lconf = LocalConfig()
    local_repo = Repository(lconf.archive_root_dir,
                            repo.name,
                            trusted_keyrings=[],
                            entity=repo)

    # we unconditionally trust the local repository - for now
    local_repo.set_trusted(True)

    # event emitted for message passing
    emitter = EventEmitter(LkModule.ARCHIVE)

    for component in suite.components:

        # fetch all source packages for the given repository
        # FIXME: Urgh... Can this be more efficient?
        existing_spkgs = dict()
        all_existing_src_packages = session.query(SourcePackage) \
            .options(joinedload(SourcePackage.suites)) \
            .filter(SourcePackage.repo_id == repo.id) \
            .filter(SourcePackage.component_id == component.id).all()
        for e_spkg in all_existing_src_packages:
            existing_spkgs[e_spkg.uuid] = e_spkg

        for spkg in local_repo.source_packages(suite, component):
            db_spkg = existing_spkgs.pop(spkg.uuid, None)
            if db_spkg:
                # already in the database: detach the freshly read copy so we
                # don't insert a duplicate, then only update suite membership
                session.expunge(spkg)
                if suite in db_spkg.suites:
                    continue  # the source package is already registered with this suite
                db_spkg.suites.append(suite)
                _emit_package_event(emitter,
                                    'source-package-published-in-suite', spkg,
                                    {'suite_new': suite.name})
                continue

            # completely new source package for this repository/component
            session.add(spkg)
            _emit_package_event(emitter, 'source-package-published', spkg)

        # whatever remains in existing_spkgs was not found on disk anymore
        for old_spkg in existing_spkgs.values():
            if suite in old_spkg.suites:
                old_spkg.suites.remove(suite)
                _emit_package_event(emitter, 'source-package-suite-removed',
                                    old_spkg, {'suite_old': suite.name})
            if len(old_spkg.suites) <= 0:
                # package is in no suite at all: drop it and its files
                for f in old_spkg.files:
                    session.delete(f)
                session.delete(old_spkg)
                _emit_package_event(emitter, 'removed-source-package',
                                    old_spkg)

        # commit the source package changes already
        session.commit()

        for arch in suite.architectures:

            # Get all binary packages UUID/suite-id combinations for the given architecture and suite
            # FIXME: Urgh... Can this be more efficient?
            bpkg_b = Bundle('bin_package', BinaryPackage.uuid)
            suite_b = Bundle('archive_suite', ArchiveSuite.id)
            existing_bpkgs = dict()
            for e_bpkg, suite_i in session.query(bpkg_b, suite_b) \
                    .filter(BinaryPackage.repo_id == repo.id) \
                    .filter(BinaryPackage.component_id == component.id) \
                    .filter(BinaryPackage.architecture_id == arch.id).join(BinaryPackage.suites):
                sl = existing_bpkgs.get(e_bpkg.uuid)
                if not sl:
                    existing_bpkgs[e_bpkg.uuid] = [
                        suite_i.id
                    ]  # if there is just one suite, we may get a scalar here
                else:
                    sl.append(suite_i.id)

            # add information about regular binary packages
            existing_bpkgs = _register_binary_packages(
                session, repo, suite, component, arch, existing_bpkgs,
                local_repo.binary_packages(suite, component, arch), emitter)

            # add information about debian-installer packages
            existing_bpkgs = _register_binary_packages(
                session, repo, suite, component, arch, existing_bpkgs,
                local_repo.installer_packages(suite, component, arch), emitter)
            session.commit()

            # remaining entries were not re-registered: remove their suite
            # association, and the package itself once it is in no suite
            for old_bpkg_uuid, suites in existing_bpkgs.items():
                suites_count = len(suites)
                if suite.id in suites:
                    rc = session.query(binpkg_suite_assoc_table) \
                                .filter(binpkg_suite_assoc_table.c.suite_id == suite.id) \
                                .filter(binpkg_suite_assoc_table.c.bin_package_uuid == old_bpkg_uuid) \
                                .delete(synchronize_session=False)
                    if rc > 0:
                        suites_count -= 1
                if suites_count <= 0:
                    # delete the old package, we don't need it anymore if it is in no suites
                    session.query(ArchiveFile) \
                        .filter(ArchiveFile.binpkg_id == old_bpkg_uuid).delete()
                    session.query(BinaryPackage) \
                        .filter(BinaryPackage.uuid == old_bpkg_uuid).delete()

                    # NOTE: We do not emit messages for removed binary packages, as they are usually
                    # deleted with their source package (unless we have an arch-specific removal) and we
                    # don't want to spam messages which may be uninteresting to current Laniakea modules.

            session.commit()

            # import new AppStream component metadata / delete old components
            update_appstream_data(session, local_repo, repo, suite, component,
                                  arch)

    # delete orphaned AppStream metadata
    for cpt in session.query(SoftwareComponent).filter(
            ~SoftwareComponent.bin_packages.any()).all():
        session.delete(cpt)
    session.commit()
Exemplo n.º 7
0
def add_image_recipe(options):
    '''Interactively create a new ISO/IMG image build recipe.

    Prompts for all recipe fields, prefixes the recipe name with its image
    format, stores the recipe in the database and announces a
    'recipe-created' event.
    '''
    print_header('Add new ISO/IMG image build recipe')

    with session_scope() as session:
        recipe = ImageBuildRecipe()

        recipe.distribution = input_str(
            'Name of the distribution to build the image for')
        recipe.suite = input_str('Name of the suite to build the image for')
        recipe.environment = input_str(
            'Environment of the image (e.g. GNOME, Plasma, server, ...)')
        recipe.style = input_str(
            'Style of this OS image (e.g. "oem", "live", ...)')
        recipe.architectures = input_list('List of architectures to build for')
        recipe.host_architecture = input_str(
            ('Architecture of the host that is allowed to build images '
             '(put "any" to allow any host)'))

        # loop until a known image format (iso/img) was entered
        while True:
            format_str = input_str(
                'Type of image that we are building (iso/img)').lower()
            if format_str == 'iso':
                recipe.format = ImageFormat.ISO
                break
            if format_str == 'img':
                recipe.format = ImageFormat.IMG
                break
            print_note('The selected image format is unknown.')

        recipe.name = input_str(
            'Unique name for this recipe (format will be automatically prefixed)'
        ).lower()
        recipe.git_url = input_str(
            'Git repository URL containing the image build configuration')
        recipe.result_move_to = input_str(
            'Place to move the build result to (placeholders like %{DATE} are allowed)'
        )

        # type-prefix recipe name
        recipe.name = '{}:{}'.format(format_str, recipe.name)

        # add recipe to the database
        session.add(recipe)
        session.commit()

        # announce the event
        emitter = EventEmitter(LkModule.ADMINCLI)
        ev_data = {
            'name': recipe.name,
            'format': format_str,
            'architectures': recipe.architectures,
            'distribution': recipe.distribution,
            'suite': recipe.suite,
            'environment': recipe.environment,
            'style': recipe.style
        }
        emitter.submit_event_for_mod(LkModule.ISOTOPE, 'recipe-created',
                                     ev_data)

        print_done('Created recipe with name: {}'.format(recipe.name))
Exemplo n.º 8
0
    def _run_migration_for_entries(self, session, migration_entries):
        '''Run migrations for the given entries and sync excuses to the database.

        For each migration entry, runs the migration, then reconciles the
        stored SpearsExcuse rows with the freshly computed excuses: new ones
        are added (emitting a 'new-excuse' event), existing ones are updated
        in place, and vanished ones are deleted (emitting 'excuse-removed').

        :param session: Database session used for all queries and changes.
        :param migration_entries: Iterable of migration entry objects.
        :return: True
        '''
        # event emitted for message publishing
        emitter = EventEmitter(LkModule.SPEARS)

        for mentry in migration_entries:
            si_res = self._suites_from_migration_entry(session, mentry)
            if si_res['error']:
                continue
            print('\nRunning migration: {} to {}\n'.format(
                '+'.join(mentry.source_suites), mentry.target_suite))
            suites_from = si_res['from']
            suite_to = si_res['to']
            assert len(suites_from) >= 1

            n_excuses = self._run_migration_internal(session, suites_from,
                                                     suite_to)
            # None signals the migration itself failed; skip this entry
            if n_excuses is None:
                continue

            migration_id = mentry.make_migration_id()

            # list existing excuses
            existing_excuses = {}
            all_excuses = session.query(SpearsExcuse).filter(
                SpearsExcuse.migration_id == migration_id).all()
            for excuse in all_excuses:
                # key format mirrors SpearsExcuse.make_idname()
                eid = '{}-{}:{}-{}/{}'.format(excuse.suite_source,
                                              excuse.suite_target,
                                              excuse.source_package,
                                              excuse.version_new,
                                              excuse.version_old)
                existing_excuses[eid] = excuse

            for new_excuse in n_excuses:
                excuse = existing_excuses.pop(new_excuse.make_idname(), None)
                if excuse:
                    # the excuse already exists, so we just update it
                    excuse_is_new = False
                else:
                    excuse_is_new = True
                    excuse = new_excuse

                if excuse_is_new:
                    excuse.uuid = uuid4(
                    )  # we need an UUID immediately to submit it in the event payload
                    session.add(excuse)

                    data = {
                        'uuid': str(excuse.uuid),
                        'suite_source': excuse.suite_source,
                        'suite_target': excuse.suite_target,
                        'source_package': excuse.source_package,
                        'version_new': excuse.version_new,
                        'version_old': excuse.version_old
                    }
                    emitter.submit_event('new-excuse', data)
                else:
                    # copy all mutable fields from the new excuse onto the
                    # stored row, keeping its identity (uuid) intact
                    excuse.is_candidate = new_excuse.is_candidate
                    excuse.maintainer = new_excuse.maintainer

                    excuse.age_current = new_excuse.age_current
                    excuse.age_required = new_excuse.age_required

                    excuse.missing_archs_primary = new_excuse.missing_archs_primary
                    excuse.missing_archs_secondary = new_excuse.missing_archs_secondary

                    excuse.set_old_binaries(new_excuse.get_old_binaries())

                    excuse.blocked_by = new_excuse.blocked_by
                    excuse.migrate_after = new_excuse.migrate_after
                    excuse.manual_block = new_excuse.manual_block
                    excuse.other = new_excuse.other
                    excuse.log_excerpt = new_excuse.log_excerpt

            # excuses not reported again are obsolete: announce and delete
            for excuse in existing_excuses.values():
                data = {
                    'uuid': str(excuse.uuid),
                    'suite_source': excuse.suite_source,
                    'suite_target': excuse.suite_target,
                    'source_package': excuse.source_package,
                    'version_new': excuse.version_new,
                    'version_old': excuse.version_old
                }
                emitter.submit_event('excuse-removed', data)
                session.delete(excuse)

            # add changes to the database early
            session.commit()

        return True
Exemplo n.º 9
0
def command_migrate(options):
    ''' Run a Britney migration '''

    bconf, sconf, suites = get_spears_config()
    engine = SpearsEngine(bconf, sconf, suites)

    session = session_factory()

    migration_entries = session.query(SpearsMigrationEntry).all()
    if options.suite1:
        # we have parameters, so limit which migration entries we act on
        if not options.suite2:
            print('Target suite parameter is missing!')
            sys.exit(1)

        migration_found = False
        migration_id = '{}-to-{}'.format(options.suite1, options.suite2)
        for entry in migration_entries:
            if entry.make_migration_id() == migration_id:
                migration_found = True
                migration_entries = [entry]
                break

        if not migration_found:
            print('Could not find migration recipe with ID "{}"'.format(
                migration_id))
            sys.exit(1)

    # event emitted for message publishing
    emitter = EventEmitter(LkModule.SPEARS)

    for entry in migration_entries:
        print('\nRunning migration: {} to {}\n'.format(
            '+'.join(entry.source_suites), entry.target_suite))
        ret, n_excuses = engine.runMigration('+'.join(entry.source_suites),
                                             entry.target_suite)
        # abort with status 2 if the migration run itself failed
        if not ret:
            sys.exit(2)

        migration_id = entry.make_migration_id()

        # list existing excuses
        existing_excuses = {}
        all_excuses = session.query(SpearsExcuse).filter(
            SpearsExcuse.migration_id == migration_id).all()
        for excuse in all_excuses:
            # composite key: source/target suites, package and version pair
            eid = '{}-{}:{}-{}/{}'.format(excuse.suite_source,
                                          excuse.suite_target,
                                          excuse.source_package,
                                          excuse.version_new,
                                          excuse.version_old)
            existing_excuses[eid] = excuse

        for ex in n_excuses:
            # build the same composite key from the engine's excuse object
            eid = '{}-{}:{}-{}/{}'.format(ex.sourceSuite, ex.targetSuite,
                                          ex.sourcePackage, ex.newVersion,
                                          ex.oldVersion)
            excuse = existing_excuses.pop(eid, None)
            if excuse:
                # the excuse already exists, so we just update it
                new_excuse = False
            else:
                new_excuse = True
                excuse = SpearsExcuse()
                #excuse.time = ex.date # noqa
                excuse.migration_id = migration_id

                excuse.suite_source = ex.sourceSuite
                excuse.suite_target = ex.targetSuite

                excuse.source_package = ex.sourcePackage

                excuse.version_new = ex.newVersion
                excuse.version_old = ex.oldVersion

            # always refresh the mutable fields, for new and existing excuses
            excuse.is_candidate = ex.isCandidate
            excuse.maintainer = ex.maintainer

            excuse.age_current = ex.age.currentAge
            excuse.age_required = ex.age.requiredAge

            excuse.missing_archs_primary = ex.missingBuilds.primaryArchs
            excuse.missing_archs_secondary = ex.missingBuilds.secondaryArchs

            # convert the engine's old-binaries records into DB objects
            obins = []
            for ob in ex.oldBinaries:
                obin = SpearsOldBinaries()
                obin.pkg_version = ob.pkgVersion
                obin.binaries = ob.binaries
                obins.append(obin)
            excuse.set_old_binaries(obins)

            excuse.blocked_by = ex.reason.blockedBy
            excuse.migrate_after = ex.reason.migrateAfter
            excuse.manual_block = ex.reason.manualBlock
            excuse.other = ex.reason.other
            excuse.log_excerpt = ex.reason.logExcerpt

            if new_excuse:
                excuse.uuid = uuid4(
                )  # we need an UUID immediately to submit it in the event payload
                session.add(excuse)

                data = {
                    'uuid': str(excuse.uuid),
                    'suite_source': excuse.suite_source,
                    'suite_target': excuse.suite_target,
                    'source_package': excuse.source_package,
                    'version_new': excuse.version_new,
                    'version_old': excuse.version_old
                }
                emitter.submit_event('new-excuse', data)

        # excuses not reported again are obsolete: announce and delete them
        for excuse in existing_excuses.values():
            data = {
                'uuid': str(excuse.uuid),
                'suite_source': excuse.suite_source,
                'suite_target': excuse.suite_target,
                'source_package': excuse.source_package,
                'version_new': excuse.version_new,
                'version_old': excuse.version_old
            }
            emitter.submit_event('excuse-removed', data)
            session.delete(excuse)

        # add changes to the database early
        session.commit()

    # ensure everything is committed
    session.commit()
Exemplo n.º 10
0
def command_autosync(options):
    '''Automatically synchronize packages for all enabled autosync configs.

    For every SynchrotronConfig with sync and autosync enabled, runs the sync
    engine and then reconciles the SynchrotronIssue records in the database
    with the issues the engine reported: new issues are added (emitting a
    'new-autosync-issue' event) and issues that no longer apply are deleted
    (emitting 'resolved-autosync-issue'). Exits with status 2 if a sync run
    fails.
    '''

    with session_scope() as session:
        sync_sources = session.query(SynchrotronSource).all()
        autosyncs = session.query(SynchrotronConfig) \
                           .filter(SynchrotronConfig.sync_enabled == True) \
                           .filter(SynchrotronConfig.sync_auto_enabled == True).all()  # noqa: E712

        for autosync in autosyncs:
            log.info('Synchronizing packages from {}/{} with {}'.format(
                autosync.source.os_name, autosync.source.suite_name,
                autosync.destination_suite.name))

            emitter = EventEmitter(LkModule.SYNCHROTRON)

            engine = SyncEngine(autosync.destination_suite.name,
                                autosync.source.suite_name)
            ret, issue_data = engine.autosync(session, autosync,
                                              autosync.auto_cruft_remove)
            if not ret:
                # abort the whole autosync run when one sync failed
                # (no statement after sys.exit() - it raises SystemExit)
                sys.exit(2)

            # index all currently known sync issues for this destination
            # suite, keyed by package name, version pair and issue kind
            existing_sync_issues = {}
            for ssource in sync_sources:
                all_issues = session.query(SynchrotronIssue) \
                                    .filter(SynchrotronIssue.source_suite == ssource.suite_name,
                                            SynchrotronIssue.target_suite == autosync.destination_suite.name,
                                            SynchrotronIssue.config_id == autosync.id) \
                                    .all()
                for eissue in all_issues:
                    eid = '{}-{}-{}:{}'.format(eissue.package_name,
                                               eissue.source_version,
                                               eissue.target_version,
                                               str(eissue.kind))
                    existing_sync_issues[eid] = eissue

            for info in issue_data:
                eid = '{}-{}-{}:{}'.format(info.package_name,
                                           info.source_version,
                                           info.target_version, str(info.kind))
                issue = existing_sync_issues.pop(eid, None)
                if issue:
                    # the issue already exists, so we just update it
                    new_issue = False
                else:
                    new_issue = True
                    issue = info
                    issue.config = autosync

                if new_issue:
                    session.add(issue)

                    data = {
                        'name': issue.package_name,
                        'src_os': autosync.source.os_name,
                        'suite_src': issue.source_suite,
                        'suite_dest': issue.target_suite,
                        'version_src': issue.source_version,
                        'version_dest': issue.target_version,
                        'kind': str(issue.kind)
                    }

                    emitter.submit_event('new-autosync-issue', data)

            # issues not reported again by the engine are resolved:
            # delete them and announce the resolution
            for eissue in existing_sync_issues.values():
                session.delete(eissue)

                data = {
                    'name': eissue.package_name,
                    'src_os': autosync.source.os_name,
                    'suite_src': eissue.source_suite,
                    'suite_dest': eissue.target_suite,
                    'version_src': eissue.source_version,
                    'version_dest': eissue.target_version,
                    'kind': str(eissue.kind)
                }

                emitter.submit_event('resolved-autosync-issue', data)
Exemplo n.º 11
0
def command_autosync(options):
    ''' Automatically synchronize packages '''

    with session_scope() as session:
        autosyncs = session.query(SynchrotronConfig).filter(SynchrotronConfig.sync_enabled == True) \
            .filter(SynchrotronConfig.sync_auto_enabled == True).all()  # noqa: E712

        bconf, sconf = get_sync_config()
        blacklist_pkgnames = get_package_blacklist()  # the blacklist is global for now

        for autosync in autosyncs:
            incoming_suite = get_suiteinfo_for_suite(autosync.destination_suite)
            # point the shared sync configuration at this autosync's source
            sconf.syncBinaries = autosync.sync_binaries
            sconf.source.defaultSuite = autosync.source.suite_name
            sconf.source.repoUrl = autosync.source.repo_url

            log.info('Synchronizing packages from {}/{} with {}'.format(autosync.source.os_name, autosync.source.suite_name,
                                                                        autosync.destination_suite.name))

            emitter = EventEmitter(LkModule.SYNCHROTRON)

            engine = SyncEngine(bconf, sconf, incoming_suite)
            engine.setBlacklist(blacklist_pkgnames)

            ret, issue_data = engine.autosync()
            # announce synced source packages even if the run later fails
            publish_synced_spkg_events(engine,
                                       autosync.source.os_name,
                                       autosync.source.suite_name,
                                       autosync.destination_suite.name,
                                       emitter=emitter)
            if not ret:
                sys.exit(2)
                return  # NOTE(review): unreachable - sys.exit() raises SystemExit

            # drop all previously recorded issues for this config; they are
            # fully re-created from the fresh issue_data below
            for ssuite in sconf.source.suites:
                session.query(SynchrotronIssue) \
                    .filter(SynchrotronIssue.source_suite == ssuite.name,
                            SynchrotronIssue.target_suite == incoming_suite.name,
                            SynchrotronIssue.config_id == autosync.id) \
                    .delete()

            for info in issue_data:
                issue = SynchrotronIssue()
                issue.config = autosync
                issue.kind = SynchrotronIssueKind(info.kind)
                issue.package_name = info.packageName
                issue.source_suite = info.sourceSuite
                issue.target_suite = info.targetSuite
                issue.source_version = info.sourceVersion
                issue.target_version = info.targetVersion
                issue.details = info.details
                session.add(issue)

                data = {'name': issue.package_name,
                        'src_os': autosync.source.os_name,
                        'src_suite': issue.source_suite,
                        'dest_suite': issue.target_suite,
                        'src_version': issue.source_version,
                        'dest_version': issue.target_version,
                        'kind': str(issue.kind)}

                emitter.submit_event('autosync-issue', data)
Exemplo n.º 12
0
def command_autosync(options):
    ''' Automatically synchronize packages '''

    with session_scope() as session:
        autosyncs = session.query(SynchrotronConfig).filter(SynchrotronConfig.sync_enabled == True) \
            .filter(SynchrotronConfig.sync_auto_enabled == True).all()  # noqa: E712

        bconf, sconf = get_sync_config()
        blacklist_pkgnames = get_package_blacklist(
        )  # the blacklist is global for now

        for autosync in autosyncs:
            incoming_suite = get_suiteinfo_for_suite(
                autosync.destination_suite)
            # point the shared sync configuration at this autosync's source
            sconf.syncBinaries = autosync.sync_binaries
            sconf.source.defaultSuite = autosync.source.suite_name
            sconf.source.repoUrl = autosync.source.repo_url

            log.info('Synchronizing packages from {}/{} with {}'.format(
                autosync.source.os_name, autosync.source.suite_name,
                autosync.destination_suite.name))

            emitter = EventEmitter(LkModule.SYNCHROTRON)

            engine = SyncEngine(bconf, sconf, incoming_suite)
            engine.setBlacklist(blacklist_pkgnames)

            ret, issue_data = engine.autosync()
            # announce synced source packages even if the run later fails
            publish_synced_spkg_events(engine,
                                       autosync.source.os_name,
                                       autosync.source.suite_name,
                                       autosync.destination_suite.name,
                                       emitter=emitter)
            if not ret:
                sys.exit(2)
                return  # NOTE(review): unreachable - sys.exit() raises SystemExit

            # index all currently known sync issues for this config, keyed
            # by package name, version pair and issue kind
            existing_sync_issues = {}
            for ssuite in sconf.source.suites:
                all_issues = session.query(SynchrotronIssue) \
                                    .filter(SynchrotronIssue.source_suite == ssuite.name,
                                            SynchrotronIssue.target_suite == incoming_suite.name,
                                            SynchrotronIssue.config_id == autosync.id) \
                                    .all()
                for eissue in all_issues:
                    eid = '{}-{}-{}:{}'.format(eissue.package_name,
                                               eissue.source_version,
                                               eissue.target_version,
                                               str(eissue.kind))
                    existing_sync_issues[eid] = eissue

            for info in issue_data:
                issue_kind = SynchrotronIssueKind(info.kind)
                eid = '{}-{}-{}:{}'.format(info.packageName,
                                           info.sourceVersion,
                                           info.targetVersion, str(issue_kind))
                issue = existing_sync_issues.pop(eid, None)
                if issue:
                    # the issue already exists, so we just update it
                    new_issue = False
                else:
                    new_issue = True
                    issue = SynchrotronIssue()
                    issue.config = autosync
                    issue.package_name = info.packageName
                    issue.source_version = info.sourceVersion
                    issue.target_version = info.targetVersion
                    issue.kind = issue_kind

                # suites and details are refreshed for new and known issues
                issue.source_suite = info.sourceSuite
                issue.target_suite = info.targetSuite

                issue.details = info.details

                if new_issue:
                    session.add(issue)

                    data = {
                        'name': issue.package_name,
                        'src_os': autosync.source.os_name,
                        'suite_src': issue.source_suite,
                        'suite_dest': issue.target_suite,
                        'version_src': issue.source_version,
                        'version_dest': issue.target_version,
                        'kind': str(issue.kind)
                    }

                    emitter.submit_event('new-autosync-issue', data)

            # issues not reported again are resolved: delete and announce
            for eissue in existing_sync_issues.values():
                session.delete(eissue)

                data = {
                    'name': eissue.package_name,
                    'src_os': autosync.source.os_name,
                    'suite_src': eissue.source_suite,
                    'suite_dest': eissue.target_suite,
                    'version_src': eissue.source_version,
                    'version_dest': eissue.target_version,
                    'kind': str(eissue.kind)
                }

                emitter.submit_event('resolved-autosync-issue', data)