Code example #1
File: britneyconfig.py  Project: lkhq/laniakea
    def save(self):
        # ensure essential directories exist
        hints_dir = os.path.join(self._base_dir, 'input', 'hints')
        os.makedirs(os.path.join(self._base_dir, 'output', 'target'),
                    exist_ok=True)
        os.makedirs(os.path.join(self._base_dir, 'state'), exist_ok=True)
        os.makedirs(hints_dir, exist_ok=True)

        # ensure essential settings are set
        # NOTE: All of this seriously needs some refactoring, the direct translation from
        # D code is pretty bad at the moment.
        assert self._paths_set
        assert self._components_set
        assert self._archs_set
        if not self._broken_archs_set:
            self.set_broken_architectures([])
        if not self._new_archs_set:
            self.set_new_architectures([])
        if not self._delays_set:
            self.set_delays([])

        # save configuration
        conf_fname = os.path.join(self._base_dir, 'britney.conf')
        log.debug('Saving Britney config to "{}"'.format(conf_fname))

        with open(conf_fname, 'wt') as f:
            for line in self._contents:
                f.write(line + '\n')

        if self._hint_contents:
            hints_fname = os.path.join(hints_dir, 'laniakea')
            with open(hints_fname, 'wt') as f:
                for line in self._hint_contents:
                    f.write(line + '\n')
Code example #2
def command_repo(options):
    ''' Import repository data '''

    suite_names = []
    if options.suite:
        suite_names.append(options.suite)
    else:
        log.debug('Importing data from all mutable suites.')
        with session_scope() as session:
            suite_names = [
                r[0] for r in session.query(ArchiveSuite.name).filter(
                    ArchiveSuite.frozen == False).all()
            ]  # noqa

    # TODO: Optimize this so we can run it in parallel, as well as skip
    # imports if they are not needed
    for suite_name in suite_names:
        log.debug('Importing data for suite "{}".'.format(suite_name))
        import_suite_packages(suite_name)
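The command above relies on a session_scope() helper to manage the database transaction. A minimal sketch of how such a helper is commonly implemented with SQLAlchemy is shown below; the actual Laniakea helper may differ, and the in-memory SQLite engine is only for illustration.

from contextlib import contextmanager

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine('sqlite://')  # illustration only; Laniakea uses its configured database
Session = sessionmaker(bind=engine)


@contextmanager
def session_scope():
    ''' Provide a transactional scope around a series of operations. '''
    session = Session()
    try:
        yield session
        session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()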
Code example #3
File: dakbridge.py  Project: jeremiah/laniakea
    def package_is_removable(self, package_name: str, suite_name: str) -> bool:
        ''' Check if a package can be removed without breaking reverse dependencies. '''

        log.debug('Testing package \'{}\' removal from \'{}\''.format(
            package_name, suite_name))

        # simulate package removal
        args = [
            'rm', '-R', '-m', 'RID: Removed from Debian', '-C', 'janitor@dak',
            '-n', '-s', suite_name, package_name
        ]

        ret, out = self._run_dak(args, check=False)

        if ret != 0:
            raise Exception(
                'Unable to check if package \'{}\' is removable from \'{}\': {}'
                .format(package_name, suite_name, out))
        return 'No dependency problem found.' in out
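The _run_dak() helper is not shown above. A minimal sketch of what it is expected to provide, assuming it shells out to a 'dak' binary on PATH and returns the exit code together with the combined output (the real DakBridge implementation may differ):

import subprocess


def run_dak(args, check=True):
    # Run dak with the given arguments and return (exit code, combined output).
    proc = subprocess.run(['dak'] + list(args),
                          stdout=subprocess.PIPE,
                          stderr=subprocess.STDOUT)
    out = proc.stdout.decode('utf-8', errors='replace')
    if check and proc.returncode != 0:
        raise Exception('dak command failed: {}'.format(out))
    return proc.returncode, out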
Code example #4
File: spearsengine.py  Project: dos1/laniakea
    def _collect_urgencies(self, mi_wspace: str):

        urgencies = ''
        for subdir, dirs, files in os.walk(
                self._lconf.archive_urgencies_export_dir):
            for fbasename in files:
                fname = os.path.join(subdir, fbasename)
                if not os.path.isfile(fname):
                    continue
                if not fbasename.startswith('install-urgencies'):
                    continue

                log.debug('Reading urgencies from {}'.format(fname))
                with open(fname, 'r') as f:
                    urgencies = urgencies + f.read()

        log.info('Writing urgency policy file.')
        urgency_policy_file = os.path.join(mi_wspace, 'state',
                                           'age-policy-urgencies')
        with open(urgency_policy_file, 'w') as f:
            f.write(urgencies)
Code example #5
File: syncengine.py  Project: dos1/laniakea
    def autosync(self, session, sync_conf, remove_cruft: bool = True):
        ''' Synchronize all packages that are newer '''

        self._synced_source_pkgs = []
        active_src_pkgs = []  # source packages which should have their binary packages updated
        res_issues = []

        target_suite = session.query(ArchiveSuite) \
                              .filter(ArchiveSuite.name == self._target_suite_name).one()
        sync_conf = session.query(SynchrotronConfig) \
                           .join(SynchrotronConfig.destination_suite) \
                           .join(SynchrotronConfig.source) \
                           .filter(ArchiveSuite.name == self._target_suite_name,
                                   SynchrotronSource.suite_name == self._source_suite_name).one_or_none()

        for component in target_suite.components:
            dest_pkg_map = self._get_target_source_packages(component.name)

            # The source package list contains many different versions; some source package
            # versions are explicitly kept for GPL-compatibility.
            # Sometimes a binary package migrates into another suite and drags the newer source
            # package it was built against along with it into the target suite.
            # These packages then have a source with a high version number, but might not have any
            # binaries due to them migrating later.
            # We need to account for that case when doing binary syncs (TODO: and maybe safeguard
            # against it when doing source-only syncs too?). That's why we don't filter out the
            # newest packages in binary-sync-mode.
            if sync_conf.sync_binaries:
                src_pkg_range = self._source_repo.source_packages(ArchiveSuite(self._source_suite_name), component)
            else:
                src_pkg_range = self._get_repo_source_package_map(self._source_repo,
                                                                  self._source_suite_name,
                                                                  component).values()

            for spkg in src_pkg_range:
                # ignore blacklisted packages in automatic sync
                if spkg.name in self._sync_blacklist:
                    continue

                dpkg = dest_pkg_map.get(spkg.name)
                if dpkg:
                    if version_compare(dpkg.version, spkg.version) >= 0:
                        log.debug('Skipped sync of {}: Target version \'{}\' is equal/newer than source version \'{}\'.'
                                  .format(spkg.name, dpkg.version, spkg.version))
                        continue

                    # check if we have a modified target package,
                    # indicated via its Debian revision, e.g. "1.0-0tanglu1"
                    if self._distro_tag in version_revision(dpkg.version):
                        log.info('Not syncing {}/{}: Destination has modifications (found {}).'
                                 .format(spkg.name, spkg.version, dpkg.version))

                        # add information that this package needs to be merged to the issue list
                        issue = SynchrotronIssue()
                        issue.package_name = spkg.name
                        issue.source_version = spkg.version
                        issue.target_version = dpkg.version
                        issue.kind = SynchrotronIssueKind.MERGE_REQUIRED

                        res_issues.append(issue)
                        continue

                # sync source package
                # the source package must always be known to dak first
                ret = self._import_source_package(spkg, component.name)
                if not ret:
                    return False, []

                # a new source package is always active and needs its binary packages synced, in
                # case we do binary syncs.
                active_src_pkgs.append(spkg)

            # all packages in the target distribution are considered active, as long as they don't
            # have modifications.
            for spkg in dest_pkg_map.values():
                if self._distro_tag in version_revision(spkg.version):
                    active_src_pkgs.append(spkg)

            # import binaries as well. We test for binary updates for all available active source packages,
            # as binNMUs might have happened in the source distribution.
            # (an active package in this context is any source package which doesn't have modifications in the
            # target distribution)
            ret = self._import_binaries_for_source(sync_conf, target_suite, component.name, active_src_pkgs)
            if not ret:
                return False, []

        # test for cruft packages
        target_pkg_index = {}
        for component in target_suite.components:
            dest_pkg_map = self._get_repo_source_package_map(self._target_repo,
                                                             target_suite.name,
                                                             component.name)
            for pkgname, pkg in dest_pkg_map.items():
                target_pkg_index[pkgname] = pkg

        # check which packages are present in the target, but not in the source suite
        for component in target_suite.components:
            src_pkg_map = self._get_repo_source_package_map(self._source_repo,
                                                            self._source_suite_name,
                                                            component.name)
            for pkgname in src_pkg_map.keys():
                target_pkg_index.pop(pkgname, None)

        # remove cruft packages
        if remove_cruft:
            for pkgname, dpkg in target_pkg_index.items():
                dpkg_ver_revision = version_revision(dpkg.version, False)
                # native packages are never removed
                if not dpkg_ver_revision:
                    continue

                # check if the package is introduced as new in the distro, in which case we won't remove it
                if dpkg_ver_revision.startswith('0' + self._distro_tag):
                    continue

                # if this package was modified in the target distro, we will also not remove it, but flag it
                # as "potential cruft" for someone to look at.
                if self._distro_tag in dpkg_ver_revision:
                    issue = SynchrotronIssue()
                    issue.kind = SynchrotronIssueKind.MAYBE_CRUFT
                    issue.source_suite = self._source_suite_name
                    issue.target_suite = self._target_suite_name
                    issue.package_name = dpkg.name
                    issue.source_version = None
                    issue.target_version = dpkg.version

                    res_issues.append(issue)
                    continue

                # check if we can remove this package without breaking stuff
                if self._dak.package_is_removable(dpkg.name, target_suite.name):
                    # try to remove the package
                    try:
                        self._dak.remove_package(dpkg.name, target_suite.name)
                    except Exception as e:
                        issue = SynchrotronIssue()
                        issue.kind = SynchrotronIssueKind.REMOVAL_FAILED
                        issue.source_suite = self._source_suite_name
                        issue.target_suite = self._target_suite_name
                        issue.package_name = dpkg.name
                        issue.source_version = None
                        issue.target_version = dpkg.version
                        issue.details = str(e)

                        res_issues.append(issue)
                else:
                    # looks like we can not remove this
                    issue = SynchrotronIssue()
                    issue.kind = SynchrotronIssueKind.REMOVAL_FAILED
                    issue.source_suite = self._source_suite_name
                    issue.target_suite = self._target_suite_name
                    issue.package_name = dpkg.name
                    issue.source_version = None
                    issue.target_version = dpkg.version
                    issue.details = 'This package can not be removed without breaking other packages. It needs manual removal.'

                    res_issues.append(issue)

        self._publish_synced_spkg_events(sync_conf.source.os_name,
                                         sync_conf.source.suite_name,
                                         sync_conf.destination_suite.name,
                                         False)

        return True, res_issues
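The sync logic above leans on two small helpers, version_compare() and version_revision(). A minimal sketch of their expected behaviour, assuming python3-apt is available for Debian version semantics (Laniakea's own implementations may differ):

import apt_pkg

apt_pkg.init()


def version_compare(a: str, b: str) -> int:
    # negative if a is older than b, zero if equal, positive if a is newer
    return apt_pkg.version_compare(a, b)


def version_revision(version: str, full: bool = True) -> str:
    # Return the Debian revision of a version string, or '' for native packages.
    # The exact meaning of the second parameter in Laniakea is not shown above,
    # so it is kept only as a placeholder here.
    if '-' not in version:
        return ''
    return version.rsplit('-', 1)[1]


assert version_compare('1.0-1', '1.0-2') < 0
assert version_revision('2.30-3tanglu1') == '3tanglu1'
assert version_revision('1.5') == ''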
Code example #6
File: syncengine.py  Project: dos1/laniakea
    def _import_binaries_for_source(self, sync_conf, target_suite, component: str, spkgs: List[SourcePackage],
                                    ignore_target_changes: bool = False) -> bool:
        ''' Import binary packages for the given set of source packages into the archive. '''

        if not sync_conf.sync_binaries:
            log.debug('Skipping binary syncs.')
            return True

        # list of valid architectures supported by the target
        target_archs = [a.name for a in target_suite.architectures]

        # cache of binary-package mappings for the source
        src_bpkg_arch_map = {}
        for aname in target_archs:
            src_bpkg_arch_map[aname] = self._get_repo_binary_package_map(self._source_repo, self._source_suite_name, component, aname)

        # cache of binary-package mappings from the target repository
        dest_bpkg_arch_map = {}
        for aname in target_archs:
            dest_bpkg_arch_map[aname] = self._get_repo_binary_package_map(self._target_repo, self._target_suite_name, component, aname)

        for spkg in spkgs:
            bin_files_synced = False
            existing_packages = False
            for arch_name in target_archs:
                if arch_name not in src_bpkg_arch_map:
                    continue

                src_bpkg_map = src_bpkg_arch_map[arch_name]
                dest_bpkg_map = dest_bpkg_arch_map[arch_name]

                bin_files = []
                for bin_i in spkg.binaries:
                    if bin_i.name not in src_bpkg_map:
                        if bin_i.name in dest_bpkg_map:
                            existing_packages = True  # package only exists in target
                        continue
                    if arch_name != 'all' and bin_i.architectures == ['all']:
                        # we handle arch:all explicitly
                        continue
                    bpkg = src_bpkg_map[bin_i.name]
                    if bin_i.version != bpkg.source_version:
                        log.debug('Not syncing binary package \'{}\': Version number \'{}\' does not match source package version \'{}\'.'
                                  .format(bpkg.name, bin_i.version, bpkg.source_version))
                        continue

                    ebpkg = dest_bpkg_map.get(bpkg.name)
                    if ebpkg:
                        if version_compare(ebpkg.version, bpkg.version) >= 0:
                            log.debug('Not syncing binary package \'{}/{}\': Existing binary package with bigger/equal version \'{}\' found.'
                                      .format(bpkg.name, bpkg.version, ebpkg.version))
                            existing_packages = True
                            continue

                        # Filter out manual rebuild uploads matching the pattern XbY.
                        # Sometimes rebuild uploads of unmodified packages happen, and if the source
                        # distro did a binNMU, we don't want to sync that, even if its version is higher.
                        # This rebuild-upload check must only happen if we haven't just updated the source package
                        # (in that case the source package version will be bigger than the existing binary package version).
                        if version_compare(spkg.version, ebpkg.version) >= 0:
                            if re.match(r'(.*)b([0-9]+)', ebpkg.version):
                                log.debug('Not syncing binary package \'{}/{}\': Existing binary package with rebuild upload \'{}\' found.'
                                          .format(bpkg.name, bpkg.version, ebpkg.version))
                                existing_packages = True
                                continue

                        if not ignore_target_changes and self._distro_tag in version_revision(ebpkg.version):
                            # safety measure, we should never get here as packages with modifications were
                            # filtered out previously.
                            log.debug('Can not sync binary package {}/{}: Target has modifications.'.format(bin_i.name, bin_i.version))
                            continue

                    fname = self._source_repo.get_file(bpkg.bin_file)
                    bin_files.append(fname)

                # now import the binary packages, if there is anything to import
                if bin_files:
                    bin_files_synced = True
                    ret = self._import_package_files(self._target_suite_name, component, bin_files)
                    if not ret:
                        return False

            if not bin_files_synced and not existing_packages:
                log.warning('No binary packages synced for source {}/{}'.format(spkg.name, spkg.version))

        return True
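The rebuild-upload filter above uses the regular expression (.*)b([0-9]+) to spot binNMU-style version suffixes. A quick, runnable illustration of which version strings it matches:

import re

# '2.1-3b1' and '1.0-2b12' are binNMU/rebuild uploads and match;
# a plain revision like '2.1-3' (or '1.2beta-1') does not.
for ver in ('2.1-3b1', '1.0-2b12', '2.1-3', '1.2beta-1'):
    print(ver, bool(re.match(r'(.*)b([0-9]+)', ver)))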
Code example #7
def update_appstream_data(session, local_repo, repo, suite, component, arch):
    '''
    Import AppStream metadata about software components and associate it with the
    binary packages the data belongs to.
    '''

    if arch.name == 'all':
        # arch:all has no AppStream components, those are always associated with an architecture
        # and are included in arch-specific files (even if the package they belong to is arch:all)
        return

    arch_all = session.query(ArchiveArchitecture) \
                      .filter(ArchiveArchitecture.name == 'all').one()

    yaml_fname = local_repo.index_file(
        suite,
        os.path.join(component.name, 'dep11',
                     'Components-{}.yml.xz'.format(arch.name)))
    if not yaml_fname:
        return

    cidmap_fname = local_repo.index_file(
        suite,
        os.path.join(component.name, 'dep11',
                     'CID-Index-{}.json.gz'.format(arch.name)),
        check=False)
    if not cidmap_fname:
        return

    with gzip.open(cidmap_fname, 'rb') as f:
        cid_map = json.loads(f.read())
    with lzma.open(yaml_fname, 'r') as f:
        yaml_data = str(f.read(), 'utf-8')

    mdata = AppStream.Metadata()
    mdata.set_locale('ALL')
    mdata.set_format_style(AppStream.FormatStyle.COLLECTION)
    mdata.set_parse_flags(AppStream.ParseFlags.IGNORE_MEDIABASEURL)

    mdata.parse(yaml_data, AppStream.FormatKind.YAML)
    cpts = mdata.get_components()
    if len(cpts) == 0:
        return

    log.debug('Found {} software components in {}/{}'.format(
        len(cpts), suite.name, component.name))

    # create context for direct serialization to collection XML chunk
    context = AppStream.Context()
    context.set_locale('ALL')
    context.set_style(AppStream.FormatStyle.COLLECTION)

    for cpt in cpts:
        cpt.set_active_locale('C')

        pkgname = cpt.get_pkgname()
        if not pkgname:
            # we skip these for now, web-apps have no package assigned - we might need a better way to map
            # those to their packages, likely with an improved appstream-generator integration
            log.debug(
                'Found DEP-11 component without package name in {}/{}: {}'.
                format(suite.name, component.name, cpt.get_id()))
            continue

        # fetch package this component belongs to
        bin_pkg = session.query(BinaryPackage) \
            .filter(BinaryPackage.name == pkgname) \
            .filter(BinaryPackage.repo_id == repo.id) \
            .filter(BinaryPackage.architecture_id.in_((arch.id, arch_all.id))) \
            .filter(BinaryPackage.component_id == component.id) \
            .filter(BinaryPackage.suites.any(ArchiveSuite.id == suite.id)) \
            .order_by(BinaryPackage.version.desc()).first()

        if not bin_pkg:
            log.info('Found orphaned DEP-11 component in {}/{}: {}'.format(
                suite.name, component.name, cpt.get_id()))
            continue

        dcpt = SoftwareComponent()
        dcpt.kind = int(cpt.get_kind())
        dcpt.cid = cpt.get_id()

        dcpt.xml = cpt.to_xml_data(context)

        dcpt.gcid = cid_map.get(dcpt.cid)
        if not dcpt.gcid:
            log.info('Found DEP-11 component without GCID in {}/{}: {}'.format(
                suite.name, component.name, cpt.get_id()))

        # create UUID for this component (based on GCID or XML data)
        dcpt.update_uuid()

        existing_dcpt = session.query(SoftwareComponent) \
            .filter(SoftwareComponent.uuid == dcpt.uuid).one_or_none()
        if existing_dcpt:
            if bin_pkg in existing_dcpt.bin_packages:
                continue  # the binary package is already registered with this component
            existing_dcpt.bin_packages.append(bin_pkg)
            continue  # we already have this component, no need to add it again

        # add new software component to database
        dcpt.name = cpt.get_name()
        dcpt.summary = cpt.get_summary()
        dcpt.description = cpt.get_description()

        for icon in cpt.get_icons():
            if icon.get_kind() == AppStream.IconKind.CACHED:
                dcpt.icon_name = icon.get_name()
                break

        dcpt.project_license = cpt.get_project_license()
        dcpt.developer_name = cpt.get_developer_name()

        # test for free software
        dcpt.is_free = False
        if not dcpt.project_license:
            # We have no license set.
            # If we are in the 'main' component, we
            # assume we have free software
            if bin_pkg.component.name == 'main':
                dcpt.is_free = True
        else:
            # have AppStream test the SPDX license expression for free software
            dcpt.is_free = AppStream.license_is_free_license(
                dcpt.project_license)

        dcpt.categories = []
        for cat in cpt.get_categories():
            dcpt.categories.append(cat)

        dcpt.bin_packages = [bin_pkg]

        session.add(dcpt)
        log.debug('Added new software component \'{}\' to database'.format(
            dcpt.cid))
    session.commit()
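SoftwareComponent.update_uuid() is not shown above. A plausible, hedged sketch of what it might do, deriving a deterministic UUID from the GCID and falling back to the serialized XML data (Laniakea's actual implementation may differ):

import uuid


def component_uuid(gcid, xml_data):
    # Deterministic UUID derived from the global component ID, or from the
    # XML data if no GCID is known. This is an assumption about update_uuid().
    seed = gcid if gcid else xml_data
    return uuid.uuid5(uuid.NAMESPACE_URL, seed)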
Code example #8
    def set_trusted(self, trusted):
        self._trusted = trusted
        if self._trusted:
            log.debug('Explicitly marked repository "{}" as trusted.'.format(self.location))
Code example #9
File: spearsengine.py  Project: dos1/laniakea
    def _create_faux_packages(self, session, mi_wspace: str,
                              suites_source: List[ArchiveSuite],
                              suite_target: ArchiveSuite):
        '''
        If we have a partial source and target suite, we need to let Britney know about the
        parent packages somehow.
        At the moment, we simply abuse the FauxPackages system for that.
        '''

        # we don't support more than one source suite for this feature at the moment
        if len(suites_source) > 1:
            log.info(
                'Not auto-generating faux packages: Multiple suites set as sources.'
            )
            return

        suite_source = suites_source[0]

        if suite_source.parent and suite_target.parent:
            log.info(
                'Creating faux-packages to aid resolving of partial suites.')
        else:
            log.info(
                'Not auto-generating faux packages: No source and target suite parents, generation is unnecessary.'
            )
            return

        existing_pkg_arch_set = set()
        log.debug(
            'Creating index of valid packages that do not need a faux package.'
        )

        # we need repository information to only generate faux packages if a package doesn't exist
        # in our source suite(s) already
        repo = self._get_local_repo(session)

        for suite in suites_source:
            esuite = session.query(ArchiveSuite) \
                            .options(joinedload(ArchiveSuite.components)) \
                            .options(joinedload(ArchiveSuite.architectures)) \
                            .filter(ArchiveSuite.id == suite.id).one()
            session.expunge(
                esuite
            )  # we don't want packages accidentally added to the database here
            for component in esuite.components:
                for arch in esuite.architectures:
                    aname = arch.name
                    for bpkg in repo.binary_packages(esuite, component, arch):
                        existing_pkg_arch_set.add(aname + ':' + bpkg.name)
                    for spkg in repo.source_packages(esuite, component):
                        existing_pkg_arch_set.add(aname + ':' + spkg.name)

        archive_root_dir = self._lconf.archive_root_dir
        fauxpkg_fname = os.path.join(mi_wspace, 'input', 'faux-packages')

        log.debug('Generating faux packages list')
        fauxpkg_data = {}
        for component in suite_target.parent.components:

            for installer_dir in ['', 'debian-installer']:
                for arch in suite_target.parent.architectures:
                    pfile = os.path.join(archive_root_dir, 'dists',
                                         suite_target.parent.name,
                                         component.name, installer_dir,
                                         'binary-{}'.format(arch.name),
                                         'Packages.xz')
                    if not os.path.isfile(pfile):
                        continue

                    log.debug('Reading data for faux packages list: {}'.format(
                        pfile))

                    with TagFile(pfile) as tf:
                        for e in tf:
                            pkgname = e['Package']
                            pkgversion = e['Version']
                            pkgarch = e['Architecture']

                            pkid = '{}-{}-{}'.format(pkgname, pkgversion,
                                                     pkgarch)
                            if pkid in fauxpkg_data:
                                continue
                            pkgname_arch = pkgarch + ':' + pkgname
                            if pkgname_arch in existing_pkg_arch_set:
                                continue
                            provides = e.get('Provides', '')

                            data = 'Package: {}\nVersion: {}'.format(
                                pkgname, pkgversion)
                            if pkgarch and pkgarch != 'all':
                                data = data + '\nArchitecture: {}'.format(
                                    pkgarch)
                            if provides:
                                data = data + '\nProvides: {}'.format(provides)
                            if component.name != 'main':
                                data = data + '\nComponent: {}'.format(
                                    component.name)

                            fauxpkg_data[pkid] = data

                            # FIXME: We shouldn't have to special-case this :any case,
                            # rather Britney should do the right thing and recognize this
                            # notation for faux-packages. But until that is fixed
                            # properly and since a dependency on python3:any is so common, we
                            # will work around this issue
                            if pkgname == 'python3':
                                pkid = '{}-{}-{}'.format(
                                    'python3:any', pkgversion, pkgarch)
                                if pkid in fauxpkg_data:
                                    continue
                                fauxpkg_data[pkid] = data.replace(
                                    'Package: python3\n',
                                    'Package: python3:any\n')

        with open(fauxpkg_fname, 'w') as f:
            for segment in fauxpkg_data.values():
                f.write(segment + '\n\n')
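For a package that only exists in the parent suite, say foo 1.2-3 on amd64 in the contrib component, the loop above would emit a faux-package stanza like the following (arch:all packages omit the Architecture field, packages from main omit the Component field, and a Provides field is added when the entry declares one):

Package: foo
Version: 1.2-3
Architecture: amd64
Component: contrib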
Code example #10
File: spearsengine.py  Project: dos1/laniakea
    def _prepare_source_data(self, session, mi_wspace: str,
                             suites_source: List[ArchiveSuite],
                             suite_target: ArchiveSuite):
        '''
        If there is more than one source suite, we need to give Britney an amalgamation
        of the data of the two source suites.
        This function prepares this data.
        '''

        # only one suite means we can use the suite's data directly
        if len(suites_source) <= 1:
            return

        archive_root_dir = self._lconf.archive_root_dir
        fake_dists_dir = self._get_source_suite_dists_dir(
            mi_wspace, suites_source)

        for component in suite_target.components:
            for arch in suite_target.architectures:
                if arch.name == 'all':
                    continue
                packages_files = []

                for installer_dir in ['', 'debian-installer']:
                    for suite_source in suites_source:
                        pfile = os.path.join(archive_root_dir, 'dists',
                                             suite_source.name, component.name,
                                             installer_dir,
                                             'binary-{}'.format(arch.name),
                                             'Packages.xz')
                        if os.path.isfile(pfile):
                            log.debug(
                                'Looking for packages in: {}'.format(pfile))
                            packages_files.append(pfile)

                    if not packages_files and not installer_dir:
                        raise Exception(
                            'No packages found on {}/{} in sources for migration "{}": Can not continue.'
                            .format(
                                component.name, arch.name,
                                self._get_migration_id(suites_source,
                                                       suite_target)))

                    # create new merged Packages file
                    target_packages_file = os.path.join(
                        fake_dists_dir, component.name, installer_dir,
                        'binary-{}'.format(arch.name), 'Packages.xz')
                    log.debug('Generating combined new fake packages file: {}'.
                              format(target_packages_file))
                    os.makedirs(os.path.dirname(target_packages_file),
                                exist_ok=True)

                    data = b''
                    for fname in packages_files:
                        with open_compressed(fname) as f:
                            data = data + f.read()
                    with lzma.open(target_packages_file, 'w') as f:
                        f.write(data)

            sources_files = []
            for suite_source in suites_source:
                sfile = os.path.join(archive_root_dir, 'dists',
                                     suite_source.name, component.name,
                                     'source', 'Sources.xz')
                if os.path.isfile(sfile):
                    log.debug(
                        'Looking for source packages in: {}'.format(sfile))
                    sources_files.append(sfile)

            if not sources_files:
                raise Exception(
                    'No source packages found in "{}" sources for migration "{}": Can not continue.'
                    .format(
                        component.name,
                        self._get_migration_id(suites_source, suite_target)))

            # Create new merged Sources file
            target_sources_file = os.path.join(fake_dists_dir, component.name,
                                               'source', 'Sources.xz')
            log.debug('Generating combined new fake sources file: {}'.format(
                target_sources_file))
            os.makedirs(os.path.dirname(target_sources_file), exist_ok=True)

            data = b''
            for fname in sources_files:
                with open_compressed(fname) as f:
                    data = data + f.read()
            with lzma.open(target_sources_file, 'w') as f:
                f.write(data)

        # Britney needs a Release file to determine the source suite's components and architectures.
        # To keep things simple, we just copy one of the source Release files.
        # TODO: Synthesize a dedicated file instead and be less lazy
        release_file = os.path.join(archive_root_dir, 'dists',
                                    suites_source[0].name, 'Release')
        target_release_file = os.path.join(fake_dists_dir, 'Release')
        log.debug('Using Release file for fake suite: {}'.format(
            target_release_file))
        if os.path.isfile(target_release_file):
            os.remove(target_release_file)
        shutil.copyfile(release_file, target_release_file)
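The open_compressed() helper used above is not shown. A minimal sketch, assuming it simply dispatches on the file extension (the real Laniakea utility may differ):

import gzip
import lzma


def open_compressed(fname, mode='rb'):
    # Open a possibly-compressed file transparently, based on its extension.
    if fname.endswith('.xz'):
        return lzma.open(fname, mode)
    if fname.endswith('.gz'):
        return gzip.open(fname, mode)
    return open(fname, mode)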