def _generate_depcheck_yaml(self, suite): ''' Get Dose YAML data for build installability issues in the selected suite. ''' arch_issue_map = {} for arch in suite.architectures: # fetch binary-package index list indices = self._get_full_index_info(suite, arch, False) if not indices['fg']: if arch.name == 'all': continue raise Exception('Unable to get any indices for {}/{} to check for dependency issues.'.format(suite.name, arch.name)) dose_args = ['--quiet', '--latest=1', '-e', '-f', '--summary', '--deb-native-arch={}'.format(suite.primary_architecture.name if arch.name == 'all' else arch.name)] # run depcheck indices_args = [] for f in indices['bg']: indices_args.append('--bg={}'.format(f)) for f in indices['fg']: indices_args.append('--fg={}'.format(f)) success, data = self._execute_dose('dose-debcheck', dose_args, indices_args) if not success: log.error('Dose debcheck command failed: ' + ' '.join(dose_args) + ' ' + ' '.join(indices_args) + '\n' + data) raise Exception('Unable to run Dose for {}/{}: {}'.format(suite.name, arch.name, data)) arch_issue_map[arch.name] = data return arch_issue_map
def run_migration(self, source_suite_name: str, target_suite_name: str):
    ''' Run the package migration task.

    If a source suite name is given, only the matching migration recipe is
    executed; otherwise every configured migration entry is processed.
    '''
    with session_scope() as session:
        migration_entries = session.query(SpearsMigrationEntry).all()

        if source_suite_name:
            # we have parameters, so limit which migration entries we act on
            if not target_suite_name:
                log.error('Target suite parameter is missing!')
                return False

            migration_id = '{}-to-{}'.format(source_suite_name, target_suite_name)
            matches = [e for e in migration_entries if e.make_migration_id() == migration_id]
            if not matches:
                log.error('Could not find migration recipe with ID "{}"'.format(migration_id))
                return False
            # act only on the first matching recipe, like the ID lookup implies
            migration_entries = matches[:1]

        return self._run_migration_for_entries(session, migration_entries)
def _import_source_package(self, spkg: SourcePackage, component: str) -> bool: ''' Import a source package from the source repository into the target repo. ''' dscfile = None for f in spkg.files: # the source repository might be on a remote location, so we need to # request each file to be there. # (dak will fetch the files referenced in the .dsc file from the same directory) if f.fname.endswith('.dsc'): dscfile = self._source_repo.get_file(f) self._source_repo.get_file(f) if not dscfile: log.error( 'Critical consistency error: Source package {} in repository {} has no .dsc file.' .format(spkg.name, self._source_repo.base_dir)) return False if self._import_package_files(self._target_suite_name, component, [dscfile]): self._synced_source_pkgs.append(spkg) return True return False
def command_sync(options):
    ''' Synchronize a dedicated set of packages '''
    if not options.packages:
        print('You need to define at least one package to synchronize!')
        sys.exit(1)

    bconf, sconf = get_sync_config()

    with session_scope() as session:
        si = session.query(SynchrotronConfig) \
            .join(SynchrotronConfig.destination_suite) \
            .join(SynchrotronConfig.source) \
            .filter(ArchiveSuite.name == options.dest_suite,
                    SynchrotronSource.suite_name == options.src_suite).one_or_none()
        if not si:
            log.error(
                'Unable to find a sync config for this source/destination combination.'
            )
            # sys.exit raises SystemExit, so a trailing 'return' here would be dead code
            sys.exit(4)
        if not si.sync_enabled:
            log.error(
                'Can not synchronize package: Synchronization is disabled for this configuration.'
            )
            sys.exit(3)

        incoming_suite = get_suiteinfo_for_suite(si.destination_suite)
        sconf.syncBinaries = si.sync_binaries
        sconf.source.defaultSuite = si.source.suite_name
        sconf.source.repoUrl = si.source.repo_url

        engine = SyncEngine(bconf, sconf, incoming_suite)

        blacklist_pkgnames = get_package_blacklist()
        engine.setSourceSuite(si.source.suite_name)
        engine.setBlacklist(blacklist_pkgnames)

        ret = engine.syncPackages(options.component, options.packages, options.force)
        publish_synced_spkg_events(engine,
                                   si.source.os_name,
                                   si.source.suite_name,
                                   si.destination_suite.name,
                                   options.force)
        if not ret:
            sys.exit(2)
def _suites_from_migration_entry(self, session, mentry: SpearsMigrationEntry):
    ''' Resolve the source and target ArchiveSuite rows for a migration entry.

    Returns a dict with keys 'error' (bool), 'from' (list of suites) and
    'to' (single suite or None).
    '''
    result = {'error': False, 'from': [], 'to': None}

    # resolve every source suite; abort on the first one that is unknown
    for suite_name in mentry.source_suites:
        suite = session.query(ArchiveSuite) \
            .filter(ArchiveSuite.name == suite_name).one_or_none()
        if not suite:
            log.error(
                'Migration source suite "{}" does not exist. Can not create configuration.'
                .format(suite_name))
            result['error'] = True
            return result
        result['from'].append(suite)

    # resolve the target suite
    target = session.query(ArchiveSuite) \
        .filter(ArchiveSuite.name == mentry.target_suite).one_or_none()
    if not target:
        log.error(
            'Migration target suite "{}" does not exist. Can not create configuration.'
            .format(mentry.target_suite))
        result['error'] = True
        return result
    result['to'] = target

    # a migration into one of its own sources makes no sense
    if target in result['from']:
        log.error(
            'Migration target suite ({}) is contained in source suite list.'
            .format(target.name))
        result['error'] = True
        return result

    return result
def _fetch_repo_file_internal(self, location, check=False): ''' Download a file and retrieve a filename. This function does not validate the result, this step has to be done by the caller. ''' if self._repo_url: source_url = os.path.join(self._repo_url, location) target_fname = os.path.join(self._root_dir, location) os.makedirs(os.path.dirname(target_fname), exist_ok=True) download_file(source_url, target_fname, check=check) return target_fname else: fname = os.path.join(self._root_dir, location) if os.path.isfile(fname): return fname # There was an error, we couldn't find or download the file log.error('Could not find repository file "{}"'.format(location)) return None
def install_trusted_keyfile(options):
    ''' Install a public key to trust a client node. '''
    from shutil import copyfile

    if not options.name:
        print('No name for this public key / client given!')
        sys.exit(1)

    src_file = options.keyfile
    if not src_file:
        print('No public key file given!')
        sys.exit(1)
    if not os.path.isfile(src_file):
        print('Public key file "{}" was not found.'.format(src_file))
        sys.exit(1)

    # try to read a ZCurve certificate from the file
    pub, sec = None, None
    try:
        pub, sec = zmq.auth.load_certificate(src_file)
    except ValueError:
        pass
    if not pub:
        log.info('The given keyfile does not contain a public ZCurve key!')
    if sec:
        # warn loudly: secret material must stay on the client
        print('')
        print('/!\\ The current file contains a secret ZCurve key. This file should never leave the client machine it is installed on.')
        print('')

    _, vkey = keyfile_read_verify_key(src_file)
    if not vkey:
        log.info('The given keyfile does not contain a verification key!')
    if not vkey and not pub:
        # nothing usable in this file at all
        log.error('The keyfile does not contain either a public encryption, nor a verification key. Can not continue.')
        sys.exit(4)

    _, skey = keyfile_read_signing_key(src_file)
    if skey:
        # warn loudly: secret material must stay on the client
        print('')
        print('/!\\ The current file contains a secret signing key. This file should never leave the client machine it is installed on.')
        print('')

    local_conf = LocalConfig()
    dest_file = os.path.join(local_conf.trusted_curve_keys_dir, '{}.pub.key'.format(options.name))
    if os.path.isfile(dest_file) and not options.force:
        print('We already trust a key for "{}" on this machine. You can override the existing one by specifying "--force".'.format(options.name))
        sys.exit(2)

    try:
        copyfile(src_file, dest_file)
    except Exception as e:
        print('Failed to install new public key as {}: {}'.format(dest_file, str(e)))
        sys.exit(3)
    print('Installed as {}'.format(dest_file))
def sync_packages(self, component: str, pkgnames: List[str], force: bool = False):
    ''' Synchronize the given list of packages from the source suite into the target suite.

    :param component: Archive component to sync from/into (e.g. "main").
    :param pkgnames: Names of the source packages to synchronize.
    :param force: If True, sync even when the destination version is newer/equal
                  or carries distro-specific modifications.
    :return: True on success, False if the sync configuration was unusable or a
             source-package import failed.
    '''
    self._synced_source_pkgs = []
    with session_scope() as session:
        # look up the sync configuration matching our source/target suite pair
        sync_conf = session.query(SynchrotronConfig) \
            .join(SynchrotronConfig.destination_suite) \
            .join(SynchrotronConfig.source) \
            .filter(ArchiveSuite.name == self._target_suite_name,
                    SynchrotronSource.suite_name == self._source_suite_name).one_or_none()
        if not sync_conf:
            log.error('Unable to find a sync config for this source/destination combination.')
            return False
        if not sync_conf.sync_enabled:
            log.error('Can not synchronize package: Synchronization is disabled for this configuration.')
            return False

        target_suite = session.query(ArchiveSuite) \
            .filter(ArchiveSuite.name == self._target_suite_name).one()

        # map package name -> package for both the destination and the source repo
        dest_pkg_map = self._get_target_source_packages(component)
        src_pkg_map = self._get_repo_source_package_map(self._source_repo,
                                                        self._source_suite_name,
                                                        component)

        for pkgname in pkgnames:
            spkg = src_pkg_map.get(pkgname)
            dpkg = dest_pkg_map.get(pkgname)

            if not spkg:
                log.info('Can not sync {}: Does not exist in source.'.format(pkgname))
                continue
            if pkgname in self._sync_blacklist:
                log.info('Can not sync {}: The package is blacklisted.'.format(pkgname))
                continue

            if dpkg:
                # skip if the destination already has this version or newer,
                # unless the user explicitly forces the sync
                if version_compare(dpkg.version, spkg.version) >= 0:
                    if force:
                        log.warning('{}: Target version \'{}\' is newer/equal than source version \'{}\'.'
                                    .format(pkgname, dpkg.version, spkg.version))
                    else:
                        log.info('Can not sync {}: Target version \'{}\' is newer/equal than source version \'{}\'.'
                                 .format(pkgname, dpkg.version, spkg.version))
                        continue

                if not force:
                    # a distro tag in the destination revision means local modifications;
                    # never overwrite those without --force
                    if self._distro_tag in version_revision(dpkg.version):
                        log.error('Not syncing {}/{}: Destination has modifications (found {}).'
                                  .format(spkg.name, spkg.version, dpkg.version))
                        continue

            # sync source package
            # the source package must always be known to dak first
            ret = self._import_source_package(spkg, component)
            if not ret:
                return False

        ret = self._import_binaries_for_source(sync_conf, target_suite, component, self._synced_source_pkgs, force)

        # TODO: Analyze the input, fetch the packages from the source distribution and
        # import them into the target in their correct order.
        # Then apply the correct, synced override from the source distro.

        self._publish_synced_spkg_events(sync_conf.source.os_name,
                                         sync_conf.source.suite_name,
                                         sync_conf.destination_suite.name,
                                         force)
        return ret