import os
import subprocess
from typing import Iterator, List, Optional

# Logger, Fetcher, Package, PackageTransformer, RepositoryMetadata, Repository,
# Source, MaintainerManager, AtomicDir and PersistentData are project-internal
# types assumed to be importable where these snippets live.


def run_subprocess(command: List[str], logger: Logger, cwd: Optional[str] = None) -> None:
    message = 'running "{}"'.format(' '.join(command))
    if cwd is not None:
        message += ' in "{}"'.format(cwd)
    logger.log(message)

    with subprocess.Popen(command,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.STDOUT,
                          universal_newlines=True,
                          encoding='utf-8',
                          errors='ignore',
                          cwd=cwd) as proc:
        assert proc.stdout is not None
        for line in proc.stdout:
            logger.get_indented().log(line.strip())
        proc.wait()

    logger.log('command finished with code {}'.format(proc.returncode),
               logger.NOTICE if proc.returncode == 0 else logger.ERROR)

    if proc.returncode != 0:
        raise subprocess.CalledProcessError(cmd=command, returncode=proc.returncode)

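# Hypothetical usage sketch for run_subprocess. StubLogger below is a stand-in
# invented for this example (the real Logger is a project type); it implements
# only the calls run_subprocess makes: log() with an optional severity, and
# get_indented().
class StubLogger:
    NOTICE = 'notice'
    ERROR = 'error'

    def __init__(self, indent: int = 0) -> None:
        self.indent = indent

    def log(self, message: str, severity: str = NOTICE) -> None:
        print(' ' * self.indent + '[' + severity + '] ' + message)

    def get_indented(self, levels: int = 1) -> 'StubLogger':
        return StubLogger(self.indent + 2 * levels)


# e.g.:
#   run_subprocess(['git', 'fetch', '--tags'], StubLogger(), cwd='/path/to/checkout')
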
def _iter_parse_all_sources(self, repository: RepositoryMetadata, transformer: Optional[PackageTransformer], logger: Logger) -> Iterator[Package]:
    for source in repository['sources']:
        logger.log('parsing source {} started'.format(source['name']))
        yield from self._iter_parse_source(repository, source, transformer, logger.get_indented())
        logger.log('parsing source {} complete'.format(source['name']))

def _do_fetch(self, statedir: AtomicDir, persdata: PersistentData, logger: Logger) -> bool:
    packages_url = self.url + 'packages.gz'
    logger.get_indented().log('fetching package list from ' + packages_url)
    data = self.do_http(packages_url).text  # autogunzipped?

    package_names = []
    for line in data.split('\n'):
        line = line.strip()
        if line.startswith('#') or line == '':
            continue
        package_names.append(line)

    if not package_names:
        raise RuntimeError('Empty package list received, refusing to continue')

    logger.get_indented().log('{} package name(s) parsed'.format(len(package_names)))

    for num_page, (url, num_packages) in enumerate(_split_names_into_urls(self.url + '/rpc/?v=5&type=info', package_names, self.max_api_url_length)):
        logger.get_indented().log('fetching page {} of {} package(s)'.format(num_page + 1, num_packages))

        with open(os.path.join(statedir.get_path(), '{}.json'.format(num_page)), 'wb') as statefile:
            statefile.write(self.do_http(url).content)

    return True

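from typing import Iterable, Tuple
from urllib.parse import quote


# A minimal sketch of the batching _split_names_into_urls would have to
# perform; the helper is referenced above but not shown, so this is an
# assumption about its behavior rather than the project's implementation.
# Names are URL-quoted and packed as &arg[]=<name> parameters onto the given
# prefix until the next name would push the URL past max_url_length, at which
# point the current batch is yielded as an (url, package_count) pair.
def _split_names_into_urls(prefix: str, package_names: Iterable[str], max_url_length: int) -> Iterator[Tuple[str, int]]:
    batch = []
    length = len(prefix)

    for name in package_names:
        arg = '&arg[]=' + quote(name)
        if batch and length + len(arg) > max_url_length:
            yield prefix + ''.join(batch), len(batch)
            batch, length = [], len(prefix)
        batch.append(arg)
        length += len(arg)

    if batch:
        yield prefix + ''.join(batch), len(batch)
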
def _fetch(self, repository: RepositoryMetadata, update: bool, logger: Logger) -> bool:
    logger.log('fetching started')

    if not os.path.isdir(self.statedir):
        os.mkdir(self.statedir)

    have_changes = False
    for source in repository['sources']:
        if not os.path.isdir(self._get_state_path(repository)):
            os.mkdir(self._get_state_path(repository))

        have_changes |= self._fetch_source(repository, update, source, logger.get_indented())

    logger.log('fetching complete' + ('' if have_changes else ' (no changes)'))

    return have_changes

def _fetch_source(self, repository: Repository, update: bool, source: Source, logger: Logger) -> bool:
    logger.log(f'fetching source {source.name} started')

    fetcher: Fetcher = self.fetcher_factory.spawn_with_known_args(
        source.fetcher['class'],
        source.fetcher
    )

    have_changes = fetcher.fetch(
        self._get_state_source_path(repository, source),
        update=update,
        logger=logger.get_indented()
    )

    logger.log(f'fetching source {source.name} complete' + ('' if have_changes else ' (no changes)'))

    return have_changes

def _fetch_source(self, repository: RepositoryMetadata, update: bool, source: RepositoryMetadata, logger: Logger) -> bool:
    if 'fetcher' not in source:
        logger.log('fetching source {} not supported'.format(source['name']))
        return False

    logger.log('fetching source {} started'.format(source['name']))

    fetcher: Fetcher = self.fetcher_factory.spawn_with_known_args(source['fetcher'], source)

    have_changes = fetcher.fetch(
        self._get_state_source_path(repository, source),
        update=update,
        logger=logger.get_indented()
    )

    logger.log('fetching source {} complete'.format(source['name']) + ('' if have_changes else ' (no changes)'))

    return have_changes

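# For reference, a hypothetical shape of a source entry as consumed by the
# dict-based _fetch_source above; the keys are inferred from the code and the
# values are invented for illustration:
#
#   source = {
#       'name': 'example-source',
#       'fetcher': 'FileFetcher',  # fetcher class name resolved by the factory
#       # ... remaining keys are passed to spawn_with_known_args as arguments
#   }
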
def _load_spec(self, package: str, statedir: AtomicDir, logger: Logger) -> None:
    specurl = self.giturl + '/{0}.git/plain/{0}.spec'.format(package)

    logger.get_indented().log('getting spec from {}'.format(specurl))

    r = do_http(specurl, check_status=False)
    if r.status_code != 200:
        deadurl = self.giturl + '/{0}.git/plain/dead.package'.format(package)

        dr = do_http(deadurl, check_status=False)
        if dr.status_code == 200:
            logger.get_indented(2).log('dead: ' + ';'.join(dr.text.split('\n')))
        else:
            logger.get_indented(2).log('failed: {}'.format(r.status_code))  # XXX: check .dead.package, instead throw

        return

    with open(os.path.join(statedir.get_path(), package + '.spec'), 'wb') as file:
        file.write(r.content)

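# For illustration, with a hypothetical giturl of 'https://git.example.org'
# (an assumption, not the configured value), _load_spec('foo', ...) first
# tries 'https://git.example.org/foo.git/plain/foo.spec' and, on failure,
# probes 'https://git.example.org/foo.git/plain/dead.package' to tell retired
# packages apart from genuine fetch errors.
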
def _iter_parse_all_sources(
    self,
    repository: Repository,
    transformer: PackageTransformer | None,
    maintainermgr: MaintainerManager | None,
    logger: Logger
) -> Iterator[Package]:
    for source in repository.sources:
        logger.log(f'parsing source {source.name} started')
        yield from self._iter_parse_source(repository, source, transformer, maintainermgr, logger.get_indented())
        logger.log(f'parsing source {source.name} complete')