def __init__(self, rules_config: YamlConfig) -> None:
    self._rules = []
    for rule in rules_config.get_items():
        self._add_rule(rule)
    self._hash = rules_config.get_hash()
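# Illustrative sketch (not part of the repository sources): the constructor above
# can be fed inline YAML via YamlConfig.from_text(), as the test helpers further
# below do. The rule keys used here (name, setname) are assumptions made for this
# example only; the actual rule schema is defined by _add_rule(), which is not
# shown in this section.
_example_ruleset = Ruleset(YamlConfig.from_text("""
- { name: example-package, setname: example }
"""))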
def test_cache_update(testdata_dir, datadir):
    cache = ParsedConfigCache(datadir / 'config-cache')

    shutil.copytree(testdata_dir / 'yaml_configs', datadir / 'configs')

    # populate cache
    config = YamlConfig.from_path(datadir / 'configs', cache)

    # modify config
    with open(datadir / 'configs' / '1.yaml', 'a') as fd:
        print('- { "foo": 11 }', file=fd)

    config = YamlConfig.from_path(datadir / 'configs', cache)

    assert config.get_items() == [
        { 'foo': 1 },
        { 'foo': 11 },
        { 'bar': 2 },
        { 'baz': 3 },
    ]
    assert config.get_hash() == '0c86170147f75217684bc52ed5f87085460a7fe4b2af901d7a9569772139594c'
def __init__(self, maintainers_config: YamlConfig) -> None:
    self._hidden_maintainers = set()

    for maintainerdata in maintainers_config.get_items():
        maintainer: str | None = maintainerdata.get('maintainer')
        if maintainer is not None and maintainerdata.get('hide'):
            self._hidden_maintainers.add(maintainer)
def test_file(testdata_dir):
    config = YamlConfig.from_path(testdata_dir / 'yaml_configs' / '1.yaml')
    assert config.get_items() == [
        { 'foo': 1 },
    ]
    assert config.get_hash() == '1b8a0882147fa5c71060f8a3f8cf7fa3e97f0b729ccc134526052ae0c181a925'
def test_hide():
    # maintainer addresses are generic placeholders; the original test data was
    # redacted in the source, only the hidden/visible structure is preserved
    config = YamlConfig.from_text("""
    - { maintainer: [email protected], hide: true }
    - { maintainer: [email protected], hide: false }
    - { maintainer: [email protected] }
    """)

    m = MaintainerManager(config)

    assert m.is_hidden('[email protected]')
    assert not m.is_hidden('[email protected]')
    assert not m.is_hidden('[email protected]')
    assert not m.is_hidden('[email protected]')
def check_transformer(rulestext: str, *samples: PackageSample) -> None:
    __tracebackhide__ = True

    ruleset = Ruleset(YamlConfig.from_text(rulestext))

    sample_by_repo = defaultdict(list)
    for sample in samples:
        sample_by_repo[sample.package.repo].append(sample)

    for repo, repo_samples in sample_by_repo.items():
        transformer = PackageTransformer(ruleset, repo, {repo})
        for sample in repo_samples:
            transformer.process(sample.package)
            sample.check_pytest()
def test_files(testdata_dir):
    config = YamlConfig.from_path(testdata_dir / 'yaml_configs')
    assert config.get_items() == [
        { 'foo': 1 },
        { 'bar': 2 },
        { 'baz': 3 },
    ]
    assert config.get_hash() == 'd6080e544cb4490aa1381f4cd3892c2f858f01fd6f0897e1d2829b20187b70e9'
def test_text():
    config = YamlConfig.from_text("""
    - { foo: 1 }
    - { bar: 2 }
    """)
    assert config.get_items() == [
        { 'foo': 1 },
        { 'bar': 2 },
    ]
    assert config.get_hash() == 'da87f61f7796c806802a20b96d40406533fb91e07945c747ed84124b6277151d'
def test_parsers_regress(regtest):
    repomgr = RepositoryManager(YamlConfig.from_path(config['REPOS_DIR']))
    repoproc = RepositoryProcessor(repomgr, 'testdata', 'testdata', safety_checks=False)

    # NOTE: run the following command to canonize this test after parser or testdata update
    #
    #     pytest -k test_parsers_regress --regtest-reset
    #
    with regtest:
        # map() is lazy in Python 3, so iterate explicitly to actually print each dump
        for dump in sorted(
            json.dumps(package.__dict__, indent=1)
            for package in repoproc.iter_parse(reponames=['have_testdata'])
        ):
            print(dump)
def test_cache(testdata_dir, datadir):
    cache = ParsedConfigCache(datadir / 'config-cache')

    for _ in ('populate cache', 'use cache'):
        config = YamlConfig.from_path(testdata_dir / 'yaml_configs', cache)
        assert config.get_items() == [
            { 'foo': 1 },
            { 'bar': 2 },
            { 'baz': 3 },
        ]
        assert config.get_hash() == 'd6080e544cb4490aa1381f4cd3892c2f858f01fd6f0897e1d2829b20187b70e9'
def main() -> int:
    options = parse_arguments()

    logger: Logger = StderrLogger()
    if options.logfile:
        logger = FileLogger(options.logfile)

    if options.fields == 'all':
        options.fields = ['effname', 'repo', 'version'] + [
            slot for slot in Package.__slots__
            if slot not in ['effname', 'repo', 'version']
        ]
    else:
        options.fields = options.fields.split(',')

    repomgr = RepositoryManager(YamlConfig.from_path(options.repos_dir))
    repoproc = RepositoryProcessor(repomgr, options.statedir, options.parseddir)

    logger.log('dumping...')
    for packageset in repoproc.iter_parsed(reponames=options.reponames, logger=logger):
        if options.from_ is not None and packageset[0].effname < options.from_:
            continue
        if options.to is not None and packageset[0].effname > options.to:
            break

        classify_packages(packageset)

        if not options.all and packageset_is_shadow_only(packageset):
            continue

        for package in packageset:
            print(
                options.field_separator.join(
                    format_package_field(field, getattr(package, field, None))
                    for field in options.fields
                )
            )

    return 0
def __init__(self, repositories_config: YamlConfig) -> None:
    self._repositories = []
    self._repo_by_name = {}

    # process source loops
    for repodata in repositories_config.get_items():
        extra_groups = set()

        sources = []
        for sourcedata in repodata['sources']:
            if sourcedata.get('disabled', False):
                continue

            for name in _listify(sourcedata['name']):
                # if there are multiple names, clone source data for each of them
                processed_sourcedata = _subst_source_recursively(copy.deepcopy(sourcedata), name)
                sources.append(
                    Source(
                        name=name,
                        subrepo=processed_sourcedata.get('subrepo'),
                        fetcher=processed_sourcedata['fetcher'],
                        parser=processed_sourcedata['parser'],
                        packagelinks=[
                            PackageLink(type=LinkType.from_string(linkdata['type']), url=linkdata['url'])
                            for linkdata in processed_sourcedata.get('packagelinks', [])
                        ],
                    )
                )

            extra_groups.add(sourcedata['fetcher']['class'])
            extra_groups.add(sourcedata['parser']['class'])

        repo = Repository(
            name=repodata['name'],
            sortname=repodata.get('sortname', repodata['name']),
            singular=repodata.get('singular', repodata['desc'] + ' package'),
            type=repodata.get('type', 'repository'),
            desc=repodata['desc'],
            statsgroup=repodata.get('statsgroup', repodata['desc']),
            family=repodata['family'],
            ruleset=_listify(repodata.get('ruleset', repodata['family'])),
            color=repodata.get('color'),
            valid_till=repodata.get('valid_till'),
            default_maintainer=repodata.get('default_maintainer'),
            update_period=_parse_duration(repodata.get('update_period', 600)),
            minpackages=repodata.get('minpackages', 0),
            shadow=repodata.get('shadow', False),
            incomplete=repodata.get('incomplete', False),
            repolinks=repodata.get('repolinks', []),
            packagelinks=[
                PackageLink(type=LinkType.from_string(linkdata['type']), url=linkdata['url'])
                for linkdata in repodata.get('packagelinks', [])
            ],
            groups=repodata.get('groups', []) + list(extra_groups),
            sources=sources,
        )

        self._repositories.append(repo)
        self._repo_by_name[repo.name] = repo

    self._repositories = sorted(self._repositories, key=lambda repo: repo.sortname)
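# Illustrative sketch (not from the repository sources): a minimal repository
# definition exercising the constructor above. Only keys actually read by that
# code are included (name, desc, family, sources with name/fetcher/parser); the
# repository name and the fetcher/parser class names are placeholders made up
# for this example.
_example_repomgr = RepositoryManager(YamlConfig.from_text("""
[
    {
        name: examplerepo,
        desc: Example Repository,
        family: examplerepo,
        sources: [
            {
                name: packages,
                fetcher: { class: ExampleFetcher },
                parser: { class: ExampleParser }
            }
        ]
    }
]
"""))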
def get_yaml(path: str) -> Any:
    return YamlConfig.from_path(path).get_items()
def get_repos_config(self) -> YamlConfig:
    return YamlConfig.from_path(self.options.repos_dir, self.get_parsed_config_cache())
def get_maintainers_config(self) -> YamlConfig:
    return YamlConfig.from_path(self.options.maintainers_config, self.get_parsed_config_cache())
# along with repology. If not, see <http://www.gnu.org/licenses/>.

from collections import defaultdict

from repology.repomgr import RepositoryManager
from repology.transformer import PackageTransformer
from repology.transformer.ruleset import Ruleset
from repology.yamlloader import YamlConfig

from ..package import PackageSample

_repomgr = RepositoryManager(YamlConfig.from_text("""
[
    { name: dummyrepo, desc: dummyrepo, family: dummyrepo, sources: [] },
    { name: foo, desc: foo, family: foo, sources: [] },
    { name: bar, desc: bar, family: bar, sources: [] },
    { name: baz, desc: baz, family: baz, sources: [] }
]
"""))