def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield one package per project, keeping only the newest known release."""
    # note that we actually parse the database prepared by the fetcher,
    # not the file we've downloaded
    latest: Dict[str, PackageMaker] = {}

    for entry in iter_json_list(path, ('releases', None)):
        with factory.begin() as pkg:
            pkg.set_name(entry['name'])
            pkg.set_version(entry['version'])

            if not pkg.check_sanity(verbose=False):
                continue

            pkg.add_homepages(entry.get('homepage'))
            pkg.set_summary(entry.get('summary'))
            if not pkg.comment:
                # fall back to the (multiline) description when summary is absent
                pkg.set_summary(entry.get('description'))

            #pkg.add_maintainers(entry.get('submitter') + '@freshcode')  # unfiltered garbage
            #pkg.add_downloads(entry.get('download'))  # ignore for now, may contain download page urls instead of file urls
            pkg.add_licenses(entry.get('license'))

            # keep whichever release compares greatest for each project name
            if pkg.name not in latest or version_compare(pkg.version, latest[pkg.name].version) > 0:
                latest[pkg.name] = pkg

    yield from latest.values()
def iter_parse(self, path: str, factory: PackageFactory) -> Iterable[PackageMaker]:
    """Yield one package per project, keeping only the newest known release."""
    # note that we actually parse the database prepared by the fetcher,
    # not the file we've downloaded
    latest: dict[str, PackageMaker] = {}

    for entry in iter_json_list(path, ('releases', None)):
        with factory.begin() as pkg:
            name = entry['name']
            pkg.add_name(name, NameType.FRESHCODE_NAME)

            if not entry['version']:
                pkg.log('empty version', Logger.ERROR)
                continue

            pkg.set_version(entry['version'])
            pkg.add_homepages(entry.get('homepage'))
            # could use `or entry.get('description')` as fallback, but it's a long multiline text
            pkg.set_summary(entry.get('summary'))

            #pkg.add_maintainers(entry.get('submitter') + '@freshcode')  # unfiltered garbage
            #pkg.add_downloads(entry.get('download'))  # ignore for now, may contain download page urls instead of file urls
            pkg.add_licenses(entry.get('license'))

            # keep whichever release compares greatest for each project name
            if name not in latest or version_compare(pkg.version, latest[name].version) > 0:
                latest[name] = pkg

    yield from latest.values()
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Parse Cygwin package metadata, yielding one package per subpackage.

    For each package, both 'stable' and 'test' maturities are processed
    (test builds are flagged IGNORE); versions with a '0'-prefixed release
    component are treated as pre-releases.
    """
    for packagedata in iter_json_list(path, ('packages', None)):
        with factory.begin() as pkg:
            # packages with names starting with an underscore are
            # uninteresting as they contain cygwin-specific
            # installation helper scripts
            if packagedata['name'].startswith('_'):
                continue

            pkg.add_name(packagedata['name'], NameType.CYGWIN_PACKAGE_NAME)
            pkg.set_summary(packagedata['summary'])

            if 'maintainers' in packagedata:
                # maintainer names are mangled into pseudo-addresses:
                # dots dropped, spaces turned into dots, '@cygwin' appended
                pkg.add_maintainers([
                    m.replace('.', '').replace(' ', '.') + '@cygwin'
                    for m in packagedata['maintainers']
                ])

            for maturity in ['stable', 'test']:
                if maturity not in packagedata['versions']:
                    continue

                verpkg = pkg.clone()

                verpkg.set_flags(PackageFlags.IGNORE, maturity == 'test')  # XXX: weak_devel

                # take the last (presumably newest — TODO confirm ordering) version entry
                raw_version = packagedata['versions'][maturity][-1]
                (version, release) = raw_version.rsplit('-', 1)

                # If release is just '0', that means someone
                # forgot it counts from 1, but if it starts with
                # '0', the rest indicates the pre-release version
                # (as per Fedora/repodata.py)
                if release.startswith('0') and len(release) > 1:
                    match = re.fullmatch(
                        r'.*((?:alpha|beta|rc)(?:\.?[0-9]+)?|(?<![a-z])[ab]\.?[0-9]+)',
                        release)
                    if match:
                        # known pre-release schema: fold the pre-release tag into the version
                        version += '-' + match.group(1)
                        verpkg.set_flags(PackageFlags.DEVEL)
                    else:
                        # unknown pre-release schema
                        verpkg.set_flags(PackageFlags.IGNORE)

                verpkg.set_version(version)
                verpkg.set_rawversion(raw_version)

                for subpackagedata in packagedata['subpackages']:
                    # obsolete subpackages are not real packages
                    if '_obsolete' in subpackagedata['categories']:
                        continue

                    subpkg = verpkg.clone()
                    subpkg.add_name(subpackagedata['name'], NameType.CYGWIN_SUBPACKAGE_NAME)
                    subpkg.add_categories(subpackagedata['categories'])

                    yield subpkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield one package per PyPI project from its info section."""
    for project in iter_json_list(path, (None, )):
        with factory.begin() as pkg:
            info = project['info']

            pkg.add_name(info['name'], NameType.PYPI_NAME)
            pkg.set_version(info['version'])

            if info['home_page']:
                pkg.add_homepages(info['home_page'])
            pkg.add_homepages(info['project_url'])

            if info['author_email']:
                # author_email may hold a comma-separated list of addresses
                pkg.add_maintainers(email.strip() for email in info['author_email'].split(','))

            if info['summary']:
                pkg.set_summary(info['summary'])

            # downloads for the current version's release files
            pkg.add_downloads(item['url'] for item in project['releases'][info['version']])

            yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield packages from the AOSC package dump."""
    strip_epoch = VersionStripper().strip_left(':')

    for data in iter_json_list(path, ('packages', None)):
        with factory.begin() as pkg:
            pkg.add_name(data['name'], NameType.AOSC_NAME)
            pkg.add_name(data['directory'], NameType.AOSC_DIRECTORY)
            fullpath = '{}-{}/{}'.format(data['category'], data['section'], data['directory'])
            pkg.add_name(fullpath, NameType.AOSC_FULLPATH)

            pkg.set_extra_field('tree', data['tree'])
            pkg.set_extra_field('branch', data['branch'])

            if data['version'] is None:
                pkg.log('no version defined', Logger.ERROR)
                continue

            pkg.set_version(data['version'], strip_epoch)
            pkg.set_rawversion(data['full_version'])
            pkg.add_categories(data['pkg_section'], data['section'])
            pkg.set_summary(data['description'])

            # just a committer, doesn't seem suitable
            #pkg.add_maintainers(extract_maintainers(data['committer']))

            if pkg.version == '999':
                pkg.set_flags(PackageFlags.IGNORE)  # XXX: rolling? revisit

            yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield one package per PyPI project, recording links by type."""
    for project in iter_json_list(path, (None, )):
        with factory.begin() as pkg:
            info = project['info']

            pkg.add_name(info['name'], NameType.PYPI_NAME)
            pkg.set_version(info['version'])

            pkg.add_links(LinkType.PROJECT_HOMEPAGE, info['project_url'])

            # 'UNKNOWN' is a placeholder value, not a real homepage
            homepage = info.get('home_page')
            if homepage and homepage != 'UNKNOWN':
                pkg.add_links(LinkType.UPSTREAM_HOMEPAGE, homepage)

            if info['project_urls']:
                for label, url in info['project_urls'].items():
                    link_type = _url_types.get(label.lower())
                    if link_type and url != 'UNKNOWN':
                        pkg.add_links(link_type, url)

            for release in project['releases'][info['version']]:
                pkg.add_links(LinkType.PROJECT_DOWNLOAD, release['url'])

            if info['author_email']:
                # author_email may hold a comma-separated list of addresses
                pkg.add_maintainers(email.strip() for email in info['author_email'].split(','))

            if info['summary']:
                pkg.set_summary(info['summary'])

            yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield ravenports packages; only the first variant of each port is used."""
    for port in iter_json_list(path, ('ravenports', None)):
        with factory.begin() as pkg:
            pkg.add_name(port['namebase'], NameType.RAVENPORTS_NAMEBASE)
            pkg.set_version(port['version'])
            pkg.add_categories(port['keywords'])
            pkg.add_homepages(port.get('homepage'))
            pkg.add_downloads(port['distfile'])

            first_variant = port['variants'][0]
            pkg.set_summary(first_variant['sdesc'])

            pkg.add_maintainers(contact.get('email') for contact in port.get('contacts', []))  # type: ignore

            pkg.set_extra_field('bucket', port['bucket'])
            pkg.set_extra_field('variant', first_variant['label'])

            if 'cpe' in port:
                cpe_keys = ['vendor', 'product', 'edition', 'lang', 'sw_edition', 'target_sw', 'target_hw', 'other']
                pkg.add_cpe(**{key: port['cpe'].get(key) for key in cpe_keys})

            yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield one package per formula, using the stable version."""
    for data in iter_json_list(path, (None, )):
        with factory.begin() as pkg:
            # strip versioned-formula suffix, e.g. 'foo@1.2' -> 'foo'
            unversioned_name = data['name'].split('@', 1)[0]
            pkg.set_name(unversioned_name)
            pkg.set_version(data['versions']['stable'])
            pkg.set_summary(data['desc'])
            pkg.add_homepages(data['homepage'])
            yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield YACP packages, collecting subpackage names as binary names."""
    for data in iter_json_list(path, ('packages', None)):
        with factory.begin() as pkg:
            pkg.add_name(data['name'], NameType.YACP_NAME)
            pkg.set_version(data['version'])
            pkg.add_categories(data['category'])
            pkg.set_summary(data['summary'])
            pkg.add_homepages(data['homepage'])
            pkg.add_binnames([sub['name'] for sub in data['subpackages']])
            yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield one package per cask entry, keyed by its token."""
    for caskdata in iter_json_list(path, (None, )):
        with factory.begin(caskdata['token']) as pkg:
            pkg.add_name(caskdata['token'], NameType.GENERIC_PKGNAME)
            # versions may be comma-separated; only the first component is meaningful
            first_version = caskdata['version'].split(',')[0]
            pkg.set_version(first_version)
            pkg.add_homepages(caskdata['homepage'])
            pkg.add_downloads(caskdata['url'])
            yield pkg
def iter_parse(self, path: str, factory: PackageFactory) -> Iterable[PackageMaker]:
    """Yield one package per homebrew cask, keyed by its token."""
    for caskdata in iter_json_list(path, (None,)):
        with factory.begin(caskdata['token']) as pkg:
            pkg.add_name(caskdata['token'], NameType.HOMEBREW_CASK_TOKEN)
            pkg.add_name(caskdata['name'][0], NameType.HOMEBREW_CASK_FIRST_NAME)
            # versions may be comma-separated; only the first component is meaningful
            first_version = caskdata['version'].split(',')[0]
            pkg.set_version(first_version)
            pkg.add_homepages(caskdata['homepage'])
            pkg.add_downloads(caskdata['url'])
            yield pkg
def iter_parse(self, path: str, factory: PackageFactory) -> Iterable[PackageMaker]:
    """Yield one package per homebrew formula, registering all name forms."""
    for formula in iter_json_list(path, (None,)):
        with factory.begin() as pkg:
            pkg.add_name(formula['name'], NameType.HOMEBREW_NAME)
            # name with the versioned-formula suffix ('@...') stripped
            unversioned = formula['name'].split('@', 1)[0]
            pkg.add_name(unversioned, NameType.HOMEBREW_NAME_PRE_AT)
            pkg.add_name(formula['oldname'], NameType.HOMEBREW_OLDNAME)
            pkg.add_name(formula['full_name'], NameType.HOMEBREW_FULL_NAME)
            pkg.set_version(formula['versions']['stable'])
            pkg.set_summary(formula['desc'])
            pkg.add_homepages(formula['homepage'])
            yield pkg
def iter_parse(self, path: str, factory: PackageFactory) -> Iterable[PackageMaker]:
    """Parse a PyPI project dump, yielding one package per released version.

    Pre-release versions (per PEP 440) are flagged DEVEL; versions all of
    whose files were yanked are flagged RECALLED.
    """
    for pkgdata in iter_json_list(path, (None, )):
        with factory.begin() as pkg:
            info = pkgdata['info']

            pkg.add_name(info['name'], NameType.PYPI_NAME)
            pkg.add_links(LinkType.PROJECT_HOMEPAGE, info['project_url'])

            # 'UNKNOWN' is a placeholder value, not a real homepage
            if (url := info.get('home_page')) and url != 'UNKNOWN':
                pkg.add_links(LinkType.UPSTREAM_HOMEPAGE, url)

            if info['author_email']:
                for maintainer in map(str.strip, info['author_email'].split(',')):
                    # reject free-form entries which are not plain addresses
                    if ' ' in maintainer or '"' in maintainer or '<' in maintainer or "'" in maintainer or '@' not in maintainer:
                        # fix: was a plain string, so the literal text
                        # '{maintainer}' was logged instead of the value
                        pkg.log(f'Skipping garbage maintainer email "{maintainer}"', severity=Logger.WARNING)
                    else:
                        pkg.add_maintainers(maintainer)

            if info['summary']:
                pkg.set_summary(info['summary'])

            if info['project_urls']:
                for key, url in info['project_urls'].items():
                    if (link_type := _url_types.get(key.lower())) and url != 'UNKNOWN':
                        pkg.add_links(link_type, url)

            # fan out: one package per released version
            for version, releasedatas in pkgdata['releases'].items():
                verpkg = pkg.clone()
                verpkg.set_version(version)

                if _pep440_is_prerelease(version):
                    verpkg.set_flags(PackageFlags.DEVEL)

                good_items = 0
                yanked_items = 0

                for releasedata in releasedatas:
                    if releasedata['yanked']:
                        yanked_items += 1
                    else:
                        good_items += 1
                        verpkg.add_links(LinkType.PROJECT_DOWNLOAD, releasedata['url'])

                # every file yanked -> the release was effectively recalled
                if yanked_items > 0 and good_items == 0:
                    verpkg.set_flags(PackageFlags.RECALLED)

                yield verpkg
def iter_parse(self, path: str, factory: PackageFactory) -> Iterable[PackageMaker]:
    """Yield Ataraxia packages."""
    for data in iter_json_list(path, (None,)):
        with factory.begin() as pkg:
            pkg.add_name(data['name'], NameType.ATARAXIA_NAME)
            pkg.set_version(data['version'])
            pkg.set_summary(data['summary'])
            pkg.add_categories(data['category'])
            pkg.add_maintainers(extract_maintainers(data['maintainer']))
            pkg.add_homepages(data['homepage'])
            # download field is a space-separated url list
            pkg.add_downloads(data['download'].split(' '))
            yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield packages from the repository index."""
    for entry in iter_json_list(path, ('packages', None)):
        with factory.begin() as pkg:
            pkg.add_name(entry['name'], NameType.GENERIC_PKGNAME)
            pkg.set_version(entry['ver'])
            pkg.set_summary(entry['descs'])
            pkg.set_arch(entry['arch'])
            pkg.set_extra_field('location', entry['loc'])
            yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield one package per entry, deriving a name from its human-readable title."""
    for caskdata in iter_json_list(path, (None, )):
        with factory.begin() as pkg:
            # XXX: tries to normalize project name from human readable form; not suitable for production
            normalized = caskdata['name'][0].replace('.', '').replace(' ', '-')
            pkg.set_name(normalized)
            # XXX: comma-separated versions are encountered often, wtf are these, need to handle
            pkg.set_version(caskdata['version'])
            pkg.add_homepages(caskdata['homepage'])
            pkg.add_downloads(caskdata['url'])
            yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield CRUX ports."""
    for portdata in iter_json_list(path, ('ports', None)):
        with factory.begin() as pkg:
            pkg.add_name(portdata['name'], NameType.CRUX_NAME)
            pkg.set_summary(portdata['description'])
            pkg.set_version(portdata['version'])

            maintainer = portdata['maintainer']
            if maintainer == '':
                pkg.log('Missing maintainer for port "{}"'.format(portdata['name']), severity=Logger.ERROR)
            else:
                pkg.add_maintainers(extract_maintainers(maintainer))

            pkg.add_homepages(portdata['url'])
            pkg.set_subrepo(portdata['repository'])
            yield pkg
def iter_parse(self, path: str, factory: PackageFactory) -> Iterable[PackageMaker]:
    """Yield Salix packages."""
    for entry in iter_json_list(path, ('packages', None)):
        with factory.begin() as pkg:
            pkg.add_name(entry['name'], NameType.SALIX_NAME)
            pkg.set_version(entry['ver'])
            pkg.set_summary(entry['descs'])
            pkg.set_arch(entry['arch'])
            # May be potentially useful for packagelinks, but not used now
            # as there's no way to generate package-specific link to
            # https://packages.salixos.org/#!
            #pkg.set_extra_field('location', entry['loc'])
            yield pkg
def iter_parse(self, path: str, factory: PackageFactory) -> Iterable[PackageMaker]:
    """Yield Exherbo packages; scm versions are flagged ROLLING."""
    for data in iter_json_list(path, (None,)):
        with factory.begin() as pkg:
            pkg.add_name(data['name'], NameType.EXHERBO_NAME)
            full_name = data['category'] + '/' + data['name']
            pkg.add_name(full_name, NameType.EXHERBO_FULL_NAME)
            pkg.set_version(data['version'], _normalize_version)
            pkg.add_categories(data['category'])
            # homepage/downloads are whitespace-separated url lists
            pkg.add_homepages(data['homepage'].split())
            pkg.add_downloads(data['downloads'].split())
            pkg.set_subrepo(data['repository'])
            pkg.set_summary(data['summary'])

            # XXX: to rules?
            if pkg.version == 'scm' or pkg.version.endswith('-scm'):
                pkg.set_flags(PackageFlags.ROLLING)

            yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield MacPorts ports."""
    strip_revision = VersionStripper().strip_right('+')

    for portdata in iter_json_list(path, ('ports', None)):
        with factory.begin() as pkg:
            # drop obsolete ports (see #235)
            if 'replaced_by' in portdata:
                continue

            portname = portdata['portdir'].split('/')[1]

            pkg.add_name(portdata['name'], NameType.MACPORTS_NAME)
            pkg.add_name(portdata['portdir'], NameType.MACPORTS_PORTDIR)
            pkg.add_name(portname, NameType.MACPORTS_PORTNAME)

            pkg.set_version(portdata['version'], strip_revision)
            pkg.set_summary(portdata.get('description'))
            pkg.add_homepages(portdata.get('homepage'))
            pkg.add_categories(portdata.get('categories'))
            pkg.add_licenses(portdata['license'])  # XXX: properly handle braces

            for maintainerdata in portdata['maintainers']:
                # macports decided not to publish raw maintainer emails
                #if 'email' in maintainerdata:
                #    pkg.add_maintainers(maintainerdata['email']['name'] + '@' + maintainerdata['email']['domain'])

                # provide fallback with macports accounts
                if 'email' in maintainerdata and maintainerdata['email']['domain'] == 'macports.org':
                    pkg.add_maintainers(maintainerdata['email']['name'] + '@macports')

                if 'github' in maintainerdata:
                    pkg.add_maintainers(maintainerdata['github'] + '@github')

            if not portdata['maintainers']:
                pkg.add_maintainers('*****@*****.**')

            # If portname is used as name
            if portdata['name'] == portname + '-devel':
                pkg.set_flags(PackageFlags.IGNORE)

            yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield ravenports packages; only the first variant of each port is used."""
    for port in iter_json_list(path, ('ravenports', None)):
        with factory.begin() as pkg:
            pkg.set_name(port['namebase'])
            pkg.set_version(port['version'])
            pkg.add_categories(port['keywords'])
            pkg.add_homepages(port.get('homepage'))
            pkg.add_downloads(port['distfile'])

            first_variant = port['variants'][0]
            pkg.set_summary(first_variant['sdesc'])

            pkg.add_maintainers(contact.get('email') for contact in port.get('contacts', []))  # type: ignore

            pkg.set_extra_field('bucket', port['bucket'])
            pkg.set_extra_field('variant', first_variant['label'])

            yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield ravenports packages, with CPE data and an anti-tampering check."""
    for port in iter_json_list(path, ('ravenports', None)):
        with factory.begin() as pkg:
            pkg.add_name(port['namebase'], NameType.RAVENPORTS_NAMEBASE)
            pkg.set_version(port['version'])
            pkg.add_categories(port['keywords'])
            pkg.add_homepages(port.get('homepage'))
            pkg.add_downloads(port['distfile'])

            # TODO: process all variants instead of the first one
            first_variant = port['variants'][0]
            pkg.set_summary(first_variant['sdesc'])

            pkg.add_maintainers(contact.get('email') for contact in port.get('contacts', []))  # type: ignore

            pkg.set_extra_field('bucket', port['bucket'])
            pkg.set_extra_field('variant', first_variant['label'])

            if 'cpe' in port:
                cpe_keys = ['vendor', 'product', 'edition', 'lang', 'sw_edition', 'target_sw', 'target_hw', 'other']
                pkg.add_cpe(**{key: port['cpe'].get(key) for key in cpe_keys})

            # sanity check: these specific ports must carry homepage data
            if port['namebase'] in ('xblas', 'tree', 'norm', 'lapack') and 'homepage' not in port:
                raise RuntimeError(
                    'Detected faking information for Repology, refusing to continue'
                )

            yield pkg
def iter_parse(self, path: str, factory: PackageFactory) -> Iterable[PackageMaker]:
    """Yield CRUX ports, including source downloads."""
    for port in iter_json_list(path, ('ports', None)):
        with factory.begin() as pkg:
            pkg.add_name(port['name'], NameType.CRUX_NAME)
            pkg.set_summary(port['description'])
            pkg.set_version(port['version'])

            maintainer = port['maintainer']
            if maintainer == '':
                pkg.log('Missing maintainer for port "{}"'.format(port['name']), severity=Logger.ERROR)
            else:
                pkg.add_maintainers(extract_maintainers(maintainer))

            pkg.add_homepages(port['url'])
            pkg.set_subrepo(port['repository'])
            pkg.add_downloads(port['sources'])

            # unexpanded variables indicate broken upstream data
            if '${' in port['name']:
                raise RuntimeError(f'bad port name {port["name"]}')

            yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield one package per subpackage, cloned from its parent item."""
    for item in iter_json_list(path, ('items', None)):
        with factory.begin() as basepkg:
            basepkg.set_basename(item['meta'])
            basepkg.set_version(item['ver'])
            basepkg.add_maintainers(item['maintainer'])
            basepkg.add_licenses(item['license'])
            basepkg.add_homepages(item['home'])
            basepkg.add_downloads(item.get('src'))

            if basepkg.version == 'latest':
                basepkg.set_flags(PackageFlags.ROLLING)

            for subitem in item['pkgs']:
                subpkg = basepkg.clone()
                subpkg.add_categories(subitem['cat'])
                subpkg.set_summary(subitem['desc'])
                subpkg.set_name(subitem['name'])
                # subpackage may override the parent version
                subpkg.set_version(subitem.get('ver'))
                yield subpkg
def iter_parse(self, path: str, factory: PackageFactory) -> Iterable[PackageMaker]:
    """Yield Arch-style packages; version is stripped of pkgrel, epoch and suffix."""
    strip_version = VersionStripper().strip_right_greedy('-').strip_left(':').strip_right_greedy('+')

    for data in iter_json_list(path, (None,)):
        with factory.begin() as pkg:
            pkg.add_name(data['Name'], NameType.ARCH_NAME)
            pkg.set_version(data['Version'], strip_version)
            pkg.set_summary(data['Description'])
            pkg.add_homepages(data['URL'])
            pkg.add_licenses(data.get('License'))

            if data.get('Maintainer'):
                pkg.add_maintainers(extract_maintainers(data['Maintainer'] + '@' + self._maintainer_host))

            if data.get('PackageBase'):
                pkg.add_name(data['PackageBase'], NameType.ARCH_BASENAME)

            # XXX: enable when we support multiple categories
            #if data.get('Keywords'):
            #    pkg.add_categories(data['Keywords'])

            yield pkg
def iter_parse(self, path: str, factory: PackageFactory) -> Iterable[PackageMaker]:
    """Yield source RPM packages with their binary names and link set."""
    for data in iter_json_list(path, ('packages', None)):
        with factory.begin() as pkg:
            pkg.add_name(data['name'], NameType.SRCRPM_NAME)
            pkg.add_binnames(binary['name'] for binary in data['binaries'])

            version, flags = parse_rpm_version(self._vertags, data['version'], data['release'])
            pkg.set_version(version)
            pkg.set_rawversion(nevra_construct(None, data['epoch'], data['version'], data['release']))
            pkg.set_flags(flags)

            pkg.add_categories(data['category'])
            pkg.set_summary(data['summary'])
            pkg.add_licenses(data['license'])
            pkg.add_maintainers(data['packager'])

            pkg.add_links(LinkType.UPSTREAM_HOMEPAGE, data['url'])
            pkg.add_links(LinkType.PACKAGE_HOMEPAGE, data['homepage'])
            pkg.add_links(LinkType.PACKAGE_RECIPE, data['recipe'])
            pkg.add_links(LinkType.PACKAGE_RECIPE_RAW, data['recipe_raw'])
            pkg.add_links(LinkType.PACKAGE_ISSUE_TRACKER, data['bugzilla'])

            # TODO: parse CPE data when available
            if 'CPE' in data:
                pass

            yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Parse Guix package metadata.

    Versions which look like snapshots (trailing commit/revision fragments)
    are flagged IGNORE; a git commit fragment in the version not matching the
    documented Guix snapshot pattern is flagged INCORRECT.
    """
    for pkgdata in iter_json_list(path, (None, )):
        with factory.begin() as pkg:
            pkg.add_name(pkgdata['name'], NameType.GUIX_NAME)
            pkg.set_version(pkgdata['version'])
            pkg.set_summary(pkgdata['synopsis'])
            pkg.add_homepages(pkgdata.get('homepage'))

            # NOTE(review): this rebinds the `path` parameter; harmless here
            # since the argument was already consumed by iter_json_list, but
            # worth renaming
            path, lineno = pkgdata['location'].split(':')
            pkg.set_extra_field('loc_path', path)
            pkg.set_extra_field('loc_line', lineno)

            pkg.add_cpe(product=pkgdata.get('cpe_name'))

            if 'source' in pkgdata:
                source = pkgdata['source']

                if source['type'] == 'url':
                    pkg.add_downloads(source['urls'])
                    if re.fullmatch('.*-[0-9]+\\.[0-9a-f]{4,}', pkgdata['version']):
                        # snapshot pattern with plain url
                        pkg.set_flags(PackageFlags.IGNORE)  # e.g. snapshot
                elif source['type'] == 'svn':
                    pkg.add_downloads(source['svn_url'])
                    if str(source['svn_revision']) in re.split('[._-]', pkgdata['version']):
                        # svn revision in version
                        pkg.set_flags(PackageFlags.IGNORE)  # e.g. snapshot
                elif source['type'] == 'git':
                    pkg.add_downloads(source['git_url'])
                    # a hex ref which is not an 8-digit date is taken for a commit hash
                    if re.fullmatch('[0-9a-f]{7,}', source['git_ref']) and not re.fullmatch('[0-9]{8}', source['git_ref']):
                        # ref is a commit hash, not a tag
                        if len(source['git_ref']) != 40:
                            pkg.log('treating git_ref as trimmed commit hash: {}'.format(source['git_ref']), Logger.WARNING)
                        match = re.fullmatch('(.*)-[0-9]+\\.([0-9a-f]{7,})', pkgdata['version'])
                        if match is not None and source['git_ref'].startswith(match.group(2)):
                            # commit hash in version, allowed pattern documented in
                            # https://guix.gnu.org/manual/en/html_node/Version-Numbers.html
                            pkg.set_flags(PackageFlags.IGNORE)  # e.g. snapshot
                        elif source['git_ref'][:7] in pkgdata['version']:
                            # git commit in version and not a known pattern
                            pkg.set_flags(PackageFlags.INCORRECT)
                else:  # type == 'none'
                    if re.fullmatch('.*-[0-9]+\\.[0-9a-f]{4,}', pkgdata['version']):
                        # snapshot pattern anyway
                        pkg.set_flags(PackageFlags.IGNORE)  # e.g. snapshot

            yield pkg