def test_name(self):
    """A display name, quoted or not, is stripped leaving only the address."""
    for source in ('Dmitry Marakasov <*****@*****.**>',
                   '"Dmitry Marakasov" <*****@*****.**>'):
        self.assertEqual(extract_maintainers(source), ['*****@*****.**'])
def test_name_comma(self):
    """A comma inside a display name must not split it into two addresses."""
    for source in ('Marakasov, Dmitry <*****@*****.**>',
                   '"Marakasov, Dmitry" <*****@*****.**>'):
        self.assertEqual(extract_maintainers(source), ['*****@*****.**'])
def test_empty(self):
    """Inputs with no recognizable email address yield an empty list."""
    for source in ('somecrap',
                   '',
                   'http://repology.org',
                   'Repology <http://repology.org>',
                   'nobody <really>'):
        self.assertEqual(extract_maintainers(source), [])
def iter_parse(self, path: str, factory: PackageFactory) -> Iterable[PackageMaker]:
    """Yield a PackageMaker for each entry of a Debian-style Sources index.

    Args:
        path: path to the Sources index consumed by _iter_packages.
        factory: creates per-package logging contexts.
    """
    for pkgdata in _iter_packages(path):
        with factory.begin(pkgdata['Package']) as pkg:
            # Debian versions need fixing up (epoch/revision quirks); keep
            # the raw version and any flags derived from it as well
            fixed_version, flags = parse_debian_version(pkgdata['Version'])
            pkg.set_version(fixed_version)
            pkg.set_rawversion(pkgdata['Version'])
            pkg.set_flags(flags)
            # maintainer addresses may appear in both optional fields
            pkg.add_maintainers(
                extract_maintainers(pkgdata.get('Maintainer', '')))
            pkg.add_maintainers(
                extract_maintainers(pkgdata.get('Uploaders', '')))
            pkg.add_categories(pkgdata.get('Section'))
            pkg.add_homepages(pkgdata.get('Homepage'))
            # repository-specific extra fields handled by the subclass
            self._extra_handling(pkg, pkgdata)
            # record the VCS link as package sources, but only when an
            # allowlist regex is configured AND the url matches it
            # NOTE(review): with no allowlist configured the link is always
            # dropped — confirm this is intended
            if (url := _extract_vcs_link(pkgdata)) is not None:
                if self._allowed_vcs_urls_re is not None and self._allowed_vcs_urls_re.match(
                        url):
                    pkg.add_links(LinkType.PACKAGE_SOURCES, url)
            yield pkg
def test_lists(self):
    """Multiple addresses may be separated by commas and/or spaces."""
    expected = ['*****@*****.**', '*****@*****.**']
    for source in ('[email protected],[email protected]',
                   '[email protected], [email protected]',
                   '[email protected] [email protected]'):
        self.assertEqual(extract_maintainers(source), expected)
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield a package for each entry of a Debian-style Sources index."""
    for pkgdata in _iter_packages(path):
        with factory.begin(pkgdata['Package']) as pkg:
            pkg.set_version(pkgdata['Version'], _normalize_version)

            # maintainer addresses may appear in either optional field
            for field in ('Maintainer', 'Uploaders'):
                pkg.add_maintainers(extract_maintainers(pkgdata.get(field, '')))

            pkg.add_categories(pkgdata.get('Section'))
            pkg.add_homepages(pkgdata.get('Homepage'))

            # repository-specific extra fields handled by the subclass
            self._extra_handling(pkg, pkgdata)

            yield pkg
def iter_parse(self, path: str, factory: PackageFactory) -> Iterable[PackageMaker]:
    """Walk a KISS-style repository tree, yielding a package per 'version' file.

    Each package directory contains a 'version' file, a 'sources' file and
    optionally 'meta' and 'patches' entries.
    """
    for version_path_abs in walk_tree(path, name='version'):
        version_path_rel = os.path.relpath(version_path_abs, path)
        package_path_abs = os.path.dirname(version_path_abs)
        package_path_rel = os.path.relpath(package_path_abs, path)
        # os.path.split yields (head, tail): tail is the package name,
        # head is everything before it (used below as subrepo)
        package_path_rel_comps = os.path.split(package_path_rel)
        sources_path_abs = os.path.join(package_path_abs, 'sources')
        meta_path_abs = os.path.join(package_path_abs, 'meta')
        patches_path_abs = os.path.join(package_path_abs, 'patches')
        with factory.begin(package_path_rel) as pkg:
            pkg.add_name(package_path_rel_comps[-1], NameType.KISS_NAME)
            pkg.set_version(read_version(version_path_abs))
            if not os.path.exists(sources_path_abs):
                pkg.log('skipping sourceless package', Logger.ERROR)
                continue
            pkg.add_downloads(iter_sources(sources_path_abs))
            pkg.set_extra_field('path', package_path_rel)
            pkg.set_subrepo(package_path_rel_comps[0])
            if self._maintainer_from_git:
                # use the author of the last commit which touched the
                # version file as the maintainer
                command = [
                    'git', 'log', '-1', '--format=tformat:%ae',
                    version_path_rel
                ]
                with subprocess.Popen(command,
                                      stdout=subprocess.PIPE,
                                      encoding='utf-8',
                                      errors='ignore',
                                      cwd=path) as git:
                    lastauthor, _ = git.communicate()
                    pkg.add_maintainers(extract_maintainers(lastauthor))
            if self._use_meta and os.path.exists(meta_path_abs):
                meta = read_carbs_meta(meta_path_abs)
                pkg.set_summary(meta.get('description'))
                pkg.add_licenses(meta.get('license', '').split(','))
                pkg.add_maintainers(
                    extract_maintainers(meta.get('maintainer')))
            add_patch_files(pkg, patches_path_abs)
            yield pkg
def iter_parse(self, path, factory, transformer):
    """Parse a void-style plist package index and yield packages."""
    strip_revision = VersionStripper().strip_right_greedy('_')

    with open(os.path.join(path, 'index.plist'), 'rb') as plistfile:
        index = plistlib.load(plistfile, fmt=plistlib.FMT_XML)

    for pkgname, props in index.items():
        pkg = factory.begin(pkgname)

        if 'source-revisions' not in props:
            pkg.log('cannot parse, no source-revisions field', severity=Logger.ERROR)
            continue
        pkg.set_basename(props['source-revisions'].split(':', 1)[0])

        # pkgver is '<name>-<version>'; refuse anything else
        if not props['pkgver'].startswith(pkgname + '-'):
            pkg.log('pkgver is expected to start with package name', severity=Logger.ERROR)
            continue

        pkg.set_name(pkgname)
        pkg.set_version(props['pkgver'][len(pkgname) + 1:], strip_revision)
        pkg.add_maintainers(extract_maintainers(props.get('maintainer', '')))
        pkg.set_summary(props['short_desc'])
        pkg.add_homepages(props['homepage'])
        pkg.add_licenses(props['license'].split(','))

        yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Parse a termux packages json dump and yield packages."""
    strip_epoch = VersionStripper().strip_left_greedy(':')

    with open(path, 'r', encoding='utf-8') as jsonfile:
        packages = json.load(jsonfile)

    for packagedata in packages:
        pkg = factory.begin()

        pkg.set_name(packagedata['name'])
        pkg.set_version(packagedata['version'], strip_epoch)
        pkg.set_summary(packagedata['description'])
        pkg.add_homepages(packagedata['homepage'])
        pkg.add_downloads(packagedata.get('srcurl'))

        maintainer = packagedata['maintainer']
        pkg.add_maintainers(extract_maintainers(maintainer))

        # maintainer may also be in '@username' form
        if (match := re.search('(?:^| )@([^ ]+)$', maintainer)) is not None:
            pkg.add_maintainers(match.group(1).lower() + '@termux')

        yield pkg
def iter_parse(self, path, factory):
    """Parse a pipe-separated package index file and yield packages."""
    with open(path, encoding='utf-8') as indexfile:
        for line in indexfile:
            pkg = factory.begin()

            fields = line.strip().split('|')
            if len(fields) < 7:  # varies
                pkg.log('skipping, unexpected number of fields {}'.format(len(fields)), severity=Logger.ERROR)
                continue

            name = fields[0]

            # cut away string suffixes which come after version
            if (suffix_match := re.match('(.*?)(-[a-z_]+[0-9]*)+$', name)) is not None:
                name = suffix_match.group(1)

            pkg.set_name_and_version(name, _normalize_version)
            pkg.set_summary(fields[3])
            pkg.add_maintainers(extract_maintainers(fields[5]))
            pkg.add_categories(fields[6].split())

            # origin may carry a ',flavor' suffix; strip it
            origin = fields[1].rsplit(',', 1)[0]
            pkg.set_origin(origin)
            pkg.set_extra_field('portname', origin.split('/')[1])

            yield pkg
def iter_parse(self, path, factory, transformer):
    """Parse a pkgsrc-style INDEX (12 pipe-separated fields per line)."""
    strip_revision = VersionStripper().strip_right('nb')

    with open(path, encoding='utf-8') as indexfile:
        for line in indexfile:
            pkg = factory.begin()

            cells = line.strip().split('|')

            if len(cells) != 12:
                pkg.log('skipping, unexpected number of fields {}'.format(len(cells)), severity=Logger.ERROR)
                continue
            if not cells[0]:
                pkg.log('skipping, empty first field', severity=Logger.ERROR)
                continue

            pkg.set_name_and_version(cells[0], strip_revision)
            pkg.set_summary(cells[3])

            # sometimes OWNER variable is used in which case
            # there's no MAINTAINER OWNER doesn't get to INDEX
            pkg.add_maintainers(extract_maintainers(cells[5]))
            pkg.add_categories(cells[6].split())
            pkg.add_homepages(cells[11])

            pkg.set_extra_field('portname', cells[1].split('/')[-1])
            pkg.set_origin(cells[1])

            yield pkg
def iter_parse(self, path, factory):
    """Parse an Alpine APKINDEX file and yield packages.

    The file is a sequence of single-letter 'X:value' records separated by
    blank lines; each blank line flushes one accumulated package record.
    """
    with open(os.path.join(path, 'APKINDEX'), 'r', encoding='utf-8') as apkindex:
        state = {}
        for line in apkindex:
            line = line.strip()
            if line:
                # key is the single letter before ':', value comes after it
                state[line[0]] = line[2:].strip()
                continue
            # empty line, we can flush our state
            # only emit the main package (name P equals origin o),
            # skipping subpackages
            if state and state['P'] == state['o']:
                pkg = factory.begin()
                pkg.name = state['P']
                pkg.version, pkg.origversion = SanitizeVersion(state['V'])
                pkg.comment = state['T']
                pkg.homepage = state[
                    'U']  # XXX: switch to homepages, split
                pkg.licenses = [state['L']]
                # maintainer record is optional
                if 'm' in state:
                    pkg.maintainers = extract_maintainers(state['m'])
                yield pkg
            state = {}
def iter_parse(self, path, factory):
    """Parse a FreeBSD-style INDEX file (13 pipe-separated fields)."""
    with open(path, encoding='utf-8') as indexfile:
        for line in indexfile:
            fields = line.strip().split('|')

            if len(fields) != 13:
                factory.log('package {} skipped, incorrect number of fields in INDEX'.format(fields[0]), severity=Logger.ERROR)
                continue

            pkg = factory.begin()

            pkg.name, version = fields[0].rsplit('-', 1)
            pkg.version, pkg.origversion = SanitizeVersion(version)
            pkg.comment = fields[3]
            pkg.maintainers = extract_maintainers(fields[5])
            pkg.category = fields[6].split(' ')[0]

            if fields[9]:
                pkg.homepage = fields[9]

            # port location inside the tree gives portname and origin
            # (named to avoid shadowing the 'path' argument)
            origin_parts = fields[1].split('/')
            pkg.extrafields['portname'] = origin_parts[-1]
            pkg.extrafields['origin'] = '/'.join(origin_parts[-2:])

            yield pkg
def Parse(self, path):
    """Parse a termux packages json dump into a list of Package objects.

    Returns:
        list of Package, one per entry of the json array.
    """
    result = []

    with open(path, 'r', encoding='utf-8') as jsonfile:
        for packagedata in json.load(jsonfile):
            pkg = Package()

            pkg.name = packagedata['name']
            pkg.version, pkg.origversion = SanitizeVersion(
                packagedata['version'])
            pkg.comment = packagedata['description']
            pkg.homepage = packagedata['homepage']

            if 'srcurl' in packagedata:
                pkg.downloads = [packagedata['srcurl']]

            # maintainer may be an email or an '@username' handle; match
            # the handle at string start too (the previous pattern
            # ' @([^ ]+)$' missed a bare '@username' value — this matches
            # the sibling iter_parse implementation)
            match = re.search('(?:^| )@([^ ]+)$', packagedata['maintainer'])
            if match:
                pkg.maintainers = [match.group(1).lower() + '@termux']
            else:
                pkg.maintainers = extract_maintainers(
                    packagedata['maintainer'])

            result.append(pkg)

    return result
def iter_parse(self, path, factory):
    """Parse an rpm header list file and yield packages."""
    wanted_keys = ['name', 'version', 'release', 'packager', 'group', 'summary']

    for header in rpm.readHeaderListFromFile(path):
        # decode raw header values using the repository's encoding
        fields = {}
        for key in wanted_keys:
            raw = header[key]
            fields[key] = str(raw, self.encoding) if raw is not None else None

        pkg = factory.begin()

        pkg.set_name(fields['name'])
        pkg.set_version(fields['version'])  # XXX: handle release

        pkg.set_origversion(
            nevra_construct(None, header['epoch'], fields['version'], fields['release']))

        if fields['packager']:
            # XXX: may have multiple maintainers
            pkg.add_maintainers(extract_maintainers(fields['packager']))

        pkg.add_categories(fields['group'])
        pkg.set_summary(fields['summary'])

        yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Parse an Alpine APKINDEX file and yield packages.

    The file is a sequence of single-letter 'X:value' records separated by
    blank lines; each blank line flushes one accumulated package record.
    """
    with open(os.path.join(path, 'APKINDEX'), 'r', encoding='utf-8') as apkindex:
        state = {}
        for line in apkindex:
            line = line.strip()
            if line:
                # key is the single letter before ':', value comes after it
                state[line[0]] = line[2:].strip()
                continue
            # empty line, we can flush our state
            # only emit the main package (name P equals origin o),
            # skipping subpackages
            if state and state['P'] == state['o']:
                pkg = factory.begin()
                pkg.set_name(state['P'])
                # normalize_version is presumably defined at module level
                # — not visible in this chunk
                pkg.set_version(state['V'], normalize_version)
                pkg.set_summary(state['T'])
                pkg.add_homepages(state['U'])  # XXX: split?
                pkg.add_licenses(state['L'])
                pkg.set_arch(state['A'])
                # maintainer record 'm' is optional
                pkg.add_maintainers(extract_maintainers(state.get('m')))
                yield pkg
            state = {}
def iter_parse(
    self, path: str, factory: PackageFactory, transformer: PackageTransformer
) -> Generator[PackageMaker, None, None]:
    """Parse a json package list and yield packages."""
    strip_epoch = VersionStripper().strip_left(':')

    with open(path, 'r', encoding='utf-8') as jsonfile:
        parsed = json.load(jsonfile)

    for package in parsed['packages']:
        pkg = factory.begin()

        pkg.set_name(package['name'])
        pkg.set_version(package['version'], strip_epoch)

        # drop entries which fail basic validation
        if not pkg.check_sanity(verbose=True):
            continue

        pkg.set_rawversion(package['full_version'])
        pkg.add_categories(package['pkg_section'], package['section'])
        pkg.set_summary(package['description'])
        pkg.add_maintainers(extract_maintainers(package['committer']))

        if pkg.version == '999':
            pkg.set_flags(PackageFlags.ignore)  # XXX: rolling? revisit

        yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Parse the OpenBSD sqlports database and yield packages.

    Derives package names from fullpkgpath/fullpkgname as explained in the
    comments below.
    """
    for row in _iter_sqlports(os.path.join(path + self._path_to_database)):
        with factory.begin(row['fullpkgpath']) as pkg:
            # there are a lot of potential name sources in sqlports, namely:
            # fullpkgpath, fullpkgname, pkgname, pkgspec, pkgstem, pkgpath (comes from Paths table)
            # * pkgname may be NULL, so ignoring it
            # * pkgpath is the same as fullpkgpath with flavors stripped, so no need to join with Paths
            # * pkgspec may be complex for our purposes, for it may contain version ranges in form of python-bsddb->=2.7,<2.8
            # * fullpkgname may be split into stem, version and flavors according to https://man.openbsd.org/packages-specs
            # * pkgstem is usually equal to the stem got from fullpkgname, but there are currently 12 exceptions
            # like php-7.1, php-7.2, php-7.3, polkit-qt-, polkit-qt5-, so it's more reliable to get stem from fullpkgname
            #
            # As a result, we're basically left with fullpkgpath (which is path in ports tree + flavors)
            # and fullpkgname (which is package name aka stem + version + flavors)
            pkgpath, *flavors = row['fullpkgpath'].split(',')
            # strip trailing non-numeric dash components (flavors) from
            # fullpkgname, then split off the version
            stem, version = re.sub('(-[^0-9][^-]*)+$', '', row['fullpkgname']).rsplit('-', 1)
            stripped_stem = _strip_flavors_from_stem(stem, flavors)
            pkg.add_name(stem, NameType.OPENBSD_STEM)
            pkg.add_name(pkgpath, NameType.OPENBSD_PKGPATH)
            pkg.add_name(stripped_stem, NameType.OPENBSD_STRIPPED_STEM)
            pkg.add_flavors(flavors)
            pkg.set_version(version, _normalize_version)
            pkg.set_summary(row['comment'])
            pkg.add_homepages(row['homepage'])
            # synthesize a GitHub homepage when the port records one
            if row['gh_account'] and row['gh_project']:
                pkg.add_homepages('https://github.com/{}/{}'.format(row['gh_account'], row['gh_project']))
            pkg.add_maintainers(extract_maintainers(row['maintainer']))
            pkg.add_categories(row['categories'].split())
            pkg.add_downloads(_iter_distfiles(row))
            yield pkg
def Parse(self, path):
    """Parse a json package list into a list of Package objects."""
    result = []

    with open(path, 'r', encoding='utf-8') as jsonfile:
        for package in json.load(jsonfile)['packages']:
            pkg = Package()
            pkg.name = package['name']

            # skip entries with no version at all
            if package['version'] is None:
                print('no version: {}'.format(pkg.name), file=sys.stderr)
                continue

            pkg.version, _ = SanitizeVersion(package['version'])
            pkg.origversion = package['full_version']
            pkg.category = package['pkg_section'] or package['section']
            pkg.comment = package['description']
            pkg.maintainers = extract_maintainers(package['committer'])

            if pkg.version == '999':
                pkg.SetFlag(PackageFlags.ignore)  # XXX: rolling? revisit

            result.append(pkg)

    return result
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Parse a json package list and yield packages."""
    strip_epoch = VersionStripper().strip_left(':')

    with open(path, 'r', encoding='utf-8') as jsonfile:
        parsed = json.load(jsonfile)

    for package in parsed['packages']:
        pkg = factory.begin()

        pkg.add_name(package['name'], NameType.GENERIC_PKGNAME)

        # skip entries with no version at all
        if package['version'] is None:
            pkg.log('no version defined', Logger.ERROR)
            continue

        pkg.set_version(package['version'], strip_epoch)
        pkg.set_rawversion(package['full_version'])
        pkg.add_categories(package['pkg_section'], package['section'])
        pkg.set_summary(package['description'])
        pkg.add_maintainers(extract_maintainers(package['committer']))

        if pkg.version == '999':
            pkg.set_flags(PackageFlags.IGNORE)  # XXX: rolling? revisit

        yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Parse an rpm header list file and yield packages.

    Raises:
        RuntimeError: if a header lacks a version.
    """
    for header in rpm.readHeaderListFromFile(path):
        with factory.begin() as pkg:
            # decode raw header values using the repository's encoding
            fields = {
                key: str(header[key], self.encoding) if header[key] is not None else None
                for key in [
                    'name', 'version', 'release', 'packager', 'group',
                    'summary'
                ]
            }

            pkg.set_name(fields['name'])

            # check before use: the original called set_version(None)
            # first and only then raised
            if fields['version'] is None:
                raise RuntimeError('version not defined')
            pkg.set_version(fields['version'])  # XXX: handle release

            pkg.set_rawversion(
                nevra_construct(None, header['epoch'], fields['version'],
                                fields['release']))

            if fields['packager']:
                # XXX: may have multiple maintainers
                pkg.add_maintainers(extract_maintainers(fields['packager']))

            pkg.add_categories(fields['group'])
            pkg.set_summary(fields['summary'])

            yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Parse a pkgsrc-style INDEX (12 pipe-separated fields per line)."""
    strip_revision = VersionStripper().strip_right('nb')

    with open(path, encoding='utf-8') as indexfile:
        for line in indexfile:
            pkg = factory.begin()

            cells = line.strip().split('|')

            if len(cells) != 12:
                pkg.log('skipping, unexpected number of fields {}'.format(len(cells)), severity=Logger.ERROR)
                continue
            if not cells[0]:
                pkg.log('skipping, empty first field', severity=Logger.ERROR)
                continue

            name, version = cells[0].rsplit('-', 1)
            pkg.add_name(name, NameType.BSD_PKGNAME)
            pkg.add_name(cells[1], NameType.BSD_ORIGIN)
            pkg.set_version(version, strip_revision)
            pkg.set_summary(cells[3])

            # sometimes OWNER variable is used in which case
            # there's no MAINTAINER OWNER doesn't get to INDEX
            pkg.add_maintainers(extract_maintainers(cells[5]))
            pkg.add_categories(cells[6].split())
            pkg.add_homepages(cells[11])

            yield pkg
def _parse_package_metadata_xml(
        path: str, category: str, package: str,
        pkg: PackageMaker) -> Tuple[List[str], List[str]]:
    """Extract maintainers and upstream links from a package's metadata.xml.

    Returns a (maintainers, upstreams) pair; both lists are empty when the
    metadata.xml file does not exist.
    """
    metadata_path = os.path.join(path, category, package, 'metadata.xml')

    maintainers: List[str] = []
    upstreams: List[str] = []

    if not os.path.isfile(metadata_path):
        return maintainers, upstreams

    with open(metadata_path, 'r', encoding='utf-8') as metafile:
        meta = xml.etree.ElementTree.parse(metafile)

    for entry in meta.findall('maintainer'):
        email_node = entry.find('email')
        if email_node is not None and email_node.text is not None:
            maintainers.extend(extract_maintainers(email_node.text))

    for entry in meta.findall('upstream'):
        for remote_id_node in entry.findall('remote-id'):
            if not remote_id_node.text:
                continue
            link = _construct_upstream_link(remote_id_node.attrib['type'],
                                            remote_id_node.text.strip(), pkg)
            if link:
                upstreams.append(link)

    return maintainers, upstreams
def iter_parse(self, path):
    """Parse a FreeBSD-style INDEX file (12 pipe-separated fields)."""
    with open(path, encoding='utf-8') as indexfile:
        for line in indexfile:
            fields = line.strip().split('|')

            if len(fields) != 12:
                print('WARNING: package {} skipped, incorrect number of fields in INDEX'.format(fields[0]), file=sys.stderr)
                continue
            if not fields[0]:
                print('WARNING: line {} bogus, critical fields are empty'.format(line.strip()), file=sys.stderr)
                continue

            pkg = Package()

            pkg.name, version = fields[0].rsplit('-', 1)
            pkg.version, pkg.origversion = SanitizeVersion(version)
            pkg.comment = fields[3]

            if fields[11]:
                pkg.homepage = fields[11]

            # sometimes OWNER variable is used in which case
            # there's no MAINTAINER OWNER doesn't get to INDEX
            pkg.maintainers = extract_maintainers(fields[5])
            pkg.category = fields[6].split(' ')[0]

            pkg.extrafields['portname'] = fields[1].split('/')[-1]
            pkg.extrafields['origin'] = fields[1]

            yield pkg
def iter_parse(self, path: str, factory: PackageFactory) -> Iterable[PackageMaker]:
    """Walk a SlackBuilds tree and yield one package per .info file."""
    for category, pkgname in _iter_packages(path):
        with factory.begin(category + '/' + pkgname) as pkg:
            info_path = os.path.join(path, category, pkgname, pkgname + '.info')

            if not os.path.isfile(info_path):
                pkg.log('.info file does not exist', severity=Logger.ERROR)
                continue

            pkg.add_categories(category)

            variables = _parse_infofile(info_path)

            # PRGNAM must agree with the directory name
            if variables['PRGNAM'] != pkgname:
                pkg.log(f'PRGNAM "{variables["PRGNAM"]}" != pkgname "{pkgname}"', severity=Logger.ERROR)
                continue

            pkg.add_name(variables['PRGNAM'], NameType.SLACKBUILDS_NAME)
            pkg.add_name(category + '/' + pkgname, NameType.SLACKBUILDS_FULL_NAME)
            pkg.set_version(variables['VERSION'])
            pkg.add_homepages(variables['HOMEPAGE'])
            pkg.add_maintainers(extract_maintainers(variables['EMAIL']))

            for download_key in ('DOWNLOAD', 'DOWNLOAD_x86_64'):
                if variables[download_key] not in ('', 'UNSUPPORTED', 'UNTESTED'):
                    pkg.add_downloads(variables[download_key].split())

            yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Parse AUR RPC dump files (*.json) in a directory and yield packages."""
    normalize_version = VersionStripper().strip_right_greedy(
        '-').strip_left(':').strip_right_greedy('+')

    for filename in os.listdir(path):
        if not filename.endswith('.json'):
            continue

        # explicit encoding: the dumps are UTF-8 and the locale-dependent
        # default may mis-decode them (every other text open in this file
        # already passes encoding)
        with open(os.path.join(path, filename), 'r', encoding='utf-8') as jsonfile:
            for result in json.load(jsonfile)['results']:
                pkg = factory.begin()

                pkg.set_name(result['Name'])
                pkg.set_version(result['Version'], normalize_version)
                pkg.set_summary(result['Description'])
                pkg.add_homepages(result['URL'])
                pkg.add_licenses(result.get('License'))

                if 'Maintainer' in result and result['Maintainer']:
                    pkg.add_maintainers(
                        extract_maintainers(result['Maintainer'] + '@aur'))

                if 'PackageBase' in result and result['PackageBase']:
                    pkg.set_basename(result['PackageBase'])

                # XXX: enable when we support multiple categories
                #if 'Keywords' in result and result['Keywords']:
                #    pkg.add_categories(result['Keywords'])

                yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Walk a SlackBuilds tree and yield one package per .info file."""
    for category, pkgname in _iter_packages(path):
        pkg = factory.begin(category + '/' + pkgname)

        info_path = os.path.join(path, category, pkgname, pkgname + '.info')
        if not os.path.isfile(info_path):
            pkg.log('.info file does not exist', severity=Logger.ERROR)
            continue

        pkg.add_categories(category)

        variables = _parse_infofile(info_path)

        pkg.set_name(variables['PRGNAM'])
        pkg.set_version(variables['VERSION'])
        pkg.add_homepages(variables['HOMEPAGE'])
        pkg.add_maintainers(extract_maintainers(variables['EMAIL']))

        for download_key in ('DOWNLOAD', 'DOWNLOAD_x86_64'):
            if variables[download_key] not in ('', 'UNSUPPORTED', 'UNTESTED'):
                pkg.add_downloads(variables[download_key].split())

        yield pkg
def iter_parse(self, path, factory, transformer):
    """Parse a FreeBSD-style INDEX file (13 pipe-separated fields)."""
    strip_suffixes = VersionStripper().strip_right(',').strip_right('_')

    with open(path, encoding='utf-8') as indexfile:
        for line in indexfile:
            pkg = factory.begin()

            cells = line.strip().split('|')
            if len(cells) != 13:
                pkg.log('skipping, unexpected number of fields {}'.format(len(cells)), severity=Logger.ERROR)
                continue

            pkg.set_name_and_version(cells[0], strip_suffixes)
            pkg.set_summary(cells[3])
            pkg.add_maintainers(extract_maintainers(cells[5]))
            pkg.add_categories(cells[6].split())
            pkg.add_homepages(cells[12])

            # port location inside the tree gives portname and origin
            # (named to avoid shadowing the 'path' argument)
            origin_parts = cells[1].split('/')
            pkg.set_extra_field('portname', origin_parts[-1])
            pkg.set_origin('/'.join(origin_parts[-2:]))

            yield pkg
def Parse(self, path):
    """Parse AUR RPC dump files (*.json) into a list of Package objects."""
    packages = []

    for filename in os.listdir(path):
        if not filename.endswith('.json'):
            continue

        # explicit encoding: the dumps are UTF-8 and the locale-dependent
        # default may mis-decode them (other text opens in this file
        # already pass encoding)
        with open(os.path.join(path, filename), 'r', encoding='utf-8') as jsonfile:
            for result in json.load(jsonfile)['results']:
                pkg = Package()

                pkg.name = result['Name']
                pkg.version, pkg.origversion = SanitizeVersion(
                    result['Version'])
                pkg.comment = result['Description']
                pkg.homepage = result['URL']

                if 'License' in result:
                    for license_ in result['License']:
                        pkg.licenses.append(license_)

                if 'Maintainer' in result and result['Maintainer']:
                    pkg.maintainers += extract_maintainers(
                        result['Maintainer'] + '@aur')

                packages.append(pkg)

    return packages
def Parse(self, path):
    """Parse a FreeBSD-style INDEX (13 pipe-separated fields) into Packages."""
    result = []

    with open(path, encoding='utf-8') as indexfile:
        for line in indexfile:
            fields = line.strip().split('|')

            if len(fields) != 13:
                print('WARNING: package {} skipped, incorrect number of fields in INDEX'.format(fields[0]), file=sys.stderr)
                continue

            pkg = Package()

            pkg.name, version = fields[0].rsplit('-', 1)
            pkg.version, pkg.origversion = SanitizeVersion(version)
            pkg.comment = fields[3]
            pkg.maintainers = extract_maintainers(fields[5])
            pkg.category = fields[6].split(' ')[0]

            if fields[12]:
                pkg.homepage = fields[12]

            # port location inside the tree gives portname and origin
            # (named to avoid shadowing the 'path' argument)
            origin_parts = fields[1].split('/')
            pkg.extrafields['portname'] = origin_parts[-1]
            pkg.extrafields['origin'] = '/'.join(origin_parts[-2:])

            result.append(pkg)

    return result