def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield a package for each pspec.xml file found under path."""
    for pspec_abs in walk_tree(path, suffix='pspec.xml'):
        pspec_rel = os.path.relpath(pspec_abs, path)
        pkg = factory.begin(pspec_rel)

        try:
            tree = xml.etree.ElementTree.parse(pspec_abs)
        except xml.etree.ElementTree.ParseError as e:
            pkg.log('Cannot parse XML: ' + str(e), Logger.ERROR)
            continue

        pkg.set_name(tree.find('./Source/Name').text)  # type: ignore
        pkg.set_summary(tree.find('./Source/Summary').text)  # type: ignore

        pkg.add_homepages(el.text for el in tree.findall('./Source/Homepage'))
        pkg.add_downloads(el.text for el in tree.findall('./Source/Archive'))
        pkg.add_licenses(el.text for el in tree.findall('./Source/License'))
        pkg.add_categories(el.text for el in tree.findall('./Source/IsA'))
        pkg.add_maintainers(el.text for el in tree.findall('./Source/Packager/Email'))

        pkg.set_extra_field('pspecdir', os.path.dirname(pspec_rel))

        # the History/Update entry with the greatest release number is current
        latest_update = max(tree.findall('./History/Update'),
                            key=lambda el: int(el.attrib['release']))
        pkg.set_version(latest_update.find('./Version').text)  # type: ignore

        yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Stub parser: walks pkginfo.xml files but yields no packages yet."""
    for pkginfo_path in walk_tree(path, suffix='pkginfo.xml'):
        tree = xml.etree.ElementTree.parse(pkginfo_path)
        # XXX: fails on unknown entity NST_RELEASE_SUFFIX
        # and dtd is not usable as it needs preprocessing

    yield from []
def iter_parse(self, path, factory):
    """Yield a package for every version listed in each .json recipe under path.

    Recipes without a 'versions' key are skipped.  Download URLs are not
    parsed, as they point to git:// repos or specific commits.
    """
    for filename in walk_tree(path, suffix='.json'):
        # use a context manager so the file handle is not leaked
        with open(filename, encoding='utf-8', errors='ignore') as fd:
            data = json.load(fd)

        if 'versions' not in data:
            continue

        for version, versiondata in data['versions'].items():
            pkg = factory.begin()

            pkg.set_name(data['name'])
            pkg.set_version(version)
            pkg.add_licenses(data['license'])
            pkg.add_homepages(data['url'])
            pkg.set_extra_field('recipe', os.path.relpath(filename, path))

            # garbage: links to git:// or specific commits
            #if isinstance(versiondata['source'], str):
            #    pkg.downloads = [versiondata['source']]
            #else:
            #    pkg.downloads = [versiondata['source']['url']]

            yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield a package per version from each buckaroo .json recipe under path.

    Recipes without a 'versions' key are skipped.
    """
    for filename in walk_tree(path, suffix='.json'):
        # use a context manager so the file handle is not leaked
        with open(filename, encoding='utf-8', errors='ignore') as fd:
            data = json.load(fd)

        if 'versions' not in data:
            continue

        with factory.begin(filename) as pkg:
            pkg.add_name(data['name'], NameType.BUCKAROO_NAME)
            pkg.add_name(
                os.path.basename(filename)[:-5],  # strip '.json' suffix
                NameType.BUCKAROO_FILENAME)
            pkg.add_licenses(data['license'])
            pkg.add_homepages(data['url'])
            pkg.set_extra_field('recipe', os.path.relpath(filename, path))

            for version, versiondata in data['versions'].items():
                verpkg = pkg.clone()
                verpkg.set_version(version)

                # not parsing sources as these contain references to specific commit snapshots

                yield verpkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield a package per version described by each conandata.yml under path."""
    for abs_path in walk_tree(path, name='conandata.yml'):
        rel_path = os.path.relpath(abs_path, path)

        with factory.begin(rel_path) as pkg:
            # second path component is the recipe name
            pkg.add_name(rel_path.split('/')[1], NameType.CONAN_RECIPE_NAME)

            with open(abs_path) as fd:
                conandata = yaml.safe_load(fd)

            patches = _extract_patches(conandata)

            for version_info in _extract_version_infos(conandata):
                version = version_info.version
                verpkg = pkg.clone(append_ident=':' + version)
                verpkg.set_version(version)

                # XXX: we may create more subpackages here based on url_info.tags
                # which may contain various OSes, architectures, compilers and probably
                # other specifics (see cspice/all/conandata.yml for example)
                for url_info in version_info.url_infos:
                    verpkg.add_downloads(url_info.url)

                if version in patches:
                    verpkg.set_extra_field('patch', patches[version])

                verpkg.set_extra_field('folder', rel_path.split('/')[2])

                yield verpkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield a package for each version file found under path.

    Packages under the 'testing' subrepo are skipped.
    """
    for versionpath in walk_tree(path, name='version'):
        rootdir = os.path.dirname(versionpath)

        with factory.begin(rootdir) as pkg:
            pkgpath = os.path.relpath(rootdir, path)
            subrepo = os.path.split(pkgpath)[0]

            # skip testing packages before reading any of their files,
            # instead of parsing them only to discard the result
            if subrepo == 'testing':
                continue

            pkg.set_name(os.path.basename(rootdir))

            with open(versionpath) as f:
                # file contains "<version> <revision>"; revision is unused
                version, _revision = f.read().strip().split()
                pkg.set_version(version)

            with open(os.path.join(rootdir, 'sources')) as f:
                pkg.add_downloads(
                    filter(
                        _is_good_download,
                        (line.strip().split()[0] for line in f)
                    )
                )

            pkg.set_extra_field('path', pkgpath)
            pkg.set_subrepo(subrepo)

            yield pkg
def iter_parse(self, path: str, factory: PackageFactory) -> Iterable[PackageMaker]:
    """Yield a package for each sagemath package-version.txt found under path."""
    strip_patchlevel = VersionStripper().strip_right('.p')

    for versionfile in walk_tree(path, name='package-version.txt'):
        pkgdir = os.path.dirname(versionfile)

        with factory.begin(pkgdir) as pkg:
            basename = os.path.basename(pkgdir)
            pkg.add_name(basename, NameType.SAGEMATH_NAME)

            # python packages are prefixed to mark the namespace
            if os.path.exists(os.path.join(pkgdir, 'install-requires.txt')):
                pkg.add_name('python:' + basename, NameType.SAGEMATH_PROJECT_NAME)
            else:
                pkg.add_name(basename, NameType.SAGEMATH_PROJECT_NAME)

            with open(versionfile) as fd:
                pkg.set_version(fd.read().strip(), strip_patchlevel)

            if upstream_url := _parse_upstream_url(pkgdir):
                pkg.add_downloads(upstream_url.replace('VERSION', pkg.rawversion))

            add_patch_files(pkg, os.path.join(pkgdir, 'patches'), '*.patch')

            yield pkg
def Parse(self, path):
    """Return a list of packages, one per version in each .json recipe under path.

    Recipes without a 'versions' key are skipped.
    """
    result = []

    for filename in walk_tree(path, suffix='.json'):
        # use a context manager so the file handle is not leaked
        with open(filename, encoding='utf-8', errors='ignore') as fd:
            data = json.load(fd)

        if 'versions' not in data:
            continue

        for version, versiondata in data['versions'].items():
            pkg = Package()

            pkg.name = data['name']

            if data['license']:
                pkg.licenses = [data['license']]

            pkg.homepage = data['url']
            pkg.version = version
            pkg.extrafields['recipe'] = os.path.relpath(filename, path)

            # garbage: links to git:// or specific commits
            #if isinstance(versiondata['source'], str):
            #    pkg.downloads = [versiondata['source']]
            #else:
            #    pkg.downloads = [versiondata['source']['url']]

            result.append(pkg)

    return result
def Parse(self, path):
    """Stub parser: parses pkginfo.xml files but produces no packages yet."""
    packages = []

    for xml_path in walk_tree(path, suffix='pkginfo.xml'):
        root = xml.etree.ElementTree.parse(xml_path)
        # XXX: fails on unknown entity NST_RELEASE_SUFFIX
        # and dtd is not usable as it needs preprocessing

    return packages
def iter_parse(
        self, path: str, factory: PackageFactory,
        transformer: PackageTransformer
) -> Generator[PackageMaker, None, None]:
    """Yield a package for each fink .info file found under path."""
    for filename in walk_tree(path, suffix='.info'):
        rel_filename = os.path.relpath(filename, path)
        with factory.begin(rel_filename) as pkg:
            info = _parse_info_file(filename)

            # flatten nested InfoN sections into the top-level mapping;
            # later (lower-numbered) keys overwrite earlier ones
            for nestedkey in ['info4', 'info3', 'info2']:
                if nestedkey in info:
                    info.update(_parse_info(info[nestedkey]))

            pkg.set_name(info['package'])
            pkg.set_version(info['version'])

            if '%' in info['package']:
                # XXX: not usable because of too complex parsing is required for substitutions like
                # package-stash-pm%type-pkg[perl]
                pkg.log(
                    'unsupported substitution in package name: {}'.format(
                        info['package']),
                    severity=Logger.ERROR)
                continue

            # expand fink percent-substitutions in homepage and source fields
            for key in ['homepage', 'source']:
                if key in info:
                    # https://github.com/fink/fink/blob/848234952865c097f1a9c5b9cc4aa616546d906b/perlmod/Fink/PkgVersion.pm#L656-L671
                    replacements = {
                        '%v': info['version'],
                        '%n': info['package'],
                        '%m': info.get('architecture') or 'x86_64',  # can we use fixed arch to generate a download url?
                    }

                    for replkey, replacement in replacements.items():
                        info[key] = info[key].replace(replkey, replacement)

                    # leftover % means a substitution we did not handle
                    if '%' in info[key]:
                        pkg.log(
                            'probably unsupported substitution in {}: {}'.
                            format(key, info[key]),
                            severity=Logger.ERROR)

            pkg.add_downloads(info.get('source'))
            pkg.add_homepages(info.get('homepage'))
            pkg.add_licenses(info.get('license'))
            pkg.add_maintainers(extract_maintainers(info['maintainer']))
            pkg.set_extra_field('infopath', rel_filename)

            yield pkg
def _iter_packages(path: str) -> Iterable[_PackageLocation]:
    """Yield a _PackageLocation for every .yaml manifest under path/manifests."""
    manifests_root = os.path.join(path, 'manifests')

    for abs_path in walk_tree(manifests_root, suffix='.yaml'):
        rel_path = os.path.relpath(abs_path, path)
        rel_comps = rel_path.split('/')

        yield _PackageLocation(
            yamlpath_abs=abs_path,
            yamlpath_rel=rel_path,
            # drop leading manifests/ component and the trailing
            # version directory + yaml filename
            relevant_path='/'.join(rel_comps[1:-2]),
        )
def iter_parse(self, path: str, factory: PackageFactory) -> Iterable[PackageMaker]:
    """Yield a package for each KISS-style package directory under path.

    A package directory is identified by the presence of a version file;
    packages without a sources file are logged and skipped.
    """
    for version_path_abs in walk_tree(path, name='version'):
        version_path_rel = os.path.relpath(version_path_abs, path)
        package_path_abs = os.path.dirname(version_path_abs)
        package_path_rel = os.path.relpath(package_path_abs, path)
        package_path_rel_comps = os.path.split(package_path_rel)

        sources_path_abs = os.path.join(package_path_abs, 'sources')
        meta_path_abs = os.path.join(package_path_abs, 'meta')
        patches_path_abs = os.path.join(package_path_abs, 'patches')

        with factory.begin(package_path_rel) as pkg:
            pkg.add_name(package_path_rel_comps[-1], NameType.KISS_NAME)
            pkg.set_version(read_version(version_path_abs))

            if not os.path.exists(sources_path_abs):
                pkg.log('skipping sourceless package', Logger.ERROR)
                continue

            pkg.add_downloads(iter_sources(sources_path_abs))

            pkg.set_extra_field('path', package_path_rel)
            # first path component is the subrepo (e.g. core/extra)
            pkg.set_subrepo(package_path_rel_comps[0])

            if self._maintainer_from_git:
                # author of the last commit which touched the version file
                command = [
                    'git',
                    'log',
                    '-1',
                    '--format=tformat:%ae',
                    version_path_rel
                ]
                with subprocess.Popen(command,
                                      stdout=subprocess.PIPE,
                                      encoding='utf-8',
                                      errors='ignore',
                                      cwd=path) as git:
                    lastauthor, _ = git.communicate()
                    pkg.add_maintainers(extract_maintainers(lastauthor))

            if self._use_meta and os.path.exists(meta_path_abs):
                meta = read_carbs_meta(meta_path_abs)
                pkg.set_summary(meta.get('description'))
                pkg.add_licenses(meta.get('license', '').split(','))
                pkg.add_maintainers(
                    extract_maintainers(meta.get('maintainer')))

            add_patch_files(pkg, patches_path_abs)

            yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield a package for each KISS package directory (marked by a version file) under path."""
    for versionpath in walk_tree(path, name='version'):
        rootdir = os.path.dirname(versionpath)
        pkgpath = os.path.relpath(rootdir, path)

        with factory.begin(pkgpath) as pkg:
            pkg.add_name(os.path.basename(rootdir), NameType.KISS_NAME)

            with open(versionpath) as f:
                # file contains "<version> <revision>"
                version, revision = f.read().strip().split()
                pkg.set_version(version)

            with open(os.path.join(rootdir, 'sources')) as f:
                for rawline in f:
                    line = rawline.strip()

                    # skip blanks and comments
                    if not line or line.startswith('#'):
                        continue

                    fields = line.split()
                    # only remote sources count as downloads
                    if '://' in fields[0]:
                        pkg.add_downloads(fields[0])

            pkg.set_extra_field('path', pkgpath)
            pkg.set_subrepo(os.path.split(pkgpath)[0])

            if self._maintainer_from_git:
                command = [
                    'git', 'log', '-1', '--format=tformat:%ae',
                    os.path.relpath(versionpath, path)
                ]
                with subprocess.Popen(command,
                                      stdout=subprocess.PIPE,
                                      encoding='utf-8',
                                      errors='ignore',
                                      cwd=path) as git:
                    lastauthor, _ = git.communicate()
                    pkg.add_maintainers(extract_maintainers(lastauthor))

            patchesdir_abs = os.path.join(rootdir, 'patches')
            if os.path.exists(patchesdir_abs):
                pkg.set_extra_field('patch', sorted(os.listdir(patchesdir_abs)))

            yield pkg
def iter_parse(
        self, path: str, factory: PackageFactory,
        transformer: PackageTransformer
) -> Generator[PackageMaker, None, None]:
    """Yield a package for each .desc file found under path."""
    for filename in walk_tree(path, suffix='.desc'):
        rel_filename = os.path.relpath(filename, path)
        with factory.begin(rel_filename) as pkg:
            pkgpath = os.path.dirname(rel_filename)
            name = os.path.basename(pkgpath)

            if name + '.desc' != os.path.basename(rel_filename):
                pkg.log('Path inconsistency (expected .../foo/foo.desc)',
                        Logger.WARNING)

            data = _parse_descfile(filename, pkg)

            pkg.set_name(name)
            pkg.set_version(data['version'][0])
            pkg.set_summary(data['title'][0])
            pkg.add_homepages(
                (url.split()[0] for url in data.get('url', []) if url))
            pkg.add_homepages(data.get('cv-url'))
            pkg.add_licenses(data['license'])
            pkg.add_maintainers(map(extract_maintainers, data['maintainer']))
            pkg.add_categories(data['category'])

            # use a distinct name for the download file name: the original
            # code reused `filename`, shadowing the outer loop variable
            for cksum, dl_filename, url, *rest in (
                    line.split() for line in data.get('download', [])):
                url = url.lstrip('-!')

                if url.endswith('/'):
                    url += dl_filename

                # vcs urls are snapshots basically
                if url.startswith(('cvs', 'git', 'svn', 'hg')):
                    pkg.set_flags(PackageFlags.untrusted)

                pkg.add_downloads(url)

            pkg.set_extra_field('pkgpath', pkgpath)

            yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield a package for each T2 .desc file found under path.

    Raises RuntimeError when a .desc file name does not match its
    directory name.
    """
    for desc_path in walk_tree(path, suffix='.desc'):
        rel_desc_path = os.path.relpath(desc_path, path)
        with factory.begin(rel_desc_path) as pkg:
            pkgpath = os.path.dirname(rel_desc_path)
            name = os.path.basename(pkgpath)

            if name + '.desc' != os.path.basename(rel_desc_path):
                raise RuntimeError(
                    'Path inconsistency (expected .../foo/foo.desc)')

            data = _parse_descfile(desc_path, pkg)

            pkg.add_name(name, NameType.T2_NAME)
            pkg.add_name(pkgpath, NameType.T2_FULL_NAME)
            pkg.set_version(data['version'][0])
            pkg.set_summary(data['title'][0])
            pkg.add_homepages(
                (url.split()[0] for url in data.get('url', []) if url))
            #pkg.add_homepages(data.get('cv-url'))  # url used by version checker; may be garbage
            pkg.add_licenses(data['license'])
            pkg.add_maintainers(map(extract_maintainers, data['maintainer']))
            pkg.add_categories(data['category'])

            for cksum, filename, url, *rest in (
                    line.split() for line in data.get('download', [])):
                url = url.lstrip('-!')

                # a trailing slash means the file name must be appended
                if url.endswith('/'):
                    url += filename

                # vcs urls are snapshots basically, not reliable releases;
                # str.startswith accepts a tuple, no need for an or-chain
                if url.startswith(('cvs', 'git', 'svn', 'hg')):
                    pkg.set_flags(PackageFlags.UNTRUSTED)

                pkg.add_downloads(url)

            add_patch_files(pkg, os.path.dirname(desc_path), '*.patch')

            yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield a package for each distri build.textproto found under path."""
    for proto_abs in walk_tree(path, name='build.textproto'):
        proto_rel = os.path.relpath(proto_abs, path)

        with factory.begin(proto_rel) as pkg:
            pkgdir = os.path.dirname(proto_rel)
            pkg.add_name(os.path.basename(pkgdir), NameType.DISTRI_NAME)

            build = BuildMessage()
            with open(proto_abs) as f:
                ParseTextFormat(f.read(), build)

            # strip the trailing -<revision> from the version field
            pkg.set_version(build.version, lambda ver: ver.rsplit('-', 1)[0])
            pkg.add_downloads(build.source)
            pkg.set_extra_field('path', pkgdir)

            yield pkg
def iter_parse(self, path: str, factory: PackageFactory) -> Iterable[PackageMaker]:
    """Yield a package for each PISI pspec.xml found under path."""
    for pspec_abs in walk_tree(path, suffix='pspec.xml'):
        pspec_rel = os.path.relpath(pspec_abs, path)

        with factory.begin(pspec_rel) as pkg:
            try:
                root = xml.etree.ElementTree.parse(pspec_abs).getroot()
            except xml.etree.ElementTree.ParseError as e:
                pkg.log('Cannot parse XML: ' + str(e), Logger.ERROR)
                continue

            name = safe_findtext(root, './Source/Name')
            pkgdir = os.path.dirname(pspec_rel)
            pathname = pspec_rel.split(os.sep)[-2]

            if name != pathname:  # there's only one exception ATOW
                pkg.log(f'name "{name}" != package directory "{pathname}"',
                        Logger.ERROR)

            pkg.add_name(name, NameType.PISI_NAME)
            pkg.add_name(pkgdir, NameType.PISI_PKGDIR)
            pkg.set_summary(safe_findtext(root, './Source/Summary'))

            pkg.add_homepages(el.text for el in root.findall('./Source/Homepage'))
            pkg.add_downloads(el.text for el in root.findall('./Source/Archive'))
            pkg.add_licenses(el.text for el in root.findall('./Source/License'))
            pkg.add_categories(el.text for el in root.findall('./Source/IsA'))
            pkg.add_maintainers(el.text for el in root.findall('./Source/Packager/Email'))

            # the History/Update entry with the greatest release number is current
            latest_update = max(root.findall('./History/Update'),
                                key=lambda el: int(el.attrib['release']))
            pkg.set_version(safe_findtext(latest_update, './Version'))

            yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Yield a package per version from each conandata.yml found under path."""
    for abs_path in walk_tree(path, name='conandata.yml'):
        rel_path = os.path.relpath(abs_path, path)

        with factory.begin(rel_path) as pkg:
            # second path component is the recipe name
            pkg.add_name(rel_path.split('/')[1], NameType.CONAN_RECIPE_NAME)

            with open(abs_path) as fd:
                conandata = yaml.safe_load(fd)

            for version, urls in _extract_version_urls(conandata['sources']):
                verpkg = pkg.clone(append_ident=version)
                verpkg.set_version(version)
                verpkg.add_downloads(urls)

                yield verpkg
def iter_parse(self, path: str, factory: PackageFactory) -> Iterable[PackageMaker]:
    """Yield a package for each PKGINFO file found under path.

    Raises RuntimeError when a package directory name disagrees with the
    name recorded inside its PKGINFO.
    """
    for pkginfo_path_abs in walk_tree(path, name='PKGINFO'):
        pkginfo_path_rel = os.path.relpath(pkginfo_path_abs, path)
        with factory.begin(pkginfo_path_rel) as pkg:
            subdir = os.path.dirname(pkginfo_path_rel)

            with open(pkginfo_path_abs) as fd:
                # json.load reads the stream directly; no need for fd.read()
                pkginfo = json.load(fd)

            if subdir != pkginfo['name']:
                raise RuntimeError(
                    f'subdir "{subdir}" != name "{pkginfo["name"]}"')

            pkg.add_name(pkginfo['name'], NameType.YIFFOS_NAME)
            pkg.set_version(pkginfo['version'])
            pkg.set_summary(pkginfo['description'])
            pkg.add_links(LinkType.UPSTREAM_HOMEPAGE, pkginfo['url'])
            pkg.add_licenses(pkginfo['license'])
            pkg.add_maintainers(
                map(extract_maintainers, pkginfo['maintainers']))

            yield pkg
def iter_parse(self, path, factory):
    """Stub parser for pkginfo.xml files.

    NOTE(review): despite the iter_ name this function contains no yield,
    so calling it runs the loop eagerly and returns None rather than an
    iterable — confirm callers do not iterate over its result (compare
    the sibling stub that ends with ``yield from []``).
    """
    for filename in walk_tree(path, suffix='pkginfo.xml'):
        # parse result is unused; presumably a placeholder for future parsing
        root = xml.etree.ElementTree.parse(filename)