def relne_matcher(package: Package, package_context: PackageContext, match_context: MatchContext) -> bool:
    """Match when the package version is relationally not-equal to `relne`.

    True when the version compares below `relne` at the lower bound or
    above it at the upper bound.
    """
    if version_compare(package.version, relne, 0, LOWER_BOUND) < 0:
        return True
    return version_compare(package.version, relne, 0, UPPER_BOUND) > 0
def Parse(self, path):
    """Parse an unpacked Hackage index directory into a list of Packages.

    Layout: one directory per module, each holding one subdirectory per
    published version plus an optional 'preferred-versions' file.
    """
    packages = []

    for moduledir in os.listdir(path):
        modulepath = os.path.join(path, moduledir)

        cabalpath = None
        maxversion = None

        # pick the greatest version directory; its .cabal file describes
        # the release we report
        for versiondir in os.listdir(modulepath):
            if versiondir == 'preferred-versions':
                continue
            if maxversion is None or version_compare(versiondir, maxversion) > 0:
                maxversion = versiondir
                cabalpath = os.path.join(path, moduledir, maxversion, moduledir + '.cabal')

        if maxversion is None:
            print('WARNING: cannot determine max version for {}'.format(moduledir), file=sys.stderr)
            continue

        pkg = Package()
        pkg.name = moduledir
        pkg.version = maxversion
        # fallback homepage; may be overridden by cabal metadata below
        pkg.homepage = 'http://hackage.haskell.org/package/' + moduledir

        cabaldata = self.ParseCabal(cabalpath)

        # sanity check: cabal metadata must agree with the directory layout
        if cabaldata['name'] == pkg.name and version_compare(cabaldata['version'], pkg.version) == 0:
            if 'synopsis' in cabaldata and cabaldata['synopsis']:
                pkg.comment = cabaldata['synopsis'].strip()
            if 'maintainer' in cabaldata:
                pkg.maintainers = extract_maintainers(cabaldata['maintainer'])
            if 'license' in cabaldata:
                pkg.licenses = [cabaldata['license']]
            # only accept absolute http(s) homepages
            if 'homepage' in cabaldata and (cabaldata['homepage'].startswith('http://') or cabaldata['homepage'].startswith('https://')):
                pkg.homepage = cabaldata['homepage']
            if 'category' in cabaldata:
                pkg.category = cabaldata['category']
        else:
            print('WARNING: cabal data sanity check failed for {}, ignoring cabal data'.format(cabalpath), file=sys.stderr)

        packages.append(pkg)

    return packages
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Parse an unpacked Hackage index tree, yielding a PackageMaker per module.

    Picks the greatest version subdirectory of each module (skipping the
    'preferred-versions' metadata file) and enriches the package from the
    corresponding .cabal file when its name/version agree with the layout.
    """
    for moduledir in os.listdir(path):
        pkg = factory.begin()
        pkg.set_name(moduledir)

        modulepath = os.path.join(path, moduledir)

        cabalpath = None
        maxversion = None

        for versiondir in os.listdir(modulepath):
            if versiondir == 'preferred-versions':
                continue
            if maxversion is None or version_compare(versiondir, maxversion) > 0:
                maxversion = versiondir
                cabalpath = os.path.join(path, moduledir, maxversion, moduledir + '.cabal')

        if maxversion is None:
            # was: 'cannot determine max version'.format() — dropped the no-op format() call
            pkg.log('cannot determine max version', severity=Logger.ERROR)
            continue

        pkg.set_version(maxversion)

        # cabalpath is always set once maxversion is; explicit None check
        # instead of a truthiness assert
        assert cabalpath is not None

        cabaldata = _parse_cabal_file(cabalpath)

        # sanity check: cabal metadata must agree with the directory layout
        if cabaldata['name'] == pkg.name and version_compare(cabaldata['version'], pkg.version) == 0:
            pkg.set_summary(cabaldata.get('synopsis'))

            if 'maintainer' not in cabaldata:
                pkg.add_maintainers('fallback-mnt-hackage@repology')
            else:
                pkg.add_maintainers(extract_maintainers(cabaldata.get('maintainer')))

            pkg.add_licenses(cabaldata.get('license'))
            pkg.add_homepages(cabaldata.get('homepage'))
            pkg.add_categories(cabaldata.get('category'))
        else:
            pkg.log(
                'cabal data sanity check failed ({} {} != {} {}), ignoring cabal data'
                .format(cabaldata['name'], cabaldata['version'], pkg.name, pkg.version),
                severity=Logger.ERROR)

        # fallback homepage is always added
        pkg.add_homepages('http://hackage.haskell.org/package/' + moduledir)

        yield pkg
def set_highlight(self, version: Optional[str]) -> '_VersionRange':
    """Mark this range as highlighted if `version` falls inside it.

    Returns self for chaining; a None version leaves the range untouched.
    """
    if version is None:
        return self
    # below lower bound; comparison threshold depends on exclusivity
    if self.start is not None and version_compare(version, self.start) < (1 if self.start_excluded else 0):
        return self
    # NOTE(review): unlike `start`, `end` is not guarded against None here —
    # presumably ranges always carry an upper bound; confirm with callers.
    if version_compare(version, self.end) > (-1 if self.end_excluded else 0):
        return self
    self.highlighted = True
    return self
def __eq__(self, other):
    """Equality over metaorder, version class, literal version, flag-aware version and spread."""
    if self.metaorder != other.metaorder:
        return False
    if self.versionclass != other.versionclass:
        return False
    if self.version != other.version:
        return False
    if version_compare(self.version, other.version, self.versionflags, other.versionflags) != 0:
        return False
    return self.spread == other.spread
def iter_parse(self, path, factory): result = {} # note that we actually parse database prepared by # fetcher, not the file we've downloaded with open(path, 'r', encoding='utf-8') as jsonfile: for entry in json.load(jsonfile)['releases']: pkg = factory.begin() pkg.set_name(entry['name']) pkg.set_version(entry['version']) if not pkg.check_sanity(verbose=False): continue pkg.add_homepages(entry.get('homepage')) pkg.set_summary(entry.get('summary')) if not pkg.comment: pkg.set_summary(entry.get('description')) # multiline #pkg.add_maintainers(entry.get('submitter') + '@freshcode') # unfiltered garbage #pkg.add_downloads(entry.get('download')) # ignore for now, may contain download page urls instead of file urls pkg.add_licenses(entry.get('license')) # take latest known versions if pkg.name not in result or version_compare(pkg.version, result[pkg.name].version) > 0: result[pkg.name] = pkg yield from result.values()
def __lt__(self, other):
    """Strict ordering: metaorder first, then flag-aware version, class, spread, literal version."""
    if self.metaorder != other.metaorder:
        return self.metaorder < other.metaorder

    verdict = version_compare(self.version, other.version, self.versionflags, other.versionflags)
    if verdict != 0:
        return verdict < 0

    if self.versionclass != other.versionclass:
        return self.versionclass < other.versionclass
    if self.spread != other.spread:
        return self.spread < other.spread

    # final tie-break on the literal version string
    return self.version < other.version
def getNextVersion(version, homepage):
    """Return the next upstream version for a GitHub-hosted project, or None.

    Returns None when the homepage is not a recognized user/repo URL, no
    release exists, the candidate is dated older than the current unstable
    version, it is a prerelease, or it does not compare greater than the
    current version.
    """
    userRepo = getUserRepoPair(homepage)
    if userRepo is None:
        return

    nextVersionDate = latestRelease(*userRepo)
    if nextVersionDate is None:
        return
    nextVersion, nextDate = nextVersionDate

    # unstable versions encode a date; never "upgrade" to an older release
    currDate = parseUnstable(version)
    if currDate is not None and nextDate.date() <= currDate.date():
        log(
            f"Discarding unfit version {nextVersion} ({nextDate}), because it "
            f"is older than our current version {version}."
        )
        return

    if skipPrerelease(nextVersion):
        return

    # strip repo-name prefixes such as "repo-1.2.3" before comparing
    nextVersion = stripRelease(userRepo[1], nextVersion)

    if libversion.version_compare(version, nextVersion) >= 0:
        return
    return nextVersion
def badge_vertical_allrepos(name: str) -> Any:
    """Render a vertical badge listing the best package per active repository."""
    args = flask.request.args.to_dict()

    packages = get_db().get_metapackage_packages(name, fields=['repo', 'version', 'versionclass'])
    # NOTE(review): query args are strings, so any non-empty value of
    # allow_ignored (including '0') is truthy — confirm this is intended
    best_pkg_by_repo = packageset_to_best_by_repo(packages, allow_ignored=args.get('allow_ignored', False))

    header = args.get('header')
    minversion = args.get('minversion')

    cells = []

    for reponame in repometadata.active_names():
        if reponame in best_pkg_by_repo:
            version = best_pkg_by_repo[reponame].version
            versionclass = best_pkg_by_repo[reponame].versionclass
            # highlight repositories whose version is below the requested minimum
            unsatisfying = minversion and version_compare(version, minversion) < 0

            color = badge_color(versionclass, unsatisfying)

            cells.append([
                BadgeCell(repometadata[reponame]['desc'], align='r'),
                BadgeCell(version, color=color, truncate=13, minwidth=60)
            ])

    if header is None:
        header = 'Packaging status' if cells else 'No known packages'

    return render_generic_badge(cells, header=header)
def badge_version_for_repo(repo: str, name: str) -> Any:
    """Render a single-repository version badge for a project.

    Aborts with 404 for an unknown repository. When the project has no
    package in the repository, renders '-' instead of returning 404 so
    browsers still display the image.
    """
    if repo not in repometadata.all_names():
        flask.abort(404)

    args = flask.request.args.to_dict()

    best_package = packageset_to_best(
        get_db().get_metapackage_packages(name, repo=repo, fields=['repo', 'version', 'versionclass']),
        allow_ignored=args.get('allow_ignored', False))

    # consistency fix: reuse the already-parsed `args` instead of calling
    # flask.request.args.to_dict() again for each lookup
    left_cell = BadgeCell(args.get('header', repometadata[repo]['singular']), collapsible=True)

    if best_package is None:
        # Note: it would be more correct to return 404 with content here,
        # but some browsers (e.g. Firefox) won't display the image in that case
        right_cell = BadgeCell('-')
    else:
        minversion = args.get('minversion')
        # highlight when below the requested minimum version
        unsatisfying = minversion and version_compare(best_package.version, minversion) < 0

        right_cell = BadgeCell(
            best_package.version,
            badge_color(best_package.versionclass, unsatisfying),
            truncate=20)

    return render_generic_badge([[left_cell, right_cell]])
def iter_parse( self, path: str, factory: PackageFactory, transformer: PackageTransformer ) -> Generator[PackageMaker, None, None]: result: Dict[str, PackageMaker] = {} # note that we actually parse database prepared by # fetcher, not the file we've downloaded with open(path, 'rb') as jsonfile: for entry in JsonSlicer(jsonfile, ('releases', None)): pkg = factory.begin() pkg.set_name(entry['name']) pkg.set_version(entry['version']) if not pkg.check_sanity(verbose=False): continue pkg.add_homepages(entry.get('homepage')) pkg.set_summary(entry.get('summary')) if not pkg.comment: pkg.set_summary(entry.get('description')) # multiline #pkg.add_maintainers(entry.get('submitter') + '@freshcode') # unfiltered garbage #pkg.add_downloads(entry.get('download')) # ignore for now, may contain download page urls instead of file urls pkg.add_licenses(entry.get('license')) # take latest known versions if pkg.name not in result or version_compare( pkg.version, result[pkg.name].version) > 0: result[pkg.name] = pkg yield from result.values()
def iter_parse(self, path: str, factory: PackageFactory) -> Iterable[PackageMaker]: result: dict[str, PackageMaker] = {} # note that we actually parse database prepared by # fetcher, not the file we've downloaded for entry in iter_json_list(path, ('releases', None)): with factory.begin() as pkg: pkg.add_name(entry['name'], NameType.FRESHCODE_NAME) if not entry['version']: pkg.log('empty version', Logger.ERROR) continue pkg.set_version(entry['version']) pkg.add_homepages(entry.get('homepage')) pkg.set_summary( entry.get('summary') ) # cound use `or entry.get('description'))`, but it's long multiline #pkg.add_maintainers(entry.get('submitter') + '@freshcode') # unfiltered garbage #pkg.add_downloads(entry.get('download')) # ignore for now, may contain download page urls instead of file urls pkg.add_licenses(entry.get('license')) # take latest known versions if entry['name'] not in result or version_compare( pkg.version, result[entry['name']].version) > 0: result[entry['name']] = pkg yield from result.values()
def badge_version_for_repo(repo, name):
    """Render a tiny single-repository version badge (SVG); 404 for unknown repos."""
    if repo not in repometadata.all_names():
        flask.abort(404)

    packages = get_db().get_metapackage_packages(name, fields=['repo', 'version', 'versionclass'])
    best_pkg_by_repo = packageset_to_best_by_repo(packages)

    if repo not in best_pkg_by_repo:
        # XXX: display this as normal "pill" badge with correct repository name
        return (
            flask.render_template('badge-tiny-string.svg', string='No package'),
            # XXX: it's more correct to return 404 with content
            # here, but some browsers (e.g. Firefox) won't display
            # the image in that case
            {
                'Content-type': 'image/svg+xml'
            })

    minversion = flask.request.args.to_dict().get('minversion')
    # highlight when below the requested minimum version
    unsatisfying = version_compare(best_pkg_by_repo[repo].version, minversion) < 0 if minversion else False

    return (flask.render_template(
        'badge-tiny-version.svg',
        repo=repo,
        version=best_pkg_by_repo[repo].version,
        versionclass=best_pkg_by_repo[repo].versionclass,
        unsatisfying=unsatisfying,
    ), {
        'Content-type': 'image/svg+xml'
    })
def badge_vertical_allrepos(name):
    """Render a vertical SVG badge listing the best package per active repository."""
    packages = get_db().get_metapackage_packages(name, fields=['repo', 'version', 'versionclass'])
    best_pkg_by_repo = packageset_to_best_by_repo(packages)

    header = flask.request.args.to_dict().get('header', 'Packaging status')
    minversion = flask.request.args.to_dict().get('minversion')

    entries = [{
        'repo': repometadata[reponame],
        'package': best_pkg_by_repo[reponame],
        # highlight entries below the requested minimum version
        'unsatisfying': version_compare(best_pkg_by_repo[reponame].version, minversion) < 0 if minversion else False,
    } for reponame in repometadata.active_names() if reponame in best_pkg_by_repo]

    if not entries:
        header = 'No known packages'

    return (flask.render_template('badge-vertical.svg', entries=entries, name=name, header=header), {
        'Content-type': 'image/svg+xml'
    })
def _iter_hackage_tarfile_multipass(path: str) -> Iterable[Dict[str, str]]:
    """Yield parsed cabal data for the latest version of each Hackage package.

    The index tarball is opened in streaming mode ('r|*'), which forbids
    seeking, so three sequential passes are used instead of random access.
    """
    preferred_versions: Dict[str, str] = {}
    latest_versions: Dict[str, List[Any]] = {}  # name -> [version, count]

    # Pass 1: gather preferred versions
    # NOTE(review): preferred_versions is filled but never read below —
    # possibly meant to constrain version selection; confirm intent.
    with tarfile.open(path, 'r|*') as tar:
        for tarinfo in tar:
            tarpath = tarinfo.name.split('/')
            if tarpath[-1] == 'preferred-versions':
                preferred_versions[tarpath[0]] = _extract_tarinfo(tar, tarinfo)

    # Pass 2: gather latest versions
    # duplicates of the latest version are counted so pass 3 can yield only
    # the last occurrence (later tarball entries supersede earlier ones)
    with tarfile.open(path, 'r|*') as tar:
        for tarinfo in tar:
            tarpath = tarinfo.name.split('/')
            if tarpath[-1].endswith('.cabal'):
                name, version = tarpath[0:2]
                if name not in latest_versions or version_compare(version, latest_versions[name][0]) > 0:
                    latest_versions[name] = [version, 1]
                elif version == latest_versions[name][0]:
                    latest_versions[name][1] += 1

    # Pass 3: extract cabal files
    with tarfile.open(path, 'r|*') as tar:
        for tarinfo in tar:
            tarpath = tarinfo.name.split('/')
            if tarpath[-1].endswith('.cabal'):
                name, version = tarpath[0:2]
                if version == latest_versions[name][0]:
                    if latest_versions[name][1] > 1:
                        latest_versions[name][1] -= 1
                    else:
                        yield _parse_cabal_file(StringIO(_extract_tarinfo(tar, tarinfo)))
def __lt__(self, other: 'UserVisibleVersionInfo') -> bool:
    """Strict ordering: metaorder, flag-aware version, class, vulnerability, spread, literal version."""
    if self.metaorder != other.metaorder:
        return self.metaorder < other.metaorder

    verdict = version_compare(self.version, other.version, self.versionflags, other.versionflags)
    if verdict != 0:
        return verdict < 0

    if self.versionclass != other.versionclass:
        return self.versionclass < other.versionclass

    # vulnerable versions sort first at equal class
    if self.vulnerable != other.vulnerable:
        return self.vulnerable > other.vulnerable

    if self.spread != other.spread:
        return self.spread < other.spread

    # final tie-break on the literal version string
    return self.version < other.version
def iter_parse(self, path, factory):
    """Parse a GoboLinux recipes checkout, yielding one Package per recipe.

    Layout: trunk/<package>/<version>/{Recipe,Resources/Description};
    the greatest version directory is used.
    """
    trunk_path = os.path.join(path, 'trunk')

    for package_name in os.listdir(trunk_path):
        package_path = os.path.join(trunk_path, package_name)

        maxversion = None
        for version_name in os.listdir(package_path):
            if maxversion is None or version_compare(version_name, maxversion) > 0:
                maxversion = version_name

        if maxversion is None:
            factory.log('no usable versions for package {}'.format(package_name), severity=Logger.ERROR)
            continue

        recipe_path = os.path.join(package_path, maxversion, 'Recipe')
        description_path = os.path.join(package_path, maxversion, 'Resources', 'Description')

        pkg = factory.begin()
        pkg.name = package_name
        pkg.version = maxversion

        if os.path.isfile(recipe_path):
            with open(recipe_path, 'r', encoding='utf-8', errors='ignore') as recipe:
                for line in recipe:
                    line = line.strip()
                    if line.startswith('url='):
                        download = ExpandDownloadUrlTemplates(line[4:])
                        # skip urls with unexpanded shell-style variables
                        if '$' not in download:
                            pkg.downloads.append(download.strip('"'))
                        else:
                            factory.log('Recipe for {}/{} skipped, unhandled URL substitude found'.format(package_name, maxversion), severity=Logger.ERROR)

        if os.path.isfile(description_path):
            with open(description_path, 'r', encoding='utf-8', errors='ignore') as description:
                # Description format: "[Tag] text" headers with continuation lines
                data = {}
                current_tag = None
                for line in description:
                    line = line.strip()
                    # fixed: raw string — '\[' in a plain string is an
                    # invalid escape sequence (SyntaxWarning on 3.12+)
                    match = re.match(r'^\[([A-Z][a-z]+)\] *(.*?)$', line)
                    if match:
                        current_tag = match.group(1)
                        data[current_tag] = match.group(2)
                    elif current_tag is None:
                        factory.log('Description for {}/{} skipped, dumb format'.format(package_name, maxversion), severity=Logger.ERROR)
                        break
                    elif line:
                        if data[current_tag]:
                            data[current_tag] += ' '
                        data[current_tag] += line

                if 'Summary' in data:
                    pkg.comment = data['Summary']
                if 'License' in data:
                    pkg.licenses = [data['License']]
                if 'Homepage' in data:
                    pkg.homepage = data['Homepage'].strip('"')

        yield pkg
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
    """Parse a GoboLinux recipes checkout, yielding one PackageMaker per recipe.

    Layout: <path>/<recipe>/<version>/{Recipe,Resources/Description};
    the greatest version directory is used.
    """
    for recipe_name in os.listdir(path):
        if recipe_name.startswith('.'):
            continue

        pkg = factory.begin()
        pkg.add_name(recipe_name, NameType.GOBOLINUX_RECIPE)

        package_path = os.path.join(path, recipe_name)

        maxversion: Optional[str] = None
        for version_name in os.listdir(package_path):
            if maxversion is None or version_compare(version_name, maxversion) > 0:
                maxversion = version_name

        if maxversion is None:
            pkg.log('no usable versions found', severity=Logger.ERROR)
            continue

        pkg.set_version(maxversion)

        recipe_path = os.path.join(package_path, maxversion, 'Recipe')
        description_path = os.path.join(package_path, maxversion, 'Resources', 'Description')

        if os.path.isfile(recipe_path):
            with open(recipe_path, 'r', encoding='utf-8', errors='ignore') as recipe:
                for line in recipe:
                    line = line.strip()
                    if line.startswith('url='):
                        download = _expand_mirrors(line[4:])
                        # skip urls with unexpanded shell-style variables
                        if '$' not in download:
                            pkg.add_downloads(download.strip('"'))
                        else:
                            factory.log('Recipe for {}/{} skipped, unhandled URL substitute found'.format(recipe_name, maxversion), severity=Logger.ERROR)

        if os.path.isfile(description_path):
            with open(description_path, 'r', encoding='utf-8', errors='ignore') as description:
                # Description format: "[Tag] text" headers with continuation lines
                data = {}
                current_tag = None
                for line in description:
                    line = line.strip()
                    match = re.match('^\\[([A-Z][a-z]+)\\] *(.*?)$', line)
                    if match:
                        current_tag = match.group(1)
                        data[current_tag] = match.group(2)
                    elif current_tag is None:
                        factory.log('Description for {}/{} skipped, dumb format'.format(recipe_name, maxversion), severity=Logger.ERROR)
                        break
                    elif line:
                        if data[current_tag]:
                            data[current_tag] += ' '
                        data[current_tag] += line

                pkg.set_summary(data.get('Summary'))
                pkg.add_licenses(data.get('License'))
                # NOTE(review): when 'Homepage' is absent this passes '' —
                # presumably filtered downstream; confirm
                pkg.add_homepages(data.get('Homepage', '').strip('"'))

        yield pkg
def __eq__(self, other: Any) -> bool:
    """Equality over metaorder, class, literal version, flag-aware version, vulnerability and spread."""
    if self.metaorder != other.metaorder:
        return False
    if self.versionclass != other.versionclass:
        return False
    if self.version != other.version:
        return False
    if version_compare(self.version, other.version, self.versionflags, other.versionflags) != 0:
        return False
    if self.vulnerable != other.vulnerable:
        return False
    return self.spread == other.spread
def badge_vertical_allrepos(name: str) -> Response:
    """Render a vertical badge listing the best package per active repository.

    Supports repository filtering, minversion highlighting and a
    multi-column layout via the `columns` query argument.
    """
    args = flask.request.args.to_dict()

    best_pkg_by_repo = packageset_to_best_by_repo(
        (PackageDataMinimal(**item) for item in get_db().get_metapackage_packages(name, minimal=True)),
        allow_ignored=args.get('allow_ignored', False))

    header = args.get('header')
    minversion = args.get('minversion')
    repo_filter = RepositoryFilter(args)

    cells = []

    for reponame in repometadata.active_names():
        if not repo_filter.check(reponame):
            continue

        if reponame in best_pkg_by_repo:
            version = best_pkg_by_repo[reponame].version
            versionclass = best_pkg_by_repo[reponame].versionclass
            # highlight repositories whose version is below the requested minimum
            unsatisfying = minversion and version_compare(version, minversion) < 0

            color = badge_color(versionclass, unsatisfying)

            cells.append([
                BadgeCell(repometadata[reponame]['desc'], align='r'),
                BadgeCell(version, color=color, truncate=13, minwidth=60)
            ])

    try:
        columns = min(int(args.get('columns', '1')), len(cells))
    except ValueError:
        # fixed: was a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt; only malformed `columns` is expected
        columns = 1

    if columns > 1:
        # split cells into `columns` roughly equal chunks, then merge row-wise
        chunks = []
        columnsize = (len(cells) + columns - 1) // columns
        for column in range(columns):
            chunks.append(cells[column * columnsize:column * columnsize + columnsize])

        empty_filler = [BadgeCell(''), BadgeCell('')]

        cells = [
            sum(cells, []) for cells in zip_longest(*chunks, fillvalue=empty_filler)
        ]

    if header is None:
        header = 'Packaging status' if cells else 'No known packages'

    return render_generic_badge(cells, header=header)
def _parse_devel_packages(packages, latest_versions, factory):
    """Yield developer-maturity packages newer than the latest stable release, flagged as devel."""
    for fields in packages:
        if _as_str(fields['maturity']) != 'developer':
            continue

        pkg = _parse_package(factory.begin(), fields)

        if not pkg.version:
            pkg.log('empty version', severity=Logger.ERROR)
            continue

        # only emit devel versions newer than the latest stable one
        # (the '0' default makes any version qualify when no stable exists)
        if version_compare(pkg.version, latest_versions.get(pkg.name, '0')) > 0:
            pkg.set_flags(PackageFlags.devel)
            yield pkg
def fetch(self, statepath: str, update: bool = True, logger: Logger = NoopLogger()) -> bool:
    """Fetch the freshcode release feed and merge it into a persistent state file.

    The feed only contains recent releases, so entries are accumulated
    across runs; an existing entry is replaced only when the incoming
    version compares greater. Returns True when the state was written.
    """
    if os.path.isfile(statepath) and not update:
        logger.log('no update requested, skipping')
        return False

    state: Dict[str, Any] = {}

    if os.path.isfile(statepath):
        with open(statepath, 'r', encoding='utf-8') as oldstatefile:
            state = json.load(oldstatefile)
        logger.log('loaded old state, {} entries'.format(len(state)))
    else:
        logger.log('starting with empty state')

    newdata = json.loads(do_http(self.url).text)

    if not newdata['releases']:
        raise RuntimeError('Empty freshcode package list received, refusing to go on')

    # add new entries in reversed order, oldest first so newest
    # have higher priority; may also compare versions here
    # NOTE(review): the comment above mentions reversed order, but the loop
    # iterates the feed as-is — confirm which is intended
    for entry in newdata['releases']:
        if 'name' not in entry:
            logger.log('skipping entry with no name')
            continue

        if entry['name'] in state:
            oldentry = state[entry['name']]

            if version_compare(entry['version'], oldentry['version']) > 0:
                logger.log(
                    'replacing entry "{}", version changed {} -> {}'.format(entry['name'], oldentry['version'], entry['version']))
                state[entry['name']] = entry
        else:
            logger.log('adding entry "{}", version {}'.format(entry['name'], entry['version']))
            state[entry['name']] = entry

    with AtomicFile(statepath, 'w', encoding='utf-8') as statefile:
        json.dump(state, statefile.get_file())

    logger.log('saved new state, {} entries'.format(len(state)))

    return True
def package_version_compare(a: VersionComparable, b: VersionComparable) -> int:
    """Three-way package comparison: ROLLING sorts above all, OUTDATED below;
    ties fall through to a flag-aware version comparison."""
    def metaorder(p: VersionComparable) -> int:
        # ROLLING outranks everything; OUTDATED sinks below normal versions
        if p.flags & PackageFlags.ROLLING:
            return 1
        if p.flags & PackageFlags.OUTDATED:
            return -1
        return 0

    def patch_flags(p: VersionComparable) -> int:
        # translate package flag bits into libversion comparison flags
        flags = 0
        if p.flags & PackageFlags.P_IS_PATCH:
            flags |= P_IS_PATCH
        if p.flags & PackageFlags.ANY_IS_PATCH:
            flags |= ANY_IS_PATCH
        return flags

    order_a = metaorder(a)
    order_b = metaorder(b)

    if order_a != order_b:
        return -1 if order_a < order_b else 1

    return version_compare(a.version, b.version, patch_flags(a), patch_flags(b))
def VersionCompare(self, other):
    """Flag-aware three-way version comparison; metaorder (rolling/outdated) takes precedence."""
    self_metaorder = PackageFlags.GetMetaorder(self.flags)
    other_metaorder = PackageFlags.GetMetaorder(other.flags)

    if self_metaorder != other_metaorder:
        return -1 if self_metaorder < other_metaorder else 1

    # translate package flag bits into libversion comparison flags
    self_verflags = (P_IS_PATCH if self.flags & PackageFlags.p_is_patch else 0) \
        | (ANY_IS_PATCH if self.flags & PackageFlags.any_is_patch else 0)
    other_verflags = (P_IS_PATCH if other.flags & PackageFlags.p_is_patch else 0) \
        | (ANY_IS_PATCH if other.flags & PackageFlags.any_is_patch else 0)

    return version_compare(self.version, other.version, self_verflags, other_verflags)
def compare_local_remote_versions(
    local_versions: Iterable[str],
    remotes: Iterable[Remote],
    worker_count: int,
) -> Tuple[Tuple[Remote, str], ...]:
    """Returns the list of remotes_with_new_versions with versions greater
    than the maximum local one"""
    # annotation fixed: the return is a variable-length tuple of pairs,
    # i.e. Tuple[..., ...], not a 1-tuple
    # baseline is the highest non-live local version ("" when none exist,
    # so every remote version compares greater)
    max_version_local = max(set(
        filter(complement(_is_live_version), local_versions)), default="")
    return tuple(
        filter(
            # each element is a (Remote, version) pair
            lambda remote_version: version_compare(remote_version[1], max_version_local) > 0,
            process_remotes_list(remotes, worker_count=worker_count),
        ))
def version_compare(self, other: 'Package') -> int:
    """Flag-aware three-way version comparison; metaorder (rolling/outdated) takes precedence."""
    self_metaorder = PackageFlags.get_metaorder(self.flags)
    other_metaorder = PackageFlags.get_metaorder(other.flags)

    if self_metaorder != other_metaorder:
        return -1 if self_metaorder < other_metaorder else 1

    # translate package flag bits into libversion comparison flags
    self_verflags = (P_IS_PATCH if self.flags & PackageFlags.P_IS_PATCH else 0) \
        | (ANY_IS_PATCH if self.flags & PackageFlags.ANY_IS_PATCH else 0)
    other_verflags = (P_IS_PATCH if other.flags & PackageFlags.P_IS_PATCH else 0) \
        | (ANY_IS_PATCH if other.flags & PackageFlags.ANY_IS_PATCH else 0)

    return version_compare(self.version, other.version, self_verflags, other_verflags)
def iter_parse(self, path: str, factory: PackageFactory) -> Iterable[PackageMaker]:
    """Parse the spack packages dump, yielding one PackageMaker per picked version.

    Picks the greatest release version plus all rolling (branch) versions.
    """
    normalize_version = VersionStripper().strip_left(':')

    for key, pkgdata in iter_json_dict(path, ('packages', None)):
        with factory.begin(key) as pkg:
            pkg.add_name(pkgdata['name'], NameType.SPACK_NAME)
            pkg.add_homepages(pkgdata['homepages'])
            pkg.add_maintainers(f'{m}@spack' for m in pkgdata['maintainers'])
            # keep only local patch files, not remote patch urls
            pkg.set_extra_field('patch', [
                patch.split()[0]
                for patch in pkgdata['patches']
                if '://' not in patch
            ])

            # - no usable keywords/categories (yet)
            # - summaries are multiline, so ignored
            # - dependencies info is available, not used yet

            # spack may contain a lot of versions for a single project,
            # we don't handle that very well, so pick greatest release
            # version and all rolling versions
            picked_verdatas: list[dict[str, Any]] = []

            latest_release_verdata: dict[str, Any] | None = None

            for pkgverdata in pkgdata['version']:
                if 'branch' in pkgverdata:
                    picked_verdatas.append(pkgverdata)
                elif latest_release_verdata is None or version_compare(pkgverdata['version'], latest_release_verdata['version']) > 0:
                    latest_release_verdata = pkgverdata

            if latest_release_verdata:
                picked_verdatas.append(latest_release_verdata)

            for pkgverdata in picked_verdatas:
                verpkg = pkg.clone()

                # branch versions are rolling
                if 'branch' in pkgverdata:
                    verpkg.set_flags(PackageFlags.ROLLING)

                verpkg.set_version(pkgverdata['version'], normalize_version)
                verpkg.add_downloads(pkgverdata['downloads'])

                yield verpkg
def _parse_devel_packages(packages: Iterable[dict[str, Any]], latest_versions: dict[str, str], factory: PackageFactory) -> Iterable[PackageMaker]:
    """Yield developer-maturity packages newer than the latest stable release, flagged as DEVEL."""
    for fields in packages:
        if _as_str(fields['maturity']) != 'developer':
            continue

        distribution, pkg = _parse_package(factory.begin(), fields)

        if not pkg.version:
            pkg.log('empty version', severity=Logger.ERROR)
            continue

        # only emit devel versions newer than the latest stable one
        # (the '0' default makes any version qualify when no stable exists)
        if version_compare(pkg.version, latest_versions.get(distribution, '0')) > 0:
            pkg.set_flags(PackageFlags.DEVEL)
            yield pkg
def Parse(self, path): result = {} # note that we actually parse database prepared by # fetcher, not the file we've downloaded with open(path, 'r', encoding='utf-8') as jsonfile: for entry in json.load(jsonfile)['releases']: pkg = Package() pkg.name = entry['name'] pkg.version = entry['version'] if not pkg.name or not pkg.version: continue homepage = entry.get('homepage') summary = entry.get('summary') description = entry.get('description') #submitter = entry.get('submitter') #download = entry.get('download') license_ = entry.get('license') if homepage: pkg.homepage = homepage if summary: pkg.comment = summary elif description: pkg.comment = description # multiline if license_: pkg.licenses = [license_] # unfiltered garbage #if submitter: # pkg.maintainers = [submitter + '@freshcode'] # ignore for now, may contain download page urls instead of file urls #if download # pkg.downloads = [download] if pkg.name not in result or version_compare( pkg.version, result[pkg.name].version) > 0: result[pkg.name] = pkg return result.values()
def _iter_cabal_hier(path: str) -> Iterable[Dict[str, str]]:
    """Walk an unpacked Hackage index tree, yielding parsed cabal data for
    the greatest version of each module.

    Skips the per-module 'preferred-versions' metadata file; modules with
    no version directories are silently skipped.
    """
    for moduledir in os.listdir(path):
        modulepath = os.path.join(path, moduledir)

        cabalpath: Optional[str] = None
        maxversion: Optional[str] = None

        for versiondir in os.listdir(modulepath):
            if versiondir == 'preferred-versions':
                continue
            if maxversion is None or version_compare(versiondir, maxversion) > 0:
                maxversion = versiondir
                cabalpath = os.path.join(path, moduledir, maxversion, moduledir + '.cabal')

        if cabalpath is not None:
            # explicit encoding: cabal files are text previously read with
            # the locale-dependent default encoding (see PEP 597)
            with open(cabalpath, encoding='utf-8') as cabaldata:
                yield _parse_cabal_file(cabaldata)