def get_files_info(request, linkstore, show_toxresults=False):
    """Collect display metadata for each release file of *linkstore*.

    Returns a list of dicts (title, url, basename, md5, dist_type,
    py_version, size) ordered by file basename.  When *show_toxresults*
    is true, each dict may also carry a 'toxresults' entry.
    """
    filedata = linkstore.get_links(rel='releasefile')
    if not filedata:
        log.warn("project %r version %r has no files",
                 linkstore.projectname, linkstore.version)
    infos = []
    for link in sorted(filedata, key=attrgetter('basename')):
        entry = link.entry
        url = url_for_entrypath(request, link.entrypath)
        # prefer an egg fragment; otherwise pin the download by md5
        if entry.eggfragment:
            url = "%s#egg=%s" % (url, entry.eggfragment)
        elif entry.md5:
            url = "%s#md5=%s" % (url, entry.md5)
        py_version, file_type = get_pyversion_filetype(link.basename)
        if py_version == 'source':
            py_version = ''
        if entry.file_exists():
            size = "%.0f %s" % sizeof_fmt(entry.file_size())
        else:
            size = ''
        info = dict(
            title=link.basename,
            url=url,
            basename=link.basename,
            md5=entry.md5,
            dist_type=dist_file_types.get(file_type, ''),
            py_version=py_version,
            size=size)
        if show_toxresults:
            tox_info = get_toxresults_info(linkstore, link)
            if tox_info:
                info['toxresults'] = tox_info
        infos.append(info)
    return infos
def get_files_info(request, linkstore, show_toxresults=False):
    """Collect display metadata for each release file of *linkstore*.

    Returns a list of dicts (title, url, basename, hash_spec, dist_type,
    py_version, last_modified, history, size) ordered by file basename.
    When *show_toxresults* is true, each dict may also carry a
    'toxresults' entry.
    """
    filedata = linkstore.get_links(rel='releasefile')
    if not filedata:
        log.warn("project %r version %r has no files",
                 linkstore.project, linkstore.version)
    infos = []
    for link in sorted(filedata, key=attrgetter('basename')):
        entry = link.entry
        url = url_for_entrypath(request, link.entrypath)
        if getattr(entry, 'eggfragment', None):
            # BBB for devpi-server < 5.0.0: eggfragment was the result of
            # scraping for downloads outside PyPI; drop this branch once
            # devpi-web requires devpi-server >= 5.0.0
            url += "#egg=%s" % entry.eggfragment
        elif entry.hash_spec:
            url += "#" + entry.hash_spec
        py_version, file_type = get_pyversion_filetype(link.basename)
        if py_version == 'source':
            py_version = ''
        if entry.file_exists():
            size = "%.0f %s" % sizeof_fmt(entry.file_size())
        else:
            size = ''
        # older links may not implement get_logs(); treat those as no history
        try:
            log_entries = link.get_logs()
        except AttributeError:
            history = []
        else:
            history = [make_history_view_item(request, x) for x in log_entries]
        last_modified = format_timetuple(parsedate(entry.last_modified))
        info = dict(
            title=link.basename,
            url=url,
            basename=link.basename,
            hash_spec=entry.hash_spec,
            dist_type=dist_file_types.get(file_type, ''),
            py_version=py_version,
            last_modified=last_modified,
            history=history,
            size=size)
        if show_toxresults:
            tox_info = get_toxresults_info(linkstore, link)
            if tox_info:
                info['toxresults'] = tox_info
        infos.append(info)
    return infos
def get_files_info(request, linkstore, show_toxresults=False):
    """Collect display metadata for each release file of *linkstore*.

    Returns a list of dicts (title, url, basename, hash_spec, dist_type,
    py_version, last_modified, history, size) ordered by file basename.
    When *show_toxresults* is true, each dict may also carry a
    'toxresults' entry.
    """
    filedata = linkstore.get_links(rel='releasefile')
    if not filedata:
        log.warn("project %r version %r has no files",
                 linkstore.project, linkstore.version)
    infos = []
    for link in sorted(filedata, key=attrgetter('basename')):
        entry = link.entry
        url = url_for_entrypath(request, link.entrypath)
        # prefer an egg fragment; otherwise append the hash spec fragment
        if entry.eggfragment:
            url = "%s#egg=%s" % (url, entry.eggfragment)
        elif entry.hash_spec:
            url = url + "#" + entry.hash_spec
        py_version, file_type = get_pyversion_filetype(link.basename)
        if py_version == 'source':
            py_version = ''
        if entry.file_exists():
            size = "%.0f %s" % sizeof_fmt(entry.file_size())
        else:
            size = ''
        # older links may not implement get_logs(); treat those as no history
        try:
            log_entries = link.get_logs()
        except AttributeError:
            history = []
        else:
            history = [make_history_view_item(request, x) for x in log_entries]
        last_modified = format_timetuple(parsedate(entry.last_modified))
        info = dict(
            title=link.basename,
            url=url,
            basename=link.basename,
            hash_spec=entry.hash_spec,
            dist_type=dist_file_types.get(file_type, ''),
            py_version=py_version,
            last_modified=last_modified,
            history=history,
            size=size)
        if show_toxresults:
            tox_info = get_toxresults_info(linkstore, link)
            if tox_info:
                info['toxresults'] = tox_info
        infos.append(info)
    return infos
def findlinks_view(context, request):
    """Render a plain HTML page listing every release link reachable from
    the current stage, skipping mirror-type indexes and deduplicating by
    egg fragment (when present) or file basename.
    """
    title = "%s: all package links without root/pypi" % (context.stage.name)
    # gather project names from all non-mirror stages in resolution order
    projectnames = set()
    for stage, names in context.stage.op_sro("list_projectnames_perstage"):
        if stage.ixconfig["type"] != "mirror":
            projectnames.update(names)
    # collect links, keeping only the first occurrence of each dedup key
    seen = set()
    all_links = []
    for projectname in sorted(projectnames):
        for stage, res in context.stage.op_sro_check_pypi_whitelist(
                "get_releaselinks_perstage", projectname=projectname):
            if stage.ixconfig["type"] == "mirror":
                continue
            for link in res:
                key = link.eggfragment if link.eggfragment else link.basename
                if key in seen:
                    continue
                seen.add(key)
                all_links.append(link)
    links = []
    for link in sorted(all_links, key=attrgetter('basename')):
        entry = link.entry
        href = url_for_entrypath(request, link.entrypath)
        if entry.eggfragment:
            href = "%s#egg=%s" % (href, entry.eggfragment)
        elif entry.md5:
            href = "%s#md5=%s" % (href, entry.md5)
        # show "user/index " prefix before each file link
        prefix = "/".join(link.entrypath.split("/", 2)[:2]) + " "
        links.extend(
            [prefix, html.a(link.basename, href=href), html.br(), "\n"])
    if not links:
        links = [html.p('No releases.')]
    page = html.html(
        html.head(html.title(title)),
        html.body(html.h1(title), "\n", links))
    return Response(page.unicode(indent=2))
def findlinks_view(context, request):
    """Render a plain HTML page listing every release link reachable from
    the current stage, skipping mirror-type indexes and deduplicating by
    egg fragment (when present) or file basename.
    """
    title = "%s: all package links without root/pypi" % (context.stage.name)
    # gather project names from all non-mirror stages in resolution order
    projects = set()
    for stage, names in context.stage.op_sro("list_projects_perstage"):
        if stage.ixconfig["type"] != "mirror":
            projects.update(names)
    # collect links, keeping only the first occurrence of each dedup key
    seen = set()
    all_links = []
    for project in sorted(projects):
        for stage, res in context.stage.op_sro_check_mirror_whitelist(
                "get_releaselinks_perstage", project=project):
            if stage.ixconfig["type"] == "mirror":
                continue
            for link in res:
                # getattr guard: older/newer link objects may lack eggfragment
                key = getattr(link, 'eggfragment', None) or link.basename
                if key in seen:
                    continue
                seen.add(key)
                all_links.append(link)
    links = []
    for link in sorted(all_links, key=attrgetter('basename')):
        entry = link.entry
        href = url_for_entrypath(request, link.entrypath)
        if getattr(entry, 'eggfragment', None):
            href = "%s#egg=%s" % (href, entry.eggfragment)
        elif entry.hash_spec:
            href = href + "#%s" % entry.hash_spec
        # show "user/index " prefix before each file link
        prefix = "/".join(link.entrypath.split("/", 2)[:2]) + " "
        links.extend(
            [prefix, html.a(link.basename, href=href), html.br(), "\n"])
    if not links:
        links = [html.p('No releases.')]
    page = html.html(
        html.head(html.title(title)),
        html.body(html.h1(title), "\n", links))
    return Response(page.unicode(indent=2))