# Older revision of devpi_web's version view (uses context.name and the
# /{user}/{index}/+simple/{name} route, and the root/pypi heuristic for
# tox results).  functools and py are imported here; ContextWrapper,
# get_files_info, get_docs_info and get_description come from the
# surrounding module, and html is assumed to be py.xml.html, the py
# library's HTML builder the loop below relies on.
import functools

import py

html = py.xml.html


def version_get(context, request):
    context = ContextWrapper(context)
    user, index = context.username, context.index
    name, version = context.name, context.version
    stage, verdata = context.stage, context.verdata
    infos = []
    skipped_keys = frozenset(
        ("description", "home_page", "name", "summary", "version"))
    for key, value in sorted(verdata.items()):
        if key in skipped_keys or key.startswith('+'):
            continue
        if isinstance(value, list):
            if not len(value):
                continue
            value = html.ul([html.li(x) for x in value]).unicode()
        else:
            if not value:
                continue
            value = py.xml.escape(value)
        infos.append((py.xml.escape(key), value))
    show_toxresults = not (user == 'root' and index == 'pypi')
    linkstore = stage.get_linkstore_perstage(name, version)
    files = get_files_info(request, linkstore, show_toxresults)
    docs = get_docs_info(request, stage, verdata)
    home_page = verdata.get("home_page")
    nav_links = []
    if docs:
        nav_links.append(dict(
            title="Documentation",
            url=docs['url']))
    if home_page:
        nav_links.append(dict(
            title="Homepage",
            url=home_page))
    nav_links.append(dict(
        title="Simple index",
        url=request.route_url(
            "/{user}/{index}/+simple/{name}",
            user=context.username, index=context.index,
            name=context.name)))
    return dict(
        title="%s/: %s-%s metadata and description" % (
            stage.name, name, version),
        content=get_description(stage, name, version),
        summary=verdata.get("summary"),
        nav_links=nav_links,
        infos=infos,
        files=files,
        show_toxresults=show_toxresults,
        make_toxresults_url=functools.partial(
            request.route_url, "toxresults",
            user=context.username, index=context.index,
            name=context.name, version=context.version),
        make_toxresult_url=functools.partial(
            request.route_url, "toxresult",
            user=context.username, index=context.index,
            name=context.name, version=context.version))
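# --- illustrative sketch (not part of the original module) ---------------
# A minimal, self-contained example of how the loop above turns a
# list-valued metadata field such as "classifiers" into an HTML <ul>,
# assuming py.xml.html as the builder; the field values are invented.
def _example_render_list_field():
    import py
    html = py.xml.html
    classifiers = [
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License"]
    # same call shape as in version_get(); py.xml flattens the list of
    # child tags and escapes text content on serialization
    markup = html.ul([html.li(c) for c in classifiers]).unicode()
    return markup  # markup along the lines of '<ul><li>...</li>...</ul>'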
# RSS-feed plugin hook for devpi-server.  pickle, datetime and PyRSS2Gen
# are imported here; debug, warn and server_url are module-level names of
# the plugin, and description is assumed to be devpi_web's description
# module (it provides the get_description() used by the views above).
import datetime
import pickle

import PyRSS2Gen
from devpi_web import description


def devpiserver_on_upload(stage, project, version, link):
    """ Called when a file is uploaded to a private stage for
    a project/version.  link.entry.file_exists() may be false because
    a more recent revision deleted the file (and files are not revisioned).
    NOTE that this hook is currently NOT called for the implicit "caching"
    uploads to the pypi mirror.
    """
    debug("devpiserver_on_upload called")
    debug("project=%s, version=%s, link=%s" % (project, version, link))
    if ("rss_active" in stage.ixconfig) and \
            (stage.ixconfig["rss_active"] in [False, "False"]):
        debug("rss not active for this index")
        return
    if not link.entry.file_exists():
        # taken from devpi_web.main.devpiserver_on_upload:
        # on replication or import we might be at a lower than
        # current revision and the file might have been deleted already
        warn("ignoring lost upload: %s", link)
        return  # nothing to publish for a lost upload
    index_url = "%s/%s" % (server_url, stage.name)
    server_rss_dir = stage.xom.config.serverdir.join(".rss")
    xml_file = server_rss_dir.join("%s.xml" % stage.name.replace("/", "."))
    pickle_file = server_rss_dir.join(
        "%s.pickle" % stage.name.replace("/", "."))
    if pickle_file.exists():
        debug("loading pickle file: %s" % pickle_file.strpath)
        # pickles must be read in binary mode
        with open(pickle_file.strpath, "rb") as f:
            rss = pickle.load(f)
    else:
        debug("pickle file doesn't exist yet")
        rss = PyRSS2Gen.RSS2(
            title="Devpi index '%s'" % stage.name,
            link=index_url,
            description="The latest package uploads",
            lastBuildDate=datetime.datetime.now())
    # cap the description size (first by lines, then by characters)
    _description = description.get_description(stage, project, version)
    if stage.xom.config.args.rss_truncate_desc:
        if _description.count("\n") > 32:
            debug("reducing number of lines (%s)" %
                  _description.count("\n"))
            # splitlines(True) keeps the line endings, so join with ""
            _description = "".join(
                _description.splitlines(True)[:32] + ["[...]"])
        if len(_description) > 1024:
            debug("reducing number of characters (%s)" % len(_description))
            _description = _description[:1024] + "[...]"
    while len(rss.items) >= stage.xom.config.args.rss_max_items:
        debug("reducing number of rss items (%s)" % len(rss.items))
        rss.items.pop()
    rss.items.insert(0, PyRSS2Gen.RSSItem(
        title="%s %s" % (project, version),
        link="%s/%s/%s" % (index_url, project, version),
        description=_description,
        guid=PyRSS2Gen.Guid("%s/%s/%s" % (index_url, project, version)),
        pubDate=datetime.datetime.now()))
    if not server_rss_dir.exists():
        debug("creating server rss dir: %s" % server_rss_dir.strpath)
        server_rss_dir.mkdir()
    debug("writing xml file: %s" % xml_file.strpath)
    with open(xml_file.strpath, "w") as f:
        rss.write_xml(f, encoding="utf-8")
    debug("writing pickle file: %s" % pickle_file.strpath)
    # write in binary mode and dump directly to the file; the original
    # went through a StringIO and a text-mode file, which breaks on
    # Python 3 where pickle produces bytes
    with open(pickle_file.strpath, "wb") as f:
        pickle.dump(rss, f)
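# --- registration sketch (not from the original source) ------------------
# How a hook like the one above is typically wired into devpi-server:
# plugins are pluggy plugins, marking implementations with the
# "devpiserver" HookimplMarker and exposing the module via a
# "devpi_server" setuptools entry point.  The distribution and module
# names in the entry-point example are hypothetical.
from pluggy import HookimplMarker

hookimpl = HookimplMarker("devpiserver")

# applied as a @hookimpl decorator in a real plugin module, equivalent to:
devpiserver_on_upload = hookimpl(devpiserver_on_upload)

# setup.py of the (hypothetical) plugin distribution:
#     entry_points={"devpi_server": ["devpi-rss = devpi_rss.main"]}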
def version_get(context, request): """ Show version for the precise stage, ignores inheritance. """ context = ContextWrapper(context) name, version = context.verified_project, context.version stage = context.stage try: verdata = context.get_versiondata(perstage=True) except stage.UpstreamError as e: log.error(e.msg) raise HTTPBadGateway(e.msg) infos = [] skipped_keys = frozenset( ("description", "home_page", "name", "summary", "version")) for key, value in sorted(verdata.items()): if key in skipped_keys or key.startswith('+'): continue if isinstance(value, seq_types): if not len(value): continue value = html.ul([html.li(x) for x in value]).unicode() else: if not value: continue value = py.xml.escape(value) infos.append((py.xml.escape(key), value)) show_toxresults = (stage.ixconfig['type'] != 'mirror') linkstore = stage.get_linkstore_perstage(name, version) files = get_files_info(request, linkstore, show_toxresults) docs = get_docs_info(request, stage, linkstore) home_page = verdata.get("home_page") nav_links = [] if docs: nav_links.append(dict( title="Documentation", url=docs['url'])) if home_page: nav_links.append(dict( title="Homepage", url=home_page)) nav_links.append(dict( title="Simple index", url=request.route_url( "/{user}/{index}/+simple/{project}", user=context.username, index=context.index, project=context.project))) if hasattr(stage, 'get_mirror_whitelist_info'): whitelist_info = stage.get_mirror_whitelist_info(name) else: whitelist_info = dict( has_mirror_base=stage.has_mirror_base(name), blocked_by_mirror_whitelist=False) if whitelist_info['has_mirror_base']: for base in reversed(list(stage.sro())): if base.ixconfig["type"] != "mirror": continue mirror_web_url_fmt = base.ixconfig.get("mirror_web_url_fmt") if not mirror_web_url_fmt: continue nav_links.append(dict( title="%s page" % base.ixconfig.get("title", "Mirror"), url=mirror_web_url_fmt.format(name=name))) return dict( title="%s/: %s-%s metadata and description" % (stage.name, name, version), content=get_description(stage, name, version), summary=verdata.get("summary"), nav_links=nav_links, infos=infos, files=files, blocked_by_mirror_whitelist=whitelist_info['blocked_by_mirror_whitelist'], show_toxresults=show_toxresults, make_toxresults_url=functools.partial( request.route_url, "toxresults", user=context.username, index=context.index, project=context.project, version=context.version), make_toxresult_url=functools.partial( request.route_url, "toxresult", user=context.username, index=context.index, project=context.project, version=context.version))