def draw_networkx_with_pydot(nx_graph, include_third_party=False, outfile=None, show_reqs=False):
    """ Draws a networkx graph using PyDot, renders it to a PNG and opens
    it in the default web browser.

    Parameters
    ----------
    nx_graph: `networkx.DiGraph`
        The requirements graph
    include_third_party: `bool`
        Show third-party packages on the graph
    outfile: `path`
        If set, will save the graph to this path
    show_reqs: `bool`
        If set, will show the graph of requirements instead of the related
        distributions - this more closely mirrors the underlying data.
    """
    import pydot
    dot_graph = pydot.Dot(graph_type='digraph', suppress_disconnected=False)
    dot_nodes = {}

    # Pick node/edge renderers depending on whether we draw raw requirements
    # or their resolved distributions.
    fn_node = get_dot_dist_node
    fn_edge = get_dot_dist_edge
    if show_reqs:
        fn_node = get_dot_req_node
        fn_edge = get_dot_req_edge

    for nx_node in nx_graph.nodes_iter():
        if (not include_third_party and
                not util.is_inhouse_package(nx_node.project_name)):
            continue
        # Skip requirements that aren't yet 'installed' - this is only the
        # case mid-way through dependency resolution
        if hasattr(nx_node, '_chosen_dist'):
            dot_node = fn_node(nx_node)
            dot_nodes[nx_node] = dot_node
            dot_graph.add_node(dot_node)

    for edge in nx_graph.edges_iter():
        # Only connect nodes that made it onto the graph
        if edge[0] not in dot_nodes or edge[1] not in dot_nodes:
            continue
        dot_graph.add_edge(fn_edge(edge, dot_nodes))

    # BUGFIX: remember whether we created the temporary directory. The
    # original re-tested `not outfile` after assigning it, so the tempdir
    # was never cleaned up.
    tmpdir = None
    if not outfile:
        tmpdir = tempfile.mkdtemp()
        outfile = os.path.abspath(os.path.join(tmpdir, 'graph.png'))

    # TODO: make neato work for nicer layouts
    #dot_graph.write_png(outfile, prog='neato')
    log.info("Rendering graph to {0}".format(outfile))
    dot_graph.write_png(outfile)
    webbrowser.open('file://%s' % outfile)
    # Give the browser time to load the file before (possibly) deleting it
    time.sleep(5)
    if tmpdir is not None:
        shutil.rmtree(tmpdir)
def get_dot_dist_node(req):
    """ Get a pydot node object from a networkx node that represents the dist
        for that node
    """
    import pydot
    # In-house packages are highlighted green; third-party stays grey.
    if util.is_inhouse_package(req.project_name):
        fill_colour = "green"
    else:
        fill_colour = "#cccccc"
    label = ' '.join((str(req.project_name), req._chosen_dist.version))
    return pydot.Node(label, style="filled", fillcolor=fill_colour,
                      shape="box")
def pre(self, command, output_dir, pkg_vars):
    """ Validate the project name and derive the naming variables used
    by the templates.

    Exits the process (via ``sys.exit``) with an error message if the
    project name doesn't start with a recognised in-house namespace
    prefix. Otherwise mutates ``pkg_vars`` in-place, adding the split
    prefix/package parts and separator-substituted name variants.
    """
    if not is_inhouse_package(pkg_vars['project']):
        # BUGFIX: grammar in the user-facing error ("an company" -> "a company")
        msg = "Package name doesn't start with a company prefix.\n" \
              "Prefixes are:\n" \
              "%s" % '\n'.join(CONFIG.namespaces)
        sys.exit(msg)
    nss = pkg_vars['namespace_separator']
    # Split on the first separator only: "acme.foo.bar" -> ("acme", "foo.bar")
    prefix, package = pkg_vars['project'].split(nss, 1)
    pkg_vars['package'] = package
    pkg_vars['namespace_package'] = prefix
    # Name variants with the separator removed / replaced, for templates.
    pkg_vars['project_nodot'] = pkg_vars['project'].replace(nss, '')
    pkg_vars['project_dotted'] = pkg_vars['project'].replace(nss, '.')
    pkg_vars['project_dir'] = pkg_vars['project'].replace(nss, '/')
def get_dot_dist_node(req):
    """ Get a pydot node object from a networkx node that represents the dist
        for that node
    """
    import pydot
    # Defaults: grey box (used for third-party packages).
    fill_colour = "#cccccc"
    shape = "box"
    # In-house packages are highlighted green.
    if util.is_inhouse_package(req.project_name):
        fill_colour = "green"
    # Label is "<project name> <version>"; assumes req._chosen_dist has been
    # set by the resolver before this is called - TODO confirm at call sites.
    return pydot.Node(' '.join(
        (str(req.project_name), req._chosen_dist.version)),
        style="filled", fillcolor=fill_colour, shape=shape)
def pre(self, command, output_dir, pkg_vars):
    """Validate the project name and fill in derived naming variables.

    Exits the process with an error message when the project name does
    not start with a recognised in-house namespace prefix; otherwise
    mutates ``pkg_vars`` in-place.
    """
    if not is_inhouse_package(pkg_vars["project"]):
        msg = (
            "Package name doesn't start with an company prefix.\n"
            "Prefixes are:\n"
            "%s" % "\n".join(CONFIG.namespaces)
        )
        sys.exit(msg)
    nss = pkg_vars["namespace_separator"]
    # Split on the first separator only: "acme.foo.bar" -> ("acme", "foo.bar")
    prefix, package = pkg_vars["project"].split(pkg_vars["namespace_separator"], 1)
    pkg_vars["package"] = package
    pkg_vars["namespace_package"] = prefix
    # Name variants with the separator removed / replaced, for templates.
    pkg_vars["project_nodot"] = pkg_vars["project"].replace(nss, "")
    pkg_vars["project_dotted"] = pkg_vars["project"].replace(nss, ".")
    pkg_vars["project_dir"] = pkg_vars["project"].replace(nss, "/")
def get_dot_req_node(req):
    """ Get a pydot node object from a networkx node that represents the
        requirement and its installed dist
    """
    import pydot
    # Green for in-house packages, grey for everything else.
    fill_colour = ("green" if util.is_inhouse_package(req.project_name)
                   else "#cccccc")
    # A requirement may not have been resolved to a dist yet.
    dist = req._chosen_dist.version if hasattr(req, '_chosen_dist') else ''
    label = '{0} ({1})'.format(req, dist)
    return pydot.Node(label, style="filled", fillcolor=fill_colour,
                      shape="box")
def write_all_revisions_(self):
    """ Create ``allrevisions.txt`` file containing subversion revision
    of every project upon which we depend.

    This won't have the dependencies in it if we've not yet been set-up
    with 'setup.py develop' or similar.
    """
    my_dist = dependency.get_dist(self.distribution.metadata.name)

    # list of (name, version, url, rev) tuples, starting with ourselves
    allrevisions = [(self.distribution.metadata.get_name(),
                     self.distribution.metadata.get_version(),
                     self.full_url,
                     self.revision)]

    all_requires = []
    if my_dist:
        my_require = my_dist.as_requirement()
        try:
            all_requires = pkg_resources.working_set.resolve([my_require])
        except (pkg_resources.DistributionNotFound,
                pkg_resources.VersionConflict):
            # not installed yet -- will probably be OK when we're
            # called after the build has taken place.
            pass

    for dist in all_requires:
        # Only record in-house dependencies, and not ourselves.
        if dist == my_dist or not util.is_inhouse_package(
                dist.project_name):
            continue
        try:
            revisions = parse.read_allrevisions(dist.location,
                                                dist.project_name)
        except IOError as ex:
            log.warn("Can't read allrevisions for %s: %s", dist, ex)
            # BUGFIX: skip this dist. Previously we fell through and used
            # `revisions`, which was unbound (NameError) on the first
            # failure, or stale data from the previous dist.
            continue
        for name, version, url, rev in revisions:
            if pkg_resources.safe_name(name) == dist.project_name:
                allrevisions.append((name, version, url, rev))
                break
        else:
            log.warn("No revision for %s in %s", dist.project_name, dist)

    data = [
        '# These are the VCS revision numbers used for this particular build.',
        '# This file is used by release tools to tag a working build.'
    ] + [','.join(str(e) for e in rev_data) for rev_data in allrevisions]
    self.write_file("all revisions", self.all_revisions_file,
                    '\n'.join(data))
def write_all_revisions_(self):
    """ Create ``allrevisions.txt`` file containing subversion revision
    of every project upon which we depend.

    This won't have the dependencies in it if we've not yet been set-up
    with 'setup.py develop' or similar.
    """
    my_dist = dependency.get_dist(self.distribution.metadata.name)

    # list of (name, version, url, rev) tuples, starting with ourselves
    allrevisions = [(self.distribution.metadata.get_name(),
                     self.distribution.metadata.get_version(),
                     self.full_url,
                     self.revision)]

    all_requires = []
    if my_dist:
        my_require = my_dist.as_requirement()
        try:
            all_requires = pkg_resources.working_set.resolve([my_require])
        except (pkg_resources.DistributionNotFound,
                pkg_resources.VersionConflict):
            # not installed yet -- will probably be OK when we're
            # called after the build has taken place.
            pass

    for dist in all_requires:
        # Only record in-house dependencies, and not ourselves.
        if dist == my_dist or not util.is_inhouse_package(dist.project_name):
            continue
        try:
            revisions = parse.read_allrevisions(dist.location,
                                                dist.project_name)
        except IOError as ex:
            log.warn("Can't read allrevisions for %s: %s", dist, ex)
            # BUGFIX: skip this dist. Previously we fell through and used
            # `revisions`, which was unbound (NameError) on the first
            # failure, or stale data from the previous dist.
            continue
        for name, version, url, rev in revisions:
            if pkg_resources.safe_name(name) == dist.project_name:
                allrevisions.append((name, version, url, rev))
                break
        else:
            log.warn("No revision for %s in %s", dist.project_name, dist)

    data = ['# These are the VCS revision numbers used for this particular build.',
            '# This file is used by release tools to tag a working build.'
            ] + [','.join(str(e) for e in rev_data) for rev_data in allrevisions]
    self.write_file("all revisions", self.all_revisions_file, '\n'.join(data))
def draw_networkx_with_d3(nx_graph, include_third_party=False, outfile=None):
    """ Draws a networkx graph using d3.

    Renders the bundled d3 HTML template with the graph's nodes and
    edges, writes it to ``outfile`` (or a temporary file if not given)
    and opens the result in the default web browser.

    Parameters
    ----------
    nx_graph: `networkx.DiGraph`
        The requirements graph
    include_third_party: `bool`
        Show third-party packages on the graph
    outfile: `path`
        If set, will save the rendered HTML to this path
    """
    from path import path
    tmpl = string.Template((path(__file__).parent / 'd3' / 'fbl.html').text())

    # Map requirement key -> (d3 node index, project name)
    nodes = {}
    edges = []
    i = 0
    for req in nx_graph.nodes_iter():
        if (not include_third_party and
                not util.is_inhouse_package(req.project_name)):
            continue
        nodes[req.key] = (i, req.project_name)
        i += 1

    for edge in nx_graph.edges_iter():
        # Only link nodes that made it into the graph
        if edge[0].key not in nodes or edge[1].key not in nodes:
            continue
        edges.append('{source : %d, target: %d, weight: 0.5}' %
                     (nodes[edge[0].key][0], nodes[edge[1].key][0]))

    nodes = ',\n'.join(['{label: "%s"}' % i[1]
                        for i in sorted(nodes.values(),
                                        key=operator.itemgetter(0))])
    edges = ',\n'.join(edges)

    # BUGFIX: remember whether we created the temporary directory. The
    # original re-tested `not outfile` after assigning it, so the tempdir
    # was never cleaned up.
    tmpdir = None
    if not outfile:
        tmpdir = path(tempfile.mkdtemp())
        outfile = tmpdir / 'graph.html'
    else:
        outfile = path(outfile)
    outfile.write_text(tmpl.safe_substitute(nodes=nodes, links=edges))
    webbrowser.open('file://%s' % outfile)
    # Give the browser time to load the file before (possibly) deleting it
    time.sleep(5)
    if tmpdir is not None:
        shutil.rmtree(tmpdir)
def components_command(plat, ns):
    """Print the in-house components that make up a platform package."""
    # Guard: the named package must be a known, installed platform package.
    if ns.package not in plat.get_platform_packages():
        raise platypus.PlatError("Package %s is not a platform package" % ns.package)
    pkg_info = plat.get_installed_version(ns.package)
    if not pkg_info:
        raise platypus.PlatError("Package %s is not installed" % ns.package)

    meta = plat.get_package_metadata(ns.package, pkg_info.version)
    statusmsg("%s (%s): %s" % (ns.package, pkg_info.version,
                               meta.get("summary", "")))

    # Collect "name (version)" for every in-house dependency, sorted.
    deps = plat.get_package_dependencies(ns.package).values()
    labels = sorted("%s (%s)" % (pkg.name, pkg.version)
                    for pkg in deps
                    if pkg and util.is_inhouse_package(pkg.name))
    for label in labels:
        statusmsg(" " + label)
def draw_networkx_with_d3(nx_graph, include_third_party=False, outfile=None):
    """ Draws a networkx graph using d3.

    Renders the bundled d3 HTML template with the graph's nodes and
    edges, writes it to ``outfile`` (or a temporary file if not given)
    and opens the result in the default web browser.
    """
    from path import path
    tmpl = string.Template((path(__file__).parent / 'd3' / 'fbl.html').text())

    # Map requirement key -> (d3 node index, project name)
    nodes = {}
    edges = []
    i = 0
    for req in nx_graph.nodes_iter():
        if (not include_third_party and
                not util.is_inhouse_package(req.project_name)):
            continue
        nodes[req.key] = (i, req.project_name)
        i += 1

    for edge in nx_graph.edges_iter():
        # Only link nodes that made it into the graph
        if edge[0].key not in nodes or edge[1].key not in nodes:
            continue
        edges.append('{source : %d, target: %d, weight: 0.5}' %
                     (nodes[edge[0].key][0], nodes[edge[1].key][0]))

    nodes = ',\n'.join(['{label: "%s"}' % i[1]
                        for i in sorted(nodes.values(),
                                        key=operator.itemgetter(0))])
    edges = ',\n'.join(edges)

    # BUGFIX: remember whether we created the temporary directory. The
    # original re-tested `not outfile` after assigning it, so the tempdir
    # was never cleaned up.
    tmpdir = None
    if not outfile:
        tmpdir = path(tempfile.mkdtemp())
        outfile = tmpdir / 'graph.html'
    else:
        outfile = path(outfile)
    outfile.write_text(tmpl.safe_substitute(nodes=nodes, links=edges))
    webbrowser.open('file://%s' % outfile)
    # Give the browser time to load the file before (possibly) deleting it
    time.sleep(5)
    if tmpdir is not None:
        shutil.rmtree(tmpdir)
def get_version_comparator(self, requirement):
    """ Here we pick between 'dev' or 'final' versions.

    We want to use different logic depending on if this is a
    third-party or in-house package:

    In-house-packages: we usually want the latest dev version as
    keeping on head revisions is sensible to stop code going stale

    Third-party: we usually want the latest final version to protect
    ourselves from OSS developers exposing their latest untested code
    to the internet at large.

    To override this logic the packager needs to specify an explicit
    version pin in install_requires or similar for third-party
    packages, or use the prefer-final setup flag for in-house packages.
    """
    # Third-party packages always get the final-version comparator.
    if not util.is_inhouse_package(requirement.project_name):
        log.debug(' --- third-party package, always prefer-final')
        return easy_install._final_version
    # In-house: honour the prefer-final flag, otherwise track dev versions.
    if self._prefer_final:
        log.debug(' --- in-house package, prefer-final')
        return easy_install._final_version
    log.debug(' --- in-house package, prefer-dev')
    return self.is_dev_version
def test_is_inhouse_package():
    """Only names under a configured namespace count as in-house."""
    patched_config = mock.patch.object(util, 'CONFIG', TEST_CONFIG)
    with patched_config:
        # A namespaced project is in-house; a bare name is not.
        assert util.is_inhouse_package('acme.foo')
        assert not util.is_inhouse_package('foo')