def test_remote_sparql():
    """
    reconstitution test
    """
    factory = OntologyFactory()
    # default method is sparql
    ont = factory.create('pato')
    g = ont.get_graph()
    info = g.nodes[PLOIDY]  # node attribute dict (g.node[...] on networkx < 2)
    print(str(info))
    nodes = g.nodes()
    print(len(nodes))
    assert len(nodes) > 100
    nbrs = list(g.successors(PLOIDY))
    print("SUCC:" + str(nbrs))
    parents = list(g.predecessors(PLOIDY))
    print("PRED:" + str(parents))
    assert parents == ['PATO:0001396']
    ancs = ancestors(g, PLOIDY)
    print("ANCS:" + str(ancs))
    assert 'PATO:0000001' in ancs
    print(g)
    Q = ['.*shape.*']
    w = GraphRenderer.create('tree')

    shapes1 = ont.resolve_names(Q, is_regex=True, is_remote=False)
    print("SHAPE Q:" + str(shapes1))
    show_nodes(w, ont, shapes1)
    assert Y_SHAPED in shapes1

    shapes2 = ont.resolve_names(Q, is_regex=True, is_remote=True)
    print("SHAPE Q:" + str(shapes2))
    show_nodes(w, ont, shapes2)
    assert Y_SHAPED in shapes2
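A minimal standalone sketch of the networkx calls the test relies on, using a hypothetical three-term hierarchy in place of the live PATO endpoint (edge orientation matches what the asserts above imply: edges run from parent term to child term, so predecessors are direct parents and dag.ancestors are all superclasses):

import networkx as nx
from networkx.algorithms.dag import ancestors

# hypothetical toy hierarchy: quality -> cellular quality -> ploidy
g = nx.DiGraph()
g.add_edge("quality", "cellular quality")
g.add_edge("cellular quality", "ploidy")

assert list(g.predecessors("ploidy")) == ["cellular quality"]     # direct parent
assert ancestors(g, "ploidy") == {"quality", "cellular quality"}  # all superclasses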
Example 2
    def get_obj_ref_graph(self,
                          *obj_ids: IdT,
                          direction=archives.OUTGOING,
                          max_dist: int = None) -> networkx.DiGraph:
        obj_ids = set(obj_ids)
        graph = self._archive.get_obj_ref_graph(*obj_ids,
                                                direction=direction,
                                                max_dist=max_dist)

        # If there is an open transaction, fold its information into the graph as well
        trans = self._historian.current_transaction()  # type: transactions.Transaction
        if trans is not None:
            _update_from_transaction(graph, trans)

            # Cull all the nodes that are not reachable from the nodes of interest
            reachable = set()
            for obj_id in obj_ids:
                if direction == archives.OUTGOING:
                    reachable.update(dag.descendants(graph, obj_id))
                else:
                    reachable.update(dag.ancestors(graph, obj_id))

            # Remove all non-reachable nodes except obj_ids as these can stay even if they have no
            # edges
            graph.remove_nodes_from(set(graph.nodes) - obj_ids - reachable)

        return graph
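A minimal sketch of the culling step above on a hypothetical reference graph, using plain networkx names (dag.descendants for the outgoing direction):

import networkx as nx
from networkx.algorithms import dag

# hypothetical object-reference graph: a -> b -> c, plus an unrelated node d
graph = nx.DiGraph([("a", "b"), ("b", "c")])
graph.add_node("d")

obj_ids = {"a"}
reachable = set()
for obj_id in obj_ids:
    reachable.update(dag.descendants(graph, obj_id))  # outgoing direction

# keep the nodes of interest plus everything reachable from them
graph.remove_nodes_from(set(graph.nodes) - obj_ids - reachable)
assert set(graph.nodes) == {"a", "b", "c"}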
Example 3
def network_to_echart(write_to_file: bool = False, layout: bool = False):
    nodes = []
    no_nodes = len(LANGS_NETWORK.nodes)
    for node in LANGS_NETWORK.nodes:
        lang_name = node.split('-')[0]
        no_ancestors = len(ancestors(LANGS_NETWORK, node))
        no_descendants = len(descendants(LANGS_NETWORK, node))
        size = min(
            20,
            max(2, ((no_ancestors / no_nodes) * 100 +
                    (no_descendants / no_nodes) * 100)))
        node = {
            'name': node,
            'symbolSize': size,
            'id': node,
            'category': lang_name
        }
        nodes.append(node)
    nodes.sort(key=lambda x: x['name'])
    edges = []
    for edge in LANGS_NETWORK.edges:
        edges.append({'source': edge[0], 'target': edge[1]})
    if write_to_file:
        with open(
                os.path.join(os.path.dirname(static_file),
                             'languages-network.json'), 'w') as f:
            f.write(json.dumps({'nodes': nodes, 'edges': edges}))
        LOGGER.info('Wrote network nodes and edges to static file.')
    return nodes, edges
Example 4
def get_resource_dependencies(context, r):
    G = cndp_get_digraph(context.names, context.connections)
    # print G.nodes(), G.edges()
    # XXX do we need to remove the self-loops?
    from networkx.algorithms.dag import ancestors
    all_ancestors = ancestors(G, r.dp)
    # print('ancestors: %s')
    return all_ancestors
Example 5
    def find_independent_sensors(self, sensor, sensor_list):
        '''Returns a list of names of sensors that do not depend
        on the same component as "sensor"

        Keyword arguments:
        sensor -- name of a sensor
        sensor_list -- a list of sensor names

        '''
        independent_sensors = list()
        sensor_ancestors = set(ancestors(self.structural_model, sensor))
        for other_sensor in sensor_list:
            other_sensor_ancestors = set(
                ancestors(self.structural_model, other_sensor))
            if not sensor_ancestors.intersection(other_sensor_ancestors):
                independent_sensors.append(other_sensor)
        return independent_sensors
Example 7
def ingraph(g, target, distance=None):
    ins = ancestors(g, target)
    if distance is not None:
        ins = {
            i
            for i in ins
            if shortest_path_length(g, source=i, target=target) <= distance
        }
    return g.subgraph(ins | {target})
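A possible usage sketch for ingraph, assuming the imports the function relies on (ancestors and shortest_path_length from networkx):

import networkx as nx
from networkx.algorithms.dag import ancestors
from networkx import shortest_path_length

g = nx.DiGraph([("a", "b"), ("b", "c"), ("c", "d")])

# everything upstream of "d"
assert set(ingraph(g, "d").nodes) == {"a", "b", "c", "d"}
# only nodes within one hop of "d"
assert set(ingraph(g, "d", distance=1).nodes) == {"c", "d"}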
Example 8
def compute(s):
    bags = nx.DiGraph()
    my_bag = "shiny gold"
    for rule in s.splitlines():
        (outer, inner) = rule.split("contain")
        matches = re.findall("\\d ([a-z]* [a-z]*) bag", inner)
        for m in matches:
            bags.add_edge(re.match("([a-z]* [a-z]*) bag", outer).group(1), m)
    return len(ancestors(bags, my_bag))
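A usage sketch on a small hypothetical rule set (compute assumes networkx as nx, re and dag.ancestors are imported):

rules = "\n".join([
    "light red bags contain 1 bright white bag, 2 muted yellow bags.",
    "bright white bags contain 1 shiny gold bag.",
    "muted yellow bags contain 2 shiny gold bags, 9 faded blue bags.",
    "faded blue bags contain no other bags.",
])
# light red, bright white and muted yellow can all eventually hold a shiny gold bag
assert compute(rules) == 3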
Example 9
def collect_constraint_topological_orders(prob):
    dag = prob.dag
    for sink_node in prob.con_ends_num:
        dependencies = ancestors(dag, sink_node)
        dependencies.add(sink_node)
        con_dag = dag.subgraph(dependencies)
        eval_order = du.deterministic_topological_sort(con_dag)
        prob.con_top_ord[sink_node] = eval_order
    prob.ncons = len(prob.con_ends_num)
    set_nzeros(prob)
Example 10
def collect_constraint_topological_orders(prob):
    dag = prob.dag
    for sink_node in prob.con_ends_num:
        dependencies = ancestors(dag, sink_node)
        dependencies.add(sink_node)
        con_dag = dag.subgraph(dependencies)
        eval_order = du.deterministic_topological_sort(con_dag)
        prob.con_top_ord[sink_node] = eval_order
    prob.ncons = len(prob.con_ends_num)
    set_nzeros(prob)
Example 11
    def _complete_parents(self, ontology, walked):
        allontologies = [ontology]
        walked.append(ontology)
        if ontology in self.graph:
            for ancestor in ancestors(self.graph, ontology):
                if ancestor not in walked:
                    allontologies += self._complete_parents(ancestor, walked)
            return allontologies
        else:
            return allontologies
Example 12
def independent(G, n1, n2, n3=None):
    """Computes whether n1 and n2 are independent given n3 on the DAG G
    
    Can find a decent exposition of the algorithm at http://web.mit.edu/jmn/www/6.034/d-separation.pdf
    """
    if n3 is None:
        n3 = set()
    elif isinstance(n3, (int, str)):
        n3 = set([n3])
    elif not isinstance(n3, set):
        n3 = set(n3)
    # Construct the ancestral graph of n1, n2, and n3
    a = ancestors(G, n1) | ancestors(G, n2) | {n1, n2} | n3
    G = G.subgraph(a)
    # Moralize the graph
    M = moral_graph(G)
    # Remove n3 (if applicable)
    M.remove_nodes_from(n3)
    # Check that path exists between n1 and n2
    return not has_path(M, n1, n2)
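A quick sanity check on a collider structure, assuming the names used above come from networkx (dag.ancestors, moral_graph from networkx.algorithms.moral, has_path):

import networkx as nx

# collider: A -> C <- B
G = nx.DiGraph([("A", "C"), ("B", "C")])

assert independent(G, "A", "B")           # marginally independent
assert not independent(G, "A", "B", "C")  # conditioning on the collider opens the path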
Example 13
    def _check_valid_adjustment_set_(self, graph, adjustment_set):
        """Checks the adjustment set as valid using the following 6 steps
        Step 1) check no descendants of X are included in adjustment set
        Step 2) delete variables that meet certain definitions
        Step 3) delete all arrows that originate from exposure
        Step 4) connect all source nodes (to assess for collider stratification)
        Step 5) convert to undirected graph
        Step 6) check whether a path exists between exposure & outcome
        """
        dag = graph.copy()

        # List of all nodes valid for adjustment
        all_nodes = list(dag.nodes())
        all_nodes.remove(self.exposure)
        all_nodes.remove(self.outcome)

        # Step 1) Check no descendants of X
        desc_x = descendants(dag, self.exposure)
        if desc_x & set(adjustment_set):
            return False

        # Step 2) Delete all variables that are (a) non-ancestors of X, (b) non-ancestors of Y, and
        #         (c) non-ancestors of the adjustment set
        set_check = set(adjustment_set).union([self.exposure, self.outcome])
        set_remove = set(dag.nodes)
        for n in set_check:
            set_remove = set_remove & (dag.nodes - ancestors(dag, n))
        set_remove = set_remove - set([self.exposure, self.outcome
                                       ]) - set(adjustment_set)
        dag.remove_nodes_from(set_remove)

        # Step 3) Delete all arrows with X as the source
        for endpoint in list(dag.successors(self.exposure)):
            dag.remove_edge(self.exposure, endpoint)

        # Step 4) Directly connect all source nodes pointing to same endpoint (for collider assessment)
        for n in dag:
            sources = list(dag.predecessors(n))
            if len(sources) > 1:
                for s1, s2 in combinations(sources, 2):
                    if not (dag.has_edge(s2, s1) or dag.has_edge(s1, s2)):
                        dag.add_edge(s1, s2)

        # Step 5) Remove arrow directionality
        uag = dag.to_undirected()

        # Step 6) Remove nodes from the adjustment set
        uag.remove_nodes_from(adjustment_set)

        # Check whether a path between X and Y exists now
        if nx.has_path(uag, self.exposure, self.outcome):
            return False
        else:
            return True
Example 14
def remove_unused_def_vars(prob):
    dag = prob.dag
    removed = [ ]
    for n in du.itr_sinks(dag, prob.defined_vars):
        deps = ancestors(dag, n)
        deps.add(n)
        con_dag = dag.subgraph(deps)
        reverse_order = topological_sort(con_dag, reverse=True)
        removed = delete_sinks_recursively(dag, reverse_order)
    print('Unused nodes:', removed)
    prob.defined_vars.difference_update(removed)
Example 15
def remove_unused_def_vars(prob):
    dag = prob.dag
    removed = []
    for n in du.itr_sinks(dag, prob.defined_vars):
        deps = ancestors(dag, n)
        deps.add(n)
        con_dag = dag.subgraph(deps)
        reverse_order = topological_sort(con_dag, reverse=True)
        removed = delete_sinks_recursively(dag, reverse_order)
    print('Unused nodes:', removed)
    prob.defined_vars.difference_update(removed)
Example 16
def satisfies_backdoor_criteria(dag, X, outcome, S):
    # follows https://bmcmedresmethodol.biomedcentral.com/articles/10.1186/1471-2288-8-70#Fig4
    # test this on the M graph

    S = set(S)
    dag = dag.copy()

    # step 1:
    # The covariates chosen to reduce bias should not be descendants of X
    descendants_X = descendants(dag, X)

    if descendants_X & S:
      return False

    # step 2:
    # Delete all variables that satisfy all of the following:
    # 1) non-ancestors (an ancestor is a variable that causes another variable either directly or indirectly) of X,
    # 2) non-ancestors of the Outcome and
    # 3) non-ancestors of the covariates that one is including to reduce bias

    nodes_to_check = S.union([X, outcome])
    nodes_to_remove = set(dag.nodes)
    for node in nodes_to_check:
        nodes_to_remove = nodes_to_remove & (dag.nodes - ancestors(dag, node))

    nodes_to_remove = nodes_to_remove - set([X, outcome]) - S
    dag.remove_nodes_from(nodes_to_remove)

    # step 3:
    # Delete all lines emanating from X
    for child in list(dag.successors(X)):
        dag.remove_edge(X, child)

    # step 4:
    # Connect any two parents (direct causes of a variable) sharing a common child (this step appears simple but it requires practice not to miss any)
    for node in dag:
        parents = list(dag.predecessors(node))
        if len(parents) > 1:
            for a, b in combinations(parents, 2):
                if not (dag.has_edge(a, b) or dag.has_edge(b, a)):
                    # order doesn't matter, as the next step removes all direction
                    dag.add_edge(a, b)


    # Step 5: Strip all arrowheads from lines
    g = dag.to_undirected()

    # Step 6 : Delete all lines between the covariates in the model and any other variables
    g.remove_nodes_from(S)

    # Finally: does there exist a path between X and outcome?
    # If so, then we failed the criteria.
    return not nx.has_path(g, X, outcome)
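As the comment above suggests, a sketch of the M graph check (hypothetical node names; assumes the usual imports: networkx as nx, descendants/ancestors from networkx.algorithms.dag, combinations from itertools):

import networkx as nx

# M graph: U1 -> X, U1 -> M, U2 -> M, U2 -> Y, plus the causal edge X -> Y
m_graph = nx.DiGraph([("U1", "X"), ("U1", "M"), ("U2", "M"), ("U2", "Y"), ("X", "Y")])

assert satisfies_backdoor_criteria(m_graph, "X", "Y", set())      # the empty set suffices
assert not satisfies_backdoor_criteria(m_graph, "X", "Y", {"M"})  # adjusting for the collider M opens a biasing path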
Example 17
    def connected_subgraph(self, node):
        """Returns the subgraph containing the given node, its ancestors, and
        its descendants.

        Parameters
        ----------
        node: str
            We want to create the subgraph containing this node.

        Returns
        -------
        subgraph: networkx.DiGraph
            The subgraph containing the specified node.
        """
        G = self.G

        subgraph_nodes = set()
        subgraph_nodes.add(node)
        subgraph_nodes.update(dag.ancestors(G, node))
        subgraph_nodes.update(dag.descendants(G, node))

        # Keep adding the ancestors and descendants of nodes in the graph
        # until we can't do so any longer
        graph_changed = True
        while graph_changed:
            initial_count = len(subgraph_nodes)

            old_nodes = set(subgraph_nodes)
            for n in old_nodes:
                subgraph_nodes.update(dag.ancestors(G, n))
                subgraph_nodes.update(dag.descendants(G, n))

            current_count = len(subgraph_nodes)
            graph_changed = current_count > initial_count

        return G.subgraph(subgraph_nodes)
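A standalone sketch of why the fixed-point loop matters: descendants of an ancestor can pull in nodes that neither dag.ancestors nor dag.descendants of the starting node reach directly (hypothetical graph, plain networkx):

import networkx as nx
from networkx.algorithms import dag

# root -> a and root -> b: starting from "a", the sibling "b" only appears
# after expanding the descendants of the ancestor "root"
G = nx.DiGraph([("root", "a"), ("root", "b")])

first_pass = {"a"} | dag.ancestors(G, "a") | dag.descendants(G, "a")
assert first_pass == {"root", "a"}

second_pass = set(first_pass)
for n in first_pass:
    second_pass |= dag.ancestors(G, n) | dag.descendants(G, n)
assert second_pass == {"root", "a", "b"}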
Example 18
    def downhill_path(self, u, v):
        '''
        Returns True if a downhill path from u to v exists, False otherwise.

        Equivalently, it evaluates the truth value of the statement
        "u is reachable from v"
        '''
        if self.h(u) <= self.h(v):
            return False
        else:
            if not dag.is_directed_acyclic_graph(self.directed):
                raise TypeError('NetworkLandscape.directed must be a directed '
                                'acyclic graph')
            else:
                return (u in dag.ancestors(self.directed, v))
Example 19
    def connected_subgraph(self, node):
        """Returns the subgraph containing the given node, its ancestors, and
        its descendants.

        Parameters
        ----------
        node : str
            We want to create the subgraph containing this node.

        Returns
        -------
        subgraph : networkx.DiGraph
            The subgraph containing the specified node.
        """
        G = self.G

        subgraph_nodes = set()
        subgraph_nodes.add(node)
        subgraph_nodes.update(dag.ancestors(G, node))
        subgraph_nodes.update(dag.descendants(G, node))

        # Keep adding the ancestors and descendants of nodes in the graph
        # until we can't do so any longer
        graph_changed = True
        while graph_changed:
            initial_count = len(subgraph_nodes)

            old_nodes = set(subgraph_nodes)
            for n in old_nodes:
                subgraph_nodes.update(dag.ancestors(G, n))
                subgraph_nodes.update(dag.descendants(G, n))

            current_count = len(subgraph_nodes)
            graph_changed = current_count > initial_count

        return G.subgraph(subgraph_nodes)
def get_ancestors(T: nx.DiGraph, node):
    """
    Retrieve all ancestors of node ``node`` in the tree ``T``

    Parameters
    ----------
    T: nx.DiGraph
      The directed graph
    node: str
         The node whose ancestors we are looking for
    Returns
    -------
    list
        The list of ancestors
    """

    return list(dag.ancestors(T, node))
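A short usage sketch (assuming dag was imported as from networkx.algorithms import dag):

import networkx as nx
from networkx.algorithms import dag

T = nx.DiGraph([("root", "left"), ("root", "right"), ("left", "leaf")])
assert set(get_ancestors(T, "leaf")) == {"root", "left"}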
Example 21
	def build_graph(self):
		datalineage_graph = nx.DiGraph()
	
		for row in self.list_data[DATA_ROW_START_INDEX:]:
			target_node = None
			for idx, v in enumerate(row[BQ_TABLE_COLUMN_START_INDEX:]):  # find the output
				if v == 'O':
					target_node = self.list_data[0][BQ_TABLE_COLUMN_START_INDEX+idx]
					filter_full_match = True if (self.filter_raw != '(.*?)' and self.filter_pattern.fullmatch(target_node.split(':')[-1])) else False
					datalineage_graph.add_node(target_node, id=target_node, entity="node", value=1, size=10, filter_full_match=filter_full_match)
					break

			for idx, v in enumerate(row[BQ_TABLE_COLUMN_START_INDEX:]):  # find the input
				if v == 'I' and target_node:
					datalineage_graph.add_edge(target_node, self.list_data[0][BQ_TABLE_COLUMN_START_INDEX+idx],
						relationship="consumed_by", entity="link", value=1)

		edges = datalineage_graph.edges()
		node_names = datalineage_graph.nodes()

		filtered_nodes = set()
		node_color = {}

		#get nodes that match the pattern
		for node in datalineage_graph.nodes():
			if self.filter_pattern.fullmatch(node):
				self.target_nodes.add(node)



		#get all the Ancestors and descendants of the identified nodes from the digraph:
		for node in self.target_nodes:
			# print("Node: ", node)
			# print("Ancestors: ", ancestors(datalineage_graph, node))
			# print("Descendents: ", descendants(datalineage_graph, node))
			filtered_nodes.add(node)
			filtered_nodes.update(ancestors(datalineage_graph, node))
			filtered_nodes.update(descendants(datalineage_graph, node))


		self.G = datalineage_graph.subgraph(list(filtered_nodes))

		print('Node count: %s' % self.G.number_of_nodes())
		print('Edge count: %s' % self.G.number_of_edges())
   
		nx.write_graphml(self.G, "data_lineage.graphml")
Example 22
def ancestor_dag(dag, nodes, copy=False):
    """ Create a subgraph with ancestor nodes from the input dag

    Parameters
    ----------
    dag     :       networkX.DiGraph
    nodes   :       an iterable of nodes
    copy    :       if set to True, return a deep copy of the subgraph that copies all the attributes;
                    otherwise the function returns a shallow copy, i.e., attributes point to the originals

    """
    from networkx.algorithms.dag import ancestors
    ancestor_nodes = set()
    for nn in nodes:
        ancestor_nodes.update(ancestors(dag, nn))
        ancestor_nodes.add(nn)
    if copy:
        return dag.subgraph(ancestor_nodes).copy()
    else:
        return dag.subgraph(ancestor_nodes)
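A short usage sketch for ancestor_dag on a hypothetical branching DAG:

import networkx as nx

g = nx.DiGraph([("a", "b"), ("b", "c"), ("b", "d")])

# ancestor closure of {"c"}: c itself plus everything upstream of it
sub = ancestor_dag(g, ["c"])
assert set(sub.nodes) == {"a", "b", "c"}

# copy=True returns an independent deep copy whose attributes can be mutated safely
deep = ancestor_dag(g, ["c"], copy=True)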
def capacitorsPlacementAlgorithm(numOfCapacitors, G, T, out_of_range_nodes):
    validate_number_of_capacitors(numOfCapacitors, out_of_range_nodes)
    all_pairs_distance = {}
    all_pairs_cla = {}
    currecnt_capacitor_placement = {}
    for pair in list(itertools.combinations(out_of_range_nodes.keys(), 2)):
        all_pairs_distance[pair] = graph_handler.getPathLenBetweenNodes(
            G, pair[0], pair[1])
        cla = graph_handler.getCLABetweenPair(T, pair[0], pair[1])
        all_pairs_cla[pair] = cla
        if cla in currecnt_capacitor_placement:  # create a dict of cla and its children
            currecnt_capacitor_placement[cla].add(pair[0])
            currecnt_capacitor_placement[cla].add(pair[1])
        else:
            currecnt_capacitor_placement[cla] = {pair[0], pair[1]}
    if numOfCapacitors > len(currecnt_capacitor_placement):
        increase_num_of_capacitors(currecnt_capacitor_placement,
                                   numOfCapacitors)
    if numOfCapacitors < len(currecnt_capacitor_placement):
        dictToRet = {}

        lst = decrease_num_of_clas(currecnt_capacitor_placement,
                                   numOfCapacitors, G, T,
                                   list(out_of_range_nodes.keys()))
        dictToRet = dict([(key, []) for key in lst])

        for outNode in out_of_range_nodes:
            lstAnc = ancestors.ancestors(T, outNode)
            for kk in lst:
                if kk in lstAnc:
                    if kk in dictToRet.keys():
                        dictToRet[kk].append(outNode)

        for cp in dictToRet:
            if len(dictToRet[cp]) == 0:
                dictToRet[cp].append(cp)

        currecnt_capacitor_placement = dictToRet

    return currecnt_capacitor_placement
Example 24
    def connected_subgraph(self, node):
        """Returns the subgraph containing the given node, its ancestors, and
        its descendants.

        Parameters
        ----------
        node: str
            We want to create the subgraph containing this node.

        Returns
        -------
        subgraph: networkx.DiGraph
            The subgraph containing the specified node.
        """
        G = self.G

        subgraph_nodes = set()
        subgraph_nodes.add(node)
        subgraph_nodes.update(dag.ancestors(G, node))
        subgraph_nodes.update(dag.descendants(G, node))
        return G.subgraph(subgraph_nodes)
Example 25
    def get_full_dependants(self, obj):
        return ancestors(self.g, obj)
Example 26
start = time()
# bags, contain = load_input("input_test_7.txt")
# bags, contain = load_input("input_test2_7.txt")
bags, contain = load_input("input_7.txt")

loading = time()

# G = nx.DiGraph()
G = DiGraph()
G.add_nodes_from(bags)
for i, con in enumerate(contain):
    G.add_edges_from([(bags[i], c[1], {"weight": int(c[0])}) for c in con])

# super_bags = nx.algorithms.dag.ancestors(G, "shiny gold")
super_bags = ancestors(G, "shiny gold")
# print(super_bags)
print(f'Number of super bags: {len(super_bags)}')

total_bags = count_of_subbags()
print(
    f'Total number bags in shiny gold: {total_bags - 1}')  # -1 for shiny gold

end = time()
print(f"loading input: {loading - start}, solving: {end - loading}")
print(f"imports: {start - imports}")

# needs some of the imports above to be uncommented
# def plot_graph(G):
#     # Write a plot to DOT format and plot graph by Graphviz.
#     A = nx.nx_agraph.to_agraph(G)  # convert to a graphviz graph
Example 27
def length_of_each_nodes(graph: nx.DiGraph) -> int:
    return sum(len(ancestors(graph, node)) for node in graph.nodes)
Example 28
    def count_containing_bags(self, color: str) -> int:
        try:
            containers = ancestors(self._graph, color)
        except nx.NetworkXError:
            raise ValueError(f"No rule for {color} bags exists!")
        return len(containers)
Example 29
    def _get_lower_closure(self, a):
        """ Returns a set of the lower closure of a """
        G = self._get_graph_closure_no_cycles()
        d = set(ancestors(G, a))
        d.add(a)
        return d
Example 30
def retrieve_outputs(environment, node):

    provider, component, command, manual, local = parse_reference(node)
    parents = list(ancestors(G, node))
    print("retrieving outputs for {}".format(node))
    env = {}
    for parent in parents:

        parent_provider, parent_component, parent_command, manual, parent_local = parse_reference(
            parent)
        parent_builds, last_build_status, next_build = get_builds(
            environment, parent_provider, parent_component, parent_command)

        last_successful_build = find_last_successful_build(parent_builds)

        if last_successful_build == None:
            # print("No successful build for {}".format(parwsent))
            continue

        pretty_build_number = "{:0>4d}".format(
            last_successful_build["build_number"])
        output_filename = "outputs/{}.{}.{}.{}.outputs.json".format(
            environment, parent_provider, parent_component, parent_command)
        if not os.path.isfile(output_filename):
            output_bucket = "vvv-{}-outputs".format(environment)
            s3_filename = "{}/{}/{}/{}.json".format(environment,
                                                    parent_provider,
                                                    parent_component,
                                                    parent_command,
                                                    pretty_build_number)
            s3_path = "s3://vvv-{}-outputs/{}/{}/{}/{}.json".format(
                environment, parent_provider, parent_component, parent_command,
                pretty_build_number)
            # check = run(["aws", "s3api", "head-object", "--bucket", output_bucket, "--key", s3_filename], stderr=open("s3log", "w"))

            #if check.returncode == 0:
            #  pass # run(["aws", "s3", "cp", s3_path, output_filename])
        outputs_path = os.path.abspath(
            os.path.join(project_directory, "builds", output_filename))

        if os.path.isfile(outputs_path):

            if os.stat(outputs_path).st_size != 0:
                loaded_outputs = json.loads(open(outputs_path).read())
                if 'secrets' in loaded_outputs:
                    decoder = Popen(["base64", "-d", "--wrap=0"],
                                    stdin=PIPE,
                                    stdout=PIPE,
                                    stderr=sys.stderr)
                    decrypter = Popen(["gpg", "--decrypt"],
                                      stdin=decoder.stdout,
                                      stdout=PIPE,
                                      stderr=sys.stderr)
                    decoder.stdin.write(
                        loaded_outputs['secrets'].encode('utf-8'))
                    decoder.stdin.close()
                    decrypted_result, err = decrypter.communicate()
                    loaded_outputs['secrets'] = json.loads(
                        decrypted_result.decode('utf-8'))

                env.update(loaded_outputs)

    unfiltered = dict(env)
    for key, value in env.items():
        if isinstance(value, list):
            cleaned = list(filter(lambda x: x != "", env[key]))
            unfiltered[key] = cleaned
            env[key] = " ".join(cleaned)
    return env, unfiltered
Example 31
File: dbn.py Project: marcokorb/its
    def get_predecessors(self, node):
        """
        Return all ancestors (direct and indirect predecessors) of a given node within the graph
        """
        return list(ancestors(self.model, node))
Example 32
    for parent in G.predecessors(node):
        G.add_edge(parent, "{}/{}".format(node, "package"))
    for children in G.successors(node):
        G.add_edge("{}/{}".format(node, "publish"), children)
    dot_graph.remove_node(node)

tree = nx.topological_sort(dot_graph)
ordered_environments = list(nx.topological_sort(environment_graph))

write_dot(dot_graph, "architecture.expanded.dot")

ordering = list(tree)

loaded_components = []
for count, node in enumerate(ordering):
    component_ancestors = list(ancestors(G, node))
    predecessors = list(G.predecessors(node))
    successors = list(G.successors(node))
    loaded_components.append({
        "name": node,
        "ancestors": predecessors,
        "successors": successors
    })

print("Scheduling components into run groups...")
streams, orderings = scheduler.parallelise_components(loaded_components)

loaded_json_file = open("builds/loaded.json", "w")
loaded_json_file.write(json.dumps(loaded_components, indent=4))

pprint(streams)
Example 33
    splitted = line.split("contain")
    parent = get_match_from_string(splitted[0], parent_regex, 0)
    descendants = []
    if 'no other bags' not in splitted[1]:
        descendants_text = splitted[1].split(',')
        for text in descendants_text:
            descendant_name = get_match_from_string(text, descendant_regex, 1)
            descendant_count = int(get_match_from_string(text, descendant_regex, 0))
            descendants.append((descendant_name, descendant_count))
    for descendant_name, descendant_count in descendants:
        for i in range(descendant_count):
            edges.append((parent, descendant_name))

graph = nx.MultiDiGraph()
graph.add_edges_from(edges)
ancestors = dag.ancestors(graph, "shiny gold")

print(f"Part 1: {len(ancestors)}")


descendants = dag.descendants(graph, "shiny gold")

def get_descendant(nodes, parent_name):
    contain_count = 1
    for descendant in nodes:
        edge_count = graph.number_of_edges(parent_name, descendant)
        nested_descendants = list(dag.descendants(graph, descendant))

        if not nested_descendants:
            contain_count += edge_count
        else:
Example 34
File: api.py Project: deltork/g2p
    def get(self, node):
        try:
            return sorted(ancestors(LANGS_NETWORK, node))
        except NetworkXError:
            abort(404)
Example 35
    def _get_lower_closure(self, a):
        """ Returns a set of the lower closure of a """
        G = self._get_graph_closure_no_cycles()
        d = set(ancestors(G, a))
        d.add(a)
        return d
Example 36
def partOne(inp):
    orbits = nx.DiGraph()
    for orbit in inp.split('\n'):
        center, orb = orbit.strip().split(')')
        orbits.add_edge(center, orb)
    return sum(len(ancestors(orbits, n)) for n in orbits.nodes)
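A tiny sanity check, assuming nx and dag.ancestors are imported: with the hypothetical orbit map COM)B and B)C, B contributes one orbit and C contributes two, so partOne returns 3.

assert partOne("COM)B\nB)C") == 3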