Example #1
 def parse(self):
     """
     Convert the graph into a neural network which can then
     be optimized by PyTorch.
     """
     for node_idx in lexicographical_topological_sort(self):
         if 'subgraph' in self.nodes[node_idx]:
             self.nodes[node_idx]['subgraph'].parse()
             self.add_module(
                 "{}-subgraph_at({})".format(self.name, node_idx),
                 self.nodes[node_idx]['subgraph'])
         else:
             if isinstance(self.nodes[node_idx]['comb_op'],
                           torch.nn.Module):
                 self.add_module(
                     "{}-comb_op_at({})".format(self.name, node_idx),
                     self.nodes[node_idx]['comb_op'])
         for neighbor_idx in self.neighbors(node_idx):
             edge_data = self.get_edge_data(node_idx, neighbor_idx)
             if isinstance(edge_data.op, Graph):
                 edge_data.op.parse()
             elif edge_data.op.get_embedded_ops():
                 for primitive in edge_data.op.get_embedded_ops():
                     if isinstance(primitive, Graph):
                         primitive.parse()
             self.add_module(
                 "{}-edge({},{})".format(self.name, node_idx, neighbor_idx),
                 edge_data.op)
     self.is_parsed = True
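
The routine above relies on `lexicographical_topological_sort` visiting the nodes in a deterministic order, so the registered module names are reproducible across runs. A minimal sketch of that ordering on a toy graph (the graph itself is made up for illustration):

import networkx as nx
from networkx.algorithms.dag import lexicographical_topological_sort

g = nx.DiGraph()
g.add_edges_from([(1, 2), (1, 3), (2, 4), (3, 4)])

# Ties between simultaneously available nodes (2 and 3 here) are broken by the
# node value itself, so the resulting order is deterministic: [1, 2, 3, 4].
print(list(lexicographical_topological_sort(g)))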
Example #2
    def dependency_list(self):
        r'''
        Returns a list of dependencies in the order in which they should be
        called to ensure data is calculated by one model before it is asked
        for by another.

        Notes
        -----
        This raises an exception if the graph has cycles, which means the
        dependencies are unresolvable (i.e. there is no order in which the
        models can be called that will work).  In this case it is possible
        to visually inspect the graph using ``dependency_graph``.

        See Also
        --------
        dependency_graph
        dependency_map

        '''
        dtree = self.dependency_graph()
        cycles = list(simple_cycles(dtree))
        if cycles:
            raise Exception('Cyclic dependency found: ' +
                            ' -> '.join(cycles[0] + [cycles[0][0]]))
        d = lexicographical_topological_sort(dtree, sorted)
        return list(d)
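
A short, self-contained sketch of the same pattern (the dependency graph and its node names are invented for illustration): `simple_cycles` flags unresolvable dependencies, and `lexicographical_topological_sort` with `sorted` as the key then yields a deterministic evaluation order:

import networkx as nx
from networkx.algorithms.dag import lexicographical_topological_sort

deps = nx.DiGraph()
deps.add_edges_from([('geometry', 'phase'), ('phase', 'physics'),
                     ('geometry', 'physics')])

cycles = list(nx.simple_cycles(deps))
if cycles:
    raise Exception('Cyclic dependency found: ' +
                    ' -> '.join(cycles[0] + [cycles[0][0]]))

print(list(lexicographical_topological_sort(deps, sorted)))
# ['geometry', 'phase', 'physics']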
Example #3
def get_nncf_graph_from_mock_nx_graph(nx_graph: nx.DiGraph) -> PTNNCFGraph:
    # pylint:disable=too-many-branches
    mock_graph = PTNNCFGraph()
    key_vs_id = {}
    edge_vs_output_idx_and_creator_id = {}  # type: Dict[Tuple[str, str], Tuple[int, int]]
    from networkx.algorithms.dag import lexicographical_topological_sort
    for idx, curr_node_key in enumerate(lexicographical_topological_sort(nx_graph)):
        node = nx_graph.nodes[curr_node_key]
        if NNCFGraph.NODE_NAME_ATTR in node:
            node_name = node[NNCFGraph.NODE_NAME_ATTR]
        else:
            node_name = str(OperationAddress(curr_node_key, Scope(), 0))

        if NNCFGraph.NODE_TYPE_ATTR in node:
            node_type = node[NNCFGraph.NODE_TYPE_ATTR]
        else:
            node_type = curr_node_key

        layer_attributes = node.get(NNCFGraph.LAYER_ATTRIBUTES)

        if NNCFGraph.METATYPE_ATTR in node:
            metatype = node[NNCFGraph.METATYPE_ATTR]
        else:
            metatype = PT_OPERATOR_METATYPES.get_operator_metatype_by_op_name(node_type)
            if metatype is not UnknownMetatype:
                if metatype.subtypes:
                    subtype = metatype.determine_subtype(layer_attributes=layer_attributes)
                    if subtype is not None:
                        metatype = subtype

        node_id = idx
        node = mock_graph.add_nncf_node(
            node_name=node_name,
            node_type=node_type,
            node_metatype=metatype,
            layer_attributes=layer_attributes,
            node_id_override=idx)
        key_vs_id[curr_node_key] = node_id

        preds = list(nx_graph.predecessors(curr_node_key))
        for pred_idx, pred in enumerate(preds):
            in_edge = (pred, curr_node_key)
            out_idx, creator_id = edge_vs_output_idx_and_creator_id[in_edge]
            edge_data = nx_graph.edges[in_edge]
            if NNCFGraph.DTYPE_EDGE_ATTR in edge_data:
                dtype = edge_data[NNCFGraph.DTYPE_EDGE_ATTR]
            else:
                dtype = Dtype.FLOAT
            mock_graph.add_edge_between_nncf_nodes(creator_id, node_id,
                                                   [1, 1, 1, 1], input_port_id=pred_idx,
                                                   output_port_id=out_idx,
                                                   dtype=dtype)

        for out_idx, out_edge in enumerate(nx_graph.out_edges(curr_node_key)):
            edge_vs_output_idx_and_creator_id[out_edge] = (out_idx, node.node_id)
    return mock_graph
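
Stripped of the NNCF-specific classes, the pattern above is: walk the graph in lexicographic topological order, hand out stable integer ids by position, and record for every incoming edge which input port it feeds. A generic sketch using only networkx (the graph and node names are made up):

import networkx as nx
from networkx.algorithms.dag import lexicographical_topological_sort

nx_graph = nx.DiGraph([('conv', 'relu'), ('conv', 'pool'),
                       ('relu', 'cat'), ('pool', 'cat')])
ids = {}
edges = []  # (creator id, consumer id, input port)
for idx, key in enumerate(lexicographical_topological_sort(nx_graph)):
    ids[key] = idx
    for port, pred in enumerate(nx_graph.predecessors(key)):
        edges.append((ids[pred], idx, port))

print(ids)    # {'conv': 0, 'pool': 1, 'relu': 2, 'cat': 3}
print(edges)  # [(0, 1, 0), (0, 2, 0), (2, 3, 0), (1, 3, 1)]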
Example #4
    def update_nodes(self,
                     update_func: callable,
                     scope="all",
                     single_instances: bool = True):
        """
        Update the nodes of the graph and its incoming and outgoing edges by iterating over the
        graph and applying `update_func` to each node. This is the
        preferred way to change the search space once it has been defined.

        Note that edges marked as 'final' will not be updated here.

        Args:
            update_func (callable): Function that accepts three keyword parameters named
                `node, in_edges, out_edges`.
                    - `node` is a tuple (int, dict) containing the
                      index and the attributes of the current node.
                    - `in_edges` is a list of tuples with the index of
                      the tail of the edge and its EdgeData.
                    - `out_edges` is a list of tuples with the index of
                      the head of the edge and its EdgeData.
            scope (str or list(str)): Can be "all" or a list of scopes to be updated. Only graphs
                and child graphs with the specified scope are considered.
            single_instances (bool): If set to False, `update_func` will be
                applied to the nodes of all copies of a graph. THIS IS NOT RECOMMENDED FOR SHARED
                ATTRIBUTES, i.e. when manipulating the shared data of incoming or outgoing edges.
                Shared attributes should be set only once; we take care that they are synchronized
                across all copies of this graph.

                The only use case for setting it to False is when actually changing
                `op` during the initialization of the optimizer (e.g. replacing it
                with MixedOp or SampleOp).
        """
        assert scope is not None
        for graph in self._get_child_graphs(single_instances) + [self]:
            if scope == 'all' or graph.scope == scope or (isinstance(
                    scope, list) and graph.scope in scope):
                logger.debug('Updating nodes of graph {}'.format(graph.name))
                for node_idx in lexicographical_topological_sort(graph):
                    node = (node_idx, graph.nodes[node_idx])
                    in_edges = list(graph.in_edges(node_idx,
                                                   data=True))  # (v, u, data)
                    in_edges = [(v, data) for v, u, data in in_edges
                                if not data.is_final()]  # u is same for all
                    out_edges = list(graph.out_edges(
                        node_idx, data=True))  # (v, u, data)
                    out_edges = [(u, data) for v, u, data in out_edges
                                 if not data.is_final()]  # v is same for all
                    update_func(node=node,
                                in_edges=in_edges,
                                out_edges=out_edges)
        self._delete_flagged_edges()
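
A minimal sketch of a callback matching the documented `update_func` signature; it only inspects the data it is handed, so nothing beyond the docstring above is assumed:

def log_node(node, in_edges, out_edges):
    # node is (index, attribute dict); in_edges / out_edges are lists of
    # (neighbor index, EdgeData) with 'final' edges already filtered out.
    node_idx, node_attrs = node
    print(node_idx, len(in_edges), "incoming,", len(out_edges), "outgoing")

# graph.update_nodes(log_node, scope="all", single_instances=True)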
Example #5
 def to_quil(self, filename: str = None) -> None:
     if filename:
         #write to file
         pass
     else:
         for node in lexicographical_topological_sort(
                 self._dag, key=lambda node: node._qubits):
             if node.type != 'gate':
                 continue
             if node._args:
                 print(
                     f"{node._gate}({','.join(node._args)}) {' '.join([str(qubit) for qubit in node._qubits])}"
                 )
             else:
                 print(
                     f"{node._gate} {' '.join([str(qubit) for qubit in node._qubits])}"
                 )
Example #6
    def _assign_x_to_nodes(self, x):
        """
        Assign x to the input nodes of self, depending on whether the graph
        sits on an edge or on nodes.

        Also performs several sanity checks on the input.

        Args:
            x (Tensor or dict): Input to be assigned.
        """
        # We need a dict in the case of a cell and a tensor in the case of a motif
        assert isinstance(x, dict) or isinstance(x, torch.Tensor)

        if self.input_node_idxs is None:
            assert self.num_input_nodes() == 1, \
                "There is more than one input node but input indices are not defined."
            assert len(list(self.predecessors(1))) == 0, \
                "Expecting node 1 to be a root node (no predecessors)."
            assert 'subgraph' not in self.nodes[1], \
                "Expecting node 1 not to have a subgraph as it serves as input node."
            assert isinstance(x, torch.Tensor)
            self.nodes[1]['input'] = {0: x}
        else:
            # assign the input to the corresponding nodes
            assert all([i in x.keys() for i in self.input_node_idxs
                        ]), "got x from an unexpected input edge"
            if self.num_input_nodes() > len(x):
                # This is the case where the same input is assigned to more than one node.
                # It can happen when there are cells with two inputs but, at the very first
                # layer of the network, there is just one output (i.e. the data fed to the
                # macro input node). Handle it and log it. This should happen only rarely.
                logger.debug(
                    "We are using the same x for two inputs in graph {}".format(
                        self.name))
            input_node_iterator = iter(self.input_node_idxs)
            for node_idx in lexicographical_topological_sort(self):
                if self.in_degree(node_idx) == 0:
                    self.nodes[node_idx]['input'] = {
                        0: x[next(input_node_iterator)]
                    }
Example #7
#!/usr/bin/env python3
from collections import defaultdict
import networkx as nx
from networkx.algorithms.dag import lexicographical_topological_sort

DG = nx.DiGraph()
for line in open('input.txt'):
    DG.add_edge(line[5], line[36])

topo_sort = "".join(lexicographical_topological_sort(DG))
print(topo_sort)


done = []
time_left = {}
for k in topo_sort:
    time_left[k] = ord(k) - ord('A') + 61

cur_work = []
workers_available = 6
time_spent = 0

while True:
    ready = list()

    for n in topo_sort:
        can_start = True
        in_edges = DG.in_edges(n)
        for k, v in in_edges:
            if k not in done:
                can_start = False
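
The worker loop above is cut off. A minimal, self-contained sketch of the same part-two scheduling idea (the function and variable names are mine; the 60-seconds-plus-letter task cost matches the `+ 61` above, and the worker count is left as a parameter):

import networkx as nx

def total_time(graph: nx.DiGraph, workers: int = 5, base_cost: int = 60) -> int:
    # Task X takes base_cost + (ord(X) - ord('A') + 1) seconds.
    remaining = {n: base_cost + ord(n) - ord('A') + 1 for n in graph}
    done, in_progress = set(), {}
    elapsed = 0
    while len(done) < len(graph):
        # Tasks whose prerequisites are all finished and that are not yet
        # started, picked in lexicographic order.
        ready = sorted(n for n in graph
                       if n not in done and n not in in_progress
                       and all(p in done for p in graph.predecessors(n)))
        for task in ready[:workers - len(in_progress)]:
            in_progress[task] = remaining[task]
        # Jump ahead to the next completion instead of ticking one second at a time.
        step = min(in_progress.values())
        elapsed += step
        for task in list(in_progress):
            in_progress[task] -= step
            if in_progress[task] == 0:
                del in_progress[task]
                done.add(task)
    return elapsed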
Example #8
def part1():
    return ''.join(lexicographical_topological_sort(dagify()))
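
`dagify()` is not shown here. A plausible sketch of such a helper (its name is taken from the call above; the file path and word positions are assumptions based on the same day 7 input format the neighboring examples parse):

import networkx as nx

def dagify(path='input.txt'):
    # Lines look like "Step C must be finished before step A can begin."
    graph = nx.DiGraph()
    with open(path) as handle:
        for line in handle:
            words = line.split()
            graph.add_edge(words[1], words[7])  # prerequisite -> dependent step
    return graph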
Example #9
    def _get_child_graphs(self, single_instances: bool = False) -> list:
        """
        Get all child graphs of the current graph.

        Args:
            single_instances (bool): If True, return only one instance per
                distinct graph name rather than every copy of it. When changing
                shared data this should be set to True.
        
        Returns:
            list: A list of all child graphs (can be empty)
        """
        graphs = []
        for node_idx in lexicographical_topological_sort(self):
            node_data = self.nodes[node_idx]
            if 'subgraph' in node_data:
                graphs.append(node_data['subgraph'])
                graphs.append(node_data['subgraph']._get_child_graphs())

        for _, _, edge_data in self.edges.data():
            if isinstance(edge_data.op, Graph):
                graphs.append(edge_data.op)
                graphs.append(edge_data.op._get_child_graphs())
            elif isinstance(edge_data.op, list):
                for op in edge_data.op:
                    if isinstance(op, Graph):
                        graphs.append(op)
                        graphs.append(op._get_child_graphs())
            elif isinstance(edge_data.op, AbstractPrimitive):
                # maybe it is an embedded op?
                embedded_ops = edge_data.op.get_embedded_ops()
                if embedded_ops is not None:
                    if isinstance(embedded_ops, Graph):
                        graphs.append(embedded_ops)
                        graphs.append(embedded_ops._get_child_graphs())
                    elif isinstance(embedded_ops, list):
                        for child_op in edge_data.op.get_embedded_ops():
                            if isinstance(child_op, Graph):
                                graphs.append(child_op)
                                graphs.append(child_op._get_child_graphs())
                    else:
                        logger.debug(
                            "Got embedded op, but is neither a graph nor a list: {}"
                            .format(embedded_ops))
            elif inspect.isclass(edge_data.op):
                assert not issubclass(
                    edge_data.op, Graph), "Found non-initialized graph. Abort."
                pass  # we are looking at an uncompiled op
            else:
                raise ValueError("Unknown format of op: {}".format(
                    edge_data.op))

        graphs = [g for g in iter_flatten(graphs)]

        if single_instances:
            single = []
            for g in graphs:
                if g.name not in [sg.name for sg in single]:
                    single.append(g)
            return sorted(single, key=lambda g: g.name)
        else:
            return sorted(graphs, key=lambda g: g.name)
Example #10
    def forward(self, x, *args):
        """
        Forward some data through the graph. This is done recursively
        in case there are graphs defined on nodes or as 'op' on edges.

        Args:
            x (Tensor or dict): The input. If the graph sits on a node the
                input can be a dict with {source_idx: Tensor} to be routed
                to the defined input nodes. If the graph sits on an edge,
                x is the feature tensor.
            args: This is only required to handle cases where the graph sits
                on an edge and receives an EdgeData object, which will be ignored.
        """
        logger.debug("Graph {} called. Input {}.".format(
            self.name, log_formats(x)))

        # Assign x to the corresponding input nodes
        self._assign_x_to_nodes(x)

        for node_idx in lexicographical_topological_sort(self):
            node = self.nodes[node_idx]
            logger.debug(
                "Node {}-{}, current data {}, start processing...".format(
                    self.name, node_idx, log_formats(node)))

            # node internal: process input if necessary
            if ('subgraph' in node
                    and 'comb_op' not in node) or ('comb_op' in node
                                                   and 'subgraph' not in node):
                log_first_n(logging.WARN,
                            "Comb_op is ignored if subgraph is defined!",
                            n=1)
            # TODO: merge 'subgraph' and 'comb_op'. It is basically the same thing. Also in parse()
            if 'subgraph' in node:
                x = node['subgraph'].forward(node['input'])
            else:
                if len(node['input'].values()) == 1:
                    x = list(node['input'].values())[0]
                else:
                    x = node['comb_op']([
                        node['input'][k] for k in sorted(node['input'].keys())
                    ])
            node['input'] = {}  # clear the input as we have processed it

            if len(list(self.neighbors(node_idx))) == 0 and node_idx < list(
                    lexicographical_topological_sort(self))[-1]:
                # We have more than one output node. This is e.g. the case for
                # auxiliary losses. Attach them to the graph; handling must be done
                # by the user.
                logger.debug(
                    "Graph {} has more than one output node. Storing output of non-maximum index node {} at graph dict"
                    .format(self, node_idx))
                self.graph['out_from_{}'.format(node_idx)] = x
            else:
                # outgoing edges: process all outgoing edges
                for neighbor_idx in self.neighbors(node_idx):
                    edge_data = self.get_edge_data(node_idx, neighbor_idx)
                    # inject edge data only for AbstractPrimitive, not Graphs
                    if isinstance(edge_data.op, Graph):
                        edge_output = edge_data.op.forward(x)
                    elif isinstance(edge_data.op, AbstractPrimitive):
                        logger.debug("Processing op {} at edge {}-{}".format(
                            edge_data.op, node_idx, neighbor_idx))
                        edge_output = edge_data.op.forward(x,
                                                           edge_data=edge_data)
                    else:
                        raise ValueError(
                            "Unknown class as op: {}. Expected either Graph or AbstractPrimitive"
                            .format(edge_data.op))
                    self.nodes[neighbor_idx]['input'].update(
                        {node_idx: edge_output})

            logger.debug("Node {}-{}, processing done.".format(
                self.name, node_idx))

        logger.debug("Graph {} exiting. Output {}.".format(
            self.name, log_formats(x)))
        return x
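
Reduced to plain networkx, the traversal above is ordinary message passing over a DAG: because nodes are visited in topological order, all of a node's inputs exist before the node is processed. A toy sketch (the lambda edge functions and the `sum` combine step stand in for the real ops):

import networkx as nx
from networkx.algorithms.dag import lexicographical_topological_sort

g = nx.DiGraph()
g.add_edge(1, 2, op=lambda v: v + 1)
g.add_edge(1, 3, op=lambda v: v * 10)
g.add_edge(2, 4, op=lambda v: v - 2)
g.add_edge(3, 4, op=lambda v: v / 2)

inputs = {1: [5]}  # seed the input node
for node in lexicographical_topological_sort(g):
    value = sum(inputs.pop(node))  # 'comb_op' stand-in: sum the incoming values
    for succ in g.successors(node):
        inputs.setdefault(succ, []).append(g.edges[node, succ]['op'](value))

print(value)  # output of the last node: (5 + 1 - 2) + (5 * 10 / 2) = 29.0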
Example #11
#!/usr/bin/env python3
from collections import defaultdict
import networkx as nx
from networkx.algorithms.dag import lexicographical_topological_sort

DG = nx.DiGraph()
for line in open('input.txt'):
    DG.add_edge(line[5], line[36])

topo_sort = "".join(lexicographical_topological_sort(DG))
print(topo_sort)

done = []
time_left = {}
for k in topo_sort:
    time_left[k] = ord(k) - ord('A') + 61

cur_work = []
workers_available = 6
time_spent = 0

while True:
    ready = list()

    for n in topo_sort:
        can_start = True
        in_edges = DG.in_edges(n)
        for k, v in in_edges:
            if k not in done:
                can_start = False
                break
Example #12
import sys
import networkx as nx
from networkx.algorithms.dag import lexicographical_topological_sort

g = nx.DiGraph()

for foo in sys.stdin:
    x = foo.strip().split(" ")
    g.add_edge(x[0], x[1])

x = list(lexicographical_topological_sort(g))  #, key=prio.get))
print(''.join(x))
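
The commented-out `key=prio.get` hints at the optional `key` argument of `lexicographical_topological_sort`, which controls how ties between simultaneously available nodes are broken. A small sketch (the priority table is invented for illustration):

import networkx as nx
from networkx.algorithms.dag import lexicographical_topological_sort

g = nx.DiGraph([('A', 'C'), ('B', 'C')])
prio = {'A': 1, 'B': 0, 'C': 2}

print(''.join(lexicographical_topological_sort(g)))                # ABC (ties broken by node)
print(''.join(lexicographical_topological_sort(g, key=prio.get)))  # BAC (ties broken by priority)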
Example #13
import sys
import networkx as nx
from networkx.algorithms.dag import lexicographical_topological_sort

# n workers, m tasks (letters), o base cost per task
#n, m, o = 2, 6, 0
n, m, o = 4, 26, 60

tim = {}  # remaining work time per task letter
for i, letter in enumerate(map(chr, range(65, 65 + m))):  # 65..90 -> 'A'..'Z'
    tim[letter] = o + 1 + i

print(tim)

g = nx.DiGraph()

for foo in sys.stdin:
    x = foo.strip().split(" ")
    g.add_edge(x[0], x[1])

x = list(lexicographical_topological_sort(g))
print(x)

ws = [None] * n
tot = 0
while True:
    for i in range(n):
        if ws[i] != None:
            tim[ws[i]] = max(tim[ws[i]] - 1, 0)
            if tim[ws[i]] == 0:
                del tim[ws[i]]
                ws[i] = None

    if len(x) > 0:
        for i in range(n):
            wi = ws[i]
Example #14
def emit(w, skip, head, body):
    if skip or len(body) <= 1:
        plain(w, head, body)
        return

    mkDneg = ' ' * len('not ') if not skip and any(project(1, body)) else ''
    mkCneg = ' ' * len('-') if not skip and any(project(2, body)) else ''
    body = list(map(lambda x: (x[0], x[1], x[2], x[3].strip(), bare(x[4])), body))
    offset = max(map(len, project(3, body)))

    g = nx.DiGraph()

    prio = []
    head = list(head)
    if head != []:
        lastHead = head[-1]
        prio = [bare(lastHead[2])]
        offset = max(offset, len(lastHead[1]) - tablen)
        g.add_edges_from(zip(lastHead[2], lastHead[2][1:]))

    # Establish some order for incomparable sets.
    lex, prio = uniq(flatten(prio + list(sorted(map(bare, project(4, body)), key=len, reverse=True))))

    for _, _, _, _, terms in body:
        if len(terms) == 1:
            g.add_node(terms[0])
        else:
            g.add_edges_from(zip(terms, terms[1:]))

    def foo(x):
        if x not in prio:
            return None
        return prio[x]

    if len(lex) > 1:
        try:
            lex = list(lexicographical_topological_sort(g))#, key=prio.get))
        except nx.exception.NetworkXUnfeasible:
            # we're fine with that...
            print('DANGER')

    print(tab + tab + ' ' + ' ' * offset + ' '.join(lex))

    g = nx.DiGraph()

    # Generate nodes for all sets of variables.
    for atom in body:
        bareVars = map(lambda y: y.strip(), atom[4])
        ts = list(set(list(bareVars)[:]))
        # Attention, we destroy the order of the terms here!
        ts.sort()
        ts = tuple(ts)
        if ts in g:
            g.nodes[ts]['atoms'].append(atom)
        else:
            g.add_node(ts, atoms=[atom])

    for u, v in combinations(g, 2):
        # If u is a superset of v, then u should come earlier in the topo.
        if set(v).issubset(set(u)):
            g.add_edge(u, v)

    if head != []:
        w(tab + ' | '.join([('-' if cneg else '') + pred + ('' if terms == [] else ' ' + ' '.join(terms)) for (cneg, pred, terms) in head]) + lf)

    atoms = nx.get_node_attributes(g, 'atoms')
    for v in lexicographical_topological_sort(g, key=lambda x: lex.index(x[0]) if len(x) > 0 else 0):
        for (indent, dneg, cneg, predicate, terms) in atoms[v]:
            w(
                tab * indent +
                ('not ' if dneg else mkDneg) +
                ('-' if cneg else mkCneg) +
                predicate.ljust(offset,' ') +
                ('' if terms == [] else ' ' + ' '.join(terms)) +
                lf
            )