class Attribute(BaseObject):
    """ An Attribute of a Node, as described by a desc.Attribute. """

    stringIsLinkRe = re.compile(r'^\{[A-Za-z]+[A-Za-z0-9_.]*\}$')

    def __init__(self, node, attributeDesc, isOutput, root=None, parent=None):
        """
        Attribute constructor

        Args:
            node (Node): the Node hosting this Attribute
            attributeDesc (desc.Attribute): the description of this Attribute
            isOutput (bool): whether this Attribute is an output of the Node
            root (Attribute): (optional) the root Attribute (List or Group) containing this one
            parent (BaseObject): (optional) the parent BaseObject
        """
        super(Attribute, self).__init__(parent)
        self._name = attributeDesc.name
        self._root = None if root is None else weakref.ref(root)
        self._node = weakref.ref(node)
        self.attributeDesc = attributeDesc
        self._isOutput = isOutput
        self._value = copy.copy(attributeDesc.value)
        self._label = attributeDesc.label
        self._enabled = True

        # invalidation value for output attributes
        self._invalidationValue = ""

    @property
    def node(self):
        return self._node()

    @property
    def root(self):
        return self._root() if self._root else None

    def absoluteName(self):
        return '{}.{}.{}'.format(self.node.graph.name, self.node.name, self._name)

    def getFullName(self):
        """ Name inside the Graph: nodeName.name """
        if isinstance(self.root, ListAttribute):
            return '{}[{}]'.format(self.root.getFullName(), self.root.index(self))
        elif isinstance(self.root, GroupAttribute):
            return '{}.{}'.format(self.root.getFullName(), self._name)
        return '{}.{}'.format(self.node.name, self._name)

    def asLinkExpr(self):
        """ Return link expression for this Attribute """
        return "{" + self.getFullName() + "}"

    def getName(self):
        """ Attribute name """
        return self._name

    def getType(self):
        return self.attributeDesc.__class__.__name__

    def getBaseType(self):
        return self.getType()

    def getLabel(self):
        return self._label

    def getEnabled(self):
        if isinstance(self.desc.enabled, types.FunctionType):
            try:
                return self.desc.enabled(self.node)
            except:
                # Node implementation may fail due to version mismatch
                return True
        return self.attributeDesc.enabled

    def setEnabled(self, v):
        if self._enabled == v:
            return
        self._enabled = v
        self.enabledChanged.emit()

    def _get_value(self):
        return self.getLinkParam().value if self.isLink else self._value

    def _set_value(self, value):
        if self._value == value:
            return

        if isinstance(value, Attribute) or Attribute.isLinkExpression(value):
            # if we set a link to another attribute
            self._value = value
        else:
            # if we set a new value, we use the attribute descriptor validator to check the validity of the value
            # and apply some conversion if needed
            convertedValue = self.desc.validateValue(value)
            self._value = convertedValue

        # Request graph update when input parameter value is set
        # and parent node belongs to a graph
        # Output attributes value are set internally during the update process,
        # which is why we don't trigger any update in this case
        # TODO: update only the nodes impacted by this change
        # TODO: only update the graph if this attribute participates to a UID
        if self.isInput:
            self.requestGraphUpdate()
        self.valueChanged.emit()

    def resetValue(self):
        self._value = self.attributeDesc.value

    def requestGraphUpdate(self):
        if self.node.graph:
            self.node.graph.markNodesDirty(self.node)
            self.node.graph.update()

    @property
    def isOutput(self):
        return self._isOutput

    @property
    def isInput(self):
        return not self._isOutput

    def uid(self, uidIndex=-1):
        """ Compute the UID of this Attribute for the given UID index. """
        # 'uidIndex' should be in 'self.desc.uid' but in the case of linked attribute
        # it will not be the case (so we cannot have an assert).
        if self.isOutput:
            # only dependent on the hash of its value without the cache folder
            return hashValue(self._invalidationValue)
        if self.isLink:
            return self.getLinkParam().uid(uidIndex)
        if isinstance(self._value, (list, tuple, set,)):
            # hash of sorted values hashed
            return hashValue([hashValue(v) for v in sorted(self._value)])
        return hashValue(self._value)

    @property
    def isLink(self):
        """ Whether the attribute is a link to another attribute. """
        # note: directly use self.node.graph._edges to avoid using the property that may become invalid at some point
        return self.node.graph and self.isInput and self.node.graph._edges and self in self.node.graph._edges.keys()

    @staticmethod
    def isLinkExpression(value):
        """
        Return whether the given argument is a link expression.
        A link expression is a string matching the {nodeName.attrName} pattern.
        """
        return isinstance(value, pyCompatibility.basestring) and Attribute.stringIsLinkRe.match(value)

    def getLinkParam(self, recursive=False):
        if not self.isLink:
            return None
        linkParam = self.node.graph.edge(self).src
        if not recursive:
            return linkParam
        if linkParam.isLink:
            return linkParam.getLinkParam(recursive)
        return linkParam

    @property
    def hasOutputConnections(self):
        """ Whether the attribute has output connections, i.e is the source of at least one edge. """
        # safety check to avoid evaluation errors
        if not self.node.graph or not self.node.graph.edges:
            return False
        return next((edge for edge in self.node.graph.edges.values() if edge.src == self), None) is not None

    def _applyExpr(self):
        """
        For string parameters with an expression (when loaded from file),
        this function converts the expression into a real edge in the graph
        and clears the string value.
        """
        v = self._value
        g = self.node.graph
        if not g:
            return
        if isinstance(v, Attribute):
            g.addEdge(v, self)
            self.resetValue()
        elif self.isInput and Attribute.isLinkExpression(v):
            # value is a link to another attribute
            link = v[1:-1]
            linkNode, linkAttr = link.split('.')
            try:
                g.addEdge(g.node(linkNode).attribute(linkAttr), self)
            except KeyError as err:
                logging.warning('Connect Attribute from Expression failed.\nExpression: "{exp}"\nError: "{err}".'
                                .format(exp=v, err=err))
            self.resetValue()

    def getExportValue(self):
        if self.isLink:
            return self.getLinkParam().asLinkExpr()
        if self.isOutput:
            return self.defaultValue()
        return self._value

    def getValueStr(self):
        if isinstance(self.attributeDesc, desc.ChoiceParam) and not self.attributeDesc.exclusive:
            assert (isinstance(self.value, pyCompatibility.Sequence) and not isinstance(self.value, pyCompatibility.basestring))
            return self.attributeDesc.joinChar.join(self.value)
        if isinstance(self.attributeDesc, (desc.StringParam, desc.File)):
            return '"{}"'.format(self.value)
        return str(self.value)

    def defaultValue(self):
        if isinstance(self.desc.value, types.FunctionType):
            return self.desc.value(self)
        # Need to force a copy, for the case where the value is a list (avoid reference to the desc value)
        return copy.copy(self.desc.value)

    def _isDefault(self):
        return self._value == self.defaultValue()

    def getPrimitiveValue(self, exportDefault=True):
        return self._value

    def updateInternals(self):
        # Emit if the enable status has changed
        self.setEnabled(self.getEnabled())

    name = Property(str, getName, constant=True)
    fullName = Property(str, getFullName, constant=True)
    label = Property(str, getLabel, constant=True)
    type = Property(str, getType, constant=True)
    baseType = Property(str, getType, constant=True)
    desc = Property(desc.Attribute, lambda self: self.attributeDesc, constant=True)
    valueChanged = Signal()
    value = Property(Variant, _get_value, _set_value, notify=valueChanged)
    isOutput = Property(bool, isOutput.fget, constant=True)
    isLinkChanged = Signal()
    isLink = Property(bool, isLink.fget, notify=isLinkChanged)
    hasOutputConnectionsChanged = Signal()
    hasOutputConnections = Property(bool, hasOutputConnections.fget, notify=hasOutputConnectionsChanged)
    isDefault = Property(bool, _isDefault, notify=valueChanged)
    linkParam = Property(BaseObject, getLinkParam, notify=isLinkChanged)
    rootLinkParam = Property(BaseObject, lambda self: self.getLinkParam(recursive=True), notify=isLinkChanged)
    node = Property(BaseObject, node.fget, constant=True)
    enabledChanged = Signal()
    enabled = Property(bool, getEnabled, setEnabled, notify=enabledChanged)
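
# Usage sketch (illustrative only, not part of the original module; node type and
# attribute names below are hypothetical): an Attribute can reference another one
# either directly through Graph.addEdge() or through a link expression such as
# "{NodeTypeA_1.output}", which Attribute._applyExpr() later turns into a real Edge.
#
#   g = Graph("example")
#   a = g.addNewNode("NodeTypeA")                 # assumes "NodeTypeA" is a registered node type
#   b = g.addNewNode("NodeTypeB")
#   g.addEdge(a.attribute("output"), b.attribute("input"))
#   assert b.attribute("input").isLink
#   assert b.attribute("input").getLinkParam() is a.attribute("output")
#   assert b.attribute("input").getExportValue() == "{NodeTypeA_1.output}"
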
class Graph(BaseObject):
    """
    _________________      _________________      _________________
   |               |      |               |      |               |
   |     Node A    |      |     Node B    |      |     Node C    |
   |               | edge |               | edge |               |
   |input    output|>---->|input    output|>---->|input    output|
   |_______________|      |_______________|      |_______________|

    Data structures:

    nodes = {'A': <nodeA>, 'B': <nodeB>, 'C': <nodeC>}
    edges = {B.input: A.output, C.input: B.output,}
    """
    _cacheDir = ""

    class IO(object):
        """ Centralize Graph file keys and IO version. """
        __version__ = "1.1"

        class Keys(object):
            """ File Keys. """
            # Doesn't inherit enum to simplify usage (Graph.IO.Keys.XX, without .value)
            Header = "header"
            NodesVersions = "nodesVersions"
            ReleaseVersion = "releaseVersion"
            FileVersion = "fileVersion"
            Graph = "graph"

        class Features(Enum):
            """ File Features. """
            Graph = "graph"
            Header = "header"
            NodesVersions = "nodesVersions"
            PrecomputedOutputs = "precomputedOutputs"
            NodesPositions = "nodesPositions"

        @staticmethod
        def getFeaturesForVersion(fileVersion):
            """ Return the list of supported features based on a file version.

            Args:
                fileVersion (str, Version): the file version

            Returns:
                tuple of Graph.IO.Features: the list of supported features
            """
            if isinstance(fileVersion, pyCompatibility.basestring):
                fileVersion = Version(fileVersion)

            features = [Graph.IO.Features.Graph]
            if fileVersion >= Version("1.0"):
                features += [Graph.IO.Features.Header,
                             Graph.IO.Features.NodesVersions,
                             Graph.IO.Features.PrecomputedOutputs,
                             ]
            if fileVersion >= Version("1.1"):
                features += [Graph.IO.Features.NodesPositions]

            return tuple(features)

    def __init__(self, name, parent=None):
        super(Graph, self).__init__(parent)
        self.name = name
        self._updateEnabled = True
        self._updateRequested = False
        self.dirtyTopology = False
        self._nodesMinMaxDepths = {}
        self._computationBlocked = {}
        self._canComputeLeaves = True
        self._nodes = DictModel(keyAttrName='name', parent=self)
        # use dst attribute as unique key since it can only have one input connection
        self._edges = DictModel(keyAttrName='dst', parent=self)
        self._compatibilityNodes = DictModel(keyAttrName='name', parent=self)
        self.cacheDir = meshroom.core.defaultCacheFolder
        self._filepath = ''
        self.header = {}

    def clear(self):
        self.header.clear()
        self._compatibilityNodes.clear()
        self._edges.clear()
        # Tell QML nodes are going to be deleted
        for node in self._nodes:
            node.alive = False
        self._nodes.clear()

    @property
    def fileFeatures(self):
        """ Get loaded file supported features based on its version. """
        if not self._filepath:
            return []
        return Graph.IO.getFeaturesForVersion(self.header.get(Graph.IO.Keys.FileVersion, "0.0"))

    @Slot(str)
    def load(self, filepath, setupProjectFile=True):
        """
        Load a meshroom graph ".mg" file.

        Args:
            filepath: project filepath to load
            setupProjectFile: Store the reference to the project file and setup the cache directory.
                              If false, it only loads the graph of the project file as a template.
        """
        self.clear()
        with open(filepath) as jsonFile:
            fileData = json.load(jsonFile)

        # older versions of Meshroom files only contained the serialized nodes
        graphData = fileData.get(Graph.IO.Keys.Graph, fileData)

        if not isinstance(graphData, dict):
            raise RuntimeError('loadGraph error: Graph is not a dict. File: {}'.format(filepath))
        self.header = fileData.get(Graph.IO.Keys.Header, {})
        nodesVersions = self.header.get(Graph.IO.Keys.NodesVersions, {})

        with GraphModification(self):
            # iterate over nodes sorted by suffix index in their names
            for nodeName, nodeData in sorted(graphData.items(), key=lambda x: self.getNodeIndexFromName(x[0])):
                if not isinstance(nodeData, dict):
                    raise RuntimeError('loadGraph error: Node is not a dict. File: {}'.format(filepath))

                # retrieve version from
                #   1. nodeData: node saved from a CompatibilityNode
                #   2. nodesVersion in file header: node saved from a Node
                #   3. fallback to no version "0.0": retro-compatibility
                if "version" not in nodeData:
                    nodeData["version"] = nodesVersions.get(nodeData["nodeType"], "0.0")

                n = nodeFactory(nodeData, nodeName)

                # Add node to the graph with raw attributes values
                self._addNode(n, nodeName)

        # Create graph edges by resolving attributes expressions
        self._applyExpr()

        if setupProjectFile:
            # Update filepath related members
            # Note: needs to be done at the end as it will trigger an updateInternals.
            self._setFilepath(filepath)

        return True

    @property
    def updateEnabled(self):
        return self._updateEnabled

    @updateEnabled.setter
    def updateEnabled(self, enabled):
        self._updateEnabled = enabled
        if enabled and self._updateRequested:
            # Trigger an update if requested while disabled
            self.update()
            self._updateRequested = False

    @changeTopology
    def _addNode(self, node, uniqueName):
        """
        Internal method to add the given node to this Graph, with the given name (must be unique).
        Attribute expressions are not resolved.
        """
        if node.graph is not None and node.graph != self:
            raise RuntimeError(
                'Node "{}" cannot be part of the Graph "{}", as it is already part of the other graph "{}".'.format(
                    node.nodeType, self.name, node.graph.name))

        assert uniqueName not in self._nodes.keys()
        node._name = uniqueName
        node.graph = self
        self._nodes.add(node)

    def addNode(self, node, uniqueName=None):
        """
        Add the given node to this Graph with an optional unique name,
        and resolve attributes expressions.
        """
        self._addNode(node, uniqueName if uniqueName else self._createUniqueNodeName(node.nodeType))

        # Resolve attribute expressions
        with GraphModification(self):
            node._applyExpr()

        return node

    def copyNode(self, srcNode, withEdges=False):
        """
        Get a copy instance of a node outside the graph.

        Args:
            srcNode (Node): the node to copy
            withEdges (bool): whether to copy edges

        Returns:
            Node, dict: the created node instance,
                        a dictionary of linked attributes with their original value (empty if withEdges is True)
        """
        with GraphModification(self):
            # create a new node of the same type and with the same attributes values
            # keep links as-is so that CompatibilityNodes attributes can be created with correct automatic description
            # (File params for link expressions)
            node = nodeFactory(srcNode.toDict(), srcNode.nodeType)  # use nodeType as name
            # skip edges: filter out attributes which are links by resetting default values
            skippedEdges = {}
            if not withEdges:
                for n, attr in node.attributes.items():
                    # find top-level links
                    if Attribute.isLinkExpression(attr.value):
                        skippedEdges[attr] = attr.value
                        attr.resetValue()
                    # find links in ListAttribute children
                    elif isinstance(attr, ListAttribute):
                        for child in attr.value:
                            if Attribute.isLinkExpression(child.value):
                                skippedEdges[child] = child.value
                                child.resetValue()

        return node, skippedEdges
    def duplicateNode(self, srcNode):
        """
        Duplicate a node in the graph with its connections.

        Args:
            srcNode: the node to duplicate

        Returns:
            Node: the created node
        """
        node, edges = self.copyNode(srcNode, withEdges=True)
        return self.addNode(node)

    def duplicateNodesFromNode(self, fromNode):
        """
        Duplicate 'fromNode' and all the following nodes towards graph's leaves.

        Args:
            fromNode (Node): the node to start the duplication from

        Returns:
            OrderedDict[Node, Node]: the source->duplicate map
        """
        srcNodes, srcEdges = self.nodesFromNode(fromNode)
        # use OrderedDict to keep duplicated nodes creation order
        duplicates = OrderedDict()

        with GraphModification(self):
            duplicateEdges = {}
            # first, duplicate all nodes without edges and keep a 'source=>duplicate' map
            # keeps tracks of non-created edges for later remap
            for srcNode in srcNodes:
                node, edges = self.copyNode(srcNode, withEdges=False)
                duplicate = self.addNode(node)
                duplicateEdges.update(edges)
                duplicates[srcNode] = duplicate  # original node to duplicate map

            # re-create edges taking into account what has been duplicated
            for attr, linkExpression in duplicateEdges.items():
                link = linkExpression[1:-1]  # remove starting '{' and trailing '}'
                # get source node and attribute name
                edgeSrcNodeName, edgeSrcAttrName = link.split(".", 1)
                edgeSrcNode = self.node(edgeSrcNodeName)
                # if the edge's source node has been duplicated, use the duplicate; otherwise use the original node
                edgeSrcNode = duplicates.get(edgeSrcNode, edgeSrcNode)
                self.addEdge(edgeSrcNode.attribute(edgeSrcAttrName), attr)

        return duplicates

    def outEdges(self, attribute):
        """ Return the list of edges starting from the given attribute """
        # type: (Attribute,) -> [Edge]
        return [edge for edge in self.edges if edge.src == attribute]

    def nodeInEdges(self, node):
        # type: (Node) -> [Edge]
        """ Return the list of edges arriving to this node """
        return [edge for edge in self.edges if edge.dst.node == node]

    def nodeOutEdges(self, node):
        # type: (Node) -> [Edge]
        """ Return the list of edges starting from this node """
        return [edge for edge in self.edges if edge.src.node == node]

    @changeTopology
    def removeNode(self, nodeName):
        """
        Remove the node identified by 'nodeName' from the graph
        and return in and out edges removed by this operation in two dicts {dstAttr.getFullName(), srcAttr.getFullName()}
        """
        node = self.node(nodeName)
        inEdges = {}
        outEdges = {}

        # Remove all edges arriving to and starting from this node
        with GraphModification(self):
            for edge in self.nodeOutEdges(node):
                self.removeEdge(edge.dst)
                outEdges[edge.dst.getFullName()] = edge.src.getFullName()
            for edge in self.nodeInEdges(node):
                self.removeEdge(edge.dst)
                inEdges[edge.dst.getFullName()] = edge.src.getFullName()

            node.alive = False
            self._nodes.remove(node)
            self.update()

        return inEdges, outEdges

    def addNewNode(self, nodeType, name=None, position=None, **kwargs):
        """
        Create and add a new node to the graph.

        Args:
            nodeType (str): the node type name.
            name (str): if specified, the desired name for this node. If not unique, will be prefixed (_N).
            position (Position): (optional) the position of the node
            **kwargs: keyword arguments to initialize node's attributes

        Returns:
            The newly created node.
        """
""" if name and name in self._nodes.keys(): name = self._createUniqueNodeName(name) n = self.addNode(Node(nodeType, position=position, **kwargs), uniqueName=name) n.updateInternals() return n def _createUniqueNodeName(self, inputName): i = 1 while i: newName = "{name}_{index}".format(name=inputName, index=i) if newName not in self._nodes.objects: return newName i += 1 def node(self, nodeName): return self._nodes.get(nodeName) def upgradeNode(self, nodeName): """ Upgrade the CompatibilityNode identified as 'nodeName' Args: nodeName (str): the name of the CompatibilityNode to upgrade Returns: the list of deleted input/output edges """ node = self.node(nodeName) if not isinstance(node, CompatibilityNode): raise ValueError("Upgrade is only available on CompatibilityNode instances.") upgradedNode = node.upgrade() with GraphModification(self): inEdges, outEdges = self.removeNode(nodeName) self.addNode(upgradedNode, nodeName) for dst, src in outEdges.items(): try: self.addEdge(self.attribute(src), self.attribute(dst)) except (KeyError, ValueError) as e: logging.warning("Failed to restore edge {} -> {}: {}".format(src, dst, str(e))) return upgradedNode, inEdges, outEdges def upgradeAllNodes(self): """ Upgrade all upgradable CompatibilityNode instances in the graph. """ nodeNames = [name for name, n in self._compatibilityNodes.items() if n.canUpgrade] with GraphModification(self): for nodeName in nodeNames: self.upgradeNode(nodeName) @Slot(str, result=Attribute) def attribute(self, fullName): # type: (str) -> Attribute """ Return the attribute identified by the unique name 'fullName'. """ node, attribute = fullName.split('.', 1) return self.node(node).attribute(attribute) @staticmethod def getNodeIndexFromName(name): """ Nodes are created with a suffix index; returns this index by parsing node name. Args: name (str): the node name Returns: int: the index retrieved from node name (-1 if not found) """ try: return int(name.split('_')[-1]) except: return -1 @staticmethod def sortNodesByIndex(nodes): """ Sort the given list of Nodes using the suffix index in their names. [NodeName_1, NodeName_0] => [NodeName_0, NodeName_1] Args: nodes (list[Node]): the list of Nodes to sort Returns: list[Node]: the sorted list of Nodes based on their index """ return sorted(nodes, key=lambda x: Graph.getNodeIndexFromName(x.name)) def nodesByType(self, nodeType, sortedByIndex=True): """ Returns all Nodes of the given nodeType. Args: nodeType (str): the node type name to consider. sortedByIndex (bool): whether to sort the nodes by their index (see Graph.sortNodesByIndex) Returns: list[Node]: the list of nodes matching the given nodeType. 
""" nodes = [n for n in self._nodes.values() if n.nodeType == nodeType] return self.sortNodesByIndex(nodes) if sortedByIndex else nodes def findNodeCandidates(self, nodeNameExpr): pattern = re.compile(nodeNameExpr) return [v for k, v in self._nodes.objects.items() if pattern.match(k)] def findNode(self, nodeExpr): candidates = self.findNodeCandidates('^' + nodeExpr) if not candidates: raise KeyError('No node candidate for "{}"'.format(nodeExpr)) if len(candidates) > 1: raise KeyError('Multiple node candidates for "{}": {}'.format(nodeExpr, str([c.name for c in candidates]))) return candidates[0] def findNodes(self, nodesExpr): return [self.findNode(nodeName) for nodeName in nodesExpr] def edge(self, dstAttributeName): return self._edges.get(dstAttributeName) def getLeaves(self): nodesWithOutput = set([edge.src.node for edge in self.edges]) return set(self._nodes) - nodesWithOutput @changeTopology def addEdge(self, srcAttr, dstAttr): assert isinstance(srcAttr, Attribute) assert isinstance(dstAttr, Attribute) if srcAttr.node.graph != self or dstAttr.node.graph != self: raise RuntimeError('The attributes of the edge should be part of a common graph.') if dstAttr in self.edges.keys(): raise RuntimeError('Destination attribute "{}" is already connected.'.format(dstAttr.getFullName())) edge = Edge(srcAttr, dstAttr) self.edges.add(edge) self.markNodesDirty(dstAttr.node) dstAttr.valueChanged.emit() dstAttr.isLinkChanged.emit() return edge def addEdges(self, *edges): with GraphModification(self): for edge in edges: self.addEdge(*edge) @changeTopology def removeEdge(self, dstAttr): if dstAttr not in self.edges.keys(): raise RuntimeError('Attribute "{}" is not connected'.format(dstAttr.getFullName())) self.edges.pop(dstAttr) self.markNodesDirty(dstAttr.node) dstAttr.valueChanged.emit() dstAttr.isLinkChanged.emit() def getDepth(self, node, minimal=False): """ Return node's depth in this Graph. By default, returns the maximal depth of the node unless minimal is set to True. Args: node (Node): the node to consider. minimal (bool): whether to return the minimal depth instead of the maximal one (default). Returns: int: the node's depth in this Graph. 
""" assert node.graph == self assert not self.dirtyTopology minDepth, maxDepth = self._nodesMinMaxDepths[node] return minDepth if minimal else maxDepth def getInputEdges(self, node): return set([edge for edge in self.edges if edge.dst.node is node]) def _getInputEdgesPerNode(self): nodeEdges = defaultdict(set) for edge in self.edges: nodeEdges[edge.dst.node].add(edge.src.node) return nodeEdges def _getOutputEdgesPerNode(self): nodeEdges = defaultdict(set) for edge in self.edges: nodeEdges[edge.src.node].add(edge.dst.node) return nodeEdges def dfs(self, visitor, startNodes=None, longestPathFirst=False, reverse=False): # Default direction: from node to root # Reverse direction: from node to leaves nodeChildren = self._getOutputEdgesPerNode() if reverse else self._getInputEdgesPerNode() # Initialize color map colors = {} for u in self._nodes: colors[u] = WHITE nodes = startNodes or self.getLeaves() if longestPathFirst: # Graph topology must be known and node depths up-to-date assert not self.dirtyTopology nodes = sorted(nodes, key=lambda item: item.depth) try: for node in nodes: self.dfsVisit(node, visitor, colors, nodeChildren, longestPathFirst) except StopGraphVisit: pass def dfsVisit(self, u, visitor, colors, nodeChildren, longestPathFirst): try: self._dfsVisit(u, visitor, colors, nodeChildren, longestPathFirst) except StopBranchVisit: pass def _dfsVisit(self, u, visitor, colors, nodeChildren, longestPathFirst): colors[u] = GRAY visitor.discoverVertex(u, self) # d_time[u] = time = time + 1 children = nodeChildren[u] if longestPathFirst: assert not self.dirtyTopology children = sorted(children, reverse=True, key=lambda item: self._nodesMinMaxDepths[item][1]) for v in children: visitor.examineEdge((u, v), self) if colors[v] == WHITE: visitor.treeEdge((u, v), self) # (u,v) is a tree edge self.dfsVisit(v, visitor, colors, nodeChildren, longestPathFirst) # TODO: avoid recursion elif colors[v] == GRAY: # (u,v) is a back edge visitor.backEdge((u, v), self) elif colors[v] == BLACK: # (u,v) is a cross or forward edge visitor.forwardOrCrossEdge((u, v), self) visitor.finishEdge((u, v), self) colors[u] = BLACK visitor.finishVertex(u, self) def dfsOnFinish(self, startNodes=None): """ :param startNodes: list of starting nodes. Use all leaves if empty. :return: visited nodes and edges. The order is defined by the visit and finishVertex event. """ nodes = [] edges = [] visitor = Visitor() visitor.finishVertex = lambda vertex, graph: nodes.append(vertex) visitor.finishEdge = lambda edge, graph: edges.append(edge) self.dfs(visitor=visitor, startNodes=startNodes) return nodes, edges def dfsToProcess(self, startNodes=None): """ Return the full list of predecessor nodes to process in order to compute the given nodes. Args: startNodes: list of starting nodes. Use all leaves if empty. Returns: visited nodes and edges that are not already computed (node.status != SUCCESS). The order is defined by the visit and finishVertex event. 
""" nodes = [] edges = [] visitor = Visitor() def discoverVertex(vertex, graph): if vertex.hasStatus(Status.SUCCESS): # stop branch visit if discovering a node already computed raise StopBranchVisit() if self._computationBlocked[vertex]: raise RuntimeError("Can't compute node '{}'".format(vertex.name)) def finishVertex(vertex, graph): chunksToProcess = [] for chunk in vertex.chunks: if chunk.status.status is Status.SUBMITTED: logging.warning('Node "{}" is already submitted.'.format(chunk.name)) if chunk.status.status is Status.RUNNING: logging.warning('Node "{}" is already running.'.format(chunk.name)) if chunk.status.status is not Status.SUCCESS: chunksToProcess.append(chunk) if chunksToProcess: nodes.append(vertex) # We could collect specific chunks def finishEdge(edge, graph): if edge[0].hasStatus(Status.SUCCESS) or edge[1].hasStatus(Status.SUCCESS): return edges.append(edge) visitor.finishVertex = finishVertex visitor.finishEdge = finishEdge visitor.discoverVertex = discoverVertex self.dfs(visitor=visitor, startNodes=startNodes) return nodes, edges @Slot(Node, result=bool) def canCompute(self, node): """ Return the computability of a node based on itself and its dependency chain. Computation can't happen for: - CompatibilityNodes - nodes having a non-computed CompatibilityNode in its dependency chain Args: node (Node): the node to evaluate Returns: bool: whether the node can be computed """ if isinstance(node, CompatibilityNode): return False return not self._computationBlocked[node] def updateNodesTopologicalData(self): """ Compute and cache nodes topological data: - min and max depth - computability """ self._nodesMinMaxDepths.clear() self._computationBlocked.clear() compatNodes = [] visitor = Visitor() def discoverVertex(vertex, graph): # initialize depths self._nodesMinMaxDepths[vertex] = (0, 0) # initialize computability self._computationBlocked[vertex] = False if isinstance(vertex, CompatibilityNode): compatNodes.append(vertex) # a not computed CompatibilityNode blocks computation if not vertex.hasStatus(Status.SUCCESS): self._computationBlocked[vertex] = True def finishEdge(edge, graph): currentVertex, inputVertex = edge # update depths currentDepths = self._nodesMinMaxDepths[currentVertex] inputDepths = self._nodesMinMaxDepths[inputVertex] if currentDepths[0] == 0: # if not initialized, set the depth of the first child depthMin = inputDepths[0] + 1 else: depthMin = min(currentDepths[0], inputDepths[0] + 1) self._nodesMinMaxDepths[currentVertex] = (depthMin, max(currentDepths[1], inputDepths[1] + 1)) # update computability if currentVertex.hasStatus(Status.SUCCESS): # output is already computed and available, # does not depend on input connections computability return # propagate inputVertex computability self._computationBlocked[currentVertex] |= self._computationBlocked[inputVertex] leaves = self.getLeaves() visitor.finishEdge = finishEdge visitor.discoverVertex = discoverVertex self.dfs(visitor=visitor, startNodes=leaves) # update graph computability status canComputeLeaves = all([self.canCompute(node) for node in leaves]) if self._canComputeLeaves != canComputeLeaves: self._canComputeLeaves = canComputeLeaves self.canComputeLeavesChanged.emit() # update compatibilityNodes model if len(self._compatibilityNodes) != len(compatNodes): self._compatibilityNodes.reset(compatNodes) compatibilityNodes = Property(BaseObject, lambda self: self._compatibilityNodes, constant=True) def dfsMaxEdgeLength(self, startNodes=None): """ :param startNodes: list of starting nodes. 
        nodesStack = []
        edgesScore = defaultdict(lambda: 0)
        visitor = Visitor()

        def finishEdge(edge, graph):
            u, v = edge
            for i, n in enumerate(reversed(nodesStack)):
                index = i + 1
                if index > edgesScore[(n, v)]:
                    edgesScore[(n, v)] = index

        def finishVertex(vertex, graph):
            v = nodesStack.pop()
            assert v == vertex

        visitor.discoverVertex = lambda vertex, graph: nodesStack.append(vertex)
        visitor.finishVertex = finishVertex
        visitor.finishEdge = finishEdge

        self.dfs(visitor=visitor, startNodes=startNodes, longestPathFirst=True)
        return edgesScore

    def flowEdges(self, startNodes=None):
        """
        Return as few edges as possible, such that if there is a directed path from one vertex to another
        in the original graph, there is also such a path in the reduction.

        :param startNodes:
        :return: the remaining edges after a transitive reduction of the graph.
        """
        flowEdges = []
        edgesScore = self.dfsMaxEdgeLength(startNodes)

        for link, score in edgesScore.items():
            assert score != 0
            if score == 1:
                flowEdges.append(link)
        return flowEdges

    def nodesFromNode(self, startNode, filterTypes=None):
        """
        Return the node chain from startNode to the graph leaves.

        Args:
            startNode (Node): the node to start the visit from.
            filterTypes (str list): (optional) only return the nodes of the given types
                                    (does not stop the visit, this is a post-process only)

        Returns:
            The list of nodes and edges, from startNode to the graph leaves following edges.
        """
        nodes = []
        edges = []
        visitor = Visitor()

        def discoverVertex(vertex, graph):
            if not filterTypes or vertex.nodeType in filterTypes:
                nodes.append(vertex)

        visitor.discoverVertex = discoverVertex
        visitor.examineEdge = lambda edge, graph: edges.append(edge)
        self.dfs(visitor=visitor, startNodes=[startNode], reverse=True)
        return nodes, edges

    def _applyExpr(self):
        with GraphModification(self):
            for node in self._nodes:
                node._applyExpr()

    def toDict(self):
        return {k: node.toDict() for k, node in self._nodes.objects.items()}

    @Slot(result=str)
    def asString(self):
        return str(self.toDict())

    def save(self, filepath=None, setupProjectFile=True):
        path = filepath or self._filepath
        if not path:
            raise ValueError("filepath must be specified for unsaved files.")

        self.header[Graph.IO.Keys.ReleaseVersion] = meshroom.__version__
        self.header[Graph.IO.Keys.FileVersion] = Graph.IO.__version__

        # store versions of node types present in the graph (excluding CompatibilityNode instances)
        usedNodeTypes = set([n.nodeDesc.__class__ for n in self._nodes if isinstance(n, Node)])
        self.header[Graph.IO.Keys.NodesVersions] = {
            "{}".format(p.__name__): meshroom.core.nodeVersion(p, "0.0")
            for p in usedNodeTypes
        }

        data = {
            Graph.IO.Keys.Header: self.header,
            Graph.IO.Keys.Graph: self.toDict()
        }

        with open(path, 'w') as jsonFile:
            json.dump(data, jsonFile, indent=4)

        if path != self._filepath and setupProjectFile:
            self._setFilepath(path)

    def _setFilepath(self, filepath):
        """
        Set the internal filepath of this Graph.
        This method should not be used directly from outside, use save/load instead.

        Args:
            filepath: the graph file path
        """
        if not os.path.isfile(filepath):
            self._unsetFilepath()
            return

        if self._filepath == filepath:
            return

        self._filepath = filepath
        # For now:
        #  * cache folder is located next to the graph file
        #  * graph name is the basename of the graph file
        self.name = os.path.splitext(os.path.basename(filepath))[0]
        self.cacheDir = os.path.join(os.path.abspath(os.path.dirname(filepath)), meshroom.core.cacheFolderName)
        self.filepathChanged.emit()

    def _unsetFilepath(self):
        self._filepath = ""
        self.name = ""
        self.cacheDir = meshroom.core.defaultCacheFolder
        self.filepathChanged.emit()

    def updateInternals(self, startNodes=None, force=False):
        nodes, edges = self.dfsOnFinish(startNodes=startNodes)
        for node in nodes:
            if node.dirty or force:
                node.updateInternals()

    def updateStatusFromCache(self, force=False):
        for node in self._nodes:
            if node.dirty or force:
                node.updateStatusFromCache()

    def updateStatisticsFromCache(self):
        for node in self._nodes:
            node.updateStatisticsFromCache()

    def update(self):
        if not self._updateEnabled:
            # To do the update once for multiple changes
            self._updateRequested = True
            return

        self.updateInternals()
        if os.path.exists(self._cacheDir):
            self.updateStatusFromCache()
        for node in self.nodes:
            node.dirty = False

        # Graph topology has changed
        if self.dirtyTopology:
            # update nodes topological data cache
            self.updateNodesTopologicalData()
            self.dirtyTopology = False

        self.updated.emit()

    def markNodesDirty(self, fromNode):
        """
        Mark all nodes following 'fromNode' as dirty, and request a graph update.
        All nodes marked as dirty will get their outputs to be re-evaluated during the next graph update.

        Args:
            fromNode (Node): the node to start the invalidation from

        See Also:
            Graph.update, Graph.updateInternals, Graph.updateStatusFromCache
        """
        nodes, edges = self.nodesFromNode(fromNode)
        for node in nodes:
            node.dirty = True
        self.update()

    def stopExecution(self):
        """ Request graph execution to be stopped by terminating running chunks"""
        for chunk in self.iterChunksByStatus(Status.RUNNING):
            chunk.stopProcess()

    @Slot()
    def clearSubmittedNodes(self):
        """ Reset the status of already submitted nodes to Status.NONE """
        for node in self.nodes:
            node.clearSubmittedChunks()

    @Slot(Node)
    def clearDataFrom(self, startNode):
        for node in self.nodesFromNode(startNode)[0]:
            node.clearData()

    def iterChunksByStatus(self, status):
        """ Iterate over NodeChunks with the given status """
        for node in self.nodes:
            for chunk in node.chunks:
                if chunk.status.status == status:
                    yield chunk

    def getChunksByStatus(self, status):
        """ Return the list of NodeChunks with the given status """
        chunks = []
        for node in self.nodes:
            chunks += [chunk for chunk in node.chunks if chunk.status.status == status]
        return chunks

    def getChunks(self, nodes=None):
        """ Returns the list of NodeChunks for the given list of nodes (for all nodes if nodes is None) """
        chunks = []
        for node in nodes or self.nodes:
            chunks += [chunk for chunk in node.chunks]
        return chunks

    def getOrderedChunks(self):
        """ Get chunks as visited by dfsOnFinish.

        Returns:
            list of NodeChunks: the ordered list of NodeChunks
        """
        return self.getChunks(self.dfsOnFinish()[0])
    @property
    def nodes(self):
        return self._nodes

    @property
    def edges(self):
        return self._edges

    @property
    def cacheDir(self):
        return self._cacheDir

    @cacheDir.setter
    def cacheDir(self, value):
        if self._cacheDir == value:
            return
        # use unix-style paths for cache directory
        self._cacheDir = value.replace(os.path.sep, "/")
        self.updateInternals(force=True)
        self.updateStatusFromCache(force=True)
        self.cacheDirChanged.emit()

    nodes = Property(BaseObject, nodes.fget, constant=True)
    edges = Property(BaseObject, edges.fget, constant=True)
    filepathChanged = Signal()
    filepath = Property(str, lambda self: self._filepath, notify=filepathChanged)
    fileReleaseVersion = Property(str, lambda self: self.header.get(Graph.IO.Keys.ReleaseVersion, "0.0"), notify=filepathChanged)
    cacheDirChanged = Signal()
    cacheDir = Property(str, cacheDir.fget, cacheDir.fset, notify=cacheDirChanged)
    updated = Signal()
    canComputeLeavesChanged = Signal()
    canComputeLeaves = Property(bool, lambda self: self._canComputeLeaves, notify=canComputeLeavesChanged)
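
# File round-trip sketch (illustrative only; the path is a placeholder): Graph.save()
# writes a header (release/file versions, node versions) plus the serialized nodes,
# and Graph.load() rebuilds the nodes before resolving link expressions into edges.
#
#   g = Graph("myProject")
#   g.addNewNode("NodeTypeA")                     # assumes a registered node type
#   g.save("/tmp/myProject.mg")                   # also calls _setFilepath() -> cacheDir next to the file
#   g2 = Graph("")
#   g2.load("/tmp/myProject.mg")                  # clear(), nodeFactory() per node, then _applyExpr()
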
class BaseNode(BaseObject):
    """
    Base Abstract class for Graph nodes.
    """

    # Regexp handling complex attribute names with recursive understanding of Lists and Groups
    # i.e: a.b, a[0], a[0].b.c[1]
    attributeRE = re.compile(r'\.?(?P<name>\w+)(?:\[(?P<index>\d+)\])?')

    def __init__(self, nodeType, position=None, parent=None, **kwargs):
        """
        Create a new Node instance based on the given node description.
        Any other keyword argument will be used to initialize this node's attributes.

        Args:
            nodeType (str): the node type name for this node
            position (Position): (optional) the position of the node
            parent (BaseObject): this Node's parent
            **kwargs: attributes values
        """
        super(BaseNode, self).__init__(parent)
        self._nodeType = nodeType
        self.nodeDesc = None

        # instantiate node description if nodeType is valid
        if nodeType in meshroom.core.nodesDesc:
            self.nodeDesc = meshroom.core.nodesDesc[nodeType]()

        self.packageName = self.packageVersion = ""
        self._internalFolder = ""

        self._name = None
        self.graph = None
        self.dirty = True  # whether this node's outputs must be re-evaluated on next Graph update
        self._chunks = ListModel(parent=self)
        self._uids = dict()
        self._cmdVars = {}
        self._size = 0
        self._position = position or Position()
        self._attributes = DictModel(keyAttrName='name', parent=self)
        self.attributesPerUid = defaultdict(set)
        self._alive = True  # for QML side to know if the node can be used or is going to be deleted

    def __getattr__(self, k):
        try:
            # Throws exception if not in prototype chain
            # return object.__getattribute__(self, k) # doesn't work in python2
            return object.__getattr__(self, k)
        except AttributeError as e:
            try:
                return self.attribute(k)
            except KeyError:
                raise e

    def getName(self):
        return self._name

    def getLabel(self):
        """
        Returns:
            str: the high-level label of this node
        """
        t, idx = self._name.split("_")
        return "{}{}".format(t, idx if int(idx) > 1 else "")

    def getDocumentation(self):
        return self.nodeDesc.documentation

    @property
    def packageFullName(self):
        return '-'.join([self.packageName, self.packageVersion])

    @Slot(str, result=Attribute)
    def attribute(self, name):
        att = None
        # Complex name indicating group or list attribute
        if '[' in name or '.' in name:
            p = self.attributeRE.findall(name)

            for n, idx in p:
                # first step: get root attribute
                if att is None:
                    att = self._attributes.get(n)
                else:
                    # get child Attribute in Group
                    assert isinstance(att, GroupAttribute)
                    att = att.value.get(n)
                if idx != '':
                    # get child Attribute in List
                    assert isinstance(att, ListAttribute)
                    att = att.value.at(int(idx))
        else:
            att = self._attributes.get(name)
        return att

    def getAttributes(self):
        return self._attributes

    @Slot(str, result=bool)
    def hasAttribute(self, name):
        return name in self._attributes.keys()

    def _applyExpr(self):
        for attr in self._attributes:
            attr._applyExpr()

    @property
    def nodeType(self):
        return self._nodeType

    @property
    def position(self):
        """ Get node position. """
        return self._position

    @position.setter
    def position(self, value):
        """ Set node position.

        Args:
            value (Position): target position
        """
        if self._position == value:
            return
        self._position = value
        self.positionChanged.emit()

    @property
    def alive(self):
        return self._alive

    @alive.setter
    def alive(self, value):
        if self._alive == value:
            return
        self._alive = value
        self.aliveChanged.emit()

    @property
    def depth(self):
        return self.graph.getDepth(self)

    @property
    def minDepth(self):
        return self.graph.getDepth(self, minimal=True)

    def toDict(self):
        pass

    def _computeUids(self):
        """ Compute node uids by combining associated attributes' uids. """
""" for uidIndex, associatedAttributes in self.attributesPerUid.items(): # uid is computed by hashing the sorted list of tuple (name, value) of all attributes impacting this uid uidAttributes = [(a.getName(), a.uid(uidIndex)) for a in associatedAttributes if a.enabled] uidAttributes.sort() self._uids[uidIndex] = hashValue(uidAttributes) def _buildCmdVars(self): def _buildAttributeCmdVars(cmdVars, name, attr): if attr.enabled: if attr.attributeDesc.group is not None: # if there is a valid command line "group" v = attr.getValueStr() cmdVars[name] = '--{name} {value}'.format(name=name, value=v) cmdVars[name + 'Value'] = str(v) if v: cmdVars[attr.attributeDesc.group] = cmdVars.get(attr.attributeDesc.group, '') + \ ' ' + cmdVars[name] elif isinstance(attr, GroupAttribute): assert isinstance(attr.value, DictModel) # if the GroupAttribute is not set in a single command line argument, # the sub-attributes may need to be exposed individually for v in attr._value: _buildAttributeCmdVars(cmdVars, v.name, v) """ Generate command variables using input attributes and resolved output attributes names and values. """ for uidIndex, value in self._uids.items(): self._cmdVars['uid{}'.format(uidIndex)] = value # Evaluate input params for name, attr in self._attributes.objects.items(): if attr.isOutput: continue # skip outputs _buildAttributeCmdVars(self._cmdVars, name, attr) # For updating output attributes invalidation values cmdVarsNoCache = self._cmdVars.copy() cmdVarsNoCache['cache'] = '' # Evaluate output params for name, attr in self._attributes.objects.items(): if attr.isInput: continue # skip inputs # Only consider File attributes for command output parameters if not isinstance(attr.attributeDesc, desc.File): continue defaultValue = attr.defaultValue() try: attr.value = defaultValue.format(**self._cmdVars) attr._invalidationValue = defaultValue.format(**cmdVarsNoCache) except KeyError as e: logging.warning('Invalid expression with missing key on "{nodeName}.{attrName}" with value "{defaultValue}".\nError: {err}'.format(nodeName=self.name, attrName=attr.name, defaultValue=defaultValue, err=str(e))) except ValueError as e: logging.warning('Invalid expression value on "{nodeName}.{attrName}" with value "{defaultValue}".\nError: {err}'.format(nodeName=self.name, attrName=attr.name, defaultValue=defaultValue, err=str(e))) v = attr.getValueStr() self._cmdVars[name] = '--{name} {value}'.format(name=name, value=v) self._cmdVars[name + 'Value'] = str(v) if v: self._cmdVars[attr.attributeDesc.group] = self._cmdVars.get(attr.attributeDesc.group, '') + \ ' ' + self._cmdVars[name] @property def isParallelized(self): return bool(self.nodeDesc.parallelization) if meshroom.useMultiChunks else False @property def nbParallelizationBlocks(self): return len(self._chunks) def hasStatus(self, status): if not self._chunks: return False for chunk in self._chunks: if chunk.status.status != status: return False return True def _isComputed(self): return self.hasStatus(Status.SUCCESS) @Slot() def clearData(self): """ Delete this Node internal folder. Status will be reset to Status.NONE """ if self.internalFolder and os.path.exists(self.internalFolder): shutil.rmtree(self.internalFolder) self.updateStatusFromCache() def isAlreadySubmitted(self): for chunk in self._chunks: if chunk.isAlreadySubmitted(): return True return False def alreadySubmittedChunks(self): return [ch for ch in self._chunks if ch.isAlreadySubmitted()] @Slot() def clearSubmittedChunks(self): """ Reset all submitted chunks to Status.NONE. 
        This method should be used to clear inconsistent status if a computation failed without informing the graph.

        Warnings:
            This must be used with caution. This could lead to inconsistent node status
            if the graph is still being computed.
        """
        for chunk in self.alreadySubmittedChunks():
            chunk.upgradeStatusTo(Status.NONE, ExecMode.NONE)

    def upgradeStatusTo(self, newStatus):
        """
        Upgrade node to the given status and save it on disk.
        """
        for chunk in self._chunks:
            chunk.upgradeStatusTo(newStatus)

    def updateStatisticsFromCache(self):
        for chunk in self._chunks:
            chunk.updateStatisticsFromCache()

    def _updateChunks(self):
        pass

    def updateInternals(self, cacheDir=None):
        """ Update Node's internal parameters and output attributes.

        This method is called when:
         - an input parameter is modified
         - the graph main cache directory is changed

        Args:
            cacheDir (str): (optional) override graph's cache directory with custom path
        """
        if self.nodeDesc:
            self.nodeDesc.update(self)

        for attr in self._attributes:
            attr.updateInternals()

        # Update chunks splitting
        self._updateChunks()
        # Retrieve current internal folder (if possible)
        try:
            folder = self.internalFolder
        except KeyError:
            folder = ''

        # Update command variables / output attributes
        self._cmdVars = {
            'cache': cacheDir or self.graph.cacheDir,
            'nodeType': self.nodeType,
        }
        self._computeUids()
        self._buildCmdVars()
        if self.nodeDesc:
            self.nodeDesc.postUpdate(self)
        # Notify internal folder change if needed
        if self.internalFolder != folder:
            self.internalFolderChanged.emit()

    @property
    def internalFolder(self):
        return self._internalFolder.format(**self._cmdVars)

    def updateStatusFromCache(self):
        """
        Update node status based on status file content/existence.
        """
        for chunk in self._chunks:
            chunk.updateStatusFromCache()

    def submit(self, forceCompute=False):
        for chunk in self._chunks:
            if forceCompute or chunk.status.status != Status.SUCCESS:
                chunk.upgradeStatusTo(Status.SUBMITTED, ExecMode.EXTERN)

    def beginSequence(self, forceCompute=False):
        for chunk in self._chunks:
            if forceCompute or chunk.status.status != Status.SUCCESS:
                chunk.upgradeStatusTo(Status.SUBMITTED, ExecMode.LOCAL)

    def processIteration(self, iteration):
        self._chunks[iteration].process()

    def process(self, forceCompute=False):
        for chunk in self._chunks:
            chunk.process(forceCompute)

    def endSequence(self):
        pass

    def getGlobalStatus(self):
        """
        Get node global status based on the status of its chunks.
        Returns:
            Status: the node global status
        """
        chunksStatus = [chunk.status.status for chunk in self._chunks]

        anyOf = (Status.ERROR, Status.STOPPED, Status.KILLED,
                 Status.RUNNING, Status.SUBMITTED)
        allOf = (Status.SUCCESS,)

        for status in anyOf:
            if any(s == status for s in chunksStatus):
                return status
        for status in allOf:
            if all(s == status for s in chunksStatus):
                return status

        return Status.NONE

    def getChunks(self):
        return self._chunks

    def getSize(self):
        return self._size

    def setSize(self, value):
        if self._size == value:
            return
        self._size = value
        self.sizeChanged.emit()

    def __repr__(self):
        return self.name

    name = Property(str, getName, constant=True)
    label = Property(str, getLabel, constant=True)
    nodeType = Property(str, nodeType.fget, constant=True)
    documentation = Property(str, getDocumentation, constant=True)
    positionChanged = Signal()
    position = Property(Variant, position.fget, position.fset, notify=positionChanged)
    x = Property(float, lambda self: self._position.x, notify=positionChanged)
    y = Property(float, lambda self: self._position.y, notify=positionChanged)
    attributes = Property(BaseObject, getAttributes, constant=True)
    internalFolderChanged = Signal()
    internalFolder = Property(str, internalFolder.fget, notify=internalFolderChanged)
    depthChanged = Signal()
    depth = Property(int, depth.fget, notify=depthChanged)
    minDepth = Property(int, minDepth.fget, notify=depthChanged)
    chunksChanged = Signal()
    chunks = Property(Variant, getChunks, notify=chunksChanged)
    sizeChanged = Signal()
    size = Property(int, getSize, notify=sizeChanged)
    globalStatusChanged = Signal()
    globalStatus = Property(str, lambda self: self.getGlobalStatus().name, notify=globalStatusChanged)
    isComputed = Property(bool, _isComputed, notify=globalStatusChanged)
    aliveChanged = Signal()
    alive = Property(bool, alive.fget, alive.fset, notify=aliveChanged)
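
# Illustration of the command-variable expansion done in _buildCmdVars() (values are
# made up): an enabled input attribute "input" belonging to the command-line group
# "allParams" contributes an individual flag, a raw value and an aggregated group entry:
#
#   _cmdVars["input"]      == '--input "/data/images"'
#   _cmdVars["inputValue"] == '"/data/images"'
#   _cmdVars["allParams"]  == ' --input "/data/images" ...'
#
# Output File attributes are then resolved by formatting their default value with these
# variables, e.g. a default of "{cache}/{nodeType}/{uid0}/" becomes the node's folder,
# while the same expression formatted with an empty "cache" is kept as _invalidationValue.
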
class NodeChunk(BaseObject):
    def __init__(self, node, range, parent=None):
        super(NodeChunk, self).__init__(parent)
        self.node = node
        self.range = range
        self.logManager = LogManager(self)
        self.status = StatusData(node.name, node.nodeType, node.packageName, node.packageVersion)
        self.statistics = stats.Statistics()
        self.statusFileLastModTime = -1
        self._subprocess = None
        # notify update in filepaths when node's internal folder changes
        self.node.internalFolderChanged.connect(self.nodeFolderChanged)

    @property
    def index(self):
        return self.range.iteration

    @property
    def name(self):
        if self.range.blockSize:
            return "{}({})".format(self.node.name, self.index)
        else:
            return self.node.name

    @property
    def statusName(self):
        return self.status.status.name

    @property
    def logger(self):
        return self.logManager.logger

    @property
    def execModeName(self):
        return self.status.execMode.name

    def updateStatusFromCache(self):
        """
        Update node status based on status file content/existence.
        """
        statusFile = self.statusFile
        oldStatus = self.status.status
        # No status file => reset status to Status.None
        if not os.path.exists(statusFile):
            self.statusFileLastModTime = -1
            self.status.reset()
        else:
            with open(statusFile, 'r') as jsonFile:
                statusData = json.load(jsonFile)
            self.status.fromDict(statusData)
            self.statusFileLastModTime = os.path.getmtime(statusFile)
        if oldStatus != self.status.status:
            self.statusChanged.emit()

    @property
    def statusFile(self):
        if self.range.blockSize == 0:
            return os.path.join(self.node.graph.cacheDir, self.node.internalFolder, 'status')
        else:
            return os.path.join(self.node.graph.cacheDir, self.node.internalFolder, str(self.index) + '.status')

    @property
    def statisticsFile(self):
        if self.range.blockSize == 0:
            return os.path.join(self.node.graph.cacheDir, self.node.internalFolder, 'statistics')
        else:
            return os.path.join(self.node.graph.cacheDir, self.node.internalFolder, str(self.index) + '.statistics')

    @property
    def logFile(self):
        if self.range.blockSize == 0:
            return os.path.join(self.node.graph.cacheDir, self.node.internalFolder, 'log')
        else:
            return os.path.join(self.node.graph.cacheDir, self.node.internalFolder, str(self.index) + '.log')

    def saveStatusFile(self):
        """
        Write node status on disk.
        """
""" data = self.status.toDict() statusFilepath = self.statusFile folder = os.path.dirname(statusFilepath) if not os.path.exists(folder): os.makedirs(folder) statusFilepathWriting = getWritingFilepath(statusFilepath) with open(statusFilepathWriting, 'w') as jsonFile: json.dump(data, jsonFile, indent=4) renameWritingToFinalPath(statusFilepathWriting, statusFilepath) def upgradeStatusTo(self, newStatus, execMode=None): if newStatus.value <= self.status.status.value: print('WARNING: downgrade status on node "{}" from {} to {}'.format(self.name, self.status.status, newStatus)) if execMode is not None: self.status.execMode = execMode self.execModeNameChanged.emit() self.status.status = newStatus self.saveStatusFile() self.statusChanged.emit() def updateStatisticsFromCache(self): """ """ oldTimes = self.statistics.times statisticsFile = self.statisticsFile if not os.path.exists(statisticsFile): return with open(statisticsFile, 'r') as jsonFile: statisticsData = json.load(jsonFile) self.statistics.fromDict(statisticsData) if oldTimes != self.statistics.times: self.statisticsChanged.emit() def saveStatistics(self): data = self.statistics.toDict() statisticsFilepath = self.statisticsFile folder = os.path.dirname(statisticsFilepath) if not os.path.exists(folder): os.makedirs(folder) statisticsFilepathWriting = getWritingFilepath(statisticsFilepath) with open(statisticsFilepathWriting, 'w') as jsonFile: json.dump(data, jsonFile, indent=4) renameWritingToFinalPath(statisticsFilepathWriting, statisticsFilepath) def isAlreadySubmitted(self): return self.status.status in (Status.SUBMITTED, Status.RUNNING) def process(self, forceCompute=False): if not forceCompute and self.status.status == Status.SUCCESS: print("Node chunk already computed:", self.name) return global runningProcesses runningProcesses[self.name] = self self.status.initStartCompute() startTime = time.time() self.upgradeStatusTo(Status.RUNNING) self.statThread = stats.StatisticsThread(self) self.statThread.start() try: self.node.nodeDesc.processChunk(self) except Exception as e: self.upgradeStatusTo(Status.ERROR) raise except (KeyboardInterrupt, SystemError, GeneratorExit) as e: self.upgradeStatusTo(Status.STOPPED) raise finally: self.status.initEndCompute() self.status.elapsedTime = time.time() - startTime print(' - elapsed time:', self.status.elapsedTimeStr) # ask and wait for the stats thread to stop self.statThread.stopRequest() self.statThread.join() self.statistics = stats.Statistics() del runningProcesses[self.name] self.upgradeStatusTo(Status.SUCCESS) def stopProcess(self): self.node.nodeDesc.stopProcess(self) statusChanged = Signal() statusName = Property(str, statusName.fget, notify=statusChanged) execModeNameChanged = Signal() execModeName = Property(str, execModeName.fget, notify=execModeNameChanged) statisticsChanged = Signal() nodeFolderChanged = Signal() statusFile = Property(str, statusFile.fget, notify=nodeFolderChanged) logFile = Property(str, logFile.fget, notify=nodeFolderChanged) statisticsFile = Property(str, statisticsFile.fget, notify=nodeFolderChanged)
class TaskManager(BaseObject):
    """
    Manage graph - local and external - computation tasks.
    """
    def __init__(self, parent=None):
        super(TaskManager, self).__init__(parent)
        self._graph = None
        self._nodes = DictModel(keyAttrName='_name', parent=self)
        self._nodesToProcess = []
        self._nodesExtern = []
        # internal thread in which local tasks are executed
        self._thread = TaskThread(self)

        self._blockRestart = False
        self.restartRequested.connect(self.restart)

    def requestBlockRestart(self):
        """
        Block computing.
        Note: should only be used to completely stop computing.
        """
        self._blockRestart = True

    def blockRestart(self):
        """ Avoid the automatic restart of computing. """
        for node in self._nodesToProcess:
            chunkCount = 0
            for chunk in node.chunks:
                if chunk.status.status in (Status.SUBMITTED, Status.ERROR):
                    chunk.upgradeStatusTo(Status.NONE)
                    chunkCount += 1
            if chunkCount == len(node.chunks):
                self.removeNode(node, displayList=True)

        self._blockRestart = False
        self._nodesToProcess = []
        self._thread._state = State.DEAD

    @Slot()
    def restart(self):
        """
        Restart computing when thread has been stopped.
        Note: this is done like this to avoid app freezing.
        """
        # Make sure to wait the end of the current thread
        self._thread.join()

        # Avoid restart if thread was globally stopped
        if self._blockRestart:
            self.blockRestart()
            return

        if self._thread._state != State.STOPPED:
            return

        for node in self._nodesToProcess:
            if node.getGlobalStatus() == Status.STOPPED:
                # Remove node from the computing list
                self.removeNode(node, displayList=False, processList=True)

                # Remove output nodes from display and computing lists
                outputNodes = node.getOutputNodes(recursive=True, dependenciesOnly=True)
                for n in outputNodes:
                    if n.getGlobalStatus() in (Status.ERROR, Status.SUBMITTED):
                        n.upgradeStatusTo(Status.NONE)
                        self.removeNode(n, displayList=True, processList=True)

        # Start a new thread with the remaining nodes to compute
        self._thread = TaskThread(self)
        self._thread.start()

    def compute(self, graph=None, toNodes=None, forceCompute=False, forceStatus=False):
        """
        Start graph computation, from root nodes to leaves - or nodes in 'toNodes' if specified.
        Computation tasks (NodeChunk) happen in a separate thread (see TaskThread).

        :param graph: the graph to consider.
        :param toNodes: specific leaves, all graph leaves if None.
        :param forceCompute: force the computation despite nodes status.
        :param forceStatus: force the computation even if some nodes are submitted externally.
        """
""" self._graph = graph self.updateNodes() if forceCompute: nodes, edges = graph.dfsOnFinish(startNodes=toNodes) self.checkCompatibilityNodes( graph, nodes, "COMPUTATION") # name of the context is important for QML self.checkDuplicates( nodes, "COMPUTATION") # name of the context is important for QML else: # Check dependencies of toNodes if not toNodes: toNodes = graph.getLeafNodes(dependenciesOnly=True) toNodes = list(toNodes) allReady = self.checkNodesDependencies(graph, toNodes, "COMPUTATION") # At this point, toNodes is a list # If it is empty, we raise an error to avoid passing through dfsToProcess if not toNodes: self.raiseImpossibleProcess("COMPUTATION") nodes, edges = graph.dfsToProcess(startNodes=toNodes) if not nodes: logging.warning('Nothing to compute') return self.checkCompatibilityNodes( graph, nodes, "COMPUTATION") # name of the context is important for QML self.checkDuplicates( nodes, "COMPUTATION") # name of the context is important for QML nodes = [node for node in nodes if not self.contains(node) ] # be sure to avoid non-real conflicts chunksInConflict = self.getAlreadySubmittedChunks(nodes) if chunksInConflict: chunksStatus = set( [chunk.status.status.name for chunk in chunksInConflict]) chunksName = [node.name for node in chunksInConflict] # Warning: Syntax and terms are parsed on QML side to recognize the error # Syntax : [Context] ErrorType: ErrorMessage msg = '[COMPUTATION] Already Submitted:\n' \ 'WARNING - Some nodes are already submitted with status: {}\nNodes: {}'.format( ', '.join(chunksStatus), ', '.join(chunksName) ) if forceStatus: logging.warning(msg) else: raise RuntimeError(msg) for node in nodes: node.destroyed.connect(lambda obj=None, name=node.name: self. onNodeDestroyed(obj, name)) node.beginSequence(forceCompute) self._nodes.update(nodes) self._nodesToProcess.extend(nodes) if self._thread._state == State.IDLE: self._thread.start() elif self._thread._state in (State.DEAD, State.ERROR): self._thread = TaskThread(self) self._thread.start() # At the end because it raises a WarningError but should not stop processing if not allReady: self.raiseDependenciesMessage("COMPUTATION") def onNodeDestroyed(self, obj, name): """ Remove node from the taskmanager when it's destroyed in the graph :param obj: :param name: :return: """ if name in self._nodes.keys(): self._nodes.pop(name) def contains(self, node): return node in self._nodes.values() def containsNodeName(self, name): """ Check if a node with the argument name belongs to the display list. """ if name in self._nodes.keys(): return True return False def removeNode(self, node, displayList=True, processList=False, externList=False): """ Remove node from the Task Manager. Args: node (Node): node to remove. displayList (bool): remove from the display list. processList (bool): remove from the nodesToProcess list. externList (bool): remove from the nodesExtern list. """ if displayList and self._nodes.contains(node): self._nodes.pop(node.name) if processList and node in self._nodesToProcess: self._nodesToProcess.remove(node) if externList and node in self._nodesExtern: self._nodesExtern.remove(node) def clear(self): """ Remove all the nodes from the taskmanager :return: """ self._nodes.clear() self._nodesExtern = [] self._nodesToProcess = [] def updateNodes(self): """ Update task manager nodes lists by checking the nodes status. 
""" self._nodesExtern = [ node for node in self._nodesExtern if node.isExtern() and node.isAlreadySubmitted() ] newNodes = [node for node in self._nodes if node.isAlreadySubmitted()] if len(newNodes) != len(self._nodes): self._nodes.clear() self._nodes.update(newNodes) def update(self, graph): """ Add all the nodes that are being rendered in a renderfarm to the taskmanager when new graph is loaded :param graph: :return: """ for node in graph._nodes: if node.isAlreadySubmitted( ) and node._chunks.size() > 0 and node.isExtern(): self._nodes.add(node) self._nodesExtern.append(node) def checkCompatibilityNodes(self, graph, nodes, context): compatNodes = [] for node in nodes: if node in graph._compatibilityNodes.values(): compatNodes.append(node.nameToLabel(node.name)) if compatNodes: # Warning: Syntax and terms are parsed on QML side to recognize the error # Syntax : [Context] ErrorType: ErrorMessage raise RuntimeError( "[{}] Compatibility Issue:\n" "Cannot compute because of these incompatible nodes:\n" "{}".format(context, sorted(compatNodes))) def checkDuplicates(self, nodesToProcess, context): for node in nodesToProcess: for duplicate in node.duplicates: if duplicate in nodesToProcess: # Warning: Syntax and terms are parsed on QML side to recognize the error # Syntax : [Context] ErrorType: ErrorMessage raise RuntimeError( "[{}] Duplicates Issue:\n" "Cannot compute because there are some duplicate nodes to process:\n\n" "First match: '{}' and '{}'\n\n" "There can be other duplicate nodes in the list. Please, check the graph and try again." .format(context, node.nameToLabel(node.name), node.nameToLabel(duplicate.name))) def checkNodesDependencies(self, graph, toNodes, context): """ Check dependencies of nodes to process. Update toNodes with computable/submittable nodes only. Returns: bool: True if all the nodes can be processed. False otherwise. 
""" ready = [] computed = [] for node in toNodes: if context == "COMPUTATION": if graph.canCompute( node) and graph.canSubmitOrCompute(node) % 2 == 1: ready.append(node) elif node.isComputed: computed.append(node) elif context == "SUBMITTING": if graph.canCompute( node) and graph.canSubmitOrCompute(node) > 1: ready.append(node) elif node.isComputed: computed.append(node) else: raise ValueError( "Argument 'context' must be: 'COMPUTATION' or 'SUBMITTING'" ) if len(ready) + len(computed) != len(toNodes): toNodes.clear() toNodes.extend(ready) return False return True def raiseDependenciesMessage(self, context): # Warning: Syntax and terms are parsed on QML side to recognize the error # Syntax : [Context] ErrorType: ErrorMessage raise RuntimeWarning( "[{}] Unresolved dependencies:\n" "Some nodes cannot be computed in LOCAL/submitted in EXTERN because of unresolved dependencies.\n\n" "Nodes which are ready will be processed.".format(context)) def raiseImpossibleProcess(self, context): # Warning: Syntax and terms are parsed on QML side to recognize the error # Syntax : [Context] ErrorType: ErrorMessage raise RuntimeError( "[{}] Impossible Process:\n" "There is no node able to be processed.".format(context)) def submit(self, graph=None, submitter=None, toNodes=None): """ Nodes are send to the renderfarm :param graph: :param submitter: :param toNodes: :return: """ # Ensure submitter is properly set sub = None if submitter: sub = meshroom.core.submitters.get(submitter, None) elif len(meshroom.core.submitters) == 1: # if only one submitter available use it allSubmitters = meshroom.core.submitters.values() sub = next(iter(allSubmitters)) # retrieve the first element if sub is None: # Warning: Syntax and terms are parsed on QML side to recognize the error # Syntax : [Context] ErrorType: ErrorMessage raise RuntimeError( "[SUBMITTING] Unknown Submitter:\n" "Unknown Submitter called '{submitter}'. Available submitters are: '{allSubmitters}'." 
.format(submitter=submitter, allSubmitters=str(meshroom.core.submitters.keys()))) # Update task manager's lists self.updateNodes() # Check dependencies of toNodes if not toNodes: toNodes = graph.getLeafNodes(dependenciesOnly=True) toNodes = list(toNodes) allReady = self.checkNodesDependencies(graph, toNodes, "SUBMITTING") # At this point, toNodes is a list # If it is empty, we raise an error to avoid passing through dfsToProcess if not toNodes: self.raiseImpossibleProcess("SUBMITTING") nodesToProcess, edgesToProcess = graph.dfsToProcess(startNodes=toNodes) if not nodesToProcess: logging.warning('Nothing to compute') return self.checkCompatibilityNodes( graph, nodesToProcess, "SUBMITTING") # name of the context is important for QML self.checkDuplicates( nodesToProcess, "SUBMITTING") # name of the context is important for QML flowEdges = graph.flowEdges(startNodes=toNodes) edgesToProcess = set(edgesToProcess).intersection(flowEdges) logging.info("Nodes to process: {}".format(nodesToProcess)) logging.info("Edges to process: {}".format(edgesToProcess)) try: res = sub.submit(nodesToProcess, edgesToProcess, graph.filepath) if res: for node in nodesToProcess: node.destroyed.connect(lambda obj=None, name=node.name: self.onNodeDestroyed(obj, name)) node.submit() # update node status self._nodes.update(nodesToProcess) self._nodesExtern.extend(nodesToProcess) # At the end because it raises a WarningError but should not stop processing if not allReady: self.raiseDependenciesMessage("SUBMITTING") except Exception as e: logging.error("Error on submit : {}".format(e)) def submitFromFile(self, graphFile, submitter, toNode=None): """ Submit the given graph via the given submitter. """ graph = meshroom.core.graph.loadGraph(graphFile) toNodes = graph.findNodes([toNode]) if toNode else None self.submit(graph, submitter, toNodes) def getAlreadySubmittedChunks(self, nodes): """ Check if nodes have already been submitted in another Meshroom instance. :param nodes: :return: """ out = [] for node in nodes: for chunk in node.chunks: # Already submitted/running chunks in another task manager if chunk.isAlreadySubmitted() and not self.containsNodeName( chunk.statusNodeName): out.append(chunk) return out nodes = Property(BaseObject, lambda self: self._nodes, constant=True) restartRequested = Signal()
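
# --- Illustrative usage sketch (not part of the TaskManager implementation) ---
# A minimal, hedged example of how a TaskManager is typically driven: load a graph,
# compute it locally, or submit it to a renderfarm. The graph file path and the
# submitter name are placeholders; loadGraph/compute/submit mirror the methods above.
#
#   import meshroom.core.graph
#
#   taskManager = TaskManager()
#   graph = meshroom.core.graph.loadGraph("/path/to/project.mg")  # hypothetical path
#
#   # Local computation of all leaf nodes, run in the TaskManager's internal TaskThread
#   taskManager.compute(graph)
#
#   # Or external submission, assuming a submitter named "SimpleFarm" is registered
#   # in meshroom.core.submitters
#   taskManager.submit(graph, submitter="SimpleFarm")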
class Attribute(BaseObject):
    """
    """

    stringIsLinkRe = re.compile('^\{[A-Za-z]+[A-Za-z0-9_.]*\}$')

    def __init__(self, node, attributeDesc, isOutput, root=None, parent=None):
        """
        Attribute constructor

        Args:
            node (Node): the Node hosting this Attribute
            attributeDesc (desc.Attribute): the description of this Attribute
            isOutput (bool): whether this Attribute is an output of the Node
            root (Attribute): (optional) the root Attribute (List or Group) containing this one
            parent (BaseObject): (optional) the parent BaseObject
        """
        super(Attribute, self).__init__(parent)
        self._name = attributeDesc.name
        self._root = None if root is None else weakref.ref(root)
        self._node = weakref.ref(node)
        self.attributeDesc = attributeDesc
        self._isOutput = isOutput
        self._value = attributeDesc.value
        self._label = attributeDesc.label

        # invalidation value for output attributes
        self._invalidationValue = ""

    @property
    def node(self):
        return self._node()

    @property
    def root(self):
        return self._root() if self._root else None

    def absoluteName(self):
        return '{}.{}.{}'.format(self.node.graph.name, self.node.name, self._name)

    def fullName(self):
        """ Name inside the Graph: nodeName.name """
        if isinstance(self.root, ListAttribute):
            return '{}[{}]'.format(self.root.fullName(), self.root.index(self))
        elif isinstance(self.root, GroupAttribute):
            return '{}.{}'.format(self.root.fullName(), self._name)
        return '{}.{}'.format(self.node.name, self._name)

    def asLinkExpr(self):
        """ Return link expression for this Attribute """
        return "{" + self.fullName() + "}"

    def getName(self):
        """ Attribute name """
        return self._name

    def getType(self):
        return self.attributeDesc.__class__.__name__

    def getLabel(self):
        return self._label

    def _get_value(self):
        return self.getLinkParam().value if self.isLink else self._value

    def _set_value(self, value):
        if self._value == value:
            return

        if isinstance(value, Attribute) or Attribute.isLinkExpression(value):
            # if we set a link to another attribute
            self._value = value
        else:
            # if we set a new value, we use the attribute descriptor validator to check the validity of the value
            # and apply some conversion if needed
            convertedValue = self.desc.validateValue(value)
            self._value = convertedValue

        # Request graph update when input parameter value is set
        # and parent node belongs to a graph
        # Output attributes value are set internally during the update process,
        # which is why we don't trigger any update in this case
        # TODO: update only the nodes impacted by this change
        # TODO: only update the graph if this attribute participates to a UID
        if self.isInput:
            self.requestGraphUpdate()

        self.valueChanged.emit()

    def resetValue(self):
        self._value = ""

    def requestGraphUpdate(self):
        if self.node.graph:
            self.node.graph.markNodesDirty(self.node)

    @property
    def isOutput(self):
        return self._isOutput

    @property
    def isInput(self):
        return not self._isOutput

    def uid(self, uidIndex=-1):
        """
        """
        # 'uidIndex' should be in 'self.desc.uid' but in the case of linked attribute
        # it will not be the case (so we cannot have an assert).
        if self.isOutput:
            # only dependent on the hash of its value without the cache folder
            return hashValue(self._invalidationValue)
        if self.isLink:
            return self.getLinkParam().uid(uidIndex)
        if isinstance(self._value, (list, tuple, set,)):
            # hash of sorted values hashed
            return hashValue([hashValue(v) for v in sorted(self._value)])
        return hashValue(self._value)

    @property
    def isLink(self):
        """ Whether the attribute is a link to another attribute. """
        return self.node.graph and self.isInput and self in self.node.graph.edges.keys()

    @staticmethod
    def isLinkExpression(value):
        """ Return whether the given argument is a link expression.
        A link expression is a string matching the {nodeName.attrName} pattern.
        """
        return isinstance(value, pyCompatibility.basestring) and Attribute.stringIsLinkRe.match(value)

    def getLinkParam(self):
        return self.node.graph.edge(self).src if self.isLink else None

    def _applyExpr(self):
        """
        For string parameters with an expression (when loaded from file),
        this function convert the expression into a real edge in the graph
        and clear the string value.
        """
        v = self._value
        g = self.node.graph
        if not g:
            return
        if isinstance(v, Attribute):
            g.addEdge(v, self)
            self.resetValue()
        elif self.isInput and Attribute.isLinkExpression(v):
            # value is a link to another attribute
            link = v[1:-1]
            linkNode, linkAttr = link.split('.')
            g.addEdge(g.node(linkNode).attribute(linkAttr), self)
            self.resetValue()

    def getExportValue(self):
        if self.isLink:
            return self.getLinkParam().asLinkExpr()
        if self.isOutput:
            return self.desc.value
        return self._value

    def getValueStr(self):
        if isinstance(self.attributeDesc, desc.ChoiceParam) and not self.attributeDesc.exclusive:
            assert(isinstance(self.value, collections.Sequence) and not isinstance(self.value, pyCompatibility.basestring))
            return self.attributeDesc.joinChar.join(self.value)
        if isinstance(self.attributeDesc, (desc.StringParam, desc.File)):
            return '"{}"'.format(self.value)
        return str(self.value)

    def defaultValue(self):
        return self.desc.value

    def _isDefault(self):
        return self._value == self.defaultValue()

    def getPrimitiveValue(self, exportDefault=True):
        return self._value

    name = Property(str, getName, constant=True)
    label = Property(str, getLabel, constant=True)
    type = Property(str, getType, constant=True)
    desc = Property(desc.Attribute, lambda self: self.attributeDesc, constant=True)
    valueChanged = Signal()
    value = Property(Variant, _get_value, _set_value, notify=valueChanged)
    isOutput = Property(bool, isOutput.fget, constant=True)
    isLinkChanged = Signal()
    isLink = Property(bool, isLink.fget, notify=isLinkChanged)
    isDefault = Property(bool, _isDefault, notify=valueChanged)