def _set_node_status(self, node_id, node_running_status):
    node = self.node_id_to_node[node_id]
    node.node_running_status = node_running_status
    logging.info("Node running status {} {}".format(node_running_status, node.title))

    if node_running_status == NodeRunningStatus.FAILED:
        # TODO: optional cancel based on parameter
        self.kill()
        self._node_running_status = NodeRunningStatus.FAILED_WAITING

    if node_running_status in {NodeRunningStatus.SUCCESS, NodeRunningStatus.RESTORED}:
        for dependent_node_id in self.node_id_to_dependents[node_id]:
            dependent_node = self.node_id_to_node[dependent_node_id]
            prev_dependency_index = self.node_id_to_dependency_index[dependent_node_id]

            removed_dependencies = 0
            for node_input in dependent_node.inputs:
                for input_reference in node_input.input_references:
                    if to_object_id(input_reference.node_id) == to_object_id(node_id):
                        removed_dependencies += 1
            dependency_index = prev_dependency_index - removed_dependencies

            self.dependency_index_to_node_ids[prev_dependency_index].remove(dependent_node_id)
            self.dependency_index_to_node_ids[dependency_index].add(dependent_node_id)
            self.node_id_to_dependency_index[dependent_node_id] = dependency_index
        self.uncompleted_nodes_count -= 1

    if self.uncompleted_nodes_count == 0 and not NodeRunningStatus.is_failed(self._node_running_status):
        self._node_running_status = NodeRunningStatus.SUCCESS
def get_db_node(self, node_id, user_id=None):
    """Get dict representation of the Node.

    Args:
        node_id (ObjectId, str): Node ID
        user_id (str, ObjectId, None): User ID

    Return:
        (dict) dict representation of the Node
    """
    res = get_db_connector()[self.collection].find_one({'_id': to_object_id(node_id)})
    if not res:
        return res

    res['_readonly'] = (user_id != to_object_id(res['author']))

    sub_nodes_dicts = None
    for parameter in res['parameters']:
        if parameter['name'] == '_nodes':
            sub_nodes_dicts = parameter['value']['value']
            break

    # TODO: join collections using database capabilities
    if self.collection == Collections.RUNS:
        self._update_sub_nodes_fields(sub_nodes_dicts, '_id', _PROPERTIES_TO_GET_FROM_SUBS)
    self._update_sub_nodes_fields(
        sub_nodes_dicts,
        'original_node_id',
        ['node_status'],
        reference_collection=Collections.TEMPLATES,
    )

    return res
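# A minimal sketch of the document shape the loop above assumes: sub-node dicts
# live under the `_nodes` parameter's value.value. The concrete field values
# below are made up for illustration; only the nesting mirrors the code.
example_run_document = {
    '_id': 'ObjectId(...)',
    'author': 'ObjectId(...)',
    'parameters': [
        {'name': 'cmd', 'value': 'echo hello'},
        {'name': '_nodes', 'value': {'value': [
            {'_id': 'sub-node-1', 'original_node_id': 'template-1'},
            {'_id': 'sub-node-2', 'original_node_id': 'template-2'},
        ]}},
    ],
}

sub_nodes_dicts = None
for parameter in example_run_document['parameters']:
    if parameter['name'] == '_nodes':
        sub_nodes_dicts = parameter['value']['value']
        break
assert sub_nodes_dicts is not None and len(sub_nodes_dicts) == 2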
def get_nodes(node_link=None):
    author = to_object_id(g.user._id)

    # if node_link is a base node
    if node_link in node_collection.name_to_class:
        return JSONEncoder().encode({
            'data': node_collection.name_to_class[node_link].get_default().to_dict(),
            'status': 'success'})
    # if node_link is defined (Node id)
    elif node_link:
        try:
            node_id = to_object_id(node_link)
        except Exception:
            return 'Invalid ID', 404
        node = node_collection_manager.get_db_node(node_id, author)
        if node:
            return JSONEncoder().encode({
                'data': node,
                'status': 'success'})
        else:
            return 'Node `{}` was not found'.format(node_link), 404
    else:
        query = json.loads(request.args.get('query', "{}"))
        query["author"] = to_object_id(g.user._id)
        nodes_query = {k: v for k, v in query.items() if k in PAGINATION_QUERY_KEYS}
        count_query = {k: v for k, v in query.items() if k in COUNT_QUERY_KEYS}
        return JSONEncoder().encode({
            'nodes': node_collection_manager.get_db_nodes(**nodes_query),
            'total_count': node_collection_manager.get_db_nodes_count(**count_query),
            'status': 'success'})
def _set_node_status(self, node_id, node_running_status):
    node = self.node_id_to_node[node_id]
    node.node_running_status = node_running_status

    if node_running_status == NodeRunningStatus.FAILED:
        self.graph.graph_running_status = GraphRunningStatus.FAILED_WAITING

    if node_running_status in {NodeRunningStatus.SUCCESS, NodeRunningStatus.FAILED, NodeRunningStatus.RESTORED}:
        for dependent_node_id in self.node_id_to_dependents[node_id]:
            dependent_node = self.node_id_to_node[dependent_node_id]
            prev_dependency_index = self.node_id_to_dependency_index[dependent_node_id]

            removed_dependencies = 0
            for node_input in dependent_node.inputs:
                for input_value in node_input.values:
                    if to_object_id(input_value.node_id) == to_object_id(node_id):
                        removed_dependencies += 1
            dependency_index = prev_dependency_index - removed_dependencies

            self.dependency_index_to_node_ids[prev_dependency_index].remove(dependent_node_id)
            self.dependency_index_to_node_ids[dependency_index].add(dependent_node_id)
            self.node_id_to_dependency_index[dependent_node_id] = dependency_index
        self.uncompleted_nodes_count -= 1

    if self.uncompleted_nodes_count == 0 and not GraphRunningStatus.is_failed(self.graph.graph_running_status):
        self.graph.graph_running_status = GraphRunningStatus.SUCCESS
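# Illustrative sketch of the dependency-index bookkeeping used by both
# _set_node_status variants above, on plain strings instead of Node objects.
# Nodes sit in a bucket keyed by the number of unfinished dependencies;
# finishing a node moves each dependent down by the number of edges it had from
# the finished node, and bucket 0 is the ready set. All names are hypothetical.
from collections import defaultdict

edges = {'b': ['a'], 'c': ['a', 'a'], 'd': ['b', 'c']}  # node -> its dependency list (duplicates allowed)
dependency_index_to_node_ids = defaultdict(set)
node_id_to_dependency_index = {'a': 0}
node_id_to_dependents = defaultdict(set)

dependency_index_to_node_ids[0].add('a')
for node, deps in edges.items():
    node_id_to_dependency_index[node] = len(deps)
    dependency_index_to_node_ids[len(deps)].add(node)
    for dep in deps:
        node_id_to_dependents[dep].add(node)

def finish(node_id):
    # Move every dependent to a lower bucket, one step per incoming edge.
    for dependent in node_id_to_dependents[node_id]:
        prev = node_id_to_dependency_index[dependent]
        removed = sum(1 for dep in edges[dependent] if dep == node_id)
        dependency_index_to_node_ids[prev].remove(dependent)
        dependency_index_to_node_ids[prev - removed].add(dependent)
        node_id_to_dependency_index[dependent] = prev - removed

finish('a')
assert dependency_index_to_node_ids[0] == {'a', 'b', 'c'}  # b and c are now ready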
def get_nodes(node_link=None):
    user_id = to_object_id(g.user._id)

    # if node_link is a base node
    if node_link in node_collection.name_to_class:
        return JSONEncoder().encode({
            'data': node_collection.name_to_class[node_link].get_default().to_dict(),
            'resources_dict': resource_manager.resources_dict,
            'status': 'success',
        })
    else:
        try:
            node_id = to_object_id(node_link)
        except Exception:
            return make_fail_response('Invalid ID'), 404
        node = node_collection_manager.get_db_node(node_id, user_id)
        if node:
            return JSONEncoder().encode({
                'data': node,
                'resources_dict': resource_manager.resources_dict,
                'status': 'success',
            })
        else:
            return make_fail_response('Node `{}` was not found'.format(node_link)), 404
def get_nodes(collection, node_link=None):
    user_id = to_object_id(g.user._id)

    # if node_link is a base node
    if node_link in executor_manager.kind_to_executor_class and collection == Collections.TEMPLATES:
        kind = node_link
        node = executor_manager.kind_to_executor_class[kind].get_default_node(
            is_workflow=kind in workflow_manager.kind_to_workflow_dict)
        if isinstance(node, tuple):
            data = node[0].to_dict()
            tour_steps = node[1]
        else:
            data = node.to_dict()
            tour_steps = []
        data['kind'] = kind
        return JSONEncoder().encode({
            'node': data,
            'tour_steps': tour_steps,
            'plugins_dict': PLUGINS_DICT,
            'status': 'success',
        })
    elif node_link in workflow_manager.kind_to_workflow_dict and collection == Collections.GROUPS:
        # TODO: move group to a separate class
        group_dict = Group().to_dict()
        group_dict['kind'] = node_link
        return JSONEncoder().encode({
            'group': group_dict,
            'plugins_dict': PLUGINS_DICT,
            'status': 'success',
        })
    else:
        try:
            node_id = to_object_id(node_link)
        except Exception:
            return make_fail_response('Invalid ID'), 404

        if collection == Collections.GROUPS:
            # TODO: move group to a separate class
            group = node_collection_managers[collection].get_db_object(node_id, user_id)
            if group:
                return JSONEncoder().encode({
                    'group': group,
                    'plugins_dict': PLUGINS_DICT,
                    'status': 'success',
                })
            else:
                return make_fail_response('Group `{}` was not found'.format(node_link)), 404

        node = node_collection_managers[collection].get_db_node(node_id, user_id)
        app.logger.debug(node)
        if node:
            return JSONEncoder().encode({
                'node': node,
                'plugins_dict': PLUGINS_DICT,
                'status': 'success',
            })
        else:
            return make_fail_response('Node `{}` was not found'.format(node_link)), 404
def get_db_node(node_id, user_id=None):
    """Get dict representation of the Node.

    Args:
        node_id (ObjectId, str): Node ID
        user_id (str, ObjectId, None): User ID

    Return:
        (dict) dict representation of the Node
    """
    res = get_db_connector().nodes.find_one({'_id': to_object_id(node_id)})
    if res:
        res['_readonly'] = (user_id != to_object_id(res['author']))
    return res
def get_graph(graph_id=None):
    if graph_id == 'new':
        return JSONEncoder().encode({
            'data': Graph().to_dict(),
            'status': 'success',
        })
    elif graph_id:
        graph = graph_collection_manager.get_db_graph(graph_id)
        if graph:
            return JSONEncoder().encode({'data': graph, 'status': 'success'})
        else:
            return 'Graph was not found', 404
    else:
        query = json.loads(request.args.get('query', "{}"))
        query["author"] = to_object_id(g.user._id)
        count_query = {k: v for k, v in query.items() if k in COUNT_QUERY_KEYS}
        graphs_query = {k: v for k, v in query.items() if k in PAGINATION_QUERY_KEYS}
        return JSONEncoder().encode({
            'graphs': list(graph_collection_manager.get_db_graphs(**graphs_query)),
            'total_count': graph_collection_manager.get_db_graphs_count(**count_query),
            'status': 'success',
        })
def get_db_nodes(author, status=None, base_node_names=None, search=None, per_page=20, offset=0):
    """Get a subset of the Nodes.

    Args:
        author (ObjectId): Author of the Nodes
        status (str, None): Node running status
        base_node_names (str, list of str, None): Names of base nodes to filter by
        search (str, None): Search pattern
        per_page (int): Number of Nodes per page
        offset (int): Offset

    Return:
        (list of dict) List of Nodes in dict format
    """
    and_query = NodeCollectionManager._get_basic_query(
        author=author,
        status=status,
        base_node_names=base_node_names,
        search=search,
    )

    db_nodes = get_db_connector().nodes.find({
        '$and': and_query
    }).sort('insertion_date', -1).skip(offset).limit(per_page)

    res = []
    for node in db_nodes:
        node['_readonly'] = (author != to_object_id(node['author']))
        res.append(node)
    return res
def post_search_nodes(collection):
    query = json.loads(request.data)
    app.logger.debug(request.data)
    query['user_id'] = to_object_id(g.user._id)

    virtual_collection = query.pop('virtual_collection', None)

    if len(query.keys() - PAGINATION_QUERY_KEYS):
        return make_fail_response(
            'Unknown keys: `{}`'.format(query.keys() - PAGINATION_QUERY_KEYS)), 400

    if collection == 'in_hubs':
        hub = query.pop('hub')
        res = hub_manager.kind_to_hub_class[hub].search(plynx.base.hub.Query(**query))
    else:
        if virtual_collection == NodeVirtualCollection.OPERATIONS:
            query['node_kinds'] = list(operation_manager.kind_to_operation_dict.keys())
        elif virtual_collection == NodeVirtualCollection.WORKFLOWS:
            query['node_kinds'] = list(workflow_manager.kind_to_workflow_dict.keys())
        res = node_collection_managers[collection].get_db_objects(**query)

    return make_success_response({
        'items': res['list'],
        'total_count': res['metadata'][0]['total'] if res['metadata'] else 0,
        'plugins_dict': PLUGINS_DICT,
    })
def _transplant_node(node, new_node):
    if to_object_id(node.parent_node) == new_node._id:
        return node
    new_node.apply_properties(node)
    new_node.parent_node = str(new_node._id)
    new_node._id = node._id
    return new_node
def _update_node_statuses(db_graph):
    if not db_graph:
        return None
    node_ids = set(
        to_object_id(node['parent_node']) for node in db_graph['nodes']
    )
    db_nodes = GraphCollectionManager.node_collection_manager.get_db_nodes_by_ids(node_ids)

    node_id_to_db_node = {db_node['_id']: db_node for db_node in db_nodes}

    for g_node in db_graph['nodes']:
        node_id = to_object_id(g_node['parent_node'])
        if node_id in node_id_to_db_node:
            db_node = node_id_to_db_node[node_id]
            g_node['node_status'] = db_node['node_status']

    return db_graph
def get_db_graph(graph_id, user_id=None):
    """Get dict representation of the Graph.

    Args:
        graph_id (ObjectId, str): Graph ID
        user_id (str, ObjectId, None): User ID

    Return:
        (dict) dict representation of the Graph
    """
    res = GraphCollectionManager._update_node_statuses(
        get_db_connector().graphs.find_one({'_id': to_object_id(graph_id)}))
    if res:
        res['_readonly'] = (to_object_id(user_id) != to_object_id(res['author']))
    return res
def get_db_object(self, object_id, user_id=None):
    """Get dict representation of an Object.

    Args:
        object_id (ObjectId, str): Object ID
        user_id (str, ObjectId, None): User ID

    Return:
        (dict) dict representation of the Object
    """
    res = get_db_connector()[self.collection].find_one({'_id': to_object_id(object_id)})
    if not res:
        return res
    res['_readonly'] = (user_id != to_object_id(res['author']))
    return res
def _get_node_with_inputs(self, node_id):
    """Get the node and init its inputs, i.e. fill its resource_ids."""
    res = self.node_id_to_node[node_id]

    for node_input in res.inputs:
        for value in node_input.values:
            value.resource_id = self.node_id_to_node[
                to_object_id(value.node_id)
            ].get_output_by_name(value.output_id).resource_id

    return res
def __init__(self, node_dict):
    super(DAG, self).__init__(node_dict)

    self.subnodes = None
    # TODO: make a function to look up a parameter by name
    for parameter in self.node.parameters:
        if parameter.name == '_nodes':
            self.subnodes = parameter.value.value
    assert self.subnodes is not None, 'Could not find subnodes'

    self.node_id_to_node = {
        node._id: node for node in self.subnodes
    }

    # number of dependencies to ids
    self.dependency_index_to_node_ids = defaultdict(lambda: set())
    self.node_id_to_dependents = defaultdict(lambda: set())
    self.node_id_to_dependency_index = defaultdict(lambda: 0)
    self.uncompleted_nodes_count = 0

    self._node_running_status = NodeRunningStatus.READY

    for node in self.subnodes:
        node_id = node._id

        if node_id == SpecialNodeId.INPUT:
            updated_resources_count = 0
            for output in node.outputs:
                for node_input in self.node.inputs:
                    if node_input.name == output.name:
                        updated_resources_count += 1
                        output.values = node_input.values
            if updated_resources_count != len(self.node.inputs):
                raise Exception('Matched {} inputs out of {}'.format(
                    updated_resources_count, len(self.node.inputs)))

        # ignore nodes in finished statuses
        if NodeRunningStatus.is_finished(node.node_running_status) and node_id != SpecialNodeId.OUTPUT:
            continue

        dependency_index = 0
        for node_input in node.inputs:
            for input_reference in node_input.input_references:
                dep_node_id = to_object_id(input_reference.node_id)
                self.node_id_to_dependents[dep_node_id].add(node_id)
                if not NodeRunningStatus.is_finished(self.node_id_to_node[dep_node_id].node_running_status):
                    dependency_index += 1

        if not NodeRunningStatus.is_finished(node.node_running_status):
            self.uncompleted_nodes_count += 1
        self.dependency_index_to_node_ids[dependency_index].add(node_id)
        self.node_id_to_dependency_index[node_id] = dependency_index

    self.monitoring_node_ids = set()

    if self.uncompleted_nodes_count == 0:
        self._node_running_status = NodeRunningStatus.SUCCESS
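# Hedged sketch of the SpecialNodeId.INPUT wiring above: the workflow's own
# inputs are copied onto the matching outputs of the synthetic INPUT sub-node,
# matched by name, so downstream sub-nodes can reference them like any other
# output. Plain dicts stand in for Input/Output objects; names are made up.
workflow_inputs = [{'name': 'data', 'values': ['res-1']}]
input_node_outputs = [{'name': 'data', 'values': []}, {'name': 'unused', 'values': []}]

updated = 0
for output in input_node_outputs:
    for wf_input in workflow_inputs:
        if wf_input['name'] == output['name']:
            updated += 1
            output['values'] = wf_input['values']
if updated != len(workflow_inputs):
    raise Exception('Matched {} inputs out of {}'.format(updated, len(workflow_inputs)))
assert input_node_outputs[0]['values'] == ['res-1']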
def pop_jobs(self):
    """Get a set of nodes with satisfied dependencies."""
    res = []
    logging.info("Pop jobs")

    for running_node_dict in node_collection_manager.get_db_nodes_by_ids(self.monitoring_node_ids):
        # check status
        if NodeRunningStatus.is_finished(running_node_dict['node_running_status']):
            node = Node.from_dict(running_node_dict)
            self.update_node(node)
            self.monitoring_node_ids.remove(node._id)

    if NodeRunningStatus.is_failed(self.node.node_running_status):
        logging.info("Job in DAG failed, pop_jobs returns []")
        return res

    cached_nodes = []
    for node_id in self.dependency_index_to_node_ids[0]:
        # Get the node and init its inputs, i.e. fill its resource ids
        orig_node = self.node_id_to_node[node_id]
        for node_input in orig_node.inputs:
            for input_reference in node_input.input_references:
                node_input.values.extend(
                    self.node_id_to_node[to_object_id(input_reference.node_id)]
                    .get_output_by_name(input_reference.output_id)
                    .values
                )
        orig_node.node_running_status = NodeRunningStatus.IN_QUEUE
        node = orig_node.copy()

        if DAG._cacheable(node) and False:     # !!! _cacheable is broken
            try:
                cache = self.node_cache_manager.get(node, self.graph.author)
                if cache:
                    node.node_running_status = NodeRunningStatus.RESTORED
                    node.outputs = cache.outputs
                    node.logs = cache.logs
                    node.cache_url = '{}/graphs/{}?nid={}'.format(
                        self.WEB_CONFIG.endpoint.rstrip('/'),
                        str(cache.graph_id),
                        str(cache.node_id),
                    )
                    cached_nodes.append(node)
                    continue
            except Exception as err:
                logging.exception("Unable to update cache: `{}`".format(err))
        res.append(node)
    del self.dependency_index_to_node_ids[0]

    for node in cached_nodes:
        self.update_node(node)

    return res
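# Minimal sketch of the input-filling step in pop_jobs: before a ready node is
# queued, each of its input references is resolved to the concrete output
# values of the upstream node. Dicts stand in for Node/Input/Output objects;
# all names here are hypothetical.
upstream = {'outputs': [{'name': 'out', 'values': ['resource-123']}]}
nodes = {'up': upstream}

def get_output_by_name(node, name):
    return next(o for o in node['outputs'] if o['name'] == name)

ready_node = {'inputs': [
    {'name': 'in', 'input_references': [{'node_id': 'up', 'output_id': 'out'}], 'values': []},
]}
for node_input in ready_node['inputs']:
    for ref in node_input['input_references']:
        node_input['values'].extend(
            get_output_by_name(nodes[ref['node_id']], ref['output_id'])['values'])
assert ready_node['inputs'][0]['values'] == ['resource-123']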
def upgrade_nodes(graph):
    """Upgrade deprecated Nodes.

    The function does not change the Graph in the database.

    Return:
        (int) Number of upgraded Nodes
    """
    node_ids = set(
        to_object_id(node.parent_node) for node in graph.nodes
    )
    db_nodes = GraphCollectionManager.node_collection_manager.get_db_nodes_by_ids(node_ids)
    new_node_db_mapping = {}

    for db_node in db_nodes:
        original_parent_node_id = db_node['_id']
        new_db_node = db_node
        if original_parent_node_id not in new_node_db_mapping:
            while new_db_node['node_status'] != NodeStatus.READY and \
                    'successor_node' in new_db_node and new_db_node['successor_node']:
                n = GraphCollectionManager.node_collection_manager.get_db_node(new_db_node['successor_node'])
                if n:
                    new_db_node = n
                else:
                    break
            new_node_db_mapping[original_parent_node_id] = new_db_node

    new_nodes = [
        GraphCollectionManager._transplant_node(
            node,
            Node.from_dict(new_node_db_mapping[to_object_id(node.parent_node)]),
        )
        for node in graph.nodes
    ]

    upgraded_nodes_count = sum(
        1 for node, new_node in zip(graph.nodes, new_nodes)
        if node.parent_node != new_node.parent_node
    )

    graph.nodes = new_nodes
    return upgraded_nodes_count
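# Sketch of the successor-chain walk above on plain dicts: starting from a
# deprecated node, follow `successor_node` pointers until a READY node (or a
# dangling link) is reached. The visited-set guard is an addition for the toy
# example; the real code relies on the chain being finite.
READY = 'READY'
db = {
    'n1': {'_id': 'n1', 'node_status': 'DEPRECATED', 'successor_node': 'n2'},
    'n2': {'_id': 'n2', 'node_status': 'DEPRECATED', 'successor_node': 'n3'},
    'n3': {'_id': 'n3', 'node_status': READY, 'successor_node': None},
}

def resolve_latest(node_id):
    node, seen = db[node_id], set()
    while node['node_status'] != READY and node.get('successor_node') and node['_id'] not in seen:
        seen.add(node['_id'])
        successor = db.get(node['successor_node'])
        if not successor:
            break
        node = successor
    return node

assert resolve_latest('n1')['_id'] == 'n3'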
def get_db_graph(graph_id):
    """Get dict representation of the Graph.

    Args:
        graph_id (ObjectId, str): Graph ID

    Return:
        (dict) dict representation of the Graph
    """
    return GraphCollectionManager._update_node_statuses(
        get_db_connector().graphs.find_one({'_id': to_object_id(graph_id)}))
def get_index_helper(node, level):
    if level < 0:
        return 0
    parent_node_ids = set()
    for node_input in node.inputs:
        for value in node_input.values:
            parent_node_ids.add(to_object_id(value.node_id))

    for index, node_id in enumerate(level_to_node_ids[level]):
        if node_id in parent_node_ids:
            return index
    return -1
def __init__(self, graph, node_collection=None):
    if isinstance(graph, Graph):
        self.graph_id = graph._id
        self.graph = graph
    else:
        self.graph_id = graph
        self.graph = Graph.load(self.graph_id)

    self.node_id_to_node = {node._id: node for node in self.graph.nodes}

    # number of dependencies to ids
    self.dependency_index_to_node_ids = defaultdict(lambda: set())
    self.node_id_to_dependents = defaultdict(lambda: set())
    self.node_id_to_dependency_index = defaultdict(lambda: 0)
    self.uncompleted_nodes_count = 0

    if node_collection:
        self.node_collection = node_collection
    else:
        self.node_collection = NodeCollection()

    finished_statuses = {
        NodeRunningStatus.SUCCESS,
        NodeRunningStatus.FAILED,
        NodeRunningStatus.STATIC,
        NodeRunningStatus.RESTORED,
        NodeRunningStatus.CANCELED,
    }

    for node in self.graph.nodes:
        # ignore nodes in finished statuses
        if node.node_running_status in finished_statuses:
            continue
        node_id = node._id
        dependency_index = 0
        for node_input in node.inputs:
            for input_value in node_input.values:
                parent_node_id = to_object_id(input_value.node_id)
                self.node_id_to_dependents[parent_node_id].add(node_id)
                if self.node_id_to_node[parent_node_id].node_running_status not in finished_statuses:
                    dependency_index += 1

        if node.node_running_status not in finished_statuses:
            self.uncompleted_nodes_count += 1
        self.dependency_index_to_node_ids[dependency_index].add(node_id)
        self.node_id_to_dependency_index[node_id] = dependency_index
def post_search_nodes():
    query = json.loads(request.data)
    user_id = to_object_id(g.user._id)

    if len(query.keys() - PAGINATION_QUERY_KEYS):
        return make_fail_response(
            'Unknown keys: `{}`'.format(query.keys() - PAGINATION_QUERY_KEYS)), 400

    res = node_collection_manager.get_db_nodes(user_id=user_id, **query)
    return JSONEncoder().encode({
        'items': res['list'],
        'total_count': res['metadata'][0]['total'] if res['metadata'] else 0,
        'resources_dict': resource_manager.resources_dict,
        'status': 'success',
    })
def upgrade_sub_nodes(self, main_node):
    """Upgrade deprecated Nodes.

    The function does not change the original graph in the database.

    Return:
        (int) Number of upgraded Nodes
    """
    assert self.collection == Collections.TEMPLATES
    sub_nodes = main_node.get_parameter_by_name('_nodes').value.value
    node_ids = set(node.original_node_id for node in sub_nodes)
    db_nodes = self.get_db_objects_by_ids(node_ids)
    new_node_db_mapping = {}

    for db_node in db_nodes:
        original_node_id = db_node['_id']
        new_db_node = db_node
        if original_node_id not in new_node_db_mapping:
            while new_db_node['node_status'] != NodeStatus.READY and \
                    'successor_node_id' in new_db_node and new_db_node['successor_node_id']:
                n = self.get_db_node(new_db_node['successor_node_id'])
                if n:
                    new_db_node = n
                else:
                    break
            new_node_db_mapping[original_node_id] = new_db_node

    new_nodes = [
        NodeCollectionManager._transplant_node(
            node,
            Node.from_dict(new_node_db_mapping[to_object_id(node.original_node_id)]),
        )
        for node in sub_nodes
    ]

    upgraded_nodes_count = sum(
        1 for node, new_node in zip(sub_nodes, new_nodes)
        if node.original_node_id != new_node.original_node_id
    )

    main_node.get_parameter_by_name('_nodes').value.value = new_nodes
    return upgraded_nodes_count
def post_demo_user():
    user = demo_user_manager.create_demo_user()
    if not user:
        return make_fail_response('Failed to create a demo user')

    template_id = DemoUserManager.demo_config.kind
    if DemoUserManager.demo_config.template_id:
        try:
            node_id = to_object_id(DemoUserManager.demo_config.template_id)
        except Exception as e:
            app.logger.error('node_id `{}` is invalid'.format(DemoUserManager.demo_config.template_id))
            app.logger.error(e)
            return make_fail_response('Failed to create a demo node')
        try:
            user_id = user._id
            node = template_collection_manager.get_db_node(node_id, user_id)
            node = get_class(node['_type'])(node).clone(NodeClonePolicy.NODE_TO_NODE)
            node.author = user_id
            node.save()
            template_id = node._id
        except Exception as e:
            app.logger.error('Failed to create a demo node')
            app.logger.error(e)
            return make_fail_response(str(e)), 500

    access_token = user.generate_access_token(expiration=1800)
    user_obj = user.to_dict()
    user_obj['hash_password'] = ''
    return JSONEncoder().encode({
        'access_token': access_token.decode('ascii'),
        'refresh_token': 'Not assigned',
        'user': user_obj,
        'url': '/{}/{}'.format(Collections.TEMPLATES, template_id),
    })
def arrange_auto_layout(self):
    """Use a heuristic to rearrange nodes."""
    HEADER_HEIGHT = 23
    DESCRIPTION_HEIGHT = 20
    FOOTER_HEIGHT = 10
    BORDERS_HEIGHT = 2
    ITEM_HEIGHT = 20
    SPACE_HEIGHT = 50
    LEFT_PADDING = 30
    TOP_PADDING = 80
    LEVEL_WIDTH = 252
    SPECIAL_PARAMETER_HEIGHT = 20
    SPECIAL_PARAMETER_TYPES = [ParameterTypes.CODE]
    min_node_height = HEADER_HEIGHT + DESCRIPTION_HEIGHT + FOOTER_HEIGHT + BORDERS_HEIGHT

    node_id_to_level = defaultdict(lambda: -1)
    node_id_to_node = {}
    queued_node_ids = set()
    children_ids = defaultdict(set)

    node_ids = set(node._id for node in self.nodes)
    non_zero_node_ids = set()
    for node in self.nodes:
        node_id_to_node[node._id] = node
        for node_input in node.inputs:
            for value in node_input.values:
                parent_node_id = to_object_id(value.node_id)
                non_zero_node_ids.add(parent_node_id)
                children_ids[parent_node_id].add(node._id)

    leaves = node_ids - non_zero_node_ids
    to_visit = deque()
    for leaf_id in leaves:
        node_id_to_level[leaf_id] = 0
        to_visit.append(leaf_id)

    while to_visit:
        node_id = to_visit.popleft()
        node = node_id_to_node[node_id]
        node_level = max(
            [node_id_to_level[node_id]] +
            [node_id_to_level[child_id] + 1 for child_id in children_ids[node_id]]
        )
        node_id_to_level[node_id] = node_level
        for node_input in node.inputs:
            for value in node_input.values:
                parent_node_id = to_object_id(value.node_id)
                parent_level = node_id_to_level[parent_node_id]
                node_id_to_level[parent_node_id] = max(node_level + 1, parent_level)
                if parent_node_id not in queued_node_ids:
                    to_visit.append(parent_node_id)
                    queued_node_ids.add(parent_node_id)

    max_level = max(node_id_to_level.values())
    level_to_node_ids = defaultdict(list)
    row_heights = defaultdict(lambda: 0)

    def get_index_helper(node, level):
        if level < 0:
            return 0
        parent_node_ids = set()
        for node_input in node.inputs:
            for value in node_input.values:
                parent_node_ids.add(to_object_id(value.node_id))
        for index, node_id in enumerate(level_to_node_ids[level]):
            if node_id in parent_node_ids:
                return index
        return -1

    def get_index(node, max_level, level):
        return tuple(
            get_index_helper(node, lvl) for lvl in range(max_level, level, -1)
        )

    for node_id, level in node_id_to_level.items():
        level_to_node_ids[level].append(node_id)

    for level in range(max_level, -1, -1):
        level_node_ids = level_to_node_ids[level]
        index_to_node_id = []
        for node_id in level_node_ids:
            node = node_id_to_node[node_id]
            index = get_index(node, max_level, level)
            index_to_node_id.append((index, node_id))
        index_to_node_id.sort()
        level_to_node_ids[level] = [node_id for _, node_id in index_to_node_id]

        for index, node_id in enumerate(level_to_node_ids[level]):
            node = node_id_to_node[node_id]
            special_parameters_count = sum(
                1 if parameter.parameter_type in SPECIAL_PARAMETER_TYPES and parameter.widget else 0
                for parameter in node.parameters
            )
            node_height = sum([
                min_node_height,
                ITEM_HEIGHT * max(len(node.inputs), len(node.outputs)),
                special_parameters_count * SPECIAL_PARAMETER_HEIGHT,
            ])
            row_heights[index] = max(row_heights[index], node_height)

    cum_heights = [0]
    for index in range(len(row_heights)):
        cum_heights.append(cum_heights[-1] + row_heights[index] + SPACE_HEIGHT)
    max_height = max(cum_heights)

    for level in range(max_level, -1, -1):
        level_node_ids = level_to_node_ids[level]
        level_height = cum_heights[len(level_node_ids)]
        level_padding = (max_height - level_height) // 2
        for index, node_id in enumerate(level_node_ids):
            node = node_id_to_node[node_id]
            node.x = LEFT_PADDING + (max_level - level) * LEVEL_WIDTH
            node.y = TOP_PADDING + level_padding + cum_heights[index]
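# Condensed sketch of the level computation in arrange_auto_layout: sinks
# (nodes no other node consumes) get level 0, and every node is pushed at least
# one level above all of its consumers, so columns flow left-to-right from
# sources to sinks. The re-queue-on-increase rule below is a simplification of
# the original's queued-set bookkeeping; the toy graph and names are made up.
from collections import defaultdict, deque

parents = {'sink': ['mid'], 'mid': ['src'], 'src': []}  # node -> its input (parent) nodes
node_ids = set(parents)
consumed = {p for deps in parents.values() for p in deps}

levels = defaultdict(lambda: -1)
to_visit = deque()
for leaf in node_ids - consumed:    # sinks start at level 0
    levels[leaf] = 0
    to_visit.append(leaf)
while to_visit:
    node = to_visit.popleft()
    for parent in parents[node]:
        if levels[node] + 1 > levels[parent]:
            levels[parent] = levels[node] + 1
            to_visit.append(parent)

assert (levels['src'], levels['mid'], levels['sink']) == (2, 1, 0)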
def get_nodes(collection, node_link=None):
    user_id = to_object_id(g.user._id)

    can_view_others_operations = g.user.check_role(IAMPolicies.CAN_VIEW_OTHERS_OPERATIONS)
    can_view_others_workflows = g.user.check_role(IAMPolicies.CAN_VIEW_OTHERS_WORKFLOWS)
    can_view_operations = g.user.check_role(IAMPolicies.CAN_VIEW_OPERATIONS)
    can_view_workflows = g.user.check_role(IAMPolicies.CAN_VIEW_WORKFLOWS)
    can_create_operations = g.user.check_role(IAMPolicies.CAN_CREATE_OPERATIONS)
    can_create_workflows = g.user.check_role(IAMPolicies.CAN_CREATE_WORKFLOWS)

    if node_link in executor_manager.kind_to_executor_class and collection == Collections.TEMPLATES:
        # if node_link is a base node
        # i.e. /templates/basic-bash
        kind = node_link
        if kind in workflow_manager.kind_to_workflow_dict and (not can_view_workflows or not can_create_workflows):
            return make_permission_denied()
        if kind in operation_manager.kind_to_operation_dict and (not can_view_operations or not can_create_operations):
            return make_permission_denied()
        node = executor_manager.kind_to_executor_class[kind].get_default_node(
            is_workflow=kind in workflow_manager.kind_to_workflow_dict)
        if isinstance(node, tuple):
            data = node[0].to_dict()
            tour_steps = node[1]
        else:
            data = node.to_dict()
            tour_steps = []
        data['kind'] = kind
        return make_success_response({
            'node': data,
            'tour_steps': tour_steps,
            'plugins_dict': PLUGINS_DICT,
        })
    elif node_link in workflow_manager.kind_to_workflow_dict and collection == Collections.GROUPS:
        # TODO: move group to a separate class
        group_dict = Group().to_dict()
        group_dict['kind'] = node_link
        return make_success_response({
            'group': group_dict,
            'plugins_dict': PLUGINS_DICT,
        })
    else:
        # when node_link is an id of the object
        try:
            node_id = to_object_id(node_link)
        except Exception:
            return make_fail_response('Invalid ID'), 404

        if collection == Collections.GROUPS:
            # TODO: move group to a separate class
            group = node_collection_managers[collection].get_db_object(node_id, user_id)
            if group:
                return make_success_response({
                    'group': group,
                    'plugins_dict': PLUGINS_DICT,
                })
            else:
                return make_fail_response('Group `{}` was not found'.format(node_link)), 404

        node = node_collection_managers[collection].get_db_node(node_id, user_id)
        app.logger.debug(node)
        if node:
            is_owner = node['author'] == user_id
            kind = node['kind']
            if kind in workflow_manager.kind_to_workflow_dict and not can_view_workflows:
                return make_permission_denied()
            if kind in operation_manager.kind_to_operation_dict and not can_view_operations:
                return make_permission_denied()
            if kind in workflow_manager.kind_to_workflow_dict and not can_view_others_workflows and not is_owner:
                return make_permission_denied()
            if kind in operation_manager.kind_to_operation_dict and not can_view_others_operations and not is_owner:
                return make_permission_denied()
            return make_success_response({
                'node': node,
                'plugins_dict': PLUGINS_DICT,
            })
        else:
            return make_fail_response('Node `{}` was not found'.format(node_link)), 404
def get_db_nodes(status='', base_node_names=None, search='', per_page=20, offset=0, user_id=None):
    """Get a subset of the Nodes.

    Args:
        status (str, None): Node running status
        base_node_names (str, list of str, None): Names of base nodes to filter by
        search (str, None): Search pattern
        per_page (int): Number of Nodes per page
        offset (int): Offset
        user_id (str, ObjectId, None): User ID

    Return:
        (list of dict) List of Nodes in dict format
    """
    if status and isinstance(status, basestring):
        status = [status]
    if base_node_names and isinstance(base_node_names, basestring):
        base_node_names = [base_node_names]

    aggregate_list = []
    search_parameters, search_string = parse_search_string(search)

    # Match
    and_query = {}
    if base_node_names:
        and_query['base_node_name'] = {'$in': base_node_names}
    if status:
        and_query['node_status'] = {'$in': status}
    if search_string:
        and_query['$text'] = {'$search': search_string}
    if len(and_query):
        aggregate_list.append({"$match": and_query})

    # Join with users
    aggregate_list.append({
        '$lookup': {
            'from': 'users',
            'localField': 'author',
            'foreignField': '_id',
            'as': '_user',
        }
    })
    # rm password hash
    aggregate_list.append({"$project": {
        "_user.password_hash": 0,
    }})

    # Match username
    and_query = {}
    if 'author' in search_parameters:
        and_query['_user.username'] = search_parameters['author']
    if len(and_query):
        aggregate_list.append({"$match": and_query})

    # sort
    sort_dict = OrderedDict()
    if 'sort' in search_parameters:
        # TODO: more sort options
        if search_parameters['sort'] == 'starred':
            sort_dict['starred'] = -1
    sort_dict['insertion_date'] = -1

    aggregate_list.append({"$sort": sort_dict})
    aggregate_list.append({
        "$addFields": {
            '_readonly': {'$ne': ["$author", to_object_id(user_id)]},
        }
    })

    # counts and pagination
    aggregate_list.append({
        '$facet': {
            "metadata": [{"$count": "total"}],
            "list": [{"$skip": int(offset)}, {"$limit": int(per_page)}],
        }
    })

    return next(get_db_connector().nodes.aggregate(aggregate_list), None)
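# The shape of the $facet stage used above, shown standalone: one aggregation
# round trip returns both the total count and the requested page. The $match
# and $sort stages here are illustrative placeholders; executing the pipeline
# requires a live MongoDB collection, so the call is left commented out.
offset, per_page = 20, 20
pipeline = [
    {'$match': {'node_status': {'$in': ['READY']}}},
    {'$sort': {'insertion_date': -1}},
    {'$facet': {
        'metadata': [{'$count': 'total'}],                        # total matching documents
        'list': [{'$skip': int(offset)}, {'$limit': int(per_page)}],  # one page of results
    }},
]
# res = next(get_db_connector().nodes.aggregate(pipeline), None)
# total = res['metadata'][0]['total'] if res['metadata'] else 0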
def generate_code(self):
    code_blocks = []
    unique_nodes = {node.parent_node: node for node in self.nodes}

    def name_iteration_handler(lst):
        return ', '.join(map(lambda element: "'{}'".format(element.name), lst))

    def generate_class_name(title):
        return ''.join(map(lambda s: s.title(), re.split('[^a-zA-Z]', title)))

    def generate_var_name(title):
        return '_'.join(re.split('[^a-zA-Z0-9]', title))

    def param_to_value(param):
        if param.parameter_type == ParameterTypes.INT:
            return param.value
        elif param.parameter_type == ParameterTypes.ENUM:
            return repr(param.value.values[int(param.value.index)])
        elif param.parameter_type == ParameterTypes.LIST_INT:
            return map(int, param.value)
        elif param.parameter_type == ParameterTypes.CODE:
            return repr(param.value.value)
        return repr(param.value)

    used_class_names = set()
    node_id_to_class_name = {}
    for node_id, node in unique_nodes.items():
        if node.base_node_name == 'file':
            class_type = 'File'
            content = '\n    '.join([
                "",
                "id='{}',".format(node_id),
                "title='{}',".format(node.title),
                "description='{}',".format(node.description),
            ])
            orig_class_name = generate_var_name(node.title)
        else:
            class_type = 'Operation'
            content = '\n    '.join([
                "",
                "id='{}',".format(node_id),
                "title='{}',".format(node.title),
                "inputs=[{}],".format(name_iteration_handler(node.inputs)),
                "params=[{}],".format(name_iteration_handler(filter(lambda p: p.widget, node.parameters))),
                "outputs=[{}],".format(name_iteration_handler(node.outputs)),
            ])
            orig_class_name = generate_class_name(node.title)

        class_name = orig_class_name
        while class_name in used_class_names:
            class_name = '{}_{}'.format(orig_class_name, str(uuid.uuid1())[:4])
        used_class_names.add(class_name)
        node_id_to_class_name[node_id] = class_name

        code = "{class_name} = {class_type}({content}\n)\n".format(
            class_name=class_name,
            class_type=class_type,
            content=content,
        )
        code_blocks.append(code)

    level_to_node_ids, node_id_to_node = self.arrange_auto_layout(readonly=True)
    max_level = max(level_to_node_ids.keys())

    node_id_to_var_name = {}
    for level in range(max_level, -1, -1):
        for row, node_id in enumerate(level_to_node_ids[level]):
            node = node_id_to_node[node_id]
            if node.base_node_name == 'file':
                node_id_to_var_name[node_id] = node_id_to_class_name[node.parent_node]
                continue
            var_name = '{}_{}_{}'.format(generate_var_name(node.title.lower()), max_level - level, row)

            # generate args
            args = []
            for node_input in node.inputs:
                values = []
                for value in node_input.values:
                    values.append('{}.outputs.{}'.format(
                        node_id_to_var_name[to_object_id(value.node_id)],
                        value.output_id,
                    ))
                if values:
                    args.append('    {}={},'.format(
                        generate_var_name(node_input.name),
                        values[0] if len(values) == 1 else '[{}]'.format(', '.join(values)),
                    ))
            for param in node.parameters:
                if param.widget:
                    args.append('    {}={},'.format(generate_var_name(param.name), param_to_value(param)))

            # generate var declaration
            node_id_to_var_name[node_id] = var_name
            content = '{var_name} = {class_name}(\n{args}\n)\n'.format(
                var_name=var_name,
                class_name=node_id_to_class_name[node.parent_node],
                args='\n'.join(args),
            )
            code_blocks.append(content)

    code_blocks.append(
        "graph = Graph(\n"
        "    Client(\n"
        "        token=TOKEN,\n"
        "        endpoint=ENDPOINT,\n"
        "    ),\n"
        "    title='{title}',\n"
        "    description='{description}',\n"
        "    targets=[{targets}]\n"
        ")\n\n"
        "graph.approve().wait()\n".format(
            title=self.title,
            description=self.description,
            targets=", ".join(map(lambda node_id: node_id_to_var_name[node_id], level_to_node_ids[0])),
        )
    )

    return '\n'.join(code_blocks)
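# Hedged example of the script generate_code emits for a one-operation graph.
# The overall shape follows the templates above (class declaration, variable
# instantiation, Graph/Client footer), but the ids, titles, and argument names
# are invented for illustration.
#
# Echo = Operation(
#     id='5d1b8469aaaaaaaaaaaaaaaa',
#     title='Echo',
#     inputs=[],
#     params=['cmd'],
#     outputs=['out'],
# )
#
# echo_0_0 = Echo(
#     cmd='echo hello',
# )
#
# graph = Graph(
#     Client(
#         token=TOKEN,
#         endpoint=ENDPOINT,
#     ),
#     title='Demo',
#     description='Demo graph',
#     targets=[echo_0_0]
# )
#
# graph.approve().wait()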