def expand_library_node(json_in):
    """ Expand a specific library node in a given SDFG. If no specific library
        node is provided, expand all library nodes in the given SDFG.

        :param json_in: The entire provided request JSON. Must contain an
                        'sdfg' key; may contain a 'nodeId' key with a
                        (sdfg_id, state_id, node_id) triple selecting one node.
        :return: A dict with the expanded SDFG under 'sdfg', or an 'error'
                 dict on failure.
    """
    old_meta = utils.disable_save_metadata()
    try:
        try:
            loaded = utils.load_sdfg_from_json(json_in['sdfg'])
            if loaded['error'] is not None:
                return loaded['error']
            sdfg = loaded['sdfg']
        except KeyError:
            # Request did not include an 'sdfg' entry at all.
            return {
                'error': {
                    'message': 'Failed to expand library node',
                    'details': 'No SDFG provided',
                },
            }

        try:
            sdfg_id, state_id, node_id = json_in['nodeId']
        except KeyError:
            # No specific node selected: expand everything.
            sdfg_id, state_id, node_id = None, None, None

        if sdfg_id is None:
            sdfg.expand_library_nodes()
        else:
            context_sdfg = sdfg.sdfg_list[sdfg_id]
            state = context_sdfg.node(state_id)
            node = state.node(node_id)
            if isinstance(node, nodes.LibraryNode):
                node.expand(context_sdfg, state)
            else:
                return {
                    'error': {
                        'message': 'Failed to expand library node',
                        'details':
                        'The provided node is not a valid library node',
                    },
                }

        return {
            'sdfg': sdfg.to_json(),
        }
    finally:
        # Always restore the metadata-saving flag; the previous version
        # leaked the disabled state on every error return path.
        utils.restore_save_metadata(old_meta)
def reapply_history_until(sdfg_json, index):
    """ Rewind a given SDFG back to a specific point in its history by
        reapplying all transformations until a given index in its history to
        its original state.

        :param sdfg_json: The SDFG to rewind.
        :param index:     Index of the last history item to apply (inclusive).
        :return: A dict with the rewound SDFG under 'sdfg', or an 'error'
                 dict on failure.
    """
    old_meta = utils.disable_save_metadata()
    try:
        loaded = utils.load_sdfg_from_json(sdfg_json)
        if loaded['error'] is not None:
            return loaded['error']
        sdfg = loaded['sdfg']

        original_sdfg = sdfg.orig_sdfg
        history = sdfg.transformation_hist

        for i in range(index + 1):
            transformation = history[i]
            try:
                # Each recorded transformation knows which (possibly nested)
                # SDFG it targets via its sdfg_id.
                target_sdfg = original_sdfg.sdfg_list[transformation.sdfg_id]
                if isinstance(transformation, SubgraphTransformation):
                    transformation.apply(target_sdfg)
                else:
                    transformation.apply_pattern(target_sdfg)
            except Exception as e:
                print(traceback.format_exc(), file=sys.stderr)
                sys.stderr.flush()
                return {
                    'error': {
                        'message':
                        'Failed to play back the transformation history',
                        'details': utils.get_exception_message(e),
                    },
                }

        return {
            'sdfg': original_sdfg.to_json(),
        }
    finally:
        # Always restore the metadata-saving flag; the previous version
        # leaked the disabled state on the error return paths.
        utils.restore_save_metadata(old_meta)
def apply_transformation(sdfg_json, transformation_json):
    """ Apply a single (pattern or subgraph) transformation to an SDFG.

        :param sdfg_json:           The SDFG to transform, in JSON form.
        :param transformation_json: The serialized transformation to apply.
        :return: A dict with the transformed SDFG under 'sdfg', or an 'error'
                 dict on failure.
    """
    old_meta = utils.disable_save_metadata()
    try:
        loaded = utils.load_sdfg_from_json(sdfg_json)
        if loaded['error'] is not None:
            return loaded['error']
        sdfg = loaded['sdfg']

        try:
            transformation = serialize.from_json(transformation_json)
        except Exception as e:
            print(traceback.format_exc(), file=sys.stderr)
            sys.stderr.flush()
            return {
                'error': {
                    'message': 'Failed to parse the applied transformation',
                    'details': utils.get_exception_message(e),
                },
            }

        try:
            target_sdfg = sdfg.sdfg_list[transformation.sdfg_id]
            if isinstance(transformation, SubgraphTransformation):
                # Subgraph transformations are not recorded automatically,
                # so append them to the history explicitly before applying.
                sdfg.append_transformation(transformation)
                transformation.apply(target_sdfg)
            else:
                transformation.apply_pattern(target_sdfg)
        except Exception as e:
            print(traceback.format_exc(), file=sys.stderr)
            sys.stderr.flush()
            return {
                'error': {
                    'message':
                    'Failed to apply the transformation to the SDFG',
                    'details': utils.get_exception_message(e),
                },
            }

        return {
            'sdfg': sdfg.to_json(),
        }
    finally:
        # Always restore the metadata-saving flag; the previous version
        # leaked the disabled state on the error return paths.
        utils.restore_save_metadata(old_meta)
def compile_sdfg(path, suppress_instrumentation=False):
    """ Compile the SDFG stored in the given file.

        :param path:                     Path to the SDFG file to compile.
        :param suppress_instrumentation: If True, strip all instrumentation
                                         from the SDFG before compiling.
        :return: A dict with the compiled library's filename under 'filename',
                 or an 'error' dict if loading failed.
    """
    # We lazy import DaCe, not to break cyclic imports, but to avoid any large
    # delays when booting in daemon mode.
    from dace.codegen.compiled_sdfg import CompiledSDFG

    old_meta = disable_save_metadata()
    try:
        loaded = load_sdfg_from_file(path)
        if loaded['error'] is not None:
            return loaded['error']
        sdfg = loaded['sdfg']

        if suppress_instrumentation:
            _sdfg_remove_instrumentations(sdfg)

        compiled_sdfg: CompiledSDFG = sdfg.compile()
        return {
            'filename': compiled_sdfg.filename,
        }
    finally:
        # Always restore the metadata-saving flag; the previous version
        # leaked the disabled state on the load-error return and whenever
        # sdfg.compile() raised.
        restore_save_metadata(old_meta)
def remove_sdfg_elements(sdfg_json, uuids):
    """ Remove a set of graph elements (nodes or edges) from an SDFG.

        :param sdfg_json: The SDFG to modify, in JSON form.
        :param uuids:     UUIDs of the elements to remove.
        :return: A dict with the modified SDFG under 'sdfg', or an 'error'
                 dict if any element could not be found.
    """
    from dace.sdfg.graph import Edge

    old_meta = disable_save_metadata()
    try:
        loaded = load_sdfg_from_json(sdfg_json)
        if loaded['error'] is not None:
            return loaded['error']
        sdfg = loaded['sdfg']

        # Resolve all UUIDs up front before mutating the graph.
        elements = [find_graph_element_by_uuid(sdfg, uuid) for uuid in uuids]

        for element_ret in elements:
            element = element_ret['element']
            parent = element_ret['parent']
            if parent is not None and element is not None:
                if isinstance(element, Edge):
                    parent.remove_edge(element)
                else:
                    parent.remove_node(element)
            else:
                return {
                    'error': {
                        'message': 'Failed to delete element',
                        'details': 'Element or parent not found',
                    },
                }

        return {
            'sdfg': sdfg.to_json(),
        }
    finally:
        # Always restore the metadata-saving flag; the previous version
        # leaked the disabled state on the error return path.
        restore_save_metadata(old_meta)
def insert_sdfg_element(sdfg_str, type, parent_uuid, edge_a_uuid):
    """ Insert a new element into an SDFG.

        :param sdfg_str:    The SDFG to modify, in JSON form.
        :param type:        The kind of element to add (e.g. 'SDFGState',
                            'AccessNode', 'Map', 'Edge'). For library nodes
                            the format is 'LibraryNode|<fully.qualified.name>'.
        :param parent_uuid: UUID of the graph element to add into (or, for
                            edges, the edge's destination).
        :param edge_a_uuid: UUID of the edge's source (edges only).
        :return: A dict with the modified SDFG under 'sdfg' and the new
                 element UUID(s) under 'uuid', or an 'error' dict on failure.
    """
    sdfg_answer = load_sdfg_from_json(sdfg_str)
    sdfg = sdfg_answer['sdfg']

    uuid = 'error'
    ret = find_graph_element_by_uuid(sdfg, parent_uuid)
    parent = ret['element']

    libname = None
    if type is not None and isinstance(type, str):
        # Library nodes arrive as 'LibraryNode|<class path>'.
        split_type = type.split('|')
        if len(split_type) == 2:
            type = split_type[0]
            libname = split_type[1]

    if type == 'SDFGState':
        if parent is None:
            parent = sdfg
        elif isinstance(parent, nodes.NestedSDFG):
            parent = parent.sdfg
        state = parent.add_state()
        uuid = [get_uuid(state)]
    elif type == 'AccessNode':
        arrays = list(parent.parent.arrays.keys())
        if len(arrays) == 0:
            # No data containers exist yet; create a placeholder scalar.
            parent.parent.add_array('tmp', [1], dtype=dtypes.float64)
            arrays = list(parent.parent.arrays.keys())
        node = parent.add_access(arrays[0])
        uuid = [get_uuid(node, parent)]
    elif type == 'Map':
        map_entry, map_exit = parent.add_map('map', dict(i='0:1'))
        uuid = [get_uuid(map_entry, parent), get_uuid(map_exit, parent)]
    elif type == 'Consume':
        consume_entry, consume_exit = parent.add_consume('consume', ('i', '1'))
        uuid = [get_uuid(consume_entry, parent), get_uuid(consume_exit, parent)]
    elif type == 'Tasklet':
        tasklet = parent.add_tasklet(
            name='placeholder', inputs={'in'}, outputs={'out'}, code='')
        uuid = [get_uuid(tasklet, parent)]
    elif type == 'NestedSDFG':
        sub_sdfg = SDFG('nested_sdfg')
        sub_sdfg.add_array('in', [1], dtypes.float32)
        sub_sdfg.add_array('out', [1], dtypes.float32)
        nsdfg = parent.add_nested_sdfg(sub_sdfg, sdfg, {'in'}, {'out'})
        uuid = [get_uuid(nsdfg, parent)]
    elif type == 'LibraryNode':
        if libname is None:
            return {
                'error': {
                    'message': 'Failed to add library node',
                    'details': 'Must provide a valid library node type',
                },
            }
        libnode_class = pydoc.locate(libname)
        # pydoc.locate returns None for unresolvable names; the previous
        # version crashed with "'NoneType' object is not callable" here.
        if libnode_class is None:
            return {
                'error': {
                    'message': 'Failed to add library node',
                    'details': 'Must provide a valid library node type',
                },
            }
        libnode = libnode_class()
        parent.add_node(libnode)
        uuid = [get_uuid(libnode, parent)]
    elif type == 'Edge':
        edge_start_ret = find_graph_element_by_uuid(sdfg, edge_a_uuid)
        edge_start = edge_start_ret['element']
        edge_parent = edge_start_ret['parent']
        if edge_start is not None:
            if edge_parent is None:
                edge_parent = sdfg

            if isinstance(edge_parent, SDFGState):
                if not (isinstance(edge_start, nodes.Node)
                        and isinstance(parent, nodes.Node)):
                    return {
                        'error': {
                            'message': 'Failed to add edge',
                            'details': 'Must connect two nodes or two states',
                        },
                    }
                memlet = Memlet()
                edge_parent.add_edge(edge_start, None, parent, None, memlet)
            elif isinstance(edge_parent, SDFG):
                if not (isinstance(edge_start, SDFGState)
                        and isinstance(parent, SDFGState)):
                    return {
                        'error': {
                            'message': 'Failed to add edge',
                            'details': 'Must connect two nodes or two states',
                        },
                    }
                isedge = InterstateEdge()
                edge_parent.add_edge(edge_start, parent, isedge)
            uuid = ['NONE']
        else:
            raise ValueError('No edge starting point provided')

    old_meta = disable_save_metadata()
    new_sdfg_str = sdfg.to_json()
    restore_save_metadata(old_meta)

    return {
        'sdfg': new_sdfg_str,
        'uuid': uuid,
    }
def get_transformations(sdfg_json, selected_elements):
    """ Get a list of applicable transformations for an SDFG, given a
        selection of its elements.

        :param sdfg_json:         The SDFG to check, in JSON form.
        :param selected_elements: Graph elements currently selected by the
                                  user; used to match subgraph
                                  transformations.
        :return: A dict with 'transformations' (serialized matches) and
                 'docstrings' (per transformation class), plus a 'warnings'
                 entry when the selection spans multiple SDFGs; or an 'error'
                 dict if the SDFG failed to load.
    """
    # We lazy import DaCe, not to break cyclic imports, but to avoid any large
    # delays when booting in daemon mode.
    from dace.transformation.optimizer import SDFGOptimizer
    from dace.sdfg.graph import SubgraphView

    old_meta = utils.disable_save_metadata()
    try:
        loaded = utils.load_sdfg_from_json(sdfg_json)
        if loaded['error'] is not None:
            return loaded['error']
        sdfg = loaded['sdfg']

        # Pattern-based transformations come from the optimizer's matcher.
        optimizer = SDFGOptimizer(sdfg)
        matches = optimizer.get_pattern_matches()

        transformations = []
        docstrings = {}
        for transformation in matches:
            transformations.append(transformation.to_json())
            docstrings[type(transformation).__name__] = transformation.__doc__

        # Resolve the user's selection into states and nodes.
        selected_states = [
            utils.sdfg_find_state_from_element(sdfg, n)
            for n in selected_elements if n['type'] == 'state'
        ]
        selected_nodes = [
            utils.sdfg_find_node_from_element(sdfg, n)
            for n in selected_elements if n['type'] == 'node'
        ]
        selected_sdfg_ids = list(
            set(elem['sdfgId'] for elem in selected_elements))
        selected_sdfg = sdfg
        if len(selected_sdfg_ids) > 1:
            # Subgraph transformations only work within a single SDFG.
            return {
                'transformations': transformations,
                'docstrings': docstrings,
                'warnings': 'More than one SDFG selected, ignoring subgraph',
            }
        elif len(selected_sdfg_ids) == 1:
            selected_sdfg = sdfg.sdfg_list[selected_sdfg_ids[0]]

        # Build a subgraph view: either of selected states, or of selected
        # nodes that all lie in the same state.
        subgraph = None
        if len(selected_states) > 0:
            subgraph = SubgraphView(selected_sdfg, selected_states)
        else:
            violated = False
            state = None
            for node in selected_nodes:
                if state is None:
                    state = node.state
                elif state != node.state:
                    violated = True
                    break
            if not violated and state is not None:
                subgraph = SubgraphView(state, selected_nodes)

        if subgraph is not None:
            extensions = SubgraphTransformation.extensions()
            for xform in extensions:
                xform_data = extensions[xform]
                # Skip single-state transformations when whole states are
                # selected.
                if ('singlestate' in xform_data and xform_data['singlestate']
                        and len(selected_states) > 0):
                    continue
                xform_obj = xform(subgraph)
                if xform_obj.can_be_applied(selected_sdfg, subgraph):
                    transformations.append(xform_obj.to_json())
                    docstrings[xform.__name__] = xform_obj.__doc__

        return {
            'transformations': transformations,
            'docstrings': docstrings,
        }
    finally:
        # Always restore the metadata-saving flag; the previous version
        # leaked the disabled state on the load-error and multi-SDFG
        # early returns.
        utils.restore_save_metadata(old_meta)