def make_write_sdfg():
    """Build the SPMV write-back SDFG: drains one value per row from the
    ``b_pipe`` stream into the ``b_mem`` result array.

    The ``for h in range(H)`` loop is expressed as an explicit state machine:
    begin -> entry -> body -> entry ... -> end.

    Relies on module-level ``dtype`` and symbol ``H`` — assumed to be defined
    elsewhere in this file (TODO confirm).

    :return: The constructed SDFG.
    """
    sdfg = SDFG("spmv_write")
    begin = sdfg.add_state("begin")
    entry = sdfg.add_state("entry")
    state = sdfg.add_state("body")
    end = sdfg.add_state("end")
    # Loop initialization: h = 0.
    sdfg.add_edge(begin, entry, InterstateEdge(assignments={"h": "0"}))
    # Loop condition: stay in the body while h < H.
    sdfg.add_edge(
        entry, state,
        InterstateEdge(condition=CodeProperty.from_string(
            "h < H", language=Language.Python)))
    # Loop exit: h >= H.
    sdfg.add_edge(
        entry, end,
        InterstateEdge(condition=CodeProperty.from_string(
            "h >= H", language=Language.Python)))
    # Loop increment: h = h + 1.
    sdfg.add_edge(state, entry, InterstateEdge(assignments={"h": "h + 1"}))
    # Body: pop one element from the stream and store it at b_mem[h].
    result_to_write_in = state.add_stream("b_pipe",
                                          dtype,
                                          storage=StorageType.FPGA_Local)
    b = state.add_array("b_mem", (H, ),
                        dtype,
                        storage=StorageType.FPGA_Global)
    state.add_memlet_path(result_to_write_in,
                          b,
                          memlet=Memlet.simple(b, "h"))
    return sdfg
def test_for_inside_branch():
    """State-propagation test: a for-loop nested inside one branch of an if.

    Builds the state machine by hand, runs ``propagate_states``, and checks
    the computed number of executions per state. The loop-carrying branch is
    taken on ``not (i < 10)``; with i = 0 that branch is statically dead from
    the propagator's point of view, hence the loop counts are dynamic.
    """
    sdfg = dace.SDFG('for_in_branch')

    state_init = sdfg.add_state('init')
    branch_guard = sdfg.add_state('branch_guard')
    loop_guard = sdfg.add_state('loop_guard')
    loop_state = sdfg.add_state('loop_state')
    branch_merge = sdfg.add_state('branch_merge')

    # init -> branch_guard, initializing i = 0.
    sdfg.add_edge(state_init, branch_guard, InterstateEdge(assignments={
        'i': '0',
    }))
    # Branch 1: skip the loop and go straight to the merge state.
    sdfg.add_edge(branch_guard, branch_merge,
                  InterstateEdge(condition=CodeProperty.from_string(
                      'i < 10', language=Language.Python)))
    # Branch 2: enter the loop, initializing j = 0.
    sdfg.add_edge(
        branch_guard, loop_guard,
        InterstateEdge(condition=CodeProperty.from_string(
            'not (i < 10)', language=Language.Python),
                       assignments={
                           'j': '0',
                       }))
    # Loop guard: iterate while j < 10, otherwise fall through to the merge.
    sdfg.add_edge(loop_guard, loop_state,
                  InterstateEdge(condition=CodeProperty.from_string(
                      'j < 10', language=Language.Python)))
    sdfg.add_edge(loop_guard, branch_merge,
                  InterstateEdge(condition=CodeProperty.from_string(
                      'not (j < 10)', language=Language.Python)))
    # Loop back-edge with increment j = j + 1.
    sdfg.add_edge(loop_state, loop_guard, InterstateEdge(assignments={
        'j': 'j + 1',
    }))

    propagate_states(sdfg)

    # (state, expected executions, expected dynamic flag)
    state_check_executions(branch_guard, 1, False)
    # Guard runs 10 iterations + 1 final check.
    state_check_executions(loop_guard, 11, True)
    state_check_executions(loop_state, 10, True)
    state_check_executions(branch_merge, 1, False)
def test_conditional_fake_merge():
    """State-propagation test: a state that looks like a branch merge but
    is not a true merge point.

    State D is reached unconditionally from B but only conditionally from C,
    so neither D nor E can be assigned a static execution count — both must
    be marked dynamic with a count of 1.
    """
    sdfg = dace.SDFG('fake_merge')

    state_init = sdfg.add_state('init')
    state_a = sdfg.add_state('A')
    state_b = sdfg.add_state('B')
    state_c = sdfg.add_state('C')
    state_d = sdfg.add_state('D')
    state_e = sdfg.add_state('E')

    # init -> A, initializing both branch variables.
    sdfg.add_edge(state_init, state_a, InterstateEdge(assignments={
        'i': '0',
        'j': '0',
    }))
    # First conditional: A -> B or A -> C on i.
    sdfg.add_edge(state_a, state_b,
                  InterstateEdge(condition=CodeProperty.from_string(
                      'i < 10', language=Language.Python)))
    sdfg.add_edge(state_a, state_c,
                  InterstateEdge(condition=CodeProperty.from_string(
                      'not (i < 10)', language=Language.Python)))
    # B -> D unconditionally, while C -> D / C -> E depends on j:
    # D is a "fake" merge of the two branches.
    sdfg.add_edge(state_b, state_d, InterstateEdge())
    sdfg.add_edge(state_c, state_d,
                  InterstateEdge(condition=CodeProperty.from_string(
                      'j < 10', language=Language.Python)))
    sdfg.add_edge(state_c, state_e,
                  InterstateEdge(condition=CodeProperty.from_string(
                      'not (j < 10)', language=Language.Python)))

    propagate_states(sdfg)

    # Both end states are conditional, i.e. dynamic with one execution.
    state_check_executions(state_d, 1, True)
    state_check_executions(state_e, 1, True)
def make_nested_sdfg():
    """Build a nested SDFG whose loop bound is read from a stream at runtime.

    Structure: assign -> guard -> for -> guard ... -> endfor. The 'assign'
    state pops the dynamic bound from ``IN_bound`` into a transient scalar;
    the loop body reads ``IN_a`` and pushes one element to ``OUT_stream`` per
    iteration. Relies on module-level symbol ``N`` — assumed defined elsewhere
    in this file (TODO confirm).

    :return: The constructed SDFG.
    """
    sdfg = dace.SDFG('vol_propagation_nested')

    assign_loop_bound = sdfg.add_state('assign')
    guard_state = sdfg.add_state('guard')
    loop_state = sdfg.add_state('for')
    end_state = sdfg.add_state('endfor')

    # Loop initialization: i = 0.
    sdfg.add_edge(assign_loop_bound, guard_state,
                  InterstateEdge(assignments={'i': '0'}))
    # Loop condition uses the dynamically-read bound.
    sdfg.add_edge(
        guard_state, loop_state,
        InterstateEdge(condition=CodeProperty.from_string(
            'i < loop_bound', language=Language.Python)))
    # Back-edge with increment.
    sdfg.add_edge(loop_state, guard_state,
                  InterstateEdge(assignments={'i': 'i+1'}))
    # Loop exit.
    sdfg.add_edge(
        guard_state, end_state,
        InterstateEdge(condition=CodeProperty.from_string(
            'not (i < loop_bound)', language=Language.Python)))

    # 'assign' state: pop the loop bound from the input stream into a
    # transient register scalar.
    in_bound = assign_loop_bound.add_stream('IN_bound',
                                            dace.int32,
                                            storage=StorageType.FPGA_Local)
    loop_bound = assign_loop_bound.add_scalar(
        'loop_bound',
        dace.int32,
        transient=True,
        storage=StorageType.FPGA_Registers)
    assign_loop_bound.add_memlet_path(in_bound,
                                      loop_bound,
                                      memlet=Memlet.simple(loop_bound, '0'))

    # Loop body: read IN_a and emit one element to OUT_stream.
    in_a = loop_state.add_array('IN_a', [N],
                                dace.int32,
                                storage=StorageType.FPGA_Global)
    out_stream = loop_state.add_stream('OUT_stream',
                                       dace.int32,
                                       storage=StorageType.FPGA_Local)
    tasklet2 = loop_state.add_tasklet('compute', {'_IN_a'}, {'_OUT_stream'},
                                      '_OUT_stream = _IN_a[0]')
    # NOTE: the memlet covers the full 0:N range even though the tasklet only
    # reads element 0 — presumably intentional for volume-propagation testing.
    loop_state.add_memlet_path(in_a,
                               tasklet2,
                               dst_conn='_IN_a',
                               memlet=Memlet.simple(in_a, '0:N'))
    loop_state.add_memlet_path(tasklet2,
                               out_stream,
                               src_conn='_OUT_stream',
                               memlet=Memlet.simple(out_stream, '0'))

    return sdfg
def __str__(self): if self.condition is not None: return ("%s [%s=0:%s], Condition: %s" % (self._label, self.pe_index, self.num_pes, CodeProperty.to_string(self.condition))) else: return ("%s [%s=0:%s]" % (self._label, self.pe_index, self.num_pes))
class Consume(object):
    """ Consume is a scope, like `Map`, that is a part of the parametric
        graph extension of the SDFG. It creates a producer-consumer
        relationship between the input stream and the scope subgraph. The
        subgraph is scheduled to a given number of processing elements
        for processing, and they will try to pop elements from the input
        stream until a given quiescence condition is reached. """

    # Properties
    label = Property(dtype=str, desc="Name of the consume node")
    pe_index = Property(dtype=str, desc="Processing element identifier")
    num_pes = SymbolicProperty(desc="Number of processing elements")
    condition = CodeProperty(desc="Quiescence condition", allow_none=True)
    language = Property(enum=types.Language, default=types.Language.Python)
    schedule = Property(dtype=types.ScheduleType,
                        desc="Consume schedule",
                        enum=types.ScheduleType,
                        from_string=lambda x: types.ScheduleType[x])
    chunksize = Property(dtype=int,
                         desc="Maximal size of elements to consume at a time",
                         default=1)
    debuginfo = DebugInfoProperty()
    is_collapsed = Property(dtype=bool,
                            desc="Show this node/scope/state as collapsed",
                            default=False)

    def as_map(self):
        """ Compatibility function that allows to view the consume as a map,
            mainly in memlet propagation. """
        # A consume over num_pes processing elements is equivalent, for
        # propagation purposes, to a map over [pe_index = 0 : num_pes - 1].
        return Map(self.label, [self.pe_index],
                   sbs.Range([(0, self.num_pes - 1, 1)]), self.schedule)

    def __init__(self,
                 label,
                 pe_tuple,
                 condition,
                 schedule=types.ScheduleType.Default,
                 chunksize=1,
                 debuginfo=None):
        """Create a consume scope.

        :param label: Name of the consume node.
        :param pe_tuple: Pair of (processing-element index name, number of
                         processing elements).
        :param condition: Quiescence condition (may be None).
        :param schedule: Consume schedule type.
        :param chunksize: Maximal number of elements consumed at a time.
        :param debuginfo: Source-location debug information.
        """
        super(Consume, self).__init__()

        # Properties
        self.label = label
        self.pe_index, self.num_pes = pe_tuple
        self.condition = condition
        self.schedule = schedule
        self.chunksize = chunksize
        self.debuginfo = debuginfo

    def __str__(self):
        # Include the quiescence condition in the rendering if one is set.
        if self.condition is not None:
            return ("%s [%s=0:%s], Condition: %s" %
                    (self._label, self.pe_index, self.num_pes,
                     CodeProperty.to_string(self.condition)))
        else:
            return ("%s [%s=0:%s]" %
                    (self._label, self.pe_index, self.num_pes))

    def validate(self, sdfg, state, node):
        """Raise NameError if the consume label is not a valid name."""
        if not data.validate_name(self.label):
            raise NameError('Invalid consume name "%s"' % self.label)

    def get_param_num(self):
        """ Returns the number of consume dimension parameters/symbols. """
        # A consume scope always has exactly one parameter: the PE index.
        return 1
class Tasklet(CodeNode):
    """ A node that contains a tasklet: a functional computation procedure
        that can only access external data specified using connectors.

        Tasklets may be implemented in Python, C++, or any supported
        language by the code generator.
    """

    label = Property(dtype=str, desc="Name of the tasklet")
    language = Property(enum=types.Language, default=types.Language.Python)
    code = CodeProperty(desc="Tasklet code")
    code_global = CodeProperty(
        desc="Global scope code needed for tasklet execution", default="")
    code_init = CodeProperty(
        desc="Extra code that is called on DaCe runtime initialization",
        default="")
    code_exit = CodeProperty(
        desc="Extra code that is called on DaCe runtime cleanup",
        default="")
    location = Property(dtype=str,
                        desc="Tasklet execution location descriptor")
    debuginfo = DebugInfoProperty()

    def __init__(self,
                 label,
                 inputs=None,
                 outputs=None,
                 code="",
                 language=types.Language.Python,
                 code_global="",
                 code_init="",
                 code_exit="",
                 location="-1",
                 debuginfo=None):
        """Create a tasklet node.

        :param label: Name of the tasklet.
        :param inputs: Set of input connector names (default: empty set).
        :param outputs: Set of output connector names (default: empty set).
        :param code: Tasklet code in the given language.
        :param language: Language the tasklet code is written in.
        :param code_global: Global-scope code needed for execution.
        :param code_init: Code run on DaCe runtime initialization.
        :param code_exit: Code run on DaCe runtime cleanup.
        :param location: Execution location descriptor.
        :param debuginfo: Source-location debug information.
        """
        # Fix: the previous signature used mutable default arguments
        # (inputs=set(), outputs=set()), which are shared across all calls
        # and can leak connectors between default-constructed tasklets.
        # A None sentinel with a fresh set per call is the safe equivalent.
        super(Tasklet, self).__init__(inputs or set(), outputs or set())

        # Properties
        self.label = label
        self.language = language
        self.code = code
        self.location = location
        self.code_global = code_global
        self.code_init = code_init
        self.code_exit = code_exit
        self.debuginfo = debuginfo

    @property
    def name(self):
        return self._label

    def draw_node(self, sdfg, graph):
        return dot.draw_node(sdfg, graph, self, shape="octagon")

    def validate(self, sdfg, state):
        """Validate the tasklet and connector names, raising NameError on
        the first invalid one."""
        if not data.validate_name(self.label):
            raise NameError('Invalid tasklet name "%s"' % self.label)
        for in_conn in self.in_connectors:
            if not data.validate_name(in_conn):
                raise NameError('Invalid input connector "%s"' % in_conn)
        for out_conn in self.out_connectors:
            if not data.validate_name(out_conn):
                raise NameError('Invalid output connector "%s"' % out_conn)

    def __str__(self):
        if not self.label:
            return "--Empty--"
        else:
            return self.label
class Tasklet(CodeNode):
    """ A node that contains a tasklet: a functional computation procedure
        that can only access external data specified using connectors.

        Tasklets may be implemented in Python, C++, or any supported
        language by the code generator.
    """

    code = CodeProperty(desc="Tasklet code", default=CodeBlock(""))
    debuginfo = DebugInfoProperty()
    instrument = Property(choices=dtypes.InstrumentationType,
                          desc="Measure execution statistics with given method",
                          default=dtypes.InstrumentationType.No_Instrumentation)

    def __init__(self,
                 label,
                 inputs=None,
                 outputs=None,
                 code="",
                 language=dtypes.Language.Python,
                 location=None,
                 debuginfo=None):
        """Create a tasklet node.

        :param label: Name of the tasklet.
        :param inputs: Set of input connector names.
        :param outputs: Set of output connector names.
        :param code: Tasklet code in the given language.
        :param language: Language the tasklet code is written in.
        :param location: Execution location descriptor.
        :param debuginfo: Source-location debug information.
        """
        super(Tasklet, self).__init__(label, location, inputs, outputs)
        self.code = CodeBlock(code, language)
        self.debuginfo = debuginfo

    @property
    def language(self):
        # The language lives on the code block rather than as a separate
        # property.
        return self.code.language

    @staticmethod
    def from_json(json_obj, context=None):
        """Deserialize a Tasklet from its JSON representation."""
        ret = Tasklet("dummylabel")
        dace.serialize.set_properties_from_json(ret, json_obj, context=context)
        return ret

    @property
    def name(self):
        return self._label

    def validate(self, sdfg, state):
        """Validate the tasklet and connector names, raising NameError on
        the first invalid one."""
        if not dtypes.validate_name(self.label):
            raise NameError('Invalid tasklet name "%s"' % self.label)
        for in_conn in self.in_connectors:
            if not dtypes.validate_name(in_conn):
                raise NameError('Invalid input connector "%s"' % in_conn)
        for out_conn in self.out_connectors:
            if not dtypes.validate_name(out_conn):
                raise NameError('Invalid output connector "%s"' % out_conn)

    @property
    def free_symbols(self) -> Set[str]:
        # Free symbols of the code, minus names bound by connectors.
        return self.code.get_free_symbols(self.in_connectors.keys()
                                          | self.out_connectors.keys())

    def infer_connector_types(self, sdfg, state):
        """Infer types for untyped output connectors of a Python tasklet
        via type inference over the tasklet code."""
        # If a Python tasklet, use type inference to figure out all None output
        # connectors
        if all(cval.type is not None for cval in self.out_connectors.values()):
            return
        if self.code.language != dtypes.Language.Python:
            return

        # Inference needs fully-typed inputs to proceed.
        if any(cval.type is None for cval in self.in_connectors.values()):
            raise TypeError('Cannot infer output connectors of tasklet "%s", '
                            'not all input connectors have types' % str(self))

        # Avoid import loop
        from dace.codegen.tools.type_inference import infer_types

        # Get symbols defined at beginning of node, and infer all types in
        # tasklet
        syms = state.symbols_defined_at(self)
        syms.update(self.in_connectors)
        new_syms = infer_types(self.code.code, syms)
        for cname, oconn in self.out_connectors.items():
            if oconn.type is None:
                if cname not in new_syms:
                    raise TypeError('Cannot infer type of tasklet %s output '
                                    '"%s", please specify manually.' %
                                    (self.label, cname))
                self.out_connectors[cname] = new_syms[cname]

    def __str__(self):
        if not self.label:
            return "--Empty--"
        else:
            return self.label
class Tasklet(CodeNode):
    """ A node that contains a tasklet: a functional computation procedure
        that can only access external data specified using connectors.

        Tasklets may be implemented in Python, C++, or any supported
        language by the code generator.
    """

    code = CodeProperty(desc="Tasklet code", default=CodeBlock(""))
    debuginfo = DebugInfoProperty()
    instrument = Property(
        choices=dtypes.InstrumentationType,
        desc="Measure execution statistics with given method",
        default=dtypes.InstrumentationType.No_Instrumentation)

    def __init__(self,
                 label,
                 inputs=None,
                 outputs=None,
                 code="",
                 language=dtypes.Language.Python,
                 location=None,
                 debuginfo=None):
        """Create a tasklet node.

        :param label: Name of the tasklet.
        :param inputs: Set of input connector names.
        :param outputs: Set of output connector names.
        :param code: Tasklet code in the given language.
        :param language: Language the tasklet code is written in.
        :param location: Execution location descriptor.
        :param debuginfo: Source-location debug information.
        """
        super(Tasklet, self).__init__(label, location, inputs, outputs)
        self.code = CodeBlock(code, language)
        self.debuginfo = debuginfo

    @property
    def language(self):
        # The language lives on the code block rather than as a separate
        # property.
        return self.code.language

    @staticmethod
    def from_json(json_obj, context=None):
        """Deserialize a Tasklet from its JSON representation."""
        ret = Tasklet("dummylabel")
        dace.serialize.set_properties_from_json(ret, json_obj, context=context)
        return ret

    @property
    def name(self):
        return self._label

    def validate(self, sdfg, state):
        """Validate the tasklet and connector names, raising NameError on
        the first invalid one."""
        if not dtypes.validate_name(self.label):
            raise NameError('Invalid tasklet name "%s"' % self.label)
        for in_conn in self.in_connectors:
            if not dtypes.validate_name(in_conn):
                raise NameError('Invalid input connector "%s"' % in_conn)
        for out_conn in self.out_connectors:
            if not dtypes.validate_name(out_conn):
                raise NameError('Invalid output connector "%s"' % out_conn)

    @property
    def free_symbols(self) -> Set[str]:
        # Free symbols of the code, minus names bound by connectors.
        # NOTE(review): this version unions the connector dicts directly
        # (not .keys()) — presumably equivalent for set purposes; confirm
        # against the CodeBlock API.
        return self.code.get_free_symbols(self.in_connectors
                                          | self.out_connectors)

    def __str__(self):
        if not self.label:
            return "--Empty--"
        else:
            return self.label
class Tasklet(CodeNode):
    """ A node that contains a tasklet: a functional computation procedure
        that can only access external data specified using connectors.

        Tasklets may be implemented in Python, C++, or any supported
        language by the code generator.
    """

    code = CodeProperty(desc="Tasklet code", default=CodeBlock(""))
    state_fields = ListProperty(
        element_type=str, desc="Fields that are added to the global state")
    code_global = CodeProperty(
        desc="Global scope code needed for tasklet execution",
        default=CodeBlock("", dtypes.Language.CPP))
    code_init = CodeProperty(
        desc="Extra code that is called on DaCe runtime initialization",
        default=CodeBlock("", dtypes.Language.CPP))
    code_exit = CodeProperty(
        desc="Extra code that is called on DaCe runtime cleanup",
        default=CodeBlock("", dtypes.Language.CPP))
    debuginfo = DebugInfoProperty()
    instrument = EnumProperty(
        dtype=dtypes.InstrumentationType,
        desc="Measure execution statistics with given method",
        default=dtypes.InstrumentationType.No_Instrumentation)

    def __init__(self,
                 label,
                 inputs=None,
                 outputs=None,
                 code="",
                 language=dtypes.Language.Python,
                 state_fields=None,
                 code_global="",
                 code_init="",
                 code_exit="",
                 location=None,
                 debuginfo=None):
        """Create a tasklet node.

        :param label: Name of the tasklet.
        :param inputs: Set of input connector names.
        :param outputs: Set of output connector names.
        :param code: Tasklet code in the given language.
        :param language: Language the tasklet code is written in.
        :param state_fields: Fields that are added to the global state.
        :param code_global: Global-scope code needed for execution (C++).
        :param code_init: Code run on DaCe runtime initialization (C++).
        :param code_exit: Code run on DaCe runtime cleanup (C++).
        :param location: Execution location descriptor.
        :param debuginfo: Source-location debug information.
        """
        super(Tasklet, self).__init__(label, location, inputs, outputs)
        self.code = CodeBlock(code, language)
        self.state_fields = state_fields or []
        # Auxiliary code blocks are always emitted as C++.
        self.code_global = CodeBlock(code_global, dtypes.Language.CPP)
        self.code_init = CodeBlock(code_init, dtypes.Language.CPP)
        self.code_exit = CodeBlock(code_exit, dtypes.Language.CPP)
        self.debuginfo = debuginfo

    @property
    def language(self):
        # The language lives on the code block rather than as a separate
        # property.
        return self.code.language

    @staticmethod
    def from_json(json_obj, context=None):
        """Deserialize a Tasklet from its JSON representation."""
        ret = Tasklet("dummylabel")
        dace.serialize.set_properties_from_json(ret, json_obj, context=context)
        return ret

    @property
    def name(self):
        return self._label

    def validate(self, sdfg, state):
        """Validate the tasklet and connector names, raising NameError on
        the first invalid one."""
        if not dtypes.validate_name(self.label):
            raise NameError('Invalid tasklet name "%s"' % self.label)
        for in_conn in self.in_connectors:
            if not dtypes.validate_name(in_conn):
                raise NameError('Invalid input connector "%s"' % in_conn)
        for out_conn in self.out_connectors:
            if not dtypes.validate_name(out_conn):
                raise NameError('Invalid output connector "%s"' % out_conn)

    @property
    def free_symbols(self) -> Set[str]:
        # Free symbols of the code, minus names bound by connectors.
        return self.code.get_free_symbols(self.in_connectors.keys()
                                          | self.out_connectors.keys())

    def infer_connector_types(self, sdfg, state):
        """Infer types for untyped connectors, either from explicit MLIR
        signatures or via type inference over Python tasklet code."""
        # If a MLIR tasklet, simply read out the types (it's explicit)
        if self.code.language == dtypes.Language.MLIR:
            # Inline import because mlir.utils depends on pyMLIR which may not
            # be installed. Doesn't cause crashes due to missing pyMLIR if a
            # MLIR tasklet is not present.
            from dace.codegen.targets.mlir import utils

            mlir_ast = utils.get_ast(self.code.code)
            mlir_is_generic = utils.is_generic(mlir_ast)
            mlir_entry_func = utils.get_entry_func(mlir_ast, mlir_is_generic)
            mlir_result_type = utils.get_entry_result_type(
                mlir_entry_func, mlir_is_generic)
            mlir_out_name = next(iter(self.out_connectors.keys()))

            # Fill in (or cross-check) the single output connector type.
            if self.out_connectors[
                    mlir_out_name] is None or self.out_connectors[
                        mlir_out_name].ctype == "void":
                self.out_connectors[mlir_out_name] = utils.get_dace_type(
                    mlir_result_type)
            elif self.out_connectors[mlir_out_name] != utils.get_dace_type(
                    mlir_result_type):
                warnings.warn(
                    "Type mismatch between MLIR tasklet out connector and MLIR code"
                )

            # Fill in (or cross-check) each input connector type from the
            # MLIR entry-function arguments.
            for mlir_arg in utils.get_entry_args(mlir_entry_func,
                                                 mlir_is_generic):
                if self.in_connectors[
                        mlir_arg[0]] is None or self.in_connectors[
                            mlir_arg[0]].ctype == "void":
                    self.in_connectors[mlir_arg[0]] = utils.get_dace_type(
                        mlir_arg[1])
                elif self.in_connectors[mlir_arg[0]] != utils.get_dace_type(
                        mlir_arg[1]):
                    warnings.warn(
                        "Type mismatch between MLIR tasklet in connector and MLIR code"
                    )

            return

        # If a Python tasklet, use type inference to figure out all None output
        # connectors
        if all(cval.type is not None for cval in self.out_connectors.values()):
            return
        if self.code.language != dtypes.Language.Python:
            return

        # Inference needs fully-typed inputs to proceed.
        if any(cval.type is None for cval in self.in_connectors.values()):
            raise TypeError('Cannot infer output connectors of tasklet "%s", '
                            'not all input connectors have types' % str(self))

        # Avoid import loop
        from dace.codegen.tools.type_inference import infer_types

        # Get symbols defined at beginning of node, and infer all types in
        # tasklet
        syms = state.symbols_defined_at(self)
        syms.update(self.in_connectors)
        new_syms = infer_types(self.code.code, syms)
        for cname, oconn in self.out_connectors.items():
            if oconn.type is None:
                if cname not in new_syms:
                    raise TypeError('Cannot infer type of tasklet %s output '
                                    '"%s", please specify manually.' %
                                    (self.label, cname))
                self.out_connectors[cname] = new_syms[cname]

    def __str__(self):
        if not self.label:
            return "--Empty--"
        else:
            return self.label
def make_read_row():
    """Build the SPMV row-pointer reader SDFG: streams each of the H + 1
    entries of the CSR row-pointer array to four consumer pipes.

    The ``for h in range(H + 1)`` loop is an explicit state machine:
    begin -> entry -> body -> entry ... -> end. Relies on module-level
    ``itype`` and symbol ``H`` — assumed defined elsewhere in this file
    (TODO confirm).

    :return: The constructed SDFG.
    """
    sdfg = SDFG("spmv_read_row")
    begin = sdfg.add_state("begin")
    entry = sdfg.add_state("entry")
    end = sdfg.add_state("end")
    body = sdfg.add_state("body")
    # Loop init / condition / exit / increment over h in [0, H].
    sdfg.add_edge(begin, entry, InterstateEdge(assignments={"h": "0"}))
    sdfg.add_edge(
        entry, body,
        InterstateEdge(condition=CodeProperty.from_string(
            "h < H + 1", language=Language.Python)))
    sdfg.add_edge(
        entry, end,
        InterstateEdge(condition=CodeProperty.from_string(
            "h >= H + 1", language=Language.Python)))
    sdfg.add_edge(body, entry, InterstateEdge(assignments={"h": "h + 1"}))

    # Row-pointer memory (H + 1 entries) and the four output pipes.
    a_row_mem = body.add_array("A_row_mem", (H + 1, ),
                               itype,
                               storage=StorageType.FPGA_Global)
    to_val_pipe = body.add_stream("to_val_pipe",
                                  itype,
                                  storage=StorageType.FPGA_Local)
    to_col_pipe = body.add_stream("to_col_pipe",
                                  itype,
                                  storage=StorageType.FPGA_Local)
    to_compute_pipe = body.add_stream("to_compute_pipe",
                                      itype,
                                      storage=StorageType.FPGA_Local)
    to_x_pipe = body.add_stream("to_x_pipe",
                                itype,
                                storage=StorageType.FPGA_Local)
    # Broadcast tasklet: forwards the same row-pointer value to all pipes.
    tasklet = body.add_tasklet(
        "read_row", {"row_in"},
        {"to_val_out", "to_col_out", "to_compute_out", "to_x_out"},
        "to_val_out = row_in\n"
        "to_col_out = row_in\n"
        "to_compute_out = row_in\n"
        "to_x_out = row_in")
    body.add_memlet_path(a_row_mem,
                         tasklet,
                         dst_conn="row_in",
                         memlet=Memlet.simple(a_row_mem, "h"))
    body.add_memlet_path(tasklet,
                         to_val_pipe,
                         src_conn="to_val_out",
                         memlet=Memlet.simple(to_val_pipe, "0"))
    body.add_memlet_path(tasklet,
                         to_col_pipe,
                         src_conn="to_col_out",
                         memlet=Memlet.simple(to_col_pipe, "0"))
    body.add_memlet_path(tasklet,
                         to_compute_pipe,
                         src_conn="to_compute_out",
                         memlet=Memlet.simple(to_compute_pipe, "0"))
    body.add_memlet_path(tasklet,
                         to_x_pipe,
                         src_conn="to_x_out",
                         memlet=Memlet.simple(to_x_pipe, "0"))
    return sdfg
def make_compute_nested_sdfg():
    """Build the nested SPMV compute SDFG: b_tmp = a_in * x_in, then either
    initialize the output (first column, c == 0) or accumulate into it.

    Control flow is an if/else diamond: if -> then|else -> end, branching on
    the column counter symbol ``c``. Relies on module-level ``dtype`` —
    assumed defined elsewhere in this file (TODO confirm).

    :return: The constructed SDFG.
    """
    sdfg = SDFG("spmv_compute_nested")

    if_state = sdfg.add_state("if")
    then_state = sdfg.add_state("then")
    else_state = sdfg.add_state("else")
    end_state = sdfg.add_state("end")

    # Branch on the column counter: c == 0 initializes, c != 0 accumulates.
    sdfg.add_edge(
        if_state, then_state,
        InterstateEdge(condition=CodeProperty.from_string(
            "c == 0", language=Language.Python)))
    sdfg.add_edge(
        if_state, else_state,
        InterstateEdge(condition=CodeProperty.from_string(
            "c != 0", language=Language.Python)))
    sdfg.add_edge(then_state, end_state, InterstateEdge())
    sdfg.add_edge(else_state, end_state, InterstateEdge())

    # 'if' state: compute the product b_tmp = a_in * x_in.
    a_in = if_state.add_scalar("a_in",
                               dtype,
                               storage=StorageType.FPGA_Registers)
    x_in = if_state.add_scalar("x_in",
                               dtype,
                               storage=StorageType.FPGA_Registers)
    b_tmp_out = if_state.add_scalar("b_tmp",
                                    dtype,
                                    transient=True,
                                    storage=StorageType.FPGA_Registers)
    tasklet = if_state.add_tasklet("compute", {"_a_in", "_x_in"}, {"_b_out"},
                                   "_b_out = _a_in * _x_in")
    if_state.add_memlet_path(a_in,
                             tasklet,
                             dst_conn="_a_in",
                             memlet=Memlet.simple(a_in, "0"))
    if_state.add_memlet_path(x_in,
                             tasklet,
                             dst_conn="_x_in",
                             memlet=Memlet.simple(x_in, "0"))
    if_state.add_memlet_path(tasklet,
                             b_tmp_out,
                             src_conn="_b_out",
                             memlet=Memlet.simple(b_tmp_out, "0"))

    # 'then' state (c == 0): copy b_tmp straight to the output.
    b_tmp_then_in = then_state.add_scalar("b_tmp",
                                          dtype,
                                          transient=True,
                                          storage=StorageType.FPGA_Registers)
    b_then_out = then_state.add_scalar("b_out",
                                       dtype,
                                       storage=StorageType.FPGA_Registers)
    then_state.add_memlet_path(b_tmp_then_in,
                               b_then_out,
                               memlet=Memlet.simple(b_then_out, "0"))

    # 'else' state (c != 0): accumulate b_out = b_in + b_tmp.
    b_tmp_else_in = else_state.add_scalar("b_tmp",
                                          dtype,
                                          transient=True,
                                          storage=StorageType.FPGA_Registers)
    b_else_in = else_state.add_scalar("b_in",
                                      dtype,
                                      storage=StorageType.FPGA_Registers)
    b_else_out = else_state.add_scalar("b_out",
                                       dtype,
                                       storage=StorageType.FPGA_Registers)
    else_tasklet = else_state.add_tasklet("b_wcr", {"_b_in", "b_prev"},
                                          {"_b_out"},
                                          "_b_out = b_prev + _b_in")
    else_state.add_memlet_path(b_tmp_else_in,
                               else_tasklet,
                               dst_conn="_b_in",
                               memlet=Memlet.simple(b_tmp_else_in, "0"))
    else_state.add_memlet_path(b_else_in,
                               else_tasklet,
                               dst_conn="b_prev",
                               memlet=Memlet.simple(b_else_in, "0"))
    else_state.add_memlet_path(else_tasklet,
                               b_else_out,
                               src_conn="_b_out",
                               memlet=Memlet.simple(b_else_out, "0"))

    return sdfg
def make_iteration_space(sdfg):
    """Add the CSR row/column iteration state machine to an existing SDFG.

    Builds two nested loops: an outer loop over rows (h in [0, H)) and an
    inner loop over each row's nonzeros (c in [0, row_end - row_begin)).
    Between row iterations, the current row-end pointer is shifted into
    row_begin and the next row-end is popped from ``row_pipe``. Relies on
    module-level ``itype`` and symbol ``H`` — assumed defined elsewhere in
    this file (TODO confirm).

    :param sdfg: The SDFG to add the states and edges to.
    :return: Tuple of (pre_state, body, post_state) for callers to attach
             per-row setup, the compute body, and per-row teardown.
    """
    pre_state = sdfg.add_state("pre_state")
    rows_begin = sdfg.add_state("rows_begin")
    rows_entry = sdfg.add_state("rows_entry")
    rows_end = sdfg.add_state("rows_end")
    shift_rowptr = sdfg.add_state("shift_rowptr")
    read_rowptr = sdfg.add_state("read_rowptr")
    cols_begin = sdfg.add_state("cols_begin")
    cols_entry = sdfg.add_state("cols_entry")
    cols_end = sdfg.add_state("cols_end")
    body = sdfg.add_state("compute")
    post_state = sdfg.add_state("post_state")

    # Outer (row) loop: h in [0, H).
    sdfg.add_edge(pre_state, rows_begin, InterstateEdge())
    sdfg.add_edge(rows_begin, rows_entry,
                  InterstateEdge(assignments={"h": "0"}))
    sdfg.add_edge(
        rows_entry, shift_rowptr,
        InterstateEdge(condition=CodeProperty.from_string(
            "h < H", language=Language.Python)))
    sdfg.add_edge(
        rows_entry, rows_end,
        InterstateEdge(condition=CodeProperty.from_string(
            "h >= H", language=Language.Python)))
    # Per-row pointer handling before entering the column loop.
    sdfg.add_edge(shift_rowptr, read_rowptr, InterstateEdge())
    sdfg.add_edge(read_rowptr, cols_begin, InterstateEdge())
    # Inner (column) loop: c in [0, row_end - row_begin).
    sdfg.add_edge(cols_begin, cols_entry,
                  InterstateEdge(assignments={"c": "0"}))
    sdfg.add_edge(
        cols_entry, body,
        InterstateEdge(condition=CodeProperty.from_string(
            "c < row_end - row_begin", language=Language.Python)))
    sdfg.add_edge(
        cols_entry, cols_end,
        InterstateEdge(condition=CodeProperty.from_string(
            "c >= row_end - row_begin", language=Language.Python)))
    sdfg.add_edge(body, cols_entry,
                  InterstateEdge(assignments={"c": "c + 1"}))
    sdfg.add_edge(cols_end, post_state, InterstateEdge())
    # Row-loop back edge with increment.
    sdfg.add_edge(post_state, rows_entry,
                  InterstateEdge(assignments={"h": "h + 1"}))

    # pre_state: pop the first row pointer into the row_end register.
    row_end_first = pre_state.add_scalar("row_end",
                                         itype,
                                         transient=True,
                                         storage=StorageType.FPGA_Registers)
    row_pipe_first = pre_state.add_stream("row_pipe",
                                          itype,
                                          storage=StorageType.FPGA_Local)
    pre_state.add_memlet_path(row_pipe_first,
                              row_end_first,
                              memlet=Memlet.simple(row_end_first, "0"))

    # shift_rowptr: previous row_end becomes the new row_begin. row_begin is
    # given SDFG lifetime so the value persists across states.
    row_end_shift = shift_rowptr.add_scalar("row_end",
                                            itype,
                                            transient=True,
                                            storage=StorageType.FPGA_Registers)
    row_begin_shift = shift_rowptr.add_scalar(
        "row_begin",
        itype,
        transient=True,
        lifetime=AllocationLifetime.SDFG,
        storage=StorageType.FPGA_Registers)
    shift_rowptr.add_memlet_path(row_end_shift,
                                 row_begin_shift,
                                 memlet=Memlet.simple(row_begin_shift, "0"))

    # read_rowptr: pop the next row pointer into row_end.
    row_pipe = read_rowptr.add_stream("row_pipe",
                                      itype,
                                      storage=StorageType.FPGA_Local)
    row_end = read_rowptr.add_scalar("row_end",
                                     itype,
                                     transient=True,
                                     storage=StorageType.FPGA_Registers)
    read_rowptr.add_memlet_path(row_pipe,
                                row_end,
                                memlet=Memlet.simple(row_end, "0"))

    return pre_state, body, post_state
class InterstateEdge(object):
    """ An SDFG state machine edge. These edges can contain a condition
        (which may include data accesses for data-dependent decisions) and
        zero or more assignments of values to inter-state variables (e.g.,
        loop iterates).
    """

    assignments = Property(
        dtype=dict,
        desc="Assignments to perform upon transition (e.g., 'x=x+1; y = 0')",
        from_string=assignments_from_string,
        to_string=assignments_to_string)
    condition = CodeProperty(desc="Transition condition")

    def __init__(self, condition=None, assignments=None):
        """Create an inter-state edge.

        :param condition: Transition condition; None means unconditional
                          (stored as the constant-true expression "1").
        :param assignments: Dict of symbol assignments performed on
                            transition.
        """
        if condition is None:
            # Unconditional edges are represented by the AST of "1".
            condition = ast.parse("1").body[0]

        if assignments is None:
            assignments = {}

        self.condition = condition
        self.assignments = assignments

        self._dotOpts = {"minlen": 3, "color": "blue", "fontcolor": "blue"}

    def is_unconditional(self):
        """ Returns True if the state transition is unconditional. """
        return (self.condition is None or InterstateEdge.condition.to_string(
            self.condition).strip() == "1")

    def condition_sympy(self):
        """Return the condition as a sympy expression."""
        cond_ast = self.condition
        return symbolic.pystr_to_symbolic(astutils.unparse(cond_ast))

    def condition_symbols(self):
        """Return the set of symbols appearing in the condition AST."""
        return dace.symbolic.symbols_in_ast(self.condition[0])

    def to_json(self, parent=None):
        """Serialize this edge to a JSON-compatible dict."""
        ret = {
            'type': type(self).__name__,
            'attributes': dace.serialize.all_properties_to_json(self),
            'label': self.label
        }

        return ret

    @staticmethod
    def from_json(json_obj, context=None):
        """Deserialize an InterstateEdge from its JSON representation."""
        if json_obj['type'] != "InterstateEdge":
            raise TypeError("Invalid data type")

        # Create dummy object
        ret = InterstateEdge()
        dace.serialize.set_properties_from_json(ret, json_obj, context=context)

        return ret

    @property
    def label(self):
        """Human-readable edge label combining condition and assignments."""
        # NOTE: this local shadows the class-level 'assignments' property.
        assignments = ','.join(
            ['%s=%s' % (k, v) for k, v in self.assignments.items()])

        # Edge with assignment only (no condition)
        if astutils.unparse(self.condition) == '1':
            # Edge without conditions or assignments
            if len(self.assignments) == 0:
                return ''
            return assignments

        # Edge with condition only (no assignment)
        if len(self.assignments) == 0:
            return astutils.unparse(self.condition)

        # Edges with assignments and conditions
        return astutils.unparse(self.condition) + '; ' + assignments

    @property
    def dotOpts(self):
        """Graphviz rendering options for this edge, including its label."""
        result = {}
        result.update(self._dotOpts)
        result.update({'label': self.label})
        return result
class Tasklet(CodeNode):
    """ A node that contains a tasklet: a functional computation procedure
        that can only access external data specified using connectors.

        Tasklets may be implemented in Python, C++, or any supported
        language by the code generator.
    """

    label = Property(dtype=str, desc="Name of the tasklet")
    code = CodeProperty(desc="Tasklet code")
    code_global = CodeProperty(
        desc="Global scope code needed for tasklet execution", default="")
    code_init = CodeProperty(
        desc="Extra code that is called on DaCe runtime initialization",
        default="")
    code_exit = CodeProperty(
        desc="Extra code that is called on DaCe runtime cleanup",
        default="")
    location = Property(dtype=str,
                        desc="Tasklet execution location descriptor")
    debuginfo = DebugInfoProperty()
    instrument = Property(
        choices=dtypes.InstrumentationType,
        desc="Measure execution statistics with given method",
        default=dtypes.InstrumentationType.No_Instrumentation)

    def __init__(self,
                 label,
                 inputs=None,
                 outputs=None,
                 code="",
                 language=dtypes.Language.Python,
                 code_global="",
                 code_init="",
                 code_exit="",
                 location="-1",
                 debuginfo=None):
        """Create a tasklet node.

        :param label: Name of the tasklet.
        :param inputs: Set of input connector names.
        :param outputs: Set of output connector names.
        :param code: Tasklet code in the given language.
        :param language: Language the tasklet code is written in.
        :param code_global: Global-scope code needed for execution.
        :param code_init: Code run on DaCe runtime initialization.
        :param code_exit: Code run on DaCe runtime cleanup.
        :param location: Execution location descriptor.
        :param debuginfo: Source-location debug information.
        """
        super(Tasklet, self).__init__(inputs or set(), outputs or set())

        # Properties
        self.label = label
        # Set the language directly
        #self.language = language
        # NOTE(review): code properties are assigned as dicts — presumably
        # the CodeProperty descriptor converts them on set; confirm.
        self.code = {'code_or_block': code, 'language': language}
        self.location = location
        self.code_global = {'code_or_block': code_global, 'language': language}
        self.code_init = {'code_or_block': code_init, 'language': language}
        self.code_exit = {'code_or_block': code_exit, 'language': language}
        self.debuginfo = debuginfo

    @property
    def language(self):
        # Read the language back out of the stored code property.
        return self._code['language']

    @staticmethod
    def from_json(json_obj, context=None):
        """Deserialize a Tasklet from its JSON representation."""
        ret = Tasklet("dummylabel")
        dace.serialize.set_properties_from_json(ret, json_obj, context=context)
        return ret

    @property
    def name(self):
        return self._label

    def draw_node(self, sdfg, graph):
        return dot.draw_node(sdfg, graph, self, shape="octagon")

    def validate(self, sdfg, state):
        """Validate the tasklet and connector names, raising NameError on
        the first invalid one."""
        if not data.validate_name(self.label):
            raise NameError('Invalid tasklet name "%s"' % self.label)
        for in_conn in self.in_connectors:
            if not data.validate_name(in_conn):
                raise NameError('Invalid input connector "%s"' % in_conn)
        for out_conn in self.out_connectors:
            if not data.validate_name(out_conn):
                raise NameError('Invalid output connector "%s"' % out_conn)

    def __str__(self):
        if not self.label:
            return "--Empty--"
        else:
            return self.label
class InterstateEdge(object):
    """ An SDFG state machine edge. These edges can contain a condition
        (which may include data accesses for data-dependent decisions) and
        zero or more assignments of values to inter-state variables (e.g.,
        loop iterates).
    """

    assignments = Property(
        dtype=dict,
        desc="Assignments to perform upon transition (e.g., 'x=x+1; y = 0')",
        from_string=assignments_from_string,
        to_string=assignments_to_string)
    condition = CodeProperty(desc="Transition condition")
    language = Property(enum=types.Language, default=types.Language.Python)

    def __init__(self, condition=None, assignments=None):
        """Create an inter-state edge.

        :param condition: Transition condition; None means unconditional
                          (stored as the constant-true expression "1").
        :param assignments: Dict of symbol assignments performed on
                            transition.
        """
        if condition is None:
            # Unconditional edges are represented by the AST of "1".
            condition = ast.parse("1").body[0]

        if assignments is None:
            assignments = {}

        self.condition = condition
        self.assignments = assignments

        self._dotOpts = {"minlen": 3, "color": "blue", "fontcolor": "blue"}

    def is_unconditional(self):
        """ Returns True if the state transition is unconditional. """
        # Fix: compare to None with 'is' (PEP 8); '==' against an AST node
        # only worked by falling back to identity comparison.
        return (self.condition is None or InterstateEdge.condition.to_string(
            self.condition).strip() == "1")

    def condition_sympy(self):
        """Return the condition as a sympy expression."""
        cond_ast = self.condition
        return symbolic.pystr_to_symbolic(astutils.unparse(cond_ast))

    def condition_symbols(self):
        """Return the set of symbols appearing in the condition AST."""
        return dace.symbolic.symbols_in_ast(self.condition[0])

    def toJSON(self, indent=0):
        """Serialize this edge as a quoted JSON string of its label."""
        json = str(self.label)
        # Flatten the label to a single line for embedding in JSON.
        json = re.sub(r"\n", " ", json)
        return "\"" + json + "\""

    @property
    def label(self):
        """Human-readable edge label combining assignments and condition."""
        # NOTE: this local shadows the class-level 'assignments' property.
        assignments = ','.join(
            ['%s=%s' % (k, v) for k, v in self.assignments.items()])

        # Edge with assignment only (no condition)
        if astutils.unparse(self.condition) == '1':
            # Edge without conditions or assignments
            if len(self.assignments) == 0:
                return ''
            return assignments

        # Edge with condition only (no assignment)
        if len(self.assignments) == 0:
            return astutils.unparse(self.condition)

        # Edges with assignments and conditions
        return assignments + '; ' + astutils.unparse(self.condition)

    @property
    def dotOpts(self):
        """Graphviz rendering options for this edge, including its label."""
        result = {}
        result.update(self._dotOpts)
        result.update({'label': self.label})
        return result