def graph():
    """Build a small dataflow graph fixture.

    Process "a" writes to channel "ch" (token size 1) and process "b"
    reads from it.

    :returns: the constructed graph
    :rtype: DataflowGraph
    """
    k = DataflowGraph("graph")
    channel = DataflowChannel("ch", 1)
    k.add_channel(channel)
    process_a = DataflowProcess("a")
    process_a.connect_to_outgoing_channel(channel)
    process_b = DataflowProcess("b")
    process_b.connect_to_incomming_channel(channel)
    # BUG FIX: the original created fresh, unconnected DataflowProcess
    # instances here ("DataflowProcess('a')" / "DataflowProcess('b')"),
    # discarding the processes that were actually wired to the channel.
    # Add the connected processes instead.
    k.add_process(process_a)
    k.add_process(process_b)
    return k
def __init__(self, xml_file, name=None):
    """Initialize the graph from an SDF3 XML description.

    :param xml_file: path to the SDF3 XML file
    :param name: name of the graph; defaults to the name recorded in
        the SDF3 file
    :raises RuntimeError: if the file describes a non-SDF graph, or if
        channel properties are missing for a declared channel
    """
    log.info("Start parsing the SDF3 graph")

    # load the xml
    with open(to_absolute_path(xml_file)) as f:
        sdf3 = _sdf_parser.CreateFromDocument(f.read())
    if sdf3.type != "sdf":
        raise RuntimeError(f"Cannot parse {sdf3.type} graphs. "
                           "Only SDF graphs are supported.")
    graph = sdf3.applicationGraph

    # set the name and initialize parent class
    if name is None:
        name = graph.sdf.name
    super().__init__(name)

    # add all processes
    for actor in graph.sdf.actor:
        log.debug(f"Add process {name}.{actor.name}")
        self.add_process(DataflowProcess(actor.name))

    # add all channels
    for sdf_channel in graph.sdf.channel:
        c_name = sdf_channel.name

        # find channel properties
        c_props = None
        for props in graph.sdfProperties.channelProperties:
            if props.channel == c_name:
                c_props = props
                break
        if c_props is None:
            raise RuntimeError(
                "Did not find sdf3 channel properties for channel "
                f"{name}.{c_name}")

        # add the new channel
        # FIXME token size unit
        token_size = next(iter(c_props.tokenSize))
        # BUG FIX: the continuation line of this message was a plain
        # string, so "{c_props.token_size}" was logged literally (and
        # no such attribute is used anywhere else); log the parsed
        # token size value instead.
        log.debug(f"Add channel {name}.{c_name} with a token size of "
                  f"{token_size.sz} bytes")
        channel = DataflowChannel(sdf_channel.name, int(token_size.sz))
        self.add_channel(channel)

        src_process = self.find_process(sdf_channel.srcActor)
        src_process.connect_to_outgoing_channel(channel)
        log.debug(f"Process {name}.{src_process.name} writes to channel "
                  f"{name}.{c_name}")

        sink_process = self.find_process(sdf_channel.dstActor)
        sink_process.connect_to_incomming_channel(channel)
        # BUG FIX: this message previously reported src_process.name,
        # but it is the sink process that reads from the channel.
        log.debug(f"Process {name}.{sink_process.name} reads from channel "
                  f"{name}.{c_name}")

    log.info("Done parsing the SDF3 graph")
def __init__(self):
    """Construct the example graph.

    Two processes "a" and "b" are connected through a single 16-byte
    channel "c": "a" writes to the channel, "b" reads from it.
    """
    super().__init__("example")
    producer = DataflowProcess("a")
    consumer = DataflowProcess("b")
    link = DataflowChannel("c", 16)
    for process in (producer, consumer):
        self.add_process(process)
    self.add_channel(link)
    producer.connect_to_outgoing_channel(link)
    consumer.connect_to_incomming_channel(link)
def __init__(self, name, xml_file):
    """Initialize the graph from a process-network XML description.

    :param name: name of the graph
    :param xml_file: path to the XML file describing the process network
    :raises RuntimeError: if a process references a channel that was
        not declared in the file
    """
    super().__init__(name)
    log.info("Start parsing the PnGraph")
    # lazy %-style logging args: formatting only happens if DEBUG is on
    log.debug("Reading from file: %s", xml_file)

    tree = ET.parse(to_absolute_path(xml_file))
    xmlroot = tree.getroot()

    # first pass: declare all channels
    for channel in xmlroot.iter("PNchannel"):
        name = channel.find("Name").text
        token_size = int(channel.find("EntrySizeHint").text)
        log.debug("Found the channel %s with a token size of %s bytes",
                  name, token_size)
        self.add_channel(DataflowChannel(name, token_size))

    def _find_channel(channel_name):
        # Resolve a channel name to the DataflowChannel declared above.
        # ROBUSTNESS: the original used `assert channel is not None`,
        # which is silently stripped under `python -O`; raise instead
        # (consistent with the other parser's error handling).
        for c in self.channels():
            if c.name == channel_name:
                return c
        raise RuntimeError(
            f"Process network references undeclared channel {channel_name}")

    # second pass: declare processes and wire them to their channels
    for process in xmlroot.iter("PNprocess"):
        name = process.find("Name").text
        incoming = [c.text for c in process.find("PNin").iter("Expr")]
        outgoing = [c.text for c in process.find("PNout").iter("Expr")]

        log.debug("Found the process " + name)
        log.debug("It reads from the channels " + str(incoming) + " ...")
        log.debug("and writes to the channels " + str(outgoing))

        process = DataflowProcess(name)
        self.add_process(process)
        for cn in outgoing:
            process.connect_to_outgoing_channel(_find_channel(cn))
        for cn in incoming:
            process.connect_to_incomming_channel(_find_channel(cn))
    log.info("Done parsing the PnGraph")
def to_dataflow_graph(self):
    """Transfer the tgff graph into a dataflow graph.

    :returns: the equivalent dataflow graph representation
    :rtype: DataflowGraph
    """
    graph = DataflowGraph(self.identifier)

    # Create a process for each task node. Keep a name -> process map
    # so channel endpoints resolve in O(1) instead of scanning the
    # task list once per channel (the original was O(tasks*channels));
    # dict insertion order preserves the original add order.
    processes = {task_name: DataflowProcess(task_name)
                 for task_name in self.tasks}

    # Create a channel for each edge; properties holds
    # (src_task, dst_task, quantity_index, ...) — TODO confirm layout
    # against the tgff reader that fills self.channels.
    channels = []
    for c_name, properties in self.channels.items():
        token_size = int(self._quantities[0][int(properties[2])])
        channel = DataflowChannel(c_name, token_size)
        # .get preserves the original behavior of silently skipping
        # endpoints that name no known task
        src = processes.get(properties[0])
        if src is not None:
            src.connect_to_outgoing_channel(channel)
        dst = processes.get(properties[1])
        if dst is not None:
            dst.connect_to_incomming_channel(channel)
        channels.append(channel)

    # Add processes and channels to the still-empty graph
    for process in processes.values():
        graph.add_process(process)
    for channel in channels:
        graph.add_channel(channel)
    return graph
def graph():
    """Return a minimal graph fixture named "a" containing two
    unconnected processes "a" and "b".
    """
    dataflow_graph = DataflowGraph("a")
    for process_name in ("a", "b"):
        dataflow_graph.add_process(DataflowProcess(process_name))
    return dataflow_graph