def add_pipeline(self, q: Pipeline, init: ast.Node) -> None:
  """Add registered signals and logics for q initialized by init.

  Args:
      q (Pipeline): The pipelined variable.
      init (ast.Node): Value used to drive the first stage of the pipeline.
  """
  self.add_signals(q.signals)
  self.add_logics((
      ast.Always(
          sens_list=CLK_SENS_LIST,
          statement=ast.make_block(
              ast.NonblockingSubstitution(left=q[i + 1], right=q[i])
              for i in range(q.level)),
      ),
      ast.Assign(left=q[0], right=init),
  ))
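# A minimal sketch of the RTL this emits, assuming a pipeline of level=2 and
# a clock sensitivity list of posedge ap_clk (the actual signal names come
# from rtl.Pipeline and are only illustrative here):
#
#   assign x__q0 = <init>;
#   always @(posedge ap_clk) begin
#     x__q1 <= x__q0;
#     x__q2 <= x__q1;
#   end
#
# That is, q[0] is a wire continuously driven by init, and q[1]..q[level] are
# registers that shift the value forward by one stage per clock cycle.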
def _instantiate_children_tasks(
    self,
    task: Task,
    width_table: Dict[str, int],
) -> List[rtl.Pipeline]:
  is_done_signals: List[rtl.Pipeline] = []
  arg_table: Dict[str, rtl.Pipeline] = {}
  async_mmap_args: Dict[Instance.Arg, List[str]] = collections.OrderedDict()

  task.add_m_axi(width_table, self.tcl_files)

  for instance in task.instances:
    child_port_set = set(instance.task.module.ports)

    # add signal declarations
    for arg in instance.args:
      if arg.cat not in {
          Instance.Arg.Cat.ISTREAM,
          Instance.Arg.Cat.OSTREAM,
      }:
        width = 64  # 64-bit address
        if arg.cat == Instance.Arg.Cat.SCALAR:
          width = width_table.get(arg.name, 0)
          if width == 0:
            width = int(arg.name.split("'d")[0])
        q = rtl.Pipeline(
            name=instance.get_instance_arg(arg.name),
            level=self.register_level,
            width=width,
        )
        arg_table[arg.name] = q
        task.module.add_pipeline(q, init=ast.Identifier(arg.name))

      # arg.name is the upper-level name
      # arg.port is the lower-level name

      # check which ports are used for async_mmap
      if arg.cat == Instance.Arg.Cat.ASYNC_MMAP:
        for tag in rtl.ASYNC_MMAP_SUFFIXES:
          if set(x.portname for x in rtl.generate_async_mmap_ports(
              tag=tag,
              port=arg.port,
              arg=arg.name,
              instance=instance,
          )) & child_port_set:
            async_mmap_args.setdefault(arg, []).append(tag)

      # declare wires or forward async_mmap ports
      for tag in async_mmap_args.get(arg, []):
        if task.is_upper and instance.task.is_lower:
          task.module.add_signals(
              rtl.generate_async_mmap_signals(
                  tag=tag,
                  arg=arg.mmap_name,
                  data_width=width_table[arg.name],
              ))
        else:
          task.module.add_ports(
              rtl.generate_async_mmap_ioports(
                  tag=tag,
                  arg=arg.name,
                  data_width=width_table[arg.name],
              ))

    # add reset registers
    rst_q = rtl.Pipeline(instance.rst_n, level=self.register_level)
    task.module.add_pipeline(rst_q, init=rtl.RST_N)

    # add start registers
    start_q = rtl.Pipeline(
        f'{instance.start.name}_global',
        level=self.register_level,
    )
    task.module.add_pipeline(start_q, self.start_q[0])

    if instance.is_autorun:
      # autorun modules start when the global start signal is asserted
      task.module.add_logics([
          ast.Always(
              sens_list=rtl.CLK_SENS_LIST,
              statement=ast.make_block(
                  ast.make_if_with_block(
                      cond=ast.Unot(rst_q[-1]),
                      true=ast.NonblockingSubstitution(
                          left=instance.start,
                          right=rtl.FALSE,
                      ),
                      false=ast.make_if_with_block(
                          cond=start_q[-1],
                          true=ast.NonblockingSubstitution(
                              left=instance.start,
                              right=rtl.TRUE,
                          ),
                      ),
                  )),
          ),
      ])
    else:
      # set up state
      is_done_q = rtl.Pipeline(
          f'{instance.is_done.name}',
          level=self.register_level,
      )
      done_q = rtl.Pipeline(
          f'{instance.done.name}_global',
          level=self.register_level,
      )
      task.module.add_pipeline(is_done_q, instance.is_state(STATE10))
      task.module.add_pipeline(done_q, self.done_q[0])

      if_branch = instance.set_state(STATE00)
      else_branch = (
          ast.make_if_with_block(
              cond=instance.is_state(STATE00),
              true=ast.make_if_with_block(
                  cond=start_q[-1],
                  true=instance.set_state(STATE01),
              ),
          ),
          ast.make_if_with_block(
              cond=instance.is_state(STATE01),
              true=ast.make_if_with_block(
                  cond=instance.ready,
                  true=ast.make_if_with_block(
                      cond=instance.done,
                      true=instance.set_state(STATE10),
                      false=instance.set_state(STATE11),
                  )),
          ),
          ast.make_if_with_block(
              cond=instance.is_state(STATE11),
              true=ast.make_if_with_block(
                  cond=instance.done,
                  true=instance.set_state(STATE10),
              ),
          ),
          ast.make_if_with_block(
              cond=instance.is_state(STATE10),
              true=ast.make_if_with_block(
                  cond=done_q[-1],
                  true=instance.set_state(STATE00),
              ),
          ),
      )
      task.module.add_logics([
          ast.Always(
              sens_list=rtl.CLK_SENS_LIST,
              statement=ast.make_block(
                  ast.make_if_with_block(
                      cond=ast.Unot(rst_q[-1]),
                      true=if_branch,
                      false=else_branch,
                  )),
          ),
          ast.Assign(
              left=instance.start,
              right=instance.is_state(STATE01),
          ),
      ])

      is_done_signals.append(is_done_q)

    # insert handshake signals
    task.module.add_signals(instance.handshake_signals)

    # add task module instances
    portargs = list(rtl.generate_handshake_ports(instance, rst_q))
    for arg in instance.args:
      if arg.cat == Instance.Arg.Cat.SCALAR:
        portargs.append(
            ast.PortArg(portname=arg.port, argname=arg_table[arg.name][-1]))
      elif arg.cat == Instance.Arg.Cat.ISTREAM:
        portargs.extend(
            instance.task.module.generate_istream_ports(
                port=arg.port,
                arg=arg.name,
            ))
        portargs.extend(portarg for portarg in rtl.generate_peek_ports(
            rtl, port=arg.port, arg=arg.name)
                        if portarg.portname in child_port_set)
      elif arg.cat == Instance.Arg.Cat.OSTREAM:
        portargs.extend(
            instance.task.module.generate_ostream_ports(
                port=arg.port,
                arg=arg.name,
            ))
      elif arg.cat == Instance.Arg.Cat.MMAP:
        portargs.extend(
            rtl.generate_m_axi_ports(
                module=instance.task.module,
                port=arg.port,
                arg=arg.mmap_name,
                arg_reg=arg_table[arg.name][-1].name,
            ))
      elif arg.cat == Instance.Arg.Cat.ASYNC_MMAP:
        for tag in async_mmap_args[arg]:
          portargs.extend(
              rtl.generate_async_mmap_ports(
                  tag=tag,
                  port=arg.port,
                  arg=arg.mmap_name,
                  instance=instance,
              ))

    task.module.add_instance(
        module_name=util.get_module_name(instance.task.name),
        instance_name=instance.name,
        ports=portargs,
    )

  # instantiate async_mmap modules at the upper levels
  if task.is_upper:
    for arg in async_mmap_args:
      task.module.add_async_mmap_instance(
          name=arg.mmap_name,
          offset_name=arg_table[arg.name][-1],
          tags=async_mmap_args[arg],
          data_width=width_table[arg.name],
      )

  return is_done_signals
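# A minimal, self-contained reference model (a sketch only; not used by the
# code generation above) of the per-instance handshake FSM built in the
# non-autorun branch. The state names mirror the STATE* constants and the
# transition conditions mirror if_branch/else_branch; recall that
# instance.start is continuously assigned to is_state(STATE01) and is_done_q
# samples is_state(STATE10).
def _sketch_child_handshake_fsm(state: str, rst_n: bool, start: bool,
                                ready: bool, done: bool,
                                done_global: bool) -> str:
  """Return the next state of the sketched per-instance handshake FSM."""
  if not rst_n:
    return 'STATE00'
  if state == 'STATE00':
    return 'STATE01' if start else state  # wait for the pipelined start
  if state == 'STATE01':  # ap_start is asserted while in this state
    if ready:
      return 'STATE10' if done else 'STATE11'
    return state
  if state == 'STATE11':  # started; wait for the child's done
    return 'STATE10' if done else state
  if state == 'STATE10':  # hold is_done until the global done acknowledges
    return 'STATE00' if done_global else state
  return state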
def _instantiate_global_fsm(
    self,
    task: Task,
    is_done_signals: List[rtl.Pipeline],
) -> None:
  # global state machine

  def is_state(state: ast.IntConst) -> ast.Eq:
    return ast.Eq(left=rtl.STATE, right=state)

  def set_state(state: ast.IntConst) -> ast.NonblockingSubstitution:
    return ast.NonblockingSubstitution(left=rtl.STATE, right=state)

  countdown = ast.Identifier('countdown')
  countdown_width = (self.register_level - 1).bit_length()

  task.module.add_signals([
      ast.Reg(rtl.STATE.name, width=ast.make_width(2)),
      ast.Reg(countdown.name, width=ast.make_width(countdown_width)),
  ])

  state01_action = set_state(STATE10)
  if is_done_signals:
    state01_action = ast.make_if_with_block(
        cond=ast.make_operation(
            operator=ast.Land,
            nodes=(x[-1] for x in reversed(is_done_signals)),
        ),
        true=state01_action,
    )

  global_fsm = ast.make_case_with_block(
      comp=rtl.STATE,
      cases=[
          (
              STATE00,
              ast.make_if_with_block(
                  cond=self.start_q[-1],
                  true=set_state(STATE01),
              ),
          ),
          (
              STATE01,
              state01_action,
          ),
          (
              STATE10,
              [
                  set_state(STATE11 if self.register_level else STATE00),
                  ast.NonblockingSubstitution(
                      left=countdown,
                      right=ast.make_int(max(0, self.register_level - 1)),
                  ),
              ],
          ),
          (
              STATE11,
              ast.make_if_with_block(
                  cond=ast.Eq(
                      left=countdown,
                      right=ast.make_int(0, width=countdown_width),
                  ),
                  true=set_state(STATE00),
                  false=ast.NonblockingSubstitution(
                      left=countdown,
                      right=ast.Minus(
                          left=countdown,
                          right=ast.make_int(1, width=countdown_width),
                      ),
                  ),
              ),
          ),
      ],
  )

  task.module.add_logics([
      ast.Always(
          sens_list=rtl.CLK_SENS_LIST,
          statement=ast.make_block(
              ast.make_if_with_block(
                  cond=rtl.RST,
                  true=set_state(STATE00),
                  false=global_fsm,
              )),
      ),
      ast.Assign(left=rtl.IDLE, right=is_state(STATE00)),
      ast.Assign(left=rtl.DONE, right=self.done_q[-1]),
      ast.Assign(left=rtl.READY, right=self.done_q[0]),
  ])

  task.module.add_pipeline(self.start_q, init=rtl.START)
  task.module.add_pipeline(self.done_q, init=is_state(STATE10))
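# For reference, the global FSM above roughly behaves as follows (rtl.STATE is
# a 2-bit register):
#
#   STATE00: idle; rtl.IDLE is asserted; leave for STATE01 when the pipelined
#            start (self.start_q[-1]) arrives.
#   STATE01: running; leave for STATE10 once every child's is_done pipeline is
#            asserted, or immediately if there are no non-autorun children.
#   STATE10: done for one cycle; this drives done_q (and through it rtl.DONE
#            and rtl.READY); reload countdown with register_level - 1 and go
#            to STATE11, or straight back to STATE00 when register_level is 0.
#   STATE11: drain; decrement countdown each cycle and return to STATE00 when
#            it reaches 0, giving the done pulse time to travel through the
#            register pipelines before the next start.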
def _instantiate_fifos(self, task: Task) -> None:
  _logger.debug('  instantiating FIFOs in %s', task.name)

  # skip instantiating if the fifo is not declared in this task
  fifos = {name: fifo for name, fifo in task.fifos.items() if 'depth' in fifo}
  if not fifos:
    return

  col_width = max(
      max(len(name), len(util.get_instance_name(fifo['consumed_by'])),
          len(util.get_instance_name(fifo['produced_by'])))
      for name, fifo in fifos.items())

  for fifo_name, fifo in fifos.items():
    _logger.debug('    instantiating %s.%s', task.name, fifo_name)

    # add FIFO instances
    task.module.add_fifo_instance(
        name=fifo_name,
        width=self._get_fifo_width(task, fifo_name),
        depth=fifo['depth'],
    )

    # print debugging info
    debugging_blocks = []
    fmtargs = {
        'fifo_prefix': '\\033[97m',
        'fifo_suffix': '\\033[0m',
        'task_prefix': '\\033[90m',
        'task_suffix': '\\033[0m',
    }
    for suffixes, fmt, fifo_tag in zip(
        (rtl.ISTREAM_SUFFIXES, rtl.OSTREAM_SUFFIXES),
        ('DEBUG: R: {fifo_prefix}{fifo:>{width}}{fifo_suffix} -> '
         '{task_prefix}{task:<{width}}{task_suffix} %h',
         'DEBUG: W: {task_prefix}{task:>{width}}{task_suffix} -> '
         '{fifo_prefix}{fifo:<{width}}{fifo_suffix} %h'),
        ('consumed_by', 'produced_by')):
      display = ast.SingleStatement(statement=ast.SystemCall(
          syscall='display',
          args=(ast.StringConst(value=fmt.format(
              width=col_width,
              fifo=fifo_name,
              task=(util.get_instance_name(fifo[fifo_tag])),
              **fmtargs)),
                ast.Identifier(name=rtl.wire_name(fifo_name, suffixes[0])))))
      debugging_blocks.append(
          ast.Always(
              sens_list=rtl.CLK_SENS_LIST,
              statement=ast.make_block(
                  ast.IfStatement(
                      cond=ast.Eq(
                          left=ast.Identifier(
                              name=rtl.wire_name(fifo_name, suffixes[-1])),
                          right=rtl.TRUE,
                      ),
                      true_statement=ast.make_block(display),
                      false_statement=None))))
    task.module.add_logics(debugging_blocks)
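# For reference, each generated always block $display's one line per FIFO
# transaction whenever the wire named by the last suffix of the corresponding
# suffix list is high (presumably a valid/enable signal). In simulation the
# output would look roughly like the lines below, with illustrative names and
# data; the escape sequences in fmtargs only add color, and %h prints the data
# word on the FIFO interface in hex:
#
#   DEBUG: R: data_fifo  -> consumer_0 0000002a
#   DEBUG: W: producer_0 -> data_fifo  0000002a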