Example #1
    def __init__(self, code_file_name, topmodule='', config_file=None):
        # TODO: this correspondence is temporary.
        BindVisitor._createAlwaysinfo = _createAlwaysinfo.__get__(BindVisitor)
        BindVisitor._is_reset = _is_reset.__get__(BindVisitor)
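        # (the two assignments above monkey-patch pyverilog's BindVisitor with the
        # locally defined helpers, presumably so the dataflow pass run by
        # get_dataflow() below picks them up)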
        #
        (topmodule, terms, binddict, resolved_terms, resolved_binddict,
         constlist, fsm_vars) = self.get_dataflow(code_file_name)

        VerilogControlflowAnalyzer.__init__(self, topmodule, terms, binddict,
                                            resolved_terms, resolved_binddict,
                                            constlist, fsm_vars)
        self.binds = BindLibrary(binddict, terms)
def test():
    filelist = [codedir + 'partselect_assign.v']
    topmodule = 'TOP'
    noreorder = False
    nobind = False
    include = None
    define = None

    analyzer = VerilogDataflowAnalyzer(filelist,
                                       topmodule,
                                       noreorder=noreorder,
                                       nobind=nobind,
                                       preprocess_include=include,
                                       preprocess_define=define)
    analyzer.generate()

    directives = analyzer.get_directives()
    instances = analyzer.getInstances()
    terms = analyzer.getTerms()
    binddict = analyzer.getBinddict()

    optimizer = VerilogDataflowOptimizer(terms, binddict)
    optimizer.resolveConstant()

    c_analyzer = VerilogControlflowAnalyzer(
        topmodule,
        terms,
        binddict,
        resolved_terms=optimizer.getResolvedTerms(),
        resolved_binddict=optimizer.getResolvedBinddict(),
        constlist=optimizer.getConstlist())

    output = []
    for tk in sorted(c_analyzer.resolved_terms.keys(), key=lambda x: str(x)):
        tree = c_analyzer.makeTree(tk)
        output.append(str(tk) + ': ' + tree.tocode())

    rslt = '\n'.join(output) + '\n'

    print(rslt)

    assert (expected == rslt)
Example #3
def test():
    filelist = [codedir + 'partselect_assign.v']
    topmodule = 'TOP'
    noreorder = False
    nobind = False
    include = None
    define = None

    analyzer = VerilogDataflowAnalyzer(filelist, topmodule,
                                       noreorder=noreorder,
                                       nobind=nobind,
                                       preprocess_include=include,
                                       preprocess_define=define)
    analyzer.generate()

    directives = analyzer.get_directives()
    instances = analyzer.getInstances()
    terms = analyzer.getTerms()
    binddict = analyzer.getBinddict()

    optimizer = VerilogDataflowOptimizer(terms, binddict)
    optimizer.resolveConstant()

    c_analyzer = VerilogControlflowAnalyzer(topmodule, terms,
                                            binddict,
                                            resolved_terms=optimizer.getResolvedTerms(),
                                            resolved_binddict=optimizer.getResolvedBinddict(),
                                            constlist=optimizer.getConstlist()
                                            )

    output = []
    for tk in sorted(c_analyzer.resolved_terms.keys(), key=lambda x:str(x)):
        tree = c_analyzer.makeTree(tk)
        output.append(str(tk) + ': ' + tree.tocode())

    rslt = '\n'.join(output) + '\n'

    print(rslt)
    
    assert(expected == rslt)
Example #4
    def _create_graphgen_obj(self, verilog_file, top_module, generate_cfg,
                             generate_ast):
        dataflow_analyzer = PyDataflowAnalyzer(verilog_file, top_module)
        dataflow_analyzer.generate()
        binddict = dataflow_analyzer.getBinddict()
        terms = dataflow_analyzer.getTerms()

        dataflow_optimizer = PyDataflowOptimizer(terms, binddict)
        dataflow_optimizer.resolveConstant()
        resolved_terms = dataflow_optimizer.getResolvedTerms()
        resolved_binddict = dataflow_optimizer.getResolvedBinddict()
        constlist = dataflow_optimizer.getConstlist()

        if generate_cfg:
            fsm_vars = tuple(['fsm', 'state', 'count', 'cnt', 'step', 'mode'])
            self.cfg_graph_generator = PyControlflowAnalyzer(
                "top", terms, binddict, resolved_terms, resolved_binddict,
                constlist, fsm_vars)
        elif generate_ast:
            # when generating the AST, these lists determine which substructure
            # (dictionary/array) to generate for each node type before the
            # JSON-like structure is converted into actual JSON
            self.DICTIONARY_GEN = [
                "Source", "Description", "Ioport", "Decl", "Lvalue"
            ]
            self.ARRAY_GEN = [
                "ModuleDef", "Paramlist", "Portlist", "Input", "Width", "Reg",
                "Wire", "Rvalue", "ParseSelect", "Uplus", "Uminus", "Ulnot",
                "Unot", "Uand", "Unand", "Uor", "Unor", "Uxnor", "Power",
                "Times", "Divide", "Mod", "Plus", "Minus", "Sll", "Srl", "Sla",
                "Sra", "LessThan", "GreaterThan", "LessEq", "GreaterEq", "Eq",
                "Eql", "NotEq", "Eql", "NotEql", "And", "Xor", "Xnor", "Or",
                "Land", "Lor", "Cond", "Assign", "Always", "AlwaysFF",
                "AlwaysComb", "AlwaysLatch", "SensList", "Sens",
                "Substitution", "BlockingSubstitution",
                "NonblockingSubstitution", "IfStatement", "Block", "Initial",
                "Plus", "Output", "Partselect"
            ]
            self.CONST_DICTIONARY_GEN = [
                "IntConst", "FloatConst", "StringConst", "Identifier"
            ]

            self.ast, _ = parse([verilog_file])

        else:  #generate dfg
            self.dfg_graph_generator = PyGraphGenerator(
                top_module, terms, binddict, resolved_terms, resolved_binddict,
                constlist, f'{self.output_directory}seperate_modules.pdf')
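
When the AST branch above is taken, the tree stored in self.ast is later walked into a nested dictionary and written to disk by the _generate_ast_dict and export_ast methods shown in Example #10. A minimal sketch of that follow-up step, assuming vp is a hypothetical VerilogParser instance constructed with generate_ast=True:

ast_dict = vp._generate_ast_dict(vp.ast)  # recursively convert the pyverilog AST
vp.export_ast(ast_dict)                   # writes <output_directory>/ast.json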
Example #5
def main():
    INFO = "Verilog module signal/module dataflow analyzer"
    VERSION = pyverilog.utils.version.VERSION
    USAGE = "Usage: python example_dataflow_analyzer.py -t TOPMODULE file ..."

    def showVersion():
        print(INFO)
        print(VERSION)
        print(USAGE)
        sys.exit()

    optparser = OptionParser()
    optparser.add_option("-v",
                         "--version",
                         action="store_true",
                         dest="showversion",
                         default=False,
                         help="Show the version")
    optparser.add_option("-I",
                         "--include",
                         dest="include",
                         action="append",
                         default=[],
                         help="Include path")
    optparser.add_option("-D",
                         dest="define",
                         action="append",
                         default=[],
                         help="Macro Definition")
    optparser.add_option("-t",
                         "--top",
                         dest="topmodule",
                         default="TOP",
                         help="Top module, Default=TOP")
    optparser.add_option("--nobind",
                         action="store_true",
                         dest="nobind",
                         default=False,
                         help="No binding traversal, Default=False")
    optparser.add_option(
        "--noreorder",
        action="store_true",
        dest="noreorder",
        default=False,
        help="No reordering of binding dataflow, Default=False")
    (options, args) = optparser.parse_args()

    filelist = args
    if options.showversion:
        showVersion()

    for f in filelist:
        if not os.path.exists(f): raise IOError("file not found: " + f)

    if len(filelist) == 0:
        showVersion()

    analyzer = VerilogDataflowAnalyzer(filelist,
                                       options.topmodule,
                                       noreorder=options.noreorder,
                                       nobind=options.nobind,
                                       preprocess_include=options.include,
                                       preprocess_define=options.define)
    analyzer.generate()

    CIRCUIT = options.topmodule

    directives = analyzer.get_directives()
    terms = analyzer.getTerms()
    binddict = analyzer.getBinddict()

    optimizer = VerilogDataflowOptimizer(terms, binddict)

    optimizer.resolveConstant()
    resolved_terms = optimizer.getResolvedTerms()
    resolved_binddict = optimizer.getResolvedBinddict()
    constlist = optimizer.getConstlist()

    top = options.topmodule
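    # signal names the controlflow analyzer treats as FSM state-register
    # candidates; the CLI examples below extend this list via --search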
    fsm_vars = tuple(['state'])
    ##    fsm_vars = tuple(['dpll_state'])
    ##    fsm_vars = tuple(['s1','s0'])
    canalyzer = VerilogControlflowAnalyzer(options.topmodule, terms, binddict,
                                           resolved_terms, resolved_binddict,
                                           constlist, fsm_vars)
    fsms = canalyzer.getFiniteStateMachines()
    print("")

    name = 'test'

    if CIRCUIT == "add_serial":
        state_var = CDFG.newScope('add_serial', 'state')
        clk = CDFG.newScope('add_serial', 'clk')
        rst = CDFG.newScope('add_serial', 'rst')
    elif CIRCUIT == "bbara":
        state_var = CDFG.newScope('top', 'state')
        clk = CDFG.newScope('top', 'clk')
        rst = None
    elif CIRCUIT == "dma_rrarb":
        state_var = CDFG.newScope('dma_rrarb', 'state')
        clk = CDFG.newScope('dma_rrarb', 'HCLK')
        rst = CDFG.newScope('dma_rrarb', 'HRSTn')
    elif CIRCUIT == "mc_timing":
        state_var = CDFG.newScope('mc_timing', 'state')
        clk = CDFG.newScope('mc_timing', 'clk')
        rst = CDFG.newScope('mc_timing', 'rst')
    elif CIRCUIT == "correlator":
        state_var = CDFG.newScope('correlator', 'state')
        clk = CDFG.newScope('correlator', 'clk')
        rst = CDFG.newScope('correlator', 'rst_n')

    fsm_obj = fsms[state_var]

    if CIRCUIT == "add_serial":
        state_list = [
            CDFG.newScope('add_serial', 'IDLE'),
            CDFG.newScope('add_serial', 'ADD'),
            CDFG.newScope('add_serial', 'DONE')
        ]
    elif CIRCUIT == "bbara":
        state_list = [
            CDFG.newScope('top', 'st0'),
            CDFG.newScope('top', 'st1'),
            CDFG.newScope('top', 'st2'),
            CDFG.newScope('top', 'st3'),
            CDFG.newScope('top', 'st4'),
            CDFG.newScope('top', 'st5'),
            CDFG.newScope('top', 'st6'),
            CDFG.newScope('top', 'st7'),
            CDFG.newScope('top', 'st8'),
            CDFG.newScope('top', 'st9')
        ]
    elif CIRCUIT == "dma_rrarb":
        state_list = [
            CDFG.newScope('dma_rrarb', 'grant0'),
            CDFG.newScope('dma_rrarb', 'grant1'),
            CDFG.newScope('dma_rrarb', 'grant2'),
            CDFG.newScope('dma_rrarb', 'grant3'),
            CDFG.newScope('dma_rrarb', 'grant4'),
            CDFG.newScope('dma_rrarb', 'grant5'),
            CDFG.newScope('dma_rrarb', 'grant6'),
            CDFG.newScope('dma_rrarb', 'grant7'),
        ]
    elif CIRCUIT == "mc_timing":
        state_list = [
            CDFG.newScope('mc_timing', 'POR'),
            CDFG.newScope('mc_timing', 'IDLE'),
            CDFG.newScope('mc_timing', 'IDLE_T'),
            CDFG.newScope('mc_timing', 'IDLE_T2'),
            CDFG.newScope('mc_timing', 'PRECHARGE'),
            CDFG.newScope('mc_timing', 'PRECHARGE_W'),
            CDFG.newScope('mc_timing', 'ACTIVATE'),
            CDFG.newScope('mc_timing', 'ACTIVATE_W'),
            CDFG.newScope('mc_timing', 'SD_RD_WR'),
            CDFG.newScope('mc_timing', 'SD_RD'),
            CDFG.newScope('mc_timing', 'SD_RD_W'),
            CDFG.newScope('mc_timing', 'SD_RD_LOOP'),
            CDFG.newScope('mc_timing', 'SD_RD_W2'),
            CDFG.newScope('mc_timing', 'SD_WR'),
            CDFG.newScope('mc_timing', 'SD_WR_W'),
            CDFG.newScope('mc_timing', 'BT'),
            CDFG.newScope('mc_timing', 'BT_W'),
            CDFG.newScope('mc_timing', 'REFR'),
            CDFG.newScope('mc_timing', 'LMR0'),
            CDFG.newScope('mc_timing', 'LMR1'),
            CDFG.newScope('mc_timing', 'LMR2'),
            CDFG.newScope('mc_timing', 'INIT0'),
            CDFG.newScope('mc_timing', 'INIT'),
            CDFG.newScope('mc_timing', 'INIT_W'),
            CDFG.newScope('mc_timing', 'INIT_REFR1'),
            CDFG.newScope('mc_timing', 'INIT_REFR1_W'),
            CDFG.newScope('mc_timing', 'INIT_LMR'),
            CDFG.newScope('mc_timing', 'SUSP1'),
            CDFG.newScope('mc_timing', 'SUSP2'),
            CDFG.newScope('mc_timing', 'SUSP3'),
            CDFG.newScope('mc_timing', 'SUSP4'),
            CDFG.newScope('mc_timing', 'RESUME1'),
            CDFG.newScope('mc_timing', 'RESUME2'),
            CDFG.newScope('mc_timing', 'BG0'),
            CDFG.newScope('mc_timing', 'BG1'),
            CDFG.newScope('mc_timing', 'BG2'),
            CDFG.newScope('mc_timing', 'ACS_RD'),
            CDFG.newScope('mc_timing', 'ACS_RD1'),
            CDFG.newScope('mc_timing', 'ACS_RD2A'),
            CDFG.newScope('mc_timing', 'ACS_RD2'),
            CDFG.newScope('mc_timing', 'ACS_RD3'),
            CDFG.newScope('mc_timing', 'ACS_RD_8_1'),
            CDFG.newScope('mc_timing', 'ACS_RD_8_2'),
            CDFG.newScope('mc_timing', 'ACS_RD_8_3'),
            CDFG.newScope('mc_timing', 'ACS_RD_8_4'),
            CDFG.newScope('mc_timing', 'ACS_RD_8_5'),
            CDFG.newScope('mc_timing', 'ACS_RD_8_6'),
            CDFG.newScope('mc_timing', 'ACS_WR'),
            CDFG.newScope('mc_timing', 'ACS_WR1'),
            CDFG.newScope('mc_timing', 'ACS_WR2'),
            CDFG.newScope('mc_timing', 'ACS_WR3'),
            CDFG.newScope('mc_timing', 'ACS_WR4'),
            CDFG.newScope('mc_timing', 'SRAM_RD'),
            CDFG.newScope('mc_timing', 'SRAM_RD0'),
            CDFG.newScope('mc_timing', 'SRAM_RD1'),
            CDFG.newScope('mc_timing', 'SRAM_RD2'),
            CDFG.newScope('mc_timing', 'SRAM_RD3'),
            CDFG.newScope('mc_timing', 'SRAM_RD4'),
            CDFG.newScope('mc_timing', 'SRAM_WR'),
            CDFG.newScope('mc_timing', 'SRAM_WR0'),
            CDFG.newScope('mc_timing', 'SCS_RD'),
            CDFG.newScope('mc_timing', 'SCS_RD1'),
            CDFG.newScope('mc_timing', 'SCS_RD2'),
            CDFG.newScope('mc_timing', 'SCS_WR'),
            CDFG.newScope('mc_timing', 'SCS_WR1'),
            CDFG.newScope('mc_timing', 'SCS_ERR')
        ]
    elif CIRCUIT == "correlator":
        state_list = [
            CDFG.newScope('correlator', 'WAITING'),
            CDFG.newScope('correlator', 'DECIDING'),
            CDFG.newScope('correlator', 'OFFSETTING'),
            CDFG.newScope('correlator', 'RUNNING'),
            CDFG.newScope('correlator', 'IDLE'),
            CDFG.newScope('correlator', 'LOCKED'),
            CDFG.newScope('correlator', 'READ_RANK'),
            CDFG.newScope('correlator', 'default')
        ]

    cdfg = CDFG.ControlDataFlowGraph(name, fsm_obj, state_var, clk, rst,
                                     state_list, constlist,
                                     resolved_terms, resolved_binddict)
    cdfg.generate()

    # fsm
    cdfg.fsm_obj.view()
    print("")

    PIs = cdfg.getPIs()
    # exempt clk
    PIs.remove(cdfg.clk)
    # and reset from scrambling
    if cdfg.rst:
        PIs.remove(cdfg.rst)
    total_bits = 0
    for PI in PIs:
        total_bits += cdfg.getNumBitsOfVar(PI)
    print("number of scrambled bits: " + str(total_bits / 2))
    cdfg.scramblePIBits(total_bits / 2)

    num_ex_states = 1
    for ex_state_i in range(num_ex_states):
        src = cdfg.state_list[ex_state_i]
        dst = cdfg.state_list[ex_state_i + 1]
        delay = CDFG.newScope(top, 'delay' + str(ex_state_i))
        ##        delay = CDFG.newScope('add_serial', 'delay'+str(ex_state_i))
        cdfg.insCopyState(src, dst, delay)
##        cdfg.insDelayState(src, dst, delay)

    num_bits = 6
    # nonZeroStates test
    (all_trans_freqs, num_PIs) = cdfg.getTransFreqs()
    for row in all_trans_freqs:
        print(row)
    print("")
    for i in range(len(cdfg.state_list)):
        trans_freqs = cdfg.nonZeroStates(all_trans_freqs[i],
                                         cdfg.state_list[i], num_bits)
        all_trans_freqs[i] = trans_freqs
##    (all_trans_freqs, num_PIs) = cdfg.getTransFreqs()
    for row in all_trans_freqs:
        print(row)
    print("")

    ##    cdfg.toCode(options.topmodule, options.topmodule + '_codegen.v')

    ##    cdfg.toCode('add_serial', 'add_serial_uniform.v')

    ##    cdfg.toCode('add_serial', 'add_serial_scramb.v')

    ##    cdfg.updateBinddict()
    ##    cdfg.toCode('dma_rrarb', 'dma_rrarb_uniform_s0.v')

    ##    cdfg.updateBinddict()
    ##    cdfg.toCode('dma_rrarb', 'dma_rrarb_scramb_s0_b{}.v'
    ##                .format(num_bits))
    ##    cdfg.toCode('dma_rrarb', 'dma_rrarb_scramb_delay_b{}.v'.format(num_bits))

    print("\n")
    ##    print("num_edits = {}".format(num_edits))

    ##    # original binds
    ##    for var, bind in cdfg.binddict.items():
    ##        print(var)
    ##        print(bind[0].tostr())
    ####        print(bind[0].isCombination())
    ####        print(bind[0].alwaysinfo)
    ####        print(bind[0].parameterinfo)
    ##    print("")
    ##
    # binds by state
    for state, binddict in cdfg.state_binddict.items():
        print(state)
        for var, binds in binddict.items():
            print(var)
            for bind in binds:
                print(bind.tostr())
##                print(bind.dest)
##                print(type(bind.dest))
##                print(bind.msb)
##                print(type(bind.msb))
##                print(bind.lsb)
##                print(type(bind.lsb))
##                print(bind.ptr)
##                print(type(bind.ptr))
##                print(bind.alwaysinfo)
##                print(type(bind.alwaysinfo))
##                print(bind.parameterinfo)
##                print(type(bind.parameterinfo))
##                print("")
        print("")
    print("")
Example #6
 def __init__(self, topmodule, terms, binddict, 
              resolved_terms, resolved_binddict, constlist):
     VerilogControlflowAnalyzer.__init__(self, topmodule, terms, binddict, 
                                         resolved_terms, resolved_binddict, constlist)
     self.fsm_loops, self.fsms = self.getLoops()
Example #7
 def __init__(self, topmodule, terms, binddict, resolved_terms,
              resolved_binddict, constlist):
     VerilogControlflowAnalyzer.__init__(self, topmodule, terms, binddict,
                                         resolved_terms, resolved_binddict,
                                         constlist)
def main():
    INFO = "Control-flow analyzer for Verilog definitions"
    VERSION = pyverilog.utils.version.VERSION
    USAGE = "Usage: python example_controlflow_analyzer.py -t TOPMODULE file ..."

    def showVersion():
        print(INFO)
        print(VERSION)
        print(USAGE)
        sys.exit()

    optparser = OptionParser()
    optparser.add_option("-v",
                         "--version",
                         action="store_true",
                         dest="showversion",
                         default=False,
                         help="Show the version")
    optparser.add_option("-t",
                         "--top",
                         dest="topmodule",
                         default="TOP",
                         help="Top module, Default=TOP")
    optparser.add_option("-s",
                         "--search",
                         dest="searchtarget",
                         action="append",
                         default=[],
                         help="Search Target Signal")
    optparser.add_option("--graphformat",
                         dest="graphformat",
                         default="png",
                         help="Graph file format, Default=png")
    optparser.add_option("--nograph",
                         action="store_true",
                         dest="nograph",
                         default=False,
                         help="Non graph generation")
    optparser.add_option("--nolabel",
                         action="store_true",
                         dest="nolabel",
                         default=False,
                         help="State Machine Graph without Labels")
    optparser.add_option("-I",
                         "--include",
                         dest="include",
                         action="append",
                         default=[],
                         help="Include path")
    optparser.add_option("-D",
                         dest="define",
                         action="append",
                         default=[],
                         help="Macro Definition")
    (options, args) = optparser.parse_args()

    filelist = args
    if options.showversion:
        showVersion()

    for f in filelist:
        if not os.path.exists(f): raise IOError("file not found: " + f)

    if len(filelist) == 0:
        showVersion()

    analyzer = VerilogDataflowAnalyzer(filelist,
                                       options.topmodule,
                                       preprocess_include=options.include,
                                       preprocess_define=options.define)
    analyzer.generate()

    directives = analyzer.get_directives()
    terms = analyzer.getTerms()
    binddict = analyzer.getBinddict()

    optimizer = VerilogDataflowOptimizer(terms, binddict)

    optimizer.resolveConstant()
    resolved_terms = optimizer.getResolvedTerms()
    resolved_binddict = optimizer.getResolvedBinddict()
    constlist = optimizer.getConstlist()
    fsm_vars = tuple(['fsm', 'state', 'count', 'cnt', 'step', 'mode'] +
                     options.searchtarget)

    canalyzer = VerilogControlflowAnalyzer(options.topmodule, terms, binddict,
                                           resolved_terms, resolved_binddict,
                                           constlist, fsm_vars)
    fsms = canalyzer.getFiniteStateMachines()

    for signame, fsm in fsms.items():
        print('# SIGNAL NAME: %s' % signame)
        print('# DELAY CNT: %d' % fsm.delaycnt)
        fsm.view()
        if not options.nograph:
            fsm.tograph(filename=util.toFlatname(signame) + '.' +
                        options.graphformat,
                        nolabel=options.nolabel)
        loops = fsm.get_loop()
        print('Loop')
        for loop in loops:
            print(loop)
Example #9
 def __init__(self, topmodule, terms, binddict, 
              resolved_terms, resolved_binddict, constlist):
     VerilogControlflowAnalyzer.__init__(self, topmodule, terms, binddict, 
                                         resolved_terms, resolved_binddict, constlist)
Example #10
class VerilogParser:
    '''
        the only class that interfaces with pyverilog.
        https://github.com/Microsoft/vscode-tips-and-tricks#intellisense
    '''

    #holds a GRAPH_GENERATOR INSTANCE
    def __init__(self,
                 verilog_file,
                 output_directory,
                 top_module,
                 generate_cfg=False,
                 generate_ast=False):
        print("Verilog file: ", verilog_file)
        print("Output directory: ", output_directory)
        if not os.path.exists(verilog_file):
            raise IOError("File Not Found:  ")
        self.output_directory = output_directory

        #Options
        self.dfg_graph_generator = None
        self.cfg_graph_generator = None
        self.ast = None
        self.ARRAY_GEN = None
        self.CONST_DICTIONARY_GEN = None
        self.DICTIONARY_GEN = None

        self._create_graphgen_obj(verilog_file, top_module, generate_cfg,
                                  generate_ast)

    #helper fcn to __init__, create a graph object used to generate json
    def _create_graphgen_obj(self, verilog_file, top_module, generate_cfg,
                             generate_ast):
        dataflow_analyzer = PyDataflowAnalyzer(verilog_file, top_module)
        dataflow_analyzer.generate()
        binddict = dataflow_analyzer.getBinddict()
        terms = dataflow_analyzer.getTerms()

        dataflow_optimizer = PyDataflowOptimizer(terms, binddict)
        dataflow_optimizer.resolveConstant()
        resolved_terms = dataflow_optimizer.getResolvedTerms()
        resolved_binddict = dataflow_optimizer.getResolvedBinddict()
        constlist = dataflow_optimizer.getConstlist()

        if generate_cfg:
            fsm_vars = tuple(['fsm', 'state', 'count', 'cnt', 'step', 'mode'])
            self.cfg_graph_generator = PyControlflowAnalyzer(
                "top", terms, binddict, resolved_terms, resolved_binddict,
                constlist, fsm_vars)
        elif generate_ast:
            # when generating the AST, these lists determine which substructure
            # (dictionary/array) to generate for each node type before the
            # JSON-like structure is converted into actual JSON
            self.DICTIONARY_GEN = [
                "Source", "Description", "Ioport", "Decl", "Lvalue"
            ]
            self.ARRAY_GEN = [
                "ModuleDef", "Paramlist", "Portlist", "Input", "Width", "Reg",
                "Wire", "Rvalue", "ParseSelect", "Uplus", "Uminus", "Ulnot",
                "Unot", "Uand", "Unand", "Uor", "Unor", "Uxnor", "Power",
                "Times", "Divide", "Mod", "Plus", "Minus", "Sll", "Srl", "Sla",
                "Sra", "LessThan", "GreaterThan", "LessEq", "GreaterEq", "Eq",
                "Eql", "NotEq", "Eql", "NotEql", "And", "Xor", "Xnor", "Or",
                "Land", "Lor", "Cond", "Assign", "Always", "AlwaysFF",
                "AlwaysComb", "AlwaysLatch", "SensList", "Sens",
                "Substitution", "BlockingSubstitution",
                "NonblockingSubstitution", "IfStatement", "Block", "Initial",
                "Plus", "Output", "Partselect"
            ]
            self.CONST_DICTIONARY_GEN = [
                "IntConst", "FloatConst", "StringConst", "Identifier"
            ]

            self.ast, _ = parse([verilog_file])

        else:  #generate dfg
            self.dfg_graph_generator = PyGraphGenerator(
                top_module, terms, binddict, resolved_terms, resolved_binddict,
                constlist, f'{self.output_directory}seperate_modules.pdf')

    #generates nested dictionary for conversion to json (AST helper)
    def _generate_ast_dict(self, ast_node):
        class_name = ast_node.__class__.__name__
        structure = {}
        #based on the token class_name, determine the value type of class_name
        if class_name in self.ARRAY_GEN:
            structure[class_name] = [
                getattr(ast_node, n) for n in ast_node.attr_names
            ] if ast_node.attr_names else []
            for c in ast_node.children():
                structure[class_name].append(self._generate_ast_dict(c))
        elif class_name in self.DICTIONARY_GEN:
            structure[class_name] = self._generate_ast_dict(
                ast_node.children()[0])
        elif class_name in self.CONST_DICTIONARY_GEN:
            structure = {}
            structure[class_name] = getattr(ast_node, ast_node.attr_names[0])
            return structure
        else:
            raise Exception(
                f"Error. Token name {class_name} is invalid or has not yet been supported"
            )
        return structure

    #generates abstract syntax tree for conversion (AST helper)
    def export_ast(self, nested_dictionary):
        print(f'Saving abstract syntax tree as json')
        #print(f'{self.output_directory}/ast.json')
        with open(f'{self.output_directory}/ast.json', 'w') as f:
            f.write(dumps(nested_dictionary, indent=2))
        print('List of root nodes saved in ast.json.\n')
        f.close()

    #generates dot file (CFG helper)
    def generate_dot_file(self, graph_format='png', no_label=False):
        assert self.cfg_graph_generator != None, "Error: Generate dot file only if you are generating CFG's "

        fsms = self.cfg_graph_generator.getFiniteStateMachines()

        print("VIEWING FSM's")
        print("LENGTH OF FSM: ", len(fsms))
        for signame, fsm in fsms.items():
            print('# SIGNAL NAME: %s' % signame)
            print('# DELAY CNT: %d' % fsm.delaycnt)
            fsm.view()
            fsm.tograph(filename=util.toFlatname(signame) + '.' + graph_format,
                        nolabel=no_label)

    #generate CFG from dot file
    def export_cfg_graph(self, output='graph'):
        def default_val():
            return []

        graph = pydot.graph_from_dot_file("./file.dot")[0]

        nodes = [node.get_name() for node in graph.get_node_list()]
        root_nodes = [
            node.get_name() for node in graph.get_node_list()
            if node.obj_dict['parent_graph'] == None
        ]
        edges = [[
            edge.get_source(),
            edge.get_destination(), edge.obj_dict['attributes']['label']
        ] for edge in graph.get_edge_list()]
        topModule = defaultdict(
            default_val)  #key: node, val: list of outgoing edges

        for edge in edges:
            if edge[2] == 'None':
                topModule[edge[0]].append("")
            else:
                topModule[edge[0]].append(edge[2])

        if (output == 'roots'):
            print(f'Saving all {len(root_nodes)} nodes as json')
            with open(f'{self.output_directory}root_nodes.json', 'w') as f:
                f.write(dumps(root_nodes, indent=4))
            print('List of root nodes saved in root_nodes.json.\n')
            f.close()

        elif (output == 'nodes'):
            print(f'Saving all {len(nodes)} nodes as json')
            with open(f'{self.output_directory}all_nodes.json', 'w') as f:
                f.write(dumps(nodes, indent=4))
            print('List of nodes saved in all_nodes.json.\n')
            f.close()

        elif (output == 'edges'):
            print(f'Saving all {len(edges)} edges as json')
            with open(f'{self.output_directory}all_edges.json', 'w') as f:
                f.write(dumps(edges, indent=4))
            print('List of edges saved in all_edges.json.\n')
            f.close()

        elif (output == 'graph'):
            print(f'Saving cfg graph dictionary as a json')
            with open(f'{self.output_directory}topModule.json', 'w') as f:
                f.write(dumps(topModule, indent=4))
            print('Saving cfg graph dictionary as a json.\n')
            f.close()
            print('The graph is saved as topModule.json.\n')

    # This function returns True, if the child is a child of checkParent
    def _isChild(self, graph, checkParent, child):
        # This function recursively returns a list of all the parents of a node up to the root
        def getAllParents(node):
            # if node has no parents
            if graph.in_degree(node) == 0:
                return []
            # if node has no grandparents
            elif sum([
                    graph.in_degree(parent)
                    for parent in graph.predecessors(node)
            ]) == 0:
                return graph.predecessors(node)
            # recursive call, node has unknown generations of parents
            else:
                retlist = list()
                for parent in graph.predecessors(node):
                    x = getAllParents(parent)
                    x.append(parent)
                    retlist += x
                return retlist

        allParents = getAllParents(child)
        return checkParent in allParents

    #generate separate graph separate modules
    def graph_separate_modules(self, draw_graph=False):
        # binddict with string keys
        signals = [str(bind) for bind in self.dfg_graph_generator.binddict]

        print(f'{len(signals)} signals to generate separate subgraphs...')
        for num, signal in enumerate(sorted(signals, key=str.casefold),
                                     start=1):
            self.dfg_graph_generator.generate(signal, walk=False)
            print(f'\rProgress : {num} / {len(signals)}', end='', flush=True)
        print('\nThe subgraphs are generated.\n')

        if draw_graph:
            print(
                f'Saving subgraphs with {len(self.dfg_graph_generator.graph.nodes())} nodes as a pdf...'
            )
            self.dfg_graph_generator.draw()
            print('The subgraphs are saved.\n')

    #merge the graphs
    def merge_graphs(self, draw_graph=False):
        label_to_node = dict()
        for node in self.dfg_graph_generator.graph.nodes():
            if self.dfg_graph_generator.graph.in_degree(node) == 0:
                label = node.attr[
                    'label'] if node.attr['label'] != '\\N' else str(node)
                label_to_node[label] = node

        deleted = 0
        print('Merging subgraphs... ')
        for num, node in enumerate(self.dfg_graph_generator.graph.nodes(),
                                   start=1):
            label = node.attr['label'] if node.attr['label'] != '\\N' else str(
                node)
            if '_' in label and label.replace('_', '.') in label_to_node:
                parents = self.dfg_graph_generator.graph.predecessors(node)
                self.dfg_graph_generator.graph.delete_node(node)
                deleted += 1
                for parent in parents:
                    if not self._isChild(
                            self.dfg_graph_generator.graph,
                            label_to_node[label.replace('_', '.')], parent):
                        self.dfg_graph_generator.graph.add_edge(
                            parent, label_to_node[label.replace('_', '.')])
            print(
                f'\rProgress : {num - deleted} / {len(self.dfg_graph_generator.graph.nodes())}',
                end='',
                flush=True)
        print('\nThe signals subgraphs are merged.\n')

        if draw_graph:
            print(
                f'Saving merged graph with {len(self.dfg_graph_generator.graph.nodes())} nodes as a pdf...'
            )
            self.dfg_graph_generator.draw(
                f'{self.output_directory}merged_graph.pdf')
            print('The graphs are saved.\n')

    def get_root_nodes(self):
        if self.dfg_graph_generator:
            return [
                node for node in self.dfg_graph_generator.graph.nodes()
                if self.dfg_graph_generator.graph.in_degree(node) == 0
            ]

    def get_nodes(self):
        if self.dfg_graph_generator:
            return self.dfg_graph_generator.graph.nodes()

    def get_edges(self):
        if self.dfg_graph_generator:
            all_edges = list()
            for edge in self.dfg_graph_generator.graph.edges():
                all_edges.append((edge[0], edge[1], edge.attr['label']))
            return all_edges

    def get_edge_list(self):
        if self.dfg_graph_generator:
            jsondict = {}
            for node in self.dfg_graph_generator.graph.nodes():
                jsondict[str(node)] = list()
                for child in self.dfg_graph_generator.graph.successors(node):
                    edgeLabel = self.dfg_graph_generator.graph.get_edge(
                        node, child).attr['label']
                    jsondict[str(node)].append((edgeLabel, str(child)))
            return jsondict

    #export the dfg graphs
    def export_dfg_graph(self, output='graph'):
        if (output == 'roots'):
            root_nodes = [
                node for node in self.dfg_graph_generator.graph.nodes()
                if self.dfg_graph_generator.graph.in_degree(node) == 0
            ]
            print(f'Saving {len(root_nodes)} root nodes as a json...')
            with open(f'{self.output_directory}root_nodes.json', 'w') as f:
                f.write(dumps(root_nodes, indent=4))
            print('List of root nodes saved in root_nodes.json.\n')
            f.close()

        elif (output == 'nodes'):
            all_nodes = (self.dfg_graph_generator.graph.nodes())
            print(f'Saving all {len(all_nodes)} nodes as a json...')
            with open(f'{self.output_directory}all_nodes.json', 'w') as f:
                f.write(dumps(all_nodes, indent=4))
            print('List of nodes saved in all_nodes.json.\n')
            f.close()

        elif (output == 'edges'):
            all_edges = list()
            for edge in self.dfg_graph_generator.graph.edges():
                all_edges.append((edge[0], edge[1], edge.attr['label']))
            print(f'Saving all {len(all_edges)} edges as a json...')
            with open(f'{self.output_directory}all_edges.json', 'w') as f:
                f.write(dumps(all_edges, indent=4))
            print('List of edges is saved in all_edges.json.\n')
            f.close()

        elif (output == 'graph'):
            jsondict = {}
            for node in self.dfg_graph_generator.graph.nodes():
                jsondict[str(node)] = list()
                for child in self.dfg_graph_generator.graph.successors(node):
                    edgeLabel = self.dfg_graph_generator.graph.get_edge(
                        node, child).attr['label']
                    jsondict[str(node)].append((edgeLabel, str(child)))
            print(f'Saving graph dictionary as a json...')
            f = open(f'{self.output_directory}topModule.json', 'w')
            jsonstr = dumps(jsondict, indent=4)
            f.write(jsonstr)
            f.close()
            print('The graph is saved as topModule.json.\n')

    #to be refactored
    def graph_input_dependencies(self, draw_graph=False):
        labeltoNames = dict()
        for node in self.dfg_graph_generator.graph.nodes():
            label = node.attr['label'] if node.attr['label'] != '\\N' else str(
                node)
            if label not in labeltoNames:
                labeltoNames[label] = list()
            labeltoNames[label].append(str(node))

        inputs = [
            self.dfg_graph_generator.graph.get_node(
                *labeltoNames[str(term).replace('.', '_')])
            for x, term in zip(self.dfg_graph_generator.terms,
                               self.dfg_graph_generator.terms.values())
            if len(x.get_module_list()) == 1 and 'Input' in term.termtype
        ]

        print('Locating nodes not connected to inputs...')
        to_delete = list()
        for num, node in enumerate(self.dfg_graph_generator.graph.nodes(),
                                   start=1):
            label = node.attr['label'] if node.attr['label'] != '\\N' else str(
                node)
            if label not in inputs:
                x = True
                for input_ in inputs:
                    if self._isChild(self.dfg_graph_generator.graph, node,
                                     input_):
                        x = False
                if x == True:
                    to_delete.append(node)
            print(
                f'\rProgress : {num} / {len(self.dfg_graph_generator.graph.nodes())}',
                end='',
                flush=True)
        print('\nRemoving nodes not connected to inputs...')
        for num, node in enumerate(to_delete, start=1):
            self.dfg_graph_generator.graph.delete_node(node)
            print(f'\rProgress : {num} / {len(to_delete)}', end='', flush=True)
        print('\nRemoval is complete.\n')

        if draw_graph:
            print(
                f'Saving graph with {len(self.dfg_graph_generator.graph.nodes())} nodes as a pdf...'
            )
            self.dfg_graph_generator.draw(
                f'{self.output_directory}input_dependencies.pdf')
            print('Graph saved.\n')

    #cleanup dot file and other residual files generated earlier
    def cleanup_files(self):
        for file in ['file.dot', 'parser.out', 'parsetab.py', 'top_state.png']:
            try:
                os.remove(file)
            except FileNotFoundError:
                pass
def main():
    INFO = "Control-flow analyzer for Verilog definitions"
    VERSION = pyverilog.utils.version.VERSION
    USAGE = "Usage: python example_controlflow_analyzer.py -t TOPMODULE file ..."

    def showVersion():
        print(INFO)
        print(VERSION)
        print(USAGE)
        sys.exit()
    
    optparser = OptionParser()
    optparser.add_option("-v","--version",action="store_true",dest="showversion",
                         default=False,help="Show the version")
    optparser.add_option("-t","--top",dest="topmodule",
                         default="TOP",help="Top module, Default=TOP")
    optparser.add_option("-s","--search",dest="searchtarget",action="append",
                         default=[],help="Search Target Signal")
    optparser.add_option("--graphformat",dest="graphformat",
                         default="png",help="Graph file format, Default=png")
    optparser.add_option("--nograph",action="store_true",dest="nograph",
                         default=False,help="Non graph generation")
    optparser.add_option("--nolabel",action="store_true",dest="nolabel",
                         default=False,help="State Machine Graph without Labels")
    optparser.add_option("-I","--include",dest="include",action="append",
                         default=[],help="Include path")
    optparser.add_option("-D",dest="define",action="append",
                         default=[],help="Macro Definition")
    (options, args) = optparser.parse_args()

    filelist = args
    if options.showversion:
        showVersion()

    for f in filelist:
        if not os.path.exists(f): raise IOError("file not found: " + f)

    if len(filelist) == 0:
        showVersion()

    analyzer = VerilogDataflowAnalyzer(filelist, options.topmodule,
                                       preprocess_include=options.include,
                                       preprocess_define=options.define)
    analyzer.generate()

    directives = analyzer.get_directives()
    terms = analyzer.getTerms()
    binddict = analyzer.getBinddict()

    optimizer = VerilogDataflowOptimizer(terms, binddict)

    optimizer.resolveConstant()
    resolved_terms = optimizer.getResolvedTerms()
    resolved_binddict = optimizer.getResolvedBinddict()
    constlist = optimizer.getConstlist()
    fsm_vars = tuple(['fsm', 'state', 'count', 'cnt', 'step', 'mode'] + options.searchtarget)
    
    canalyzer = VerilogControlflowAnalyzer(options.topmodule, terms, binddict,
                                           resolved_terms, resolved_binddict, constlist, fsm_vars)
    fsms = canalyzer.getFiniteStateMachines()

    for signame, fsm in fsms.items():
        print('# SIGNAL NAME: %s' % signame)
        print('# DELAY CNT: %d' % fsm.delaycnt)
        fsm.view()
        if not options.nograph:
            fsm.tograph(filename=util.toFlatname(signame)+'.'+options.graphformat, nolabel=options.nolabel)
        loops = fsm.get_loop()
        print('Loop')
        for loop in loops:
            print(loop)
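
A minimal end-to-end sketch of driving the VerilogParser class above on its default DFG path (the file and directory names are placeholders, and generate_cfg/generate_ast are left at their False defaults):

parser = VerilogParser('design.v', './output/', 'top')
parser.graph_separate_modules()          # one subgraph per bound signal
parser.merge_graphs()                    # stitch the per-signal subgraphs together
parser.export_dfg_graph(output='graph')  # writes ./output/topModule.json
parser.cleanup_files()                   # remove parser.out, parsetab.py, etc.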
Example #12
def main():
    ##    CIRCUIT = "dma_rrarb"
    CIRCUIT = "add_serial"

    if CIRCUIT == "dma_rrarb":
        src = "dma_rrarb/"
        filelist = ["dma_rrarb_mod.v"]
        topmodule = "dma_rrarb"
    elif CIRCUIT == "add_serial":
        src = "add_serial/"
        filelist = ["add_serial_mod.v"]
        topmodule = "add_serial"
    dst = "./"
    new_files = []
    for file_name in os.listdir(src):
        full_file_name = os.path.join(src, file_name)
        if os.path.isfile(full_file_name):
            shutil.copy(full_file_name, dst)
            new_files.append(file_name)

    noreorder = False
    nobind = False
    include = []
    define = []

    analyzer = VerilogDataflowAnalyzer(filelist, topmodule, noreorder, nobind,
                                       include, define)
    analyzer.generate()

    ##    directives = analyzer.get_directives()
    terms = analyzer.getTerms()
    binddict = analyzer.getBinddict()

    optimizer = VerilogDataflowOptimizer(terms, binddict)
    optimizer.resolveConstant()
    resolved_terms = optimizer.getResolvedTerms()
    resolved_binddict = optimizer.getResolvedBinddict()
    constlist = optimizer.getConstlist()

    fsm_vars = tuple(['state'])
    canalyzer = VerilogControlflowAnalyzer(topmodule, terms, binddict,
                                           resolved_terms, resolved_binddict,
                                           constlist, fsm_vars)
    fsms = canalyzer.getFiniteStateMachines()

    name = topmodule
    if CIRCUIT == "dma_rrarb":
        state_var = CDFG.newScope('dma_rrarb', 'state')
        clk = CDFG.newScope('dma_rrarb', 'HCLK')
        rst = CDFG.newScope('dma_rrarb', 'HRSTn')
        state_list = [
            CDFG.newScope('dma_rrarb', 'grant0'),
            CDFG.newScope('dma_rrarb', 'grant1'),
            CDFG.newScope('dma_rrarb', 'grant2'),
            CDFG.newScope('dma_rrarb', 'grant3'),
            CDFG.newScope('dma_rrarb', 'grant4'),
            CDFG.newScope('dma_rrarb', 'grant5'),
            CDFG.newScope('dma_rrarb', 'grant6'),
            CDFG.newScope('dma_rrarb', 'grant7'),
        ]
    elif CIRCUIT == "add_serial":
        state_var = CDFG.newScope('add_serial', 'state')
        clk = CDFG.newScope('add_serial', 'clk')
        rst = CDFG.newScope('add_serial', 'rst')
        state_list = [
            CDFG.newScope('add_serial', 'IDLE'),
            CDFG.newScope('add_serial', 'ADD'),
            CDFG.newScope('add_serial', 'DONE')
        ]

    max_trials = 20
    max_bits = 6
    max_num_ex_states = 8

    for num_ex_states in range(max_num_ex_states + 1):
        ##    for num_ex_states in [3,4,5]:
        print("num_ex_states = " + str(num_ex_states))
        codegen_dir = "{}/codegen/nonZeroCopyState/d{}/".format(
            topmodule, num_ex_states)
        try:
            os.makedirs(codegen_dir)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
        f = open(codegen_dir + "edge_count_d{}.csv".format(num_ex_states), "w")
        for i in range(max_bits):
            num_bits = i + 1
            for trial in range(max_trials):
                print("generating code for num_bits = {}, trial = {}".format(
                    num_bits, trial + 1))
                fsm_obj = copy.deepcopy(fsms[state_var])
                cur_state_list = copy.deepcopy(state_list)
                cur_constlist = copy.deepcopy(constlist)
                cur_terms = copy.deepcopy(resolved_terms)
                cur_binddict = copy.deepcopy(resolved_binddict)

                cdfg = CDFG.ControlDataFlowGraph(name, fsm_obj, state_var, clk, rst,
                                                 cur_state_list, cur_constlist,
                                                 cur_terms, cur_binddict)
                cdfg.generate()
                PIs = cdfg.getPIs()
                # exempt clk
                PIs.remove(cdfg.clk)
                # and reset from scrambling
                if cdfg.rst:
                    PIs.remove(cdfg.rst)
                total_bits = 0
                for PI in PIs:
                    total_bits += cdfg.getNumBitsOfVar(PI)
                cdfg.scramblePIBits(total_bits / 2)
                for ex_state_i in range(num_ex_states):
                    src = cdfg.state_list[ex_state_i]
                    dst = cdfg.state_list[ex_state_i + 1]
                    delay = CDFG.newScope(topmodule, 'delay' + str(ex_state_i))
                    ##                    cdfg.insDelayState(src, dst, delay)
                    cdfg.insCopyState(src, dst, delay)
                (all_trans_freqs, num_PIs) = cdfg.getTransFreqs()
                for state in cur_state_list:
                    src_i = cdfg.state_list.index(state)
                    trans_freqs = all_trans_freqs[src_i]
                    trans_freqs = cdfg.nonZeroStates(trans_freqs, state,
                                                     num_bits)
                    all_trans_freqs[src_i] = trans_freqs
                cdfg.toCode(
                    topmodule, codegen_dir + "{}_d{}_b{}_t{}.v".format(
                        topmodule, num_ex_states, num_bits, trial + 1))
                metric_val = cdfg.countConnects(all_trans_freqs)
                f.write(str(metric_val) + ",")
            f.write("\n")
        f.close()

    print("\n")
    print("done")

    for file_name in new_files:
        os.remove(file_name)