def __init__(self, moduleList):

    if(moduleList.isDependsBuild):           
        return


    # Here we add user-defined area groups into the build.  These area
    # groups have a parent, and are explicitly not already in the module list.
    if(moduleList.getAWBParamSafe('area_group_tool', 'AREA_GROUPS_ENABLE') and (wrapper_gen_tool.getFirstPassLIGraph() is None)):
        area_group_tool.insertDeviceModules(moduleList)

            
    synthesis_library.buildNetlists(moduleList, synthesis_library.buildVivadoEDF, synthesis_library.buildVivadoEDF)
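
A minimal sketch of the builder-callback contract that buildNetlists assumes: each builder is called with the module list, one module, and the global RTL file lists, and it must return a list of SCons targets. The builder name and action below are hypothetical placeholders, not the real buildVivadoEDF.

def exampleNetlistBuilder(moduleList, module, globalVerilogs, globalVHDs):
    # Return a list of SCons nodes that the top-level synthesis step can
    # depend on; env.Command already returns such a list.
    netlist = moduleList.env.Command(
        module.name + '.edf',                         # hypothetical target name
        globalVerilogs + globalVHDs,                  # sources feeding the netlist
        'echo "synthesizing ' + module.name + '" > $TARGET')  # placeholder action
    return netlist
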
Example #2
  def __init__(self, moduleList):

    if(moduleList.isDependsBuild):           
        return

    RESOURCE_COLLECTOR = eval(moduleList.getAWBParam('synthesis_tool', 'RESOURCE_COLLECTOR'))
    PLATFORM_BUILDER = eval(moduleList.getAWBParam('synthesis_tool', 'PLATFORM_BUILDER'))

    buildUser = functools.partial(buildSynplifyEDF, resourceCollector = RESOURCE_COLLECTOR)


    # Here we add user-defined area groups into the build.  These area
    # groups have a parent, and are explicitly not already in the module list.
    if(moduleList.getAWBParamSafe('area_group_tool', 'AREA_GROUPS_ENABLE')):
        if(wrapper_gen_tool.getFirstPassLIGraph() is None):
            area_group_tool.insertDeviceModules(moduleList)

    synthesis_library.buildNetlists(moduleList, buildUser, PLATFORM_BUILDER)
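
Example #2 differs from Example #1 mainly in pre-binding a resource collector to the user-module builder. A small hedged sketch of that functools.partial pattern, with a hypothetical stand-in for buildSynplifyEDF:

import functools

def buildEDFSketch(moduleList, module, globalVerilogs, globalVHDs,
                   resourceCollector=None):
    # Hypothetical builder: resourceCollector is pre-bound below so the
    # resulting callable matches the four-argument signature that
    # synthesis_library.buildNetlists expects of a user-module builder.
    return []

buildUserSketch = functools.partial(buildEDFSketch, resourceCollector=object())
# buildUserSketch(moduleList, module, globalVerilogs, globalVHDs) now runs
# buildEDFSketch with resourceCollector already supplied.
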
Example #3
def buildNetlists(moduleList, userModuleBuilder, platformModuleBuilder):
    # We load this graph into memory several times. 
    # TODO: load this graph once. 
    firstPassLIGraph = wrapper_gen_tool.getFirstPassLIGraph()

    DEBUG = model.getBuildPipelineDebug(moduleList) 

    # string together the xcf, sort of like the ucf
    # Concatenate XCF files
    MODEL_CLOCK_FREQ = moduleList.getAWBParam('clocks_device', 'MODEL_CLOCK_FREQ')

    ngcModules = [module for module in moduleList.synthBoundaries() if not module.liIgnore] 

    [globalVerilogs, globalVHDs] = globalRTLs(moduleList, moduleList.moduleList)

    synth_deps = []
    # drop existing boundaries. 

    for module in ngcModules:   
        # did we get an ngc from the first pass?  If so, did the lim
        # graph give code for this module?  If both are true, then we
        # will link the old ngc in, rather than regenerate it. 

        if((not firstPassLIGraph is None) and (module.name in firstPassLIGraph.modules) and (firstPassLIGraph.modules[module.name].getAttribute('RESYNTHESIZE') is None)):
            synth_deps += linkNGC(moduleList, module, firstPassLIGraph)
        else:
            # We need to build the netlist. We build platformModules
            # with the platformModuleBuilder.  User modules get built
            # with userModuleBuilder.
            if(module.platformModule):
                synth_deps += platformModuleBuilder(moduleList, module, globalVerilogs, globalVHDs)
            else:
                synth_deps += userModuleBuilder(moduleList, module, globalVerilogs, globalVHDs)


    top_netlist = platformModuleBuilder(moduleList, moduleList.topModule, globalVerilogs, globalVHDs)
    synth_deps += top_netlist
    moduleList.topModule.moduleDependency['SYNTHESIS'] = synth_deps

    # Alias for synthesis
    moduleList.env.Alias('synth', synth_deps)
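
The key decision in the loop above is whether a first-pass netlist can be linked in instead of being resynthesized. A hedged restatement of that condition as a standalone helper (the helper name is ours, not from the source):

def canReuseFirstPassNetlist(firstPassLIGraph, module):
    # Reuse is only possible when a first-pass LI graph exists, it carries an
    # entry for this module, and that entry is not tagged RESYNTHESIZE.
    if firstPassLIGraph is None:
        return False
    if module.name not in firstPassLIGraph.modules:
        return False
    return firstPassLIGraph.modules[module.name].getAttribute('RESYNTHESIZE') is None
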
Example #5
File: BSV.py Project: chenm001/leap
    def __init__(self, moduleList):
        # some definitions used during the bsv compilation process
        env = moduleList.env
        self.moduleList = moduleList

        self.hw_dir = env.Dir(moduleList.env['DEFS']['ROOT_DIR_HW'])

        self.TMP_BSC_DIR = env['DEFS']['TMP_BSC_DIR']
        synth_modules = moduleList.synthBoundaries()

        self.USE_TREE_BUILD = moduleList.getAWBParam('wrapper_gen_tool', 'USE_BUILD_TREE')

        # all_module_dirs: a list of all module directories in the build tree
        self.all_module_dirs = [self.hw_dir.Dir(moduleList.topModule.buildPath)]
        for module in synth_modules:
            if (module.buildPath != moduleList.topModule.buildPath):
                self.all_module_dirs += [self.hw_dir.Dir(module.buildPath)]

        # all_build_dirs: the build (.bsc) sub-directory of all module directories
        self.all_build_dirs = [d.Dir(self.TMP_BSC_DIR) for d in self.all_module_dirs]

        # Include iface directories
        self.all_module_dirs += iface_tool.getIfaceIncludeDirs(moduleList)
        self.all_build_dirs += iface_tool.getIfaceLibDirs(moduleList)

        # Add the top level build directory
        self.all_build_dirs += [env.Dir(self.TMP_BSC_DIR)]

        self.all_module_dirs += [self.hw_dir.Dir('include'),
                                 self.hw_dir.Dir('include/awb/provides')]

        # Full search path: all module and build directories
        self.all_lib_dirs = self.all_module_dirs + self.all_build_dirs

        all_build_dir_paths = [d.path for d in self.all_build_dirs]
        self.ALL_BUILD_DIR_PATHS = ':'.join(all_build_dir_paths)

        all_lib_dir_paths = [d.path for d in self.all_lib_dirs]
        self.ALL_LIB_DIR_PATHS = ':'.join(all_lib_dir_paths)

        # we need to annotate the module list with the
        # bluespec-provided library files. Do so here.
        bsv_tool.decorateBluespecLibraryCode(moduleList)

        self.TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
        self.BUILD_LOGS_ONLY = moduleList.getAWBParam('bsv_tool', 'BUILD_LOGS_ONLY')
        self.USE_BVI = moduleList.getAWBParam('bsv_tool', 'USE_BVI')

        self.pipeline_debug = model.getBuildPipelineDebug(moduleList)

        # Should we be building in events?
        if (model.getEvents(moduleList) == 0):
            bsc_events_flag = ' -D HASIM_EVENTS_ENABLED=False '
        else:
            bsc_events_flag = ' -D HASIM_EVENTS_ENABLED=True '

        self.BSC_FLAGS = moduleList.getAWBParam('bsv_tool', 'BSC_FLAGS') + bsc_events_flag

        moduleList.env.VariantDir(self.TMP_BSC_DIR, '.', duplicate=0)
        moduleList.env['ENV']['BUILD_DIR'] = moduleList.env['DEFS']['BUILD_DIR']  # need to set the builddir for synplify


        topo = moduleList.topologicalOrderSynth()
        topo.reverse()

        # Cleaning? Wipe out module temporary state. Do this before
        # the topo pop to ensure that we don't leave garbage around at
        # the top level.
        if moduleList.env.GetOption('clean'):
            for module in topo:
                MODULE_PATH =  get_build_path(moduleList, module)
                os.system('cd '+ MODULE_PATH + '/' + self.TMP_BSC_DIR + '; rm -f *.ba *.c *.h *.sched *.log *.v *.bo *.str')

        topo.pop() # get rid of top module.

        ## Python module that generates a wrapper to connect the exposed
        ## wires of all synthesis boundaries.
        tree_builder = bsv_tool.BSVSynthTreeBuilder(self)

        ##
        ## Is this a normal build or a build in which only Bluespec dependence
        ## is computed?
        ##

        if not moduleList.isDependsBuild:
            ##
            ## Normal build.
            ##

            ##
            ## Now that the "depends-init" build is complete we can
            ## continue with accurate inter-Bluespec file dependence.
            ## This build only takes place for the first pass object
            ## code generation.  If the first pass li graph exists, it
            ## subsumes awb-style synthesis boundary generation.
            ##
            for module in topo:
                self.build_synth_boundary(moduleList, module)


            ## We are going to have a whole bunch of BA and V files coming.
            ## We don't yet know what they contain, but we do know that there
            ## will be |synth_modules| - 2 of them

            if (not 'GEN_VERILOGS' in moduleList.topModule.moduleDependency):
                moduleList.topModule.moduleDependency['GEN_VERILOGS'] = []
            if (not 'GEN_BAS' in moduleList.topModule.moduleDependency):
                moduleList.topModule.moduleDependency['GEN_BAS'] = []

            ## Having described the new build tree dependencies we can build
            ## the top module.
            self.build_synth_boundary(moduleList, moduleList.topModule)

            ## Merge all synthesis boundaries using a tree?  The tree reduces
            ## the number of connections merged in a single compilation, allowing
            ## us to support larger systems.
            if self.USE_TREE_BUILD:
                tree_builder.setupTreeBuild(moduleList, topo)

            ##
            ## Generate the global string table.  Bluespec-generated global
            ## strings are stored in files by the compiler.
            ##
            ## The global string file will be generated in the top-level
            ## .bsc directory and a link to it will be added to the
            ## top-level directory.
            ##
            all_str_src = []
            #for module in topo + [moduleList.topModule]:
            for module in moduleList.moduleList + topo + [moduleList.topModule]:
                if('STR' in module.moduleDependency):
                    all_str_src.extend(module.moduleDependency['STR'])

            if (self.BUILD_LOGS_ONLY == 0):
                bsc_str = moduleList.env.Command(self.TMP_BSC_DIR + '/' + moduleList.env['DEFS']['APM_NAME'] + '.str',
                                                 all_str_src,
                                                 [ 'cat $SOURCES > $TARGET'])
                strDep = moduleList.env.Command(moduleList.env['DEFS']['APM_NAME'] + '.str',
                                                bsc_str,
                                                [ 'ln -fs ' + self.TMP_BSC_DIR + '/`basename $TARGET` $TARGET' ])
                moduleList.topDependency += [strDep]



            if moduleList.env.GetOption('clean'):
                print 'Cleaning depends-init...'
                s = os.system('scons --clean depends-init')
        else:

            ##
            ## Dependence build.  The target of this build is "depends-init".  No
            ## Bluespec modules will be compiled in this invocation of SCons.
            ## Only .depends-bsv files will be produced.
            ##

            # We need to calculate some dependencies for the build
            # tree.  We could be clever and put this code somewhere shared
            # rather than replicating it.
            if self.USE_TREE_BUILD: 

                buildTreeDeps = {}
                buildTreeDeps['GEN_VERILOGS'] = []
                buildTreeDeps['GEN_BAS'] = []
                #This is sort of a hack.
                buildTreeDeps['WRAPPER_BSHS'] = ['awb/provides/soft_services.bsh']
                buildTreeDeps['GIVEN_BSVS'] = []
                buildTreeDeps['BA'] = []
                buildTreeDeps['STR'] = []
                buildTreeDeps['VERILOG'] = []
                buildTreeDeps['BSV_LOG'] = []
                buildTreeDeps['VERILOG_STUB'] = []

                tree_module = Module( 'build_tree', ["mkBuildTree"], moduleList.topModule.buildPath,\
                             moduleList.topModule.name,\
                             [], moduleList.topModule.name, [], buildTreeDeps, platformModule=True)

                tree_module.dependsFile = '.depends-build-tree'

                moduleList.insertModule(tree_module)
                tree_file_bo = get_build_path(moduleList, moduleList.topModule) + "/build_tree.bsv"
                # sprinkle files to get dependencies right
                bo_handle = open(tree_file_bo,'w')

                # mimic AWB/leap-configure

                bo_handle.write('//\n')
                bo_handle.write('// Synthesized compilation file for module: build_tree\n')
                bo_handle.write('//\n')
                bo_handle.write('//   This file was created by BSV.py\n')
                bo_handle.write('//\n')

                bo_handle.write('`define BUILDING_MODULE_build_tree\n')
                bo_handle.write('`include "build_tree_Wrapper.bsv"\n')

                bo_handle.close()

                # Calling generateWrapperStub will write out default _Wrapper.bsv
                # and _Log.bsv files for build tree. However, these files
                # may already exist, and, in the case of build_tree_Wrapper.bsv,
                # have meaningful content.  Fortunately, generateWrapperStub
                # will not overwrite existing files.
                wrapper_gen_tool.generateWrapperStub(moduleList, tree_module)
                wrapper_gen_tool.generateAWBCompileWrapper(moduleList, tree_module)
                topo.append(tree_module)


            deps = []

            useDerived = True
            first_pass_LI_graph = wrapper_gen_tool.getFirstPassLIGraph()
            if (not first_pass_LI_graph is None):
                useDerived = False
                # we also need to parse the platform_synth file in the dependence analysis
                platform_synth = get_build_path(moduleList, moduleList.topModule) + "/" +  moduleList.localPlatformName + "_platform_synth.bsv"
                platform_deps = ".depends-platform"
                deps += self.compute_dependence(moduleList, moduleList.topModule, useDerived, fileName=platform_deps, targetFiles=[platform_synth])

                # If we have an LI graph, we need to construct and compile
                # several LI wrappers.  do that here.
                # include all the dependencies in the graph in the wrapper.
                li_wrappers = []
                tree_base_path = get_build_path(moduleList, moduleList.topModule)
                liGraph = LIGraph([])
                firstPassGraph = first_pass_LI_graph
                # We should ignore the 'PLATFORM_MODULE'
                liGraph.mergeModules([ module for module in getUserModules(firstPassGraph) if module.getAttribute('RESYNTHESIZE') is None])
                for module in sorted(liGraph.graph.nodes(), key=lambda module: module.name):
                    wrapper_import_path = tree_base_path + '/' + module.name + '_Wrapper.bsv'
                    li_wrappers.append(module.name + '_Wrapper.bsv')
                    wrapper_import_handle = open(wrapper_import_path, 'w')
                    wrapper_import_handle.write('import Vector::*;\n')
                    wrapper_gen_tool.generateWellKnownIncludes(wrapper_import_handle)
                    wrapper_gen_tool.generateBAImport(module, wrapper_import_handle)
                    wrapper_import_handle.close()
                    platform_deps = ".depends-" + module.name
                    deps += self.compute_dependence(moduleList, moduleList.topModule, useDerived, fileName=platform_deps, targetFiles=[wrapper_import_path])
        
            for module in topo + [moduleList.topModule]:
                # For object import builds, no Wrapper code will be included; remove it.
                deps += self.compute_dependence(moduleList, module, useDerived, fileName=module.dependsFile)

            moduleList.topDependsInit += deps
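
Much of the example above is built from two SCons idioms: a Command with a shell action that uses $SOURCES/$TARGET substitution, and a Command whose action is a Python function taking (target, source, env). A self-contained sketch of both, with hypothetical file names:

import SCons.Script

env = SCons.Script.Environment()

# Shell-action Command, as used for the global string table above.
concat = env.Command('all_strings.str', ['a.str', 'b.str'],
                     'cat $SOURCES > $TARGET')

# Python-function Command, as used for the generated wrapper files above.
def write_stub(target, source, env):
    handle = open(str(target[0]), 'w')
    handle.write('// generated stub\n')
    handle.close()

stub = env.Command('module_stub.bsv', concat, write_stub)
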
Example #6
  def __init__(self, moduleList, isPrimaryBuildTarget):
    APM_NAME = moduleList.env['DEFS']['APM_NAME']
    BSC = moduleList.env['DEFS']['BSC']
    inc_paths = moduleList.swIncDir # we need to depend on libasim

    self.firstPassLIGraph = wrapper_gen_tool.getFirstPassLIGraph()

    # This is not correct for LIM builds and needs to be fixed. 
    TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
    ALL_DIRS_FROM_ROOT = moduleList.env['DEFS']['ALL_HW_DIRS']
    ALL_BUILD_DIRS_FROM_ROOT = model.transform_string_list(ALL_DIRS_FROM_ROOT, ':', '', '/' + TMP_BSC_DIR)
    ALL_INC_DIRS_FROM_ROOT   = '-Xv +incdir+' + ALL_DIRS_FROM_ROOT.replace(':','+') 
    ALL_LIB_DIRS_FROM_ROOT   = ALL_DIRS_FROM_ROOT + ':' + ALL_BUILD_DIRS_FROM_ROOT

    # Due to the bluespec linker, for LI second pass builds, the final
    # verilog link step must occur in a different directory than the
    # bsc object code wrapper compilation step.  However, non-LIM
    # linker builds need to build in the original .bsc directory to
    # pick up VPI.
    vexe_vdir = moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + moduleList.env['DEFS']['ROOT_DIR_MODEL'] + '/' + moduleList.env['DEFS']['TMP_BSC_DIR'] 
    if(not self.firstPassLIGraph is None):
        vexe_vdir = vexe_vdir + '_vlog'

    if not os.path.isdir(vexe_vdir):
        os.mkdir(vexe_vdir)

    LI_LINK_DIR = ""
    if (not self.firstPassLIGraph is None):
        LI_LINK_DIR = model.get_build_path(moduleList, moduleList.topModule) + "/.li/"
        inc_paths += [LI_LINK_DIR]
        ALL_LIB_DIRS_FROM_ROOT = LI_LINK_DIR + ':' +  ALL_LIB_DIRS_FROM_ROOT

    liCodeType = ['VERILOG', 'GIVEN_VERILOG_HS', 'GEN_VPI_CS', 'GEN_VPI_HS']

    # This can be refactored as a function.
    if (not self.firstPassLIGraph is None):
        for moduleName in self.firstPassLIGraph.modules:            
            moduleObject = self.firstPassLIGraph.modules[moduleName]
            for codeType in liCodeType:
                if(codeType in moduleObject.objectCache):
                    for verilog in moduleObject.objectCache[codeType]:
                        linkPath = vexe_vdir + '/' + os.path.basename(verilog)
                        def linkVerilog(target, source, env):
                            # It might be more useful if the Module contained a pointer to the LIModules...                        
                            if(os.path.lexists(str(target[0]))):
                                os.remove(str(target[0]))
                            print "Linking: " + str(source[0]) + " to " + str(target[0])
                            os.symlink(str(source[0]), str(target[0]))
                        moduleList.env.Command(linkPath, verilog, linkVerilog)
                        if(codeType in moduleList.topModule.moduleDependency):
                            moduleList.topModule.moduleDependency[codeType] += [linkPath]
                        else:
                            moduleList.topModule.moduleDependency[codeType] = [linkPath]
                else:
                    # Warn that we did not find the object code we expected to find.
                    print "Warning: We did not find verilog for module " + moduleName
                
    bsc_version = bsv_tool.getBluespecVersion()

    ldflags = ''
    for ld_file in moduleList.getAllDependenciesWithPaths('GIVEN_BLUESIM_LDFLAGSS'):
      ldHandle = open(moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + ld_file, 'r')
      ldflags += ldHandle.read() + ' '    

    BSC_FLAGS_VERILOG = '-steps 10000000 +RTS -K1000M -RTS -keep-fires -aggressive-conditions -wait-for-license -no-show-method-conf -no-opt-bool -licenseWarning 7 -elab -show-schedule ' + ldflags + ' -verilog -v -vsim vcs '

    # Build in parallel.
    n_jobs = moduleList.env.GetOption('num_jobs')
    if (bsc_version >= 30006):
        BSC_FLAGS_VERILOG += '-parallel-sim-link ' + str(n_jobs) + ' '

    for path in inc_paths:
        BSC_FLAGS_VERILOG += ' -I ' + path + ' '

    LDFLAGS = moduleList.env['DEFS']['LDFLAGS']
    TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
    ROOT_WRAPPER_SYNTH_ID = 'mk_' + moduleList.env['DEFS']['ROOT_DIR_MODEL'] + '_Wrapper'

    vexe_gen_command = \
        BSC + ' ' + BSC_FLAGS_VERILOG + ' -vdir ' + vexe_vdir + ' -simdir ' + vexe_vdir + ' -bdir ' + vexe_vdir +' -p +:' +  ALL_LIB_DIRS_FROM_ROOT + ' -vsearch +:' + ALL_LIB_DIRS_FROM_ROOT + ' ' + \
        ' -o $TARGET' 


    if (bsc_version >= 13013):
        # 2008.01.A compiler allows us to pass C++ arguments.
        if (model.getDebug(moduleList)):
            vexe_gen_command += ' -Xc++ -O0'
        else:
            vexe_gen_command += ' -Xc++ -O1'

        # g++ 4.5.2 is complaining about overflowing the var tracking table

        if (model.getGccVersion() >= 40501):
             vexe_gen_command += ' -Xc++ -fno-var-tracking-assignments'

    defs = (software_tool.host_defs()).split(" ")
    for definition in defs:
        vexe_gen_command += ' -Xc++ ' + definition + ' -Xc ' + definition
 
    # cflags to be passed into vcs compiler
    for definition in defs:
        vexe_gen_command += ' -Xv -CFLAGS -Xv ' + definition
    for path in inc_paths:
        vexe_gen_command += ' -Xv -CFLAGS -Xv -I' + path

    for lib in moduleList.swLinkLibs:
        vexe_gen_command += ' -Xl -l' + lib + ' '
        vexe_gen_command += ' -Xv -LDFLAGS -Xv -l' + lib + ' '

    # construct full path to BAs
    def modify_path(str):
        array = str.split('/')
        file = array.pop()
        return  moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + '/'.join(array) + '/' + TMP_BSC_DIR + '/' + file 

    vexe_gen_command += ' -Xv -full64 '
    vexe_gen_command += ' -Xv -sverilog '
    vexe_gen_command += ' -Xv +librescan '
    vexe_gen_command += ' -Xv +libext+.sv '
    if(moduleList.getAWBParam('verilog_tool', 'VCS_ARGUMENTS')):
        vexe_gen_command += moduleList.getAWBParam('verilog_tool', 'VCS_ARGUMENTS')
    vexe_gen_command += ' ' + ALL_INC_DIRS_FROM_ROOT + ' '

    # VCS must be informed of all BDPI.  Really we need some kind of
    # file object here.  All this massaging of path is ridiculous.
    vexe_gen_command += ' -Xv ' +  moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + (' -Xv ' +  moduleList.env['DEFS']['ROOT_DIR_HW'] + '/').join(moduleList.getAllDependenciesWithPaths('GIVEN_BDPI_CS')) + ' ' 

    # Bluespec requires that source files terminate the command line.
    vexe_gen_command += '-verilog -e ' + ROOT_WRAPPER_SYNTH_ID + ' ' +\
                        moduleList.env['DEFS']['BDPI_CS']

    if (model.getBuildPipelineDebug(moduleList) != 0):
        for m in moduleList.getAllDependencies('BA'):
            print 'BA dep: ' + str(m)
        for m in moduleList.getAllDependencies('VERILOG'):
            print 'VL dep: ' + str(m)
        for m in moduleList.getAllDependencies('VHDL'):
            print 'VHDL dep: ' + str(m)
        for m in moduleList.getAllDependencies('GIVEN_BDPI_CS'):
            print 'GIVEN_BDPI_CS: ' + str(m)
        

    # Generate a thin wrapper around the verilog executable.  This
    # wrapper is used to address a problem in iverilog in which the
    # simulator does not support shared library search paths.  The
    # current wrapper only works for iverilog.  Due to brokenness in
    # the iverilog argument parser, we must construct a correct
    # iverilog command line by analyzing its compiled script. Also,
    # this script is not passing through the arguments that it should
    # be passing through. 
    def generate_vexe_wrapper(target, source, env):
        wrapper_handle = open(str(target[0]),'w')
        wrapper_handle.write('#!/usr/bin/perl\n')
        wrapper_handle.write('# generated by verilog.py\n') 
        wrapper_handle.write('$platform = $ENV{"PLATFORM_DIRECTORY"};\n')
        wrapper_handle.write('$ENV{LD_LIBRARY_PATH} = $platform . ":" . $ENV{LD_LIBRARY_PATH};\n')
        wrapper_handle.write('`ln -sf $platform/directc_mk_model_Wrapper.so .`;\n')
        wrapper_handle.write('exec("$platform/' + TMP_BSC_DIR + '/' + APM_NAME + '_hw.exe  -licqueue \$* ");\n')
        wrapper_handle.close()
 
    def modify_path_ba_local(path):
        return bsv_tool.modify_path_ba(moduleList, path)

    # Bluesim builds apparently touch this code. This control block
    # preserves their behavior, but it is unclear why the verilog build is 
    # involved.
    if (isPrimaryBuildTarget):
        vbinDeps = []
        # If we got a lim graph, we'll pick up many of our dependencies from it. 
        # These were annotated in the top module above. Really, this seems unclean;
        # we should build a graph during the second pass and just use it.
        if(not self.firstPassLIGraph is None):
            vbinDeps += moduleList.getDependencies(moduleList.topModule, 'VERILOG') + moduleList.getDependencies(moduleList.topModule, 'GIVEN_VERILOG_HS') + moduleList.getDependencies(moduleList.topModule, 'GEN_VPI_HS') + moduleList.getDependencies(moduleList.topModule, 'GEN_VPI_CS') +moduleList.getDependencies(moduleList.topModule, 'VHDL') + moduleList.getDependencies(moduleList.topModule, 'BA') + map(modify_path_ba_local, moduleList.getModuleDependenciesWithPaths(moduleList.topModule, 'GEN_BAS'))
        # collect dependencies from all awb modules
        else:
            vbinDeps += moduleList.getAllDependencies('VERILOG') + moduleList.getAllDependencies('VHDL') + moduleList.getAllDependencies('BA') + map(modify_path_ba_local, moduleList.getAllDependenciesWithPaths('GEN_BAS'))
          
        vbin = moduleList.env.Command(
            TMP_BSC_DIR + '/' + APM_NAME + '_hw.exe',
            vbinDeps,
            [ vexe_gen_command ])

        moduleList.env.AlwaysBuild(vbin)

        vexe = moduleList.env.Command(
            APM_NAME + '_hw.exe',
            vbin,
            [  generate_vexe_wrapper,
              '@chmod a+x $TARGET',
               SCons.Script.Delete(APM_NAME + '_hw.errinfo') ])
        

        moduleList.topDependency = moduleList.topDependency + [vexe]

    else:
        vbin = moduleList.env.Command(
            TMP_BSC_DIR + '/' + APM_NAME + '_hw.vexe',
            moduleList.getAllDependencies('VERILOG') +
            moduleList.getAllDependencies('VHDL') +
            moduleList.getAllDependencies('BA') +
            map(modify_path_ba_local, moduleList.getAllDependenciesWithPaths('GEN_BAS')),
            [ vexe_gen_command ])
 

        vexe = moduleList.env.Command(
            APM_NAME + '_hw.vexe',
            vbin,
            [ generate_vexe_wrapper,
              '@chmod a+x $TARGET',
            SCons.Script.Delete(APM_NAME + '_hw.exe'),
            SCons.Script.Delete(APM_NAME + '_hw.errinfo') ])

    moduleList.env.Alias('vexe', vexe)
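
The modify_path helper above rewrites a module-relative .ba path so that it points into the module's temporary bsc build directory. A hedged illustration with invented directory names:

def modify_path_sketch(path, root_dir_hw='hw', tmp_bsc_dir='.bsc'):
    # Same transformation as modify_path above, parameterized for illustration.
    parts = path.split('/')
    filename = parts.pop()
    return root_dir_hw + '/' + '/'.join(parts) + '/' + tmp_bsc_dir + '/' + filename

# modify_path_sketch('model/connected_application/mkApp.ba')
#   -> 'hw/model/connected_application/.bsc/mkApp.ba'
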
Example #7
  def __init__(self, moduleList, isPrimaryBuildTarget):

    # if we have a deps build, don't do anything...
    if(moduleList.isDependsBuild):
        return

    APM_NAME = moduleList.env['DEFS']['APM_NAME']
    BSC = moduleList.env['DEFS']['BSC']
    inc_paths = moduleList.swIncDir # we need to depend on libasim

    self.firstPassLIGraph = wrapper_gen_tool.getFirstPassLIGraph()

    # This is not correct for LIM builds and needs to be fixed. 
    TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
    ALL_DIRS_FROM_ROOT = moduleList.env['DEFS']['ALL_HW_DIRS']
    ALL_BUILD_DIRS_FROM_ROOT = model.transform_string_list(ALL_DIRS_FROM_ROOT, ':', '', '/' + TMP_BSC_DIR)
    ALL_LIB_DIRS_FROM_ROOT = ALL_DIRS_FROM_ROOT + ':' + ALL_BUILD_DIRS_FROM_ROOT


    # Due to the bluespec linker, for LI second pass builds, the final
    # verilog link step must occur in a different directory than the
    # bsc object code wrapper compilation step.  However, non-LIM
    # linker builds need to build in the original .bsc directory to
    # pick up VPI.
    vexe_vdir = moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + moduleList.env['DEFS']['ROOT_DIR_MODEL'] + '/' + moduleList.env['DEFS']['TMP_BSC_DIR'] 
    if(not self.firstPassLIGraph is None):
        vexe_vdir = vexe_vdir + '_vlog'

    if not os.path.isdir(vexe_vdir):
        os.mkdir(vexe_vdir)

    LI_LINK_DIR = ""
    if (not self.firstPassLIGraph is None):
        LI_LINK_DIR = model.get_build_path(moduleList, moduleList.topModule) + "/.li/"
        inc_paths += [LI_LINK_DIR]
        ALL_LIB_DIRS_FROM_ROOT = LI_LINK_DIR + ':' +  ALL_LIB_DIRS_FROM_ROOT

    liCodeType = ['VERILOG', 'GIVEN_VERILOG_HS', 'GEN_VPI_CS', 'GEN_VPI_HS']

    # This can be refactored as a function.

    if (not self.firstPassLIGraph is None):     
        for moduleName in self.firstPassLIGraph.modules:                       
            moduleObject = self.firstPassLIGraph.modules[moduleName]
            # we also need the module list object
            moduleListObject = moduleList.modules[moduleName]
            for codeType in liCodeType:
                # If we're linking, clean out any previous code dependencies.  These are guaranteed not to be used. 
                moduleListObject.moduleDependency[codeType] = []
                li_module.linkFirstPassObject(moduleList, moduleListObject, self.firstPassLIGraph, codeType, codeType, linkDirectory=vexe_vdir)
                 
                
    bsc_version = bsv_tool.getBluespecVersion()

    ldflags = ''
    for ld_file in moduleList.getAllDependenciesWithPaths('GIVEN_BLUESIM_LDFLAGSS'):
      ldHandle = open(moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + ld_file, 'r')
      ldflags += ldHandle.read() + ' '    

    BSC_FLAGS_VERILOG = '-steps 10000000 +RTS -K1000M -RTS -keep-fires -aggressive-conditions -wait-for-license -no-show-method-conf -no-opt-bool -licenseWarning 7 -elab -show-schedule ' + ldflags + ' -verilog -v -vsim iverilog '

    # Build in parallel.
    n_jobs = moduleList.env.GetOption('num_jobs')
    if (bsc_version >= 30006):
        BSC_FLAGS_VERILOG += '-parallel-sim-link ' + str(n_jobs) + ' '

    for path in inc_paths:
        BSC_FLAGS_VERILOG += ' -I ' + path + ' ' #+ '-Xv -I' + path + ' '

    LDFLAGS = moduleList.env['DEFS']['LDFLAGS']
    TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
    ROOT_WRAPPER_SYNTH_ID = 'mk_' + moduleList.env['DEFS']['ROOT_DIR_MODEL'] + '_Wrapper'

    vexe_gen_command = \
        BSC + ' ' + BSC_FLAGS_VERILOG + ' -vdir ' + vexe_vdir + ' -simdir ' + vexe_vdir + ' -bdir ' + vexe_vdir +' -p +:' +  ALL_LIB_DIRS_FROM_ROOT + ' -vsearch +:' + ALL_LIB_DIRS_FROM_ROOT + ' ' + \
        ' -o $TARGET' 

    if (bsc_version >= 13013):
        # 2008.01.A compiler allows us to pass C++ arguments.
        if (model.getDebug(moduleList)):
            vexe_gen_command += ' -Xc++ -O0'
        else:
            vexe_gen_command += ' -Xc++ -O1'

        # g++ 4.5.2 is complaining about overflowing the var tracking table

        if (model.getGccVersion() >= 40501):
             vexe_gen_command += ' -Xc++ -fno-var-tracking-assignments'

    defs = (software_tool.host_defs()).split(" ")
    for definition in defs:
        vexe_gen_command += ' -Xc++ ' + definition + ' -Xc ' + definition

    # Hack to link against pthreads.  Really we should have a better solution.
    vexe_gen_command += ' -Xl -lpthread '

    # construct full path to BAs
    def modify_path(str):
        array = str.split('/')
        file = array.pop()
        return  moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + '/'.join(array) + '/' + TMP_BSC_DIR + '/' + file 


    # Use systemverilog 2005
    if(moduleList.getAWBParam('verilog_tool', 'ENABLE_SYSTEM_VERILOG')):
        vexe_gen_command += ' -Xv -g2005-sv '

    # Allow .vh/.sv file extensions etc.
    # vexe_gen_command += ' -Xv -Y.vh -Xv -Y.sv '

    # Bluespec requires that source files terminate the command line.
    vexe_gen_command += '-verilog -e ' + ROOT_WRAPPER_SYNTH_ID + ' ' +\
                        moduleList.env['DEFS']['BDPI_CS']

    if (model.getBuildPipelineDebug(moduleList) != 0):
        for m in moduleList.getAllDependencies('BA'):
            print 'BA dep: ' + str(m)
        for m in moduleList.getAllDependencies('VERILOG'):
            print 'VL dep: ' + str(m)
        for m in moduleList.getAllDependencies('VHDL'):
            print 'VHDL dep: ' + str(m)


    # Generate a thin wrapper around the verilog executable.  This
    # wrapper is used to address a problem in iverilog in which the
    # simulator does not support shared library search paths.  The
    # current wrapper only works for iverilog.  Due to brokenness in
    # the iverilog argument parser, we must construct a correct
    # iverilog command line by analyzing its compiled script. Also,
    # this script is not passing through the arguments that it should
    # be passing through. 
    def generate_vexe_wrapper(target, source, env):
        wrapper_handle = open(str(target[0]),'w')
        wrapper_handle.write('#!/usr/bin/perl\n')
        wrapper_handle.write('# generated by verilog.py\n') 
        wrapper_handle.write('$platform = $ENV{"PLATFORM_DIRECTORY"};\n')
        wrapper_handle.write('@script = `cat $platform/' + TMP_BSC_DIR + '/' + APM_NAME + '_hw.exe' + '`;\n')   
        wrapper_handle.write('$script[0] =~ s/#!/ /g;\n')
        wrapper_handle.write('$vvp = $script[0];\n')
        wrapper_handle.write('chomp($vvp);\n')
        wrapper_handle.write('exec("$vvp -m$platform/directc_mk_model_Wrapper.so $platform/' + TMP_BSC_DIR + '/' + APM_NAME + '_hw.exe' + ' +bscvcd \$* ");\n')
        wrapper_handle.close()
 
    def modify_path_ba_local(path):
        return bsv_tool.modify_path_ba(moduleList, path)

    # Bluesim builds apparently touch this code. This control block
    # preserves their behavior, but it is unclear why the verilog build is 
    # involved.
    if (isPrimaryBuildTarget):
        vbinDeps = []

        # If we got a lim graph, we'll pick up many of our dependencies from it. 
        # These were annotated in the top module above. Really, this seems unclean;
        # we should build a graph during the second pass and just use it.
        if(not self.firstPassLIGraph is None):
            # Collect linked dependencies for every module
            for moduleName in self.firstPassLIGraph.modules:
                moduleListObject = moduleList.modules[moduleName]
                vbinDeps += moduleList.getDependencies(moduleListObject, 'VERILOG') + moduleList.getDependencies(moduleListObject, 'GIVEN_VERILOG_HS') + moduleList.getDependencies(moduleListObject, 'GEN_VPI_HS') + moduleList.getDependencies(moduleListObject, 'GEN_VPI_CS') + moduleList.getDependencies(moduleListObject, 'VHDL') + moduleList.getDependencies(moduleListObject, 'BA') + moduleList.getDependencies(moduleListObject, 'GEN_BAS')

            vbinDeps += moduleList.getDependencies(moduleList.topModule, 'VERILOG') + moduleList.getDependencies(moduleList.topModule, 'GIVEN_VERILOG_HS') + moduleList.getDependencies(moduleList.topModule, 'GEN_VPI_HS') + moduleList.getDependencies(moduleList.topModule, 'GEN_VPI_CS') +moduleList.getDependencies(moduleList.topModule, 'VHDL') + moduleList.getDependencies(moduleList.topModule, 'BA') + map(modify_path_ba_local, moduleList.getModuleDependenciesWithPaths(moduleList.topModule, 'GEN_BAS'))

        # collect dependencies from all awb modules
        else:
            vbinDeps += moduleList.getAllDependencies('VERILOG') + moduleList.getAllDependencies('VHDL') + moduleList.getAllDependencies('BA') + map(modify_path_ba_local, moduleList.getAllDependenciesWithPaths('GEN_BAS'))


        vbin = moduleList.env.Command(
            TMP_BSC_DIR + '/' + APM_NAME + '_hw.exe',
            vbinDeps,
            [ vexe_gen_command,
              SCons.Script.Delete('directc.sft') ])

        vexe = moduleList.env.Command(
            APM_NAME + '_hw.exe',
            vbin,
            [  generate_vexe_wrapper,
              '@chmod a+x $TARGET',
            SCons.Script.Delete(APM_NAME + '_hw.errinfo') ])


        moduleList.topDependency = moduleList.topDependency + [vexe]

    else:
        vbinDeps = moduleList.getAllDependencies('VERILOG') + moduleList.getAllDependencies('VHDL') + moduleList.getAllDependencies('BA') + map(modify_path_ba_local, moduleList.getAllDependenciesWithPaths('GEN_BAS'))

        vbin = moduleList.env.Command(
            TMP_BSC_DIR + '/' + APM_NAME + '_hw.vexe',
            vbinDeps,
            [ vexe_gen_command,
              SCons.Script.Delete('directc.sft') ])


        vexe = moduleList.env.Command(
            APM_NAME + '_hw.vexe',
            vbin,
            [ generate_vexe_wrapper,
              '@chmod a+x $TARGET',
              SCons.Script.Delete(APM_NAME + '_hw.exe'),
            SCons.Script.Delete(APM_NAME + '_hw.errinfo') ])

    moduleList.env.Alias('vexe', vexe)
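
Example #7 delegates per-module object linking to li_module.linkFirstPassObject, where Example #6 spelled the same idea out by hand. A condensed, hedged sketch of that symlink-and-record step (the helper name is ours; the behavior is inferred from the explicit code in Example #6):

import os

def linkObjectIntoBuildDir(moduleList, module, sourceFile, codeType, linkDirectory):
    # Symlink a first-pass object into the link directory via an SCons rule
    # and record the link as a dependency of the given code type.
    linkPath = linkDirectory + '/' + os.path.basename(sourceFile)

    def doLink(target, source, env):
        if os.path.lexists(str(target[0])):
            os.remove(str(target[0]))
        os.symlink(str(source[0]), str(target[0]))

    moduleList.env.Command(linkPath, sourceFile, doLink)
    module.moduleDependency.setdefault(codeType, []).append(linkPath)
    return linkPath
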
Example #8
    def __init__(self, moduleList):
        self.pipeline_debug = model.getBuildPipelineDebug(moduleList)
        # if we have a deps build, don't do anything...
        if(moduleList.isDependsBuild):           
            return

        def modify_path_hw(path):
            return  moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + path

        if (not moduleList.getAWBParam('area_group_tool', 'AREA_GROUPS_ENABLE')):
            return

        self.emitPlatformAreaGroups = (moduleList.getAWBParam('area_group_tool',
                                                              'AREA_GROUPS_GROUP_PLATFORM_CODE') != 0)

        self.enableParentClustering = (moduleList.getAWBParam('area_group_tool',
                                                              'AREA_GROUPS_ENABLE_PARENT_CLUSTERING') != 0)

        self.enableCommunicationClustering = (moduleList.getAWBParam('area_group_tool',
                                                                     'AREA_GROUPS_ENABLE_COMMUNICATION_CLUSTERING') != 0)

        self.clusteringWeight = moduleList.getAWBParam('area_group_tool', 'AREA_GROUPS_CLUSTERING_WEIGHT')


        liGraph = LIGraph([])
        firstPassGraph = wrapper_gen_tool.getFirstPassLIGraph()
        # We should ignore the 'PLATFORM_MODULE'.
        # We may have a None graph if we are in the first pass.
        if(not firstPassGraph is None):
            liGraph.mergeModules(firstPassGraph.modules.values())

        self.firstPassLIGraph = liGraph


        # elaborate area group representation. This may be used in configuring later stages. 
        areaGroups = self.elaborateAreaConstraints(moduleList)
        pickle_handle = open(_areaConstraintsFileElaborated(moduleList), 'wb')
        pickle.dump(areaGroups, pickle_handle, protocol=-1)
        pickle_handle.close()                 


        # if we are only building logs, then we can stop. 
        if (moduleList.getAWBParam('bsv_tool', 'BUILD_LOGS_ONLY')):
            return  

        # We'll build a rather complex function to emit area group constraints 
        def area_group_closure(moduleList):

             def area_group(target, source, env):

                 # have we built these area groups before? If we have,
                 # then, we'll get a pickle which we can read in and
                 # operate on.
                 areaGroupsPrevious = None 
                 if(os.path.exists(_areaConstraintsFile(moduleList))):
                     # We got some previous area groups.  We'll try to
                     # reuse the solution to save on compile time.
                     pickle_handle = open(_areaConstraintsFile(moduleList), 'rb')
                     areaGroupsPrevious = pickle.load(pickle_handle)
                     pickle_handle.close()
                 
                 areaGroupsFinal = None
                 # If we got a previous area group, we'll attempt to
                 # reuse its knowledge
                 if(not areaGroupsPrevious is None):
                     areaGroupsModified = copy.deepcopy(areaGroups)
                     # If the area didn't change much (within, say, a
                     # few percent), we will reuse previous placement
                     # information.
                     allowableAreaDelta = 1.01
                     for groupName in areaGroupsPrevious:
                         if(groupName in areaGroupsModified):
                             previousGroupObject = areaGroupsPrevious[groupName]
                             modifiedGroupObject = areaGroupsModified[groupName]
                             if((modifiedGroupObject.area > previousGroupObject.area/allowableAreaDelta) and (modifiedGroupObject.area < previousGroupObject.area*allowableAreaDelta)):
                                 modifiedGroupObject.xDimension = previousGroupObject.xDimension
                                 modifiedGroupObject.yDimension = previousGroupObject.yDimension

                     areaGroupsFinal = self.solveILPPartial(areaGroupsModified, fileprefix="partial_ilp_reuse_")

                 # Either we didn't have the previous information, or
                 # we failed to use it.
                 if(areaGroupsFinal is None):        
                     areaGroupsFinal = self.solveILPPartial(areaGroups)

                 # We failed to assign area groups.  Eventually, we
                 # could demote this to a warning.
                 if(areaGroupsFinal is None):      
                     print "Failed to obtain area groups"
                     exit(1)

                 # Sort area groups topologically, annotating each area group
                 # with a sortIdx field.
                 self.sort_area_groups(areaGroupsFinal)

                 # Now that we've solved (to some extent) the area
                 # group mapping problem we can dump the results for 
                 # the build tree. 

                 pickle_handle = open(_areaConstraintsFilePlaced(moduleList), 'wb')
                 pickle.dump(areaGroupsFinal, pickle_handle, protocol=-1)
                 pickle_handle.close()                 
                 
             return area_group

        # expose this dependency to the backend tools.
        moduleList.topModule.moduleDependency['AREA_GROUPS'] = [_areaConstraintsFilePlaced(moduleList)]

        # We need to get the resources for all modules, except the top module, which can change. 
        resources = [dep for dep in moduleList.getAllDependencies('RESOURCES')]

        areagroup = moduleList.env.Command( 
            [_areaConstraintsFilePlaced(moduleList)],
            resources + map(modify_path_hw, moduleList.getAllDependenciesWithPaths('GIVEN_AREA_CONSTRAINTS')),
            area_group_closure(moduleList)
            )                   
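
The reuse test in area_group above keeps a previous group's placed dimensions only when the new area estimate is within roughly one percent of the old one (allowableAreaDelta = 1.01). A hedged restatement with invented numbers:

def area_close_enough(new_area, old_area, allowable_delta=1.01):
    # Mirrors the bound used above: the new area must fall strictly between
    # old_area / allowable_delta and old_area * allowable_delta.
    return (new_area > old_area / allowable_delta) and \
           (new_area < old_area * allowable_delta)

# area_close_enough(1005.0, 1000.0) -> True  (within ~1%, reuse x/y dimensions)
# area_close_enough(1030.0, 1000.0) -> False (changed too much, re-solve placement)
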
Example #9
File: BSV.py Project: chenm001/leap
    def build_synth_boundary(self, moduleList, module):
        if (self.pipeline_debug != 0):
            print "Working on " + module.name

        env = moduleList.env
        BSVS = moduleList.getSynthBoundaryDependencies(module, 'GIVEN_BSVS')
        # each submodel will have a generated BSV
        GEN_BSVS = moduleList.getSynthBoundaryDependencies(module, 'GEN_BSVS')
        APM_FILE = moduleList.env['DEFS']['APM_FILE']
        BSC = moduleList.env['DEFS']['BSC']

        MODULE_PATH = get_build_path(moduleList, module)

        ##
        ## Load intra-Bluespec dependence already computed.  This information will
        ## ultimately drive the building of Bluespec modules.
        ##
        env.ParseDepends(MODULE_PATH + '/' + module.dependsFile,
                         must_exist=not moduleList.env.GetOption('clean'))

        # This function is responsible for creating build rules for
        # subdirectories.  It must be called or no subdirectory builds
        # will happen since scons won't have the recipe.
        self.setup_module_build(moduleList, MODULE_PATH)

        if not os.path.isdir(self.TMP_BSC_DIR):
            os.mkdir(self.TMP_BSC_DIR)

        moduleList.env.VariantDir(MODULE_PATH + '/' + self.TMP_BSC_DIR,
                                  '.',
                                  duplicate=0)

        # set up builds for the various bsv of this synthesis
        # boundary.  One wonders if we could handle this as a single
        # global operation.
        bsc_builds = []
        for bsv in BSVS + GEN_BSVS:
            bsc_builds += env.BSC(
                MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' +
                bsv.replace('.bsv', ''), MODULE_PATH + '/' + bsv)

        firstPassLIGraph = wrapper_gen_tool.getFirstPassLIGraph()
        # In the second pass of the LIM build, we don't need to build
        # any modules, except in some cases where we turn on
        # module-specific optimizations, as in the case of central
        # cache.  If we do need to rebuild, we'll see a tag.
        if ((module.name != moduleList.topModule.name)
                and (not firstPassLIGraph is None)
                and (module.name in firstPassLIGraph.modules) and
            (firstPassLIGraph.modules[module.name].getAttribute('RESYNTHESIZE')
             is None)):
            return

        # This should not be a for loop.
        for bsv in [model.get_wrapper(module)]:
            if env.GetOption('clean'):
                os.system('rm -f ' + MODULE_PATH + '/' +
                          bsv.replace('Wrapper.bsv', 'Log.bsv'))
                os.system('rm -f ' + MODULE_PATH + '/' +
                          bsv.replace('.bsv', '_con_size.bsh'))

            ##
            ## First pass just generates a log file to figure out cross synthesis
            ## boundary soft connection array sizes.
            ##
            ## All but the top level build need the log build pass to compute
            ## the size of the external soft connection vector.  The top level has
            ## no exposed connections and can generate the log file, needed
            ## for global strings, during the final build.
            ##
            logfile = model.get_logfile(moduleList, module)
            module.moduleDependency['BSV_LOG'] += [logfile]
            module.moduleDependency['GEN_LOGS'] = [logfile]
            if (module.name != moduleList.topModule.name):
                log = env.BSC_LOG_ONLY(
                    logfile,
                    MODULE_PATH + '/' + bsv.replace('Wrapper.bsv', 'Log'))

                ##
                ## Parse the log, generate a stub file
                ##

                stub_name = bsv.replace('.bsv', '_con_size.bsh')

                def build_con_size_bsh_closure(target, source, env):
                    liGraph = LIGraph(li_module.parseLogfiles(source))
                    # Should have only one module...
                    if (len(liGraph.modules) == 0):
                        bshModule = LIModule(module.name, module.name)
                    else:
                        bshModule = liGraph.modules.values()[0]
                    bsh_handle = open(str(target[0]), 'w')
                    wrapper_gen_tool.generateConnectionBSH(
                        bshModule, bsh_handle)
                    bsh_handle.close()

                stub = env.Command(MODULE_PATH + '/' + stub_name, log,
                                   build_con_size_bsh_closure)

            ##
            ## Now we are ready for the real build
            ##
            if (module.name != moduleList.topModule.name):
                wrapper_bo = env.BSC(
                    MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' +
                    bsv.replace('.bsv', ''), MODULE_PATH + '/' + bsv)
                moduleList.env.Depends(wrapper_bo, stub)
                module.moduleDependency['BO'] = [wrapper_bo]
                if (self.BUILD_LOGS_ONLY):
                    model_dir = self.hw_dir.Dir(
                        moduleList.env['DEFS']['ROOT_DIR_MODEL'])

                    module.moduleDependency['BSV_SCHED'] = \
                        [moduleList.env.Command(MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' + module.wrapperName() + '.ba.sched',
                                                wrapper_bo,
                                                'bluetcl ' + model_dir.File('sched.tcl').path + ' -p ' + self.ALL_BUILD_DIR_PATHS + ' --m ' + module.wrapperName() + ' > $TARGET')]

                    module.moduleDependency['BSV_PATH'] = \
                        [moduleList.env.Command(MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' + module.wrapperName() + '.ba.path',
                                                wrapper_bo,
                                                'bluetcl ' + model_dir.File('path.tcl').path + ' -p ' + self.ALL_BUILD_DIR_PATHS + ' --m ' + module.wrapperName() + ' > $TARGET')]

                    moduleList.topDependency += \
                        module.moduleDependency['BSV_SCHED'] + module.moduleDependency['BSV_PATH']

                    module.moduleDependency['BSV_IFC'] = \
                        [moduleList.env.Command(MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' + module.wrapperName() + '.ba.ifc',
                                                wrapper_bo,
                                                'bluetcl ' + model_dir.File('interfaceType.tcl').path + ' -p ' + self.ALL_BUILD_DIR_PATHS + ' --m ' + module.wrapperName() + ' | python site_scons/model/PythonTidy.py > $TARGET')]

                    moduleList.topDependency += module.moduleDependency[
                        'BSV_IFC']

            else:
                ## Top level build can generate the log in a single pass since no
                ## connections are exposed
                wrapper_bo = env.BSC_LOG(
                    MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' +
                    bsv.replace('.bsv', ''), MODULE_PATH + '/' + bsv)

                ## SCons doesn't deal well with logfile as a 2nd target to BSC_LOG rule,
                ## failing to derive dependence correctly.
                module.moduleDependency['BSV_BO'] = [wrapper_bo]
                log = env.Command(logfile, wrapper_bo, '')
                env.Precious(logfile)

                ## In case Bluespec build is the end of the build pipeline.
                if (not self.BUILD_LOGS_ONLY):
                    moduleList.topDependency += [log]

                ## The toplevel bo also depends on the synthesis of the build tree from log files.

            ##
            ## Meta-data written during compilation to separate files.
            ##
            glob_str = env.Command(
                MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' +
                bsv.replace('.bsv', '.str'), wrapper_bo, '')

            env.Precious(glob_str)
            module.moduleDependency['STR'] = [glob_str]

            ## All but the top level build need the log build pass to compute
            ## the size of the external soft connection vector.  The top level has
            ## no exposed connections and needs no log build pass.
            ##
            if (module.name != moduleList.topModule.name):
                if (self.pipeline_debug != 0):
                    print 'wrapper_bo: ' + str(wrapper_bo)
                    print 'stub: ' + str(stub)

                synth_stub_path = moduleList.env['DEFS'][
                    'ROOT_DIR_HW'] + '/' + module.buildPath + '/'
                synth_stub = synth_stub_path + module.name + '_synth.bsv'
                # This stub may be needed in certain compilation flows.  Note its presence here.
                module.moduleDependency['BSV_SYNTH'] = [
                    module.name + '_synth.bsv'
                ]
                module.moduleDependency['BSV_SYNTH_BSH'] = [
                    module.name + '_Wrapper_con_size.bsh'
                ]

                def build_synth_stub(target, source, env):
                    liGraph = LIGraph(li_module.parseLogfiles(source))
                    # Should have only one module...
                    if (len(liGraph.modules) == 0):
                        synthModule = LIModule(module.name, module.name)
                    else:
                        synthModule = liGraph.modules.values()[0]

                    synth_handle = open(target[0].path, 'w')
                    wrapper_gen_tool.generateSynthWrapper(
                        synthModule,
                        synth_handle,
                        moduleList.localPlatformName,
                        moduleType=module.interfaceType,
                        extraImports=module.extraImports,
                        synthBoundaryModule=module)
                    synth_handle.close()

                env.Command(
                    synth_stub,  # target
                    logfile,
                    build_synth_stub)

            ##
            ## The mk_<wrapper>.v file is really built by the Wrapper() builder
            ## above.  We use NULL commands to convince SCons the file is generated.
            ## This seems easier than SCons SideEffect() calls, which don't clean
            ## targets.
            ##
            ## We also generate all this synth boundary's GEN_VS
            ##
            ext_gen_v = []
            for v in moduleList.getSynthBoundaryDependencies(module, 'GEN_VS'):
                ext_gen_v += [MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' + v]

            # Add the dependence for all Verilog noted above
            bld_v = env.Command([
                MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' +
                module.wrapperName() + '.v'
            ] + ext_gen_v, MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' +
                                bsv.replace('.bsv', '.bo'), '')
            env.Precious(bld_v)

            if (moduleList.getAWBParam('bsv_tool', 'BUILD_VERILOG') == 1):

                module.moduleDependency['VERILOG'] += [bld_v] + [ext_gen_v]

                module.moduleDependency['GEN_WRAPPER_VERILOGS'] = [
                    os.path.basename(module.wrapperName() + '.v')
                ]

            if (self.pipeline_debug != 0):
                print "Name: " + module.name

            # each synth boundary will produce a ba
            bld_ba = [
                env.Command([
                    MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' +
                    module.wrapperName() + '.ba'
                ], MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' +
                            bsv.replace('.bsv', '.bo'), '')
            ]

            module.moduleDependency['BA'] += bld_ba
            env.Precious(bld_ba)

            ##
            ## Build the Verilog black-box stub.
            ##
            bb = self.stubGenCommand(MODULE_PATH, module.boundaryName, bld_v)

            # Only the subordinate modules have stubs.
            # The platform module should not be enumerated here. This is a false dependency.
            if (module.name != moduleList.topModule.name):
                moduleList.topModule.moduleDependency['VERILOG_STUB'] += [bb]
                module.moduleDependency['GEN_VERILOG_STUB'] = [bb]

            return [bb]  # This doesn't seem to do anything.

 def __init__(self, parent):
     self.parent = parent
     self.getFirstPassLIGraph = wrapper_gen_tool.getFirstPassLIGraph()

  def __init__(self, moduleList):

    # if we have a deps build, don't do anything...
    if(moduleList.isDependsBuild):
        return

    self.firstPassLIGraph = wrapper_gen_tool.getFirstPassLIGraph()

    # A collector for all of the checkpoint objects we will gather/build in the following code. 
    dcps = []

    # Construct the tcl file 
    self.part = moduleList.getAWBParam('physical_platform_config', 'FPGA_PART_XILINX')

    apm_name = moduleList.compileDirectory + '/' + moduleList.apmName

    self.paramTclFile = moduleList.topModule.moduleDependency['PARAM_TCL'][0]

    # If the TMP_XILINX_DIR doesn't exist, create it.
    if not os.path.isdir(moduleList.env['DEFS']['TMP_XILINX_DIR']):
        os.mkdir(moduleList.env['DEFS']['TMP_XILINX_DIR'])

    # Gather Tcl files for handling constraints.
    self.tcl_headers = []
    if(len(moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_HEADERS')) > 0):
        self.tcl_headers = map(model.modify_path_hw, moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_HEADERS'))

    self.tcl_defs = []
    if(len(moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_DEFINITIONS')) > 0):
        self.tcl_defs = map(model.modify_path_hw, moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_DEFINITIONS'))

    self.tcl_funcs = []
    if(len(moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_FUNCTIONS')) > 0):
        self.tcl_funcs = map(model.modify_path_hw, moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_FUNCTIONS'))

    self.tcl_algs = []
    if(len(moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_ALGEBRAS')) > 0):
        self.tcl_algs = map(model.modify_path_hw, moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_ALGEBRAS'))

    self.tcl_bmms = []
    if(len(moduleList.getAllDependencies('GIVEN_XILINX_BMMS')) > 0):
        self.tcl_bmms = moduleList.getAllDependencies('GIVEN_XILINX_BMMS')

    self.tcl_elfs = []
    if(len(moduleList.getAllDependencies('GIVEN_XILINX_ELFS')) > 0):
        self.tcl_elfs = moduleList.getAllDependencies('GIVEN_XILINX_ELFS')

    self.tcl_ag = []

    #Emit area group definitions
    # If we got an area group placement data structure, now is the
    # time to convert it into a new constraint tcl. 
    self.area_group_file = moduleList.compileDirectory + '/areagroups.xdc'
    if ('AREA_GROUPS' in moduleList.topModule.moduleDependency):
        self.area_constraints = area_group_tool.AreaConstraints(moduleList)
        self.routeAG = (moduleList.getAWBParam('area_group_tool',
                                               'AREA_GROUPS_ROUTE_AG') != 0)

        # user ucf may be overridden by our area group ucf.  Put our
        # generated ucf first.
        #tcl_defs.insert(0,self.area_group_file)
        def area_group_ucf_closure(moduleList):

            def area_group_ucf(target, source, env):
                self.area_constraints.loadAreaConstraints()
                self.area_constraints.emitConstraintsVivado(self.area_group_file)

            return area_group_ucf

        moduleList.env.Command( 
            [self.area_group_file],
            self.area_constraints.areaConstraintsFile(),
            area_group_ucf_closure(moduleList)
            )                             
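        # A minimal sketch (illustration only, not part of the flow above) of
        # the closure pattern this build step relies on: the outer function
        # captures whatever state the emitter needs, and the returned inner
        # function has the (target, source, env) signature that SCons expects
        # of a Python function action.
        def make_emit_action(constraint_text):
            def emit_constraints(target, source, env):
                with open(str(target[0]), 'w') as handle:
                    handle.write(constraint_text)
            return emit_constraints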



    synthDepsBase =  moduleList.getAllDependencies('GEN_VIVADO_DCPS')

    # We got a stack of synthesis results for the LI modules.  We need
    # to convert these to design checkpoints for the fast place and
    # route flow.
    ngcModules = [module for module in moduleList.synthBoundaries() if not module.liIgnore]

    for module in ngcModules + [moduleList.topModule]:   
        # Did we get a dcp from the first pass?  If so, did the LIM
        # graph give code for this module?  If both are true, we will
        # link the old dcp in rather than regenerate it. 
        if ((not self.firstPassLIGraph is None) and (module.name in self.firstPassLIGraph.modules) and (self.firstPassLIGraph.modules[module.name].getAttribute('RESYNTHESIZE') is None)):
            if (li_module.linkFirstPassObject(moduleList, module, self.firstPassLIGraph, 'GEN_VIVADO_DCPS', 'GEN_VIVADO_DCPS') is None):
                module.moduleDependency['GEN_VIVADO_DCPS'] = [self.edf_to_dcp(moduleList, module)]
            
        # It's possible that we got a dcp from this compilation
        # pass. This will happen for the platform modules.
        elif (len(module.getDependencies('GEN_VIVADO_DCPS')) > 0):
            continue

        # we got neither. therefore, we must create a dcp out of the ngc.
        else:            
            module.moduleDependency['GEN_VIVADO_DCPS'] = [self.edf_to_dcp(moduleList, module)]
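    # A simplified sketch (illustration only, never invoked) of the three-way
    # decision made in the loop above: reuse a first-pass checkpoint when the
    # LIM graph has one and the module is not tagged RESYNTHESIZE, keep a
    # checkpoint that this pass already produced, and otherwise convert the
    # module's edf into a dcp.
    def checkpointAction(module, liGraph):
        inGraph = (liGraph is not None) and (module.name in liGraph.modules)
        if (inGraph and (liGraph.modules[module.name].getAttribute('RESYNTHESIZE') is None)):
            return 'link first-pass dcp'
        if (len(module.getDependencies('GEN_VIVADO_DCPS')) > 0):
            return 'dcp already generated this pass'
        return 'convert edf to dcp'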

                        
   

    synthDeps =  moduleList.getAllDependencies('GEN_VIVADO_DCPS')

    postSynthTcl = apm_name + '.physical.tcl'

    topWrapper = moduleList.topModule.wrapperName()

    newTclFile = open(postSynthTcl, 'w')
    newTclFile.write('create_project -force ' + moduleList.apmName + ' ' + moduleList.compileDirectory + ' -part ' + self.part + ' \n')

    # To resolve black boxes, we need to load checkpoints in the
    # following order:
    # 1) topModule
    # 2) platformModule
    # 3) user program, in any order

    userModules = [module for module in moduleList.synthBoundaries() if not module.liIgnore and not module.platformModule]
    platformModules = [module for module in moduleList.synthBoundaries() if not module.liIgnore and module.platformModule]
    checkpointCommands = [] 

    if(not moduleList.getAWBParamSafe('area_group_tool', 'AREA_GROUPS_ENABLE')):
         
        for module in [moduleList.topModule] + platformModules + userModules:   
            dcps.append(module.getDependencies('GEN_VIVADO_DCPS'))
            checkpoint = model.convertDependencies(module.getDependencies('GEN_VIVADO_DCPS'))
            
            # There should only be one checkpoint here. 
            if(len(checkpoint) > 1):
                print "Error: too many checkpoints for " + str(module.name) + ":  " + str(checkpoint)
                continue

            if(len(checkpoint) == 0):
                print "No checkpoints for " + str(module.name) + ":  " + str(checkpoint)  
                continue

            newTclFile.write('read_checkpoint ' + checkpoint[0] + '\n')

            
    # We're attempting the new, parallel flow. 
    else:
        # we need to issue separate place commands.  Therefore, we attempt to place each design separately.

        # There may be some special area groups in platforms -- handle them
        elabAreaConstraints = area_group_tool.AreaConstraints(moduleList)
        elabAreaConstraints.loadAreaConstraintsElaborated()
        #self.area_constraints = area_group_tool.AreaConstraints(moduleList)
        for module in userModules:  
            # Did we get a placed module already?
            if((module.name in elabAreaConstraints.constraints) and ('LIBRARY_DCP' in elabAreaConstraints.constraints[module.name].attributes)):
                # we need to locate the dcp corresponding to this area group. 
                candidates = moduleList.getAllDependencies('GIVEN_VIVADO_DCPS')
                for dcpCandidate in moduleList.getAllDependencies('GIVEN_VIVADO_DCPS'):                   
                   if dcpCandidate.attributes['module'] == module.name:         
                       dcp = str(model.modify_path_hw(dcpCandidate))
                       model.dictionary_list_create_append(module.moduleDependency, 'GEN_VIVADO_PLACEMENT_DCPS', dcp)
                       dcps.append(dcp)
            else:
                dcp = self.place_dcp(moduleList, module)
                model.dictionary_list_create_append(module.moduleDependency, 'GEN_VIVADO_PLACEMENT_DCPS', dcp)
                dcps.append(dcp)

        for module in [moduleList.topModule] + platformModules:   
            checkpoint = model.convertDependencies(module.getDependencies('GEN_VIVADO_DCPS'))
            dcps.append(checkpoint)
              
            # There should only be one checkpoint here. 
            if(len(checkpoint) > 1):
                print "Error: too many checkpoints for " + str(module.name) + ":  " + str(checkpoint)
                continue

            if(len(checkpoint) == 0):
                print "No checkpoints for " + str(module.name) + ":  " + str(checkpoint)  
                continue

            newTclFile.write('read_checkpoint ' + checkpoint[0] + '\n')

        # We can have parent/child relationships in the user modules.
        # Vivado requires that checkpoints be read in topological
        # order.
        def isBlackBox(module):
            if(self.firstPassLIGraph.modules[module.name].getAttribute('BLACK_BOX_AREA_GROUP')):
                return 1
            return 0


        for module in sorted(userModules, key=isBlackBox):   
            checkpoint = model.convertDependencies(module.getDependencies('GEN_VIVADO_PLACEMENT_DCPS'))

            # There should only be one checkpoint here. 
            if(len(checkpoint) > 1):
                print "Error: too many checkpoints for " + str(module.name) + ":  " + str(checkpoint)
                continue

            if(len(checkpoint) == 0):
                print "No checkpoints for " + str(module.name) + ":  " + str(checkpoint)  
                continue
            # read in the new checkpoint

            newTclFile.write('read_checkpoint ' + checkpoint[0] + '\n')

            emitPlatformAreaGroups = (moduleList.getAWBParam('area_group_tool',
                                                             'AREA_GROUPS_GROUP_PLATFORM_CODE') != 0)

            # platformModule refers to the main platform module, not
            # the subordinate device area groups.
            platformModule = (self.firstPassLIGraph.modules[module.name].getAttribute('BLACK_BOX_AREA_GROUP') is None) and (not self.firstPassLIGraph.modules[module.name].getAttribute('PLATFORM_MODULE') is None)

            allowAGPlatform = (not platformModule) or emitPlatformAreaGroups 

            emitDeviceGroups = (moduleList.getAWBParam('area_group_tool',
                                                       'AREA_GROUPS_PAR_DEVICE_AG') != 0)           
            allowAGDevice = (self.firstPassLIGraph.modules[module.name].getAttribute('BLACK_BOX_AREA_GROUP') is None) or emitDeviceGroups

            if (allowAGPlatform and allowAGDevice):               
                refName = module.wrapperName()
                lockPlacement = True
                lockRoute = self.routeAG
                # If this is a platform/user-defined area group, the wrapper name may be different.
                if (not self.firstPassLIGraph.modules[module.name].getAttribute('BLACK_BOX_AREA_GROUP') is None):
                    refName = elabAreaConstraints.constraints[module.name].attributes['MODULE_NAME']
                    lockPlacement = not ('NO_PLACE' in elabAreaConstraints.constraints[module.name].attributes) and lockPlacement
                    lockRoute = not ('NO_ROUTE' in elabAreaConstraints.constraints[module.name].attributes) and lockRoute

                checkpointCommands.append('if { [llength [get_cells -hier -filter {REF_NAME =~ "' + refName + '"}]] } {\n')            
                checkpointCommands.append('    puts "Locking ' + refName + '"\n')
                if (lockRoute):
                    # locking routing requires us to emit an area group. boo. 
                    ag_tcl = self.ag_constraints(moduleList, module)
                    self.tcl_ag.append(ag_tcl)
                    checkpointCommands.append('    source ' + str(ag_tcl) + '\n')
                    checkpointCommands.append('    lock_design -level routing [get_cells -hier -filter {REF_NAME =~ "' + refName + '"}]\n')            
                elif (lockPlacement):
                    checkpointCommands.append('    lock_design -level placement [get_cells -hier -filter {REF_NAME =~ "' + refName + '"}]\n')            
                checkpointCommands.append('}\n')
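    # A small, self-contained sketch (toy module names, not the real flow) of
    # the read_checkpoint ordering used above: the top module comes first,
    # then the platform modules, and user modules are sorted so that those
    # without the BLACK_BOX_AREA_GROUP attribute are read before those with
    # it, approximating the topological order Vivado requires.
    def orderedCheckpointReads(top, platforms, users, isBlackBox):
        ordered = [top] + platforms + sorted(users, key=isBlackBox)
        return ['read_checkpoint ' + name + '.dcp' for name in ordered]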

    given_netlists = [ moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + netlist for netlist in moduleList.getAllDependenciesWithPaths('GIVEN_NGCS') + moduleList.getAllDependenciesWithPaths('GIVEN_EDFS') ]

    for netlist in given_netlists:
        newTclFile.write('read_edif ' + netlist + '\n')

    # We have lots of dangling wires (Thanks, Bluespec).  Set the
    # following properties to silence the warnings. 
   
    newTclFile.write("set_property SEVERITY {Warning} [get_drc_checks NSTD-1]\n")
    newTclFile.write("set_property SEVERITY {Warning} [get_drc_checks UCIO-1]\n")

    newTclFile.write("link_design -top " + topWrapper + " -part " + self.part  + "\n")

    newTclFile.write("report_utilization -file " + apm_name + ".link.util\n")

    newTclFile.write("write_checkpoint -force " + apm_name + ".link.dcp\n")

    # lock down the area group routing.
    newTclFile.write("\n".join(checkpointCommands) + "\n")

    for elf in self.tcl_elfs:
        newTclFile.write("add_file " + model.modify_path_hw(elf) + "\n")
        newTclFile.write("set_property MEMDATA.ADDR_MAP_CELLS {" + str(elf.attributes['ref']) + "} [get_files " + model.modify_path_hw(elf) + "]\n")



    # We will now attempt to link in any bmm that we might have.
    for bmm in self.tcl_bmms:
        newTclFile.write("add_file " + model.modify_path_hw(bmm) + "\n")
        newTclFile.write("set_property SCOPED_TO_REF " + str(bmm.attributes['ref']) + " [get_files " + model.modify_path_hw(bmm) + "]\n")


 
    newTclFile.write('set IS_TOP_BUILD 1\n ')
    newTclFile.write('set IS_AREA_GROUP_BUILD 0\n ')
    newTclFile.write('set SYNTH_OBJECT ""\n')
    newTclFile.write('source ' + self.paramTclFile + '\n')

    for tcl_header in self.tcl_headers:
        newTclFile.write('source ' + tcl_header + '\n')

    for tcl_def in self.tcl_defs:
         newTclFile.write('source ' + tcl_def + '\n') 

    for tcl_func in self.tcl_funcs:
        newTclFile.write('source ' + tcl_func + '\n')

    for tcl_alg in self.tcl_algs:
        newTclFile.write('source ' + tcl_alg + '\n')

    def dumpPBlockCmd(tgt):
        return 'dumpPBlockUtilization "' + moduleList.compileDirectory + '/' + tgt + '.util"\n'

    newTclFile.write(dumpPBlockCmd('link'))
    newTclFile.write("report_timing_summary -file " + apm_name + ".map.twr\n\n")

    newTclFile.write("opt_design -directive AddRemap\n")
    newTclFile.write("report_utilization -file " + apm_name + ".opt.util\n")
    newTclFile.write(dumpPBlockCmd('opt'))
    newTclFile.write("write_checkpoint -force " + apm_name + ".opt.dcp\n\n")

    newTclFile.write("place_design -no_drc -directive WLDrivenBlockPlacement\n")
    newTclFile.write(dumpPBlockCmd('place'))

    newTclFile.write("phys_opt_design -directive AggressiveFanoutOpt\n")
    newTclFile.write("write_checkpoint -force " + apm_name + ".map.dcp\n")
    newTclFile.write(dumpPBlockCmd('phyopt'))
    newTclFile.write("report_utilization -file " + apm_name + ".map.util\n\n")

    newTclFile.write("route_design\n")
    newTclFile.write("write_checkpoint -force " + apm_name + ".par.dcp\n")
    newTclFile.write(dumpPBlockCmd('par'))
    newTclFile.write("report_timing_summary -file " + apm_name + ".par.twr\n\n")

    newTclFile.write("report_utilization -hierarchical -file " + apm_name + ".par.util\n")
    newTclFile.write("report_drc -file " + topWrapper + ".drc\n\n")
 
    newTclFile.write("write_bitstream -force " + apm_name + "_par.bit\n")

    newTclFile.close()

    # generate bitfile
    xilinx_bit = moduleList.env.Command(
      apm_name + '_par.bit',
      synthDeps + self.tcl_algs + self.tcl_defs + self.tcl_funcs + self.tcl_ag + [self.paramTclFile] + dcps + [postSynthTcl], 
      ['touch start.txt; vivado -verbose -mode batch -source ' + postSynthTcl + ' -log ' + moduleList.compileDirectory + '/postsynth.log'])


    moduleList.topModule.moduleDependency['BIT'] = [apm_name + '_par.bit']

    # We still need to generate a download script. 
    xilinx_loader.LOADER(moduleList)
Example #12
0
File: BSV.py Project: chenm001/leap
    def build_synth_boundary(self, moduleList, module):
        if (self.pipeline_debug != 0):
            print "Working on " + module.name

        env = moduleList.env
        BSVS = moduleList.getSynthBoundaryDependencies(module, 'GIVEN_BSVS')
        # each submodel will have a generated BSV
        GEN_BSVS = moduleList.getSynthBoundaryDependencies(module, 'GEN_BSVS')
        APM_FILE = moduleList.env['DEFS']['APM_FILE']
        BSC = moduleList.env['DEFS']['BSC']

        MODULE_PATH =  get_build_path(moduleList, module)

        ##
        ## Load intra-Bluespec dependence already computed.  This information will
        ## ultimately drive the building of Bluespec modules.
        ##
        env.ParseDepends(MODULE_PATH + '/' + module.dependsFile,
                         must_exist = not moduleList.env.GetOption('clean'))


        # This function is responsible for creating build rules for
        # subdirectories.  It must be called or no subdirectory builds
        # will happen since scons won't have the recipe.
        self.setup_module_build(moduleList, MODULE_PATH)

        if not os.path.isdir(self.TMP_BSC_DIR):
            os.mkdir(self.TMP_BSC_DIR)

        moduleList.env.VariantDir(MODULE_PATH + '/' + self.TMP_BSC_DIR, '.', duplicate=0)

        # set up builds for the various bsv of this synthesis
        # boundary.  One wonders if we could handle this as a single
        # global operation.
        bsc_builds = []
        for bsv in BSVS + GEN_BSVS:
            bsc_builds += env.BSC(MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' + bsv.replace('.bsv', ''), MODULE_PATH + '/' + bsv)


        firstPassLIGraph = wrapper_gen_tool.getFirstPassLIGraph()
        # In the second pass of the LIM build, we don't need to build
        # any modules, except in some cases where we turn on
        # module-specific optimizations, as in the case of central
        # cache.  If we do need to rebuild, we'll see a tag.
        if ((module.name != moduleList.topModule.name) and (not firstPassLIGraph is None) and (module.name in firstPassLIGraph.modules) and (firstPassLIGraph.modules[module.name].getAttribute('RESYNTHESIZE') is None)):
            return
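        # A minimal sketch of the reuse test above, assuming an LI graph whose
        # .modules dict maps module names to objects exposing getAttribute().
        # For non-top modules, a rebuild is only needed when the module is
        # missing from the first-pass graph or is explicitly tagged
        # RESYNTHESIZE.
        def needsRebuild(moduleName, liGraph):
            if ((liGraph is None) or (moduleName not in liGraph.modules)):
                return True
            return not (liGraph.modules[moduleName].getAttribute('RESYNTHESIZE') is None)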

        # This should not be a for loop.
        for bsv in [model.get_wrapper(module)]:
            if env.GetOption('clean'):
                os.system('rm -f ' + MODULE_PATH + '/' + bsv.replace('Wrapper.bsv', 'Log.bsv'))
                os.system('rm -f ' + MODULE_PATH + '/' + bsv.replace('.bsv', '_con_size.bsh'))

            ##
            ## First pass just generates a log file to figure out cross synthesis
            ## boundary soft connection array sizes.
            ##
            ## All but the top level build need the log build pass to compute
            ## the size of the external soft connection vector.  The top level has
            ## no exposed connections and can generate the log file, needed
            ## for global strings, during the final build.
            ##
            logfile = model.get_logfile(moduleList, module)
            module.moduleDependency['BSV_LOG'] += [logfile]
            module.moduleDependency['GEN_LOGS'] = [logfile]
            if (module.name != moduleList.topModule.name):
                log = env.BSC_LOG_ONLY(logfile, MODULE_PATH + '/' + bsv.replace('Wrapper.bsv', 'Log'))

                ##
                ## Parse the log, generate a stub file
                ##

                stub_name = bsv.replace('.bsv', '_con_size.bsh')

                def build_con_size_bsh_closure(target, source, env):
                    liGraph = LIGraph(li_module.parseLogfiles(source))
                    # Should have only one module...
                    if(len(liGraph.modules) == 0):
                        bshModule = LIModule(module.name, module.name)
                    else:
                        bshModule = liGraph.modules.values()[0]
                    bsh_handle = open(str(target[0]), 'w')
                    wrapper_gen_tool.generateConnectionBSH(bshModule, bsh_handle)
                    bsh_handle.close()

                stub = env.Command(MODULE_PATH + '/' + stub_name, log, build_con_size_bsh_closure)

            ##
            ## Now we are ready for the real build
            ##
            if (module.name != moduleList.topModule.name):
                wrapper_bo = env.BSC(MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' + bsv.replace('.bsv', ''), MODULE_PATH + '/' + bsv)
                moduleList.env.Depends(wrapper_bo, stub)
                module.moduleDependency['BO'] = [wrapper_bo]
                if (self.BUILD_LOGS_ONLY):
                    model_dir = self.hw_dir.Dir(moduleList.env['DEFS']['ROOT_DIR_MODEL'])

                    module.moduleDependency['BSV_SCHED'] = \
                        [moduleList.env.Command(MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' + module.wrapperName() + '.ba.sched',
                                                wrapper_bo,
                                                'bluetcl ' + model_dir.File('sched.tcl').path + ' -p ' + self.ALL_BUILD_DIR_PATHS + ' --m ' + module.wrapperName() + ' > $TARGET')]

                    module.moduleDependency['BSV_PATH'] = \
                        [moduleList.env.Command(MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' + module.wrapperName() + '.ba.path',
                                                wrapper_bo,
                                                'bluetcl ' + model_dir.File('path.tcl').path + ' -p ' + self.ALL_BUILD_DIR_PATHS + ' --m ' + module.wrapperName() + ' > $TARGET')]

                    moduleList.topDependency += \
                        module.moduleDependency['BSV_SCHED'] + module.moduleDependency['BSV_PATH']

                    module.moduleDependency['BSV_IFC'] = \
                        [moduleList.env.Command(MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' + module.wrapperName() + '.ba.ifc',
                                                wrapper_bo,
                                                'bluetcl ' + model_dir.File('interfaceType.tcl').path + ' -p ' + self.ALL_BUILD_DIR_PATHS + ' --m ' + module.wrapperName() + ' | python site_scons/model/PythonTidy.py > $TARGET')]

                    moduleList.topDependency += module.moduleDependency['BSV_IFC']




            else:
                ## Top level build can generate the log in a single pass since no
                ## connections are exposed
                wrapper_bo = env.BSC_LOG(MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' + bsv.replace('.bsv', ''),
                                         MODULE_PATH + '/' + bsv)

                ## SCons doesn't deal well with logfile as a 2nd target to BSC_LOG rule,
                ## failing to derive dependence correctly.
                module.moduleDependency['BSV_BO'] = [wrapper_bo]
                log = env.Command(logfile, wrapper_bo, '')
                env.Precious(logfile)

                ## In case Bluespec build is the end of the build pipeline.
                if (not self.BUILD_LOGS_ONLY):
                    moduleList.topDependency += [log]

                ## The toplevel bo also depends on the synthesis of the build tree from log files.

            ##
            ## Meta-data written during compilation to separate files.
            ##
            glob_str = env.Command(MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' + bsv.replace('.bsv', '.str'),
                                   wrapper_bo,
                                   '')

            env.Precious(glob_str)
            module.moduleDependency['STR'] = [glob_str]

            ## All but the top level build need the log build pass to compute
            ## the size of the external soft connection vector.  The top level has
            ## no exposed connections and needs no log build pass.
            ##
            if (module.name != moduleList.topModule.name):
                if (self.pipeline_debug != 0):
                    print 'wrapper_bo: ' + str(wrapper_bo)
                    print 'stub: ' + str(stub)

                synth_stub_path = moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + module.buildPath + '/'
                synth_stub = synth_stub_path + module.name +'_synth.bsv'
                # This stub may be needed in certain compilation flows.  Note its presence here.
                module.moduleDependency['BSV_SYNTH'] = [module.name +'_synth.bsv']
                module.moduleDependency['BSV_SYNTH_BSH'] = [module.name +'_Wrapper_con_size.bsh']

                def build_synth_stub(target, source, env):
                    liGraph = LIGraph(li_module.parseLogfiles(source))
                    # Should have only one module...
                    if(len(liGraph.modules) == 0):
                        synthModule = LIModule(module.name, module.name)
                    else:
                        synthModule = liGraph.modules.values()[0]

                    synth_handle = open(target[0].path, 'w')
                    wrapper_gen_tool.generateSynthWrapper(synthModule,
                                                          synth_handle,
                                                          moduleList.localPlatformName,
                                                          moduleType=module.interfaceType,
                                                          extraImports=module.extraImports,
                                                          synthBoundaryModule = module)
                    synth_handle.close()

                env.Command(synth_stub, # target
                            logfile,
                            build_synth_stub)

            ##
            ## The mk_<wrapper>.v file is really built by the Wrapper() builder
            ## above.  We use NULL commands to convince SCons the file is generated.
            ## This seems easier than SCons SideEffect() calls, which don't clean
            ## targets.
            ##
            ## We also generate all this synth boundary's GEN_VS
            ##
            ext_gen_v = []
            for v in moduleList.getSynthBoundaryDependencies(module, 'GEN_VS'):
                ext_gen_v += [MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' + v]


            # Add the dependence for all Verilog noted above
            bld_v = env.Command([MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' + module.wrapperName() + '.v'] + ext_gen_v,
                                MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' + bsv.replace('.bsv', '.bo'),
                                '')
            env.Precious(bld_v)

            if (moduleList.getAWBParam('bsv_tool', 'BUILD_VERILOG') == 1):

                module.moduleDependency['VERILOG'] += [bld_v] + [ext_gen_v]

                module.moduleDependency['GEN_WRAPPER_VERILOGS'] = [os.path.basename(module.wrapperName() + '.v')]

            if (self.pipeline_debug != 0):
                print "Name: " + module.name

            # each synth boundary will produce a ba
            bld_ba = [env.Command([MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' +  module.wrapperName() + '.ba'],
                                  MODULE_PATH + '/' + self.TMP_BSC_DIR + '/' + bsv.replace('.bsv', '.bo'),
                                  '')]

            module.moduleDependency['BA'] += bld_ba
            env.Precious(bld_ba)

            ##
            ## Build the Verilog black-box stub.
            ##
            bb = self.stubGenCommand(MODULE_PATH, module.boundaryName, bld_v)

            # Only the subordinate modules have stubs.
            # The platform module should not be enumerated here. This is a false dependency.
            if (module.name != moduleList.topModule.name):
                moduleList.topModule.moduleDependency['VERILOG_STUB'] += [bb]
                module.moduleDependency['GEN_VERILOG_STUB'] = [bb]

            return [bb] #This doesn't seem to do anything.
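# A self-contained illustration (not the real stubGenCommand) of what a
# Verilog black-box stub is: a module declaration that lists the ports but
# carries no implementation, so a parent netlist can reference the synthesis
# boundary without pulling in its contents.
def writeVerilogStub(moduleName, ports, handle):
    # ports is a list of (direction, name) pairs, e.g. ('input', 'CLK').
    handle.write('module ' + moduleName + '(\n')
    handle.write(',\n'.join('  ' + direction + ' ' + name
                            for (direction, name) in ports))
    handle.write('\n);\n')
    handle.write('  // black-box stub: no implementation\n')
    handle.write('endmodule\n')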
Example #13
0
    def __init__(self, moduleList):

        # if we have a deps build, don't do anything...
        if (moduleList.isDependsBuild):
            return

        self.firstPassLIGraph = wrapper_gen_tool.getFirstPassLIGraph()

        # A collector for all of the checkpoint objects we will gather/build in the following code.
        dcps = []

        # Construct the tcl file
        self.part = moduleList.getAWBParam('physical_platform_config',
                                           'FPGA_PART_XILINX')

        apm_name = moduleList.compileDirectory + '/' + moduleList.apmName

        self.paramTclFile = moduleList.topModule.moduleDependency['PARAM_TCL'][
            0]

        # If the TMP_FPGA_DIR doesn't exist, create it.
        if not os.path.isdir(moduleList.env['DEFS']['TMP_FPGA_DIR']):
            os.mkdir(moduleList.env['DEFS']['TMP_FPGA_DIR'])

        # Gather Tcl files for handling constraints.
        self.tcl_headers = []
        if (len(
                moduleList.getAllDependenciesWithPaths(
                    'GIVEN_VIVADO_TCL_HEADERS')) > 0):
            self.tcl_headers = map(
                model.modify_path_hw,
                moduleList.getAllDependenciesWithPaths(
                    'GIVEN_VIVADO_TCL_HEADERS'))

        self.tcl_defs = []
        if (len(
                moduleList.getAllDependenciesWithPaths(
                    'GIVEN_VIVADO_TCL_DEFINITIONS')) > 0):
            self.tcl_defs = map(
                model.modify_path_hw,
                moduleList.getAllDependenciesWithPaths(
                    'GIVEN_VIVADO_TCL_DEFINITIONS'))

        self.tcl_funcs = []
        if (len(
                moduleList.getAllDependenciesWithPaths(
                    'GIVEN_VIVADO_TCL_FUNCTIONS')) > 0):
            self.tcl_funcs = map(
                model.modify_path_hw,
                moduleList.getAllDependenciesWithPaths(
                    'GIVEN_VIVADO_TCL_FUNCTIONS'))

        self.tcl_algs = []
        if (len(
                moduleList.getAllDependenciesWithPaths(
                    'GIVEN_VIVADO_TCL_ALGEBRAS')) > 0):
            self.tcl_algs = map(
                model.modify_path_hw,
                moduleList.getAllDependenciesWithPaths(
                    'GIVEN_VIVADO_TCL_ALGEBRAS'))

        self.tcl_bmms = []
        if (len(moduleList.getAllDependencies('GIVEN_XILINX_BMMS')) > 0):
            self.tcl_bmms = moduleList.getAllDependencies('GIVEN_XILINX_BMMS')

        self.tcl_elfs = []
        if (len(moduleList.getAllDependencies('GIVEN_XILINX_ELFS')) > 0):
            self.tcl_elfs = moduleList.getAllDependencies('GIVEN_XILINX_ELFS')

        self.tcl_ag = []

        #Emit area group definitions
        # If we got an area group placement data structure, now is the
        # time to convert it into a new constraint tcl.
        self.area_group_file = moduleList.compileDirectory + '/areagroups.xdc'
        if ('AREA_GROUPS' in moduleList.topModule.moduleDependency):
            self.area_constraints = area_group_tool.AreaConstraints(moduleList)
            self.routeAG = (moduleList.getAWBParam(
                'area_group_tool', 'AREA_GROUPS_ROUTE_AG') != 0)

            # user ucf may be overridden by our area group ucf.  Put our
            # generated ucf first.
            #tcl_defs.insert(0,self.area_group_file)
            def area_group_ucf_closure(moduleList):
                def area_group_ucf(target, source, env):
                    self.area_constraints.loadAreaConstraints()
                    self.area_constraints.emitConstraintsVivado(
                        self.area_group_file)

                return area_group_ucf

            moduleList.env.Command([self.area_group_file],
                                   self.area_constraints.areaConstraintsFile(),
                                   area_group_ucf_closure(moduleList))

        synthDepsBase = moduleList.getAllDependencies('GEN_VIVADO_DCPS')

        # We got a stack of synthesis results for the LI modules.  We need
        # to convert these to design checkpoints for the fast place and
        # route flow.
        ngcModules = [
            module for module in moduleList.synthBoundaries()
            if not module.liIgnore
        ]

        for module in ngcModules + [moduleList.topModule]:
            # Did we get a dcp from the first pass?  If so, did the LIM
            # graph give code for this module?  If both are true, we will
            # link the old dcp in rather than regenerate it.
            if ((not self.firstPassLIGraph is None)
                    and (module.name in self.firstPassLIGraph.modules)
                    and (self.firstPassLIGraph.modules[
                        module.name].getAttribute('RESYNTHESIZE') is None)):
                if (li_module.linkFirstPassObject(
                        moduleList, module, self.firstPassLIGraph,
                        'GEN_VIVADO_DCPS', 'GEN_VIVADO_DCPS') is None):
                    module.moduleDependency['GEN_VIVADO_DCPS'] = [
                        self.edf_to_dcp(moduleList, module)
                    ]

            # It's possible that we got a dcp from this compilation
            # pass. This will happen for the platform modules.
            elif (len(module.getDependencies('GEN_VIVADO_DCPS')) > 0):
                continue

            # we got neither. therefore, we must create a dcp out of the ngc.
            else:
                module.moduleDependency['GEN_VIVADO_DCPS'] = [
                    self.edf_to_dcp(moduleList, module)
                ]

        synthDeps = moduleList.getAllDependencies('GEN_VIVADO_DCPS')

        postSynthTcl = apm_name + '.physical.tcl'

        topWrapper = moduleList.topModule.wrapperName()

        newTclFile = open(postSynthTcl, 'w')
        newTclFile.write('create_project -force ' + moduleList.apmName + ' ' +
                         moduleList.compileDirectory + ' -part ' + self.part +
                         ' \n')

        # To resolve black boxes, we need to load checkpoints in the
        # following order:
        # 1) topModule
        # 2) platformModule
        # 3) user program, in any order

        userModules = [
            module for module in moduleList.synthBoundaries()
            if not module.liIgnore and not module.platformModule
        ]
        platformModules = [
            module for module in moduleList.synthBoundaries()
            if not module.liIgnore and module.platformModule
        ]
        checkpointCommands = []

        if (not moduleList.getAWBParamSafe('area_group_tool',
                                           'AREA_GROUPS_ENABLE')):

            for module in [moduleList.topModule
                           ] + platformModules + userModules:
                dcps.append(module.getDependencies('GEN_VIVADO_DCPS'))
                checkpoint = model.convertDependencies(
                    module.getDependencies('GEN_VIVADO_DCPS'))

                # There should only be one checkpoint here.
                if (len(checkpoint) > 1):
                    print "Error: too many checkpoints for " + str(
                        module.name) + ":  " + str(checkpoint)
                    continue

                if (len(checkpoint) == 0):
                    print "No checkpoints for " + str(
                        module.name) + ":  " + str(checkpoint)
                    continue

                newTclFile.write('read_checkpoint ' + checkpoint[0] + '\n')

        # We're attempting the new, parallel flow.
        else:
            # we need to issue separate place commands.  Therefore, we attempt to place each design separately.

            # There may be some special area groups in platforms -- handle them
            elabAreaConstraints = area_group_tool.AreaConstraints(moduleList)
            elabAreaConstraints.loadAreaConstraintsElaborated()
            #self.area_constraints = area_group_tool.AreaConstraints(moduleList)
            for module in userModules:
                # Did we get a placed module already?
                if ((module.name in elabAreaConstraints.constraints)
                        and ('LIBRARY_DCP' in elabAreaConstraints.constraints[
                            module.name].attributes)):
                    # we need to locate the dcp corresponding to this area group.
                    candidates = moduleList.getAllDependencies(
                        'GIVEN_VIVADO_DCPS')
                    for dcpCandidate in moduleList.getAllDependencies(
                            'GIVEN_VIVADO_DCPS'):
                        if dcpCandidate.attributes['module'] == module.name:
                            dcp = str(model.modify_path_hw(dcpCandidate))
                            model.dictionary_list_create_append(
                                module.moduleDependency,
                                'GEN_VIVADO_PLACEMENT_DCPS', dcp)
                            dcps.append(dcp)
                else:
                    dcp = self.place_dcp(moduleList, module)
                    model.dictionary_list_create_append(
                        module.moduleDependency, 'GEN_VIVADO_PLACEMENT_DCPS',
                        dcp)
                    dcps.append(dcp)

            for module in [moduleList.topModule] + platformModules:
                checkpoint = model.convertDependencies(
                    module.getDependencies('GEN_VIVADO_DCPS'))
                dcps.append(checkpoint)

                # There should only be one checkpoint here.
                if (len(checkpoint) > 1):
                    print "Error: too many checkpoints for " + str(
                        module.name) + ":  " + str(checkpoint)
                    continue

                if (len(checkpoint) == 0):
                    print "No checkpoints for " + str(
                        module.name) + ":  " + str(checkpoint)
                    continue

                newTclFile.write('read_checkpoint ' + checkpoint[0] + '\n')

            # We can have parent/child relationships in the user modules.
            # Vivado requires that checkpoints be read in topological
            # order.
            def isBlackBox(module):
                if (self.firstPassLIGraph.modules[module.name].getAttribute(
                        'BLACK_BOX_AREA_GROUP')):
                    return 1
                return 0

            for module in sorted(userModules, key=isBlackBox):
                checkpoint = model.convertDependencies(
                    module.getDependencies('GEN_VIVADO_PLACEMENT_DCPS'))

                # There should only be one checkpoint here.
                if (len(checkpoint) > 1):
                    print "Error: too many checkpoints for " + str(
                        module.name) + ":  " + str(checkpoint)
                    continue

                if (len(checkpoint) == 0):
                    print "No checkpoints for " + str(
                        module.name) + ":  " + str(checkpoint)
                    continue
                # read in the new checkpoint

                newTclFile.write('read_checkpoint ' + checkpoint[0] + '\n')

                emitPlatformAreaGroups = (moduleList.getAWBParam(
                    'area_group_tool', 'AREA_GROUPS_GROUP_PLATFORM_CODE') != 0)

                # platformModule refers to the main platform module, not
                # the subordinate device area groups.
                platformModule = (
                    self.firstPassLIGraph.modules[module.name].getAttribute(
                        'BLACK_BOX_AREA_GROUP') is
                    None) and (not self.firstPassLIGraph.modules[
                        module.name].getAttribute('PLATFORM_MODULE') is None)

                allowAGPlatform = (
                    not platformModule) or emitPlatformAreaGroups

                emitDeviceGroups = (moduleList.getAWBParam(
                    'area_group_tool', 'AREA_GROUPS_PAR_DEVICE_AG') != 0)
                allowAGDevice = (self.firstPassLIGraph.modules[
                    module.name].getAttribute('BLACK_BOX_AREA_GROUP') is
                                 None) or emitDeviceGroups

                if (allowAGPlatform and allowAGDevice):
                    refName = module.wrapperName()
                    lockPlacement = True
                    lockRoute = self.routeAG
                    # If this is a platform/user-defined area group, the wrapper name may be different.
                    if (not self.firstPassLIGraph.modules[module.name].
                            getAttribute('BLACK_BOX_AREA_GROUP') is None):
                        refName = elabAreaConstraints.constraints[
                            module.name].attributes['MODULE_NAME']
                        lockPlacement = not (
                            'NO_PLACE' in elabAreaConstraints.constraints[
                                module.name].attributes) and lockPlacement
                        lockRoute = not (
                            'NO_ROUTE' in elabAreaConstraints.constraints[
                                module.name].attributes) and lockRoute

                    checkpointCommands.append(
                        'if { [llength [get_cells -hier -filter {REF_NAME =~ "'
                        + refName + '"}]] } {\n')
                    checkpointCommands.append('    puts "Locking ' + refName +
                                              '"\n')
                    if (lockRoute):
                        # locking routing requires us to emit an area group. boo.
                        ag_tcl = self.ag_constraints(moduleList, module)
                        self.tcl_ag.append(ag_tcl)
                        checkpointCommands.append('    source ' + str(ag_tcl) +
                                                  '\n')
                        checkpointCommands.append(
                            '    lock_design -level routing [get_cells -hier -filter {REF_NAME =~ "'
                            + refName + '"}]\n')
                    elif (lockPlacement):
                        checkpointCommands.append(
                            '    lock_design -level placement [get_cells -hier -filter {REF_NAME =~ "'
                            + refName + '"}]\n')
                    checkpointCommands.append('}\n')

        given_netlists = [
            moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + netlist for netlist
            in moduleList.getAllDependenciesWithPaths('GIVEN_NGCS') +
            moduleList.getAllDependenciesWithPaths('GIVEN_EDFS')
        ]

        for netlist in given_netlists:
            newTclFile.write('read_edif ' + netlist + '\n')

        # We have lots of dangling wires (Thanks, Bluespec).  Set the
        # following properties to silence the warnings.

        newTclFile.write(
            "set_property SEVERITY {Warning} [get_drc_checks NSTD-1]\n")
        newTclFile.write(
            "set_property SEVERITY {Warning} [get_drc_checks UCIO-1]\n")

        newTclFile.write("link_design -top " + topWrapper + " -part " +
                         self.part + "\n")

        newTclFile.write("report_utilization -file " + apm_name +
                         ".link.util\n")

        newTclFile.write("write_checkpoint -force " + apm_name + ".link.dcp\n")

        # lock down the area group routing.
        newTclFile.write("\n".join(checkpointCommands) + "\n")

        for elf in self.tcl_elfs:
            newTclFile.write("add_file " + model.modify_path_hw(elf) + "\n")
            newTclFile.write("set_property MEMDATA.ADDR_MAP_CELLS {" +
                             str(elf.attributes['ref']) + "} [get_files " +
                             model.modify_path_hw(elf) + "]\n")

        # We will now attempt to link in any bmm that we might have.
        for bmm in self.tcl_bmms:
            newTclFile.write("add_file " + model.modify_path_hw(bmm) + "\n")
            newTclFile.write("set_property SCOPED_TO_REF " +
                             str(bmm.attributes['ref']) + " [get_files " +
                             model.modify_path_hw(bmm) + "]\n")

        newTclFile.write('set IS_TOP_BUILD 1\n ')
        newTclFile.write('set IS_AREA_GROUP_BUILD 0\n ')
        newTclFile.write('set SYNTH_OBJECT ""\n')
        newTclFile.write('source ' + self.paramTclFile + '\n')

        for tcl_header in self.tcl_headers:
            newTclFile.write('source ' + tcl_header + '\n')

        for tcl_def in self.tcl_defs:
            newTclFile.write('source ' + tcl_def + '\n')

        for tcl_func in self.tcl_funcs:
            newTclFile.write('source ' + tcl_func + '\n')

        for tcl_alg in self.tcl_algs:
            newTclFile.write('source ' + tcl_alg + '\n')

        def dumpPBlockCmd(tgt):
            return 'dumpPBlockUtilization "' + moduleList.compileDirectory + '/' + tgt + '.util"\n'

        newTclFile.write(dumpPBlockCmd('link'))
        newTclFile.write("report_timing_summary -file " + apm_name +
                         ".map.twr\n\n")

        newTclFile.write("opt_design -directive AddRemap\n")
        newTclFile.write("report_utilization -file " + apm_name +
                         ".opt.util\n")
        newTclFile.write(dumpPBlockCmd('opt'))
        newTclFile.write("write_checkpoint -force " + apm_name +
                         ".opt.dcp\n\n")

        newTclFile.write(
            "place_design -no_drc -directive WLDrivenBlockPlacement\n")
        newTclFile.write(dumpPBlockCmd('place'))

        newTclFile.write("phys_opt_design -directive AggressiveFanoutOpt\n")
        newTclFile.write("write_checkpoint -force " + apm_name + ".map.dcp\n")
        newTclFile.write(dumpPBlockCmd('phyopt'))
        newTclFile.write("report_utilization -file " + apm_name +
                         ".map.util\n\n")

        newTclFile.write("route_design\n")
        newTclFile.write("write_checkpoint -force " + apm_name + ".par.dcp\n")
        newTclFile.write(dumpPBlockCmd('par'))
        newTclFile.write("report_timing_summary -file " + apm_name +
                         ".par.twr\n\n")

        newTclFile.write("report_utilization -hierarchical -file " + apm_name +
                         ".par.util\n")
        newTclFile.write("report_drc -file " + topWrapper + ".drc\n\n")

        newTclFile.write("write_bitstream -force " + apm_name + "_par.bit\n")

        newTclFile.close()

        # generate bitfile
        xilinx_bit = moduleList.env.Command(
            apm_name + '_par.bit',
            synthDeps + self.tcl_algs + self.tcl_defs + self.tcl_funcs +
            self.tcl_ag + [self.paramTclFile] + dcps + [postSynthTcl], [
                'touch start.txt; vivado -verbose -mode batch -source ' +
                postSynthTcl + ' -log ' + moduleList.compileDirectory +
                '/postsynth.log'
            ])

        moduleList.topModule.moduleDependency['BIT'] = [apm_name + '_par.bit']

        # We still need to generate a download script.
        xilinx_loader.LOADER(moduleList)
Example #14
0
    def __init__(self, moduleList, isPrimaryBuildTarget):

        # if we have a deps build, don't do anything...
        if (moduleList.isDependsBuild):
            return

        APM_NAME = moduleList.env['DEFS']['APM_NAME']
        BSC = moduleList.env['DEFS']['BSC']
        inc_paths = moduleList.swIncDir  # we need to depend on libasim

        self.firstPassLIGraph = wrapper_gen_tool.getFirstPassLIGraph()

        # This is not correct for LIM builds and needs to be fixed.
        TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
        ALL_DIRS_FROM_ROOT = moduleList.env['DEFS']['ALL_HW_DIRS']
        ALL_BUILD_DIRS_FROM_ROOT = model.transform_string_list(
            ALL_DIRS_FROM_ROOT, ':', '', '/' + TMP_BSC_DIR)
        ALL_LIB_DIRS_FROM_ROOT = ALL_DIRS_FROM_ROOT + ':' + ALL_BUILD_DIRS_FROM_ROOT

        # Due to the bluespec linker, for LI second pass builds, the final
        # verilog link step must occur in a different directory than the
        # bsc object code wrapper compilation step.  However, non-LIM
        # linker builds need to build in the original .bsc directory to
        # pick up VPI.
        vexe_vdir = moduleList.env['DEFS'][
            'ROOT_DIR_HW'] + '/' + moduleList.env['DEFS'][
                'ROOT_DIR_MODEL'] + '/' + moduleList.env['DEFS']['TMP_BSC_DIR']
        if (not self.firstPassLIGraph is None):
            vexe_vdir = vexe_vdir + '_vlog'

        if not os.path.isdir(vexe_vdir):
            os.mkdir(vexe_vdir)

        LI_LINK_DIR = ""
        if (not self.firstPassLIGraph is None):
            LI_LINK_DIR = model.get_build_path(moduleList,
                                               moduleList.topModule) + "/.li/"
            inc_paths += [LI_LINK_DIR]
            ALL_LIB_DIRS_FROM_ROOT = LI_LINK_DIR + ':' + ALL_LIB_DIRS_FROM_ROOT

        liCodeType = [
            'VERILOG', 'GIVEN_VERILOG_HS', 'GEN_VPI_CS', 'GEN_VPI_HS'
        ]

        # This can be refactored as a function.

        if (not self.firstPassLIGraph is None):
            for moduleName in self.firstPassLIGraph.modules:
                moduleObject = self.firstPassLIGraph.modules[moduleName]
                # we also need the module list object
                moduleListObject = moduleList.modules[moduleName]
                for codeType in liCodeType:
                    # If we're linking, clean out any previous code dependencies.  These are guaranteed not to be used.
                    moduleListObject.moduleDependency[codeType] = []
                    li_module.linkFirstPassObject(moduleList,
                                                  moduleListObject,
                                                  self.firstPassLIGraph,
                                                  codeType,
                                                  codeType,
                                                  linkDirectory=vexe_vdir)
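        # A sketch of the refactoring suggested above (illustration only,
        # never invoked here): relink every first-pass object of the given
        # code types into the link directory, clearing stale dependencies
        # first.
        def relinkFirstPassObjects(graph, codeTypes, linkDirectory):
            for moduleName in graph.modules:
                moduleListObject = moduleList.modules[moduleName]
                for codeType in codeTypes:
                    moduleListObject.moduleDependency[codeType] = []
                    li_module.linkFirstPassObject(moduleList, moduleListObject,
                                                  graph, codeType, codeType,
                                                  linkDirectory=linkDirectory)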

        bsc_version = bsv_tool.getBluespecVersion()

        ldflags = ''
        for ld_file in moduleList.getAllDependenciesWithPaths(
                'GIVEN_BLUESIM_LDFLAGSS'):
            ldHandle = open(
                moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + ld_file, 'r')
            ldflags += ldHandle.read() + ' '

        BSC_FLAGS_VERILOG = '-steps 10000000 +RTS -K1000M -RTS -keep-fires -aggressive-conditions -wait-for-license -no-show-method-conf -no-opt-bool -licenseWarning 7 -elab -show-schedule ' + ldflags + ' -verilog -v -vsim iverilog '

        # Build in parallel.
        n_jobs = moduleList.env.GetOption('num_jobs')
        if (bsc_version >= 30006):
            BSC_FLAGS_VERILOG += '-parallel-sim-link ' + str(n_jobs) + ' '

        for path in inc_paths:
            BSC_FLAGS_VERILOG += ' -I ' + path + ' '  #+ '-Xv -I' + path + ' '

        LDFLAGS = moduleList.env['DEFS']['LDFLAGS']
        TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
        ROOT_WRAPPER_SYNTH_ID = 'mk_' + moduleList.env['DEFS'][
            'ROOT_DIR_MODEL'] + '_Wrapper'

        vexe_gen_command = \
            BSC + ' ' + BSC_FLAGS_VERILOG + ' -vdir ' + vexe_vdir + ' -simdir ' + vexe_vdir + ' -bdir ' + vexe_vdir +' -p +:' +  ALL_LIB_DIRS_FROM_ROOT + ' -vsearch +:' + ALL_LIB_DIRS_FROM_ROOT + ' ' + \
            ' -o $TARGET'

        if (bsc_version >= 13013):
            # 2008.01.A compiler allows us to pass C++ arguments.
            if (model.getDebug(moduleList)):
                vexe_gen_command += ' -Xc++ -O0'
            else:
                vexe_gen_command += ' -Xc++ -O1'

            # g++ 4.5.2 is complaining about overflowing the var tracking table

            if (model.getGccVersion() >= 40501):
                vexe_gen_command += ' -Xc++ -fno-var-tracking-assignments'

        defs = (software_tool.host_defs()).split(" ")
        for definition in defs:
            vexe_gen_command += ' -Xc++ ' + definition + ' -Xc ' + definition

        # Hack to link against pthreads.  Really we should have a better solution.
        vexe_gen_command += ' -Xl -lpthread '

        # construct full path to BAs
        def modify_path(str):
            array = str.split('/')
            file = array.pop()
            return moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + '/'.join(
                array) + '/' + TMP_BSC_DIR + '/' + file

        # Use SystemVerilog 2005
        if (moduleList.getAWBParam('verilog_tool', 'ENABLE_SYSTEM_VERILOG')):
            vexe_gen_command += ' -Xv -g2005-sv '

        # Allow .vh/.sv file extensions etc.
        # vexe_gen_command += ' -Xv -Y.vh -Xv -Y.sv '

        # Bluespec requires that source files terminate the command line.
        vexe_gen_command += '-verilog -e ' + ROOT_WRAPPER_SYNTH_ID + ' ' +\
                            moduleList.env['DEFS']['BDPI_CS']

        if (model.getBuildPipelineDebug(moduleList) != 0):
            for m in moduleList.getAllDependencies('BA'):
                print 'BA dep: ' + str(m)
            for m in moduleList.getAllDependencies('VERILOG'):
                print 'VL dep: ' + str(m)
            for m in moduleList.getAllDependencies('VHDL'):
                print 'BA dep: ' + str(m)

        # Generate a thin wrapper around the verilog executable.  This
        # wrapper is used to address a problem in iverilog in which the
        # simulator does not support shared library search paths.  The
        # current wrapper only works for iverilog.  Due to brokenness in
        # the iverilog argument parser, we must construct a correct
        # iverilog command line by analyzing its compiled script.  Also,
        # this script does not yet pass through all of the arguments that
        # it should.
        def generate_vexe_wrapper(target, source, env):
            wrapper_handle = open(str(target[0]), 'w')
            wrapper_handle.write('#!/usr/bin/perl\n')
            wrapper_handle.write('# generated by verilog.py\n')
            wrapper_handle.write('$platform = $ENV{"PLATFORM_DIRECTORY"};\n')
            wrapper_handle.write('@script = `cat $platform/' + TMP_BSC_DIR +
                                 '/' + APM_NAME + '_hw.exe' + '`;\n')
            wrapper_handle.write('$script[0] =~ s/#!/ /g;\n')
            wrapper_handle.write('$vvp = $script[0];\n')
            wrapper_handle.write('chomp($vvp);\n')
            wrapper_handle.write(
                'exec("$vvp -m$platform/directc_mk_model_Wrapper.so $platform/'
                + TMP_BSC_DIR + '/' + APM_NAME + '_hw.exe' +
                ' +bscvcd \$* ");\n')
            wrapper_handle.close()
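        # A small, self-contained sketch of the trick the wrapper above relies
        # on: the bsc-generated executable is a script whose first line is a
        # "#!<path to vvp>" shebang, so reading that line and stripping the
        # "#!" recovers the simulator binary to invoke directly.  (Toy helper;
        # the real wrapper is the Perl script written above.)
        def vvpFromScript(scriptPath):
            with open(scriptPath, 'r') as handle:
                firstLine = handle.readline()
            return firstLine.replace('#!', '', 1).strip()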

        def modify_path_ba_local(path):
            return bsv_tool.modify_path_ba(moduleList, path)

        # Bluesim builds apparently touch this code. This control block
        # preserves their behavior, but it is unclear why the verilog build is
        # involved.
        if (isPrimaryBuildTarget):
            vbinDeps = []

            # If we got a lim graph, we'll pick up many of our dependencies from it.
            # These were annotated in the top module above. Really, this seems unclean.
            # We should build a graph during the second pass and just use it.
            if (not self.firstPassLIGraph is None):
                # Collect linked dependencies for every module
                for moduleName in self.firstPassLIGraph.modules:
                    moduleListObject = moduleList.modules[moduleName]
                    vbinDeps += moduleList.getDependencies(
                        moduleListObject,
                        'VERILOG') + moduleList.getDependencies(
                            moduleListObject,
                            'GIVEN_VERILOG_HS') + moduleList.getDependencies(
                                moduleListObject,
                                'GEN_VPI_HS') + moduleList.getDependencies(
                                    moduleListObject,
                                    'GEN_VPI_CS') + moduleList.getDependencies(
                                        moduleListObject,
                                        'VHDL') + moduleList.getDependencies(
                                            moduleListObject,
                                            'BA') + moduleList.getDependencies(
                                                moduleListObject, 'GEN_BAS')

                vbinDeps += moduleList.getDependencies(
                    moduleList.topModule,
                    'VERILOG') + moduleList.getDependencies(
                        moduleList.topModule,
                        'GIVEN_VERILOG_HS') + moduleList.getDependencies(
                            moduleList.topModule,
                            'GEN_VPI_HS') + moduleList.getDependencies(
                                moduleList.topModule, 'GEN_VPI_CS'
                            ) + moduleList.getDependencies(
                                moduleList.topModule, 'VHDL'
                            ) + moduleList.getDependencies(
                                moduleList.topModule, 'BA') + map(
                                    modify_path_ba_local,
                                    moduleList.getModuleDependenciesWithPaths(
                                        moduleList.topModule, 'GEN_BAS'))

            # collect dependencies from all awb modules
            else:
                vbinDeps += moduleList.getAllDependencies(
                    'VERILOG') + moduleList.getAllDependencies(
                        'VHDL') + moduleList.getAllDependencies('BA') + map(
                            modify_path_ba_local,
                            moduleList.getAllDependenciesWithPaths('GEN_BAS'))
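            # The chained calls above could also be written as a loop over the
            # code types; a sketch of that form (illustration only, never
            # invoked here):
            def gatherSimDeps(ml):
                deps = []
                for codeType in ['VERILOG', 'VHDL', 'BA']:
                    deps += ml.getAllDependencies(codeType)
                deps += map(modify_path_ba_local,
                            ml.getAllDependenciesWithPaths('GEN_BAS'))
                return deps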

            vbin = moduleList.env.Command(
                TMP_BSC_DIR + '/' + APM_NAME + '_hw.exe', vbinDeps,
                [vexe_gen_command,
                 SCons.Script.Delete('directc.sft')])

            vexe = moduleList.env.Command(APM_NAME + '_hw.exe', vbin, [
                generate_vexe_wrapper, '@chmod a+x $TARGET',
                SCons.Script.Delete(APM_NAME + '_hw.errinfo')
            ])

            moduleList.topDependency = moduleList.topDependency + [vexe]

        else:
            vbinDeps = moduleList.getAllDependencies(
                'VERILOG') + moduleList.getAllDependencies(
                    'VHDL') + moduleList.getAllDependencies('BA') + map(
                        modify_path_ba_local,
                        moduleList.getAllDependenciesWithPaths('GEN_BAS'))

            vbin = moduleList.env.Command(
                TMP_BSC_DIR + '/' + APM_NAME + '_hw.vexe', vbinDeps,
                [vexe_gen_command,
                 SCons.Script.Delete('directc.sft')])

            vexe = moduleList.env.Command(APM_NAME + '_hw.vexe', vbin, [
                generate_vexe_wrapper, '@chmod a+x $TARGET',
                SCons.Script.Delete(APM_NAME + '_hw.exe'),
                SCons.Script.Delete(APM_NAME + '_hw.errinfo')
            ])

        moduleList.env.Alias('vexe', vexe)
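All of these linker examples lean on the same two SCons calls: env.Command() registers a target, its dependency list, and the action that produces it, while env.Alias() exposes a phony name such as 'vexe' or 'synth' that can be requested on the command line. The fragment below is an illustrative, standalone SConstruct sketch of that idiom, not project code; the file names (sim.exe, a.ba, b.ba) and the trivial Python action are invented for the example.

# Illustration only -- save as SConstruct and run with 'scons vexe'.
# a.ba and b.ba are assumed to already exist in the source tree.
from SCons.Script import Environment

env = Environment()

def build_sim(target, source, env):
    # A Python action receives the resolved target/source nodes; returning
    # None (or 0) tells SCons that the step succeeded.
    with open(str(target[0]), 'w') as out:
        out.write('placeholder simulator\n')

sim = env.Command('sim.exe', ['a.ba', 'b.ba'], build_sim)

# 'vexe' becomes a phony target name, like the Alias calls in the examples.
env.Alias('vexe', sim)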
Example #15
    def __init__(self, moduleList):
        env = moduleList.env

        tree_base_path = env.Dir(
            model.get_build_path(moduleList, moduleList.topModule))

        # get rid of this at some point - since we know we're in
        # bluesim, we should be able to do the right thing.
        APM_NAME = moduleList.env['DEFS']['APM_NAME']
        BSC = moduleList.env['DEFS']['BSC']
        inc_paths = moduleList.swIncDir  # we need to depend on libasim

        bsc_version = bsv_tool.getBluespecVersion()

        ldflags = ''
        for ld_file in moduleList.getAllDependenciesWithPaths(
                'GIVEN_BLUESIM_LDFLAGSS'):
            ldHandle = open(
                moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + ld_file, 'r')
            ldflags += ldHandle.read() + ' '

        BSC_FLAGS_SIM = '-steps 10000000 +RTS -K1000M -RTS -keep-fires -aggressive-conditions -wait-for-license -no-show-method-conf -no-opt-bool -licenseWarning 7 -elab -show-schedule -l pthread ' + ldflags + ' '

        # Build in parallel.
        n_jobs = moduleList.env.GetOption('num_jobs')
        if (bsc_version >= 30006):
            BSC_FLAGS_SIM += '-parallel-sim-link ' + str(n_jobs) + ' '

        for path in inc_paths:
            BSC_FLAGS_SIM += '-I ' + path + ' '

        LDFLAGS = moduleList.env['DEFS']['LDFLAGS']

        TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
        ROOT_WRAPPER_SYNTH_ID = 'mk_' + moduleList.env['DEFS'][
            'ROOT_DIR_MODEL'] + '_Wrapper'

        all_hw_dirs = [
            env.Dir(d)
            for d in moduleList.env['DEFS']['ALL_HW_DIRS'].split(':')
        ]
        all_build_dirs = [d.Dir(TMP_BSC_DIR) for d in all_hw_dirs]

        ALL_DIRS_FROM_ROOT = ':'.join([d.path for d in all_hw_dirs])
        ALL_BUILD_DIRS_FROM_ROOT = ':'.join([d.path for d in all_build_dirs])
        ALL_LIB_DIRS_FROM_ROOT = ALL_DIRS_FROM_ROOT + ':' + ALL_BUILD_DIRS_FROM_ROOT

        bsc_sim_command = BSC + ' ' + BSC_FLAGS_SIM + ' ' + LDFLAGS + ' ' + ldflags + ' -o $TARGET'

        # Set MAKEFLAGS because Bluespec is going to invoke make on its own and
        # we don't want to pass on the current build's recursive flags.
        bsc_sim_command = 'env MAKEFLAGS="-j ' + str(
            n_jobs) + '" ' + bsc_sim_command

        if (bsc_version >= 13013):
            # 2008.01.A compiler allows us to pass C++ arguments.
            if (model.getDebug(moduleList)):
                bsc_sim_command += ' -Xc++ -O0'
            else:
                bsc_sim_command += ' -Xc++ -O1'

            # g++ 4.5.2 is complaining about overflowing the var tracking table

            if (model.getGccVersion() >= 40501):
                bsc_sim_command += ' -Xc++ -fno-var-tracking-assignments'

        defs = (software_tool.host_defs()).split(" ")
        for definition in defs:
            bsc_sim_command += ' -Xc++ ' + definition + ' -Xc ' + definition

        def modify_path_bdpi(path):
            return moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + path

        def modify_path_ba_local(path):
            return bsv_tool.modify_path_ba(moduleList, path)

        LI_LINK_DIR = ""
        if (not (wrapper_gen_tool.getFirstPassLIGraph()) is None):
            LI_LINK_DIR = tree_base_path.Dir('.li').path

        bdpi_cs = [
            env.File(c) for c in moduleList.env['DEFS']['BDPI_CS'].split(' ')
        ]
        BDPI_CS = ' '.join([c.path for c in bdpi_cs])

        bsc_sim_command += \
            ' -sim -e ' + ROOT_WRAPPER_SYNTH_ID + ' -p +:' + LI_LINK_DIR + ':' + ALL_LIB_DIRS_FROM_ROOT +' -simdir ' + \
            TMP_BSC_DIR + ' ' +\
            ' ' + BDPI_CS

        if (model.getBuildPipelineDebug(moduleList) != 0):
            print "BLUESIM DEPS: \n"
            for ba in moduleList.getAllDependencies('BA'):
                print 'Bluesim BA dep: ' + str(ba) + '\n'

            for ba in map(modify_path_ba_local,
                          moduleList.getAllDependenciesWithPaths('GIVEN_BAS')):
                print 'Bluesim GIVEN_BA dep: ' + str(ba) + '\n'

            for ba in map(modify_path_ba_local,
                          moduleList.getAllDependenciesWithPaths('GEN_BAS')):
                print 'Bluesim GEN_BA dep: ' + str(ba) + '\n'

        sbin_name = TMP_BSC_DIR + '/' + APM_NAME
        sbin = moduleList.env.Command(
            sbin_name + '_hw.exe',
            moduleList.getAllDependencies('BA') +
            map(modify_path_ba_local,
                moduleList.getAllDependenciesWithPaths('GIVEN_BAS')) +
            map(modify_path_ba_local,
                moduleList.getAllDependenciesWithPaths('GEN_BAS')) +
            map(modify_path_bdpi,
                moduleList.getAllDependenciesWithPaths('GIVEN_BDPI_CS')) +
            map(modify_path_bdpi,
                moduleList.getAllDependenciesWithPaths('GIVEN_BDPI_HS')),
            bsc_sim_command)

        if moduleList.env.GetOption('clean'):
            os.system('rm -rf .bsc')

        # If we have bsc data files, copy them over to the .bsc directory
        if len(moduleList.getAllDependencies('GEN_VS')) > 0:
            Copy(TMP_BSC_DIR, moduleList.getAllDependencies('GIVEN_DATAS'))

        #
        # The final step must leave a few well known names:
        #   APM_NAME must be the software side, if there is one.  If there isn't, then
        #   it must be the Bluesim image.
        #
        if (model.getBuildPipelineDebug(moduleList) != 0):
            print "ModuleList desp : " + str(moduleList.swExe)

        exe = moduleList.env.Command(APM_NAME, sbin, [
            '@ln -fs ' + sbin_name + '_hw.exe ${TARGET}_hw.exe',
            '@ln -fs ' + sbin_name + '_hw.exe.so ${TARGET}_hw.exe.so',
            '@ln -fs ' + moduleList.swExeOrTarget + ' ${TARGET}',
            SCons.Script.Delete('${TARGET}_hw.vexe'),
            SCons.Script.Delete('${TARGET}_hw.errinfo')
        ])

        moduleList.topDependency = moduleList.topDependency + [exe]
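The Bluesim build above feeds bsc a dependency list whose paths are first rewritten: modify_path_ba_local re-roots generated .ba files under each module's temporary bsc directory (the same transformation spelled out by the modify_path helper in Example #17 below), and modify_path_bdpi simply prefixes BDPI sources with the hardware root. The sketch below approximates those helpers with plain os.path calls; ROOT_DIR_HW and TMP_BSC_DIR are placeholder values, and the real helpers take the project's moduleList rather than module-level constants.

# Illustration only -- an approximation of the path rewriting used above.
import os.path

ROOT_DIR_HW = 'hw'      # placeholder for moduleList.env['DEFS']['ROOT_DIR_HW']
TMP_BSC_DIR = '.bsc'    # placeholder for moduleList.env['DEFS']['TMP_BSC_DIR']

def modify_path_ba(path):
    # Generated .ba files live in the module's temporary bsc directory, so
    # the file name is re-rooted under <hw root>/<module dir>/<TMP_BSC_DIR>/.
    head, tail = os.path.split(path)
    return os.path.join(ROOT_DIR_HW, head, TMP_BSC_DIR, tail)

def modify_path_bdpi(path):
    # BDPI C/Haskell sources are simply made relative to the hardware root.
    return os.path.join(ROOT_DIR_HW, path)

print modify_path_ba('model/core.ba')      # hw/model/.bsc/core.ba
print modify_path_bdpi('model/dpi.c')      # hw/model/dpi.c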
Example #16
    def __init__(self, moduleList):
        env = moduleList.env

        tree_base_path = env.Dir(model.get_build_path(moduleList, moduleList.topModule))

        # get rid of this at some point - since we know we're in 
        # bluesim, we should be able to do the right thing.
        APM_NAME = moduleList.env['DEFS']['APM_NAME']
        BSC = moduleList.env['DEFS']['BSC']
        inc_paths = moduleList.swIncDir # we need to depend on libasim

        bsc_version = bsv_tool.getBluespecVersion()

        ldflags = ''
        for ld_file in moduleList.getAllDependenciesWithPaths('GIVEN_BLUESIM_LDFLAGSS'):
            ldHandle = open(moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + ld_file, 'r')
            ldflags += ldHandle.read() + ' '    
            
        BSC_FLAGS_SIM = '-steps 10000000 +RTS -K1000M -RTS -keep-fires -aggressive-conditions -wait-for-license -no-show-method-conf -no-opt-bool -licenseWarning 7 -elab -show-schedule -l pthread ' + ldflags + ' '

        # Build in parallel.
        n_jobs = moduleList.env.GetOption('num_jobs')
        if (bsc_version >= 30006):
            BSC_FLAGS_SIM += '-parallel-sim-link ' + str(n_jobs) + ' '

        for path in inc_paths:
            BSC_FLAGS_SIM += '-I ' + path + ' '

        LDFLAGS = moduleList.env['DEFS']['LDFLAGS']

        TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
        ROOT_WRAPPER_SYNTH_ID = 'mk_' + moduleList.env['DEFS']['ROOT_DIR_MODEL'] + '_Wrapper'

        all_hw_dirs = [env.Dir(d) for d in moduleList.env['DEFS']['ALL_HW_DIRS'].split(':')]
        all_build_dirs = [d.Dir(TMP_BSC_DIR) for d in all_hw_dirs]

        ALL_DIRS_FROM_ROOT = ':'.join([d.path for d in all_hw_dirs])
        ALL_BUILD_DIRS_FROM_ROOT = ':'.join([d.path for d in all_build_dirs])
        ALL_LIB_DIRS_FROM_ROOT = ALL_DIRS_FROM_ROOT + ':' + ALL_BUILD_DIRS_FROM_ROOT
        
        bsc_sim_command = BSC + ' ' + BSC_FLAGS_SIM + ' ' + LDFLAGS + ' ' + ldflags + ' -o $TARGET'

        # Set MAKEFLAGS because Bluespec is going to invoke make on its own and
        # we don't want to pass on the current build's recursive flags.
        bsc_sim_command = 'env MAKEFLAGS="-j ' + str(n_jobs) + '" ' + bsc_sim_command


        if (bsc_version >= 13013):
            # 2008.01.A compiler allows us to pass C++ arguments.
            if (model.getDebug(moduleList)):
                bsc_sim_command += ' -Xc++ -O0'
            else:
                bsc_sim_command += ' -Xc++ -O1'

            # g++ 4.5.2 is complaining about overflowing the var tracking table

            if (model.getGccVersion() >= 40501):
                bsc_sim_command += ' -Xc++ -fno-var-tracking-assignments'

        defs = (software_tool.host_defs()).split(" ")
        for definition in defs:
            bsc_sim_command += ' -Xc++ ' + definition + ' -Xc ' + definition


        def modify_path_bdpi(path):
            return  moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + path

        def modify_path_ba_local(path):
            return bsv_tool.modify_path_ba(moduleList, path)

        LI_LINK_DIR = ""
        if (not (wrapper_gen_tool.getFirstPassLIGraph()) is None):
            LI_LINK_DIR = tree_base_path.Dir('.li').path

        bdpi_cs = [env.File(c) for c in moduleList.env['DEFS']['BDPI_CS'].split(' ')]
        BDPI_CS = ' '.join([c.path for c in bdpi_cs])

        bsc_sim_command += \
            ' -sim -e ' + ROOT_WRAPPER_SYNTH_ID + ' -p +:' + LI_LINK_DIR + ':' + ALL_LIB_DIRS_FROM_ROOT +' -simdir ' + \
            TMP_BSC_DIR + ' ' +\
            ' ' + BDPI_CS


        if (model.getBuildPipelineDebug(moduleList) != 0):
            print "BLUESIM DEPS: \n" 
            for ba in moduleList.getAllDependencies('BA'):
                print 'Bluesim BA dep: ' + str(ba) + '\n'

            for ba in map(modify_path_ba_local, moduleList.getAllDependenciesWithPaths('GIVEN_BAS')):
                print 'Bluesim GIVEN_BA dep: ' + str(ba) + '\n'

            for ba in map(modify_path_ba_local, moduleList.getAllDependenciesWithPaths('GEN_BAS')):
                print 'Bluesim GEN_BA dep: ' + str(ba) + '\n'

        sbin_name = TMP_BSC_DIR + '/' + APM_NAME
        sbin = moduleList.env.Command(
            sbin_name + '_hw.exe',
            moduleList.getAllDependencies('BA') + 
            map(modify_path_ba_local, moduleList.getAllDependenciesWithPaths('GIVEN_BAS')) +
            map(modify_path_ba_local, moduleList.getAllDependenciesWithPaths('GEN_BAS')) +
            map(modify_path_bdpi, moduleList.getAllDependenciesWithPaths('GIVEN_BDPI_CS')) + 
            map(modify_path_bdpi, moduleList.getAllDependenciesWithPaths('GIVEN_BDPI_HS')),
            bsc_sim_command)

        if moduleList.env.GetOption('clean'):
            os.system('rm -rf .bsc')

        # If we have bsc data files, copy them over to the .bsc directory 
        if len(moduleList.getAllDependencies('GEN_VS')) > 0:
            Copy(TMP_BSC_DIR, moduleList.getAllDependencies('GIVEN_DATAS'))

        #
        # The final step must leave a few well known names:
        #   APM_NAME must be the software side, if there is one.  If there isn't, then
        #   it must be the Bluesim image.
        #
        if (model.getBuildPipelineDebug(moduleList) != 0):
            print "ModuleList desp : " + str(moduleList.swExe)

        exe = moduleList.env.Command(
            APM_NAME,
            sbin,
            [ '@ln -fs ' + sbin_name + '_hw.exe ${TARGET}_hw.exe',
              '@ln -fs ' + sbin_name + '_hw.exe.so ${TARGET}_hw.exe.so',
              '@ln -fs ' + moduleList.swExeOrTarget + ' ${TARGET}',
              SCons.Script.Delete('${TARGET}_hw.vexe'),
              SCons.Script.Delete('${TARGET}_hw.errinfo') ])

        moduleList.topDependency = moduleList.topDependency + [exe] 
Example #17
  def __init__(self, moduleList, isPrimaryBuildTarget):
    APM_NAME = moduleList.env['DEFS']['APM_NAME']
    BSC = moduleList.env['DEFS']['BSC']
    inc_paths = moduleList.swIncDir # we need to depend on libasim

    self.firstPassLIGraph = wrapper_gen_tool.getFirstPassLIGraph()

    # This is not correct for LIM builds and needs to be fixed. 
    TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
    ALL_DIRS_FROM_ROOT = moduleList.env['DEFS']['ALL_HW_DIRS']
    ALL_BUILD_DIRS_FROM_ROOT = model.transform_string_list(ALL_DIRS_FROM_ROOT, ':', '', '/' + TMP_BSC_DIR)
    ALL_INC_DIRS_FROM_ROOT   = '-Xv +incdir+' + ALL_DIRS_FROM_ROOT.replace(':','+') 

    #insert any supplied include paths
    for incdir in moduleList.getAllDependencies('VERILOG_INC_DIRS'):
        ALL_INC_DIRS_FROM_ROOT += "+" + incdir

    ALL_LIB_DIRS_FROM_ROOT   = ALL_DIRS_FROM_ROOT + ':' + ALL_BUILD_DIRS_FROM_ROOT

    # Due to the bluespec linker, for LI second pass builds, the final
    # verilog link step must occur in a different directory than the
    # bsc object code wrapper compilation step.  However, non-LIM
    # linker builds need to build in the original .bsc directory to
    # pick up VPI.
    vexe_vdir = moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + moduleList.env['DEFS']['ROOT_DIR_MODEL'] + '/' + moduleList.env['DEFS']['TMP_BSC_DIR'] 
    if(not self.firstPassLIGraph is None):
        vexe_vdir = vexe_vdir + '_vlog'

    if not os.path.isdir(vexe_vdir):
        os.mkdir(vexe_vdir)

    LI_LINK_DIR = ""
    if (not self.firstPassLIGraph is None):
        LI_LINK_DIR = model.get_build_path(moduleList, moduleList.topModule) + "/.li/"
        inc_paths += [LI_LINK_DIR]
        ALL_LIB_DIRS_FROM_ROOT = LI_LINK_DIR + ':' +  ALL_LIB_DIRS_FROM_ROOT

    liCodeType = ['VERILOG_PKG', 'VERILOG', 'GIVEN_VERILOG_HS', 'GEN_VPI_CS', 'GEN_VPI_HS']

    # This can be refactored as a function.
    if (not self.firstPassLIGraph is None):
        for moduleName in self.firstPassLIGraph.modules:            
            moduleObject = self.firstPassLIGraph.modules[moduleName]
            for codeType in liCodeType:
                if(codeType in moduleObject.objectCache):
                    for verilog in moduleObject.objectCache[codeType]:
                        linkPath = vexe_vdir + '/' + os.path.basename(verilog)
                        moduleList.env.Command(linkPath, verilog, model.link_file)
                        if(codeType in moduleList.topModule.moduleDependency):
                            moduleList.topModule.moduleDependency[codeType] += [linkPath]
                        else:
                            moduleList.topModule.moduleDependency[codeType] = [linkPath]
                else:
                    # Warn that we did not find the verilog we expected to find.
                    print "Warning: We did not find verilog for module " + moduleName
                
    bsc_version = bsv_tool.getBluespecVersion()

    ldflags = ''
    for ld_file in moduleList.getAllDependenciesWithPaths('GIVEN_BLUESIM_LDFLAGSS'):
      ldHandle = open(moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + ld_file, 'r')
      ldflags += ldHandle.read() + ' '    

    BSC_FLAGS_VERILOG = '-steps 10000000 +RTS -K1000M -RTS -keep-fires -aggressive-conditions -wait-for-license -no-show-method-conf -no-opt-bool -licenseWarning 7 -elab -show-schedule ' + ldflags + ' -verilog -v -vsim vcs '

    # Build in parallel.
    n_jobs = moduleList.env.GetOption('num_jobs')
    if (bsc_version >= 30006):
        BSC_FLAGS_VERILOG += '-parallel-sim-link ' + str(n_jobs) + ' '

    for path in inc_paths:
        BSC_FLAGS_VERILOG += ' -I ' + path + ' '

    LDFLAGS = moduleList.env['DEFS']['LDFLAGS']
    TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
    ROOT_WRAPPER_SYNTH_ID = 'mk_' + moduleList.env['DEFS']['ROOT_DIR_MODEL'] + '_Wrapper'

    vexe_gen_command = \
        BSC + ' ' + BSC_FLAGS_VERILOG + ' -vdir ' + vexe_vdir + ' -simdir ' + vexe_vdir + ' -bdir ' + vexe_vdir +' -p +:' +  ALL_LIB_DIRS_FROM_ROOT + ' -vsearch +:' + ALL_LIB_DIRS_FROM_ROOT + ' ' + \
        ' -o $TARGET' 


    if (bsc_version >= 13013):
        # 2008.01.A compiler allows us to pass C++ arguments.
        if (model.getDebug(moduleList)):
            vexe_gen_command += ' -Xc++ -O0'
        else:
            vexe_gen_command += ' -Xc++ -O1'

        # g++ 4.5.2 is complaining about overflowing the var tracking table

        if (model.getGccVersion() >= 40501):
            vexe_gen_command += ' -Xc++ -fno-var-tracking-assignments'

    defs = (software_tool.host_defs()).split(" ")
    for definition in defs:
        vexe_gen_command += ' -Xc++ ' + definition + ' -Xc ' + definition
 
    # cflags to be passed into vcs compiler
    for definition in defs:
        vexe_gen_command += ' -Xv -CFLAGS -Xv ' + definition
    for path in inc_paths:
        vexe_gen_command += ' -Xv -CFLAGS -Xv -I' + path

    for lib in moduleList.swLinkLibs:
        vexe_gen_command += ' -Xl -l' + lib + ' '
        vexe_gen_command += ' -Xv -LDFLAGS -Xv -l' + lib + ' '

    # construct full path to BAs
    def modify_path(str):
        array = str.split('/')
        file = array.pop()
        return  moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + '/'.join(array) + '/' + TMP_BSC_DIR + '/' + file 

    if (moduleList.getAWBParam('verilog_tool', 'VCS_ENABLE_LINT') != 0):
        vexe_gen_command += ' -Xv +lint=all,noVCDE'

    vexe_gen_command += ' -Xv -full64 '
    vexe_gen_command += ' -Xv -sverilog '
    vexe_gen_command += ' -Xv +librescan '
    vexe_gen_command += ' -Xv +libext+.sv '
    if (moduleList.getAWBParam('verilog_tool', 'VCS_ARGUMENTS')):
        vexe_gen_command += moduleList.getAWBParam('verilog_tool', 'VCS_ARGUMENTS')
    vexe_gen_command += ' ' + ALL_INC_DIRS_FROM_ROOT + ' '

    # VCS must be informed of all BDPI.  Really we need some kind of
    # file object here.  All this massaging of path is ridiculous.
    vexe_gen_command += ' -Xv ' +  moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + (' -Xv ' +  moduleList.env['DEFS']['ROOT_DIR_HW'] + '/').join(moduleList.getAllDependenciesWithPaths('GIVEN_BDPI_CS')) + ' ' 

    # Bluespec requires that source files terminate the command line.
    vexe_gen_command += ' -verilog -e ' + ROOT_WRAPPER_SYNTH_ID + ' ' +\
                        moduleList.env['DEFS']['BDPI_CS']

    vexe_gen_command += ' ' + ' '.join(moduleList.getAllDependencies('VERILOG_PKG'))
    vexe_gen_command += ' ' + ' '.join(moduleList.getAllDependencies('VERILOG'))
    vexe_gen_command += ' ' + ' '.join(moduleList.getAllDependencies('VHDL'))

    if (model.getBuildPipelineDebug(moduleList) != 0):
        for m in moduleList.getAllDependencies('BA'):
            print 'BA dep: ' + str(m)
        for m in moduleList.getAllDependencies('VERILOG_PKG'):
            print 'VPKG dep: ' + str(m)
        for m in moduleList.getAllDependencies('VERILOG'):
            print 'VL dep: ' + str(m)
        for m in moduleList.getAllDependencies('VHDL'):
            print 'VHDL dep: ' + str(m)
        for m in moduleList.getAllDependencies('GIVEN_BDPI_CS'):
            print 'GIVEN_BDPI_CS: ' + str(m)

    # Generate a thin wrapper around the verilog executable.  This
    # wrapper is used to address a problem in iverilog in which the
    # simulator does not support shared library search paths.  The
    # current wrapper only works for iverilog.  Due to brokenness in
    # the iverilog argument parser, we must construct a correct
    # iverilog command line by analyzing its compiled script. Also,
    # this script is not passing through the arguments that it should
    # be passing through. 
    def generate_vexe_wrapper(target, source, env):
        wrapper_handle = open(str(target[0]),'w')
        wrapper_handle.write('#!/usr/bin/perl\n')
        wrapper_handle.write('# generated by verilog.py\n') 
        wrapper_handle.write('$platform = $ENV{"PLATFORM_DIRECTORY"};\n')
        wrapper_handle.write('$ENV{LD_LIBRARY_PATH} = $platform . ":" . $ENV{LD_LIBRARY_PATH};\n')
        wrapper_handle.write('`ln -sf $platform/directc_mk_model_Wrapper.so .`;\n')
        wrapper_handle.write('exec("$platform/' + TMP_BSC_DIR + '/' + APM_NAME + '_hw.exe  -licqueue \$* ");\n')
        wrapper_handle.close()
 
    def modify_path_ba_local(path):
        return bsv_tool.modify_path_ba(moduleList, path)

    # Bluesim builds apparently touch this code. This control block
    # preserves their behavior, but it is unclear why the verilog build is 
    # involved.
    if (isPrimaryBuildTarget):
        vbinDeps = []
        # If we got a lim graph, we'll pick up many of our dependencies from it. 
        # These were annotated in the top module above. Really, this seems unclean.
        # We should build a graph during the second pass and just use it.
        if(not self.firstPassLIGraph is None):
            vbinDeps += moduleList.getDependencies(moduleList.topModule, 'VERILOG_PKG') + \
                        moduleList.getDependencies(moduleList.topModule, 'VERILOG') + \
                        moduleList.getDependencies(moduleList.topModule, 'VERILOG_LIB') + \
                        moduleList.getDependencies(moduleList.topModule, 'GIVEN_VERILOG_HS') + \
                        moduleList.getDependencies(moduleList.topModule, 'GEN_VPI_HS') + \
                        moduleList.getDependencies(moduleList.topModule, 'GEN_VPI_CS') + \
                        moduleList.getDependencies(moduleList.topModule, 'VHDL') + \
                        moduleList.getDependencies(moduleList.topModule, 'BA') + \
                        map(modify_path_ba_local, moduleList.getModuleDependenciesWithPaths(moduleList.topModule, 'GEN_BAS'))
        # collect dependencies from all awb modules
        else:
            vbinDeps += moduleList.getAllDependencies('VERILOG_PKG') + \
                        moduleList.getAllDependencies('VERILOG') + \
                        moduleList.getAllDependencies('VERILOG_LIB') + \
                        moduleList.getAllDependencies('VHDL') + \
                        moduleList.getAllDependencies('BA') + \
                        map(modify_path_ba_local, moduleList.getAllDependenciesWithPaths('GEN_BAS'))
          
        vbin = moduleList.env.Command(
            TMP_BSC_DIR + '/' + APM_NAME + '_hw.exe',
            vbinDeps,
            [ vexe_gen_command ])

        moduleList.env.AlwaysBuild(vbin)

        vexe = moduleList.env.Command(
            APM_NAME + '_hw.exe',
            vbin,
            [  generate_vexe_wrapper,
              '@chmod a+x $TARGET',
               SCons.Script.Delete(APM_NAME + '_hw.errinfo') ])
        

        moduleList.topDependency = moduleList.topDependency + [vexe]

    else:
        vbin = moduleList.env.Command(
            TMP_BSC_DIR + '/' + APM_NAME + '_hw.vexe',
            moduleList.getAllDependencies('VERILOG_PKG') +
            moduleList.getAllDependencies('VERILOG') +
            moduleList.getAllDependencies('VHDL') +
            moduleList.getAllDependencies('BA') +
            map(modify_path_ba_local, moduleList.getAllDependenciesWithPaths('GEN_BAS')),
            [ vexe_gen_command ])
 

        vexe = moduleList.env.Command(
            APM_NAME + '_hw.vexe',
            vbin,
            [ generate_vexe_wrapper,
              '@chmod a+x $TARGET',
            SCons.Script.Delete(APM_NAME + '_hw.exe'),
            SCons.Script.Delete(APM_NAME + '_hw.errinfo') ])

    moduleList.env.Alias('vexe', vexe)
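generate_vexe_wrapper above emits a small Perl launcher whose only jobs are to extend LD_LIBRARY_PATH with the platform directory, link in the VPI shared object, and exec the compiled simulator with the user's arguments. The standalone sketch below shows the same trick with a generic shell launcher; it is not the project's code, the file names are invented, and it omits the iverilog-specific argument-parsing workarounds described in the comments above.

# Illustration only -- a generic simulator launcher generator.
import os
import stat

def write_sim_wrapper(wrapper_path, sim_exe, extra_args='-licqueue'):
    # Emit a shell launcher that fixes the library search path before
    # exec'ing the real simulator binary, passing user arguments through.
    handle = open(wrapper_path, 'w')
    handle.write('#!/bin/sh\n')
    handle.write('# generated launcher\n')
    handle.write('dir=`dirname "%s"`\n' % sim_exe)
    handle.write('export LD_LIBRARY_PATH="$dir:$LD_LIBRARY_PATH"\n')
    handle.write('exec "%s" %s "$@"\n' % (sim_exe, extra_args))
    handle.close()
    # Mark the wrapper executable, like the '@chmod a+x $TARGET' step above.
    mode = os.stat(wrapper_path).st_mode
    os.chmod(wrapper_path, mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)

# write_sim_wrapper('model_hw.exe.wrapper', './model_hw.exe')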
Example #18
File: BSV.py Project: chenm001/leap
    def __init__(self, moduleList):
        # some definitions used during the bsv compilation process
        env = moduleList.env
        self.moduleList = moduleList

        self.hw_dir = env.Dir(moduleList.env['DEFS']['ROOT_DIR_HW'])

        self.TMP_BSC_DIR = env['DEFS']['TMP_BSC_DIR']
        synth_modules = moduleList.synthBoundaries()

        self.USE_TREE_BUILD = moduleList.getAWBParam('wrapper_gen_tool',
                                                     'USE_BUILD_TREE')

        # all_module_dirs: a list of all module directories in the build tree
        self.all_module_dirs = [
            self.hw_dir.Dir(moduleList.topModule.buildPath)
        ]
        for module in synth_modules:
            if (module.buildPath != moduleList.topModule.buildPath):
                self.all_module_dirs += [self.hw_dir.Dir(module.buildPath)]

        # all_build_dirs: the build (.bsc) sub-directory of all module directories
        self.all_build_dirs = [
            d.Dir(self.TMP_BSC_DIR) for d in self.all_module_dirs
        ]

        # Include iface directories
        self.all_module_dirs += iface_tool.getIfaceIncludeDirs(moduleList)
        self.all_build_dirs += iface_tool.getIfaceLibDirs(moduleList)

        # Add the top level build directory
        self.all_build_dirs += [env.Dir(self.TMP_BSC_DIR)]

        self.all_module_dirs += [
            self.hw_dir.Dir('include'),
            self.hw_dir.Dir('include/awb/provides')
        ]

        # Full search path: all module and build directories
        self.all_lib_dirs = self.all_module_dirs + self.all_build_dirs

        all_build_dir_paths = [d.path for d in self.all_build_dirs]
        self.ALL_BUILD_DIR_PATHS = ':'.join(all_build_dir_paths)

        all_lib_dir_paths = [d.path for d in self.all_lib_dirs]
        self.ALL_LIB_DIR_PATHS = ':'.join(all_lib_dir_paths)

        # we need to annotate the module list with the
        # bluespec-provided library files. Do so here.
        bsv_tool.decorateBluespecLibraryCode(moduleList)

        self.TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
        self.BUILD_LOGS_ONLY = moduleList.getAWBParam('bsv_tool',
                                                      'BUILD_LOGS_ONLY')
        self.USE_BVI = moduleList.getAWBParam('bsv_tool', 'USE_BVI')

        self.pipeline_debug = model.getBuildPipelineDebug(moduleList)

        # Should we be building in events?
        if (model.getEvents(moduleList) == 0):
            bsc_events_flag = ' -D HASIM_EVENTS_ENABLED=False '
        else:
            bsc_events_flag = ' -D HASIM_EVENTS_ENABLED=True '

        self.BSC_FLAGS = moduleList.getAWBParam('bsv_tool',
                                                'BSC_FLAGS') + bsc_events_flag

        moduleList.env.VariantDir(self.TMP_BSC_DIR, '.', duplicate=0)
        moduleList.env['ENV']['BUILD_DIR'] = moduleList.env['DEFS'][
            'BUILD_DIR']  # need to set the builddir for synplify

        topo = moduleList.topologicalOrderSynth()
        topo.reverse()

        # Cleaning? Wipe out module temporary state. Do this before
        # the topo pop to ensure that we don't leave garbage around at
        # the top level.
        if moduleList.env.GetOption('clean'):
            for module in topo:
                MODULE_PATH = get_build_path(moduleList, module)
                os.system('cd ' + MODULE_PATH + '/' + self.TMP_BSC_DIR +
                          '; rm -f *.ba *.c *.h *.sched *.log *.v *.bo *.str')

        topo.pop()  # get rid of top module.

        ## Python module that generates a wrapper to connect the exposed
        ## wires of all synthesis boundaries.
        tree_builder = bsv_tool.BSVSynthTreeBuilder(self)

        ##
        ## Is this a normal build or a build in which only Bluespec dependence
        ## is computed?
        ##

        if not moduleList.isDependsBuild:
            ##
            ## Normal build.
            ##

            ##
            ## Now that the "depends-init" build is complete we can
            ## continue with accurate inter-Bluespec file dependence.
            ## This build only takes place for the first pass object
            ## code generation.  If the first pass li graph exists, it
            ## subsumes awb-style synthesis boundary generation.
            ##
            for module in topo:
                self.build_synth_boundary(moduleList, module)

            ## We are going to have a whole bunch of BA and V files coming.
            ## We don't yet know what they contain, but we do know that there
            ## will be |synth_modules| - 2 of them

            if (not 'GEN_VERILOGS' in moduleList.topModule.moduleDependency):
                moduleList.topModule.moduleDependency['GEN_VERILOGS'] = []
            if (not 'GEN_BAS' in moduleList.topModule.moduleDependency):
                moduleList.topModule.moduleDependency['GEN_BAS'] = []

            ## Having described the new build tree dependencies we can build
            ## the top module.
            self.build_synth_boundary(moduleList, moduleList.topModule)

            ## Merge all synthesis boundaries using a tree?  The tree reduces
            ## the number of connections merged in a single compilation, allowing
            ## us to support larger systems.
            if self.USE_TREE_BUILD:
                tree_builder.setupTreeBuild(moduleList, topo)

            ##
            ## Generate the global string table.  Bluespec-generated global
            ## strings are stored in files by the compiler.
            ##
            ## The global string file will be generated in the top-level
            ## .bsc directory and a link to it will be added to the
            ## top-level directory.
            ##
            all_str_src = []
            #for module in topo + [moduleList.topModule]:
            for module in moduleList.moduleList + topo + [
                    moduleList.topModule
            ]:
                if ('STR' in module.moduleDependency):
                    all_str_src.extend(module.moduleDependency['STR'])

            if (self.BUILD_LOGS_ONLY == 0):
                bsc_str = moduleList.env.Command(
                    self.TMP_BSC_DIR + '/' +
                    moduleList.env['DEFS']['APM_NAME'] + '.str', all_str_src,
                    ['cat $SOURCES > $TARGET'])
                strDep = moduleList.env.Command(
                    moduleList.env['DEFS']['APM_NAME'] + '.str', bsc_str, [
                        'ln -fs ' + self.TMP_BSC_DIR +
                        '/`basename $TARGET` $TARGET'
                    ])
                moduleList.topDependency += [strDep]

            if moduleList.env.GetOption('clean'):
                print 'Cleaning depends-init...'
                s = os.system('scons --clean depends-init')
        else:

            ##
            ## Dependence build.  The target of this build is "depends-init".  No
            ## Bluespec modules will be compiled in this invocation of SCons.
            ## Only .depends-bsv files will be produced.
            ##

            # We need to calculate some dependencies for the build
            # tree.  We could be clever and put this code somewhere
            # rather than replicate it.
            if self.USE_TREE_BUILD:

                buildTreeDeps = {}
                buildTreeDeps['GEN_VERILOGS'] = []
                buildTreeDeps['GEN_BAS'] = []
                #This is sort of a hack.
                buildTreeDeps['WRAPPER_BSHS'] = [
                    'awb/provides/soft_services.bsh'
                ]
                buildTreeDeps['GIVEN_BSVS'] = []
                buildTreeDeps['BA'] = []
                buildTreeDeps['STR'] = []
                buildTreeDeps['VERILOG'] = []
                buildTreeDeps['BSV_LOG'] = []
                buildTreeDeps['VERILOG_STUB'] = []

                tree_module = Module( 'build_tree', ["mkBuildTree"], moduleList.topModule.buildPath,\
                             moduleList.topModule.name,\
                             [], moduleList.topModule.name, [], buildTreeDeps, platformModule=True)

                tree_module.dependsFile = '.depends-build-tree'

                moduleList.insertModule(tree_module)
                tree_file_bo = get_build_path(
                    moduleList, moduleList.topModule) + "/build_tree.bsv"
                # sprinkle files to get dependencies right
                bo_handle = open(tree_file_bo, 'w')

                # mimic AWB/leap-configure

                bo_handle.write('//\n')
                bo_handle.write(
                    '// Synthesized compilation file for module: build_tree\n')
                bo_handle.write('//\n')
                bo_handle.write('//   This file was created by BSV.py\n')
                bo_handle.write('//\n')

                bo_handle.write('`define BUILDING_MODULE_build_tree\n')
                bo_handle.write('`include "build_tree_Wrapper.bsv"\n')

                bo_handle.close()

                # Calling generateWrapperStub will write out default _Wrapper.bsv
                # and _Log.bsv files for the build tree. However, these files
                # may already exist and, in the case of build_tree_Wrapper.bsv,
                # have meaningful content.  Fortunately, generateWrapperStub
                # will not overwrite existing files.
                wrapper_gen_tool.generateWrapperStub(moduleList, tree_module)
                wrapper_gen_tool.generateAWBCompileWrapper(
                    moduleList, tree_module)
                topo.append(tree_module)

            deps = []

            useDerived = True
            first_pass_LI_graph = wrapper_gen_tool.getFirstPassLIGraph()
            if (not first_pass_LI_graph is None):
                useDerived = False
                # we also need to parse the platform_synth file in the top-level build directory
                platform_synth = get_build_path(
                    moduleList, moduleList.topModule
                ) + "/" + moduleList.localPlatformName + "_platform_synth.bsv"
                platform_deps = ".depends-platform"
                deps += self.compute_dependence(moduleList,
                                                moduleList.topModule,
                                                useDerived,
                                                fileName=platform_deps,
                                                targetFiles=[platform_synth])

                # If we have an LI graph, we need to construct and compile
                # several LI wrappers.  do that here.
                # include all the dependencies in the graph in the wrapper.
                li_wrappers = []
                tree_base_path = get_build_path(moduleList,
                                                moduleList.topModule)
                liGraph = LIGraph([])
                firstPassGraph = first_pass_LI_graph
                # We should ignore the 'PLATFORM_MODULE'
                liGraph.mergeModules([
                    module for module in getUserModules(firstPassGraph)
                    if module.getAttribute('RESYNTHESIZE') is None
                ])
                for module in sorted(liGraph.graph.nodes(),
                                     key=lambda module: module.name):
                    wrapper_import_path = tree_base_path + '/' + module.name + '_Wrapper.bsv'
                    li_wrappers.append(module.name + '_Wrapper.bsv')
                    wrapper_import_handle = open(wrapper_import_path, 'w')
                    wrapper_import_handle.write('import Vector::*;\n')
                    wrapper_gen_tool.generateWellKnownIncludes(
                        wrapper_import_handle)
                    wrapper_gen_tool.generateBAImport(module,
                                                      wrapper_import_handle)
                    wrapper_import_handle.close()
                    platform_deps = ".depends-" + module.name
                    deps += self.compute_dependence(
                        moduleList,
                        moduleList.topModule,
                        useDerived,
                        fileName=platform_deps,
                        targetFiles=[wrapper_import_path])

            for module in topo + [moduleList.topModule]:
                # for object import builds no Wrapper code will be included. remove it.
                deps += self.compute_dependence(moduleList,
                                                module,
                                                useDerived,
                                                fileName=module.dependsFile)

            moduleList.topDependsInit += deps
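The global string table step in Example #18 uses one more recurring pattern from these builders: concatenate per-module outputs into a single file in the temporary bsc directory, then publish it at the top level through a relative symlink. The fragment below is an illustrative standalone SConstruct with invented file names (a.str, b.str, model.str), not part of BSV.py.

# Illustration only -- save as SConstruct and run with 'scons strings';
# a.str and b.str stand in for the per-module string files.
from SCons.Script import Environment

env = Environment()

# Concatenate the per-module .str files into one table in the build directory.
combined = env.Command('.bsc/model.str', ['a.str', 'b.str'],
                       'cat $SOURCES > $TARGET')

# Publish the table at the top level via a relative symlink, as above.
top_link = env.Command('model.str', combined,
                       'ln -fs .bsc/`basename $TARGET` $TARGET')

env.Alias('strings', top_link)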