Example #1
0
def buildSynplifyEDF(moduleList, module, globalVerilogs, globalVHDs,
                     resourceCollector):
    MODEL_CLOCK_FREQ = moduleList.getAWBParam('clocks_device',
                                              'MODEL_CLOCK_FREQ')

    # Synplify compiles from the root directory
    compile_dir = moduleList.env.Dir('.')
    compileDirectory = compile_dir.Dir('.')

    # need to eventually break this out into a separate function
    # first step - modify prj options file to contain any generated wrappers
    prjFile = open('config/' + moduleList.apmName + '.synplify.prj', 'r')
    newPrjPath = 'config/' + module.wrapperName() + '.modified.synplify.prj'
    newPrjFile = open(newPrjPath, 'w')

    # allow duplicate files, so long as they are not used...
    newPrjFile.write('set_option -dup {1}\n')

    newPrjFile.write('add_file -verilog \"../hw/' + module.buildPath +
                     '/.bsc/' + module.wrapperName() + '.v\"\n')
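    # For reference, the modified project file now begins with lines of the
    # following form (hypothetical module/path names):
    #   set_option -dup {1}
    #   add_file -verilog "../hw/model/.bsc/mk_model_Wrapper.v"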

    annotationFiles, annotationDeps = synthesis_library.generateSynthesisTcl(
        moduleList, module, compileDirectory)

    # now dump all the 'VERILOG'
    fileArray = globalVerilogs + globalVHDs + \
                moduleList.getDependencies(module, 'VERILOG_STUB') + \
                map(lambda x: moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + x, moduleList.getAllDependenciesWithPaths('GIVEN_SYNPLIFY_VERILOGS')) + \
                moduleList.getAllDependencies('NGC') + \
                moduleList.getAllDependencies('SDC') + annotationFiles

    # Replace any known black boxes
    blackBoxDeps = []
    blackBoxes = module.getAttribute('BLACK_BOX')

    for file in fileArray:

        if (not blackBoxes is None):
            if (file in blackBoxes):
                file = blackBoxes[file]
                blackBoxDeps.append(file)

        if (type(file) is str):
            newPrjFile.write(_generate_synplify_include(file))
        elif (isinstance(file, model.Source.Source)):
            newPrjFile.write(_generate_synplify_include_source(file))
        else:
            if (model.getBuildPipelineDebug(moduleList) != 0):
                print type(file)
                print "is not a string"

    # Set up new implementation
    build_dir = moduleList.compileDirectory + '/' + module.wrapperName()

    try:
        os.mkdir(moduleList.compileDirectory)
    except OSError, err:
        if err.errno != errno.EEXIST: raise
Example #2
0
def _filter_file_add(file, moduleList):
    # Remove any add_file directives from the project file contents.
    output = ''
    for line in file.readlines():
        output += re.sub('add_file.*$', '', line)
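        # e.g. a line such as 'add_file -verilog "foo.v"' (hypothetical) is
        # reduced to a bare newline; lines without an add_file directive pass
        # through unchanged.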
        if (model.getBuildPipelineDebug(moduleList) != 0):
            print 'converted ' + line + 'to ' + re.sub('add_file.*$', '', line)

    return output
Example #3
0
def assignResources(moduleList, environmentGraph=None, moduleGraph=None):

    pipeline_debug = model.getBuildPipelineDebug(moduleList) or True

    # We require this extra 'S', but maybe this should not be the case.
    resourceFile = moduleList.getAllDependenciesWithPaths('GIVEN_RESOURCESS')

    filenames = []
    if (len(resourceFile) > 0):
        filenames.append(moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' +
                         resourceFile[0])
        # let's read in a resource file

    # we can also get resource files from the first compilation pass.
    # pick those resource files up here.  However, we don't force the
    # caller to supply such things.
    if (moduleGraph is not None):
        for moduleName in moduleGraph.modules:
            moduleObject = moduleGraph.modules[moduleName]
            filenames += moduleObject.getObjectCode('RESOURCES')
    else:
        filenames += moduleList.getAllDependencies('RESOURCES')

    resources = {}

    # need to check for file existence. returning an empty resource
    # dictionary is acceptable.
    for filename in filenames:
        if (not os.path.exists(str(filename.from_bld()))):
            print "Warning, no resources found at " + str(
                filename.from_bld()) + "\n"
            continue

        logfile = open(str(filename.from_bld()), 'r')
        for line in logfile:
            # There are several ways that we can get resources. One way is instrumenting the router.
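            # Each line is expected to look something like the following
            # (hypothetical example): "mkFoo:LUT:1200:FF:800" -- a module name
            # followed by alternating resource-name/value pairs.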
            params = line.split(':')
            moduleName = params.pop(0)
            resources[moduleName] = {}
            for index in range(len(params) / 2):
                resources[moduleName][params[2 * index]] = float(
                    params[2 * index + 1])
    if (pipeline_debug):
        print "PLACER RESOURCES: " + str(resources)

    return resources
Example #4
0
def buildNetlists(moduleList, userModuleBuilder, platformModuleBuilder):
    # We load this graph in to memory several times. 
    # TODO: load this graph once. 
    firstPassLIGraph = wrapper_gen_tool.getFirstPassLIGraph()

    DEBUG = model.getBuildPipelineDebug(moduleList) 

    # string together the xcf, sort of like the ucf
    # Concatenate XCF files
    MODEL_CLOCK_FREQ = moduleList.getAWBParam('clocks_device', 'MODEL_CLOCK_FREQ')

    ngcModules = [module for module in moduleList.synthBoundaries() if not module.liIgnore] 

    [globalVerilogs, globalVHDs] = globalRTLs(moduleList, moduleList.moduleList)

    synth_deps = []
    # drop existing boundaries.

    for module in ngcModules:   
        # did we get an ngc from the first pass?  If so, did the lim
        # graph give code for this module?  If both are true, then we
        # will link the old ngc in, rather than regenerate it. 

        if((not firstPassLIGraph is None) and (module.name in firstPassLIGraph.modules) and (firstPassLIGraph.modules[module.name].getAttribute('RESYNTHESIZE') is None)):
            synth_deps += linkNGC(moduleList, module, firstPassLIGraph)
        else:
            # We need to build the netlist. We build platformModules
            # with the platformModuleBuilder.  User modules get built
            # with userModuleBuilder.
            if(module.platformModule):
                synth_deps += platformModuleBuilder(moduleList, module, globalVerilogs, globalVHDs)
            else:
                synth_deps += userModuleBuilder(moduleList, module, globalVerilogs, globalVHDs)


    top_netlist = platformModuleBuilder(moduleList, moduleList.topModule, globalVerilogs, globalVHDs)
    synth_deps += top_netlist
    moduleList.topModule.moduleDependency['SYNTHESIS'] = synth_deps

    # Alias for synthesis
    moduleList.env.Alias('synth', synth_deps)
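# A minimal invocation sketch (hypothetical builder names).  Each builder is a
# callable taking (moduleList, module, globalVerilogs, globalVHDs) and
# returning a list of netlist targets:
#
#   buildNetlists(moduleList, myUserEdfBuilder, myPlatformEdfBuilder)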
Example #5
0
  def __init__(self, moduleList, isPrimaryBuildTarget):
    APM_NAME = moduleList.env['DEFS']['APM_NAME']
    BSC = moduleList.env['DEFS']['BSC']
    inc_paths = moduleList.swIncDir # we need to depend on libasim

    self.firstPassLIGraph = wrapper_gen_tool.getFirstPassLIGraph()

    # This is not correct for LIM builds and needs to be fixed. 
    TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
    ALL_DIRS_FROM_ROOT = moduleList.env['DEFS']['ALL_HW_DIRS']
    ALL_BUILD_DIRS_FROM_ROOT = model.transform_string_list(ALL_DIRS_FROM_ROOT, ':', '', '/' + TMP_BSC_DIR)
    ALL_INC_DIRS_FROM_ROOT   = '-Xv +incdir+' + ALL_DIRS_FROM_ROOT.replace(':','+') 
    ALL_LIB_DIRS_FROM_ROOT   = ALL_DIRS_FROM_ROOT + ':' + ALL_BUILD_DIRS_FROM_ROOT

    # Due to the bluespec linker, for LI second pass builds, the final
    # verilog link step must occur in a different directory than the
    # bsc object code wrapper compilation step.  However, non-LIM
    # linker builds need to build in the original .bsc directory to
    # pick up VPI.
    vexe_vdir = moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + moduleList.env['DEFS']['ROOT_DIR_MODEL'] + '/' + moduleList.env['DEFS']['TMP_BSC_DIR'] 
    if(not self.firstPassLIGraph is None):
        vexe_vdir = vexe_vdir + '_vlog'

    if not os.path.isdir(vexe_vdir):
        os.mkdir(vexe_vdir)

    LI_LINK_DIR = ""
    if (not self.firstPassLIGraph is None):
        LI_LINK_DIR = model.get_build_path(moduleList, moduleList.topModule) + "/.li/"
        inc_paths += [LI_LINK_DIR]
        ALL_LIB_DIRS_FROM_ROOT = LI_LINK_DIR + ':' +  ALL_LIB_DIRS_FROM_ROOT

    liCodeType = ['VERILOG', 'GIVEN_VERILOG_HS', 'GEN_VPI_CS', 'GEN_VPI_HS']

    # This can be refactored as a function.
    if (not self.firstPassLIGraph is None):
        for moduleName in self.firstPassLIGraph.modules:            
            moduleObject = self.firstPassLIGraph.modules[moduleName]
            for codeType in liCodeType:
                if(codeType in moduleObject.objectCache):
                    for verilog in moduleObject.objectCache[codeType]:
                        linkPath = vexe_vdir + '/' + os.path.basename(verilog)
                        def linkVerilog(target, source, env):
                            # It might be more useful if the Module contained a pointer to the LIModules...                        
                            if(os.path.lexists(str(target[0]))):
                                os.remove(str(target[0]))
                            print "Linking: " + str(source[0]) + " to " + str(target[0])
                            os.symlink(str(source[0]), str(target[0]))
                        moduleList.env.Command(linkPath, verilog, linkVerilog)
                        if(codeType in moduleList.topModule.moduleDependency):
                            moduleList.topModule.moduleDependency[codeType] += [linkPath]
                        else:
                            moduleList.topModule.moduleDependency[codeType] = [linkPath]
                else:
                    # Warn that we did not find the ngc we expected to find..
                    print "Warning: We did not find verilog for module " + moduleName
                
    bsc_version = bsv_tool.getBluespecVersion()

    ldflags = ''
    for ld_file in moduleList.getAllDependenciesWithPaths('GIVEN_BLUESIM_LDFLAGSS'):
      ldHandle = open(moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + ld_file, 'r')
      ldflags += ldHandle.read() + ' '    

    BSC_FLAGS_VERILOG = '-steps 10000000 +RTS -K1000M -RTS -keep-fires -aggressive-conditions -wait-for-license -no-show-method-conf -no-opt-bool -licenseWarning 7 -elab -show-schedule ' + ldflags + ' -verilog -v -vsim vcs '

    # Build in parallel.
    n_jobs = moduleList.env.GetOption('num_jobs')
    if (bsc_version >= 30006):
        BSC_FLAGS_VERILOG += '-parallel-sim-link ' + str(n_jobs) + ' '

    for path in inc_paths:
        BSC_FLAGS_VERILOG += ' -I ' + path + ' '

    LDFLAGS = moduleList.env['DEFS']['LDFLAGS']
    TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
    ROOT_WRAPPER_SYNTH_ID = 'mk_' + moduleList.env['DEFS']['ROOT_DIR_MODEL'] + '_Wrapper'

    vexe_gen_command = \
        BSC + ' ' + BSC_FLAGS_VERILOG + ' -vdir ' + vexe_vdir + ' -simdir ' + vexe_vdir + ' -bdir ' + vexe_vdir +' -p +:' +  ALL_LIB_DIRS_FROM_ROOT + ' -vsearch +:' + ALL_LIB_DIRS_FROM_ROOT + ' ' + \
        ' -o $TARGET' 


    if (bsc_version >= 13013):
        # 2008.01.A compiler allows us to pass C++ arguments.
        if (model.getDebug(moduleList)):
            vexe_gen_command += ' -Xc++ -O0'
        else:
            vexe_gen_command += ' -Xc++ -O1'

        # g++ 4.5.2 is complaining about overflowing the var tracking table

        if (model.getGccVersion() >= 40501):
             vexe_gen_command += ' -Xc++ -fno-var-tracking-assignments'

    defs = (software_tool.host_defs()).split(" ")
    for definition in defs:
        vexe_gen_command += ' -Xc++ ' + definition + ' -Xc ' + definition
 
    # cflags to be passed into vcs compiler
    for definition in defs:
        vexe_gen_command += ' -Xv -CFLAGS -Xv ' + definition
    for path in inc_paths:
        vexe_gen_command += ' -Xv -CFLAGS -Xv -I' + path

    for lib in moduleList.swLinkLibs:
        vexe_gen_command += ' -Xl -l' + lib + ' '
        vexe_gen_command += ' -Xv -LDFLAGS -Xv -l' + lib + ' '

    # construct full path to BAs
    def modify_path(str):
        array = str.split('/')
        file = array.pop()
        return  moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + '/'.join(array) + '/' + TMP_BSC_DIR + '/' + file 
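    # e.g. (hypothetical) modify_path('mod/sub/foo.ba') ->
    #   '<ROOT_DIR_HW>/mod/sub/<TMP_BSC_DIR>/foo.ba'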

    vexe_gen_command += ' -Xv -full64 '
    vexe_gen_command += ' -Xv -sverilog '
    vexe_gen_command += ' -Xv +librescan '
    vexe_gen_command += ' -Xv +libext+.sv '
    if(moduleList.getAWBParam('verilog_tool', 'VCS_ARGUMENTS')):
        vexe_gen_command += moduleList.getAWBParam('verilog_tool', 'VCS_ARGUMENTS')
    vexe_gen_command += ' ' + ALL_INC_DIRS_FROM_ROOT + ' '

    # VCS must be informed of all BDPI.  Really we need some kind of
    # file object here.  All this massaging of path is ridiculous.
    vexe_gen_command += ' -Xv ' +  moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + (' -Xv ' +  moduleList.env['DEFS']['ROOT_DIR_HW'] + '/').join(moduleList.getAllDependenciesWithPaths('GIVEN_BDPI_CS')) + ' ' 

    # Bluespec requires that source files terminate the command line.
    vexe_gen_command += '-verilog -e ' + ROOT_WRAPPER_SYNTH_ID + ' ' +\
                        moduleList.env['DEFS']['BDPI_CS']

    if (model.getBuildPipelineDebug(moduleList) != 0):
        for m in moduleList.getAllDependencies('BA'):
            print 'BA dep: ' + str(m)
        for m in moduleList.getAllDependencies('VERILOG'):
            print 'VL dep: ' + str(m)
        for m in moduleList.getAllDependencies('VHDL'):
            print 'VHDL dep: ' + str(m)
        for m in moduleList.getAllDependencies('GIVEN_BDPI_CS'):
            print 'GIVEN_BDPI_CS: ' + str(m)
        

    # Generate a thin wrapper around the verilog executable.  For this VCS
    # flow the wrapper exports LD_LIBRARY_PATH, links the VPI shared object
    # into the working directory, and then execs the simulator binary with
    # the license-queue option, passing its arguments through.
    def generate_vexe_wrapper(target, source, env):
        wrapper_handle = open(str(target[0]),'w')
        wrapper_handle.write('#!/usr/bin/perl\n')
        wrapper_handle.write('# generated by verilog.py\n') 
        wrapper_handle.write('$platform = $ENV{"PLATFORM_DIRECTORY"};\n')
        wrapper_handle.write('$ENV{LD_LIBRARY_PATH} = $platform . ":" . $ENV{LD_LIBRARY_PATH};\n')
        wrapper_handle.write('`ln -sf $platform/directc_mk_model_Wrapper.so .`;\n')
        wrapper_handle.write('exec("$platform/' + TMP_BSC_DIR + '/' + APM_NAME + '_hw.exe  -licqueue \$* ");\n')
        wrapper_handle.close()
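    # The emitted wrapper ends up looking roughly like this (hypothetical
    # APM_NAME and TMP_BSC_DIR values):
    #   #!/usr/bin/perl
    #   $platform = $ENV{"PLATFORM_DIRECTORY"};
    #   $ENV{LD_LIBRARY_PATH} = $platform . ":" . $ENV{LD_LIBRARY_PATH};
    #   `ln -sf $platform/directc_mk_model_Wrapper.so .`;
    #   exec("$platform/.bsc/my_model_hw.exe  -licqueue $* ");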
 
    def modify_path_ba_local(path):
        return bsv_tool.modify_path_ba(moduleList, path)

    # Bluesim builds apparently touch this code. This control block
    # preserves their behavior, but it is unclear why the verilog build is 
    # involved.
    if (isPrimaryBuildTarget):
        vbinDeps = []
        # If we got a lim graph, we'll pick up many of our dependencies from it. 
        # These were annotated in the top module above. Really, this seems unclean.
        # we should build a graph during the second pass and just use it.
        if(not self.firstPassLIGraph is None):
            vbinDeps += moduleList.getDependencies(moduleList.topModule, 'VERILOG') + moduleList.getDependencies(moduleList.topModule, 'GIVEN_VERILOG_HS') + moduleList.getDependencies(moduleList.topModule, 'GEN_VPI_HS') + moduleList.getDependencies(moduleList.topModule, 'GEN_VPI_CS') +moduleList.getDependencies(moduleList.topModule, 'VHDL') + moduleList.getDependencies(moduleList.topModule, 'BA') + map(modify_path_ba_local, moduleList.getModuleDependenciesWithPaths(moduleList.topModule, 'GEN_BAS'))
        # collect dependencies from all awb modules
        else:
            vbinDeps += moduleList.getAllDependencies('VERILOG') + moduleList.getAllDependencies('VHDL') + moduleList.getAllDependencies('BA') + map(modify_path_ba_local, moduleList.getAllDependenciesWithPaths('GEN_BAS'))
          
        vbin = moduleList.env.Command(
            TMP_BSC_DIR + '/' + APM_NAME + '_hw.exe',
            vbinDeps,
            [ vexe_gen_command ])

        moduleList.env.AlwaysBuild(vbin)

        vexe = moduleList.env.Command(
            APM_NAME + '_hw.exe',
            vbin,
            [  generate_vexe_wrapper,
              '@chmod a+x $TARGET',
               SCons.Script.Delete(APM_NAME + '_hw.errinfo') ])
        

        moduleList.topDependency = moduleList.topDependency + [vexe]

    else:
        vbin = moduleList.env.Command(
            TMP_BSC_DIR + '/' + APM_NAME + '_hw.vexe',
            moduleList.getAllDependencies('VERILOG') +
            moduleList.getAllDependencies('VHDL') +
            moduleList.getAllDependencies('BA') +
            map(modify_path_ba_local, moduleList.getAllDependenciesWithPaths('GEN_BAS')),
            [ vexe_gen_command ])
 

        vexe = moduleList.env.Command(
            APM_NAME + '_hw.vexe',
            vbin,
            [ generate_vexe_wrapper,
              '@chmod a+x $TARGET',
            SCons.Script.Delete(APM_NAME + '_hw.exe'),
            SCons.Script.Delete(APM_NAME + '_hw.errinfo') ])

    moduleList.env.Alias('vexe', vexe)
Example #6
0
  def __init__(self, moduleList):

    fpga_part_xilinx = moduleList.env['DEFS']['FPGA_PART_XILINX']
    xilinx_apm_name = moduleList.compileDirectory + '/' + moduleList.apmName

    # Generate the signature for the FPGA image
    signature = moduleList.env.Command(
      'config/signature.sh',
      moduleList.getAllDependencies('BIT'),
      [ '@echo \'#!/bin/sh\' > $TARGET',
        '@echo signature=\\"' + moduleList.apmName + '-`md5sum $SOURCE | sed \'s/ .*//\'`\\" >> $TARGET' ])
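    # For illustration, config/signature.sh ends up containing something like
    # the following (hypothetical apmName and md5sum):
    #   #!/bin/sh
    #   signature="my_apm-d41d8cd98f00b204e9800998ecf8427e"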
    
    moduleList.topModule.moduleDependency['SIGNATURE'] = [signature]

    if (model.getBuildPipelineDebug(moduleList) != 0):
        print  moduleList.swExeOrTarget + "\n"

    ##
    ## Generate a script for loading bitfiles onto an FPGA.
    ##
    def leap_xilinx_loader(xilinx_apm_name):
      try:
        fpga_pos = moduleList.getAWBParam(['physical_platform_config', 'physical_platform'], 'FPGA_POSITION')
      except:
        fpga_pos = None

      def leap_xilinx_loader_closure(target, source, env):
        lf = open(str(target[0]), 'w')

        lf.write('#!/usr/bin/perl\n')
        lf.write('\n')
        lf.write('my $retval = 0;\n')
        if fpga_pos != None:
          lf.write('use Getopt::Long;\n')
          lf.write('my $dev_id = undef;\n')
          lf.write('GetOptions(\'device-id=i\', \$dev_id);\n')
          lf.write('\n')

          lf.write('# Check for existence of expected bitfile.\n')
          lf.write('if ( ! -e  "' + xilinx_apm_name + '_par.bit" ) {\n')
          lf.write('  die "Could not find bitfile ' + xilinx_apm_name + '_par.bit";\n')
          lf.write('}\n')

          lf.write('# Specify specific cable if device database includes a cable ID\n')
          lf.write('my $setCable = \'setCable -p auto\';\n')
          lf.write('if (defined($dev_id)) {\n')
          lf.write('  my $cable_cfg = `leap-fpga-ctrl --device-id=${dev_id} --getconfig prog_cable_id`;\n')
          lf.write('  chomp($cable_cfg);\n')
          lf.write('  $setCable = "setCable $cable_cfg" if ($cable_cfg ne "");\n')
          lf.write('}\n')
          lf.write('\n')
          lf.write('open (BATCH,">batch.opt");\n')
          lf.write('print BATCH "setMode -bscan\n')
          lf.write('${setCable}\n')
          lf.write('identify\n')
          lf.write('assignfile -p ' + str(fpga_pos) + ' -file ' + xilinx_apm_name + '_par.bit\n')
          lf.write('program -p ' + str(fpga_pos) + '\n')
          lf.write('quit\n')
          lf.write('EOF\n')
          lf.write('";\n')
          lf.write('close(BATCH);\n')
          lf.write('open (STDOUT, ">$ARGV[0]");\n')
          lf.write('open (STDERR, ">$ARGV[0]");\n')
          lf.write('$retval = system("impact -batch batch.opt");\n')
        lf.write('if($retval != 0) {\n')
        lf.write('    exit(257);\n') # some perl installs only return an 8 bit value
        lf.write('}\n')

        lf.close()
        os.chmod(str(target[0]), stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR |
                                 stat.S_IRGRP | stat.S_IXGRP |
                                 stat.S_IROTH | stat.S_IXOTH)

      return leap_xilinx_loader_closure
 
    loader = moduleList.env.Command(
      'config/' + moduleList.apmName + '.download',
      [],
      leap_xilinx_loader(xilinx_apm_name))

    dependOnSW = moduleList.getAWBParam(['xilinx_loader'], 'DEPEND_ON_SW')
    summary = 0
    if(dependOnSW):   
      summary = moduleList.env.Command(
        moduleList.apmName + '_hw.errinfo',
        moduleList.getAllDependencies('SIGNATURE') + moduleList.swExe,
        [ '@ln -fs ' + moduleList.swExeOrTarget + ' ' + moduleList.apmName,
          SCons.Script.Delete(moduleList.apmName + '_hw.exe'),
          SCons.Script.Delete(moduleList.apmName + '_hw.vexe'),
          '@echo "++++++++++++ Post-Place & Route ++++++++"',
          synthesis_library.leap_physical_summary(xilinx_apm_name + '.par.twr', moduleList.apmName + '_hw.errinfo', '^Slack \(MET\)', '^Slack \(VIOLATED\)') ])
    else:
      summary = moduleList.env.Command(
        moduleList.apmName + '_hw.errinfo',
        moduleList.getAllDependencies('SIGNATURE'),
        [ SCons.Script.Delete(moduleList.apmName + '_hw.exe'),
          SCons.Script.Delete(moduleList.apmName + '_hw.vexe'),
          '@echo "++++++++++++ Post-Place & Route ++++++++"',
          synthesis_library.leap_physical_summary(xilinx_apm_name + '.par.twr', moduleList.apmName + '_hw.errinfo', '^Slack \(MET\)', '^Slack \(VIOLATED\)') ])



    moduleList.env.Depends(summary, loader)

    moduleList.topModule.moduleDependency['LOADER'] = [summary]
    moduleList.topDependency = moduleList.topDependency + [summary]     
Example #7
0
  def __init__(self, moduleList, isPrimaryBuildTarget):

    # if we have a deps build, don't do anything...
    if(moduleList.isDependsBuild):
        return

    APM_NAME = moduleList.env['DEFS']['APM_NAME']
    BSC = moduleList.env['DEFS']['BSC']
    inc_paths = moduleList.swIncDir # we need to depend on libasim

    self.firstPassLIGraph = wrapper_gen_tool.getFirstPassLIGraph()

    # This is not correct for LIM builds and needs to be fixed. 
    TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
    ALL_DIRS_FROM_ROOT = moduleList.env['DEFS']['ALL_HW_DIRS']
    ALL_BUILD_DIRS_FROM_ROOT = model.transform_string_list(ALL_DIRS_FROM_ROOT, ':', '', '/' + TMP_BSC_DIR)
    ALL_LIB_DIRS_FROM_ROOT = ALL_DIRS_FROM_ROOT + ':' + ALL_BUILD_DIRS_FROM_ROOT


    # Due to the bluespec linker, for LI second pass builds, the final
    # verilog link step must occur in a different directory than the
    # bsc object code wrapper compilation step.  However, non-LIM
    # linker builds need to build in the original .bsc directory to
    # pick up VPI.
    vexe_vdir = moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + moduleList.env['DEFS']['ROOT_DIR_MODEL'] + '/' + moduleList.env['DEFS']['TMP_BSC_DIR'] 
    if(not self.firstPassLIGraph is None):
        vexe_vdir = vexe_vdir + '_vlog'

    if not os.path.isdir(vexe_vdir):
        os.mkdir(vexe_vdir)

    LI_LINK_DIR = ""
    if (not self.firstPassLIGraph is None):
        LI_LINK_DIR = model.get_build_path(moduleList, moduleList.topModule) + "/.li/"
        inc_paths += [LI_LINK_DIR]
        ALL_LIB_DIRS_FROM_ROOT = LI_LINK_DIR + ':' +  ALL_LIB_DIRS_FROM_ROOT

    liCodeType = ['VERILOG', 'GIVEN_VERILOG_HS', 'GEN_VPI_CS', 'GEN_VPI_HS']

    # This can be refactored as a function.

    if (not self.firstPassLIGraph is None):     
        for moduleName in self.firstPassLIGraph.modules:                       
            moduleObject = self.firstPassLIGraph.modules[moduleName]
            # we also need the module list object
            moduleListObject = moduleList.modules[moduleName]
            for codeType in liCodeType:
                # If we're linking, clean out any previous code dependencies.  These are guaranteed not to be used. 
                moduleListObject.moduleDependency[codeType] = []
                li_module.linkFirstPassObject(moduleList, moduleListObject, self.firstPassLIGraph, codeType, codeType, linkDirectory=vexe_vdir)
                 
                
    bsc_version = bsv_tool.getBluespecVersion()

    ldflags = ''
    for ld_file in moduleList.getAllDependenciesWithPaths('GIVEN_BLUESIM_LDFLAGSS'):
      ldHandle = open(moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + ld_file, 'r')
      ldflags += ldHandle.read() + ' '    

    BSC_FLAGS_VERILOG = '-steps 10000000 +RTS -K1000M -RTS -keep-fires -aggressive-conditions -wait-for-license -no-show-method-conf -no-opt-bool -licenseWarning 7 -elab -show-schedule ' + ldflags + ' -verilog -v -vsim iverilog '

    # Build in parallel.
    n_jobs = moduleList.env.GetOption('num_jobs')
    if (bsc_version >= 30006):
        BSC_FLAGS_VERILOG += '-parallel-sim-link ' + str(n_jobs) + ' '

    for path in inc_paths:
        BSC_FLAGS_VERILOG += ' -I ' + path + ' ' #+ '-Xv -I' + path + ' '

    LDFLAGS = moduleList.env['DEFS']['LDFLAGS']
    TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
    ROOT_WRAPPER_SYNTH_ID = 'mk_' + moduleList.env['DEFS']['ROOT_DIR_MODEL'] + '_Wrapper'

    vexe_gen_command = \
        BSC + ' ' + BSC_FLAGS_VERILOG + ' -vdir ' + vexe_vdir + ' -simdir ' + vexe_vdir + ' -bdir ' + vexe_vdir +' -p +:' +  ALL_LIB_DIRS_FROM_ROOT + ' -vsearch +:' + ALL_LIB_DIRS_FROM_ROOT + ' ' + \
        ' -o $TARGET' 

    if (bsc_version >= 13013):
        # 2008.01.A compiler allows us to pass C++ arguments.
        if (model.getDebug(moduleList)):
            vexe_gen_command += ' -Xc++ -O0'
        else:
            vexe_gen_command += ' -Xc++ -O1'

        # g++ 4.5.2 is complaining about overflowing the var tracking table

        if (model.getGccVersion() >= 40501):
             vexe_gen_command += ' -Xc++ -fno-var-tracking-assignments'

    defs = (software_tool.host_defs()).split(" ")
    for definition in defs:
        vexe_gen_command += ' -Xc++ ' + definition + ' -Xc ' + definition

    # Hack to link against pthreads.  Really we should have a better solution.
    vexe_gen_command += ' -Xl -lpthread '

    # construct full path to BAs
    def modify_path(str):
        array = str.split('/')
        file = array.pop()
        return  moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + '/'.join(array) + '/' + TMP_BSC_DIR + '/' + file 


    # Use systemverilog 2005
    if(moduleList.getAWBParam('verilog_tool', 'ENABLE_SYSTEM_VERILOG')):
        vexe_gen_command += ' -Xv -g2005-sv '

    # Allow .vh/.sv file extensions etc.
    # vexe_gen_command += ' -Xv -Y.vh -Xv -Y.sv '

    # Bluespec requires that source files terminate the command line.
    vexe_gen_command += '-verilog -e ' + ROOT_WRAPPER_SYNTH_ID + ' ' +\
                        moduleList.env['DEFS']['BDPI_CS']

    if (model.getBuildPipelineDebug(moduleList) != 0):
        for m in moduleList.getAllDependencies('BA'):
            print 'BA dep: ' + str(m)
        for m in moduleList.getAllDependencies('VERILOG'):
            print 'VL dep: ' + str(m)
        for m in moduleList.getAllDependencies('VHDL'):
            print 'VHDL dep: ' + str(m)


    # Generate a thin wrapper around the verilog executable.  This
    # wrapper is used to address a problem in iverilog in which the
    # simulator does not support shared library search paths.  The
    # current wrapper only works for iverilog.  Due to brokenness in
    # the iverilog argument parser, we must construct a correct
    # iverilog command line by analyzing its compiled script. Also,
    # this script is not passing through the arguments that it should
    # be passing through. 
    def generate_vexe_wrapper(target, source, env):
        wrapper_handle = open(str(target[0]),'w')
        wrapper_handle.write('#!/usr/bin/perl\n')
        wrapper_handle.write('# generated by verilog.py\n') 
        wrapper_handle.write('$platform = $ENV{"PLATFORM_DIRECTORY"};\n')
        wrapper_handle.write('@script = `cat $platform/' + TMP_BSC_DIR + '/' + APM_NAME + '_hw.exe' + '`;\n')   
        wrapper_handle.write('$script[0] =~ s/#!/ /g;\n')
        wrapper_handle.write('$vvp = $script[0];\n')
        wrapper_handle.write('chomp($vvp);\n')
        wrapper_handle.write('exec("$vvp -m$platform/directc_mk_model_Wrapper.so $platform/' + TMP_BSC_DIR + '/' + APM_NAME + '_hw.exe' + ' +bscvcd \$* ");\n')
        wrapper_handle.close()
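    # For illustration (hypothetical paths): the wrapper reads the '#!' line
    # of the bsc-generated iverilog script to find vvp, then re-execs it as
    #   vvp -m$PLATFORM_DIRECTORY/directc_mk_model_Wrapper.so \
    #       $PLATFORM_DIRECTORY/.bsc/my_model_hw.exe +bscvcd <args>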
 
    def modify_path_ba_local(path):
        return bsv_tool.modify_path_ba(moduleList, path)

    # Bluesim builds apparently touch this code. This control block
    # preserves their behavior, but it is unclear why the verilog build is 
    # involved.
    if (isPrimaryBuildTarget):
        vbinDeps = []

        # If we got a lim graph, we'll pick up many of our dependencies from it. 
        # These were annotated in the top module above. Really, this seems unclean.
        # we should build a graph during the second pass and just use it.
        if(not self.firstPassLIGraph is None):
            # Collect linked dependencies for every module
            for moduleName in self.firstPassLIGraph.modules:
                moduleListObject = moduleList.modules[moduleName]
                vbinDeps += moduleList.getDependencies(moduleListObject, 'VERILOG') + moduleList.getDependencies(moduleListObject, 'GIVEN_VERILOG_HS') + moduleList.getDependencies(moduleListObject, 'GEN_VPI_HS') + moduleList.getDependencies(moduleListObject, 'GEN_VPI_CS') + moduleList.getDependencies(moduleListObject, 'VHDL') + moduleList.getDependencies(moduleListObject, 'BA') + moduleList.getDependencies(moduleListObject, 'GEN_BAS')

            vbinDeps += moduleList.getDependencies(moduleList.topModule, 'VERILOG') + moduleList.getDependencies(moduleList.topModule, 'GIVEN_VERILOG_HS') + moduleList.getDependencies(moduleList.topModule, 'GEN_VPI_HS') + moduleList.getDependencies(moduleList.topModule, 'GEN_VPI_CS') +moduleList.getDependencies(moduleList.topModule, 'VHDL') + moduleList.getDependencies(moduleList.topModule, 'BA') + map(modify_path_ba_local, moduleList.getModuleDependenciesWithPaths(moduleList.topModule, 'GEN_BAS'))

        # collect dependencies from all awb modules
        else:
            vbinDeps += moduleList.getAllDependencies('VERILOG') + moduleList.getAllDependencies('VHDL') + moduleList.getAllDependencies('BA') + map(modify_path_ba_local, moduleList.getAllDependenciesWithPaths('GEN_BAS'))


        vbin = moduleList.env.Command(
            TMP_BSC_DIR + '/' + APM_NAME + '_hw.exe',
            vbinDeps,
            [ vexe_gen_command,
              SCons.Script.Delete('directc.sft') ])

        vexe = moduleList.env.Command(
            APM_NAME + '_hw.exe',
            vbin,
            [  generate_vexe_wrapper,
              '@chmod a+x $TARGET',
            SCons.Script.Delete(APM_NAME + '_hw.errinfo') ])


        moduleList.topDependency = moduleList.topDependency + [vexe]

    else:
        vbinDeps = moduleList.getAllDependencies('VERILOG') + moduleList.getAllDependencies('VHDL') + moduleList.getAllDependencies('BA') + map(modify_path_ba_local, moduleList.getAllDependenciesWithPaths('GEN_BAS'))

        vbin = moduleList.env.Command(
            TMP_BSC_DIR + '/' + APM_NAME + '_hw.vexe',
            vbinDeps,
            [ vexe_gen_command,
              SCons.Script.Delete('directc.sft') ])


        vexe = moduleList.env.Command(
            APM_NAME + '_hw.vexe',
            vbin,
            [ generate_vexe_wrapper,
              '@chmod a+x $TARGET',
              SCons.Script.Delete(APM_NAME + '_hw.exe'),
            SCons.Script.Delete(APM_NAME + '_hw.errinfo') ])

    moduleList.env.Alias('vexe', vexe)
Example #8
0
    def __init__(self, moduleList):
        self.pipeline_debug = model.getBuildPipelineDebug(moduleList)
        # if we have a deps build, don't do anything...
        if(moduleList.isDependsBuild):           
            return

        def modify_path_hw(path):
            return  moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + path

        if (not moduleList.getAWBParam('area_group_tool', 'AREA_GROUPS_ENABLE')):
            return

        self.emitPlatformAreaGroups = (moduleList.getAWBParam('area_group_tool',
                                                              'AREA_GROUPS_GROUP_PLATFORM_CODE') != 0)

        self.enableParentClustering = (moduleList.getAWBParam('area_group_tool',
                                                              'AREA_GROUPS_ENABLE_PARENT_CLUSTERING') != 0)

        self.enableCommunicationClustering = (moduleList.getAWBParam('area_group_tool',
                                                                     'AREA_GROUPS_ENABLE_COMMUNICATION_CLUSTERING') != 0)

        self.clusteringWeight = moduleList.getAWBParam('area_group_tool', 'AREA_GROUPS_CLUSTERING_WEIGHT')


        liGraph = LIGraph([])
        firstPassGraph = wrapper_gen_tool.getFirstPassLIGraph()
        # We should ignore the 'PLATFORM_MODULE'                                                                                                                    
        # The graph may be None if we are in the first pass.
        if(not firstPassGraph is None):
            liGraph.mergeModules(firstPassGraph.modules.values())

        self.firstPassLIGraph = liGraph


        # elaborate area group representation. This may be used in configuring later stages. 
        areaGroups = self.elaborateAreaConstraints(moduleList)
        pickle_handle = open(_areaConstraintsFileElaborated(moduleList), 'wb')
        pickle.dump(areaGroups, pickle_handle, protocol=-1)
        pickle_handle.close()                 


        # if we are only building logs, then we can stop. 
        if (moduleList.getAWBParam('bsv_tool', 'BUILD_LOGS_ONLY')):
            return  

        # We'll build a rather complex function to emit area group constraints 
        def area_group_closure(moduleList):

             def area_group(target, source, env):

                 # have we built these area groups before? If we have,
                 # then, we'll get a pickle which we can read in and
                 # operate on.
                 areaGroupsPrevious = None 
                 if(os.path.exists(_areaConstraintsFile(moduleList))):
                     # We got some previous area groups.  We'll try to
                     # reuse the solution to save on compile time.
                     pickle_handle = open(_areaConstraintsFile(moduleList), 'rb')
                     areaGroupsPrevious = pickle.load(pickle_handle)
                     pickle_handle.close()
                 
                 areaGroupsFinal = None
                 # If we got a previous area group, we'll attempt to
                 # reuse its knowledge
                 if(not areaGroupsPrevious is None):
                     areaGroupsModified = copy.deepcopy(areaGroups)
                     # if the area didn't change much (within a few
                     # percent), we will reuse the previous placement
                     # information
                     allowableAreaDelta = 1.01
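                     # (A value of 1.01 means the new area must fall within
                     # roughly +/-1% of the previous area, i.e. in the band
                     # previous/1.01 < new < previous*1.01, for the old
                     # dimensions to be reused.)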
                     for groupName in areaGroupsPrevious:
                         if(groupName in areaGroupsModified):
                             previousGroupObject = areaGroupsPrevious[groupName]
                             modifiedGroupObject = areaGroupsModified[groupName]
                             if((modifiedGroupObject.area > previousGroupObject.area/allowableAreaDelta) and (modifiedGroupObject.area < previousGroupObject.area*allowableAreaDelta)):
                                 modifiedGroupObject.xDimension = previousGroupObject.xDimension
                                 modifiedGroupObject.yDimension = previousGroupObject.yDimension

                     areaGroupsFinal = self.solveILPPartial(areaGroupsModified, fileprefix="partial_ilp_reuse_")

                 # Either we didn't have the previous information, or
                 # we failed to use it.
                 if(areaGroupsFinal is None):        
                     areaGroupsFinal = self.solveILPPartial(areaGroups)

                 # We failed to assign area groups.  Eventually, we
                 # could demote this to a warning.
                 if(areaGroupsFinal is None):      
                     print "Failed to obtain area groups"
                     exit(1)

                 # Sort area groups topologically, annotating each area group
                 # with a sortIdx field.
                 self.sort_area_groups(areaGroupsFinal)

                 # Now that we've solved (to some extent) the area
                 # group mapping problem we can dump the results for 
                 # the build tree. 

                 pickle_handle = open(_areaConstraintsFilePlaced(moduleList), 'wb')
                 pickle.dump(areaGroupsFinal, pickle_handle, protocol=-1)
                 pickle_handle.close()                 
                 
             return area_group

        # expose this dependency to the backend tools.
        moduleList.topModule.moduleDependency['AREA_GROUPS'] = [_areaConstraintsFilePlaced(moduleList)]

        # We need to get the resources for all modules, except the top module, which can change. 
        resources = [dep for dep in moduleList.getAllDependencies('RESOURCES')]

        areagroup = moduleList.env.Command( 
            [_areaConstraintsFilePlaced(moduleList)],
            resources + map(modify_path_hw, moduleList.getAllDependenciesWithPaths('GIVEN_AREA_CONSTRAINTS')),
            area_group_closure(moduleList)
            )                   
Example #9
0
  def __init__(self, moduleList):
    TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
    topModulePath = get_build_path(moduleList, moduleList.topModule)
    # The LIM compiler uniquifies synthesis boundary names  
    uidOffset = int(moduleList.getAWBParam('wrapper_gen_tool', 'MODULE_UID_OFFSET'))

    # We only inject the platform wrapper in first pass builds.  In
    # the second pass, we import the first pass object code.  It is
    # unclear whether this code is still needed in that case.

    # Inject a synth boundary for platform build code.  we need to
    # pick up some dependencies from the top level code.  this is a
    # pretty major hack, in my opinion. Better would be to actually
    # inspect the eventual .ba files for their dependencies. 
    platformName = moduleList.localPlatformName + '_platform'
    platformDeps = {}
    platformDeps['GEN_VERILOGS'] = []
    platformDeps['GEN_BAS'] = [] #moduleList.getSynthBoundaryDependencies(moduleList.topModule, 'GEN_BAS')                               
    platformDeps['GEN_VPI_HS'] = moduleList.getSynthBoundaryDependencies(moduleList.topModule, 'GEN_VPI_HS')                               
    platformDeps['GEN_VPI_CS'] = moduleList.getSynthBoundaryDependencies(moduleList.topModule, 'GEN_VPI_CS')                          
     
    #This is sort of a hack.

    platformDeps['GIVEN_BSVS'] = []
    platformDeps['WRAPPER_BSHS'] = ['awb/provides/virtual_platform.bsh', 'awb/provides/physical_platform.bsh']
    platformDeps['BA'] = []
    platformDeps['STR'] = []
    platformDeps['VERILOG'] = [topModulePath + '/' + TMP_BSC_DIR + '/mk_' + platformName + '_Wrapper.v']
    platformDeps['BSV_LOG'] = []
    platformDeps['VERILOG_STUB'] = []
       
    platform_module = Module( platformName, ["mkVirtualPlatform"], moduleList.topModule.buildPath,\
                          moduleList.topModule.name,\
                          [], moduleList.topModule.name, [], platformDeps, platformModule=True)

    platform_module.dependsFile = '.depends-platform'
    platform_module.interfaceType = 'VIRTUAL_PLATFORM'
    platform_module.extraImports = ['virtual_platform']

    first_pass_LI_graph = getFirstPassLIGraph()
    if(first_pass_LI_graph is None):
        moduleList.insertModule(platform_module)
        moduleList.graphize()
        moduleList.graphizeSynth()

        # Sprinkle more files expected by the two-pass build.  
        generateWrapperStub(moduleList, platform_module)
        generateAWBCompileWrapper(moduleList, platform_module)

    else:
        platform_module_li = first_pass_LI_graph.modules[moduleList.localPlatformName + '_platform']

        # This gives us the right path. 
        synthHandle = getSynthHandle(moduleList, platform_module)

        # throw in some includes...
        synthHandle.write('import HList::*;\n')
        synthHandle.write('import Vector::*;\n')
        synthHandle.write('import ModuleContext::*;\n')
        synthHandle.write('import GetPut::*;\n')
        synthHandle.write('import Clocks::*;\n')
        synthHandle.write('`include "awb/provides/virtual_platform.bsh"\n')
        synthHandle.write('`include "awb/provides/physical_platform.bsh"\n')
        generateWellKnownIncludes(synthHandle)
        # May need an extra import here?
        # get the platform module from the LIGraph            
        generateBAImport(platform_module_li, synthHandle)
        # include synth stub here....
        _emitSynthModule(platform_module_li, synthHandle, platform_module.interfaceType,
                         localPlatformName = moduleList.localPlatformName)

    ## Here we use a module list sorted alphabetically in order to guarantee
    ## the generated wrapper files are consistent.  The topological sort
    ## guarantees only a depth first traversal -- not the same traversal
    ## each time.
    synth_modules = [moduleList.topModule] + moduleList.synthBoundaries()

    ## Models have the option of declaring top-level clocks that will
    ## be exposed as arguments.  When top-level clocks exist a single
    ## top-level reset is also defined.  To request no top-level clocks
    ## the variable N_TOP_LEVEL_CLOCKS should be removed from a platform's
    ## AWB configuration file, since Bluespec can't test the value of
    ## a preprocessor variable.
    try:
      n_top_clocks = int(moduleList.getAWBParam('physical_platform', 'N_TOP_LEVEL_CLOCKS'))
      if (n_top_clocks == 0):
        sys.stderr.write("Error: N_TOP_LEVEL_CLOCKS may not be 0 due to Bluespec preprocessor\n")
        sys.stderr.write("       limitations.  To eliminate top-level clocks, remove the AWB\n")
        sys.stderr.write("       parameter from the platform configuration.\n")
        sys.exit(1)
    except Exception:  # don't swallow the sys.exit(1) above
      n_top_clocks = 0

    for module in synth_modules:
      modPath = moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + module.buildPath + '/' + module.name
      wrapperPath =  modPath + "_Wrapper.bsv"
      logPath = modPath + "_Log.bsv"

      conSizePath =  modPath + "_Wrapper_con_size.bsh"
      ignorePath = moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + module.buildPath + '/.ignore'

      # clear out code on clean.
      if moduleList.env.GetOption('clean'):
          os.system('rm -f ' + wrapperPath)
          os.system('rm -f ' + logPath)
          os.system('rm -f ' + conSizePath)
          os.system('rm -f ' + ignorePath)
          if (module.name != moduleList.topModule.name):
              os.system('rm -f ' + modPath + '.bsv') 
          continue

      if (model.getBuildPipelineDebug(moduleList) != 0):
        print "Wrapper path is " + wrapperPath
      wrapper_bsv = open(wrapperPath, 'w')

      ignore_bsv = open(ignorePath, 'w')
      ignore_bsv.write("// Generated by wrapper_gen.py\n\n")

      # Connection size doesn't appear on the first dependence pass, since it
      # doesn't exist until after the first build.  Finding it later results in
      # build dependence changes and rebuilding.  Ignore it, since the file will
      # change only when some other file changes.
      ignore_bsv.write(conSizePath)
      ignore_bsv.close()

      # Generate a dummy connection size file to avoid errors during dependence
      # analysis.
      if not os.path.exists(conSizePath):
          dummyModule = LIModule(module.name, module.name)
          bsh_handle = open(conSizePath, 'w')
          generateConnectionBSH(dummyModule, bsh_handle)
          bsh_handle.close()

      wrapper_bsv.write('import HList::*;\n')
      wrapper_bsv.write('import Vector::*;\n')
      wrapper_bsv.write('import ModuleContext::*;\n')
      # the top module is handled specially
      if (module.name == moduleList.topModule.name):
        generateWellKnownIncludes(wrapper_bsv)
        wrapper_bsv.write('// These are well-known/required leap modules\n')
        wrapper_bsv.write('// import non-synthesis public files\n')

        # Include all subordinate synthesis boundaries for use by
        # instantiateAllSynthBoundaries() below.
        # If we're doing a LIM build, there are no *_synth.bsv files for user code.
        # Probably if we're doing a build tree they aren't necessary either, but
        # removing those dependencies would take a little work. 
        if(first_pass_LI_graph is None):
            for synth in synth_modules:
                if synth != module:
                    wrapper_bsv.write('`include "' + synth.name + '_synth.bsv"\n')

        # Provide a method that imports all subordinate synthesis
        # boundaries.  It will be invoked inside the top level model
        # in order to build all soft connections
        use_build_tree = moduleList.getAWBParam('wrapper_gen_tool', 'USE_BUILD_TREE')
        expose_all_connections = 0
        try:
            expose_all_connections = moduleList.getAWBParam('model', 'EXPOSE_ALL_CONNECTIONS')
        except:
            pass

        if (use_build_tree == 1):
            wrapper_bsv.write('\n\n`ifdef  CONNECTION_SIZES_KNOWN\n');
            # build_tree.bsv will get generated later, during the
            # leap-connect phase.
            wrapper_bsv.write('    import build_tree_synth::*;\n'); 
            wrapper_bsv.write('    module [Connected_Module] instantiateAllSynthBoundaries#(Reset baseReset) ();\n')
            wrapper_bsv.write('        Reset rst <- mkResetFanout(baseReset);\n')
            wrapper_bsv.write('        let m <- build_tree(baseReset, reset_by rst);\n')
            wrapper_bsv.write('    endmodule\n')
            wrapper_bsv.write('`else\n');

        wrapper_bsv.write('\n    module ')
        if len(synth_modules) != 1:
            wrapper_bsv.write('[Connected_Module]')
        wrapper_bsv.write(' instantiateAllSynthBoundaries#(Reset baseReset) ();\n')

        for synth in synth_modules:
          if synth != module and not synth.platformModule:
              wrapper_bsv.write('        ' + synth.synthBoundaryModule + '();\n')

        wrapper_bsv.write('    endmodule\n')
        if (use_build_tree == 1):
            wrapper_bsv.write('`endif\n'); 


        # Import platform wrapper.
        wrapper_bsv.write('    import ' + moduleList.localPlatformName +'_platform_synth::*;\n'); 

        wrapper_bsv.write('    module [Connected_Module] instantiatePlatform ('+ platform_module.interfaceType +');\n')
        wrapper_bsv.write('        let m <- ' + moduleList.localPlatformName + '_platform(noReset);\n')
        wrapper_bsv.write('        return m;\n')
        wrapper_bsv.write('    endmodule\n')

        wrapper_bsv.write('`include "' + module.name + '.bsv"\n')

        wrapper_bsv.write('\n// import non-synthesis private files\n')
        wrapper_bsv.write('// Get definition of TOP_LEVEL_WIRES\n')
        wrapper_bsv.write('import physical_platform::*;\n')
        wrapper_bsv.write('(* synthesize *)\n')

        wrapper_bsv.write('module [Module] mk_model_Wrapper\n')
        wrapper_bsv.write('    (TOP_LEVEL_WIRES);\n\n')
        wrapper_bsv.write('    // Instantiate main module\n')
        
        wrapper_bsv.write('    let m <- mkModel(clocked_by noClock, reset_by noReset);\n')

        wrapper_bsv.write('    return m;\n')
        wrapper_bsv.write('endmodule\n')

      else:
        log_bsv = open(logPath, 'w')
        log_bsv.write('import HList::*;\n')
        log_bsv.write('import ModuleContext::*;\n')

        # Parents of a synthesis boundary likely import the top level module of
        # the boundary.  This way, the synthesis boundary could be removed and
        # the code within the boundary would be imported correctly by the parent.
        # The code within the synthesis boundary will actually be imported at the
        # top level instead, so we need a dummy module for use by the parent of
        # a boundary that looks like it imports the code but actually does nothing.
        # Importing at the top level allows us to build all synthesis regions
        # in parallel.
        dummy_import_bsv = open(modPath + '.bsv', 'w')
        dummy_import_bsv.write('// Generated by wrapper_gen.py\n\n')
        dummy_import_bsv.write('module ' + module.synthBoundaryModule + ' ();\n');
        dummy_import_bsv.write('endmodule\n');
        dummy_import_bsv.close()
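        # The dummy import file then contains just (hypothetical module name):
        #   // Generated by wrapper_gen.py
        #   module mk_foo ();
        #   endmodule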

        if not os.path.exists(modPath + '_synth.bsv'):            
            dummy_module = LIModule(module.name, module.name)
            handle = getSynthHandle(moduleList, module)
            generateSynthWrapper(dummy_module, handle, moduleList.localPlatformName,
                                 moduleType = module.interfaceType,
                                 extraImports = module.extraImports)

        for wrapper in [wrapper_bsv, log_bsv]:      
            wrapper.write('// These are well-known/required leap modules\n')
            generateWellKnownIncludes(wrapper)
            wrapper.write('`include "awb/provides/librl_bsv_base.bsh"\n')
            wrapper.write('// import non-synthesis public files\n')
            wrapper.write('`include "' + module.name + '_compile.bsv"\n')
            wrapper.write('\n\n')
            
        log_bsv.write('// First pass to see how large the vectors should be\n')
        log_bsv.write('`define CON_RECV_' + module.boundaryName + ' 100\n')
        log_bsv.write('`define CON_SEND_' + module.boundaryName + ' 100\n')
        log_bsv.write('`define CON_RECV_MULTI_' + module.boundaryName + ' 50\n')
        log_bsv.write('`define CON_SEND_MULTI_' + module.boundaryName + ' 50\n')
        log_bsv.write('`define CHAINS_' + module.boundaryName + ' 50\n')
        wrapper_bsv.write('// Real build pass.  Include file built dynamically.\n')
        wrapper_bsv.write('`include "' + module.name + '_Wrapper_con_size.bsh"\n')

        for wrapper in [wrapper_bsv, log_bsv]:      
            wrapper.write('(* synthesize *)\n')
            wrapper.write('module [Module] ' + module.wrapperName() + '#(Reset baseReset) (SOFT_SERVICES_SYNTHESIS_BOUNDARY#(`CON_RECV_' + module.boundaryName + ', `CON_SEND_' + module.boundaryName + ', `CON_RECV_MULTI_' + module.boundaryName + ', `CON_SEND_MULTI_' + module.boundaryName +', `CHAINS_' + module.boundaryName +', ' + module.interfaceType + '));\n')
            wrapper.write('  \n')
            # we need to insert the fpga platform here
            # get my parameters 

            wrapper.write('    // instantiate own module\n')
            wrapper.write('    let int_ctx0 <- initializeServiceContext();\n')
            wrapper.write('    match {.int_ctx1, .int_name1} <- runWithContext(int_ctx0, putSynthesisBoundaryID(fpgaNumPlatforms() + ' + str(module.synthBoundaryUID + uidOffset)  + '));\n');
            wrapper.write('    match {.int_ctx2, .int_name2} <- runWithContext(int_ctx1, putSynthesisBoundaryPlatform("' + moduleList.localPlatformName + '"));\n')
            wrapper.write('    match {.int_ctx3, .int_name3} <- runWithContext(int_ctx2, putSynthesisBoundaryPlatformID(' + str(moduleList.localPlatformUID) + '));\n')
            wrapper.write('    match {.int_ctx4, .int_name4} <- runWithContext(int_ctx3, putSynthesisBoundaryName("' + str(module.boundaryName) + '"));\n')
            wrapper.write('    // By convention, global string ID 0 (the first string) is the module name\n');
            wrapper.write('    match {.int_ctx5, .int_name5} <- runWithContext(int_ctx4, getGlobalStringUID("' + moduleList.localPlatformName + ':' + module.name + '"));\n');
            wrapper.write('    match {.int_ctx6, .module_ifc} <- runWithContext(int_ctx5, ' + module.synthBoundaryModule + ');\n')
            
            # Need to expose clocks of the platform Module
            if(module.platformModule):
                wrapper.write('    match {.clk, .rst} = extractClocks(module_ifc);\n')
                wrapper.write('    match {.int_ctx7, .int_name7} <- runWithContext(int_ctx6, mkSoftConnectionDebugInfo(clocked_by clk, reset_by rst));\n')
                wrapper.write('    match {.final_ctx, .m_final}  <- runWithContext(int_ctx7, mkSoftConnectionLatencyInfo(clocked_by clk, reset_by rst));\n')                
            else:
                wrapper.write('    match {.int_ctx7, .int_name7} <- runWithContext(int_ctx6, mkSoftConnectionDebugInfo);\n')
                wrapper.write('    match {.final_ctx, .m_final}  <- runWithContext(int_ctx7, mkSoftConnectionLatencyInfo);\n')
            wrapper.write('    let service_ifc <- exposeServiceContext(final_ctx);\n')
            wrapper.write('    interface services = service_ifc;\n')
            wrapper.write('    interface device = module_ifc;\n')
            wrapper.write('endmodule\n')
    
        log_bsv.close()

        wrapper_bsv.close()
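The two wrapper files written above implement a two-pass sizing scheme: the _Log wrapper is compiled first with generously over-sized soft-connection vectors, and the real wrapper then includes a dynamically built <module>_Wrapper_con_size.bsh. Presumably that header defines the same CON_RECV/CON_SEND/CHAINS macros with the counts observed in the logging pass. A minimal sketch of emitting such a header (the counts dict and path are illustrative, not part of the build system):

def write_con_size_header(path, boundary_name, counts):
    # counts is a dict such as {'CON_RECV': 3, 'CON_SEND': 2}, measured from
    # the first (logging) compilation pass; missing kinds default to zero.
    handle = open(path, 'w')
    for kind in ['CON_RECV', 'CON_SEND', 'CON_RECV_MULTI', 'CON_SEND_MULTI', 'CHAINS']:
        handle.write('`define ' + kind + '_' + boundary_name + ' ' +
                     str(counts.get(kind, 0)) + '\n')
    handle.close()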
Example #12
def dump_lim_graph(moduleList):
    lim_logs = []
    lim_stubs = []

    pipeline_debug = model.getBuildPipelineDebug(moduleList)

    for module in moduleList.synthBoundaries():

        if(module.getAttribute('LI_GRAPH_IGNORE')):
            continue

        # scrub tree build/platform, which are redundant.
        lim_logs.extend(module.getDependencies('BSV_LOG'))
        lim_stubs.extend(module.getDependencies('GEN_VERILOG_STUB'))

    # clean duplicates in logs/stubs
    lim_logs  = list(set(lim_logs))
    lim_stubs = list(set(lim_stubs))

    li_graph = moduleList.env['DEFS']['APM_NAME'] + '.li'

    ## dump a LIM graph for use by the LIM compiler.  Here
    ## we wastefully construct (or reconstruct, depending on your
    ## perspective) a LIM graph including the platform channels.
    ## Probably this result could be achieved with the mergeGraphs
    ## function.
    def dump_lim_graph(target, source, env):
        # Find the subset of sources that are log files and parse them
        logs = [s for s in source if (str(s)[-4:] == '.log')]
        fullLIGraph = LIGraph(parseLogfiles(logs))

        # annotate modules with relevant object code (useful in
        # LIM compilation)
        # this is not technically a part of the tree cut methodology, but we need to do this

        # For the LIM compiler, we must also annotate those
        # channels which are coming out of the platform code.

        for module in moduleList.synthBoundaries():
            modulePath = module.buildPath


            # Wrap the real findBuildPath() so it can be invoked
            # later by map().
            def __findBuildPath(path):
                return Source.findBuildPath(path, modulePath)

            # User area groups add a wrinkle. We need to
            # keep them around, but they don't have LI
            # channels

            if(not module.getAttribute('AREA_GROUP') is None):
                # We now need to create and integrate an
                # LI Module for this module
                newModule = LIModule(module.name, module.name)
                newModule.putAttribute('PLATFORM_MODULE', True)
                newModule.putAttribute('BLACK_BOX_AREA_GROUP', True)
                fullLIGraph.mergeModules([newModule])

            # the liGraph only knows about modules that actually
            # have connections; some modules are vestigial, and so
            # we can forget about them...
            if (module.boundaryName in fullLIGraph.modules):
                for objectType in module.moduleDependency:
                    # it appears that we need to filter
                    # these objects.  TODO: Clean the
                    # things adding to this list so we
                    # don't require the filtering step.
                    depList = module.moduleDependency[objectType]
                    convertedDeps = model.convertDependencies(depList)
                    relativeDeps = map(__findBuildPath, convertedDeps)
                    fullLIGraph.modules[module.boundaryName].putObjectCode(objectType, relativeDeps)

        for module in moduleList.synthBoundaries():
            if(module.boundaryName in fullLIGraph.modules):
                # annotate platform module with local mapping.
                if(module.name == moduleList.localPlatformName + '_platform'):
                    # The platform module is special.
                    fullLIGraph.modules[module.boundaryName].putAttribute('MAPPING', moduleList.localPlatformName)
                    fullLIGraph.modules[module.boundaryName].putAttribute('PLATFORM_MODULE', True)

        # Decorate LI modules with type
        for module in fullLIGraph.modules.values():
            module.putAttribute("EXECUTION_TYPE","RTL")

        # dump graph representation.
        pickleHandle = open(str(target[0]), 'wb')
        pickle.dump(fullLIGraph, pickleHandle, protocol=-1)
        pickleHandle.close()

        if (pipeline_debug != 0):
            print "Initial Graph is: " + str(fullLIGraph) + ": " + sys.version +"\n"

    # Set up the graph dump.  Although the graph is built
    # from only LI modules, the top wrapper contains
    # sizing information.  Also needs stubs.
    dumpGraph = moduleList.env.Command(li_graph,
                                       lim_logs + lim_stubs,
                                       dump_lim_graph)

    moduleList.topModule.moduleDependency['LIM_GRAPH'] = [li_graph]

    # dumpGraph depends on most other top level builds since it
    # walks the set of generated files.
    moduleList.env.Depends(dumpGraph, moduleList.topDependency)
    moduleList.topDependency = [dumpGraph]
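The pickled .li file produced above is consumed by a later LIM compilation pass. As a minimal sketch (assuming only the standard pickle module, and that the LIGraph/LIModule class definitions used when the graph was dumped are importable), a downstream tool could reload and inspect it like this:

import pickle

def load_lim_graph(li_graph_path):
    # Unpickling requires the same LIGraph/LIModule classes that were used
    # when the graph was dumped to be importable in this process.
    handle = open(li_graph_path, 'rb')
    graph = pickle.load(handle)
    handle.close()
    return graph

# Hypothetical usage: list the modules recorded in an APM's dumped graph.
# graph = load_lim_graph('my_apm.li')
# for name in graph.modules:
#     print name, graph.modules[name].getAttribute('EXECUTION_TYPE')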
Example #13
  def __init__(self, moduleList, isPrimaryBuildTarget):
    APM_NAME = moduleList.env['DEFS']['APM_NAME']
    BSC = moduleList.env['DEFS']['BSC']
    inc_paths = moduleList.swIncDir # we need to depend on libasim

    self.firstPassLIGraph = wrapper_gen_tool.getFirstPassLIGraph()

    # This is not correct for LIM builds and needs to be fixed. 
    TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
    ALL_DIRS_FROM_ROOT = moduleList.env['DEFS']['ALL_HW_DIRS']
    ALL_BUILD_DIRS_FROM_ROOT = model.transform_string_list(ALL_DIRS_FROM_ROOT, ':', '', '/' + TMP_BSC_DIR)
    ALL_INC_DIRS_FROM_ROOT   = '-Xv +incdir+' + ALL_DIRS_FROM_ROOT.replace(':','+') 

    #insert any supplied include paths
    for incdir in moduleList.getAllDependencies('VERILOG_INC_DIRS'):
        ALL_INC_DIRS_FROM_ROOT += "+" + incdir

    ALL_LIB_DIRS_FROM_ROOT   = ALL_DIRS_FROM_ROOT + ':' + ALL_BUILD_DIRS_FROM_ROOT

    # Due to the bluespec linker, for LI second pass builds, the final
    # verilog link step must occur in a different directory than the
    # bsc object code wrapper compilation step.  However, non-LIM
    # linker builds need to build in the original .bsc directory to
    # pick up VPI.
    vexe_vdir = moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + moduleList.env['DEFS']['ROOT_DIR_MODEL'] + '/' + moduleList.env['DEFS']['TMP_BSC_DIR'] 
    if(not self.firstPassLIGraph is None):
        vexe_vdir = vexe_vdir + '_vlog'

    if not os.path.isdir(vexe_vdir):
        os.mkdir(vexe_vdir)

    LI_LINK_DIR = ""
    if (not self.firstPassLIGraph is None):
        LI_LINK_DIR = model.get_build_path(moduleList, moduleList.topModule) + "/.li/"
        inc_paths += [LI_LINK_DIR]
        ALL_LIB_DIRS_FROM_ROOT = LI_LINK_DIR + ':' +  ALL_LIB_DIRS_FROM_ROOT

    liCodeType = ['VERILOG_PKG', 'VERILOG', 'GIVEN_VERILOG_HS', 'GEN_VPI_CS', 'GEN_VPI_HS']

    # This can be refactored as a function.
    if (not self.firstPassLIGraph is None):
        for moduleName in self.firstPassLIGraph.modules:            
            moduleObject = self.firstPassLIGraph.modules[moduleName]
            for codeType in liCodeType:
                if(codeType in moduleObject.objectCache):
                    for verilog in moduleObject.objectCache[codeType]:
                        linkPath = vexe_vdir + '/' + os.path.basename(verilog)
                        moduleList.env.Command(linkPath, verilog, model.link_file)
                        if(codeType in moduleList.topModule.moduleDependency):
                            moduleList.topModule.moduleDependency[codeType] += [linkPath]
                        else:
                            moduleList.topModule.moduleDependency[codeType] = [linkPath]
                else:
                    # Warn that we did not find the verilog we expected to find.
                    print "Warning: We did not find verilog for module " + moduleName
                
    bsc_version = bsv_tool.getBluespecVersion()

    ldflags = ''
    for ld_file in moduleList.getAllDependenciesWithPaths('GIVEN_BLUESIM_LDFLAGSS'):
      ldHandle = open(moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + ld_file, 'r')
      ldflags += ldHandle.read() + ' '    

    BSC_FLAGS_VERILOG = '-steps 10000000 +RTS -K1000M -RTS -keep-fires -aggressive-conditions -wait-for-license -no-show-method-conf -no-opt-bool -licenseWarning 7 -elab -show-schedule ' + ldflags + ' -verilog -v -vsim vcs '

    # Build in parallel.
    n_jobs = moduleList.env.GetOption('num_jobs')
    if (bsc_version >= 30006):
        BSC_FLAGS_VERILOG += '-parallel-sim-link ' + str(n_jobs) + ' '

    for path in inc_paths:
        BSC_FLAGS_VERILOG += ' -I ' + path + ' '

    LDFLAGS = moduleList.env['DEFS']['LDFLAGS']
    TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
    ROOT_WRAPPER_SYNTH_ID = 'mk_' + moduleList.env['DEFS']['ROOT_DIR_MODEL'] + '_Wrapper'

    vexe_gen_command = \
        BSC + ' ' + BSC_FLAGS_VERILOG + ' -vdir ' + vexe_vdir + ' -simdir ' + vexe_vdir + ' -bdir ' + vexe_vdir +' -p +:' +  ALL_LIB_DIRS_FROM_ROOT + ' -vsearch +:' + ALL_LIB_DIRS_FROM_ROOT + ' ' + \
        ' -o $TARGET' 


    if (bsc_version >= 13013):
        # 2008.01.A compiler allows us to pass C++ arguments.
        if (model.getDebug(moduleList)):
            vexe_gen_command += ' -Xc++ -O0'
        else:
            vexe_gen_command += ' -Xc++ -O1'

        # g++ 4.5.2 is complaining about overflowing the var tracking table

        if (model.getGccVersion() >= 40501):
             vexe_gen_command += ' -Xc++ -fno-var-tracking-assignments'

    defs = (software_tool.host_defs()).split(" ")
    for definition in defs:
        vexe_gen_command += ' -Xc++ ' + definition + ' -Xc ' + definition
 
    # cflags to be passed into vcs compiler
    for definition in defs:
        vexe_gen_command += ' -Xv -CFLAGS -Xv ' + definition
    for path in inc_paths:
        vexe_gen_command += ' -Xv -CFLAGS -Xv -I' + path

    for lib in moduleList.swLinkLibs:
        vexe_gen_command += ' -Xl -l' + lib + ' '
        vexe_gen_command += ' -Xv -LDFLAGS -Xv -l' + lib + ' '

    # construct full path to BAs
    def modify_path(str):
        array = str.split('/')
        file = array.pop()
        return  moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + '/'.join(array) + '/' + TMP_BSC_DIR + '/' + file 
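    # For illustration with a hypothetical input: modify_path('a/b/mkFoo.ba')
    # returns ROOT_DIR_HW + '/a/b/' + TMP_BSC_DIR + '/mkFoo.ba', i.e. the file
    # name is redirected into that module's temporary bsc build directory.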

    if (moduleList.getAWBParam('verilog_tool', 'VCS_ENABLE_LINT') != 0):
        vexe_gen_command += ' -Xv +lint=all,noVCDE'

    vexe_gen_command += ' -Xv -full64 '
    vexe_gen_command += ' -Xv -sverilog '
    vexe_gen_command += ' -Xv +librescan '
    vexe_gen_command += ' -Xv +libext+.sv '
    if (moduleList.getAWBParam('verilog_tool', 'VCS_ARGUMENTS')):
        vexe_gen_command += moduleList.getAWBParam('verilog_tool', 'VCS_ARGUMENTS')
    vexe_gen_command += ' ' + ALL_INC_DIRS_FROM_ROOT + ' '

    # VCS must be informed of all BDPI.  Really we need some kind of
    # file object here.  All this massaging of path is ridiculous.
    vexe_gen_command += ' -Xv ' +  moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + (' -Xv ' +  moduleList.env['DEFS']['ROOT_DIR_HW'] + '/').join(moduleList.getAllDependenciesWithPaths('GIVEN_BDPI_CS')) + ' ' 

    # Bluespec requires that source files terminate the command line.
    vexe_gen_command += ' -verilog -e ' + ROOT_WRAPPER_SYNTH_ID + ' ' +\
                        moduleList.env['DEFS']['BDPI_CS']

    vexe_gen_command += ' ' + ' '.join(moduleList.getAllDependencies('VERILOG_PKG'))
    vexe_gen_command += ' ' + ' '.join(moduleList.getAllDependencies('VERILOG'))
    vexe_gen_command += ' ' + ' '.join(moduleList.getAllDependencies('VHDL'))

    if (model.getBuildPipelineDebug(moduleList) != 0):
        for m in moduleList.getAllDependencies('BA'):
            print 'BA dep: ' + str(m)
        for m in moduleList.getAllDependencies('VERILOG_PKG'):
            print 'VPKG dep: ' + str(m)
        for m in moduleList.getAllDependencies('VERILOG'):
            print 'VL dep: ' + str(m)
        for m in moduleList.getAllDependencies('VHDL'):
            print 'VHDL dep: ' + str(m)
        for m in moduleList.getAllDependencies('GIVEN_BDPI_CS'):
            print 'GIVEN_BDPI_CS: ' + str(m)

    # Generate a thin wrapper around the verilog executable.  This
    # wrapper is used to address a problem in iverilog in which the
    # simulator does not support shared library search paths.  The
    # current wrapper only works for iverilog.  Due to brokenness in
    # the iverilog argument parser, we must construct a correct
    # iverilog command line by analyzing its compiled script. Also,
    # this script is not passing through the arguments that it should
    # be passing through. 
    def generate_vexe_wrapper(target, source, env):
        wrapper_handle = open(str(target[0]),'w')
        wrapper_handle.write('#!/usr/bin/perl\n')
        wrapper_handle.write('# generated by verilog.py\n') 
        wrapper_handle.write('$platform = $ENV{"PLATFORM_DIRECTORY"};\n')
        wrapper_handle.write('$ENV{LD_LIBRARY_PATH} = $platform . ":" . $ENV{LD_LIBRARY_PATH};\n')
        wrapper_handle.write('`ln -sf $platform/directc_mk_model_Wrapper.so .`;\n')
        wrapper_handle.write('exec("$platform/' + TMP_BSC_DIR + '/' + APM_NAME + '_hw.exe  -licqueue \$* ");\n')
        wrapper_handle.close()
 
    def modify_path_ba_local(path):
        return bsv_tool.modify_path_ba(moduleList, path)

    # Bluesim builds apparently touch this code. This control block
    # preserves their behavior, but it is unclear why the verilog build is 
    # involved.
    if (isPrimaryBuildTarget):
        vbinDeps = []
        # If we got a lim graph, we'll pick up many of our dependencies from it. 
        # These were annotated in the top module above. Really, this seems unclean.
        # We should build a graph during the second pass and just use it.
        if(not self.firstPassLIGraph is None):
            vbinDeps += moduleList.getDependencies(moduleList.topModule, 'VERILOG_PKG') + \
                        moduleList.getDependencies(moduleList.topModule, 'VERILOG') + \
                        moduleList.getDependencies(moduleList.topModule, 'VERILOG_LIB') + \
                        moduleList.getDependencies(moduleList.topModule, 'GIVEN_VERILOG_HS') + \
                        moduleList.getDependencies(moduleList.topModule, 'GEN_VPI_HS') + \
                        moduleList.getDependencies(moduleList.topModule, 'GEN_VPI_CS') + \
                        moduleList.getDependencies(moduleList.topModule, 'VHDL') + \
                        moduleList.getDependencies(moduleList.topModule, 'BA') + \
                        map(modify_path_ba_local, moduleList.getModuleDependenciesWithPaths(moduleList.topModule, 'GEN_BAS'))
        # collect dependencies from all awb modules
        else:
            vbinDeps += moduleList.getAllDependencies('VERILOG_PKG') + \
                        moduleList.getAllDependencies('VERILOG') + \
                        moduleList.getAllDependencies('VERILOG_LIB') + \
                        moduleList.getAllDependencies('VHDL') + \
                        moduleList.getAllDependencies('BA') + \
                        map(modify_path_ba_local, moduleList.getAllDependenciesWithPaths('GEN_BAS'))
          
        vbin = moduleList.env.Command(
            TMP_BSC_DIR + '/' + APM_NAME + '_hw.exe',
            vbinDeps,
            [ vexe_gen_command ])

        moduleList.env.AlwaysBuild(vbin)

        vexe = moduleList.env.Command(
            APM_NAME + '_hw.exe',
            vbin,
            [  generate_vexe_wrapper,
              '@chmod a+x $TARGET',
               SCons.Script.Delete(APM_NAME + '_hw.errinfo') ])
        

        moduleList.topDependency = moduleList.topDependency + [vexe]

    else:
        vbin = moduleList.env.Command(
            TMP_BSC_DIR + '/' + APM_NAME + '_hw.vexe',
            moduleList.getAllDependencies('VERILOG_PKG') +
            moduleList.getAllDependencies('VERILOG') +
            moduleList.getAllDependencies('VHDL') +
            moduleList.getAllDependencies('BA') +
            map(modify_path_ba_local, moduleList.getAllDependenciesWithPaths('GEN_BAS')),
            [ vexe_gen_command ])
 

        vexe = moduleList.env.Command(
            APM_NAME + '_hw.vexe',
            vbin,
            [ generate_vexe_wrapper,
              '@chmod a+x $TARGET',
            SCons.Script.Delete(APM_NAME + '_hw.exe'),
            SCons.Script.Delete(APM_NAME + '_hw.errinfo') ])

    moduleList.env.Alias('vexe', vexe)
Example #14
File: BSV.py  Project: chenm001/leap
    def __init__(self, moduleList):
        # some definitions used during the bsv compilation process
        env = moduleList.env
        self.moduleList = moduleList

        self.hw_dir = env.Dir(moduleList.env['DEFS']['ROOT_DIR_HW'])

        self.TMP_BSC_DIR = env['DEFS']['TMP_BSC_DIR']
        synth_modules = moduleList.synthBoundaries()

        self.USE_TREE_BUILD = moduleList.getAWBParam('wrapper_gen_tool', 'USE_BUILD_TREE')

        # all_module_dirs: a list of all module directories in the build tree
        self.all_module_dirs = [self.hw_dir.Dir(moduleList.topModule.buildPath)]
        for module in synth_modules:
            if (module.buildPath != moduleList.topModule.buildPath):
                self.all_module_dirs += [self.hw_dir.Dir(module.buildPath)]

        # all_build_dirs: the build (.bsc) sub-directory of all module directories
        self.all_build_dirs = [d.Dir(self.TMP_BSC_DIR) for d in self.all_module_dirs]

        # Include iface directories
        self.all_module_dirs += iface_tool.getIfaceIncludeDirs(moduleList)
        self.all_build_dirs += iface_tool.getIfaceLibDirs(moduleList)

        # Add the top level build directory
        self.all_build_dirs += [env.Dir(self.TMP_BSC_DIR)]

        self.all_module_dirs += [self.hw_dir.Dir('include'),
                                 self.hw_dir.Dir('include/awb/provides')]

        # Full search path: all module and build directories
        self.all_lib_dirs = self.all_module_dirs + self.all_build_dirs

        all_build_dir_paths = [d.path for d in self.all_build_dirs]
        self.ALL_BUILD_DIR_PATHS = ':'.join(all_build_dir_paths)

        all_lib_dir_paths = [d.path for d in self.all_lib_dirs]
        self.ALL_LIB_DIR_PATHS = ':'.join(all_lib_dir_paths)

        # we need to annotate the module list with the
        # bluespec-provided library files. Do so here.
        bsv_tool.decorateBluespecLibraryCode(moduleList)

        self.TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
        self.BUILD_LOGS_ONLY = moduleList.getAWBParam('bsv_tool', 'BUILD_LOGS_ONLY')
        self.USE_BVI = moduleList.getAWBParam('bsv_tool', 'USE_BVI')

        self.pipeline_debug = model.getBuildPipelineDebug(moduleList)

        # Should we be building in events?
        if (model.getEvents(moduleList) == 0):
            bsc_events_flag = ' -D HASIM_EVENTS_ENABLED=False '
        else:
            bsc_events_flag = ' -D HASIM_EVENTS_ENABLED=True '

        self.BSC_FLAGS = moduleList.getAWBParam('bsv_tool', 'BSC_FLAGS') + bsc_events_flag

        moduleList.env.VariantDir(self.TMP_BSC_DIR, '.', duplicate=0)
        moduleList.env['ENV']['BUILD_DIR'] = moduleList.env['DEFS']['BUILD_DIR']  # need to set the builddir for synplify


        topo = moduleList.topologicalOrderSynth()
        topo.reverse()

        # Cleaning? Wipe out module temporary state. Do this before
        # the topo pop to ensure that we don't leave garbage around at
        # the top level.
        if moduleList.env.GetOption('clean'):
            for module in topo:
                MODULE_PATH =  get_build_path(moduleList, module)
                os.system('cd '+ MODULE_PATH + '/' + self.TMP_BSC_DIR + '; rm -f *.ba *.c *.h *.sched *.log *.v *.bo *.str')

        topo.pop() # get rid of top module.

        ## Python module that generates a wrapper to connect the exposed
        ## wires of all synthesis boundaries.
        tree_builder = bsv_tool.BSVSynthTreeBuilder(self)

        ##
        ## Is this a normal build or a build in which only Bluespec dependence
        ## is computed?
        ##

        if not moduleList.isDependsBuild:
            ##
            ## Normal build.
            ##

            ##
            ## Now that the "depends-init" build is complete we can
            ## continue with accurate inter-Bluespec file dependence.
            ## This build only takes place for the first pass object
            ## code generation.  If the first pass li graph exists, it
            ## subsumes awb-style synthesis boundary generation.
            ##
            for module in topo:
                self.build_synth_boundary(moduleList, module)


            ## We are going to have a whole bunch of BA and V files coming.
            ## We don't yet know what they contain, but we do know that there
            ## will be |synth_modules| - 2 of them

            if (not 'GEN_VERILOGS' in moduleList.topModule.moduleDependency):
                moduleList.topModule.moduleDependency['GEN_VERILOGS'] = []
            if (not 'GEN_BAS' in moduleList.topModule.moduleDependency):
                moduleList.topModule.moduleDependency['GEN_BAS'] = []

            ## Having described the new build tree dependencies we can build
            ## the top module.
            self.build_synth_boundary(moduleList, moduleList.topModule)

            ## Merge all synthesis boundaries using a tree?  The tree reduces
            ## the number of connections merged in a single compilation, allowing
            ## us to support larger systems.
            if self.USE_TREE_BUILD:
                tree_builder.setupTreeBuild(moduleList, topo)

            ##
            ## Generate the global string table.  Bluespec-generated global
            ## strings are stored in files by the compiler.
            ##
            ## The global string file will be generated in the top-level
            ## .bsc directory and a link to it will be added to the
            ## top-level directory.
            ##
            all_str_src = []
            #for module in topo + [moduleList.topModule]:
            for module in moduleList.moduleList + topo + [moduleList.topModule]:
                if('STR' in module.moduleDependency):
                    all_str_src.extend(module.moduleDependency['STR'])

            if (self.BUILD_LOGS_ONLY == 0):
                bsc_str = moduleList.env.Command(self.TMP_BSC_DIR + '/' + moduleList.env['DEFS']['APM_NAME'] + '.str',
                                                 all_str_src,
                                                 [ 'cat $SOURCES > $TARGET'])
                strDep = moduleList.env.Command(moduleList.env['DEFS']['APM_NAME'] + '.str',
                                                bsc_str,
                                                [ 'ln -fs ' + self.TMP_BSC_DIR + '/`basename $TARGET` $TARGET' ])
                moduleList.topDependency += [strDep]



            if moduleList.env.GetOption('clean'):
                print 'Cleaning depends-init...'
                s = os.system('scons --clean depends-init')
        else:

            ##
            ## Dependence build.  The target of this build is "depends-init".  No
            ## Bluespec modules will be compiled in this invocation of SCons.
            ## Only .depends-bsv files will be produced.
            ##

            # We need to calculate some dependencies for the build
            # tree.  We could be clever and put this code somewhere
            # rather than replicate it.
            if self.USE_TREE_BUILD: 

                buildTreeDeps = {}
                buildTreeDeps['GEN_VERILOGS'] = []
                buildTreeDeps['GEN_BAS'] = []
                #This is sort of a hack.
                buildTreeDeps['WRAPPER_BSHS'] = ['awb/provides/soft_services.bsh']
                buildTreeDeps['GIVEN_BSVS'] = []
                buildTreeDeps['BA'] = []
                buildTreeDeps['STR'] = []
                buildTreeDeps['VERILOG'] = []
                buildTreeDeps['BSV_LOG'] = []
                buildTreeDeps['VERILOG_STUB'] = []

                tree_module = Module( 'build_tree', ["mkBuildTree"], moduleList.topModule.buildPath,\
                             moduleList.topModule.name,\
                             [], moduleList.topModule.name, [], buildTreeDeps, platformModule=True)

                tree_module.dependsFile = '.depends-build-tree'

                moduleList.insertModule(tree_module)
                tree_file_bo = get_build_path(moduleList, moduleList.topModule) + "/build_tree.bsv"
                # sprinkle files to get dependencies right
                bo_handle = open(tree_file_bo,'w')

                # mimic AWB/leap-configure

                bo_handle.write('//\n')
                bo_handle.write('// Synthesized compilation file for module: build_tree\n')
                bo_handle.write('//\n')
                bo_handle.write('//   This file was created by BSV.py\n')
                bo_handle.write('//\n')

                bo_handle.write('`define BUILDING_MODULE_build_tree\n')
                bo_handle.write('`include "build_tree_Wrapper.bsv"\n')

                bo_handle.close()

                # Calling generateWrapperStub will write out default _Wrapper.bsv
                # and _Log.bsv files for build tree. However, these files
                # may already exist, and, in the case of build_tree_Wrapper.bsv,
                # have meaningful content.  Fortunately, generateWrapperStub
                # will not overwrite existing files.
                wrapper_gen_tool.generateWrapperStub(moduleList, tree_module)
                wrapper_gen_tool.generateAWBCompileWrapper(moduleList, tree_module)
                topo.append(tree_module)


            deps = []

            useDerived = True
            first_pass_LI_graph = wrapper_gen_tool.getFirstPassLIGraph()
            if (not first_pass_LI_graph is None):
                useDerived = False
                # we also need to parse the platform_synth file in this case
                platform_synth = get_build_path(moduleList, moduleList.topModule) + "/" +  moduleList.localPlatformName + "_platform_synth.bsv"
                platform_deps = ".depends-platform"
                deps += self.compute_dependence(moduleList, moduleList.topModule, useDerived, fileName=platform_deps, targetFiles=[platform_synth])

                # If we have an LI graph, we need to construct and compile
                # several LI wrappers.  Do that here.
                # Include all the dependencies in the graph in the wrapper.
                li_wrappers = []
                tree_base_path = get_build_path(moduleList, moduleList.topModule)
                liGraph = LIGraph([])
                firstPassGraph = first_pass_LI_graph
                # We should ignore the 'PLATFORM_MODULE'
                liGraph.mergeModules([ module for module in getUserModules(firstPassGraph) if module.getAttribute('RESYNTHESIZE') is None])
                for module in sorted(liGraph.graph.nodes(), key=lambda module: module.name):
                    wrapper_import_path = tree_base_path + '/' + module.name + '_Wrapper.bsv'
                    li_wrappers.append(module.name + '_Wrapper.bsv')
                    wrapper_import_handle = open(wrapper_import_path, 'w')
                    wrapper_import_handle.write('import Vector::*;\n')
                    wrapper_gen_tool.generateWellKnownIncludes(wrapper_import_handle)
                    wrapper_gen_tool.generateBAImport(module, wrapper_import_handle)
                    wrapper_import_handle.close()
                    platform_deps = ".depends-" + module.name
                    deps += self.compute_dependence(moduleList, moduleList.topModule, useDerived, fileName=platform_deps, targetFiles=[wrapper_import_path])
        
            for module in topo + [moduleList.topModule]:
                # for object import builds no Wrapper code will be included. remove it.
                deps += self.compute_dependence(moduleList, module, useDerived, fileName=module.dependsFile)

            moduleList.topDependsInit += deps
Example #15
File: verilog.py  Project: chenm001/leap
    def __init__(self, moduleList, isPrimaryBuildTarget):

        # if we have a deps build, don't do anything...
        if (moduleList.isDependsBuild):
            return

        APM_NAME = moduleList.env['DEFS']['APM_NAME']
        BSC = moduleList.env['DEFS']['BSC']
        inc_paths = moduleList.swIncDir  # we need to depend on libasim

        self.firstPassLIGraph = wrapper_gen_tool.getFirstPassLIGraph()

        # This is not correct for LIM builds and needs to be fixed.
        TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
        ALL_DIRS_FROM_ROOT = moduleList.env['DEFS']['ALL_HW_DIRS']
        ALL_BUILD_DIRS_FROM_ROOT = model.transform_string_list(
            ALL_DIRS_FROM_ROOT, ':', '', '/' + TMP_BSC_DIR)
        ALL_LIB_DIRS_FROM_ROOT = ALL_DIRS_FROM_ROOT + ':' + ALL_BUILD_DIRS_FROM_ROOT

        # Due to the bluespec linker, for LI second pass builds, the final
        # verilog link step must occur in a different directory than the
        # bsc object code wrapper compilation step.  However, non-LIM
        # linker builds need to build in the original .bsc directory to
        # pick up VPI.
        vexe_vdir = moduleList.env['DEFS'][
            'ROOT_DIR_HW'] + '/' + moduleList.env['DEFS'][
                'ROOT_DIR_MODEL'] + '/' + moduleList.env['DEFS']['TMP_BSC_DIR']
        if (not self.firstPassLIGraph is None):
            vexe_vdir = vexe_vdir + '_vlog'

        if not os.path.isdir(vexe_vdir):
            os.mkdir(vexe_vdir)

        LI_LINK_DIR = ""
        if (not self.firstPassLIGraph is None):
            LI_LINK_DIR = model.get_build_path(moduleList,
                                               moduleList.topModule) + "/.li/"
            inc_paths += [LI_LINK_DIR]
            ALL_LIB_DIRS_FROM_ROOT = LI_LINK_DIR + ':' + ALL_LIB_DIRS_FROM_ROOT

        liCodeType = [
            'VERILOG', 'GIVEN_VERILOG_HS', 'GEN_VPI_CS', 'GEN_VPI_HS'
        ]

        # This can be refactored as a function.

        if (not self.firstPassLIGraph is None):
            for moduleName in self.firstPassLIGraph.modules:
                moduleObject = self.firstPassLIGraph.modules[moduleName]
                # we also need the module list object
                moduleListObject = moduleList.modules[moduleName]
                for codeType in liCodeType:
                    # If we're linking, clean out any previous code dependencies.  These are guaranteed not to be used.
                    moduleListObject.moduleDependency[codeType] = []
                    li_module.linkFirstPassObject(moduleList,
                                                  moduleListObject,
                                                  self.firstPassLIGraph,
                                                  codeType,
                                                  codeType,
                                                  linkDirectory=vexe_vdir)

        bsc_version = bsv_tool.getBluespecVersion()

        ldflags = ''
        for ld_file in moduleList.getAllDependenciesWithPaths(
                'GIVEN_BLUESIM_LDFLAGSS'):
            ldHandle = open(
                moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + ld_file, 'r')
            ldflags += ldHandle.read() + ' '

        BSC_FLAGS_VERILOG = '-steps 10000000 +RTS -K1000M -RTS -keep-fires -aggressive-conditions -wait-for-license -no-show-method-conf -no-opt-bool -licenseWarning 7 -elab -show-schedule ' + ldflags + ' -verilog -v -vsim iverilog '

        # Build in parallel.
        n_jobs = moduleList.env.GetOption('num_jobs')
        if (bsc_version >= 30006):
            BSC_FLAGS_VERILOG += '-parallel-sim-link ' + str(n_jobs) + ' '

        for path in inc_paths:
            BSC_FLAGS_VERILOG += ' -I ' + path + ' '  #+ '-Xv -I' + path + ' '

        LDFLAGS = moduleList.env['DEFS']['LDFLAGS']
        TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
        ROOT_WRAPPER_SYNTH_ID = 'mk_' + moduleList.env['DEFS'][
            'ROOT_DIR_MODEL'] + '_Wrapper'

        vexe_gen_command = \
            BSC + ' ' + BSC_FLAGS_VERILOG + ' -vdir ' + vexe_vdir + ' -simdir ' + vexe_vdir + ' -bdir ' + vexe_vdir +' -p +:' +  ALL_LIB_DIRS_FROM_ROOT + ' -vsearch +:' + ALL_LIB_DIRS_FROM_ROOT + ' ' + \
            ' -o $TARGET'

        if (bsc_version >= 13013):
            # 2008.01.A compiler allows us to pass C++ arguments.
            if (model.getDebug(moduleList)):
                vexe_gen_command += ' -Xc++ -O0'
            else:
                vexe_gen_command += ' -Xc++ -O1'

            # g++ 4.5.2 is complaining about overflowing the var tracking table

            if (model.getGccVersion() >= 40501):
                vexe_gen_command += ' -Xc++ -fno-var-tracking-assignments'

        defs = (software_tool.host_defs()).split(" ")
        for definition in defs:
            vexe_gen_command += ' -Xc++ ' + definition + ' -Xc ' + definition

        # Hack to link against pthreads.  Really we should have a better solution.
        vexe_gen_command += ' -Xl -lpthread '

        # construct full path to BAs
        def modify_path(str):
            array = str.split('/')
            file = array.pop()
            return moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + '/'.join(
                array) + '/' + TMP_BSC_DIR + '/' + file

        # Use systemverilog 2005
        if (moduleList.getAWBParam('verilog_tool', 'ENABLE_SYSTEM_VERILOG')):
            vexe_gen_command += ' -Xv -g2005-sv '

        # Allow .vh/.sv file extensions etc.
        # vexe_gen_command += ' -Xv -Y.vh -Xv -Y.sv '

        # Bluespec requires that source files terminate the command line.
        vexe_gen_command += '-verilog -e ' + ROOT_WRAPPER_SYNTH_ID + ' ' +\
                            moduleList.env['DEFS']['BDPI_CS']

        if (model.getBuildPipelineDebug(moduleList) != 0):
            for m in moduleList.getAllDependencies('BA'):
                print 'BA dep: ' + str(m)
            for m in moduleList.getAllDependencies('VERILOG'):
                print 'VL dep: ' + str(m)
            for m in moduleList.getAllDependencies('VHDL'):
                print 'VHDL dep: ' + str(m)

        # Generate a thin wrapper around the verilog executable.  This
        # wrapper is used to address a problem in iverilog in which the
        # simulator does not support shared library search paths.  The
        # current wrapper only works for iverilog.  Due to brokenness in
        # the iverilog argument parser, we must construct a correct
        # iverilog command line by analyzing its compiled script. Also,
        # this script is not passing through the arguments that it should
        # be passing through.
        def generate_vexe_wrapper(target, source, env):
            wrapper_handle = open(str(target[0]), 'w')
            wrapper_handle.write('#!/usr/bin/perl\n')
            wrapper_handle.write('# generated by verilog.py\n')
            wrapper_handle.write('$platform = $ENV{"PLATFORM_DIRECTORY"};\n')
            wrapper_handle.write('@script = `cat $platform/' + TMP_BSC_DIR +
                                 '/' + APM_NAME + '_hw.exe' + '`;\n')
            wrapper_handle.write('$script[0] =~ s/#!/ /g;\n')
            wrapper_handle.write('$vvp = $script[0];\n')
            wrapper_handle.write('chomp($vvp);\n')
            wrapper_handle.write(
                'exec("$vvp -m$platform/directc_mk_model_Wrapper.so $platform/'
                + TMP_BSC_DIR + '/' + APM_NAME + '_hw.exe' +
                ' +bscvcd \$* ");\n')
            wrapper_handle.close()

        def modify_path_ba_local(path):
            return bsv_tool.modify_path_ba(moduleList, path)

        # Bluesim builds apparently touch this code. This control block
        # preserves their behavior, but it is unclear why the verilog build is
        # involved.
        if (isPrimaryBuildTarget):
            vbinDeps = []

            # If we got a lim graph, we'll pick up many of our dependencies from it.
            # These were annotated in the top module above. Really, this seems unclean.
            # We should build a graph during the second pass and just use it.
            if (not self.firstPassLIGraph is None):
                # Collect linked dependencies for every module
                for moduleName in self.firstPassLIGraph.modules:
                    moduleListObject = moduleList.modules[moduleName]
                    vbinDeps += moduleList.getDependencies(moduleListObject, 'VERILOG') + \
                                moduleList.getDependencies(moduleListObject, 'GIVEN_VERILOG_HS') + \
                                moduleList.getDependencies(moduleListObject, 'GEN_VPI_HS') + \
                                moduleList.getDependencies(moduleListObject, 'GEN_VPI_CS') + \
                                moduleList.getDependencies(moduleListObject, 'VHDL') + \
                                moduleList.getDependencies(moduleListObject, 'BA') + \
                                moduleList.getDependencies(moduleListObject, 'GEN_BAS')

                vbinDeps += moduleList.getDependencies(moduleList.topModule, 'VERILOG') + \
                            moduleList.getDependencies(moduleList.topModule, 'GIVEN_VERILOG_HS') + \
                            moduleList.getDependencies(moduleList.topModule, 'GEN_VPI_HS') + \
                            moduleList.getDependencies(moduleList.topModule, 'GEN_VPI_CS') + \
                            moduleList.getDependencies(moduleList.topModule, 'VHDL') + \
                            moduleList.getDependencies(moduleList.topModule, 'BA') + \
                            map(modify_path_ba_local,
                                moduleList.getModuleDependenciesWithPaths(moduleList.topModule, 'GEN_BAS'))

            # collect dependencies from all awb modules
            else:
                vbinDeps += moduleList.getAllDependencies('VERILOG') + \
                            moduleList.getAllDependencies('VHDL') + \
                            moduleList.getAllDependencies('BA') + \
                            map(modify_path_ba_local,
                                moduleList.getAllDependenciesWithPaths('GEN_BAS'))

            vbin = moduleList.env.Command(
                TMP_BSC_DIR + '/' + APM_NAME + '_hw.exe', vbinDeps,
                [vexe_gen_command,
                 SCons.Script.Delete('directc.sft')])

            vexe = moduleList.env.Command(APM_NAME + '_hw.exe', vbin, [
                generate_vexe_wrapper, '@chmod a+x $TARGET',
                SCons.Script.Delete(APM_NAME + '_hw.errinfo')
            ])

            moduleList.topDependency = moduleList.topDependency + [vexe]

        else:
            vbinDeps = moduleList.getAllDependencies('VERILOG') + \
                       moduleList.getAllDependencies('VHDL') + \
                       moduleList.getAllDependencies('BA') + \
                       map(modify_path_ba_local,
                           moduleList.getAllDependenciesWithPaths('GEN_BAS'))

            vbin = moduleList.env.Command(
                TMP_BSC_DIR + '/' + APM_NAME + '_hw.vexe', vbinDeps,
                [vexe_gen_command,
                 SCons.Script.Delete('directc.sft')])

            vexe = moduleList.env.Command(APM_NAME + '_hw.vexe', vbin, [
                generate_vexe_wrapper, '@chmod a+x $TARGET',
                SCons.Script.Delete(APM_NAME + '_hw.exe'),
                SCons.Script.Delete(APM_NAME + '_hw.errinfo')
            ])

        moduleList.env.Alias('vexe', vexe)
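For reference, the Perl wrapper emitted by generate_vexe_wrapper above recovers the vvp interpreter from the shebang line of the bsc-compiled script and re-execs the script with the bsc VPI shared object loaded. A minimal Python sketch of the same idea (the function name and arguments are illustrative, not part of the build system):

import os

def run_compiled_iverilog_script(script_path, vpi_module, extra_args=()):
    # The first line of the compiled script is a '#!<path-to-vvp>' shebang;
    # strip the '#!' prefix to recover the interpreter path.
    handle = open(script_path)
    shebang = handle.readline()
    handle.close()
    vvp = shebang.replace('#!', '', 1).strip()
    # Re-run the script under vvp, loading the VPI module explicitly, as the
    # generated wrapper does with '-m.../directc_mk_model_Wrapper.so +bscvcd'.
    argv = [vvp, '-m' + vpi_module, script_path, '+bscvcd'] + list(extra_args)
    os.execv(vvp, argv)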
Example #16
    def __init__(self, moduleList):

        fpga_part_xilinx = moduleList.env['DEFS']['FPGA_PART_XILINX']
        xilinx_apm_name = moduleList.compileDirectory + '/' + moduleList.apmName

        # Generate the signature for the FPGA image
        signature = moduleList.env.Command(
            'config/signature.sh', moduleList.getAllDependencies('BIT'), [
                '@echo \'#!/bin/sh\' > $TARGET',
                '@echo signature=\\"' + moduleList.apmName +
                '-`md5sum $SOURCE | sed \'s/ .*//\'`\\" >> $TARGET'
            ])

        moduleList.topModule.moduleDependency['SIGNATURE'] = [signature]

        if (model.getBuildPipelineDebug(moduleList) != 0):
            print moduleList.swExeOrTarget + "\n"

        ##
        ## Generate a script for loading bitfiles onto an FPGA.
        ##
        def leap_xilinx_loader(xilinx_apm_name):
            try:
                fpga_pos = moduleList.getAWBParam(
                    ['physical_platform_config', 'physical_platform'],
                    'FPGA_POSITION')
            except:
                fpga_pos = None

            def leap_xilinx_loader_closure(target, source, env):
                lf = open(str(target[0]), 'w')

                lf.write('#!/usr/bin/perl\n')
                lf.write('\n')
                lf.write('my $retval = 0;\n')
                if fpga_pos != None:
                    lf.write('use Getopt::Long;\n')
                    lf.write('my $dev_id = undef;\n')
                    lf.write('GetOptions(\'device-id=i\', \$dev_id);\n')
                    lf.write('\n')

                    lf.write('# Check for existence of expected bitfile.\n')
                    lf.write('if ( ! -e  "' + xilinx_apm_name +
                             '_par.bit" ) {\n')
                    lf.write('  die "Could not find bitfile ' +
                             xilinx_apm_name + '_par.bit";\n')
                    lf.write('}\n')

                    lf.write(
                        '# Specify specific cable if device database includes a cable ID\n'
                    )
                    lf.write('my $setCable = \'setCable -p auto\';\n')
                    lf.write('if (defined($dev_id)) {\n')
                    lf.write(
                        '  my $cable_cfg = `leap-fpga-ctrl --device-id=${dev_id} --getconfig prog_cable_id`;\n'
                    )
                    lf.write('  chomp($cable_cfg);\n')
                    lf.write(
                        '  $setCable = "setCable $cable_cfg" if ($cable_cfg ne "");\n'
                    )
                    lf.write('}\n')
                    lf.write('\n')
                    lf.write('open (BATCH,">batch.opt");\n')
                    lf.write('print BATCH "setMode -bscan\n')
                    lf.write('${setCable}\n')
                    lf.write('identify\n')
                    lf.write('assignfile -p ' + str(fpga_pos) + ' -file ' +
                             xilinx_apm_name + '_par.bit\n')
                    lf.write('program -p ' + str(fpga_pos) + '\n')
                    lf.write('quit\n')
                    lf.write('EOF\n')
                    lf.write('";\n')
                    lf.write('close(BATCH);\n')
                    lf.write('open (STDOUT, ">$ARGV[0]");\n')
                    lf.write('open (STDERR, ">$ARGV[0]");\n')
                    lf.write('$retval = system("impact -batch batch.opt");\n')
                lf.write('if($retval != 0) {\n')
                lf.write('    exit(257);\n')  # some perl installs only return an 8 bit value
                lf.write('}\n')

                lf.close()
                os.chmod(
                    str(target[0]),
                    stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP
                    | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)

            return leap_xilinx_loader_closure

        loader = moduleList.env.Command(
            'config/' + moduleList.apmName + '.download', [],
            leap_xilinx_loader(xilinx_apm_name))

        dependOnSW = moduleList.getAWBParam(['xilinx_loader'], 'DEPEND_ON_SW')
        summary = 0
        if (dependOnSW):
            summary = moduleList.env.Command(
                moduleList.apmName + '_hw.errinfo',
                moduleList.getAllDependencies('SIGNATURE') + moduleList.swExe,
                [
                    '@ln -fs ' + moduleList.swExeOrTarget + ' ' +
                    moduleList.apmName,
                    SCons.Script.Delete(moduleList.apmName + '_hw.exe'),
                    SCons.Script.Delete(moduleList.apmName + '_hw.vexe'),
                    '@echo "++++++++++++ Post-Place & Route ++++++++"',
                    synthesis_library.leap_physical_summary(
                        xilinx_apm_name + '.par.twr', moduleList.apmName +
                        '_hw.errinfo', '^Slack \(MET\)', '^Slack \(VIOLATED\)')
                ])
        else:
            summary = moduleList.env.Command(
                moduleList.apmName + '_hw.errinfo',
                moduleList.getAllDependencies('SIGNATURE'), [
                    SCons.Script.Delete(moduleList.apmName + '_hw.exe'),
                    SCons.Script.Delete(moduleList.apmName + '_hw.vexe'),
                    '@echo "++++++++++++ Post-Place & Route ++++++++"',
                    synthesis_library.leap_physical_summary(
                        xilinx_apm_name + '.par.twr', moduleList.apmName +
                        '_hw.errinfo', '^Slack \(MET\)', '^Slack \(VIOLATED\)')
                ])

        moduleList.env.Depends(summary, loader)

        moduleList.topModule.moduleDependency['LOADER'] = [summary]
        moduleList.topDependency = moduleList.topDependency + [summary]
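Throughout these examples, build steps are registered with SCons either as command strings or as Python callables passed to env.Command; a callable receives the standard (target, source, env) builder signature, as leap_xilinx_loader_closure does above. A minimal standalone sketch of that pattern (file and target names are made up for illustration):

import SCons.Script

env = SCons.Script.Environment()

def write_stamp(target, source, env):
    # SCons passes lists of Nodes; str() yields the file system path.
    out = open(str(target[0]), 'w')
    out.write('built from %d source file(s)\n' % len(source))
    out.close()
    return 0  # a non-zero return value marks the build step as failed

stamp = env.Command('stamp.txt', ['inputs.txt'], write_stamp)
env.AlwaysBuild(stamp)     # rebuild on every invocation (cf. AlwaysBuild(vbin) above)
env.Alias('stamp', stamp)  # expose a phony 'stamp' alias, like 'vexe' above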
Example #17
    def __init__(self, moduleList):
        env = moduleList.env

        tree_base_path = env.Dir(
            model.get_build_path(moduleList, moduleList.topModule))

        # get rid of this at some point - since we know we're in
        # bluesim, we should be able to do the right thing.
        APM_NAME = moduleList.env['DEFS']['APM_NAME']
        BSC = moduleList.env['DEFS']['BSC']
        inc_paths = moduleList.swIncDir  # we need to depend on libasim

        bsc_version = bsv_tool.getBluespecVersion()

        ldflags = ''
        for ld_file in moduleList.getAllDependenciesWithPaths(
                'GIVEN_BLUESIM_LDFLAGSS'):
            ldHandle = open(
                moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + ld_file, 'r')
            ldflags += ldHandle.read() + ' '

        BSC_FLAGS_SIM = '-steps 10000000 +RTS -K1000M -RTS -keep-fires -aggressive-conditions -wait-for-license -no-show-method-conf -no-opt-bool -licenseWarning 7 -elab -show-schedule -l pthread ' + ldflags + ' '

        # Build in parallel.
        n_jobs = moduleList.env.GetOption('num_jobs')
        if (bsc_version >= 30006):
            BSC_FLAGS_SIM += '-parallel-sim-link ' + str(n_jobs) + ' '

        for path in inc_paths:
            BSC_FLAGS_SIM += '-I ' + path + ' '

        LDFLAGS = moduleList.env['DEFS']['LDFLAGS']

        TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
        ROOT_WRAPPER_SYNTH_ID = 'mk_' + moduleList.env['DEFS'][
            'ROOT_DIR_MODEL'] + '_Wrapper'

        all_hw_dirs = [
            env.Dir(d)
            for d in moduleList.env['DEFS']['ALL_HW_DIRS'].split(':')
        ]
        all_build_dirs = [d.Dir(TMP_BSC_DIR) for d in all_hw_dirs]

        ALL_DIRS_FROM_ROOT = ':'.join([d.path for d in all_hw_dirs])
        ALL_BUILD_DIRS_FROM_ROOT = ':'.join([d.path for d in all_build_dirs])
        ALL_LIB_DIRS_FROM_ROOT = ALL_DIRS_FROM_ROOT + ':' + ALL_BUILD_DIRS_FROM_ROOT

        bsc_sim_command = BSC + ' ' + BSC_FLAGS_SIM + ' ' + LDFLAGS + ' ' + ldflags + ' -o $TARGET'

        # Set MAKEFLAGS because Bluespec is going to invoke make on its own and
        # we don't want to pass on the current build's recursive flags.
        bsc_sim_command = 'env MAKEFLAGS="-j ' + str(
            n_jobs) + '" ' + bsc_sim_command

        if (bsc_version >= 13013):
            # 2008.01.A compiler allows us to pass C++ arguments.
            if (model.getDebug(moduleList)):
                bsc_sim_command += ' -Xc++ -O0'
            else:
                bsc_sim_command += ' -Xc++ -O1'

            # g++ 4.5.2 is complaining about overflowing the var tracking table

            if (model.getGccVersion() >= 40501):
                bsc_sim_command += ' -Xc++ -fno-var-tracking-assignments'

        defs = (software_tool.host_defs()).split(" ")
        for definition in defs:
            bsc_sim_command += ' -Xc++ ' + definition + ' -Xc ' + definition

        def modify_path_bdpi(path):
            return moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + path

        def modify_path_ba_local(path):
            return bsv_tool.modify_path_ba(moduleList, path)

        LI_LINK_DIR = ""
        if (not (wrapper_gen_tool.getFirstPassLIGraph()) is None):
            LI_LINK_DIR = tree_base_path.Dir('.li').path

        bdpi_cs = [
            env.File(c) for c in moduleList.env['DEFS']['BDPI_CS'].split(' ')
        ]
        BDPI_CS = ' '.join([c.path for c in bdpi_cs])

        bsc_sim_command += \
            ' -sim -e ' + ROOT_WRAPPER_SYNTH_ID + ' -p +:' + LI_LINK_DIR + ':' + ALL_LIB_DIRS_FROM_ROOT +' -simdir ' + \
            TMP_BSC_DIR + ' ' +\
            ' ' + BDPI_CS

        if (model.getBuildPipelineDebug(moduleList) != 0):
            print "BLUESIM DEPS: \n"
            for ba in moduleList.getAllDependencies('BA'):
                print 'Bluesim BA dep: ' + str(ba) + '\n'

            for ba in map(modify_path_ba_local,
                          moduleList.getAllDependenciesWithPaths('GIVEN_BAS')):
                print 'Bluesim GIVEN_BA dep: ' + str(ba) + '\n'

            for ba in map(modify_path_ba_local,
                          moduleList.getAllDependenciesWithPaths('GEN_BAS')):
                print 'Bluesim GEN_BA dep: ' + str(ba) + '\n'

        sbin_name = TMP_BSC_DIR + '/' + APM_NAME
        sbin = moduleList.env.Command(
            sbin_name + '_hw.exe',
            moduleList.getAllDependencies('BA') +
            map(modify_path_ba_local,
                moduleList.getAllDependenciesWithPaths('GIVEN_BAS')) +
            map(modify_path_ba_local,
                moduleList.getAllDependenciesWithPaths('GEN_BAS')) +
            map(modify_path_bdpi,
                moduleList.getAllDependenciesWithPaths('GIVEN_BDPI_CS')) +
            map(modify_path_bdpi,
                moduleList.getAllDependenciesWithPaths('GIVEN_BDPI_HS')),
            bsc_sim_command)

        if moduleList.env.GetOption('clean'):
            os.system('rm -rf .bsc')

        # If we have bsc data files, copy them over to the .bsc directory
        if len(moduleList.getAllDependencies('GEN_VS')) > 0:
            Copy(TMP_BSC_DIR, moduleList.getAllDependencies('GIVEN_DATAS'))

        #
        # The final step must leave a few well known names:
        #   APM_NAME must be the software side, if there is one.  If there isn't, then
        #   it must be the Bluesim image.
        #
        if (model.getBuildPipelineDebug(moduleList) != 0):
            print "ModuleList deps : " + str(moduleList.swExe)

        exe = moduleList.env.Command(APM_NAME, sbin, [
            '@ln -fs ' + sbin_name + '_hw.exe ${TARGET}_hw.exe',
            '@ln -fs ' + sbin_name + '_hw.exe.so ${TARGET}_hw.exe.so',
            '@ln -fs ' + moduleList.swExeOrTarget + ' ${TARGET}',
            SCons.Script.Delete('${TARGET}_hw.vexe'),
            SCons.Script.Delete('${TARGET}_hw.errinfo')
        ])

        moduleList.topDependency = moduleList.topDependency + [exe]
Example #18
File: bluesim.py  Project: chenm001/leap
    def __init__(self, moduleList):
        env = moduleList.env

        tree_base_path = env.Dir(model.get_build_path(moduleList, moduleList.topModule))

        # get rid of this at some point - since we know we're in 
        # bluesim, we should be able to do the right thing.
        APM_NAME = moduleList.env['DEFS']['APM_NAME']
        BSC = moduleList.env['DEFS']['BSC']
        inc_paths = moduleList.swIncDir # we need to depend on libasim

        bsc_version = bsv_tool.getBluespecVersion()

        ldflags = ''
        for ld_file in moduleList.getAllDependenciesWithPaths('GIVEN_BLUESIM_LDFLAGSS'):
            ldHandle = open(moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + ld_file, 'r')
            ldflags += ldHandle.read() + ' '    
            
        BSC_FLAGS_SIM = '-steps 10000000 +RTS -K1000M -RTS -keep-fires -aggressive-conditions -wait-for-license -no-show-method-conf -no-opt-bool -licenseWarning 7 -elab -show-schedule -l pthread ' + ldflags + ' '

        # Build in parallel.
        n_jobs = moduleList.env.GetOption('num_jobs')
        if (bsc_version >= 30006):
            BSC_FLAGS_SIM += '-parallel-sim-link ' + str(n_jobs) + ' '

        for path in inc_paths:
            BSC_FLAGS_SIM += '-I ' + path + ' '

        LDFLAGS = moduleList.env['DEFS']['LDFLAGS']

        TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
        ROOT_WRAPPER_SYNTH_ID = 'mk_' + moduleList.env['DEFS']['ROOT_DIR_MODEL'] + '_Wrapper'

        all_hw_dirs = [env.Dir(d) for d in moduleList.env['DEFS']['ALL_HW_DIRS'].split(':')]
        all_build_dirs = [d.Dir(TMP_BSC_DIR) for d in all_hw_dirs]

        ALL_DIRS_FROM_ROOT = ':'.join([d.path for d in all_hw_dirs])
        ALL_BUILD_DIRS_FROM_ROOT = ':'.join([d.path for d in all_build_dirs])
        ALL_LIB_DIRS_FROM_ROOT = ALL_DIRS_FROM_ROOT + ':' + ALL_BUILD_DIRS_FROM_ROOT
        
        bsc_sim_command = BSC + ' ' + BSC_FLAGS_SIM + ' ' + LDFLAGS + ' ' + ldflags + ' -o $TARGET'

        # Set MAKEFLAGS because Bluespec is going to invoke make on its own and
        # we don't want to pass on the current build's recursive flags.
        bsc_sim_command = 'env MAKEFLAGS="-j ' + str(n_jobs) + '" ' + bsc_sim_command


        if (bsc_version >= 13013):
            # 2008.01.A compiler allows us to pass C++ arguments.
            if (model.getDebug(moduleList)):
                bsc_sim_command += ' -Xc++ -O0'
            else:
                bsc_sim_command += ' -Xc++ -O1'

            # g++ 4.5.2 is complaining about overflowing the var tracking table

            if (model.getGccVersion() >= 40501):
                 bsc_sim_command += ' -Xc++ -fno-var-tracking-assignments'

        defs = (software_tool.host_defs()).split(" ")
        for definition in defs:
            bsc_sim_command += ' -Xc++ ' + definition + ' -Xc ' + definition


        def modify_path_bdpi(path):
            return  moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + path

        def modify_path_ba_local(path):
            return bsv_tool.modify_path_ba(moduleList, path)

        LI_LINK_DIR = ""
        if (not (wrapper_gen_tool.getFirstPassLIGraph()) is None):
            LI_LINK_DIR = tree_base_path.Dir('.li').path

        bdpi_cs = [env.File(c) for c in moduleList.env['DEFS']['BDPI_CS'].split(' ')]
        BDPI_CS = ' '.join([c.path for c in bdpi_cs])

        bsc_sim_command += \
            ' -sim -e ' + ROOT_WRAPPER_SYNTH_ID + ' -p +:' + LI_LINK_DIR + ':' + ALL_LIB_DIRS_FROM_ROOT +' -simdir ' + \
            TMP_BSC_DIR + ' ' +\
            ' ' + BDPI_CS


        if (model.getBuildPipelineDebug(moduleList) != 0):
            print "BLUESIM DEPS: \n" 
            for ba in moduleList.getAllDependencies('BA'):
                print 'Bluesim BA dep: ' + str(ba) + '\n'

            for ba in map(modify_path_ba_local, moduleList.getAllDependenciesWithPaths('GIVEN_BAS')):
                print 'Bluesim GIVEN_BA dep: ' + str(ba) + '\n'

            for ba in map(modify_path_ba_local, moduleList.getAllDependenciesWithPaths('GEN_BAS')):
                print 'Bluesim GEN_BA dep: ' + str(ba) + '\n'

        sbin_name = TMP_BSC_DIR + '/' + APM_NAME
        sbin = moduleList.env.Command(
            sbin_name + '_hw.exe',
            moduleList.getAllDependencies('BA') + 
            map(modify_path_ba_local, moduleList.getAllDependenciesWithPaths('GIVEN_BAS')) +
            map(modify_path_ba_local, moduleList.getAllDependenciesWithPaths('GEN_BAS')) +
            map(modify_path_bdpi, moduleList.getAllDependenciesWithPaths('GIVEN_BDPI_CS')) + 
            map(modify_path_bdpi, moduleList.getAllDependenciesWithPaths('GIVEN_BDPI_HS')),
            bsc_sim_command)

        if moduleList.env.GetOption('clean'):
            os.system('rm -rf .bsc')

        # If we have bsc data files, copy them over to the .bsc directory 
        if len(moduleList.getAllDependencies('GEN_VS'))> 0:
           Copy(TMP_BSC_DIR,  moduleList.getAllDependencies('GIVEN_DATAS')) 

        #
        # The final step must leave a few well known names:
        #   APM_NAME must be the software side, if there is one.  If there isn't, then
        #   it must be the Bluesim image.
        #
        if (model.getBuildPipelineDebug(moduleList) != 0):
            print "ModuleList desp : " + str(moduleList.swExe)

        exe = moduleList.env.Command(
            APM_NAME,
            sbin,
            [ '@ln -fs ' + sbin_name + '_hw.exe ${TARGET}_hw.exe',
              '@ln -fs ' + sbin_name + '_hw.exe.so ${TARGET}_hw.exe.so',
              '@ln -fs ' + moduleList.swExeOrTarget + ' ${TARGET}',
              SCons.Script.Delete('${TARGET}_hw.vexe'),
              SCons.Script.Delete('${TARGET}_hw.errinfo') ])
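        # After this command runs, the build directory contains the well known
        # names described above (illustrative):
        #   <APM_NAME>            -> symlink to the software executable (or the Bluesim image)
        #   <APM_NAME>_hw.exe     -> symlink to <TMP_BSC_DIR>/<APM_NAME>_hw.exe
        #   <APM_NAME>_hw.exe.so  -> symlink to <TMP_BSC_DIR>/<APM_NAME>_hw.exe.so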

        moduleList.topDependency = moduleList.topDependency + [exe] 
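# A minimal, self-contained sketch (not part of the LEAP sources) of how the
# Bluespec search path used above is assembled: module and build directories
# are flattened into one colon-separated string and handed to bsc via '-p'.
# The directory names below are hypothetical; '+' stands for bsc's default
# library path.
def build_bsc_search_path(module_dirs, build_dirs, li_link_dir=''):
    dirs = ([li_link_dir] if li_link_dir else []) + module_dirs + build_dirs
    return '-p +:' + ':'.join(dirs)

print build_bsc_search_path(['hw/model', 'hw/common'],
                            ['hw/model/.bsc', 'hw/common/.bsc'],
                            li_link_dir='.li')
# -> -p +:.li:hw/model:hw/common:hw/model/.bsc:hw/common/.bsc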
Example #19
0
def dump_lim_graph(moduleList):
    lim_logs = []
    lim_stubs = []

    pipeline_debug = model.getBuildPipelineDebug(moduleList)

    for module in moduleList.synthBoundaries():

        if (module.getAttribute('LI_GRAPH_IGNORE')):
            continue

        # scrub tree build/platform, which are redundant.
        lim_logs.extend(module.getDependencies('BSV_LOG'))
        lim_stubs.extend(module.getDependencies('GEN_VERILOG_STUB'))

    # clean duplicates in logs/stubs
    lim_logs = list(set(lim_logs))
    lim_stubs = list(set(lim_stubs))

    li_graph = moduleList.env['DEFS']['APM_NAME'] + '.li'

    ## Dump a LIM graph for use by the LIM compiler.  Here we wastefully
    ## construct (or reconstruct, depending on your perspective) a LIM graph
    ## including the platform channels.  This result could probably be
    ## achieved with the mergeGraphs function.
    def dump_lim_graph(target, source, env):
        # Find the subset of sources that are log files and parse them
        logs = [s for s in source if (str(s)[-4:] == '.log')]
        fullLIGraph = LIGraph(parseLogfiles(logs))
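        # For illustration, from sources such as ['mem.log', 'mem.v', 'net.log']
        # (hypothetical names) only the '.log' entries survive the filter above
        # and are handed to parseLogfiles().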

        # Annotate modules with their relevant object code (useful in LIM
        # compilation).  This is not technically part of the tree-cut
        # methodology, but we need to do it here.

        # For the LIM compiler, we must also annotate those
        # channels which are coming out of the platform code.

        for module in moduleList.synthBoundaries():
            modulePath = module.buildPath

            # Wrap the real findBuildPath() so it can be invoked
            # later by map().
            def __findBuildPath(path):
                return Source.findBuildPath(path, modulePath)

            # User area groups add a wrinkle. We need to
            # keep them around, but they don't have LI
            # channels

            if (not module.getAttribute('AREA_GROUP') is None):
                # We now need to create and integrate an
                # LI Module for this module
                newModule = LIModule(module.name, module.name)
                newModule.putAttribute('PLATFORM_MODULE', True)
                newModule.putAttribute('BLACK_BOX_AREA_GROUP', True)
                fullLIGraph.mergeModules([newModule])

            # The liGraph only knows about modules that actually have
            # connections; some modules are vestigial, so we can forget
            # about them.
            if (module.boundaryName in fullLIGraph.modules):
                for objectType in module.moduleDependency:
                    # it appears that we need to filter
                    # these objects.  TODO: Clean the
                    # things adding to this list so we
                    # don't require the filtering step.
                    depList = module.moduleDependency[objectType]
                    convertedDeps = model.convertDependencies(depList)
                    relativeDeps = map(__findBuildPath, convertedDeps)
                    fullLIGraph.modules[module.boundaryName].putObjectCode(
                        objectType, relativeDeps)

        for module in moduleList.synthBoundaries():
            if (module.boundaryName in fullLIGraph.modules):
                # annotate platform module with local mapping.
                if (module.name == moduleList.localPlatformName + '_platform'):
                    # The platform module is special.
                    fullLIGraph.modules[module.boundaryName].putAttribute(
                        'MAPPING', moduleList.localPlatformName)
                    fullLIGraph.modules[module.boundaryName].putAttribute(
                        'PLATFORM_MODULE', True)

        # Decorate LI modules with type
        for module in fullLIGraph.modules.values():
            module.putAttribute("EXECUTION_TYPE", "RTL")

        # dump graph representation.
        pickleHandle = open(str(target[0]), 'wb')
        pickle.dump(fullLIGraph, pickleHandle, protocol=-1)
        pickleHandle.close()
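        # protocol=-1 selects the newest pickle protocol available; a later
        # consumer (e.g. the LIM compiler) can restore the graph with
        # pickle.load(open(str(target[0]), 'rb')).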

        if (pipeline_debug != 0):
            print "Initial Graph is: " + str(
                fullLIGraph) + ": " + sys.version + "\n"

    # Set up the graph dump.  Although the graph is built from only LI
    # modules, the top wrapper contains sizing information, and the stubs
    # are needed as well.
    dumpGraph = moduleList.env.Command(li_graph, lim_logs + lim_stubs,
                                       dump_lim_graph)

    moduleList.topModule.moduleDependency['LIM_GRAPH'] = [li_graph]

    # dumpGraph depends on most other top level builds since it
    # walks the set of generated files.
    moduleList.env.Depends(dumpGraph, moduleList.topDependency)
    moduleList.topDependency = [dumpGraph]
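# A minimal, self-contained sketch (not part of the LEAP sources) of the
# persistence pattern used by dump_lim_graph above: the graph object is written
# with the newest pickle protocol and read back unchanged by a later tool.
# 'FakeLIModule' is a stand-in for the real LIGraph/LIModule classes.
import pickle

class FakeLIModule(object):
    def __init__(self, name):
        self.name = name
        self.attributes = {'EXECUTION_TYPE': 'RTL'}

def round_trip(path, modules):
    handle = open(path, 'wb')
    pickle.dump(modules, handle, protocol=-1)
    handle.close()
    return pickle.load(open(path, 'rb'))

restored = round_trip('example.li', [FakeLIModule('mem'), FakeLIModule('net')])
print [m.name for m in restored]   # -> ['mem', 'net']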
Example #20
0
File: BSV.py  Project: chenm001/leap
    def __init__(self, moduleList):
        # some definitions used during the bsv compilation process
        env = moduleList.env
        self.moduleList = moduleList

        self.hw_dir = env.Dir(moduleList.env['DEFS']['ROOT_DIR_HW'])

        self.TMP_BSC_DIR = env['DEFS']['TMP_BSC_DIR']
        synth_modules = moduleList.synthBoundaries()

        self.USE_TREE_BUILD = moduleList.getAWBParam('wrapper_gen_tool',
                                                     'USE_BUILD_TREE')

        # all_module_dirs: a list of all module directories in the build tree
        self.all_module_dirs = [
            self.hw_dir.Dir(moduleList.topModule.buildPath)
        ]
        for module in synth_modules:
            if (module.buildPath != moduleList.topModule.buildPath):
                self.all_module_dirs += [self.hw_dir.Dir(module.buildPath)]

        # all_build_dirs: the build (.bsc) sub-directory of all module directories
        self.all_build_dirs = [
            d.Dir(self.TMP_BSC_DIR) for d in self.all_module_dirs
        ]

        # Include iface directories
        self.all_module_dirs += iface_tool.getIfaceIncludeDirs(moduleList)
        self.all_build_dirs += iface_tool.getIfaceLibDirs(moduleList)

        # Add the top level build directory
        self.all_build_dirs += [env.Dir(self.TMP_BSC_DIR)]

        self.all_module_dirs += [
            self.hw_dir.Dir('include'),
            self.hw_dir.Dir('include/awb/provides')
        ]

        # Full search path: all module and build directories
        self.all_lib_dirs = self.all_module_dirs + self.all_build_dirs

        all_build_dir_paths = [d.path for d in self.all_build_dirs]
        self.ALL_BUILD_DIR_PATHS = ':'.join(all_build_dir_paths)

        all_lib_dir_paths = [d.path for d in self.all_lib_dirs]
        self.ALL_LIB_DIR_PATHS = ':'.join(all_lib_dir_paths)
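        # For a hypothetical build with modules 'model' and 'common', the result
        # looks something like:
        #   ALL_LIB_DIR_PATHS = 'hw/model:hw/common:hw/include:...:hw/model/.bsc:hw/common/.bsc:.bsc'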

        # we need to annotate the module list with the
        # bluespec-provided library files. Do so here.
        bsv_tool.decorateBluespecLibraryCode(moduleList)

        self.TMP_BSC_DIR = moduleList.env['DEFS']['TMP_BSC_DIR']
        self.BUILD_LOGS_ONLY = moduleList.getAWBParam('bsv_tool',
                                                      'BUILD_LOGS_ONLY')
        self.USE_BVI = moduleList.getAWBParam('bsv_tool', 'USE_BVI')

        self.pipeline_debug = model.getBuildPipelineDebug(moduleList)

        # Should we be building in events?
        if (model.getEvents(moduleList) == 0):
            bsc_events_flag = ' -D HASIM_EVENTS_ENABLED=False '
        else:
            bsc_events_flag = ' -D HASIM_EVENTS_ENABLED=True '

        self.BSC_FLAGS = moduleList.getAWBParam('bsv_tool',
                                                'BSC_FLAGS') + bsc_events_flag

        moduleList.env.VariantDir(self.TMP_BSC_DIR, '.', duplicate=0)
        moduleList.env['ENV']['BUILD_DIR'] = moduleList.env['DEFS'][
            'BUILD_DIR']  # need to set the builddir for synplify

        topo = moduleList.topologicalOrderSynth()
        topo.reverse()

        # Cleaning? Wipe out module temporary state. Do this before
        # the topo pop to ensure that we don't leave garbage around at
        # the top level.
        if moduleList.env.GetOption('clean'):
            for module in topo:
                MODULE_PATH = get_build_path(moduleList, module)
                os.system('cd ' + MODULE_PATH + '/' + self.TMP_BSC_DIR +
                          '; rm -f *.ba *.c *.h *.sched *.log *.v *.bo *.str')

        topo.pop()  # get rid of top module.

        ## Python module that generates a wrapper to connect the exposed
        ## wires of all synthesis boundaries.
        tree_builder = bsv_tool.BSVSynthTreeBuilder(self)

        ##
        ## Is this a normal build or a build in which only Bluespec dependence
        ## is computed?
        ##

        if not moduleList.isDependsBuild:
            ##
            ## Normal build.
            ##

            ##
            ## Now that the "depends-init" build is complete we can
            ## continue with accurate inter-Bluespec file dependence.
            ## This build only takes place for the first pass object
            ## code generation.  If the first pass li graph exists, it
            ## subsumes awb-style synthesis boundary generation.
            ##
            for module in topo:
                self.build_synth_boundary(moduleList, module)

            ## We are going to have a whole bunch of BA and V files coming.
            ## We don't yet know what they contain, but we do know that there
            ## will be |synth_modules| - 2 of them

            if (not 'GEN_VERILOGS' in moduleList.topModule.moduleDependency):
                moduleList.topModule.moduleDependency['GEN_VERILOGS'] = []
            if (not 'GEN_BAS' in moduleList.topModule.moduleDependency):
                moduleList.topModule.moduleDependency['GEN_BAS'] = []

            ## Having described the new build tree dependencies we can build
            ## the top module.
            self.build_synth_boundary(moduleList, moduleList.topModule)

            ## Merge all synthesis boundaries using a tree?  The tree reduces
            ## the number of connections merged in a single compilation, allowing
            ## us to support larger systems.
            if self.USE_TREE_BUILD:
                tree_builder.setupTreeBuild(moduleList, topo)

            ##
            ## Generate the global string table.  Bluespec-generated global
            ## strings are stored in files by the compiler.
            ##
            ## The global string file will be generated in the top-level
            ## .bsc directory and a link to it will be added to the
            ## top-level directory.
            ##
            all_str_src = []
            #for module in topo + [moduleList.topModule]:
            for module in moduleList.moduleList + topo + [
                    moduleList.topModule
            ]:
                if ('STR' in module.moduleDependency):
                    all_str_src.extend(module.moduleDependency['STR'])

            if (self.BUILD_LOGS_ONLY == 0):
                bsc_str = moduleList.env.Command(
                    self.TMP_BSC_DIR + '/' +
                    moduleList.env['DEFS']['APM_NAME'] + '.str', all_str_src,
                    ['cat $SOURCES > $TARGET'])
                strDep = moduleList.env.Command(
                    moduleList.env['DEFS']['APM_NAME'] + '.str', bsc_str, [
                        'ln -fs ' + self.TMP_BSC_DIR +
                        '/`basename $TARGET` $TARGET'
                    ])
                moduleList.topDependency += [strDep]
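                # Net effect (illustrative): <TMP_BSC_DIR>/<APM_NAME>.str holds
                # the concatenated global strings, and <APM_NAME>.str in the
                # top-level directory is a symlink pointing at it.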

            if moduleList.env.GetOption('clean'):
                print 'Cleaning depends-init...'
                s = os.system('scons --clean depends-init')
        else:

            ##
            ## Dependence build.  The target of this build is "depends-init".  No
            ## Bluespec modules will be compiled in this invocation of SCons.
            ## Only .depends-bsv files will be produced.
            ##

            # We need to calculate some dependencies for the build tree.  We
            # could be clever and put this code somewhere common rather than
            # replicating it here.
            if self.USE_TREE_BUILD:

                buildTreeDeps = {}
                buildTreeDeps['GEN_VERILOGS'] = []
                buildTreeDeps['GEN_BAS'] = []
                # This is sort of a hack.
                buildTreeDeps['WRAPPER_BSHS'] = [
                    'awb/provides/soft_services.bsh'
                ]
                buildTreeDeps['GIVEN_BSVS'] = []
                buildTreeDeps['BA'] = []
                buildTreeDeps['STR'] = []
                buildTreeDeps['VERILOG'] = []
                buildTreeDeps['BSV_LOG'] = []
                buildTreeDeps['VERILOG_STUB'] = []

                tree_module = Module( 'build_tree', ["mkBuildTree"], moduleList.topModule.buildPath,\
                             moduleList.topModule.name,\
                             [], moduleList.topModule.name, [], buildTreeDeps, platformModule=True)

                tree_module.dependsFile = '.depends-build-tree'

                moduleList.insertModule(tree_module)
                tree_file_bo = get_build_path(
                    moduleList, moduleList.topModule) + "/build_tree.bsv"
                # sprinkle files to get dependencies right
                bo_handle = open(tree_file_bo, 'w')

                # mimic AWB/leap-configure

                bo_handle.write('//\n')
                bo_handle.write(
                    '// Synthesized compilation file for module: build_tree\n')
                bo_handle.write('//\n')
                bo_handle.write('//   This file was created by BSV.py\n')
                bo_handle.write('//\n')

                bo_handle.write('`define BUILDING_MODULE_build_tree\n')
                bo_handle.write('`include "build_tree_Wrapper.bsv"\n')

                bo_handle.close()

                # Calling generateWrapperStub will write out default _Wrapper.bsv
                # and _Log.bsv files for the build tree.  However, these files
                # may already exist and, in the case of build_tree_Wrapper.bsv,
                # have meaningful content.  Fortunately, generateWrapperStub
                # will not overwrite existing files.
                wrapper_gen_tool.generateWrapperStub(moduleList, tree_module)
                wrapper_gen_tool.generateAWBCompileWrapper(
                    moduleList, tree_module)
                topo.append(tree_module)

            deps = []

            useDerived = True
            first_pass_LI_graph = wrapper_gen_tool.getFirstPassLIGraph()
            if (not first_pass_LI_graph is None):
                useDerived = False
                # We also need to parse the platform_synth file generated in the
                # top module's build directory.
                platform_synth = get_build_path(
                    moduleList, moduleList.topModule
                ) + "/" + moduleList.localPlatformName + "_platform_synth.bsv"
                platform_deps = ".depends-platform"
                deps += self.compute_dependence(moduleList,
                                                moduleList.topModule,
                                                useDerived,
                                                fileName=platform_deps,
                                                targetFiles=[platform_synth])

                # If we have an LI graph, we need to construct and compile
                # several LI wrappers.  do that here.
                # include all the dependencies in the graph in the wrapper.
                li_wrappers = []
                tree_base_path = get_build_path(moduleList,
                                                moduleList.topModule)
                liGraph = LIGraph([])
                firstPassGraph = first_pass_LI_graph
                # Ignore the platform module and any module marked for resynthesis.
                liGraph.mergeModules([
                    module for module in getUserModules(firstPassGraph)
                    if module.getAttribute('RESYNTHESIZE') is None
                ])
                for module in sorted(liGraph.graph.nodes(),
                                     key=lambda module: module.name):
                    wrapper_import_path = tree_base_path + '/' + module.name + '_Wrapper.bsv'
                    li_wrappers.append(module.name + '_Wrapper.bsv')
                    wrapper_import_handle = open(wrapper_import_path, 'w')
                    wrapper_import_handle.write('import Vector::*;\n')
                    wrapper_gen_tool.generateWellKnownIncludes(
                        wrapper_import_handle)
                    wrapper_gen_tool.generateBAImport(module,
                                                      wrapper_import_handle)
                    wrapper_import_handle.close()
                    platform_deps = ".depends-" + module.name
                    deps += self.compute_dependence(
                        moduleList,
                        moduleList.topModule,
                        useDerived,
                        fileName=platform_deps,
                        targetFiles=[wrapper_import_path])

            for module in topo + [moduleList.topModule]:
                # For object-import builds no Wrapper code will be included; remove it.
                deps += self.compute_dependence(moduleList,
                                                module,
                                                useDerived,
                                                fileName=module.dependsFile)

            moduleList.topDependsInit += deps
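# A minimal, self-contained sketch (not part of the LEAP sources) of the
# wrapper-stub pattern used above: for each LI module kept from the first
# pass, a small <name>_Wrapper.bsv import file is written so that the
# dependence pass has a concrete file to scan.  The module names and file
# contents below are hypothetical placeholders.
import os

def emit_wrapper_stub(base_path, module_name):
    wrapper_path = os.path.join(base_path, module_name + '_Wrapper.bsv')
    handle = open(wrapper_path, 'w')
    handle.write('import Vector::*;\n')
    handle.write('// imports for the pre-compiled .ba of ' + module_name + ' would go here\n')
    handle.close()
    return wrapper_path

for name in sorted(['mem_controller', 'network_stack']):
    print 'wrote ' + emit_wrapper_stub('.', name)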