def insertDeviceModules(moduleList, annotateParentsOnly=False):
    """Turn each user area group marked SYNTH_BOUNDARY into a Module.

    For every elaborated area-group constraint carrying a SYNTH_BOUNDARY
    attribute this: locates the generated verilog for the group, derives a
    black-box stub file name from it, annotates the parent module (plus the
    top module and the synth boundaries absent from the constraints) with a
    BLACK_BOX verilog->stub mapping, optionally registers the stub-generation
    build command, and inserts a new Module into moduleList.

    moduleList          -- project-wide module list / build state.
    annotateParentsOnly -- when True, only annotate modules; skip registering
                           the leap-gen-black-box build command.
    """
    # this was constructed upon the original call to load area.
    elabAreaConstraints = AreaConstraints(moduleList)
    elabAreaConstraints.loadAreaConstraintsElaborated()
    for userAreaGroup in elabAreaConstraints.constraints.values():
        if('SYNTH_BOUNDARY' in userAreaGroup.attributes):
            # Modify parent to know about this child.
            parentModule = moduleList.modules[userAreaGroup.parentName]
            # pick up deps from parent.
            moduleDeps = {}
            moduleName = userAreaGroup.attributes['MODULE_NAME']

            # grab the parent module verilog and convert it. This
            # is really ugly, and demonstrates why first class
            # language constructs are so nice. Eventually, we
            # should push these new synth boundary objects into
            # flow earlier.
            moduleVerilog = None
            for dep in map(functools.partial(bsv_tool.modify_path_ba, moduleList), model.convertDependencies(moduleList.getAllDependenciesWithPaths('GEN_VERILOGS'))):
                # NOTE(review): moduleName is used as a regex pattern here,
                # so a name containing metacharacters, or one that is a
                # substring of another module's path, could match the wrong
                # dependency -- confirm intent.
                if (re.search(moduleName, dep)):
                    moduleVerilog = dep

            if(moduleVerilog is None):
                print "ERROR: failed to find verilog for area group: " + userAreaGroup.name
                exit(1)

            # The stub file name mirrors the generated verilog file name.
            moduleVerilogBlackBox = moduleVerilog.replace('.v', '_stub.v')
            moduleDeps['GEN_VERILOG_STUB'] = [moduleVerilogBlackBox]

            # We need to ensure that the second pass glue logic
            # modules don't look at the black box stubs. The modules
            # are in the current synth boundaries list, but not in the LI graph.
            parentList = [parentModule, moduleList.topModule] + [module for module in moduleList.synthBoundaries() if not module.name in elabAreaConstraints.constraints]
            for parent in parentList:
                print "BLACK_BOX Annotating: " + parent.name
                if(parent.getAttribute('BLACK_BOX') is None):
                    parent.putAttribute('BLACK_BOX', {moduleVerilog: moduleVerilogBlackBox})
                else:
                    # Extend the existing verilog -> stub mapping in place.
                    blackBoxDict = parent.getAttribute('BLACK_BOX')
                    blackBoxDict[moduleVerilog] = moduleVerilogBlackBox

            if(not annotateParentsOnly):
                # Register the rule that produces the black-box stub.
                moduleList.env.Command([moduleVerilogBlackBox], [moduleVerilog], 'leap-gen-black-box -nohash $SOURCE > $TARGET')

            # Wrap the area group in a first-class Module and insert it.
            m = Module(userAreaGroup.name, [moduleName],\
                       parentModule.buildPath, parentModule.name,\
                       [], parentModule.name, [], moduleDeps)
            m.putAttribute("WRAPPER_NAME", moduleName)
            m.putAttribute("AREA_GROUP", 1)
            moduleList.insertModule(m)
def buildNGC(moduleList, module, globalVerilogs, globalVHDs, xstTemplate, xilinx_xcf):
    """Register the XST synthesis rule producing a .ngc netlist for one
    synth boundary.

    Emits the .prj/.xst configuration for the boundary, wires up the SCons
    Command that runs xst, records the resulting files in the module's
    dependency dictionary, and returns the netlist build node.
    """
    # Spit out a fresh .prj and .xst configuration for this boundary.
    generatePrj(moduleList, module, globalVerilogs, globalVHDs)
    xstConfigPath = generateXST(moduleList, module, xstTemplate)

    buildDir = moduleList.env.Dir(moduleList.compileDirectory)
    netlistFile = buildDir.File(module.wrapperName() + '.ngc')
    reportFile = buildDir.File(module.wrapperName() + '.srp')
    resourceFile = buildDir.File(module.wrapperName() + '.resources')

    # SCons rebuilds when the source order changes, so every dependency
    # list is sorted before being handed to Command().
    synthSources = (sorted(module.moduleDependency['VERILOG'])
                    + sorted(moduleList.getAllDependencies('VERILOG_LIB'))
                    + sorted(map(model.modify_path_hw, moduleList.getAllDependenciesWithPaths('GIVEN_VERILOGS')))
                    + sorted(model.convertDependencies(moduleList.getDependencies(module, 'VERILOG_STUB')))
                    + [xstConfigPath]
                    + xilinx_xcf)

    synthActions = [
        SCons.Script.Delete(buildDir.File(module.wrapperName() + '.srp')),
        SCons.Script.Delete(buildDir.File(module.wrapperName() + '_xst.xrpt')),
        'xst -intstyle silent -ifn config/' + module.wrapperName() + '.modified.xst -ofn ' + buildDir.File(module.wrapperName() + '.srp').path,
        '@echo xst ' + module.wrapperName() + ' build complete.']

    sub_netlist = moduleList.env.Command([netlistFile, reportFile],
                                         synthSources,
                                         synthActions)

    # Record the build products on the module for downstream stages.
    module.moduleDependency['SRP'] = [reportFile]
    if 'GEN_NGCS' in module.moduleDependency:
        module.moduleDependency['GEN_NGCS'] += [netlistFile]
    else:
        module.moduleDependency['GEN_NGCS'] = [netlistFile]
    module.moduleDependency['RESOURCES'] = [resourceFile]
    module.moduleDependency['SYNTHESIS'] = [sub_netlist]
    SCons.Script.Clean(sub_netlist, buildDir.File(module.wrapperName() + '.srp'))

    # Derive the resource summary from the synthesis report.
    moduleList.env.Command(resourceFile, reportFile, getSRPResourcesClosure(module))

    # If we're building for the FPGA, we'll claim that the top-level build
    # depends on the existence of the ngc file. This allows us to do
    # resource analysis later on.
    if moduleList.getAWBParam('bsv_tool', 'BUILD_LOGS_ONLY'):
        moduleList.topDependency += [resourceFile]
    return sub_netlist
def buildVivadoEDF(moduleList, module, globalVerilogs, globalVHDs):
    """Register the Vivado synthesis rule producing an .edf netlist and a
    .synth.dcp checkpoint for one synth boundary.

    Creates the per-module synthesis directory, emits the Vivado tcl, wires
    the SCons Command that runs vivado, records the products on the module,
    and returns the netlist build node.
    """
    buildDir = moduleList.env.Dir(moduleList.compileDirectory)
    synthDir = buildDir.Dir(module.wrapperName() + '_synth/')
    if not os.path.isdir(str(synthDir)):
        os.mkdir(str(synthDir))

    # Spit out the per-module Vivado synthesis tcl for this boundary.
    tclDeps = generateVivadoTcl(moduleList, module, globalVerilogs,
                                globalVHDs, synthDir)

    checkpointFile = synthDir.File(module.wrapperName() + '.synth.dcp')
    netlistFile = synthDir.File(module.wrapperName() + '.edf')
    utilReport = synthDir.File(module.wrapperName() + '.synth.opt.util')
    logName = module.wrapperName() + '.synth.log'
    resourceFile = synthDir.File(module.wrapperName() + '.resources')

    # Area-group modules have a different base dependency than normal
    # modules: they build from the full wrapper verilog, everything else
    # from the black-box stub.
    if module.getAttribute('AREA_GROUP') is not None:
        baseVerilog = model.get_temp_path(moduleList,module) + module.wrapperName() + '.v'
    else:
        baseVerilog = model.get_temp_path(moduleList,module) + module.wrapperName() + '_stub.v'

    # Sort dependencies because SCons will rebuild if the order changes.
    sub_netlist = moduleList.env.Command(
        [netlistFile, utilReport, checkpointFile],
        [baseVerilog]
        + sorted(moduleList.getDependencies(module,'VERILOG'))
        + tclDeps
        + sorted(moduleList.getAllDependencies('VERILOG_LIB'))
        + sorted(model.convertDependencies(moduleList.getDependencies(module, 'VERILOG_STUB'))),
        [SCons.Script.Delete(synthDir.File(module.wrapperName() + '.synth.opt.util')),
         SCons.Script.Delete(synthDir.File(module.wrapperName() + '_xst.xrpt')),
         'cd ' + synthDir.path + '; touch start.txt; vivado -nojournal -mode batch -source ' + module.wrapperName() + '.synthesis.tcl 2>&1 > ' + logName,
         '@echo vivado synthesis ' + module.wrapperName() + ' build complete.'])

    utilFile = moduleList.env.Command(resourceFile, utilReport,
                                      getVivadoUtilResourcesClosure(module))

    # Record the build products on the module for downstream stages.
    module.moduleDependency['SRP'] = [utilReport]
    if 'GEN_NGCS' in module.moduleDependency:
        module.moduleDependency['GEN_NGCS'] += [netlistFile]
    else:
        module.moduleDependency['GEN_NGCS'] = [netlistFile]
    module.moduleDependency['GEN_VIVADO_DCPS'] = [checkpointFile]
    module.moduleDependency['RESOURCES'] = [utilFile]
    module.moduleDependency['SYNTHESIS'] = [netlistFile]
    SCons.Script.Clean(sub_netlist, buildDir.File(module.wrapperName() + '.srp'))

    # If we're building for the FPGA, we'll claim that the top-level build
    # depends on the existence of the utilization file. This allows us to
    # do resource analysis later on.
    if moduleList.getAWBParam('bsv_tool', 'BUILD_LOGS_ONLY'):
        moduleList.topDependency += [utilFile]
    return sub_netlist
def dump_lim_graph(target, source, env): # Find the subset of sources that are log files and parse them logs = [s for s in source if (str(s)[-4:] == '.log')] fullLIGraph = LIGraph(parseLogfiles(logs)) # annotate modules with relevant object code (useful in # LIM compilation) # this is not technically a part of the tree cut methodology, but we need to do this # For the LIM compiler, we must also annotate those # channels which are coming out of the platform code. for module in moduleList.synthBoundaries(): modulePath = module.buildPath # Wrap the real findBuildPath() so it can be invoked # later by map(). def __findBuildPath(path): return Source.findBuildPath(path, modulePath) # User area groups add a wrinkle. We need to # keep them around, but they don't have LI # channels if(not module.getAttribute('AREA_GROUP') is None): # We now need to create and integrate an # LI Module for this module newModule = LIModule(module.name, module.name) newModule.putAttribute('PLATFORM_MODULE', True) newModule.putAttribute('BLACK_BOX_AREA_GROUP', True) fullLIGraph.mergeModules([newModule]) # the liGraph only knows about modules that actually # have connections some modules are vestigial, andso # we can forget about them... if (module.boundaryName in fullLIGraph.modules): for objectType in module.moduleDependency: # it appears that we need to filter # these objects. TODO: Clean the # things adding to this list so we # don't require the filtering step. depList = module.moduleDependency[objectType] convertedDeps = model.convertDependencies(depList) relativeDeps = map(__findBuildPath, convertedDeps) fullLIGraph.modules[module.boundaryName].putObjectCode(objectType, relativeDeps) for module in moduleList.synthBoundaries(): if(module.boundaryName in fullLIGraph.modules): # annotate platform module with local mapping. if(module.name == moduleList.localPlatformName + '_platform'): # The platform module is special. 
fullLIGraph.modules[module.boundaryName].putAttribute('MAPPING', moduleList.localPlatformName) fullLIGraph.modules[module.boundaryName].putAttribute('PLATFORM_MODULE', True) # Decorate LI modules with type for module in fullLIGraph.modules.values(): module.putAttribute("EXECUTION_TYPE","RTL") # dump graph representation. pickleHandle = open(str(target[0]), 'wb') pickle.dump(fullLIGraph, pickleHandle, protocol=-1) pickleHandle.close() if (pipeline_debug != 0): print "Initial Graph is: " + str(fullLIGraph) + ": " + sys.version +"\n"
def buildVivadoEDF(moduleList, module, globalVerilogs, globalVHDs):
    """Register the Vivado synthesis rule producing an .edf netlist and a
    .synth.dcp checkpoint for one synth boundary.

    Creates the per-module synthesis directory, emits the Vivado tcl, wires
    the SCons Command that runs vivado (including GIVEN_VERILOGS in the
    dependency set), records the products on the module's dependency
    dictionary, and returns the netlist build node.
    """
    compile_dir = moduleList.env.Dir(moduleList.compileDirectory)
    vivadoCompileDirectory = compile_dir.Dir(module.wrapperName() + '_synth/')
    if not os.path.isdir(str(vivadoCompileDirectory)):
        os.mkdir(str(vivadoCompileDirectory))

    # Let's synthesize a xilinx .prj file for this synth boundary.
    # spit out a new prj
    tclDeps = generateVivadoTcl(moduleList, module, globalVerilogs, globalVHDs, vivadoCompileDirectory)

    checkpointFile = vivadoCompileDirectory.File(module.wrapperName() + '.synth.dcp')
    edfFile = vivadoCompileDirectory.File(module.wrapperName() + '.edf')
    srpFile = vivadoCompileDirectory.File(module.wrapperName() + '.synth.opt.util')
    logFile = module.wrapperName() + '.synth.log'
    resourceFile = vivadoCompileDirectory.File(module.wrapperName() + '.resources')

    # area group modules have a different base dependency than normal
    # modules: they synthesize from the full wrapper verilog, everything
    # else from the black-box stub.
    wrapperVerilogDependency = model.get_temp_path(moduleList,module) + module.wrapperName() + '_stub.v'
    if(not module.getAttribute('AREA_GROUP') is None):
        # grab the parent stub?
        wrapperVerilogDependency = model.get_temp_path(moduleList,module) + module.wrapperName() + '.v'

    # Sort dependencies because SCons will rebuild if the order changes.
    sub_netlist = moduleList.env.Command(
        [edfFile, srpFile, checkpointFile],
        [wrapperVerilogDependency] +
        sorted(moduleList.getDependencies(module,'VERILOG')) +
        tclDeps +
        sorted(moduleList.getAllDependencies('VERILOG_LIB')) +
        sorted(map(model.modify_path_hw,moduleList.getAllDependenciesWithPaths('GIVEN_VERILOGS'))) +
        sorted(model.convertDependencies(moduleList.getDependencies(module, 'VERILOG_STUB'))),
        [ SCons.Script.Delete(vivadoCompileDirectory.File(module.wrapperName() + '.synth.opt.util')),
          SCons.Script.Delete(vivadoCompileDirectory.File(module.wrapperName() + '_xst.xrpt')),
          'cd ' + vivadoCompileDirectory.path + '; touch start.txt; vivado -nojournal -mode batch -source ' + module.wrapperName() + '.synthesis.tcl 2>&1 > ' + logFile,
          '@echo vivado synthesis ' + module.wrapperName() + ' build complete.' ])

    # Derive the resource summary from the utilization report.
    utilFile = moduleList.env.Command(resourceFile, srpFile, getVivadoUtilResourcesClosure(module))

    # Record the build products on the module for downstream stages.
    module.moduleDependency['SRP'] = [srpFile]
    if (not 'GEN_NGCS' in module.moduleDependency):
        module.moduleDependency['GEN_NGCS'] = [edfFile]
    else:
        module.moduleDependency['GEN_NGCS'] += [edfFile]
    module.moduleDependency['GEN_VIVADO_DCPS'] = [checkpointFile]
    module.moduleDependency['RESOURCES'] = [utilFile]
    module.moduleDependency['SYNTHESIS'] = [edfFile]
    SCons.Script.Clean(sub_netlist, compile_dir.File(module.wrapperName() + '.srp'))

    # If we're building for the FPGA, we'll claim that the
    # top-level build depends on the existence of the ngc
    # file. This allows us to do resource analysis later on.
    if(moduleList.getAWBParam('bsv_tool', 'BUILD_LOGS_ONLY')):
        moduleList.topDependency += [utilFile]
    return sub_netlist
def place_dcp(self, moduleList, module):
    """Register the rules that place (and optionally route) one module's
    synthesis checkpoint out-of-context, producing <name>.place.dcp.

    Emits two SCons Commands: one whose action (a closure) writes the
    per-module place tcl and constraints tcl, and a second that runs
    vivado on that tcl to produce the placed checkpoint, which is the
    returned build node.
    """
    # Due to area groups, we first need a closure to generate tcl.
    placeCompileDirectory = moduleList.compileDirectory + '/' + module.name + '_physical/'
    dcp = placeCompileDirectory + module.name + ".place.dcp"
    edfTcl = placeCompileDirectory + module.name + ".place.tcl"
    constraintsTcl = placeCompileDirectory + module.name + ".constraints.tcl"
    checkpoint = model.convertDependencies(module.getDependencies('GEN_VIVADO_DCPS'))
    if not os.path.isdir(placeCompileDirectory):
        os.mkdir(placeCompileDirectory)
    area_constraints = area_group_tool.AreaConstraints(moduleList)

    def place_dcp_tcl_closure(moduleList):
        # SCons action: write the place tcl and constraints tcl files.
        def place_dcp_tcl(target, source, env):
            # TODO: Eventually, we'll need to examine the constraints to decide if we need to rebuild.
            area_constraints.loadAreaConstraints()
            # NOTE(review): these handles are only closed at the end of the
            # function; an exception mid-write leaks them -- consider 'with'.
            edfTclFile = open(edfTcl,'w')
            constraintsTclFile = open(constraintsTcl,'w')
            edfTclFile.write('read_checkpoint ' + model.rel_if_not_abspath(checkpoint[0], placeCompileDirectory) + '\n')
            # throw out area group constraints. (and maybe loc constraints too?)
            # Some modules may not have placement information. Ignore them for now.
            needToLink = True
            refName = module.wrapperName()
            # If this is an platform/user-defined area group, the wrapper name may be different.
            if (not self.firstPassLIGraph.modules[module.name].getAttribute('BLACK_BOX_AREA_GROUP') is None):
                refName = area_constraints.constraints[module.name].attributes['MODULE_NAME']
            if((self.firstPassLIGraph.modules[module.name].getAttribute('BLACK_BOX_AREA_GROUP') is None) or moduleList.getAWBParamSafe('area_group_tool', 'AREA_GROUPS_PAR_DEVICE_AG')):
                if(not area_constraints.emitModuleConstraintsVivado(constraintsTclFile, module.name, useSourcePath=False) is None):
                    # for platform modules, we need to insert the tcl environment.
                    constraintsTclFile.write('set IS_TOP_BUILD 0\n')
                    constraintsTclFile.write('set AG_OBJECT ' + module.name + '\n')
                    constraintsTclFile.write('set IS_AREA_GROUP_BUILD 1\n')
                    constraintsTclFile.write('source ' + model.rel_if_not_abspath(self.paramTclFile, placeCompileDirectory) + '\n')
                    for tcl_header in self.tcl_headers:
                        constraintsTclFile.write('source ' + model.rel_if_not_abspath(tcl_header, placeCompileDirectory) + '\n')
                    for tcl_def in self.tcl_defs:
                        constraintsTclFile.write('source ' + model.rel_if_not_abspath(tcl_def, placeCompileDirectory) + '\n')
                    for tcl_func in self.tcl_funcs:
                        constraintsTclFile.write('source ' + model.rel_if_not_abspath(tcl_func, placeCompileDirectory) + '\n')
                    constraintsTclFile.write("annotateModelClock\n")
                    constraintsTclFile.write("annotateCLK_SRC\n")
                    for tcl_alg in self.tcl_algs:
                        constraintsTclFile.write('source ' + model.rel_if_not_abspath(tcl_alg, placeCompileDirectory) + '\n')
                    edfTclFile.write('add_file ' + model.rel_if_not_abspath(constraintsTcl, placeCompileDirectory) + '\n')
                    # Keep the constraints file live through the relevant
                    # implementation steps (route only when routing the AG).
                    if(not 'NO_PLACE' in area_constraints.constraints[module.name].attributes):
                        if(not 'NO_ROUTE' in area_constraints.constraints[module.name].attributes and self.routeAG):
                            edfTclFile.write("set_property USED_IN {synthesis implementation opt_design place_design phys_opt_design route_design out_of_context} [get_files " + model.rel_if_not_abspath(constraintsTcl, placeCompileDirectory) + "]\n")
                        else:
                            edfTclFile.write("set_property USED_IN {synthesis implementation opt_design place_design phys_opt_design out_of_context} [get_files " + model.rel_if_not_abspath(constraintsTcl, placeCompileDirectory) + "]\n")
                    # linking lets us pull in placement constraints.
                    edfTclFile.write("link_design -mode out_of_context -top " + refName + " -part " + self.part + "\n")
                    needToLink = False
            # if ended here...
            if(not 'NO_PLACE' in area_constraints.constraints[module.name].attributes):
                edfTclFile.write("place_design -no_drc \n")
                edfTclFile.write("report_timing_summary -file " + module.name + ".place.twr\n")
                edfTclFile.write("phys_opt_design \n")
                if(not 'NO_ROUTE' in area_constraints.constraints[module.name].attributes and self.routeAG):
                    edfTclFile.write("route_design\n")
                    edfTclFile.write("report_timing_summary -file " + module.name + ".route.twr\n")
                    edfTclFile.write("report_route_status\n")
            # still need to link design.
            if(needToLink):
                edfTclFile.write("link_design -mode out_of_context -top " + refName + " -part " + self.part + "\n")
            edfTclFile.write('write_checkpoint -force ' + module.name + ".place.dcp" + '\n')
            edfTclFile.close()
            constraintsTclFile.close()
        return place_dcp_tcl

    # Rule 1: generate the tcl scripts whenever the constraints change.
    moduleList.env.Command(
        [edfTcl, constraintsTcl],
        [area_constraints.areaConstraintsFile()],
        place_dcp_tcl_closure(moduleList)
        )

    # generate checkpoint
    return moduleList.env.Command(
        [dcp],
        [checkpoint] + [edfTcl, constraintsTcl] + self.tcl_headers + self.tcl_algs + self.tcl_defs + self.tcl_funcs,
        ['cd ' + placeCompileDirectory + '; touch start.txt; vivado -mode batch -source ' + module.name + ".place.tcl" + ' -log ' + module.name + '.place.log'])
def dump_lim_graph(target, source, env):
    """SCons action: build the LI graph from per-module .log files and
    pickle it to target[0].

    target -- SCons node list; target[0] is the output pickle file.
    source -- SCons node list; only members ending in '.log' are parsed.
    env    -- SCons environment (unused).

    NOTE(review): relies on names defined elsewhere in this file
    (moduleList, LIGraph, LIModule, parseLogfiles, model, Source,
    pipeline_debug) -- confirm they are in scope at call time.
    """
    # Find the subset of sources that are log files and parse them
    logs = [s for s in source if (str(s)[-4:] == '.log')]
    fullLIGraph = LIGraph(parseLogfiles(logs))

    # annotate modules with relevant object code (useful in
    # LIM compilation)
    # this is not technically a part of the tree cut methodology, but we need to do this
    # For the LIM compiler, we must also annotate those
    # channels which are coming out of the platform code.
    for module in moduleList.synthBoundaries():
        modulePath = module.buildPath

        # Wrap the real findBuildPath() so it can be invoked
        # later by map().  Defined per iteration so each module
        # binds its own build path.
        def __findBuildPath(path):
            return Source.findBuildPath(path, modulePath)

        # User area groups add a wrinkle. We need to
        # keep them around, but they don't have LI
        # channels
        if (not module.getAttribute('AREA_GROUP') is None):
            # We now need to create and integrate an
            # LI Module for this module
            newModule = LIModule(module.name, module.name)
            newModule.putAttribute('PLATFORM_MODULE', True)
            newModule.putAttribute('BLACK_BOX_AREA_GROUP', True)
            fullLIGraph.mergeModules([newModule])

        # the liGraph only knows about modules that actually
        # have connections; some modules are vestigial, and so
        # we can forget about them...
        if (module.boundaryName in fullLIGraph.modules):
            for objectType in module.moduleDependency:
                # it appears that we need to filter
                # these objects. TODO: Clean the
                # things adding to this list so we
                # don't require the filtering step.
                depList = module.moduleDependency[objectType]
                convertedDeps = model.convertDependencies(depList)
                relativeDeps = map(__findBuildPath, convertedDeps)
                fullLIGraph.modules[module.boundaryName].putObjectCode(
                    objectType, relativeDeps)

    for module in moduleList.synthBoundaries():
        if (module.boundaryName in fullLIGraph.modules):
            # annotate platform module with local mapping.
            if (module.name == moduleList.localPlatformName + '_platform'):
                # The platform module is special.
                fullLIGraph.modules[module.boundaryName].putAttribute(
                    'MAPPING', moduleList.localPlatformName)
                fullLIGraph.modules[module.boundaryName].putAttribute(
                    'PLATFORM_MODULE', True)

    # Decorate LI modules with type
    for module in fullLIGraph.modules.values():
        module.putAttribute("EXECUTION_TYPE", "RTL")

    # dump graph representation.
    pickleHandle = open(str(target[0]), 'wb')
    pickle.dump(fullLIGraph, pickleHandle, protocol=-1)
    pickleHandle.close()

    if (pipeline_debug != 0):
        print "Initial Graph is: " + str(
            fullLIGraph) + ": " + sys.version + "\n"
def __init__(self, moduleList):
    """Set up the Vivado physical (link / opt / place / route / bitgen)
    flow for the whole design.

    Gathers constraint tcl scripts, converts per-module synthesis results
    into design checkpoints, writes the top-level post-synthesis tcl
    (<apm>.physical.tcl), and registers the SCons Command that runs Vivado
    to produce the final <apm>_par.bit bitfile.

    NOTE(review): this is a method of a flow class defined outside this
    chunk; it relies on sibling methods (edf_to_dcp, place_dcp,
    ag_constraints) and module-level imports (os, model, area_group_tool,
    wrapper_gen_tool, li_module, xilinx_loader).
    """
    # if we have a deps build, don't do anything...
    if(moduleList.isDependsBuild):
        return

    self.firstPassLIGraph = wrapper_gen_tool.getFirstPassLIGraph()

    # A collector for all of the checkpoint objects we will gather/build in the following code.
    dcps = []

    # Construct the tcl file
    self.part = moduleList.getAWBParam('physical_platform_config', 'FPGA_PART_XILINX')

    apm_name = moduleList.compileDirectory + '/' + moduleList.apmName

    self.paramTclFile = moduleList.topModule.moduleDependency['PARAM_TCL'][0]

    # If the TMP_XILINX_DIR doesn't exist, create it.
    if not os.path.isdir(moduleList.env['DEFS']['TMP_XILINX_DIR']):
        os.mkdir(moduleList.env['DEFS']['TMP_XILINX_DIR'])

    # Gather Tcl files for handling constraints.
    self.tcl_headers = []
    if(len(moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_HEADERS')) > 0):
        self.tcl_headers = map(model.modify_path_hw, moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_HEADERS'))

    self.tcl_defs = []
    if(len(moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_DEFINITIONS')) > 0):
        self.tcl_defs = map(model.modify_path_hw, moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_DEFINITIONS'))

    self.tcl_funcs = []
    if(len(moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_FUNCTIONS')) > 0):
        self.tcl_funcs = map(model.modify_path_hw, moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_FUNCTIONS'))

    self.tcl_algs = []
    if(len(moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_ALGEBRAS')) > 0):
        self.tcl_algs = map(model.modify_path_hw, moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_ALGEBRAS'))

    self.tcl_bmms = []
    if(len(moduleList.getAllDependencies('GIVEN_XILINX_BMMS')) > 0):
        self.tcl_bmms = moduleList.getAllDependencies('GIVEN_XILINX_BMMS')

    self.tcl_elfs = []
    if(len(moduleList.getAllDependencies('GIVEN_XILINX_ELFS')) > 0):
        self.tcl_elfs = moduleList.getAllDependencies('GIVEN_XILINX_ELFS')

    self.tcl_ag = []

    # Emit area group definitions.
    # If we got an area group placement data structure, now is the
    # time to convert it into a new constraint tcl.
    self.area_group_file = moduleList.compileDirectory + '/areagroups.xdc'
    if ('AREA_GROUPS' in moduleList.topModule.moduleDependency):
        self.area_constraints = area_group_tool.AreaConstraints(moduleList)
        self.routeAG = (moduleList.getAWBParam('area_group_tool', 'AREA_GROUPS_ROUTE_AG') != 0)

        # user ucf may be overridden by our area group ucf. Put our
        # generated ucf first.
        #tcl_defs.insert(0,self.area_group_file)
        def area_group_ucf_closure(moduleList):
            # SCons action: emit the area-group constraint file.
            def area_group_ucf(target, source, env):
                self.area_constraints.loadAreaConstraints()
                self.area_constraints.emitConstraintsVivado(self.area_group_file)
            return area_group_ucf

        moduleList.env.Command(
            [self.area_group_file],
            self.area_constraints.areaConstraintsFile(),
            area_group_ucf_closure(moduleList)
            )

    synthDepsBase = moduleList.getAllDependencies('GEN_VIVADO_DCPS')

    # We got a stack of synthesis results for the LI modules. We need
    # to convert these to design checkpoints for the fast place and
    # route flow.
    ngcModules = [module for module in moduleList.synthBoundaries() if not module.liIgnore]

    for module in ngcModules + [moduleList.topModule]:
        # did we get a dcp from the first pass? If so, did the lim
        # graph give code for this module? If both are true, then we
        # will link the old ngc in, rather than regenerate it.
        if ((not self.firstPassLIGraph is None) and (module.name in self.firstPassLIGraph.modules) and (self.firstPassLIGraph.modules[module.name].getAttribute('RESYNTHESIZE') is None)):
            if (li_module.linkFirstPassObject(moduleList, module, self.firstPassLIGraph, 'GEN_VIVADO_DCPS', 'GEN_VIVADO_DCPS') is None):
                module.moduleDependency['GEN_VIVADO_DCPS'] = [self.edf_to_dcp(moduleList, module)]
        # it's possible that we got dcp from this compilation
        # pass. This will happen for the platform modules.
        elif (len(module.getDependencies('GEN_VIVADO_DCPS')) > 0):
            continue
        # we got neither. therefore, we must create a dcp out of the ngc.
        else:
            module.moduleDependency['GEN_VIVADO_DCPS'] = [self.edf_to_dcp(moduleList, module)]

    synthDeps = moduleList.getAllDependencies('GEN_VIVADO_DCPS')

    postSynthTcl = apm_name + '.physical.tcl'

    topWrapper = moduleList.topModule.wrapperName()

    newTclFile = open(postSynthTcl, 'w')
    newTclFile.write('create_project -force ' + moduleList.apmName + ' ' + moduleList.compileDirectory + ' -part ' + self.part + ' \n')

    # To resolve black boxes, we need to load checkpoints in the
    # following order:
    # 1) topModule
    # 2) platformModule
    # 3) user program, in any order
    userModules = [module for module in moduleList.synthBoundaries() if not module.liIgnore and not module.platformModule]
    platformModules = [module for module in moduleList.synthBoundaries() if not module.liIgnore and module.platformModule]
    checkpointCommands = []

    if(not moduleList.getAWBParamSafe('area_group_tool', 'AREA_GROUPS_ENABLE')):
        # Serial flow: just read every module's checkpoint.
        for module in [moduleList.topModule] + platformModules + userModules:
            dcps.append(module.getDependencies('GEN_VIVADO_DCPS'))
            checkpoint = model.convertDependencies(module.getDependencies('GEN_VIVADO_DCPS'))
            # There should only be one checkpoint here.
            if(len(checkpoint) > 1):
                print "Error too many checkpoints for " + str(module.name) + ": " + str(checkpoint)
                continue
            if(len(checkpoint) == 0):
                print "No checkpoints for " + str(module.name) + ": " + str(checkpoint)
                continue
            newTclFile.write('read_checkpoint ' + checkpoint[0] + '\n')
    # We're attempting the new, parallel flow.
    else:
        # we need to issue seperate place commands. Therefore, we attempt to place each design

        # There may be some special area groups in platforms -- handle them
        elabAreaConstraints = area_group_tool.AreaConstraints(moduleList)
        elabAreaConstraints.loadAreaConstraintsElaborated()
        #self.area_constraints = area_group_tool.AreaConstraints(moduleList)
        for module in userModules:
            # Did we get a placed module already?
            if((module.name in elabAreaConstraints.constraints) and ('LIBRARY_DCP' in elabAreaConstraints.constraints[module.name].attributes)):
                # we need to locate the dcp corresponding to this area group.
                candidates = moduleList.getAllDependencies('GIVEN_VIVADO_DCPS')
                for dcpCandidate in moduleList.getAllDependencies('GIVEN_VIVADO_DCPS'):
                    if dcpCandidate.attributes['module'] == module.name:
                        dcp = str(model.modify_path_hw(dcpCandidate))
                        model.dictionary_list_create_append(module.moduleDependency, 'GEN_VIVADO_PLACEMENT_DCPS', dcp)
                        dcps.append(dcp)
            else:
                # No library placement; place this module ourselves.
                dcp = self.place_dcp(moduleList, module)
                model.dictionary_list_create_append(module.moduleDependency, 'GEN_VIVADO_PLACEMENT_DCPS', dcp)
                dcps.append(dcp)

        for module in [moduleList.topModule] + platformModules:
            checkpoint = model.convertDependencies(module.getDependencies('GEN_VIVADO_DCPS'))
            dcps.append(checkpoint)
            # There should only be one checkpoint here.
            if(len(checkpoint) > 1):
                print "Error too many checkpoints for " + str(module.name) + ": " + str(checkpoint)
                continue
            if(len(checkpoint) == 0):
                print "No checkpoints for " + str(module.name) + ": " + str(checkpoint)
                continue
            newTclFile.write('read_checkpoint ' + checkpoint[0] + '\n')

        # We can have parent/child relationships in the user modules.
        # Vivado requires that checkpoints be read in topological
        # order.
        def isBlackBox(module):
            # Sort key: black-box area groups come after plain modules.
            if(self.firstPassLIGraph.modules[module.name].getAttribute('BLACK_BOX_AREA_GROUP')):
                return 1
            return 0

        for module in sorted(userModules, key=isBlackBox):
            checkpoint = model.convertDependencies(module.getDependencies('GEN_VIVADO_PLACEMENT_DCPS'))
            # There should only be one checkpoint here.
            if(len(checkpoint) > 1):
                print "Error too many checkpoints for " + str(module.name) + ": " + str(checkpoint)
                continue
            if(len(checkpoint) == 0):
                print "No checkpoints for " + str(module.name) + ": " + str(checkpoint)
                continue
            #read in new checkoutpoint
            newTclFile.write('read_checkpoint ' + checkpoint[0] + '\n')

            emitPlatformAreaGroups = (moduleList.getAWBParam('area_group_tool', 'AREA_GROUPS_GROUP_PLATFORM_CODE') != 0)

            # platformModule refers to the main platform module, not
            # the subordinate device area groups.
            platformModule = (self.firstPassLIGraph.modules[module.name].getAttribute('BLACK_BOX_AREA_GROUP') is None) and (not self.firstPassLIGraph.modules[module.name].getAttribute('PLATFORM_MODULE') is None)

            allowAGPlatform = (not platformModule) or emitPlatformAreaGroups

            emitDeviceGroups = (moduleList.getAWBParam('area_group_tool', 'AREA_GROUPS_PAR_DEVICE_AG') != 0)
            allowAGDevice = (self.firstPassLIGraph.modules[module.name].getAttribute('BLACK_BOX_AREA_GROUP') is None) or emitDeviceGroups

            if (allowAGPlatform and allowAGDevice):
                refName = module.wrapperName()
                lockPlacement = True
                lockRoute = self.routeAG
                # If this is an platform/user-defined area group, the wrapper name may be different.
                if (not self.firstPassLIGraph.modules[module.name].getAttribute('BLACK_BOX_AREA_GROUP') is None):
                    refName = elabAreaConstraints.constraints[module.name].attributes['MODULE_NAME']
                    lockPlacement = not ('NO_PLACE' in elabAreaConstraints.constraints[module.name].attributes) and lockPlacement
                    lockRoute = not ('NO_ROUTE' in elabAreaConstraints.constraints[module.name].attributes) and lockRoute

                checkpointCommands.append('if { [llength [get_cells -hier -filter {REF_NAME =~ "' + refName + '"}]] } {\n')
                checkpointCommands.append(' puts "Locking ' + refName + '"\n')
                if (lockRoute):
                    # locking routing requires us to emit an area group. boo.
                    ag_tcl = self.ag_constraints(moduleList, module)
                    self.tcl_ag.append(ag_tcl)
                    checkpointCommands.append(' source ' + str(ag_tcl) + '\n')
                    checkpointCommands.append(' lock_design -level routing [get_cells -hier -filter {REF_NAME =~ "' + refName + '"}]\n')
                elif (lockPlacement):
                    checkpointCommands.append(' lock_design -level placement [get_cells -hier -filter {REF_NAME =~ "' + refName + '"}]\n')
                checkpointCommands.append('}\n')

    given_netlists = [ moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + netlist for netlist in moduleList.getAllDependenciesWithPaths('GIVEN_NGCS') + moduleList.getAllDependenciesWithPaths('GIVEN_EDFS') ]

    for netlist in given_netlists:
        newTclFile.write('read_edif ' + netlist + '\n')

    # We have lots of dangling wires (Thanks, Bluespec). Set the
    # following properties to silence the warnings.
    newTclFile.write("set_property SEVERITY {Warning} [get_drc_checks NSTD-1]\n")
    newTclFile.write("set_property SEVERITY {Warning} [get_drc_checks UCIO-1]\n")

    newTclFile.write("link_design -top " + topWrapper + " -part " + self.part + "\n")
    newTclFile.write("report_utilization -file " + apm_name + ".link.util\n")
    newTclFile.write("write_checkpoint -force " + apm_name + ".link.dcp\n")

    # lock down the area group routing.
    newTclFile.write("\n".join(checkpointCommands) + "\n")

    # Attach any given ELF images to the design.
    for elf in self.tcl_elfs:
        newTclFile.write("add_file " + model.modify_path_hw(elf) + "\n")
        newTclFile.write("set_property MEMDATA.ADDR_MAP_CELLS {" + str(elf.attributes['ref']) + "} [get_files " + model.modify_path_hw(elf) + "]\n")

    # We will now attempt to link in any bmm that we might have.
    for bmm in self.tcl_bmms:
        newTclFile.write("add_file " + model.modify_path_hw(bmm) + "\n")
        newTclFile.write("set_property SCOPED_TO_REF " + str(bmm.attributes['ref']) + " [get_files " + model.modify_path_hw(bmm) + "]\n")

    # Establish the tcl environment for the top-level build.
    newTclFile.write('set IS_TOP_BUILD 1\n ')
    newTclFile.write('set IS_AREA_GROUP_BUILD 0\n ')
    newTclFile.write('set SYNTH_OBJECT ""\n')
    newTclFile.write('source ' + self.paramTclFile + '\n')

    for tcl_header in self.tcl_headers:
        newTclFile.write('source ' + tcl_header + '\n')

    for tcl_def in self.tcl_defs:
        newTclFile.write('source ' + tcl_def + '\n')

    for tcl_func in self.tcl_funcs:
        newTclFile.write('source ' + tcl_func + '\n')

    for tcl_alg in self.tcl_algs:
        newTclFile.write('source ' + tcl_alg + '\n')

    def dumpPBlockCmd(tgt):
        # tcl snippet dumping pblock utilization after stage 'tgt'.
        return 'dumpPBlockUtilization "' + moduleList.compileDirectory + '/' + tgt + '.util"\n'

    # Implementation pipeline: opt -> place -> phys_opt -> route -> bitgen,
    # with a checkpoint and utilization/timing report after each stage.
    newTclFile.write(dumpPBlockCmd('link'))
    newTclFile.write("report_timing_summary -file " + apm_name + ".map.twr\n\n")
    newTclFile.write("opt_design -directive AddRemap\n")
    newTclFile.write("report_utilization -file " + apm_name + ".opt.util\n")
    newTclFile.write(dumpPBlockCmd('opt'))
    newTclFile.write("write_checkpoint -force " + apm_name + ".opt.dcp\n\n")
    newTclFile.write("place_design -no_drc -directive WLDrivenBlockPlacement\n")
    newTclFile.write(dumpPBlockCmd('place'))
    newTclFile.write("phys_opt_design -directive AggressiveFanoutOpt\n")
    newTclFile.write("write_checkpoint -force " + apm_name + ".map.dcp\n")
    newTclFile.write(dumpPBlockCmd('phyopt'))
    newTclFile.write("report_utilization -file " + apm_name + ".map.util\n\n")
    newTclFile.write("route_design\n")
    newTclFile.write("write_checkpoint -force " + apm_name + ".par.dcp\n")
    newTclFile.write(dumpPBlockCmd('par'))
    newTclFile.write("report_timing_summary -file " + apm_name + ".par.twr\n\n")
    newTclFile.write("report_utilization -hierarchical -file " + apm_name + ".par.util\n")
    newTclFile.write("report_drc -file " + topWrapper + ".drc\n\n")
    newTclFile.write("write_bitstream -force " + apm_name + "_par.bit\n")
    newTclFile.close()

    # generate bitfile
    xilinx_bit = moduleList.env.Command(
        apm_name + '_par.bit',
        synthDeps + self.tcl_algs + self.tcl_defs + self.tcl_funcs + self.tcl_ag + [self.paramTclFile] + dcps + [postSynthTcl],
        ['touch start.txt; vivado -verbose -mode batch -source ' + postSynthTcl + ' -log ' + moduleList.compileDirectory + '/postsynth.log'])

    moduleList.topModule.moduleDependency['BIT'] = [apm_name + '_par.bit']

    # We still need to generate a download script.
    xilinx_loader.LOADER(moduleList)
def setupTreeBuild(self, moduleList, topo):
    """Register the SCons rules that build the Bluespec 'build tree' module.

    The build tree is a synthetic module that aggregates the user's LI
    (latency-insensitive) modules so that Bluespec compilation can be
    partitioned.  This method handles both the normal (first-pass) build
    and the LIM second-pass build, where object code produced by the
    first pass is linked through instead of being recompiled.

    moduleList -- the global AWB module list / SCons environment wrapper.
    topo       -- topologically sorted list of synthesis-boundary modules.

    Side effects: registers many env.Command/env.Depends rules, inserts
    new Module objects into moduleList, and mutates moduleDependency
    dictionaries of existing modules.  Returns nothing.
    """
    useBVI = self.parent.USE_BVI
    env = moduleList.env
    root_directory = model.rootDir

    ##
    ## Load intra-Bluespec dependence already computed.  This
    ## information will ultimately drive the building of Bluespec
    ## modules.  Build tree has a few dependencies which must be
    ## captured.
    ##
    ## If we aren't building the build tree, don't bother with its dependencies
    env.ParseDepends(get_build_path(moduleList, moduleList.topModule) + '/.depends-build-tree',
                     must_exist = not moduleList.env.GetOption('clean'))

    tree_base_path = env.Dir(get_build_path(moduleList, moduleList.topModule))

    # Source/object paths for the two generated build-tree BSV files.
    tree_file_synth = tree_base_path.File('build_tree_synth.bsv')
    tree_file_synth_bo_path = tree_base_path.File(self.parent.TMP_BSC_DIR + '/build_tree_synth.bo')

    tree_file_wrapper = tree_base_path.File('build_tree_Wrapper.bsv')
    tree_file_wrapper_bo_path = tree_base_path.File(self.parent.TMP_BSC_DIR + '/build_tree_Wrapper.bo')

    # Area constraints (None when the area-group tool is disabled or absent).
    area_constraints = None
    try:
        if (moduleList.getAWBParam('area_group_tool', 'AREA_GROUPS_ENABLE')):
            area_constraints = area_group_tool.AreaConstraints(moduleList)
    except:
        # The area constraints code is not present.
        pass

    # Collect the LI boundary log files produced by each user module.
    boundary_logs = []
    for module in topo:
        # Remove any platform modules..  These are special in that
        # they can have wired interfaces.
        if (not module.platformModule):
            for log in module.moduleDependency['BSV_LOG']:
                boundary_logs += [root_directory.File(log)]

    ##
    ## Back to SCons configuration (first) pass...
    ##
    top_module_path = get_build_path(moduleList, moduleList.topModule)

    # Inform object code build of the LI Graph retrieved from the
    # first pass.  Probe firstPassGraph for relevant object codes
    # (BA/NGC/BSV_SYNTH/BSV_SYNTH_BSH) accessed:
    # module.objectCache['NGC'] (these already have absolute
    # paths).  I feel like the GEN_BAS/GEN_VERILOGS of the first
    # pass may be missing.  We insert these modules as objects in
    # the ModuleList.

    def makeAWBLink(doLink, source, buildPath, uniquifier=''):
        # Create (or merely name, when doLink is False) a symlink in
        # <buildPath>/.li that points at a first-pass build product.
        # The uniquifier prefix avoids name collisions between
        # similarly-named files from different LI modules.
        base_file = os.path.basename(str(source))
        link_dir = buildPath + '/.li'
        link_path = link_dir + '/' + uniquifier + base_file
        if (doLink):
            # Replace any stale link from a previous build.
            if (os.path.lexists(link_path)):
                os.remove(link_path)
            rel = os.path.relpath(str(source), link_dir)
            print 'Linking: ' + link_path + ' -> ' + rel
            os.symlink(rel, link_path)
        return link_path

    # Accumulators for the source->link pairs that the LIM link pass
    # will materialize (split into user and platform groups).
    limLinkUserSources = []
    limLinkUserTargets = []
    limLinkPlatformSources = []
    limLinkPlatformTargets = []
    importStubs = []

    if (not self.getFirstPassLIGraph is None):
        # Now that we have demanded bluespec builds (for
        # dependencies), we should now should downgrade synthesis
        # boundaries for the backend.
        oldStubs = []
        for module in topo:
            if(not module.platformModule):
                if((not module.name in self.getFirstPassLIGraph.modules) or (self.getFirstPassLIGraph.modules[module.name].getAttribute('RESYNTHESIZE') is None)):
                    # Not marked for resynthesis: drop it from LI handling.
                    # (this may not be needed.)
                    module.liIgnore = True
                else:
                    oldStubs += module.moduleDependency['GEN_VERILOG_STUB']

        # let's pick up the platform dependencies, since they are also special.
        env.ParseDepends(get_build_path(moduleList, moduleList.topModule) + '/.depends-platform',
                         must_exist = not moduleList.env.GetOption('clean'))

        # Due to the way that string files are
        # generated, they are difficult to rename in
        # the front-end compilation.  This leads to
        # collisions amoung similarly-typed LI
        # Modules.  We fix it by uniquifying the links.

        def getModuleName(module):
            return module.name

        def getEmpty(module):
            return ''

        # Maps object type -> function producing the uniquifier prefix
        # for that type (only STR files need the module-name prefix).
        linkthroughMap = {'BA': getEmpty, 'GEN_BAS': getEmpty, 'GEN_VERILOGS': getEmpty, 'GEN_VERILOG_STUB': getEmpty, 'STR': getModuleName}

        buildPath = get_build_path(moduleList, moduleList.topModule)
        for module in self.getFirstPassLIGraph.modules.values():
            # do not link through those modules marked for resynthesis.
            if(not module.getAttribute('RESYNTHESIZE') is None):
                continue

            moduleDeps = {}

            for objType in linkthroughMap:
                if(objType in module.objectCache):
                    # NB: Python 2 map() returns a list here.
                    localNames = map(lambda fileName: makeAWBLink(False,
                                                                  fileName.from_bld(),
                                                                  buildPath,
                                                                  uniquifier=linkthroughMap[objType](module)),
                                     module.objectCache[objType])

                    # The previous passes GEN_VERILOGS are not
                    # really generated here, so we can't call them
                    # as such.  Tuck them in to 'VERILOG'
                    if(objType == 'GEN_VERILOG_STUB'):
                        oldStubs += localNames

                    moduleDeps[objType] = localNames

                    if (module.getAttribute('PLATFORM_MODULE') is None):
                        limLinkUserTargets += localNames
                        limLinkUserSources += module.objectCache[objType]
                    else:
                        limLinkPlatformTargets += localNames
                        limLinkPlatformSources += module.objectCache[objType]

            # Re-insert the first-pass module into this pass's module list
            # so downstream stages see its (linked) build products.
            m = Module(module.name, ["mk_" + module.name + "_Wrapper"],\
                       moduleList.topModule.buildPath, moduleList.topModule.name,\
                       [], moduleList.topModule.name, [], moduleDeps)

            moduleList.insertModule(m)
    else:
        # The top module/build pipeline only depend on non-platformModules
        oldStubs = [module.moduleDependency['GEN_VERILOG_STUB'] for module in moduleList.synthBoundaries() if not module.platformModule]

    ## Enumerate the dependencies created by the build tree.
    buildTreeDeps = {}

    ## We have now generated a completely new module.  Let's throw it
    ## into the list.  Although we are building it seperately, this
    ## module is an extension to the build tree.
    # -2: apparently excludes two special wrappers from the count —
    # TODO(review): confirm which two modules are excluded.
    expected_wrapper_count = len(boundary_logs) - 2
    importBOs = []

    if (not self.getFirstPassLIGraph is None):
        # we now have platform modules in here.
        expected_wrapper_count = len(self.getFirstPassLIGraph.modules) - 2

        # If we have an LI graph, we need to construct and compile
        # LI import wrappers for the modules we received from the
        # first pass.  Do that here.  include all the dependencies
        # in the graph in the wrapper.
        liGraph = LIGraph([])
        firstPassGraph = self.getFirstPassLIGraph
        # We should ignore the 'PLATFORM_MODULE'
        liGraph.mergeModules([module for module in bsv_tool.getUserModules(firstPassGraph) if module.getAttribute('RESYNTHESIZE') is None])
        for module in sorted(liGraph.graph.nodes(), key=lambda module: module.name):
            # pull in the dependecies generate by the dependency pass.
            env.ParseDepends(str(tree_base_path) + '/.depends-' + module.name,
                             must_exist = not moduleList.env.GetOption('clean'))
            wrapper_path = tree_base_path.File(module.name + '_Wrapper.bsv')
            wrapper_bo_path = tree_base_path.File(self.parent.TMP_BSC_DIR + '/' + module.name + '_Wrapper.bo')

            # include commands to build the wrapper .bo/.ba
            # Here, we won't be using the generated .v (it's
            # garbage), so we intentionally get rid of it.
            importVDir = env.Dir('.lim_import_verilog')
            if not os.path.isdir(str(importVDir)):
                os.mkdir(str(importVDir))

            wrapper_command = self.parent.compile_bo_bsc_base([wrapper_bo_path], get_build_path(moduleList, moduleList.topModule), vdir=importVDir) + ' $SOURCES'
            wrapper_bo = env.Command([wrapper_bo_path],
                                     [wrapper_path],
                                     wrapper_command)
            # create BO.
            importBOs += [wrapper_bo]

    # Names of the synthetic tree-node modules the wrapper will produce.
    verilog_deps = ["__TREE_MODULE__" + str(id) for id in range(expected_wrapper_count)]

    if(self.parent.BUILD_LOGS_ONLY == 0):
        buildTreeDeps['GEN_VERILOGS'] = ["mk_" + vlog + '_Wrapper' + ".v" for vlog in verilog_deps]
    else:
        buildTreeDeps['GEN_VERILOGS'] = []
    buildTreeDeps['GEN_BAS'] = ["mk_" + vlog + '_Wrapper' + ".ba" for vlog in verilog_deps]
    buildTreeDeps['BA'] = []
    buildTreeDeps['STR'] = []
    buildTreeDeps['VERILOG'] = [top_module_path + '/' + self.parent.TMP_BSC_DIR + '/mk_build_tree_Wrapper.v']
    buildTreeDeps['GIVEN_BSVS'] = []
    buildTreeDeps['VERILOG_STUB'] = model.convertDependencies(oldStubs)

    tree_module = Module('build_tree', ["mkBuildTree"],
                         moduleList.topModule.buildPath,
                         moduleList.topModule.name,
                         [], moduleList.topModule.name, [], buildTreeDeps,
                         platformModule=True)
    tree_module.putAttribute('LI_GRAPH_IGNORE', True)

    moduleList.insertModule(tree_module)
    wrapper_gen_tool.generateAWBCompileWrapper(moduleList, tree_module)

    ## This produces the treeNode BSV.  It must wait for the
    ## compilation of the log files, which it will read to form the
    ## LIM graph
    ##
    ## We do two operations during this phase.  First, we dump a
    ## representation of the user program.  This representation is
    ## used by the LIM compiler to create heterogeneous
    ## executables.  We then do a local modification to the build
    ## tree to reduce Bluespec compilation time.

    # If I got an LI graph, I don't care about the boundary logs.
    # In this case, everything comes from the first pass graph.
    # Usually, we only need logs and BOs to build the build tree.
    # However, during the second pass build we also need to fill
    # in information about area group paths (changed by tree build)
    tree_build_deps = boundary_logs + importBOs
    tree_build_results = [tree_file_wrapper, tree_file_synth]

    if (self.getFirstPassLIGraph and area_constraints):
        tree_build_deps += [area_constraints.areaConstraintsFilePlaced()]
        tree_build_results += [area_constraints.areaConstraintsFile()]

    ##
    ## The cutTreeBuild builder function needs some of the local state
    ## in the current function.  Build a dictionary with the required
    ## state and partial instance of cutTreeBuild with the state applied.
    ##
    cut_tree_state = dict()
    cut_tree_state['area_constraints'] = area_constraints
    cut_tree_state['boundary_logs'] = boundary_logs
    cut_tree_state['moduleList'] = moduleList
    cut_tree_state['tree_file_synth'] = tree_file_synth
    cut_tree_state['tree_file_wrapper'] = tree_file_wrapper

    cut_tree_build = functools.partial(self.cutTreeBuild, cut_tree_state)
    # SCons displays the builder's __name__; restore it after partial().
    cut_tree_build.__name__ = 'cutTreeBuild'

    tree_components = env.Command(tree_build_results,
                                  tree_build_deps,
                                  cut_tree_build)

    ## Compiling the build tree wrapper produces several .ba
    ## files, some that are useful, the TREE_MODULES, and some
    ## which are not, the _Wrapper.ba.  As a result, we dump the
    ## tree build output to a different directory, so as not to
    ## pollute the existing build.  Here, we link to the relevant
    ## files in that directory.
    def linkLIMObjClosure(liModules, buildPath):
        # Returns an SCons builder that symlinks the given first-pass
        # modules' cached objects into buildPath/.li at build time.
        def linkLIMObj(target, source, env):
            if (not self.getFirstPassLIGraph is None):
                # The LIM build has passed us some source and we need
                # to patch it through.
                for module in liModules:
                    for objType in linkthroughMap:
                        if(objType in module.objectCache):
                            map(lambda fileName: makeAWBLink(True,
                                                             fileName.from_bld(),
                                                             buildPath,
                                                             uniquifier=linkthroughMap[objType](module)),
                                module.objectCache[objType])
        return linkLIMObj

    ## The top level build depends on the compilation of the tree components
    ## into bo/ba/v files.

    # the GEN_BAS attached to the build tree need to be massaged
    # to reflect their actual path.  Perhaps we should be using
    # some kind of object that makes these sorts of conversions
    # simpler.
    producedBAs = map(lambda path: bsv_tool.modify_path_ba(moduleList, path), moduleList.getModuleDependenciesWithPaths(tree_module, 'GEN_BAS'))
    producedVs = map(lambda path: bsv_tool.modify_path_ba(moduleList, path), moduleList.getModuleDependenciesWithPaths(tree_module, 'GEN_VERILOGS')) + \
                 buildTreeDeps['VERILOG']

    tree_command = self.parent.compile_bo_bsc_base([tree_file_wrapper_bo_path], get_build_path(moduleList, moduleList.topModule)) + ' ' + tree_file_wrapper.path
    tree_file_wrapper_bo = env.Command([tree_file_wrapper_bo_path] + producedBAs + producedVs,
                                       tree_components,
                                       tree_command)

    # If we got a first pass LI graph, we need to link its object codes.
    if (not self.getFirstPassLIGraph is None):
        srcs = [s.from_bld() for s in limLinkUserSources]
        link_lim_user_objs = env.Command(limLinkUserTargets,
                                         srcs,
                                         linkLIMObjClosure([module for module in bsv_tool.getUserModules(firstPassGraph) if module.getAttribute('RESYNTHESIZE') is None],
                                                           tree_base_path.path))
        env.Depends(link_lim_user_objs, tree_file_wrapper_bo)

    # the tree_file_wrapper build needs all the wrapper bo from the user program,
    # but not the top level build.
    top_bo = moduleList.topModule.moduleDependency['BSV_BO']
    all_bo = moduleList.getAllDependencies('BO')

    env.Depends(tree_file_wrapper_bo, all_bo)

    tree_synth_command = self.parent.compile_bo_bsc_base([tree_file_synth_bo_path], get_build_path(moduleList, moduleList.topModule)) + ' ' + tree_file_synth.path
    tree_file_synth_bo = env.Command([tree_file_synth_bo_path],
                                     [tree_file_synth, tree_file_wrapper_bo],
                                     tree_synth_command)

    env.Depends(top_bo, tree_file_synth_bo)
    env.Depends(moduleList.topModule.moduleDependency['BSV_LOG'],
                tree_file_synth_bo)

    # Handle the platform_synth build, which is special cased.
    platform_synth = get_build_path(moduleList, moduleList.topModule) + "/" + moduleList.localPlatformName + "_platform_synth.bsv"
    platform_synth_bo_path = get_build_path(moduleList, moduleList.topModule) + "/" + self.parent.TMP_BSC_DIR +"/" + moduleList.localPlatformName + "_platform_synth"

    # if we are in the lim linking phase, we need to change the
    # vdir directory to hide the spurious verilog generated by
    # bluespec.
    importVDir = None
    if(not self.getFirstPassLIGraph is None):
        importVDir = env.Dir('.lim_import_verilog')
        if not os.path.isdir(str(importVDir)):
            os.mkdir(str(importVDir))

    platform_synth_command = self.parent.compile_bo_bsc_base([platform_synth_bo_path + '.bo'], get_build_path(moduleList, moduleList.topModule), vdir=importVDir) + ' $SOURCE'
    platform_wrapper_bo = get_build_path(moduleList, moduleList.topModule) + "/" + self.parent.TMP_BSC_DIR + "/" +moduleList.localPlatformName + '_platform_Wrapper.bo'

    platform_synth_deps = [platform_synth]
    # if we have a module graph, we don't require the compilation of the platform_wrapper_bo.
    if (self.getFirstPassLIGraph is None):
        platform_synth_deps.append(platform_wrapper_bo)
    platform_synth_bo = env.Command([platform_synth_bo_path + '.bo'],
                                    platform_synth_deps,
                                    platform_synth_command)
    # this produces a ba also?
    env.Depends(moduleList.topModule.moduleDependency['BSV_LOG'],
                platform_synth_bo)

    # Platform synth does the same object-bypass dance as tree_module.
    if(not self.getFirstPassLIGraph is None):
        srcs = [s.from_bld() for s in limLinkPlatformSources]
        link_lim_platform_objs = env.Command(limLinkPlatformTargets,
                                             srcs,
                                             linkLIMObjClosure([module for module in bsv_tool.getPlatformModules(firstPassGraph) if module.getAttribute('RESYNTHESIZE') is None],
                                                               tree_base_path.path))
        env.Depends(link_lim_platform_objs, platform_synth_bo)

    # need to generate a stub file for the build tree module.
    # note that in some cases, there will be only one module in
    # the graph, usually in a multifpga build.  In this case,
    # the build_tree module will be vestigal, but since we can't
    # predict this statically we'll have to build it anyway.
    tree_module.moduleDependency['GEN_VERILOG_STUB'] = [self.parent.stubGenCommand(top_module_path, "build_tree", top_module_path + '/' + self.parent.TMP_BSC_DIR + "/mk_build_tree_Wrapper.v")]

    # top level only depends on platform modules
    moduleList.topModule.moduleDependency['VERILOG_STUB'] = model.convertDependencies([module.moduleDependency['GEN_VERILOG_STUB'] for module in moduleList.synthBoundaries() if module.platformModule])
    if(not self.getFirstPassLIGraph is None):
        # Second pass build picks up stub files from the first pass build.
        moduleList.topModule.moduleDependency['VERILOG_STUB'] += model.convertDependencies(oldStubs)
# area group modules have a different base dependency than normal # modules wrapperVerilogDependency = model.get_temp_path( moduleList, module) + module.wrapperName() + '_stub.v' if (not module.getAttribute('AREA_GROUP') is None): # grab the parent stub? wrapperVerilogDependency = model.get_temp_path( moduleList, module) + module.wrapperName() + '.v' sub_netlist = moduleList.env.Command( [edfFile, srrFile], [wrapperVerilogDependency] + blackBoxDeps + sorted(moduleList.getDependencies(module, 'VERILOG')) + moduleVerilogs + moduleVHDs + sorted(moduleList.getAllDependencies('VERILOG_LIB')) + sorted( model.convertDependencies( moduleList.getDependencies(module, 'VERILOG_STUB'))) + [newPrjPath] + annotationFiles + annotationDeps + ['config/' + moduleList.apmName + '.synplify.prj'], [ SCons.Script.Delete(srrFile), 'synplify_premier -batch -license_wait ' + newPrjPath + ' > ' + build_dir + '.log', # Files in coreip just copied from elsewhere and waste space SCons.Script.Delete(build_dir + '/coreip'), '@echo synplify_premier ' + module.wrapperName() + ' build complete.' ]) # Make sure the build depends on all input files (e.g. XDC) moduleList.env.Depends(sub_netlist, fileArray)
def place_dcp(self, moduleList, module):
    """Register SCons rules that place (and optionally route) one module's
    synthesis checkpoint out-of-context, producing <name>.place.dcp.

    Two rules are registered: one that generates the per-module Tcl
    scripts (via a closure, since area-group state is only known at
    build time), and one that runs Vivado on them.  Returns the SCons
    node for the placed checkpoint.
    """
    # Due to area groups, we first need a closure to generate tcl.
    placeCompileDirectory = moduleList.compileDirectory + '/' + module.name + '_physical/'
    dcp = placeCompileDirectory + module.name + ".place.dcp"
    edfTcl = placeCompileDirectory + module.name + ".place.tcl"
    constraintsTcl = placeCompileDirectory + module.name + ".constraints.tcl"
    checkpoint = model.convertDependencies(module.getDependencies('GEN_VIVADO_DCPS'))
    if not os.path.isdir(placeCompileDirectory):
        os.mkdir(placeCompileDirectory)

    area_constraints = area_group_tool.AreaConstraints(moduleList)

    def place_dcp_tcl_closure(moduleList):
        # Builder action: writes the place.tcl / constraints.tcl pair.
        def place_dcp_tcl(target, source, env):
            # TODO: Eventually, we'll need to examine the contstraints
            # to decide if we need to rebuild.
            area_constraints.loadAreaConstraints()

            edfTclFile = open(edfTcl, 'w')
            constraintsTclFile = open(constraintsTcl, 'w')

            edfTclFile.write('read_checkpoint ' + model.rel_if_not_abspath(checkpoint[0], placeCompileDirectory) + '\n')

            # throw out area group constraints. (and maybe loc constraints
            # too?)  Some modules may not have placement information.
            # Ignore them for now.
            # needToLink tracks whether link_design was already emitted
            # by the constraints path below.
            needToLink = True

            refName = module.wrapperName()
            # If this is an platform/user-defined area group, the wrapper
            # name may be different.
            if (not self.firstPassLIGraph.modules[module.name].getAttribute('BLACK_BOX_AREA_GROUP') is None):
                refName = area_constraints.constraints[module.name].attributes['MODULE_NAME']

            if ((self.firstPassLIGraph.modules[module.name].getAttribute('BLACK_BOX_AREA_GROUP') is None) or moduleList.getAWBParamSafe('area_group_tool', 'AREA_GROUPS_PAR_DEVICE_AG')):
                if (not area_constraints.emitModuleConstraintsVivado(constraintsTclFile, module.name, useSourcePath=False) is None):
                    # for platform modules, we need to insert the tcl environment.
                    constraintsTclFile.write('set IS_TOP_BUILD 0\n')
                    constraintsTclFile.write('set AG_OBJECT ' + module.name + '\n')
                    constraintsTclFile.write('set IS_AREA_GROUP_BUILD 1\n')
                    constraintsTclFile.write('source ' + model.rel_if_not_abspath(self.paramTclFile, placeCompileDirectory) + '\n')

                    for tcl_header in self.tcl_headers:
                        constraintsTclFile.write('source ' + model.rel_if_not_abspath(tcl_header, placeCompileDirectory) + '\n')

                    for tcl_def in self.tcl_defs:
                        constraintsTclFile.write('source ' + model.rel_if_not_abspath(tcl_def, placeCompileDirectory) + '\n')

                    for tcl_func in self.tcl_funcs:
                        constraintsTclFile.write('source ' + model.rel_if_not_abspath(tcl_func, placeCompileDirectory) + '\n')

                    constraintsTclFile.write("annotateModelClock\n")
                    constraintsTclFile.write("annotateCLK_SRC\n")

                    for tcl_alg in self.tcl_algs:
                        constraintsTclFile.write('source ' + model.rel_if_not_abspath(tcl_alg, placeCompileDirectory) + '\n')

                    edfTclFile.write('add_file ' + model.rel_if_not_abspath(constraintsTcl, placeCompileDirectory) + '\n')

                    if (not 'NO_PLACE' in area_constraints.constraints[module.name].attributes):
                        if (not 'NO_ROUTE' in area_constraints.constraints[module.name].attributes and self.routeAG):
                            edfTclFile.write("set_property USED_IN {synthesis implementation opt_design place_design phys_opt_design route_design out_of_context} [get_files " + model.rel_if_not_abspath(constraintsTcl, placeCompileDirectory) + "]\n")
                        else:
                            edfTclFile.write("set_property USED_IN {synthesis implementation opt_design place_design phys_opt_design out_of_context} [get_files " + model.rel_if_not_abspath(constraintsTcl, placeCompileDirectory) + "]\n")

                        # linking lets us pull in placement constraints.
                        edfTclFile.write("link_design -mode out_of_context -top " + refName + " -part " + self.part + "\n")
                        needToLink = False
                    # if ended here...

                    # NOTE(review): the place/route emission below is
                    # reconstructed as sitting inside the constraints-emitted
                    # branch (place_design is only valid after the
                    # link_design above) — confirm against upstream source.
                    if (not 'NO_PLACE' in area_constraints.constraints[module.name].attributes):
                        edfTclFile.write("place_design -no_drc \n")
                        edfTclFile.write("report_timing_summary -file " + module.name + ".place.twr\n")
                        edfTclFile.write("phys_opt_design \n")

                    if (not 'NO_ROUTE' in area_constraints.constraints[module.name].attributes and self.routeAG):
                        edfTclFile.write("route_design\n")
                        edfTclFile.write("report_timing_summary -file " + module.name + ".route.twr\n")
                        edfTclFile.write("report_route_status\n")

            # still need to link design.
            if (needToLink):
                edfTclFile.write("link_design -mode out_of_context -top " + refName + " -part " + self.part + "\n")

            edfTclFile.write('write_checkpoint -force ' + module.name + ".place.dcp" + '\n')

            edfTclFile.close()
            constraintsTclFile.close()

        return place_dcp_tcl

    # Rule 1: emit the Tcl scripts whenever the area constraints change.
    moduleList.env.Command([edfTcl, constraintsTcl],
                           [area_constraints.areaConstraintsFile()],
                           place_dcp_tcl_closure(moduleList))

    # generate checkpoint
    # Rule 2: run Vivado on the generated scripts to produce the placed dcp.
    return moduleList.env.Command(
        [dcp],
        [checkpoint] + [edfTcl, constraintsTcl] + self.tcl_headers + self.tcl_algs + self.tcl_defs + self.tcl_funcs,
        ['cd ' + placeCompileDirectory + '; touch start.txt; vivado -mode batch -source ' + module.name + ".place.tcl" + ' -log ' + module.name + '.place.log'])
def __init__(self, moduleList):
    """Configure the post-synthesis Vivado place-and-route flow.

    Gathers project Tcl/BMM/ELF resources, converts per-module synthesis
    results into Vivado design checkpoints (serially or via the parallel
    area-group flow), writes the top-level physical-implementation Tcl
    script, and registers the SCons rule that produces the final
    bitstream (<apm>_par.bit).
    """
    # if we have a deps build, don't do anything...
    if (moduleList.isDependsBuild):
        return

    self.firstPassLIGraph = wrapper_gen_tool.getFirstPassLIGraph()

    # A collector for all of the checkpoint objects we will gather/build
    # in the following code.
    dcps = []

    # Construct the tcl file
    self.part = moduleList.getAWBParam('physical_platform_config', 'FPGA_PART_XILINX')

    apm_name = moduleList.compileDirectory + '/' + moduleList.apmName

    self.paramTclFile = moduleList.topModule.moduleDependency['PARAM_TCL'][0]

    # If the TMP_FPGA_DIR doesn't exist, create it.
    if not os.path.isdir(moduleList.env['DEFS']['TMP_FPGA_DIR']):
        os.mkdir(moduleList.env['DEFS']['TMP_FPGA_DIR'])

    # Gather Tcl files for handling constraints.
    self.tcl_headers = []
    if (len(moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_HEADERS')) > 0):
        self.tcl_headers = map(model.modify_path_hw, moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_HEADERS'))

    self.tcl_defs = []
    if (len(moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_DEFINITIONS')) > 0):
        self.tcl_defs = map(model.modify_path_hw, moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_DEFINITIONS'))

    self.tcl_funcs = []
    if (len(moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_FUNCTIONS')) > 0):
        self.tcl_funcs = map(model.modify_path_hw, moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_FUNCTIONS'))

    self.tcl_algs = []
    if (len(moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_ALGEBRAS')) > 0):
        self.tcl_algs = map(model.modify_path_hw, moduleList.getAllDependenciesWithPaths('GIVEN_VIVADO_TCL_ALGEBRAS'))

    self.tcl_bmms = []
    if (len(moduleList.getAllDependencies('GIVEN_XILINX_BMMS')) > 0):
        self.tcl_bmms = moduleList.getAllDependencies('GIVEN_XILINX_BMMS')

    self.tcl_elfs = []
    if (len(moduleList.getAllDependencies('GIVEN_XILINX_ELFS')) > 0):
        self.tcl_elfs = moduleList.getAllDependencies('GIVEN_XILINX_ELFS')

    self.tcl_ag = []

    # Emit area group definitions
    # If we got an area group placement data structure, now is the
    # time to convert it into a new constraint tcl.
    self.area_group_file = moduleList.compileDirectory + '/areagroups.xdc'
    if ('AREA_GROUPS' in moduleList.topModule.moduleDependency):
        self.area_constraints = area_group_tool.AreaConstraints(moduleList)
        self.routeAG = (moduleList.getAWBParam('area_group_tool', 'AREA_GROUPS_ROUTE_AG') != 0)

        # user ucf may be overridden by our area group ucf.  Put our
        # generated ucf first.
        #tcl_defs.insert(0,self.area_group_file)
        def area_group_ucf_closure(moduleList):
            # Builder action: serialize the loaded area constraints as XDC.
            def area_group_ucf(target, source, env):
                self.area_constraints.loadAreaConstraints()
                self.area_constraints.emitConstraintsVivado(self.area_group_file)
            return area_group_ucf

        moduleList.env.Command([self.area_group_file],
                               self.area_constraints.areaConstraintsFile(),
                               area_group_ucf_closure(moduleList))

    synthDepsBase = moduleList.getAllDependencies('GEN_VIVADO_DCPS')

    # We got a stack of synthesis results for the LI modules.  We need
    # to convert these to design checkpoints for the fast place and
    # route flow.
    ngcModules = [module for module in moduleList.synthBoundaries() if not module.liIgnore]

    for module in ngcModules + [moduleList.topModule]:
        # did we get a dcp from the first pass?  If so, did the lim
        # graph give code for this module?  If both are true, then we
        # will link the old ngc in, rather than regenerate it.
        if ((not self.firstPassLIGraph is None) and (module.name in self.firstPassLIGraph.modules) and (self.firstPassLIGraph.modules[module.name].getAttribute('RESYNTHESIZE') is None)):
            if (li_module.linkFirstPassObject(moduleList, module, self.firstPassLIGraph, 'GEN_VIVADO_DCPS', 'GEN_VIVADO_DCPS') is None):
                module.moduleDependency['GEN_VIVADO_DCPS'] = [self.edf_to_dcp(moduleList, module)]
        # it's possible that we got dcp from this compilation
        # pass.  This will happen for the platform modules.
        elif (len(module.getDependencies('GEN_VIVADO_DCPS')) > 0):
            continue
        # we got neither.  therefore, we must create a dcp out of the ngc.
        else:
            module.moduleDependency['GEN_VIVADO_DCPS'] = [self.edf_to_dcp(moduleList, module)]

    synthDeps = moduleList.getAllDependencies('GEN_VIVADO_DCPS')

    postSynthTcl = apm_name + '.physical.tcl'

    topWrapper = moduleList.topModule.wrapperName()

    newTclFile = open(postSynthTcl, 'w')
    newTclFile.write('create_project -force ' + moduleList.apmName + ' ' + moduleList.compileDirectory + ' -part ' + self.part + ' \n')

    # To resolve black boxes, we need to load checkpoints in the
    # following order:
    # 1) topModule
    # 2) platformModule
    # 3) user program, in any order
    userModules = [module for module in moduleList.synthBoundaries() if not module.liIgnore and not module.platformModule]
    platformModules = [module for module in moduleList.synthBoundaries() if not module.liIgnore and module.platformModule]
    checkpointCommands = []

    if (not moduleList.getAWBParamSafe('area_group_tool', 'AREA_GROUPS_ENABLE')):
        # Serial flow: just read every module's checkpoint into the
        # top-level project.
        for module in [moduleList.topModule] + platformModules + userModules:
            dcps.append(module.getDependencies('GEN_VIVADO_DCPS'))
            checkpoint = model.convertDependencies(module.getDependencies('GEN_VIVADO_DCPS'))

            # There should only be one checkpoint here.
            if (len(checkpoint) > 1):
                print "Error too many checkpoints for " + str(module.name) + ": " + str(checkpoint)
                continue

            if (len(checkpoint) == 0):
                print "No checkpoints for " + str(module.name) + ": " + str(checkpoint)
                continue

            newTclFile.write('read_checkpoint ' + checkpoint[0] + '\n')

    # We're attempting the new, parallel flow.
    else:
        # we need to issue seperate place commands.  Therefore, we
        # attempt to place each design

        # There may be some special area groups in platforms -- handle them
        elabAreaConstraints = area_group_tool.AreaConstraints(moduleList)
        elabAreaConstraints.loadAreaConstraintsElaborated()

        #self.area_constraints = area_group_tool.AreaConstraints(moduleList)
        for module in userModules:
            # Did we get a placed module already?
            if ((module.name in elabAreaConstraints.constraints) and ('LIBRARY_DCP' in elabAreaConstraints.constraints[module.name].attributes)):
                # we need to locate the dcp corresponding to this area group.
                candidates = moduleList.getAllDependencies('GIVEN_VIVADO_DCPS')
                for dcpCandidate in moduleList.getAllDependencies('GIVEN_VIVADO_DCPS'):
                    if dcpCandidate.attributes['module'] == module.name:
                        dcp = str(model.modify_path_hw(dcpCandidate))
                        model.dictionary_list_create_append(module.moduleDependency, 'GEN_VIVADO_PLACEMENT_DCPS', dcp)
                        dcps.append(dcp)
            else:
                # No pre-placed library dcp: place it ourselves.
                dcp = self.place_dcp(moduleList, module)
                model.dictionary_list_create_append(module.moduleDependency, 'GEN_VIVADO_PLACEMENT_DCPS', dcp)
                dcps.append(dcp)

        for module in [moduleList.topModule] + platformModules:
            checkpoint = model.convertDependencies(module.getDependencies('GEN_VIVADO_DCPS'))
            dcps.append(checkpoint)

            # There should only be one checkpoint here.
            if (len(checkpoint) > 1):
                print "Error too many checkpoints for " + str(module.name) + ": " + str(checkpoint)
                continue

            if (len(checkpoint) == 0):
                print "No checkpoints for " + str(module.name) + ": " + str(checkpoint)
                continue

            newTclFile.write('read_checkpoint ' + checkpoint[0] + '\n')

        # We can have parent/child relationships in the user modules.
        # Vivado requires that checkpoints be read in topological
        # order.
        def isBlackBox(module):
            # Sort key: black-box area groups (value 1) are read after
            # ordinary modules (value 0).
            if (self.firstPassLIGraph.modules[module.name].getAttribute('BLACK_BOX_AREA_GROUP')):
                return 1
            return 0

        for module in sorted(userModules, key=isBlackBox):
            checkpoint = model.convertDependencies(module.getDependencies('GEN_VIVADO_PLACEMENT_DCPS'))

            # There should only be one checkpoint here.
            if (len(checkpoint) > 1):
                print "Error too many checkpoints for " + str(module.name) + ": " + str(checkpoint)
                continue

            if (len(checkpoint) == 0):
                print "No checkpoints for " + str(module.name) + ": " + str(checkpoint)
                continue

            #read in new checkoutpoint
            newTclFile.write('read_checkpoint ' + checkpoint[0] + '\n')

            emitPlatformAreaGroups = (moduleList.getAWBParam('area_group_tool', 'AREA_GROUPS_GROUP_PLATFORM_CODE') != 0)

            # platformModule refers to the main platform module, not
            # the subordinate device area groups.
            platformModule = (self.firstPassLIGraph.modules[module.name].getAttribute('BLACK_BOX_AREA_GROUP') is None) and (not self.firstPassLIGraph.modules[module.name].getAttribute('PLATFORM_MODULE') is None)

            allowAGPlatform = (not platformModule) or emitPlatformAreaGroups

            emitDeviceGroups = (moduleList.getAWBParam('area_group_tool', 'AREA_GROUPS_PAR_DEVICE_AG') != 0)
            allowAGDevice = (self.firstPassLIGraph.modules[module.name].getAttribute('BLACK_BOX_AREA_GROUP') is None) or emitDeviceGroups

            if (allowAGPlatform and allowAGDevice):
                refName = module.wrapperName()
                lockPlacement = True
                lockRoute = self.routeAG

                # If this is an platform/user-defined area group, the
                # wrapper name may be different.
                if (not self.firstPassLIGraph.modules[module.name].getAttribute('BLACK_BOX_AREA_GROUP') is None):
                    refName = elabAreaConstraints.constraints[module.name].attributes['MODULE_NAME']
                    lockPlacement = not ('NO_PLACE' in elabAreaConstraints.constraints[module.name].attributes) and lockPlacement
                    lockRoute = not ('NO_ROUTE' in elabAreaConstraints.constraints[module.name].attributes) and lockRoute

                # Emit a guarded lock_design snippet; the guard skips
                # modules whose cells were optimized away.
                checkpointCommands.append('if { [llength [get_cells -hier -filter {REF_NAME =~ "' + refName + '"}]] } {\n')
                checkpointCommands.append(' puts "Locking ' + refName + '"\n')
                if (lockRoute):
                    # locking routing requires us to emit an area group. boo.
                    ag_tcl = self.ag_constraints(moduleList, module)
                    self.tcl_ag.append(ag_tcl)
                    checkpointCommands.append(' source ' + str(ag_tcl) + '\n')
                    checkpointCommands.append(' lock_design -level routing [get_cells -hier -filter {REF_NAME =~ "' + refName + '"}]\n')
                elif (lockPlacement):
                    checkpointCommands.append(' lock_design -level placement [get_cells -hier -filter {REF_NAME =~ "' + refName + '"}]\n')
                checkpointCommands.append('}\n')

    given_netlists = [moduleList.env['DEFS']['ROOT_DIR_HW'] + '/' + netlist for netlist in moduleList.getAllDependenciesWithPaths('GIVEN_NGCS') + moduleList.getAllDependenciesWithPaths('GIVEN_EDFS')]

    for netlist in given_netlists:
        newTclFile.write('read_edif ' + netlist + '\n')

    # We have lots of dangling wires (Thanks, Bluespec).  Set the
    # following properties to silence the warnings.
    newTclFile.write("set_property SEVERITY {Warning} [get_drc_checks NSTD-1]\n")
    newTclFile.write("set_property SEVERITY {Warning} [get_drc_checks UCIO-1]\n")

    newTclFile.write("link_design -top " + topWrapper + " -part " + self.part + "\n")
    newTclFile.write("report_utilization -file " + apm_name + ".link.util\n")
    newTclFile.write("write_checkpoint -force " + apm_name + ".link.dcp\n")

    # lock down the area group routing.
    newTclFile.write("\n".join(checkpointCommands) + "\n")

    for elf in self.tcl_elfs:
        newTclFile.write("add_file " + model.modify_path_hw(elf) + "\n")
        newTclFile.write("set_property MEMDATA.ADDR_MAP_CELLS {" + str(elf.attributes['ref']) + "} [get_files " + model.modify_path_hw(elf) + "]\n")

    # We will now attempt to link in any bmm that we might have.
    for bmm in self.tcl_bmms:
        newTclFile.write("add_file " + model.modify_path_hw(bmm) + "\n")
        newTclFile.write("set_property SCOPED_TO_REF " + str(bmm.attributes['ref']) + " [get_files " + model.modify_path_hw(bmm) + "]\n")

    newTclFile.write('set IS_TOP_BUILD 1\n ')
    newTclFile.write('set IS_AREA_GROUP_BUILD 0\n ')
    newTclFile.write('set SYNTH_OBJECT ""\n')
    newTclFile.write('source ' + self.paramTclFile + '\n')

    for tcl_header in self.tcl_headers:
        newTclFile.write('source ' + tcl_header + '\n')

    for tcl_def in self.tcl_defs:
        newTclFile.write('source ' + tcl_def + '\n')

    for tcl_func in self.tcl_funcs:
        newTclFile.write('source ' + tcl_func + '\n')

    for tcl_alg in self.tcl_algs:
        newTclFile.write('source ' + tcl_alg + '\n')

    def dumpPBlockCmd(tgt):
        # One-line Tcl helper call recording pblock utilization at stage tgt.
        return 'dumpPBlockUtilization "' + moduleList.compileDirectory + '/' + tgt + '.util"\n'

    # opt -> place -> phys_opt -> route, with checkpoints and reports
    # after each stage.
    newTclFile.write(dumpPBlockCmd('link'))
    newTclFile.write("report_timing_summary -file " + apm_name + ".map.twr\n\n")
    newTclFile.write("opt_design -directive AddRemap\n")
    newTclFile.write("report_utilization -file " + apm_name + ".opt.util\n")
    newTclFile.write(dumpPBlockCmd('opt'))
    newTclFile.write("write_checkpoint -force " + apm_name + ".opt.dcp\n\n")
    newTclFile.write("place_design -no_drc -directive WLDrivenBlockPlacement\n")
    newTclFile.write(dumpPBlockCmd('place'))
    newTclFile.write("phys_opt_design -directive AggressiveFanoutOpt\n")
    newTclFile.write("write_checkpoint -force " + apm_name + ".map.dcp\n")
    newTclFile.write(dumpPBlockCmd('phyopt'))
    newTclFile.write("report_utilization -file " + apm_name + ".map.util\n\n")
    newTclFile.write("route_design\n")
    newTclFile.write("write_checkpoint -force " + apm_name + ".par.dcp\n")
    newTclFile.write(dumpPBlockCmd('par'))
    newTclFile.write("report_timing_summary -file " + apm_name + ".par.twr\n\n")
    newTclFile.write("report_utilization -hierarchical -file " + apm_name + ".par.util\n")
    newTclFile.write("report_drc -file " + topWrapper + ".drc\n\n")
    newTclFile.write("write_bitstream -force " + apm_name + "_par.bit\n")

    newTclFile.close()

    # generate bitfile
    xilinx_bit = moduleList.env.Command(
        apm_name + '_par.bit',
        synthDeps + self.tcl_algs + self.tcl_defs + self.tcl_funcs + self.tcl_ag + [self.paramTclFile] + dcps + [postSynthTcl],
        ['touch start.txt; vivado -verbose -mode batch -source ' + postSynthTcl + ' -log ' + moduleList.compileDirectory + '/postsynth.log'])

    moduleList.topModule.moduleDependency['BIT'] = [apm_name + '_par.bit']

    # We still need to generate a download script.
    xilinx_loader.LOADER(moduleList)