def main(argv):
    # Get treeish
    if len(argv) != 2:
        print """\
usage: %s <treeish>
where <treeish> is a valid git branch or tag (eg release_1.1.0).""" % (prog_name)
        sys.exit(1)
    treeish = argv[1]

    # Determine root of repo (this repo) on which to operate
    try:
        # First try from cwd
        root, _ = chk_pcores.get_base_pkg( os.getcwd() )
    except ValueError:
        # None found, so use package this script is a part of
        root = os.path.abspath( os.path.join( os.path.dirname(__file__), '..', '..' ) )
    # Add base path to hw_lib_dir and projects_dir

    # Prepare tree
    print '+ cd %s' % root
    os.chdir( root )
    print
    cmd = 'git checkout %s' % treeish
    print '+ %s' % cmd
    if 0 != subprocess.call( cmd.split() ):
        sys.exit(1)
    print

    # Make path list
    pcores_avail = chk_pcores.scan_pcores( os.path.join( 'lib', 'hw' ) )
    pcores_needed = set()
    for project, mhs in chk_pcores.scan_projects( 'projects' ):
        # Include pcores for each project whose hardware directory is included.
        # `project` is of the form <project>/hw, so take off the 'hw' and
        # prepend 'projects' to match against STATIC_PATHLIST.
        if os.path.join( 'projects', os.path.dirname( project ) ) not in STATIC_PATHLIST:
            continue
        for inst in mhstools.instances(mhs):
            paths, errors = chk_pcores.resolve_pcore( pcores_avail, inst.core_name(),
                                                      mhstools.get_parameter( inst, 'HW_VER' ) )
            pcores_needed = pcores_needed.union( paths )
    pathlist = STATIC_PATHLIST + list( pcores_needed )

    # Create tarball
    tarball = os.path.join( '..', TARBALL_NAME % treeish.replace('/','_') )
    gzip = subprocess.Popen( ['gzip'], stdin=subprocess.PIPE, stdout=open( tarball, 'w' ) )
    try:
        cmd = 'git archive --prefix=%s/ %s' % (TAR_PREFIX, treeish)
        print '+ %s (paths) | gzip > %s' % (cmd, tarball)
        rc = subprocess.call( cmd.split() + pathlist, stdout=gzip.stdin )
    except OSError, e:
        if e.errno == os.errno.E2BIG:
            print """\
%s: list of paths (%d in %d bytes) to include in tarball is
%s: too large for this platform (max %d bytes)""" % (prog_name, len(pathlist),
                len(' '.join(pathlist)), prog_name, os.sysconf('SC_ARG_MAX'))
            sys.exit(1)
        else:
            raise e
def do_overrides( index, ent ):
    """
    Perform reset and clock overrides on the specified entity, if required.

    The original instance is retained - but disabled - in the MHS file.  A
    special comment marker flags to UNDO to remove such overridden instances.
    """
    core_name = ent.core_name()
    core_inst = mhstools.get_parameter( ent, 'INSTANCE' )

    subst_required = False
    new_inst = ent.copy()
    for inst_ent in new_inst.inst_ents:
        if inst_ent.kw() != 'PORT':
            continue
        for args_index in range(len(inst_ent.args)):
            port, net = inst_ent.args[args_index]
            if net in opts.xlate:
                inst_ent.args[args_index] = (port, opts.xlate[net])
                subst_required = True

    if subst_required:
        print 'Performing overrides on pcore %s' % core_name
        print ' (instance %s)' % core_inst
        mhstools.set_disabled_flag( ent, True )
        ent.comment = mhstools.DISABLED_FLAG
        mhs.insert( index+1, new_inst )
        print
    return True
def print_project_pcore_version_report( projects_dir, pcores ):
    """
    Scan all projects, resolve pcore instances, and print reports of missing
    and out-of-date pcores.  Also writes out the list of pcores used.
    """
    # Collect data
    errors = {}
    cores_used = {}
    for project, mhs in scan_projects( projects_dir ):
        for inst in mhstools.instances(mhs):
            inst.paths, inst.errors = resolve_pcore( pcores, inst.core_name(),
                                                     mhstools.get_parameter( inst, 'HW_VER' ) )
        cores_used[project] = set( sum( (inst.paths for inst in mhstools.instances(mhs)), [] ) )
        errors[project] = sum( (inst.errors for inst in mhstools.instances(mhs)), [] )

    # Print reports
    print '2. Project missing and out-of-date pcore report:'
    for project, errs in errors.iteritems():
        if errs:
            print "\tProject '%s':\n\t\t%s\n" % (project, '\n\t\t'.join(errs))

    print '3. Project pcore use report: written to %s' % PCORE_MANIFEST
    with open( os.path.expanduser( PCORE_MANIFEST ), 'w' ) as f:
        print >>f, '\n'.join( '%s,%s' % (project, pcore)
                              for project in cores_used
                              for pcore in cores_used[project] )
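# The function above writes PCORE_MANIFEST as plain "project,pcore" lines.
# Below is a minimal sketch - not part of the original tools - of how that
# manifest could be read back.  The helper name load_pcore_manifest is
# hypothetical; only the CSV line format produced above is assumed.
def load_pcore_manifest( path ):
    """ Return a dict mapping each project to the set of pcore paths it uses. """
    cores_used = {}
    with open( os.path.expanduser( path ) ) as f:
        for line in f:
            line = line.strip()
            if not line:
                continue
            project, pcore = line.split( ',', 1 )
            cores_used.setdefault( project, set() ).add( pcore )
    return cores_used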
def subst_mhs(mhs, targets, opts):
    """
    Perform AXI Stream source/sink substitutions
    """

    def get_override(overrides, name, inst):
        """
        Returns override by instance if one exists, failing which, by core,
        failing which, by default, failing which, None.
        """
        if inst in overrides:
            return overrides[inst]
        if name in overrides:
            return overrides[name]
        if "" in overrides:
            return overrides[""]
        return None

    def do_subst(index, ent):
        """
        Perform substitution with nf10_axis_sim_{record,stim}.
        """
        global result
        global result_1
        global result_2

        core_name = ent.core_name()
        core_inst = mhstools.get_parameter(ent, "INSTANCE")
        m_width = mhstools.get_parameter(ent, "C_M_AXIS_DATA_WIDTH")
        s_width = mhstools.get_parameter(ent, "C_S_AXIS_DATA_WIDTH")

        mhstools.set_disabled_flag(ent, True)
        print "Replacing pcore %s (instance %s):" % (core_name, core_inst)

        # Attempt to infer the correct clock and reset nets
        ports = mhstools.get_ents_by_kw(ent, "PORT")
        clock_net = get_override(opts.clocks, core_name, core_inst)
        if clock_net is None:
            clocks = filter(lambda x: clk_re.match(x[0]), ports)
            if len(clocks) == 0:
                print "\terror: failed to infer clock - no candidates found"
                return False
            if len(clocks) > 1:
                print "\terror: failed to infer clock - ambiguous possibilities:"
                print "\t\t%s" % "\n\t\t".join(["%s (net %s)" % x for x in clocks])
                return False
            clock_net = clocks[0][1]
            if clock_net in opts.xlate:
                clock_net = opts.xlate[clock_net]
            print "\tinferred clock net: %s" % clock_net
        else:
            print "\tusing clock net override: %s" % clock_net

        reset_net = get_override(opts.resets, core_name, core_inst)
        if reset_net is None:
            resets = filter(lambda x: rst_re.match(x[0]), ports)
            if len(resets) == 0:
                print "\terror: failed to infer reset - no candidates found"
                return False
            if len(resets) > 1:
                print "\terror: failed to infer reset - ambiguous possibilities:"
                print "\t\t%s" % "\n\t\t".join(["%s (net %s)" % x for x in resets])
                return False
            reset_net = resets[0][1]
            if reset_net in opts.xlate:
                reset_net = opts.xlate[reset_net]
            print "\tinferred reset net: %s" % reset_net
        else:
            print "\tusing reset net override: %s" % reset_net

        # Find any AXI Stream ports, and what they're attached to
        bus_args = mhstools.get_ents_by_kw(ent, "BUS_INTERFACE")
        s_axis_nets = [net for cls, net in filter(lambda av: s_axis_re.match(av[0]), bus_args)]
        s_axis_nets.sort()
        m_axis_nets = [net for cls, net in filter(lambda av: m_axis_re.match(av[0]), bus_args)]
        m_axis_nets.sort()

        # Insert stimulator and recorder cores for AXI Stream nets attached to this entity
        if not s_axis_nets and not m_axis_nets:
            print "\twarning: no AXI Stream nets found, even though core is a candidate\n"
            return True

        if s_axis_nets:
            print "\tnf10_axis_sim_record instance(s) on AXI Stream master net(s):"
            for netno, net in enumerate(s_axis_nets):
                # Attempt to infer correct width parameter
                try:
                    other = mhstools.get_other_inst(mhs, ent, net)
                except mhstools.TooManyError, e:
                    print "error: more than one other instance attached to net %s." % net
                    print " instance name (core name) found:"
                    print "\t\t%s" % "\n\t\t".join(
                        ["%s (%s)" % (mhstools.get_parameter(x, "INSTANCE"), x.core_name()) for x in e.ents]
                    )
                    return False
                if not other:
                    print "\twarning: nothing else attached to net %s" % net
                    return True
                other_width = mhstools.get_parameter(other, "C_M_AXIS_DATA_WIDTH")
                if other_width is not None and s_width is not None and other_width != s_width:
                    print "error: width of attached instance's port disagrees with this instance"
                    print " net %s" % net
                    print " this inst = %s, inst %s = %s" % (
                        s_width,
                        mhstools.get_parameter(other, "INSTANCE"),
                        other_width,
                    )
                    return False
                width = s_width if other_width is None else other_width

                # Perform substitution
                inst_name_log = "record_%s" % net
                result = result + " & " + "record_%s_activity_rec" % net
                axi_file = os.path.join(
                    opts.axi_path,
                    "%s%s_log.axi" % (core_inst, "_%d" % netno if len(m_axis_nets) != 1 else ""),
                )
                insert_recorder(
                    mhs,
                    index + 1,
                    "Replacing core %s (instance %s)" % (core_name, core_inst),
                    inst_name_log,
                    RECORDER_VER,
                    axi_file,
                    width,
                    net,
                    clock_net,
                )
                print "\t\t%s (%s)" % (net, axi_file)

        if m_axis_nets:
            print "\tnf10_axis_sim_stim instance(s) on AXI Stream slave net(s):"
            for netno, net in enumerate(m_axis_nets):
                # Attempt to infer correct width parameter
                try:
                    other = mhstools.get_other_inst(mhs, ent, net)
                except mhstools.TooManyError, e:
                    print "error: more than one other instance attached to net %s." % net
                    print " instance name (core name) found:"
                    print "\t\t%s" % "\n\t\t".join(
                        ["%s (%s)" % (mhstools.get_parameter(x, "INSTANCE"), x.core_name()) for x in e.ents]
                    )
                    return False
                if not other:
                    print "\twarning: nothing else attached to net %s" % net
                    return True
                other_width = mhstools.get_parameter(other, "C_S_AXIS_DATA_WIDTH")
                if other_width is not None and m_width is not None and other_width != m_width:
                    print "error: width of attached instance's port disagrees with this instance"
                    print " net %s" % net
                    print " this inst = %s, inst %s = %s" % (
                        m_width,
                        mhstools.get_parameter(other, "INSTANCE"),
                        other_width,
                    )
                    return False
                width = m_width if other_width is None else other_width

                # Perform substitution
                inst_name = "stim_%s" % net
                result_1 = result_1 + " & " + "stim_%s_activity_stim" % net
                result_2 = result_2 + " & " + "stim_%s_barrier_req" % net
                axi_file = os.path.join(
                    opts.axi_path,
                    "%s%s_stim.axi" % (core_inst, "_%d" % netno if len(m_axis_nets) != 1 else ""),
                )
                insert_stimulator(
                    mhs,
                    index + 1,
                    "Replacing core %s (instance %s)" % (core_name, core_inst),
                    inst_name,
                    STIM_VER,
                    axi_file,
                    width,
                    net,
                    clock_net,
                    reset_net,
                    inst_name_log,
                )
                print "\t\t%s (%s)" % (net, axi_file)
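# Illustrative sketch - not part of the original script - of the lookup order
# applied by get_override() inside subst_mhs() above: a per-instance entry
# wins over a per-core entry, which wins over the default "" entry, and None
# is returned when nothing matches.  The helper below repeats that logic so
# the example stands alone; the dictionary contents are hypothetical.
def _override_demo(overrides, name, inst):
    if inst in overrides:
        return overrides[inst]
    if name in overrides:
        return overrides[name]
    if "" in overrides:
        return overrides[""]
    return None

# For example (hypothetical values):
#   clocks = { 'nf10_input_arbiter_0'    : 'arb_clk',   # per-instance override
#              'nf10_bram_output_queues' : 'oq_clk',    # per-core override
#              ''                        : 'axi_aclk' } # default for everything else
#   _override_demo(clocks, 'nf10_input_arbiter', 'nf10_input_arbiter_0')      -> 'arb_clk'
#   _override_demo(clocks, 'nf10_bram_output_queues', 'oq_0')                 -> 'oq_clk'
#   _override_demo(clocks, 'nf10_router_output_port_lookup', 'opl_0')         -> 'axi_aclk'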