def check_args(self, args):
    """Validate generic command-line argument combinations.

    Reports problems through msg.error() / msg.warning(); does not return
    anything.

    :param args: parsed argument namespace
    """
    # --debug_intfs=custom requires an accompanying interface-list file
    if args.debug_intfs == 'custom' and args.debug_intfs_list is None:
        msg.error('A file specifying which interfaces to mark for debug is required when choosing \'custom\' value on --debug_intfs argument')

    # 'all' is only a valid --interconnect_regslice value when used alone
    regslice_values = args.interconnect_regslice
    if regslice_values is not None:
        for value in regslice_values:
            if value == 'all' and len(regslice_values) != 1:
                msg.error('Invalid combination of values for --interconnect_regslice')

    # Warn when the user requests more parallel Vivado jobs than recommended
    if args.jobs > getNumJobs():
        msg.warning('Using more Vivado jobs ({}) than the recommended default ({}). Performance of the compilation process might be affected'.format(args.jobs, getNumJobs()))
def load_acc_placement(accList, args):
    """Load per-accelerator SLR placement from the user-provided JSON file.

    The JSON maps accelerator names to lists of SLR assignments. Each
    accelerator found in the file gets its ``SLR`` attribute set; missing
    accelerators and length mismatches only produce warnings.

    :param accList: iterable of accelerator objects (need ``name`` and
                    ``num_instances`` attributes; ``SLR`` is written)
    :param args: parsed argument namespace (reads ``placement_file``)
    """
    # Read placement info from file
    if args.placement_file and os.path.exists(args.placement_file):
        # Use a context manager so the file handle is not leaked
        with open(args.placement_file) as placement_fp:
            usrPlacement = json.load(placement_fp)
        for acc in accList:
            if acc.name not in usrPlacement:
                msg.warning('No placement given for acc ' + acc.name)
            else:
                placeList = usrPlacement[acc.name]
                if len(placeList) != acc.num_instances:
                    msg.warning('Placement list does not match number instances, placing only matching instances')
                acc.SLR = placeList
    elif args.placement_file:
        # Fix: the original message printed args.user_constraints, which is
        # a different argument than the one checked here
        msg.error('Placement file not found: ' + args.placement_file)
def check_requirements():
    """Check that the external tools needed by this step are available.

    Errors out when vivado is missing or too old; warns when bootgen is
    missing on zynq devices (only the .bit.bin generation is affected).
    """
    if not distutils.spawn.find_executable('vivado'):
        msg.error('vivado not found. Please set PATH correctly')
    elif board.arch.device == 'zynq' and not distutils.spawn.find_executable('bootgen'):
        msg.warning('bootgen not found. .bit.bin file will not be generated')

    # Extract the installed Vivado version string (e.g. "2020.1")
    version_cmd = 'vivado -version | head -n1 | sed "s/\(Vivado.\+v\)\(\([0-9]\|\.\)\+\).\+/\\2/"'
    raw_version = subprocess.check_output([version_cmd], shell=True)
    vivado_version = str(raw_version, 'utf-8').strip()

    # NOTE(review): lexicographic string comparison — fine for YYYY.N
    # Vivado version strings, would misorder other schemes
    if vivado_version < MIN_VIVADO_VERSION:
        msg.error('Installed Vivado version ({}) not supported (>= {})'.format(vivado_version, MIN_VIVADO_VERSION))
def check_hardware_runtime_args(self, args, num_accs):
    """Validate and derive the hardware runtime queue-length arguments.

    ``--cmd{in,out}_queue_len`` and ``--cmd{in,out}_subqueue_len`` are
    mutually exclusive; when only the total queue length is given, the
    per-accelerator subqueue length is derived from it, and when neither
    is given a default is computed from num_accs.

    :param args: parsed argument namespace (subqueue lengths are written back)
    :param num_accs: number of accelerator instances sharing each queue
    """
    def _floor_power_of_2(value):
        # Largest power of two <= value; values already a power of two
        # are returned unchanged
        if value & (value - 1) != 0:
            value = int(pow(2, int(log2(value))))
        return value

    if args.cmdin_subqueue_len is not None and args.cmdin_queue_len is not None:
        msg.error('--cmdin_subqueue_len and --cmdin_queue_len are mutually exclusive')
    if args.cmdout_subqueue_len is not None and args.cmdout_queue_len is not None:
        msg.error('--cmdout_subqueue_len and --cmdout_queue_len are mutually exclusive')

    if args.cmdin_queue_len is not None:
        # Derive the subqueue length from the total queue length
        args.cmdin_subqueue_len = _floor_power_of_2(int(args.cmdin_queue_len / num_accs))
        msg.info('Setting --cmdin_subqueue_len to {}'.format(args.cmdin_subqueue_len))
    elif args.cmdin_subqueue_len is None:
        args.cmdin_subqueue_len = max(64, _floor_power_of_2(int(1024 / num_accs)))

    if args.cmdout_queue_len is not None:
        args.cmdout_subqueue_len = _floor_power_of_2(int(args.cmdout_queue_len / num_accs))
        msg.info('Setting --cmdout_subqueue_len to {}'.format(args.cmdout_subqueue_len))
    elif args.cmdout_subqueue_len is None:
        args.cmdout_subqueue_len = max(64, _floor_power_of_2(int(1024 / num_accs)))

    # The subqueue length has to be checked here in the case the user provides the cmdin queue length
    if args.cmdin_subqueue_len < 34:
        msg.warning('Value of --cmdin_subqueue_len={} is less than 34, which is the length of the longest command possible. This design might not work with tasks with enough arguments.'.format(args.cmdin_subqueue_len))
    if args.spawnout_queue_len < 79:
        msg.warning('Value of --spawnout_queue_len={} is less than 79, which is the length of the longest task possible. This design might not work if an accelerator creates SMP tasks with enough copies, dependencies and/or arguments.'.format(args.spawnout_queue_len))
def gen_wns_report(out_path):
    """Summarize the Worst Negative Slack (WNS) of the routed design.

    Parses the Vivado impl_1 timing summary report, logs a pass/fail
    summary and writes WNS/TNS/endpoint counts to *out_path*. Skips with
    a warning when the report is missing or cannot be parsed.
    """
    wns = None
    tns = None
    num_fail = 0
    num_total = 0

    # Path of the routed timing summary produced by the implementation run
    rpt_path = project_backend_path + '/' + args.name + '/' + args.name + '.runs/impl_1'
    rpt_path += '/' + args.name + '_design_wrapper_timing_summary_routed.rpt'
    if not os.path.exists(rpt_path):
        msg.warning('Cannot find rpt file. Skipping WNS report')
        return

    with open(rpt_path, 'r') as rpt_file:
        rpt_data = rpt_file.readlines()

    # Locate the summary table header (exactly one occurrence expected)
    header_rows = [
        row for row, text in enumerate(rpt_data[:-1])
        if re.match(r'^\s+WNS\(ns\)\s+TNS\(ns\)\s+', text)
    ]
    if len(header_rows) != 1:
        msg.warning('Cannot find WNS report table header. Skipping WNS report')
        return

    # The first data row sits two lines below the header
    elems = rpt_data[header_rows[0] + 2].split()
    wns = float(elems[0])
    tns = float(elems[1])
    num_fail = int(elems[2])
    num_total = int(elems[3])

    msg.log('Worst Negative Slack (WNS) summary')
    if wns >= 0.0:
        msg.success(str(num_fail) + ' endpoints of ' + str(num_total) + ' have negative slack (WNS: ' + str(wns) + ')')
    else:
        msg.warning(str(num_fail) + ' endpoints of ' + str(num_total) + ' have negative slack (WNS: ' + str(wns) + ', TNS: ' + str(tns) + ')')

    with open(out_path, 'w') as timing_file:
        timing_file.write('WNS ' + str(wns) + '\n')
        timing_file.write('TNS ' + str(tns) + '\n')
        timing_file.write('NUM_ENDPOINTS ' + str(num_total) + '\n')
        timing_file.write('NUM_FAIL_ENDPOINTS ' + str(num_fail))
def run_boot_step(project_args):
    """Generate the petalinux boot files (BOOT.BIN, image.ub) for the project.

    Expects ``project_args`` keys: 'start_time', 'path', 'board', 'args'.
    Relies on the PETALINUX_BUILD / PETALINUX_INSTALL environment variables
    to locate the petalinux build tree and installation. On a petalinux
    failure the Vivado init scripts are restored and msg.error() is raised.
    """
    global start_time
    global project_backend_path
    global petalinux_build_path
    global petalinux_install_path
    start_time = project_args['start_time']
    project_path = project_args['path']
    board = project_args['board']
    args = project_args['args']
    project_backend_path = project_path + '/' + args.backend
    project_step_path = project_backend_path + '/scripts/' + script_folder
    ait_backend_path = ait_path + '/backend/' + args.backend
    # Empty string when the env vars are unset; the os.path.exists() probes
    # below then fail and the version-detection fallbacks kick in
    petalinux_build_path = os.path.realpath(
        os.getenv('PETALINUX_BUILD')) if os.getenv('PETALINUX_BUILD') else ''
    petalinux_install_path = os.path.realpath(os.getenv(
        'PETALINUX_INSTALL')) if os.getenv('PETALINUX_INSTALL') else ''
    check_requirements()
    # During the execution of this step disable the Vivado_init.tcl script
    disable_init_scripts()
    path_hdf = project_backend_path + '/' + args.name + '/' + args.name + '.sdk/'
    # Pick the petalinux-config flavor based on the detected petalinux version
    if os.path.exists(petalinux_install_path + '/.version-history'):
        # Seems to be petalinux 2019.1 or later (may match other untested versions)
        command = 'petalinux-config --silentconfig --get-hw-description=' + path_hdf
    else:
        # Seems to be petalinux 2018.3 or previous (may match other untested versions)
        command = 'petalinux-config --oldconfig --get-hw-description=' + path_hdf
    if args.verbose_info:
        msg.log('> ' + command)
    # NOTE(review): sys.stdout appears to be replaced by a project Logger
    # object whose `.subprocess` attribute selects the child stream — confirm
    p = subprocess.Popen(command,
                         stdout=sys.stdout.subprocess,
                         stderr=sys.stdout.subprocess,
                         cwd=petalinux_build_path,
                         shell=True)
    if args.verbose:
        for line in iter(p.stdout.readline, b''):
            sys.stdout.write(line.decode('utf-8'))
    retval = p.wait()
    if retval:
        restore_init_scripts()
        msg.error('Generation of petalinux boot files failed', start_time, False)
    if os.path.exists(petalinux_build_path + '/subsystems/linux/configs/device-tree/'):
        # Seems to be petalinux 2016.3 (may match other untested versions)
        if args.verbose_info:
            msg.log('Fixing devicetree (2016.3 mode)')
        petalinux_build_dts_path = petalinux_build_path + '/subsystems/linux/configs/device-tree/'
        shutil.copy2(
            project_backend_path + '/' + args.name + '/pl_ompss_at_fpga.dtsi',
            petalinux_build_dts_path)
        content_dtsi = None
        with open(petalinux_build_dts_path + '/system-conf.dtsi', 'r') as file:
            content_dtsi = file.read().splitlines()
        # Insert our includes right after the stock pl.dtsi include
        line = [
            idx for idx in range(len(content_dtsi))
            if content_dtsi[idx].find('/include/ "pl.dtsi"') != -1
        ]
        content_dtsi.insert(line[0] + 1, '/include/ \"pl_ompss_at_fpga.dtsi\"')
        board_dtsi_fix_file = project_backend_path + '/board/' + board.name + '/' + board.name + '_boot.dtsi'
        if os.path.exists(board_dtsi_fix_file):
            shutil.copy2(board_dtsi_fix_file, petalinux_build_dts_path)
            content_dtsi.insert(line[0] + 2,
                                '/include/ \"' + board.name + '_boot.dtsi\"')
        with open(petalinux_build_dts_path + '/system-conf.dtsi', 'w') as file:
            file.write('\n'.join(content_dtsi))
        # Force the bootloader to be rebuilt against the patched devicetree
        command = 'petalinux-build -c bootloader -x mrproper'
        if args.verbose_info:
            msg.log('> ' + command)
        p = subprocess.Popen(command,
                             stdout=sys.stdout.subprocess,
                             stderr=sys.stdout.subprocess,
                             cwd=petalinux_build_path,
                             shell=True)
        if args.verbose:
            for line in iter(p.stdout.readline, b''):
                sys.stdout.write(line.decode('utf-8'))
        retval = p.wait()
        if retval:
            restore_init_scripts()
            msg.error('Generation of petalinux boot files failed', start_time, False)
    elif os.path.exists(
            petalinux_build_path + '/project-spec/meta-user/recipes-bsp/device-tree/files/'):
        # Seems to be petalinux 2018.3 or 2019.1 (may match other untested versions)
        if args.verbose_info:
            msg.log('Fixing devicetree (2018.3 mode)')
        petalinux_build_dts_path = petalinux_build_path + '/project-spec/meta-user/recipes-bsp/device-tree/files/'
        content_dtsi = None
        with open(petalinux_build_dts_path + '/system-user.dtsi', 'r') as file:
            content_dtsi = file.read().splitlines()
        # Remove old includes to pl_bsc.dtsi and insert the new one
        line = [
            idx for idx in range(len(content_dtsi))
            if content_dtsi[idx].find('pl_bsc.dtsi') != -1
        ]
        if len(line) == 1:
            content_dtsi.pop(line[0])
        elif len(line) > 1:
            restore_init_scripts()
            msg.error(
                'Uncontrolled path in run_boot_step: more than 1 line of system-user.dtsi contains pl_bsc.dtsi'
            )
        # Remove old includes to pl_ompss_at_fpga.dtsi and insert the new one
        line = [
            idx for idx in range(len(content_dtsi))
            if content_dtsi[idx].find('pl_ompss_at_fpga.dtsi') != -1
        ]
        if len(line) == 1:
            content_dtsi.pop(line[0])
        elif len(line) > 1:
            restore_init_scripts()
            msg.error(
                'Uncontrolled path in run_boot_step: more than 1 line of system-user.dtsi contains pl_ompss_at_fpga.dtsi'
            )
        # Recompute the (now empty) match list, then append our include at the end
        line = [
            idx for idx in range(len(content_dtsi))
            if content_dtsi[idx].find('pl_ompss_at_fpga.dtsi') != -1
        ]
        content_dtsi.insert(
            len(content_dtsi), '/include/ \"' + project_backend_path + '/'
            + args.name + '/pl_ompss_at_fpga.dtsi' + '\"')
        # Remove old includes to <board>_boot.dtsi and insert the new one
        line = [
            idx for idx in range(len(content_dtsi))
            if content_dtsi[idx].find(board.name + '_boot.dtsi') != -1
        ]
        if len(line) == 1:
            content_dtsi.pop(line[0])
        elif len(line) > 1:
            restore_init_scripts()
            msg.error(
                'Uncontrolled path in run_boot_step: more than 1 line of system-user.dtsi contains <board>_bsc.dtsi'
            )
        board_dtsi_fix_file = project_backend_path + '/board/' + board.name + '/' + board.name + '_boot.dtsi'
        if os.path.exists(board_dtsi_fix_file):
            shutil.copy2(board_dtsi_fix_file, petalinux_build_dts_path)
            content_dtsi.insert(len(content_dtsi),
                                '/include/ \"' + board_dtsi_fix_file + '\"')
        with open(petalinux_build_dts_path + '/system-user.dtsi', 'w') as file:
            file.write('\n'.join(content_dtsi))
    else:
        msg.warning(
            'Devicetree fix failed. Petalinux version cannot be determined. Continuing anyway...'
        )
    # Full petalinux build (kernel, rootfs, ...)
    command = 'petalinux-build'
    if args.verbose_info:
        msg.log('> ' + command)
    p = subprocess.Popen(command,
                         stdout=sys.stdout.subprocess,
                         stderr=sys.stdout.subprocess,
                         cwd=petalinux_build_path,
                         shell=True)
    if args.verbose:
        for line in iter(p.stdout.readline, b''):
            sys.stdout.write(line.decode('utf-8'))
    retval = p.wait()
    if retval:
        restore_init_scripts()
        msg.error('Generation of petalinux boot files failed', start_time, False)
    # Package BOOT.BIN from the FSBL, the project bitstream and u-boot
    path_bit = project_path + '/' + args.name + '.bit'
    command = 'petalinux-package --force --boot --fsbl ./images/linux/*_fsbl.elf'
    command += ' --fpga ' + path_bit + ' --u-boot ./images/linux/u-boot.elf'
    if args.verbose_info:
        msg.log('> ' + command)
    p = subprocess.Popen(command,
                         stdout=sys.stdout.subprocess,
                         stderr=sys.stdout.subprocess,
                         cwd=petalinux_build_path,
                         shell=True)
    if args.verbose:
        for line in iter(p.stdout.readline, b''):
            sys.stdout.write(line.decode('utf-8'))
    retval = p.wait()
    if retval:
        msg.error('Generation of petalinux boot files failed', start_time, False)
    else:
        # Copy the generated boot artifacts next to the project
        shutil.copy2(petalinux_build_path + '/images/linux/BOOT.BIN', project_path)
        shutil.copy2(petalinux_build_path + '/images/linux/image.ub', project_path)
        msg.success('Petalinux boot files generated')
    restore_init_scripts()
def __call__(self, parser, namespace, values, option_string=None):
    # argparse Action hook for a renamed/changed argument: warn the user
    # and point them at the current usage instead of silently accepting it
    option_names = '/'.join(self.option_strings)
    msg.warning('Argument ' + option_names + ' has changed. Check `ait -h` and fix your program call')
def gen_utilization_report(out_path):
    """Parse the placed utilization report and write a resource summary.

    Extracts LUT/FF/DSP/BRAM (and URAM when present) usage from the Vivado
    impl_1 utilization report, logs it and writes it to *out_path*. Skips
    with a warning when the report is missing or cannot be parsed.
    """
    av_resources = {}
    used_resources = {}
    util_resources = {}
    # Check implementation reports path
    rpt_path = project_backend_path + '/' + args.name + '/' + args.name + '.runs/impl_1'
    rpt_path += '/' + args.name + '_design_wrapper_utilization_placed.rpt'
    if not os.path.exists(rpt_path):
        msg.warning(
            'Cannot find rpt file. Skipping bitstream utilization report')
        return
    with open(rpt_path, 'r') as rpt_file:
        rpt_data = rpt_file.readlines()
    # Search LUT/FF section
    # NOTE: Possible section names: Slice Logic, CLB Logic
    ids = [
        idx for idx in range(len(rpt_data) - 1)
        if ((re.match(r'^[0-9]\. ' + 'Slice Logic\n', rpt_data[idx])
             and rpt_data[idx + 1] == '--------------\n') or (
                 re.match(r'^[0-9]\. ' + 'CLB Logic\n', rpt_data[idx])
                 and rpt_data[idx + 1] == '------------\n'))
    ]
    if len(ids) != 1:
        msg.warning(
            'Cannot find LUT/FF info in rpt file. Skipping bitstream utilization report'
        )
        return
    # Get LUT (fixed row offset within the section table)
    elems = rpt_data[ids[0] + 6].split('|')
    used_resources['LUT'] = elems[2].strip()
    av_resources['LUT'] = elems[4].strip()
    util_resources['LUT'] = elems[5].strip()
    # Get FF
    elems = rpt_data[ids[0] + 11].split('|')
    used_resources['FF'] = elems[2].strip()
    av_resources['FF'] = elems[4].strip()
    util_resources['FF'] = elems[5].strip()
    # Get DSP
    # NOTE: Possible section names: DSP, ARITHMETIC
    ids = [
        idx for idx in range(len(rpt_data) - 1)
        if ((re.match(r'^[0-9]\. ' + 'DSP\n', rpt_data[idx])
             and rpt_data[idx + 1] == '------\n') or (
                 re.match(r'^[0-9]\. ' + 'ARITHMETIC\n', rpt_data[idx])
                 and rpt_data[idx + 1] == '-------------\n'))
    ]
    if len(ids) != 1:
        msg.warning(
            'Cannot find DSP info in rpt file. Skipping bitstream utilization report'
        )
        return
    elems = rpt_data[ids[0] + 6].split('|')
    used_resources['DSP'] = elems[2].strip()
    av_resources['DSP'] = elems[4].strip()
    util_resources['DSP'] = elems[5].strip()
    # Search BRAM/URAM
    # NOTE: Possible section names: Memory, BLOCKRAM
    ids = [
        idx for idx in range(len(rpt_data) - 1)
        if ((re.match(r'^[0-9]\. ' + 'Memory\n', rpt_data[idx])
             and rpt_data[idx + 1] == '---------\n') or (
                 re.match(r'^[0-9]\. ' + 'BLOCKRAM\n', rpt_data[idx])
                 and rpt_data[idx + 1] == '-----------\n'))
    ]
    if len(ids) != 1:
        msg.warning(
            'Cannot find BRAM info in rpt file. Skipping bitstream utilization report'
        )
        return
    # BRAM (report counts 36Kb tiles; convert to 18Kb blocks, hence *2)
    elems = rpt_data[ids[0] + 6].split('|')
    used_resources['BRAM'] = str(int(float(elems[2].strip()) * 2))
    av_resources['BRAM'] = str(int(float(elems[4].strip()) * 2))
    util_resources['BRAM'] = elems[5].strip()
    # URAM
    # NOTE: It is not placed in the same offset for all boards (search in some lines)
    # NOTE: It is not available in all boards, so check if valid data is found
    # Fix: the original pattern r'^| URAM' contained an unescaped '|'
    # (alternation with the empty string), so it matched every line; escape it
    # so only actual URAM table rows are scanned. The scan window is also
    # clamped to the report length to avoid an IndexError on short reports.
    ids = [
        idx for idx in range(ids[0] + 6, min(ids[0] + 20, len(rpt_data)))
        if re.match(r'^\| URAM', rpt_data[idx])
    ]
    for idx in ids:
        elems = rpt_data[idx].split('|')
        if len(elems) >= 6 and elems[1].strip() == 'URAM':
            used_resources['URAM'] = elems[2].strip()
            av_resources['URAM'] = elems[4].strip()
            util_resources['URAM'] = elems[5].strip()
            break
    # Use a context manager so the output file is closed even on error
    with open(out_path, 'w') as resources_file:
        msg.log('Resources utilization summary')
        for name in ['BRAM', 'DSP', 'FF', 'LUT', 'URAM']:
            # Check if resource is available
            if name not in used_resources:
                continue
            report_string = '{0:<9} {1:>6} used | {2:>6} available - {3:>6}% utilization'
            report_string_formatted = report_string.format(
                name, used_resources[name], av_resources[name],
                util_resources[name])
            msg.log(report_string_formatted)
            resources_file.write(report_string_formatted + '\n')
def run_bitstream_step(project_args):
    """Run Vivado to generate the bitstream for the integrated design.

    Expects ``project_args`` keys: 'args', 'board', 'start_time', 'path'.
    Copies the resulting .bit/.bin (and .bit.bin on zynq, via bootgen) next
    to the project and emits utilization and WNS reports.
    """
    global args
    global board
    global chip_part
    global start_time
    global ait_backend_path
    global project_backend_path
    args = project_args['args']
    board = project_args['board']
    start_time = project_args['start_time']
    project_path = project_args['path']
    # Append the engineering-sample suffix unless explicitly ignored
    chip_part = board.chip_part + ('-' + board.es if (
        board.es and not args.ignore_eng_sample) else '')
    ait_backend_path = ait_path + '/backend/' + args.backend
    project_backend_path = project_path + '/' + args.backend
    project_step_path = project_backend_path + '/scripts/' + script_folder
    # Check if the requirements are met
    check_requirements()
    # Remove old directories used on the bitstream step
    shutil.rmtree(project_step_path, ignore_errors=True)
    # Create directories and copy necessary files for bitstream step
    shutil.copytree(ait_backend_path + '/scripts/' + script_folder,
                    project_step_path,
                    ignore=shutil.ignore_patterns('*.py*'))
    if os.path.isfile(project_backend_path + '/' + args.name + '/' + args.name + '.xpr'):
        # Enable beta device on Vivado init script
        if board.board_part:
            # Also register the board files repository when a board part is defined
            p = subprocess.Popen(
                'echo "enable_beta_device ' + chip_part
                + '\nset_param board.repoPaths [list ' + project_backend_path
                + '/board/' + board.name + '/board_files]" > '
                + project_backend_path + '/scripts/Vivado_init.tcl',
                shell=True)
            retval = p.wait()
        else:
            p = subprocess.Popen('echo "enable_beta_device ' + chip_part
                                 + '" > ' + project_backend_path
                                 + '/scripts/Vivado_init.tcl',
                                 shell=True)
            retval = p.wait()
        # MYVIVADO makes Vivado pick up the generated init script
        os.environ['MYVIVADO'] = project_backend_path + '/scripts'
        p = subprocess.Popen(
            'vivado -nojournal -nolog -notrace -mode batch -source '
            + project_step_path + '/generate_bitstream.tcl',
            cwd=project_backend_path + '/scripts/',
            stdout=sys.stdout.subprocess,
            stderr=sys.stdout.subprocess,
            shell=True)
        if args.verbose:
            for line in iter(p.stdout.readline, b''):
                sys.stdout.write(line.decode('utf-8'))
        retval = p.wait()
        del os.environ['MYVIVADO']
        if retval:
            msg.error('Bitstream generation failed', start_time, False)
        else:
            if board.arch.device == 'zynq':
                # Build the .bif descriptor bootgen needs to produce .bit.bin
                bif_file = open(
                    project_backend_path + '/' + args.name + '/' + args.name
                    + '.runs/impl_1/bitstream.bif', 'w')
                bif_file.write('all:\n' + '{\n' + '\t' + args.name
                               + '_design_wrapper.bit\n' + '}')
                bif_file.close()
                p = subprocess.Popen(
                    'bootgen -image bitstream.bif -arch zynq -process_bitstream bin -w',
                    cwd=project_backend_path + '/' + args.name + '/'
                    + args.name + '.runs/impl_1',
                    stdout=sys.stdout.subprocess,
                    stderr=sys.stdout.subprocess,
                    shell=True)
                if args.verbose:
                    for line in iter(p.stdout.readline, b''):
                        sys.stdout.write(line.decode('utf-8'))
                retval = p.wait()
                if retval:
                    msg.warning('Could not create .bit.bin file')
                else:
                    shutil.copy2(
                        glob.glob(project_backend_path + '/' + args.name + '/'
                                  + args.name + '.runs/impl_1/' + args.name
                                  + '*.bit.bin')[0],
                        project_path + '/' + args.name + '.bit.bin')
            # Copy the bitstream artifacts next to the project
            shutil.copy2(
                glob.glob(project_backend_path + '/' + args.name + '/'
                          + args.name + '.runs/impl_1/' + args.name
                          + '*.bit')[0],
                project_path + '/' + args.name + '.bit')
            shutil.copy2(
                glob.glob(project_backend_path + '/' + args.name + '/'
                          + args.name + '.runs/impl_1/' + args.name
                          + '*.bin')[0],
                project_path + '/' + args.name + '.bin')
            # Summarize resource utilization and timing of the implementation
            gen_utilization_report(project_path + '/' + args.name + '.resources-impl.txt')
            gen_wns_report(project_path + '/' + args.name + '.timing-impl.txt')
            msg.success('Bitstream generated')
    else:
        msg.error(
            'No Vivado .xpr file exists for the current project. Bitstream generation failed'
        )
def ait_main():
    """Entry point: parse arguments, validate them, and run the generation steps.

    Loads the board description for the selected backend, validates the
    clock/placement arguments, then dynamically imports and executes every
    generation step between --from_step and --to_step.
    """
    global args
    start_time = time.time()
    args = None
    parser = ArgParser()
    args = parser.parse_args()
    msg.setProjectName(args.name)
    msg.setPrintTime(args.verbose_info)
    msg.setVerbose(args.verbose)
    msg.info('Using ' + args.backend + ' backend')
    # Board description for the selected backend/board combination
    board = json.load(open(ait_path + '/backend/' + args.backend + '/board/'
                           + args.board + '/basic_info.json'),
                      object_hook=JSONObject)
    # Check vendor-related board arguments
    parser.vendor_parser[args.backend].check_board_args(args, board)
    if not int(board.frequency.min) <= args.clock <= int(board.frequency.max):
        msg.error('Clock frequency requested (' + str(args.clock)
                  + 'MHz) is not within the board range ('
                  + str(board.frequency.min) + '-' + str(board.frequency.max)
                  + 'MHz)')
    # SLR placement options only make sense on multi-SLR devices
    if (args.slr_slices is not None or args.floorplanning_constr is not None) and not hasattr(board.arch, 'slr'):
        msg.error(
            'Use of placement constraints is only available for boards with SLRs'
        )
    project_path = os.path.normpath(
        os.path.realpath(args.dir + '/' + args.name + '_ait'))
    project_backend_path = os.path.normpath(project_path + '/' + args.backend)
    # Add backend to python import path
    sys.path.insert(0, ait_path + '/backend/' + args.backend + '/scripts')
    # Check for backend support for the given board
    if not args.disable_board_support_check:
        check_board_support(board)
    # Tee stdout into the project log file
    sys.stdout = Logger(project_path)
    sys.stdout.log.write(
        os.path.basename(sys.argv[0]) + ' ' + ' '.join(sys.argv[1:]) + '\n\n')
    # NOTE(review): num_accs/num_instances/num_acc_creators/accs appear to be
    # module globals populated by get_accelerators() — confirm
    get_accelerators(project_path)
    parser.check_hardware_runtime_args(args, max(2, num_instances))
    project_args = {
        'path': os.path.normpath(
            os.path.realpath(args.dir) + '/' + args.name + '_ait'),
        'num_accs': num_accs,
        'num_instances': num_instances,
        'num_acc_creators': num_acc_creators,
        'accs': accs,
        'board': board,
        'args': args
    }
    # Run every step whose position falls within [--from_step, --to_step]
    for step in generation_steps[args.backend]:
        if generation_steps[args.backend].index(
                args.from_step) <= generation_steps[args.backend].index(
                    step) <= generation_steps[args.backend].index(
                        args.to_step):
            # Step modules live in directories named '<order>-<step>'
            generation_step_package = os.path.basename(
                os.path.dirname(
                    glob.glob(ait_path + '/backend/' + args.backend
                              + '/scripts/*-' + step + '/')[0]))
            generation_step_module = '%s.%s' % (generation_step_package, step)
            module = importlib.import_module(generation_step_module)
            # Each step module exposes its entry point as STEP_FUNC
            step_func = getattr(module, 'STEP_FUNC')
            msg.info('Starting \'' + step + '\' step')
            step_start_time = time.time()
            project_args['start_time'] = step_start_time
            step_func(project_args)
            msg.success(
                'Step \'' + step + '\' finished. '
                + secondsToHumanReadable(int(time.time() - step_start_time))
                + ' elapsed')
        else:
            msg.warning('Step \'' + step + '\' is disabled')
    msg.success('Accelerator automatic integration finished. '
                + secondsToHumanReadable(int(time.time() - start_time))
                + ' elapsed')