Example 1
def check_board_support(board):
    global args

    chip_part = board.chip_part + ('-' + board.es if board.es
                                   and not args.ignore_eng_sample else '')

    if args.verbose_info:
        msg.log('Checking vendor support for selected board')

    module = importlib.import_module('check_board_support')
    step_func = getattr(module, 'check_board_support')
    step_func(chip_part)
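
# Illustrative sketch (an assumption, not code from the tool): the dynamically imported
# 'check_board_support' module is expected to expose a function of the same name that
# receives the chip part string, e.g. a minimal hypothetical vendor module:
#
#     def check_board_support(chip_part):
#         if chip_part not in ('xc7z020clg484-1',):  # hypothetical supported list
#             msg.error('Unsupported chip part: ' + chip_part)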
Example 2
def gen_wns_report(out_path):
    wns = None
    tns = None
    num_fail = 0
    num_total = 0

    # Check implementation reports path
    rpt_path = project_backend_path + '/' + args.name + '/' + args.name + '.runs/impl_1'
    rpt_path += '/' + args.name + '_design_wrapper_timing_summary_routed.rpt'
    if not os.path.exists(rpt_path):
        msg.warning('Cannot find rpt file. Skipping WNS report')
        return

    with open(rpt_path, 'r') as rpt_file:
        rpt_data = rpt_file.readlines()

        # Search header line
        ids = [
            idx for idx in range(len(rpt_data) - 1)
            if (re.match(r'^\s+WNS\(ns\)\s+TNS\(ns\)\s+', rpt_data[idx]))
        ]
        if len(ids) != 1:
            msg.warning(
                'Cannot find WNS report table header. Skipping WNS report')
            return

        # Get information from the first data row (two lines below the header)
        elems = rpt_data[ids[0] + 2].split()
        wns = float(elems[0])
        tns = float(elems[1])
        num_fail = int(elems[2])
        num_total = int(elems[3])

    msg.log('Worst Negative Slack (WNS) summary')
    if wns >= 0.0:
        msg.success(
            str(num_fail) + ' endpoints of ' + str(num_total) +
            ' have negative slack (WNS: ' + str(wns) + ')')
    else:
        msg.warning(
            str(num_fail) + ' endpoints of ' + str(num_total) +
            ' have negative slack (WNS: ' + str(wns) + ', TNS: ' + str(tns) +
            ')')

    with open(out_path, 'w') as timing_file:
        timing_file.write('WNS ' + str(wns) + '\n')
        timing_file.write('TNS ' + str(tns) + '\n')
        timing_file.write('NUM_ENDPOINTS ' + str(num_total) + '\n')
        timing_file.write('NUM_FAIL_ENDPOINTS ' + str(num_fail))
Example 3
def update_resource_utilization(acc):
    global available_resources
    global used_resources

    report_file = project_backend_path + '/HLS/' + acc.name + '/solution1/syn/report/' + acc.name + '_wrapper_csynth.xml'

    tree = cET.parse(report_file)
    root = tree.getroot()

    for resource in root.find('AreaEstimates').find('AvailableResources'):
        available_resources[resource.tag] = int(resource.text)

    if args.verbose_info:
        res_msg = 'Resources estimation for \'' + acc.name + '\': '
        res_msg += ', '.join(
            sorted(
                map(lambda r: r.tag + ' ' + r.text,
                    list(root.find('AreaEstimates').find('Resources')))))
        msg.log(res_msg)

    depleted_resources = False
    error_message = 'Resource utilization over 100%\nResources estimation summary\n'
    for resource in root.find('AreaEstimates').find('Resources'):
        used_resources[resource.tag] = (int(used_resources.get(resource.tag, 0))
                                        + int(resource.text) * acc.num_instances)
        if used_resources[resource.tag] > available_resources[resource.tag]:
            if available_resources[resource.tag] == 0:
                msg.error(
                    'The HLS code is using resources not available in the selected FPGA'
                )
            utilization_percentage = str(
                round(
                    float(used_resources[resource.tag]) /
                    float(available_resources[resource.tag]) * 100, 2))
            report_string = '{0:<9} {1:>7} used | {2:>7} available - {3:>6}% utilization\n'
            report_string_formatted = report_string.format(
                resource.tag, used_resources[resource.tag],
                available_resources[resource.tag], utilization_percentage)
            error_message += report_string_formatted
            depleted_resources = True

    if not args.disable_utilization_check and depleted_resources:
        msg.error(error_message.rstrip())
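
# Hedged sketch of the HLS csynth XML layout update_resource_utilization assumes; only
# the AreaEstimates/Resources and AreaEstimates/AvailableResources elements are read,
# and the resource tags and values below are illustrative:
#
#     <AreaEstimates>
#       <Resources>
#         <BRAM_18K>4</BRAM_18K>
#         <DSP>12</DSP>
#         <FF>3456</FF>
#         <LUT>5678</LUT>
#       </Resources>
#       <AvailableResources>
#         <BRAM_18K>280</BRAM_18K>
#         <DSP>220</DSP>
#         <FF>106400</FF>
#         <LUT>53200</LUT>
#       </AvailableResources>
#     </AreaEstimates>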
Example 4
def run_boot_step(project_args):
    global start_time
    global project_backend_path
    global petalinux_build_path
    global petalinux_install_path

    start_time = project_args['start_time']
    project_path = project_args['path']
    board = project_args['board']
    args = project_args['args']

    project_backend_path = project_path + '/' + args.backend
    project_step_path = project_backend_path + '/scripts/' + script_folder
    ait_backend_path = ait_path + '/backend/' + args.backend

    petalinux_build_path = os.path.realpath(
        os.getenv('PETALINUX_BUILD')) if os.getenv('PETALINUX_BUILD') else ''
    petalinux_install_path = os.path.realpath(os.getenv(
        'PETALINUX_INSTALL')) if os.getenv('PETALINUX_INSTALL') else ''

    check_requirements()

    # During the execution of this step disable the Vivado_init.tcl script
    disable_init_scripts()

    path_hdf = project_backend_path + '/' + args.name + '/' + args.name + '.sdk/'

    if os.path.exists(petalinux_install_path + '/.version-history'):
        # Seems to be petalinux 2019.1 or later (may match other untested versions)
        command = 'petalinux-config --silentconfig --get-hw-description=' + path_hdf
    else:
        # Seems to be petalinux 2018.3 or previous (may match other untested versions)
        command = 'petalinux-config --oldconfig --get-hw-description=' + path_hdf

    if args.verbose_info:
        msg.log('> ' + command)
    p = subprocess.Popen(command,
                         stdout=sys.stdout.subprocess,
                         stderr=sys.stdout.subprocess,
                         cwd=petalinux_build_path,
                         shell=True)

    if args.verbose:
        for line in iter(p.stdout.readline, b''):
            sys.stdout.write(line.decode('utf-8'))

    retval = p.wait()
    if retval:
        restore_init_scripts()
        msg.error('Generation of petalinux boot files failed', start_time,
                  False)

    if os.path.exists(petalinux_build_path +
                      '/subsystems/linux/configs/device-tree/'):
        # Seems to be petalinux 2016.3 (may match other untested versions)
        if args.verbose_info:
            msg.log('Fixing devicetree (2016.3 mode)')

        petalinux_build_dts_path = petalinux_build_path + '/subsystems/linux/configs/device-tree/'
        shutil.copy2(
            project_backend_path + '/' + args.name + '/pl_ompss_at_fpga.dtsi',
            petalinux_build_dts_path)

        content_dtsi = None
        with open(petalinux_build_dts_path + '/system-conf.dtsi', 'r') as file:
            content_dtsi = file.read().splitlines()

        line = [
            idx for idx in range(len(content_dtsi))
            if content_dtsi[idx].find('/include/ "pl.dtsi"') != -1
        ]
        content_dtsi.insert(line[0] + 1, '/include/ \"pl_ompss_at_fpga.dtsi\"')

        board_dtsi_fix_file = project_backend_path + '/board/' + board.name + '/' + board.name + '_boot.dtsi'
        if os.path.exists(board_dtsi_fix_file):
            shutil.copy2(board_dtsi_fix_file, petalinux_build_dts_path)
            content_dtsi.insert(line[0] + 2,
                                '/include/ \"' + board.name + '_boot.dtsi\"')

        with open(petalinux_build_dts_path + '/system-conf.dtsi', 'w') as file:
            file.write('\n'.join(content_dtsi))

        command = 'petalinux-build -c bootloader -x mrproper'
        if args.verbose_info:
            msg.log('> ' + command)
        p = subprocess.Popen(command,
                             stdout=sys.stdout.subprocess,
                             stderr=sys.stdout.subprocess,
                             cwd=petalinux_build_path,
                             shell=True)
        if args.verbose:
            for line in iter(p.stdout.readline, b''):
                sys.stdout.write(line.decode('utf-8'))

        retval = p.wait()
        if retval:
            restore_init_scripts()
            msg.error('Generation of petalinux boot files failed', start_time,
                      False)
    elif os.path.exists(
            petalinux_build_path +
            '/project-spec/meta-user/recipes-bsp/device-tree/files/'):
        # Seems to be petalinux 2018.3 or 2019.1 (may match other untested versions)
        if args.verbose_info:
            msg.log('Fixing devicetree (2018.3 mode)')

        petalinux_build_dts_path = petalinux_build_path + '/project-spec/meta-user/recipes-bsp/device-tree/files/'

        content_dtsi = None
        with open(petalinux_build_dts_path + '/system-user.dtsi', 'r') as file:
            content_dtsi = file.read().splitlines()

        # Remove old includes of pl_bsc.dtsi (the new include added below is pl_ompss_at_fpga.dtsi)
        line = [
            idx for idx in range(len(content_dtsi))
            if content_dtsi[idx].find('pl_bsc.dtsi') != -1
        ]
        if len(line) == 1:
            content_dtsi.pop(line[0])
        elif len(line) > 1:
            restore_init_scripts()
            msg.error(
                'Uncontrolled path in run_boot_step: more than 1 line of system-user.dtsi contains pl_bsc.dtsi'
            )

        # Remove old includes to pl_ompss_at_fpga.dtsi and insert the new one
        line = [
            idx for idx in range(len(content_dtsi))
            if content_dtsi[idx].find('pl_ompss_at_fpga.dtsi') != -1
        ]
        if len(line) == 1:
            content_dtsi.pop(line[0])
        elif len(line) > 1:
            restore_init_scripts()
            msg.error(
                'Uncontrolled path in run_boot_step: more than 1 line of system-user.dtsi contains pl_ompss_at_fpga.dtsi'
            )

        content_dtsi.insert(
            len(content_dtsi), '/include/ \"' + project_backend_path + '/' +
            args.name + '/pl_ompss_at_fpga.dtsi' + '\"')

        # Remove old includes to <board>_boot.dtsi and insert the new one
        line = [
            idx for idx in range(len(content_dtsi))
            if content_dtsi[idx].find(board.name + '_boot.dtsi') != -1
        ]
        if len(line) == 1:
            content_dtsi.pop(line[0])
        elif len(line) > 1:
            restore_init_scripts()
            msg.error(
                'Uncontrolled path in run_boot_step: more than 1 line of system-user.dtsi contains <board>_boot.dtsi'
            )

        board_dtsi_fix_file = project_backend_path + '/board/' + board.name + '/' + board.name + '_boot.dtsi'
        if os.path.exists(board_dtsi_fix_file):
            shutil.copy2(board_dtsi_fix_file, petalinux_build_dts_path)
            content_dtsi.insert(len(content_dtsi),
                                '/include/ \"' + board_dtsi_fix_file + '\"')

        with open(petalinux_build_dts_path + '/system-user.dtsi', 'w') as file:
            file.write('\n'.join(content_dtsi))
    else:
        msg.warning(
            'Devicetree fix failed. Petalinux version cannot be determined. Continuing anyway...'
        )

    command = 'petalinux-build'
    if args.verbose_info:
        msg.log('> ' + command)
    p = subprocess.Popen(command,
                         stdout=sys.stdout.subprocess,
                         stderr=sys.stdout.subprocess,
                         cwd=petalinux_build_path,
                         shell=True)
    if args.verbose:
        for line in iter(p.stdout.readline, b''):
            sys.stdout.write(line.decode('utf-8'))

    retval = p.wait()
    if retval:
        restore_init_scripts()
        msg.error('Generation of petalinux boot files failed', start_time,
                  False)

    path_bit = project_path + '/' + args.name + '.bit'
    command = 'petalinux-package --force --boot --fsbl ./images/linux/*_fsbl.elf'
    command += ' --fpga ' + path_bit + ' --u-boot ./images/linux/u-boot.elf'
    if args.verbose_info:
        msg.log('> ' + command)
    p = subprocess.Popen(command,
                         stdout=sys.stdout.subprocess,
                         stderr=sys.stdout.subprocess,
                         cwd=petalinux_build_path,
                         shell=True)
    if args.verbose:
        for line in iter(p.stdout.readline, b''):
            sys.stdout.write(line.decode('utf-8'))

    retval = p.wait()
    if retval:
        msg.error('Generation of petalinux boot files failed', start_time,
                  False)
    else:
        shutil.copy2(petalinux_build_path + '/images/linux/BOOT.BIN',
                     project_path)
        shutil.copy2(petalinux_build_path + '/images/linux/image.ub',
                     project_path)
        msg.success('Petalinux boot files generated')

    restore_init_scripts()
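
# Hedged usage sketch: the run_*_step entry points in these examples receive a plain
# dict. The keys read by run_boot_step are 'start_time', 'path', 'board' and 'args';
# a hypothetical caller would do something like:
#
#     run_boot_step({'start_time': time.time(),
#                    'path': '/path/to/project',  # hypothetical project path
#                    'board': board,
#                    'args': args})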
Example 5
def generate_Vivado_variables_tcl():
    global accs
    global args

    vivado_project_variables = '# File automatically generated by the Accelerator Integration Tool. Edit at your own risk.\n' \
                               + '\n' \
                               + '## AIT messages procedures\n' \
                               + '# Error\n' \
                               + 'proc aitError {msg} {\n' \
                               + '   puts "\[AIT\] ERROR: $msg"\n' \
                               + '   exit 1\n' \
                               + '}\n' \
                               + '# Warning\n' \
                               + 'proc aitWarning {msg} {\n' \
                               + '   puts "\[AIT\] WARNING: $msg"\n' \
                               + '}\n' \
                               + '\n' \
                               + '# Info\n' \
                               + 'proc aitInfo {msg} {\n' \
                               + '   puts "\[AIT\] INFO: $msg"\n' \
                               + '}\n' \
                               + '\n' \
                               + '# Log\n' \
                               + 'proc aitLog {msg} {\n' \
                               + '   puts "\[AIT\]: $msg"\n' \
                               + '}\n' \
                               + '\n' \
                               + '# Paths\n' \
                               + 'variable path_Project ' + os.path.relpath(project_backend_path, project_backend_path + '/scripts') + '\n' \
                               + 'variable path_Repo ' + os.path.relpath(project_backend_path + '/HLS/', project_backend_path + '/scripts') + '\n' \
                               + '\n' \
                               + '# Project variables\n' \
                               + 'variable name_Project ' + args.name + '\n' \
                               + 'variable name_Design ' + args.name + '_design\n' \
                               + 'variable target_lang ' + args.target_language + '\n' \
                               + 'variable num_accs ' + str(num_instances) + '\n' \
                               + 'variable num_acc_creators ' + str(num_acc_creators) + '\n' \
                               + 'variable num_jobs ' + str(args.jobs) + '\n' \
                               + 'variable ait_call "' + str(re.escape(os.path.basename(sys.argv[0]) + ' ' + ' '.join(sys.argv[1:]))) + '"\n' \
                               + 'variable bitInfo_note ' + str(re.escape(args.bitinfo_note)) + '\n' \
                               + 'variable version_major_ait ' + str(VERSION_MAJOR) + '\n' \
                               + 'variable version_minor_ait ' + str(VERSION_MINOR) + '\n' \
                               + 'variable version_bitInfo ' + str(BITINFO_VERSION).lower() + '\n' \
                               + 'variable version_wrapper ' + (str(args.wrapper_version).lower() if args.wrapper_version else '0') + '\n' \
                               + '\n' \
                               + '# IP caching variables\n' \
                               + 'variable IP_caching ' + str(not args.disable_IP_caching).lower() + '\n'

    if not args.disable_IP_caching:
        vivado_project_variables += 'variable path_CacheLocation ' + os.path.realpath(args.IP_cache_location) + '\n'

    regslice_all = '0'
    regslice_mem = '0'
    regslice_hwruntime = '0'
    if args.interconnect_regslice is not None:
        for opt in args.interconnect_regslice:
            if opt == 'all':
                regslice_all = '1'
            elif opt == 'mem':
                regslice_mem = '1'
            elif opt == 'hwruntime':
                regslice_hwruntime = '1'

    vivado_project_variables += '\n' \
                                + '# Bitstream variables\n' \
                                + 'variable interconOpt ' + str(args.interconnect_opt + 1) + '\n' \
                                + 'variable debugInterfaces ' + str(args.debug_intfs) + '\n' \
                                + 'variable interconRegSlice_all ' + regslice_all + '\n' \
                                + 'variable interconRegSlice_mem ' + regslice_mem + '\n' \
                                + 'variable interconRegSlice_hwruntime ' + regslice_hwruntime + '\n' \
                                + 'variable interleaving_stride ' + (hex(args.memory_interleaving_stride) if args.memory_interleaving_stride is not None else str(args.memory_interleaving_stride)) + '\n'\
                                + 'variable simplify_interconnection ' + str(args.simplify_interconnection).lower() + '\n' \
                                + 'variable floorplanning_constr ' + str(args.floorplanning_constr) + '\n' \
                                + 'variable slr_slices ' + str(args.slr_slices) + '\n' \
                                + '\n' \
                                + '# ' + board.name + ' board variables\n' \
                                + 'variable board ' + board.name + '\n' \
                                + 'variable chipPart ' + chip_part + '\n' \
                                + 'variable clockFreq ' + str(args.clock) + '\n' \
                                + 'variable arch_device ' + board.arch.device + '\n'

    if args.slr_slices is not None or args.floorplanning_constr is not None:
        vivado_project_variables += 'variable board_slr_num ' + str(board.arch.slr.num) + '\n' \
                                    + 'variable board_slr_master ' + str(board.arch.slr.master) + '\n'

    vivado_project_variables += 'variable address_map [dict create]\n' \
                                + 'dict set address_map "ompss_base_addr" ' + board.address_map.ompss_base_addr + '\n' \
                                + 'dict set address_map "mem_base_addr" ' + board.address_map.mem_base_addr + '\n' \
                                + 'dict set address_map "mem_type" ' + board.mem.type + '\n'

    if board.arch.device == 'zynq' or board.arch.device == 'zynqmp':
        vivado_project_variables += 'dict set address_map "mem_size" ' + hex(decimalFromHumanReadable(board.mem.size)) + '\n'
    elif board.arch.device == 'alveo':
        vivado_project_variables += 'dict set address_map "mem_num_banks" ' + str(board.mem.num_banks) + '\n' \
                                    + 'dict set address_map "mem_bank_size" ' + hex(decimalFromHumanReadable(board.mem.bank_size)) + '\n'

    if board.board_part:
        vivado_project_variables += '\n' \
                                    + 'variable boardPart [list ' + ' '.join(board.board_part) + ']\n'
    vivado_project_variables += '\n' \
                                + '# Hardware Instrumentation variables\n' \
                                + 'variable hwcounter ' + str(args.hwcounter) + '\n' \
                                + 'variable hwinst ' + str(args.hwinst) + '\n'

    vivado_project_variables += '\n' \
                                + '# HW runtime variables\n' \
                                + 'variable hwruntime ' + str(args.hwruntime) + '\n' \
                                + 'variable extended_hwruntime ' + str(args.extended_hwruntime) + '\n' \
                                + 'variable lock_hwruntime ' + str(args.lock_hwruntime) + '\n' \
                                + 'variable cmdInSubqueue_len ' + str(args.cmdin_subqueue_len) + '\n' \
                                + 'variable cmdOutSubqueue_len ' + str(args.cmdout_subqueue_len) + '\n' \
                                + 'variable spawnInQueue_len ' + str(args.spawnin_queue_len) + '\n' \
                                + 'variable spawnOutQueue_len ' + str(args.spawnout_queue_len) + '\n' \
                                + 'variable hwruntime_interconnect ' + str(args.hwruntime_interconnect) + '\n' \
                                + 'variable enable_spawn_queues ' + str(not args.disable_spawn_queues) + '\n'

    vivado_project_variables += '\n' \
                                + '# List of accelerators\n' \
                                + 'set accs [list'

    for acc in accs[0:num_accs]:
        acc_name = str(acc.type) + ':' + str(acc.num_instances) + ':' + acc.name

        vivado_project_variables += ' ' + acc_name

    vivado_project_variables += ']\n'

    # Generate the acc instance list with SLR info
    # Placement info is only needed for register slices; the constraints themselves are dumped into the constraints file
    if (args.slr_slices == 'acc') or (args.slr_slices == 'all'):
        acc_pl_dict = 'set acc_placement [dict create '
        for acc in accs[0:num_accs]:
            acc_pl_dict += ' ' + str(acc.type) + ' [list'
            for slrnum in acc.SLR:
                acc_pl_dict += ' ' + str(slrnum)
            acc_pl_dict += ']'
        acc_pl_dict += ']'
        vivado_project_variables += acc_pl_dict + '\n'

    # Generate acc constraint file
    if (args.floorplanning_constr == 'acc') or (args.floorplanning_constr == 'all'):
        accConstrFiles = open(f'{project_backend_path}/board/{board.name}/constraints/acc_floorplan.xdc', 'w')
        for acc in accs[0:num_accs]:
            # Instantiate each accelerator with a single instance and placement info
            for instanceNumber in range(acc.num_instances):
                accBlock = f'{acc.name}_{instanceNumber}'
                accConstrFiles.write(f'add_cells_to_pblock [get_pblocks slr{acc.SLR[instanceNumber]}_pblock] '
                                     + '[get_cells {'
                                     + f'*/{accBlock}/Adapter_outStream '
                                     + f'*/{accBlock}/Adapter_inStream '
                                     + f'*/{accBlock}/accID '
                                     + f'*/{accBlock}/{acc.name}_ompss'
                                     + '}]\n')
        accConstrFiles.close()

    if args.hwruntime == 'pom':
        picos_args_hash = '{}-{}-{}-{}-{}-{}-{}-{}-{}-{}'.format(
                          args.picos_max_args_per_task,
                          args.picos_max_deps_per_task,
                          args.picos_max_copies_per_task,
                          args.picos_num_dcts,
                          args.picos_tm_size,
                          args.picos_dm_size,
                          args.picos_vm_size,
                          args.picos_dm_ds,
                          args.picos_dm_hash,
                          args.picos_hash_t_size)
        vivado_project_variables += '\n' \
                                    + '# Picos parameter hash\n' \
                                    + 'variable picos_args_hash {}'.format(picos_args_hash)

    if args.datainterfaces_map and os.path.exists(args.datainterfaces_map):
        if args.verbose_info:
            msg.log('Parsing user data interfaces map: ' + args.datainterfaces_map)

        vivado_project_variables += '\n' \
                                    + '# List of datainterfaces map\n' \
                                    + 'set dataInterfaces_map [list'

        with open(args.datainterfaces_map) as map_file:
            map_data = map_file.readlines()
            for map_line in map_data:
                elems = map_line.strip().replace('\n', '').split('\t')
                if len(elems) >= 2 and len(elems[0]) > 0 and elems[0][0] != '#':
                    vivado_project_variables += ' {' + elems[0] + ' ' + elems[1] + '}'

        vivado_project_variables += ']\n'
    elif args.datainterfaces_map:
        msg.error('User data interfaces map not found: ' + args.datainterfaces_map)
    else:
        vivado_project_variables += '\n' \
                                    + '# List of datainterfaces map\n' \
                                    + 'set dataInterfaces_map [list]\n'

    if args.debug_intfs == 'custom' and os.path.exists(args.debug_intfs_list):
        if args.verbose_info:
            msg.log('Parsing user-defined interfaces to debug: ' + args.debug_intfs_list)

        vivado_project_variables += '\n' \
                                    + '# List of debugInterfaces list\n' \
                                    + 'set debugInterfaces_list [list'

        with open(args.debug_intfs_list) as map_file:
            map_data = map_file.readlines()
            for map_line in map_data:
                elems = map_line.strip().replace('\n', '')
                if len(elems) > 0 and elems[0] != '#':
                    vivado_project_variables += ' ' + str(elems)

        vivado_project_variables += ']\n'
    elif args.debug_intfs == 'custom':
        msg.error('User-defined interfaces to debug file not found: ' + args.debug_intfs_list)

    vivado_project_variables_file = open(project_backend_path + '/scripts/projectVariables.tcl', 'w')
    vivado_project_variables_file.write(vivado_project_variables)
    vivado_project_variables_file.close()
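
# Hedged sketch of the data interfaces map file parsed above: tab-separated columns,
# lines whose first field starts with '#' are ignored, and only the first two fields
# are kept for the dataInterfaces_map TCL list (names below are illustrative):
#
#     # interface          port
#     foo_mcxx_in          DDR_0
#     foo_mcxx_out         DDR_1
#
# (the columns above stand for tab-separated fields)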
Example 6
def run_design_step(project_args):
    global args
    global board
    global accs
    global chip_part
    global start_time
    global num_accs
    global num_instances
    global num_acc_creators
    global ait_backend_path
    global project_backend_path

    args = project_args['args']
    board = project_args['board']
    accs = project_args['accs']
    start_time = project_args['start_time']
    num_accs = project_args['num_accs']
    num_instances = project_args['num_instances']
    num_acc_creators = project_args['num_acc_creators']
    project_path = project_args['path']

    chip_part = board.chip_part + ('-' + board.es if (board.es and not args.ignore_eng_sample) else '')
    ait_backend_path = ait_path + '/backend/' + args.backend
    project_backend_path = project_path + '/' + args.backend
    project_step_path = project_backend_path + '/scripts/' + script_folder

    # Check if the requirements are met
    check_requirements()

    # Load accelerator placement info
    load_acc_placement(accs, args)

    # Remove old directories used in the design step
    shutil.rmtree(project_step_path, ignore_errors=True)
    shutil.rmtree(project_backend_path + '/board', ignore_errors=True)
    shutil.rmtree(project_backend_path + '/IPs', ignore_errors=True)
    shutil.rmtree(project_backend_path + '/templates', ignore_errors=True)

    # Create directories and copy necessary files for design step
    shutil.copytree(ait_backend_path + '/scripts/' + script_folder, project_step_path, ignore=shutil.ignore_patterns('*.py*'))
    shutil.copytree(ait_backend_path + '/board/' + board.name, project_backend_path + '/board/' + board.name)

    os.makedirs(project_backend_path + '/IPs')
    os.makedirs(project_backend_path + '/templates')
    shutil.copy2(ait_backend_path + '/templates/dummy_acc.tcl', project_backend_path + '/templates')

    ip_list = [ip for ip in os.listdir(ait_backend_path + '/IPs/') if re.search(r'.*\.(zip|v|vhdl)$', ip)]
    for ip in ip_list:
        shutil.copy2(ait_backend_path + '/IPs/' + ip, project_backend_path + '/IPs')

    for template in glob.glob(ait_backend_path + '/templates/hwruntime/' + args.hwruntime
                              + '/' + ('extended/' if args.extended_hwruntime else '')
                              + '*.tcl'):
        shutil.copy2(template, project_backend_path + '/templates')

    for ipdef in glob.glob(ait_backend_path + '/IPs/hwruntime/' + args.hwruntime + '/*.zip'):
        shutil.copy2(ipdef, project_backend_path + '/IPs')

    if args.memory_interleaving_stride is not None:
        subprocess.check_output([r'sed -i "s/\`undef __ENABLE__/\`define __ENABLE__/" ' + project_backend_path + '/IPs/addrInterleaver.v'], shell=True)
        subprocess.check_output([r'sed -i "s/\`define __WIDTH__ 64/\`define __WIDTH__ ' + str(board.mem.addr_width) + '/" ' + project_backend_path + '/IPs/addrInterleaver.v'], shell=True)

    if args.user_constraints and os.path.exists(args.user_constraints):
        constraints_path = project_backend_path + '/board/' + board.name + '/constraints'
        if not os.path.exists(constraints_path):
            os.mkdir(constraints_path)
        if args.verbose_info:
            msg.log('Adding user constraints file: ' + args.user_constraints)
        shutil.copy2(args.user_constraints, constraints_path + '/')
    elif args.user_constraints:
        msg.error('User constraints file not found: ' + args.user_constraints)

    if args.user_pre_design and os.path.exists(args.user_pre_design):
        user_pre_design_ext = args.user_pre_design.split('.')[-1] if len(args.user_pre_design.split('.')) > 1 else ''
        if user_pre_design_ext != 'tcl':
            msg.error('Invalid extension for PRE design TCL script: ' + args.user_pre_design)
        elif args.verbose_info:
            msg.log('Adding pre design user script: ' + args.user_pre_design)
        shutil.copy2(args.user_pre_design, project_step_path + '/userPreDesign.tcl')
    elif args.user_pre_design:
        msg.error('User PRE design TCL script not found: ' + args.user_pre_design)

    if args.user_post_design and os.path.exists(args.user_post_design):
        user_post_design_ext = args.user_post_design.split('.')[-1] if len(args.user_post_design.split('.')) > 1 else ''
        if user_post_design_ext != 'tcl':
            msg.error('Invalid extension for POST design TCL script: ' + args.user_post_design)
        elif args.verbose_info:
            msg.log('Adding post design user script: ' + args.user_post_design)
        shutil.copy2(args.user_post_design, project_step_path + '/userPostDesign.tcl')
    elif args.user_post_design:
        msg.error('User POST design TCL script not found: ' + args.user_post_design)

    # Generate tcl file with project variables
    generate_Vivado_variables_tcl()

    # Enable beta device on Vivado init script
    if os.path.exists(project_backend_path + '/board/' + board.name + '/board_files'):
        p = subprocess.Popen('echo "enable_beta_device ' + chip_part + '\nset_param board.repoPaths [list '
                             + project_backend_path + '/board/' + board.name + '/board_files]" > '
                             + project_backend_path + '/scripts/Vivado_init.tcl', shell=True)
        retval = p.wait()
    else:
        p = subprocess.Popen('echo "enable_beta_device ' + chip_part + '" > '
                             + project_backend_path + '/scripts/Vivado_init.tcl', shell=True)
        retval = p.wait()

    os.environ['MYVIVADO'] = project_backend_path

    p = subprocess.Popen('vivado -nojournal -nolog -notrace -mode batch -source '
                         + project_step_path + '/generate_design.tcl',
                         cwd=project_backend_path + '/scripts', stdout=sys.stdout.subprocess,
                         stderr=sys.stdout.subprocess, shell=True)

    if args.verbose:
        for line in iter(p.stdout.readline, b''):
            sys.stdout.write(line.decode('utf-8'))

    retval = p.wait()
    del os.environ['MYVIVADO']
    if retval:
        msg.error('Block Design generation failed', start_time, False)
    else:
        msg.success('Block Design generated')

    if (args.hwruntime == 'pom'):
        regex_strings = [
            (r'MAX_ARGS_PER_TASK = [0-9]*', 'MAX_ARGS_PER_TASK = {}'.format(args.picos_max_args_per_task)),
            (r'MAX_DEPS_PER_TASK = [0-9]*', 'MAX_DEPS_PER_TASK = {}'.format(args.picos_max_deps_per_task)),
            (r'MAX_COPIES_PER_TASK = [0-9]*', 'MAX_COPIES_PER_TASK = {}'.format(args.picos_max_copies_per_task)),
            (r'NUM_DCTS = [0-9]*', 'NUM_DCTS = {}'.format(args.picos_num_dcts)),
            (r'TM_SIZE = [0-9]*', 'TM_SIZE = {}'.format(args.picos_tm_size)),
            (r'DM_SIZE = [0-9]*', 'DM_SIZE = {}'.format(args.picos_dm_size)),
            (r'VM_SIZE = [0-9]*', 'VM_SIZE = {}'.format(args.picos_vm_size)),
            (r'DM_DS = "[a-zA-Z_]*"', 'DM_DS = "{}"'.format(args.picos_dm_ds)),
            (r'DM_HASH = "[a-zA-Z_]*"', 'DM_HASH = "{}"'.format(args.picos_dm_hash)),
            (r'HASH_T_SIZE = [0-9]*', 'HASH_T_SIZE = {}'.format(args.picos_hash_t_size))]

        config_file_path = glob.glob(project_backend_path + '/IPs/bsc_ompss_picosompssmanager_*/')[0] + 'src/config.sv'

        with open(config_file_path, 'r') as config_file:
            config_str = config_file.read()
        for regex_str in regex_strings:
            config_str = re.sub(regex_str[0], regex_str[1], config_str, count=1)
        with open(config_file_path, 'w') as config_file:
            config_file.write(config_str)
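
# Hedged sketch of the config.sv lines rewritten by the regexes above; the parameter
# names come from the regex list, while the 'parameter' keyword and the values shown
# are illustrative only:
#
#     parameter MAX_ARGS_PER_TASK = 15,
#     parameter MAX_DEPS_PER_TASK = 8,
#     parameter DM_HASH = "XOR",
#     ...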
Example 7
def run_HLS_step(project_args):
    global args
    global board
    global chip_part
    global start_time
    global num_accs
    global ait_backend_path
    global project_backend_path
    global used_resources
    global available_resources

    args = project_args['args']
    board = project_args['board']
    start_time = project_args['start_time']
    num_accs = project_args['num_accs']
    project_path = project_args['path']
    accs = project_args['accs']

    chip_part = board.chip_part + ('-' + board.es if
                                   (board.es
                                    and not args.ignore_eng_sample) else '')
    ait_backend_path = ait_path + '/backend/' + args.backend
    project_backend_path = project_path + '/' + args.backend
    project_step_path = project_backend_path + '/scripts/' + script_folder

    # Check if the requirements are met
    check_requirements()

    # Remove old directories used in the HLS step
    shutil.rmtree(project_step_path, ignore_errors=True)
    shutil.rmtree(project_backend_path + '/HLS', ignore_errors=True)

    # Create directories and copy necessary files for HLS step
    shutil.copytree(ait_backend_path + '/scripts/' + script_folder,
                    project_step_path,
                    ignore=shutil.ignore_patterns('*.py*'))
    os.makedirs(project_backend_path + '/HLS')

    for hls_file in glob.glob(ait_path + '/backend/' + args.backend +
                              '/HLS/src/hwruntime/' + args.hwruntime +
                              '/*.cpp'):
        acc_file = os.path.basename(hls_file)
        acc_name = os.path.splitext(acc_file)[0]
        acc_aux = Accelerator(0, acc_name, 1, acc_file, hls_file)
        accs.append(acc_aux)
    if args.extended_hwruntime:
        for extended_hls_file in glob.glob(ait_path + '/backend/' +
                                           args.backend +
                                           '/HLS/src/hwruntime/' +
                                           args.hwruntime + '/extended/*.cpp'):
            acc_file = os.path.basename(extended_hls_file)
            acc_name = os.path.splitext(acc_file)[0]
            acc_aux = Accelerator(0, acc_name, 1, acc_file, extended_hls_file)
            accs.append(acc_aux)

    msg.info('Synthesizing ' + str(num_accs) + ' accelerator' +
             ('s' if num_accs > 1 else ''))

    available_resources = dict()
    if args.hwruntime in hwruntime_resources[args.backend]:
        used_resources = hwruntime_resources[args.backend][args.hwruntime][
            args.extended_hwruntime]
    else:
        used_resources = dict()

    for acc in range(0, num_accs):
        synthesize_accelerator(accs[acc])

    if len(accs) > num_accs:
        msg.info('Synthesizing ' + str(len(accs) - num_accs) +
                 ' additional auxiliary IP' +
                 ('s' if len(accs) - num_accs > 1 else ''))

        for acc in range(num_accs, len(accs)):
            synthesize_accelerator(accs[acc])

    resources_file = open(
        project_path + '/' + args.name + '.resources-hls.txt', 'w')
    msg.log('Resources estimation summary')
    for res_name, res_value in sorted(used_resources.items()):
        if res_name in available_resources:
            available = available_resources[res_name]
        else:
            available = 0
        if available > 0:
            utilization_percentage = str(
                round(float(res_value) / float(available) * 100, 2))
            report_string = '{0:<9} {1:>7} used | {2:>7} available - {3:>6}% utilization'
            report_string_formatted = report_string.format(
                res_name, res_value, available, utilization_percentage)
            msg.log(report_string_formatted)
            resources_file.write(report_string_formatted + '\n')
    resources_file.close()
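
# Hedged note: hwruntime_resources is assumed to be a nested dict indexed as
# [backend][hwruntime][extended_hwruntime] that pre-seeds used_resources with the
# hardware runtime's own footprint, e.g. (keys and numbers are illustrative only):
#
#     hwruntime_resources = {
#         'xilinx': {
#             'pom': {
#                 False: {'BRAM_18K': 20, 'FF': 5000, 'LUT': 8000},
#                 True:  {'BRAM_18K': 32, 'FF': 9000, 'LUT': 14000},
#             },
#         },
#     }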
Example 8
def gen_utilization_report(out_path):
    av_resources = {}
    used_resources = {}
    util_resources = {}

    # Check implementation reports path
    rpt_path = project_backend_path + '/' + args.name + '/' + args.name + '.runs/impl_1'
    rpt_path += '/' + args.name + '_design_wrapper_utilization_placed.rpt'
    if not os.path.exists(rpt_path):
        msg.warning(
            'Cannot find rpt file. Skipping bitstream utilization report')
        return

    with open(rpt_path, 'r') as rpt_file:
        rpt_data = rpt_file.readlines()

        # Search LUT/FF section
        # NOTE: Possible section names: Slice Logic, CLB Logic
        ids = [
            idx for idx in range(len(rpt_data) - 1)
            if ((re.match(r'^[0-9]\. ' + 'Slice Logic\n', rpt_data[idx])
                 and rpt_data[idx + 1] == '--------------\n') or (
                     re.match(r'^[0-9]\. ' + 'CLB Logic\n', rpt_data[idx])
                     and rpt_data[idx + 1] == '------------\n'))
        ]
        if len(ids) != 1:
            msg.warning(
                'Cannot find LUT/FF info in rpt file. Skipping bitstream utilization report'
            )
            return

        # Get LUT
        elems = rpt_data[ids[0] + 6].split('|')
        used_resources['LUT'] = elems[2].strip()
        av_resources['LUT'] = elems[4].strip()
        util_resources['LUT'] = elems[5].strip()

        # Get FF
        elems = rpt_data[ids[0] + 11].split('|')
        used_resources['FF'] = elems[2].strip()
        av_resources['FF'] = elems[4].strip()
        util_resources['FF'] = elems[5].strip()

        # Get DSP
        # NOTE: Possible section names: DSP, ARITHMETIC
        ids = [
            idx for idx in range(len(rpt_data) - 1)
            if ((re.match(r'^[0-9]\. ' + 'DSP\n', rpt_data[idx])
                 and rpt_data[idx + 1] == '------\n') or (
                     re.match(r'^[0-9]\. ' + 'ARITHMETIC\n', rpt_data[idx])
                     and rpt_data[idx + 1] == '-------------\n'))
        ]
        if len(ids) != 1:
            msg.warning(
                'Cannot find DSP info in rpt file. Skipping bitstream utilization report'
            )
            return
        elems = rpt_data[ids[0] + 6].split('|')
        used_resources['DSP'] = elems[2].strip()
        av_resources['DSP'] = elems[4].strip()
        util_resources['DSP'] = elems[5].strip()

        # Search BRAM/URAM
        # NOTE: Possible section names: Memory, BLOCKRAM
        ids = [
            idx for idx in range(len(rpt_data) - 1)
            if ((re.match(r'^[0-9]\. ' + 'Memory\n', rpt_data[idx])
                 and rpt_data[idx + 1] == '---------\n') or (
                     re.match(r'^[0-9]\. ' + 'BLOCKRAM\n', rpt_data[idx])
                     and rpt_data[idx + 1] == '-----------\n'))
        ]
        if len(ids) != 1:
            msg.warning(
                'Cannot find BRAM info in rpt file. Skipping bitstream utilization report'
            )
            return

        # BRAM
        elems = rpt_data[ids[0] + 6].split('|')
        used_resources['BRAM'] = str(int(float(elems[2].strip()) * 2))
        av_resources['BRAM'] = str(int(float(elems[4].strip()) * 2))
        util_resources['BRAM'] = elems[5].strip()

        # URAM
        # NOTE: The URAM row is not at the same offset on all boards (search the following lines)
        # NOTE: URAM is not available on all boards, so check whether valid data is found
        ids = [
            idx for idx in range(ids[0] + 6, ids[0] + 20)
            if re.match(r'^\| URAM', rpt_data[idx])
        ]
        for idx in ids:
            elems = rpt_data[idx].split('|')
            if len(elems) >= 6 and elems[1].strip() == 'URAM':
                used_resources['URAM'] = elems[2].strip()
                av_resources['URAM'] = elems[4].strip()
                util_resources['URAM'] = elems[5].strip()
                break

    resources_file = open(out_path, 'w')
    msg.log('Resources utilization summary')
    for name in ['BRAM', 'DSP', 'FF', 'LUT', 'URAM']:
        # Skip resources not present in the report (e.g. URAM on some boards)
        if name not in used_resources:
            continue

        report_string = '{0:<9} {1:>6} used | {2:>6} available - {3:>6}% utilization'
        report_string_formatted = report_string.format(name,
                                                       used_resources[name],
                                                       av_resources[name],
                                                       util_resources[name])
        msg.log(report_string_formatted)
        resources_file.write(report_string_formatted + '\n')
    resources_file.close()
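
# Hedged sketch of the utilization table rows parsed above: each row is split on '|',
# so fields 2, 4 and 5 hold the used, available and utilization figures. The row below
# is illustrative only:
#
#     |   Site Type   |  Used | Fixed | Available | Util% |
#     | CLB LUTs      | 98765 |     0 |    274080 | 36.03 |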
Example 9
def get_accelerators(project_path):
    global accs
    global num_accs
    global num_instances
    global num_acc_creators

    if args.verbose_info:
        msg.log('Searching accelerators in folder: ' + os.getcwd())

    accs = []
    acc_types = []
    acc_names = []
    num_accs = 0
    num_instances = 0
    num_acc_creators = 0
    args.extended_hwruntime = False  # Can't be enabled if no accelerator requires it
    args.lock_hwruntime = False  # Will not be enabled if no accelerator requires it

    for file_ in sorted(glob.glob(os.getcwd() + '/ait_*.json')):
        acc_config_json = json.load(open(file_))
        for acc_config in acc_config_json:
            acc = Accelerator(acc_config)

            if not re.match('^[A-Za-z][A-Za-z0-9_]*$', acc.name):
                msg.error(
                    '\'' + acc.name +
                    '\' is an invalid accelerator name. Must start with a letter and contain only letters, numbers or underscores'
                )

            msg.info('Found accelerator \'' + acc.name + '\'')

            num_accs += 1
            num_instances += acc.num_instances

            if acc.type in acc_types:
                msg.error('Two accelerators use the same type: \'' +
                          str(acc.type) +
                          '\' (maybe you should use the onto clause)')
            elif acc.name in acc_names:
                msg.error(
                    'Two accelerators use the same name: \'' + str(acc.name) +
                    '\' (maybe you should change the fpga task definition)')
            acc_types.append(acc.type)
            acc_names.append(acc.name)

            # Check if the acc is a task creator
            if acc.task_creation:
                args.extended_hwruntime = True
                num_acc_creators += acc.num_instances
                accs.insert(0, acc)
            else:
                accs.append(acc)

            # Check if the acc needs instrumentation support
            if acc.instrumentation:
                args.hwinst = True

            # Check if the acc needs lock support
            if acc.lock:
                args.lock_hwruntime = True

    if num_accs == 0:
        msg.error('No accelerators found')

    # Generate the .xtasks.config file
    xtasks_config_file = open(
        project_path + '/' + args.name + '.xtasks.config', 'w')
    xtasks_config = 'type\t#ins\tname\t    \n'
    for acc in accs:
        xtasks_config += str(acc.type).zfill(19) + '\t' + str(
            acc.num_instances).zfill(3) + '\t' + acc.name.ljust(
                31)[:31] + '\t000\n'
    xtasks_config_file.write(xtasks_config)
    xtasks_config_file.close()

    if args.hwinst:
        hwinst_acc_json_string = json.dumps(
            {
                'full_path': ait_path + '/backend/' + args.backend +
                '/HLS/src/Adapter_instr.cpp',
                'filename': 'Adapter_instr.cpp',
                'name': 'Adapter_instr',
                'type': 0,
                'num_instances': 1,
                'task_creation': 'false',
                'instrumentation': 'false',
                'periodic': 'false',
                'lock': 'false'
            },
            indent=4)
        hwinst_acc_json = json.loads(hwinst_acc_json_string)
        hwinst_acc = Accelerator(hwinst_acc_json)
        accs.append(hwinst_acc)
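
# Hedged sketch of an ait_*.json entry, using the same keys the instrumentation
# accelerator is built with above (values are illustrative only):
#
#     [
#       {
#         "full_path": "/path/to/foo_hls_automatic_mcxx.cpp",
#         "filename": "foo_hls_automatic_mcxx.cpp",
#         "name": "foo",
#         "type": 1234567890,
#         "num_instances": 2,
#         "task_creation": "false",
#         "instrumentation": "false",
#         "periodic": "false",
#         "lock": "false"
#       }
#     ]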
Example 10
def run_synthesis_step(project_args):
    global args
    global board
    global chip_part
    global start_time
    global ait_backend_path
    global project_backend_path

    args = project_args['args']
    board = project_args['board']
    start_time = project_args['start_time']
    project_path = project_args['path']

    chip_part = board.chip_part + ('-' + board.es if
                                   (board.es
                                    and not args.ignore_eng_sample) else '')
    ait_backend_path = ait_path + '/backend/' + args.backend
    project_backend_path = project_path + '/' + args.backend
    project_step_path = project_backend_path + '/scripts/' + script_folder

    # Check if the requirements are met
    check_requirements()

    # Remove old directories used in the synthesis step
    shutil.rmtree(project_step_path, ignore_errors=True)

    # Create directories and copy necessary files for synthesis step
    shutil.copytree(ait_backend_path + '/scripts/' + script_folder,
                    project_step_path,
                    ignore=shutil.ignore_patterns('*.py*'))

    if os.path.isfile(project_backend_path + '/' + args.name + '/' +
                      args.name + '.xpr'):
        # Enable beta device on Vivado init script
        if board.board_part:
            p = subprocess.Popen(
                'echo "enable_beta_device ' + chip_part +
                '\nset_param board.repoPaths [list ' + project_backend_path +
                '/board/' + board.name + '/board_files]" > ' +
                project_backend_path + '/scripts/Vivado_init.tcl',
                shell=True)
            retval = p.wait()
        else:
            p = subprocess.Popen('echo "enable_beta_device ' + chip_part +
                                 '" > ' + project_backend_path +
                                 '/scripts/Vivado_init.tcl',
                                 shell=True)
            retval = p.wait()

        user_id = hex(random.randrange(2**32))
        msg.log('Setting bitstream user id: ' + user_id)
        p = subprocess.Popen('sed -i s/BITSTREAM_USERID/' + user_id + '/ ' +
                             project_backend_path + '/board/' + board.name +
                             '/constraints/basic_constraints.xdc',
                             shell=True)
        retval = p.wait()

        os.environ['MYVIVADO'] = project_backend_path + '/scripts'

        p = subprocess.Popen(
            'vivado -nojournal -nolog -notrace -mode batch -source ' +
            project_step_path + '/synthesize_design.tcl',
            cwd=project_backend_path + '/scripts/',
            stdout=sys.stdout.subprocess,
            stderr=sys.stdout.subprocess,
            shell=True)

        if args.verbose:
            for line in iter(p.stdout.readline, b''):
                sys.stdout.write(line.decode('utf-8'))

        retval = p.wait()
        del os.environ['MYVIVADO']
        if retval:
            msg.error('Hardware synthesis failed', start_time, False)
        else:
            msg.success('Hardware synthesized')
    else:
        msg.error(
            'No Vivado .xpr file exists for the current project. Hardware synthesis failed'
        )
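
# For reference, a hedged sketch of the Vivado_init.tcl generated by the echo command
# above when the board defines a board part (the chip part value is illustrative):
#
#     enable_beta_device xczu9eg-ffvb1156-2-e
#     set_param board.repoPaths [list <project_backend_path>/board/<board_name>/board_files]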