Example #1
def graph_cp(conf, dur, output_dirname):
    # Plotting imports hoisted out of the loop below; select the
    # non-interactive Agg backend before pyplot is imported so figures
    # render headless.
    import matplotlib
    matplotlib.use('Agg')
    import matplotlib.pyplot as plt
    import matplotlib.dates as mdates

    common.mkdir(output_dirname)
    length = config.str2dur(dur)
    dirname = conf.get("changepoint", "temp_cp_data")
    cpd = ChangePointData(dirname)
    cpd.load()
    cpd_top_dt, cpd_end_dt = cpd.term()
    top_dt = cpd_end_dt - length
    if top_dt < cpd.term()[0]:
        top_dt = cpd.term()[0]
    end_dt = cpd_end_dt

    for evdef in cpd.iter_evdef():
        fn = "{0}_{1}.pdf".format(evdef.host, evdef.gid)
        ret = cpd.get(evdef, top_dt, end_dt)
        if ret is None:
            continue
        l_label, l_data, l_score = zip(*ret)

        fig = plt.figure()
        ax = fig.add_subplot(111)
        ax.plot(l_label, l_data, "r")
        ax2 = ax.twinx()
        ax2.plot(l_label, l_score)
        days = mdates.WeekdayLocator()
        daysFmt = mdates.DateFormatter('%m-%d')
        ax.xaxis.set_major_locator(days)
        ax.xaxis.set_major_formatter(daysFmt)
        plt.savefig(output_dirname + "/" + fn)
        plt.close()
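The Agg backend and the weekday locator/formatter pairing above are standard matplotlib; here is a minimal self-contained sketch of the same dual-axis pattern, with synthetic data and a hypothetical output path standing in for the real change-point series:

import datetime

import matplotlib
matplotlib.use('Agg')  # select the headless backend before importing pyplot
import matplotlib.dates as mdates
import matplotlib.pyplot as plt

# Synthetic daily series standing in for l_label / l_data / l_score.
dates = [datetime.date(2024, 1, 1) + datetime.timedelta(days=i) for i in range(30)]
data = [i % 7 for i in range(30)]
score = [i * 0.1 for i in range(30)]

fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(dates, data, "r")    # first series on the left y-axis
ax2 = ax.twinx()             # second axis sharing the same x-axis
ax2.plot(dates, score)       # second series on the right y-axis
ax.xaxis.set_major_locator(mdates.WeekdayLocator())          # one tick per week
ax.xaxis.set_major_formatter(mdates.DateFormatter('%m-%d'))  # month-day labels
plt.savefig('cp_example.pdf')  # hypothetical output path
plt.close()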
Example #2
 def copy_files(self, build_type, lib_names=None):
     """Copy all source directories and source files listed at the <tool_name>_copy_files key. The build_type selects the <tool_name>_copy_files key using the
        tool_name_<build_type> key value from the hdltool_<toolset>.cfg.
        The <tool_name>_copy_files key expects a source and a destination pair per listed directory or file:
        
        - The sources need to be specified with an absolute path or relative to the HDL library source directory where the hdllib.cfg is stored
        - The destinations need to be specified with an absolute path or relative to the HDL library build directory where the project file (e.g. mpf, qpf) gets stored
        
        Arguments:
        - lib_names      : one or more HDL libraries
     """
     if lib_names is None: lib_names = self.lib_names
     lib_dicts = self.libs.get_dicts(key='hdl_lib_name', values=lib_names)
     tool_name_key = 'tool_name_' + build_type
     tool_name_value = self.tool_dict[tool_name_key]
     tool_name_copy_key = tool_name_value + '_copy_files'
     for lib_dict in cm.listify(lib_dicts):
         if tool_name_copy_key in lib_dict:
             lib_path = self.libs.get_filePath(lib_dict)
             build_dir_path = self.get_lib_build_dirs(build_type, lib_dicts=lib_dict)
             cm.mkdir(build_dir_path)
             key_values = lib_dict[tool_name_copy_key].split()
             sources = key_values[0::2]
             destinations = key_values[1::2]
             file_io = zip(sources, destinations)
             for fpn_io in file_io:
                 sourcePathName = cm.expand_file_path_name(fpn_io[0], lib_path)
                 destinationPath = cm.expand_file_path_name(fpn_io[1], build_dir_path)
                 if os.path.isfile(sourcePathName):
                     shutil.copy(sourcePathName, destinationPath)     # copy file
                 else:
                     copy_tree(sourcePathName, destinationPath)       # copy directory tree (will create new destinationPath directory)
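The slicing above assumes an even-length, whitespace-separated key value; a minimal sketch with a hypothetical <tool_name>_copy_files value showing how it pairs up:

# Hypothetical key value: alternating source/destination entries.
key_value = 'data/coefs.dat . ip/src_dir build/ip'

key_values = key_value.split()
sources = key_values[0::2]       # even indices: ['data/coefs.dat', 'ip/src_dir']
destinations = key_values[1::2]  # odd indices:  ['.', 'build/ip']
for src, dst in zip(sources, destinations):
    print(src, '->', dst)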
Example #3
 def write_season_file(self, season, chrome):
     """ Update single season """
     print 'Getting episodes...',
     eps = self.get_ep_list(season, detail=True)
     root = E('root')
     print 'Parsing episodes...',
     for ep in eps:
         print ep.num,
         srcs = []
         try:
             for src in EpPage(ep.url).iframe_srcs:
                 try:
                     p = SourcePage(src, chrome)
                     if not ep.duration:
                         ep.duration = str(p.duration)
                     if not ep.img_src:
                         ep.img_src = p.img_src
                     srcs.append(dict(url=p.mp4_url, bitrate=get_bitrate(p.url)))
                 except (requests.exceptions.Timeout, TimeoutException):
                     pass
         except (requests.exceptions.Timeout, TimeoutException):
             pass
         except NoSourceError:
             pass
         if len(srcs) > 0:
             ep_xml = ep.to_xml()
             for src in srcs:
                 ep_xml.append(E('source', url=src['url'], bitrate=str(src['bitrate'])))
             root.append(ep_xml)
         else:
             print '(no source found)',
     print 'OK'
     mkdir(self.get_local_xml_dir())
     root.getroottree().write(self.get_local_xml_file(season))
Example #4
 def create_quartus_project_file(self, lib_names=None):
     """Create the Quartus project file (QPF) for all HDL libraries that have a toplevel entity key synth_top_level_entity.
     
        Note:
         . By default, if the synth_top_level_entity key is defined but left empty, the top level entity has the same name as the lib_name in hdl_lib_name.
           Otherwise synth_top_level_entity can specify another top level entity name in the library. Each HDL library can have only one Quartus project
           file.
        . The project revision has the same name as the lib_name and will result in a <lib_name>.sof FPGA image file. 
        . For each additional revision a subdirectory can be used. 
          This subdirectory can be named 'revisions/' and lists a number of revisions as subdirectories. Each revision will have a separate hdllib.cfg file and a 
          .vhd file with the toplevel entity. The toplevel .vhd file specifies the <g_design_name> for the revision in the generics. 
     
        Arguments:
        - lib_names      : one or more HDL libraries
     """
     if lib_names is None: lib_names = self.lib_names
     lib_dicts = self.libs.get_dicts(key='hdl_lib_name', values=lib_names)
     syn_dicts = self.libs.get_dicts(key='synth_top_level_entity', values=None, dicts=lib_dicts)
     for syn_dict in cm.listify(syn_dicts):
         # Open qpf for each HDL library that has a synth_top_level_entity
         lib_name = syn_dict['hdl_lib_name']
         qpf_name = lib_name + '.qpf'
         qpf_path = self.get_lib_build_dirs('synth', lib_dicts=syn_dict)
         cm.mkdir(qpf_path)
         qpfPathName = cm.expand_file_path_name(qpf_name, qpf_path)
         with open(qpfPathName, 'w') as fp:
             fp.write('PROJECT_REVISION = "%s"\n' % lib_name)
Example #5
def main(options):
    server_version = options.server_version
    scanner_version = options.scanner_version
    src_of_the_plugins = options.plugins_folder
    src_of_the_project = options.projects_folder
    noa = options.number_of_attempts
    wait = options.wait
    system = platform.system()
    dst = options.client_folder
    print_log_files = options.print_log
    common.mkdir(dst)

    # 0, a) Try to build the plugins with 'build.py'

    if system == 'Windows':
        common.run_cmd('py', ['-3', 'build.py', '--all'])
    elif system == 'Linux':
        common.run_cmd('python3', ['tools/build.py', '--all'])

    if options.init:
        # 0, b) download sonar-server

        download_sq_server(server_version, dst)

        # 1) download sonar-scanner

        download_sq_scanner(scanner_version, system, dst)

        # 2) unzip both server and scanner

        src = os.path.join(dst, 'sonarqube-%s.zip' % server_version)
        unzip(src, dst)
        if 'Windows' == system:
            src = os.path.join(dst, 'sonar-scanner-cli-%s-windows.zip' % scanner_version)
        elif 'Linux' == system:
            src = os.path.join(dst, 'sonar-scanner-cli-%s-linux.zip' % scanner_version)
        unzip(src, dst)

    # 3) copy the plugins into the server dir

    path = [dst, 'sonarqube-%s' % server_version, 'extensions', 'plugins']
    path = os.path.join(*path)
    copy_all_files_from_folder(src_of_the_plugins, path)

    # 4) start the server with the default config

    start_sq_server(server_version, system, dst)

    # 5) Validate that the server started successfully
    # 6) Analyze the given project

    sleep(60)
    if validate_running_of_sq_server(server_version, noa, wait):
        print('SonarQube started properly!')
    else:
        print('SonarQube did not start in time (-noa=%s (number of attempts))' % noa)
        if print_log_files:
            print_log(server_version, dst)
        exit(1)
Example #6
def galaxy_install(install_path):
	roles_path = os.path.join(install_path, 'roles')
	common.mkdir(roles_path)
	common.run('ansible-galaxy install -f -r "%s" -p "%s"' % (os.path.join(install_path, 'requirements.yml'), common.roles_path))
	if os.path.exists(custom_requirements_path):
		if not os.path.exists(custom_roles_path):
			common.mkdir(custom_roles_path)
		common.run('ansible-galaxy install -f -r "%s" -p "%s"' % (custom_requirements_path, custom_roles_path))
Example #7
def _move(entry, source_root_path, dest_root_path):
    entry = entry.strip("/")
    source_path = join(source_root_path, entry)
    dest_path = join(dest_root_path, entry)

    if not exists(source_path):
        warn("entry does not exist: " + entry)
        return

    mkdir(dirname(dest_path))
    shutil.move(source_path, dest_path)
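A hedged usage sketch of _move, assuming the bare join/dirname/exists names come from os.path (as the snippet's style suggests) and that the local mkdir() helper creates intermediate directories:

# Hypothetical paths: relocate 'logs/app.log' between two trees.
_move('logs/app.log', '/srv/old_root', '/srv/new_root')
# -> creates /srv/new_root/logs first, then moves the file there;
#    a missing source only triggers warn(), not an exception.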
Example #8
def download_edizon(module, temp_directory, kosmos_version, kosmos_build):
    release = get_latest_release(module)
    app_path = download_asset(module, release, 0)
    if app_path is None:
        return None

    common.mkdir(os.path.join(temp_directory, 'switch', 'EdiZon'))
    shutil.move(app_path,
                os.path.join(temp_directory, 'switch', 'EdiZon', 'EdiZon.nro'))

    return release.tag_name
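The download_* helpers in this listing share one contract: return None when the asset could not be fetched, otherwise a version string. A hedged calling sketch with a hypothetical module dict:

# Hypothetical module entry; real entries come from the modules JSON file.
module = {'name': 'EdiZon'}
tag = download_edizon(module, '/tmp/build', '1.0.0', True)
if tag is None:
    print('EdiZon asset download failed')  # callers abort the whole build
else:
    print('EdiZon installed at version', tag)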
Example #9
def galaxy_install(install_path):
    roles_path = os.path.join(install_path, 'roles')
    common.mkdir(roles_path)
    common.run(
        'ansible-galaxy install -f -r "%s" -p "%s"' %
        (os.path.join(install_path, 'requirements.yml'), common.roles_path))
    if os.path.exists(custom_requirements_path):
        if not os.path.exists(custom_roles_path):
            common.mkdir(custom_roles_path)
        common.run('ansible-galaxy install -f -r "%s" -p "%s"' %
                   (custom_requirements_path, custom_roles_path))
Example #10
def download_kosmos_toolbox(module, temp_directory, kosmos_version, kosmos_build):
    release = get_latest_release(module)
    app_path = download_asset(module, release, 0)
    if app_path is None:
        return None

    common.mkdir(os.path.join(temp_directory, 'switch', 'KosmosToolbox'))
    shutil.move(app_path, os.path.join(temp_directory, 'switch', 'KosmosToolbox', 'KosmosToolbox.nro'))
    common.copy_module_file('kosmos-toolbox', 'config.json', os.path.join(temp_directory, 'switch', 'KosmosToolbox', 'config.json'))

    return release.tag_name
Example #11
def download_tesla_menu(module, temp_directory):
    release = get_latest_release(module)
    app_path = download_asset(module, release, 0)
    if app_path is None:
        return None

    common.mkdir(temp_directory.joinpath('switch/.overlays'))
    common.move(app_path,
                temp_directory.joinpath('switch/.overlays/ovlmenu.ovl'))

    return release.tag_name
Example #12
 def updater_write(self):
     update_script_path = os.path.join(self.ota_path, "META-INF", "com",
                                       "google", "android")
     cn.mkdir(update_script_path)
     new_ub = os.path.join(update_script_path, "update-binary")
     with open(new_ub, "w", encoding="UTF-8", newline="\n") as f:
         for line in self.us.script:
             f.write(line)
     new_uc = os.path.join(update_script_path, "updater-script")
     with open(new_uc, "w", encoding="UTF-8", newline="\n") as f:
         f.write("# Dummy file; update-binary is a shell script.\n")
Example #13
def download_lockpick(module, temp_directory, deepsea_version, deepsea_build):
    release = get_latest_release(module)
    app_path = download_asset(module, release, 0)
    if app_path is None:
        return None

    common.mkdir(temp_directory.joinpath('switch/Lockpick'))
    common.move(app_path, temp_directory.joinpath(
        'switch/Lockpick/Lockpick.nro'))

    return get_version(module, release, 0)
Example #14
def stage_flag_8():
    common.mkdir(common.FLAG_8_DIRECTORY)
    filenames_dict = common.get_flag_8_filenames()
    files_to_create = filenames_dict['all_files']
    flag_file = filenames_dict['flag_file']

    for filepath in files_to_create:
        common.write_to_file(filepath, "duck\n")

    # Overwrite the contents of the flag file
    common.write_to_file(flag_file, "goose\n" + get_formatted_flag_line(8))
Example #15
def download_nxdumptool(module, temp_directory, deepsea_version, deepsea_build):
    release = get_latest_release(module)
    app_path = download_asset(module, release, 0)
    if app_path is None:
        return None

    common.mkdir(temp_directory.joinpath('switch/NXDumpTool'))
    common.move(app_path, temp_directory.joinpath(
        'switch/NXDumpTool/NXDumpTool.nro'))

    return get_version(module, release, 0)
Example #16
def download_nxmtp(module, temp_directory, deepsea_version, parameters,
                   deepsea_build):
    release = get_latest_release(module)
    app_path = download_asset(module, release, 0)
    if app_path is None:
        return None

    common.mkdir(temp_directory.joinpath('switch/nxmtp'))
    common.move(app_path, temp_directory.joinpath('switch/nxmtp/nxmtp.nro'))

    return get_version(module, release, 0)
Example #17
def get_saver():
    lst = [n for n in tf.global_variables() if n.name.startswith('movement_') and '/' not in n.name]
    saver = tf.train.Saver(var_list=lst)
    path_model = '../../saved_models/0/movement_save/'
    common.mkdir(path_model)

    def save(sess):
        saver.save(sess=sess, save_path=path_model)

    def restore(sess):
        saver.restore(sess=sess, save_path=path_model)
    return save, restore
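A hedged usage sketch of the returned closures, assuming a TF1-style graph with matching 'movement_'-prefixed variables has already been built:

import tensorflow as tf  # TF1 API, matching tf.global_variables() above

save, restore = get_saver()
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    save(sess)     # writes checkpoint files under path_model
    restore(sess)  # later: reload the same variables into a session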
Example #18
def build(temp_directory, deepsea_version, command, auto_build):
    results = []

    modules_filename = 'deepsea.json'
    if command == common.Command.KosmosMinimal:
        modules_filename = 'deepsea-minimal.json'
    elif command == common.Command.SDSetup:
        modules_filename = 'sdsetup.json'

    # Open up modules.json
    with open(modules_filename) as json_file:
        # Parse JSON
        data = json.load(json_file)

        # Loop through modules
        for module in data:
            # Running a SDSetup Build
            if command == common.Command.SDSetup:
                # Only show prompts when it's not an auto build.
                if not auto_build:
                    print(f'Downloading {module["name"]}...')

                # Make sure module directory is created.
                module_directory = temp_directory.joinpath(
                    module['sdsetup_module_name'])
                common.mkdir(module_directory)

                # Download the module.
                download = globals()[module['download_function_name']]
                version = download(module, module_directory,
                                   deepsea_version, False)
                if version is None:
                    return None

                # Auto builds have a different prompt at the end for parsing.
                if auto_build:
                    results.append(
                        f'{module["sdsetup_module_name"]}:{version}')
                else:
                    results.append(f'  {module["name"]} - {version}')

            # Running a Kosmos Build
            else:
                # Download the module.
                print(f'Downloading {module["name"]}...')
                download = globals()[module['download_function_name']]
                version = download(module, temp_directory,
                                   deepsea_version, True)
                if version is None:
                    return None
                results.append(f'  {module["name"]} - {version}')

    return results
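build() dispatches on module['download_function_name'] through globals(); a hypothetical deepsea.json entry, limited to the keys this loop actually reads:

# Hypothetical module entry (keys inferred from the loop above).
module = {
    "name": "DeepSea Updater",
    "sdsetup_module_name": "deepsea-updater",
    "download_function_name": "download_kosmos_updater",
}
# The download function is looked up by name at runtime:
download = globals().get(module["download_function_name"])  # None if undefined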
Example #19
def download_status_monitor_overlay(module, temp_directory):
    release = get_latest_release(module)
    app_path = download_asset(module, release, 0)
    if app_path is None:
        return None

    common.mkdir(temp_directory.joinpath('switch/.overlays'))
    common.move(
        app_path,
        temp_directory.joinpath('switch/.overlays/Status-Monitor-Overlay.ovl'))

    return release.tag_name
Example #20
def download_nxdumptool(module, temp_directory, kosmos_version, kosmos_build):
    release = get_latest_release(module)
    app_path = download_asset(module, release, 0)
    if app_path is None:
        return None

    common.mkdir(os.path.join(temp_directory, 'switch', 'NXDumpTool'))
    shutil.move(
        app_path,
        os.path.join(temp_directory, 'switch', 'NXDumpTool', 'NXDumpTool.nro'))

    return get_version(module, release, 0)
Example #21
def wsj_train_lms(data_root, src_dict_suffix, out_dir="local_lm", max_order=4):
    # Train a language model on WSJ lm training corpus
    # Here we don't do things the Kaldi way. Kaldi uses its own
    # derivative-based language modeling. We'll do modified Kneser-Ney
    # smoothing, which is a little more widespread.

    src_dict_dir = os.path.join(data_root, "local",
                                "dict" + src_dict_suffix + "_larger")
    dst_lm_dir = os.path.join(data_root, "local", out_dir)
    mkdir(dst_lm_dir)

    vocab = set(x.split()[0]
                for x in cat(os.path.join(src_dict_dir, "lexicon.txt")))
    vocab.remove("!SIL")
    pipe_to(sorted(vocab), os.path.join(dst_lm_dir, "wordlist.txt"))

    with gzip.open(os.path.join(src_dict_dir, "cleaned.gz"), "rt") as f:
        text = f.read()

    sents = ngram_lm.text_to_sents(text,
                                   sent_end_expr=r"\n",
                                   word_delim_expr=r" +")
    del text

    ngram_counts = ngram_lm.sents_to_ngram_counts(sents, max_order)
    ngram_counts[0]["<UNK>"] = 0  # add to vocab

    # Find any n-grams that contain out-of-vocabulary words and prune them,
    # i.e. remove them entirely. Modified Kneser-Ney can still use their
    # frequency statistics before the n-grams are removed.
    to_prune = set(ngram_counts[0]) - vocab
    to_prune.remove("<S>")
    for i, ngram_count in enumerate(ngram_counts[1:]):
        if i:
            to_prune.update(x for x in ngram_count
                            if x[:-1] in to_prune or x[-1] in to_prune)
        else:
            to_prune.update(x for x in ngram_count
                            if x[0] in to_prune or x[-1] in to_prune)

    prob_list = ngram_lm.ngram_counts_to_prob_list_kneser_ney(
        ngram_counts, sos="<S>", to_prune=to_prune)
    del ngram_counts

    lm = ngram_lm.BackoffNGramLM(prob_list, sos="<S>", eos="</S>", unk="<UNK>")
    # "pruning" here means removing the probability mass of the sos token and
    # redistributing to the other unigrams. "<S>" will remain in the
    # vocabulary
    lm.prune_by_name({"<S>"})
    del prob_list

    print("Corpus PPL:", lm.corpus_perplexity(sents))
Example #22
def download_kosmos_updater(module, temp_directory, kosmos_version, kosmos_build):
    release = get_latest_release(module)
    app_path = download_asset(module, release, 0)
    if app_path is None:
        return None

    common.mkdir(os.path.join(temp_directory, 'switch', 'KosmosUpdater'))
    shutil.move(app_path, os.path.join(temp_directory, 'switch', 'KosmosUpdater', 'KosmosUpdater.nro'))
    common.copy_module_file('kosmos-updater', 'internal.db', os.path.join(temp_directory, 'switch', 'KosmosUpdater', 'internal.db'))
    common.sed('KOSMOS_VERSION', kosmos_version, os.path.join(temp_directory, 'switch', 'KosmosUpdater', 'internal.db'))

    return release.tag_name
Example #23
def download_ovl_sysmodules(module, temp_directory, deepsea_version,
                            parameters, deepsea_build):
    release = get_latest_release(module)
    app_path = download_asset(module, release, 0)
    if app_path is None:
        return None

    common.mkdir(temp_directory.joinpath('switch/.overlays'))
    common.move(app_path,
                temp_directory.joinpath('switch/.overlays/ovlSysmodules.ovl'))

    return get_version(module, release, 0)
Example #24
def download_savemii(module, temp_directory, kosmos_version, kosmos_build):
    release = get_latest_release(module)
    bundle_path = download_asset(module, release, 0)
    if bundle_path is None:
        return None

    common.mkdir(os.path.join(temp_directory, 'wiiu', 'apps'))
    with zipfile.ZipFile(bundle_path, 'r') as zip_ref:
        zip_ref.extractall(os.path.join(temp_directory, 'wiiu', 'apps'))

    common.delete_path(bundle_path)

    return get_version(module, release, 0)
Example #25
def download_lockpick_rcm(module, temp_directory, kosmos_version, kosmos_build):
    release = get_latest_release(module)
    payload_path = download_asset(module, release, 0)
    if payload_path is None:
        return None

    if kosmos_build:
        common.mkdir(os.path.join(temp_directory, 'bootloader', 'payloads'))
        shutil.move(payload_path, os.path.join(temp_directory, 'bootloader', 'payloads', 'Lockpick_RCM.bin'))
    else:
        shutil.move(payload_path, os.path.join(temp_directory, 'Lockpick_RCM.bin'))

    return release.tag_name
Example #26
def generate_road_run_configurations(road, location):
    road_sub_dir = os.path.join(location, str(road.id))
    mkdir(road_sub_dir)
    receptor_df = generate_receptors_df_for_road(road)
    road_df = road_to_dataframe(road)
    print road_sub_dir
    for (i, combination) in enumerate(input_file_parameter_combinations()):
        run_dir = os.path.join(road_sub_dir, str(i))
        mkdir(run_dir)
        receptor_df.to_csv(os.path.join(run_dir, "receptors.csv"), index=False)
        road_df.to_csv(os.path.join(run_dir, "roads.csv"), index=False)
        generate_run_files(run_dir, road_df, combination)
    print "generated configurations for road ", road.id
Example #27
def get_sentences(dir):
    parent_dir = os.path.abspath(os.path.dirname(
        os.getcwd()))  #C:\Users\jee_s\Desktop\助研\中文语料处理
    orgDir = os.path.join(parent_dir, "摘要文件")
    toDir = os.path.join(parent_dir, dir)
    common.mkdir(toDir)
    all_dirs, all_files, all_names = common.getdir(orgDir)
    for i in all_files[1:]:  # all abstract files
        print(i)
        filename = os.path.basename(i)
        # print(os.path.splitext(i))
        sents = splitSentence(i)
        common.save_file(sents, os.path.join(toDir, filename))
Example #28
def download_lockpick_rcm(module, temp_directory, isotope_version, isotope_build):
    release = get_latest_release(module)
    payload_path = download_asset(module, release, 0)
    if payload_path is None:
        return None

    if isotope_build:
        common.mkdir(temp_directory.joinpath('bootloader/payloads'))
        common.move(payload_path, temp_directory.joinpath(
            'bootloader/payloads/Lockpick_RCM.bin'))
    else:
        common.move(payload_path, temp_directory.joinpath('Lockpick_RCM.bin'))

    return get_version(module, release, 0)
Example #29
def download_status_monitor_overlay(module, temp_directory, kosmos_version,
                                    kosmos_build):
    release = get_latest_release(module)
    app_path = download_asset(module, release, 0)
    if app_path is None:
        return None

    common.mkdir(os.path.join(temp_directory, 'switch', '.overlays'))
    shutil.move(
        app_path,
        os.path.join(temp_directory, 'switch', '.overlays',
                     'Status-Monitor-Overlay.ovl'))

    return release.tag_name
Example #30
def download_ovl_sysmodules(module, temp_directory, kosmos_version,
                            kosmos_build):
    release = get_latest_release(module)
    app_path = download_asset(module, release, 0)
    if app_path is None:
        return None

    common.mkdir(os.path.join(temp_directory, 'switch', '.overlays'))
    shutil.move(
        app_path,
        os.path.join(temp_directory, 'switch', '.overlays',
                     'ovlSysmodules.ovl'))

    return get_version(module, release, 0)
Example #31
def download_appstore(module, temp_directory, kosmos_version, kosmos_build):
    release = get_latest_release(module)
    bundle_path = download_asset(module, release, 0)
    if bundle_path is None:
        return None

    with zipfile.ZipFile(bundle_path, 'r') as zip_ref:
        zip_ref.extractall(temp_directory)
    
    common.delete_path(bundle_path)
    common.mkdir(os.path.join(temp_directory, 'switch', 'appstore'))
    shutil.move(os.path.join(temp_directory, 'appstore.nro'), os.path.join(temp_directory, 'switch', 'appstore', 'appstore.nro'))

    return release.name
Example #32
def main():
    project_list = common.get_project_list()

    parser = argparse.ArgumentParser()

    parser.add_argument("-rc",
                        "--recompute_clusters",
                        action="store_true",
                        help="recompute clustering for selected projects")
    parser.add_argument(
        "-c",
        "--cluster",
        type=str,
        help="path to the json file that contains clustering information")
    parser.add_argument("-g",
                        "--graph",
                        action="store_true",
                        help="set to regenerate graphs from the programs")
    parser.add_argument("-d",
                        "--dir",
                        type=str,
                        required=True,
                        help="The output directory")
    parser.add_argument("-p",
                        "--projectset",
                        type=str,
                        help="A project set name to work on")
    args = parser.parse_args()

    if args.projectset:
        project_list = common.get_corpus_set(args.projectset)

    args.dir = os.path.abspath(
        os.path.join(common.WORKING_DIR, 'results', args.dir))

    if os.path.exists(args.dir):
        rotate_log_dir(args.dir)

    common.mkdir(args.dir)
    common.set_output_dir(args.dir)
    kernel_dir = os.path.join(args.dir, "kernel_directory")
    common.mkdir(kernel_dir)

    backend.run(project_list, args, kernel_dir)
    print("\n********* END OF BACKEND **********\n")
    frontend.run(project_list, args, kernel_dir)

    collect_stray_output(project_list, args.dir)
    sanity.check_run(project_list, args.dir)
Example #33
def download_kosmos_updater(module, temp_directory, deepsea_version, deepsea_build):
    release = get_latest_release(module)
    app_path = download_asset(module, release, 0)
    if app_path is None:
        return None

    common.mkdir(temp_directory.joinpath('switch/DeepSea-Updater'))
    common.move(app_path, temp_directory.joinpath(
        'switch/DeepSea-Updater/DeepSea-Updater.nro'))
    common.copy_module_file('deepsea-updater', 'internal.db',
                            temp_directory.joinpath('switch/DeepSea-Updater/internal.db'))
    common.sed('DEEPSEA_VERSION', deepsea_version,
               temp_directory.joinpath('switch/DeepSea-Updater/internal.db'))

    return get_version(module, release, 0)
Example #34
def download_isotope_updater(module, temp_directory, isotope_version, isotope_build):
    release = get_latest_release(module)
    app_path = download_asset(module, release, 0)
    if app_path is None:
        return None

    common.mkdir(temp_directory.joinpath('switch/Isotope-Updater'))
    common.move(app_path, temp_directory.joinpath(
        'switch/Isotope-Updater/Isotope-Updater.nro'))
    common.copy_module_file('isotope-updater', 'internal.db',
                            temp_directory.joinpath('switch/Isotope-Updater/internal.db'))
    common.sed('ISOTOPE_VERSION', isotope_version,
               temp_directory.joinpath('switch/Isotope-Updater/internal.db'))

    return get_version(module, release, 0)
Example #35
def download_isotope_toolbox(module, temp_directory, isotope_version, isotope_build):
    release = get_latest_release(module)
    app_path = download_asset(module, release, 0)
    if app_path is None:
        return None

    common.mkdir(temp_directory.joinpath('switch/Isotope-Toolbox'))

    common.move(app_path, temp_directory.joinpath(
        'switch/Isotope-Toolbox/Isotope-Toolbox.nro'))

    common.copy_module_file('isotope-toolbox', 'config.json',
                            temp_directory.joinpath('switch/Isotope-Toolbox/config.json'))

    return get_version(module, release, 0)
Example #36
def download_appstore(module, temp_directory, isotope_version, isotope_build):
    release = get_latest_release(module)
    bundle_path = download_asset(module, release, 0)
    if bundle_path is None:
        return None

    with zipfile.ZipFile(bundle_path, 'r') as zip_ref:
        zip_ref.extractall(temp_directory)

    common.delete(bundle_path)
    common.mkdir(temp_directory.joinpath('switch/appstore'))
    common.move(temp_directory.joinpath('appstore.nro'),
                temp_directory.joinpath('switch/appstore/appstore.nro'))

    return get_version(module, release, 0)
Example #37
def build(temp_directory, kosmos_version, kosmos_build, auto_build):
    results = []

    # Open up modules.json
    with open('modules.json') as json_file:
        # Parse JSON
        data = json.load(json_file)

        # Loop through modules
        for module in data:
            sdsetup_opts = module['sdsetup']

            # Running a Kosmos Build
            if kosmos_build:
                # Download the module.
                print(f'Downloading {module["name"]}...')
                download = globals()[module['download_function_name']]
                version = download(module, temp_directory, kosmos_version,
                                   kosmos_build)
                if version is None:
                    return None
                results.append(f'  {module["name"]} - {version}')

            # Running a SDSetup Build
            elif not kosmos_build and sdsetup_opts['included']:
                # Only show prompts when it's not an auto build.
                if not auto_build:
                    print(f'Downloading {module["name"]}...')

                # Make sure module directory is created.
                module_directory = os.path.join(temp_directory,
                                                sdsetup_opts['name'])
                common.mkdir(module_directory)

                # Download the module.
                download = globals()[module['download_function_name']]
                version = download(module, module_directory, kosmos_version,
                                   kosmos_build)
                if version is None:
                    return None

                # Auto builds have a different prompt at the end for parsing.
                if auto_build:
                    results.append(f'{sdsetup_opts["name"]}:{version}')
                else:
                    results.append(f'  {module["name"]} - {version}')

    return results
Example #38
def get_saver(position, blank=''):
    lst = [
        n for n in tf.global_variables()
        if n.name.startswith(blank + 'latent') and '/' not in n.name
    ]
    saver = tf.train.Saver(var_list=lst)
    path_model = SAVE_PATH_TEMPLATE % position
    common.mkdir(path_model)

    def save(sess):
        saver.save(sess=sess, save_path=path_model)

    def restore(sess):
        saver.restore(sess=sess, save_path=path_model)

    return save, restore
Example #39
    def create_quartus_settings_file(self, lib_names=None):
        """Create the Quartus settings file (QSF) for all HDL libraries that have a toplevel entity key synth_top_level_entity.
        
           Note:
           . No support for revisions, so only one qsf per qpf
           
           Arguments:
           - lib_names      : one or more HDL libraries
        """
        if lib_names is None: lib_names = self.lib_names
        lib_dicts = self.libs.get_dicts(key='hdl_lib_name', values=lib_names)
        syn_dicts = self.libs.get_dicts(key='synth_top_level_entity', values=None, dicts=lib_dicts)
        for syn_dict in cm.listify(syn_dicts):
            # Open qsf for each HDL library that has a synth_top_level_entity
            lib_name = syn_dict['hdl_lib_name']
            lib_path = self.libs.get_filePath(syn_dict)
            top_level_entity = syn_dict['synth_top_level_entity']
            if top_level_entity=='':
                top_level_entity = lib_name
            qsf_path = self.get_lib_build_dirs('synth', lib_dicts=syn_dict)
            cm.mkdir(qsf_path)

            # One qsf per lib_name
            qsf_name = lib_name + '.qsf'
            qsfPathName = cm.expand_file_path_name(qsf_name, qsf_path)
            with open(qsfPathName, 'w') as fp:
                fp.write('# synth_top_level_entity\n')
                fp.write('set_global_assignment -name TOP_LEVEL_ENTITY %s\n' % top_level_entity)

                fp.write('\n')
                fp.write('# quartus_qsf_files\n')
                quartus_qsf_files = syn_dict['quartus_qsf_files'].split()
                for fn in quartus_qsf_files:
                    filePathName = cm.expand_file_path_name(fn, lib_path)
                    fp.write('set_global_assignment -name SOURCE_TCL_SCRIPT_FILE %s\n' % filePathName)

                fp.write('\n')
                fp.write('# All used HDL library *_lib.qip files in order with top level last\n')
                use_lib_names = self.derive_all_use_libs('synth', lib_name)
                use_lib_order = self.derive_lib_order('synth', use_lib_names)
                #use_lib_dicts = self.libs.get_dicts('hdl_lib_name', values=use_lib_order)    # uses original libs.dicts order, but
                use_lib_dicts = self.get_lib_dicts_from_lib_names(lib_names=use_lib_order)    # must preserve the use_lib_order order to ensure that the top level design qip with the sdc file is included last in the qsf
                for lib_dict in cm.listify(use_lib_dicts):
                    qip_path = self.get_lib_build_dirs('synth', lib_dicts=lib_dict)
                    qip_name = lib_dict['hdl_lib_name'] + '_lib.qip'
                    qipPathName = cm.expand_file_path_name(qip_name, qip_path)
                    fp.write('set_global_assignment -name QIP_FILE %s\n' % qipPathName)
Example #40
def copy_config(source_path):
    if not os.path.isabs(source_path):
        source_path = os.path.join(os.getcwd(), source_path)
    destination_path = os.path.join(config_path, config_filename)
    if source_path and source_path != destination_path:
        if os.path.exists(source_path):
            if not os.path.exists(destination_path):
                common.mkdir(config_path)
                copyfile(source_path, destination_path)
            else:
                output.warning("Destination file %s already exists." % destination_path)
                if click.confirm('Do you want to overwrite it?'):
                    os.remove(destination_path)
                    copyfile(source_path, destination_path)
                else:
                    output.abort("To run osxstrap without copying config, use the osxstrap command.")
        else:
            output.abort("Input file %s does not exist." % source_path)
Example #41
 def create_lib_order_files(self, build_type, lib_names=None):
     """Create the compile order file '<lib_name>_lib_order.txt' for all HDL libraries in the specified list of lib_names.
     
        The file is stored in the sim build directory of the HDL library.
        The file is read by commands.do in Modelsim to avoid having to derive the library compile order in TCL.
     """
     if lib_names is None: lib_names = self.lib_names
     lib_dicts = self.libs.get_dicts('hdl_lib_name', values=lib_names)
     for lib_dict in cm.listify(lib_dicts):
         lib_name = lib_dict['hdl_lib_name']
         use_libs = self.derive_all_use_libs(build_type, lib_name)
         lib_order = self.derive_lib_order(build_type, use_libs)
         file_name = lib_name + '_lib_order.txt'
         file_path = self.get_lib_build_dirs('sim', lib_dicts=lib_dict)
         cm.mkdir(file_path)
         filePathName = os.path.join(file_path, file_name)
         with open(filePathName, 'w') as fp:
             for lib in lib_order:
                 fp.write('%s ' % lib)
Example #42
 def create_modelsim_lib_compile_ip_files(self, lib_names=None):
     """Create the '<lib_name>_lib_compile_ip.txt' file for all HDL libraries in the specified list of lib_names.
     
        The file is stored in the sim build directory of the HDL library.
        The file is read by commands.do in Modelsim to know which IP needs to be compiled before the library is compiled.
     """
     if lib_names is None: lib_names = self.lib_names
     lib_dicts = self.libs.get_dicts('hdl_lib_name', lib_names)
     for lib_dict in cm.listify(lib_dicts):
         if 'modelsim_compile_ip_files' in lib_dict:
             compile_ip_files = lib_dict['modelsim_compile_ip_files'].split()
             lib_name = lib_dict['hdl_lib_name']
             file_name = lib_name + '_lib_compile_ip.txt'
             file_path = self.get_lib_build_dirs('sim', lib_dicts=lib_dict)
             cm.mkdir(file_path)
             filePathName = os.path.join(file_path, file_name)
             with open(filePathName, 'w') as fp:
                 for fpn in compile_ip_files:
                     # Write the expanded file path name for <lib_name>_lib_compile_ip.txt so that it can be executed directly from its location in SVN using the Modelsim "do"-command in the commands.do.
                     # An alternative would be to write the basename, so only <lib_name>_lib_compile_ip.txt, but that would require copying the basename file to the mpf build directory
                     efpn = os.path.expandvars(fpn)
                     fp.write('%s ' % efpn)
Example #43
    options, args = op.parse_args()

    conf = config.open_config(options.conf)
    config.set_common_logging(conf, _logger, ["log_db",])
    dirname = conf.get("changepoint", "temp_cp_data")

    if len(args) == 0:
        sys.exit(usage)
    mode = args.pop(0)
    if mode == "make":
        cpd = ChangePointData(dirname)
        binsize = conf.getdur("changepoint", "cf_bin")
        cf_r = conf.getfloat("changepoint", "cf_r")
        cf_smooth = conf.getint("changepoint", "cf_smooth")
        cpd.init(binsize, cf_r, cf_smooth)
        common.mkdir(cpd.dirname)
        common.rm_dirchild(cpd.dirname)
        cpd.update(conf)
        cpd.dump()
    elif mode == "update":
        cpd = ChangePointData(dirname)
        cpd.load()
        cpd.update(conf)
        cpd.dump()
    elif mode == "graph":
        if len(args) < 2:
            sys.exit("give me term length and output directory name of graphs")
        graph_cp(conf, args[0], args[1])
    elif mode == "heat":
        if len(args) < 2:
            sys.exit("give me term length and output filename of graphs")
Example #44
    def create_modelsim_project_file(self, lib_names=None):
        """Create the Modelsim project file for all technology libraries and RTL HDL libraries.
        
           Arguments:
           - lib_names       : one or more HDL libraries
        """
        if lib_names is None: lib_names = self.lib_names
        lib_dicts = self.libs.get_dicts('hdl_lib_name', lib_names)
        for lib_dict in cm.listify(lib_dicts):
            # Open mpf
            lib_name = lib_dict['hdl_lib_name']
            mpf_name = lib_name + '.mpf'
            mpf_path = self.get_lib_build_dirs('sim', lib_dicts=lib_dict)
            cm.mkdir(mpf_path)
            mpfPathName = os.path.join(mpf_path, mpf_name)
            with open(mpfPathName, 'w') as fp:
                # Write [Library] section for all used libraries
                fp.write('[Library]\n')
                # . map used vendor technology libs to their target directory
                for technologyName in cm.listify(self.technologyNames):
                    tech_dict = self.read_hdl_libraries_technology_file(technologyName)
                    for lib_clause, lib_work in tech_dict.iteritems():
                        fp.write('%s = %s\n' % (lib_clause, lib_work))
                # . vendor technology libs that are not used are not compiled, but are mapped to work to avoid compile errors when mentioned in a LIBRARY clause
                for tech_dict in self.removed_dicts:
                    fp.write('%s = work\n' % tech_dict['hdl_library_clause_name'])
                # . all used libs for this lib_name
                use_lib_names = self.derive_all_use_libs('sim', lib_name)
                use_lib_dicts = self.libs.get_dicts('hdl_lib_name', use_lib_names)
                use_lib_build_sim_dirs = self.get_lib_build_dirs('sim', lib_dicts=use_lib_dicts)
                use_lib_clause_names = self.libs.get_key_values('hdl_library_clause_name', use_lib_dicts)
                for lib_clause, lib_dir in zip(cm.listify(use_lib_clause_names), cm.listify(use_lib_build_sim_dirs)):
                    lib_work = os.path.join(lib_dir, 'work')
                    fp.write('%s = %s\n' % (lib_clause, lib_work))
                # . work
                fp.write('work = work\n')
                # . others modelsim default libs
                model_tech_dir = os.path.expandvars(self.tool_dict['model_tech_dir'])
                fp.write('others = %s\n' % os.path.join(model_tech_dir, 'modelsim.ini'))
                
                # Write [Project] section for all used libraries
                fp.write('[Project]\n')
                fp.write('Project_Version = 6\n')  # must be >= 6 to fit all
                fp.write('Project_DefaultLib = work\n')
                fp.write('Project_SortMethod = unused\n')
                
                # - project files
                synth_files = lib_dict['synth_files'].split()
                test_bench_files = lib_dict['test_bench_files'].split()
                project_files = synth_files + test_bench_files
                if 'modelsim_compile_ip_files' in lib_dict:
                    compile_ip_files = lib_dict['modelsim_compile_ip_files'].split()
                    project_files += compile_ip_files
                fp.write('Project_Files_Count = %d\n' % len(project_files))
                lib_path = self.libs.get_filePath(lib_dict)
                for i, fn in enumerate(project_files):
                    filePathName = cm.expand_file_path_name(fn, lib_path)
                    fp.write('Project_File_%d = %s\n' % (i, filePathName))

                project_file_p_defaults_hdl     = 'vhdl_novitalcheck 0 group_id 0 cover_nofec 0 vhdl_nodebug 0 vhdl_1164 1 vhdl_noload 0 vhdl_synth 0 vhdl_enable0In 0 vlog_1995compat 0 last_compile 0 vhdl_disableopt 0 cover_excludedefault 0 vhdl_vital 0 vhdl_warn1 1 vhdl_warn2 1 vhdl_explicit 1 vhdl_showsource 0 cover_covercells 0 vhdl_0InOptions {} vhdl_warn3 1 vlog_vopt {} cover_optlevel 3 voptflow 1 vhdl_options {} vhdl_warn4 1 toggle - ood 0 vhdl_warn5 1 cover_noshort 0 compile_to work cover_nosub 0 dont_compile 0 vhdl_use93 2002 cover_stmt 1'
                project_file_p_defaults_vhdl    = 'file_type vhdl'
                project_file_p_defaults_verilog = 'file_type verilog'
                project_file_p_defaults_tcl     = 'last_compile 0 compile_order -1 file_type tcl group_id 0 dont_compile 1 ood 1'

                project_folders = []
                offset = 0

                nof_synth_files = len(synth_files)
                if nof_synth_files>0:
                    project_folders.append('synth_files')
                    for i in range(nof_synth_files):  

                        # Add file type specific settings
                        file_ext = synth_files[i].split('.')[-1]
                        if file_ext=='vhd' or file_ext=='vhdl':
                            project_file_p_defaults_file_specific = project_file_p_defaults_vhdl
                        elif file_ext=='v':
                            project_file_p_defaults_file_specific = project_file_p_defaults_verilog
                        else:
                            print '\nERROR - Undefined file extension in synth_files:', synth_files[i]
                            sys.exit()

                        fp.write('Project_File_P_%d = folder %s compile_order %d %s\n' % (offset+i, project_folders[-1], offset+i, project_file_p_defaults_hdl+' '+project_file_p_defaults_file_specific))
                offset = nof_synth_files

                nof_test_bench_files = len(test_bench_files)
                if nof_test_bench_files>0:
                    project_folders.append('test_bench_files')
                    for i in range(nof_test_bench_files):

                        # Add file type specific settings
                        file_ext = test_bench_files[i].split('.')[-1]
                        if file_ext=='vhd' or file_ext=='vho' or file_ext=='vhdl':
                            project_file_p_defaults_file_specific = project_file_p_defaults_vhdl
                        elif file_ext=='v':
                            project_file_p_defaults_file_specific = project_file_p_defaults_verilog
                        else:
                            print '\nERROR - Undefined file extension in test_bench_files:', test_bench_files[i]
                            sys.exit()

                        fp.write('Project_File_P_%d = folder %s compile_order %d %s\n' % (offset+i, project_folders[-1], offset+i, project_file_p_defaults_hdl+' '+project_file_p_defaults_file_specific))
                offset += nof_test_bench_files

                if 'modelsim_compile_ip_files' in lib_dict:
                    nof_compile_ip_files = len(compile_ip_files)
                    if nof_compile_ip_files>0:
                        project_folders.append('compile_ip_files')
                        for i in range(nof_compile_ip_files):
                            fp.write('Project_File_P_%d = folder %s compile_order %d %s\n' % (offset+i, project_folders[-1], offset+i, project_file_p_defaults_tcl))
                    offset += nof_compile_ip_files
                        
                # - project folders
                fp.write('Project_Folder_Count = %d\n' % len(project_folders))
                for i, fd in enumerate(project_folders):
                    fp.write('Project_Folder_%d = %s\n' % (i, fd))
                    fp.write('Project_Folder_P_%d = folder {Top Level}\n' % i)
                    
                # - simulation configurations
                fp.write('Project_Sim_Count = %d\n' % len(test_bench_files))
                project_sim_p_defaults = 'Generics {} timing default -std_output {} -nopsl 0 +notimingchecks 0 selected_du {} -hazards 0 -sdf {} ok 1 -0in 0 -nosva 0 +pulse_r {} -absentisempty 0 -multisource_delay {} +pulse_e {} vopt_env 1 -coverage 0 -sdfnoerror 0 +plusarg {} -vital2.2b 0 -t default -memprof 0 is_vopt_flow 0 -noglitch 0 -nofileshare 0 -wlf {} -assertdebug 0 +no_pulse_msg 0 -0in_options {} -assertfile {} -sdfnowarn 0 -Lf {} -std_input {}'
                project_sim_p_search_libraries = '-L {}'
                if 'modelsim_search_libraries' in self.tool_dict:
                    project_sim_p_search_libraries = '-L {'
                    for sl in self.tool_dict['modelsim_search_libraries'].split():
                        project_sim_p_search_libraries += sl
                        project_sim_p_search_libraries += ' '
                    project_sim_p_search_libraries += '}'
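                # Alternative OtherArgs settings below are kept for reference; only the last assignment takes effect.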
                project_sim_p_otherargs = 'OtherArgs {}'
                project_sim_p_otherargs = 'OtherArgs {+nowarn8684 +nowarn8683 -quiet}'
                project_sim_p_otherargs = 'OtherArgs {+nowarn8684 +nowarn8683}'
                project_sim_p_otherargs = 'OtherArgs {+nowarn8684 +nowarn8683 +nowarnTFMPC +nowarnPCDPC}'  # nowarn on verilog IP connection mismatch warnings
                project_sim_p_optimization = 'is_vopt_opt_used 2'  # = when 'Enable optimization' is not selected in GUI
                project_sim_p_optimization = 'is_vopt_opt_used 1 voptargs {OtherVoptArgs {} timing default VoptOutFile {} -vopt_keep_delta 0 -0in 0 -fvopt {} VoptOptimize:method 1 -vopt_00 2 +vopt_notimingcheck 0 -Lfvopt {} VoptOptimize:list .vopt_opt.nb.canvas.notebook.cs.page1.cs.g.spec.listbox -Lvopt {} +vopt_acc {} VoptOptimize .vopt_opt.nb.canvas.notebook.cs.page1.cs -vopt_hazards 0 VoptOptimize:Buttons .vopt_opt.nb.canvas.notebook.cs.page1.cs.g.spec.bf 0InOptionsWgt .vopt_opt.nb.canvas.notebook.cs.page3.cs.zf.ze -0in_options {}}' # = when 'Enable optimization' is selected in GUI for full visibility
                for i, fn in enumerate(test_bench_files):
                    fName = os.path.basename(fn)
                    tbName = os.path.splitext(fName)[0]
                    fp.write('Project_Sim_%d = %s\n' % (i, tbName))
                for i, fn in enumerate(test_bench_files):
                    fName = os.path.basename(fn)
                    tbName = os.path.splitext(fName)[0]
                    fp.write('Project_Sim_P_%d = folder {Top Level} additional_dus work.%s %s %s %s %s\n' % (i, tbName, project_sim_p_defaults, project_sim_p_search_libraries, project_sim_p_otherargs, project_sim_p_optimization))
                    
                # Write [vsim] section
                fp.write('[vsim]\n')
                fp.write('RunLength = 0 ps\n')
                fp.write('resolution = 1fs\n')
                fp.write('IterationLimit = 5000\n')       # According to 'verror 3601' the default is 5000, typically 100 is enough, but e.g. the ip_stratixiv_phy_xaui_0 requires more.
                fp.write('DefaultRadix = decimal\n')
Example #45
            help="configuration file path")
    op.add_option("-e", action="store_true", dest="make_event",
            default=False, help="only making event set")
    op.add_option("-p", "--parallel", action="store", dest="pal", type="int",
            default=1, help="multithreading")
    #op.add_option("-r", action="store_true", dest="rflag",
    #        default=False, help="using pcalg library in R")
    op.add_option("--test", action="store_true", dest="test",
            default=False, help="test pc_log; do with first term")
    op.add_option("--debug", action="store_true", dest="debug",
            default=False, help="set logging level to DEBUG")
    (options, args) = op.parse_args()

    conf = config.open_config(options.conf)
    lv = logging.DEBUG if options.debug else logging.INFO
    config.set_common_logging(conf, _logger, ["evfilter"], lv = lv)

    common.mkdir(conf.get("dag", "output_dir"))
    l_args = pc_all_args(conf)
    if options.test:
        test_pc(l_args); sys.exit()
    elif options.make_event:
        test_edict(l_args); sys.exit()

    if options.pal == 1:
        pc_sthread(l_args)
    else:
        pc_mthread(l_args, options.pal)


Example #46
def main(options):
    if not os.path.isabs(options.builddir):
        options.builddir = os.path.join(PROJECT_DIR, options.builddir)

    if options.clean:
        clean()
        common.rmdir(options.builddir)

    common.mkdir(options.builddir)

    if options.all or options.dist:
        options.cpp = True
        options.csharp = True
        options.gui = True
        options.java = True
        options.python = True
        options.rpg = True

    # install dependencies
    common.run_cmd('mvn', ['install:install-file', '-DgroupId=com.frontendart.columbus',
                    '-DartifactId=graphsupportlib', '-Dversion=1.0',
                    '-Dpackaging=jar', '-Dfile=lib/graphsupportlib-1.0.jar'])
    common.run_cmd('mvn', ['install:install-file', '-DgroupId=com.frontendart.columbus',
                    '-DartifactId=graphlib', '-Dversion=1.0', '-Dpackaging=jar',
                    '-Dfile=lib/graphlib-1.0.jar'])

    # sonarqube-core-plugin
    mvn_install('sonarqube-core-plugin')

    # sourcemeter-analyzer-base
    mvn_install('sonarqube-analyzers/sourcemeter-analyzer-base')

    # sonarqube-gui-plugin
    if options.gui:
        usersguide()
        mvn_install('sonarqube-gui-plugin')

    # analyzers
    if options.cpp:
        mvn_install('sonarqube-analyzers/sourcemeter-analyzer-cpp')
    if options.csharp:
        mvn_install('sonarqube-analyzers/sourcemeter-analyzer-csharp')
    if options.java:
        mvn_install('sonarqube-analyzers/sourcemeter-analyzer-java')
    if options.python:
        mvn_install('sonarqube-analyzers/sourcemeter-analyzer-python')
    if options.rpg:
        mvn_install('sonarqube-analyzers/sourcemeter-analyzer-rpg')

    target_dir = os.path.join(options.builddir, PACKAGENAME)
    common.mkdir(target_dir)
    common.mkdir('%s/doc' % target_dir)
    common.mkdir('%s/plugins' % target_dir)
    try:
        shutil.copy('doc/UG.html', '%s/doc' % target_dir)
        shutil.copy('README.md', target_dir)
    except OSError:
        print('Cannot copy doc files.')
    copy_jars('src/sonarqube-core-plugin/target/', '%s/plugins' % target_dir)
    copy_jars('src/sonarqube-gui-plugin/target/', '%s/plugins' % target_dir)
    copy_jars('src/sonarqube-analyzers/sourcemeter-analyzer-cpp/target/',
              '%s/plugins' % target_dir)
    copy_jars('src/sonarqube-analyzers/sourcemeter-analyzer-csharp/target/',
              '%s/plugins' % target_dir)
    copy_jars('src/sonarqube-analyzers/sourcemeter-analyzer-java/target/',
              '%s/plugins' % target_dir)
    copy_jars('src/sonarqube-analyzers/sourcemeter-analyzer-python/target/',
              '%s/plugins' % target_dir)
    copy_jars('src/sonarqube-analyzers/sourcemeter-analyzer-rpg/target/',
              '%s/plugins' % target_dir)

    if options.dist:
        tarfile_name = os.path.join(options.builddir,
                                    '%s.tar.gz' % PACKAGENAME)
        tar = tarfile.open(tarfile_name, 'w:gz')
        tar.add(target_dir, arcname=PACKAGENAME)
        tar.close()

    print('\nBUILD SUCCESS\n')
Example #47
 def initFolder(self):
     mkdir(self.resultFolder)
     mkdir(self.rawDataStoreFolder)
     mkdir(self.rawDataBackupFolder)
Example #48
def initialize():
    shutil.rmtree(path.root)
    mkdir(path.root)
    mkdir(path.db.root)
    mkdir(path.db.dj)
    mkdir(path.db.star)
    mkdir(path.db.nm)
    mkdir(path.db.hd)
    mkdir(path.db.mx)
    mkdir(path.db.ex)
    mkdir(path.db.club)
    mkdir(path.db.mission)
    mkdir(path.db.master)
    mkdir(path.img.root)
    mkdir(path.img.icon)
    mkdir(path.img.star)
    mkdir(path.img.pop)
    mkdir(path.img.club)
    mkdir(path.img.mission)
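The nested path.db.* / path.img.* attributes suggest a path-namespace helper; a purely hypothetical sketch of such an object, illustrating only the shape initialize() expects:

import os

class _Namespace(object):
    """Hypothetical helper: exposes root plus one subdirectory per name."""
    def __init__(self, root, names=()):
        self.root = root
        for name in names:
            setattr(self, name, os.path.join(root, name))

class _Path(object):
    root = 'data'  # hypothetical root directory
    db = _Namespace(os.path.join(root, 'db'),
                    ('dj', 'star', 'nm', 'hd', 'mx', 'ex',
                     'club', 'mission', 'master'))
    img = _Namespace(os.path.join(root, 'img'),
                     ('icon', 'star', 'pop', 'club', 'mission'))

path = _Path()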
Example #49
    def create_quartus_ip_lib_file(self, lib_names=None):
        """Create the Quartus IP file <hdl_lib_name>_lib.qip for all HDL libraries. The <hdl_lib_name>.qip file contains the list of files that are given
           by the synth_files key and the quartus_*_file keys.
           
           Note:
           . Use the postfix '_lib' in the QIP file name *_lib.qip to avoid a potential conflict with a *.qip that may come with the IP.
           . The HDL library *_lib.qip files contain all files that are listed by the synth_files key. Hence when these qip files are included,
             the Quartus project will analyse all files even if their entity is not instantiated in the design. This is fine; it makes it unnecessary
             to parse the hierarchy of the synth_top_level_entity VHDL file to find and include only the source files that are actually used.
        
           Arguments:
           - lib_names      : one or more HDL libraries
        """
        if lib_names is None: lib_names = self.lib_names
        lib_dicts = self.libs.get_dicts('hdl_lib_name', values=lib_names)
        for lib_dict in cm.listify(lib_dicts):
            # Open qip
            lib_name = lib_dict['hdl_lib_name']
            lib_path = self.libs.get_filePath(lib_dict)
            qip_name = lib_name + '_lib.qip'
            qip_path = self.get_lib_build_dirs('synth', lib_dicts=lib_dict)
            cm.mkdir(qip_path)
            qipPathName = cm.expand_file_path_name(qip_name, qip_path)
            with open(qipPathName, 'w') as fp:
                if 'synth_files' in lib_dict:
                    fp.write('# synth_files\n')
                    synth_files = lib_dict['synth_files'].split()
                    for fn in synth_files:
                        filePathName = cm.expand_file_path_name(fn, lib_path)

                        file_ext = fn.split('.')[-1]
                        if file_ext=='vhd' or file_ext=='vhdl':
                            file_type = 'VHDL_FILE'
                        elif file_ext=='v':
                            file_type = 'VERILOG_FILE'
                        else:
                            print '\nERROR - Undefined file extension in synth_files:', fn
                            sys.exit()

                        fp.write('set_global_assignment -name %s %s -library %s\n' % (file_type, filePathName, lib_name + '_lib'))
    
                if 'quartus_vhdl_files' in lib_dict:
                    fp.write('\n')
                    fp.write('# quartus_vhdl_files\n')
                    quartus_vhdl_files = lib_dict['quartus_vhdl_files'].split()
                    for fn in quartus_vhdl_files:
                        filePathName = cm.expand_file_path_name(fn, lib_path)

                        file_ext = fn.split('.')[-1]
                        if file_ext=='vhd' or file_ext=='vhdl':
                            file_type = 'VHDL_FILE'
                        elif file_ext=='v':
                            file_type = 'VERILOG_FILE'
                        else:
                            print '\nERROR - Undefined file extension in quartus_vhdl_files:', fn
                            sys.exit()

                        fp.write('set_global_assignment -name %s %s -library %s\n' % (file_type, filePathName, lib_name + '_lib'))
                    
                if 'quartus_qip_files' in lib_dict:
                    fp.write('\n')
                    fp.write('# quartus_qip_files\n')
                    quartus_qip_files = lib_dict['quartus_qip_files'].split()
                    for fn in quartus_qip_files:
                        filePathName = cm.expand_file_path_name(fn, lib_path)
                        fp.write('set_global_assignment -name QIP_FILE %s\n' % filePathName)

                if 'quartus_tcl_files' in lib_dict:
                    fp.write('\n')
                    fp.write('# quartus_tcl_files\n')
                    quartus_tcl_files = lib_dict['quartus_tcl_files'].split()
                    for fn in quartus_tcl_files:
                        filePathName = cm.expand_file_path_name(fn, lib_path)
                        fp.write('set_global_assignment -name SOURCE_TCL_SCRIPT_FILE %s\n' % filePathName)
                    
                if 'quartus_sdc_files' in lib_dict:
                    fp.write('\n')
                    fp.write('# quartus_sdc_files\n')
                    quartus_sdc_files = lib_dict['quartus_sdc_files'].split()
                    for fn in quartus_sdc_files:
                        filePathName = cm.expand_file_path_name(fn, lib_path)
                        fp.write('set_global_assignment -name SDC_FILE %s\n' % filePathName)