Example #1
def download_with_sra_prefetch(aspera_key, outdir, pickle_prefix, ena_id):
    command = ['prefetch', '', ena_id]

    if aspera_key is not None:
        _, ascp, _ = utils.run_command_popen_communicate(['which', 'ascp'], False, None, False)
        command[1] = '-a {ascp}|{aspera_key}'.format(ascp=ascp.splitlines()[0], aspera_key=aspera_key)

    run_successfully, stdout, stderr = utils.run_command_popen_communicate(command, False, 3600, True)
    if run_successfully:
        _, prefetch_outdir, _ = utils.run_command_popen_communicate(['echo', '$HOME/ncbi/public/sra'], True, None,
                                                                    False)

        try:
            os.rename(os.path.join(prefetch_outdir.splitlines()[0], ena_id + '.sra'),
                      os.path.join(outdir, ena_id + '.sra'))
        except OSError as e:
            print('Found the following error: '
                  '{}'.format(e))

            from shutil import copy as shutil_copy

            shutil_copy(os.path.join(prefetch_outdir.splitlines()[0], ena_id + '.sra'),
                        os.path.join(outdir, ena_id + '.sra'))
            os.remove(os.path.join(prefetch_outdir.splitlines()[0], ena_id + '.sra'))

    utils.save_variable_to_pickle(run_successfully, outdir, pickle_prefix + '.' + ena_id)
Example #2
    def _ExecuteCopyTasks(self, tasks, text):
        for task in tasks:
            if (not self.DryRun and not task.SourcePath.exists()):
                raise CompilerException(
                    "Cannot {0}-copy '{1!s}' to destination.".format(
                        text, task.SourcePath)) from FileNotFoundError(
                            str(task.SourcePath))

            if not task.DestinationPath.parent.exists():
                if self.DryRun:
                    self.LogDryRun("mkdir '{0!s}'.".format(
                        task.DestinationPath.parent))
                else:
                    try:
                        task.DestinationPath.parent.mkdir(parents=True)
                    except OSError as ex:
                        raise CompilerException(
                            "Error while creating '{0!s}'.".format(
                                task.DestinationPath.parent)) from ex

            self.LogDebug("{0}-copying '{1!s}'.".format(text, task.SourcePath))
            if self.DryRun:
                self.LogDryRun("Copy '{0!s}' to '{1!s}'.".format(
                    task.SourcePath, task.DestinationPath))
            else:
                try:
                    shutil_copy(str(task.SourcePath),
                                str(task.DestinationPath))
                except OSError as ex:
                    raise CompilerException(
                        "Error while copying '{0!s}'.".format(
                            task.SourcePath)) from ex
Example #3
    def restore(self, backupfile=None):
        if backupfile is None:
            file_path = None
            dir_path = Path('/backup')
            for f in dir_path.iterdir():
                if f.name == 'NetworkManager':
                    file_path = f
                    break
        else:
            file_path = Path(backupfile)

        if file_path is not None and file_path.exists():
            tmp_dir = Path(mkdtemp())
            tar = tar_open(file_path)
            tar.extractall(tmp_dir)
            tar.close()

            if tmp_dir.joinpath('nat').exists():
                external_interface = 'eth1'
                with tmp_dir.joinpath('nat').open('r', encoding='utf-8') as fd:
                    external_interface = fd.readline().strip()
                self.set_nat(True, True, external_interface)

            for f in self.backup_files:
                print("Restoring {0}".format(f))
                tmp_path = tmp_dir.joinpath(f.relative_to('/'))
                if tmp_path.exists():
                    shutil_copy(tmp_path, f)
        self.apply_changes()
        return n4d.responses.build_successful_call_response("True")
Example #4
	def __InstallGitHooks(self):
		self._host.LogNormal("  Installing Git hooks...")
		pocInstallationPath =   Path(self._host.PoCConfig['INSTALL.PoC']['InstallationDirectory'])
		hookRunnerPath =        pocInstallationPath / "tools/git/hooks/run-hook.sh"

		if (not hookRunnerPath.exists()):
			raise ConfigurationException("Runner script '{0!s}' not found.".format(hookRunnerPath)) from FileNotFoundError(str(hookRunnerPath))

		gitDirectoryPath =      self.__GetGitDirectory()
		gitHookDirectoryPath =  gitDirectoryPath / "hooks"

		for hookName in ["pre-commit"]:
			gitHookPath = gitHookDirectoryPath / hookName
			if gitHookPath.exists():
				if (gitHookPath.is_symlink() and (gitHookPath.resolve() == hookRunnerPath)):
					self._host.LogNormal("  '{0}' hook is already configured for PoC.".format(hookName))
				else:
					self._host.LogWarning("  '{0}' hook is already in use by another script.".format(hookName))
			else:
				self._host.LogNormal("  Setting '{0}' hook for PoC...".format(hookName))
				self._host.LogDebug("symlink '{0!s}' -> '{1!s}'.".format(gitHookPath, hookRunnerPath))
				try:
					gitHookPath.symlink_to(hookRunnerPath)
				except OSError as ex:
					# if symlink fails, do a copy as backup solution
					if getattr(ex, 'winerror', None) == 1314:
						self._host.LogDebug("copy '{0!s}' to '{1!s}'.".format(hookRunnerPath, gitHookPath))
						try:
							shutil_copy(str(hookRunnerPath), str(gitHookPath))
						except OSError as ex2:
							raise ConfigurationException() from ex2
					else:
						# not the Windows "privilege not held" error; re-raise instead of swallowing it
						raise
Example #5
    def post_handle(self, full_path, config):
        """ Runs after callback services have been already invoked, performs clean up if configured to.
        """
        if config.move_processed_to:
            shutil_copy(full_path, config.move_processed_to)

        if config.delete_after_pick_up:
            os.remove(full_path)
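A minimal usage sketch for the hook above (not from the source): the handler class is restated so the snippet stands alone, and SimpleNamespace fakes a config object carrying the two attributes the method reads; the paths are illustrative.

import os
from shutil import copy as shutil_copy
from types import SimpleNamespace

class PickupHandler:
    # Same body as the example above, repeated so the sketch runs standalone.
    def post_handle(self, full_path, config):
        if config.move_processed_to:
            shutil_copy(full_path, config.move_processed_to)
        if config.delete_after_pick_up:
            os.remove(full_path)

# Fake config: archive the file, then delete the picked-up original.
config = SimpleNamespace(move_processed_to='/archive', delete_after_pick_up=True)
PickupHandler().post_handle('/incoming/orders.csv', config)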
Example #6
    def copy(self, destination):
        """
        Copy the globally given file path to the given destination.

        :param str destination: The destination of the copy.
        """

        if self.exists(self.path):
            shutil_copy(self.path, destination)
Example #7
    def copy_top_and_dependancies(self, dest_file):

        dest_folder = os_command.get_directory(dest_file)
        logger.info("Copy topologie file and dependancies")

        if self.path != os.path.abspath(dest_file):
            shutil_copy(self.path, os.path.abspath(dest_file))

        self.copy_dependancies(dest_folder)
Example #8
def _do_copy(src, dst):
    log.trace("copying %s => %s", src, dst)
    shutil_copy(src, dst)
    try:
        copystat(src, dst)
    except (IOError, OSError) as e:  # pragma: no cover
        # shutil.copystat gives a permission denied when using the os.setxattr function
        # on the security.selinux property.
        log.debug('%r', e)
Example #9
    def copy_dependancies(self, dest_folder):

        dest_folder = os.path.abspath(dest_folder)

        file_to_copy = self.get_include_file_list()
        # print(file_to_copy)
        for file in file_to_copy:
            if os.path.abspath(os_command.get_directory(file)) !=\
                    os.path.abspath(dest_folder):
                # print("Copy: "+file+":to: "+dest_folder)
                shutil_copy(file, dest_folder)
Example #10
    def copy(self, source, target):
        source = self._resolve_path(source)
        target = self._resolve_path(target)
        if isdir(source):
            # Copy inside target
            if exists(target):
                target = join(target, basename(source))
            copytree(source, target)
        else:
            # Will overwrite target file
            shutil_copy(source, target)
Example #12
    def copy_resource_asset(file_name):
        asset_path = normpath(path_join(asset_prefix, file_name))
        output_path = normpath(path_join(asset_output_prefix, file_name))

        if path_exists(asset_path):
            info('%s -> %s' % (asset_path, output_path))
            if not dummy:
                shutil_copy(asset_path, output_path)
        else:
            error('File does not exist: %s' % asset_path)
            copy_failed = True
            return
Example #13
def main(config):
    dataPath = os.path.join(globalParameters["WorkingPath"], \
        globalParameters["BenchmarkDataPath"])
    pushWorkingPath(globalParameters["BenchmarkProblemsPath"])
    ensurePath(dataPath)
    totalTestFails = 0
    for benchmarkProblemTypeConfig in config:
        problemTypeConfig = benchmarkProblemTypeConfig[0]
        if len(benchmarkProblemTypeConfig) < 2:
            problemSizeGroupConfigs = [{}]
        else:
            problemSizeGroupConfigs = benchmarkProblemTypeConfig[1:]
        for problemSizeGroupIdx in range(0, len(problemSizeGroupConfigs)):
            problemSizeGroupConfig = problemSizeGroupConfigs[
                problemSizeGroupIdx]
            print2("ProblemTypeConfig: %s" % problemTypeConfig)
            problemTypeObj = ProblemType(problemTypeConfig)
            globalParameters["EnableHalf"] = problemTypeObj["DataType"].isHalf(
            )

            # results files will be named
            newResultsFileName = os.path.join(dataPath, "%s_%02u.csv" \
                % (str(problemTypeObj), problemSizeGroupIdx) )
            newSolutionsFileName = os.path.join(dataPath, "%s_%02u.yaml" \
                % (str(problemTypeObj), problemSizeGroupIdx) )

            # skip if possible
            if globalParameters["ForceRedoBenchmarkProblems"] or \
                not os.path.exists(newResultsFileName):

                # Benchmark Problem Size Group
                (resultsFileBaseFinal, benchmarkErrors) = benchmarkProblemType(problemTypeConfig, \
                    problemSizeGroupConfig, problemSizeGroupIdx)
                totalTestFails += benchmarkErrors

                print("clientExit=%u %s for %s" %\
                        (totalTestFails, "(ERROR)" if totalTestFails else "(PASS)", \
                        globalParameters["ConfigPath"]))

                # Copy Data
                resultsFileBase = resultsFileBaseFinal
                resultsFileName = "%s.csv" % (resultsFileBase)
                solutionsFileName = "%s.yaml" % (resultsFileBase)
                shutil_copy(resultsFileName, newResultsFileName)
                shutil_copy(solutionsFileName, newSolutionsFileName)
            else:
                print1("# %s_%02u already benchmarked; skipping." %
                       (str(problemTypeObj), problemSizeGroupIdx))

    popWorkingPath()

    if globalParameters["ExitOnFails"] and totalTestFails:
        sys.exit(1)
Example #14
    def copy_resource_stream(file_name):
        asset_path = path_join(stream_path, file_name)
        output_path = path_join(stream_output_path, file_name)

        if path_exists(asset_path):
            info('%s -> %s' % (asset_path, output_path))
            if not dummy:
                shutil_copy(asset_path, output_path)
        else:
            error('File does not exist: %s' % asset_path)
            copy_failed = True
            return
Example #15
    def post_process_of_delineated_data(cfg):
        """Do the necessary conversions for the subbasin, stream, and flow direction rasters."""
        # inputs
        stream_net_file = cfg.taudems.streamnet_shp
        subbasin_file = cfg.taudems.subbsn_m
        flow_dir_file_tau = cfg.taudems.d8flow_m
        stream_raster_file = cfg.taudems.stream_m
        # outputs
        # -- shapefile
        shp_dir = cfg.dirs.geoshp
        UtilClass.mkdir(shp_dir)
        # ---- outlet, copy from DirNameUtils.TauDEM
        FileClass.copy_files(cfg.taudems.outlet_m, cfg.vecs.outlet)
        # ---- reaches
        output_reach_file = cfg.vecs.reach
        # ---- subbasins
        subbasin_vector_file = cfg.vecs.subbsn
        # -- raster file
        output_subbasin_file = cfg.spatials.subbsn
        output_flow_dir_file = cfg.spatials.d8flow
        output_stream_link_file = cfg.spatials.stream_link
        output_hillslope_file = cfg.spatials.hillslope

        id_map = StreamnetUtil.serialize_streamnet(stream_net_file,
                                                   output_reach_file)
        RasterUtilClass.raster_reclassify(subbasin_file, id_map,
                                          output_subbasin_file, GDT_Int32)
        StreamnetUtil.assign_stream_id_raster(stream_raster_file,
                                              output_subbasin_file,
                                              output_stream_link_file)

        # Convert D8 encoding rule to ArcGIS
        if cfg.is_TauDEM:
            shutil_copy(flow_dir_file_tau, output_flow_dir_file)
        else:
            D8Util.convert_code(flow_dir_file_tau, output_flow_dir_file)

        # convert raster to shapefile (for subbasin and basin)
        print "Generating subbasin vector..."
        VectorUtilClass.raster2shp(output_subbasin_file, subbasin_vector_file,
                                   "subbasin", FieldNames.subbasin_id)
        mask_file = cfg.spatials.mask
        basin_vector = cfg.vecs.bsn
        print "Generating basin vector..."
        VectorUtilClass.raster2shp(mask_file, basin_vector, "basin",
                                   FieldNames.basin)
        # delineate hillslope
        DelineateHillslope.downstream_method_whitebox(output_stream_link_file,
                                                      flow_dir_file_tau,
                                                      output_hillslope_file)
Example #16
    def test_run_replace(self, mocked_parse_args):
        """
        MBCPlaylistScript.run() test. --replace is used.
        """

        # preparing mocked argument parsing
        current_dir = dirname(__file__)
        random_program = mbc_choose_any_program(self.table_path)
        yesterday = get_yesterday()
        sample_file = join(current_dir, 'resources', 'sample.mp3')
        input_file = join(current_dir, 'resources', 'sample_for_replace_test.mp3')

        # copy file
        shutil_copy(sample_file, input_file)
        self.assertTrue(exists(input_file))
        del sample_file

        expected_args = Namespace(
            input=input_file,
            output=None,
            playlist_date=yesterday,
            program_id=random_program.id,
            table_path=self.table_path,
            ffmpeg_path=None,
            list_programs=False,
            update_table=False,
            version=False,
            print_only=False,
            replace=True  # replace
        )

        # mocking done
        mocked_parse_args.return_value = expected_args

        mbc_playlist.MBCPlaylistScript().run()

        # if ffprobe is available, also test this.
        # metadata's description should equal the crawled text
        if exists(backends.FFPROBE_PATH):
            crawler = playlist.MBCRadioPlaylistCrawler()
            pl = crawler.get_playlist(random_program.id, yesterday)
            pl_text = mbc_playlist.MBCPlaylist.format_text(pl)

            probe = backends.FFProbe()
            md = probe.probe(input_file)

            self.assertEqual(pl_text, md.metadata.description)

        unlink(input_file)
Example #17
    def redeploy_on_parent_changed(self, changed_service_name,
                                   changed_service_impl_name):

        # Local aliases
        to_auto_deploy = []

        # Iterate over all current services to check if any of them subclasses the service just deployed ..
        for impl_name, service_info in self.services.items():

            # .. skip the one just deployed ..
            if impl_name == changed_service_impl_name:
                continue

            # .. a Python class representing each service ..
            service_class = service_info['service_class']
            service_module = getmodule(service_class)

            # .. get all parent classes of that ..
            service_mro = getmro(service_class)

            # .. try to find the deployed service among the parents ..
            for base_class in service_mro:
                if issubclass(base_class,
                              Service) and (base_class is not Service):
                    if base_class.get_name() == changed_service_name:

                        # Do not deploy services that are defined in the same module their parent is
                        # because that would be an infinite loop of auto-deployment.
                        if getmodule(base_class) is service_module:
                            continue

                        # .. if it was found, add it to the list of what needs to be auto-redeployed ..
                        to_auto_deploy.append(service_info)

        # We will not always have any services to redeploy
        if to_auto_deploy:

            # Inform users that we are to auto-redeploy services and why we are doing it
            logger.info('Base service `%s` changed; auto-redeploying `%s`',
                        changed_service_name,
                        sorted(item['name'] for item in to_auto_deploy))

            # Go through each child service found and hot-deploy it
            for item in to_auto_deploy:
                module_path = getsourcefile(item['service_class'])
                logger.info('Copying `%s` to `%s`', module_path,
                            self.server.hot_deploy_config.pickup_dir)

                shutil_copy(module_path,
                            self.server.hot_deploy_config.pickup_dir)
Example #18
def copy(src, dst, *, follow_symlinks=True):
    """
    Copies a source file to a destination file or directory.

    Equivalent to "shutil.copy".

    Source and destination can also be binary opened file-like objects.

    .. versionadded:: 1.0.0

    Args:
        src (path-like object or file-like object): Source file.
        dst (path-like object or file-like object): Destination file or directory.
        follow_symlinks (bool): If True, follow symlinks.

    Raises:
         FileNotFoundError: Destination directory not found.
    """
    src, src_is_storage = format_and_is_storage(src, True)
    dst, dst_is_storage = format_and_is_storage(dst, True)

    if not src_is_storage and not dst_is_storage:
        return shutil_copy(src, dst, follow_symlinks=follow_symlinks)

    if not hasattr(dst, "read"):
        with ignore_exception(PermissionError):
            # Tries to write if not enough permission to check if destination is a
            # directory
            if isdir(dst):
                dst = join(dst, basename(src))

    _copy(src, dst, src_is_storage, dst_is_storage, follow_symlinks)
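A short usage sketch for the wrapper above (paths are illustrative, not from the source): with two local paths both storage checks are false, so the call reduces to plain shutil.copy; a directory destination is resolved to a file inside it first.

copy('settings.ini', '/tmp/settings_backup.ini')  # local fast path: plain shutil.copy
copy('settings.ini', '/tmp/backups')              # directory target resolves to /tmp/backups/settings.ini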
Example #19
    def copyfile(src, dest):
        # Python 3 needs the module files imported locally (package-relative imports)
        if sys_hexversion >= 0x03000000:
            with tempfile.NamedTemporaryFile(delete=False) as temp_file:
                with open(src) as src_file:
                    for line in src_file:
                        if(line.startswith("import spectrumtranslate")):
                            line = "from . import spectrumtranslate\n"
                        elif(line.startswith("import spectrumnumber")):
                            line = "from . import spectrumnumber\n"
                        temp_file.write(line.encode())
                temp_file.close()
                shutil_copy(temp_file.name, os.path.join(dest, src))
                os.remove(temp_file.name)

        # python 2 doesn't
        else:
            shutil_copy(src, dest)
Example #20
def taskFile(config, file_info, uo_path):
    ''' This is the function that is called on thread creation.
    It is responsible for downloading, verifying,
    and installing files. It also writes the updated hashes
    to the configuration file. '''
    if file_info['DisplayName'] in config['Hashes']:
        local_f_md5 = config['Hashes'][file_info[
            'DisplayName']]  # Get key from dictionary instead of computing.
    else:
        local_f_md5 = file_hash.grab_hash(
            uo_path +
            file_info['DisplayName'])  # Compute the hash of the local file

    if local_f_md5 is True:  # If the file doesn't exist..
        dl_file = grab_file(file_info['URL'])  # Download it,
        le_file = pull_file(dl_file)  # Extract it.
        config['Hashes'][file_info['DisplayName']] = file_info['Hash']
        file_parser.conf_write(config)

        for files in le_file:
            shutil_copy(files, uo_path + files)  # Move it to the uo_directory.
            print(" [%s]  Moved to the Ultima Directory." % files)

    elif local_f_md5:  # If hash is computed.
        if file_hash.check_hash(
                local_f_md5, file_info['Hash']):  # Check against the XML Hash
            config['Hashes'][file_info['DisplayName']] = file_info['Hash']
            file_parser.conf_write(config)
            print(" [%s]  Matching Hashes. Not installing." %
                  file_info['DisplayName'])

        else:
            dl_file = grab_file(file_info['URL'])  # Else, download the file
            le_file = pull_file(dl_file)  #  Extract the file.
            config['Hashes'][file_info['DisplayName']] = file_info['Hash']
            file_parser.conf_write(config)

            for files in le_file:
                shutil_copy(files, uo_path +
                            files)  #  Move the file to the new location.
                print(" [%s]  Moved to the Ultima Directory." % files)

    else:
        print(" [%s]  Bad file." % file_info['DisplayName'])
Example #21
    def __InstallGitHooks(self):
        self._host.LogNormal("  Installing Git hooks...")
        pyIPCMIInstallationPath = Path(
            self._host.Config[self._host.LibraryKey]['InstallationDirectory'])
        hookRunnerPath = pyIPCMIInstallationPath / "tools/git/hooks/run-hook.sh"

        if (not hookRunnerPath.exists()):
            raise ConfigurationException(
                "Runner script '{0!s}' not found.".format(
                    hookRunnerPath)) from FileNotFoundError(
                        str(hookRunnerPath))

        gitDirectoryPath = self.__GetGitDirectory()
        gitHookDirectoryPath = gitDirectoryPath / "hooks"

        for hookName in ["pre-commit"]:
            gitHookPath = gitHookDirectoryPath / hookName
            if gitHookPath.exists():
                if (gitHookPath.is_symlink()
                        and (gitHookPath.resolve() == hookRunnerPath)):
                    self._host.LogNormal(
                        "  '{0}' hook is already configured for pyIPCMI.".
                        format(hookName))
                else:
                    self._host.LogWarning(
                        "  '{0}' hook is already in use by another script.".
                        format(hookName))
            else:
                self._host.LogNormal(
                    "  Setting '{0}' hook for pyIPCMI...".format(hookName))
                self._host.LogDebug("symlink '{0!s}' -> '{1!s}'.".format(
                    gitHookPath, hookRunnerPath))
                try:
                    gitHookPath.symlink_to(hookRunnerPath)
                except OSError as ex:
                    # if symlink fails, do a copy as backup solution
                    if getattr(ex, 'winerror', None) == 1314:
                        self._host.LogDebug("copy '{0!s}' to '{1!s}'.".format(
                            hookRunnerPath, gitHookPath))
                        try:
                            shutil_copy(str(hookRunnerPath), str(gitHookPath))
                        except OSError as ex2:
                            raise ConfigurationException() from ex2
                    else:
                        # not the Windows "privilege not held" error; re-raise instead of swallowing it
                        raise
Example #22
    def atualizar_arquivos(self, versao):
        """ Pega os arquivos baixados de uma versão e sobrescreve os arquivos
        da versão atualmente em execução """

        destino_upgrade = os_path.join(self.dest_download,
                                       'safira-ide-{}/'.format(versao))

        lista_arquivos = self.listar_arquivos(destino_upgrade, versao)

        for arquivo in lista_arquivos:
            arquivo = arquivo.strip('/')

            # Get the parent directory ../
            regx2 = r'(.{1,})(\/|\\).*$'

            destino_1 = re_search(regx2, str(self.dest_download)).group(1)

            destino_final = os_path.join(destino_1, arquivo)
            local_arquivo_enviar = os_path.join(destino_upgrade, arquivo)

            ultimo_diretorio_destino = re_search(regx2, destino_final).group(1)

            # If the directory does not exist, create it
            if not os_path.exists(ultimo_diretorio_destino):
                print('[mkdir]', ultimo_diretorio_destino)

                # Create the directories and subdirectories
                os_makedirs(ultimo_diretorio_destino)
            else:
                print('[exist]', ultimo_diretorio_destino)

            try:
                print('[from] ', local_arquivo_enviar)
                print('[to   ]', destino_final)
                # Try to copy the file to the destination
                shutil_copy(local_arquivo_enviar, destino_final)
            except Exception as erro:
                return [
                    False, "Error copying file: " + str(erro) + ' file: ' +
                    local_arquivo_enviar + ' destination: ' + destino_final
                ]
        return [True, ""]
Example #23
def get_norm_info_for_net(net_info, files_for_normalization=None):
    if net_info.parent_net_name == 'Network trained from scratch':
        # In this case I can either calculate a new normalization info based on training data
        if net_info.path_to_normalization_info is None:
            if files_for_normalization is None:
                raise ValueError(
                    'You have to provide either normalization info or data files based on which it should be calculated.'
                )
            normalization_info, net_info.path_to_normalization_info = calculate_normalization_info(
                files_for_normalization,
                plot_histograms=False,
                user_correction=False,
                path_to_norm_info=net_info.path_to_net)
        else:
            normalization_info = load_normalization_info(
                net_info.path_to_normalization_info)
            shutil_copy(net_info.path_to_normalization_info,
                        net_info.path_to_net)
            net_info.path_to_normalization_info = os.path.join(
                net_info.path_to_net,
                os.path.basename(net_info.path_to_normalization_info))
    else:
        # In this case (retraining) we need to provide a normalization info.
        # This normalization info should in general come from the folder of retrained network,
        #  however it is also compatible with older version of the program with normalization info placed in a different folder
        if net_info.path_to_normalization_info is None:
            raise ValueError(
                'You must provide normalization info for retraining existing network'
            )
        normalization_info = load_normalization_info(
            net_info.path_to_normalization_info)
        shutil_copy(net_info.path_to_normalization_info, net_info.path_to_net)
        net_info.path_to_normalization_info = os.path.join(
            net_info.path_to_net,
            os.path.basename(net_info.path_to_normalization_info))

    # region Get sampling interval from normalization info
    # TODO: this does not really fit here but is too small to justify a separate function
    net_info.sampling_interval = get_sampling_interval_from_normalization_info(
        net_info.path_to_normalization_info)
    # endregion

    return normalization_info
Example #24
def copy_release():

    if path_exists(STATIC_OUTPUT_PATH):
        rmtree(STATIC_OUTPUT_PATH)

    for pattern in RELEASE_PAGE_FILES:
        for f in glob(path_join(STATIC_TEMPLATE_PATH, pattern)):
            srcfile = normpath(f)
            dstfile = normpath(path_join(STATIC_OUTPUT_PATH, relpath(f, STATIC_TEMPLATE_PATH)))
            dst_dir = dirname(dstfile)
            if dst_dir != "" and not path_exists(dst_dir):
                makedirs(dst_dir)
            shutil_copy(srcfile, dstfile)

    for f in RELEASE_FILES:
        srcfile = normpath(f)
        dstfile = normpath(path_join(STATIC_OUTPUT_PATH, f))
        dst_dir = dirname(dstfile)
        if dst_dir != "" and not path_exists(dst_dir):
            makedirs(dst_dir)
        shutil_copy(srcfile, dstfile)

    shutil_copy("benchmark.canvas.js", normpath(path_join(STATIC_OUTPUT_PATH, "benchmark.canvas.js")))

    shutil_copytree(normpath('staticmax'), path_join(STATIC_OUTPUT_PATH, 'staticmax'))

    copy_release_capture(config_name=DEFAULT_CAPTURE_NAME)
Example #25
    def fazer_backup_versao(self):
        """Faz uma cópia dos arquivos em um diretório de backups"""

        lista_arquivos = self.listar_arquivos2('.')

        for arquivo_origem in lista_arquivos:
            arquivo_origem = arquivo_origem.strip('/')
            # Get the parent directory ../
            regx2 = r'(.{1,})(\/|\\).*$'

            destino_final_file = self.dest_backup
            destino_final_arquivo = os_path.join(destino_final_file,
                                                 arquivo_origem)

            ultimo_diretorio_destino = re_search(
                regx2, destino_final_arquivo).group(1)

            # If the directory does not exist, create it
            if not os_path.exists(ultimo_diretorio_destino):
                print('[mkdir]', ultimo_diretorio_destino)

                # Create the directories and subdirectories
                os_makedirs(ultimo_diretorio_destino)
            else:
                print('[exist]', ultimo_diretorio_destino)

            try:
                print('[from] ', arquivo_origem)
                print('[to   ]', destino_final_arquivo)
                # Try to copy the file to the destination
                shutil_copy(arquivo_origem, destino_final_arquivo)
            except Exception as erro:
                return [
                    False, "Error copying file: " + str(erro) + ' file: ' +
                    arquivo_origem + ' destination: ' + destino_final_arquivo
                ]

        return [True, ""]
Example #27
def rms_contrast(filename, input_path, output_path, limit=80):
    """
    Assign an image to the folder of images with the same RMS contrast
    @param filename: image name
    @param input_path: path to the folder where the image is located
    @param output_path: path to the folder holding per-contrast image folders
    @param limit: maximum number of images with the same contrast in one folder

    """
    path = f"{input_path}/{filename}"
    contrast_count = {}
    image = cv2.imread(path, 0) / 255
    mean = cv2.mean(image)[0]
    height, width = image.shape
    result = 0
    for i in range(height):
        for j in range(width):
            result += (image[i][j] - mean) ** 2
    result = round(sqrt(result / (height * width)), 2)
    contrast_count[result] = contrast_count.get(result, 0) + 1
    result_path = f"{output_path}/{result}"
    create_directory(result_path)
    if len(os.listdir(result_path)) < limit:
        shutil_copy(path, result_path)  # copy the image file itself, not the input folder
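The per-pixel double loop above dominates the runtime. Since cv2.imread already returns a NumPy array, the same RMS contrast (the standard deviation of the normalized intensities) vectorizes into one call; a sketch, not from the source:

import numpy as np

def rms_contrast_value(image):
    # image: grayscale array already scaled to [0, 1], as in the function above.
    # RMS contrast = sqrt(mean((pixel - mean)**2)), i.e. the population standard deviation.
    return round(float(np.std(image)), 2)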
Example #28
def copy(src, dst):
    """
    Copies a source file to a destination file or directory.

    Equivalent to "shutil.copy".

    Source and destination can also be binary opened file-like objects.

    Args:
        src (path-like object or file-like object): Source file.
        dst (path-like object or file-like object):
            Destination file or directory.

    Raises:
         IOError: Destination directory not found.
    """
    # Handles path-like objects and checks if storage
    src, src_is_storage = format_and_is_storage(src)
    dst, dst_is_storage = format_and_is_storage(dst)

    # Local files: Redirects to "shutil.copy"
    if not src_is_storage and not dst_is_storage:
        return shutil_copy(src, dst)

    # Checks destination
    if not hasattr(dst, 'read'):
        try:
            # If destination is directory: defines an output file inside it
            if isdir(dst):
                dst = join(dst, basename(src))

            # Checks if destination dir exists
            elif not isdir(dirname(dst)):
                raise FileNotFoundError("No such file or directory: '%s'" %
                                        dst)

        except PermissionError:
            # Unable to check target directory due to missing read access,
            # but do not raise, so the write can still be attempted if possible
            print('PermissionError reached')
            pass

    # Performs copy
    _copy(src, dst, src_is_storage, dst_is_storage)
Example #29
	def _RunSimulation(self, testbench): # mccabe:disable=MC0001
		# select modelsim.ini from precompiled
		precompiledModelsimIniPath = self.Directories.PreCompiled
		device_vendor = self._pocProject.Board.Device.Vendor
		if device_vendor is Vendors.Altera:
			precompiledModelsimIniPath /= self.Host.PoCConfig['CONFIG.DirectoryNames']['AlteraSpecificFiles']
		elif device_vendor is Vendors.Lattice:
			precompiledModelsimIniPath /= self.Host.PoCConfig['CONFIG.DirectoryNames']['LatticeSpecificFiles']
		elif device_vendor is Vendors.Xilinx:
			precompiledModelsimIniPath /= self.Host.PoCConfig['CONFIG.DirectoryNames']['XilinxSpecificFiles']

		precompiledModelsimIniPath /= "modelsim.ini"
		if not precompiledModelsimIniPath.exists():
			raise SimulatorException("Modelsim ini file '{0!s}' not found.".format(precompiledModelsimIniPath)) \
				from FileNotFoundError(str(precompiledModelsimIniPath))

		simBuildPath = self.Directories.Working / self.COCOTB_SIMBUILD_DIRECTORY
		# create temporary directory for Cocotb if not existent
		if (not (simBuildPath).exists()):
			self.LogVerbose("Creating build directory for simulator files.")
			self.LogDebug("Build directory: {0!s}".format(simBuildPath))
			try:
				simBuildPath.mkdir(parents=True)
			except OSError as ex:
				raise SimulatorException("Error while creating '{0!s}'.".format(simBuildPath)) from ex

		# write local modelsim.ini
		modelsimIniPath = simBuildPath / "modelsim.ini"
		if modelsimIniPath.exists():
			try:
				modelsimIniPath.unlink()
			except OSError as ex:
				raise SimulatorException("Error while deleting '{0!s}'.".format(modelsimIniPath)) from ex

		with modelsimIniPath.open('w') as fileHandle:
			fileContent = dedent("""\
				[Library]
				others = {0!s}
				""").format(precompiledModelsimIniPath)
			fileHandle.write(fileContent)

		#
		self.LogNormal("Running simulation...")
		cocotbTemplateFilePath = self.Host.Directories.Root / \
															self.Host.PoCConfig[testbench.ConfigSectionName]['CocotbMakefile'] # depends on testbench
		topLevel =      testbench.TopLevel
		cocotbModule =  testbench.ModuleName

		# create one VHDL line for each VHDL file
		vhdlSources = ""
		for file in self._pocProject.Files(fileType=FileTypes.VHDLSourceFile):
			if (not file.Path.exists()):
				raise SimulatorException("Cannot add '{0!s}' to Cocotb Makefile.".format(file.Path)) \
					from FileNotFoundError(str(file.Path))
			vhdlSources += str(file.Path) + " "

		# copy Cocotb (Python) files to temp directory
		self.LogVerbose("Copying Cocotb (Python) files into temporary directory.")
		cocotbTempDir = str(self.Directories.Working)
		for file in self._pocProject.Files(fileType=FileTypes.CocotbSourceFile):
			if (not file.Path.exists()):
				raise SimulatorException("Cannot copy '{0!s}' to Cocotb temp directory.".format(file.Path)) \
					from FileNotFoundError(str(file.Path))
			self.LogDebug("copy {0!s} {1}".format(file.Path, cocotbTempDir))
			try:
				shutil_copy(str(file.Path), cocotbTempDir)
			except OSError as ex:
				raise SimulatorException("Error while copying '{0!s}'.".format(file.Path)) from ex

		# read/write Makefile template
		self.LogVerbose("Generating Makefile...")
		self.LogDebug("Reading Cocotb Makefile template file from '{0!s}'".format(cocotbTemplateFilePath))
		with cocotbTemplateFilePath.open('r') as fileHandle:
			cocotbMakefileContent = fileHandle.read()

		cocotbMakefileContent = cocotbMakefileContent.format(PoCRootDirectory=str(self.Host.Directories.Root),
																													VHDLSources=vhdlSources,
																													TopLevel=topLevel, CocotbModule=cocotbModule)

		cocotbMakefilePath = self.Directories.Working / "Makefile"
		self.LogDebug("Writing Cocotb Makefile to '{0!s}'".format(cocotbMakefilePath))
		with cocotbMakefilePath.open('w') as fileHandle:
			fileHandle.write(cocotbMakefileContent)

		# execute make
		make = Make(self.Host.Platform, self.DryRun, logger=self.Logger)
		if (SimulationSteps.ShowWaveform in self._simulationSteps): make.Parameters[Make.SwitchGui] = 1
		testbench.Result = make.RunCocotb()
Example #31
def writeCMake(outputPath, solutions, kernels, libraryStaticFiles, clientName):
    print1("# Writing Custom CMake")
    ##############################################################################
    # Min Naming
    ##############################################################################
    if globalParameters["ShortNames"] and not globalParameters["MergeFiles"]:
        solutionSerialNaming = Solution.getSerialNaming(solutions)
        kernelSerialNaming = Solution.getSerialNaming(kernels)
    else:
        solutionSerialNaming = None
        kernelSerialNaming = None
    solutionMinNaming = Solution.getMinNaming(solutions)
    kernelMinNaming = Solution.getMinNaming(kernels)
    solutionWriter = SolutionWriter( \
        solutionMinNaming, solutionSerialNaming, \
        kernelMinNaming, kernelSerialNaming)
    kernelWriterSource = KernelWriterSource( \
        kernelMinNaming, kernelSerialNaming)
    kernelWriterAssembly = KernelWriterAssembly( \
        kernelMinNaming, kernelSerialNaming)

    generatedFile = open(os.path.join(outputPath, "Generated.cmake"), "w")
    generatedFile.write(CMakeHeader)
    generatedFile.write("set( TensileClient_SOLUTIONS\n")

    # write solution names
    if globalParameters["MergeFiles"]:
        generatedFile.write("  ${CMAKE_SOURCE_DIR}/Solutions.h\n")
        generatedFile.write("  ${CMAKE_SOURCE_DIR}/Solutions.cpp\n")
    else:
        for solution in solutions:
            solutionName = solutionWriter.getSolutionName(solution)
            generatedFile.write("  ${CMAKE_SOURCE_DIR}/Solutions/%s.h\n" \
                % (solutionName) )
            generatedFile.write("  ${CMAKE_SOURCE_DIR}/Solutions/%s.cpp\n" \
                % (solutionName) )
    generatedFile.write("  )\n")

    # write kernel names
    generatedFile.write("set( TensileClient_KERNELS\n")
    if globalParameters["MergeFiles"]:
        generatedFile.write("  ${CMAKE_SOURCE_DIR}/Kernels.h\n")
        generatedFile.write("  ${CMAKE_SOURCE_DIR}/Kernels.cpp\n")
    else:
        for kernel in kernels:
            kernelName = kernelWriterSource.getKernelName(kernel) if kernel[
                "KernelLanguage"] == "Source" else kernelWriterAssembly.getKernelName(
                    kernel)
            generatedFile.write("  ${CMAKE_SOURCE_DIR}/Kernels/%s.h\n" %
                                (kernelName))
            generatedFile.write("  ${CMAKE_SOURCE_DIR}/Kernels/%s.cpp\n" %
                                kernelName)
    generatedFile.write("  )\n")

    generatedFile.write("set( TensileClient_SOURCE\n")
    for fileName in libraryStaticFiles:
        # copy file
        shutil_copy( os.path.join(globalParameters["SourcePath"], fileName), \
            outputPath )
        # add file to cmake
        generatedFile.write("  ${CMAKE_SOURCE_DIR}/%s\n" % fileName)
    generatedFile.write("  )\n\n")

    # close generated cmake
    generatedFile.close()
Example #32
	def copy(src, dst):
		return shutil_copy(src, dst)
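A usage note for the thin wrapper above (standard-library behavior, illustrative paths): since Python 3.3, shutil.copy returns the path of the newly created file, so the wrapper's return value can be chained.

new_path = copy('report.pdf', '/tmp')  # '/tmp/report.pdf' on Python >= 3.3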
Example #33
def align(file_name, config, c):
    '''This function is the "main" of the classifier part of the program.

    Keyword arguments:
    file_name -- the name of the fasta file that is to be typed

    Sets everything in motion and retrieves and distributes all the results.

    '''
    # Set warning flags
    WARNINGS = dict()

    # Get database and output name
    db_name = get_organism(config, c)
    out_name = file_name.split("/")[-1]
    output = "%s/%s.CanSNPer" % (config["tmp_path"], out_name)

    # Get the sequences from our SQLite3 database, and write them
    # to tmp files that progressiveMauve can read
    c.execute("SELECT Organism, Strain, Sequence FROM Sequences WHERE Organism = ?", (db_name,))
    seq_counter = 0  # Counter for the number of sequences
    seq_uids = dict()

    reference_sequences = dict()

    if config["verbose"]:
        print("#Fetching reference sequence(s) ...")
    for row in c.fetchall():
        seq_counter += 1
        # 32 char long unique hex string used for unique tmp file names
        seq_uids[seq_counter] = uuid4().hex
        reference_sequences[seq_counter] = row[1]  # save the name of the references
        if not path.exists(config["tmp_path"]):
            makedirs(config["tmp_path"])
        tmp_file = open("%s/CanSNPer_reference_sequence." % config["tmp_path"] +
                        seq_uids[seq_counter] +
                        ".fa", "w")  # Open a tmp file
        tmp_file.write(">%s.%s\n%s\n" % (row[0], row[1], row[2]))  # Write to tmp file
        tmp_file.close()

    # Check if the file exists
    if not path.isfile(file_name):
        exit("#[ERROR in %s] No such file: %s" % (config["query"], file_name))

    # Parallelised running of several progressiveMauve processes
    if config["num_threads"] == 0 or config["num_threads"] > seq_counter:
        max_threads = seq_counter
    else:
        max_threads = config["num_threads"]

    if config["verbose"]:
        print("#Aligning sequence against %i reference sequence(s) ..." % len(reference_sequences))

    processes = list()
    mauve_jobs = list()
    x2f_jobs = list()
    for i in range(1, seq_counter + 1):
        if config["save_align"]:
            fasta_name = reference_sequences[i]
        else:
            fasta_name = seq_uids[i]

        # Write the commands that will be run. one for each reference sequence
        mauve_jobs.append("%s --output=%s.%s.xmfa " % (config["mauve_path"], output, seq_uids[i]) +
                          "%s/CanSNPer_reference_sequence.%s.fa %s > " % (config["tmp_path"],
                                                                          seq_uids[i], file_name) +
                          "/dev/null 2> %s/CanSNPer_err%s.txt" % (config["tmp_path"], seq_uids[i]))

        x2f_jobs.append("%s %s.%s.xmfa %s/CanSNPer_reference_sequence.%s.fa " % (config["x2fa_path"],
                        output, seq_uids[i], config["tmp_path"], seq_uids[i]) +
                        "0 %s.%s.fa 2> %s/CanSNPer_xerr%s.txt" % (output, fasta_name,
                                                                  config["tmp_path"], seq_uids[i]))

    # Starting the processes that use progressiveMauve to align sequences
    while True:
        while mauve_jobs and len(processes) < max_threads:
            job = mauve_jobs.pop()
            processes.append(Popen(job, shell=True))
            if config["dev"]:
                print("#[DEV] progressiveMauve command: %s" % job)
        for p in processes:
            if p.poll() is not None:
                processes.remove(p)
        if not processes and not mauve_jobs:
            break
        time.sleep(0.5)
    for uid in seq_uids:  # Errorcheck mauve, can't continue if it crashed
        mauve_error_check(seq_uids[uid], config)
    while True:
        while x2f_jobs and len(processes) < max_threads:
            job = x2f_jobs.pop()
            processes.append(Popen(job, shell=True))
            if config["dev"]:
                print("#[DEV] x2fa.py command: %s" % job)
        for p in processes:
            if p.poll() is not None:
                processes.remove(p)
        if not processes and not x2f_jobs:
            break
        time.sleep(0.5)
    for uid in seq_uids:  # Errorcheck x2fa.py
        x2fa_error_check(seq_uids[uid], config)

    # Now we have aligned sequences, read them into memory and
    # start working through the tree
    alternates = dict()
    for i in range(1, seq_counter + 1):
        if config["save_align"]:
            fasta_name = reference_sequences[i]
        else:
            fasta_name = seq_uids[i]
        fasta_name_readable = reference_sequences[i]
        alignment_file = open("%s.%s.fa" % (output, fasta_name), "r")
        sequences = alignment_file.read().split(">")[1:]
        alignment_file.close()
        reference = "".join(sequences[0].split("\n")[1:])
        alternate = "".join(sequences[1].split("\n")[1:])
        alternates[reference_sequences[i]] = alternate
        identity_counter = 0
        for j in range(0, len(reference)):
            if reference[j] == alternate[j]:
                identity_counter += 1
        if config["verbose"]:
            print("#Seq identity with %s: %.2f%s" % (fasta_name_readable, float(identity_counter) /
                  float(len(reference)) * 100, "%"))
        if float(identity_counter) / float(len(reference)) < 0.8:
            WARNINGS["ALIGNMENT_WARNING"] = "#[WARNING in %s] Sequence identity between %s and a reference strain of" % (config["query"], out_name) +\
                " %s was only %.2f percent" % (db_name, float(identity_counter) / float(len(reference)) * 100)

    root = find_tree_root(db_name, c, config)  # Find the root of the tree we are using
    if config["verbose"]:
        print("#Using tree root:", root)

    if config["allow_differences"]:  # Check whether or not to force the first tree node
        force_flag = True
    else:
        force_flag = False

    if config["list_snps"]:  # Make a raw list of which SNPs the sequence has
        snp_out_file = open("%s_snplist.txt" % file_name, "w")
        snplist = snp_lister(alternates, db_name, out_name, config, c)
        for snp in snplist:
            snp_out_file.write("\t".join(snp) + "\n")
        snp_out_file.close()

    if config["draw_tree"]:  # Draw a tree and mark positions
        snplist = snp_lister(alternates, db_name, out_name, config, c)
        if config["galaxy"]:
            tree_file_name = getcwd() + "/CanSNPer_tree_galaxy.pdf"
        else:
            tree_file_name = "%s_tree.pdf" % file_name
        draw_ete2_tree(db_name, snplist[1:], tree_file_name, config, c)
    # Tree walker!
    tree_location = multi_tree_walker(root, alternates, db_name, config["allow_differences"],
                                      list(), config, c, force_flag)

    # Print the results of our walk
    if config["tab_sep"]:
        print("%s\t%s" % (out_name, tree_location[0]))
    else:
        print("Classification of %s: %s" % (out_name, tree_location[0]))

    if tree_location[1]:
        incorrect_snps = ""
        for incorrect_snp in tree_location[1]:
            incorrect_snps += str(incorrect_snp) + " "
        WARNINGS["TREE_WARNING"] = "#[WARNING in %s] these SNPs were not in the derived state: %s" % (config["query"], str(incorrect_snps))
        if config["verbose"]:
            print("#A forced tree walk was conducted")

    try:  # print any warnings that may have been collected
        stderr.write(str(WARNINGS["ALIGNMENT_WARNING"]) + "\n")
    except KeyError:
        pass
    try:
        stderr.write(str(WARNINGS["TREE_WARNING"]) + "\n")
    except KeyError:
        pass

    # Remove a bunch of tmp files
    while seq_counter:
        if config["save_align"]:
            destination = getcwd()
            srcfile = "%s.%s.fa" % (output, reference_sequences[seq_counter])
            shutil_copy(srcfile, destination)
            silent_remove("%s.%s.fa" % (output, reference_sequences[seq_counter]))
        silent_remove("%s.%s.fa" % (output, seq_uids[seq_counter]))
        silent_remove("%s/CanSNPer_reference_sequence.%s.fa" % (config["tmp_path"],
                      seq_uids[seq_counter]))
        silent_remove("%s/CanSNPer_reference_sequence.%s.fa.sslist" % (config["tmp_path"],
                      seq_uids[seq_counter]))
        silent_remove("%s.sslist" % (file_name))
        silent_remove("%s.%s.xmfa" % (output, seq_uids[seq_counter]))
        silent_remove("%s.%s.xmfa.bbcols" % (output, seq_uids[seq_counter]))
        silent_remove("%s.%s.xmfa.backbone" % (output, seq_uids[seq_counter]))
        seq_counter -= 1
Example #34
def main(config):
    libraryLogicPath = os.path.join(globalParameters["WorkingPath"], \
        globalParameters["LibraryLogicPath"])
    pushWorkingPath(globalParameters["LibraryClientPath"])

    ##############################################################################
    # Copy Source Files
    ##############################################################################
    pushWorkingPath("source")
    filesToCopy = [
        "Client.cpp", "Client.h", "DeviceStats.h", "ReferenceCPU.h",
        "TensorUtils.h", "MathTemplates.cpp", "MathTemplates.h",
        "KernelHeader.h", "Tools.h", "CMakeLists.txt", "TensileConfig.cmake",
        "TensileConfigVersion.cmake"
    ]

    for f in filesToCopy:
        shutil_copy(os.path.join(globalParameters["SourcePath"], f),
                    globalParameters["WorkingPath"])
    if globalParameters["RuntimeLanguage"] == "OCL":
        shutil_copy(
            os.path.join(globalParameters["SourcePath"], "FindOpenCL.cmake"),
            globalParameters["WorkingPath"])
    else:
        shutil_copy(
            os.path.join(globalParameters["SourcePath"], "FindHIP.cmake"),
            globalParameters["WorkingPath"])
        shutil_copy(
            os.path.join(globalParameters["SourcePath"], "FindHCC.cmake"),
            globalParameters["WorkingPath"])

    ##############################################################################
    # Read Logic Files
    ##############################################################################
    logicFiles = [os.path.join(libraryLogicPath, f) for f \
        in os.listdir(libraryLogicPath) \
        if (os.path.isfile(os.path.join(libraryLogicPath, f)) \
        and os.path.splitext(f)[1]==".yaml")]
    print1("LogicFiles: %s" % logicFiles)
    functions = []
    functionNames = []
    enableHalf = False
    for logicFileName in logicFiles:
        (scheduleName, deviceNames, problemType, solutionsForType, \
            indexOrder, exactLogic, rangeLogic) \
            = YAMLIO.readLibraryLogicForSchedule(logicFileName)
        if problemType["DataType"].isHalf():
            enableHalf = True
        functions.append((scheduleName, problemType))
        functionNames.append("tensile_%s" % (problemType))
    globalParameters["EnableHalf"] = enableHalf

    ##############################################################################
    # Write Generated Header
    ##############################################################################
    forBenchmark = False
    solutions = None
    problemSizes = None
    stepName = None
    writeClientParameters(forBenchmark, solutions, problemSizes, stepName, \
        functions)
    popWorkingPath()  # source

    ##############################################################################
    # Run Build Script
    ##############################################################################
    # if redo=true, clobber the build directory
    if globalParameters["ForceRedoLibraryClient"]:
        rmtree(os.path.join(globalParameters["WorkingPath"], "build"), \
            ignore_errors=True)
    pushWorkingPath("build")

    # write runScript
    path = globalParameters["WorkingPath"]
    forBenchmark = False
    runScriptName = writeRunScript(path, libraryLogicPath, forBenchmark)

    # run runScript
    process = Popen(runScriptName, cwd=globalParameters["WorkingPath"])
    process.communicate()
    if process.returncode:
        printWarning("ClientWriter Benchmark Process exited with code %u" %
                     process.returncode)
    popWorkingPath()  # build

    popWorkingPath()  # LibraryClient

    return process.returncode
Example #35
def old_connect_with_db(db_path=DB_PATH, deep=0, db_l=db):
    """
    Создает соединение с БД для Pony ORM версии 0.73

    :param db_path: путь к БД
    :param deep: глубина рекурсии
    :param db_l: объект БД
    :return:
    """
    from os.path import isfile, split, join
    from os import remove, rename
    from sys import exit
    from time import ctime
    from shutil import copy as shutil_copy

    if deep > 5:
        print(
            'connecting to the database is recursing deeply, which means something is going wrong'
        )
        exit()

    if not isfile(db_path):
        db_l.bind(provider=cfg.get("db", "type"),
                  filename=db_path,
                  create_db=True)
        db_l.generate_mapping(create_tables=True)
        """
                db_l.connect(allow_auto_upgrade=True,
                     create_tables=True,
                     create_db=True,
                     provider=cfg.get("db", "type"),
                     filename=db_path)
        """

        print('create db')
    else:

        try:
            db_l.bind(provider=cfg.get("db", "type"), filename=db_path)
            db_l.generate_mapping()
        except Exception as e:
            print(
                'an error occurred while opening the DB (apparently the DB schema was changed)\n',
                e)
            print('attempting to fix.....')
            try:
                db_l.bind(
                    provider=cfg.get("db", "type"),
                    filename=db_path,
                    create_tables=True,
                )
                db_l.generate_mapping()
                print('it worked')
            except Exception as e:
                print("Создаём бекап а затем удаляем БД")
                t = ctime().split()[1:]
                t[0], t[1], t[2] = t[2], t[1], t[0]
                copy_name = shutil_copy(db_path, DB_BACKUPS)
                new_name = join(
                    split(copy_name)[0],
                    '_'.join(t).replace(":", "-") + "_" + split(db_path)[1])
                rename(copy_name, new_name)
                print("создан бекап:", new_name)
                print("Удалена исходная база данных, создаём новую")
                remove(db_path)
                print(
                    '\n=========================================\n\n\t\tдля создания новой БД перезапустите код.....'
                )
                print('\n=========================================')
                exit()
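The backup branch above builds a timestamp by reshuffling ctime() output; the same effect is simpler with datetime. A hedged sketch of the copy-then-rename backup step (function and argument names are hypothetical):

from datetime import datetime
from os import rename
from os.path import basename, dirname, join
from shutil import copy as shutil_copy

def backup_db(db_path, backup_dir):
    # shutil.copy returns the destination path when given a directory (3.3+)
    copy_name = shutil_copy(db_path, backup_dir)
    stamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')  # filesystem-safe
    new_name = join(dirname(copy_name), stamp + '_' + basename(db_path))
    rename(copy_name, new_name)
    return new_name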
Exemple #36
0
 def copy(self, filemeta_obj_from, filemeta_obj_to):
     frompath = self._get_filepath(filemeta_obj_from)
     topath = self._get_filepath(filemeta_obj_to)
     self._create_dir(filemeta_obj_to)
     shutil_copy(frompath, topath)
Exemple #37
0
    def ConfigureJB(self):
        if pathExists('%scomponents/zapNC.config' % PluginPath) is False:
            self.ZapNC=("1:0:1:1163:2AF8:13E:820000:0:0:0:")
        else:
            with open('%scomponents/zapNC.config' % PluginPath, 'r') as ZAPconfig:
                tmp=ZAPconfig.readline().split('"')[1]
                self.ZapNC=(tmp)
                ZAPconfig.close()
        if pathExists('%scomponents/zapCP.config' % PluginPath) is False:
            self.ZapCP=("1:0:1:332d:3390:71:820000:0:0:0:")
        else:
            with open('%scomponents/zapCP.config' % PluginPath, 'r') as ZAPconfig:
                tmp=ZAPconfig.readline().split('"')[1]
                self.ZapCP=(tmp)
                ZAPconfig.close()
        self.j00zekBouquetsNCBin='%scomponents/j00zekBouquetsNC%s' % (PluginPath,binType)
        self.j00zekBouquetsCPBin='%scomponents/j00zekBouquetsCP%s' % (PluginPath,binType)
        self.ExcludedSIDsTemplate='%scomponents/excludedSIDs.template' % PluginPath
        self.ExcludedSIDsFileName='userbouquet.excludedSIDs.j00zekAutobouquet.tv'
        self.ExcludedSIDsFile='/etc/enigma2/%s' % self.ExcludedSIDsFileName
        self.IncludedTranspondersTemplate='%scomponents/transponders.PL' % PluginPath
        self.IncludedTranspondersFile='/tmp/transponders.PL'
        self.runlist = []
        if pathExists('%s/components/CheckType.sh' % PluginPath) is True:
            self.runlist.append(('%s/components/CheckType.sh' % PluginPath))
        
        self.ZapTo=""
        
        # Polish transponders only
        if config.plugins.GOS.j00zekBouquetsNC.value.endswith('PL'):
            if pathExists(self.IncludedTranspondersFile) is False:
                os_symlink(self.IncludedTranspondersTemplate,self.IncludedTranspondersFile)
        else:
            if pathExists(self.IncludedTranspondersFile) is True:
                os_remove(self.IncludedTranspondersFile)
        # channels to skip
        if config.plugins.GOS.j00zekBouquetsExcludeBouquet.value == True:
            self.ExcludeSIDS="ExcludeSIDS"
            ExcludedSIDsFileNeedsUpdate=1
            if pathExists(self.ExcludedSIDsFile) is False:
                from shutil import copy as shutil_copy
                shutil_copy(self.ExcludedSIDsTemplate,self.ExcludedSIDsFile)
        else:
            self.ExcludeSIDS=""
            ExcludedSIDsFileNeedsUpdate=0
            
        # check the layout of the bouquets.tv file
        hasNewline=1
        if config.plugins.GOS.j00zekBouquetsNC.value !="NA":
            ncNeedsUpdate=1
        else:
            ncNeedsUpdate=0
        if config.plugins.GOS.j00zekBouquetsCP.value !="NA":
            cpNeedsUpdate=1
        else:
            cpNeedsUpdate=0
                
        windowsEOL=''
        with open("/etc/enigma2/bouquets.tv", "r") as bouquetsTV:
            for line in bouquetsTV:
                if windowsEOL == '' and line.endswith('\r\n'):
                    windowsEOL='\r'
                if line.endswith('\n'):
                    hasNewline=1
                else:
                    hasNewline=0
                if line.find(self.ExcludedSIDsFileName) > 0:
                    ExcludedSIDsFileNeedsUpdate=0
                if line.find('userbouquet.ncplus.j00zekAutobouquet.tv') > 0:
                    ncNeedsUpdate=0
                if line.find('userbouquet.CP.j00zekAutobouquet.tv') > 0:
                    cpNeedsUpdate=0
            bouquetsTV.close()
        # append our bouquets
        if ncNeedsUpdate == 1:
            with open("/etc/enigma2/bouquets.tv", "a") as bouquetsTV:
                if hasNewline == 0:
                    bouquetsTV.write('\n')
                bouquetsTV.write('#SERVICE 1:7:1:0:0:0:0:0:0:0:FROM BOUQUET "userbouquet.ncplus.j00zekAutobouquet.tv" ORDER BY bouquet%s\n' % windowsEOL)
                bouquetsTV.close()
                hasNewline=1
            with open("/etc/enigma2/bouquets.radio", "a") as bouquetsTV:
                if hasNewline == 0:
                    bouquetsTV.write('\n')
                bouquetsTV.write('#SERVICE 1:7:1:0:0:0:0:0:0:0:FROM BOUQUET "userbouquet.ncplus.j00zekAutobouquet.radio" ORDER BY bouquet%s\n' % windowsEOL)
                bouquetsTV.close()
                hasNewline=1
        if cpNeedsUpdate == 1:
            with open("/etc/enigma2/bouquets.tv", "a") as bouquetsTV:
                if hasNewline == 0:
                    bouquetsTV.write('\n')
                bouquetsTV.write('#SERVICE 1:7:1:0:0:0:0:0:0:0:FROM BOUQUET "userbouquet.CP.j00zekAutobouquet.tv" ORDER BY bouquet%s\n' % windowsEOL)
                bouquetsTV.close()
                hasNewline=1
        if ExcludedSIDsFileNeedsUpdate == 1:
            with open("/etc/enigma2/bouquets.tv", "a") as bouquetsTV:
                if hasNewline == 0:
                    bouquetsTV.write('\n')
                bouquetsTV.write('#SERVICE 1:7:1:0:0:0:0:0:0:0:FROM BOUQUET "%s" ORDER BY bouquet%s\n' % (self.ExcludedSIDsFileName,windowsEOL))
                bouquetsTV.close()

        if config.plugins.GOS.j00zekBouquetsNC.value != 'NA':
            self.runlist.append("%s %s %s %s %s" % ( self.j00zekBouquetsNCBin, config.plugins.GOS.j00zekBouquetsNC.value, \
                                config.plugins.GOS.j00zekBouquetsAction.value, self.ZapNC, self.ExcludeSIDS))
            self.ZapTo=self.ZapNC
        if config.plugins.GOS.j00zekBouquetsCP.value != 'NA':
            self.runlist.append("%s %s %s %s %s" % ( self.j00zekBouquetsCPBin, config.plugins.GOS.j00zekBouquetsCP.value, \
                                config.plugins.GOS.j00zekBouquetsAction.value, self.ZapCP, self.ExcludeSIDS))
            if self.ZapTo == "":
                self.ZapTo = self.ZapCP
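ConfigureJB scans /etc/enigma2/bouquets.tv once to learn the line-ending style, whether the file ends with a newline, and which bouquets are already registered, then appends only the missing entries. A condensed sketch of that scan-then-append logic (names are illustrative, not the plugin's API):

def register_bouquet(bouquets_path, bouquet_file):
    windows_eol = ''
    has_newline = True
    registered = False
    with open(bouquets_path, 'r') as fd:
        for line in fd:
            if windows_eol == '' and line.endswith('\r\n'):
                windows_eol = '\r'          # preserve the file's CRLF style
            has_newline = line.endswith('\n')
            if bouquet_file in line:
                registered = True           # already listed, nothing to add
    if not registered:
        with open(bouquets_path, 'a') as fd:
            if not has_newline:
                fd.write('\n')              # repair a missing final newline
            fd.write('#SERVICE 1:7:1:0:0:0:0:0:0:0:FROM BOUQUET "%s" ORDER BY bouquet%s\n'
                     % (bouquet_file, windows_eol))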
Exemple #38
0
	def _RunCompile(self, netlist, device):
		self.LogVerbose("Patching coregen.cgp and .cgc files...")
		# read netlist settings from configuration file
		xcoInputFilePath =    netlist.XcoFile
		cgcTemplateFilePath =  self.Directories.Netlist / "template.cgc"
		cgpFilePath =          self.Directories.Working / "coregen.cgp"
		cgcFilePath =          self.Directories.Working / "coregen.cgc"
		xcoFilePath =          self.Directories.Working / xcoInputFilePath.name

		if (self.Host.Platform == "Windows"):
			WorkingDirectory = ".\\temp\\"
		else:
			WorkingDirectory = "./temp/"

		# write CoreGenerator project file
		cgProjectFileContent = dedent("""\
			SET addpads = false
			SET asysymbol = false
			SET busformat = BusFormatAngleBracketNotRipped
			SET createndf = false
			SET designentry = VHDL
			SET device = {Device}
			SET devicefamily = {DeviceFamily}
			SET flowvendor = Other
			SET formalverification = false
			SET foundationsym = false
			SET implementationfiletype = Ngc
			SET package = {Package}
			SET removerpms = false
			SET simulationfiles = Behavioral
			SET speedgrade = {SpeedGrade}
			SET verilogsim = false
			SET vhdlsim = true
			SET workingdirectory = {WorkingDirectory}
			""".format(
			Device=device.ShortName.lower(),
			DeviceFamily=device.FamilyName.lower(),
			Package=(str(device.Package).lower() + str(device.PinCount)),
			SpeedGrade=device.SpeedGrade,
			WorkingDirectory=WorkingDirectory
		))

		self.LogDebug("Writing CoreGen project file to '{0}'.".format(cgpFilePath))
		with cgpFilePath.open('w') as cgpFileHandle:
			cgpFileHandle.write(cgProjectFileContent)

		# write CoreGenerator content file
		self.LogDebug("Reading CoreGen content template from '{0}'.".format(cgcTemplateFilePath))
		with cgcTemplateFilePath.open('r') as cgcFileHandle:
			cgContentFileContent = cgcFileHandle.read()

		cgContentFileContent = cgContentFileContent.format(
			name="lcd_ChipScopeVIO",
			device=device.ShortName,
			devicefamily=device.FamilyName,
			package=(str(device.Package) + str(device.PinCount)),
			speedgrade=device.SpeedGrade
		)

		self.LogDebug("Writing CoreGen content file to '{0}'.".format(cgcFilePath))
		with cgcFilePath.open('w') as cgcFileHandle:
			cgcFileHandle.write(cgContentFileContent)

		# copy xco file into temporary directory
		self.LogVerbose("Copy CoreGen xco file to '{0}'.".format(xcoFilePath))
		self.LogDebug("cp {0!s} {1!s}".format(xcoInputFilePath, self.Directories.Working))
		try:
			shutil_copy(str(xcoInputFilePath), str(xcoFilePath), follow_symlinks=True)
		except OSError as ex:
			raise CompilerException("Error while copying '{0!s}'.".format(xcoInputFilePath)) from ex

		# change working directory to temporary CoreGen path
		self.LogDebug("cd {0!s}".format(self.Directories.Working))
		try:
			chdir(str(self.Directories.Working))
		except OSError as ex:
			raise CompilerException("Error while changing to '{0!s}'.".format(self.Directories.Working)) from ex

		# running CoreGen
		# ==========================================================================
		self.LogVerbose("Executing CoreGen...")
		coreGen = self._toolChain.GetCoreGenerator()
		coreGen.Parameters[coreGen.SwitchProjectFile] =  "."		# use current directory and the default project name
		coreGen.Parameters[coreGen.SwitchBatchFile] =    str(xcoFilePath)
		coreGen.Parameters[coreGen.FlagRegenerate] =    True

		try:
			coreGen.Generate()
		except ISEException as ex:
			raise CompilerException("Error while compiling '{0!s}'.".format(netlist)) from ex
		if coreGen.HasErrors:
			raise SkipableCompilerException("Error while compiling '{0!s}'.".format(netlist))
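The .cgp generation above is a dedent-plus-format template: indent the template to match the source, strip the common prefix with textwrap.dedent, and fill the placeholders with str.format. A minimal sketch of the technique with hypothetical fields:

from textwrap import dedent

def write_project_file(path, device, package):
    content = dedent("""\
        SET device = {Device}
        SET package = {Package}
        """).format(Device=device, Package=package)
    with open(path, 'w') as handle:
        handle.write(content)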
Exemple #39
0
    def _RunSimulation(self, testbench):  # mccabe:disable=MC0001
        # select modelsim.ini from precompiled
        precompiledModelsimIniPath = self.Directories.PreCompiled
        device_vendor = self._pyIPCMIProject.Board.Device.Vendor
        if device_vendor is Vendors.Altera:
            precompiledModelsimIniPath /= self.Host.Config[
                'CONFIG.DirectoryNames']['AlteraSpecificFiles']
        elif device_vendor is Vendors.Lattice:
            precompiledModelsimIniPath /= self.Host.Config[
                'CONFIG.DirectoryNames']['LatticeSpecificFiles']
        elif device_vendor is Vendors.Xilinx:
            precompiledModelsimIniPath /= self.Host.Config[
                'CONFIG.DirectoryNames']['XilinxSpecificFiles']

        precompiledModelsimIniPath /= "modelsim.ini"
        if not precompiledModelsimIniPath.exists():
            raise SimulatorException("ModelSim ini file '{0!s}' not found.".format(precompiledModelsimIniPath)) \
             from FileNotFoundError(str(precompiledModelsimIniPath))

        simBuildPath = self.Directories.Working / self.COCOTB_SIMBUILD_DIRECTORY
        # create temporary directory for Cocotb if not existent
        if not simBuildPath.exists():
            self.LogVerbose("Creating build directory for simulator files.")
            self.LogDebug("Build directory: {0!s}".format(simBuildPath))
            try:
                simBuildPath.mkdir(parents=True)
            except OSError as ex:
                raise SimulatorException(
                    "Error while creating '{0!s}'.".format(
                        simBuildPath)) from ex

        # write local modelsim.ini
        modelsimIniPath = simBuildPath / "modelsim.ini"
        if modelsimIniPath.exists():
            try:
                modelsimIniPath.unlink()
            except OSError as ex:
                raise SimulatorException(
                    "Error while deleting '{0!s}'.".format(
                        modelsimIniPath)) from ex

        with modelsimIniPath.open('w') as fileHandle:
            fileContent = dedent("""\
				[Library]
				others = {0!s}
				""").format(precompiledModelsimIniPath)
            fileHandle.write(fileContent)

        #
        self.LogNormal("Running simulation...")
        cocotbTemplateFilePath = self.Host.Directories.Root / \
                     self.Host.Config[testbench.ConfigSectionName]['CocotbMakefile'] # depends on testbench
        topLevel = testbench.TopLevel
        cocotbModule = testbench.ModuleName

        # create one VHDL line for each VHDL file
        vhdlSources = ""
        for file in self._pyIPCMIProject.Files(
                fileType=FileTypes.VHDLSourceFile):
            if (not file.Path.exists()):
                raise SimulatorException("Cannot add '{0!s}' to Cocotb Makefile.".format(file.Path)) \
                 from FileNotFoundError(str(file.Path))
            vhdlSources += str(file.Path) + " "

        # copy Cocotb (Python) files to temp directory
        self.LogVerbose(
            "Copying Cocotb (Python) files into temporary directory.")
        cocotbTempDir = str(self.Directories.Working)
        for file in self._pyIPCMIProject.Files(
                fileType=FileTypes.CocotbSourceFile):
            if (not file.Path.exists()):
                raise SimulatorException("Cannot copy '{0!s}' to Cocotb temp directory.".format(file.Path)) \
                 from FileNotFoundError(str(file.Path))
            self.LogDebug("copy {0!s} {1}".format(file.Path, cocotbTempDir))
            try:
                shutil_copy(str(file.Path), cocotbTempDir)
            except OSError as ex:
                raise SimulatorException("Error while copying '{0!s}'.".format(
                    file.Path)) from ex

        # read/write Makefile template
        self.LogVerbose("Generating Makefile...")
        self.LogDebug(
            "Reading Cocotb Makefile template file from '{0!s}'".format(
                cocotbTemplateFilePath))
        with cocotbTemplateFilePath.open('r') as fileHandle:
            cocotbMakefileContent = fileHandle.read()

        cocotbMakefileContent = cocotbMakefileContent.format(
            pyIPCMIRootDirectory=str(self.Host.Directories.Root),
            VHDLSources=vhdlSources,
            TopLevel=topLevel,
            CocotbModule=cocotbModule)

        cocotbMakefilePath = self.Directories.Working / "Makefile"
        self.LogDebug(
            "Writing Cocotb Makefile to '{0!s}'".format(cocotbMakefilePath))
        with cocotbMakefilePath.open('w') as fileHandle:
            fileHandle.write(cocotbMakefileContent)

        # execute make
        make = Make(self.Host.Platform, self.DryRun, logger=self.Logger)
        if (SimulationSteps.ShowWaveform in self._simulationSteps):
            make.Parameters[Make.SwitchGui] = 1
        testbench.Result = make.RunCocotb()
Exemple #40
0
def benchmarkProblemType( problemTypeConfig, problemSizeGroupConfig, \
    problemSizeGroupIdx ):

    benchmarkTestFails = 0

    # convert config to full benchmark process (resolves defaults)
    print1("")
    print1(HR)
    print1("# Converting Config to BenchmarkProcess Object")
    print1(HR)
    print1("")
    benchmarkProcess = BenchmarkProcess( problemTypeConfig, \
        problemSizeGroupConfig )

    problemTypeName = str(benchmarkProcess.problemType)
    problemSizeGroupName = "%s_%02u" % (problemTypeName, problemSizeGroupIdx)
    pushWorkingPath(problemSizeGroupName)
    ensurePath(os.path.join(globalParameters["WorkingPath"], "Data"))

    totalBenchmarkSteps = len(benchmarkProcess)
    resultsFileBaseFinal = None
    winners = WinningParameterDict()
    print1("# NumBenchmarkSteps: %u" % totalBenchmarkSteps)
    print1("")
    print1(HR)
    print1("# Done Creating BenchmarkProcess Object")
    print1(HR)

    ##############################################################################
    # For Each Benchmark Step
    ##############################################################################
    for benchmarkStepIdx in range(0, totalBenchmarkSteps):

        benchmarkStep = benchmarkProcess[benchmarkStepIdx]
        if winners.winners == {}:
            # perf optimization to skip the initial winners creation;
            # it helps a little here, but mainly it avoids the super-expensive
            # removeHardcoded step below, which can use a fast path to create
            # winners when needed.
            print1(
                "# Empty winners - use fast initialization of hardcodedParameters"
            )
            resultingHardcodedParameterList = benchmarkStep.hardcodedParameters
        else:
            resultingHardcodedParameterList = \
                winners.wpdUpdate( benchmarkStep.hardcodedParameters )

        benchmarkStep.hardcodedParameters = resultingHardcodedParameterList
        numHardcoded = len(benchmarkStep.hardcodedParameters)
        stepName = str(benchmarkStep)
        shortName = benchmarkStep.abbreviation()
        print1("\n")
        print1(HR)
        currentTime = time.time()
        elapsedTime = currentTime - startTime
        print1("# BenchmarkStep: %s - %s %.3fs" %
               (problemSizeGroupName, stepName, elapsedTime))
        print1("# NumProblems: %u" %
               benchmarkStep.problemSizes.totalProblemSizes)
        print1("# BenchmarkParameters:")
        for paramName in benchmarkStep.benchmarkParameters:
            paramValues = benchmarkStep.benchmarkParameters[paramName]
            printStr = "#     %s = { %s" % (paramName, paramValues[0])
            for paramValueIdx in range(1, len(paramValues)):
                printStr += ", %s" % str(paramValues[paramValueIdx])
            printStr += " }"
            print1(printStr)

        if False:
            # print hardcoded parameters and their winners
            print1("# HardcodedParameters | WinningParameters:")
            paramDictIdx = 0
            hardcodedMinNaming = \
                Solution.getMinNaming(benchmarkStep.hardcodedParameters)
            for paramDict in benchmarkStep.hardcodedParameters:
                winningParameters = winners[paramDict]
                print1("#    (%u) %s | %s" % (paramDictIdx, \
                    Solution.getNameMin(paramDict, hardcodedMinNaming), \
                    Solution.getNameFull(winningParameters) ))
                paramDictIdx += 1
        pushWorkingPath(shortName)

        ############################################################################
        # Copy Files to Benchmark Source Directory
        ############################################################################
        stepBaseDir = globalParameters["WorkingPath"]
        sourceDir = \
          os.path.join(stepBaseDir, "source" )
        ensurePath(sourceDir)
        pushWorkingPath("sourceTmp")
        filesToCopy = [
            "SolutionMapper.h",
            "Client.cpp",
            "Client.h",
            "CMakeLists.txt",
            "DeviceStats.h",
            "TensorUtils.h",
            "MathTemplates.cpp",
            "MathTemplates.h",
            "TensileTypes.h",
            "tensile_bfloat16.h",
            "KernelHeader.h",
            "ReferenceCPU.h",
            "SolutionHelper.cpp",
            "SolutionHelper.h",
            "Tools.cpp",
            "Tools.h",
        ]

        for f in filesToCopy:
            shutil_copy(os.path.join(globalParameters["SourcePath"], f),
                        globalParameters["WorkingPath"])
        if globalParameters["RuntimeLanguage"] == "OCL":
            shutil_copy(
                os.path.join(globalParameters["SourcePath"],
                             "FindOpenCL.cmake"),
                globalParameters["WorkingPath"])
        else:
            shutil_copy(
                os.path.join(globalParameters["SourcePath"], "FindHIP.cmake"),
                globalParameters["WorkingPath"])
            shutil_copy(
                os.path.join(globalParameters["SourcePath"], "FindHCC.cmake"),
                globalParameters["WorkingPath"])

        ############################################################################
        # Enumerate Benchmark Permutations
        ############################################################################
        solutions = []
        totalBenchmarkPermutations = 1
        for benchmarkParamName in benchmarkStep.benchmarkParameters:
            totalBenchmarkPermutations *= len(
                benchmarkStep.benchmarkParameters[benchmarkParamName])
        maxPossibleSolutions = totalBenchmarkPermutations * numHardcoded
        print1("# MaxPossibleSolutions: %u = %u (hardcoded) * %u (benchmark)" % \
            (maxPossibleSolutions, numHardcoded, totalBenchmarkPermutations))

        benchmarkPermutations = []
        for i in range(0, totalBenchmarkPermutations):
            permutation = {}
            pIdx = i
            for benchmarkParamName in benchmarkStep.benchmarkParameters:
                benchmarkParamValues = deepcopy( \
                    benchmarkStep.benchmarkParameters[benchmarkParamName])
                valueIdx = pIdx % len(benchmarkParamValues)
                permutation[benchmarkParamName] = benchmarkParamValues[
                    valueIdx]
                pIdx //= len(benchmarkParamValues)  # integer division keeps pIdx a valid index in Python 3
            benchmarkPermutations.append(permutation)

        ############################################################################
        # Enumerate Solutions = Hardcoded * Benchmark
        ############################################################################
        print1("# Enumerating Solutions")
        if globalParameters["PrintLevel"] >= 1:
            progressBar = ProgressBar(maxPossibleSolutions)
        solutionSet = set()  # avoid duplicates for nlca=-1, 1
        for hardcodedIdx in range(0, numHardcoded):
            solutions.append([])
            hardcodedParamDict = benchmarkStep.hardcodedParameters[
                hardcodedIdx]
            for benchmarkIdx in range(0, len(benchmarkPermutations)):
                benchmarkPermutation = benchmarkPermutations[benchmarkIdx]
                solution = {
                    "ProblemType": deepcopy(benchmarkProcess.problemType.state)
                }
                solution.update(benchmarkPermutation)
                solution.update(hardcodedParamDict)
                if benchmarkStepIdx > 0:
                    winningParameters = winners[hardcodedParamDict]
                    if winningParameters is None:
                        # this is a joined parameter that didn't have a winner, that's okay
                        continue
                    solution.update(winningParameters)

                # append default parameters where necessary
                for initialSolutionParameterName in benchmarkStep.initialSolutionParameters:
                    if initialSolutionParameterName not in solution:
                        solution[initialSolutionParameterName] = \
                            benchmarkStep.initialSolutionParameters[initialSolutionParameterName]
                # TODO check if solution matches problem size for exact tile kernels
                solutionObject = Solution(solution)
                if solutionObject["Valid"]:
                    if solutionObject not in solutionSet:
                        solutionSet.add(solutionObject)
                        solutions[hardcodedIdx].append(solutionObject)
                else:
                    if globalParameters["PrintSolutionRejectionReason"]:
                        print1("rejecting solution %s" % str(solutionObject))
                if globalParameters["PrintLevel"] >= 1:
                    progressBar.increment()

        # remove hardcoded that don't have any valid benchmarks
        removeHardcoded = []
        for hardcodedIdx in range(0, numHardcoded):
            if len(solutions[hardcodedIdx]) == 0:
                hardcodedParamDict = benchmarkStep.hardcodedParameters[
                    hardcodedIdx]
                removeHardcoded.append(hardcodedParamDict)
        removesExist = len(removeHardcoded) > 0
        for hardcodedParam in removeHardcoded:
            benchmarkStep.hardcodedParameters.remove(hardcodedParam)

        if removesExist:
            print1(
                "# Updating winners since enumeration removed unused hardcoded solutions.  removeHardcoded=%u winners=%u"
                % (len(removeHardcoded), len(winners.winners)))
            winners.wpdUpdate(benchmarkStep.hardcodedParameters)
            if globalParameters["PrintLevel"] >= 1:
                print1("")
            numHardcoded = len(benchmarkStep.hardcodedParameters)
            # remove from solution 2D list also
            for solutionList in shallowcopy(solutions):
                if len(solutionList) == 0:
                    solutions.remove(solutionList)
        elif winners.winners == {}:
            print1("# Populating initial winners (%u solutions)\n" %
                   len(benchmarkStep.hardcodedParameters))
            for hcParm in benchmarkStep.hardcodedParameters:
                winners.winners[FrozenDictionary(hcParm)] = [{}, -1]

        print1("# Actual Solutions: %u / %u\n" % ( len(solutions), \
            maxPossibleSolutions ))

        # create linear list
        solutionList = []
        for i in range(0, len(solutions)):
            solutionsForHardcoded = solutions[i]
            for j in range(0, len(solutionsForHardcoded)):
                solution = solutionsForHardcoded[j]
                solutionList.append(solution)
        if len(solutionList) == 0:
            msg = "Your parameters resulted in 0 valid solutions."
            if globalParameters["PrintSolutionRejectionReason"]:
                msg += "\nExamine reject and backtrace messages above to see why and where solutions were rejected."
            else:
                msg += "\nYou should re-run with \"PrintSolutionRejectionReason: True\" to see why each parameter combination was rejected."
            printExit(msg)
        if globalParameters["PrintLevel"] >= 1:
            for i in range(0, len(solutions)):
                solutionsForHardcoded = solutions[i]
                for j in range(0, len(solutionsForHardcoded)):
                    solution = solutionsForHardcoded[j]
                    print2("#    (%u:%u) %s" % (i, j, \
                        Solution.getNameFull(solution) ))
            print2(HR)

        # write benchmarkFiles
        writeBenchmarkFiles(stepBaseDir, solutionList, benchmarkStep.problemSizes, \
            shortName, filesToCopy)

        print1("# Copying files that differ from sourceTmp -> source")
        sourceTmp = globalParameters["WorkingPath"]
        files = os.listdir(sourceTmp)
        for f in files:
            f0 = os.path.join(sourceTmp, f)
            f1 = os.path.join(sourceDir, f)
            if os.path.isdir(f0):
                #print "cpDir:", f0, f1
                if os.path.isdir(f1):
                    shutil.rmtree(f1, True)
                shutil.copytree(f0, f1)
            elif not os.path.exists(f1) or not filecmp.cmp(f0, f1):
                #print "cp:", f0, f1
                shutil.copy(f0, f1)
        shutil.rmtree(sourceTmp, True)

        popWorkingPath()  # source

        ############################################################################
        # Run Benchmark Script
        ############################################################################
        resultsFileBase = os.path.normpath(os.path.join( \
            globalParameters["WorkingPath"], "../Data", shortName))
        if benchmarkStep.isFinal():
            resultsFileBaseFinal = resultsFileBase
        resultsFileName = resultsFileBase + ".csv"
        solutionsFileName = resultsFileBase + ".yaml"
        if not os.path.exists(resultsFileName) or \
            globalParameters["ForceRedoBenchmarkProblems"]:
            pushWorkingPath("build")

            # write runScript
            libraryLogicPath = None
            path = globalParameters["WorkingPath"]
            forBenchmark = True
            runScriptName = writeRunScript(path, libraryLogicPath,
                                           forBenchmark)

            # run runScript
            process = Popen(runScriptName, cwd=globalParameters["WorkingPath"])
            process.communicate()
            if process.returncode:
                benchmarkTestFails += 1
                printWarning(
                    "BenchmarkProblems: Benchmark Process exited with code %u"
                    % process.returncode)
            popWorkingPath()  # build
        else:
            print1("# Already benchmarked; skipping.")

        ############################################################################
        # Winners -> Determined Parameters
        ############################################################################
        results = getResults(resultsFileName, solutions)
        print2("CSV Results: %s" % results)
        winners.addResults(benchmarkStep.hardcodedParameters, \
            benchmarkPermutations, solutions, results)

        ############################################################################
        # Write Solutions YAML
        ############################################################################
        YAMLIO.writeSolutions(solutionsFileName, benchmarkStep.problemSizes, \
            solutions )

        # End Iteration
        popWorkingPath()  # stepName
        currentTime = time.time()
        elapsedTime = currentTime - startTime
        print1("%s\n# %s\n# %s: End - %.3fs\n%s\n" \
            % (HR, problemSizeGroupName, shortName, elapsedTime, HR))

    popWorkingPath()  # ProblemType
    return (resultsFileBaseFinal, benchmarkTestFails)
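The benchmarkPermutations loop above decodes a linear index into one value per parameter, i.e. mixed-radix decoding over the parameter lists (equivalent to itertools.product). A standalone sketch of the pattern, including the integer division that keeps the index an int under Python 3:

def enumerate_permutations(parameters):
    # parameters: dict mapping name -> list of candidate values
    names = list(parameters)
    total = 1
    for name in names:
        total *= len(parameters[name])
    permutations = []
    for i in range(total):
        pIdx = i
        permutation = {}
        for name in names:
            values = parameters[name]
            permutation[name] = values[pIdx % len(values)]  # current digit
            pIdx //= len(values)                            # shift to next digit
        permutations.append(permutation)
    return permutations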
Exemple #41
0
 def ConfigureJB(self):
     if pathExists('%scomponents/zapNC.config' % PluginPath) is False:
         self.ZapNC=("1:0:1:1139:2AF8:13E:820000:0:0:0:")
     else:
         with open('%scomponents/zapNC.config' % PluginPath, 'r') as ZAPconfig:
             tmp=ZAPconfig.readline().split('"')[1]
             self.ZapNC=(tmp)
             ZAPconfig.close()
     if pathExists('%scomponents/zapCP.config' % PluginPath) is False:
         self.ZapCP=("1:0:1:3396:3390:71:820000:0:0:0:")
     else:
         with open('%scomponents/zapCP.config' % PluginPath, 'r') as ZAPconfig:
             tmp=ZAPconfig.readline().split('"')[1]
             self.ZapCP=(tmp)
             ZAPconfig.close()
     self.BouquetsNCBin='%scomponents/j00zekBouquetsNC' % (PluginPath)
     self.BouquetsCPBin='%scomponents/j00zekBouquetsCP' % (PluginPath)
     self.ExcludedSIDsTemplate='%scomponents/excludedSIDs.template' % PluginPath
     self.ExcludedSIDsFileName='userbouquet.excludedSIDs.j00zekAutobouquet.tv'
     self.ExcludedSIDsFile='/etc/enigma2/%s' % self.ExcludedSIDsFileName
     self.IncludedTranspondersTemplate='%scomponents/PLtransponders.cfg' % PluginPath
     self.IncludedTranspondersFile='/tmp/transponders.PL'
     self.runlist = []
     self.runlist.append(('[ -f /tmp/.ChannelsNotUpdated ] && rm -f /tmp/.ChannelsNotUpdated 2>/dev/null'))
     if pathExists('%s/components/CheckType.sh' % PluginPath) is True:
         self.runlist.append(('chmod 755 %s/components/*.sh' % PluginPath))
         self.runlist.append(('%s/components/CheckType.sh' % PluginPath))
     
     self.ZapTo=""
     
     # Polish transponders only
     if j00zekConfig.BouquetsNC.value.endswith('PL'):
         if not pathExists(self.IncludedTranspondersFile):
             os_symlink(self.IncludedTranspondersTemplate,self.IncludedTranspondersFile)
     else:
         if pathExists(self.IncludedTranspondersFile):
             os_remove(self.IncludedTranspondersFile)
     # channels to skip
     if j00zekConfig.BouquetsExcludeBouquet.value == True:
         self.ExcludeSIDS="1"
         ExcludedSIDsFileNeedsUpdate=1
         if pathExists(self.ExcludedSIDsFile) is False:
             from shutil import copy as shutil_copy
             shutil_copy(self.ExcludedSIDsTemplate,self.ExcludedSIDsFile)
     else:
         self.ExcludeSIDS="0"
         ExcludedSIDsFileNeedsUpdate=0
         
     # check the layout of the bouquets.tv file
     hasNewline=1
     if j00zekConfig.BouquetsNC.value !="NA":
         ncNeedsUpdate=1
     else:
         ncNeedsUpdate=0
     if j00zekConfig.BouquetsCP.value !="NA":
         cpNeedsUpdate=1
     else:
         cpNeedsUpdate=0
             
     windowsEOL=''
     with open("/etc/enigma2/bouquets.tv", "r") as bouquetsTV:
         for line in bouquetsTV:
             if windowsEOL == '' and line.endswith('\r\n'):
                 windowsEOL='\r'
             if line.endswith('\n'):
                 hasNewline=1
             else:
                 hasNewline=0
             if line.find(self.ExcludedSIDsFileName) > 0:
                 ExcludedSIDsFileNeedsUpdate=0
             if line.find('userbouquet.ncplus.j00zekAutobouquet.tv') > 0:
                 ncNeedsUpdate=0
             if line.find('userbouquet.CP.j00zekAutobouquet.tv') > 0:
                 cpNeedsUpdate=0
         bouquetsTV.close()
     # append our bouquets
     if ncNeedsUpdate == 1:
         with open("/etc/enigma2/bouquets.tv", "a") as bouquetsTV:
             if hasNewline == 0:
                 bouquetsTV.write('\n')
             bouquetsTV.write('#SERVICE 1:7:1:0:0:0:0:0:0:0:FROM BOUQUET "userbouquet.ncplus.j00zekAutobouquet.tv" ORDER BY bouquet%s\n' % windowsEOL)
             bouquetsTV.close()
             hasNewline=1
     if cpNeedsUpdate == 1:
         with open("/etc/enigma2/bouquets.tv", "a") as bouquetsTV:
             if hasNewline == 0:
                 bouquetsTV.write('\n')
             bouquetsTV.write('#SERVICE 1:7:1:0:0:0:0:0:0:0:FROM BOUQUET "userbouquet.CP.j00zekAutobouquet.tv" ORDER BY bouquet%s\n' % windowsEOL)
             bouquetsTV.close()
             hasNewline=1
     if ExcludedSIDsFileNeedsUpdate == 1:
         with open("/etc/enigma2/bouquets.tv", "a") as bouquetsTV:
             if hasNewline == 0:
                 bouquetsTV.write('\n')
             bouquetsTV.write('#SERVICE 1:7:1:0:0:0:0:0:0:0:FROM BOUQUET "%s" ORDER BY bouquet%s\n' % (self.ExcludedSIDsFileName,windowsEOL))
             bouquetsTV.close()
     
     if j00zekConfig.BouquetsNC.value == '49188PL' and j00zekConfig.syncPLtransponders.value == True:
         self.runlist.append('%scomponents/SyncFromWeb.sh' % PluginPath)
     if j00zekConfig.BouquetsNC.value != 'NA':
         self.runlist.append("%s %s %s %s %s '%s'" % ( self.BouquetsNCBin, j00zekConfig.BouquetsNC.value, \
                             j00zekConfig.BouquetsAction.value, self.ZapNC, self.ExcludeSIDS, j00zekConfig.Znacznik.value))
         self.ZapTo=self.ZapNC
     if j00zekConfig.BouquetsCP.value != 'NA':
         self.runlist.append("%s %s %s %s %s '%s'" % ( self.BouquetsCPBin, j00zekConfig.BouquetsCP.value, \
                             j00zekConfig.BouquetsAction.value, self.ZapCP, self.ExcludeSIDS, j00zekConfig.Znacznik.value))
         if self.ZapTo == "":
             self.ZapTo = self.ZapCP
     if j00zekConfig.BouquetsAction.value in ("1st","all") and j00zekConfig.Clear1st.value == True:
         self.runlist.append(('%s/components/clear1st.sh' % PluginPath))
     if j00zekConfig.ClearBouquets.value:
         self.runlist.append("%s/components/ClearBouquets" % PluginPath)
     return
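Both ConfigureJB variants seed user files from bundled templates only when the target does not exist yet (the excludedSIDs template above). A minimal sketch of that copy-on-first-use pattern (paths and names are hypothetical):

from os.path import exists
from shutil import copy as shutil_copy

def ensure_from_template(template_path, target_path):
    # copy the bundled default only if the user file is missing
    if not exists(target_path):
        shutil_copy(template_path, target_path)
    return target_path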