def _two_label_performance(target_names, params):
    """Fit a Hopfield network on the given labels and measure recall quality.

    :param target_names: labels to train/recall on (stored into ``params``)
    :param params: experiment parameters; must contain 'noise_amount'
    :return: ``(similarity, accuracy)`` — mean recall scores
    """
    noise_amount = params['noise_amount']

    # record the labels under test so downstream helpers can see them
    params['target_names'] = target_names
    print_params(**params)

    # fit hopfield
    print('\n.. fitting hopfield\n')
    hf, X, y, target_names, params = fit_hopfield(params)
    print_params(**params)

    # recall
    print('\n.. recalling\n')
    X, X_noise, X_recall = recall_with_noise(clf=hf, X=X,
                                             noise_amount=noise_amount)

    print_header('result')
    similarities, accurate = get_recalling_performance(X, X_recall)

    # compute each mean once and reuse it for both reporting and the return
    # value (the original recomputed np.mean for print and return separately)
    similarity = np.mean(similarities)
    accuracy = np.mean(accurate)
    print('similarity:', similarity)
    print('accuracy:', accuracy)
    return similarity, accuracy
Esempio n. 2
0
    def run(self):
        """
        Run the test.

        Captures console output (unless logging is on), times the run,
        executes the testers, then writes results to file and stdout.
        """

        utils.print_header("Start {}...\n".format(self.get_kind_of_test()))

        # Suppress console output unless logging was explicitly requested.
        if not self.options.log:
            utils.start_capture_console()
        self.start_time = time.time()
        # NOTE(review): operator precedence makes this read as
        # `adding or (getting and clients > 1)` -- confirm whether
        # `(adding or getting) and clients > 1` was intended.
        if self.options.adding or self.options.getting \
                and self.options.clients > 1:
            self.start_tester_in_thread()
        else:
            # Single tester, run synchronously via the async helper.
            self.list_tester.append(self.create_tester())
            utils.run_async_method(None, self.list_tester[-1].test)

        self.finish_time = time.time()

        utils.stop_capture_console()
        self.collect_result()
        # Persist the results, echo them to stdout, then release log handles.
        with open(self.result_path, 'w') as result:
            self.write_result(result)
        self.write_result(sys.stdout)
        requests_sender.RequestsSender.close_log_file()

        utils.print_header("\nFinish {}\n".format(self.get_kind_of_test()))
Esempio n. 3
0
    async def build_schema_req(args: dict):
        """
        Build ADD schema request.

        :param args: arguments for building ADD schema request.
        :return: schema request, request info.
        """
        submitter_did = args['submitter_did']
        try:
            # Randomized schema name so repeated runs do not collide.
            data = {
                'name': utils.generate_random_string(prefix='test'),
                'version': '1.0',
                'attr_names': ['test']
            }

            utils.print_header("\n======= Build schema request =======")
            schema_req = await ledger.build_schema_request(submitter_did,
                                                           json.dumps(data))

            # Reshape `data` into the request-info form: drop attr_names
            # and record the submitter as the destination.
            del data['attr_names']
            data['dest'] = submitter_did
            req_info = json.dumps({'kind': 'schema', 'data': data})
            req = json.dumps({'request': schema_req})

            return req, req_info

        except Exception as e:
            utils.force_print_error_to_console(
                "Cannot build schema request. Skip building...")
            utils.force_print_error_to_console(str(e))
            # NOTE(review): failure returns a bare "" while success returns a
            # (req, req_info) tuple -- callers must handle both shapes.
            return ""
Esempio n. 4
0
 def compare_directories(self):
     """Compare backup and source listings and print an analysis summary.

     Tallies files that exist only in the backup (candidates for back-copy)
     and files that exist only in the source (candidates for backup), then
     reports both counts, warning when the corresponding transfer direction
     is disabled in the configuration.
     """
     # Union of both listings; membership on the dict itself is O(1),
     # no .keys() materialization needed.
     for song in set(self.backup_dict) | set(self.source_dict):
         in_backup = song in self.backup_dict
         in_source = song in self.source_dict
         if in_backup and not in_source:
             self.files_to_backcopy += 1
         elif in_source and not in_backup:
             self.files_to_backup += 1
     utils.print_header("ANALYSIS SUMMARY")
     print(
         "There are {} files in your source destination, {} of which need backup."
         .format(len(self.source_dict), self.files_to_backup))
     if self.files_to_backup > 0 and not self.configs.backup_enabled:
         print("Backup is disabled. The {} files will not be backed up.".
               format(self.files_to_backup))
     print(
         "There are {} files in your backup destination, {} of which you don't have locally."
         .format(len(self.backup_dict), self.files_to_backcopy))
     if self.files_to_backcopy > 0 and not self.configs.backcopy_enabled:
         print(
             "Backcopy is disabled. The {} files will not be copied to your local directory."
             .format(self.files_to_backcopy))
Esempio n. 5
0
    async def build_nym_req(args: dict):
        """
        Build ADD nym request.

        :param args: arguments for building ADD nym request.
        :return: nym request, request info.
        """
        wallet_handle = args['wallet_handle']
        submitter_did = args['submitter_did']
        try:
            # A fresh DID becomes the target of the NYM transaction.
            utils.print_header_for_step('Create did')
            did, _ = await signus.create_and_store_my_did(wallet_handle, '{}')

            # Send NYM to ledger
            utils.print_header("\n======== Build NYM request ========")
            nym_req = await ledger.build_nym_request(
                submitter_did, did, None, None, None)

            req_info = json.dumps({'kind': 'nym',
                                   'data': {'target_did': did}})
            req = json.dumps({'request': nym_req})
            return req, req_info

        except Exception as e:
            utils.force_print_error_to_console(
                "Cannot build nym request. Skip building...")
            utils.force_print_error_to_console(str(e))
            return ""
Esempio n. 6
0
def main():
    """Dispatch the 'spark' admin sub-commands based on the CLI input."""
    if utils.inputMatch([]):
        # No sub-command: print the help menu.
        utils.print_header('spark')
        utils.print_option("copy", "setup files (copy jar files)")
        utils.print_option("setup", "setup configuration (which IPs)")
        utils.print_option("bashrc", "add startup to bashrc")
        utils.print_option("start-all", "run start-dfs")
        utils.print_option("launchui", "display ui")
        utils.print_option(
            "full", "run full setup (copy->setup->bashrc->start-all)")
        utils.print_header('')

    # Each sub-command maps to its handler; every keyword is tested in
    # turn, exactly like the original chain of independent if-statements.
    dispatch = (
        ('copy', setupSoftFiles),
        ('setup', setupConfigurationFiles),
        ('restart', restart),
        ('bashrc', setupBashrc),
        ('start-all', startAll),
        ('launchui', launchUi),
        ('full', setupFull),
    )
    for keyword, handler in dispatch:
        if utils.inputMatch([keyword]):
            handler()
Esempio n. 7
0
    def submit_several_reqs_from_files(self, args, files, kind):
        """
        Submit several requests that are stored in files.

        :param args: arguments to submit requests.
        :param files: return by
        request_builder.RequestBuilder.build_several_adding_req_to_files
        :param kind: kind of request.
        """
        utils.print_header('\n\tSubmitting {} requests...'.format(kind))
        if not self.log:
            utils.start_capture_console()

        # One worker thread per request file.
        workers = [
            threading.Thread(target=self.submit_reqs_in_thread,
                             kwargs={'args': args,
                                     'file': file_name,
                                     'kind': kind})
            for file_name in files
        ]
        for worker in workers:
            worker.start()

        # Wait for every submission to finish before restoring the console.
        for worker in workers:
            worker.join()

        utils.stop_capture_console()
        utils.print_header('\n\tSubmitting requests complete')
Esempio n. 8
0
def _verify_ssh_section(config, section, config_path):
    """Verify every option of one config section appears in an ssh file.

    :param config: parsed hardening config
    :param section: config section to check ('ssh' or 'sshd')
    :param config_path: ssh config file passed to grep
    """
    counter = 0
    for setting in config.options(section):
        counter += 1
        value = config.get(section, setting)
        print_header("11.%s Verify %s settings - %s" %
            (counter, section, value))

        # check_not_empty fails when grep finds no match.
        check_not_empty("grep %s %s" % (value, config_path))


def verify_ssh():
    '''
    Verify that all ssh settings has been applied.

    Not a CIS test.

    '''
    print_header("11 BONUS - Verify ssh settings")

    print_header("11.1 BONUS - Verify ssh settings")
    config = ConfigParser.SafeConfigParser()
    config.read('%s/hardening/config.cfg' % app.SYCO_VAR_PATH)
    # Client and daemon settings are checked the same way; the duplicated
    # loop bodies of the original are factored into one helper.
    _verify_ssh_section(config, 'ssh', '/etc/ssh/ssh_config')

    print_header("11.2 BONUS - Verify ssh settings")
    _verify_ssh_section(config, 'sshd', '/etc/ssh/sshd_config')
def main():
    """Run the monitor: post events on a timer and listen on a websocket."""
    dataset_id = os.getenv("DATASET_ID")
    version = 1

    print_header()

    log.info("Getting requests session from client...")

    # AWS X-Ray tracing segment covering the whole monitor run.
    xray_recorder.begin_segment("Monitor")

    origo_config = Config()

    # Expose prometheus metrics on port 8000.
    start_http_server(8000)
    log.info("Started prometheus server")

    origo_config.config["cacheCredentials"] = True
    event_poster = PostEvent(config=origo_config)

    def _post_event():
        # Re-arm the timer first so a posting failure does not stop the loop.
        threading.Timer(event_interval, _post_event).start()
        post_event(dataset_id, version, event_poster)

    _post_event()

    websocket_uri = (
        f"{websocket_base_url}?dataset_id={dataset_id}&webhook_token={webhook_token}"
    )
    # Blocks here for as long as the websocket listener keeps running.
    asyncio.get_event_loop().run_until_complete(
        listen_to_websocket(websocket_uri))

    log.info(
        f"Sending and listening to events continuously, sending every {event_interval} seconds"
    )

    # NOTE(review): this point is reached only if the listener returns;
    # exiting non-zero presumably signals a supervisor to restart -- confirm.
    exit(1)
Esempio n. 10
0
def fetch_llvm_binutils(root_folder, update, ref):
    """
    Download llvm and binutils or update them if they exist
    :param root_folder: Working directory
    :param update: Boolean indicating whether sources need to be updated or not
    :param ref: The ref to checkout the monorepo to
    """
    llvm_folder = root_folder.joinpath("llvm-project")
    if not llvm_folder.is_dir():
        # First run: clone the monorepo at the requested ref.
        utils.print_header("Downloading LLVM")
        subprocess.run([
            "git", "clone", "-b", ref, "git://github.com/llvm/llvm-project",
            llvm_folder.as_posix()
        ],
                       check=True)
    elif update:
        # Existing checkout: move to the ref, then rebase onto upstream.
        utils.print_header("Updating LLVM")
        for git_args in (["checkout", ref], ["pull", "--rebase"]):
            subprocess.run(["git", "-C", llvm_folder.as_posix()] + git_args,
                           check=True)

    # One might wonder why we are downloading binutils in an LLVM build script :)
    # We need it for the LLVMgold plugin, which can be used for LTO with ld.gold,
    # which at the time of writing this, is how the Google Pixel 3 kernel is built
    # and linked.
    utils.download_binutils(root_folder)
Esempio n. 11
0
def main():
    """Dispatch the 'package' admin sub-commands based on the CLI input."""
    if utils.inputMatch([]):
        # No sub-command: print the help menu.
        utils.print_header('package')
        utils.print_option(
            "restore", "copy delivery from common data storage to each local disk")
        utils.print_option(
            "postinstallscript", "run postinstallscript scripts")
        utils.print_option("bashrc", "add startup to bashrc")
        utils.print_option("profile", "add startup to etc/profile.d")
        utils.print_option("sync", "upload new code")
        utils.print_option("full", "run full setup")
        utils.print_header('')

    # Keyword -> handler pairs, tested in the same order as the original
    # chain of independent if-statements.
    dispatch = (
        ('restore', restoreFromStore),
        ('postinstallscript', setupPostinstall),
        ('full', setupFull),
        ('sync', synchronize),
        ('bashrc', setupBashrc),
        ('profile', setupProfile),
        ('addjars', addjars),
    )
    for keyword, handler in dispatch:
        if utils.inputMatch([keyword]):
            handler()
Esempio n. 12
0
def invoke_ninja(args, dirs, stage):
    """
    Invoke ninja to run the actual build
    :param args: The args variable generated by parse_parameters
    :param dirs: An instance of the Directories class with the paths to use
    :param stage: Which build stage is running; stage 1 builds into
                  dirs.stage1_folder and is never installed
    :return:
    """
    utils.print_header("Building LLVM stage %d" % stage)

    if stage == 1:
        build_folder = dirs.stage1_folder
        install_folder = None
    else:
        build_folder = dirs.build_folder
        install_folder = dirs.install_folder

    time_started = time.time()

    subprocess.run('ninja', check=True, cwd=build_folder.as_posix())

    print()
    print("LLVM build duration: " +
          str(datetime.timedelta(seconds=int(time.time() - time_started))))

    if install_folder is not None:
        # Quiet install: output is discarded, only failures (check=True)
        # surface to the caller.
        subprocess.run(['ninja', 'install'],
                       check=True,
                       cwd=build_folder.as_posix(),
                       stdout=subprocess.DEVNULL,
                       stderr=subprocess.DEVNULL)

        utils.create_gitignore(install_folder)
Esempio n. 13
0
def generate_pgo_profiles(args, dirs):
    """
    Build a set of kernels across a few architectures to generate PGO profiles
    :param args: The args variable generated by parse_parameters
    :param dirs: An instance of the Directories class with the paths to use
    :return:
    """

    utils.print_header("Building PGO profiles")

    # Run kernel/build.sh against the instrumented toolchain, with the
    # build folder as working directory.
    subprocess.run([
        dirs.root_folder.joinpath("kernel", "build.sh"), '-b',
        dirs.build_folder, '--pgo',
        str(args.pgo).lower(), '-t', args.targets
    ],
                   check=True,
                   cwd=dirs.build_folder.as_posix())

    # Combine profiles: merge every stage2 *.profraw into profdata.prof
    # using the stage1 llvm-profdata binary.
    subprocess.run([
        dirs.build_folder.joinpath("stage1", "bin", "llvm-profdata"), "merge",
        "-output=%s" % dirs.build_folder.joinpath("profdata.prof").as_posix()
    ] + glob.glob(
        dirs.build_folder.joinpath("stage2", "profiles",
                                   "*.profraw").as_posix()),
                   check=True)
Esempio n. 14
0
    def run(self):
        """
        Run the test.

        Captures console output (unless logging is on), times the run,
        executes the testers, then writes results to file and stdout.
        """

        utils.print_header("Start {}...\n".format(self.get_kind_of_test()))

        # Suppress console output unless logging was explicitly requested.
        if not self.options.log:
            utils.start_capture_console()
        self.start_time = time.time()
        # NOTE(review): operator precedence makes this read as
        # `adding or (getting and clients > 1)` -- confirm whether
        # `(adding or getting) and clients > 1` was intended.
        if self.options.adding or self.options.getting \
                and self.options.clients > 1:
            self.start_tester_in_thread()
        else:
            # Single tester, run synchronously via the async helper.
            self.list_tester.append(self.create_tester())
            utils.run_async_method(None, self.list_tester[-1].test)

        self.finish_time = time.time()

        utils.stop_capture_console()
        self.collect_result()
        # Persist the results, echo them to stdout, then release log handles.
        with open(self.result_path, 'w') as result:
            self.write_result(result)
        self.write_result(sys.stdout)
        requests_sender.RequestsSender.close_log_file()

        utils.print_header("\nFinish {}\n".format(self.get_kind_of_test()))
Esempio n. 15
0
def run_script(name, finished=False):
    """Execute one pipeline script: '.py' via import, '.sh' via subprocess.

    :param name: file name of the script inside ``u.scripts_dir``
    :param finished: when False, scripts listed in ``run_after`` are
        skipped (they run in the end-of-run phase with finished=True)
    """
    if (name in run_after) and (not finished):
        return
    u.print_header(f'Running {name}...', log=True)
    script_path = u.scripts_dir / name
    suffix = script_path.suffix

    if suffix == '.py':
        # BUGFIX: str.strip('.py') strips any of the characters '.', 'p',
        # 'y' from both ends (e.g. 'copy.py' -> 'co'); use the path stem
        # to drop only the extension.
        sname = script_path.stem
        try:
            exec(f'import {sname}; main_ret = {sname}.main(); del {sname}',
                 globals())  # del after?

        except FileExistsError:
            u.log('FILEEXISTS')

    elif suffix == '.sh':
        process = sp.Popen(
            ['sh', name],
            stdout=sp.PIPE,
            stderr=sp.STDOUT,
            universal_newlines=True,
        )

        # Stream the script's combined stdout/stderr into the log.
        while process.poll() is None:
            line = process.stdout.readline()
            u.log(line.strip())

        if process.returncode:
            # BUGFIX: CalledProcessError takes (returncode, cmd), not a
            # pre-formatted message string.
            raise sp.CalledProcessError(process.returncode, ['sh', name])
Esempio n. 16
0
def verify_ssh():
    '''
    Verify that all ssh settings has been applied.

    Not a CIS test.

    '''
    print_header("11 BONUS - Verify ssh settings")

    config = ConfigParser.SafeConfigParser()
    config.read('%s/hardening/config.cfg' % app.SYCO_VAR_PATH)

    # (section header, config section, file to grep) for client and daemon.
    checks = (
        ("11.1 BONUS - Verify ssh settings", 'ssh', '/etc/ssh/ssh_config'),
        ("11.2 BONUS - Verify ssh settings", 'sshd', '/etc/ssh/sshd_config'),
    )
    for header, section, config_path in checks:
        print_header(header)
        for counter, setting in enumerate(config.options(section), start=1):
            value = config.get(section, setting)
            print_header("11.%s Verify %s settings - %s" %
                         (counter, section, value))

            check_not_empty("grep %s %s" % (value, config_path))
Esempio n. 17
0
    async def test(self):
        """
        The function execute testing steps.

        Runs the full scenario in order: set up the pool and wallet,
        create the submitter DID, run the subclass-specific ``_test``
        body, then tear down the pool and wallet.
        """
        # Console output is captured unless logging was requested.
        if not self.log:
            utils.start_capture_console()

        # 1. Create pool config.
        await self._create_pool_config()

        # 2. Open pool ledger
        await self._open_pool()

        # 3. Create My Wallet and Get Wallet Handle
        await self._create_wallet()
        await self._open_wallet()

        # 4 Create and sender DID
        await self._create_submitter_did()

        # Subclass hook containing the actual test body.
        await self._test()

        await self._close_pool_and_wallet()
        utils.print_header("\n\t======== Finished ========")

        utils.stop_capture_console()
Esempio n. 18
0
    async def test(self):
        """
        The function execute testing steps.

        Runs the full scenario in order: set up the pool and wallet,
        create the submitter DID, run the subclass-specific ``_test``
        body, then tear down the pool and wallet.
        """
        # Console output is captured unless logging was requested.
        if not self.log:
            utils.start_capture_console()

        # 1. Create pool config.
        await self._create_pool_config()

        # 2. Open pool ledger
        await self._open_pool()

        # 3. Create My Wallet and Get Wallet Handle
        await self._create_wallet()
        await self._open_wallet()

        # 4 Create and sender DID
        await self._create_submitter_did()

        # Subclass hook containing the actual test body.
        await self._test()

        await self._close_pool_and_wallet()
        utils.print_header("\n\t======== Finished ========")

        utils.stop_capture_console()
Esempio n. 19
0
def invoke_cmake(args, dirs, env_vars, stage):
    """
    Invoke cmake to generate the build files
    :param args: The args variable generated by parse_parameters
    :param dirs: An instance of the Directories class with the paths to use
    :param env_vars: An instance of the EnvVars class with the compilers/linker to use
    :param stage: What stage we are at
    :return:
    """
    # Assemble the command line: generator, -D defines, then the source dir.
    defines = build_cmake_defines(args, dirs, env_vars, stage)
    cmake = ['cmake', '-G', 'Ninja', '-Wno-dev']
    cmake += ['-D' + key + '=' + value for key, value in defines.items()]
    cmake.append(dirs.root_folder.joinpath("llvm-project", "llvm").as_posix())

    # Stage 1 configures in its own folder; later stages use the main one.
    cwd = (dirs.stage1_folder if stage == 1 else dirs.build_folder).as_posix()

    utils.print_header("Configuring LLVM stage %d" % stage)

    subprocess.run(cmake, check=True, cwd=cwd)
Esempio n. 20
0
def sshKeysDeploy():
    """ Deploy key ssh """

    (listnodes, nodes) = instanceListAll()

    # Start an agent and load the default key plus the GCE key so the
    # per-node ssh sessions below can authenticate.
    os.system("eval `ssh-agent -s`")
    os.system("ssh-add")

    # prepare authentification forwarding
    os.system("eval `ssh-agent`")
    os.system("ssh-add ~/.ssh/google_compute_engine")

    allnodes = listnodes['all']

    listP = []

    for nodename in allnodes:

        utils.print_header('Generating SSH Keys for ' + nodename)

        # get node info
        node = nodes[nodename]

        externalIp = node['externalIp']

        # Regenerate the node's keypair and authorize it locally.
        listCommand = []
        listCommand.append("rm -rf .ssh/id_rsa*")
        listCommand.append("ssh-keygen -q -t rsa -N \"\" -f ~/.ssh/id_rsa")
        listCommand.append("cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys")
        listCommand.append("chmod 0600 ~/.ssh/authorized_keys")

        listCommand.append("echo \"StrictHostKeyChecking no\" >> .ssh/config")
        listCommand.append("chmod 600  .ssh/config")

        # NOTE(review): the `or True` forces this branch for every node, so
        # the else branch below is unreachable -- confirm whether the
        # master-only fallback was meant to be re-enabled.
        if nodename == 'cluster-data-master' or True:
            for nameother in allnodes:

                if nameother == nodename:
                    pass
                else:
                    localCommand = "ssh-copy-id -o StrictHostKeyChecking=no -i ~/.ssh/id_rsa {0}@{1}".format(
                        username, nameother)

                    listCommand.append(localCommand)
        else:

            nameother = 'cluster-data-master'
            localCommand = "ssh-copy-id -o StrictHostKeyChecking=no -i ~/.ssh/id_rsa {0}@{1}".format(
                username, nameother)

            listCommand.append(localCommand)

        command = ';'.join(listCommand)

        # Execute command
        P = utils.sshAsync(externalIp, command)
        listP.append(P)

    # Block until every node's key deployment finishes.
    utils.waitTermination(listP)
Esempio n. 21
0
def runMain():
    """Dispatch the 'deep' cluster sub-commands based on the CLI input."""
    if utils.inputMatch([]):
        # No sub-command: print the help menu.
        utils.print_header('deep')

        utils.print_option("full", "cluster->restore->download deep eval")
        utils.print_option("restore-eval", "restore deep models evaluations")
        utils.print_option("upload-eval", "tar and upload deep models evaluations")

        utils.print_option("restore-model", "restore deep models (download from bucket and untar)")
        utils.print_option("eval-tomaster", "copy evals from slaves to master")
        utils.print_option("eval-frommaster", "copy evals from master to slaves")
        utils.print_option("eval-permanent", "copy evals from slaves to master")

        utils.print_option("eval-sync", "eval-tomaster->eval-frommaster")

        utils.print_header('')

    if utils.inputMatch(['full']):
        full()

    if utils.inputMatch(['restore-eval']):
        restoreDeepEval()

    if utils.inputMatch(['restore-model']):
        restoreDeepModels()

    if utils.inputMatch(['eval-tomaster']):
        syncEvalToMaster()

    if utils.inputMatch(['eval-frommaster']):
        syncEvalFromMaster()

    if utils.inputMatch(['upload-eval']):
        uploadDeepEval()

    if utils.inputMatch(['eval-permanent']):
        permanentEvalToMaster()

    if utils.inputMatch(['upload-subdirs'], doPartial=True):
        # upload-subdirs <host> <source> <prefix> <suffix>
        host = sys.argv[2]
        source = sys.argv[3]
        prefix = sys.argv[4]
        suffix = sys.argv[5]

        # List the remote subdirectories into listdir.txt and fetch it.
        utils.ssh(cluster.ipGetAny(host), 'cd {0};ls -1 -d */ > listdir.txt'.format(source))
        utils.exec_command('gcloud compute copy-files ' + host + ':' + os.path.join(source, 'listdir.txt') + ' .')

        with open("listdir.txt", "r") as myfile:
            data = myfile.readlines()

        utils.exec_command('rm listdir.txt')
        data = [os.path.dirname(d.rstrip()) for d in data]

        command = getUploadCommand(source, data, prefix, suffix)

        # BUGFIX: `print command` is a Python 2 print statement and a
        # syntax error under Python 3; use the print() function.
        print(command)
        utils.ssh(cluster.ipGetAny(host), command)
Esempio n. 22
0
def main():
    ''' main logical flow
    '''
    usage = "usage: %prog [options] arg"

    # CLI definition (optparse): cache refresh, previous-result queries,
    # node filtering, and dry-run display.
    parser = OptionParser(usage=usage)
    parser.add_option("-u", "--update_cache", dest="update_cache", action="store_true", 
                      help="get all puppet client node from puppet master and "
                      "cache them locally")
    parser.add_option("-f", "--failed_node", dest="failed_node", action="store_true",
                      help="show nodes whose kick operation is failed. This param "
                      "is only effective before a new kick operation")
    parser.add_option("-s", "--succ_node", dest="succ_node", action="store_true",
                      help="show nodes whose kick operation is successful. This "
                      "param is only effective before a new kick operation")
    parser.add_option("-m", "--manual", dest="manual", action="store_true", 
                      help="show manual")
    parser.add_option("-p", "--pattern", dest="pattern", default=None,
                      help="The pattern is used to filter nodes from all_node_list")
    parser.add_option("-d", "--display", dest="display", action="store_true",
                      help="display the nodes who will be kicked, but not do it "
                      "in fact")

    (options, args) = parser.parse_args()
    print_header()

    pkick = Pkick()

    # NOTE(review): -u creates a second Pkick() instead of reusing `pkick`
    # above -- confirm whether that is intentional.
    if options.update_cache:
        Pkick().update_node_cache()
        exit()

    if options.succ_node and options.failed_node:
        print_errmsg("-f/--failed_node and -s/--succ_node can not be used together")
        exit()

    # Query-only modes: report the previous run's outcome and exit.
    if options.succ_node:
        succ_nodes = pkick.get_previous_node(True)
        print_pre_node_box(True, succ_nodes)
        exit()

    if options.failed_node:
        failed_nodes = pkick.get_previous_node(False, append_err=True)
        failed_node_err = ["%s -->error: %s" % (node[0].strip(), node[1].strip()) for node in failed_nodes]
        print_pre_node_box(False, failed_node_err)
        exit()

    node_list = pkick.filter_node(options.pattern)

    # -d is a dry run: show the target nodes without kicking them.
    if options.display:
        print_node_box(node_list)
        exit()
    else:
        puppet_kick(node_list)
        color = Colorize()
        color.colorize_output(RESULT_Q, node_list)
        exit()
 def print(self):
     """Display the quiz header, then each numbered question with options."""
     print_header("QUIZ: " + self.name)
     for question_number, question in enumerate(self.questions, start=1):
         print()
         label = change_style("Question {}: ".format(question_number),
                              "question")
         print(label + change_style(question.body, "bold"))
         for option_number, option in enumerate(question.options, start=1):
             bullet = change_style("{}) ".format(option_number), "bold")
             print(bullet + option)
Esempio n. 24
0
def invoke_configure(build_folder, install_folder, root_folder, target,
                     host_arch):
    """
    Invokes the configure script to generate a Makefile
    :param build_folder: Build directory
    :param install_folder: Directory to install binutils to
    :param root_folder: Working directory
    :param target: Target to compile for
    :param host_arch: Host architecture to optimize for
    """
    # Base flags shared by every target; the build is tuned for the host.
    configure = [
        root_folder.joinpath(utils.current_binutils(), "configure").as_posix(),
        '--prefix=%s' % install_folder.as_posix(),
        '--enable-deterministic-archives', '--enable-plugins', '--quiet',
        'CFLAGS=-O2 -march=%s -mtune=%s' % (host_arch, host_arch),
        'CXXFLAGS=-O2 -march=%s -mtune=%s' % (host_arch, host_arch)
    ]
    # Per-target configure flags; unknown targets get no extra flags.
    configure_arch_flags = {
        "arm-linux-gnueabi": [
            '--disable-multilib', '--disable-nls', '--with-gnu-as',
            '--with-gnu-ld',
            '--with-sysroot=%s' % install_folder.joinpath(target).as_posix()
        ],
        "mipsel-linux-gnu": [
            '--disable-compressed-debug-sections', '--enable-new-dtags',
            '--enable-shared',
            '--enable-targets=mips64el-linux-gnuabi64,mips64el-linux-gnuabin32',
            '--enable-threads'
        ],
        "powerpc-linux-gnu": [
            '--enable-lto', '--enable-relro', '--enable-shared',
            '--enable-threads', '--disable-gdb', '--disable-sim',
            '--disable-werror', '--with-pic', '--with-system-zlib'
        ],
        "x86_64-linux-gnu": [
            '--enable-lto', '--enable-relro', '--enable-shared',
            '--enable-targets=x86_64-pep', '--enable-threads', '--disable-gdb',
            '--disable-werror', '--with-pic', '--with-system-zlib'
        ]
    }
    # aarch64 reuses the arm flags plus gold; the ppc64 variants reuse ppc.
    configure_arch_flags['aarch64-linux-gnu'] = configure_arch_flags[
        'arm-linux-gnueabi'] + ['--enable-ld=default', '--enable-gold']
    configure_arch_flags['powerpc64-linux-gnu'] = configure_arch_flags[
        'powerpc-linux-gnu']
    configure_arch_flags['powerpc64le-linux-gnu'] = configure_arch_flags[
        'powerpc-linux-gnu']

    configure += configure_arch_flags.get(target, [])

    # If the current machine is not the target, add the prefix to indicate
    # that it is a cross compiler
    if not host_is_target(target):
        configure += ['--program-prefix=%s-' % target, '--target=%s' % target]

    utils.print_header("Building %s binutils" % target)
    subprocess.run(configure, check=True, cwd=build_folder.as_posix())
Esempio n. 25
0
 async def _open_pool(self):
     """Open the pool ledger and store its handle on the instance.

     Errors are printed but swallowed; pool_handle stays unset on failure.
     """
     try:
         utils.print_header("\n\tOpen pool ledger")
         self.pool_handle = await pool.open_pool_ledger(self.pool_name, None)
     except IndyError as e:
         utils.print_error(str(e))
Esempio n. 26
0
    async def _create_submitter_did(self):
        """Create the submitter DID from the configured seed and store it.

        The error is printed and then re-raised, since a missing submitter
        DID makes the rest of the run impossible.
        """
        try:
            utils.print_header("\n\tCreate DID to use when sending")
            seed_json = json.dumps({'seed': self.seed})
            self.submitter_did, _ = await signus.create_and_store_my_did(
                self.wallet_handle, seed_json)
        except Exception as e:
            utils.print_error(str(e))
            raise
Esempio n. 27
0
    async def _create_submitter_did(self):
        """Create the submitter DID from the configured seed and store it."""
        try:
            utils.print_header("\n\tCreate DID to use when sending")

            self.submitter_did, _ = await signus.create_and_store_my_did(
                self.wallet_handle, json.dumps({'seed': self.seed}))

        except Exception as e:
            utils.print_error(str(e))
            # Re-raise: without a submitter DID the run cannot continue.
            raise
Esempio n. 28
0
def run_end_scripts(scripts, plot=False):
    """Run each end-of-pipeline script, then optionally generate all plots."""
    u.print_header('running end scripts...', log=True)
    u.log('End scripts:', *scripts, end='\t\n')

    for end_script in scripts:
        run_script(end_script, finished=True)

    if not plot:
        return
    u.print_header('plotting...', log=True)
    plot_all()
Esempio n. 29
0
 async def _open_pool(self):
     """
     Open the pool ledger and store the pool handle on the instance.
     """
     # Errors are printed but swallowed; pool_handle stays unset on failure.
     try:
         utils.print_header("\n\tOpen pool ledger")
         self.pool_handle = await pool.open_pool_ledger(
             self.pool_name,
             None)
     except IndyError as e:
         utils.print_error(str(e))
Esempio n. 30
0
def runOnMasterX(command):
    """
    Run a command on the cluster master node only, via utils.sshX.
    (The X suffix presumably enables X11 forwarding -- confirm in utils.)
    """

    (listnodes, nodes) = instanceListAll()
    # First entry under 'master' is the master node's name.
    mastername = listnodes['master'][0]
    master = nodes[mastername]
    externalIp = master['externalIp']

    utils.print_header('ON [' + mastername + '] running: ' + command)
    utils.sshX(externalIp, command)
def exercicio1():
    """Exercise 1: descriptive statistics, histograms and PCA of Iris data."""
    utils.print_header(1)
    x, y, labels = load_iris(os.path.join(constants.DATA_DIR, constants.FILENAME_IRIS_DATABASE))
    a, d = x.shape  # N samples, d attributes

    # a) per-attribute mean and variance over the whole dataset
    print('a)')
    for i in range(d):
        print('\tAttribute {}: Mean={:.3f}, Variance={:.3f}'.format(i, utils.mean(x[:, i]), utils.variance(x[:, i])))

    # b) the same statistics broken down per class
    print('b)')
    for i in range(labels.shape[0]):
        print('\tClass {}: {}'.format(i, labels[i]))
        for j in range(d):
            print('\t\tAttribute {}: Mean={:.3f}, Variance={:.3f}'.format(
                j, utils.mean(x[(y == i)[:, 0], j]), utils.variance(x[(y == i)[:, 0], j]))
            )

    # c) one histogram per attribute with all classes overlaid
    print('c)')
    print('\tThe histograms will be displayed')
    f, ax = plt.subplots(1, d, sharex=False, sharey=True)
    for j in range(d):
        # show title only in the top
        ax[j].set_title('Attribute {}'.format(j))
        hist_bins = np.linspace(x[:, j].min(), x[:, j].max(), num=16)
        ax[j].hist(np.vstack([
            x[(y == i)[:, 0], j]
            for i in range(labels.shape[0])
        ]).T, bins=hist_bins, linewidth=0, color=['r', 'b', 'g'])
    plot_fname = os.path.join(constants.OUTPUT_DIR, 'exercicio1-c.pdf')
    plt.legend(labels, loc='upper center', bbox_to_anchor=(0.5, 0.07), ncol=3, bbox_transform=plt.gcf().transFigure)
    plt.tight_layout()
    plt.subplots_adjust(bottom=0.15)
    f.set_figheight(3)
    f.set_figwidth(8)
    plt.savefig(plot_fname, bbox_inches='tight')
    plt.show()
    print('\tThis plot was saved: {}'.format(plot_fname))

    # d) 2-component PCA projection, one scatter series per class
    print('d)')
    print('\tA plot will be displayed...')
    x_pca = utils.pca(x, n_components=2)
    # format the plot to mimic Slide 21 of Aula 3
    x_pca[:, 1] *= -1
    a = plt.scatter(x_pca[np.where(y == 0)[0], 1], x_pca[np.where(y == 0)[0], 0], c='r', marker='^', lw=0, s=100)
    b = plt.scatter(x_pca[np.where(y == 1)[0], 1], x_pca[np.where(y == 1)[0], 0], c='b', marker='o', lw=0, s=100)
    c = plt.scatter(x_pca[np.where(y == 2)[0], 1], x_pca[np.where(y == 2)[0], 0], c='g', marker='s', lw=0, s=100)
    plt.xlim([-1.5, 1.5])
    plt.ylim([-4, 4])
    plt.legend((a, b, c), tuple(labels), loc='upper left', fontsize=10)
    plot_fname = os.path.join(constants.OUTPUT_DIR, 'exercicio1-d.pdf')
    plt.savefig(plot_fname, bbox_inches='tight')
    plt.show()
    print('\tThis plot was saved: {}'.format(plot_fname))
def exercicio3():
    """Exercise 3: run the Wilcoxon and sign (signal) tests on the kNN data."""
    utils.print_header(3)
    db_path = os.path.join(constants.DATA_DIR, constants.FILENAME_KNN_DATABASE)
    x, y = load_knn_data(db_path)
    n_samples = x.shape[0]

    # Wilcoxon signed-rank test.
    stat = wilcoxon_test(x, y)
    verdict = 'Reject' if reject_null_wilconxon(stat, n_samples) else 'Accept'
    print('Wilcoxon Test: T={}, Null Hypothesis={}'.format(stat, verdict))

    # Sign (signal) test.
    stat = signal_test(x, y)
    verdict = 'Reject' if reject_null_signal(stat, n_samples) else 'Accept'
    print('Signal Test: T={}, Null Hypothesis={}'.format(stat, verdict))

    exit()
Esempio n. 33
0
def check_dependencies():
    """
    Makes sure that the base dependencies of cmake, curl, git, and ninja are installed
    """
    utils.print_header("Checking dependencies")
    # Fail fast on the first missing tool, echoing the resolved path otherwise.
    for tool in ("cmake", "curl", "git", "ninja"):
        location = shutil.which(tool)
        if location is None:
            raise RuntimeError(tool + " could not be found, please install it!")
        print(location)
Esempio n. 34
0
def instanceKill(instance_name):
    """
    Kill one instance
    """

    utils.print_header('killing ' + instance_name)

    # Quietly (-q) delete the compute instance; return the async process
    # handle so the caller can wait on it.
    delete_cmd = 'gcloud -q compute instances ' + 'delete ' + instance_name
    return utils.exec_commandAsync(delete_cmd)
Esempio n. 35
0
    async def _open_wallet(self):
        """
        Open wallet and get wallet handle.

        On success the handle is stored in ``self.wallet_handle``; an
        IndyError is logged and re-raised.
        """
        try:
            utils.print_header("\n\tOpen wallet")
            handle = await wallet.open_wallet(self.wallet_name, None, None)
        except IndyError as err:
            utils.print_error(str(err))
            raise
        else:
            self.wallet_handle = handle
Esempio n. 36
0
 def end_game(self, answer_guessed):
     """Show the final screen: verdict, hangman state, and the answer."""
     utils.clear_screen()
     # did_win() is evaluated first to preserve the original call order.
     won = self.did_win() or answer_guessed
     title = " YOU WON " if won else "GAME OVER"
     message = "Correct!" if won else "Game over."
     utils.print_header(title)
     self.strikes.display_hangman()
     # Only append a period when the answer has no punctuation of its own.
     suffix = "" if self.answer.ends_with_punctuation() else "."
     print("{} The answer was \"{}{}\"".format(message, self.answer, suffix))
Esempio n. 37
0
def exercicio5():
    """Exercise 5: fit a linear model to runner times and test correlations.

    Fits times ~ years by least squares, plots/saves the fit, predicts the
    2016 time, and tests the Kendall and Pearson correlation null
    hypotheses at 95% and 99% confidence.
    """
    utils.print_header(5)

    years, times = load_runner(
        os.path.join(constants.DATA_DIR, constants.FILENAME_RUNNER_DATABASE))
    N = years.shape[0]  # number of (year, time) observations

    # Least-squares line: f(x) = w0_hat + w1_hat * x
    f, w0_hat, w1_hat = utils.linear_model(years, times)
    y_pred = np.array([f(year) for year in years])

    tau_b = utils.KendallTauB(years, times)
    p = utils.Pearson(years, times)

    # Slide 59, Aula 4
    def reject_kendall(tau, alpha):
        # Reject H0 (no correlation) when |tau| exceeds the normal
        # approximation's critical value for sample size N.
        return abs(tau) > utils.get_z(alpha) * np.sqrt(
            (2 * (2 * N + 5)) / (9 * N * (N - 1)))

    # Slide 52, Aula 4
    def reject_pearson(p, alpha):
        # t-statistic for Pearson's r with N-2 degrees of freedom,
        # compared against the two-sided Student-t critical value.
        return abs((p * np.sqrt(N - 2)) /
                   (np.sqrt(1 - (p**2)))) > utils.t_student(N - 2, alpha / 2)

    print('a)')
    print('\tLinear equation: {:.3f} {} {:.3f}x'.format(
        w0_hat, '+' if w1_hat >= 0 else '-', abs(w1_hat)))
    print('\tRMSE: {:.3f}'.format(utils.RMSE(y_pred, times)))
    # Scatter of data, fitted line (red), and the 2016 prediction (green).
    plt.scatter(years, times, linewidths=0)
    plt.plot(years, f(years), c='r')
    plt.axhline(y=f(2016), color='g', linestyle='--')
    plt.scatter(2016, f(2016), c='g', linewidths=0)
    plt.tight_layout()
    plot_fname = os.path.join(constants.OUTPUT_DIR, 'exercicio5-a.pdf')
    plt.savefig(plot_fname, bbox_inches='tight')
    plt.show()

    print('b)')
    print('\tPrediction for 2016: {:.3f} seconds'.format(f(2016)))

    print('c)')
    print('\tKendall\'s tau: {:.3f}'.format(tau_b))
    print('\tNull hypothesis rejected:\n\t- 95%: {}\n\t- 99%: {}'.format(
        reject_kendall(tau_b, 0.05), reject_kendall(tau_b, 0.01)))

    print('d)')
    print('\tPearson correlation coefficient: {:.3f}'.format(p))
    # The significance test is only reported for strong correlations.
    if abs(p) > 0.85:
        print(
            '\t|p| > 0.85 and null hypothesis rejected:\n\t- 95%: {}\n\t- 99%: {}'
            .format(reject_pearson(p, 0.05), reject_pearson(p, 0.01)))

    exit()
Esempio n. 38
0
    async def build_several_getting_req_to_files(self, args, req_kind,
                                                 number_of_file,
                                                 data_files: list):
        """
        Build several GET requests and write them round-robin to a list
        of temporary files.

        :param args: contain all necessary arguments to build a request
                    (pool_handle, wallet_handle, submitter_did)
        :param req_kind: kind of GET request (schema, nym, attribute, claim).
        :param number_of_file: number of temporary file you want to store
                               requests. Number of request will be divided
                               equally among temp files.
        :param data_files: list file that store request information.
        :return: list of temporary file names that hold the built requests.
        """
        utils.print_header("\n\tBuilding several get {} requests..."
                           .format(req_kind))
        # Suppress console output unless logging was explicitly enabled.
        if not self.log:
            utils.start_capture_console()

        req_builder = RequestBuilder.get_getting_req_builder(req_kind)

        files = list()             # temp file names, returned to the caller
        lst_opened_files = list()  # matching open file objects
        file_iter = 0              # round-robin index into the temp files

        for data_file_path in data_files:
            with open(data_file_path, 'r') as data_file:
                for line in data_file:
                    # Skip blank separator lines between stored records.
                    if str(line) == '\n':
                        continue
                    # NOTE(review): `line` still carries its trailing newline
                    # and is JSON-encoded as a plain string here -- confirm
                    # req_builder really expects this double encoding.
                    req = await req_builder(args, json.dumps(line))
                    if file_iter >= number_of_file:
                        file_iter = 0
                    # Lazily create temp files until number_of_file exist.
                    if file_iter >= len(lst_opened_files):
                        file_name = utils.generate_random_string(
                            suffix='_{}.txt'.format(str(time.time())))
                        # NOTE(review): unlike build_several_adding_req_to_files
                        # the name is not joined with self.path, so these files
                        # land in the current working directory -- confirm.
                        temp_file = open(file_name, 'w')
                        lst_opened_files.append(temp_file)
                        files.append(file_name)

                    print(req, file=lst_opened_files[file_iter])
                    file_iter += 1

        for file in lst_opened_files:
            file.close()

        if not self.log:
            utils.stop_capture_console()

        utils.print_header("\n\tBuilding request complete")

        return files
Esempio n. 39
0
    async def _open_wallet(self):
        """
        Open wallet and get wallet handle.

        Stores the resulting handle in ``self.wallet_handle``; any
        IndyError is logged and then re-raised to the caller.
        """

        try:
            utils.print_header("\n\tOpen wallet")
            self.wallet_handle = await wallet.open_wallet(
                self.wallet_name,
                None, None)
        except IndyError as e:
            utils.print_error(str(e))
            raise
def exercicio5():
    """Exercise 5: train and evaluate a small decision tree on the car data."""
    utils.print_header(5)
    np.random.seed(constants.SEED)
    data = load_car(
        os.path.join(constants.DATA_DIR, constants.FILENAME_CAR_DATABASE))
    train_data, test_data = utils.train_test_split(data)

    # Features are every column but the last; the last column is the label.
    tree = utils.DecisionTreeClassifier(max_depth=2, min_samples_split=2)
    tree.fit(train_data[:, :-1], train_data[:, -1])
    predictions = tree.predict(test_data[:, :-1])
    tree.show()
    acc = utils.accuracy(test_data[:, -1], predictions)
    print('Accuracy: {:.2f}%'.format(acc))
    exit()
Esempio n. 41
0
    async def _close_pool_and_wallet(self):
        """
        Clean up after testing complete.

        Best-effort teardown: close the wallet, close the pool ledger,
        delete the wallet, then delete the pool config.  Each step logs
        its own failure and continues so one error cannot block the rest.
        """
        utils.print_header("\n\tClose wallet")
        try:
            await wallet.close_wallet(self.wallet_handle)
        except Exception as e:
            # Bug fix: the adjacent string literals were concatenated
            # without a separating space ("wallet.Skip").
            utils.print_error("Cannot close wallet. "
                              "Skip closing wallet...")
            utils.print_error(str(e))

        utils.print_header("\n\tClose pool")
        try:
            await pool.close_pool_ledger(self.pool_handle)
        except Exception as e:
            utils.print_error("Cannot close pool. "
                              "Skip closing pool...")
            utils.print_error(str(e))

        utils.print_header("\n\tDelete wallet")
        try:
            await wallet.delete_wallet(self.wallet_name, None)
        except Exception as e:
            utils.print_error("Cannot delete wallet. "
                              "Skip deleting wallet...")
            utils.print_error(str(e))

        utils.print_header("\n\tDelete pool")
        try:
            await pool.delete_pool_ledger_config(self.pool_name)
        except Exception as e:
            utils.print_error("Cannot delete pool. "
                              "Skip deleting pool...")
            utils.print_error(str(e))
Esempio n. 42
0
def main():
    """Entry point for the music backup program.

    Compares the source and backup directories, copies missing files in
    both directions (as enabled in config.json), and prints a summary.
    """
    utils.print_header("MUSIC BACKUP PROGRAM")
    configs = Configs.from_json("config.json")
    analyzer = Analyzer(configs)
    dst_files = analyzer.get_backup_files()
    src_files = analyzer.get_source_files()
    analyzer.compare_directories()
    # Collect per-direction summary lines; joining them later fixes the
    # original bug where both messages were glued together with no separator.
    summary_lines = []
    if analyzer.files_to_backup > 0 and configs.backup_enabled:
        utils.print_header("COPYING TO BACKUP")
        print("Starting copying process...\n")
        copier = Copier(configs.source_path, configs.backup_path, src_files,
                        dst_files, analyzer.files_to_backup)
        backed_up_count = copier.copy()
        summary_lines.append(
            "Backed up a total of {} files!".format(backed_up_count))
    if analyzer.files_to_backcopy > 0 and configs.backcopy_enabled:
        utils.print_header("COPYING TO LOCAL")
        print("Starting copying process...")
        copier = Copier(configs.backup_path, configs.backcopy_path, dst_files,
                        src_files, analyzer.files_to_backcopy)
        backcopied_count = copier.copy()
        summary_lines.append("Copied a total of {} files to your local!".format(
            backcopied_count))
    summary = "\n".join(summary_lines)
    if summary and (configs.backcopy_enabled or configs.backup_enabled):
        utils.print_header("SUMMARY")
        print(summary)
    print("\nComplete!")
    return
Esempio n. 43
0
    async def _close_pool_and_wallet(self):
        """
        Clean up after testing complete.

        Best-effort teardown: close the wallet, close the pool ledger,
        delete the wallet, then delete the pool config.  Each step logs
        its own failure and continues so one error cannot block the rest.
        """
        utils.print_header("\n\tClose wallet")
        try:
            await wallet.close_wallet(self.wallet_handle)
        except Exception as e:
            # Bug fix: the adjacent string literals were concatenated
            # without a separating space ("wallet.Skip").
            utils.print_error("Cannot close wallet. Skip closing wallet...")
            utils.print_error(str(e))

        utils.print_header("\n\tClose pool")
        try:
            await pool.close_pool_ledger(self.pool_handle)
        except Exception as e:
            utils.print_error("Cannot close pool. Skip closing pool...")
            utils.print_error(str(e))

        utils.print_header("\n\tDelete wallet")
        try:
            await wallet.delete_wallet(self.wallet_name, None)
        except Exception as e:
            utils.print_error("Cannot delete wallet. "
                              "Skip deleting wallet...")
            utils.print_error(str(e))

        utils.print_header("\n\tDelete pool")
        try:
            await pool.delete_pool_ledger_config(self.pool_name)
        except Exception as e:
            utils.print_error("Cannot delete pool. Skip deleting pool...")
            utils.print_error(str(e))
Esempio n. 44
0
def runOnAllNodes(command):
    """
    Run a command on all nodes
    """
    listnodes, nodes = instanceListAll()

    # Execute the command synchronously on each node over SSH, in order.
    for nodename in listnodes['all']:
        utils.print_header('ON [' + nodename + '] running: ' + command)
        utils.ssh(nodes[nodename]['externalIp'], command)
Esempio n. 45
0
 def populate_dictionary(self, filepath, dictionary):
     """Recursively map file names to their paths, reporting possible duplicates."""
     with os.scandir(filepath) as it:
         for entry in it:
             if entry.is_file() and self.has_suffix(entry.name):
                 previous_path = dictionary.get(entry.name)
                 if previous_path:
                     # Print the DUPLICATES banner once, on first collision.
                     if not self.duplicates_flag:
                         self.duplicates_flag = True
                         utils.print_header("DUPLICATES")
                     print("Possible duplicate of '{}' found!".format(
                         entry.name))
                     print(" ==> {}\n ==> {}\n".format(
                         entry.path, previous_path))
                 # Later occurrences overwrite earlier ones.
                 dictionary[entry.name] = entry.path
             elif entry.is_dir():
                 self.populate_dictionary(entry.path, dictionary)
Esempio n. 46
0
def test_agent(env,
               agent,
               run=0,
               episodes=5,
               time_steps=500,
               initial_state=None,
               initial_noise=None,
               render=True,
               deterministic=True):
    """Roll out the agent for several evaluation episodes.

    Returns an EpisodeStats with per-episode lengths, rewards and losses.
    """
    stats = EpisodeStats(episode_lengths=np.zeros(episodes),
                         episode_rewards=np.zeros(episodes),
                         episode_loss=np.zeros(episodes))

    print_header(3, 'Testing')

    for episode in range(episodes):
        state = env.reset(initial_state=initial_state,
                          noise_amplitude=initial_noise)

        for step in range(time_steps):
            if render:
                env.render()

            action = agent.get_action(state, deterministic=deterministic)
            state, reward, done, _ = env.step(tn(action))

            stats.episode_rewards[episode] += reward
            stats.episode_lengths[episode] = step

            if done:
                break

        # Report progress for this episode.
        print_stats({
            'run': run,
            'steps': int(stats.episode_lengths[episode] + 1),
            'episode': episode + 1,
            'episodes': episodes,
            'reward': stats.episode_rewards[episode],
        })

    if render:
        env.viewer.close()

    return stats
Esempio n. 47
0
def instanceLaunch(instance_name,
                   commonmode='ro',
                   withcommondata=True,
                   withlocaldata=True,
                   preemptible=False):
    """Launch a GCE instance, optionally attaching common/local data disks.

    :param instance_name: name of the compute instance to create.
    :param commonmode: attach mode ('ro'/'rw') for the shared common disk.
    :param withcommondata: attach the shared common-data disk when True.
    :param withlocaldata: attach a per-instance data disk or local SSD.
    :param preemptible: create a preemptible instance when True.
    :return: async process handle from utils.exec_commandAsync.
    """
    instanceListClean()

    utils.print_header('starting ' + instance_name)

    # Generate and dump the startup script the instance runs on boot.
    startupFile = getStartupFile()

    startupFileDump(
        startupFile,
        withcommondata=withcommondata, withlocaldata=withlocaldata)

    # Base create command: fixed image, full storage scope.
    command = 'gcloud compute instances ' + \
        'create ' + instance_name + ' ' \
        '--image ' + image_name + ' ' \
        '--scopes storage-full'

    if preemptible:
        command = command + ' --preemptible'

    if withcommondata:
        # Shared disk appears in the guest as device sdb.
        command = command + \
            ' --disk name=' + diskcommonname + \
            ',device-name=sdb,mode=' + commonmode

    if withlocaldata:
        if not(useSSD):
            # Persistent per-instance disk as device sdc, read-write.
            command = command + \
                ' --disk name=' + diskLocalDataName(instance_name) + ',' + \
                'device-name=sdc,mode=rw'
        else:
            # Scratch local SSD instead of a persistent disk.
            command = command + \
                ' --local-ssd interface=SCSI'

    command = command + \
        ' --metadata-from-file startup-script={0} '.format(startupFile)

    command = command + \
        '  --machine-type ' + machinetype + ' '

    P = utils.exec_commandAsync(command)

    return P
Esempio n. 48
0
    async def build_claim_req(args: dict):
        """
        Build ADD claim request.

        Creates a fresh DID, registers it on the ledger via a NYM
        request, then builds a claim-definition transaction with random
        numeric key material.

        :param args: arguments to build ADD claim request
                     (pool_handle, wallet_handle, submitter_did).
        :return: (request json, request info json) on success, or ""
                 on failure.
        """
        # Imported lazily; only this builder needs them.
        import string
        import random
        pool_handle = args['pool_handle']
        wallet_handle = args['wallet_handle']
        submitter_did = args['submitter_did']
        try:
            utils.print_header("\n======= Create did =======")
            did, verkey = await signus.create_and_store_my_did(wallet_handle,
                                                               '{}')

            utils.print_header("\n======= Build nym request =======")
            nym_req = await ledger.build_nym_request(submitter_did, did,
                                                     verkey,
                                                     None, None)

            utils.print_header("\n======= Send nym request =======")
            await ledger.sign_and_submit_request(pool_handle, wallet_handle,
                                                 submitter_did, nym_req)

            # Random sequence number and dummy CL key material -- the
            # values only need to be well-formed, not cryptographically valid.
            seq_no = random.randint(1, 1000000)
            signature_type = 'CL'
            data = {"primary": {
                "n": utils.generate_random_string(characters=string.digits),
                "s": utils.generate_random_string(characters=string.digits),
                "rms": utils.generate_random_string(characters=string.digits),
                "r": {"name": utils.generate_random_string(
                    characters=string.digits)},
                "rctxt": utils.generate_random_string(
                    characters=string.digits),
                "z": utils.generate_random_string(characters=string.digits)}}

            utils.print_header("\n======= Build claim request =======")
            claim_req = await ledger.build_claim_def_txn(did, seq_no,
                                                         signature_type,
                                                         json.dumps(data))

            req_info = json.dumps({'kind': 'claim',
                                   'data': {'issuer_did': did,
                                            'seq_no': seq_no,
                                            'signature_type': signature_type}})
            req = json.dumps({'request': claim_req, 'submitter_did': did})

            return req, req_info

        except Exception as e:
            utils.force_print_error_to_console(
                "Cannot build claim request. Skip building...")
            utils.force_print_error_to_console(str(e))
            # NOTE(review): failure returns "" while success returns a
            # 2-tuple -- confirm callers handle both shapes.
            return ""
Esempio n. 49
0
    async def _create_pool_config(self):
        """
        Create pool configuration from genesis file.
        """
        try:
            utils.print_header("\n\n\tCreate ledger config "
                               "from genesis txn file")
            config_json = json.dumps(
                {'genesis_txn': self.config.pool_genesis_file})
            await pool.create_pool_ledger_config(self.pool_name, config_json)
        except IndyError as err:
            # Error 306 means the ledger config already exists -- reuse it.
            if err.error_code != 306:
                utils.print_error(str(err))
                raise
            utils.print_warning("The ledger already exists, moving on...")
Esempio n. 50
0
    async def build_several_adding_req_to_files(self, args: dict, req_kind,
                                                number_of_file, number_of_req):
        """
        Build several ADD request and write them to list of temporary files.
        :param args: contain all necessary arguments to build a request
                    (pool_handle, wallet_handle, submitter_did)
        :param req_kind: kind of ADD request (schema, nym, attribute, claim).
        :param number_of_file: number of temporary file you want to store
                               requests. Number of request will be divided
                               equally among temp files.
        :param number_of_req: total of requests you want to build.
        :return: list of temporary file name.
        """
        utils.print_header("\n\tBuilding several {} requests..."
                           .format(req_kind))
        # Suppress console output unless logging was explicitly enabled.
        if not self.log:
            utils.start_capture_console()
        # Split the total request count into per-file work quotas.
        works = RequestBuilder.divide(number_of_file, number_of_req)

        req_builder = RequestBuilder.get_adding_req_builder(req_kind)

        files = list()
        print(self.req_info_file_path)
        # All request-info records go into a single shared info file,
        # while the requests themselves are spread across temp files.
        req_info_file = open(self.req_info_file_path, "w")
        for work in works:
            file_name = utils.generate_random_string(
                suffix='_{}.txt'.format(str(time.time())))
            file_name = os.path.join(self.path, file_name)
            temp_file = open(file_name, "w")
            utils.print_ok_green(str(work))
            for i in range(work):
                # req is a (request, request_info) pair from the builder.
                req = await req_builder(args)
                print(req[1], file=req_info_file)
                print(req[0], file=temp_file)
            temp_file.close()
            files.append(file_name)
        req_info_file.close()

        if not self.log:
            utils.stop_capture_console()

        utils.print_header("\n\tBuilding request complete")

        return files
Esempio n. 51
0
    async def _create_wallet(self):
        """
        Create wallet.
        """
        try:
            utils.print_header("\n\tCreate wallet")
            await wallet.create_wallet(self.pool_name, self.wallet_name,
                                       None, None, None)
        except IndyError as e:
            # Error 203 means the wallet already exists -- reuse it.
            if e.error_code != 203:
                utils.print_error(str(e))
                raise
            utils.print_warning(
                "Wallet '%s' already exists.  "
                "Skipping wallet creation..." % str(self.wallet_name))
Esempio n. 52
0
def verify_network():
    '''
    Verify that the network config settings in the hardning config file has
    been applied.

    Not a CIS test.

    '''
    print_header("10 BONUS - Verify network settings")

    config = ConfigParser.SafeConfigParser()
    config.read('%s/hardening/config.cfg' % app.SYCO_VAR_PATH)
    # Check that every [network] setting from the hardening config is
    # present in /etc/sysctl.conf.
    for counter, setting in enumerate(config.options('network'), start=1):
        value = config.get('network', setting)
        print_header(
            "10.{0} Verify network settings - {1}".format(counter, value))
        check_not_empty("grep %s /etc/sysctl.conf" % value)
Esempio n. 53
0
def runOnAllNodesAsync(command):
    """
    Run a command on all nodes
    """
    listnodes, nodes = instanceListAll()

    # Fan the command out over SSH to every node, then wait for all of
    # the spawned processes to finish.
    pending = []
    for nodename in listnodes['all']:
        utils.print_header('ON [' + nodename + '] running: ' + command)
        pending.append(utils.sshAsync(nodes[nodename]['externalIp'], command))

    utils.waitTermination(pending)
def hopfield_single_performance(
        n_sample,
        n_label,
        noise_amount,
        fit_mode,
        save_fig,
        ):
    """Fit a Hopfield network once and measure recall on noisy inputs.

    Returns (similarity, accuracy) averaged over the recalled samples;
    optionally saves a figure comparing original/noisy/recalled images.
    """
    # Collected parameters; fit_hopfield returns an updated copy.
    params = dict(n_sample=n_sample, n_label=n_label,
                  noise_amount=noise_amount, fit_mode=fit_mode)
    print_params(**params)

    print('\n.. fitting hopfield\n')
    hf, X, y, target_names, params = fit_hopfield(params)
    print_params(**params)

    print('\n.. recalling\n')
    X, X_noise, X_recall = recall_with_noise(clf=hf, X=X,
                                             noise_amount=noise_amount)

    print_header('result')
    similarities, accurate = get_recalling_performance(X, X_recall)
    similarity = np.mean(similarities)
    accuracy = np.mean(accurate)
    print('similarity:', similarity)
    print('accuracy:', accuracy)

    # Compare original / noisy / recalled images and save the figure.
    if save_fig:
        print('\n.. view recalling result\n')
        view_recalling_result(X, X_noise, X_recall,
                              accurate=accurate, **params)

    return similarity, accuracy
Esempio n. 55
0
def main():
    """Dispatch the hadoop setup helper's sub-commands."""
    # No arguments: print the usage menu.
    if utils.inputMatch([]):
        utils.print_header('hadoop')
        utils.print_option("copy", "setup files (copy jar files)")
        utils.print_option("setup", "setup configuration (which IPs)")
        utils.print_option("bashrc", "add startup to bashrc")
        utils.print_option("format", "format hadoop")
        utils.print_option("start-dfs", "run start-dfs")
        utils.print_option("stop-dfs", "run stop-dfs")
        utils.print_option("status", "get status")
        utils.print_option(
            "full", "run full setup (copy->setup->bashrc->format->start-dfs)")
        utils.print_header('')

    # Each sub-command maps to its handler; every keyword is checked in
    # order, mirroring the original independent if-statements.
    actions = (
        ('copy', setupSoftFiles),
        ('setup', setupConfigurationFiles),
        ('bashrc', setupBashrc),
        ('format', formatFileSystem),
        ('start-dfs', startDfs),
        ('stop-dfs', stopDfs),
        ('status', getStatus),
        ('full', setupFullWithFormat),
    )
    for keyword, handler in actions:
        if utils.inputMatch([keyword]):
            handler()
Esempio n. 56
0
def full():
    """Run the complete pipeline and report per-phase elapsed times."""
    timeStart = getCurrentTime()
    main.setupFull()
    timeSetup = getCurrentTime()

    # Data preparation steps (not individually timed).
    additionPython()
    cleanHdfs()
    cutBrisbane()

    storeBrisbaneOnHdfs()
    timeStore = getCurrentTime()

    runProduction()
    timeProduction = getCurrentTime()

    # All timings are measured from the overall start.
    utils.print_header("Timing")
    for label, elapsed in (("setup", timeSetup - timeStart),
                           ("store", timeStore - timeStart),
                           ("production", timeProduction - timeStart)):
        printElapsed(label, elapsed)
    utils.print_header("")

    viewProductionDistributed()
Esempio n. 57
0
    async def build_attribute_req(args: dict):
        """
        Build ADD attribute request.

        Creates a fresh DID, registers it on the ledger via a NYM
        request, then builds an ATTRIB request that sets an 'endpoint'
        attribute on that DID.

        :param args: arguments to build ADD attribute request
                     (pool_handle, wallet_handle, submitter_did).
        :return: (request json, request info json) on success, or ""
                 on failure.
        """
        pool_handle = args['pool_handle']
        wallet_handle = args['wallet_handle']
        submitter_did = args['submitter_did']
        try:
            utils.print_header("\n======= Create did =======")
            did, verkey = await signus.create_and_store_my_did(wallet_handle,
                                                               '{}')

            utils.print_header("\n======= Build nym request =======")
            nym_req = await ledger.build_nym_request(submitter_did, did,
                                                     verkey,
                                                     None, None)

            utils.print_header("\n======= Send nym request =======")
            await ledger.sign_and_submit_request(pool_handle, wallet_handle,
                                                 submitter_did, nym_req)

            # Fixed dummy endpoint value; only the attribute's presence
            # matters for this load test.
            data = {'endpoint': {'ha': '127.0.0.1:5555'}}

            utils.print_header("\n======= Build attribute request =======")
            attr_req = await ledger.build_attrib_request(did, did, None,
                                                         json.dumps(data),
                                                         None)

            req_info = json.dumps({'kind': 'attribute',
                                   'data': {'target_did': did,
                                            'raw_name': 'endpoint'}})
            req = json.dumps({'request': attr_req, 'submitter_did': did})

            return req, req_info

        except Exception as e:
            utils.force_print_error_to_console(
                "Cannot build attribute request. Skip building...")
            utils.force_print_error_to_console(str(e))
            # NOTE(review): failure returns "" while success returns a
            # 2-tuple -- confirm callers handle both shapes.
            return ""