Code example #1
File: create.py Project: retr0h/kindly
def create(ctx):
    '''Create a new cluster. '''

    args = ctx.obj.get('args')
    command_args = {}

    c = config.Config(args, command_args)
    s = spec.DeploymentSpec(c.kindly_file)
    d = driver.Kind(c, s)
    p = packager.Helm(c, s)
    o = orchestrator.Kubectl(c, s)

    cluster_name = s.cluster_name
    if d.exists():
        msg = f"Cluster '{cluster_name}' already exists"
        util.abort_with_message(msg)

    with halo.Halo(
            text=f"Creating cluster '{cluster_name}'",
            spinner='dots',
            enabled=c.spinner,
    ) as spinner:
        d.create()
        spinner.succeed()

    if s.template_spec_contains('image'):
        with halo.Halo(
                text=f"Loading image '{s.image}'",
                spinner='dots',
                enabled=c.spinner,
        ) as spinner:
            d.load_image()
            spinner.succeed()

    if s.template_spec_contains('packager'):
        with halo.Halo(
                text=f"Create package namespace '{s.packager_namespace}'",
                spinner='dots',
                enabled=c.spinner,
        ) as spinner:
            o.create_namespace()
            spinner.succeed()

        with halo.Halo(
                text=f"Install package '{s.packager_chart}'",
                spinner='dots',
                enabled=c.spinner,
        ) as spinner:
            p.install()
            spinner.succeed()

    if s.template_spec_contains('configs'):
        with halo.Halo(
                text=
                f"Create objects from yaml definitions in '{s.configs_path}'",
                spinner='dots',
                enabled=c.spinner,
        ) as spinner:
            o.create_objects()
            spinner.succeed()
Code example #2
File: plotter.py Project: uncommoncode/robopen
def run_gcode(gcodes, device):
    with eleksdraw.open_device(device) as device:
        commands = GCodeCommandWrapper(device, gcode.GCode)
        with halo.Halo(text='Startup...', spinner='hearts'):
            device.run_command('', soft_error=True)
            if device.get_state() != eleksdraw.State.IDLE:
                raise RuntimeError(
                    'Device not ready to draw in state: {}'.format(
                        device.get_state()))
            # GRBL recommends a soft reset on start.
            device.run_command(grbl.GRBL.soft_reset())
            commands.set_units_mm()  # pylint: disable=E1101
            commands.set_coordinates_absolute()  # pylint: disable=E1101
            commands.set_feed_rate(1000)  # pylint: disable=E1101

        try:
            for command in tqdm.tqdm(gcodes):
                device.run_command(command)
        except KeyboardInterrupt:
            with halo.Halo(text='Terminating...', spinner='monkey'):
                device.run_command(gcode.GCode.pen_up())
                device.run_command(gcode.GCode.move_fast((0, 0)))

        with halo.Halo(text='Waiting for run to complete...',
                       spinner='hearts'):
            while device.get_state() == eleksdraw.State.RUN:
                time.sleep(1.0)

        print('Final state: {}'.format(device.get_state()))
Code example #3
def get_extractor(
    path: str, format_: str, backend: str, sigpaths: List[str], should_save_workspace=False, disable_progress=False
) -> FeatureExtractor:
    """
    raises:
      UnsupportedFormatError
      UnsupportedArchError
      UnsupportedOSError
    """
    if format_ not in (FORMAT_SC32, FORMAT_SC64):
        if not is_supported_format(path):
            raise UnsupportedFormatError()

        if not is_supported_arch(path):
            raise UnsupportedArchError()

        if not is_supported_os(path):
            raise UnsupportedOSError()

    if format_ == FORMAT_DOTNET:
        import capa.features.extractors.dnfile.extractor

        return capa.features.extractors.dnfile.extractor.DnfileFeatureExtractor(path)

    if backend == "smda":
        from smda.SmdaConfig import SmdaConfig
        from smda.Disassembler import Disassembler

        import capa.features.extractors.smda.extractor

        smda_report = None
        with halo.Halo(text="analyzing program", spinner="simpleDots", stream=sys.stderr, enabled=not disable_progress):
            config = SmdaConfig()
            config.STORE_BUFFER = True
            smda_disasm = Disassembler(config)
            smda_report = smda_disasm.disassembleFile(path)

        return capa.features.extractors.smda.extractor.SmdaFeatureExtractor(smda_report, path)
    else:
        import capa.features.extractors.viv.extractor

        with halo.Halo(text="analyzing program", spinner="simpleDots", stream=sys.stderr, enabled=not disable_progress):
            vw = get_workspace(path, format_, sigpaths)

            if should_save_workspace:
                logger.debug("saving workspace")
                try:
                    vw.saveWorkspace()
                except IOError:
                    # see #168 for discussion around how to handle non-writable directories
                    logger.info("source directory is not writable, won't save intermediate workspace")
            else:
                logger.debug("CAPA_SAVE_WORKSPACE unset, not saving workspace")

        return capa.features.extractors.viv.extractor.VivisectFeatureExtractor(vw, path)
Code example #4
File: main.py Project: ekmixon/capa
def get_extractor(path: str,
                  format: str,
                  backend: str,
                  sigpaths: List[str],
                  should_save_workspace=False,
                  disable_progress=False) -> FeatureExtractor:
    """
    raises:
      UnsupportedFormatError:
    """
    if backend == "smda":
        from smda.SmdaConfig import SmdaConfig
        from smda.Disassembler import Disassembler

        import capa.features.extractors.smda.extractor

        smda_report = None
        with halo.Halo(text="analyzing program",
                       spinner="simpleDots",
                       stream=sys.stderr,
                       enabled=not disable_progress):
            config = SmdaConfig()
            config.STORE_BUFFER = True
            smda_disasm = Disassembler(config)
            smda_report = smda_disasm.disassembleFile(path)

        return capa.features.extractors.smda.extractor.SmdaFeatureExtractor(
            smda_report, path)
    else:
        import capa.features.extractors.viv.extractor

        with halo.Halo(text="analyzing program",
                       spinner="simpleDots",
                       stream=sys.stderr,
                       enabled=not disable_progress):
            if format == "auto" and path.endswith(EXTENSIONS_SHELLCODE_32):
                format = "sc32"
            elif format == "auto" and path.endswith(EXTENSIONS_SHELLCODE_64):
                format = "sc64"
            vw = get_workspace(path, format, sigpaths)

            if should_save_workspace:
                logger.debug("saving workspace")
                try:
                    vw.saveWorkspace()
                except IOError:
                    # see #168 for discussion around how to handle non-writable directories
                    logger.info(
                        "source directory is not writable, won't save intermediate workspace"
                    )
            else:
                logger.debug("CAPA_SAVE_WORKSPACE unset, not saving workspace")

        return capa.features.extractors.viv.extractor.VivisectFeatureExtractor(
            vw, path)
Code example #5
File: apply.py Project: retr0h/kindly
def apply(ctx):
    '''Apply configs to an existing cluster. '''

    args = ctx.obj.get('args')
    command_args = {}

    c = config.Config(args, command_args)
    s = spec.DeploymentSpec(c.kindly_file)
    d = driver.Kind(c, s)
    p = packager.Helm(c, s)
    o = orchestrator.Kubectl(c, s)

    if not d.exists():
        msg = (f"Cluster '{s.cluster_name}' does not exist.  "
               "Please execute create subcommand.")
        util.abort_with_message(msg)

    with halo.Halo(
            text="Setting orchestrator's context",
            spinner='dots',
            enabled=c.spinner,
    ) as spinner:
        o.set_context()
        spinner.succeed()

    if s.template_spec_contains('image'):
        with halo.Halo(
                text=f"Loading image '{s.image}'",
                spinner='dots',
                enabled=c.spinner,
        ) as spinner:
            d.load_image()
            spinner.succeed()

    if s.template_spec_contains('packager'):
        with halo.Halo(
                text=f"Upgrade package '{s.packager_chart}'",
                spinner='dots',
                enabled=c.spinner,
        ) as spinner:
            p.upgrade()
            spinner.succeed()

    if s.template_spec_contains('configs'):
        with halo.Halo(
                text=f"Apply updated configs in '{s.configs_path}'",
                spinner='dots',
                enabled=c.spinner,
        ) as spinner:
            o.update_objects()
            spinner.succeed()
Code example #6
File: cloudformation.py Project: acaire/stax
    def wait_for_stack_update(self, action=None):
        """
        Wait for a stack change/update
        """
        kwargs = {'text': f'{self.name}: {action} Pending'}
        if action == 'deletion':
            kwargs['color'] = 'red'
        spinner = halo.Halo(**kwargs)
        spinner.start()

        while True:
            try:
                req = self.client.describe_stacks(StackName=self.name)
            except botocore.exceptions.ClientError as err:
                if err.response['Error']['Message'].find(
                        'does not exist') != -1:
                    if action == 'deletion':
                        return spinner.succeed(
                            f'{self.name}: DELETE_COMPLETE (or stack not found)'
                        )
                    raise StackNotFound(f'{self.name} stack no longer exists')
                raise

            status = req['Stacks'][0]['StackStatus']

            spinner.text = f'{self.name}: {status}'
            if status in FAILURE_STATES:
                return spinner.fail()
            elif status in SUCCESS_STATES:
                return spinner.succeed()

            time.sleep(1)
Code example #7
def generate_player_toion_toioff(season):
    """
    Generates TOION and TOIOFF at 5v5 for each player in this season.
    :param season: int, the season
    :return: df with columns Player, TOION, TOIOFF, and TOI60.
    """

    spinner = halo.Halo()
    spinner.start(text='Generating TOI60 for {0:d}'.format(season))

    team_by_team = []
    allteams = ss.get_teams_in_season(season)
    for i, team in enumerate(allteams):
        if os.path.exists(ss.get_team_toi_filename(season, team)):
            spinner.start(text='Generating TOI60 for {0:d} {1:s} ({2:d}/{3:d})'.format(
                season, ss.team_as_str(team), i + 1, len(allteams)))
            toi_indiv = get_5v5_player_season_toi(season, team)
            team_by_team.append(toi_indiv)
            spinner.stop()

    toi60 = pd.concat(team_by_team)
    toi60 = toi60.groupby('PlayerID').sum().reset_index()
    toi60.loc[:, 'TOI%'] = toi60.TOION / (toi60.TOION + toi60.TOIOFF)
    toi60.loc[:, 'TOI60'] = toi60['TOI%'] * 60

    return toi60
Code example #8
    def execute(self, project_dict):
        with halo.Halo(text=self.SPINNER_MESSAGE, spinner="dots"):
            project_id = self.client.create(**project_dict)

        self.logger.log(
            self.CREATE_SUCCESS_MESSAGE_TEMPLATE.format(project_id))
        self.logger.log(self.get_instance_url(project_id))
Code example #9
File: actions.py Project: pcskys/pdm
def do_lock(
    project: Project,
    strategy: str = "all",
    tracked_names: Optional[Iterable[str]] = None,
    requirements: Optional[Dict[str, Dict[str, Requirement]]] = None,
) -> Dict[str, Candidate]:
    """Performs the locking process and update lockfile.

    :param project: the project instance
    :param strategy: update strategy: reuse/eager/all
    :param tracked_names: required when using eager strategy
    :param requirements: An optional dictionary of requirements, read from pyproject
        if not given.
    """
    check_project_file(project)
    # TODO: multiple dependency definitions for the same package.
    provider = project.get_provider(strategy, tracked_names)
    requirements = requirements or project.all_dependencies

    # TODO: switch reporter at io level.
    with halo.Halo(text="Resolving dependencies", spinner="dots") as spin:
        reporter = project.get_reporter(requirements, tracked_names, spin)
        resolver = project.core.resolver_class(provider, reporter)
        mapping, dependencies, summaries = resolve(
            resolver, requirements, project.environment.python_requires)
        data = format_lockfile(mapping, dependencies, summaries)
        spin.succeed("Resolution success")
    project.write_lockfile(data)

    return mapping
Code example #10
File: cli.py Project: SupersonicAds/gdbt
def validate(scope: str, update: bool) -> None:
    """Validate the configuration"""
    try:
        check_for_updates()
        console.out("")
        with halo.Halo(text="Loading", spinner="dots") as spinner:
            spinner.text = "Evaluating paths"
            path_current = pathlib.Path(scope).expanduser().resolve()
            path_base = gdbt.code.templates.TemplateLoader(
                path_current).base_path

            spinner.text = "Loading configuration"
            configuration = gdbt.code.configuration.load(path_current)
            templates = gdbt.code.templates.load(path_current)

            spinner.text = "Resolving resources"
            for name, template in templates.items():
                template.resolve(name, configuration, str(path_base), update)

            spinner.succeed(
                rich.style.Style(
                    color="green",
                    bold=True).render("Configuration is valid!\n"))
    except gdbt.errors.Error as exc:
        console.print(f"[red][b]ERROR[/b] {exc.text}")
        raise SystemExit(1)
Code example #11
File: grids.py Project: pvthinker/Nyles
    def __init__(self, fine, coarse):
        assert any(coarse.incr != fine.incr)
        comm = MPI.COMM_WORLD
        nprocs = comm.Get_size()
        myrank = comm.Get_rank()
        matshape = tuple(coarse.incr // fine.incr)
        xshape = coarse.shape
        dummyshape = [j//i for i, j in zip(matshape, xshape)]
        ngbs = fine.neighbours
        nh = 1
        dummysize, domainindices = topo.get_variable_shape(
            dummyshape, ngbs, nh)
        N = np.prod(dummysize)

        self.shape = dummyshape
        self.neighbours = ngbs
        self.size = dummysize
        self.N = N
        self.domainindices = domainindices
        self.nh = nh
        self.x = np.zeros((self.N,))
        self.A = []

        self.halo = halo.Halo({"nh": nh, "size": dummysize, "neighbours": ngbs,
                               "domainindices": domainindices, "shape": self.shape})
Code example #12
def cli_search(ctx, query=None, count=30, spinner='dots', html=False):
    client = Anicode()
    with halo.Halo(text=(('searching for {fore.CYAN}{style.BRIGHT}{query}'
                          '{style.RESET_ALL}...').format(**locals(), **COLOR)),
                   spinner=spinner) as spinner:
        results = client.search(query, count=count)

    if len(results) <= 0:
        sys.stdout.write(
            ('{fore.YELLOW}no results found{fore.RESET}\n').format(**COLOR))
        sys.exit(1)

    _render_results(results)
    while True:
        try:
            _copy_result(_select_result(results), html=html)
            sys.exit(0)
        except ValueError as exc:
            sys.stdout.write(
                ('{style.BRIGHT}{fore.RED}ERROR:{style.RESET_ALL} '
                 '{exc}\n').format(**locals(), **COLOR))
        except (
                KeyboardInterrupt,
                EOFError,
        ) as exc:
            sys.stdout.write(
                ('{fore.YELLOW}user interrupted{fore.RESET}\n').format(
                    **COLOR))
            sys.exit(1)
Code example #13
    def build(self, repository_path: Path) -> Optional[str]:
        assert repository_path.exists() and repository_path.is_dir()
        try:  # TODO: Implement dependency resolution
            # TODO: Implement compiler bootstrap
            package_json = repository_path.joinpath("package.json")

            json_stuff = json.loads(package_json.read_text())  # type: ignore

            git.checkout(str(repository_path),
                         json_stuff["version"])  # type: ignore

            build_script = json_to_obj.BuildInstructions(
                json_stuff["build"]).get()  # type: ignore
            deps: List[Dict[str, str]] = json_stuff.get("deps",
                                                        [])  # type: ignore
            assert isinstance(deps, list)
            compiler = dep_resolver.resolve_compiler(
                json_stuff["compiler"])  # type: ignore
            build_script.insert(0, compiler)
            check_sum = json_to_obj.Hashes(
                json_stuff["hashes"]).get()  # type: ignore

            outcome = utils.run_subprocess(
                build_script,
                loading_text=f"Building (script: {' '.join(build_script)})",
                fail_text="Build failed!",
                success_text="Build succeeded!",
                text_color="yellow",
                spinner_color="cyan",
                stderr=subprocess.STDOUT,
                cwd=str(repository_path),
            )
            ctx = click.get_current_context()
            if outcome is not None:
                log_path = self.create_build_log(
                    outcome.stdout.decode())  # type: ignore
                ctx.fail(f"See the build log at {log_path}")

            target_exe = Path(repository_path.joinpath(
                json_stuff["target"]))  # type: ignore
            if not utils.is_an_executable(target_exe):
                ctx.fail("Could not find target executable!")
            with halo.Halo("Checking hash") as spinner:  # type: ignore
                if utils.check_hash(target_exe.read_bytes(), check_sum):
                    spinner.succeed(
                        "Built executable matched checksum!")  # type: ignore
                else:
                    spinner.fail(f"Checksum mismatched (checksum: {check_sum})"
                                 )  # type: ignore
            return str(target_exe)

        except OSError as exception:
            raise click.ClickException(
                "The package.json does not exist for this package. Please consult the maintainer"
            ) from exception
        except KeyError as exception:
            raise click.ClickException(
                "The package.json is invalid. Please consult the maintainer"
            ) from exception
Code example #14
    def execute(self, **kwargs):
        with halo.Halo(text=self.WAITING_FOR_RESPONSE_MESSAGE, spinner="dots"):
            try:
                instances = self.client.artifacts_list(**kwargs)
            except sdk_exceptions.GradientSdkError as e:
                raise exceptions.ReceivingDataFailedError(e)

        self._log_objects_list(instances, kwargs)
Code example #15
File: datasets.py Project: Paperspace/gradient-cli
    def execute(self, dataset_version_id, source_paths, target_path):
        self.assert_supported(dataset_version_id)

        dataset_version_id = self.resolve_dataset_version_id(dataset_version_id)

        target_path = os.path.abspath(target_path)

        if not source_paths:
            source_paths = ['/']

        status_text = 'Downloading files'

        with halo.Halo(text=status_text, spinner='dots') as status:
            with WorkerPool() as pool:
                for source_path in source_paths:
                    source_path = self.normalize_path(source_path)

                    list_objects = None
                    is_file = False
                    has_trailing_slash = source_path.endswith('/')

                    if not has_trailing_slash:
                        result = self.get_object(dataset_version_id, source_path)
                        if result is not None:
                            list_objects = [([result], False)]
                            is_file = True

                    if not list_objects:
                        list_objects = self.list_objects(
                            dataset_version_id=dataset_version_id,
                            path=source_path,
                            recursive=True,
                            absolute=True,
                            max_keys=max(pool.worker_count * 2, 64)
                        )

                    def update_status():
                        status.text = '{}: {} ({})  '.format(status_text, source_path, pool.completed_count())

                    for results, _ in list_objects:
                        if not results:
                            break

                        pre_signeds = self.client.generate_pre_signed_s3_urls(
                            dataset_version_id,
                            calls=[dict(method='getObject', params=dict(Key=r['key'])) for r in results],
                        )

                        for result, pre_signed in zip(results, pre_signeds):
                            if is_file:
                                path = target_path
                            elif has_trailing_slash:
                                path = os.path.join(target_path, result['key'][len(source_path)-1:])
                            else:
                                path = os.path.join(target_path, result['key'])

                            update_status()
                            pool.put(self._get, url=pre_signed.url, path=path)
Code example #16
    def train(self,
              blackbox: Playable,
              env: gym.Env,
              iterations: int,
              samples_per_update: int,
              show_after: bool,
              postprocess: Callable[[np.ndarray], np.ndarray],
              stop_at=1e15):
        timestamp = time.time()

        spinner = halo.Halo(text="Initialising full train", spinner="dots")
        spinner.start()

        fitnesses = []

        progress = []
        progress_symbol = "#"
        fitness = -1e6
        best = -1e6
        for i in range(iterations):
            spinner.text = self._spinner_text(progress, fitness, best)

            if 20 * i / iterations > len(progress):
                progress.append(progress_symbol)

            fitness = blackbox(self.predict, env, samples_per_update,
                               postprocess) / samples_per_update

            if fitness > best:
                best = fitness
                self.update()

                if best >= stop_at:
                    break

            else:
                self.reset()
                self.explore()
                self.update()

            fitnesses.append(fitness)

        spinner.succeed(
            f"time: {time.time() - timestamp} seconds -- Final fitness {fitness} -- iterations {i}"
        )

        self.reset()

        if show_after:
            fitness = blackbox(self.predict, env, 3, postprocess, True) / 3

            print("show got %.3f" % fitness)

            plt.figure(figsize=(20, 10))
            x = np.arange(len(fitnesses))
            sb.scatterplot(x, fitnesses)
            sb.lineplot(x, fitnesses)
            plt.show()
Code example #17
File: cli.py Project: SupersonicAds/gdbt
def plan(scope: str, update: bool) -> None:
    """Plan the changes"""
    try:
        check_for_updates()
        console.out("")
        with halo.Halo(text="Loading", spinner="dots") as spinner:
            spinner.text = "Evaluating paths"
            path_current = pathlib.Path(scope).expanduser().resolve()
            path_base = gdbt.code.templates.TemplateLoader(
                path_current).base_path
            path_relative = path_current.relative_to(path_base)

            spinner.text = "Loading configuration"
            configuration = gdbt.code.configuration.load(path_current)
            templates = gdbt.code.templates.load(path_current)

            spinner.text = "Resolving resources"
            resources_desired = {
                name: typing.cast(
                    gdbt.resource.ResourceGroup,
                    template.resolve(name, configuration, str(path_base),
                                     update),
                )
                for name, template in templates.items()
            }

            spinner.text = "Loading resource state"
            states = gdbt.state.StateLoader(configuration).load(path_relative)
            resources_current_meta = {
                name: state.resource_meta
                for name, state in states.items()
            }

            spinner.text = "Refreshing resource state"
            resources_current = gdbt.resource.ResourceLoader(
                configuration).load(resources_current_meta)

            spinner.text = "Calculating plan"
            plan = gdbt.state.Plan.plan(resources_current, resources_desired)
            summary = gdbt.state.Plan.summary(resources_current,
                                              resources_desired, plan)
            spinner.text = "Rendering plan"
            plan_rendered, changes_pending = gdbt.state.PlanRenderer(
                plan).render(summary)

            if not changes_pending:
                spinner.succeed(
                    rich.style.Style(
                        color="green",
                        bold=True).render("Dashboards are up to date!\n"))
                return

        console.out(plan_rendered)
        os._exit(0)
    except gdbt.errors.Error as exc:
        console.print(f"[red][b]ERROR[/b] {exc.text}")
        raise SystemExit(1)
Code example #18
File: video_fom.py Project: meharc/tensor
def main(options):
    if not os.path.exists(options.infile):
        logging.error("Cannot open video file %s", options.infile)
        exit(1)

    writeout = False
    if options.outfile:
        writeout = True
        outfile = options.outfile
        fourcc = cv2.VideoWriter_fourcc(*options.codec)
        # TODO: Auto-detect shape of output video. If that is not set correctly,
        #       nothing will be written out to the video file.
        out_vid = cv2.VideoWriter(outfile, fourcc, 30.0, (1920, 1080), True)

    logging.info("Detecting FOMs in {infile} and writing out to {out} (using: {codec})".format(
        infile=options.infile,
        out=outfile if options.outfile else "live stream",
        codec=options.codec))

    current = nxt = None
    frame_count = 0
    spinner = halo.Halo(text="Detecting FOM...", spinner='dots')
    spinner.start()
    try:
        for frame, color_frame in video_frames(options.infile):
            prev = current
            current = nxt
            nxt = frame
            if prev is None or current is None:
                prev_color_frame = color_frame
                continue
            bounding_rect = detect_fom((current, prev, nxt),
                                       psi=options.psi,
                                       gamma=options.gamma)
            if bounding_rect:
                spinner.text = "Found another FOM"
                cv2.rectangle(prev_color_frame, bounding_rect[0], bounding_rect[1], 127, 2)

            cv2.imshow('Detecting FOM...', prev_color_frame)
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break

            if writeout:
                out_vid.write(prev_color_frame)
            prev_color_frame = color_frame

            frame_count += 1
            if frame_count % 30 == 0:
                spinner.text = "Time: {} seconds".format(frame_count)
        spinner.succeed(text="Done")
    except KeyboardInterrupt:
        spinner.succeed(text="Interrupted by user")
    except Exception as ex:
        spinner.fail("An error occurred: {}".format(ex))
    finally:
        if writeout:
            out_vid.release()
Code example #19
File: grids.py Project: pvthinker/Nyles
    def __init__(self, grid, param):
        for key in ['shape', 'incr', 'procs']:
            setattr(self, key, grid[key])

        for key in ['omega', 'ndeepest', 'myrank']:
            setattr(self, key, param[key])

        nh = 1
        procs0 = [i*p for i, p in zip(self.incr, self.procs)]
        # print("---------> procs0=", procs0, self.myrank)
        loc = topo.rank2loc(self.myrank, procs0)

        neighbours = topo.get_neighbours(loc, procs0, incr=self.incr)
        size, domainindices = topo.get_variable_shape(
            self.shape, neighbours, nh)
        N = np.prod(size)

        self.domainindices = domainindices
        self.size = size
        self.N = N
        self.nh = nh
        self.neighbours = neighbours

        self.x = np.zeros((self.N,))
        self.y = np.zeros((self.N,))
        self.b = np.zeros((self.N,))
        self.r = np.zeros((self.N,))

        self.msk = np.zeros(self.size)
        k0, k1, j0, j1, i0, i1 = self.domainindices
        self.msk[k0:k1, j0:j1, i0:i1] = 1.
        # self.xx = self.x
        # self.xx.shape = self.size
        # self.bb = self.b
        # self.bb.shape = self.size
        # self.rr = self.r
        # self.rr.shape = self.size

        # self.xx = self.x.reshape(self.size)
        # self.bb = self.b.reshape(self.size)
        # self.rr = self.r.reshape(self.size)

        # self.xx = np.reshape(self.x, self.size)
        # self.bb = np.reshape(self.b, self.size)
        # self.rr = np.reshape(self.r, self.size)

        # self.xx = np.zeros(self.size)
        # self.bb = np.zeros(self.size)
        # self.rr = np.zeros(self.size)

        # self.x = self.xx.ravel()
        # self.b = self.bb.ravel()
        # self.r = self.rr.ravel()

        self.halo = halo.Halo({"nh": nh, "size": size, "neighbours": neighbours,
                               "domainindices": domainindices, "shape": self.shape})
Code example #20
    def __init__(
        self,
        bbin_path: str = "~/.config/binbin",
        bin_path: str = "~/bin",
        app_path: str = "~/app",
    ):
        bbin_dir = Path(bbin_path).expanduser()
        binaries_dir = Path(bin_path).expanduser()
        app_dir = Path(app_path).expanduser()

        self._bbin_path = bbin_dir
        self._app_path = app_dir
        self._bin_path = binaries_dir

        if not (bbin_dir.exists() and bbin_dir.is_dir()):
            interface.warn("Bbin's index is not initialized! Initalizing...")
            git.clone(
                BBIN_URL,
                str(bbin_dir),
                with_spinner=False,
                success_text=
                f"Finished initializing bbin's index at {bbin_dir}",
            )
        else:
            self.update()

        if not (binaries_dir.exists() and binaries_dir.is_dir()):
            with halo.Halo(f"Creating binary directory at {bin_path}"
                           ) as spinner:  # type: ignore
                binaries_dir.mkdir(parents=True)
                # Make sure it is on the $PATH
                userpath.append(str(binaries_dir))  # type: ignore
                spinner.succeed("Done")  # type: ignore

        if not (app_dir.exists() and app_dir.is_dir()):
            with halo.Halo(f"Creating app directory at {app_dir}"
                           ) as spinner:  # type: ignore
                app_dir.mkdir(parents=True)
                spinner.succeed("Done")  # type: ignore

        assert bbin_dir.exists() and bbin_dir.is_dir()
        assert app_dir.exists() and app_dir.is_dir()
        assert binaries_dir.exists() and binaries_dir.is_dir()
Code example #21
File: osm.py Project: andreweland/citychef
def Halo(*args, **kw):
    ipython = False
    try:
        get_ipython()
        ipython = True
    except NameError:
        pass
    if ipython:
        return halo.HaloNotebook(*args, **kw)
    return halo.Halo(*args, **kw)
Code example #22
File: tensorboards.py Project: Exitussru/GradientCI
    def execute(self, id, experiments):
        """
        :param str id:
        :param list[str] experiments:
        """
        with halo.Halo(text=self.SPINNER_MESSAGE, spinner="dots"):
            tensorboard = self.client.add_experiments(
                id, added_experiments=list(experiments))

        self._log_object(tensorboard)
Code example #23
def store():
    spinner = halo.Halo(text="Loading data...", spinner="dots")
    spinner.start()
    data = json.load(open("/data/import/assets.json"))
    spinner.succeed("Data loaded")

    spinner = halo.Halo(text="Importing", spinner="dots")
    spinner.start()
    total = len(data)
    for i, meta in enumerate(data):
        aid = meta["id"]
        spinner.text = "Inserting asset {} of {} (ID {}) ".format(
            i, total, aid)

        asset = asset_factory(meta)

        elastic.index(index='assets', id=aid, body=asset.meta)
        cache.set("asset", aid, json.dumps(asset.meta))

    spinner.succeed("Inserted {} assets".format(total))
Code example #24
def spinner(itr, *, text=None):
    s = halo.Halo(text=text)
    s.start()
    try:
        for i, x in enumerate(itr):
            # s.info(f"."*i)
            s.stop_and_persist(text="." * (i + 1))
            s.start()
            yield x
    finally:
        s.succeed("OK")
Code example #25
File: main.py Project: ivankabestwill/capa
def get_extractor_py2(path, format, disable_progress=False):
    import capa.features.extractors.viv

    with halo.Halo(text="analyzing program", spinner="simpleDots", stream=sys.stderr, enabled=not disable_progress):
        vw = get_workspace(path, format, should_save=False)

        try:
            vw.saveWorkspace()
        except IOError:
            # see #168 for discussion around how to handle non-writable directories
            logger.info("source directory is not writable, won't save intermediate workspace")

    return capa.features.extractors.viv.VivisectFeatureExtractor(vw, path)
Code example #26
    def install(self, executable: str, action: enums.InstallAction) -> None:
        # TODO: refactor code to reduce duplication
        # TODO: Handle already exists
        if action in {enums.InstallAction.move, "move"}:
            with halo.Halo(
                    "Moving %s to %s" %
                (executable, self._bin_path)) as spinner:  # type: ignore
                shutil.move(executable, str(self.bin_path))
                spinner.succeed("Done!")  # type: ignore
        elif action in {enums.InstallAction.symlink, "symlink"}:
            with halo.Halo(
                    "Symlinking %s to %s" %
                (executable, self._bin_path)) as spinner:  # type: ignore
                self._bin_path.joinpath(Path(executable).name).symlink_to(
                    Path(executable))
                spinner.succeed("Done!")  # type: ignore
        elif action in {enums.InstallAction.copy, "copy"}:
            with halo.Halo(
                    "Copying %s to %s" %
                (executable, self._bin_path)) as spinner:  # type: ignore
                shutil.copy2(executable, self.bin_path)
                spinner.succeed("Done!")  # type: ignore
Code example #27
def delete(ctx):
    '''Delete an existing cluster. '''

    args = ctx.obj.get('args')
    command_args = {}

    c = config.Config(args, command_args)
    s = spec.DeploymentSpec(c.kindly_file)
    d = driver.Kind(c, s)

    with halo.Halo(text='Deleting cluster', spinner='dots',
                   enabled=c.spinner) as spinner:
        d.delete()
        spinner.succeed()
Code example #28
File: main.py Project: ccDev-Labs/capa
def get_extractor_py3(path, format, backend, disable_progress=False):
    if backend == "smda":
        from smda.SmdaConfig import SmdaConfig
        from smda.Disassembler import Disassembler

        import capa.features.extractors.smda

        smda_report = None
        with halo.Halo(text="analyzing program",
                       spinner="simpleDots",
                       stream=sys.stderr,
                       enabled=not disable_progress):
            config = SmdaConfig()
            config.STORE_BUFFER = True
            smda_disasm = Disassembler(config)
            smda_report = smda_disasm.disassembleFile(path)

        return capa.features.extractors.smda.SmdaFeatureExtractor(
            smda_report, path)
    else:
        import capa.features.extractors.viv

        with halo.Halo(text="analyzing program",
                       spinner="simpleDots",
                       stream=sys.stderr,
                       enabled=not disable_progress):
            vw = get_workspace(path, format, should_save=False)

            try:
                vw.saveWorkspace()
            except IOError:
                # see #168 for discussion around how to handle non-writable directories
                logger.info(
                    "source directory is not writable, won't save intermediate workspace"
                )

        return capa.features.extractors.viv.VivisectFeatureExtractor(vw, path)
Code example #29
    def execute(self, dataset_version_id, source_paths, target_path):
        self.assert_supported(dataset_version_id)

        if not target_path:
            target_path = '/'
        else:
            target_path = self.normalize_path(target_path)
            if not target_path.endswith('/'):
                target_path += '/'

        status_text = 'Uploading files'

        with halo.Halo(text=status_text, spinner='dots') as status:
            with WorkerPool() as pool:
                for source_path in source_paths:
                    has_trailing_slash = source_path.endswith(os.path.sep)
                    source_path = os.path.abspath(source_path)
                    source_name = os.path.basename(source_path)

                    def update_status():
                        status.text = '{}: {} ({})'.format(
                            status_text, source_path, pool.completed_count())

                    results = []

                    for source_path_is_file, path in self._list_files(
                            source_path):
                        path = path.replace(os.path.sep, '/')

                        key = target_path
                        if not source_path_is_file:
                            if not has_trailing_slash:
                                key += source_name + '/'
                            key += path[len(source_path) + 1:]

                        mimetype = mimetypes.guess_type(
                            key)[0] or 'application/octet-stream'

                        results.append(
                            dict(key=key, path=path, mimetype=mimetype))

                        if len(results) == pool.worker_count:
                            self._sign_and_put(dataset_version_id, pool,
                                               results, update_status)
                            results = []

                    if results:
                        self._sign_and_put(dataset_version_id, pool, results,
                                           update_status)
Code example #30
    def __init__(self,
                 r_min,
                 r_max,
                 redshift=0.0,
                 input_halo=None,
                 powSpec=None,
                 k_min=None,
                 k_max=None):
        """
        Do not call parent __init__ because we don't need the
        kernel object here.
        """
        self.log_r_min = numpy.log10(r_min)
        self.log_r_max = numpy.log10(r_max)
        self.r_array = numpy.logspace(
            self.log_r_min, self.log_r_max,
            defaults.default_precision["corr_npoints"])
        if r_min == r_max:
            self.log_r_min = numpy.log10(r_min)
            self.log_r_max = numpy.log10(r_min)
            self.r_array = numpy.array([r_min])
        self.xi_array = numpy.zeros(self.r_array.size)

        if input_halo is None:
            input_halo = halo.Halo(redshift)
        self.halo = input_halo
        self.halo.set_redshift(redshift)
        if ((k_min is not None or k_max is not None)
                and not self.halo.get_extrapolation()
                and (k_min < self.halo._k_min or k_max > self.halo._k_max)):
            self.halo.set_extrapolation(True)

        if k_min is None:
            k_min = self.halo._k_min
        self._ln_k_min = numpy.log(k_min)
        if k_max is None:
            k_max = self.halo._k_max
        self._ln_k_max = numpy.log(k_max)

        if powSpec is None:
            powSpec = 'linear_power'
        try:
            self.power_spec = self.halo.__getattribute__(powSpec)
        except (AttributeError, TypeError):
            print("WARNING: Invalid input for power spectra variable,")
            print("\t setting to linear_power")
            self.power_spec = self.halo.__getattribute__('linear_power')
        self.initialized_spline = False
        return None