Example 1
def main(alphas: Optional[List[float]] = Option([1., ], help=''),
         exponents: Optional[List[int]] = Option([1, ], help=''),
         sigmas: Optional[List[float]] = Option([0.1, ], help=''),
         epsilons: Optional[List[float]] = Option([0.1, ], help='')):

    with open("full_solution", 'rb') as input_file:
        full_solution = pickle.load(input_file)

    with open("reference_solution", 'rb') as input_file:
        reference_solution = pickle.load(input_file)

    # perform the registration
    for alpha in alphas:
        for exponent in exponents:
            for sigma in sigmas:
                for epsilon in epsilons:
                    gs = geodesic_shooting.GeodesicShooting(alpha=alpha, exponent=exponent)
                    result = gs.register(reference_solution, full_solution, sigma=sigma,
                                         epsilon=epsilon, iterations=5000, return_all=True)

                    transformed_input = result['transformed_input']

                    norm = (np.linalg.norm((full_solution - transformed_input).flatten())
                            / np.linalg.norm(full_solution.flatten()))
                    with open('relative_errors.txt', 'a') as errors_file:
                        errors_file.write(f"{alpha}\t{exponent}\t{sigma}\t{epsilon}\t{norm}\t"
                                          f"{result['iterations']}\t{result['time']}\t"
                                          f"{result['reason_registration_ended']}\n")
Example 2
def main(
    model: Supported = Argument(Supported.default, help="sensor model"),
    port: str = Argument("/dev/ttyUSB0", help="serial port"),
    db_path: Path = Argument(Path("pypms.sqlite"), help="sensor messages DB"),
    samples: int = Option(4, "--samples", "-n"),
    interval: int = Option(20, "--interval", "-i"),
):
    """
    Read raw messages from a supported sensor and store them in a sqlite DB.
    After reading the sensor, decode all messages in the DB and print them.
    """

    # get DB context manager
    message_db = pypms_db(db_path)
    sensor = Sensor[model]

    # read from sensor and write to DB
    with message_db() as db, SensorReader(sensor, port, interval, samples) as reader:
        # read one obs from the sensor at a time
        with progressbar(reader(raw=True), length=samples, label=f"reading {sensor}") as progress:
            for obs in progress:
                write_message(db, sensor, obs)

    # read and decode all messages in the DB
    with message_db() as db:
        # extract obs from one sensor at a time
        print(sensor)
        for obs in read_obs(db, sensor):
            print(obs)
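The progressbar above is Typer's re-export of click's progress bar and wraps any iterable; a minimal standalone sketch (the command name and the sleep are illustrative assumptions):

from time import sleep

from typer import progressbar, run

def work(samples: int = 5):
    # length= is needed when the iterable has no len(), e.g. a generator
    with progressbar(range(samples), length=samples, label="working") as items:
        for _ in items:
            sleep(0.1)

if __name__ == '__main__':
    run(work)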
Example 3
def search(
        subject: str = Option(..., "-s", "--subject"),
        name: Optional[str] = Option(None, "-n", "--name"),
        desc: Optional[str] = Option(None, "-d", "--desc"),
        repo: Optional[str] = Option(None, "-r", "--repo"),
):
    search_packages(name=name, subject=subject, desc=desc, repo=repo)
Example 4
def deploy(
        environment: str = Option("",
                                  "-e",
                                  "--environment",
                                  help=_environment_help),
        organization: str = Option("",
                                   "-o",
                                   "--organization",
                                   help=_organization_help),
        graph: Path = Option(None, help=_graph_help),
        graph_version_id: str = Option("", help=_graph_version_id_help),
):
    """Deploy a previously uploaded graph version

    You can specify either '--graph-version-id' to deploy a specific version, or
    '--graph' to deploy the latest uploaded version of a graph.
    """
    ids = IdLookup(
        environment_name=environment,
        organization_name=organization,
        explicit_graph_path=graph,
        explicit_graph_version_id=graph_version_id,
    )

    with abort_on_error("Deploy failed"):
        deploy_graph_version(ids.graph_version_id, ids.environment_id)

    sprint(f"[success]Graph deployed.")
Example 5
def predict_model(
    model: NeuralNetwork = Option(
        NeuralNetwork.simple, "--model", "-m", case_sensitive=False
    ),
    weights_file: Optional[str] = Argument(None),
    media_folder: str = Option(
        FOLDER, help="The folder where the YouTube-8M files are stored"
    ),
    batch: int = Option(BATCH_SIZE, help="Number of records to process per batch"),
    outfile: str = Option(OUTFILE, "-o", help="The output file"),
    calculate_map: bool = Option(
        False, "--map", help="Calculate average map of the test dataset instead"
    ),
):
    kwargs = locals()
    log.info(f"Launching train function for model simple_model with arguments {kwargs}")
    if model == NeuralNetwork.netvlad:
        if batch == BATCH_SIZE:
            # Assume no batch size was given, set it to default
            batch = NETVLAD_BATCH_SIZE
            kwargs["batch"] = NETVLAD_BATCH_SIZE
        if batch > NETVLAD_BATCH_SIZE:
            raise ValueError(
                f"NetVLAD batch size must be <= {NETVLAD_BATCH_SIZE}, got {batch}"
            )

        m = NetVLADModel()
    else:
        m = SimpleModel()

    m.predict(**kwargs)
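The kwargs = locals() line above works only because it runs before any other local is bound, so the dict holds exactly the CLI parameters; that is also why the function later updates kwargs["batch"] by hand after rebinding batch. A minimal illustration with hypothetical parameters:

from typer import Option, run

def train(lr: float = Option(0.1), epochs: int = Option(10)):
    kwargs = locals()   # {'lr': ..., 'epochs': ...} -- only the CLI args so far
    history = []        # bound after the snapshot, so absent from kwargs
    print(kwargs)

if __name__ == '__main__':
    run(train)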
Example 6
def _tool_chain(
    grid_size: str = Option(
        default='50x50',
        help=textwrap.dedent(
            'The size for a randomly created grid of letters (a-z only).'),
    ),
    words_file_path: Path = Option(
        default=Path('payload/words.txt'),
        help=textwrap.dedent(
            'A path to a custom text file with words to search.'),
    ),
    words_limit: int = Option(
        default=5,
        help=textwrap.dedent('Search N random words from a given text file.'),
    ),
    word: str = Option(
        default='',
        help=textwrap.dedent(
            'A custom word to search in a grid of letters e.g "foo".'),
    ),
) -> None:
    """The tool searches words in a randomly generated grid of letters."""
    _validate_puzzle_grid_size(grid_size)
    grid_height, grid_width = tuple(map(int, grid_size.split('x')))
    with RandomWordsGrid(
            grid_size=GridSize(grid_height, grid_width)) as grid:  # type: Grid
        board = grid.content.to_coordinates()
        if word:
            _validate_puzzle_word(word)
            start_word_search_puzzle(HiddenWord(board, word))
        else:
            _validate_puzzle_words_path(words_file_path)
            random_words = _random_words(path=words_file_path,
                                         limit=words_limit)
            start_words_search_puzzle(HiddenWords(board, random_words))
Example 7
def pull(
        organization: str = Option("",
                                   "-o",
                                   "--organization",
                                   help=_organization_help),
        graph_version_id: str = Option("", help=_graph_version_id_help),
        force: bool = Option(False, "-f", "--force", help=_force_help),
        graph: Path = Argument(None, exists=True, help=_pull_graph_help),
):
    """Update the code for the current graph"""
    ids = IdLookup(
        organization_name=organization,
        explicit_graph_version_id=graph_version_id,
        explicit_graph_path=graph,
    )
    with abort_on_error("Error downloading graph"):
        b = io.BytesIO(download_graph_zip(ids.graph_version_id))
        editor = GraphDirectoryEditor(ids.graph_file_path, overwrite=force)

    with abort_on_error("Error downloading graph"):
        try:
            with ZipFile(b, "r") as zf:
                editor.add_node_from_zip("graph.yml", "graph.yml", zf)
        except FileOverwriteError as e:
            sprint(f"[error]{e}")
            sprint(
                "[info]Run this command with --force to overwrite local files")
            raise typer.Exit(1)
    sprint(f"[success]Pulled graph content")
Example 8
def main(
    tile_cache: str = Argument(..., help="Path to local tile cache"),
    dataset: str = Option(..., help="Dataset Name"),
    version: str = Option(..., help="Dataset Version ID"),
    bucket: str = Option("gfw-tiles", help="AWS Bucket"),
    implementation: str = Option(..., help="Tile Cache Implementation"),
    cores: int = Option(..., help="Number of processes to use to upload tiles"),
) -> None:
    """Upload a local tile cache to S3."""

    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(
        logging.Formatter(
            fmt="%(asctime)s %(levelname)-4s %(message)s", datefmt="%Y-%m-%d %H:%M:%S"
        )
    )
    LOGGER.addHandler(handler)

    upload_tiles(
        tile_cache=tile_cache,
        bucket=bucket,
        dataset=dataset,
        version=version,
        implementation=implementation,
        cores=cores,
    )
Example 9
def delete(
    repo: str = Option(None, "-r", "--repo"),
    package: str = Option(None, "-p", "--package"),
    version: Optional[str] = Option(None, "-v", "--version"),
    filename: Optional[str] = Option(None, "-f", "--filename"),
):
    delete_file(repo=repo, package=package, version=version, filename=filename)
Example 10
def main(N_X: int = Option(100, help='Number of pixels in x-direction'),
         N_T: int = Option(100, help='Number of pixels in time-direction'),
         N_train: int = Option(50, help='Number of training parameters')):

    def exact_solution(x, *, mu=0.25):
        s_l = 1.5 * mu
        s_m = mu
        s_r = 0.5 * mu
        t_intersection = 0.25 / (s_l - s_r)
        return ScalarFunction(data=(2. * (x[..., 1] <= t_intersection) * (0.25 + s_l * x[..., 1] - x[..., 0] >= 0.)
                                    + (2. * (x[..., 1] > t_intersection)
                                       * (0.25 + (s_l - s_m) * t_intersection + s_m * x[..., 1] - x[..., 0] >= 0.))
                                    + (1. * (0.25 + s_l * x[..., 1] - x[..., 0] < 0.)
                                       * (0.5 + s_r * x[..., 1] - x[..., 0] > 0.))))

    def create_fom():
        return AnalyticalModel(exact_solution, n_x=N_X, n_t=N_T, name='Analytical Burgers Model')

    fom = create_fom()

    parameters = np.linspace(0.25, 1.5, N_train)

    snapshots = np.vstack([fom.solve(p).to_numpy().flatten() for p in parameters]).T
    print(snapshots.shape)

    _, S = pod(snapshots, modes=1, product_operator=None, return_singular_values=True)
    filepath_prefix = f'results_nx_{N_X}_nt_{N_T}_Ntrain_{N_train}'
    results_filepath = f'{filepath_prefix}/results'
    pathlib.Path(results_filepath).mkdir(parents=True, exist_ok=True)

    with open(f'{results_filepath}/singular_values_snapshots.txt', 'a') as singular_values_file:
        for s in S:
            singular_values_file.write(f"{s}\n")
Example 11
def _parse_commit(
    ctx: Context,
    *,
    input: FileText = Option(
        "-",
        help="A file to read commits from. If `-`, commits will be read from stdin.",
    ),
    output: FileText = Option(
        "-",
        help="A file to write parsed commits to. If `-`, parsed commits will be written to stdout.",
        mode="w",
    ),
    include_unparsed: bool = Option(
        False,
        help="If set, commits which fail to be parsed will be returned.",
    ),
) -> None:
    """
    Parses a stream of commits in the given file or from stdin.
    """
    from asyncio import run

    from confuse import Configuration

    from .parse_commit import cli_main

    config = ctx.find_object(Configuration)
    run(
        cli_main(
            config,
            input=input,
            output=output,
            include_unparsed=include_unparsed,
        ))
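The FileText options above rely on the click convention that a filename of - maps to stdin, or to stdout when the option is opened with mode="w". A minimal sketch of the same pattern (the command name is hypothetical):

from typer import FileText, Option, run

def passthrough(
    input: FileText = Option("-", help="file to read, '-' for stdin"),
    output: FileText = Option("-", mode="w", help="file to write, '-' for stdout"),
):
    for line in input:
        output.write(line)

if __name__ == '__main__':
    # echo hello | python passthrough.py
    run(passthrough)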
Example 12
def node(
        title: str = Option("", "--title", "-n", help=_name_help),
        component: str = Option("", "-c", "--component", help=_component_help),
        location: Path = Argument(None),
):
    """Add a new node to a graph

    patterns create node --title='My Node' mynode.py
    """
    if component and location:
        abort("Specify either a component or a node location, not both")

    if component:
        ids = IdLookup(find_nearest_graph=True)
        GraphConfigEditor(ids.graph_file_path).add_component_uses(
            component_key=component).write()
        sprint(f"[success]Added component {component} to graph")
        return

    if not location:
        sprint("[info]Nodes can be python files like [code]ingest.py")
        sprint("[info]Nodes can be sql files like [code]aggregate.sql")
        sprint(
            "[info]You also can add a subgraph like [code]processor/graph.yml")
        message = "Enter a name for the new node file"
        location = prompt_path(message, exists=False)

    if location.exists():
        abort(f"Cannot create node: {location} already exists")

    ids = IdLookup(node_file_path=location, find_nearest_graph=True)
    # Update the graph yaml
    node_file = "/".join(location.absolute().relative_to(
        ids.graph_directory).parts)
    node_title = title or (location.parent.name
                           if location.name == "graph.yml" else location.stem)
    with abort_on_error("Adding node failed"):
        editor = GraphConfigEditor(ids.graph_file_path)
        editor.add_node(
            title=node_title,
            node_file=node_file,
            id=str(random_node_id()),
        )

    # Write to disk last to avoid partial updates
    if location.suffix == ".py":
        location.write_text(_PY_FILE_TEMPLATE)
    elif location.suffix == ".sql":
        location.write_text(_SQL_FILE_TEMPLATE)
    elif location.name == "graph.yml":
        location.parent.mkdir(exist_ok=True, parents=True)
        GraphConfigEditor(location, read=False).set_name(node_title).write()
    else:
        abort("Node file must be graph.yml or end in .py or .sql")
    editor.write()

    sprint(f"\n[success]Created node [b]{location}")
    sprint(
        f"\n[info]Once you've edited the node and are ready to run the graph, "
        f"use [code]patterns upload")
Example 13
def out(
        when: str = Option(None, help="Set a specific time to clock out"),
        id: str = Option(None,
                         help="The id of the clok instance to clok out of."),
        m: str = Option(None, help="Journal Message to add to record"),
):
    """Clock out from a job"""
    if when:
        when = parse_date_and_time(when)

    if id is not None and when is not None:
        clok = Clok.clok_out_by_id(id, when, verbose=True)
    else:
        clok = Clok.get_last_record()
        if (datetime.now() - clok.time_in).total_seconds() / (60 * 60) > 12:
            typer.confirm(
                "The last clocked in time is more than 12 hours ago, are you\n"
                "sure you want to clok out now?", abort=True)

        if when is not None:
            clok = Clok.clock_out_when(when, verbose=True)
        else:
            clok = Clok.clock_out(verbose=True)
    if m is not None:
        clok.add_journal(m)
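A detail worth knowing here: typer.confirm returns the user's answer, and only with abort=True does a negative reply raise typer.Abort and stop the program. A minimal standalone sketch:

from typer import confirm, run

def wipe():
    # abort=True: answering "no" raises typer.Abort and exits non-zero
    confirm("This deletes all records. Continue?", abort=True)
    print("done")

if __name__ == '__main__':
    run(wipe)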
Example 14
def signup(
    server_name: str = Option("default", prompt=True),
    server_address: str = Option(..., prompt=True),
    user: str = Option(..., "-u", prompt=True),
    password: str = Option(...,
                           "-p",
                           prompt=True,
                           confirmation_prompt=True,
                           hide_input=True),
):
    """
    Sign up on the Link Server

    Args:
        user : Username
        password: Password
    """
    add(server_name, server_address)
    server_config = get_current_server_config()
    server_address = server_config[SERVER_ADDR]
    server_config[SERVER_USER] = user

    endpoint = urllib.parse.urljoin(server_address, "/user/")
    data = f'{{"username":"******","password":"******"}}'
    resp = requests.post(endpoint, data=data)
    print(resp.status_code)
    print(resp.text)

    if resp.status_code == 200:
        update_curent_server_config(server_config)
        create_token(password)
Example 15
def trigger(
        organization: str = Option("",
                                   "-o",
                                   "--organization",
                                   help=_organization_help),
        environment: str = Option("",
                                  "-e",
                                  "--environment",
                                  help=_environment_help),
        graph: Path = Option(None, exists=True, help=_graph_help),
        graph_version_id: str = Option("", help=_graph_version_id_help),
        type: TypeChoices = Option(TypeChoices.pubsub, hidden=True),
        node: Path = Argument(..., exists=True, help=_node_help),
):
    """Trigger a node on a deployed graph to run immediately"""
    ids = IdLookup(
        environment_name=environment,
        organization_name=organization,
        explicit_graph_path=graph,
        node_file_path=node,
        explicit_graph_version_id=graph_version_id,
        find_nearest_graph=True,
    )
    with abort_on_error("Error triggering node"):
        trigger_node(
            ids.node_id,
            ids.graph_version_id,
            ids.environment_id,
            execution_type=type,
        )

    sprint(f"[success]Triggered node {node}")
Example 16
def csv(
    ctx: Context,
    capture: bool = Option(False,
                           "--capture",
                           help="write raw messages instead of observations"),
    overwrite: bool = Option(False,
                             "--overwrite",
                             help="overwrite file, if already exists"),
    path: Path = Argument(Path(),
                          help="csv formatted file",
                          show_default=False),
):
    """Read sensor and print measurements"""
    if path.is_dir():  # pragma: no cover
        path /= f"{datetime.now():%F}_pypms.csv"
    mode = "w" if overwrite else "a"
    logger.debug(f"open {path} on '{mode}' mode")
    with ctx.obj["reader"] as reader, path.open(mode) as csv:
        sensor_name = reader.sensor.name
        if not capture:
            logger.debug(f"capture {sensor_name} observations to {path}")
            # add header to new files
            if path.stat().st_size == 0:
                obs = next(reader())
                csv.write(f"{obs:header}\n")
            for obs in reader():
                csv.write(f"{obs:csv}\n")
        else:
            logger.debug(f"capture {sensor_name} messages to {path}")
            # add header to new files
            if path.stat().st_size == 0:
                csv.write("time,sensor,hex\n")
            for raw in reader(raw=True):
                csv.write(f"{raw.time},{sensor_name},{raw.hex}\n")
Example 17
def analyze(
        repository: Path = Argument(...,
                                    help="The path to the bare repository."),
        metrics: Optional[List[AvailableMetrics]] = Argument(
            None, case_sensitive=False),
        commits: Optional[FileText] = Option(
            None,
            help="The newline-separated input file of commit ids. Commit ids are read from stdin if this is not passed.",  # pylint: disable=line-too-long
        ),
        custom_metrics: List[str] = Option([]),
        workers: int = 1,
) -> None:
    """Analyze commits of a repository.

    Either provide the commit ids to analyze on stdin or as a file argument."""
    from sys import stdin  # pylint: disable=import-outside-toplevel

    from pyrepositoryminer.analyze import worker  # pylint: disable=import-outside-toplevel

    metrics = metrics if metrics else []
    ids = (
        id.strip() for id in (commits if commits else stdin)  # pylint: disable=superfluous-parens
    )
    with make_pool(workers, repository, metrics, custom_metrics) as pool:
        results = (res for res in pool.imap(worker, ids) if res is not None)
        for result in results:
            print(result)
Example 18
def main(
    xblocks: int = Argument(..., help='Number of blocks in x direction.'),
    yblocks: int = Argument(..., help='Number of blocks in y direction.'),
    snapshots: int = Argument(
        ...,
        help='Number of snapshots for basis generation per component. In total SNAPSHOTS^(XBLOCKS * YBLOCKS).',
    ),
    rbsize: int = Argument(..., help='Size of the reduced basis.'),
    grid: int = Option(60, help='Use grid with 2*NI*NI elements.'),
    product: Choices('euclidean h1') = Option(
        'h1',
        help='Product w.r.t. which to orthonormalize and calculate Riesz representatives.',
    ),
    testing: bool = Option(
        False,
        help='Load the gui and exit right away (for functional testing).'),
):
    """Thermalblock demo with GUI."""

    if not testing:
        app = QtWidgets.QApplication(sys.argv)
        win = RBGui(xblocks, yblocks, snapshots, rbsize, grid, product)
        win.show()
        sys.exit(app.exec_())

    from pymor.discretizers.builtin.gui import qt
    qt._launch_qt_app(
        lambda: RBGui(xblocks, yblocks, snapshots, rbsize, grid, product),
        block=False)
Example 19
def serial(
        ctx: Context,
        format: Optional[Format] = Option(None,
                                          "--format",
                                          "-f",
                                          help="formatted output"),
        decode: Optional[Path] = Option(None, help="decode captured messages"),
):
    """Read sensor and print measurements"""
    reader = ctx.obj["reader"]
    if decode:
        reader = MessageReader(decode, reader.sensor, reader.samples)
    with reader:
        if format == "hexdump":
            for n, raw in enumerate(reader(raw=True)):
                echo(raw.hexdump(n))
        elif format:
            if format == "csv":
                obs = next(reader())
                echo(f"{obs:header}")
            for obs in reader():
                echo(f"{obs:{format}}")
        else:  # pragma: no cover
            for obs in reader():
                echo(str(obs))
Example 20
def bridge(
        mqtt_topic: str = Option("homie/+/+/+", help="mqtt root/topic"),
        mqtt_host: str = Option("mqtt.eclipse.org", help="mqtt server"),
        mqtt_port: int = Option(1883, help="server port"),
        mqtt_user: str = Option("", help="server username",
                                show_default=False),
        mqtt_pass: str = Option("", help="server password",
                                show_default=False),
        db_host: str = Option("influxdb", help="database server"),
        db_port: int = Option(8086, help="server port"),
        db_user: str = Option("root", help="server username"),
        db_pass: str = Option("root", help="server password"),
        db_name: str = Option("homie", help="database name"),
):
    """Bridge between MQTT and InfluxDB servers"""
    pub = client_pub(host=db_host,
                     port=db_port,
                     username=db_user,
                     password=db_pass,
                     db_name=db_name)

    def on_sensordata(data: Data) -> None:
        pub(time=data.time,
            tags={"location": data.location},
            data={data.measurement: data.value})

    client_sub(
        topic=mqtt_topic,
        host=mqtt_host,
        port=mqtt_port,
        username=mqtt_user,
        password=mqtt_pass,
        on_sensordata=on_sensordata,
    )
Example 21
def cli_upload(
    path: Path = Argument(...,
                          exists=True,
                          dir_okay=True,
                          file_okay=True,
                          readable=True,
                          resolve_path=True),
    auth_key: Optional[str] = Option(None, envvar='SMOKESHOW_AUTH_KEY'),
    root_url: str = Option(ROOT_URL, envvar='SMOKESHOW_ROOT_URL'),
    github_status_description: Optional[str] = Option(
        None, envvar='SMOKESHOW_GITHUB_STATUS_DESCRIPTION'),
    github_coverage_threshold: Optional[float] = Option(
        None, envvar='SMOKESHOW_GITHUB_COVERAGE_THRESHOLD'),
) -> None:
    try:
        asyncio.run(
            upload(
                path,
                auth_key=auth_key,
                github_status_description=github_status_description,
                github_coverage_threshold=github_coverage_threshold,
                root_url=root_url,
            ))
    except ValueError as e:
        print(e, file=sys.stderr)
        raise Exit(1)
Example 22
def get_versions_file(
        subject: str = Option(..., "-s", "--subject"),
        repo: str = Option(None, "-r", "--repo"),
        file_path: str = Option(..., "-f", "--file-path"),
):

    get_version_for_file(subject=subject, repo=repo, file_path=file_path)
Example 23
def upload(
    deploy: bool = Option(True, "--deploy/--no-deploy", help=_deploy_help),
    organization: str = Option("", "-o", "--organization", help=_organization_help),
    environment: str = Option("", "-e", "--environment", help=_environment_help),
    graph: Path = Argument(None, exists=True, help=_graph_help),
    publish_component: bool = Option(False, help=_component_help),
):
    """Upload a new version of a graph to Patterns"""
    ids = IdLookup(
        environment_name=environment,
        organization_name=organization,
        explicit_graph_path=graph,
    )

    with abort_on_error("Upload failed"):
        resp = upload_graph_version(
            ids.graph_file_path,
            ids.organization_id,
            add_missing_node_ids=not publish_component,
        )

    graph_version_id = resp["uid"]
    ui_url = resp["ui_url"]
    sprint(f"\n[success]Uploaded new graph version with id [b]{graph_version_id}")
    errors = resp.get("errors", [])
    if publish_component:
        errors = [
            e
            for e in errors
            if not e["message"].startswith("Top level input is not connected")
            and not (
                e["message"].startswith("Parameter")
                and e["message"].endswith("has no default or value")
            )
        ]
    if errors:
        sprint(f"[error]Graph contains the following errors:")
        for error in errors:
            sprint(f"\t[error]{error}")

    if publish_component:
        with abort_on_error("Error creating component"):
            resp = create_graph_component(graph_version_id)
            resp_org = resp["organization"]["slug"]
            resp_version = resp["version_name"]
            resp_component = resp["component"]["slug"]
            resp_id = resp["uid"]
            sprint(
                f"[success]Published graph component "
                f"[b]{resp_org}/{resp_component}[/b] "
                f"with version [b]{resp_version}[/b] "
                f"at id [b]{resp_id}"
            )
    elif deploy:
        with abort_on_error("Deploy failed"):
            deploy_graph_version(graph_version_id, ids.environment_id)
        sprint(f"[success]Graph deployed")

    sprint(f"\n[info]Visit [code]{ui_url}[/code] to view your graph")
Example 24
class Opt:
    DRY: Final[bool] = Option(False,
                              "--dry",
                              help="dry run, would not fire actual requests")
    ACCEPT_ALL: Final[bool] = Option(False,
                                     "--yes",
                                     help="confirm to all",
                                     show_default=False)
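Hanging Option declarations on a class, as above, lets several commands share one definition by using the attribute as a parameter default. A minimal sketch of how such constants are typically consumed (the app and the sync command are assumptions, not from the original):

from typing import Final

from typer import Option, Typer

app = Typer()

class Opt:
    DRY: Final[bool] = Option(False, "--dry", help="dry run, would not fire actual requests")

@app.command()
def sync(dry: bool = Opt.DRY):
    # hypothetical command reusing the shared option definition
    print("dry run" if dry else "live run")

if __name__ == '__main__':
    app()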
Example 25
def delete(
    repo: str = Option(None, "-r", "--repo"),
    is_operation_confirmed: bool = Option(
        False,
        "-y",
        help="auto confirm operation",
    ),
):
    delete_repo(repo=repo, is_operation_confirmed=is_operation_confirmed)
Example 26
def session(region: str = Option(default='us-east-1',
                                 help="Sets the AWS region.",
                                 metavar="REGION"),
            profile: str = Option(default=None,
                                  help="Shared credential profile to use.",
                                  metavar="PROFILE")):
    global sess, ec2
    sess = boto3.session.Session(region_name=region, profile_name=profile)
    ec2 = sess.resource('ec2')
Example 27
def environments(
    organization: str = Option("", "--organization", "-o", help=_organization_help),
    print_json: bool = Option(False, "--json", help=_json_help),
):
    """List environments"""
    ids = IdLookup(organization_name=organization)
    with abort_on_error("Error listing environments"):
        es = list(paginated_environments(ids.organization_id))
    _print_objects(es, print_json)
Example 28
def set(user: Optional[str] = Option(None, "-u"),
        address: Optional[str] = Option(None, "-a")):
    server_config = get_current_server_config()
    if user:
        server_config[SERVER_USER] = user
    if address:
        server_config[SERVER_ADDR] = address

    update_curent_server_config(server_config)
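Worth noting for the snippet above: Option's first positional argument is the default value, and only the strings after it declare flags, so Option("-a") on its own would define no -a flag at all and instead make the literal string "-a" the default. A minimal contrast (the parameter name is hypothetical):

from typing import Optional

from typer import Option, run

def demo(addr: Optional[str] = Option(None, "-a", help="default None, flag -a")):
    # Option("-a") alone would have made "-a" the *default value*, with no flag
    print(addr)

if __name__ == '__main__':
    run(demo)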
Example 29
def graphs(
    organization: str = Option("", help=_organization_help),
    print_json: bool = Option(False, "--json", help=_json_help),
):
    """List graphs"""
    ids = IdLookup(organization_name=organization)
    with abort_on_error("Error listing graphs"):
        gs = list(paginated_graphs(ids.organization_id))
    _print_objects(gs, print_json)
Example 30
def clone(
        organization: str = Option("",
                                   "-o",
                                   "--organization",
                                   help=_organization_help),
        graph: str = Option("", help=_graph_help),
        graph_version_id: str = Option("",
                                       "-v",
                                       "--version",
                                       help=_graph_version_id_help),
        component: str = Option("", "--component", help=_component_help),
        directory: Path = Argument(None, exists=False, help=_graph_help),
):
    """Download the code for a graph"""
    if not graph and not directory and not component:
        if graph_version_id:
            abort(
                "Missing graph directory argument."
                f"\ntry [code]patterns clone -v {graph_version_id} new_graph")
        else:
            abort(f"Missing graph argument."
                  f"\ntry [code]patterns clone graph-to-clone")
    component_match = COMPONENT_RE.fullmatch(component)
    if component and not component_match:
        abort(
            "Invalid component version. Must be in the form organization/component@v1"
        )

    component_name = component_match.group(2) if component_match else None

    ids = IdLookup(
        organization_name=organization,
        explicit_graph_name=graph or component_name or directory.name,
        explicit_graph_version_id=graph_version_id,
    )
    if not directory:
        if component:
            directory = Path(component_name)
        elif graph:
            directory = Path(graph)
        elif graph_version_id:
            with abort_on_error("Error"):
                directory = Path(ids.graph_name)
        else:
            abort("Specify --graph, --graph-version-id, or a directory")

    with abort_on_error("Error cloning graph"):
        if component:
            content = download_component_zip(component)
        else:
            content = download_graph_zip(ids.graph_version_id)
        editor = GraphDirectoryEditor(directory, overwrite=False)
        with ZipFile(io.BytesIO(content), "r") as zf:
            editor.add_node_from_zip("graph.yml", "graph.yml", zf)

    sprint(f"[success]Cloned graph into {directory}")