Example no. 1
    def test_reads_config_map(self):
        config = reader.read([{"x": "y"}, {1: 2}, {"x": "override y"}])
        assert config == {"x": "override y", 1: 2}

        config = reader.read(
            [
                {"x": "y"},
                {1: 2},
                {"x": "override y"},
                "tests/fixtures/parsing/yaml_file.yml",
                "tests/fixtures/parsing/json_file.json",
            ]
        )
        assert config == {"x": 1, "y": 2, 1: 2, "foo": "bar", "type": "json"}
Example no. 2
def resume(ctx, polyaxonfile, u):
    """Resume run.

    Uses [Caching](/references/polyaxon-cli/#caching)

    Examples:

    \b
    ```bash
    $ polyaxon runs --uid=8aac02e3a62a4f0aaa257c59da5eab80 resume
    ```
    """
    content = None
    if polyaxonfile:
        content = "{}".format(reader.read(polyaxonfile))

    # Check if we need to upload
    if u:
        ctx.invoke(upload, sync=False)

    owner, project_name, run_uuid = get_project_run_or_local(
        ctx.obj.get("project"), ctx.obj.get("run_uuid"), is_cli=True)
    try:
        polyaxon_client = RunClient(owner=owner,
                                    project=project_name,
                                    run_uuid=run_uuid)
        response = polyaxon_client.resume(override_config=content)
        Printer.print_success("Run was resumed with uid {}".format(
            response.uuid))
    except (ApiException, HTTPError) as e:
        handle_cli_error(e,
                         message="Could not resume run `{}`.".format(run_uuid))
        sys.exit(1)
Example no. 3
 def test_reads_yaml_stream(self):
     stream = """---
     x: y
     1: 2
     """
     config = reader.read(stream)
     assert config == {"x": "y", 1: 2}
Example no. 4
    def test_parallel_pipeline(self):
        run_config = V1CompiledOperation.read(
            [
                reader.read(
                    os.path.abspath(
                        "tests/fixtures/pipelines/simple_parallel_pipeline.yml"
                    )
                ),
                {"kind": "compiled_operation"},
            ]
        )

        run_config = CompiledOperationSpecification.apply_context(run_config)
        assert len(run_config.run.operations) == 4
        assert run_config.run.operations[0].name == "job1"
        assert run_config.run.operations[0].dependencies is None
        assert run_config.run.operations[1].name == "job2"
        assert run_config.run.operations[1].dependencies is None
        assert run_config.run.operations[2].name == "experiment1"
        assert run_config.run.operations[2].dependencies is None
        assert run_config.run.operations[3].name == "experiment2"
        assert run_config.run.operations[3].dependencies is None
        dag_strategy = run_config.run
        assert set(dag_strategy.sort_topologically(dag_strategy.dag)[0]) == {
            "job1",
            "job2",
            "experiment1",
            "experiment2",
        }
        assert run_config.run.concurrency == 2
        assert run_config.schedule is None
Example no. 5
    def test_dag_pipeline(self):
        run_config = V1CompiledOperation.read(
            [
                reader.read(
                    os.path.abspath("tests/fixtures/pipelines/simple_dag_pipeline.yml")
                ),
                {"kind": "compiled_operation"},
            ]
        )

        run_config = CompiledOperationSpecification.apply_context(run_config)
        assert len(run_config.run.operations) == 5
        assert run_config.run.operations[0].name == "job1"
        assert run_config.run.operations[1].name == "experiment1"
        assert run_config.run.operations[1].dependencies == ["job1"]
        assert run_config.run.operations[2].name == "experiment2"
        assert run_config.run.operations[2].dependencies == ["job1"]
        assert run_config.run.operations[3].name == "experiment3"
        assert run_config.run.operations[3].dependencies == ["job1"]
        assert run_config.run.operations[4].name == "job2"
        assert run_config.run.operations[4].dependencies == [
            "experiment1",
            "experiment2",
            "experiment3",
        ]
        dag_strategy = run_config.run
        sorted_dag = dag_strategy.sort_topologically(dag_strategy.dag)
        assert sorted_dag[0] == ["job1"]
        assert set(sorted_dag[1]) == {"experiment1", "experiment2", "experiment3"}
        assert sorted_dag[2] == ["job2"]
        assert run_config.run.concurrency == 3
        assert run_config.schedule is None
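The `sort_topologically` assertions above expect a level-by-level ordering of the DAG. The sketch below is a plain Kahn-style layering written only to illustrate that shape; it is not the polyaxon implementation, and the `dag` mapping (node to downstream nodes) is an assumption made for the example.

from collections import defaultdict


def sort_topologically(dag):
    """Return DAG nodes grouped into dependency levels (Kahn-style)."""
    indegree = defaultdict(int)
    for node, downstream in dag.items():
        indegree.setdefault(node, 0)
        for child in downstream:
            indegree[child] += 1
    levels = []
    current = sorted(n for n, d in indegree.items() if d == 0)
    while current:
        levels.append(current)
        ready = []
        for node in current:
            for child in dag.get(node, ()):
                indegree[child] -= 1
                if indegree[child] == 0:
                    ready.append(child)
        current = sorted(ready)
    return levels


dag = {
    "job1": {"experiment1", "experiment2", "experiment3"},
    "experiment1": {"job2"},
    "experiment2": {"job2"},
    "experiment3": {"job2"},
    "job2": set(),
}
assert sort_topologically(dag) == [
    ["job1"],
    ["experiment1", "experiment2", "experiment3"],
    ["job2"],
]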
Example no. 6
    def test_sequential_pipeline(self):
        run_config = V1CompiledOperation.read(
            [
                reader.read(
                    os.path.abspath(
                        "tests/fixtures/pipelines/simple_sequential_pipeline.yml"
                    )
                ),
                {"kind": "compiled_operation"},
            ]
        )

        run_config = CompiledOperationSpecification.apply_context(run_config)
        assert run_config.run is not None
        assert len(run_config.run.operations) == 4
        assert run_config.run.operations[0].name == "job1"
        assert run_config.run.operations[1].name == "job2"
        assert run_config.run.operations[1].dependencies == ["job1"]
        assert run_config.run.operations[2].name == "experiment1"
        assert run_config.run.operations[2].dependencies == ["job2"]
        assert run_config.run.operations[3].name == "experiment2"
        assert run_config.run.operations[3].dependencies == ["experiment1"]
        dag_strategy = run_config.run
        assert dag_strategy.sort_topologically(dag_strategy.dag) == [
            ["job1"],
            ["job2"],
            ["experiment1"],
            ["experiment2"],
        ]
        assert run_config.schedule is None
Example no. 7
    def __init__(self, filepaths):
        filepaths = to_list(filepaths)
        for filepath in filepaths:
            if not os.path.isfile(filepath):
                raise PolyaxonfileError("`{}` must be a valid file".format(filepath))
        self._filenames = [os.path.basename(filepath) for filepath in filepaths]

        self.specification = get_specification(data=reader.read(filepaths))
Example no. 8
    def test_run_simple_file_passes(self):
        run_config = V1CompiledOperation.read([
            reader.read(
                os.path.abspath(
                    "tests/fixtures/typing/run_cmd_simple_file.yml")),
            {
                "kind": "compiled_operation"
            },
        ])

        assert run_config.inputs[0].value == "MeanSquaredError"
        assert run_config.inputs[1].value is None
        validated_params = run_config.validate_params()
        assert run_config.inputs[0].value == "MeanSquaredError"
        assert run_config.inputs[1].value is None
        assert {
            "loss": V1Param(value="MeanSquaredError"),
            "num_masks": V1Param(value=None),
        } == {p.name: p.param
              for p in validated_params}
        with self.assertRaises(ValidationError):
            CompiledOperationSpecification.apply_context(run_config)

        validated_params = run_config.validate_params(
            params={"num_masks": {
                "value": 100
            }})
        assert {
            "loss": V1Param(value="MeanSquaredError"),
            "num_masks": V1Param(value=100),
        } == {p.name: p.param
              for p in validated_params}
        assert run_config.run.container.args == [
            "video_prediction_train",
            "--num_masks={{num_masks}}",
            "--loss={{loss}}",
        ]

        with self.assertRaises(ValidationError):
            # Applying context before applying params
            CompiledOperationSpecification.apply_context(run_config)

        run_config.apply_params(params={"num_masks": {"value": 100}})
        run_config = CompiledOperationSpecification.apply_context(run_config)
        run_config = CompiledOperationSpecification.apply_run_contexts(
            run_config)
        assert run_config.version == 1.05
        assert run_config.tags == ["foo", "bar"]
        container = run_config.run.container
        assert isinstance(container, k8s_schemas.V1Container)
        assert container.image == "my_image"
        assert container.command == ["/bin/sh", "-c"]
        assert container.args == [
            "video_prediction_train",
            "--num_masks=100",
            "--loss=MeanSquaredError",
        ]
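The jump from `--num_masks={{num_masks}}` to `--num_masks=100` above comes from param and context resolution. As a rough illustration only (polyaxon resolves these through its own context handling, typically via templating), a naive substitution looks like the following; `resolve_args` is a hypothetical helper, not a polyaxon function.

def resolve_args(args, params):
    """Naively substitute {{name}} placeholders with param values."""
    resolved = []
    for arg in args:
        for name, value in params.items():
            arg = arg.replace("{{%s}}" % name, str(value))
        resolved.append(arg)
    return resolved


args = ["video_prediction_train", "--num_masks={{num_masks}}", "--loss={{loss}}"]
assert resolve_args(args, {"num_masks": 100, "loss": "MeanSquaredError"}) == [
    "video_prediction_train",
    "--num_masks=100",
    "--loss=MeanSquaredError",
]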
Example no. 9
 def test_pipeline_with_no_ops_raises(self):
     run_config = V1CompiledOperation.read(
         [
             reader.read(
                 os.path.abspath("tests/fixtures/pipelines/pipeline_with_no_ops.yml")
             ),
             {"kind": "compiled_operation"},
         ]
     )
     with self.assertRaises(PolyaxonSchemaError):
         CompiledOperationSpecification.apply_context(run_config)
Example no. 10
 def test_cyclic_pipeline_raises(self):
     run_config = V1CompiledOperation.read(
         [
             reader.read(
                 os.path.abspath("tests/fixtures/pipelines/cyclic_pipeline.yml")
             ),
             {"kind": "compiled_operation"},
         ]
     )
     assert run_config.is_dag_run is True
     assert run_config.has_pipeline is True
     with self.assertRaises(PolyaxonSchemaError):
         CompiledOperationSpecification.apply_context(run_config)
Example no. 11
    def test_validation_for_required_inputs_outputs_raises(self):
        # Get compiled_operation data
        run_config = V1CompiledOperation.read([
            reader.read(
                os.path.abspath("tests/fixtures/typing/required_inputs.yml")),
            {
                "kind": "compiled_operation"
            },
        ])
        # Inputs don't have delayed validation by default
        with self.assertRaises(ValidationError):
            run_config.validate_params(is_template=False, check_runs=True)

        run_config = V1CompiledOperation.read([
            reader.read(
                os.path.abspath("tests/fixtures/typing/required_outputs.yml")),
            {
                "kind": "compiled_operation"
            },
        ])
        # Outputs have delayed validation by default
        run_config.validate_params(is_template=False, check_runs=True)
Example no. 12
    def read(cls, values):
        if isinstance(values, cls.CONFIG):
            return values

        values = to_list(values)
        data = reader.read([{"kind": cls._SPEC_KIND}] + values)
        try:
            config = cls.CONFIG.from_dict(copy.deepcopy(data))
        except TypeError as e:
            raise ValidationError(
                "Received a non valid config `{}`: `{}`".format(cls._SPEC_KIND, e)
            )
        cls.check_data(data)
        return config
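The pattern above (prepend a `kind` sentinel, merge the sources, then deserialize into a config class) can be sketched in isolation as below. `BaseSpec` and `DummyConfig` are made-up names used only for illustration and stand in for the real spec and config classes.

import copy
from dataclasses import dataclass


@dataclass
class DummyConfig:
    kind: str
    image: str

    @classmethod
    def from_dict(cls, data):
        return cls(**data)


class BaseSpec:
    _SPEC_KIND = "dummy"
    CONFIG = DummyConfig

    @classmethod
    def read(cls, values):
        if isinstance(values, cls.CONFIG):
            return values
        values = values if isinstance(values, list) else [values]
        # Merge the kind sentinel and the user-provided sources left to right.
        merged = {}
        for value in [{"kind": cls._SPEC_KIND}] + values:
            merged.update(value)
        return cls.CONFIG.from_dict(copy.deepcopy(merged))


assert BaseSpec.read([{"image": "busybox"}]) == DummyConfig(
    kind="dummy", image="busybox"
)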
Example no. 13
    def __init__(self, values):
        self._values = to_list(values)

        self._data = reader.read(
            [{"kind": self._SPEC_KIND, "version": SCHEMA_VERSION}] + self._values
        )
        try:
            self._config = self.CONFIG.from_dict(copy.deepcopy(self.data))
        except (ValidationError, TypeError) as e:
            raise PolyaxonfileError(
                "Received a non valid config `{}`: `{}`".format(self._SPEC_KIND, e)
            )
        self.check_data()
        self._extra_validation()
Example no. 14
    def test_no_params_for_required_inputs_outputs_raises(self):
        # Get compiled_operation data
        run_config = V1CompiledOperation.read([
            reader.read(
                os.path.abspath("tests/fixtures/typing/required_inputs.yml")),
            {
                "kind": "compiled_operation"
            },
        ])

        # Inputs don't have delayed validation by default
        with self.assertRaises(ValidationError):
            CompiledOperationSpecification.apply_context(run_config)

        run_config = V1CompiledOperation.read([
            reader.read(
                os.path.abspath("tests/fixtures/typing/required_outputs.yml")),
            {
                "kind": "compiled_operation"
            },
        ])
        # Outputs have delayed validation by default
        CompiledOperationSpecification.apply_context(run_config)
Example no. 15
 def test_matrix_file_passes(self):
     run_config = V1CompiledOperation.read(
         [
             reader.read(
                 os.path.abspath("tests/fixtures/pipelines/matrix_file.yml")
             ),
             {"kind": "compiled_operation"},
         ]
     )
     run_config = CompiledOperationSpecification.apply_context(run_config)
     assert run_config.version == 1.05
     assert run_config.is_dag_run is True
     assert run_config.has_pipeline is True
     assert run_config.schedule is None
     assert run_config.run.concurrency == 4
     assert isinstance(run_config.run, V1Dag)
     assert run_config.run.early_stopping is None
     assert run_config.run.kind == V1Dag.IDENTIFIER
     assert len(run_config.run.operations) == 2
     assert len(run_config.run.components) == 1
     template_hyperband = run_config.run.operations[1].parallel
     assert isinstance(template_hyperband.params["lr"], V1HpLinSpace)
     assert isinstance(template_hyperband.params["loss"], V1HpChoice)
     assert template_hyperband.params["lr"].to_dict() == {
         "kind": "linspace",
         "value": {"start": 0.01, "stop": 0.1, "num": 5},
     }
     assert template_hyperband.params["loss"].to_dict() == {
         "kind": "choice",
         "value": ["MeanSquaredError", "AbsoluteDifference"],
     }
     assert template_hyperband.params["normal_rate"].to_dict() == {
         "kind": "normal",
         "value": {"loc": 0, "scale": 0.9},
     }
     assert template_hyperband.params["dropout"].to_dict() == {
         "kind": "qloguniform",
         "value": {"high": 0.8, "low": 0, "q": 0.1},
     }
     assert template_hyperband.params["activation"].to_dict() == {
         "kind": "pchoice",
         "value": [["relu", 0.1], ["sigmoid", 0.8]],
     }
     assert template_hyperband.params["model"].to_dict() == {
         "kind": "choice",
         "value": ["CDNA", "DNA", "STP"],
     }
     assert template_hyperband.concurrency == 2
     assert isinstance(template_hyperband, V1Hyperband)
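As a quick reference for the `linspace` param asserted above, here is what that expansion conceptually produces; this is an illustrative helper, not the polyaxon matrix implementation, and inclusive endpoint handling is an assumption.

def linspace(start, stop, num):
    """Return `num` evenly spaced values from `start` to `stop` inclusive."""
    if num == 1:
        return [start]
    step = (stop - start) / (num - 1)
    return [start + i * step for i in range(num)]


values = linspace(0.01, 0.1, 5)
assert len(values) == 5
assert values[0] == 0.01
assert abs(values[-1] - 0.1) < 1e-9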
Example no. 16
def generate(polyaxonfile, python_module, build_context, destination,
             copy_path, params):
    """Generate a dockerfile given the polyaxonfile."""
    if all([polyaxonfile, build_context]):
        Printer.print_error(
            "Only a polyaxonfile or a build context option is required.")
        sys.exit(1)

    if build_context:
        try:
            build_context = [
                V1DockerfileType.from_dict(reader.read(build_context))
            ]
        except (PolyaxonSchemaError, ValidationError) as e:
            Printer.print_error("received a non valid build context.")
            Printer.print_error("Error message: {}.".format(e))
            sys.exit(1)
    else:
        specification = check_polyaxonfile(
            polyaxonfile=polyaxonfile,
            python_module=python_module,
            params=params,
            log=False,
        )

        try:
            compiled_operation = specification.compile_operation()
            compiled_operation.apply_params(params=specification.config.params)
            compiled_operation = CompiledOperationSpecification.apply_context(
                compiled_operation)
        except PolyaxonSchemaError:
            Printer.print_error(
                "Could not run this polyaxonfile locally, "
                "a context is required to resolve it dependencies.")
            sys.exit(1)

        build_context = compiled_operation.init_dockerfiles

    for init_dockerfile in build_context:
        generator = DockerFileGenerator(build_context=init_dockerfile,
                                        destination=destination or ".")
        generator.create()
        Printer.print_success("Dockerfile was generated, path: `{}`".format(
            generator.dockerfile_path))

        if copy_path:
            copy_file(generator.dockerfile_path, copy_path)
Example no. 17
    def test_matrix_early_stopping_file_passes(self):
        run_config = V1CompiledOperation.read(
            [
                reader.read(
                    os.path.abspath(
                        "tests/fixtures/pipelines/matrix_file_early_stopping.yml"
                    )
                ),
                {"kind": "compiled_operation"},
            ]
        )

        run_config = CompiledOperationSpecification.apply_context(run_config)
        assert run_config.run is not None
        assert run_config.is_dag_run is True
        assert run_config.has_pipeline is True
        assert run_config.schedule is None
        assert run_config.run.concurrency == 4
        assert isinstance(run_config.run, V1Dag)
        assert run_config.run.early_stopping[0].kind == "failure_early_stopping"
        assert isinstance(run_config.run.early_stopping[0], V1FailureEarlyStopping)
        assert len(run_config.run.early_stopping) == 1
        assert run_config.run.kind == V1Dag.IDENTIFIER
        assert len(run_config.run.operations) == 2
        assert len(run_config.run.components) == 1
        template_random = run_config.run.operations[1].parallel
        assert isinstance(template_random, V1RandomSearch)
        assert isinstance(template_random.params["lr"], V1HpLinSpace)
        assert isinstance(template_random.params["loss"], V1HpChoice)
        assert template_random.params["lr"].to_dict() == {
            "kind": "linspace",
            "value": {"start": 0.01, "stop": 0.1, "num": 5},
        }
        assert template_random.params["loss"].to_dict() == {
            "kind": "choice",
            "value": ["MeanSquaredError", "AbsoluteDifference"],
        }
        assert template_random.concurrency == 2
        assert template_random.num_runs == 300
        assert template_random.early_stopping[0].kind == "metric_early_stopping"
        assert len(template_random.early_stopping) == 1
        assert isinstance(template_random.early_stopping[0], V1MetricEarlyStopping)
Example no. 18
def restart(ctx, copy, polyaxonfile, u):
    """Restart run.

    Uses [Caching](/references/polyaxon-cli/#caching)

    Examples:

    \b
    ```bash
    $ polyaxon run --uid=8aac02e3a62a4f0aaa257c59da5eab80 restart
    ```
    """
    content = None
    if polyaxonfile:
        content = "{}".format(reader.read(polyaxonfile))

    # Check if we need to upload
    if u:
        ctx.invoke(upload, sync=False)

    owner, project_name, run_uuid = get_project_run_or_local(
        ctx.obj.get("project"), ctx.obj.get("run_uuid"))
    try:
        polyaxon_client = PolyaxonClient()
        body = V1Run(content=content)
        if copy:
            response = polyaxon_client.runs_v1.copy_run(
                owner, project_name, run_uuid, body)
            Printer.print_success("Run was copied with uid {}".format(
                response.uuid))
        else:
            response = polyaxon_client.runs_v1.restart_run(
                owner, project_name, run_uuid, body)
            Printer.print_success("Run was restarted with uid {}".format(
                response.uuid))
    except (ApiException, HTTPError) as e:
        handle_cli_error(
            e, message="Could not restart run `{}`.".format(run_uuid))
        sys.exit(1)
Example no. 19
    def test_matrix_file_passes_int_float_types(self):
        run_config = V1CompiledOperation.read(
            [
                reader.read(
                    os.path.abspath(
                        "tests/fixtures/pipelines/matrix_file_with_int_float_types.yml"
                    )
                ),
                {"kind": "compiled_operation"},
            ]
        )

        run_config = CompiledOperationSpecification.apply_context(run_config)
        assert run_config.version == 1.05
        assert run_config.is_dag_run is True
        assert run_config.has_pipeline is True
        assert run_config.schedule is None
        assert run_config.run.concurrency == 4
        assert isinstance(run_config.run, V1Dag)
        assert run_config.run.early_stopping is None
        assert run_config.run.kind == V1Dag.IDENTIFIER
        assert len(run_config.run.operations) == 2
        assert len(run_config.run.components) == 1
        template_grid = run_config.run.operations[1].parallel
        assert isinstance(template_grid, V1GridSearch)
        assert isinstance(template_grid.params["param1"], V1HpChoice)
        assert isinstance(template_grid.params["param2"], V1HpChoice)
        assert template_grid.params["param1"].to_dict() == {
            "kind": "choice",
            "value": [1, 2],
        }
        assert template_grid.params["param2"].to_dict() == {
            "kind": "choice",
            "value": [3.3, 4.4],
        }
        assert template_grid.concurrency == 2
        assert template_grid.early_stopping is None
Example no. 20
def generate(polyaxonfile, build_context, destination, params):
    """Generate a dockerfile given the polyaxonfile."""
    if all([polyaxonfile, build_context]):
        Printer.print_error(
            "Only a polyaxonfile or a build context option is required.")
        sys.exit(1)

    if build_context:
        try:
            build_context = BuildContextConfig.from_dict(
                reader.read(build_context))
        except (PolyaxonSchemaError, ValidationError) as e:
            Printer.print_error("received a non valid build context.")
            Printer.print_error("Error message: {}.".format(e))
            sys.exit(1)
    else:
        specification = check_polyaxonfile(polyaxonfile,
                                           params=params,
                                           log=False)

        try:
            run_spec = get_specification(specification.generate_run_data())
            run_spec.apply_params(params=specification.config.params)
            run_spec.apply_context()
        except PolyaxonSchemaError:
            Printer.print_error(
                "Could not run this polyaxonfile locally, "
                "a context is required to resolve it dependencies.")
            sys.exit(1)

        build_context = run_spec.build_context

    generator = DockerFileGenerator(build_context=build_context,
                                    destination=destination or ".")
    generator.create()
    Printer.print_success("Dockerfile was generated: `{}`".format(
        generator.dockerfile_path))
Example no. 21
 def read_configs(cls, config_values):  # pylint:disable=redefined-outer-name
     config = reader.read(config_values)  # pylint:disable=redefined-outer-name
     return cls(**config) if config else None
Example no. 22
 def read(cls, values, unknown=None, config_type=None):
     values = reader.read(values, config_type=config_type)
     return cls.from_dict(values, unknown=unknown)
Example no. 23
def read(filepaths):
    data = reader.read(filepaths)
    return DeploymentConfig.from_dict(data)
Example no. 24
    def test_reads_non_existing_file(self):
        # Raises by default
        with self.assertRaises(PolyaxonSchemaError):
            reader.read("tests/fixtures/parsing/no_file.yml")

        with self.assertRaises(PolyaxonSchemaError):
            reader.read("tests/fixtures/parsing/no_file.json")

        with self.assertRaises(PolyaxonSchemaError):
            reader.read(ConfigSpec("tests/fixtures/parsing/no_file"))

        with self.assertRaises(PolyaxonSchemaError):
            reader.read(ConfigSpec("tests/fixtures/parsing/no_file.yml"))

        with self.assertRaises(PolyaxonSchemaError):
            reader.read(ConfigSpec("tests/fixtures/parsing/no_file.json"))

        # Does not raise if set to ignore
        assert (reader.read(
            ConfigSpec("tests/fixtures/parsing/no_file",
                       check_if_exists=False)) == {})

        assert (reader.read(
            ConfigSpec("tests/fixtures/parsing/no_file.yml",
                       check_if_exists=False)) == {})

        assert (reader.read(
            ConfigSpec("tests/fixtures/parsing/no_file.json",
                       check_if_exists=False)) == {})
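The `check_if_exists=False` branch above returns an empty config instead of raising. Below is a minimal sketch of that guard, with hypothetical names (`MissingFileError`, `read_config_file`) rather than the polyaxon ones.

import os


class MissingFileError(Exception):
    pass


def read_config_file(path, check_if_exists=True):
    """Return {} for a missing file when told not to check, else raise."""
    if not os.path.isfile(path):
        if check_if_exists:
            raise MissingFileError("`{}` was not found".format(path))
        return {}
    with open(path) as config_file:
        return config_file.read()


assert read_config_file("no_such_file.yml", check_if_exists=False) == {}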
Example no. 25
 def test_reads_json_files_without_extension(self):
     config = reader.read(
         ConfigSpec("tests/fixtures/parsing/json_file",
                    config_type=".json"))
     assert config == {"x": 1, "y": 2, "foo": "bar", "type": "json"}
Example no. 26
 def test_reads_yaml_files_without_extension(self):
     config = reader.read(
         ConfigSpec("tests/fixtures/parsing/yaml_file", config_type=".yml"))
     assert config == {"x": 10, "y": 20, "foo": "bar", "type": "yaml"}
Example no. 27
 def test_reads_json_files(self):
     config = reader.read("tests/fixtures/parsing/json_file.json")
     assert config == {"x": 1, "y": 2, "foo": "bar", "type": "json"}
Example no. 28
 def test_reads_yaml_files(self):
     config = reader.read("tests/fixtures/parsing/yaml_file.yml")
     assert config == {"x": 10, "y": 20, "foo": "bar", "type": "yaml"}
Example no. 29
 def test_reads_json_stream(self):
     stream = """---
     {x: y, 1: 2}
     """
     config = reader.read(stream)
     assert config is not None
Example no. 30
 def test_reads_non_valid_yaml_stream(self):
     stream = ";sdfsd;sdff"
     with self.assertRaises(PolyaxonSchemaError):
         reader.read(stream)
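One hedged reading of the failure above: `;sdfsd;sdff` loads as a bare YAML scalar rather than a mapping, and a config reader can reject anything that is not a mapping. The sketch below illustrates only that check; `SchemaError` and `read_stream` are hypothetical names, and PyYAML is assumed.

import yaml  # assumed available (PyYAML)


class SchemaError(Exception):
    pass


def read_stream(stream):
    """Parse a YAML stream and require a mapping at the top level."""
    data = yaml.safe_load(stream)
    if not isinstance(data, dict):
        raise SchemaError("Expected a mapping, received: {!r}".format(data))
    return data


assert read_stream("---\n{x: y, 1: 2}\n") == {"x": "y", 1: 2}
try:
    read_stream(";sdfsd;sdff")
except SchemaError:
    pass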