def test_no_params_for_required_inputs_outputs_raises(self):
    """Loading specs with required inputs/outputs but no params must raise.

    Each file is checked in its own ``assertRaises`` context: with a single
    shared context the second construction would never execute once the
    first raised, leaving ``required_outputs.yml`` untested.
    """
    with self.assertRaises(PolyaxonfileError):
        PolyaxonFile(
            os.path.abspath('tests/fixtures/typing/required_inputs.yml'))
    with self.assertRaises(PolyaxonfileError):
        PolyaxonFile(
            os.path.abspath('tests/fixtures/typing/required_outputs.yml'))
def test_required_inputs_with_params(self):
    """Required typed inputs are rendered from params; bad params raise."""
    fixture = os.path.abspath('tests/fixtures/typing/required_inputs.yml')
    # (flag param value, rendered flag param, rendered run cmd)
    cases = [
        (False, '', 'video_prediction_train --loss=bar '),
        (True, '--flag', 'video_prediction_train --loss=bar --flag'),
    ]
    for flag_value, rendered_flag, expected_cmd in cases:
        plxfile = PolyaxonFile(
            fixture, params={'loss': 'bar', 'flag': flag_value})
        spec = plxfile.specification
        spec.apply_context()
        assert spec.version == 1
        assert spec.logging is None
        assert set(spec.tags) == {'foo', 'bar'}
        assert spec.params == {'loss': 'bar', 'flag': rendered_flag}
        assert spec.build.image == 'my_image'
        assert spec.run.cmd == expected_cmd
        assert spec.environment is None
        assert spec.framework is None
        assert spec.is_experiment
        assert spec.cluster_def == ({TaskType.MASTER: 1}, False)
        assert spec.is_experiment is True
    # Passing an extra, undeclared param raises
    with self.assertRaises(PolyaxonfileError):
        PolyaxonFile(fixture, params={'loss': 'bar', 'value': 1.1})
    # Passing non valid params (missing required ones) raises
    with self.assertRaises(PolyaxonfileError):
        PolyaxonFile(fixture, params={'value': 1.1})
def test_deprecated_advanced_file_passes(self):
    """Deprecated advanced TF file yields a distributed cluster definition."""
    spec = PolyaxonFile(
        os.path.abspath('tests/fixtures/deprecated/advanced_file.yml')
    ).specification
    spec.apply_context()
    assert spec.version == 1
    assert isinstance(spec.logging, LoggingConfig)
    assert spec.is_experiment
    assert isinstance(spec.environment, EnvironmentConfig)
    assert spec.framework == ExperimentFramework.TENSORFLOW
    tensorflow = spec.config.tensorflow
    assert tensorflow.n_workers == 5
    assert tensorflow.n_ps == 10
    # No per-replica resources were declared: the mappings come back empty.
    assert tensorflow.worker_resources == {}
    assert tensorflow.ps_resources == {}
    cluster, is_distributed = spec.cluster_def
    assert TensorflowSpecification.get_worker_resources(
        environment=tensorflow,
        cluster=cluster,
        is_distributed=is_distributed) == {}
    assert TensorflowSpecification.get_ps_resources(
        environment=tensorflow,
        cluster=cluster,
        is_distributed=is_distributed) == {}
    assert spec.cluster_def == (
        {TaskType.MASTER: 1, TaskType.WORKER: 5, TaskType.PS: 10},
        True,
    )
def test_polyaxon_found(self):
    """``check_default_path`` finds a file for every default name/ext combo.

    Each combination gets its own temp directory containing exactly one
    candidate file.
    """
    def create_file(path, filename, ext):
        # Create an empty file; ``with`` closes the handle so the
        # descriptor is not leaked (the original bare open() leaked it).
        fpath = '{}/{}.{}'.format(path, filename, ext)
        with open(fpath, 'w'):
            pass

    for filename in DEFAULT_POLYAXON_FILE_NAME:
        for ext in DEFAULT_POLYAXON_FILE_EXTENSION:
            path = tempfile.mkdtemp()
            create_file(path, filename, ext)
            assert PolyaxonFile.check_default_path(path=path)
def test_cron_pipeline(self):
    """A single-op pipeline with a cron schedule parses correctly."""
    spec = PolyaxonFile(os.path.abspath(
        'tests/fixtures/pipelines/simple_cron_pipeline.yml')).specification
    spec.apply_context()
    ops = spec.config.ops
    assert len(ops) == 1
    assert ops[0].name == 'cron-task'
    assert spec.config.concurrency is None
    schedule = spec.config.schedule
    assert schedule is not None
    assert schedule.kind == 'cron'
    assert schedule.cron == '0 0 * * *'
def test_interval_pipeline(self):
    """A single-op pipeline with an interval schedule parses correctly."""
    spec = PolyaxonFile(os.path.abspath(
        'tests/fixtures/pipelines/simple_recurrent_pipeline.yml')).specification
    spec.apply_context()
    ops = spec.config.ops
    assert len(ops) == 1
    assert ops[0].name == 'recurrent-task'
    assert spec.config.concurrency is None
    schedule = spec.config.schedule
    assert schedule is not None
    assert schedule.kind == 'interval'
    assert schedule.start_at.year == 2019
    assert schedule.frequency == 120
    assert schedule.depends_on_past is True
def test_jupyter_lab_job_with_node_selectors(self):
    """Notebook (lab backend) spec mirrors its custom environment settings."""
    spec = PolyaxonFile(
        os.path.abspath(
            'tests/fixtures/typing/jupyterlab_with_custom_environment.yml')
    ).specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_notebook
    assert spec.is_notebook is True
    assert spec.backend == 'lab'
    assert spec.logging is None
    assert sorted(spec.tags) == sorted(['foo', 'bar'])
    assert isinstance(spec.build, BuildConfig)
    assert isinstance(spec.environment, EnvironmentConfig)
    assert spec.artifact_refs == ['outputs1']
    assert spec.data_refs == ['data1', 'data2']
    assert spec.secret_refs == ['secret1', 'secret2']
    assert spec.config_map_refs == ['config_map1', 'config_map2']
    # Every environment-level setting must also be exposed on the spec itself.
    node_selector = {'polyaxon.com': 'node_for_notebook_jobs'}
    assert spec.environment.node_selector == node_selector
    assert spec.node_selector == node_selector
    resources = {
        'cpu': {'requests': 1, 'limits': 2},
        'memory': {'requests': 200, 'limits': 200},
    }
    assert spec.environment.resources.to_dict() == resources
    assert spec.resources.to_dict() == resources
    affinity = {
        'nodeAffinity': {'requiredDuringSchedulingIgnoredDuringExecution': {}}
    }
    assert spec.environment.affinity == affinity
    assert spec.affinity == affinity
    tolerations = [{'key': 'key', 'operator': 'Exists'}]
    assert spec.environment.tolerations == tolerations
    assert spec.tolerations == tolerations
def test_run_simple_file_passes(self):
    """A simple typed run file yields an experiment with a rendered cmd."""
    spec = PolyaxonFile(
        os.path.abspath('tests/fixtures/typing/run_cmd_simple_file.yml')
    ).specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.logging is None
    assert sorted(spec.tags) == sorted(['foo', 'bar'])
    assert spec.is_experiment
    assert isinstance(spec.build, BuildConfig)
    assert isinstance(spec.run, RunConfig)
    assert spec.environment is None
    assert spec.framework is None
    assert spec.cluster_def == ({TaskType.MASTER: 1}, False)
    run = spec.run
    assert isinstance(run, RunConfig)
    assert run.cmd == "video_prediction_train --num_masks=2 --loss=MeanSquaredError"
def test_sequential_pipeline(self):
    """A strict dependency chain sorts topologically one op per level."""
    spec = PolyaxonFile(os.path.abspath(
        'tests/fixtures/pipelines/simple_sequential_pipeline.yml')).specification
    spec.apply_context()
    ops = spec.config.ops
    assert len(ops) == 4
    assert ops[0].name == 'job1'
    assert ops[1].name == 'job2'
    assert ops[1].dependencies == ['job1']
    assert ops[2].name == 'experiment1'
    assert ops[2].dependencies == ['job2']
    assert ops[3].name == 'experiment2'
    assert ops[3].dependencies == ['experiment1']
    # Each topological level holds exactly one op.
    assert spec.config.sort_topologically(spec.config.dag) == [
        ['job1'], ['job2'], ['experiment1'], ['experiment2']]
    assert spec.config.concurrency is None
    assert spec.config.schedule is None
def test_parallel_pipeline(self):
    """Independent ops all land in the first topological level."""
    spec = PolyaxonFile(os.path.abspath(
        'tests/fixtures/pipelines/simple_parallel_pipeline.yml')).specification
    spec.apply_context()
    ops = spec.config.ops
    assert len(ops) == 4
    assert ops[0].name == 'job1'
    assert ops[0].dependencies is None
    assert ops[1].name == 'job2'
    assert ops[1].dependencies is None
    assert ops[2].name == 'experiment1'
    assert ops[2].dependencies is None
    assert ops[3].name == 'experiment2'
    assert ops[3].dependencies is None
    # No dependencies: all four ops share one level (order not guaranteed).
    assert set(spec.config.sort_topologically(spec.config.dag)[0]) == {
        'job1', 'job2', 'experiment1', 'experiment2'}
    assert spec.config.concurrency == 2
    assert spec.config.schedule is None
def test_dag_pipeline(self):
    """Diamond DAG: fan-out from job1, fan-in to job2."""
    spec = PolyaxonFile(os.path.abspath(
        'tests/fixtures/pipelines/simple_dag_pipeline.yml')).specification
    spec.apply_context()
    ops = spec.config.ops
    assert len(ops) == 5
    assert ops[0].name == 'job1'
    assert ops[1].name == 'experiment1'
    assert ops[1].dependencies == ['job1']
    assert ops[2].name == 'experiment2'
    assert ops[2].dependencies == ['job1']
    assert ops[3].name == 'experiment3'
    assert ops[3].dependencies == ['job1']
    assert ops[4].name == 'job2'
    assert ops[4].dependencies == ['experiment1', 'experiment2', 'experiment3']
    sorted_dag = spec.config.sort_topologically(spec.config.dag)
    assert sorted_dag[0] == ['job1']
    # The three experiments are mutually independent, so compare as a set.
    assert set(sorted_dag[1]) == {'experiment1', 'experiment2', 'experiment3'}
    assert sorted_dag[2] == ['job2']
    assert spec.config.concurrency == 3
    assert spec.config.schedule is None
def test_run_with_refs(self):
    """Params referencing other runs' outputs resolve via apply_context."""
    plxfile = PolyaxonFile(
        os.path.abspath('tests/fixtures/typing/run_with_refs.yml'))
    spec = plxfile.specification
    required_refs = spec.raw_config.get_params_with_refs()
    assert len(required_refs) == 1
    assert required_refs[0].name == 'model_path'
    assert required_refs[0].value == 'jobs.1.outputs.doo'
    # Context keys use '__' in place of '.' in the ref path.
    spec.apply_context(context={'jobs__1__outputs__doo': 'model_path'})
    assert spec.version == 1
    assert spec.logging is None
    assert sorted(spec.tags) == sorted(['foo', 'bar'])
    assert spec.is_experiment
    assert isinstance(spec.build, BuildConfig)
    assert isinstance(spec.run, RunConfig)
    assert spec.environment is None
    assert spec.framework is None
    assert spec.cluster_def == ({TaskType.MASTER: 1}, False)
    run = spec.run
    assert isinstance(run, RunConfig)
    assert run.cmd == "video_prediction_train --num_masks=2 --model_path=model_path"
def test_simple_file_framework_passes(self):
    """Deprecated framework file: gpu resources and outputs refs are parsed."""
    spec = PolyaxonFile(
        os.path.abspath(
            'tests/fixtures/deprecated/simple_file_framework.yml')).specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.logging is None
    assert spec.tags is None
    assert spec.build.dockerfile == 'Dockerfile'
    assert spec.run.cmd == 'video_prediction_train --model=DNA --num_masks=1'
    environment = spec.environment
    assert environment is not None
    assert environment.resources.gpu.to_dict() == {'requests': 1, 'limits': 1}
    assert environment.outputs.to_dict() == {
        'jobs': [111],
        'experiments': None
    }
    assert spec.framework is not None
    assert spec.is_experiment is True
def test_matrix_file_passes_int_float_types(self):
    """Grid-search matrix with int/float values expands into experiments."""
    plxfile = PolyaxonFile(
        os.path.abspath(
            'tests/fixtures/typing/matrix_file_with_int_float_types.yml'))
    spec = plxfile.specification
    spec.apply_context()
    assert spec.version == 1
    assert spec.is_group
    matrix = spec.hptuning.matrix
    assert isinstance(matrix['param1'], MatrixConfig)
    assert isinstance(matrix['param2'], MatrixConfig)
    assert matrix['param1'].to_dict() == {'values': [1, 2]}
    assert matrix['param2'].to_dict() == {'values': [3.3, 4.4]}
    # 2 values x 2 values -> 4 experiments in the grid.
    assert spec.matrix_space == 4
    assert isinstance(spec.hptuning, HPTuningConfig)
    assert spec.hptuning.concurrency == 2
    assert spec.search_algorithm == SearchAlgorithms.GRID
    assert spec.hptuning.early_stopping is None
    assert spec.early_stopping == []
    assert spec.experiments_def == {
        'search_algorithm': SearchAlgorithms.GRID,
        'early_stopping': False,
        'concurrency': 2,
    }
    assert spec.build is None
    # Resolve a concrete experiment out of the group and check its rendering.
    spec = spec.get_experiment_spec(
        matrix_declaration=spec.matrix_declaration_test)
    spec.apply_context()
    assert spec.environment is None
    assert spec.framework is None
    assert spec.cluster_def == ({TaskType.MASTER: 1}, False)
    assert spec.run.cmd == (
        'train --param1={param1} --param2={param2} --param3=23423'.format(
            **spec.params))
def test_using_untyped_params_raises(self):
    """Params without type declarations are rejected at load time."""
    with self.assertRaises(PolyaxonfileError):
        PolyaxonFile(
            os.path.abspath('tests/fixtures/typing/untyped_params.yml'))
def test_deprecated_advanced_file_with_custom_configs_and_resources_passes(
        self):
    """Deprecated advanced TF file: per-replica configs/resources and totals."""
    spec = PolyaxonFile(
        os.path.abspath(
            'tests/fixtures/deprecated/advanced_file_with_custom_configs_and_resources.yml'
        )).specification
    spec.apply_context()
    assert spec.version == 1
    assert isinstance(spec.logging, LoggingConfig)
    assert spec.is_experiment
    assert isinstance(spec.environment, EnvironmentConfig)
    assert spec.framework == ExperimentFramework.TENSORFLOW
    assert spec.artifact_refs == ['outputs1']
    assert spec.data_refs == ['data1', 'data2']
    assert spec.config_map_refs == ['config_map1', 'config_map2']
    tensorflow = spec.config.tensorflow
    assert tensorflow.n_workers == 5
    assert tensorflow.n_ps == 10
    # Master resources come from the top-level environment.
    assert isinstance(spec.environment.resources, PodResourcesConfig)
    assert isinstance(spec.environment.resources.cpu, K8SResourcesConfig)
    assert spec.environment.resources.cpu.requests == 1
    assert spec.environment.resources.cpu.limits == 2
    # Worker defaults plus per-index overrides.
    assert tensorflow.default_worker_node_selector == {'foo': True}
    assert tensorflow.worker_resources == {}
    assert tensorflow.worker_affinities == {}
    assert isinstance(tensorflow.worker_node_selectors[3], dict)
    assert tensorflow.worker_node_selectors[3] == {'foo': False}
    assert isinstance(tensorflow.worker_tolerations[4], list)
    assert tensorflow.worker_tolerations[4] == [{
        'key': 'key',
        'operator': 'Exists',
        'effect': 'NoSchedule',
    }]
    # PS defaults plus per-index overrides.
    assert isinstance(tensorflow.default_ps_resources, PodResourcesConfig)
    assert isinstance(tensorflow.default_ps_resources.cpu, K8SResourcesConfig)
    assert tensorflow.default_ps_resources.cpu.requests == 2
    assert tensorflow.default_ps_resources.cpu.limits == 4
    assert tensorflow.ps_node_selectors == {}
    assert isinstance(tensorflow.ps_tolerations[7], list)
    assert tensorflow.ps_tolerations[7] == [{'operator': 'Exists'}]
    assert isinstance(tensorflow.ps_affinities[7], dict)
    assert isinstance(tensorflow.ps_resources[9], PodResourcesConfig)
    assert isinstance(tensorflow.ps_resources[9].memory, K8SResourcesConfig)
    assert tensorflow.ps_resources[9].memory.requests == 512
    assert tensorflow.ps_resources[9].memory.limits == 1024
    # Check that properties returning lists of configs/resources work.
    cluster, is_distributed = spec.cluster_def
    worker_node_selectors = TensorflowSpecification.get_worker_node_selectors(
        environment=tensorflow,
        cluster=cluster,
        is_distributed=is_distributed)
    assert len(worker_node_selectors) == tensorflow.n_workers
    assert set([i['foo'] for i in worker_node_selectors.values()]) == {
        tensorflow.default_worker_node_selector['foo'],
        tensorflow.worker_node_selectors[3]['foo']
    }
    assert TensorflowSpecification.get_worker_resources(
        environment=tensorflow,
        cluster=cluster,
        is_distributed=is_distributed) == {}
    ps_resources = TensorflowSpecification.get_ps_resources(
        environment=tensorflow,
        cluster=cluster,
        is_distributed=is_distributed)
    assert len(ps_resources) == tensorflow.n_ps
    assert set(ps_resources.values()) == {
        tensorflow.default_ps_resources,
        tensorflow.ps_resources[9]
    }
    # Total resources: master cpu + 9 default ps cpus; memory from ps[9].
    assert spec.total_resources == {
        'cpu': {
            'requests': 1 + 2 * 9,
            'limits': 2 + 4 * 9
        },
        'memory': {
            'requests': 512,
            'limits': 1024
        },
    }
    assert spec.cluster_def == (
        {TaskType.MASTER: 1, TaskType.WORKER: 5, TaskType.PS: 10},
        True,
    )
def test_cyclic_pipeline_raises(self):
    """A pipeline whose DAG contains a cycle fails when the context is applied."""
    plx_file = PolyaxonFile(os.path.abspath(
        'tests/fixtures/pipelines/cyclic_pipeline.yml'))
    # The file itself loads; the cycle is only detected on apply_context.
    with self.assertRaises(PolyaxonSchemaError):
        plx_file.specification.apply_context()
def test_pipeline_ops_not_corresponding_to_templates(self):
    """Ops referencing unknown templates fail when the context is applied."""
    plx_file = PolyaxonFile(os.path.abspath(
        'tests/fixtures/pipelines/pipeline_ops_not_corresponding_to_templates.yml'))
    # Loading succeeds; the dangling template refs surface on apply_context.
    with self.assertRaises(PolyaxonSchemaError):
        plx_file.specification.apply_context()
def test_default_not_found(self):
    """An empty directory yields no default polyaxon file."""
    empty_dir = tempfile.mkdtemp()
    assert PolyaxonFile.check_default_path(path=empty_dir) is None