def test_call_pre_run(self, pipeline_config: PipelineConfig) -> None:
    """Verify the configuration dispatches ``pre_run`` exactly once to every unit."""
    for pipeline_unit in pipeline_config.iter_units():
        mocked_unit = flexmock(pipeline_unit)
        mocked_unit.should_receive("pre_run").with_args().and_return(None).once()

    pipeline_config.call_pre_run()
def test_call_post_run_report(self, pipeline_config: PipelineConfig) -> None:
    """Verify ``post_run_report`` is delivered once, with the report, to every unit."""
    report = Report(count=2, pipeline=pipeline_config)

    for pipeline_unit in pipeline_config.iter_units():
        mocked_unit = flexmock(pipeline_unit)
        mocked_unit.should_receive("post_run_report").with_args(report=report).and_return(None).once()

    pipeline_config.call_post_run_report(report)
def test_to_dict(self, pipeline_config: PipelineConfig) -> None:
    """Test serialization of pipeline configuration."""
    expected = {
        "boots": [{"name": "Boot1", "configuration": {"some_parameter": -0.2}}],
        "sieves": [{"name": "Sieve1", "configuration": {"flying_circus": 1969}}],
        "steps": [{"name": "Step1", "configuration": {"guido_retirement": 2019}}],
        "strides": [
            {
                "name": "Stride1",
                "configuration": {
                    "linus": {
                        "residence": "oregon",
                        "children": 3,
                        "parents": ["nils", "anna"],
                    }
                },
            }
        ],
        "wraps": [
            {
                "name": "Wrap1",
                "configuration": {
                    "thoth": [2018, 2019],
                    "cities": ["Brno", "Bonn", "Boston", "Milan"],
                },
            }
        ],
    }
    assert pipeline_config.to_dict() == expected
def test_to_dict(self, pipeline_config: PipelineConfig) -> None:
    """Test conversion to a dict."""
    report = Report(count=3, pipeline=pipeline_config)

    project = flexmock()
    project_dict = {"aresto momentum": "avada kedavra"}
    # Expected twice: once while building the expected product dict below,
    # once when the report itself serializes the product.
    project.should_receive("to_dict").with_args(keep_thoth_section=True).and_return(project_dict).twice()

    product = Product(
        project=project,
        score=0.666,
        justification=[{"gryffindor": "le gladium leviosa"}],
        advised_runtime_environment=RuntimeEnvironment.from_dict({"python_version": "3.6"}),
    )
    report.add_product(product)

    assert report.product_count() == 1
    assert list(report.iter_products()) == [product]

    expected = {
        "pipeline": pipeline_config.to_dict(),
        "products": [product.to_dict()],
        "stack_info": [],
        "resolver_iterations": 0,
        "accepted_final_states_count": 0,
        "discarded_final_states_count": 0,
    }
    assert report.to_dict() == expected
def pipeline_config() -> PipelineConfig:
    """Create a fixture for a pipeline configuration with few representatives of each pipeline unit type.

    Each unit type is keyed by the ``package_name`` entry of its configuration;
    ``dict.get`` is used consistently so units without a ``package_name`` map
    under ``None`` instead of raising ``KeyError`` during fixture setup.
    """
    flexmock(PipelineConfig)
    flexmock(Boot1)
    flexmock(Pseudonym1)
    flexmock(Sieve1)
    flexmock(Step1)
    flexmock(Stride1)
    flexmock(Wrap1)

    boot1 = Boot1()
    pseudonym1 = Pseudonym1()
    sieve1 = Sieve1()
    step1 = Step1()
    stride1 = Stride1()
    wrap1 = Wrap1()
    return PipelineConfig(
        boots={boot1.configuration.get("package_name"): [boot1]},
        # Was configuration["package_name"] — made consistent with the other
        # unit types so a missing key cannot blow up the fixture.
        pseudonyms={pseudonym1.configuration.get("package_name"): [pseudonym1]},
        sieves={sieve1.configuration.get("package_name"): [sieve1]},
        steps={step1.configuration.get("package_name"): [step1]},
        strides={stride1.configuration.get("package_name"): [stride1]},
        wraps={wrap1.configuration.get("package_name"): [wrap1]},
    )
def test_to_dict(self, pipeline_config: PipelineConfig) -> None:
    """Test serialization of pipeline configuration."""
    expected = {
        "boots": [
            {
                "name": "Boot1",
                "configuration": {"package_name": "flask", "some_parameter": -0.2},
                "unit_run": False,
            }
        ],
        "pseudonyms": [
            {
                "name": "Pseudonym1",
                "configuration": {"another_parameter": 0.33, "package_name": "tensorflow"},
                "unit_run": False,
            }
        ],
        "sieves": [
            {
                "name": "Sieve1",
                "configuration": {"flying_circus": 1969, "package_name": "tensorflow"},
                "unit_run": False,
            }
        ],
        "steps": [
            {
                "name": "Step1",
                "configuration": {
                    "guido_retirement": 2019,
                    "package_name": "tensorflow",
                    "multi_package_resolution": False,
                },
                "unit_run": False,
            }
        ],
        "strides": [
            {
                "name": "Stride1",
                "configuration": {
                    "linus": {
                        "children": 3,
                        "parents": ["nils", "anna"],
                        "residence": "oregon",
                    },
                    "package_name": None,
                },
                "unit_run": False,
            }
        ],
        "wraps": [
            {
                "name": "Wrap1",
                "configuration": {
                    "cities": ["Brno", "Bonn", "Boston", "Milan"],
                    "thoth": [2018, 2019],
                    "package_name": None,
                },
                "unit_run": False,
            }
        ],
    }
    assert pipeline_config.to_dict() == expected
def pipeline_config() -> PipelineConfig:  # noqa: D401
    """A fixture for a pipeline config."""
    # Every unit type starts out with an empty mapping.
    empty_units = {name: {} for name in ("boots", "pseudonyms", "sieves", "steps", "strides", "wraps")}
    return PipelineConfig(**empty_units)
def test_iter_units(self, pipeline_config: PipelineConfig) -> None:
    """Test iteration over all units present in the pipeline configuration."""
    expected_names = ("Boot1", "Pseudonym1", "Sieve1", "Step1", "Stride1", "Wrap1")
    counts = {name: 0 for name in expected_names}

    for unit in pipeline_config.iter_units():
        unit_name = unit.__class__.__name__
        assert unit_name in counts, f"Unknown unit {unit_name!r}"
        counts[unit_name] += 1

    # Each known unit type must be yielded exactly once.
    assert len(counts) == 6
    assert all(seen == 1 for seen in counts.values())
def _get_test_dm(
    *,
    stack_output: str,
    with_devel: bool,
    products: List[Product],
    amun_context: Optional[Dict[str, Any]] = None,
) -> DependencyMonkey:
    """Get instantiated dependency monkey ready to be tested."""
    flexmock(Resolver)
    Resolver.should_receive("resolve_products").with_args(with_devel=with_devel).and_return(products).once()

    flexmock(PipelineConfig)
    PipelineConfig.should_receive("call_post_run_report").and_return(None).once()

    resolver = Resolver(
        pipeline=PipelineConfig(),
        project=None,
        library_usage=None,
        graph=None,
        predictor=None,
    )
    return DependencyMonkey(
        resolver=resolver,
        stack_output=stack_output,
        decision_type=DecisionType.ALL,
        context=amun_context or {},
    )
def pipeline_config() -> PipelineConfig:
    """A fixture for a pipeline configuration with few representatives of each pipeline unit type."""
    # Mock the configuration type and each unit type so tests can stub them.
    for mocked in (PipelineConfig, Boot1, Sieve1, Step1, Stride1, Wrap1):
        flexmock(mocked)

    return PipelineConfig(
        boots=[Boot1()],
        sieves=[Sieve1()],
        steps=[Step1()],
        strides=[Stride1()],
        wraps=[Wrap1()],
    )
def test_to_dict_metadata(self, pipeline_config: PipelineConfig) -> None:
    """Test conversion to a dict with passed metadata.

    The ``THOTH_ADVISER_METADATA`` environment variable is set for the duration
    of the assertions and removed again in a ``finally`` block: the original
    ``except Exception: pop; raise`` only cleaned up on failure, leaking the
    variable into every subsequent test on the success path.
    """
    report = Report(count=3, pipeline=pipeline_config)

    project = flexmock()
    project_dict = {"aresto momentum": "avada kedavra"}
    project.should_receive("to_dict").with_args(keep_thoth_section=True).and_return(project_dict)

    product = Product(
        project=project,
        score=0.666,
        justification=[{"gryffindor": "le gladium leviosa"}],
        advised_runtime_environment=RuntimeEnvironment.from_dict({"python_version": "3.6"}),
    )
    report.add_product(product)

    stack_info = [{"type": "WARNING", "message": "Hello, metadata"}]
    stack_info_metadata = {
        "thoth.adviser": {
            "stack_info": stack_info,
        }
    }
    report.set_stack_info([{"foo": "bar"}])

    assert "THOTH_ADVISER_METADATA" not in os.environ
    os.environ["THOTH_ADVISER_METADATA"] = json.dumps(stack_info_metadata)
    try:
        assert report.product_count() == 1
        assert list(report.iter_products()) == [product]
        assert report.to_dict() == {
            "pipeline": pipeline_config.to_dict(),
            "products": [product.to_dict()],
            # Metadata-provided stack info is prepended to the report's own.
            "stack_info": list(chain(stack_info, report.stack_info)),
            "resolver_iterations": 0,
            "accepted_final_states_count": 0,
            "discarded_final_states_count": 0,
        }
    finally:
        # Always restore the environment, pass or fail.
        os.environ.pop("THOTH_ADVISER_METADATA")
def pipeline_config() -> PipelineConfig:
    """A fixture for a pipeline config."""
    # All unit types start out empty for this configuration.
    return PipelineConfig(
        boots=[],
        sieves=[],
        steps=[],
        strides=[],
        wraps=[],
    )