def test_load_full_dump_from_path(tmpdir):
    # Given
    tape_fit_callback_function = TapeCallbackFunction()
    tape_transform_callback_function = TapeCallbackFunction()
    pipeline = Pipeline([
        ('step_a', Identity()),
        ('step_b', OutputTransformerWrapper(
            FitTransformCallbackStep(tape_fit_callback_function, tape_transform_callback_function)))
    ], cache_folder=tmpdir).set_name(PIPELINE_NAME)

    # When
    pipeline, outputs = pipeline.fit_transform(DATA_INPUTS, EXPECTED_OUTPUTS)
    pipeline.save(ExecutionContext(tmpdir), full_dump=True)

    # Then
    loaded_pipeline = ExecutionContext(tmpdir).load(os.path.join(PIPELINE_NAME, 'step_b'))

    assert isinstance(loaded_pipeline, OutputTransformerWrapper)
    loaded_step_b_wrapped_step = loaded_pipeline.wrapped
    assert np.array_equal(
        loaded_step_b_wrapped_step.transform_callback_function.data[0],
        EXPECTED_OUTPUTS)
    assert np.array_equal(
        loaded_step_b_wrapped_step.fit_callback_function.data[0][0],
        EXPECTED_OUTPUTS)
    assert np.array_equal(
        loaded_step_b_wrapped_step.fit_callback_function.data[0][1],
        [None] * len(EXPECTED_OUTPUTS))
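# --- Hypothetical module-level fixtures assumed by test_load_full_dump_from_path
# above. The real constants are defined elsewhere in the test module; this is an
# illustrative sketch only (names taken from the test, values invented).
import numpy as np

PIPELINE_NAME = 'saved_pipeline_name'
DATA_INPUTS = np.array(range(10))
EXPECTED_OUTPUTS = np.array(range(10, 20))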
def test_step_with_context_should_only_save_wrapped_step(tmpdir):
    context = ExecutionContext(root=tmpdir)
    service = SomeService()
    context.set_service_locator({BaseService: service})
    p = Pipeline([
        SomeStep().assert_has_services(BaseService)
    ]).with_context(context=context)

    p.save(context, full_dump=True)

    p: Pipeline = ExecutionContext(root=tmpdir).load(os.path.join('StepWithContext', 'Pipeline'))
    assert isinstance(p, Pipeline)
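# --- Hypothetical fixtures assumed by the two with_context tests
# (test_step_with_context_should_only_save_wrapped_step above and
# test_step_with_context_saver below). Sketch only: the import path and the
# class bodies are assumptions, not the library's real test fixtures.
from neuraxle.base import BaseService, Identity


class SomeBaseService(BaseService):
    # Service interface that SomeStep declares as a required dependency.
    def process(self, data_inputs):
        raise NotImplementedError()


class SomeService(SomeBaseService):
    # Concrete service instance registered on the ExecutionContext.
    def process(self, data_inputs):
        return data_inputs


class SomeStep(Identity):
    # Pass-through step; the tests only verify that the declared service is
    # registered (via assert_has_services) and that saving the StepWithContext
    # wrapper persists only the wrapped Pipeline.
    pass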
def test_tensorflowv2_saver(tmpdir):
    dataset = toy_dataset()
    model = Pipeline([create_model_step(tmpdir)])
    loss_first_fit = evaluate_model_on_dataset(model, dataset)

    model.save(ExecutionContext(root=tmpdir))

    loaded = Pipeline([create_model_step(tmpdir)]).load(ExecutionContext(root=tmpdir))
    loss_second_fit = evaluate_model_on_dataset(loaded, dataset)
    assert loss_second_fit < (loss_first_fit / 2)
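# --- Hypothetical helpers assumed by test_tensorflowv2_saver above. The real
# toy_dataset() and evaluate_model_on_dataset() live elsewhere in the test
# module; this sketch only illustrates the shape they could take.
import numpy as np


def toy_dataset():
    # A small noisy linear-regression problem: y = 2x + 1 + noise.
    data_inputs = np.random.normal(size=(64, 1)).astype(np.float32)
    noise = 0.05 * np.random.normal(size=(64, 1)).astype(np.float32)
    expected_outputs = 2.0 * data_inputs + 1.0 + noise
    return data_inputs, expected_outputs


def evaluate_model_on_dataset(model, dataset):
    # Fit the pipeline for a few passes over the data and return the final
    # mean squared error of its predictions.
    data_inputs, expected_outputs = dataset
    outputs = None
    for _ in range(10):
        model, outputs = model.fit_transform(data_inputs, expected_outputs)
    return float(np.mean((np.array(outputs) - expected_outputs) ** 2))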
def test_step_with_context_saver(tmpdir):
    context = ExecutionContext(root=tmpdir)
    service = SomeService()
    pipeline_name = 'testname'
    context.set_service_locator({SomeBaseService: service})
    p = Pipeline([
        SomeStep().assert_has_services(SomeBaseService)
    ]).with_context(context=context)
    p.set_name(pipeline_name)

    p.save(context, full_dump=True)

    p: StepWithContext = ExecutionContext(root=tmpdir).load(pipeline_name)
    assert isinstance(p, StepWithContext)

    p: Pipeline = ExecutionContext(root=tmpdir).load(os.path.join(pipeline_name, 'Pipeline'))
    assert isinstance(p, Pipeline)
def test_tensorflowv1_saver(tmpdir):
    data_inputs = np.array([
        3.3, 4.4, 5.5, 6.71, 6.93, 4.168, 9.779, 6.182, 7.59, 2.167,
        7.042, 10.791, 5.313, 7.997, 5.654, 9.27, 3.1
    ])
    expected_outputs = np.array([
        1.7, 2.76, 2.09, 3.19, 1.694, 1.573, 3.366, 2.596, 2.53, 1.221,
        2.827, 3.465, 1.65, 2.904, 2.42, 2.94, 1.3
    ])
    model = Pipeline([create_model_step()])
    for i in range(50):
        model, outputs = model.fit_transform(data_inputs, expected_outputs)

    model.save(ExecutionContext(root=tmpdir))

    model = Pipeline([create_model_step()]).load(ExecutionContext(root=tmpdir))
    model, outputs = model.fit_transform(data_inputs, expected_outputs)
    assert ((outputs - expected_outputs) ** 2).mean() < 0.25