def test_build_pipeline(self):
    expected_pipeline = PipeLine(self.pipeline_name)
    expected_pipeline.add_job(read_eq_catalog)
    expected_pipeline.add_job(create_catalog_matrix)
    expected_pipeline.add_job(gardner_knopoff)

    self.assertEqual(expected_pipeline,
        self.pipeline_builder.build(self.context.config))
def test_build_pipeline_preprocessing_jobs_undefined(self):
    self.context_preprocessing.config['preprocessing_jobs'] = None

    expected_preprocessing_pipeline = PipeLine()
    expected_preprocessing_pipeline.add_job(read_eq_catalog)
    expected_preprocessing_pipeline.add_job(read_source_model)
    expected_preprocessing_pipeline.add_job(create_catalog_matrix)
    expected_preprocessing_pipeline.add_job(create_default_values)
    expected_preprocessing_pipeline.add_job(retrieve_completeness_table)

    pprocessing_built_pipeline = self.preprocessing_builder.build(
        self.context_preprocessing.config)

    self.assertEqual(expected_preprocessing_pipeline,
        pprocessing_built_pipeline)
def setUp(self):
    def square_job(context):
        value = context.number
        context.number = value * value

    def double_job(context):
        value = context.number
        context.number = 2 * value

    self.square_job = square_job
    self.double_job = double_job
    self.pipeline_name = 'square pipeline'
    self.pipeline = PipeLine(self.pipeline_name)
    self.context = Context(get_data_path('config.yml', DATA_DIR))
    self.context.number = 2
def setUp(self):
    def square_job(context):
        value = context.number
        context.number = value * value

    def double_job(context):
        value = context.number
        context.number = 2 * value

    self.square_job = square_job
    self.double_job = double_job
    self.pipeline = PipeLine()
    self.context_preprocessing = Context(
        get_data_path('config_preprocessing.yml', DATA_DIR))
    self.context_preprocessing.number = 2
class PipeLineTestCase(unittest.TestCase):

    def setUp(self):
        def square_job(context):
            value = context.number
            context.number = value * value

        def double_job(context):
            value = context.number
            context.number = 2 * value

        self.square_job = square_job
        self.double_job = double_job
        self.pipeline_name = 'square pipeline'
        self.pipeline = PipeLine(self.pipeline_name)
        self.context = Context(get_data_path('config.yml', DATA_DIR))
        self.context.number = 2

    def test_run_jobs(self):
        self.pipeline.add_job(self.square_job)
        self.pipeline.add_job(self.double_job)
        self.pipeline.run(self.context)
        self.assertEqual(8, self.context.number)

        # Change jobs order
        self.pipeline.jobs.reverse()
        # Reset context to a base value
        self.context.number = 2
        self.pipeline.run(self.context)
        self.assertEqual(16, self.context.number)
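
# Illustrative sketch only, not the project's implementation: a minimal
# pipeline consistent with the behaviour exercised in PipeLineTestCase --
# jobs are callables taking a context, add_job() appends them, run() applies
# them in order, and equality compares the job lists so that expected and
# built pipelines can be asserted equal. The class name _SketchPipeLine is
# hypothetical.
class _SketchPipeLine(object):

    def __init__(self, name=None):
        self.name = name
        self.jobs = []

    def add_job(self, job):
        self.jobs.append(job)

    def run(self, context):
        # Apply each job to the shared context in insertion order.
        for job in self.jobs:
            job(context)

    def __eq__(self, other):
        return self.jobs == other.jobs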
def test_workflow_execute_pipelines(self):
    context = Context()
    context.config['apply_processing_jobs'] = True
    context.sm_definitions = None

    pipeline_preprocessing = PipeLine(None)
    pipeline_preprocessing.run = Mock()
    pipeline_processing = PipeLine(None)
    pipeline_processing.run = Mock()
    workflow = Workflow(pipeline_preprocessing, pipeline_processing)

    # Mocking a generator method
    sm_filter = MagicMock()
    sm_filter.filter_eqs.return_value.__iter__.return_value = \
        iter([(dict(a=1), [1]), (dict(b=2), [2])])

    workflow.start(context, sm_filter)

    self.assertTrue(workflow.preprocessing_pipeline.run.called)
    self.assertTrue(sm_filter.filter_eqs.called)
    self.assertTrue(pipeline_processing.run.called)
    self.assertEqual(2, pipeline_processing.run.call_count)
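
# Illustrative sketch only, not the project's implementation: a Workflow.start
# consistent with the assertions above -- the preprocessing pipeline runs
# once, then, when processing jobs are enabled, the processing pipeline runs
# once per (source model, eqs) pair yielded by sm_filter.filter_eqs. The
# filter_eqs argument list and the context attributes set inside the loop are
# assumptions of this sketch.
class _SketchWorkflow(object):

    def __init__(self, preprocessing_pipeline, processing_pipeline):
        self.preprocessing_pipeline = preprocessing_pipeline
        self.processing_pipeline = processing_pipeline

    def start(self, context, sm_filter):
        self.preprocessing_pipeline.run(context)
        if context.config['apply_processing_jobs']:
            # filter_eqs yields (source_model, eqs) pairs; its real signature
            # is not shown by the test, so passing the context is an assumption.
            for source_model, eqs in sm_filter.filter_eqs(context):
                context.current_sm = source_model  # assumed attribute name
                context.current_eqs = eqs          # assumed attribute name
                self.processing_pipeline.run(context)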
def test_build_pipeline_source_model_undefined(self):
    self.context_preprocessing.config['source_model_file'] = None

    expected_preprocessing_pipeline = PipeLine()
    expected_preprocessing_pipeline.add_job(read_eq_catalog)
    expected_preprocessing_pipeline.add_job(create_default_source_model)
    expected_preprocessing_pipeline.add_job(create_catalog_matrix)
    expected_preprocessing_pipeline.add_job(create_default_values)
    expected_preprocessing_pipeline.add_job(gardner_knopoff)
    expected_preprocessing_pipeline.add_job(stepp)
    expected_preprocessing_pipeline.add_job(create_selected_eq_vector)
    expected_preprocessing_pipeline.add_job(store_preprocessed_catalog)
    expected_preprocessing_pipeline.add_job(store_completeness_table)

    pprocessing_built_pipeline = self.preprocessing_builder.build(
        self.context_preprocessing.config)

    self.assertEqual(expected_preprocessing_pipeline,
        pprocessing_built_pipeline)
def test_build_pipeline(self):
    # Two different kinds of pipeline can be built:
    # a preprocessing pipeline and a processing pipeline.
    expected_preprocessing_pipeline = PipeLine()
    expected_preprocessing_pipeline.add_job(read_eq_catalog)
    expected_preprocessing_pipeline.add_job(read_source_model)
    expected_preprocessing_pipeline.add_job(create_catalog_matrix)
    expected_preprocessing_pipeline.add_job(create_default_values)
    expected_preprocessing_pipeline.add_job(gardner_knopoff)
    expected_preprocessing_pipeline.add_job(stepp)
    expected_preprocessing_pipeline.add_job(create_selected_eq_vector)
    expected_preprocessing_pipeline.add_job(store_preprocessed_catalog)
    expected_preprocessing_pipeline.add_job(store_completeness_table)

    expected_processing_pipeline = PipeLine()
    expected_processing_pipeline.add_job(recurrence)

    pprocessing_built_pipeline = self.preprocessing_builder.build(
        self.context_preprocessing.config)
    processing_built_pipeline = self.processing_builder.build(
        self.context_processing.config)

    self.assertEqual(expected_preprocessing_pipeline,
        pprocessing_built_pipeline)
    self.assertEqual(expected_processing_pipeline,
        processing_built_pipeline)