def setUp(self):
    """Build fresh contexts and pipeline builders for each test."""
    make_context = lambda name: Context(get_data_path(name, DATA_DIR))
    self.context_preprocessing = make_context('config_preprocessing.yml')
    self.preprocessing_builder = PreprocessingBuilder()
    self.context_processing = make_context('config_processing.yml')
    self.processing_builder = ProcessingBuilder()
# --- Example #2 ---
def create_workflow(config):
    """Create a workflow based on the config"""
    # Build the preprocessing pipeline first, then the processing one,
    # and hand both to the workflow.
    return Workflow(
        PreprocessingBuilder().build(config),
        ProcessingBuilder().build(config))
# --- Example #3 ---
from mtoolkit.catalog_filter import CatalogFilter, SourceModelCatalogFilter

if __name__ == '__main__':

    CMD_LINE_ARGS = cmd_line()

    # Only proceed when argument parsing succeeded.
    if CMD_LINE_ARGS:
        INPUT_CONFIG_FILENAME = CMD_LINE_ARGS.input_file[0]

        # The --detailed flag switches on debug-level logging.
        if CMD_LINE_ARGS.detailed:
            LOG_LEVEL = logging.DEBUG
        else:
            LOG_LEVEL = logging.INFO
        build_logger(LOG_LEVEL)

        CONTEXT = Context(INPUT_CONFIG_FILENAME)

        # Both pipelines are built from the same parsed config.
        PIPELINE_PREPROCESSING = PreprocessingBuilder().build(CONTEXT.config)
        PIPELINE_PROCESSING = ProcessingBuilder().build(CONTEXT.config)

        # Filter by source model only when one is configured.
        if CONTEXT.config['source_model_file']:
            CATALOG_FILTER = CatalogFilter(SourceModelCatalogFilter())
        else:
            CATALOG_FILTER = CatalogFilter()

        WORKFLOW = Workflow(PIPELINE_PREPROCESSING, PIPELINE_PROCESSING)
        WORKFLOW.start(CONTEXT, CATALOG_FILTER)

        # Serialize the resulting source-model definitions.
        WRITER = AreaSourceWriter(CONTEXT.config['result_file'])
        WRITER.serialize(CONTEXT.sm_definitions)
class PipeLineBuilderTestCase(unittest.TestCase):
    """Tests for building preprocessing/processing pipelines and for
    executing them through a Workflow."""

    def setUp(self):
        # Fresh contexts and builders for every test.
        make_context = lambda name: Context(get_data_path(name, DATA_DIR))
        self.context_preprocessing = make_context('config_preprocessing.yml')
        self.preprocessing_builder = PreprocessingBuilder()
        self.context_processing = make_context('config_processing.yml')
        self.processing_builder = ProcessingBuilder()

    def test_build_pipeline(self):
        # Two different kinds of pipeline can be built:
        # preprocessing and processing pipeline
        expected_pre = PipeLine()
        for job in (read_eq_catalog, read_source_model,
                    create_catalog_matrix, create_default_values,
                    gardner_knopoff, stepp, create_selected_eq_vector,
                    store_preprocessed_catalog, store_completeness_table):
            expected_pre.add_job(job)

        expected_proc = PipeLine()
        expected_proc.add_job(recurrence)

        built_pre = self.preprocessing_builder.build(
            self.context_preprocessing.config)
        built_proc = self.processing_builder.build(
            self.context_processing.config)

        self.assertEqual(expected_pre, built_pre)
        self.assertEqual(expected_proc, built_proc)

    def test_build_pipeline_source_model_undefined(self):
        # Without a source model file, a default source model job
        # replaces read_source_model.
        self.context_preprocessing.config['source_model_file'] = None

        expected = PipeLine()
        for job in (read_eq_catalog, create_default_source_model,
                    create_catalog_matrix, create_default_values,
                    gardner_knopoff, stepp, create_selected_eq_vector,
                    store_preprocessed_catalog, store_completeness_table):
            expected.add_job(job)

        built = self.preprocessing_builder.build(
            self.context_preprocessing.config)

        self.assertEqual(expected, built)

    def test_build_pipeline_preprocessing_jobs_undefined(self):
        # Without preprocessing jobs, the completeness table is
        # retrieved rather than computed.
        self.context_preprocessing.config['preprocessing_jobs'] = None

        expected = PipeLine()
        for job in (read_eq_catalog, read_source_model,
                    create_catalog_matrix, create_default_values,
                    retrieve_completeness_table):
            expected.add_job(job)

        built = self.preprocessing_builder.build(
            self.context_preprocessing.config)

        self.assertEqual(expected, built)

    def test_non_existent_job_raise_exception(self):
        # An unknown job name in either config must raise RuntimeError.
        bogus_job = 'comb a quail\'s hair'
        self.context_preprocessing.config['preprocessing_jobs'] = [bogus_job]
        self.context_processing.config['processing_jobs'] = [bogus_job]

        with self.assertRaises(RuntimeError):
            self.preprocessing_builder.build(
                self.context_preprocessing.config)

        with self.assertRaises(RuntimeError):
            self.processing_builder.build(
                self.context_processing.config)

    def test_workflow_execute_pipelines(self):
        # The workflow runs preprocessing once, then the processing
        # pipeline once per (source model, catalog) pair from the filter.
        context = Context()
        context.config['apply_processing_jobs'] = True
        context.sm_definitions = None

        preprocessing = PipeLine(None)
        preprocessing.run = Mock()
        processing = PipeLine(None)
        processing.run = Mock()

        workflow = Workflow(preprocessing, processing)

        # Mocking a generator method: the filter yields two pairs.
        sm_filter = MagicMock()
        sm_filter.filter_eqs.return_value.__iter__.return_value = \
            iter([(dict(a=1), [1]), ((dict(b=2), [2]))])

        workflow.start(context, sm_filter)

        self.assertTrue(workflow.preprocessing_pipeline.run.called)
        self.assertTrue(sm_filter.filter_eqs.called)
        self.assertTrue(processing.run.called)
        self.assertEqual(2, processing.run.call_count)