def datafiles_to_db_by_source(**kwargs):
    """Populate tables with source files."""
    if not any(kwargs.values()):
        # No flags were passed: print the command's help text instead of silently doing nothing.
        ctx = click.get_current_context()
        click.echo(ctx.get_help())
    else:
        manager = PipelineManager(storage=get_source_folder(), dbi=app.dbi)
        for _arg, pipeline_info_list in arg_to_pipeline_config_list.items():
            # Dotted config keys correspond to double-underscore keys in the click kwargs.
            arg = _arg.replace(".", "__")
            if kwargs['all'] or kwargs[arg]:
                for pipeline, sub_dir, options in pipeline_info_list:
                    # Some pipelines are only registered when an extra opt-in flag was passed.
                    required_flag = options.get('required_flag', None)
                    if not required_flag or kwargs.get(required_flag, False):
                        manager.pipeline_register(
                            pipeline=pipeline,
                            sub_directory=sub_dir,
                            force=kwargs['force'],
                            continue_transform=kwargs['continue'],
                            products=kwargs['products'],
                            **options,
                        )
        manager.pipeline_process_all()
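# Hypothetical sketch of the mapping that datafiles_to_db_by_source() iterates over.
# The real arg_to_pipeline_config_list is defined elsewhere in the project; the key,
# class, and flag names below are invented for illustration only. What the command's
# code implies is the shape: each dotted key maps to a list of
# (pipeline, sub_directory, options) tuples, and options may carry a 'required_flag'
# naming an extra CLI flag that gates registration.

class ExampleSourcePipeline:  # placeholder pipeline class, for illustration only
    pass


arg_to_pipeline_config_list = {
    'example.source': [  # hypothetical key; surfaces as kwargs['example__source']
        (ExampleSourcePipeline, 'example/source', {}),
        (ExampleSourcePipeline, 'example/source/extra', {'required_flag': 'include_extra'}),
    ],
}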
def test_pipeline_register_when_already_registered(self):
    manager = Manager()
    manager.pipeline_register('test_pipeline', pipeline_id='fake_pipeline')
    with pytest.raises(ValueError):
        manager.pipeline_register('test_pipeline', pipeline_id='fake_pipeline')
    self.assert_pipeline_config(manager, 'fake_pipeline', False, 'test_pipeline', None, True, False)
def test_pipeline_register(self):
    manager = Manager()
    manager.pipeline_register(
        'test_pipeline',
        pipeline_id='fake_pipeline',
        custom_parameter=True,
        force=True,
        unpack=True,
        trigger_dataflow_dag=True,
    )
    assert len(manager._pipelines) == 1
    self.assert_pipeline_config(manager, 'fake_pipeline', True, 'test_pipeline', None, True, True)
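# Minimal sketch of the Manager behaviour the two tests above exercise, assuming
# pipeline_register() keys registrations by pipeline_id, stores the remaining options
# in _pipelines, and rejects a duplicate id with ValueError. This is not the project's
# Manager implementation, only an illustration of the contract the tests check.

class SketchManager:
    def __init__(self):
        self._pipelines = {}

    def pipeline_register(self, pipeline, pipeline_id, **options):
        if pipeline_id in self._pipelines:
            # Re-registering the same id is rejected, as asserted by
            # test_pipeline_register_when_already_registered.
            raise ValueError(f'pipeline {pipeline_id!r} is already registered')
        self._pipelines[pipeline_id] = {'pipeline': pipeline, **options}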