def evaluate_dispatcher_and_run_workflow(scheduler, file_meta):
    """
    Evaluates the dispatcher of the given source for the given dataset
    and runs the corresponding workflow.
    """
    loggy = local.logger
    dataset = scheduler.content_object
    file_target = file_meta.get('file_path', file_meta['file_name'])
    archive_item, dummy = ArchiveItem.objects.get_or_create(
        dataset=dataset, file_target=file_target)
    # Record the hash of the file this archive item was built from.
    archive_item.file_hash = file_meta['md5sum']
    archive_item.save(force_update=True)
    wf_input_params = []
    wf_exec_results = []
    # Run dispatcher to configure dataset.
    result = _evaluate_dispatcher(archive_item, file_meta)
    if result == RECURSE_MARKER:
        archive_item.delete()  # archive_item cleanup
        return _handle_recursion(scheduler, file_meta)

    # If any of the ArchiveItems have no rules attached, mark the
    # scheduler as incomplete.
    if not archive_item.get_refine_rule():
        scheduler.status = Scheduler.INCOMPLETE
    if not isinstance(result, list):
        result = [result]
    for result_elem in result:
        if len(result_elem) != 2:
            raise Exception(
                'Invalid workflow configuration from dispatcher. '
                'Expected couple ("<conf-name>", {<conf-input params>}). '
                'Found: ' + repr(result_elem))
        configuration, parameters = result_elem
        wf_input_params.append({
            'workflow': configuration,
            'parameters': parameters,
        })
        loggy.info('Running workflow: %s', result_elem)
        wf_exec_results.append(
            Workflow(config_name_to_file(configuration),
                     __expand_parameters(dataset, parameters),
                     store_output=True).run_module())
        loggy.info('Workflow completed')
    return wf_input_params, wf_exec_results
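
# A minimal usage sketch (hypothetical values; assumes a configured Django
# environment in which the Scheduler instance and its related dataset
# already exist). The file_meta dict mirrors the keys the function reads
# above: 'file_path' is optional, 'file_name' and 'md5sum' are required.
#
#     scheduler = Scheduler.objects.get(pk=42)  # hypothetical lookup
#     file_meta = {
#         'file_name': 'rows.csv',
#         'file_path': 'archive/rows.csv',
#         'md5sum': 'd41d8cd98f00b204e9800998ecf8427e',
#     }
#     wf_input_params, wf_exec_results = \
#         evaluate_dispatcher_and_run_workflow(scheduler, file_meta)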
def _run_workflow_from_handler(couple):
    """Runs a workflow from Celery."""
    try:
        configuration, parameters = couple
    except ValueError:
        raise Exception(
            'Invalid workflow configuration from handler. Expected '
            'couple ("<conf-name>", {<conf-input params>}). '
            'Found: ' + repr(couple))
    return Workflow(
        config_name_to_file(configuration),
        parameters,
        store_output=True,
    ).run_module()
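
# Illustrative handler call (the configuration name and parameters are
# hypothetical; the couple format matches the error message above):
#
#     _run_workflow_from_handler(
#         ('csv_refine', {'dataset_id': 42, 'separator': ','}))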