def _workflow_from_dict(self, trans, data, name, exact_tools=False):
    """Build a model.Workflow from a workflow dictionary (or JSON string).

    Returns a tuple ``(workflow, missing_tool_tups)`` where each entry of
    ``missing_tool_tups`` is ``(tool_id, tool_name, tool_version, step_id)``
    describing a tool required by the workflow but not installed in this
    Galaxy instance.
    """
    if isinstance(data, string_types):
        data = json.loads(data)
    # Create new workflow from source data.
    workflow = model.Workflow()
    workflow.name = name
    # Assume no errors until a step proves otherwise.
    workflow.has_errors = False
    # Steps built so far, in encounter order.
    steps = []
    # The editor supplies per-step ids that are never persisted but are
    # required later to wire up connections between steps.
    steps_by_external_id = {}
    # Tools referenced by the workflow that are missing locally.
    missing_tool_tups = []
    for step_dict in self.__walk_step_dicts(data):
        module, step = self.__track_module_from_dict(
            trans, steps, steps_by_external_id, step_dict, exact_tools=exact_tools)
        if is_tool_module_type(module.type) and module.tool is None:
            # A required tool is not available in the local Galaxy instance.
            tool_id = step_dict.get('content_id', step_dict.get('tool_id', None))
            assert tool_id is not None  # Threw an exception elsewhere if not
            missing = (tool_id, step_dict['name'], step_dict['tool_version'], step_dict['id'])
            if missing not in missing_tool_tups:
                missing_tool_tups.append(missing)
            # Stash the whole step dict in the otherwise-unused config
            # field; it is parsed later once the tool becomes available.
            step.config = json.dumps(step_dict)
        if step.tool_errors:
            workflow.has_errors = True
    # Second pass: connect the steps to one another.
    self.__connect_workflow_steps(steps, steps_by_external_id)
    # Order the steps if possible.
    attach_ordered_steps(workflow, steps)
    return workflow, missing_tool_tups
def update_workflow_from_dict(self, trans, stored_workflow, workflow_data, from_editor=False):
    """Replace ``stored_workflow``'s latest workflow with one built from
    ``workflow_data``.

    ``workflow_data`` is a JSON string when coming from the editor
    (``from_editor=True``) and an already-parsed dict otherwise.

    Raises ``MissingToolsException`` if any tool step references a tool
    not installed in this Galaxy instance (checked up front, before any
    model objects are created).

    Returns ``(workflow, errors)`` where ``errors`` is a list of
    human-readable validation messages (cycles, step validation errors).
    """
    # Put parameters in workflow mode
    trans.workflow_building_mode = True
    # Convert incoming workflow data from json if coming from editor
    data = json.loads(workflow_data) if from_editor else workflow_data
    # Create new workflow from incoming data
    workflow = model.Workflow()
    # Just keep the last name (user can rename later)
    workflow.name = stored_workflow.name
    # Assume no errors until we find a step that has some
    workflow.has_errors = False
    # Create each step
    steps = []
    # The editor will provide ids for each step that we don't need to save,
    # but do need to use to make connections
    steps_by_external_id = {}
    # Fail fast: verify every required tool exists before building steps.
    # (The dict keys are unused, so iterate the values directly.)
    missing_tool_messages = []
    for step_dict in data['steps'].itervalues():
        if is_tool_module_type(step_dict['type']) and not trans.app.toolbox.has_tool(step_dict['tool_id'], exact=True):
            missing_tool_messages.append("Step %s requires tool '%s'." % (step_dict['id'], step_dict['tool_id']))
    if missing_tool_messages:
        raise MissingToolsException(workflow, missing_tool_messages)
    # First pass to build step objects and populate basic values
    for step_dict in self.__walk_step_dicts(data):
        module, step = self.__module_from_dict(trans, step_dict, secure=from_editor)
        # Create the model class for the step
        steps.append(step)
        steps_by_external_id[step_dict['id']] = step
        if 'workflow_outputs' in step_dict:
            for output_name in step_dict['workflow_outputs']:
                m = model.WorkflowOutput(workflow_step=step, output_name=output_name)
                trans.sa_session.add(m)
        if step.tool_errors:
            # DBTODO Check for conditional inputs here.
            workflow.has_errors = True
    # Second pass to deal with connections between steps
    self.__connect_workflow_steps(steps, steps_by_external_id)
    # Order the steps if possible
    attach_ordered_steps(workflow, steps)
    # Connect up
    workflow.stored_workflow = stored_workflow
    stored_workflow.latest_workflow = workflow
    # Persist
    trans.sa_session.flush()
    # Return something informative
    errors = []
    if workflow.has_errors:
        errors.append("Some steps in this workflow have validation errors")
    if workflow.has_cycles:
        errors.append("This workflow contains cycles")
    return workflow, errors
def _workflow_from_dict(self, trans, data, name):
    """Create a new model.Workflow from source data.

    ``data`` may be a JSON string or a dict. Returns
    ``(workflow, missing_tool_tups)``; each missing-tool tuple is
    ``(tool_id, tool_name, tool_version, step_id)`` for a tool the
    workflow needs but this Galaxy instance does not provide.
    """
    if isinstance(data, string_types):
        data = json.loads(data)
    new_workflow = model.Workflow()
    new_workflow.name = name
    # Optimistically assume no errors; flipped below per-step.
    new_workflow.has_errors = False
    step_objects = []
    # Editor-provided step ids: transient, used only to make connections.
    steps_by_external_id = {}
    # Accumulates (tool_id, tool_name, tool_version, step_id) tuples for
    # tools missing from the local instance.
    missing_tool_tups = []
    for step_dict in self.__walk_step_dicts(data):
        module, step = self.__track_module_from_dict(trans, step_objects, steps_by_external_id, step_dict)
        tool_missing = is_tool_module_type(module.type) and module.tool is None
        if tool_missing:
            # A required tool is not available in the local Galaxy instance.
            tool_id = step_dict.get('content_id', step_dict.get('tool_id', None))
            assert tool_id is not None  # Threw an exception elsewhere if not
            tup = (tool_id, step_dict['name'], step_dict['tool_version'], step_dict['id'])
            if tup not in missing_tool_tups:
                missing_tool_tups.append(tup)
            # Keep the raw step dict in the unused config field so it can
            # be parsed later, once the tool is installed.
            step.config = json.dumps(step_dict)
        if step.tool_errors:
            new_workflow.has_errors = True
    # Second pass: hook up inter-step connections.
    self.__connect_workflow_steps(step_objects, steps_by_external_id)
    # Order the steps when the graph permits.
    attach_ordered_steps(new_workflow, step_objects)
    return new_workflow, missing_tool_tups
def _workflow_from_dict(self, trans, data, name, **kwds):
    """Build a model.Workflow from a workflow dict (or JSON string).

    Extra keyword arguments are forwarded to ``__module_from_dict``.
    Returns ``(workflow, missing_tool_tups)`` where each tuple is
    ``(tool_id, tool_name, tool_version, step_id)`` for a required tool
    that is not installed locally.
    """
    if isinstance(data, string_types):
        data = json.loads(data)
    workflow = model.Workflow()
    workflow.name = name
    # No errors until a step reports some.
    workflow.has_errors = False
    steps = []
    # Editor-supplied step ids — never saved, only used for connections.
    steps_by_external_id = {}
    # (tool_id, tool_name, tool_version, step_id) tuples for tools this
    # Galaxy instance is missing.
    missing_tool_tups = []
    # Preload any subworkflows referenced by the steps before building them.
    for step_dict in self.__walk_step_dicts(data):
        self.__load_subworkflows(trans, step_dict)
    for step_dict in self.__walk_step_dicts(data):
        module, step = self.__module_from_dict(trans, steps, steps_by_external_id, step_dict, **kwds)
        if is_tool_module_type(module.type) and module.tool is None:
            entry = (module.tool_id, module.get_name(), module.tool_version, step_dict['id'])
            if entry not in missing_tool_tups:
                missing_tool_tups.append(entry)
        if module.get_errors():
            workflow.has_errors = True
    # Second pass: connect steps to one another.
    self.__connect_workflow_steps(steps, steps_by_external_id)
    # Order the steps if possible.
    attach_ordered_steps(workflow, steps)
    return workflow, missing_tool_tups