Example #1
    def update_workflow_from_dict(self, trans, stored_workflow, workflow_data, from_editor=False):
        # Put parameters in workflow mode
        trans.workflow_building_mode = True
        # Convert incoming workflow data from json if coming from editor
        data = json.loads(workflow_data) if from_editor else workflow_data
        # Create new workflow from incoming data
        workflow = model.Workflow()
        # Just keep the last name (user can rename later)
        workflow.name = stored_workflow.name
        # Assume no errors until we find a step that has some
        workflow.has_errors = False
        # Create each step
        steps = []
        # The editor will provide ids for each step that we don't need to save,
        # but do need to use to make connections
        steps_by_external_id = {}
        errors = []
        for key, step_dict in data['steps'].iteritems():
            is_tool = is_tool_module_type( step_dict[ 'type' ] )
            if is_tool and not trans.app.toolbox.has_tool( step_dict['tool_id'], exact=True ):
                errors.append("Step %s requires tool '%s'." % (step_dict['id'], step_dict['tool_id']))
        if errors:
            raise MissingToolsException(workflow, errors)

        # First pass to build step objects and populate basic values
        for step_dict in self.__walk_step_dicts( data ):
            module, step = self.__module_from_dict( trans, step_dict, secure=from_editor )
            # Create the model class for the step
            steps.append( step )
            steps_by_external_id[ step_dict[ 'id' ] ] = step
            if 'workflow_outputs' in step_dict:
                for output_name in step_dict['workflow_outputs']:
                    m = model.WorkflowOutput(workflow_step=step, output_name=output_name)
                    trans.sa_session.add(m)
            if step.tool_errors:
                # DBTODO Check for conditional inputs here.
                workflow.has_errors = True

        # Second pass to deal with connections between steps
        self.__connect_workflow_steps( steps, steps_by_external_id )

        # Order the steps if possible
        attach_ordered_steps( workflow, steps )
        # Connect up
        workflow.stored_workflow = stored_workflow
        stored_workflow.latest_workflow = workflow
        # Persist
        trans.sa_session.flush()
        # Return something informative
        errors = []
        if workflow.has_errors:
            errors.append( "Some steps in this workflow have validation errors" )
        if workflow.has_cycles:
            errors.append( "This workflow contains cycles" )
        return workflow, errors
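
update_workflow_from_dict only reads a handful of keys from the editor payload: 'steps', and per step 'id', 'type', 'tool_id' and 'workflow_outputs'. The sketch below shows a hypothetical minimal payload limited to those keys plus a call against it; the 'cat1' tool id and the manager object holding the method are placeholders, and a real editor export carries far more per-step data (state, inputs, position, and so on).

import json

# Hypothetical minimal editor payload, limited to the keys read above.
workflow_data = json.dumps({
    'steps': {
        '0': {'id': 0, 'type': 'data_input', 'workflow_outputs': []},
        '1': {'id': 1, 'type': 'tool', 'tool_id': 'cat1',
              'workflow_outputs': ['out_file1']},
    }
})

# 'manager' stands in for whatever object defines this method.
workflow, errors = manager.update_workflow_from_dict(
    trans, stored_workflow, workflow_data, from_editor=True)
for message in errors:
    print(message)
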
Example #2
    def _workflow_from_dict(self, trans, data, name):
        if isinstance(data, string_types):
            data = json.loads(data)

        # Create new workflow from source data
        workflow = model.Workflow()

        workflow.name = name

        # Assume no errors until we find a step that has some
        workflow.has_errors = False
        # Create each step
        steps = []
        # The editor will provide ids for each step that we don't need to save,
        # but do need to use to make connections
        steps_by_external_id = {}

        # Keep track of tools required by the workflow that are not available in
        # the local Galaxy instance.  Each tuple in the list of missing_tool_tups
        # will be ( tool_id, tool_name, tool_version ).
        missing_tool_tups = []

        for step_dict in self.__walk_step_dicts( data ):
            module, step = self.__track_module_from_dict( trans, steps, steps_by_external_id, step_dict )
            is_tool = is_tool_module_type( module.type )
            if is_tool and module.tool is None:
                # A required tool is not available in the local Galaxy instance.
                tool_id = step_dict.get('content_id', step_dict.get('tool_id', None))
                assert tool_id is not None  # Threw an exception elsewhere if not

                missing_tool_tup = ( tool_id, step_dict[ 'name' ], step_dict[ 'tool_version' ], step_dict[ 'id'] )
                if missing_tool_tup not in missing_tool_tups:
                    missing_tool_tups.append( missing_tool_tup )

                # Save the entire step_dict in the unused config field, to be parsed
                # later when we do have the tool
                step.config = json.dumps(step_dict)

            if step.tool_errors:
                workflow.has_errors = True

        # Second pass to deal with connections between steps
        self.__connect_workflow_steps( steps, steps_by_external_id )

        # Order the steps if possible
        attach_ordered_steps( workflow, steps )

        return workflow, missing_tool_tups
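
In this variant each missing_tool_tups entry carries four values, the last being the step's external id, and the raw step_dict is stashed as JSON in step.config for later re-parsing. A hypothetical sketch of unpacking the returned values (manager, trans and data are placeholders for the real objects):

workflow, missing_tool_tups = manager._workflow_from_dict(trans, data, name='Imported workflow')
for tool_id, tool_name, tool_version, external_step_id in missing_tool_tups:
    print("Step %s needs tool '%s' (%s, version %s)" %
          (external_step_id, tool_name, tool_id, tool_version))
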
Example #3
    def _workflow_from_dict(self, trans, data, name, **kwds):
        if isinstance(data, string_types):
            data = json.loads(data)

        # Create new workflow from source data
        workflow = model.Workflow()
        workflow.name = name

        # Assume no errors until we find a step that has some
        workflow.has_errors = False
        # Create each step
        steps = []
        # The editor will provide ids for each step that we don't need to save,
        # but do need to use to make connections
        steps_by_external_id = {}

        # Keep track of tools required by the workflow that are not available in
        # the local Galaxy instance.  Each tuple in the list of missing_tool_tups
        # will be ( tool_id, tool_name, tool_version ).
        missing_tool_tups = []
        for step_dict in self.__walk_step_dicts(data):
            self.__load_subworkflows(trans, step_dict)

        for step_dict in self.__walk_step_dicts(data):
            module, step = self.__module_from_dict(trans, steps, steps_by_external_id, step_dict, **kwds)
            is_tool = is_tool_module_type(module.type)
            if is_tool and module.tool is None:
                missing_tool_tup = (module.tool_id, module.get_name(), module.tool_version, step_dict['id'])
                if missing_tool_tup not in missing_tool_tups:
                    missing_tool_tups.append(missing_tool_tup)
            if module.get_errors():
                workflow.has_errors = True

        # Second pass to deal with connections between steps
        self.__connect_workflow_steps(steps, steps_by_external_id)

        # Order the steps if possible
        attach_ordered_steps(workflow, steps)

        return workflow, missing_tool_tups
Example #4
    def _workflow_from_dict(self, trans, data, name, **kwds):
        if isinstance(data, string_types):
            data = json.loads(data)

        # Create new workflow from source data
        workflow = model.Workflow()
        workflow.name = name

        # Assume no errors until we find a step that has some
        workflow.has_errors = False
        # Create each step
        steps = []
        # The editor will provide ids for each step that we don't need to save,
        # but do need to use to make connections
        steps_by_external_id = {}

        # Keep track of tools required by the workflow that are not available in
        # the local Galaxy instance.  Each tuple in the list of missing_tool_tups
        # will be ( tool_id, tool_name, tool_version ).
        missing_tool_tups = []
        for step_dict in self.__walk_step_dicts(data):
            module, step = self.__module_from_dict(trans, steps,
                                                   steps_by_external_id,
                                                   step_dict, **kwds)
            is_tool = is_tool_module_type(module.type)
            if is_tool and module.tool is None:
                missing_tool_tup = (module.tool_id, module.get_name(),
                                    module.tool_version, step_dict['id'])
                if missing_tool_tup not in missing_tool_tups:
                    missing_tool_tups.append(missing_tool_tup)
            if module.get_errors():
                workflow.has_errors = True

        # Second pass to deal with connections between steps
        self.__connect_workflow_steps(steps, steps_by_external_id)

        # Order the steps if possible
        attach_ordered_steps(workflow, steps)

        return workflow, missing_tool_tups
Example #5
    def build_workflow_from_dict(
        self,
        trans,
        data,
        source=None,
        add_to_menu=False,
        publish=False,
        create_stored_workflow=True,
    ):
        # Put parameters in workflow mode
        trans.workflow_building_mode = True
        # Create new workflow from incoming dict
        workflow = model.Workflow()
        # If there's a source, put it in the workflow name.
        if source:
            name = "%s (imported from %s)" % ( data['name'], source )
        else:
            name = data['name']
        workflow.name = name
        if 'uuid' in data:
            workflow.uuid = data['uuid']
        # Assume no errors until we find a step that has some
        workflow.has_errors = False
        # Create each step
        steps = []
        # The editor will provide ids for each step that we don't need to save,
        # but do need to use to make connections
        steps_by_external_id = {}
        # Keep track of tools required by the workflow that are not available in
        # the local Galaxy instance.  Each tuple in the list of missing_tool_tups
        # will be ( tool_id, tool_name, tool_version ).
        missing_tool_tups = []
        for step_dict in self.__walk_step_dicts( data ):
            module, step = self.__track_module_from_dict( trans, steps, steps_by_external_id, step_dict, secure=False )
            if module.type == 'tool' and module.tool is None:
                # A required tool is not available in the local Galaxy instance.
                if 'content_id' in step_dict:
                    tool_id = step_dict[ 'content_id' ]
                else:
                    # Support legacy workflows... (created pre 16.01)
                    tool_id = step_dict[ 'tool_id' ]
                missing_tool_tup = ( tool_id, step_dict[ 'name' ], step_dict[ 'tool_version' ])
                if missing_tool_tup not in missing_tool_tups:
                    missing_tool_tups.append( missing_tool_tup )
                # Save the entire step_dict in the unused config field, to be parsed
                # later when we do have the tool
                step.config = json.dumps(step_dict)
            if step.tool_errors:
                workflow.has_errors = True

        # Second pass to deal with connections between steps
        self.__connect_workflow_steps( steps, steps_by_external_id )

        # Order the steps if possible
        attach_ordered_steps( workflow, steps )

        if create_stored_workflow:
            # Connect up
            stored = model.StoredWorkflow()
            stored.name = workflow.name
            workflow.stored_workflow = stored
            stored.latest_workflow = workflow
            stored.user = trans.user
            stored.published = publish
            if data[ 'annotation' ]:
                annotation = sanitize_html( data[ 'annotation' ], 'utf-8', 'text/html' )
                self.add_item_annotation( trans.sa_session, stored.user, stored, annotation )

            # Persist
            trans.sa_session.add( stored )

            if add_to_menu:
                if trans.user.stored_workflow_menu_entries is None:
                    trans.user.stored_workflow_menu_entries = []
                menuEntry = model.StoredWorkflowMenuEntry()
                menuEntry.stored_workflow = stored
                trans.user.stored_workflow_menu_entries.append( menuEntry )

        else:
            stored = None
            # Persist
            trans.sa_session.add( workflow )

        trans.sa_session.flush()

        return CreatedWorkflow(
            stored_workflow=stored,
            workflow=workflow,
            missing_tools=missing_tool_tups
        )
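
A hypothetical import sketch for build_workflow_from_dict: load an exported workflow dictionary, build and store it, then report tools missing from the instance. The file path, manager object and trans are placeholders; the CreatedWorkflow fields used here are exactly the ones returned above.

import json

with open('exported_workflow.ga') as fh:  # placeholder path to an exported workflow
    data = json.load(fh)

created = manager.build_workflow_from_dict(trans, data, source='uploaded file')
stored = created.stored_workflow  # persisted StoredWorkflow (None if create_stored_workflow=False)
for tool_id, tool_name, tool_version in created.missing_tools:
    print("Missing tool: %s (%s, version %s)" % (tool_name, tool_id, tool_version))
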
Example #6
 def _workflow_to_dict_run(self, trans, stored):
     """
     Builds workflow dictionary used by run workflow form
     """
     workflow = stored.latest_workflow
     if len(workflow.steps) == 0:
         raise exceptions.MessageException(
             'Workflow cannot be run because it does not have any steps.')
     if attach_ordered_steps(workflow, workflow.steps):
         raise exceptions.MessageException(
             'Workflow cannot be run because it contains cycles.')
     trans.workflow_building_mode = workflow_building_modes.USE_HISTORY
     module_injector = WorkflowModuleInjector(trans)
     has_upgrade_messages = False
     step_version_changes = []
     missing_tools = []
     errors = {}
     for step in workflow.steps:
         try:
             module_injector.inject(step,
                                    steps=workflow.steps,
                                    exact_tools=False)
         except exceptions.ToolMissingException:
             if step.tool_id not in missing_tools:
                 missing_tools.append(step.tool_id)
             continue
         if step.upgrade_messages:
             has_upgrade_messages = True
         if step.type == 'tool' or step.type is None:
             if step.module.version_changes:
                 step_version_changes.extend(step.module.version_changes)
             step_errors = step.module.get_errors()
             if step_errors:
                 errors[step.id] = step_errors
     if missing_tools:
         workflow.annotation = self.get_item_annotation_str(
             trans.sa_session, trans.user, workflow)
         raise exceptions.MessageException('Following tools missing: %s' %
                                           ', '.join(missing_tools))
     workflow.annotation = self.get_item_annotation_str(
         trans.sa_session, trans.user, workflow)
     step_order_indices = {}
     for step in workflow.steps:
         step_order_indices[step.id] = step.order_index
     step_models = []
     for i, step in enumerate(workflow.steps):
         step_model = None
         if step.type == 'tool':
             incoming = {}
             tool = trans.app.toolbox.get_tool(
                 step.tool_id, tool_version=step.tool_version)
             params_to_incoming(incoming, tool.inputs, step.state.inputs,
                                trans.app)
             step_model = tool.to_json(
                 trans,
                 incoming,
                 workflow_building_mode=workflow_building_modes.USE_HISTORY)
             step_model['post_job_actions'] = [{
                 'short_str': ActionBox.get_short_str(pja),
                 'action_type': pja.action_type,
                 'output_name': pja.output_name,
                 'action_arguments': pja.action_arguments
             } for pja in step.post_job_actions]
         else:
             inputs = step.module.get_runtime_inputs(
                 connections=step.output_connections)
             step_model = {
                 'inputs': [input.to_dict(trans) for input in inputs.values()]
             }
         step_model['step_type'] = step.type
         step_model['step_label'] = step.label
         step_model['step_name'] = step.module.get_name()
         step_model['step_version'] = step.module.get_version()
         step_model['step_index'] = step.order_index
         step_model['output_connections'] = [{
             'input_step_index': step_order_indices.get(oc.input_step_id),
             'output_step_index': step_order_indices.get(oc.output_step_id),
             'input_name': oc.input_name,
             'output_name': oc.output_name
         } for oc in step.output_connections]
         if step.annotations:
             step_model['annotation'] = step.annotations[0].annotation
         if step.upgrade_messages:
             step_model['messages'] = step.upgrade_messages
         step_models.append(step_model)
     return {
         'id': trans.app.security.encode_id(stored.id),
         'history_id': trans.app.security.encode_id(trans.history.id) if trans.history else None,
         'name': stored.name,
         'steps': step_models,
         'step_version_changes': step_version_changes,
         'has_upgrade_messages': has_upgrade_messages,
         'workflow_resource_parameters': self._workflow_resource_parameters(trans, stored, workflow),
     }
Example #7
def get_workflow_from_dict(trans, workflow_dict, tools_metadata, repository_id,
                           changeset_revision):
    """
    Return an in-memory Workflow object from the dictionary object created when it was exported.  This method is called from
    both Galaxy and the tool shed to retrieve a Workflow object that can be displayed as an SVG image.  This method is also
    called from Galaxy to retrieve a Workflow object that can be used for saving to the Galaxy database.
    """
    trans.workflow_building_mode = True
    workflow = trans.model.Workflow()
    workflow.name = workflow_dict['name']
    workflow.has_errors = False
    steps = []
    # Keep ids for each step that we need to use to make connections.
    steps_by_external_id = {}
    # Keep track of tools required by the workflow that are not available in
    # the tool shed repository.  Each tuple in the list of missing_tool_tups
    # will be ( tool_id, tool_name, tool_version ).
    missing_tool_tups = []
    # First pass to build step objects and populate basic values
    for step_dict in workflow_dict['steps'].values():
        # Create the model class for the step
        step = trans.model.WorkflowStep()
        step.label = step_dict.get('label', None)
        step.position = step_dict['position']
        module = module_factory.from_dict(trans,
                                          repository_id,
                                          changeset_revision,
                                          step_dict,
                                          tools_metadata=tools_metadata)
        if module.type == 'tool' and module.tool is None:
            # A required tool is not available in the current repository.
            step.tool_errors = 'unavailable'
            missing_tool_tup = (step_dict['tool_id'], step_dict['name'],
                                step_dict['tool_version'])
            if missing_tool_tup not in missing_tool_tups:
                missing_tool_tups.append(missing_tool_tup)
        module.save_to_step(step)
        if step.tool_errors:
            workflow.has_errors = True
        # Stick this in the step temporarily.
        step.temp_input_connections = step_dict['input_connections']
        if trans.webapp.name == 'galaxy':
            annotation = step_dict.get('annotation', '')
            if annotation:
                annotation = sanitize_html(annotation, 'utf-8', 'text/html')
                new_step_annotation = trans.model.WorkflowStepAnnotationAssociation()
                new_step_annotation.annotation = annotation
                new_step_annotation.user = trans.user
                step.annotations.append(new_step_annotation)
        # Unpack and add post-job actions.
        post_job_actions = step_dict.get('post_job_actions', {})
        for pja_dict in post_job_actions.values():
            trans.model.PostJobAction(pja_dict['action_type'], step,
                                      pja_dict['output_name'],
                                      pja_dict['action_arguments'])
        steps.append(step)
        steps_by_external_id[step_dict['id']] = step
    # Second pass to deal with connections between steps.
    for step in steps:
        # Input connections.
        for input_name, conn_dict in step.temp_input_connections.items():
            if conn_dict:
                output_step = steps_by_external_id[conn_dict['id']]
                conn = trans.model.WorkflowStepConnection()
                conn.input_step = step
                conn.input_name = input_name
                conn.output_step = output_step
                conn.output_name = conn_dict['output_name']
                step.input_connections.append(conn)
        del step.temp_input_connections
    # Order the steps if possible.
    attach_ordered_steps(workflow, steps)
    # Return the in-memory Workflow object for display or later persistence to the Galaxy database.
    return workflow, missing_tool_tups
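
A hypothetical call sketch for the module-level helper above; the argument values are placeholders, and the tuple layout follows the comment inside the function.

workflow, missing_tool_tups = get_workflow_from_dict(
    trans,
    workflow_dict,        # dictionary created when the workflow was exported
    tools_metadata,       # tool metadata recorded for the repository
    repository_id,        # tool shed repository the workflow belongs to
    changeset_revision,   # changeset revision of that repository
)
for tool_id, tool_name, tool_version in missing_tool_tups:
    print("Unavailable tool: %s %s (%s)" % (tool_name, tool_version, tool_id))
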
Example #8
def get_workflow_from_dict( trans, workflow_dict, tools_metadata, repository_id, changeset_revision ):
    """
    Return an in-memory Workflow object from the dictionary object created when it was exported.  This method is called from
    both Galaxy and the tool shed to retrieve a Workflow object that can be displayed as an SVG image.  This method is also
    called from Galaxy to retrieve a Workflow object that can be used for saving to the Galaxy database.
    """
    trans.workflow_building_mode = True
    workflow = trans.model.Workflow()
    workflow.name = workflow_dict[ 'name' ]
    workflow.has_errors = False
    steps = []
    # Keep ids for each step that we need to use to make connections.
    steps_by_external_id = {}
    # Keep track of tools required by the workflow that are not available in
    # the tool shed repository.  Each tuple in the list of missing_tool_tups
    # will be ( tool_id, tool_name, tool_version ).
    missing_tool_tups = []
    # First pass to build step objects and populate basic values
    for step_dict in workflow_dict[ 'steps' ].itervalues():
        # Create the model class for the step
        step = trans.model.WorkflowStep()
        step.label = step_dict.get('label', None)
        step.name = step_dict[ 'name' ]
        step.position = step_dict[ 'position' ]
        module = module_factory.from_dict( trans, repository_id, changeset_revision, step_dict, tools_metadata=tools_metadata )
        if module.type == 'tool' and module.tool is None:
            # A required tool is not available in the current repository.
            step.tool_errors = 'unavailable'
            missing_tool_tup = ( step_dict[ 'tool_id' ], step_dict[ 'name' ], step_dict[ 'tool_version' ] )
            if missing_tool_tup not in missing_tool_tups:
                missing_tool_tups.append( missing_tool_tup )
        module.save_to_step( step )
        if step.tool_errors:
            workflow.has_errors = True
        # Stick this in the step temporarily.
        step.temp_input_connections = step_dict[ 'input_connections' ]
        if trans.webapp.name == 'galaxy':
            annotation = step_dict.get( 'annotation', '')
            if annotation:
                annotation = sanitize_html( annotation, 'utf-8', 'text/html' )
                new_step_annotation = trans.model.WorkflowStepAnnotationAssociation()
                new_step_annotation.annotation = annotation
                new_step_annotation.user = trans.user
                step.annotations.append( new_step_annotation )
        # Unpack and add post-job actions.
        post_job_actions = step_dict.get( 'post_job_actions', {} )
        for pja_dict in post_job_actions.values():
            trans.model.PostJobAction( pja_dict[ 'action_type' ],
                                       step,
                                       pja_dict[ 'output_name' ],
                                       pja_dict[ 'action_arguments' ] )
        steps.append( step )
        steps_by_external_id[ step_dict[ 'id' ] ] = step
    # Second pass to deal with connections between steps.
    for step in steps:
        # Input connections.
        for input_name, conn_dict in step.temp_input_connections.iteritems():
            if conn_dict:
                output_step = steps_by_external_id[ conn_dict[ 'id' ] ]
                conn = trans.model.WorkflowStepConnection()
                conn.input_step = step
                conn.input_name = input_name
                conn.output_step = output_step
                conn.output_name = conn_dict[ 'output_name' ]
                step.input_connections.append( conn )
        del step.temp_input_connections
    # Order the steps if possible.
    attach_ordered_steps( workflow, steps )
    # Return the in-memory Workflow object for display or later persistence to the Galaxy database.
    return workflow, missing_tool_tups
Example #9
    def update_workflow_from_dict(self,
                                  trans,
                                  stored_workflow,
                                  workflow_data,
                                  from_editor=False):
        # Put parameters in workflow mode
        trans.workflow_building_mode = True
        # Convert incoming workflow data from json if coming from editor
        data = json.loads(workflow_data) if from_editor else workflow_data
        # Create new workflow from incoming data
        workflow = model.Workflow()
        # Just keep the last name (user can rename later)
        workflow.name = stored_workflow.name
        # Assume no errors until we find a step that has some
        workflow.has_errors = False
        # Create each step
        steps = []
        # The editor will provide ids for each step that we don't need to save,
        # but do need to use to make connections
        steps_by_external_id = {}
        errors = []
        for key, step_dict in data['steps'].iteritems():
            is_tool = is_tool_module_type(step_dict['type'])
            if is_tool and not trans.app.toolbox.has_tool(step_dict['tool_id'],
                                                          exact=True):
                errors.append("Step %s requires tool '%s'." %
                              (step_dict['id'], step_dict['tool_id']))
        if errors:
            raise MissingToolsException(workflow, errors)

        # First pass to build step objects and populate basic values
        for step_dict in self.__walk_step_dicts(data):
            module, step = self.__module_from_dict(trans,
                                                   step_dict,
                                                   secure=from_editor)
            # Create the model class for the step
            steps.append(step)
            steps_by_external_id[step_dict['id']] = step
            if 'workflow_outputs' in step_dict:
                for output_name in step_dict['workflow_outputs']:
                    m = model.WorkflowOutput(workflow_step=step,
                                             output_name=output_name)
                    trans.sa_session.add(m)
            if step.tool_errors:
                # DBTODO Check for conditional inputs here.
                workflow.has_errors = True

        # Second pass to deal with connections between steps
        self.__connect_workflow_steps(steps, steps_by_external_id)

        # Order the steps if possible
        attach_ordered_steps(workflow, steps)
        # Connect up
        workflow.stored_workflow = stored_workflow
        stored_workflow.latest_workflow = workflow
        # Persist
        trans.sa_session.flush()
        # Return something informative
        errors = []
        if workflow.has_errors:
            errors.append("Some steps in this workflow have validation errors")
        if workflow.has_cycles:
            errors.append("This workflow contains cycles")
        return workflow, errors
Example #10
    def build_workflow_from_dict(self,
                                 trans,
                                 data,
                                 source=None,
                                 add_to_menu=False,
                                 publish=False):
        # Put parameters in workflow mode
        trans.workflow_building_mode = True
        # Create new workflow from incoming dict
        workflow = model.Workflow()
        # If there's a source, put it in the workflow name.
        if source:
            name = "%s (imported from %s)" % (data['name'], source)
        else:
            name = data['name']
        workflow.name = name
        if 'uuid' in data:
            workflow.uuid = data['uuid']
        # Assume no errors until we find a step that has some
        workflow.has_errors = False
        # Create each step
        steps = []
        # The editor will provide ids for each step that we don't need to save,
        # but do need to use to make connections
        steps_by_external_id = {}
        # Keep track of tools required by the workflow that are not available in
        # the local Galaxy instance.  Each tuple in the list of missing_tool_tups
        # will be ( tool_id, tool_name, tool_version ).
        missing_tool_tups = []
        for step_dict in self.__walk_step_dicts(data):
            module, step = self.__module_from_dict(trans,
                                                   step_dict,
                                                   secure=False)
            steps.append(step)
            steps_by_external_id[step_dict['id']] = step

            if module.type == 'tool' and module.tool is None:
                # A required tool is not available in the local Galaxy instance.
                missing_tool_tup = (step_dict['tool_id'], step_dict['name'],
                                    step_dict['tool_version'])
                if missing_tool_tup not in missing_tool_tups:
                    missing_tool_tups.append(missing_tool_tup)
                # Save the entire step_dict in the unused config field, to be parsed
                # later when we do have the tool
                step.config = json.dumps(step_dict)
            if step.tool_errors:
                workflow.has_errors = True

        # Second pass to deal with connections between steps
        self.__connect_workflow_steps(steps, steps_by_external_id)

        # Order the steps if possible
        attach_ordered_steps(workflow, steps)

        # Connect up
        stored = model.StoredWorkflow()
        stored.name = workflow.name
        workflow.stored_workflow = stored
        stored.latest_workflow = workflow
        stored.user = trans.user
        stored.published = publish
        if data['annotation']:
            annotation = sanitize_html(data['annotation'], 'utf-8',
                                       'text/html')
            self.add_item_annotation(trans.sa_session, stored.user, stored,
                                     annotation)

        # Persist
        trans.sa_session.add(stored)
        trans.sa_session.flush()

        if add_to_menu:
            if trans.user.stored_workflow_menu_entries is None:
                trans.user.stored_workflow_menu_entries = []
            menuEntry = model.StoredWorkflowMenuEntry()
            menuEntry.stored_workflow = stored
            trans.user.stored_workflow_menu_entries.append(menuEntry)
            trans.sa_session.flush()

        return CreatedWorkflow(stored_workflow=stored,
                               missing_tools=missing_tool_tups)
Example #11
 def _workflow_to_dict_run(self, trans, stored):
     """
     Builds workflow dictionary used by run workflow form
     """
     workflow = stored.latest_workflow
     if len(workflow.steps) == 0:
         raise exceptions.MessageException('Workflow cannot be run because it does not have any steps.')
     if attach_ordered_steps(workflow, workflow.steps):
         raise exceptions.MessageException('Workflow cannot be run because it contains cycles.')
     trans.workflow_building_mode = workflow_building_modes.USE_HISTORY
     module_injector = WorkflowModuleInjector(trans)
     has_upgrade_messages = False
     step_version_changes = []
     missing_tools = []
     errors = {}
     for step in workflow.steps:
         try:
             module_injector.inject(step, steps=workflow.steps, exact_tools=False)
         except exceptions.ToolMissingException:
             if step.tool_id not in missing_tools:
                 missing_tools.append(step.tool_id)
             continue
         if step.upgrade_messages:
             has_upgrade_messages = True
         if step.type == 'tool' or step.type is None:
             if step.module.version_changes:
                 step_version_changes.extend(step.module.version_changes)
             step_errors = step.module.get_errors()
             if step_errors:
                 errors[step.id] = step_errors
     if missing_tools:
         workflow.annotation = self.get_item_annotation_str(trans.sa_session, trans.user, workflow)
         raise exceptions.MessageException('Following tools missing: %s' % missing_tools)
     workflow.annotation = self.get_item_annotation_str(trans.sa_session, trans.user, workflow)
     step_order_indices = {}
     for step in workflow.steps:
         step_order_indices[step.id] = step.order_index
     step_models = []
     for i, step in enumerate(workflow.steps):
         step_model = None
         if step.type == 'tool':
             incoming = {}
             tool = trans.app.toolbox.get_tool(step.tool_id, tool_version=step.tool_version)
             params_to_incoming(incoming, tool.inputs, step.state.inputs, trans.app)
             step_model = tool.to_json(trans, incoming, workflow_building_mode=workflow_building_modes.USE_HISTORY)
             step_model['post_job_actions'] = [{
                 'short_str'         : ActionBox.get_short_str(pja),
                 'action_type'       : pja.action_type,
                 'output_name'       : pja.output_name,
                 'action_arguments'  : pja.action_arguments
             } for pja in step.post_job_actions]
         else:
             inputs = step.module.get_runtime_inputs(connections=step.output_connections)
             step_model = {
                 'inputs' : [input.to_dict(trans) for input in inputs.values()]
             }
         step_model['step_type'] = step.type
         step_model['step_label'] = step.label
         step_model['step_name'] = step.module.get_name()
         step_model['step_version'] = step.module.get_version()
         step_model['step_index'] = step.order_index
         step_model['output_connections'] = [{
             'input_step_index'  : step_order_indices.get(oc.input_step_id),
             'output_step_index' : step_order_indices.get(oc.output_step_id),
             'input_name'        : oc.input_name,
             'output_name'       : oc.output_name
         } for oc in step.output_connections]
         if step.annotations:
             step_model['annotation'] = step.annotations[0].annotation
         if step.upgrade_messages:
             step_model['messages'] = step.upgrade_messages
         step_models.append(step_model)
     return {
         'id': trans.app.security.encode_id(stored.id),
         'history_id': trans.app.security.encode_id(trans.history.id) if trans.history else None,
         'name': stored.name,
         'steps': step_models,
         'step_version_changes': step_version_changes,
         'has_upgrade_messages': has_upgrade_messages,
         'workflow_resource_parameters': self._workflow_resource_parameters(trans, stored, workflow),
     }
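
The dictionary returned above drives the run-workflow form. Below is a hypothetical consumer sketch using only keys the method actually sets; manager, trans and stored are placeholders for the real objects.

run_form = manager._workflow_to_dict_run(trans, stored)
print("Workflow '%s' (%s)" % (run_form['name'], run_form['id']))
for step in run_form['steps']:
    label = step['step_label'] or step['step_name']
    print("  %d. [%s] %s" % (step['step_index'], step['step_type'], label))
if run_form['has_upgrade_messages']:
    print("Some steps carry upgrade messages; review them before running.")
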