Example #1
 def _workflow_to_svg_canvas(self, trans, stored):
     workflow = stored.latest_workflow
     workflow_canvas = WorkflowCanvas()
     for step in workflow.steps:
         # Load from database representation
         module = module_factory.from_workflow_step(trans, step)
         module_name = module.get_name()
         module_data_inputs = module.get_data_inputs()
         module_data_outputs = module.get_data_outputs()
         workflow_canvas.populate_data_for_step(
             step,
             module_name,
             module_data_inputs,
             module_data_outputs,
         )
     workflow_canvas.add_steps()
     return workflow_canvas.finish()
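A minimal usage sketch of how a controller action might serve the result, assuming finish() yields the serialized SVG (if it instead returns a canvas object, one more serialization call would be needed); the action name and routing here are hypothetical:

    def gen_image(self, trans, stored):
        # Galaxy's response object exposes set_content_type
        trans.response.set_content_type("image/svg+xml")
        return self._workflow_to_svg_canvas(trans, stored)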
Example #3
File: workflows.py Project: ARTbio/galaxy
    def _workflow_to_dict_export( self, trans, stored=None, workflow=None ):
        """ Export the workflow contents to a dictionary ready for JSON-ification and export.
        """
        if workflow is None:
            assert stored is not None
            workflow = stored.latest_workflow

        annotation_str = ""
        if stored is not None:
            workflow_annotation = self.get_item_annotation_obj( trans.sa_session, trans.user, stored )
            if workflow_annotation:
                annotation_str = workflow_annotation.annotation
        # Pack workflow data into a dictionary and return
        data = {}
        data['a_galaxy_workflow'] = 'true'  # Placeholder for identifying galaxy workflow
        data['format-version'] = "0.1"
        data['name'] = workflow.name
        data['annotation'] = annotation_str
        if workflow.uuid is not None:
            data['uuid'] = str(workflow.uuid)
        data['steps'] = {}
        # For each step, rebuild the form and encode the state
        for step in workflow.steps:
            # Load from database representation
            module = module_factory.from_workflow_step( trans, step )
            if not module:
                return None
            # Get user annotation.
            step_annotation = self.get_item_annotation_obj( trans.sa_session, trans.user, step )
            annotation_str = ""
            if step_annotation:
                annotation_str = step_annotation.annotation
            content_id = module.get_content_id()
            # Step info
            step_dict = {
                'id': step.order_index,
                'type': module.type,
                'content_id': content_id,
                'tool_id': content_id,  # For workflows exported to older Galaxies,
                                        # eliminate after a few years...
                'tool_version': step.tool_version,
                'name': module.get_name(),
                'tool_state': module.get_state( secure=False ),
                'tool_errors': module.get_errors(),
                'uuid': str(step.uuid),
                'label': step.label or None,
                # 'data_inputs': module.get_data_inputs(),
                # 'data_outputs': module.get_data_outputs(),
                'annotation': annotation_str
            }
            # Add post-job actions to step dict.
            if module.type == 'tool':
                pja_dict = {}
                for pja in step.post_job_actions:
                    pja_dict[pja.action_type + pja.output_name] = dict(
                        action_type=pja.action_type,
                        output_name=pja.output_name,
                        action_arguments=pja.action_arguments )
                step_dict[ 'post_job_actions' ] = pja_dict

            if module.type == 'subworkflow':
                del step_dict['content_id']
                del step_dict['tool_version']
                del step_dict['tool_state']
                del step_dict['tool_errors']
                subworkflow = step.subworkflow
                subworkflow_as_dict = self._workflow_to_dict_export(
                    trans,
                    stored=None,
                    workflow=subworkflow
                )
                step_dict['subworkflow'] = subworkflow_as_dict

            # Data inputs
            step_dict['inputs'] = module.get_runtime_input_dicts( annotation_str )
            # User outputs

            workflow_outputs_dicts = []
            for workflow_output in step.unique_workflow_outputs:
                workflow_output_dict = dict(
                    output_name=workflow_output.output_name,
                    label=workflow_output.label,
                    uuid=str(workflow_output.uuid) if workflow_output.uuid is not None else None,
                )
                workflow_outputs_dicts.append(workflow_output_dict)
            step_dict['workflow_outputs'] = workflow_outputs_dicts

            # All step outputs
            step_dict['outputs'] = []
            if type( module ) is ToolModule:
                for output in module.get_data_outputs():
                    step_dict['outputs'].append( { 'name': output['name'], 'type': output['extensions'][0] } )

            # Connections
            input_connections = step.input_connections
            if step.type is None or step.type == 'tool':
                # Determine full (prefixed) names of valid input datasets
                data_input_names = {}

                def callback( input, value, prefixed_name, prefixed_label ):
                    if isinstance( input, DataToolParameter ) or isinstance( input, DataCollectionToolParameter ):
                        data_input_names[ prefixed_name ] = True
                # FIXME: this updates modules silently right now; messages from updates should be provided.
                module.check_and_update_state()
                visit_input_values( module.tool.inputs, module.state.inputs, callback )
                # Filter
                # FIXME: this removes connections without displaying a message currently!
                input_connections = [ conn for conn in input_connections if (conn.input_name in data_input_names or conn.non_data_connection) ]

            # Encode input connections as dictionary
            input_conn_dict = {}
            unique_input_names = set( [conn.input_name for conn in input_connections] )
            for input_name in unique_input_names:
                input_conn_dicts = []
                for conn in input_connections:
                    if conn.input_name != input_name:
                        continue
                    input_conn = dict(
                        id=conn.output_step.order_index,
                        output_name=conn.output_name
                    )
                    if conn.input_subworkflow_step is not None:
                        subworkflow_step_id = conn.input_subworkflow_step.order_index
                        input_conn["input_subworkflow_step_id"] = subworkflow_step_id

                    input_conn_dicts.append(input_conn)
                input_conn_dict[ input_name ] = input_conn_dicts

            # Preserve backward compatibility. Previously Galaxy
            # assumed input connections would be dictionaries, not
            # lists of dictionaries, so replace any singleton list
            # with just the dictionary so that workflows exported from
            # newer Galaxy instances can be used with older Galaxy
            # instances if they do not include multiple input
            # tools. This should be removed at some point. The mirrored
            # hack in _workflow_from_dict should never be removed so
            # existing workflow exports continue to function.
            for input_name, input_conn in dict(input_conn_dict).items():
                if len(input_conn) == 1:
                    input_conn_dict[input_name] = input_conn[0]
            step_dict['input_connections'] = input_conn_dict
            # Position
            step_dict['position'] = step.position
            # Add to return value
            data['steps'][step.order_index] = step_dict
        return data
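Since the returned dictionary is "ready for JSON-ification", here is a hedged sketch of writing it out as a Galaxy .ga workflow file; the helper name and indent settings are illustrative, and note that this version of the method returns None when a step's module cannot be loaded:

    import json

    def export_workflow_to_ga(self, trans, stored, path='workflow.ga'):
        data = self._workflow_to_dict_export(trans, stored=stored)
        if data is None:
            raise ValueError('workflow contains an unrecognized step type')
        with open(path, 'w') as fh:
            json.dump(data, fh, indent=4, sort_keys=True)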
Example #4
File: workflows.py Project: ARTbio/galaxy
    def _workflow_to_dict_editor(self, trans, stored):
        """
        """
        workflow = stored.latest_workflow
        # Pack workflow data into a dictionary and return
        data = {}
        data['name'] = workflow.name
        data['steps'] = {}
        data['upgrade_messages'] = {}
        # For each step, rebuild the form and encode the state
        for step in workflow.steps:
            # Load from database representation
            module = module_factory.from_workflow_step( trans, step )
            if not module:
                step_annotation = self.get_item_annotation_obj( trans.sa_session, trans.user, step )
                annotation_str = ""
                if step_annotation:
                    annotation_str = step_annotation.annotation
                invalid_tool_form_html = """<div class="toolForm tool-node-error">
                                            <div class="toolFormTitle form-row-error">Unrecognized Tool: %s</div>
                                            <div class="toolFormBody"><div class="form-row">
                                            The tool id '%s' for this tool is unrecognized.<br/><br/>
                                            To save this workflow, you will need to delete this step or enable the tool.
                                            </div></div></div>""" % (step.tool_id, step.tool_id)
                step_dict = {
                    'id': step.order_index,
                    'type': 'invalid',
                    'content_id': step.content_id,
                    'name': 'Unrecognized Tool: %s' % step.tool_id,
                    'tool_state': None,
                    'tooltip': None,
                    'tool_errors': ["Unrecognized Tool Id: %s" % step.tool_id],
                    'data_inputs': [],
                    'data_outputs': [],
                    'form_html': invalid_tool_form_html,
                    'annotation': annotation_str,
                    'input_connections': {},
                    'post_job_actions': {},
                    'uuid': str(step.uuid),
                    'label': step.label or None,
                    'workflow_outputs': []
                }
                # Position
                step_dict['position'] = step.position
                # Add to return value
                data['steps'][step.order_index] = step_dict
                continue
            # Fix any missing parameters
            upgrade_message = module.check_and_update_state()
            if upgrade_message:
                data['upgrade_messages'][step.order_index] = upgrade_message
            if hasattr(module, "version_changes") and module.version_changes:
                if step.order_index in data['upgrade_messages']:
                    data['upgrade_messages'][step.order_index][module.tool.name] = "\n".join(module.version_changes)
                else:
                    data['upgrade_messages'][step.order_index] = {module.tool.name: "\n".join(module.version_changes)}
            # Get user annotation.
            step_annotation = self.get_item_annotation_obj( trans.sa_session, trans.user, step )
            annotation_str = ""
            if step_annotation:
                annotation_str = step_annotation.annotation
            form_html = None
            if trans.history:
                # If in a web session, attach form html. No reason to do
                # so for API requests.
                form_html = module.get_config_form()
            # Pack attributes into plain dictionary
            step_dict = {
                'id': step.order_index,
                'type': module.type,
                'content_id': module.get_content_id(),
                'name': module.get_name(),
                'tool_state': module.get_state(),
                'tooltip': module.get_tooltip( static_path=url_for( '/static' ) ),
                'tool_errors': module.get_errors(),
                'data_inputs': module.get_data_inputs(),
                'data_outputs': module.get_data_outputs(),
                'form_html': form_html,
                'annotation': annotation_str,
                'post_job_actions': {},
                'uuid': str(step.uuid) if step.uuid else None,
                'label': step.label or None,
                'workflow_outputs': []
            }
            # Connections
            input_connections = step.input_connections
            input_connections_type = {}
            multiple_input = {}  # Boolean value indicating if this input can be multiple
            if step.type is None or step.type == 'tool':
                # Determine full (prefixed) names of valid input datasets
                data_input_names = {}

                def callback( input, value, prefixed_name, prefixed_label ):
                    if isinstance( input, DataToolParameter ) or isinstance( input, DataCollectionToolParameter ):
                        data_input_names[ prefixed_name ] = True
                        multiple_input[ prefixed_name ] = input.multiple
                        if isinstance( input, DataToolParameter ):
                            input_connections_type[ input.name ] = "dataset"
                        if isinstance( input, DataCollectionToolParameter ):
                            input_connections_type[ input.name ] = "dataset_collection"
                visit_input_values( module.tool.inputs, module.state.inputs, callback )
                # Filter
                # FIXME: this removes connections without displaying a message currently!
                input_connections = [ conn for conn in input_connections if conn.input_name in data_input_names ]
                # post_job_actions
                pja_dict = {}
                for pja in step.post_job_actions:
                    pja_dict[pja.action_type + pja.output_name] = dict(
                        action_type=pja.action_type,
                        output_name=pja.output_name,
                        action_arguments=pja.action_arguments
                    )
                step_dict['post_job_actions'] = pja_dict

            # workflow outputs
            outputs = []
            for output in step.unique_workflow_outputs:
                output_label = output.label
                output_name = output.output_name
                output_uuid = str(output.uuid) if output.uuid else None
                outputs.append({"output_name": output_name,
                                "uuid": output_uuid,
                                "label": output_label})
            step_dict['workflow_outputs'] = outputs

            # Encode input connections as dictionary
            input_conn_dict = {}
            for conn in input_connections:
                input_type = "dataset"
                if conn.input_name in input_connections_type:
                    input_type = input_connections_type[ conn.input_name ]
                conn_dict = dict( id=conn.output_step.order_index, output_name=conn.output_name, input_type=input_type )
                if conn.input_name in multiple_input:
                    if conn.input_name in input_conn_dict:
                        input_conn_dict[ conn.input_name ].append( conn_dict )
                    else:
                        input_conn_dict[ conn.input_name ] = [ conn_dict ]
                else:
                    input_conn_dict[ conn.input_name ] = conn_dict
            step_dict['input_connections'] = input_conn_dict
            # Position
            step_dict['position'] = step.position
            # Add to return value
            data['steps'][step.order_index] = step_dict
        return data
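For reference, a sketch of the input_connections shape the encoding loop above produces: parameters flagged as multiple get a list of connection dicts, everything else a plain dict (all ids and names below are illustrative):

    example_input_connections = {
        # regular data parameter -> single connection dict
        'input1': {'id': 0, 'output_name': 'output', 'input_type': 'dataset'},
        # parameter with multiple=True -> list of connection dicts
        'queries': [
            {'id': 0, 'output_name': 'output', 'input_type': 'dataset'},
            {'id': 1, 'output_name': 'output', 'input_type': 'dataset'},
        ],
    }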
Example #5
    def _execute_workflow(self, sample):
        for key, value in sample.workflow['mappings'].items():
            if 'hda' not in value and 'ldda' in value:
                # If an HDA is already present it is an external input, so there is nothing to copy.
                ldda = self.sa_session.query(self.app.model.LibraryDatasetDatasetAssociation).get(value['ldda'])
                if ldda.dataset.state in ['new', 'upload', 'queued', 'running', 'empty', 'discarded']:
                    log.error("Cannot import dataset '%s' to user history since its state is '%s'.  " % (ldda.name, ldda.dataset.state))
                elif ldda.dataset.state in ['ok', 'error']:
                    hda = ldda.to_history_dataset_association(target_history=sample.history, add_to_history=True)
                    sample.workflow['mappings'][key]['hda'] = hda.id
                    self.sa_session.add(sample)
                    self.sa_session.flush()
        workflow_dict = sample.workflow
        import copy
        new_wf_dict = copy.deepcopy(workflow_dict)
        for key in workflow_dict['mappings']:
            if not isinstance(key, int):
                new_wf_dict['mappings'][int(key)] = workflow_dict['mappings'][key]
        workflow_dict = new_wf_dict
        fk_trans = FakeTrans(self.app, history=sample.history, user=sample.request.user)
        workflow = self.sa_session.query(self.app.model.Workflow).get(workflow_dict['id'])
        if not workflow:
            log.error("Workflow mapping failure.")
            return
        if len(workflow.steps) == 0:
            log.error("Workflow cannot be run because it does not have any steps")
            return
        if workflow.has_cycles:
            log.error("Workflow cannot be run because it contains cycles")
            return
        if workflow.has_errors:
            log.error("Workflow cannot be run because of validation errors in some steps")
            return
        # Build the state for each step
        errors = {}
        # Build a fake dictionary prior to execution.
        # Prepare each step
        for step in workflow.steps:
            step.upgrade_messages = {}
            # Construct modules
            if step.type == 'tool' or step.type is None:
                # Restore the tool state for the step
                step.module = module_factory.from_workflow_step(fk_trans, step)
                # Fix any missing parameters
                step.upgrade_messages = step.module.check_and_update_state()
                # Any connected input needs to have value DummyDataset (these
                # are not persisted so we need to do it every time)
                step.module.add_dummy_datasets(connections=step.input_connections)
                # Store state with the step
                step.state = step.module.state
                # Error dict
                if step.tool_errors:
                    errors[step.id] = step.tool_errors
            else:
                # Non-tool steps (e.g. data inputs): build runtime state instead
                step.module = module_factory.from_workflow_step(fk_trans, step)
                step.state = step.module.get_runtime_state()
            # Connections by input name
            step.input_connections_by_name = dict((conn.input_name, conn) for conn in step.input_connections)
        for step in workflow.steps:
            step.upgrade_messages = {}
            # Connections by input name
            step.input_connections_by_name = \
                dict((conn.input_name, conn) for conn in step.input_connections)
            # Extract just the arguments for this step by prefix
            step_errors = None
            if step.type == 'tool' or step.type is None:
                module = module_factory.from_workflow_step(fk_trans, step)
                # Fix any missing parameters
                step.upgrade_messages = module.check_and_update_state()
                # Any connected input needs to have value DummyDataset (these
                # are not persisted so we need to do it every time)
                module.add_dummy_datasets(connections=step.input_connections)
                # Get the tool
                tool = module.tool
                # Get the state
                step.state = state = module.state
            if step_errors:
                errors[step.id] = state.inputs["__errors__"] = step_errors
        # Run each step, connecting outputs to inputs
        workflow_invocation = self.app.model.WorkflowInvocation()
        workflow_invocation.workflow = workflow
        outputs = odict()
        for i, step in enumerate(workflow.steps):
            job = None
            if step.type == 'tool' or step.type is None:
                tool = self.app.toolbox.get_tool(step.tool_id)

                def callback(input, prefixed_name, **kwargs):
                    if isinstance(input, DataToolParameter):
                        if prefixed_name in step.input_connections_by_name:
                            conn = step.input_connections_by_name[prefixed_name]
                            return outputs[conn.output_step.id][conn.output_name]
                visit_input_values(tool.inputs, step.state.inputs, callback)
                job, out_data = tool.execute(fk_trans, step.state.inputs, history=sample.history)
                outputs[step.id] = out_data
                for pja in step.post_job_actions:
                    if pja.action_type in ActionBox.immediate_actions:
                        ActionBox.execute(self.app, self.sa_session, pja, job, replacement_dict=None)
                    else:
                        job.add_post_job_action(pja)
            else:
                job, out_data = step.module.execute(fk_trans, step.state)
                outputs[step.id] = out_data
                if step.id in workflow_dict['mappings']:
                    # mapping keys were normalized to ints above, so index with the int step id
                    data = self.sa_session.query(self.app.model.HistoryDatasetAssociation).get(workflow_dict['mappings'][step.id]['hda'])
                    outputs[step.id]['output'] = data
            workflow_invocation_step = self.app.model.WorkflowInvocationStep()
            workflow_invocation_step.workflow_invocation = workflow_invocation
            workflow_invocation_step.workflow_step = step
            workflow_invocation_step.job = job
        self.sa_session.add(workflow_invocation)
        self.sa_session.flush()
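The int()-normalization of mapping keys near the top of the method exists because JSON object keys are always strings; a self-contained illustration:

    import json

    mappings = {1: {'ldda': 7}}
    round_tripped = json.loads(json.dumps(mappings))
    assert list(round_tripped) == ['1']   # keys come back as strings
    normalized = {int(k): v for k, v in round_tripped.items()}
    assert list(normalized) == [1]        # usable with integer step ids again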
Example #6
    def create(self, trans, payload, **kwd):
        """
        POST /api/workflows

        Workflows are not created through the API yet; for now this endpoint
        only executes an existing workflow.

        However, a workflow will be imported if installed_repository_file
        is specified.
        """

        # ------------------------------------------------------------------------------- #
        ### RPARK: dictionary of tool parameter overrides to apply before execution ###
        param_map = {}
        if "parameters" in payload:
            param_map = payload["parameters"]
        # ------------------------------------------------------------------------------- #

        if "workflow_id" not in payload:
            # create new
            if "installed_repository_file" in payload:
                workflow_controller = trans.webapp.controllers["workflow"]
                result = workflow_controller.import_workflow(trans=trans, cntrller="api", **payload)
                return result
            trans.response.status = 403
            return "Either workflow_id or installed_repository_file must be specified"
        if "installed_repository_file" in payload:
            trans.response.status = 403
            return "installed_repository_file may not be specified with workflow_id"
        stored_workflow = trans.sa_session.query(self.app.model.StoredWorkflow).get(
            trans.security.decode_id(payload["workflow_id"])
        )
        if stored_workflow.user != trans.user and not trans.user_is_admin():
            if (
                trans.sa_session.query(trans.app.model.StoredWorkflowUserShareAssociation)
                .filter_by(user=trans.user, stored_workflow=stored_workflow)
                .count()
                == 0
            ):
                trans.response.status = 400
                return "Workflow is not owned by or shared with current user"
        workflow = stored_workflow.latest_workflow
        if payload["history"].startswith("hist_id="):
            # Passing an existing history to use.
            history = trans.sa_session.query(self.app.model.History).get(
                trans.security.decode_id(payload["history"][8:])
            )
            if history.user != trans.user and not trans.user_is_admin():
                trans.response.status = 400
                return "Invalid History specified."
        else:
            history = self.app.model.History(name=payload["history"], user=trans.user)
            trans.sa_session.add(history)
            trans.sa_session.flush()
        ds_map = payload["ds_map"]
        add_to_history = "no_add_to_history" not in payload
        for k in ds_map:
            try:
                if ds_map[k]["src"] == "ldda":
                    ldda = trans.sa_session.query(self.app.model.LibraryDatasetDatasetAssociation).get(
                        trans.security.decode_id(ds_map[k]["id"])
                    )
                    assert trans.user_is_admin() or trans.app.security_agent.can_access_dataset(
                        trans.get_current_user_roles(), ldda.dataset
                    )
                    hda = ldda.to_history_dataset_association(history, add_to_history=add_to_history)
                elif ds_map[k]["src"] == "ld":
                    ldda = (
                        trans.sa_session.query(self.app.model.LibraryDataset)
                        .get(trans.security.decode_id(ds_map[k]["id"]))
                        .library_dataset_dataset_association
                    )
                    assert trans.user_is_admin() or trans.app.security_agent.can_access_dataset(
                        trans.get_current_user_roles(), ldda.dataset
                    )
                    hda = ldda.to_history_dataset_association(history, add_to_history=add_to_history)
                elif ds_map[k]["src"] == "hda":
                    # Get dataset handle, add to dict and history if necessary
                    hda = trans.sa_session.query(self.app.model.HistoryDatasetAssociation).get(
                        trans.security.decode_id(ds_map[k]["id"])
                    )
                    assert trans.user_is_admin() or trans.app.security_agent.can_access_dataset(
                        trans.get_current_user_roles(), hda.dataset
                    )
                else:
                    trans.response.status = 400
                    return "Unknown dataset source '%s' specified." % ds_map[k]["src"]
                if add_to_history and hda.history != history:
                    hda = hda.copy()
                    history.add_dataset(hda)
                ds_map[k]["hda"] = hda
            except AssertionError:
                trans.response.status = 400
                return "Invalid Dataset '%s' Specified" % ds_map[k]["id"]
        if not workflow:
            trans.response.status = 400
            return "Workflow not found."
        if len(workflow.steps) == 0:
            trans.response.status = 400
            return "Workflow cannot be run because it does not have any steps"
        if workflow.has_cycles:
            trans.response.status = 400
            return "Workflow cannot be run because it contains cycles"
        if workflow.has_errors:
            trans.response.status = 400
            return "Workflow cannot be run because of validation errors in some steps"
        # Build the state for each step
        rval = {}
        for step in workflow.steps:
            step_errors = None
            if step.type == "tool" or step.type is None:
                step.module = module_factory.from_workflow_step(trans, step)
                # Check for missing parameters
                step.upgrade_messages = step.module.check_and_update_state()
                # Any connected input needs to have value DummyDataset (these
                # are not persisted so we need to do it every time)
                step.module.add_dummy_datasets(connections=step.input_connections)
                step.state = step.module.state

                ####################################################
                ####################################################
                # RPARK: IF TOOL_NAME IN PARAMETER MAP #
                if step.tool_id in param_map:
                    change_param = param_map[step.tool_id]["param"]
                    change_value = param_map[step.tool_id]["value"]
                    step.state.inputs[change_param] = change_value
                ####################################################
                ####################################################

                if step.tool_errors:
                    trans.response.status = 400
                    return "Workflow cannot be run because of validation errors in some steps: %s" % step_errors
                if step.upgrade_messages:
                    trans.response.status = 400
                    return "Workflow cannot be run because of step upgrade messages: %s" % step.upgrade_messages
            else:
                # This is an input step.  Make sure we have an available input.
                if step.type == "data_input" and str(step.id) not in ds_map:
                    trans.response.status = 400
                    return "Workflow cannot be run because an expected input step '%s' has no input dataset." % step.id
                step.module = module_factory.from_workflow_step(trans, step)
                step.state = step.module.get_runtime_state()
            step.input_connections_by_name = dict((conn.input_name, conn) for conn in step.input_connections)
        # Run each step, connecting outputs to inputs
        workflow_invocation = self.app.model.WorkflowInvocation()
        workflow_invocation.workflow = workflow
        outputs = util.odict.odict()
        rval["history"] = trans.security.encode_id(history.id)
        rval["outputs"] = []
        for i, step in enumerate(workflow.steps):
            job = None
            if step.type == "tool" or step.type is None:
                tool = self.app.toolbox.get_tool(step.tool_id)

                def callback(input, value, prefixed_name, prefixed_label):
                    if isinstance(input, DataToolParameter):
                        if prefixed_name in step.input_connections_by_name:
                            conn = step.input_connections_by_name[prefixed_name]
                            return outputs[conn.output_step.id][conn.output_name]

                visit_input_values(tool.inputs, step.state.inputs, callback)
                job, out_data = tool.execute(trans, step.state.inputs, history=history)
                outputs[step.id] = out_data
                for pja in step.post_job_actions:
                    if pja.action_type in ActionBox.immediate_actions:
                        ActionBox.execute(self.app, trans.sa_session, pja, job, replacement_dict=None)
                    else:
                        job.add_post_job_action(pja)
                for v in out_data.values():
                    rval["outputs"].append(trans.security.encode_id(v.id))
            else:
                # This is an input step.  Use the dataset inputs from ds_map.
                job, out_data = step.module.execute(trans, step.state)
                outputs[step.id] = out_data
                outputs[step.id]["output"] = ds_map[str(step.id)]["hda"]
            workflow_invocation_step = self.app.model.WorkflowInvocationStep()
            workflow_invocation_step.workflow_invocation = workflow_invocation
            workflow_invocation_step.workflow_step = step
            workflow_invocation_step.job = job
        trans.sa_session.add(workflow_invocation)
        trans.sa_session.flush()
        return rval
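Reconstructed from the handler above, a hedged sketch of the JSON payload this endpoint expects; all encoded ids are made up, and 'src' must be one of 'ldda', 'ld', or 'hda':

    payload = {
        'workflow_id': 'ebfb8f50c6abde6d',        # encoded StoredWorkflow id
        'history': 'hist_id=df7a1f0c02a5b08e',    # or a plain name to create a new history
        'ds_map': {
            # keyed by workflow step id
            '1': {'src': 'hda', 'id': 'f2db41e1fa331b3e'},
        },
        # optional RPARK-style overrides, keyed by tool id (names illustrative)
        'parameters': {
            'some_tool_id': {'param': 'param_name', 'value': 'new value'},
        },
    }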
Example #7
    def _workflow_to_dict(self, trans, stored):
        """
        RPARK: copied from galaxy.web.controllers.workflows.py
        Converts a workflow to a dict of attributes suitable for exporting.
        """
        workflow = stored.latest_workflow

        ### ----------------------------------- ###
        ## RPARK EDIT ##
        workflow_annotation = self.get_item_annotation_obj(trans.sa_session, trans.user, stored)
        annotation_str = ""
        if workflow_annotation:
            annotation_str = workflow_annotation.annotation
        ### ----------------------------------- ###

        # Pack workflow data into a dictionary and return
        data = {}
        data["a_galaxy_workflow"] = "true"  # Placeholder for identifying galaxy workflow
        data["format-version"] = "0.1"
        data["name"] = workflow.name
        ### ----------------------------------- ###
        ## RPARK EDIT ##
        data["annotation"] = annotation_str
        ### ----------------------------------- ###

        data["steps"] = {}
        # For each step, rebuild the form and encode the state
        for step in workflow.steps:
            # Load from database representation
            module = module_factory.from_workflow_step(trans, step)

            ### ----------------------------------- ###
            ## RPARK EDIT ##

            # TODO: This is duplicated from
            # lib/galaxy/webapps/controllers/workflow.py -- refactor and
            # eliminate copied code.

            # Get user annotation.
            step_annotation = self.get_item_annotation_obj(trans.sa_session, trans.user, step)
            annotation_str = ""
            if step_annotation:
                annotation_str = step_annotation.annotation
            ### ----------------------------------- ###

            # Step info
            step_dict = {
                "id": step.order_index,
                "type": module.type,
                "tool_id": module.get_tool_id(),
                "tool_version": step.tool_version,
                "name": module.get_name(),
                "tool_state": module.get_state(secure=False),
                "tool_errors": module.get_errors(),
                ## 'data_inputs': module.get_data_inputs(),
                ## 'data_outputs': module.get_data_outputs(),
                ### ----------------------------------- ###
                ## RPARK EDIT ##
                "annotation": annotation_str
                ### ----------------------------------- ###
            }
            # Add post-job actions to step dict.
            if module.type == "tool":
                pja_dict = {}
                for pja in step.post_job_actions:
                    pja_dict[pja.action_type + pja.output_name] = dict(
                        action_type=pja.action_type, output_name=pja.output_name, action_arguments=pja.action_arguments
                    )
                step_dict["post_job_actions"] = pja_dict
            # Data inputs
            step_dict["inputs"] = []
            if module.type == "data_input":
                # Get input dataset name; default to 'Input Dataset'
                name = module.state.get("name", "Input Dataset")
                step_dict["inputs"].append({"name": name, "description": annotation_str})
            else:
                # Step is a tool and may have runtime inputs.
                for name, val in module.state.inputs.items():
                    input_type = type(val)
                    if input_type == RuntimeValue:
                        step_dict["inputs"].append(
                            {"name": name, "description": "runtime parameter for tool %s" % module.get_name()}
                        )
                    elif input_type == dict:
                        # Input type is described by a dict, e.g. indexed parameters.
                        for partname, partval in val.items():
                            if type(partval) == RuntimeValue:
                                step_dict["inputs"].append(
                                    {"name": name, "description": "runtime parameter for tool %s" % module.get_name()}
                                )
            # User outputs
            step_dict["user_outputs"] = []
            """
            module_outputs = module.get_data_outputs()
            step_outputs = trans.sa_session.query( WorkflowOutput ).filter( step=step )
            for output in step_outputs:
                name = output.output_name
                annotation = ""
                for module_output in module_outputs:
                    if module_output.get( 'name', None ) == name:
                        output_type = module_output.get( 'extension', '' )
                        break
                data['outputs'][name] = { 'name' : name, 'annotation' : annotation, 'type' : output_type }
            """

            # All step outputs
            step_dict["outputs"] = []
            if type(module) is ToolModule:
                for output in module.get_data_outputs():
                    step_dict["outputs"].append({"name": output["name"], "type": output["extensions"][0]})
            # Connections
            input_connections = step.input_connections
            if step.type is None or step.type == "tool":
                # Determine full (prefixed) names of valid input datasets
                data_input_names = {}

                def callback(input, value, prefixed_name, prefixed_label):
                    if isinstance(input, DataToolParameter):
                        data_input_names[prefixed_name] = True

                visit_input_values(module.tool.inputs, module.state.inputs, callback)
                # Filter
                # FIXME: this removes connections without displaying a message currently!
                input_connections = [conn for conn in input_connections if conn.input_name in data_input_names]
            # Encode input connections as dictionary
            input_conn_dict = {}
            for conn in input_connections:
                input_conn_dict[conn.input_name] = dict(id=conn.output_step.order_index, output_name=conn.output_name)
            step_dict["input_connections"] = input_conn_dict
            # Position
            step_dict["position"] = step.position
            # Add to return value
            data["steps"][step.order_index] = step_dict
        return data
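The runtime-input scan in the middle of the method follows a reusable pattern; here is a self-contained toy version, where the local RuntimeValue class merely stands in for galaxy.tools.parameters.basic.RuntimeValue:

    class RuntimeValue(object):
        pass

    state_inputs = {
        'reference': RuntimeValue(),
        'options': {'depth': RuntimeValue(), 'mode': 'fast'},
    }
    runtime_inputs = []
    for name, val in state_inputs.items():
        if type(val) is RuntimeValue:
            runtime_inputs.append(name)
        elif type(val) is dict and any(type(p) is RuntimeValue for p in val.values()):
            # nested dicts (e.g. indexed parameters) are scanned one level deep
            runtime_inputs.append(name)
    # runtime_inputs now lists both the top-level and the nested runtime parameters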
Example #8
    def _workflow_to_dict_export(self, trans, stored=None, workflow=None):
        """ Export the workflow contents to a dictionary ready for JSON-ification and export.
        """
        if workflow is None:
            assert stored is not None
            workflow = stored.latest_workflow

        annotation_str = ""
        tag_str = ""
        if stored is not None:
            annotation_str = self.get_item_annotation_str(trans.sa_session, trans.user, stored) or ''
            tag_str = stored.make_tag_string_list()
        # Pack workflow data into a dictionary and return
        data = {}
        data['a_galaxy_workflow'] = 'true'  # Placeholder for identifying galaxy workflow
        data['format-version'] = "0.1"
        data['name'] = workflow.name
        data['annotation'] = annotation_str
        data['tags'] = tag_str
        if workflow.uuid is not None:
            data['uuid'] = str(workflow.uuid)
        data['steps'] = {}
        # For each step, rebuild the form and encode the state
        for step in workflow.steps:
            # Load from database representation
            module = module_factory.from_workflow_step(trans, step)
            if not module:
                raise exceptions.MessageException('Unrecognized step type: %s' % step.type)
            # Get user annotation.
            annotation_str = self.get_item_annotation_str(trans.sa_session, trans.user, step) or ''
            content_id = module.get_content_id()
            # Export differences for backward compatibility
            if module.type == 'tool':
                tool_state = module.get_state(nested=False)
            else:
                tool_state = module.state.inputs
            # Step info
            step_dict = {
                'id': step.order_index,
                'type': module.type,
                'content_id': content_id,
                'tool_id': content_id,  # For workflows exported to older Galaxies,
                                        # eliminate after a few years...
                'tool_version': step.tool_version,
                'name': module.get_name(),
                'tool_state': json.dumps(tool_state),
                'errors': module.get_errors(),
                'uuid': str(step.uuid),
                'label': step.label or None,
                'annotation': annotation_str
            }
            # Add tool shed repository information and post-job actions to step dict.
            if module.type == 'tool':
                if module.tool and module.tool.tool_shed:
                    step_dict["tool_shed_repository"] = {
                        'name': module.tool.repository_name,
                        'owner': module.tool.repository_owner,
                        'changeset_revision': module.tool.changeset_revision,
                        'tool_shed': module.tool.tool_shed
                    }
                pja_dict = {}
                for pja in step.post_job_actions:
                    pja_dict[pja.action_type + pja.output_name] = dict(
                        action_type=pja.action_type,
                        output_name=pja.output_name,
                        action_arguments=pja.action_arguments)
                step_dict['post_job_actions'] = pja_dict

            if module.type == 'subworkflow':
                del step_dict['content_id']
                del step_dict['errors']
                del step_dict['tool_version']
                del step_dict['tool_state']
                subworkflow = step.subworkflow
                subworkflow_as_dict = self._workflow_to_dict_export(
                    trans,
                    stored=None,
                    workflow=subworkflow
                )
                step_dict['subworkflow'] = subworkflow_as_dict

            # Data inputs, legacy section not used anywhere within core
            input_dicts = []
            step_state = module.state.inputs or {}
            if "name" in step_state and module.type != 'tool':
                name = step_state.get("name")
                input_dicts.append({"name": name, "description": annotation_str})
            for name, val in step_state.items():
                input_type = type(val)
                if input_type == RuntimeValue:
                    input_dicts.append({"name": name, "description": "runtime parameter for tool %s" % module.get_name()})
                elif input_type == dict:
                    # Input type is described by a dict, e.g. indexed parameters.
                    for partval in val.values():
                        if type(partval) == RuntimeValue:
                            input_dicts.append({"name": name, "description": "runtime parameter for tool %s" % module.get_name()})
            step_dict['inputs'] = input_dicts

            # User outputs
            workflow_outputs_dicts = []
            for workflow_output in step.unique_workflow_outputs:
                workflow_output_dict = dict(
                    output_name=workflow_output.output_name,
                    label=workflow_output.label,
                    uuid=str(workflow_output.uuid) if workflow_output.uuid is not None else None,
                )
                workflow_outputs_dicts.append(workflow_output_dict)
            step_dict['workflow_outputs'] = workflow_outputs_dicts

            # All step outputs
            step_dict['outputs'] = []
            if type(module) is ToolModule:
                for output in module.get_data_outputs():
                    step_dict['outputs'].append({'name': output['name'], 'type': output['extensions'][0]})

            # Connections
            input_connections = step.input_connections
            if step.type is None or step.type == 'tool':
                # Determine full (prefixed) names of valid input datasets
                data_input_names = {}

                def callback(input, prefixed_name, **kwargs):
                    if isinstance(input, DataToolParameter) or isinstance(input, DataCollectionToolParameter):
                        data_input_names[prefixed_name] = True
                # FIXME: this updates modules silently right now; messages from updates should be provided.
                module.check_and_update_state()
                if module.tool:
                    # If the tool is installed we attempt to verify input values
                    # and connections, otherwise the last known state will be dumped without modifications.
                    visit_input_values(module.tool.inputs, module.state.inputs, callback)
                    # FIXME: this removes connections without displaying a message currently!
                    input_connections = [conn for conn in input_connections if (conn.input_name in data_input_names or conn.non_data_connection)]

            # Encode input connections as dictionary
            input_conn_dict = {}
            unique_input_names = set([conn.input_name for conn in input_connections])
            for input_name in unique_input_names:
                input_conn_dicts = []
                for conn in input_connections:
                    if conn.input_name != input_name:
                        continue
                    input_conn = dict(
                        id=conn.output_step.order_index,
                        output_name=conn.output_name
                    )
                    if conn.input_subworkflow_step is not None:
                        subworkflow_step_id = conn.input_subworkflow_step.order_index
                        input_conn["input_subworkflow_step_id"] = subworkflow_step_id

                    input_conn_dicts.append(input_conn)
                input_conn_dict[input_name] = input_conn_dicts

            # Preserve backward compatibility. Previously Galaxy
            # assumed input connections would be dictionaries, not
            # lists of dictionaries, so replace any singleton list
            # with just the dictionary so that workflows exported from
            # newer Galaxy instances can be used with older Galaxy
            # instances if they do not include multiple input
            # tools. This should be removed at some point. The mirrored
            # hack in _workflow_from_dict should never be removed so
            # existing workflow exports continue to function.
            for input_name, input_conn in dict(input_conn_dict).items():
                if len(input_conn) == 1:
                    input_conn_dict[input_name] = input_conn[0]
            step_dict['input_connections'] = input_conn_dict
            # Position
            step_dict['position'] = step.position
            # Add to return value
            data['steps'][step.order_index] = step_dict
        return data
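A small illustration of the backward-compatibility collapse performed just above: singleton connection lists are flattened back to plain dicts so older Galaxy releases can import the export, while genuine multi-input connections stay as lists (values illustrative):

    input_conn_dict = {
        'input1': [{'id': 0, 'output_name': 'output'}],
        'queries': [{'id': 0, 'output_name': 'out1'}, {'id': 1, 'output_name': 'out2'}],
    }
    for input_name, input_conn in dict(input_conn_dict).items():
        if len(input_conn) == 1:
            input_conn_dict[input_name] = input_conn[0]
    # 'input1' is now a plain dict; 'queries' remains a list of two dicts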
Example #9
    def _workflow_to_dict_editor(self, trans, stored):
        workflow = stored.latest_workflow
        # Pack workflow data into a dictionary and return
        data = {}
        data['name'] = workflow.name
        data['steps'] = {}
        data['upgrade_messages'] = {}
        # For each step, rebuild the form and encode the state
        for step in workflow.steps:
            # Load from database representation
            module = module_factory.from_workflow_step(trans, step, exact_tools=False)
            if not module:
                raise exceptions.MessageException('Unrecognized step type: %s' % step.type)
            # Load label from state of data input modules, necessary for backward compatibility
            self.__set_default_label(step, module, step.tool_inputs)
            # Fix any missing parameters
            upgrade_message = module.check_and_update_state()
            if upgrade_message:
                data['upgrade_messages'][step.order_index] = upgrade_message
            if hasattr(module, "version_changes") and module.version_changes:
                if step.order_index in data['upgrade_messages']:
                    data['upgrade_messages'][step.order_index][module.tool.name] = "\n".join(module.version_changes)
                else:
                    data['upgrade_messages'][step.order_index] = {module.tool.name: "\n".join(module.version_changes)}
            # Get user annotation.
            annotation_str = self.get_item_annotation_str(trans.sa_session, trans.user, step) or ''
            config_form = module.get_config_form()
            # Pack attributes into plain dictionary
            step_dict = {
                'id': step.order_index,
                'type': module.type,
                'label': module.label,
                'content_id': module.get_content_id(),
                'name': module.get_name(),
                'tool_state': module.get_state(),
                'tooltip': module.get_tooltip(static_path=url_for('/static')),
                'errors': module.get_errors(),
                'data_inputs': module.get_data_inputs(),
                'data_outputs': module.get_data_outputs(),
                'config_form': config_form,
                'annotation': annotation_str,
                'post_job_actions': {},
                'uuid': str(step.uuid) if step.uuid else None,
                'workflow_outputs': []
            }
            # Connections
            input_connections = step.input_connections
            input_connections_type = {}
            multiple_input = {}  # Boolean value indicating if this input can be multiple
            if (step.type is None or step.type == 'tool') and module.tool:
                # Determine full (prefixed) names of valid input datasets
                data_input_names = {}

                def callback(input, prefixed_name, **kwargs):
                    if isinstance(input, DataToolParameter) or isinstance(input, DataCollectionToolParameter):
                        data_input_names[prefixed_name] = True
                        multiple_input[prefixed_name] = input.multiple
                        if isinstance(input, DataToolParameter):
                            input_connections_type[input.name] = "dataset"
                        if isinstance(input, DataCollectionToolParameter):
                            input_connections_type[input.name] = "dataset_collection"
                visit_input_values(module.tool.inputs, module.state.inputs, callback)
                # Filter
                # FIXME: this removes connections without displaying a message currently!
                input_connections = [conn for conn in input_connections if conn.input_name in data_input_names]
                # post_job_actions
                pja_dict = {}
                for pja in step.post_job_actions:
                    pja_dict[pja.action_type + pja.output_name] = dict(
                        action_type=pja.action_type,
                        output_name=pja.output_name,
                        action_arguments=pja.action_arguments
                    )
                step_dict['post_job_actions'] = pja_dict

            # workflow outputs
            outputs = []
            for output in step.unique_workflow_outputs:
                output_label = output.label
                output_name = output.output_name
                output_uuid = str(output.uuid) if output.uuid else None
                outputs.append({"output_name": output_name,
                                "uuid": output_uuid,
                                "label": output_label})
            step_dict['workflow_outputs'] = outputs

            # Encode input connections as dictionary
            input_conn_dict = {}
            for conn in input_connections:
                input_type = "dataset"
                if conn.input_name in input_connections_type:
                    input_type = input_connections_type[conn.input_name]
                conn_dict = dict(id=conn.output_step.order_index, output_name=conn.output_name, input_type=input_type)
                if conn.input_name in multiple_input:
                    if conn.input_name in input_conn_dict:
                        input_conn_dict[conn.input_name].append(conn_dict)
                    else:
                        input_conn_dict[conn.input_name] = [conn_dict]
                else:
                    input_conn_dict[conn.input_name] = conn_dict
            step_dict['input_connections'] = input_conn_dict

            # Position
            step_dict['position'] = step.position
            # Add to return value
            data['steps'][step.order_index] = step_dict
        return data
Example #10
    def _workflow_to_dict(self, trans, stored):
        """
        RPARK: copied from galaxy.web.controllers.workflows.py
        Converts a workflow to a dict of attributes suitable for exporting.
        """
        workflow = stored.latest_workflow

        ### ----------------------------------- ###
        ## RPARK EDIT ##
        workflow_annotation = self.get_item_annotation_obj(
            trans.sa_session, trans.user, stored)
        annotation_str = ""
        if workflow_annotation:
            annotation_str = workflow_annotation.annotation
        ### ----------------------------------- ###

        # Pack workflow data into a dictionary and return
        data = {}
        data['a_galaxy_workflow'] = 'true'  # Placeholder for identifying galaxy workflow
        data['format-version'] = "0.1"
        data['name'] = workflow.name
        ### ----------------------------------- ###
        ## RPARK EDIT ##
        data['annotation'] = annotation_str
        ### ----------------------------------- ###

        data['steps'] = {}
        # For each step, rebuild the form and encode the state
        for step in workflow.steps:
            # Load from database representation
            module = module_factory.from_workflow_step(trans, step)

            ### ----------------------------------- ###
            ## RPARK EDIT ##
            # Get user annotation.
            step_annotation = self.get_item_annotation_obj(
                trans.sa_session, trans.user, step)
            annotation_str = ""
            if step_annotation:
                annotation_str = step_annotation.annotation
            ### ----------------------------------- ###

            # Step info
            step_dict = {
                'id': step.order_index,
                'type': module.type,
                'tool_id': module.get_tool_id(),
                'tool_version': step.tool_version,
                'name': module.get_name(),
                'tool_state': module.get_state(secure=False),
                'tool_errors': module.get_errors(),
                ## 'data_inputs': module.get_data_inputs(),
                ## 'data_outputs': module.get_data_outputs(),

                ### ----------------------------------- ###
                ## RPARK EDIT ##
                'annotation': annotation_str
                ### ----------------------------------- ###
            }
            # Add post-job actions to step dict.
            if module.type == 'tool':
                pja_dict = {}
                for pja in step.post_job_actions:
                    pja_dict[pja.action_type + pja.output_name] = dict(
                        action_type=pja.action_type,
                        output_name=pja.output_name,
                        action_arguments=pja.action_arguments)
                step_dict['post_job_actions'] = pja_dict
            # Data inputs
            step_dict['inputs'] = []
            if module.type == "data_input":
                # Get input dataset name; default to 'Input Dataset'
                name = module.state.get('name', 'Input Dataset')
                step_dict['inputs'].append({
                    "name": name,
                    "description": annotation_str
                })
            else:
                # Step is a tool and may have runtime inputs.
                for name, val in module.state.inputs.items():
                    input_type = type(val)
                    if input_type == RuntimeValue:
                        step_dict['inputs'].append({
                            "name": name,
                            "description": "runtime parameter for tool %s" % module.get_name()
                        })
                    elif input_type == dict:
                        # Input type is described by a dict, e.g. indexed parameters.
                        for partname, partval in val.items():
                            if type(partval) == RuntimeValue:
                                step_dict['inputs'].append({
                                    "name": name,
                                    "description": "runtime parameter for tool %s" % module.get_name()
                                })
            # User outputs
            step_dict['user_outputs'] = []
            """
            module_outputs = module.get_data_outputs()
            step_outputs = trans.sa_session.query( WorkflowOutput ).filter( step=step )
            for output in step_outputs:
                name = output.output_name
                annotation = ""
                for module_output in module_outputs:
                    if module_output.get( 'name', None ) == name:
                        output_type = module_output.get( 'extension', '' )
                        break
                data['outputs'][name] = { 'name' : name, 'annotation' : annotation, 'type' : output_type }
            """

            # All step outputs
            step_dict['outputs'] = []
            if type(module) is ToolModule:
                for output in module.get_data_outputs():
                    step_dict['outputs'].append({
                        'name': output['name'],
                        'type': output['extensions'][0]
                    })
            # Connections
            input_connections = step.input_connections
            if step.type is None or step.type == 'tool':
                # Determine full (prefixed) names of valid input datasets
                data_input_names = {}

                def callback(input, value, prefixed_name, prefixed_label):
                    if isinstance(input, DataToolParameter):
                        data_input_names[prefixed_name] = True

                visit_input_values(module.tool.inputs, module.state.inputs,
                                   callback)
                # Filter
                # FIXME: this removes connection without displaying a message currently!
                input_connections = [
                    conn for conn in input_connections
                    if conn.input_name in data_input_names
                ]
            # Encode input connections as dictionary
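            # Illustrative result: { 'input1': { 'id': 0, 'output_name': 'out_file1' } }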
            input_conn_dict = {}
            for conn in input_connections:
                input_conn_dict[conn.input_name] = dict(
                    id=conn.output_step.order_index, output_name=conn.output_name)
            step_dict['input_connections'] = input_conn_dict
            # Position
            step_dict['position'] = step.position
            # Add to return value
            data['steps'][step.order_index] = step_dict
        return data
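The dict returned above is ready for JSON-ification. A minimal usage sketch for writing it out as a Galaxy `.ga` workflow file; here `controller` is a hypothetical object exposing `_workflow_to_dict`, and `trans`/`stored` come from the surrounding request context:

import json

def export_workflow_to_ga(controller, trans, stored, path):
    # Hypothetical helper: serialize the exported dict as a .ga JSON file.
    data = controller._workflow_to_dict(trans, stored)
    with open(path, 'w') as fh:
        # indent=4 keeps the exported file human-readable
        json.dump(data, fh, indent=4)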
Example #11
    def create(self, trans, payload, **kwd):
        """
        POST /api/workflows

        We're not creating workflows from the API; just execute for now.

        However, we will import them if installed_repository_file is specified.
        """

        # ------------------------------------------------------------------------------- #
        ### RPARK: dictionary containing which workflows to change and edit ###
        param_map = {}
        if 'parameters' in payload:
            param_map = payload['parameters']
        # ------------------------------------------------------------------------------- #

        if 'workflow_id' not in payload:
            # create new
            if 'installed_repository_file' in payload:
                workflow_controller = trans.webapp.controllers['workflow']
                result = workflow_controller.import_workflow(trans=trans,
                                                             cntrller='api',
                                                             **payload)
                return result
            trans.response.status = 403
            return "Either workflow_id or installed_repository_file must be specified"
        if 'installed_repository_file' in payload:
            trans.response.status = 403
            return "installed_repository_file may not be specified with workflow_id"
        stored_workflow = trans.sa_session.query(
            self.app.model.StoredWorkflow).get(
                trans.security.decode_id(payload['workflow_id']))
        if stored_workflow.user != trans.user and not trans.user_is_admin():
            if trans.sa_session.query(
                    trans.app.model.StoredWorkflowUserShareAssociation
            ).filter_by(user=trans.user,
                        stored_workflow=stored_workflow).count() == 0:
                trans.response.status = 400
                return ("Workflow is not owned by or shared with current user")
        workflow = stored_workflow.latest_workflow
        if payload['history'].startswith('hist_id='):
            # Passing an existing history to use.
            history = trans.sa_session.query(self.app.model.History).get(
                trans.security.decode_id(payload['history'][8:]))
            if history.user != trans.user and not trans.user_is_admin():
                trans.response.status = 400
                return "Invalid History specified."
        else:
            history = self.app.model.History(name=payload['history'],
                                             user=trans.user)
            trans.sa_session.add(history)
            trans.sa_session.flush()
        ds_map = payload['ds_map']
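        # Expected shape, inferred from the branches below:
        #   ds_map = { '<step id>': { 'src': 'hda'|'ldda'|'ld', 'id': '<encoded id>' } }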
        add_to_history = 'no_add_to_history' not in payload
        for k in ds_map:
            try:
                if ds_map[k]['src'] == 'ldda':
                    ldda = trans.sa_session.query(
                        self.app.model.LibraryDatasetDatasetAssociation).get(
                            trans.security.decode_id(ds_map[k]['id']))
                    assert trans.user_is_admin() or trans.app.security_agent.can_access_dataset(
                        trans.get_current_user_roles(), ldda.dataset)
                    hda = ldda.to_history_dataset_association(
                        history, add_to_history=add_to_history)
                elif ds_map[k]['src'] == 'ld':
                    ldda = trans.sa_session.query(self.app.model.LibraryDataset).get(
                        trans.security.decode_id(ds_map[k]['id'])
                    ).library_dataset_dataset_association
                    assert trans.user_is_admin() or trans.app.security_agent.can_access_dataset(
                        trans.get_current_user_roles(), ldda.dataset)
                    hda = ldda.to_history_dataset_association(
                        history, add_to_history=add_to_history)
                elif ds_map[k]['src'] == 'hda':
                    # Get dataset handle, add to dict and history if necessary
                    hda = trans.sa_session.query(
                        self.app.model.HistoryDatasetAssociation).get(
                            trans.security.decode_id(ds_map[k]['id']))
                    assert trans.user_is_admin() or trans.app.security_agent.can_access_dataset(
                        trans.get_current_user_roles(), hda.dataset)
                else:
                    trans.response.status = 400
                    return "Unknown dataset source '%s' specified." % ds_map[
                        k]['src']
                if add_to_history and hda.history != history:
                    hda = hda.copy()
                    history.add_dataset(hda)
                ds_map[k]['hda'] = hda
            except AssertionError:
                trans.response.status = 400
                return "Invalid Dataset '%s' Specified" % ds_map[k]['id']
        if not workflow:
            trans.response.status = 400
            return "Workflow not found."
        if len(workflow.steps) == 0:
            trans.response.status = 400
            return "Workflow cannot be run because it does not have any steps"
        if workflow.has_cycles:
            trans.response.status = 400
            return "Workflow cannot be run because it contains cycles"
        if workflow.has_errors:
            trans.response.status = 400
            return "Workflow cannot be run because of validation errors in some steps"
        # Build the state for each step
        rval = {}
        for step in workflow.steps:
            if step.type == 'tool' or step.type is None:
                step.module = module_factory.from_workflow_step(trans, step)
                # Check for missing parameters
                step.upgrade_messages = step.module.check_and_update_state()
                # Any connected input needs to have value DummyDataset (these
                # are not persisted so we need to do it every time)
                step.module.add_dummy_datasets(
                    connections=step.input_connections)
                step.state = step.module.state

                ####################################################
                ####################################################
                # RPARK: IF TOOL_NAME IN PARAMETER MAP #
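                # Shape inferred from the lookup below (illustrative):
                #   param_map = { <tool_id>: { 'param': <input name>, 'value': <new value> } }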
                if step.tool_id in param_map:
                    change_param = param_map[step.tool_id]['param']
                    change_value = param_map[step.tool_id]['value']
                    step.state.inputs[change_param] = change_value
                ####################################################
                ####################################################

                if step.tool_errors:
                    trans.response.status = 400
                    return "Workflow cannot be run because of validation errors in some steps: %s" % step_errors
                if step.upgrade_messages:
                    trans.response.status = 400
                    return "Workflow cannot be run because of step upgrade messages: %s" % step.upgrade_messages
            else:
                # This is an input step.  Make sure we have an available input.
                if step.type == 'data_input' and str(step.id) not in ds_map:
                    trans.response.status = 400
                    return "Workflow cannot be run because an expected input step '%s' has no input dataset." % step.id
                step.module = module_factory.from_workflow_step(trans, step)
                step.state = step.module.get_runtime_state()
            step.input_connections_by_name = dict(
                (conn.input_name, conn) for conn in step.input_connections)
        # Run each step, connecting outputs to inputs
        workflow_invocation = self.app.model.WorkflowInvocation()
        workflow_invocation.workflow = workflow
        outputs = util.odict.odict()
        rval['history'] = trans.security.encode_id(history.id)
        rval['outputs'] = []
        for i, step in enumerate(workflow.steps):
            job = None
            if step.type == 'tool' or step.type is None:
                tool = self.app.toolbox.get_tool(step.tool_id)

                def callback(input, value, prefixed_name, prefixed_label):
                    if isinstance(input, DataToolParameter):
                        if prefixed_name in step.input_connections_by_name:
                            conn = step.input_connections_by_name[prefixed_name]
                            return outputs[conn.output_step.id][conn.output_name]

                visit_input_values(tool.inputs, step.state.inputs, callback)
                job, out_data = tool.execute(trans,
                                             step.state.inputs,
                                             history=history)
                outputs[step.id] = out_data
                for pja in step.post_job_actions:
                    if pja.action_type in ActionBox.immediate_actions:
                        ActionBox.execute(self.app,
                                          trans.sa_session,
                                          pja,
                                          job,
                                          replacement_dict=None)
                    else:
                        job.add_post_job_action(pja)
                for v in out_data.itervalues():
                    rval['outputs'].append(trans.security.encode_id(v.id))
            else:
                # This is an input step.  Use the dataset inputs from ds_map.
                job, out_data = step.module.execute(trans, step.state)
                outputs[step.id] = out_data
                outputs[step.id]['output'] = ds_map[str(step.id)]['hda']
            workflow_invocation_step = self.app.model.WorkflowInvocationStep()
            workflow_invocation_step.workflow_invocation = workflow_invocation
            workflow_invocation_step.workflow_step = step
            workflow_invocation_step.job = job
        trans.sa_session.add(workflow_invocation)
        trans.sa_session.flush()
        return rval
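For reference, a minimal sketch of the JSON payload this endpoint consumes. Every id and tool name below is a hypothetical placeholder, and 'parameters' follows the shape read by the RPARK block above:

import json

# Hypothetical payload for POST /api/workflows (all ids are placeholders).
payload = {
    'workflow_id': 'ebfb8f50c6abde6d',        # encoded StoredWorkflow id
    'history': 'hist_id=df7a1f0c02a5b08e',    # or a plain name to create a new history
    'ds_map': {
        '1': {'src': 'hda', 'id': 'f2db41e1fa331b3e'},
    },
    'parameters': {
        'my_tool_id': {'param': 'threshold', 'value': '0.05'},
    },
}
print(json.dumps(payload, indent=4))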
Example #12
    def create(self, trans, payload, **kwd):
        """
        POST /api/workflows

        We're not creating workflows from the API; just execute for now.

        However, we will import them if installed_repository_file is specified.
        """

        # Pull parameters out of payload.  Use .get() for the required keys so a
        # payload without workflow_id can still reach the import branch below
        # instead of raising KeyError here.
        workflow_id = payload.get('workflow_id')
        param_map = payload.get('parameters', {})
        ds_map = payload.get('ds_map', {})
        add_to_history = 'no_add_to_history' not in payload
        history_param = payload.get('history', '')

        # Get/create workflow.
        if not workflow_id:
            # create new
            if 'installed_repository_file' in payload:
                workflow_controller = trans.webapp.controllers[ 'workflow' ]
                result = workflow_controller.import_workflow( trans=trans,
                                                              cntrller='api',
                                                              **payload)
                return result
            trans.response.status = 403
            return "Either workflow_id or installed_repository_file must be specified"
        if 'installed_repository_file' in payload:
            trans.response.status = 403
            return "installed_repository_file may not be specified with workflow_id"

        # Get workflow + accessibility check.
        stored_workflow = trans.sa_session.query(self.app.model.StoredWorkflow).get(
                        trans.security.decode_id(workflow_id))
        if stored_workflow.user != trans.user and not trans.user_is_admin():
            if trans.sa_session.query(trans.app.model.StoredWorkflowUserShareAssociation).filter_by(user=trans.user, stored_workflow=stored_workflow).count() == 0:
                trans.response.status = 400
                return("Workflow is not owned by or shared with current user")
        workflow = stored_workflow.latest_workflow

        # Get target history.
        if history_param.startswith('hist_id='):
            # Passing an existing history to use.
            history = trans.sa_session.query(self.app.model.History).get(
                    trans.security.decode_id(history_param[8:]))
            if history.user != trans.user and not trans.user_is_admin():
                trans.response.status = 400
                return "Invalid History specified."
        else:
            # Send workflow outputs to new history.
            history = self.app.model.History(name=history_param, user=trans.user)
            trans.sa_session.add(history)
            trans.sa_session.flush()

        # Set workflow inputs.
        for k in ds_map:
            try:
                if ds_map[k]['src'] == 'ldda':
                    ldda = trans.sa_session.query(self.app.model.LibraryDatasetDatasetAssociation).get(
                            trans.security.decode_id(ds_map[k]['id']))
                    assert trans.user_is_admin() or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), ldda.dataset )
                    hda = ldda.to_history_dataset_association(history, add_to_history=add_to_history)
                elif ds_map[k]['src'] == 'ld':
                    ldda = trans.sa_session.query(self.app.model.LibraryDataset).get(
                            trans.security.decode_id(ds_map[k]['id'])).library_dataset_dataset_association
                    assert trans.user_is_admin() or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), ldda.dataset )
                    hda = ldda.to_history_dataset_association(history, add_to_history=add_to_history)
                elif ds_map[k]['src'] == 'hda':
                    # Get dataset handle, add to dict and history if necessary
                    hda = trans.sa_session.query(self.app.model.HistoryDatasetAssociation).get(
                            trans.security.decode_id(ds_map[k]['id']))
                    assert trans.user_is_admin() or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), hda.dataset )
                else:
                    trans.response.status = 400
                    return "Unknown dataset source '%s' specified." % ds_map[k]['src']
                if add_to_history and hda.history != history:
                    hda = hda.copy()
                    history.add_dataset(hda)
                ds_map[k]['hda'] = hda
            except AssertionError:
                trans.response.status = 400
                return "Invalid Dataset '%s' Specified" % ds_map[k]['id']

        # Sanity checks.
        if not workflow:
            trans.response.status = 400
            return "Workflow not found."
        if len( workflow.steps ) == 0:
            trans.response.status = 400
            return "Workflow cannot be run because it does not have any steps"
        if workflow.has_cycles:
            trans.response.status = 400
            return "Workflow cannot be run because it contains cycles"
        if workflow.has_errors:
            trans.response.status = 400
            return "Workflow cannot be run because of validation errors in some steps"

        # Build the state for each step
        rval = {}
        for step in workflow.steps:
            input_connections_by_name = {}
            for conn in step.input_connections:
                input_name = conn.input_name
                if input_name not in input_connections_by_name:
                    input_connections_by_name[input_name] = []
                input_connections_by_name[input_name].append(conn)
            step.input_connections_by_name = input_connections_by_name

            if step.type == 'tool' or step.type is None:
                step.module = module_factory.from_workflow_step( trans, step )
                # Check for missing parameters
                step.upgrade_messages = step.module.check_and_update_state()
                # Any connected input needs to have value DummyDataset (these
                # are not persisted so we need to do it every time)
                step.module.add_dummy_datasets( connections=step.input_connections )
                step.state = step.module.state
                _update_step_parameters(step, param_map)
                if step.tool_errors:
                    trans.response.status = 400
                    return "Workflow cannot be run because of validation errors in some steps: %s" % step_errors
                if step.upgrade_messages:
                    trans.response.status = 400
                    return "Workflow cannot be run because of step upgrade messages: %s" % step.upgrade_messages
            else:
                # This is an input step.  Make sure we have an available input.
                if step.type == 'data_input' and str(step.id) not in ds_map:
                    trans.response.status = 400
                    return "Workflow cannot be run because an expected input step '%s' has no input dataset." % step.id
                step.module = module_factory.from_workflow_step( trans, step )
                step.state = step.module.get_runtime_state()

        # Run each step, connecting outputs to inputs
        rval['history'] = trans.security.encode_id(history.id)
        rval['outputs'] = []

        replacement_dict = payload.get('replacement_params', {})

        outputs = invoke(
            trans=trans,
            workflow=workflow,
            target_history=history,
            replacement_dict=replacement_dict,
            ds_map=ds_map,
        )
        trans.sa_session.flush()

        # Build legacy output - should probably include more information from
        # outputs.
        for step in workflow.steps:
            if step.type == 'tool' or step.type is None:
                for v in outputs[ step.id ].itervalues():
                    rval[ 'outputs' ].append( trans.security.encode_id( v.id ) )

        return rval
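`_update_step_parameters` is called above but not shown in this snippet; a plausible reconstruction, assuming the same param_map shape as the RPARK block in Example #11:

def _update_step_parameters(step, param_map):
    # Hypothetical reconstruction: apply a per-tool override of the form
    # { tool_id: { 'param': <input name>, 'value': <new value> } }
    # to the step's tool state.
    if step.tool_id in param_map:
        change = param_map[step.tool_id]
        step.state.inputs[change['param']] = change['value']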