Example no. 1
0
    def run( self, trans, workflow_id, payload, **kwd ):
        """
        POST /api_internal/workflows/{encoded_workflow_id}/run

        Run a workflow with a dictionary of prefixed_name/value pairs e.g.
            payload = { inputs: { step_0: { parameter_0|parameter_1 : value_0, ... }, ... } }

        :param workflow_id: encoded id of the stored workflow to run
        :param payload:     run options; recognized keys are 'inputs',
                            'new_history_name', 'history_id' and
                            'replacement_params'
        :returns: list with one dict per invocation describing the target
                  history (if newly created) and whether it was scheduled
        :raises exceptions.MessageException: if any step fails validation
        """
        workflow = self.__get_stored_accessible_workflow( trans, workflow_id ).latest_workflow
        trans.workflow_building_mode = workflow_building_modes.USE_HISTORY
        module_injector = WorkflowModuleInjector( trans )
        # Expand batch/linked inputs into one argument dict per workflow run.
        params, param_keys = expand_workflow_inputs( payload.get( 'inputs', [] ) )
        # Validate every expanded run up front so nothing is scheduled when
        # any step of any run carries bad parameters.
        errors = {}
        for workflow_args in params:
            for step in workflow.steps:
                step_args = workflow_args.get( str( step.id ), {} )
                step_errors = module_injector.inject( step, step_args )
                if step_errors:
                    errors[ step.id ] = step_errors
        if errors:
            # log.error, not log.exception: we are not inside an except block,
            # so log.exception would append a misleading empty traceback.
            log.error( errors )
            raise exceptions.MessageException( err_data=errors )
        invocations = []
        for index, workflow_args in enumerate( params ):
            for step in workflow.steps:
                step_args = workflow_args.get( str( step.id ), {} )
                module_injector.inject( step, step_args )
            new_history = None
            if 'new_history_name' in payload:
                if payload[ 'new_history_name' ]:
                    nh_name = payload[ 'new_history_name' ]
                else:
                    nh_name = 'History from %s workflow' % workflow.name
                # Suffix the history name with this run's input identifiers.
                if index in param_keys:
                    ids = param_keys[ index ]
                    nids = len( ids )
                    if nids == 1:
                        nh_name = '%s on %s' % ( nh_name, ids[ 0 ] )
                    elif nids > 1:
                        nh_name = '%s on %s and %s' % ( nh_name, ', '.join( ids[ 0:-1 ] ), ids[ -1 ] )
                new_history = trans.app.model.History( user=trans.user, name=nh_name )
                new_history.copy_tags_from( trans.user, trans.history )
                trans.sa_session.add( new_history )
                target_history = new_history
            elif 'history_id' in payload:
                # Fixed parenthesization: trans.user and current_history are
                # arguments of get_owned(), not of decode_id() - the original
                # call would have raised a TypeError.
                target_history = histories.HistoryManager( trans.app ).get_owned(
                    trans.security.decode_id( payload.get( 'history_id' ) ),
                    trans.user, current_history=trans.history )
            else:
                target_history = trans.history
            run_config = WorkflowRunConfig(
                target_history=target_history,
                replacement_dict=payload.get( 'replacement_params', {} ),
                copy_inputs_to_history=new_history is not None )
            invocation = queue_invoke(
                trans=trans,
                workflow=workflow,
                workflow_run_config=run_config,
                populate_state=False )
            invocations.append({ 'history'      : { 'id' : trans.app.security.encode_id( new_history.id ), 'name' : new_history.name } if new_history else None,
                                 'scheduled'    : invocation.state == trans.app.model.WorkflowInvocation.states.SCHEDULED })
            trans.sa_session.flush()
        return invocations
def build_workflow_run_configs(trans, workflow, payload):
    """
    Build one WorkflowRunConfig per expanded parameter combination in *payload*.

    Validates the workflow (non-empty, acyclic), resolves the target history,
    normalizes step parameters and inputs, attaches dataset/collection content
    to each input, and validates any workflow resource parameters.

    :param trans:    the current transaction (provides app, security, session)
    :param workflow: the Workflow to be run
    :param payload:  request dictionary; recognized keys include 'parameters',
                     'inputs', 'ds_map', 'inputs_by', 'legacy',
                     'parameters_normalized', 'allow_tool_state_corrections',
                     'use_cached_job', 'no_add_to_history',
                     'replacement_params' and 'resource_params'
    :returns: list of WorkflowRunConfig objects, one per expanded run
    :raises exceptions.MessageException: if the workflow is empty or cyclic
    :raises exceptions.RequestParameterInvalidException: for conflicting or
        invalid payload entries
    :raises exceptions.ItemAccessibilityException: if the user cannot access
        a referenced dataset
    """
    app = trans.app
    allow_tool_state_corrections = payload.get('allow_tool_state_corrections', False)
    use_cached_job = payload.get('use_cached_job', False)

    # Sanity checks.
    if len(workflow.steps) == 0:
        raise exceptions.MessageException("Workflow cannot be run because it does not have any steps")
    if workflow.has_cycles:
        raise exceptions.MessageException("Workflow cannot be run because it contains cycles")

    if 'step_parameters' in payload and 'parameters' in payload:
        raise exceptions.RequestParameterInvalidException("Cannot specify both legacy parameters and step_parameters attributes.")
    if 'inputs' in payload and 'ds_map' in payload:
        raise exceptions.RequestParameterInvalidException("Cannot specify both legacy ds_map and input attributes.")

    add_to_history = 'no_add_to_history' not in payload
    legacy = payload.get('legacy', False)
    already_normalized = payload.get('parameters_normalized', False)
    raw_parameters = payload.get('parameters', {})

    run_configs = []
    unexpanded_param_map = _normalize_step_parameters(workflow.steps, raw_parameters, legacy=legacy, already_normalized=already_normalized)
    # Batch/linked parameters expand into one parameter map per run.
    expanded_params, expanded_param_keys = expand_workflow_inputs(unexpanded_param_map)
    for index, param_map in enumerate(expanded_params):
        history = _get_target_history(trans, workflow, payload, expanded_param_keys, index)
        inputs = payload.get('inputs', None)
        inputs_by = payload.get('inputs_by', None)
        # New default is to reference steps by index of workflow step
        # which is intrinsic to the workflow and independent of the state
        # of Galaxy at the time of workflow import.
        default_inputs_by = 'step_index|step_uuid'
        if inputs is None:
            # Default to legacy behavior - read ds_map and reference steps
            # by unencoded step id (a raw database id).
            inputs = payload.get('ds_map', {})
            if legacy:
                default_inputs_by = 'step_id|step_uuid'
            inputs_by = inputs_by or default_inputs_by
        else:
            inputs = inputs or {}
        inputs_by = inputs_by or default_inputs_by
        if inputs or not already_normalized:
            normalized_inputs = _normalize_inputs(workflow.steps, inputs, inputs_by)
        else:
            # Only allow dumping IDs directly into JSON database instead of properly recording the
            # inputs with referential integrity if parameters are already normalized (coming from tool form).
            normalized_inputs = {}

        steps_by_id = workflow.steps_by_id
        # Set workflow inputs.
        for key, input_dict in normalized_inputs.items():
            step = steps_by_id[key]
            if step.type == 'parameter_input':
                # Simple (non-dataset) parameters need no content resolution.
                continue
            if 'src' not in input_dict:
                # Fixed error message wording ("Not input" -> "No input").
                raise exceptions.RequestParameterInvalidException("No input source type defined for input '%s'." % input_dict)
            if 'id' not in input_dict:
                raise exceptions.RequestParameterInvalidException("No input id defined for input '%s'." % input_dict)
            if 'content' in input_dict:
                # Fixed stray quote in the original message.
                raise exceptions.RequestParameterInvalidException("Input cannot specify an explicit 'content' attribute: %s." % input_dict)
            input_source = input_dict['src']
            input_id = input_dict['id']
            try:
                if input_source == 'ldda':
                    ldda = trans.sa_session.query(app.model.LibraryDatasetDatasetAssociation).get(trans.security.decode_id(input_id))
                    assert trans.user_is_admin() or trans.app.security_agent.can_access_dataset(trans.get_current_user_roles(), ldda.dataset)
                    content = ldda.to_history_dataset_association(history, add_to_history=add_to_history)
                elif input_source == 'ld':
                    ldda = trans.sa_session.query(app.model.LibraryDataset).get(trans.security.decode_id(input_id)).library_dataset_dataset_association
                    assert trans.user_is_admin() or trans.app.security_agent.can_access_dataset(trans.get_current_user_roles(), ldda.dataset)
                    content = ldda.to_history_dataset_association(history, add_to_history=add_to_history)
                elif input_source == 'hda':
                    # Get dataset handle, add to dict and history if necessary
                    content = trans.sa_session.query(app.model.HistoryDatasetAssociation).get(trans.security.decode_id(input_id))
                    assert trans.user_is_admin() or trans.app.security_agent.can_access_dataset(trans.get_current_user_roles(), content.dataset)
                elif input_source == 'uuid':
                    dataset = trans.sa_session.query(app.model.Dataset).filter(app.model.Dataset.uuid == input_id).first()
                    if dataset is None:
                        # this will need to be changed later. If federation code is available, then a missing UUID
                        # could be found among federation partners
                        raise exceptions.RequestParameterInvalidException("Input cannot find UUID: %s." % input_id)
                    assert trans.user_is_admin() or trans.app.security_agent.can_access_dataset(trans.get_current_user_roles(), dataset)
                    content = history.add_dataset(dataset)
                elif input_source == 'hdca':
                    content = app.dataset_collections_service.get_dataset_collection_instance(trans, 'history', input_id)
                else:
                    raise exceptions.RequestParameterInvalidException("Unknown workflow input source '%s' specified." % input_source)
                if add_to_history and content.history != history:
                    # Copy the content into the target history if it lives elsewhere.
                    content = content.copy()
                    if isinstance(content, app.model.HistoryDatasetAssociation):
                        history.add_dataset(content)
                    else:
                        history.add_dataset_collection(content)
                input_dict['content'] = content
            except AssertionError:
                raise exceptions.ItemAccessibilityException("Invalid workflow input '%s' specified" % input_id)
        # Collapse each {'src': ..., 'id': ..., 'content': ...} dict down to
        # its resolved content object; plain values pass through unchanged.
        for key in set(normalized_inputs.keys()):
            value = normalized_inputs[key]
            if isinstance(value, dict) and 'content' in value:
                normalized_inputs[key] = value['content']
            else:
                normalized_inputs[key] = value
        resource_params = payload.get('resource_params', {})
        if resource_params:
            # quick attempt to validate parameters, just handle select options now since is what
            # is needed for DTD - arbitrary plugins can define arbitrary logic at runtime in the
            # destination function. In the future this should be extended to allow arbitrary
            # pluggable validation.
            resource_mapper_function = get_resource_mapper_function(trans.app)
            # TODO: Do we need to do anything with the stored_workflow or can this be removed.
            resource_parameters = resource_mapper_function(trans=trans, stored_workflow=None, workflow=workflow)
            for resource_parameter in resource_parameters:
                if resource_parameter.get("type") == "select":
                    name = resource_parameter.get("name")
                    if name in resource_params:
                        value = resource_params[name]
                        valid_option = False
                        # TODO: How should be handle the case where no selection is made by the user
                        # This can happen when there is a select on the page but the user has no options to select
                        # Here I have the validation pass it through. An alternative may be to remove the parameter if
                        # it is None.
                        if value is None:
                            valid_option = True
                        else:
                            for option_elem in resource_parameter.get('data'):
                                option_value = option_elem.get("value")
                                if value == option_value:
                                    valid_option = True
                        if not valid_option:
                            raise exceptions.RequestParameterInvalidException("Invalid value for parameter '%s' found." % name)

        run_configs.append(WorkflowRunConfig(
            target_history=history,
            replacement_dict=payload.get('replacement_params', {}),
            inputs=normalized_inputs,
            param_map=param_map,
            allow_tool_state_corrections=allow_tool_state_corrections,
            use_cached_job=use_cached_job,
            resource_params=resource_params,
        ))

    return run_configs
Example no. 3
0
def build_workflow_run_configs( trans, workflow, payload ):
    """
    Build one WorkflowRunConfig per expanded parameter combination in *payload*.

    Validates the workflow (non-empty, acyclic), resolves the target history,
    normalizes step parameters and inputs, and attaches dataset/collection
    content to each input.

    :param trans:    the current transaction (provides app, security, session)
    :param workflow: the Workflow to be run
    :param payload:  request dictionary; recognized keys include 'parameters',
                     'inputs', 'ds_map', 'inputs_by', 'legacy',
                     'parameters_normalized', 'allow_tool_state_corrections',
                     'no_add_to_history' and 'replacement_params'
    :returns: list of WorkflowRunConfig objects, one per expanded run
    :raises exceptions.MessageException: if the workflow is empty or cyclic
    :raises exceptions.RequestParameterInvalidException: for conflicting or
        invalid payload entries
    :raises exceptions.ItemAccessibilityException: if the user cannot access
        a referenced dataset
    """
    app = trans.app
    allow_tool_state_corrections = payload.get( 'allow_tool_state_corrections', False )

    # Sanity checks.
    if len( workflow.steps ) == 0:
        raise exceptions.MessageException( "Workflow cannot be run because it does not have any steps" )
    if workflow.has_cycles:
        raise exceptions.MessageException( "Workflow cannot be run because it contains cycles" )

    if 'step_parameters' in payload and 'parameters' in payload:
        raise exceptions.RequestParameterInvalidException( "Cannot specify both legacy parameters and step_parameters attributes." )
    if 'inputs' in payload and 'ds_map' in payload:
        raise exceptions.RequestParameterInvalidException( "Cannot specify both legacy ds_map and input attributes." )

    add_to_history = 'no_add_to_history' not in payload
    legacy = payload.get( 'legacy', False )
    already_normalized = payload.get( 'parameters_normalized', False )
    raw_parameters = payload.get( 'parameters', {} )

    run_configs = []
    unexpanded_param_map = _normalize_step_parameters( workflow.steps, raw_parameters, legacy=legacy, already_normalized=already_normalized )
    # Batch/linked parameters expand into one parameter map per run.
    expanded_params, expanded_param_keys = expand_workflow_inputs( unexpanded_param_map )
    for index, param_map in enumerate( expanded_params ):
        history = _get_target_history(trans, workflow, payload, expanded_param_keys, index)
        inputs = payload.get( 'inputs', None )
        inputs_by = payload.get( 'inputs_by', None )
        # New default is to reference steps by index of workflow step
        # which is intrinsic to the workflow and independent of the state
        # of Galaxy at the time of workflow import.
        default_inputs_by = 'step_index|step_uuid'
        if inputs is None:
            # Default to legacy behavior - read ds_map and reference steps
            # by unencoded step id (a raw database id).
            inputs = payload.get( 'ds_map', {} )
            if legacy:
                default_inputs_by = 'step_id|step_uuid'
            inputs_by = inputs_by or default_inputs_by
        else:
            inputs = inputs or {}
        inputs_by = inputs_by or default_inputs_by
        if inputs or not already_normalized:
            normalized_inputs = _normalize_inputs( workflow.steps, inputs, inputs_by )
        else:
            # Only allow dumping IDs directly into JSON database instead of properly recording the
            # inputs with referential integrity if parameters are already normalized (coming from tool form).
            normalized_inputs = {}

        steps_by_id = workflow.steps_by_id
        # Set workflow inputs.
        # Fixed: use items() instead of Python-2-only iteritems(), matching
        # the other copy of this function in the file and Python 3.
        for key, input_dict in normalized_inputs.items():
            step = steps_by_id[key]
            if step.type == 'parameter_input':
                # Simple (non-dataset) parameters need no content resolution.
                continue
            if 'src' not in input_dict:
                # Fixed error message wording ("Not input" -> "No input").
                raise exceptions.RequestParameterInvalidException( "No input source type defined for input '%s'." % input_dict )
            if 'id' not in input_dict:
                raise exceptions.RequestParameterInvalidException( "No input id defined for input '%s'." % input_dict )
            if 'content' in input_dict:
                # Fixed stray quote in the original message.
                raise exceptions.RequestParameterInvalidException( "Input cannot specify an explicit 'content' attribute: %s." % input_dict )
            input_source = input_dict[ 'src' ]
            input_id = input_dict[ 'id' ]
            try:
                if input_source == 'ldda':
                    ldda = trans.sa_session.query( app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( input_id ) )
                    assert trans.user_is_admin() or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), ldda.dataset )
                    content = ldda.to_history_dataset_association( history, add_to_history=add_to_history )
                elif input_source == 'ld':
                    ldda = trans.sa_session.query( app.model.LibraryDataset ).get( trans.security.decode_id( input_id ) ).library_dataset_dataset_association
                    assert trans.user_is_admin() or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), ldda.dataset )
                    content = ldda.to_history_dataset_association( history, add_to_history=add_to_history )
                elif input_source == 'hda':
                    # Get dataset handle, add to dict and history if necessary
                    content = trans.sa_session.query( app.model.HistoryDatasetAssociation ).get( trans.security.decode_id( input_id ) )
                    assert trans.user_is_admin() or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), content.dataset )
                elif input_source == 'uuid':
                    dataset = trans.sa_session.query( app.model.Dataset ).filter( app.model.Dataset.uuid == input_id ).first()
                    if dataset is None:
                        # this will need to be changed later. If federation code is available, then a missing UUID
                        # could be found among federation partners
                        raise exceptions.RequestParameterInvalidException( "Input cannot find UUID: %s." % input_id )
                    assert trans.user_is_admin() or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), dataset )
                    content = history.add_dataset( dataset )
                elif input_source == 'hdca':
                    content = app.dataset_collections_service.get_dataset_collection_instance( trans, 'history', input_id )
                else:
                    raise exceptions.RequestParameterInvalidException( "Unknown workflow input source '%s' specified." % input_source )
                if add_to_history and content.history != history:
                    # Copy the content into the target history if it lives elsewhere.
                    content = content.copy()
                    if isinstance( content, app.model.HistoryDatasetAssociation ):
                        history.add_dataset( content )
                    else:
                        history.add_dataset_collection( content )
                input_dict[ 'content' ] = content
            except AssertionError:
                raise exceptions.ItemAccessibilityException( "Invalid workflow input '%s' specified" % input_id )
        # Collapse each {'src': ..., 'id': ..., 'content': ...} dict down to
        # its resolved content object; plain values pass through unchanged.
        for key in set( normalized_inputs.keys() ):
            value = normalized_inputs[ key ]
            if isinstance( value, dict ) and 'content' in value:
                normalized_inputs[ key ] = value[ 'content' ]
            else:
                normalized_inputs[ key ] = value
        run_configs.append(WorkflowRunConfig(
            target_history=history,
            replacement_dict=payload.get( 'replacement_params', {} ),
            inputs=normalized_inputs,
            param_map=param_map,
            allow_tool_state_corrections=allow_tool_state_corrections
        ))

    return run_configs
    def run(self, trans, workflow_id, payload, **kwd):
        """
        POST /api_internal/workflows/{encoded_workflow_id}/run

        Run a workflow with a dictionary of prefixed_name/value pairs e.g.
            payload = { inputs: { step_0: { parameter_0|parameter_1 : value_0, ... }, ... } }

        :param workflow_id: encoded id of the stored workflow to run
        :param payload:     run options; recognized keys are 'inputs',
                            'new_history_name', 'history_id' and
                            'replacement_params'
        :returns: list with one dict per invocation describing the target
                  history (if newly created) and whether it was scheduled
        :raises exceptions.MessageException: if any step fails validation
        """
        workflow = self.__get_stored_accessible_workflow(
            trans, workflow_id).latest_workflow
        trans.workflow_building_mode = workflow_building_modes.USE_HISTORY
        module_injector = WorkflowModuleInjector(trans)
        # Expand batch/linked inputs into one argument dict per workflow run.
        params, param_keys = expand_workflow_inputs(payload.get('inputs', []))
        # Validate every expanded run up front so nothing is scheduled when
        # any step of any run carries bad parameters.
        errors = {}
        for workflow_args in params:
            for step in workflow.steps:
                step_args = workflow_args.get(str(step.id), {})
                step_errors = module_injector.inject(step, step_args)
                if step_errors:
                    errors[step.id] = step_errors
        if errors:
            # log.error, not log.exception: we are not inside an except block,
            # so log.exception would append a misleading empty traceback.
            log.error(errors)
            raise exceptions.MessageException(err_data=errors)
        invocations = []
        for index, workflow_args in enumerate(params):
            for step in workflow.steps:
                step_args = workflow_args.get(str(step.id), {})
                module_injector.inject(step, step_args)
            new_history = None
            if 'new_history_name' in payload:
                if payload['new_history_name']:
                    nh_name = payload['new_history_name']
                else:
                    nh_name = 'History from %s workflow' % workflow.name
                # Suffix the history name with this run's input identifiers.
                if index in param_keys:
                    ids = param_keys[index]
                    nids = len(ids)
                    if nids == 1:
                        nh_name = '%s on %s' % (nh_name, ids[0])
                    elif nids > 1:
                        nh_name = '%s on %s and %s' % (nh_name, ', '.join(
                            ids[0:-1]), ids[-1])
                new_history = trans.app.model.History(user=trans.user,
                                                      name=nh_name)
                new_history.copy_tags_from(trans.user, trans.history)
                trans.sa_session.add(new_history)
                target_history = new_history
            elif 'history_id' in payload:
                # Fixed parenthesization: trans.user and current_history are
                # arguments of get_owned(), not of decode_id() - the original
                # call would have raised a TypeError.
                target_history = histories.HistoryManager(trans.app).get_owned(
                    trans.security.decode_id(payload.get('history_id')),
                    trans.user,
                    current_history=trans.history)
            else:
                target_history = trans.history
            run_config = WorkflowRunConfig(target_history=target_history,
                                           replacement_dict=payload.get(
                                               'replacement_params', {}),
                                           copy_inputs_to_history=new_history
                                           is not None)
            invocation = queue_invoke(trans=trans,
                                      workflow=workflow,
                                      workflow_run_config=run_config,
                                      populate_state=False)
            invocations.append({
                'history': {
                    'id': trans.app.security.encode_id(new_history.id),
                    'name': new_history.name
                } if new_history else None,
                'scheduled':
                invocation.state ==
                trans.app.model.WorkflowInvocation.states.SCHEDULED
            })
            trans.sa_session.flush()
        return invocations