Example #1
def grab_workflows(instance=None, connection_galaxy=None):

    # Delete old references to workflows
    Workflow.objects.all().delete()

    # Check for an existing Galaxy connection; otherwise create one
    if connection_galaxy is None:
        print("instance is none")
        # Get a connection from the first registered Galaxy instance
        instance = Instance.objects.all()[0]
        connection_galaxy = instance.get_galaxy_connection()
    # Fetch all complete workflows from Galaxy
    workflows = connection_galaxy.get_complete_workflows()

    #for each workflow, create a core Workflow object and its associated WorkflowDataInput objects
    for workflow in workflows:
        workflow_dict = {
            'name': workflow.name,
            'internal_id': workflow.identifier
            #'visibility': 2 #give public visibility for now
        }
        w = Workflow(**workflow_dict)
        try:
            w.save()
            inputs = workflow.inputs
            for input in inputs:
                input_dict = {
                    'name': input.name,
                    'internal_id': input.identifier
                }
                i = WorkflowDataInput(**input_dict)
                i.save()
                w.data_inputs.add(i)
        except:
            # Roll back the open transaction (django.db.connection) if saving
            # the workflow or its inputs fails
            connection.rollback()
Example #2

def grab_workflows(instance=None, connection_galaxy=None):
    
    # Delete old references to workflows
    Workflow.objects.all().delete() 
    
    # Check for an existing Galaxy connection; otherwise create one
    if connection_galaxy is None:
        print("instance is none")
        # Get a connection from the first registered Galaxy instance
        instance = Instance.objects.all()[0]
        connection_galaxy = instance.get_galaxy_connection()
    # Fetch all complete workflows from Galaxy
    workflows = connection_galaxy.get_complete_workflows()

    #for each workflow, create a core Workflow object and its associated WorkflowDataInput objects
    for workflow in workflows:
        workflow_dict = {
            'name': workflow.name,
            'internal_id': workflow.identifier
            #'visibility': 2 #give public visibility for now
        }
        w = Workflow(**workflow_dict)
        try:
            w.save()
            inputs = workflow.inputs
            for input in inputs:
                input_dict = {
                    'name': input.name,
                    'internal_id': input.identifier
                }
                i = WorkflowDataInput(**input_dict)
                i.save()
                w.data_inputs.add(i)
        except:
            # Roll back the open transaction (django.db.connection) if saving
            # the workflow or its inputs fails
            connection.rollback()
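
A refresh like grab_workflows() is typically triggered from a scheduled job or a Django management command. The sketch below is illustrative only: the module path workflow_manager.tasks and the command itself are assumptions, not something shown in the examples above.

from django.core.management.base import BaseCommand

# Assumed import path; point this at wherever grab_workflows() is defined.
from workflow_manager.tasks import grab_workflows


class Command(BaseCommand):
    help = "Re-import all complete workflows from the first Galaxy instance"

    def handle(self, *args, **options):
        # With no arguments, grab_workflows() falls back to the first
        # Instance object and opens its own Galaxy connection.
        grab_workflows()
        self.stdout.write("Workflow import finished")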
Example #3
def import_workflow(workflow, workflow_engine, workflow_dictionary):

    issues = []
    has_step_issues = False
    has_input_issues = False
    has_annotation_issues = False

    workflow_annotation = get_workflow_annotation(workflow_dictionary)

    if workflow_annotation is None:
        issues.append("Workflow annotation not found.")
        return issues

    workflow_type = get_workflow_type(workflow_annotation)

    if workflow_type is None:
        issues.append("Workflow type not found.")
        return issues

    # check workflow inputs for correct annotations
    workflow_input_issues = check_workflow_inputs(workflow_dictionary)
    if len(workflow_input_issues) > 0:
        has_input_issues = True
        issues = issues + workflow_input_issues

    #  check workflow steps for correct annotations and skip import if problems are detected
    workflow_step_issues = check_steps(workflow_dictionary)
    if workflow_step_issues is None:  # no error in parsing but no outputs defined
        issues.append("Workflow does not declare outputs.")
        has_step_issues = True
    else:
        if len(workflow_step_issues) > 0:
            has_step_issues = True
            issues = issues + workflow_step_issues

    # skip import if workflow has incorrect input annotations or step annotation
    if has_step_issues or has_input_issues:
        return issues

    # import workflow
    if workflow_type is not None:  # if workflow is meant for refinery

        workflow_object = Workflow.objects.create(
            name=workflow.name,
            internal_id=workflow.identifier,
            workflow_engine=workflow_engine,
            is_active=True,
            type=workflow_type,
            graph=json.dumps(workflow_dictionary))
        workflow_object.set_manager_group(workflow_engine.get_manager_group())

        workflow_object.share(
            workflow_engine.get_manager_group().get_managed_group())

        inputs = workflow.inputs

        # Add WorkflowDataInput objects, i.e. the workflow's inputs, to the database
        for input in inputs:
            input_dict = {'name': input.name, 'internal_id': input.identifier}
            i = WorkflowDataInput(**input_dict)
            i.save()
            workflow_object.data_inputs.add(i)

            # if the workflow has only 1 input, add a default input relationship type
            if len(inputs) == 1:
                opt_single = {'category': TYPE_1_1, 'set1': input.name}
                temp_relationship = WorkflowInputRelationships(**opt_single)
                temp_relationship.save()
                workflow_object.input_relationships.add(temp_relationship)

        # Add input relationships (NodeRelationshipType); relationship types
        # are defined in the annotation for workflows with more than 1 input,
        # e.g. refinery_relationship=[{"category":"N-1", "set1":"input_file"}]
        workflow_relationships = get_input_relationships(workflow_annotation)

        if workflow_relationships is not None:
            if len(inputs) > 1:
                for opt_r in workflow_relationships:
                    try:
                        temp_relationship = WorkflowInputRelationships(**opt_r)
                        temp_relationship.save()
                        workflow_object.input_relationships.add(
                            temp_relationship)
                    except KeyError as e:
                        logger.error(e)
                        issues.append("Input relationship option error: %s" %
                                      e)

    return issues
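
For reference, the relationship options that reach WorkflowInputRelationships(**opt_r) are read from the workflow annotation by get_input_relationships(). Judging from the refinery_relationship comment in the code, each option is a plain dict whose keys must match the model's fields; the sketch below only restates that shape, and any key beyond "category" and "set1" would be an assumption.

# Shape taken from the refinery_relationship comment above; nothing else
# about the annotation format is confirmed by these snippets.
workflow_relationships = [
    {"category": "N-1", "set1": "input_file"},
]

for opt_r in workflow_relationships:
    # import_workflow() unpacks each dict as keyword arguments into the
    # WorkflowInputRelationships model, so the keys must match its fields.
    print("would create WorkflowInputRelationships(**%r)" % opt_r)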
Example #4
def import_workflow(workflow, workflow_engine, workflow_dictionary):
    issues = []
    has_step_issues = False
    has_input_issues = False

    workflow_annotation = get_workflow_annotation(workflow_dictionary)

    if workflow_annotation is None:
        issues.append("Workflow annotation not found.")
        return issues

    workflow_type = get_workflow_type(workflow_annotation)

    if workflow_type is None:
        issues.append("Workflow type not found.")
        return issues

    # check workflow inputs for correct annotations
    workflow_input_issues = check_workflow_inputs(workflow_dictionary)
    if len(workflow_input_issues) > 0:
        has_input_issues = True
        issues = issues + workflow_input_issues

    # check workflow steps for correct annotations and skip import if problems
    # are detected
    workflow_step_issues = check_steps(workflow_dictionary)
    if workflow_step_issues is None:
        # no error in parsing but no outputs defined
        issues.append("Workflow does not declare outputs.")
        has_step_issues = True
    else:
        if len(workflow_step_issues) > 0:
            has_step_issues = True
            issues = issues + workflow_step_issues

    # skip import if workflow has incorrect input annotations or step
    # annotation
    if has_step_issues or has_input_issues:
        return issues
    # import workflow
    if workflow_type is not None:  # if workflow is meant for refinery
        workflow_object = Workflow.objects.create(
            name=workflow.name, internal_id=workflow.identifier,
            workflow_engine=workflow_engine, is_active=True,
            type=workflow_type, graph=json.dumps(workflow_dictionary))
        workflow_object.set_manager_group(workflow_engine.get_manager_group())
        workflow_object.share(
            workflow_engine.get_manager_group().get_managed_group())
        inputs = workflow.inputs
        # Add WorkflowDataInput objects, i.e. the workflow's inputs, to the
        # database
        for input in inputs:
            input_dict = {
                'name': input.name,
                'internal_id': input.identifier
            }
            i = WorkflowDataInput(**input_dict)
            i.save()
            workflow_object.data_inputs.add(i)
            # if the workflow has only 1 input, add a default input
            # relationship type
            if len(inputs) == 1:
                opt_single = {
                    'category': TYPE_1_1,
                    'set1': input.name
                }
                temp_relationship = WorkflowInputRelationships(**opt_single)
                temp_relationship.save()
                workflow_object.input_relationships.add(temp_relationship)

        # Add input relationships (NodeRelationshipType); relationship types
        # are defined in the annotation for workflows with more than 1 input,
        # e.g. refinery_relationship=[{"category":"N-1", "set1":"input_file"}]
        workflow_relationships = get_input_relationships(workflow_annotation)
        if workflow_relationships is not None:
            if len(inputs) > 1:
                for opt_r in workflow_relationships:
                    try:
                        temp_relationship = WorkflowInputRelationships(**opt_r)
                        temp_relationship.save()
                        workflow_object.input_relationships.add(
                            temp_relationship)
                    except KeyError as e:
                        logger.error(e)
                        issues.append(
                            "Input relationship option error: %s" % e)
    return issues
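
import_workflow() returns a list of human-readable issue strings: missing annotations, input issues, or step issues abort the import, while input relationship errors are reported after the Workflow object has already been created. A minimal caller sketch, assuming a Galaxy connection like the one in Example #1 and a hypothetical get_workflow_dictionary() helper for fetching the parsed workflow dictionary:

def import_all_workflows(workflow_engine, connection_galaxy):
    # Collect issues per workflow name so they can be shown to an admin.
    all_issues = {}
    for workflow in connection_galaxy.get_complete_workflows():
        # get_workflow_dictionary() is a placeholder for however the JSON
        # representation of the workflow is obtained from Galaxy.
        workflow_dictionary = get_workflow_dictionary(workflow)
        issues = import_workflow(workflow, workflow_engine,
                                 workflow_dictionary)
        if issues:
            all_issues[workflow.name] = issues
    return all_issues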