Example #1
def workflow_run_post_req(workflow_id):
    """Submits the workflow for execution

    Parameters
    ----------
    workflow_id : str
        The workflow id

    Returns
    -------
    dict of {str: str}
        A dictionary of the form {'status': str, 'message': str}, in which
        status is the status of the request ('error' or 'success') and
        message is a human-readable string carrying the error message when
        status is 'error'.
    """
    try:
        wf = ProcessingWorkflow(workflow_id)
    except QiitaDBUnknownIDError:
        return {
            'status': 'error',
            'message': 'Workflow %s does not exist' % workflow_id
        }
    wf.submit()
    return {'status': 'success', 'message': ''}
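
A minimal usage sketch for this handler. Illustrative only: the import path (qiita_pet.handlers.study_handlers) and the workflow id are assumptions, and a configured Qiita database is required for the call to work.

# Hypothetical usage -- the import path is an assumption, not taken from
# the snippet above, and the id is a placeholder.
from qiita_pet.handlers.study_handlers import workflow_run_post_req

resp = workflow_run_post_req('42')
if resp['status'] == 'error':
    # e.g. "Workflow 42 does not exist" when the id is unknown
    print(resp['message'])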
Example #2
    def test_workflow_handler_post_req(self):
        params = ('{"max_barcode_errors": 1.5, "barcode_type": "golay_12", '
                  '"max_bad_run_length": 3, "phred_offset": "auto", '
                  '"rev_comp": false, "phred_quality_threshold": 3, '
                  '"input_data": 1, "rev_comp_barcode": false, '
                  '"rev_comp_mapping_barcodes": false, '
                  '"min_per_read_length_fraction": 0.75, "sequence_max_n": 0}')
        obs = workflow_handler_post_req("*****@*****.**", 1, params)
        wf_id = obs['workflow_id']
        wf = ProcessingWorkflow(wf_id)
        nodes = list(wf.graph.nodes())
        self.assertEqual(len(nodes), 1)
        job = nodes[0]
        exp = {
            'status': 'success',
            'message': '',
            'workflow_id': wf_id,
            'job': {
                'id': job.id,
                'inputs': [1],
                'label': "Split libraries FASTQ",
                'outputs': [['demultiplexed', 'Demultiplexed']]
            }
        }
        self.assertEqual(obs, exp)
Example #3
def workflow_handler_post_req(user_id, command_id, params):
    """Creates a new workflow in the system

    Parameters
    ----------
    user_id : str
        The user creating the workflow
    command_id : int
        The first command to execute in the workflow
    params : str
        JSON representations of the parameters for the first command of
        the workflow

    Returns
    -------
    dict of objects
        A dictionary containing the command information
        {'status': str,
         'message': str,
         'workflow_id': int,
         'job': dict}
    """
    parameters = Parameters.load(Command(command_id), json_str=params)

    status = 'success'
    message = ''
    try:
        wf = ProcessingWorkflow.from_scratch(User(user_id), parameters)
    except Exception as exc:
        wf = None
        wf_id = None
        job_info = None
        status = 'error'
        message = str(exc)

    if wf is not None:
        # this is safe as we are creating the workflow for the first time
        # and there is only one node. Remember networkx doesn't assure order
        # of nodes
        job = list(wf.graph.nodes())[0]
        inputs = [a.id for a in job.input_artifacts]
        job_cmd = job.command
        wf_id = wf.id
        job_info = {
            'id': job.id,
            'inputs': inputs,
            'label': job_cmd.name,
            'outputs': job_cmd.outputs
        }

    return {
        'status': status,
        'message': message,
        'workflow_id': wf_id,
        'job': job_info
    }
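
Because failures are folded into the returned dict ('workflow_id' and 'job' come back as None with status 'error'), callers can branch on 'status' instead of wrapping the call in try/except. A hedged consumer sketch; the user id, command id and parameter JSON are placeholders mirroring the tests in this listing:

# Hypothetical values for illustration; a real call needs a registered user,
# a valid command id and the full parameter JSON for that command.
params_json = '{"input_data": 1}'  # placeholder; see the tests above
obs = workflow_handler_post_req('user@example.com', 1, params_json)
if obs['status'] == 'error':
    raise RuntimeError(obs['message'])  # 'workflow_id' and 'job' are None here
job_info = obs['job']  # {'id': ..., 'inputs': [...], 'label': ..., 'outputs': [...]}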
Example #4
    def test_workflow_handler_patch_req(self):
        # Create a new workflow so it is in construction
        exp_command = Command(1)
        json_str = (
            '{"input_data": 1, "max_barcode_errors": 1.5, '
            '"barcode_type": "golay_12", "max_bad_run_length": 3, '
            '"rev_comp": false, "phred_quality_threshold": 3, '
            '"rev_comp_barcode": false, "rev_comp_mapping_barcodes": false, '
            '"min_per_read_length_fraction": 0.75, "sequence_max_n": 0}')
        exp_params = Parameters.load(exp_command, json_str=json_str)
        exp_user = User('*****@*****.**')
        name = "Test processing workflow"

        # tests success
        wf = ProcessingWorkflow.from_scratch(exp_user,
                                             exp_params,
                                             name=name,
                                             force=True)

        graph = wf.graph
        nodes = list(graph.nodes())
        job_id = nodes[0].id
        value = {
            'dflt_params': 10,
            'connections': {
                job_id: {
                    'demultiplexed': 'input_data'
                }
            }
        }
        obs = workflow_handler_patch_req('add',
                                         '/%s/' % wf.id,
                                         req_value=dumps(value))
        new_jobs = set(wf.graph.nodes()) - set(nodes)
        self.assertEqual(len(new_jobs), 1)
        new_job = new_jobs.pop()
        exp = {
            'status': 'success',
            'message': '',
            'job': {
                'id': new_job.id,
                'inputs': [job_id],
                'label': 'Pick closed-reference OTUs',
                'outputs': [['OTU table', 'BIOM']]
            }
        }
        self.assertEqual(obs, exp)

        obs = workflow_handler_patch_req('remove',
                                         '/%s/%s/' % (wf.id, new_job.id))
        exp = {'status': 'success', 'message': ''}
        self.assertEqual(obs, exp)
        jobs = set(wf.graph.nodes()) - set(nodes)
        self.assertEqual(jobs, set())
Example #5
def workflow_run_post_req(workflow_id):
    """Submits the workflow for execution

    Parameters
    ----------
    workflow_id : str
        The workflow id

    Returns
    -------
    dict of {str: str}
        A dictionary of the form {'status': str, 'message': str}, in which
        status is the status of the request ('error' or 'success') and
        message is a human-readable string carrying the error message when
        status is 'error'.
    """
    try:
        wf = ProcessingWorkflow(workflow_id)
    except QiitaDBUnknownIDError:
        return {'status': 'error',
                'message': 'Workflow %s does not exist' % workflow_id}
    wf.submit()
    return {'status': 'success', 'message': ''}
Example #6
    def test_patch(self):
        # Create a new job - through a workflow since that is the only way
        # of creating jobs in the interface
        exp_command = Command(1)
        json_str = (
            '{"input_data": 1, "max_barcode_errors": 1.5, '
            '"barcode_type": "golay_12", "max_bad_run_length": 3, '
            '"rev_comp": false, "phred_quality_threshold": 3, '
            '"rev_comp_barcode": false, "rev_comp_mapping_barcodes": false, '
            '"min_per_read_length_fraction": 0.75, "sequence_max_n": 0}')
        exp_params = Parameters.load(exp_command, json_str=json_str)
        exp_user = User('*****@*****.**')
        name = "Test processing workflow"

        # tests success
        wf = ProcessingWorkflow.from_scratch(exp_user,
                                             exp_params,
                                             name=name,
                                             force=True)

        graph = wf.graph
        nodes = list(graph.nodes())
        job_id = nodes[0].id

        response = self.patch('/study/process/job/', {
            'op': 'remove',
            'path': job_id
        })
        self.assertEqual(response.code, 200)
        exp = {
            'status': 'error',
            'message': "Can't delete job %s. It is 'in_construction' "
                       "status. Please use /study/process/workflow/" % job_id
        }
        self.assertEqual(loads(response.body), exp)

        # Test success
        ProcessingJob(job_id)._set_error('Killed for testing')
        response = self.patch('/study/process/job/', {
            'op': 'remove',
            'path': job_id
        })
        self.assertEqual(response.code, 200)
        exp = {'status': 'success', 'message': ''}
        self.assertEqual(loads(response.body), exp)
Example #7
    def test_workflow_handler_post_req(self):
        next_id = get_count('qiita.processing_job_workflow_root') + 1
        obs = workflow_handler_post_req("*****@*****.**", 1,
                                        '{"input_data": 1}')
        wf = ProcessingWorkflow(next_id)
        nodes = list(wf.graph.nodes())
        self.assertEqual(len(nodes), 1)
        job = nodes[0]
        exp = {
            'status': 'success',
            'message': '',
            'workflow_id': next_id,
            'job': {
                'id': job.id,
                'inputs': [1],
                'label': "Split libraries FASTQ",
                'outputs': [['demultiplexed', 'Demultiplexed']]
            }
        }
        self.assertEqual(obs, exp)
Example #8
def workflow_handler_post_req(user_id, dflt_params_id, req_params):
    """Creates a new workflow in the system

    Parameters
    ----------
    user_id : str
        The user creating the workflow
    dflt_params_id : int
        The default parameters to use for the first command of the workflow
    req_params : str
        JSON representations of the required parameters for the first
        command of the workflow

    Returns
    -------
    dict of objects
        A dictionary containing the command information
        {'status': str,
         'message': str,
         'workflow_id': int,
         'job': dict}
    """
    dflt_params = DefaultParameters(dflt_params_id)
    req_params = loads(req_params)
    parameters = Parameters.from_default_params(dflt_params, req_params)
    wf = ProcessingWorkflow.from_scratch(User(user_id), parameters)
    # this is safe as we are creating the workflow for the first time and there
    # is only one node. Remember networkx doesn't assure order of nodes
    job = list(wf.graph.nodes())[0]
    inputs = [a.id for a in job.input_artifacts]
    job_cmd = job.command
    return {
        'status': 'success',
        'message': '',
        'workflow_id': wf.id,
        'job': {
            'id': job.id,
            'inputs': inputs,
            'label': job_cmd.name,
            'outputs': job_cmd.outputs
        }
    }
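
Unlike the variant in Example #3, this overload resolves a stored DefaultParameters set and only takes the command's required parameters as a JSON string, which is what the test in Example #7 exercises. A hedged call sketch; the email and ids are placeholders:

# Default parameter set 1 plus the single required parameter for the command;
# note this variant raises on failure rather than returning an error dict.
obs = workflow_handler_post_req('user@example.com', 1, '{"input_data": 1}')
print(obs['workflow_id'], obs['job']['label'])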
Example #9
    def test_workflow_handler_patch_req(self):
        # Create a new workflow so it is in construction
        exp_command = Command(1)
        json_str = (
            '{"input_data": 1, "max_barcode_errors": 1.5, '
            '"barcode_type": "golay_12", "max_bad_run_length": 3, '
            '"rev_comp": false, "phred_quality_threshold": 3, '
            '"rev_comp_barcode": false, "rev_comp_mapping_barcodes": false, '
            '"min_per_read_length_fraction": 0.75, "sequence_max_n": 0}')
        exp_params = Parameters.load(exp_command, json_str=json_str)
        exp_user = User('*****@*****.**')
        name = "Test processing workflow"

        # tests success
        wf = ProcessingWorkflow.from_scratch(
            exp_user, exp_params, name=name, force=True)

        graph = wf.graph
        nodes = list(graph.nodes())
        job_id = nodes[0].id
        value = {'dflt_params': 10,
                 'connections': {job_id: {'demultiplexed': 'input_data'}}}
        obs = workflow_handler_patch_req(
            'add', '/%s/' % wf.id, req_value=dumps(value))
        new_jobs = set(wf.graph.nodes()) - set(nodes)
        self.assertEqual(len(new_jobs), 1)
        new_job = new_jobs.pop()
        exp = {'status': 'success',
               'message': '',
               'job': {'id': new_job.id,
                       'inputs': [job_id],
                       'label': 'Pick closed-reference OTUs',
                       'outputs': [['OTU table', 'BIOM']]}}
        self.assertEqual(obs, exp)

        obs = workflow_handler_patch_req(
            'remove', '/%s/%s/' % (wf.id, new_job.id))
        exp = {'status': 'success', 'message': ''}
        self.assertEqual(obs, exp)
        jobs = set(wf.graph.nodes()) - set(nodes)
        self.assertEqual(jobs, set())
Example #10
    def test_patch(self):
        # Create a new job - through a workflow since that is the only way
        # of creating jobs in the interface
        exp_command = Command(1)
        json_str = (
            '{"input_data": 1, "max_barcode_errors": 1.5, '
            '"barcode_type": "golay_12", "max_bad_run_length": 3, '
            '"rev_comp": false, "phred_quality_threshold": 3, '
            '"rev_comp_barcode": false, "rev_comp_mapping_barcodes": false, '
            '"min_per_read_length_fraction": 0.75, "sequence_max_n": 0}')
        exp_params = Parameters.load(exp_command, json_str=json_str)
        exp_user = User('*****@*****.**')
        name = "Test processing workflow"

        # tests success
        wf = ProcessingWorkflow.from_scratch(
            exp_user, exp_params, name=name, force=True)

        graph = wf.graph
        nodes = list(graph.nodes())
        job_id = nodes[0].id

        response = self.patch('/study/process/job/',
                              {'op': 'remove', 'path': job_id})
        self.assertEqual(response.code, 200)
        exp = {'status': 'error',
               'message': "Can't delete job %s. It is 'in_construction' "
                          "status. Please use /study/process/workflow/"
                          % job_id}
        self.assertEqual(loads(response.body), exp)

        # Test success
        ProcessingJob(job_id)._set_error('Killed for testing')
        response = self.patch('/study/process/job/',
                              {'op': 'remove', 'path': job_id})
        self.assertEqual(response.code, 200)
        exp = {'status': 'success',
               'message': ''}
        self.assertEqual(loads(response.body), exp)
Example #11
def workflow_handler_post_req(user_id, dflt_params_id, req_params):
    """Creates a new workflow in the system

    Parameters
    ----------
    user_id : str
        The user creating the workflow
    dflt_params_id : int
        The default parameters to use for the first command of the workflow
    req_params : str
        JSON representations of the required parameters for the first
        command of the workflow

    Returns
    -------
    dict of objects
        A dictionary containing the command information
        {'status': str,
         'message': str,
         'workflow_id': int,
         'job': dict}
    """
    dflt_params = DefaultParameters(dflt_params_id)
    req_params = loads(req_params)
    parameters = Parameters.from_default_params(dflt_params, req_params)
    wf = ProcessingWorkflow.from_scratch(User(user_id), parameters)
    # this is safe as we are creating the workflow for the first time and there
    # is only one node. Remember networkx doesn't assure order of nodes
    job = list(wf.graph.nodes())[0]
    inputs = [a.id for a in job.input_artifacts]
    job_cmd = job.command
    return {'status': 'success',
            'message': '',
            'workflow_id': wf.id,
            'job': {'id': job.id,
                    'inputs': inputs,
                    'label': job_cmd.name,
                    'outputs': job_cmd.outputs}}
Example #12
    def test_get_analysis_graph_handler(self):
        response = self.get('/analysis/description/1/graph/')
        self.assertEqual(response.code, 200)
        # The job id is randomly generated in the test environment. Gather
        # it here. There is only 1 job in the first artifact of the analysis
        job_id = Analysis(1).artifacts[0].jobs()[0].id
        obs = loads(response.body)
        exp = {'edges': [[8, job_id], [job_id, 9]],
               'nodes': [
                    ['job', 'job', job_id, 'Single Rarefaction', 'success'],
                    ['artifact', 'BIOM', 9, 'noname\n(BIOM)', 'artifact'],
                    ['artifact', 'BIOM', 8, 'noname\n(BIOM)', 'artifact']],
               'workflow': None}
        self.assertItemsEqual(obs, exp)
        self.assertItemsEqual(obs['edges'], exp['edges'])
        self.assertItemsEqual(obs['nodes'], exp['nodes'])
        self.assertIsNone(obs['workflow'])

        # Create a new analysis with 2 starting BIOMs to be able to test
        # the different if statements of the request
        BaseHandler.get_current_user = Mock(
            return_value=User('*****@*****.**'))
        user = User('*****@*****.**')
        dflt_analysis = user.default_analysis
        dflt_analysis.add_samples(
            {4: ['1.SKB8.640193', '1.SKD8.640184', '1.SKB7.640196'],
             6: ['1.SKB8.640193', '1.SKD8.640184', '1.SKB7.640196']})
        args = {'name': 'New Test Graph Analysis', 'description': 'Desc'}
        response = self.post('/analysis/create/', args)
        new_id = response.effective_url.split('/')[-2]
        a = Analysis(new_id)
        # Wait until all the jobs are done so the BIOM tables exist
        for j in a.jobs:
            wait_for_processing_job(j.id)

        artifacts = a.artifacts
        self.assertEqual(len(artifacts), 2)

        # Create a new workflow starting on the first artifact
        # Magic number 9 -> Summarize Taxa command
        params = Parameters.load(
            Command(9), values_dict={'metadata_category': 'None',
                                     'sort': 'False',
                                     'biom_table': artifacts[0].id})
        wf = ProcessingWorkflow.from_scratch(user, params)

        # There is only one job in the workflow
        job_id = list(wf.graph.nodes())[0].id

        response = self.get('/analysis/description/%s/graph/' % new_id)
        self.assertEqual(response.code, 200)
        obs = loads(response.body)
        exp = {'edges': [[artifacts[0].id, job_id],
                         [job_id, '%s:taxa_summary' % job_id]],
               'nodes': [
                    ['job', 'job', job_id, 'Summarize Taxa',
                     'in_construction'],
                    ['artifact', 'BIOM', artifacts[0].id, 'noname\n(BIOM)',
                     'artifact'],
                    ['artifact', 'BIOM', artifacts[1].id, 'noname\n(BIOM)',
                     'artifact'],
                    ['type', 'taxa_summary', '%s:taxa_summary' % job_id,
                     'taxa_summary\n(taxa_summary)', 'type']],
               'workflow': wf.id}
        # Check that the keys are the same
        self.assertItemsEqual(obs, exp)
        # Check the edges
        self.assertItemsEqual(obs['edges'], exp['edges'])
        # Check the nodes
        self.assertItemsEqual(obs['nodes'], exp['nodes'])
        # Check the workflow
        self.assertEqual(obs['workflow'], exp['workflow'])

        # Add a job to the second BIOM to make sure that the edges and nodes
        # are respected. Magic number 12 -> Single Rarefaction
        job2 = wf.add(
            DefaultParameters(16), req_params={'depth': '100',
                                               'biom_table': artifacts[1].id})
        job_id_2 = job2.id

        response = self.get('/analysis/description/%s/graph/' % new_id)
        self.assertEqual(response.code, 200)
        obs = loads(response.body)
        exp = {'edges': [[artifacts[0].id, job_id],
                         [job_id, '%s:taxa_summary' % job_id],
                         [artifacts[1].id, job_id_2],
                         [job_id_2, '%s:rarefied_table' % job_id_2]],
               'nodes': [
                    ['job', 'job', job_id, 'Summarize Taxa',
                     'in_construction'],
                    ['job', 'job', job_id_2, 'Single Rarefaction',
                     'in_construction'],
                    ['artifact', 'BIOM', artifacts[0].id, 'noname\n(BIOM)',
                     'artifact'],
                    ['artifact', 'BIOM', artifacts[1].id, 'noname\n(BIOM)',
                     'artifact'],
                    ['type', 'taxa_summary', '%s:taxa_summary' % job_id,
                     'taxa_summary\n(taxa_summary)', 'type'],
                    ['type', 'BIOM', '%s:rarefied_table' % job_id_2,
                     'rarefied_table\n(BIOM)', 'type']],
               'workflow': wf.id}
        # Check that the keys are the same
        self.assertItemsEqual(obs, exp)
        # Check the edges
        self.assertItemsEqual(obs['edges'], exp['edges'])
        # Check the nodes
        self.assertItemsEqual(obs['nodes'], exp['nodes'])
        # Check the workflow
        self.assertEqual(obs['workflow'], exp['workflow'])

        # Add a second Workflow to the second artifact to force the raise of
        # the error. This situation should never happen when using
        # the interface
        wf.remove(job2)
        params = Parameters.load(
            Command(9), values_dict={'metadata_category': 'None',
                                     'sort': 'False',
                                     'biom_table': artifacts[1].id})
        wf = ProcessingWorkflow.from_scratch(user, params)
        response = self.get('/analysis/description/%s/graph/' % new_id)
        self.assertEqual(response.code, 500)
Example #13
def workflow_handler_patch_req(req_op,
                               req_path,
                               req_value=None,
                               req_from=None):
    """Patches a workflow

    Parameters
    ----------
    req_op : str
        The operation to perform on the workflow
    req_path : str
        Path parameter with the workflow to patch
    req_value : str, optional
        The value that needs to be modified
    req_from : str, optional
        The original path of the element

    Returns
    -------
    dict of {str: str}
        A dictionary of the form {'status': str, 'message': str}, in which
        status is the status of the request ('error' or 'success') and
        message is a human-readable string carrying the error message when
        status is 'error'.
    """
    if req_op == 'add':
        req_path = [v for v in req_path.split('/') if v]
        if len(req_path) != 1:
            return {'status': 'error', 'message': 'Incorrect path parameter'}
        req_path = req_path[0]
        try:
            wf = ProcessingWorkflow(req_path)
        except QiitaDBUnknownIDError:
            return {
                'status': 'error',
                'message': 'Workflow %s does not exist' % req_path
            }

        req_value = loads(req_value)
        dflt_params = DefaultParameters(req_value['dflt_params'])
        req_params = req_value.get('req_params', None)
        opt_params = req_value.get('opt_params', None)
        connections = {
            ProcessingJob(k): v
            for k, v in req_value['connections'].items()
        }
        job = wf.add(dflt_params,
                     connections=connections,
                     req_params=req_params,
                     opt_params=opt_params)
        job_cmd = job.command
        return {
            'status': 'success',
            'message': '',
            'job': {
                'id': job.id,
                'inputs': list(req_value['connections'].keys()),
                'label': job_cmd.name,
                'outputs': job_cmd.outputs
            }
        }
    elif req_op == 'remove':
        req_path = [v for v in req_path.split('/') if v]
        if len(req_path) != 2:
            return {'status': 'error', 'message': 'Incorrect path parameter'}
        wf_id = req_path[0]
        job_id = req_path[1]
        wf = ProcessingWorkflow(wf_id)
        job = ProcessingJob(job_id)
        wf.remove(job, cascade=True)
        return {'status': 'success', 'message': ''}
    else:
        return {
            'status': 'error',
            'message': 'Operation "%s" not supported. Current supported '
                       'operations: add, remove' % req_op
        }
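
For reference, a hedged sketch of the two request shapes this patch handler accepts; the ids below are placeholders mirroring the tests in this listing:

from json import dumps

wf_id = 1                                        # placeholder workflow id
job_id = '6d368e16-2242-4cf8-87b4-a5dc40bc890b'  # placeholder job id

# 'add': path is '/<workflow_id>/' and req_value describes the new job.
value = {
    'dflt_params': 10,  # DefaultParameters id
    'connections': {job_id: {'demultiplexed': 'input_data'}},
    # 'req_params' and 'opt_params' are optional and default to None
}
obs = workflow_handler_patch_req('add', '/%s/' % wf_id, req_value=dumps(value))

# 'remove': path is '/<workflow_id>/<job_id>/' and no req_value is needed.
obs = workflow_handler_patch_req('remove', '/%s/%s/' % (wf_id, job_id))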
Example #14
    def test_job_ajax_patch_req(self):
        # Create a new job - through a workflow since that is the only way
        # of creating jobs in the interface
        exp_command = Command(1)
        json_str = (
            '{"input_data": 1, "max_barcode_errors": 1.5, '
            '"barcode_type": "golay_12", "max_bad_run_length": 3, '
            '"rev_comp": false, "phred_quality_threshold": 3, '
            '"rev_comp_barcode": false, "rev_comp_mapping_barcodes": false, '
            '"min_per_read_length_fraction": 0.75, "sequence_max_n": 0}')
        exp_params = Parameters.load(exp_command, json_str=json_str)
        exp_user = User('*****@*****.**')
        name = "Test processing workflow"

        # tests success
        wf = ProcessingWorkflow.from_scratch(
            exp_user, exp_params, name=name, force=True)

        graph = wf.graph
        nodes = list(graph.nodes())
        job_id = nodes[0].id

        # Incorrect path parameter
        obs = job_ajax_patch_req('remove', '/%s/somethingelse' % job_id)
        exp = {'status': 'error',
               'message': 'Incorrect path parameter: missing job id'}
        self.assertEqual(obs, exp)

        obs = job_ajax_patch_req('remove', '/')
        exp = {'status': 'error',
               'message': 'Incorrect path parameter: missing job id'}
        self.assertEqual(obs, exp)

        # Job id is not like a job id
        obs = job_ajax_patch_req('remove', '/notAJobId')
        exp = {'status': 'error',
               'message': 'Incorrect path parameter: '
                          'notAJobId is not a recognized job id'}
        self.assertEqual(obs, exp)

        # Job doesn't exist
        obs = job_ajax_patch_req('remove',
                                 '/6d368e16-2242-4cf8-87b4-a5dc40bc890b')
        exp = {'status': 'error',
               'message': 'Incorrect path parameter: '
                          '6d368e16-2242-4cf8-87b4-a5dc40bc890b is not a '
                          'recognized job id'}
        self.assertEqual(obs, exp)

        # in_construction job
        obs = job_ajax_patch_req('remove', '/%s' % job_id)
        exp = {'status': 'error',
               'message': "Can't delete job %s. It is 'in_construction' "
                          "status. Please use /study/process/workflow/"
                          % job_id}
        self.assertEqual(obs, exp)

        # job status != 'error'
        job = ProcessingJob(job_id)
        job._set_status('queued')
        obs = job_ajax_patch_req('remove', '/%s' % job_id)
        exp = {'status': 'error',
               'message': 'Only jobs in "error" status can be deleted.'}
        self.assertEqual(obs, exp)

        # Operation not supported
        job._set_status('queued')
        obs = job_ajax_patch_req('add', '/%s' % job_id)
        exp = {'status': 'error',
               'message': 'Operation "add" not supported. Current supported '
                          'operations: remove'}
        self.assertEqual(obs, exp)

        # Test success
        job._set_error('Killed for testing')
        obs = job_ajax_patch_req('remove', '/%s' % job_id)
        exp = {'status': 'success',
               'message': ''}
        self.assertEqual(obs, exp)
Example #15
    def test_get_analysis_graph_handler(self):
        response = self.get('/analysis/description/1/graph/')
        self.assertEqual(response.code, 200)
        # The job id is randomly generated in the test environment. Gather
        # it here. There is only 1 job in the first artifact of the analysis
        job_id = Analysis(1).artifacts[0].jobs()[0].id
        obs = loads(response.body)
        exp = {
            'edges': [[8, job_id], [job_id, 9]],
            'nodes': [['job', 'job', job_id, 'Single Rarefaction', 'success'],
                      ['artifact', 'BIOM', 9, 'noname\n(BIOM)', 'artifact'],
                      ['artifact', 'BIOM', 8, 'noname\n(BIOM)', 'artifact']],
            'workflow': None
        }
        self.assertItemsEqual(obs, exp)
        self.assertItemsEqual(obs['edges'], exp['edges'])
        self.assertItemsEqual(obs['nodes'], exp['nodes'])
        self.assertIsNone(obs['workflow'])

        # Create a new analysis with 2 starting BIOMs to be able to test
        # the different if statements of the request
        BaseHandler.get_current_user = Mock(
            return_value=User('*****@*****.**'))
        user = User('*****@*****.**')
        dflt_analysis = user.default_analysis
        dflt_analysis.add_samples({
            4: ['1.SKB8.640193', '1.SKD8.640184', '1.SKB7.640196'],
            6: ['1.SKB8.640193', '1.SKD8.640184', '1.SKB7.640196']
        })
        args = {'name': 'New Test Graph Analysis', 'description': 'Desc'}
        response = self.post('/analysis/create/', args)
        new_id = response.effective_url.split('/')[-2]
        a = Analysis(new_id)
        # Wait until all the jobs are done so the BIOM tables exist
        for j in a.jobs:
            wait_for_processing_job(j.id)

        artifacts = a.artifacts
        self.assertEqual(len(artifacts), 2)

        # Create a new workflow starting on the first artifact
        # Magic number 9 -> Summarize Taxa command
        params = Parameters.load(Command(9),
                                 values_dict={
                                     'metadata_category': 'None',
                                     'sort': 'False',
                                     'biom_table': artifacts[0].id
                                 })
        wf = ProcessingWorkflow.from_scratch(user, params)

        # There is only one job in the workflow
        job_id = list(wf.graph.nodes())[0].id

        response = self.get('/analysis/description/%s/graph/' % new_id)
        self.assertEqual(response.code, 200)
        obs = loads(response.body)
        exp = {
            'edges': [[artifacts[0].id, job_id],
                      [job_id, '%s:taxa_summary' % job_id]],
            'nodes': [
                ['job', 'job', job_id, 'Summarize Taxa', 'in_construction'],
                ['artifact', 'BIOM', artifacts[0].id, 'noname\n(BIOM)',
                 'artifact'],
                ['artifact', 'BIOM', artifacts[1].id, 'noname\n(BIOM)',
                 'artifact'],
                ['type', 'taxa_summary', '%s:taxa_summary' % job_id,
                 'taxa_summary\n(taxa_summary)', 'type']],
            'workflow': wf.id
        }
        # Check that the keys are the same
        self.assertItemsEqual(obs, exp)
        # Check the edges
        self.assertItemsEqual(obs['edges'], exp['edges'])
        # Check the nodes
        self.assertItemsEqual(obs['nodes'], exp['nodes'])
        # Check the workflow
        self.assertEqual(obs['workflow'], exp['workflow'])

        # Add a job to the second BIOM to make sure that the edges and nodes
        # are respected. Magic number 12 -> Single Rarefaction
        job2 = wf.add(DefaultParameters(16),
                      req_params={
                          'depth': '100',
                          'biom_table': artifacts[1].id
                      })
        job_id_2 = job2.id

        response = self.get('/analysis/description/%s/graph/' % new_id)
        self.assertEqual(response.code, 200)
        obs = loads(response.body)
        exp = {
            'edges': [[artifacts[0].id, job_id],
                      [job_id, '%s:taxa_summary' % job_id],
                      [artifacts[1].id, job_id_2],
                      [job_id_2, '%s:rarefied_table' % job_id_2]],
            'nodes': [
                ['job', 'job', job_id, 'Summarize Taxa', 'in_construction'],
                ['job', 'job', job_id_2, 'Single Rarefaction',
                 'in_construction'],
                ['artifact', 'BIOM', artifacts[0].id, 'noname\n(BIOM)',
                 'artifact'],
                ['artifact', 'BIOM', artifacts[1].id, 'noname\n(BIOM)',
                 'artifact'],
                ['type', 'taxa_summary', '%s:taxa_summary' % job_id,
                 'taxa_summary\n(taxa_summary)', 'type'],
                ['type', 'BIOM', '%s:rarefied_table' % job_id_2,
                 'rarefied_table\n(BIOM)', 'type']],
            'workflow': wf.id
        }
        # Check that the keys are the same
        self.assertItemsEqual(obs, exp)
        # Check the edges
        self.assertItemsEqual(obs['edges'], exp['edges'])
        # Check the nodes
        self.assertItemsEqual(obs['nodes'], exp['nodes'])
        # Check the workflow
        self.assertEqual(obs['workflow'], exp['workflow'])

        # Add a second Workflow to the second artifact to force the raise of
        # the error. This situation should never happen when using
        # the interface
        wf.remove(job2)
        params = Parameters.load(Command(9),
                                 values_dict={
                                     'metadata_category': 'None',
                                     'sort': 'False',
                                     'biom_table': artifacts[1].id
                                 })
        wf = ProcessingWorkflow.from_scratch(user, params)
        response = self.get('/analysis/description/%s/graph/' % new_id)
        self.assertEqual(response.code, 500)
Example #16
    def test_job_ajax_patch_req(self):
        # Create a new job - through a workflow since that is the only way
        # of creating jobs in the interface
        exp_command = Command(1)
        json_str = (
            '{"input_data": 1, "max_barcode_errors": 1.5, '
            '"barcode_type": "golay_12", "max_bad_run_length": 3, '
            '"rev_comp": false, "phred_quality_threshold": 3, '
            '"rev_comp_barcode": false, "rev_comp_mapping_barcodes": false, '
            '"min_per_read_length_fraction": 0.75, "sequence_max_n": 0}')
        exp_params = Parameters.load(exp_command, json_str=json_str)
        exp_user = User('*****@*****.**')
        name = "Test processing workflow"

        # tests success
        wf = ProcessingWorkflow.from_scratch(exp_user,
                                             exp_params,
                                             name=name,
                                             force=True)

        graph = wf.graph
        nodes = list(graph.nodes())
        job_id = nodes[0].id

        # Incorrect path parameter
        obs = job_ajax_patch_req('remove', '/%s/somethingelse' % job_id)
        exp = {
            'status': 'error',
            'message': 'Incorrect path parameter: missing job id'
        }
        self.assertEqual(obs, exp)

        obs = job_ajax_patch_req('remove', '/')
        exp = {
            'status': 'error',
            'message': 'Incorrect path parameter: missing job id'
        }
        self.assertEqual(obs, exp)

        # Job id is not like a job id
        obs = job_ajax_patch_req('remove', '/notAJobId')
        exp = {
            'status': 'error',
            'message': 'Incorrect path parameter: '
                       'notAJobId is not a recognized job id'
        }
        self.assertEqual(obs, exp)

        # Job doesn't exist
        obs = job_ajax_patch_req('remove',
                                 '/6d368e16-2242-4cf8-87b4-a5dc40bc890b')
        exp = {
            'status': 'error',
            'message': 'Incorrect path parameter: '
                       '6d368e16-2242-4cf8-87b4-a5dc40bc890b is not a '
                       'recognized job id'
        }
        self.assertEqual(obs, exp)

        # in_construction job
        obs = job_ajax_patch_req('remove', '/%s' % job_id)
        exp = {
            'status': 'error',
            'message': "Can't delete job %s. It is 'in_construction' "
                       "status. Please use /study/process/workflow/" % job_id
        }
        self.assertEqual(obs, exp)

        # job status != 'error'
        job = ProcessingJob(job_id)
        job._set_status('queued')
        obs = job_ajax_patch_req('remove', '/%s' % job_id)
        exp = {
            'status': 'error',
            'message': 'Only jobs in "error" status can be deleted.'
        }
        self.assertEqual(obs, exp)

        # Operation not supported
        job._set_status('queued')
        obs = job_ajax_patch_req('add', '/%s' % job_id)
        exp = {
            'status': 'error',
            'message': 'Operation "add" not supported. Current supported '
                       'operations: remove'
        }
        self.assertEqual(obs, exp)

        # Test success
        job._set_error('Killed for testing')
        obs = job_ajax_patch_req('remove', '/%s' % job_id)
        exp = {'status': 'success', 'message': ''}
        self.assertEqual(obs, exp)
Example #17
def workflow_handler_patch_req(req_op, req_path, req_value=None,
                               req_from=None):
    """Patches an ontology

    Parameters
    ----------
    req_op : str
        The operation to perform on the ontology
    req_path : str
        The ontology to patch
    req_value : str, optional
        The value that needs to be modified
    req_from : str, optional
        The original path of the element

    Returns
    -------
    dict of {str: str}
        A dictionary of the form {'status': str, 'message': str}, in which
        status is the status of the request ('error' or 'success') and
        message is a human-readable string carrying the error message when
        status is 'error'.
    """
    if req_op == 'add':
        req_path = [v for v in req_path.split('/') if v]
        if len(req_path) != 1:
            return {'status': 'error',
                    'message': 'Incorrect path parameter'}
        req_path = req_path[0]
        try:
            wf = ProcessingWorkflow(req_path)
        except QiitaDBUnknownIDError:
            return {'status': 'error',
                    'message': 'Workflow %s does not exist' % req_path}

        req_value = loads(req_value)
        dflt_params = DefaultParameters(req_value['dflt_params'])
        req_params = req_value.get('req_params', None)
        opt_params = req_value.get('opt_params', None)
        connections = {ProcessingJob(k): v
                       for k, v in req_value['connections'].items()}
        job = wf.add(dflt_params, connections=connections,
                     req_params=req_params, opt_params=opt_params)
        job_cmd = job.command
        return {'status': 'success',
                'message': '',
                'job': {'id': job.id,
                        'inputs': list(req_value['connections'].keys()),
                        'label': job_cmd.name,
                        'outputs': job_cmd.outputs}}
    elif req_op == 'remove':
        req_path = [v for v in req_path.split('/') if v]
        if len(req_path) != 2:
            return {'status': 'error',
                    'message': 'Incorrect path parameter'}
        wf_id = req_path[0]
        job_id = req_path[1]
        wf = ProcessingWorkflow(wf_id)
        job = ProcessingJob(job_id)
        wf.remove(job, cascade=True)
        return {'status': 'success',
                'message': ''}
    else:
        return {'status': 'error',
                'message': 'Operation "%s" not supported. Current supported '
                           'operations: add, remove' % req_op}