Example No. 1
def show_run(run):
    """Show workflow run information."""
    with service() as api:
        doc = api.runs().get_run(run_id=run)
    click.echo('ID: {}'.format(doc[labels.RUN_ID]))
    if labels.RUN_STARTED in doc:
        click.echo('Started at: {}'.format(doc[labels.RUN_STARTED][:19]))
    if labels.RUN_FINISHED in doc:
        click.echo('Finished at: {}'.format(doc[labels.RUN_FINISHED][:19]))
    click.echo('State: {}'.format(doc[labels.RUN_STATE]))
    # Get index of parameters. The index contains the parameter name
    # and type
    parameters = ParameterIndex.from_dict(doc[labels.RUN_PARAMETERS])
    click.echo('\nArguments:')
    for arg in doc['arguments']:
        para = parameters[arg['name']]
        if para.is_file():
            file_id, target_path = deserialize_fh(arg['value'])
            value = '{} ({})'.format(file_id, target_path)
        else:
            value = arg['value']
        click.echo('  {} = {}'.format(para.name, value))
    if labels.RUN_ERRORS in doc:
        click.echo('\nMessages:')
        for msg in doc[labels.RUN_ERRORS]:
            click.echo('  {}'.format(msg))
    elif labels.RUN_FILES in doc:
        click.echo('\nFiles:')
        for res in doc[labels.RUN_FILES]:
            click.echo('  {} ({})'.format(res[flbls.FILE_ID], res[flbls.FILE_NAME]))
Example No. 2
def start_run(ctx, group, configfile):
    """Start new workflow run."""
    group_id = ctx.obj.get_group(ctx.params)
    config = factory.read_config(configfile) if configfile else None
    with service() as api:
        doc = api.groups().get_group(group_id=group_id)
        config = config if config else doc[glbls.ENGINE_CONFIG]
        # Create list of file descriptors for uploaded files that are included
        # in the submission handle
        files = []
        for fh in doc[glbls.GROUP_UPLOADS]:
            files.append((
                fh[flbls.FILE_ID],
                fh[flbls.FILE_NAME],
                fh[flbls.FILE_DATE][:19]
            ))
        # Create list of additional user-provided template parameters
        parameters = ParameterIndex.from_dict(doc[glbls.GROUP_PARAMETERS])
        # Read values for all parameters.
        user_input = read(parameters.sorted(), files=files)
        args = [serialize_arg(key, val) for key, val in user_input.items()]
        # Start the run and print returned run state information.
        doc = api.runs().start_run(group_id=group_id, arguments=args, config=config)
        run_id = doc[labels.RUN_ID]
        run_state = doc[labels.RUN_STATE]
        click.echo('started run {} is {}'.format(run_id, run_state))
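The argument list that start_run() sends mirrors what show_run() in Example No. 1 reads back: each entry exposes a 'name' and a 'value' key. Below is a minimal sketch of that serialization with a hypothetical stand-in for serialize_arg, inferred from Example No. 1 rather than taken from the library, and placeholder parameter names and values.

def serialize_arg(name, value):
    # Hypothetical stand-in, inferred from Example No. 1 which reads
    # arg['name'] and arg['value']; the real helper may attach more fields.
    return {'name': name, 'value': value}

user_input = {'greeting': 'Hello World', 'sleeptime': 2}  # placeholder values
args = [serialize_arg(key, val) for key, val in user_input.items()]
# args == [{'name': 'greeting', 'value': 'Hello World'},
#          {'name': 'sleeptime', 'value': 2}]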
Example No. 3
    def __init__(self, workflow_id: str, group_id: str, service: APIFactory):
        """Initialize the required identifier and the API factory.

        Reads all metadata for the given workflow during initialization and
        maintains a copy in memory.

        Parameters
        ----------
        workflow_id: string
            Unique workflow identifier.
        group_id: string
            Unique workflow group identifier.
        service: flowserv.client.api.APIFactory
            Factory to create instances of the service API.
        """
        self.workflow_id = workflow_id
        self.group_id = group_id
        self.service = service
        # Get application properties from the database.
        with self.service() as api:
            wf = api.workflows().get_workflow(self.workflow_id)
            grp = api.groups().get_group(group_id=self.group_id)
        self._name = wf.get(wflbls.WORKFLOW_NAME)
        self._description = wf.get(wflbls.WORKFLOW_DESCRIPTION)
        self._instructions = wf.get(wflbls.WORKFLOW_INSTRUCTIONS)
        self._parameters = ParameterIndex.from_dict(
            grp[glbls.GROUP_PARAMETERS])
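Because the constructor caches the workflow metadata, later lookups need no further API round trips. A minimal sketch of how the cached fields could be exposed as read-only properties on the same class; the property names are illustrative and not taken from the flowserv API.

    @property
    def name(self):
        # Workflow name cached during initialization.
        return self._name

    @property
    def description(self):
        # Optional workflow description cached during initialization.
        return self._description

    @property
    def parameters(self):
        # ParameterIndex for the workflow group, cached at construction time.
        return self._parameters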
Example No. 4
def test_validate_arguments():
    """Test validating a given set of arguments against the parameters in a
    workflow template.
    """
    parameters = ParameterIndex.from_dict([
        String(name='A', label='P1', index=0, required=True).to_dict(),
        String(name='B', label='P2', index=1, default='X').to_dict()
    ])
    template = WorkflowTemplate(workflow_spec=dict(), parameters=parameters)
    template.validate_arguments({'A': 1, 'B': 0})
    template.validate_arguments({'A': 1})
    with pytest.raises(err.MissingArgumentError):
        template.validate_arguments({'B': 1})
def test_parameter_index_serialization():
    """Test generating parameter index from serializations."""
    p1 = String(name='0', label='P1', index=1)
    p2 = String(name='1', label='P2', index=0)
    doc = ParameterIndex.from_dict([p1.to_dict(), p2.to_dict()]).to_dict()
    parameters = ParameterIndex.from_dict(doc)
    assert len(parameters) == 2
    assert '0' in parameters
    assert '1' in parameters
    assert [p.name for p in parameters.sorted()] == ['1', '0']
    # Error case: Duplicate parameter.
    with pytest.raises(err.InvalidTemplateError):
        ParameterIndex.from_dict([p1.to_dict(), p1.to_dict()])
    # Error case: Unknown parameter type.
    doc = p1.to_dict()
    doc['dtype'] = 'unknown'
    with pytest.raises(err.InvalidParameterError):
        ParameterIndex.from_dict([doc])
Example No. 6
def start_run(ctx, submission):
    """Start new submission run."""
    s_id = submission if submission else config.SUBMISSION_ID()
    if s_id is None:
        click.echo('no submission specified')
        return
    try:
        url = ctx.obj['URLS'].get_submission(submission_id=s_id)
        headers = ctx.obj['HEADERS']
        r = requests.get(url, headers=headers)
        r.raise_for_status()
        body = r.json()
        # Create list of file descriptors for uploaded files that are included
        # in the submission handle
        files = []
        for fh in body['files']:
            files.append((fh['id'], fh['name'], fh['createdAt'][:19]))
        # Create list of additional user-provided template parameters
        parameters = ParameterIndex.from_dict(body['parameters'])
        # Read values for all parameters
        args = read(parameters.sorted(), files=files)
        data = {'arguments': [ARG(key, val) for key, val in args.items()]}
        url = ctx.obj['URLS'].start_run(submission_id=s_id)
        r = requests.post(url, json=data, headers=headers)
        r.raise_for_status()
        body = r.json()
        if ctx.obj['RAW']:
            click.echo(json.dumps(body, indent=4))
        else:
            run_id = body['id']
            run_state = body['state']
            click.echo('run {} in state {}'.format(run_id, run_state))
    except (requests.ConnectionError, requests.HTTPError) as ex:
        click.echo('{}'.format(ex))
    except (ValueError, IOError, OSError) as ex:
        click.echo('{}'.format(ex))
Example No. 7
    @classmethod
    def from_dict(cls, doc, validate=True):
        """Create an instance of the workflow template from a dictionary
        serialization. The structure of the dictionary is expected to be the
        same as generated by the to_dict() method of this class. The only
        mandatory element in the dictionary is the workflow specification.

        Parameters
        ----------
        doc: dict
            Dictionary serialization of a workflow template
        validate: bool, optional
            Validate template parameter declarations against the parameter
            schema if this flag is True.

        Returns
        -------
        flowserv.model.template.base.WorkflowTemplate

        Raises
        ------
        flowserv.error.InvalidTemplateError
        flowserv.error.UnknownParameterError
        """
        # Ensure that the mandatory elements are present. At this point, only
        # the workflow specification is mandatory.
        if validate:
            if 'workflow' not in doc:
                raise err.InvalidTemplateError("missing element 'workflow'")
        # -- Workflow specification -------------------------------------------
        workflow_spec = doc['workflow']
        # -- Parameter declarations -------------------------------------------
        # Add given parameter declarations to the parameter list. Ensure that
        # all default values are set
        parameters = ParameterIndex.from_dict(
            doc.get('parameters', dict()),
            validate=validate
        )
        # Ensure that the workflow specification does not reference
        # undefined parameters if validate flag is True.
        if validate:
            for key in tp.get_parameter_references(workflow_spec):
                if key not in parameters:
                    raise err.UnknownParameterError(key)
        # -- Post-processing task ---------------------------------------------
        postproc_spec = None
        if 'postproc' in doc:
            postproc_spec = doc['postproc']
            if validate:
                util.validate_doc(
                    doc=postproc_spec,
                    mandatory=['workflow'],
                    optional=['inputs', 'outputs']
                )
                util.validate_doc(
                    doc=postproc_spec.get('inputs', {'files': ''}),
                    mandatory=['files'],
                    optional=['runs']
                )
        # -- Parameter module information -------------------------------------
        parameter_groups = None
        if 'parameterGroups' in doc:
            parameter_groups = list()
            for m in doc['parameterGroups']:
                parameter_groups.append(ParameterGroup.from_dict(m, validate=validate))
        # -- Output file specifications --------------------------------------
        outputs = None
        if 'outputs' in doc:
            outputs = [WorkflowOutputFile.from_dict(
                f,
                validate=validate
            ) for f in doc['outputs']]
        # -- Result schema ---------------------------------------------------
        schema = ResultSchema.from_dict(doc.get('results'), validate=validate)
        # Return template instance
        return cls(
            workflow_spec=workflow_spec,
            postproc_spec=postproc_spec,
            parameters=parameters,
            result_schema=schema,
            parameter_groups=parameter_groups,
            outputs=outputs
        )
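To illustrate the serialization that from_dict() expects: only the 'workflow' element is mandatory, while 'parameters', 'postproc', 'parameterGroups', 'outputs' and 'results' are optional. Below is a minimal sketch, assuming the String declaration from Example No. 4 and the WorkflowTemplate class are already imported; the inner workflow specification is a placeholder rather than a real engine spec.

doc = {
    # Mandatory workflow specification (contents are engine-specific;
    # this inner dictionary is only a placeholder).
    'workflow': {'steps': []},
    # Optional parameter declarations, parsed via ParameterIndex.from_dict().
    'parameters': [
        String(name='greeting', label='Greeting', index=0).to_dict()
    ]
}
template = WorkflowTemplate.from_dict(doc, validate=True)
template.validate_arguments({'greeting': 'Hello World'})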