Example #1
    def __module_from_dict(self, trans, step_dict, exact_tools=False):
        """ Create a WorkflowStep model object and corresponding module
        representing type-specific functionality from the incoming dictionary.
        """
        step = model.WorkflowStep()
        # TODO: Consider handling position inside module.
        step.position = step_dict['position']
        if "uuid" in step_dict and step_dict['uuid'] != "None":
            step.uuid = step_dict["uuid"]
        if "label" in step_dict:
            step.label = step_dict["label"]

        step_type = step_dict.get("type", None)
        if step_type == "subworkflow":
            subworkflow = self.__load_subworkflow_from_step_dict(
                trans, step_dict)
            step_dict["subworkflow"] = subworkflow

        module = module_factory.from_dict(trans,
                                          step_dict,
                                          exact_tools=exact_tools)
        module.save_to_step(step)

        annotation = step_dict['annotation']
        if annotation:
            annotation = sanitize_html(annotation, 'utf-8', 'text/html')
            self.add_item_annotation(trans.sa_session, trans.get_user(), step,
                                     annotation)

        # Stick this in the step temporarily
        step.temp_input_connections = step_dict['input_connections']

        return module, step
Example #2
    def __module_from_dict(self, trans, step_dict, secure):
        """ Create a WorkflowStep model object and corrsponding module representing
        type-specific functionality from the incoming dicitionary.
        """
        step = model.WorkflowStep()

        # TODO: Consider handling position inside module.
        step.position = step_dict['position']
        if "uuid" in step_dict:
            step.uuid = step_dict["uuid"]
        if "label" in step_dict:
            step.label = step_dict["label"]
        module = module_factory.from_dict(trans, step_dict, secure=secure)
        module.save_to_step(step)

        annotation = step_dict['annotation']
        if annotation:
            annotation = sanitize_html(annotation, 'utf-8', 'text/html')
            self.add_item_annotation(trans.sa_session, trans.get_user(), step,
                                     annotation)

        # Stick this in the step temporarily
        step.temp_input_connections = step_dict['input_connections']

        return module, step
Example #3
    def __module_from_dict(self, trans, steps, steps_by_external_id, step_dict,
                           **kwds):
        """ Create a WorkflowStep model object and corresponding module
        representing type-specific functionality from the incoming dictionary.
        """
        step = model.WorkflowStep()
        # TODO: Consider handling position inside module.
        step.position = step_dict['position']
        if step_dict.get("uuid", None) and step_dict['uuid'] != "None":
            step.uuid = step_dict["uuid"]
        if "label" in step_dict:
            step.label = step_dict["label"]
        step_type = step_dict.get("type", None)
        if step_type == "subworkflow":
            subworkflow = self.__load_subworkflow_from_step_dict(
                trans, step_dict)
            step_dict["subworkflow"] = subworkflow

        module = module_factory.from_dict(trans, step_dict, **kwds)
        self.__set_default_label(step, module, step_dict.get('tool_state'))
        module.save_to_step(step)

        annotation = step_dict['annotation']
        if annotation:
            annotation = sanitize_html(annotation, 'utf-8', 'text/html')
            self.add_item_annotation(trans.sa_session, trans.get_user(), step,
                                     annotation)

        # Stick this in the step temporarily
        step.temp_input_connections = step_dict['input_connections']

        # Create the model class for the step
        steps.append(step)
        steps_by_external_id[step_dict['id']] = step
        if 'workflow_outputs' in step_dict:
            workflow_outputs = step_dict['workflow_outputs']
            found_output_names = set([])
            for workflow_output in workflow_outputs:
                # Allow workflow outputs as list of output_names for backward compatibility.
                if not isinstance(workflow_output, dict):
                    workflow_output = {"output_name": workflow_output}
                output_name = workflow_output["output_name"]
                if output_name in found_output_names:
                    raise exceptions.ObjectAttributeInvalidException(
                        "Duplicate workflow outputs with name [%s] found." %
                        output_name)
                if not output_name:
                    raise exceptions.ObjectAttributeInvalidException(
                        "Workflow output with empty name encountered.")
                found_output_names.add(output_name)
                uuid = workflow_output.get("uuid", None)
                label = workflow_output.get("label", None)
                m = step.create_or_update_workflow_output(
                    output_name=output_name,
                    uuid=uuid,
                    label=label,
                )
                trans.sa_session.add(m)
        return module, step
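For reference, the workflow_outputs loop above accepts two shapes: a bare list of output names is normalized into dictionaries, while the dictionary form may also carry a uuid and a label. A small illustration with made-up values:

# Legacy form: bare output names, normalized into dicts by the loop above.
step_dict["workflow_outputs"] = ["out_file1"]
# Dictionary form: explicit output_name with optional uuid and label.
step_dict["workflow_outputs"] = [
    {"output_name": "out_file1", "uuid": None, "label": "Main output"}
]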
Example #4
    def test_annotations(self):
        model = self.model

        u = model.User(email="*****@*****.**", password="******")
        self.persist(u)

        def persist_and_check_annotation(annotation_class, **kwds):
            annotated_association = annotation_class()
            annotated_association.annotation = "Test Annotation"
            annotated_association.user = u
            for key, value in kwds.items():
                setattr(annotated_association, key, value)
            self.persist(annotated_association)
            self.expunge()
            stored_annotation = self.query(annotation_class).all()[0]
            assert stored_annotation.annotation == "Test Annotation"
            assert stored_annotation.user.email == "*****@*****.**"

        sw = model.StoredWorkflow()
        sw.user = u
        self.persist(sw)
        persist_and_check_annotation(model.StoredWorkflowAnnotationAssociation, stored_workflow=sw)

        workflow = model.Workflow()
        workflow.stored_workflow = sw
        self.persist(workflow)

        ws = model.WorkflowStep()
        ws.workflow = workflow
        self.persist(ws)
        persist_and_check_annotation(model.WorkflowStepAnnotationAssociation, workflow_step=ws)

        h = model.History(name="History for Annotation", user=u)
        self.persist(h)
        persist_and_check_annotation(model.HistoryAnnotationAssociation, history=h)

        d1 = model.HistoryDatasetAssociation(extension="txt", history=h, create_dataset=True, sa_session=model.session)
        self.persist(d1)
        persist_and_check_annotation(model.HistoryDatasetAssociationAnnotationAssociation, hda=d1)

        page = model.Page()
        page.user = u
        self.persist(page)
        persist_and_check_annotation(model.PageAnnotationAssociation, page=page)

        visualization = model.Visualization()
        visualization.user = u
        self.persist(visualization)
        persist_and_check_annotation(model.VisualizationAnnotationAssociation, visualization=visualization)

        dataset_collection = model.DatasetCollection(collection_type="paired")
        history_dataset_collection = model.HistoryDatasetCollectionAssociation(collection=dataset_collection)
        self.persist(history_dataset_collection)
        persist_and_check_annotation(model.HistoryDatasetCollectionAssociationAnnotationAssociation, history_dataset_collection=history_dataset_collection)

        library_dataset_collection = model.LibraryDatasetCollectionAssociation(collection=dataset_collection)
        self.persist(library_dataset_collection)
        persist_and_check_annotation(model.LibraryDatasetCollectionAnnotationAssociation, library_dataset_collection=library_dataset_collection)
Example #5
def yaml_to_model(has_dict, id_offset=100):
    if isinstance(has_dict, str):
        has_dict = yaml.safe_load(has_dict)

    workflow = model.Workflow()
    workflow.steps = []
    for i, step in enumerate(has_dict.get("steps", [])):
        workflow_step = model.WorkflowStep()
        if "order_index" not in step:
            step["order_index"] = i
        if "id" not in step:
            # Fixed offset ids just to test against the assumption order_index != id
            step["id"] = id_offset
            id_offset += 1
        step_type = step.get("type", None)
        assert step_type is not None

        if step_type == "subworkflow":
            subworkflow_dict = step["subworkflow"]
            del step["subworkflow"]
            subworkflow = yaml_to_model(subworkflow_dict, id_offset=id_offset)
            step["subworkflow"] = subworkflow
            id_offset += len(subworkflow.steps)

        for key, value in step.items():
            if key == "input_connections":
                raise NotImplementedError()
            if key == "inputs":
                inputs = []
                for input_name, input_def in value.items():
                    step_input = model.WorkflowStepInput(workflow_step)
                    step_input.name = input_name
                    connections = []
                    for conn_dict in input_def.get("connections", []):
                        conn = model.WorkflowStepConnection()
                        for conn_key, conn_value in conn_dict.items():
                            if conn_key == "@output_step":
                                target_step = workflow.steps[conn_value]
                                conn_value = target_step
                                conn_key = "output_step"
                            if conn_key == "@input_subworkflow_step":
                                conn_value = step["subworkflow"].step_by_index(
                                    conn_value)
                                conn_key = "input_subworkflow_step"
                            setattr(conn, conn_key, conn_value)
                        connections.append(conn)
                    step_input.connections = connections
                    inputs.append(step_input)
                value = inputs
            if key == "workflow_outputs":
                value = [
                    partial(_dict_to_workflow_output, workflow_step)(_)
                    for _ in value
                ]
            setattr(workflow_step, key, value)
        workflow.steps.append(workflow_step)

    return workflow
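A minimal usage sketch for yaml_to_model (the YAML content and the tool id "cat1" are made up, and it assumes the same yaml and Galaxy model imports as the helper above):

# Two-step workflow described in YAML; ids and order_index are filled in
# automatically by yaml_to_model.
minimal = """
steps:
  - type: data_input
  - type: tool
    tool_id: cat1
"""
workflow = yaml_to_model(minimal)
assert len(workflow.steps) == 2
assert workflow.steps[0].type == "data_input"
assert workflow.steps[1].tool_id == "cat1"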
Example #6
    def test_workflow_export(self):
        stored_workflow = model.StoredWorkflow()
        stored_workflow.name = "My Cool Workflow"
        workflow = model.Workflow()
        stored_workflow.latest_workflow = workflow
        workflow_step_0 = model.WorkflowStep()
        workflow.steps = [workflow_step_0]
        self.trans.app.workflow_manager.get_stored_accessible_workflow.return_value = stored_workflow
        example = """# Example
```galaxy
workflow_display(workflow_id=1)
```
"""
        result = self._to_basic(example)
        assert "**Workflow:** My Cool Workflow\n" in result
        assert "**Steps:**\n" in result
Example #7
    def test_workflows(self):
        model = self.model
        user = model.User(
            email="*****@*****.**",
            password="******"
        )

        def workflow_from_steps(steps):
            stored_workflow = model.StoredWorkflow()
            stored_workflow.user = user
            workflow = model.Workflow()
            workflow.steps = steps
            workflow.stored_workflow = stored_workflow
            return workflow

        child_workflow = workflow_from_steps([])
        self.persist(child_workflow)

        workflow_step_1 = model.WorkflowStep()
        workflow_step_1.order_index = 0
        workflow_step_1.type = "data_input"
        workflow_step_2 = model.WorkflowStep()
        workflow_step_2.order_index = 1
        workflow_step_2.type = "subworkflow"
        workflow_step_2.subworkflow = child_workflow

        workflow_step_1.get_or_add_input("moo1")
        workflow_step_1.get_or_add_input("moo2")
        workflow_step_2.get_or_add_input("moo")
        workflow_step_1.add_connection("foo", "cow", workflow_step_2)

        workflow = workflow_from_steps([workflow_step_1, workflow_step_2])
        self.persist(workflow)
        workflow_id = workflow.id

        annotation = model.WorkflowStepAnnotationAssociation()
        annotation.annotation = "Test Step Annotation"
        annotation.user = user
        annotation.workflow_step = workflow_step_1
        self.persist(annotation)

        assert workflow_step_1.id is not None
        h1 = model.History(name="WorkflowHistory1", user=user)

        invocation_uuid = uuid.uuid1()

        workflow_invocation = model.WorkflowInvocation()
        workflow_invocation.uuid = invocation_uuid
        workflow_invocation.history = h1

        workflow_invocation_step1 = model.WorkflowInvocationStep()
        workflow_invocation_step1.workflow_invocation = workflow_invocation
        workflow_invocation_step1.workflow_step = workflow_step_1

        subworkflow_invocation = model.WorkflowInvocation()
        workflow_invocation.attach_subworkflow_invocation_for_step(workflow_step_2, subworkflow_invocation)

        workflow_invocation_step2 = model.WorkflowInvocationStep()
        workflow_invocation_step2.workflow_invocation = workflow_invocation
        workflow_invocation_step2.workflow_step = workflow_step_2

        workflow_invocation.workflow = workflow

        d1 = self.new_hda(h1, name="1")
        workflow_request_dataset = model.WorkflowRequestToInputDatasetAssociation()
        workflow_request_dataset.workflow_invocation = workflow_invocation
        workflow_request_dataset.workflow_step = workflow_step_1
        workflow_request_dataset.dataset = d1
        self.persist(workflow_invocation)
        assert workflow_request_dataset is not None
        assert workflow_invocation.id is not None

        history_id = h1.id
        self.expunge()

        loaded_invocation = self.query(model.WorkflowInvocation).get(workflow_invocation.id)
        assert loaded_invocation.uuid == invocation_uuid, "%s != %s" % (loaded_invocation.uuid, invocation_uuid)
        assert loaded_invocation
        assert loaded_invocation.history.id == history_id

        step_1, step_2 = loaded_invocation.workflow.steps

        assert not step_1.subworkflow
        assert step_2.subworkflow
        assert len(loaded_invocation.steps) == 2

        subworkflow_invocation_assoc = loaded_invocation.get_subworkflow_invocation_association_for_step(step_2)
        assert subworkflow_invocation_assoc is not None
        assert isinstance(subworkflow_invocation_assoc.subworkflow_invocation, model.WorkflowInvocation)
        assert isinstance(subworkflow_invocation_assoc.parent_workflow_invocation, model.WorkflowInvocation)

        assert subworkflow_invocation_assoc.subworkflow_invocation.history.id == history_id

        loaded_workflow = self.query(model.Workflow).get(workflow_id)
        assert len(loaded_workflow.steps[0].annotations) == 1
        copied_workflow = loaded_workflow.copy(user=user)
        annotations = copied_workflow.steps[0].annotations
        assert len(annotations) == 1
Example #8
def extract_steps(trans,
                  history=None,
                  job_ids=None,
                  dataset_ids=None,
                  dataset_collection_ids=None,
                  dataset_names=None,
                  dataset_collection_names=None):
    # Ensure job_ids and dataset_ids are lists (possibly empty)
    if job_ids is None:
        job_ids = []
    elif type(job_ids) is not list:
        job_ids = [job_ids]
    if dataset_ids is None:
        dataset_ids = []
    elif type(dataset_ids) is not list:
        dataset_ids = [dataset_ids]
    if dataset_collection_ids is None:
        dataset_collection_ids = []
    elif type(dataset_collection_ids) is not list:
        dataset_collection_ids = [dataset_collection_ids]
    # Convert both sets of ids to integers
    job_ids = [int(_) for _ in job_ids]
    dataset_ids = [int(_) for _ in dataset_ids]
    dataset_collection_ids = [int(_) for _ in dataset_collection_ids]
    # Find each job, for security we (implicitly) check that they are
    # associated with a job in the current history.
    summary = WorkflowSummary(trans, history)
    jobs = summary.jobs
    steps = []
    hid_to_output_pair = {}
    # Input dataset steps
    for i, hid in enumerate(dataset_ids):
        step = model.WorkflowStep()
        step.type = 'data_input'
        if dataset_names:
            name = dataset_names[i]
        else:
            name = "Input Dataset"
        step.tool_inputs = dict(name=name)
        hid_to_output_pair[hid] = (step, 'output')
        steps.append(step)
    for i, hid in enumerate(dataset_collection_ids):
        step = model.WorkflowStep()
        step.type = 'data_collection_input'
        if hid not in summary.collection_types:
            raise exceptions.RequestParameterInvalidException(
                "hid %s does not appear to be a collection" % hid)
        collection_type = summary.collection_types[hid]
        if dataset_collection_names:
            name = dataset_collection_names[i]
        else:
            name = "Input Dataset Collection"
        step.tool_inputs = dict(name=name, collection_type=collection_type)
        hid_to_output_pair[hid] = (step, 'output')
        steps.append(step)
    # Tool steps
    for job_id in job_ids:
        if job_id not in summary.job_id2representative_job:
            log.warning("job_id %s not found in job_id2representative_job %s" %
                        (job_id, summary.job_id2representative_job))
            raise AssertionError(
                "Attempt to create workflow with job not connected to current history"
            )
        job = summary.job_id2representative_job[job_id]
        tool_inputs, associations = step_inputs(trans, job)
        step = model.WorkflowStep()
        step.type = 'tool'
        step.tool_id = job.tool_id
        step.tool_version = job.tool_version
        step.tool_inputs = tool_inputs
        # NOTE: We shouldn't need to do two passes here since only
        #       an earlier job can be used as an input to a later
        #       job.
        for other_hid, input_name in associations:
            if job in summary.implicit_map_jobs:
                an_implicit_output_collection = jobs[job][0][1]
                input_collection = an_implicit_output_collection.find_implicit_input_collection(
                    input_name)
                if input_collection:
                    other_hid = input_collection.hid
                else:
                    log.info("Cannot find implicit input collection for %s" %
                             input_name)
            if other_hid in hid_to_output_pair:
                other_step, other_name = hid_to_output_pair[other_hid]
                conn = model.WorkflowStepConnection()
                conn.input_step = step
                conn.input_name = input_name
                # Should always be connected to an earlier step
                conn.output_step = other_step
                conn.output_name = other_name
        steps.append(step)
        # Store created dataset hids
        for assoc in (job.output_datasets +
                      job.output_dataset_collection_instances):
            assoc_name = assoc.name
            if ToolOutputCollectionPart.is_named_collection_part_name(
                    assoc_name):
                continue
            if job in summary.implicit_map_jobs:
                hid = None
                for implicit_pair in jobs[job]:
                    query_assoc_name, dataset_collection = implicit_pair
                    if query_assoc_name == assoc_name:
                        hid = dataset_collection.hid
                if hid is None:
                    template = "Failed to find matching implicit job - job id is %s, implicit pairs are %s, assoc_name is %s."
                    message = template % (job.id, jobs[job], assoc_name)
                    log.warning(message)
                    raise Exception("Failed to extract job.")
            else:
                if hasattr(assoc, "dataset"):
                    hid = assoc.dataset.hid
                else:
                    hid = assoc.dataset_collection_instance.hid
            hid_to_output_pair[hid] = (step, assoc.name)
    return steps
Example #9
def __step(**kwds):
    step = model.WorkflowStep()
    for key, value in kwds.items():
        setattr(step, key, value)

    return step
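A quick illustrative call for the helper above (field values are made up); it simply copies keyword arguments onto a fresh WorkflowStep:

step = __step(order_index=0, type="data_input", label="Input dataset")
assert step.type == "data_input"
assert step.label == "Input dataset"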
Example #10
 def add_step(**kwds):
     workflow_step = model.WorkflowStep()
     for key, value in kwds.items():
         setattr(workflow_step, key, value)
     workflow.steps.append(workflow_step)
Example #11
 def _workflow_from_dict(self, trans, data, source=None):
     """
     RPARK: copied from galaxy.webapps.galaxy.controllers.workflows.py
     Creates a workflow from a dict. Created workflow is stored in the database and returned.
     """
     # Put parameters in workflow mode
     trans.workflow_building_mode = True
     # Create new workflow from incoming dict
     workflow = model.Workflow()
     # If there's a source, put it in the workflow name.
     if source:
         name = "%s (imported from %s)" % (data['name'], source)
     else:
         name = data['name']
     workflow.name = name
     # Assume no errors until we find a step that has some
     workflow.has_errors = False
     # Create each step
     steps = []
     # The editor will provide ids for each step that we don't need to save,
     # but do need to use to make connections
     steps_by_external_id = {}
     # Keep track of tools required by the workflow that are not available in
     # the local Galaxy instance.  Each tuple in the list of missing_tool_tups
     # will be ( tool_id, tool_name, tool_version ).
     missing_tool_tups = []
     # First pass to build step objects and populate basic values
     for key, step_dict in data['steps'].iteritems():
         # Create the model class for the step
         step = model.WorkflowStep()
         steps.append(step)
         steps_by_external_id[step_dict['id']] = step
         # FIXME: Position should be handled inside module
         step.position = step_dict['position']
         module = module_factory.from_dict(trans, step_dict, secure=False)
         if module.type == 'tool' and module.tool is None:
             # A required tool is not available in the local Galaxy instance.
             missing_tool_tup = (step_dict['tool_id'], step_dict['name'],
                                 step_dict['tool_version'])
             if missing_tool_tup not in missing_tool_tups:
                 missing_tool_tups.append(missing_tool_tup)
         module.save_to_step(step)
         if step.tool_errors:
             workflow.has_errors = True
         # Stick this in the step temporarily
         step.temp_input_connections = step_dict['input_connections']
         # Save step annotation.
         #annotation = step_dict[ 'annotation' ]
         #if annotation:
         #annotation = sanitize_html( annotation, 'utf-8', 'text/html' )
         # ------------------------------------------ #
         # RPARK REMOVING: user annotation b/c of API
         #self.add_item_annotation( trans.sa_session, trans.get_user(), step, annotation )
         # ------------------------------------------ #
         # Unpack and add post-job actions.
         post_job_actions = step_dict.get('post_job_actions', {})
         for name, pja_dict in post_job_actions.items():
             model.PostJobAction(pja_dict['action_type'], step,
                                 pja_dict['output_name'],
                                 pja_dict['action_arguments'])
     # Second pass to deal with connections between steps
     for step in steps:
         # Input connections
         for input_name, conn_dict in step.temp_input_connections.iteritems(
         ):
             if conn_dict:
                 conn = model.WorkflowStepConnection()
                 conn.input_step = step
                 conn.input_name = input_name
                 conn.output_name = conn_dict['output_name']
                 conn.output_step = steps_by_external_id[conn_dict['id']]
         del step.temp_input_connections
     # Order the steps if possible
     attach_ordered_steps(workflow, steps)
     # Connect up
     stored = model.StoredWorkflow()
     stored.name = workflow.name
     workflow.stored_workflow = stored
     stored.latest_workflow = workflow
     stored.user = trans.user
     # Persist
     trans.sa_session.add(stored)
     trans.sa_session.flush()
     return stored, missing_tool_tups
Example #12
File: extract.py Project: roalva1/galaxy
def extract_steps(trans,
                  history=None,
                  job_ids=None,
                  dataset_ids=None,
                  dataset_collection_ids=None):
    # Ensure job_ids and dataset_ids are lists (possibly empty)
    if job_ids is None:
        job_ids = []
    elif type(job_ids) is not list:
        job_ids = [job_ids]
    if dataset_ids is None:
        dataset_ids = []
    elif type(dataset_ids) is not list:
        dataset_ids = [dataset_ids]
    if dataset_collection_ids is None:
        dataset_collection_ids = []
    elif type(dataset_collection_ids) is not list:
        dataset_collection_ids = [dataset_collection_ids]
    # Convert both sets of ids to integers
    job_ids = [int(id) for id in job_ids]
    dataset_ids = [int(id) for id in dataset_ids]
    dataset_collection_ids = [int(id) for id in dataset_collection_ids]
    # Find each job, for security we (implicitly) check that they are
    # associated with a job in the current history.
    summary = WorkflowSummary(trans, history)
    jobs = summary.jobs
    jobs_by_id = dict((job.id, job) for job in jobs.keys())
    steps = []
    steps_by_job_id = {}
    hid_to_output_pair = {}
    # Input dataset steps
    for hid in dataset_ids:
        step = model.WorkflowStep()
        step.type = 'data_input'
        step.tool_inputs = dict(name="Input Dataset")
        hid_to_output_pair[hid] = (step, 'output')
        steps.append(step)
    for hid in dataset_collection_ids:
        step = model.WorkflowStep()
        step.type = 'data_collection_input'
        if hid not in summary.collection_types:
            raise exceptions.RequestParameterInvalidException(
                "hid %s does not appear to be a collection" % hid)
        collection_type = summary.collection_types[hid]
        step.tool_inputs = dict(name="Input Dataset Collection",
                                collection_type=collection_type)
        hid_to_output_pair[hid] = (step, 'output')
        steps.append(step)
    # Tool steps
    for job_id in job_ids:
        if job_id not in jobs_by_id:
            log.warn("job_id %s not found in jobs_by_id %s" %
                     (job_id, jobs_by_id))
            raise AssertionError(
                "Attempt to create workflow with job not connected to current history"
            )
        job = jobs_by_id[job_id]
        tool_inputs, associations = step_inputs(trans, job)
        step = model.WorkflowStep()
        step.type = 'tool'
        step.tool_id = job.tool_id
        step.tool_inputs = tool_inputs
        # NOTE: We shouldn't need to do two passes here since only
        #       an earlier job can be used as an input to a later
        #       job.
        for other_hid, input_name in associations:
            if job in summary.implicit_map_jobs:
                an_implicit_output_collection = jobs[job][0][1]
                input_collection = an_implicit_output_collection.find_implicit_input_collection(
                    input_name)
                if input_collection:
                    other_hid = input_collection.hid
            if other_hid in hid_to_output_pair:
                other_step, other_name = hid_to_output_pair[other_hid]
                conn = model.WorkflowStepConnection()
                conn.input_step = step
                conn.input_name = input_name
                # Should always be connected to an earlier step
                conn.output_step = other_step
                conn.output_name = other_name
        steps.append(step)
        steps_by_job_id[job_id] = step
        # Store created dataset hids
        for assoc in job.output_datasets:
            if job in summary.implicit_map_jobs:
                hid = None
                for implicit_pair in jobs[job]:
                    query_assoc_name, dataset_collection = implicit_pair
                    if query_assoc_name == assoc.name:
                        hid = dataset_collection.hid
                if hid is None:
                    log.warn("Failed to find matching implicit job.")
                    raise Exception("Failed to extract job.")
            else:
                hid = assoc.dataset.hid
            hid_to_output_pair[hid] = (step, assoc.name)
    return steps