def extract_workflow(trans, user, history=None, job_ids=None, dataset_ids=None, dataset_collection_ids=None, workflow_name=None):
    """Extract a new workflow from history items and persist it for ``user``.

    Builds the step list from the selected jobs/datasets/collections, lays the
    steps out on the editor canvas, wraps the revision in a ``StoredWorkflow``
    and flushes it to the database.

    :returns: the newly persisted ``StoredWorkflow``
    """
    extracted_steps = extract_steps(
        trans,
        history=history,
        job_ids=job_ids,
        dataset_ids=dataset_ids,
        dataset_collection_ids=dataset_collection_ids,
    )
    # Workflow to populate.
    workflow = model.Workflow()
    workflow.name = workflow_name
    # Order the steps if possible.
    attach_ordered_steps(workflow, extracted_steps)
    # Assign canvas positions level by level; the spacing constants are
    # arbitrary but produce a readable layout.
    base_pos = 10
    for level, indices_at_level in enumerate(order_workflow_steps_with_levels(extracted_steps)):
        for row, step_index in enumerate(indices_at_level):
            extracted_steps[step_index].position = {
                "top": base_pos + 120 * row,
                "left": base_pos + 220 * level,
            }
    # Wrap the revision in a StoredWorkflow owned by the requesting user.
    stored = model.StoredWorkflow()
    stored.user = user
    stored.name = workflow_name
    workflow.stored_workflow = stored
    stored.latest_workflow = workflow
    trans.sa_session.add(stored)
    trans.sa_session.flush()
    return stored
def workflow_from_steps(steps):
    """Wrap a list of WorkflowStep objects in a new Workflow/StoredWorkflow pair.

    NOTE(review): ``user`` is not defined in this function's scope -- it is
    presumably a module-level fixture/global; confirm it is bound at call time.
    """
    stored_workflow = model.StoredWorkflow()
    stored_workflow.user = user
    workflow = model.Workflow()
    # Attach the caller-supplied steps and link the revision to its container.
    workflow.steps = steps
    workflow.stored_workflow = stored_workflow
    return workflow
def __workflow_fixure(trans):
    """Create and persist a small four-step workflow, then reload it from the
    database so the returned object reflects persisted state."""
    owner = model.User(email="*****@*****.**", password="******")
    stored = model.StoredWorkflow()
    stored.user = owner
    workflow = model.Workflow()
    workflow.stored_workflow = stored

    def append_step(**attrs):
        # Build a WorkflowStep from keyword attributes and attach it.
        step = model.WorkflowStep()
        for attr_name, attr_value in attrs.items():
            setattr(step, attr_name, attr_value)
        workflow.steps.append(step)

    session = trans.app.model.context
    session.add(workflow)
    append_step(type="data_input", order_index=0, tool_inputs={"name": "input1"})
    append_step(type="data_input", order_index=1, tool_inputs={"name": "input2"})
    append_step(type="tool", tool_id="cat1", order_index=2)
    # NOTE: order_index jumps from 2 to 4 (no index 3) in the fixture data.
    append_step(type="tool", tool_id="cat1", order_index=4)
    session.flush()
    # Expunge and reload to ensure step state is as expected from database.
    workflow_id = workflow.id
    session.expunge_all()
    return session.query(model.Workflow).get(workflow_id)
def test_annotations(self):
    """Round-trip an annotation through every annotation-association model
    (workflow, step, history, HDA, page, visualization, HDCA, LDCA) and
    verify the text and owning user survive persistence."""
    model = self.model
    u = model.User(email="*****@*****.**", password="******")
    self.persist(u)

    def persist_and_check_annotation(annotation_class, **kwds):
        # Create an association of the given class, attach it to the target
        # object passed via kwds, persist, expunge, and re-query to verify
        # the stored annotation text and user.
        annotated_association = annotation_class()
        annotated_association.annotation = "Test Annotation"
        annotated_association.user = u
        for key, value in kwds.items():
            setattr(annotated_association, key, value)
        self.persist(annotated_association)
        self.expunge()
        stored_annotation = self.query(annotation_class).all()[0]
        assert stored_annotation.annotation == "Test Annotation"
        assert stored_annotation.user.email == "*****@*****.**"

    # Stored workflow annotation.
    sw = model.StoredWorkflow()
    sw.user = u
    self.persist(sw)
    persist_and_check_annotation(model.StoredWorkflowAnnotationAssociation, stored_workflow=sw)
    # Workflow step annotation (requires a workflow revision for the step).
    workflow = model.Workflow()
    workflow.stored_workflow = sw
    self.persist(workflow)
    ws = model.WorkflowStep()
    ws.workflow = workflow
    self.persist(ws)
    persist_and_check_annotation(model.WorkflowStepAnnotationAssociation, workflow_step=ws)
    # History annotation.
    h = model.History(name="History for Annotation", user=u)
    self.persist(h)
    persist_and_check_annotation(model.HistoryAnnotationAssociation, history=h)
    # History dataset association annotation.
    d1 = model.HistoryDatasetAssociation(extension="txt", history=h, create_dataset=True, sa_session=model.session)
    self.persist(d1)
    persist_and_check_annotation(model.HistoryDatasetAssociationAnnotationAssociation, hda=d1)
    # Page annotation.
    page = model.Page()
    page.user = u
    self.persist(page)
    persist_and_check_annotation(model.PageAnnotationAssociation, page=page)
    # Visualization annotation.
    visualization = model.Visualization()
    visualization.user = u
    self.persist(visualization)
    persist_and_check_annotation(model.VisualizationAnnotationAssociation, visualization=visualization)
    # Dataset collection annotations (history- and library-level), sharing
    # one underlying paired collection.
    dataset_collection = model.DatasetCollection(collection_type="paired")
    history_dataset_collection = model.HistoryDatasetCollectionAssociation(collection=dataset_collection)
    self.persist(history_dataset_collection)
    persist_and_check_annotation(model.HistoryDatasetCollectionAssociationAnnotationAssociation, history_dataset_collection=history_dataset_collection)
    library_dataset_collection = model.LibraryDatasetCollectionAssociation(collection=dataset_collection)
    self.persist(library_dataset_collection)
    persist_and_check_annotation(model.LibraryDatasetCollectionAnnotationAssociation, library_dataset_collection=library_dataset_collection)
def yaml_to_model(has_dict, id_offset=100):
    """Build a model.Workflow from a YAML string or already-parsed dict.

    Steps receive an ``order_index`` (their list position) and an ``id``
    (allocated from ``id_offset``) when not supplied. Subworkflow steps are
    built recursively; ``@output_step`` / ``@input_subworkflow_step`` keys in
    connection dicts are resolved to actual step objects.
    """
    if isinstance(has_dict, str):
        has_dict = yaml.safe_load(has_dict)
    workflow = model.Workflow()
    workflow.steps = []
    for i, step in enumerate(has_dict.get("steps", [])):
        workflow_step = model.WorkflowStep()
        if "order_index" not in step:
            step["order_index"] = i
        if "id" not in step:
            # Fixed offset ids just to test against assumption order_index != id
            step["id"] = id_offset
            id_offset += 1
        step_type = step.get("type", None)
        assert step_type is not None
        if step_type == "subworkflow":
            # Recurse first, then put the built Workflow back under the same
            # key so the generic setattr loop below attaches it to the step.
            subworkflow_dict = step["subworkflow"]
            del step["subworkflow"]
            subworkflow = yaml_to_model(subworkflow_dict, id_offset=id_offset)
            step["subworkflow"] = subworkflow
            # Advance past the ids consumed by the nested workflow.
            id_offset += len(subworkflow.steps)
        for key, value in step.items():
            if key == "input_connections":
                raise NotImplementedError()
            if key == "inputs":
                # Translate the inputs mapping into WorkflowStepInput objects
                # with resolved WorkflowStepConnection lists.
                inputs = []
                for input_name, input_def in value.items():
                    step_input = model.WorkflowStepInput(workflow_step)
                    step_input.name = input_name
                    connections = []
                    for conn_dict in input_def.get("connections", []):
                        conn = model.WorkflowStepConnection()
                        for conn_key, conn_value in conn_dict.items():
                            if conn_key == "@output_step":
                                # Resolve by index into steps built so far.
                                target_step = workflow.steps[conn_value]
                                conn_value = target_step
                                conn_key = "output_step"
                            if conn_key == "@input_subworkflow_step":
                                conn_value = step["subworkflow"].step_by_index(conn_value)
                                conn_key = "input_subworkflow_step"
                            setattr(conn, conn_key, conn_value)
                        connections.append(conn)
                    step_input.connections = connections
                    inputs.append(step_input)
                value = inputs
            if key == "workflow_outputs":
                value = [partial(_dict_to_workflow_output, workflow_step)(_) for _ in value]
            setattr(workflow_step, key, value)
        workflow.steps.append(workflow_step)
    return workflow
def _workflow_from_dict(self, trans, data, name, exact_tools=False):
    """Create a (not yet persisted) Workflow from a workflow description dict
    (or JSON string), returning ``(workflow, missing_tool_tups)`` where the
    second element lists tools referenced by steps but absent locally."""
    if isinstance(data, string_types):
        data = json.loads(data)
    # Create new workflow from source data
    workflow = model.Workflow()
    workflow.name = name
    # Assume no errors until we find a step that has some
    workflow.has_errors = False
    # Create each step
    steps = []
    # The editor will provide ids for each step that we don't need to save,
    # but do need to use to make connections
    steps_by_external_id = {}
    # Keep track of tools required by the workflow that are not available in
    # the local Galaxy instance. Each tuple in the list of missing_tool_tups
    # will be ( tool_id, tool_name, tool_version ).
    missing_tool_tups = []
    for step_dict in self.__walk_step_dicts(data):
        module, step = self.__track_module_from_dict(trans, steps, steps_by_external_id, step_dict, exact_tools=exact_tools)
        is_tool = is_tool_module_type(module.type)
        if is_tool and module.tool is None:
            # A required tool is not available in the local Galaxy instance.
            # 'content_id' is preferred over the legacy 'tool_id' key.
            tool_id = step_dict.get('content_id', step_dict.get('tool_id', None))
            assert tool_id is not None  # Threw an exception elsewhere if not
            missing_tool_tup = (tool_id, step_dict['name'], step_dict['tool_version'], step_dict['id'])
            if missing_tool_tup not in missing_tool_tups:
                missing_tool_tups.append(missing_tool_tup)
            # Save the entire step_dict in the unused config field, to be
            # parsed later when we do have the tool
            step.config = json.dumps(step_dict)
        if step.tool_errors:
            workflow.has_errors = True
    # Second pass to deal with connections between steps
    self.__connect_workflow_steps(steps, steps_by_external_id)
    # Order the steps if possible
    attach_ordered_steps(workflow, steps)
    return workflow, missing_tool_tups
def update_workflow_from_dict(self, trans, stored_workflow, workflow_data, from_editor=False):
    """Replace ``stored_workflow``'s latest revision with a new Workflow
    built from ``workflow_data``.

    :param workflow_data: JSON string when ``from_editor`` is True, otherwise
        an already-parsed dict.
    :raises MissingToolsException: if any tool step references a tool the
        local toolbox does not have (checked before any step is built).
    :returns: ``(workflow, errors)`` where errors is a list of human-readable
        validation warnings (step errors, cycles).
    """
    # Put parameters in workflow mode
    trans.workflow_building_mode = True
    # Convert incoming workflow data from json if coming from editor
    data = json.loads(workflow_data) if from_editor else workflow_data
    # Create new workflow from incoming data; keep the last name (user can
    # rename later).
    workflow = model.Workflow()
    workflow.name = stored_workflow.name
    # Assume no errors until we find a step that has some
    workflow.has_errors = False
    steps = []
    # The editor will provide ids for each step that we don't need to save,
    # but do need to use to make connections
    steps_by_external_id = {}
    # Fail fast if any required tool is missing before building step objects.
    # (was dict.iteritems() -- Python 2 only; .values() works on 2 and 3 and
    # the key was unused)
    errors = []
    for step_dict in data['steps'].values():
        is_tool = is_tool_module_type(step_dict['type'])
        if is_tool and not trans.app.toolbox.has_tool(step_dict['tool_id'], exact=True):
            errors.append("Step %s requires tool '%s'." % (step_dict['id'], step_dict['tool_id']))
    if errors:
        raise MissingToolsException(workflow, errors)
    # First pass to build step objects and populate basic values
    for step_dict in self.__walk_step_dicts(data):
        # Create the model class for the step
        module, step = self.__module_from_dict(trans, step_dict, secure=from_editor)
        steps.append(step)
        steps_by_external_id[step_dict['id']] = step
        if 'workflow_outputs' in step_dict:
            for output_name in step_dict['workflow_outputs']:
                m = model.WorkflowOutput(workflow_step=step, output_name=output_name)
                trans.sa_session.add(m)
        if step.tool_errors:
            # DBTODO Check for conditional inputs here.
            workflow.has_errors = True
    # Second pass to deal with connections between steps
    self.__connect_workflow_steps(steps, steps_by_external_id)
    # Order the steps if possible
    attach_ordered_steps(workflow, steps)
    # Connect up
    workflow.stored_workflow = stored_workflow
    stored_workflow.latest_workflow = workflow
    # Persist
    trans.sa_session.flush()
    # Return something informative
    errors = []
    if workflow.has_errors:
        errors.append("Some steps in this workflow have validation errors")
    if workflow.has_cycles:
        errors.append("This workflow contains cycles")
    return workflow, errors
def save_workflow_as(self, trans, workflow_name, workflow_data, workflow_annotation="", from_tool_form=False):
    """
    Creates a new workflow based on Save As command. It is a new workflow, but
    is created with workflow_data already present.

    Returns the encoded id of the new StoredWorkflow on success, a dict with
    ``name``/``message``/``errors`` keys when required tools are missing, and
    implicitly None when no ``workflow_name`` was supplied (error state).
    """
    user = trans.get_user()
    if workflow_name is not None:
        workflow_contents_manager = self.app.workflow_contents_manager
        # New container plus an (initially empty) latest revision.
        stored_workflow = model.StoredWorkflow()
        stored_workflow.name = workflow_name
        stored_workflow.user = user
        self.slug_builder.create_item_slug(trans.sa_session, stored_workflow)
        workflow = model.Workflow()
        workflow.name = workflow_name
        workflow.stored_workflow = stored_workflow
        stored_workflow.latest_workflow = workflow
        # Add annotation.
        workflow_annotation = sanitize_html(workflow_annotation)
        self.add_item_annotation(trans.sa_session, trans.get_user(), stored_workflow, workflow_annotation)
        # Persist
        session = trans.sa_session
        session.add(stored_workflow)
        session.flush()
        workflow_update_options = WorkflowUpdateOptions(
            update_stored_workflow_attributes=False,  # taken care of above
            from_tool_form=from_tool_form,
        )
        try:
            # Fill the empty revision with the provided workflow_data.
            workflow, errors = workflow_contents_manager.update_workflow_from_raw_description(
                trans,
                stored_workflow,
                workflow_data,
                workflow_update_options,
            )
        except MissingToolsException as e:
            # Report missing tools back to the caller instead of saving.
            return dict(
                name=e.workflow.name,
                message=("This workflow includes missing or invalid tools. "
                         "It cannot be saved until the following steps are removed or the missing tools are enabled."),
                errors=e.errors,
            )
        return (trans.security.encode_id(stored_workflow.id))
    else:
        # This is an error state, 'save as' must have a workflow_name
        log.exception("Error in Save As workflow: no name.")
def __test_workflow(self):
    """Build and persist a minimal StoredWorkflow/Workflow pair owned by a
    throwaway user, returning the StoredWorkflow."""
    stored = model.StoredWorkflow()
    revision = model.Workflow()
    revision.stored_workflow = stored
    stored.latest_workflow = revision
    # Attach a dummy owner so ownership constraints are satisfied.
    owner = model.User()
    owner.email = "*****@*****.**"
    owner.password = "******"
    stored.user = owner
    session = self.app.model.context
    session.add(revision)
    session.add(stored)
    session.flush()
    return stored
def create(self, trans, payload=None, **kwd):
    """Render the 'Create Workflow' form on GET; on any other method create a
    new StoredWorkflow (with an empty first revision and optional annotation)
    and return its encoded id."""
    if trans.request.method == 'GET':
        # Form definition consumed by the client-side form builder.
        return {
            'title': 'Create Workflow',
            'inputs': [
                {'name': 'workflow_name', 'label': 'Name', 'value': 'Unnamed workflow'},
                {'name': 'workflow_annotation', 'label': 'Annotation', 'help': 'A description of the workflow; annotation is shown alongside shared or published workflows.'},
            ],
        }
    user = trans.get_user()
    workflow_name = payload.get('workflow_name')
    workflow_annotation = payload.get('workflow_annotation')
    if not workflow_name:
        return self.message_exception(trans, 'Please provide a workflow name.')
    # Create the new stored workflow.
    stored_workflow = model.StoredWorkflow()
    stored_workflow.name = workflow_name
    stored_workflow.user = user
    self.slug_builder.create_item_slug(trans.sa_session, stored_workflow)
    # And the first (empty) workflow revision.
    workflow = model.Workflow()
    workflow.name = workflow_name
    workflow.stored_workflow = stored_workflow
    stored_workflow.latest_workflow = workflow
    # Add annotation.
    workflow_annotation = sanitize_html(workflow_annotation)
    self.add_item_annotation(trans.sa_session, trans.get_user(), stored_workflow, workflow_annotation)
    # Persist.
    session = trans.sa_session
    session.add(stored_workflow)
    session.flush()
    return {
        'id': trans.security.encode_id(stored_workflow.id),
        'message': f'Workflow {workflow_name} has been created.',
    }
def test_workflow_export(self):
    """Render a one-step stored workflow through the workflow_display
    directive and check the basic-markdown output mentions it."""
    stored_workflow = model.StoredWorkflow()
    stored_workflow.name = "My Cool Workflow"
    workflow = model.Workflow()
    stored_workflow.latest_workflow = workflow
    workflow.steps = [model.WorkflowStep()]
    # The mocked manager hands back our fixture regardless of id.
    self.trans.app.workflow_manager.get_stored_accessible_workflow.return_value = stored_workflow
    example = """# Example
```galaxy
workflow_display(workflow_id=1)
```
"""
    result = self._to_basic(example)
    assert "**Workflow:** My Cool Workflow\n" in result
    assert "**Steps:**\n" in result
def _workflow_from_dict(self, trans, data, name, **kwds):
    """Create a (not yet persisted) Workflow from a workflow description dict
    (or JSON string). Subworkflows are loaded in a dedicated first pass.

    :returns: ``(workflow, missing_tool_tups)``; each missing-tool tuple is
        ``(tool_id, tool_name, tool_version, step_id)``.
    """
    if isinstance(data, string_types):
        data = json.loads(data)
    # Create new workflow from source data
    workflow = model.Workflow()
    workflow.name = name
    # Assume no errors until we find a step that has some
    workflow.has_errors = False
    # Create each step
    steps = []
    # The editor will provide ids for each step that we don't need to save,
    # but do need to use to make connections
    steps_by_external_id = {}
    # Keep track of tools required by the workflow that are not available in
    # the local Galaxy instance. Each tuple in the list of missing_tool_tups
    # will be ( tool_id, tool_name, tool_version ).
    missing_tool_tups = []
    # Pass 0: resolve subworkflow references before building modules.
    for step_dict in self.__walk_step_dicts(data):
        self.__load_subworkflows(trans, step_dict)
    # Pass 1: build module/step objects and collect missing tools.
    for step_dict in self.__walk_step_dicts(data):
        module, step = self.__module_from_dict(trans, steps, steps_by_external_id, step_dict, **kwds)
        is_tool = is_tool_module_type(module.type)
        if is_tool and module.tool is None:
            missing_tool_tup = (module.tool_id, module.get_name(), module.tool_version, step_dict['id'])
            if missing_tool_tup not in missing_tool_tups:
                missing_tool_tups.append(missing_tool_tup)
        if module.get_errors():
            workflow.has_errors = True
    # Second pass to deal with connections between steps
    self.__connect_workflow_steps(steps, steps_by_external_id)
    # Order the steps if possible
    attach_ordered_steps(workflow, steps)
    return workflow, missing_tool_tups
def test_metadata_mutable_column(self):
    """reports_config (a mutable JSON column) must persist dicts, scalar
    values, nested structures, and in-place mutations across reloads."""
    wf = model.Workflow()
    self.model.session.add(wf)
    self.model.session.flush()
    reload_obj = self.persist_and_reload
    wf.reports_config = {'x': 'z'}
    persisted = reload_obj(wf)
    assert persisted.reports_config == {'x': 'z'}
    # In-place mutation of an existing key must be change-tracked.
    persisted.reports_config['x'] = '1'
    persisted = reload_obj(persisted)
    assert persisted.reports_config['x'] == '1'
    # Scalar payloads: string, int, float.
    for scalar in ('abcdefg', 1, 1.1):
        persisted.reports_config = scalar
        persisted = reload_obj(persisted)
        assert persisted.reports_config == scalar
    # Bool checked with identity to distinguish True from 1.
    persisted.reports_config = True
    persisted = reload_obj(persisted)
    assert persisted.reports_config is True
    # Nested dict/list structures survive round-trips and deep copies.
    persisted.reports_config = {'list': [[1, 2, 3]]}
    persisted = reload_obj(persisted)
    assert persisted.reports_config == {'list': [[1, 2, 3]]}
    copy.deepcopy(persisted.reports_config)
    assert persisted.reports_config.pop('list') == [[1, 2, 3]]
    persisted = reload_obj(persisted)
    assert persisted.reports_config == {}
    # dict.update / del are also tracked as mutations.
    persisted.reports_config.update({'x': 'z'})
    persisted = reload_obj(persisted)
    assert persisted.reports_config == {'x': 'z'}
    del persisted.reports_config['x']
    persisted = reload_obj(persisted)
    assert persisted.reports_config == {}
    persisted.reports_config = {'x': {'y': 'z'}}
    persisted = reload_obj(persisted)
    assert persisted.reports_config == {'x': {'y': 'z'}}
def build_workflow_from_dict(self, trans, data, source=None, add_to_menu=False, publish=False):
    """Create and persist a StoredWorkflow from a workflow description dict.

    :param source: optional origin label appended to the workflow name.
    :param add_to_menu: also add a menu entry for the importing user.
    :param publish: mark the stored workflow as published.
    :returns: ``CreatedWorkflow(stored_workflow=..., missing_tools=...)``
    """
    # Put parameters in workflow mode
    trans.workflow_building_mode = True
    # Create new workflow from incoming dict
    workflow = model.Workflow()
    # If there's a source, put it in the workflow name.
    if source:
        name = "%s (imported from %s)" % (data['name'], source)
    else:
        name = data['name']
    workflow.name = name
    if 'uuid' in data:
        workflow.uuid = data['uuid']
    # Assume no errors until we find a step that has some
    workflow.has_errors = False
    # Create each step
    steps = []
    # The editor will provide ids for each step that we don't need to save,
    # but do need to use to make connections
    steps_by_external_id = {}
    # Keep track of tools required by the workflow that are not available in
    # the local Galaxy instance. Each tuple in the list of missing_tool_tups
    # will be ( tool_id, tool_name, tool_version ).
    missing_tool_tups = []
    for step_dict in self.__walk_step_dicts(data):
        module, step = self.__module_from_dict(trans, step_dict, secure=False)
        steps.append(step)
        steps_by_external_id[step_dict['id']] = step
        if module.type == 'tool' and module.tool is None:
            # A required tool is not available in the local Galaxy instance.
            missing_tool_tup = (step_dict['tool_id'], step_dict['name'], step_dict['tool_version'])
            if missing_tool_tup not in missing_tool_tups:
                missing_tool_tups.append(missing_tool_tup)
            # Save the entire step_dict in the unused config field, to be
            # parsed later when we do have the tool
            step.config = json.dumps(step_dict)
        if step.tool_errors:
            workflow.has_errors = True
    # Second pass to deal with connections between steps
    self.__connect_workflow_steps(steps, steps_by_external_id)
    # Order the steps if possible
    attach_ordered_steps(workflow, steps)
    # Connect up the revision to a new StoredWorkflow container.
    stored = model.StoredWorkflow()
    stored.name = workflow.name
    workflow.stored_workflow = stored
    stored.latest_workflow = workflow
    stored.user = trans.user
    stored.published = publish
    # NOTE(review): data['annotation'] access raises KeyError if the key is
    # absent -- presumably upstream guarantees it; confirm.
    if data['annotation']:
        annotation = sanitize_html(data['annotation'], 'utf-8', 'text/html')
        self.add_item_annotation(trans.sa_session, stored.user, stored, annotation)
    # Persist
    trans.sa_session.add(stored)
    trans.sa_session.flush()
    if add_to_menu:
        if trans.user.stored_workflow_menu_entries is None:
            trans.user.stored_workflow_menu_entries = []
        menuEntry = model.StoredWorkflowMenuEntry()
        menuEntry.stored_workflow = stored
        trans.user.stored_workflow_menu_entries.append(menuEntry)
        trans.sa_session.flush()
    return CreatedWorkflow(stored_workflow=stored, missing_tools=missing_tool_tups)
def test_render():
    """Smoke test for workflow canvas rendering.

    Doesn't check anything about the render code - just exercises to
    ensure that obvious errors aren't thrown.
    """
    workflow_canvas = render.WorkflowCanvas()
    workflow = model.Workflow()
    workflow.steps = []

    def add_step(**kwds):
        # Build a WorkflowStep from keyword attributes and register it.
        workflow_step = model.WorkflowStep()
        # Fixed: kwds.iteritems() is Python 2 only; .items() works on 2 and 3.
        for key, value in kwds.items():
            setattr(workflow_step, key, value)
        workflow.steps.append(workflow_step)
        return workflow_step

    def connection(**kwds):
        # Build a WorkflowStepConnection from keyword attributes.
        conn = model.WorkflowStepConnection()
        for key, value in kwds.items():
            setattr(conn, key, value)
        return conn

    # Two data inputs feeding two tool steps.
    step_0 = add_step(type="data_input", order_index=0, tool_inputs={"name": "input1"}, input_connections=[], position={"top": 3, "left": 3})
    step_1 = add_step(type="data_input", order_index=1, tool_inputs={"name": "input2"}, input_connections=[], position={"top": 6, "left": 4})
    step_2 = add_step(type="tool", tool_id="cat1", order_index=2, input_connections=[connection(input_name="input1", output_step=step_0, output_name="di1")], position={"top": 13, "left": 10})
    step_3 = add_step(type="tool", tool_id="cat1", order_index=3, input_connections=[connection(input_name="input1", output_step=step_0, output_name="di1")], position={"top": 33, "left": 103})
    workflow_canvas.populate_data_for_step(step_0, "input1", [], [{"name": "di1"}])
    workflow_canvas.populate_data_for_step(step_1, "input2", [], [{"name": "di1"}])
    workflow_canvas.populate_data_for_step(step_2, "cat wrapper", [{"name": "input1", "label": "i1"}], [{"name": "out1"}])
    workflow_canvas.populate_data_for_step(step_3, "cat wrapper", [{"name": "input1", "label": "i1"}], [{"name": "out1"}])
    workflow_canvas.add_steps()
    workflow_canvas.finish()
    assert workflow_canvas.canvas.standalone_xml()
def _workflow_from_dict(self, trans, data, source=None):
    """
    RPARK: copied from galaxy.webapps.galaxy.controllers.workflows.py
    Creates a workflow from a dict. Created workflow is stored in the
    database and returned as ``(stored_workflow, missing_tool_tups)``.
    """
    # Put parameters in workflow mode
    trans.workflow_building_mode = True
    # Create new workflow from incoming dict
    workflow = model.Workflow()
    # If there's a source, put it in the workflow name.
    if source:
        name = "%s (imported from %s)" % (data['name'], source)
    else:
        name = data['name']
    workflow.name = name
    # Assume no errors until we find a step that has some
    workflow.has_errors = False
    # Create each step
    steps = []
    # The editor will provide ids for each step that we don't need to save,
    # but do need to use to make connections
    steps_by_external_id = {}
    # Keep track of tools required by the workflow that are not available in
    # the local Galaxy instance. Each tuple in the list of missing_tool_tups
    # will be ( tool_id, tool_name, tool_version ).
    missing_tool_tups = []
    # First pass to build step objects and populate basic values.
    # Fixed: data['steps'].iteritems() is Python 2 only; .items() works on
    # both Python 2 and 3.
    for key, step_dict in data['steps'].items():
        # Create the model class for the step
        step = model.WorkflowStep()
        steps.append(step)
        steps_by_external_id[step_dict['id']] = step
        # FIXME: Position should be handled inside module
        step.position = step_dict['position']
        module = module_factory.from_dict(trans, step_dict, secure=False)
        if module.type == 'tool' and module.tool is None:
            # A required tool is not available in the local Galaxy instance.
            missing_tool_tup = (step_dict['tool_id'], step_dict['name'], step_dict['tool_version'])
            if missing_tool_tup not in missing_tool_tups:
                missing_tool_tups.append(missing_tool_tup)
        module.save_to_step(step)
        if step.tool_errors:
            workflow.has_errors = True
        # Stick this in the step temporarily
        step.temp_input_connections = step_dict['input_connections']
        # Step annotations are deliberately not imported here (API copy --
        # user annotation handling removed).
        # Unpack and add post-job actions. Fixed: loop variable renamed from
        # 'name' to avoid shadowing the workflow name bound above.
        post_job_actions = step_dict.get('post_job_actions', {})
        for pja_name, pja_dict in post_job_actions.items():
            model.PostJobAction(pja_dict['action_type'], step, pja_dict['output_name'], pja_dict['action_arguments'])
    # Second pass to deal with connections between steps
    for step in steps:
        # Input connections (fixed: .iteritems() -> .items(), Python 3 compat)
        for input_name, conn_dict in step.temp_input_connections.items():
            if conn_dict:
                conn = model.WorkflowStepConnection()
                conn.input_step = step
                conn.input_name = input_name
                conn.output_name = conn_dict['output_name']
                conn.output_step = steps_by_external_id[conn_dict['id']]
        del step.temp_input_connections
    # Order the steps if possible
    attach_ordered_steps(workflow, steps)
    # Connect up
    stored = model.StoredWorkflow()
    stored.name = workflow.name
    workflow.stored_workflow = stored
    stored.latest_workflow = workflow
    stored.user = trans.user
    # Persist
    trans.sa_session.add(stored)
    trans.sa_session.flush()
    return stored, missing_tool_tups