def get_data_uploader_overlay(self, project_id):
    """
    Return the HTML for the overlay dialog from which the user can upload
    data into the given project.

    :param project_id: id of the project the uploaded data will be attached to
    """
    # NOTE(review): this variant resolves uploaders indirectly through the
    # uploader categories; a sibling version calls get_upload_algorithms() directly.
    upload_categories = self.flow_service.get_uploader_categories()
    upload_algorithms = self.flow_service.get_groups_for_categories(upload_categories)
    flow_controller = FlowController()
    algorithms_interface = {}
    tabs = []
    for algo_group in upload_algorithms:
        # One overlay tab per upload algorithm; its adapter form is pre-rendered
        # and keyed by algorithm id so the client can swap tabs without a round-trip.
        adapter_template = flow_controller.get_adapter_template(project_id, algo_group.id, True, None)
        algorithms_interface['template_for_algo_' + str(algo_group.id)] = adapter_template
        tabs.append(OverlayTabDefinition(algo_group.displayname, algo_group.subsection_name,
                                         description=algo_group.description))
    template_specification = self.fill_overlay_attributes(None, "Upload", "Upload data for this project",
                                                          "project/upload_data_overlay", "dialog-upload",
                                                          tabs_vertical=tabs)
    template_specification['uploadAlgorithms'] = upload_algorithms
    template_specification['projectId'] = project_id
    template_specification['algorithmsInterface'] = algorithms_interface
    return flow_controller.fill_default_attributes(template_specification)
def get_data_uploader_overlay(self, project_id):
    """
    Build the overlay dialog through which data files can be uploaded
    into the project identified by `project_id`.
    """
    uploaders = self.flow_service.get_upload_algorithms()
    ctrl = FlowController()

    templates_by_algo = {}
    vertical_tabs = []
    for uploader in uploaders:
        # Pre-rendered adapter form for each uploader, keyed by its algorithm id.
        form_key = 'template_for_algo_' + str(uploader.id)
        templates_by_algo[form_key] = ctrl.get_adapter_template(project_id, uploader.id, True, None)
        vertical_tabs.append(OverlayTabDefinition(uploader.displayname, uploader.subsection_name,
                                                  description=uploader.description))

    spec = self.fill_overlay_attributes(None, "Upload", "Upload data for this project",
                                        "project/upload_data_overlay", "dialog-upload",
                                        tabs_vertical=vertical_tabs)
    spec['uploadAlgorithms'] = uploaders
    spec['projectId'] = project_id
    spec['algorithmsInterface'] = templates_by_algo
    return ctrl.fill_default_attributes(spec)
def setup_method(self):
    """ Sets up the environment for testing; creates a `FlowController` """
    self.init()  # base-class setup (test project/session); must run first
    self.flow_c = FlowController()           # controller under test
    self.burst_c = BurstController()         # used to launch bursts in tests
    self.operation_service = OperationService()
def get_operation_details(self, entity_gid, is_group=False, back_page='burst'):
    """
    Produce the overlay HTML describing one Operation or one OperationGroup.
    """
    group_requested = string2bool(str(is_group))
    if group_requested:
        # OperationGroup: visibility is treated as uniform over its members.
        template_specification = self._compute_operation_details(entity_gid, True)
        template_specification["nodeType"] = graph_structures.NODE_OPERATION_GROUP_TYPE
    else:
        # Single Operation: also expose the relevancy toggle button.
        template_specification = self._compute_operation_details(entity_gid)
        template_specification["displayRelevantButton"] = True
        template_specification["nodeType"] = graph_structures.NODE_OPERATION_TYPE

    template_specification["backPageIdentifier"] = back_page
    relevance_class = " node-relevant" if template_specification["isRelevant"] else " node_irrelevant"
    overlay_class = "can-browse editor-node node-type-" + template_specification["nodeType"] + relevance_class
    template_specification = self.fill_overlay_attributes(template_specification, "Details", "Operation",
                                                          "project/details_operation_overlay", overlay_class)
    return FlowController().fill_default_attributes(template_specification)
def get_project_uploader_overlay(self):
    """
    Render the overlay dialog used to import an entire project archive.
    """
    spec = self.fill_overlay_attributes(None, "Upload", "Project structure",
                                        "project/upload_project_overlay", "dialog-upload")
    return FlowController().fill_default_attributes(spec)
def setUp(self):
    """ Sets up the environment for testing; creates a `FlowController` """
    # NOTE(review): duplicate of the pytest-style setup_method elsewhere in
    # this source; kept as-is for the unittest-style runner.
    self.init()  # base-class setup (test project/session); must run first
    self.flow_c = FlowController()           # controller under test
    self.burst_c = BurstController()         # used to launch bursts in tests
    self.operation_service = OperationService()
def init_cherrypy(arguments=None):
    """
    Configure and start the CherryPy web server for the application.

    :param arguments: optional list of module names; each named module's
        directory is exposed as a static folder under ``/static_<module>``.
    """
    #### Mount static folders from modules marked for introspection
    arguments = arguments or []
    CONFIGUER = TvbProfile.current.web.CHERRYPY_CONFIGURATION
    for module in arguments:
        module_inst = importlib.import_module(str(module))
        module_path = os.path.dirname(os.path.abspath(module_inst.__file__))
        CONFIGUER["/static_" + str(module)] = {'tools.staticdir.on': True,
                                               'tools.staticdir.dir': '.',
                                               'tools.staticdir.root': module_path}

    #### Mount controllers, and specify the root URL for them.
    # Controllers are constructed and mounted one at a time, in this order.
    cherrypy.tree.mount(BaseController(), "/", config=CONFIGUER)
    cherrypy.tree.mount(UserController(), "/user/", config=CONFIGUER)
    cherrypy.tree.mount(ProjectController(), "/project/", config=CONFIGUER)
    cherrypy.tree.mount(FigureController(), "/project/figure/", config=CONFIGUER)
    cherrypy.tree.mount(FlowController(), "/flow/", config=CONFIGUER)
    cherrypy.tree.mount(SettingsController(), "/settings/", config=CONFIGUER)
    cherrypy.tree.mount(HelpController(), "/help/", config=CONFIGUER)
    cherrypy.tree.mount(SimulatorController(), "/burst/", config=CONFIGUER)
    cherrypy.tree.mount(ParameterExplorationController(), "/burst/explore/", config=CONFIGUER)
    cherrypy.tree.mount(DynamicModelController(), "/burst/dynamic/", config=CONFIGUER)
    cherrypy.tree.mount(SpatioTemporalController(), "/spatial/", config=CONFIGUER)
    cherrypy.tree.mount(RegionsModelParametersController(), "/burst/modelparameters/regions/", config=CONFIGUER)
    cherrypy.tree.mount(SurfaceModelParametersController(), "/spatial/modelparameters/surface/", config=CONFIGUER)
    cherrypy.tree.mount(RegionStimulusController(), "/spatial/stimulus/region/", config=CONFIGUER)
    cherrypy.tree.mount(SurfaceStimulusController(), "/spatial/stimulus/surface/", config=CONFIGUER)
    cherrypy.tree.mount(LocalConnectivityController(), "/spatial/localconnectivity/", config=CONFIGUER)
    cherrypy.tree.mount(NoiseConfigurationController(), "/burst/noise/", config=CONFIGUER)
    cherrypy.tree.mount(HPCController(), "/hpc/", config=CONFIGUER)

    cherrypy.config.update(CONFIGUER)

    # ----------------- Register additional request handlers -----------------
    # This tool checks for MAX upload size
    cherrypy.tools.upload = Tool('on_start_resource', RequestHandler.check_upload_size)
    # This tool cleans up files on disk (mainly after export)
    cherrypy.tools.cleanup = Tool('on_end_request', RequestHandler.clean_files_on_disk)
    # ----------------- End register additional request handlers ----------------

    # Register housekeeping job (only when an HPC backend is configured)
    if TvbProfile.current.hpc.IS_HPC_RUN and TvbProfile.current.hpc.CAN_RUN_HPC:
        cherrypy.engine.housekeeper = cherrypy.process.plugins.BackgroundTask(
            TvbProfile.current.hpc.BACKGROUND_JOB_INTERVAL, HPCOperationService.check_operations_job)
        cherrypy.engine.housekeeper.start()

    # HTTP Server is fired now ######
    cherrypy.engine.start()
def launchloader(self, project_id, algorithm_id, cancel=False, **data):
    """
    Kick off the upload mechanism for the given project and upload algorithm,
    then redirect back to the project structure page.
    """
    success_link = "/project/editstructure/" + str(project_id)

    # Only POST submissions are accepted; GETs and explicit cancels bounce back.
    posted = cherrypy.request.method == 'POST'
    if not posted or cancel:
        raise cherrypy.HTTPRedirect(success_link)

    # Both ids must be numeric, otherwise silently return to the structure page.
    try:
        int(project_id)
        int(algorithm_id)
    except (ValueError, TypeError):
        raise cherrypy.HTTPRedirect(success_link)

    project = self.project_service.find_project(project_id)
    algorithm = self.flow_service.get_algorithm_by_identifier(algorithm_id)
    FlowController().execute_post(project.id, success_link, algorithm.fk_category, algorithm, **data)
    raise cherrypy.HTTPRedirect(success_link)
def __init__(self):
    super(ProjectController, self).__init__()
    # Shared FlowController instance: several overlay/detail pages delegate
    # their final template fill and adapter rendering to it.
    self.flow_controller = FlowController()
class ProjectController(BaseController):
    """ Displays pages which deals with Project data management. """

    # Keys under which link-able / already-linked projects are exposed to templates.
    # NOTE(review): "PRROJECTS" spelling is kept as-is; renaming would break
    # any external reference to these class attributes.
    PRROJECTS_FOR_LINK_KEY = "projectsforlink"
    PRROJECTS_LINKED_KEY = "projectslinked"
    # Session key holding the operation-page filter set.
    KEY_OPERATION_FILTERS = "operationfilters"
    # Node type discriminators used by set_visibility / operation details.
    NODE_OPERATION_TYPE = "operation"
    NODE_OPERATION_GROUP_TYPE = "operationGroup"

    def __init__(self):
        super(ProjectController, self).__init__()
        # Shared FlowController: overlay/detail pages delegate template fill to it.
        self.flow_controller = FlowController()

    @expose_page
    @settings
    def index(self):
        """ Display project main-menu. Choose one project to work with. """
        current_project = common.get_current_project()
        if current_project is None:
            # No project selected in session yet: force the selection page.
            raise cherrypy.HTTPRedirect("/project/viewall")
        template_specification = dict(mainContent="project/project_submenu", title="TVB Project Menu")
        return self.fill_default_attributes(template_specification)

    @expose_page
    @settings
    def viewall(self, create=False, page=1, selected_project_id=None, **_):
        """ Display all existent projects. Choose one project to work with. """
        page = int(page)
        if cherrypy.request.method == 'POST' and create:
            raise cherrypy.HTTPRedirect('/project/editone')
        current_user_id = common.get_logged_user().id

        ## Select project if user choose one.
        if selected_project_id is not None:
            try:
                selected_project = self.project_service.find_project(selected_project_id)
                self._mark_selected(selected_project)
            except ProjectServiceException as excep:
                self.logger.error(excep)
                self.logger.warning("Could not select project: " + str(selected_project_id))
                common.set_error_message("Could not select project: " + str(selected_project_id))

        # Prepare template response
        prjs, pages_no = self.project_service.retrieve_projects_for_user(current_user_id, page)
        template_specification = dict(mainContent="project/viewall", title="Available TVB Projects",
                                      projectsList=prjs, page_number=page, total_pages=pages_no)
        return self.fill_default_attributes(template_specification, 'list')

    @cherrypy.expose
    @handle_error(redirect=True)
    @check_user
    @settings
    def projectupload(self, **data):
        """Upload Project from TVB ZIP."""
        self.logger.debug("Uploading ..." + str(data))
        try:
            upload_param = "uploadedfile"
            if upload_param in data and data[upload_param]:
                import_service = ImportService()
                import_service.import_project_structure(data[upload_param], common.get_logged_user().id)
        except ServicesBaseException as excep:
            # Import failures are reported to the user but never crash the page.
            self.logger.warning(excep.message)
            common.set_error_message(excep.message)
        raise cherrypy.HTTPRedirect('/project/viewall')

    def _remove_project(self, project_id):
        """Private method for removing project."""
        try:
            self.project_service.remove_project(project_id)
        except ServicesBaseException as exc:
            self.logger.error("Could not delete project!")
            self.logger.exception(exc)
            common.set_error_message(exc.message)
        # If the deleted project was the one selected, clear it from the session.
        prj = common.get_current_project()
        if prj is not None and prj.id == int(project_id):
            SimulatorContext().clean_project_data_from_session()

    @expose_page
    @settings
    def editone(self, project_id=None, cancel=False, save=False, delete=False, **data):
        """
        Create or change Project. When project_id is empty we create a
        new entity, otherwise we are to edit and existent one.

        :param project_id: id of the project to edit; None/0 means create
        :param cancel/save/delete: POST action flags (mutually exclusive)
        :param data: submitted form fields
        """
        if cherrypy.request.method == 'POST' and cancel:
            raise cherrypy.HTTPRedirect('/project')
        if cherrypy.request.method == 'POST' and delete:
            self._remove_project(project_id)
            raise cherrypy.HTTPRedirect('/project/viewall')

        current_user = common.get_logged_user()
        is_create = False
        if project_id is None or not int(project_id):
            is_create = True
            data["administrator"] = current_user.display_name
            admin_username = current_user.username
        else:
            current_project = self.project_service.find_project(project_id)
            if not save:
                # Only when we do not have submitted data,
                # populate fields with initial values for edit.
                data = dict(name=current_project.name, description=current_project.description)
            data["administrator"] = current_project.administrator.display_name
            admin_username = current_project.administrator.username
            self._mark_selected(current_project)
        data["project_id"] = project_id

        template_specification = dict(mainContent="project/editone", data=data, isCreate=is_create,
                                      title="Create new project" if is_create else "Edit " + data["name"],
                                      editUsersEnabled=(current_user.username == admin_username))
        try:
            if cherrypy.request.method == 'POST' and save:
                data = EditForm().to_python(data)
                saved_project = self.project_service.store_project(current_user, is_create, project_id, **data)
                if StorageInterface.encryption_enabled() and is_create:
                    StorageInterface().remove_project(saved_project, True)
                self._mark_selected(saved_project)
                raise cherrypy.HTTPRedirect('/project/viewall')
        except formencode.Invalid as excep:
            # Validation errors: stay on the edit page and show field errors.
            self.logger.debug(str(excep))
            template_specification[common.KEY_ERRORS] = excep.unpack_errors()
        except ProjectServiceException as excep:
            self.logger.debug(str(excep))
            common.set_error_message(excep.message)
            raise cherrypy.HTTPRedirect('/project/viewall')

        all_users, members, pages = self.user_service.get_users_for_project(current_user.username, project_id)
        template_specification['usersList'] = all_users
        template_specification['usersMembers'] = [m.id for m in members]
        template_specification['usersPages'] = pages
        template_specification['usersCurrentPage'] = 1
        return self.fill_default_attributes(template_specification, 'properties')

    @expose_fragment('project/project_members')
    def getmemberspage(self, page, project_id=None):
        """Retrieve a new page of Project members."""
        current_name = common.get_logged_user().username
        all_users, members, _ = self.user_service.get_users_for_project(current_name, project_id, int(page))
        edit_enabled = True
        if project_id is not None:
            current_project = self.project_service.find_project(project_id)
            edit_enabled = (current_name == current_project.administrator.username)
        return dict(usersList=all_users, usersMembers=[m.id for m in members],
                    usersCurrentPage=page, editUsersEnabled=edit_enabled)

    @expose_json
    def set_visibility(self, entity_type, entity_gid, to_de_relevant):
        """
        Method used for setting the relevancy/visibility on a DataType(Group)/Operation(Group).
        """
        # NOTE(review): 'to_de_relevant' spelling kept — it is a request
        # parameter name that the client-side code submits.
        to_de_relevant = string2bool(to_de_relevant)
        is_operation, is_group = False, False
        if entity_type == self.NODE_OPERATION_TYPE:
            is_group = False
            is_operation = True
        elif entity_type == self.NODE_OPERATION_GROUP_TYPE:
            is_group = True
            is_operation = True

        if is_operation:
            self.project_service.set_operation_and_group_visibility(entity_gid, to_de_relevant, is_group)
        else:
            self.project_service.set_datatype_visibility(entity_gid, to_de_relevant)

    @expose_page
    @settings
    def viewoperations(self, project_id=None, page=1, filtername=None, reset_filters=None):
        """
        Display table of operations for a given project selected
        """
        if (project_id is None) or (not int(project_id)):
            raise cherrypy.HTTPRedirect('/project')

        ## Toggle filters
        filters = self.__get_operations_filters()
        selected_filters = None
        for my_filter in filters:
            if cherrypy.request.method == 'POST' and (filtername is not None):
                if reset_filters:
                    my_filter.selected = False
                elif my_filter.display_name == filtername:
                    my_filter.selected = not my_filter.selected
            if my_filter.selected:
                # Filter objects compose via '+' (None-safe on the right side).
                selected_filters = my_filter + selected_filters

        ## Iterate one more time, to update counters
        for my_filter in filters:
            if not my_filter.selected:
                new_count = self.project_service.count_filtered_operations(project_id,
                                                                           my_filter + selected_filters)
                my_filter.passes_count = new_count
            else:
                my_filter.passes_count = ''

        page = int(page)
        project, total_op_count, filtered_ops, pages_no = self.project_service.retrieve_project_full(
            project_id, selected_filters, page)
        ## Select current project
        self._mark_selected(project)
        template_specification = dict(mainContent="project/viewoperations", project=project,
                                      title='Past operations for " ' + project.name + '"',
                                      operationsList=filtered_ops, total_op_count=total_op_count,
                                      total_pages=pages_no, page_number=page, filters=filters,
                                      no_filter_selected=(selected_filters is None), model=model)
        return self.fill_default_attributes(template_specification, 'operations')

    @expose_fragment("call_out_project")
    def generate_call_out_control(self):
        """
        Returns the content of the project call-out fragment (selected project badge).
        """
        self.update_operations_count()
        return {'selectedProject': common.get_current_project()}

    def __get_operations_filters(self):
        """
        Filters for VIEW_ALL_OPERATIONS page.
        Get from session currently selected filters, or build a new set of filters.
        """
        session_filtes = common.get_from_session(self.KEY_OPERATION_FILTERS)
        if session_filtes:
            return session_filtes
        else:
            sim_group = self.algorithm_service.get_algorithm_by_module_and_class(
                IntrospectionRegistry.SIMULATOR_MODULE, IntrospectionRegistry.SIMULATOR_CLASS)
            new_filters = StaticFiltersFactory.build_operations_filters(sim_group,
                                                                        common.get_logged_user().id)
            common.add2session(self.KEY_OPERATION_FILTERS, new_filters)
            return new_filters

    @expose_fragment("overlay_confirmation")
    def show_confirmation_overlay(self, **data):
        """
        Returns the content of a confirmation dialog, with a given question.
        """
        if not data:
            data = {}
        question = data.get('question', "Are you sure ?")
        data['question'] = question
        return self.fill_default_attributes(data)

    @expose_fragment("overlay")
    def get_datatype_details(self, entity_gid, back_page='null', exclude_tabs=None):
        """
        Returns the HTML which contains the details for the given dataType.
        :param back_page: if different from 'null' (the default) it will redirect to it after saving metedata changes
        :param exclude_tabs: optional list of tab names to hide from the overlay
        """
        if exclude_tabs is None:
            exclude_tabs = []
        selected_project = common.get_current_project()
        datatype_details, states, entity = self.project_service.get_datatype_details(entity_gid)

        # Load DataType categories
        current_type = datatype_details.data_type
        datatype_gid = datatype_details.gid
        categories, has_operations_warning = {}, False
        if not entity.invalid:
            categories, has_operations_warning = self.algorithm_service.get_launchable_algorithms(datatype_gid)

        is_group = False
        if datatype_details.operation_group_id is not None:
            is_group = True

        # Retrieve links
        linkable_projects_dict = self._get_linkable_projects_dict(entity.id)
        # Load all exporters
        exporters = {}
        if not entity.invalid:
            exporters = ExportManager().get_exporters_for_data(entity)
        is_relevant = entity.visible

        template_specification = {"entity_gid": entity_gid,
                                  "nodeFields": datatype_details.get_ui_fields(),
                                  "allStates": states,
                                  "project": selected_project,
                                  "categories": categories,
                                  "exporters": exporters,
                                  "datatype_id": entity.id,
                                  "isGroup": is_group,
                                  "isRelevant": is_relevant,
                                  "nodeType": 'datatype',
                                  "backPageIdentifier": back_page}
        template_specification.update(linkable_projects_dict)

        overlay_class = "can-browse editor-node node-type-" + str(current_type).lower()
        if is_relevant:
            overlay_class += " node-relevant"
        else:
            overlay_class += " node_irrelevant"
        overlay_title = current_type
        if datatype_details.datatype_tag_1:
            overlay_title += " " + datatype_details.datatype_tag_1

        # Build the overlay tabs; overlay_indexes keeps the fixed position of
        # each tab (0..4) so excluded tabs leave the remaining indexes stable.
        tabs = []
        overlay_indexes = []
        if "Metadata" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Metadata", "metadata"))
            overlay_indexes.append(0)
        if "Analyzers" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Analyzers", "analyzers",
                                             enabled=categories and 'Analyze' in categories))
            overlay_indexes.append(1)
        if "Visualizers" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Visualizers", "visualizers",
                                             enabled=categories and 'View' in categories))
            overlay_indexes.append(2)

        enable_link_tab = False
        if (not entity.invalid) and (linkable_projects_dict is not None):
            projects_for_link = linkable_projects_dict.get(self.PRROJECTS_FOR_LINK_KEY)
            if projects_for_link is not None and len(projects_for_link) > 0:
                enable_link_tab = True
            projects_linked = linkable_projects_dict.get(self.PRROJECTS_LINKED_KEY)
            if projects_linked is not None and len(projects_linked) > 0:
                enable_link_tab = True
        if "Links" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Links", "link_to", enabled=enable_link_tab))
            overlay_indexes.append(3)
        if "Export" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Export", "export", enabled=(exporters and len(exporters) > 0)))
            overlay_indexes.append(4)

        template_specification = self.fill_overlay_attributes(template_specification, "DataType Details",
                                                              overlay_title,
                                                              "project/details_datatype_overlay",
                                                              overlay_class, tabs, overlay_indexes)
        template_specification = self.flow_controller.fill_default_attributes(template_specification)
        if has_operations_warning:
            template_specification[common.KEY_MESSAGE] = 'Not all operations could be loaded for this input DataType.' \
                                                         ' Contact the admin to check the logs!'
            template_specification[common.KEY_MESSAGE_TYPE] = "warningMessage"
        return template_specification

    @expose_fragment('project/linkable_projects')
    def get_linkable_projects(self, datatype_id, is_group, entity_gid):
        """
        Returns the HTML which displays the link-able projects for the given dataType
        """
        template_specification = self._get_linkable_projects_dict(datatype_id)
        template_specification["entity_gid"] = entity_gid
        template_specification["isGroup"] = is_group
        return template_specification

    def _get_linkable_projects_dict(self, datatype_id):
        """UI ready dictionary with projects in which current DataType can be linked."""
        self.logger.debug("Searching projects to link for DT " + str(datatype_id))
        for_link, linked = self.project_service.get_linkable_projects_for_user(
            common.get_logged_user().id, datatype_id)

        # Map project id -> name, or None when the corresponding list is empty.
        projects_for_link, linked_projects = None, None
        if for_link:
            projects_for_link = {}
            for project in for_link:
                projects_for_link[project.id] = project.name
        if linked:
            linked_projects = {}
            for project in linked:
                linked_projects[project.id] = project.name

        template_specification = {self.PRROJECTS_FOR_LINK_KEY: projects_for_link,
                                  self.PRROJECTS_LINKED_KEY: linked_projects,
                                  "datatype_id": datatype_id}
        return template_specification

    @expose_fragment("overlay")
    def get_operation_details(self, entity_gid, is_group=False, back_page='burst'):
        """
        Returns the HTML which contains the details for the given operation.
        """
        if string2bool(str(is_group)):
            # we have an OperationGroup entity.
            template_specification = self._compute_operation_details(entity_gid, True)
            # I expect that all the operations from a group are visible or not
            template_specification["nodeType"] = self.NODE_OPERATION_GROUP_TYPE
        else:
            # we have a simple Operation
            template_specification = self._compute_operation_details(entity_gid)
            template_specification["displayRelevantButton"] = True
            template_specification["nodeType"] = self.NODE_OPERATION_TYPE

        template_specification["backPageIdentifier"] = back_page
        overlay_class = "can-browse editor-node node-type-" + template_specification["nodeType"]
        if template_specification["isRelevant"]:
            overlay_class += " node-relevant"
        else:
            overlay_class += " node_irrelevant"
        template_specification = self.fill_overlay_attributes(template_specification, "Details", "Operation",
                                                              "project/details_operation_overlay",
                                                              overlay_class)
        return self.flow_controller.fill_default_attributes(template_specification)

    def _compute_operation_details(self, entity_gid, is_group=False):
        """
        Returns a dictionary which contains the details for the given operation.
        """
        selected_project = common.get_current_project()
        op_details = self.project_service.get_operation_details(entity_gid, is_group)
        operation_id = op_details.operation_id

        # The reload button is hidden for grouped/burst operations and for
        # operations in a "raw" category.
        display_reload_btn = True
        operation = OperationService.load_operation(operation_id)
        if (operation.fk_operation_group is not None) or (operation.burst is not None):
            display_reload_btn = False
        else:
            op_categ_id = operation.algorithm.fk_category
            raw_categories = self.algorithm_service.get_raw_categories()
            for category in raw_categories:
                if category.id == op_categ_id:
                    display_reload_btn = False
                    break

        template_specification = {"entity_gid": entity_gid,
                                  "nodeFields": op_details.get_ui_fields(),
                                  "operationId": operation_id,
                                  "displayReloadBtn": display_reload_btn,
                                  "project": selected_project,
                                  "isRelevant": operation.visible}
        return template_specification

    def get_project_structure_grouping(self):
        """Return the logged user's (first, second) level grouping preference."""
        user = common.get_logged_user()
        return user.get_project_structure_grouping()

    def set_project_structure_grouping(self, first, second):
        """Persist the logged user's structure-grouping preference."""
        user = common.get_logged_user()
        user.set_project_structure_grouping(first, second)
        self.user_service.edit_user(user)

    @expose_page
    @settings
    def editstructure(self, project_id=None, first_level=None, second_level=None,
                      filter_input="", visibility_filter=None, **_ignored):
        """
        Return the page skeleton for displaying the project structure.
        """
        try:
            int(project_id)
        except (ValueError, TypeError):
            raise cherrypy.HTTPRedirect('/project')

        if first_level is None or second_level is None:
            # Fall back to the user's saved grouping preference.
            first_level, second_level = self.get_project_structure_grouping()
        selected_project = self.project_service.find_project(project_id)
        self._mark_selected(selected_project)
        data = self.project_service.get_filterable_meta()
        filters = StaticFiltersFactory.build_datatype_filters(selected=visibility_filter)
        tumor_creator_algorithm = dao.get_algorithm_by_module(TumorDatasetCreator.__module__,
                                                              TumorDatasetCreator.__name__)

        template_specification = dict(mainContent="project/structure", title=selected_project.name,
                                      project=selected_project, data=data,
                                      firstLevelSelection=first_level, secondLevelSelection=second_level,
                                      filterInputValue=filter_input, filters=filters,
                                      tumorCreatorAlgorithmId=tumor_creator_algorithm.id)
        return self.fill_default_attributes(template_specification, 'data')

    @expose_fragment("overlay")
    def get_data_uploader_overlay(self, project_id):
        """
        Returns the html which displays a dialog which allows the user
        to upload certain data into the application.
        """
        upload_algorithms = self.algorithm_service.get_upload_algorithms()

        algorithms_interface = {}
        tabs = []
        for algorithm in upload_algorithms:
            # One tab per upload algorithm, with its adapter form pre-rendered
            # and keyed by algorithm id for client-side tab switching.
            adapter_template = self.flow_controller.get_adapter_template(project_id, algorithm.id,
                                                                         True, None)
            algorithms_interface['template_for_algo_' + str(algorithm.id)] = adapter_template
            tabs.append(OverlayTabDefinition(algorithm.displayname, algorithm.subsection_name,
                                             description=algorithm.description))

        template_specification = self.fill_overlay_attributes(None, "Upload",
                                                              "Upload data for this project",
                                                              "project/upload_data_overlay",
                                                              "dialog-upload", tabs_vertical=tabs)
        template_specification['uploadAlgorithms'] = upload_algorithms
        template_specification['projectId'] = project_id
        template_specification['algorithmsInterface'] = algorithms_interface
        return self.flow_controller.fill_default_attributes(template_specification)

    @expose_fragment("overlay")
    def get_project_uploader_overlay(self):
        """
        Returns the html which displays a dialog which allows the user
        to upload an entire project.
        """
        template_specification = self.fill_overlay_attributes(None, "Upload", "Project structure",
                                                              "project/upload_project_overlay",
                                                              "dialog-upload")
        return self.flow_controller.fill_default_attributes(template_specification)

    @expose_page
    def launchloader(self, project_id, algorithm_id, cancel=False, **data):
        """
        Start Upload mechanism
        """
        success_link = "/project/editstructure/" + str(project_id)
        # do not allow GET
        if cherrypy.request.method != 'POST' or cancel:
            raise cherrypy.HTTPRedirect(success_link)
        # Both ids must be numeric; on bad input just bounce back.
        try:
            int(project_id)
            int(algorithm_id)
        except (ValueError, TypeError):
            raise cherrypy.HTTPRedirect(success_link)

        project = self.project_service.find_project(project_id)
        algorithm = self.algorithm_service.get_algorithm_by_identifier(algorithm_id)
        self.flow_controller.execute_post(project.id, success_link, algorithm.fk_category,
                                          algorithm, **data)
        raise cherrypy.HTTPRedirect(success_link)

    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def readjsonstructure(self, project_id, visibility_filter=StaticFiltersFactory.FULL_VIEW,
                          first_level=None, second_level=None, filter_value=None):
        """
        AJAX exposed method.
        Will return the complete JSON for Project's structure, or filtered tree
        (filter only Relevant entities or Burst only Data).
        """
        if first_level is None or second_level is None:
            first_level, second_level = self.get_project_structure_grouping()
        else:
            # Explicit grouping request: remember it as the user's preference.
            self.set_project_structure_grouping(first_level, second_level)

        selected_filter = StaticFiltersFactory.build_datatype_filters(single_filter=visibility_filter)
        if project_id == 'undefined':
            project_id = common.get_current_project().id
        project = self.project_service.find_project(project_id)
        json_structure = self.project_service.get_project_structure(project, selected_filter,
                                                                    first_level, second_level,
                                                                    filter_value)
        # This JSON encoding is necessary, otherwise we will get an error
        # from JSTree library while trying to load with AJAX
        # the content of the tree.
        encoder = JSONEncoder()
        return encoder.iterencode(json_structure)

    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def createlink(self, link_data, project_id, is_group):
        """
        Delegate the creation of the actual link to the flow service.
        """
        if not string2bool(str(is_group)):
            self.algorithm_service.create_link([link_data], project_id)
        else:
            all_data = self.project_service.get_datatype_in_group(link_data)
            # Link all Dts in group and the DT_Group entity
            data_ids = [data.id for data in all_data]
            data_ids.append(int(link_data))
            self.algorithm_service.create_link(data_ids, project_id)

    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def removelink(self, link_data, project_id, is_group):
        """
        Delegate the removal of the actual link to the flow service.
        """
        if not string2bool(str(is_group)):
            self.algorithm_service.remove_link(link_data, project_id)
        else:
            # Unlink every DataType in the group, then the DT_Group itself.
            all_data = self.project_service.get_datatype_in_group(link_data)
            for data in all_data:
                self.algorithm_service.remove_link(data.id, project_id)
            self.algorithm_service.remove_link(int(link_data), project_id)

    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def noderemove(self, project_id, node_gid):
        """
        AJAX exposed method, to execute operation of data removal.
        Returns an error message string on failure, None on success.
        """
        try:
            if node_gid is None:
                return "Remove can only be applied on a Node with GID!"
            self.logger.debug("Removing data with GID=" + str(node_gid))
            self.project_service.remove_datatype(project_id, node_gid)
        except RemoveDataTypeException as excep:
            self.logger.exception("Could not execute operation Node Remove!")
            return excep.message
        except ServicesBaseException as excep:
            self.logger.exception("Could not execute operation Node Remove!")
            return excep.message
        return None

    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def updatemetadata(self, **data):
        """
        Submit MetaData edited for DataType(Group) or Operation(Group).
        Returns an error message string on failure, None (implicit) on success.
        """
        try:
            self.project_service.update_metadata(data)
        except ServicesBaseException as excep:
            self.logger.error("Could not execute MetaData update!")
            self.logger.exception(excep)
            common.set_error_message(excep.message)
            return excep.message

    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def downloaddatatype(self, data_gid, export_module):
        """
        Export the data to a default path of TVB_STORAGE/PROJECTS/project_name
        """
        current_prj = common.get_current_project()
        # Load data by GID
        entity = load_entity_by_gid(data_gid)
        # Do real export
        export_mng = ExportManager()
        file_name, file_path, delete_file = export_mng.export_data(entity, export_module, current_prj)
        if delete_file:
            # We force parent folder deletion because export process generated it.
            self.mark_file_for_delete(file_path, True)
        self.logger.debug("Data exported in file: " + str(file_path))
        return serve_file(file_path, "application/x-download", "attachment", file_name)

    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def downloadproject(self, project_id):
        """
        Export the data from a whole project.
        """
        current_project = self.project_service.find_project(project_id)
        export_mng = ExportManager()
        export_file = export_mng.export_project(current_project)

        # Register export file for delete when download complete
        # We force parent folder deletion because export process generated it.
        self.mark_file_for_delete(export_file, True)
        return serve_file(export_file, "application/x-download", "attachment")

    def fill_default_attributes(self, template_dictionary, subsection='project'):
        """
        Overwrite base controller to add required parameters for adapter templates.
        """
        template_dictionary[common.KEY_SECTION] = 'project'
        template_dictionary[common.KEY_SUB_SECTION] = subsection
        template_dictionary[common.KEY_INCLUDE_RESOURCES] = 'project/included_resources'
        BaseController.fill_default_attributes(self, template_dictionary)
        return template_dictionary
class TestFlowContoller(BaseControllersTest):
    """ Unit tests for FlowController """

    def setup_method(self):
        """ Sets up the environment for testing; creates a `FlowController` """
        self.init()
        self.flow_c = FlowController()
        self.burst_c = BurstController()
        self.operation_service = OperationService()

    def teardown_method(self):
        """ Cleans up the testing environment """
        self.cleanup()
        self.clean_database()

    @pytest.fixture()
    def long_burst_launch(self, connectivity_factory):
        """ Yields a builder; call it to launch a long burst (pass True for a range launch). """

        def build(is_range=False):
            self.burst_c.index()
            connectivity = connectivity_factory[1]
            launch_params = copy.deepcopy(SIMULATOR_PARAMETERS)
            launch_params['connectivity'] = dao.get_datatype_by_id(connectivity.id).gid
            launch_params['simulation_length'] = '10000'
            if is_range:
                # A range over conduction_speed produces an operation group
                launch_params['conduction_speed'] = '[10,15,20]'
                launch_params[RANGE_PARAMETER_1] = 'conduction_speed'
            launch_params = {"simulator_parameters": json.dumps(launch_params)}
            burst_id = json.loads(self.burst_c.launch_burst("new", "test_burst", **launch_params))['id']
            return dao.get_burst_by_id(burst_id)

        return build

    def test_context_selected(self):
        """
        Remove the project from CherryPy session and check that you are redirected to projects page.
        """
        del cherrypy.session[common.KEY_PROJECT]
        self._expect_redirect('/project/viewall', self.flow_c.step_analyzers)

    def test_valid_step(self):
        """
        For all algorithm categories check that a submenu is generated and the result
        page has its title given by category name.
        """
        result_dict = self.flow_c.step_analyzers()
        assert common.KEY_SUBMENU_LIST in result_dict, \
            "Expect to have a submenu with available algorithms for category."
        assert result_dict["section_name"] == 'analyze'

    def test_step_connectivity(self):
        """
        Check that the correct section name and connectivity sub-menu are returned for the connectivity step.
        """
        result_dict = self.flow_c.step_connectivity()
        assert result_dict['section_name'] == 'connectivity'
        assert result_dict['submenu_list'] == self.flow_c.connectivity_submenu

    def test_default(self):
        """
        Test default method from step controllers. Check that the submit link is ok, that a mainContent
        is present in result dict and that the isAdapter flag is set to true.
        """
        cherrypy.request.method = "GET"
        categories = dao.get_algorithm_categories()
        for categ in categories:
            algo_groups = dao.get_adapters_from_categories([categ.id])
            for algo in algo_groups:
                result_dict = self.flow_c.default(categ.id, algo.id)
                assert result_dict[common.KEY_SUBMIT_LINK] == '/flow/%i/%i' % (categ.id, algo.id)
                assert 'mainContent' in result_dict
                assert result_dict['isAdapter']

    def test_default_cancel(self):
        """
        On cancel we should get a redirect to the back page link.
        """
        cherrypy.request.method = "POST"
        categories = dao.get_algorithm_categories()
        algo_groups = dao.get_adapters_from_categories([categories[0].id])
        self._expect_redirect('/project/viewoperations/%i' % self.test_project.id, self.flow_c.default,
                              categories[0].id, algo_groups[0].id, cancel=True, back_page='operations')

    def test_default_invalid_key(self):
        """
        Pass invalid keys for adapter and step and check you get redirect to tvb entry page with error set.
        """
        self._expect_redirect('/tvb?error=True', self.flow_c.default, 'invalid', 'invalid')

    def test_read_datatype_attribute(self, datatype_with_storage_factory):
        """
        Read an attribute from a datatype.
        """
        dt = datatype_with_storage_factory("test_subject", "RAW_STATE",
                                           'this is the stored data'.split())
        returned_data = self.flow_c.read_datatype_attribute(dt.gid, "string_data")
        assert returned_data == '["this", "is", "the", "stored", "data"]'

    def test_read_datatype_attribute_method_call(self, datatype_with_storage_factory):
        """
        Call method on given datatype.
        """
        dt = datatype_with_storage_factory("test_subject", "RAW_STATE",
                                           'this is the stored data'.split())
        args = {'length': 101}
        returned_data = self.flow_c.read_datatype_attribute(dt.gid, 'return_test_data', **args)
        assert returned_data == str(list(range(101)))

    def test_get_simple_adapter_interface(self):
        """ The simple interface of a known adapter matches its declared input tree. """
        adapter = dao.get_algorithm_by_module('tvb.tests.framework.adapters.testadapter1', 'TestAdapter1')
        result = self.flow_c.get_simple_adapter_interface(adapter.id)
        expected_interface = TestAdapter1().get_input_tree()
        assert result['inputList'] == expected_interface

    def _wait_for_burst_ops(self, burst_config):
        """ Sleeps until some operation of the burst is created (max ~50s). """
        waited = 1
        timeout = 50
        operations = dao.get_operations_in_burst(burst_config.id)
        while not len(operations) and waited <= timeout:
            sleep(1)
            waited += 1
            operations = dao.get_operations_in_burst(burst_config.id)
        return operations

    def test_stop_burst_operation(self, long_burst_launch):
        # BUGFIX: the fixture yields a builder function — it must be CALLED
        # (the group tests already do `long_burst_launch(True)`).
        burst_config = long_burst_launch()
        operation = self._wait_for_burst_ops(burst_config)[0]
        assert not operation.has_finished
        self.flow_c.stop_burst_operation(operation.id, 0, False)
        operation = dao.get_operation_by_id(operation.id)
        assert operation.status == STATUS_CANCELED

    def test_stop_burst_operation_group(self, long_burst_launch):
        burst_config = long_burst_launch(True)
        operations = self._wait_for_burst_ops(burst_config)
        operations_group_id = 0
        for operation in operations:
            assert not operation.has_finished
            operations_group_id = operation.fk_operation_group
        self.flow_c.stop_burst_operation(operations_group_id, 1, False)
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            assert operation.status == STATUS_CANCELED

    def test_remove_burst_operation(self, long_burst_launch):
        # BUGFIX: call the builder returned by the fixture
        burst_config = long_burst_launch()
        operation = self._wait_for_burst_ops(burst_config)[0]
        assert not operation.has_finished
        self.flow_c.stop_burst_operation(operation.id, 0, True)
        operation = dao.try_get_operation_by_id(operation.id)
        assert operation is None

    def test_remove_burst_operation_group(self, long_burst_launch):
        burst_config = long_burst_launch(True)
        operations = self._wait_for_burst_ops(burst_config)
        operations_group_id = 0
        for operation in operations:
            assert not operation.has_finished
            operations_group_id = operation.fk_operation_group
        self.flow_c.stop_burst_operation(operations_group_id, 1, True)
        for operation in operations:
            operation = dao.try_get_operation_by_id(operation.id)
            assert operation is None

    def _launch_test_algo_on_cluster(self, **data):
        """ Launch TestAdapter1 asynchronously and return the created operations. """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
        algo = adapter.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id,
                                                                  algo, algo_category, {}, **data)
        self.operation_service._send_to_cluster(operations, adapter)
        return operations

    def test_stop_operations(self):
        data = {"test1_val1": 5, 'test1_val2': 5}
        operations = self._launch_test_algo_on_cluster(**data)
        operation = dao.get_operation_by_id(operations[0].id)
        assert not operation.has_finished
        self.flow_c.stop_operation(operation.id, 0, False)
        operation = dao.get_operation_by_id(operation.id)
        assert operation.status == STATUS_CANCELED

    def test_stop_operations_group(self):
        data = {RANGE_PARAMETER_1: "test1_val1", "test1_val1": '5,6,7', 'test1_val2': 5}
        operations = self._launch_test_algo_on_cluster(**data)
        operation_group_id = 0
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            assert not operation.has_finished
            operation_group_id = operation.fk_operation_group
        self.flow_c.stop_operation(operation_group_id, 1, False)
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            assert operation.status == STATUS_CANCELED
class TestFlowController(BaseControllersTest):
    """ Unit tests for FlowController """

    def setup_method(self):
        """ Sets up the environment for testing; creates a `FlowController` """
        self.init()
        self.flow_c = FlowController()
        self.burst_c = SimulatorController()
        self.operation_service = OperationService()

    def teardown_method(self):
        """ Cleans up the testing environment """
        self.cleanup()
        self.clean_database()

    def test_context_selected(self):
        """
        Remove the project from CherryPy session and check that you are redirected to projects page.
        """
        del cherrypy.session[common.KEY_PROJECT]
        self._expect_redirect('/project/viewall', self.flow_c.step_analyzers)

    def test_valid_step(self):
        """
        For all algorithm categories check that a submenu is generated and the result
        page has its title given by category name.
        """
        result_dict = self.flow_c.step_analyzers()
        assert common.KEY_SUBMENU_LIST in result_dict, \
            "Expect to have a submenu with available algorithms for category."
        assert result_dict["section_name"] == 'analyze'

    def test_step_connectivity(self):
        """
        Check that the correct section name and connectivity sub-menu are returned for the connectivity step.
        """
        result_dict = self.flow_c.step_connectivity()
        assert result_dict['section_name'] == 'connectivity'
        assert result_dict['submenu_list'] == self.flow_c.connectivity_submenu

    def test_default(self):
        """
        Test default method from step controllers. Check that the submit link is ok, that a mainContent
        is present in result dict and that the isAdapter flag is set to true.
        """
        cherrypy.request.method = "GET"
        categories = dao.get_algorithm_categories()
        for categ in categories:
            # Ignore creators, as those won't go through this flow
            if categ.displayname in [
                    CreateAlgorithmCategoryConfig.category_name
            ]:
                continue
            algo_groups = dao.get_adapters_from_categories([categ.id])
            for algo in algo_groups:
                result_dict = self.flow_c.default(categ.id, algo.id)
                assert result_dict[common.KEY_SUBMIT_LINK] == '/flow/%i/%i' % (
                    categ.id, algo.id)
                assert 'mainContent' in result_dict
                assert result_dict['isAdapter']

    def test_default_cancel(self):
        """
        On cancel we should get a redirect to the back page link.
        """
        cherrypy.request.method = "POST"
        categories = dao.get_algorithm_categories()
        algo_groups = dao.get_adapters_from_categories([categories[0].id])
        self._expect_redirect('/project/viewoperations/%i' % self.test_project.id,
                              self.flow_c.default, categories[0].id,
                              algo_groups[0].id, cancel=True,
                              back_page='operations')

    def test_default_invalid_key(self):
        """
        Pass invalid keys for adapter and step and check you get redirect to tvb entry page with error set.
        """
        self._expect_redirect('/tvb?error=True', self.flow_c.default,
                              'invalid', 'invalid')

    def test_read_datatype_attribute(self, dummy_datatype_index_factory):
        """
        Read an attribute from a datatype.
        """
        dt = dummy_datatype_index_factory(row1='This is stored data')
        dt.subject = "test_subject"
        dt.state = "RAW_STATE"
        # The attribute is JSON-encoded by the controller, hence the extra quotes
        returned_data = self.flow_c.read_datatype_attribute(dt.gid, "row1")
        assert returned_data == '"This is stored data"'

    def test_read_datatype_attribute_method_call(self, dummy_datatype_index_factory):
        """
        Call method on given datatype.
        """
        dt = dummy_datatype_index_factory(row1='This is stored data')
        args = {'length': 101}
        returned_data = self.flow_c.read_datatype_attribute(
            dt.gid, 'return_test_data', **args)
        assert returned_data.replace('"', '') == " ".join(
            str(x) for x in range(101))

    def test_get_simple_adapter_interface(self, test_adapter_factory):
        """ The returned adapter form should mirror a freshly submitted TestAdapter1Form. """
        algo = test_adapter_factory()
        form = TestAdapter1Form()
        adapter = TestFactory.create_adapter(
            'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1')
        adapter.submit_form(form)
        result = self.flow_c.get_simple_adapter_interface(algo.id)
        expected_interface = adapter.get_form()
        found_form = result['adapter_form']['adapter_form']
        assert isinstance(result['adapter_form'], dict)
        assert isinstance(found_form, TestAdapter1Form)
        assert found_form.test1_val1.value == expected_interface.test1_val1.value
        assert found_form.test1_val2.value == expected_interface.test1_val2.value

    def test_stop_burst_operation(self, simulation_launch):
        operation = simulation_launch(self.test_user, self.test_project, 1000)
        assert not operation.has_finished
        self.flow_c.cancel_or_remove_operation(operation.id, 0, False)
        operation = dao.get_operation_by_id(operation.id)
        assert operation.status == STATUS_CANCELED

    def test_stop_burst_operation_group(self, simulation_launch):
        # True -> range launch, producing an operation group
        first_op = simulation_launch(self.test_user, self.test_project, 1000,
                                     True)
        operations_group_id = first_op.fk_operation_group
        assert not first_op.has_finished
        self.flow_c.cancel_or_remove_operation(operations_group_id, 1, False)
        operations = dao.get_operations_in_group(operations_group_id)
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            assert operation.status == STATUS_CANCELED

    def test_remove_burst_operation(self, simulation_launch):
        operation = simulation_launch(self.test_user, self.test_project, 1000)
        assert not operation.has_finished
        # remove_after_stop=True -> the operation is deleted, not only canceled
        self.flow_c.cancel_or_remove_operation(operation.id, 0, True)
        operation = dao.try_get_operation_by_id(operation.id)
        assert operation is None

    def test_remove_burst_operation_group(self, simulation_launch):
        first_op = simulation_launch(self.test_user, self.test_project, 1000,
                                     True)
        operations_group_id = first_op.fk_operation_group
        assert not first_op.has_finished
        self.flow_c.cancel_or_remove_operation(operations_group_id, 1, True)
        operations = dao.get_operations_in_group(operations_group_id)
        for operation in operations:
            operation = dao.try_get_operation_by_id(operation.id)
            assert operation is None

    def _asynch_launch_simple_op(self):
        """ Prepare and send a TestAdapter1 operation to the (test) cluster; returns the operation. """
        adapter = TestFactory.create_adapter(
            'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1')
        view_model = TestModel()
        view_model.test1_val1 = 5
        view_model.test1_val2 = 6
        algo = adapter.stored_adapter
        operation = self.operation_service.prepare_operation(
            self.test_user.id, self.test_project, algo, view_model=view_model)
        self.operation_service._send_to_cluster(operation, adapter)
        return operation

    def test_stop_operation(self):
        operation = self._asynch_launch_simple_op()
        operation = dao.get_operation_by_id(operation.id)
        assert not operation.has_finished
        self.flow_c.cancel_or_remove_operation(operation.id, 0, False)
        operation = dao.get_operation_by_id(operation.id)
        assert operation.status == STATUS_CANCELED

    def test_stop_operations_group(self, test_adapter_factory,
                                   datatype_group_factory):
        group = datatype_group_factory(status=STATUS_STARTED, store_vm=True)
        operations = dao.get_operations_in_group(group.fk_from_operation)
        operation_group_id = 0
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            assert not operation.has_finished
            operation_group_id = operation.fk_operation_group
        self.flow_c.cancel_or_remove_operation(operation_group_id, 1, False)
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            assert operation.status == STATUS_CANCELED
def test_reduce_dimension_component(self): """ Tests the generation of the component which allows the user to select one dimension from a multi dimension array """ flow_service = FlowService() array_count = self.count_all_entities(MappedArray) assert 0 == array_count, "Expected to find no data" adapter_instance = NDimensionArrayAdapter() PARAMS = {} OperationService().initiate_prelaunch(self.operation, adapter_instance, {}, **PARAMS) inserted_arrays, array_count = flow_service.get_available_datatypes( self.test_project.id, MappedArray) assert 1 == array_count, "Problems when inserting data" algorithm = flow_service.get_algorithm_by_module_and_class( 'tvb.tests.framework.adapters.ndimensionarrayadapter', 'NDimensionArrayAdapter') interface = flow_service.prepare_adapter(self.test_project.id, algorithm) self.template_specification['inputList'] = interface resulted_html = _template2string(self.template_specification) self.soup = BeautifulSoup(resulted_html) found_divs = self.soup.find_all( 'p', attrs=dict(id="dimensionsDiv_input_data")) assert len(found_divs) == 1, "Data generated incorrect" gid = inserted_arrays[0][2] cherrypy.session = {'user': self.test_user} entity = dao.get_datatype_by_gid(gid) component_content = FlowController().gettemplatefordimensionselect( gid, "input_data") self.soup = BeautifulSoup(component_content) # check dimensions found_selects_0 = self.soup.find_all( 'select', attrs=dict(id="dimId_input_data_dimensions_0")) found_selects_1 = self.soup.find_all( 'select', attrs=dict(id="dimId_input_data_dimensions_1")) found_selects_2 = self.soup.find_all( 'select', attrs=dict(id="dimId_input_data_dimensions_2")) assert len(found_selects_0) == 1, "select not found" assert len(found_selects_1) == 1, "select not found" assert len(found_selects_2) == 1, "select not found" # check the aggregation functions selects agg_selects_0 = self.soup.find_all( 'select', attrs=dict(id="funcId_input_data_dimensions_0")) agg_selects_1 = self.soup.find_all( 'select', 
attrs=dict(id="funcId_input_data_dimensions_1")) agg_selects_2 = self.soup.find_all( 'select', attrs=dict(id="funcId_input_data_dimensions_2")) assert len(agg_selects_0), 1 == "incorrect first dim" assert len(agg_selects_1), 1 == "incorrect second dim" assert len(agg_selects_2), 1 == "incorrect third dim." data_shape = entity.shape assert len(data_shape) == 3, "Shape of the array is incorrect" for i in range(data_shape[0]): options = self.soup.find_all('option', attrs=dict(value=gid + "_0_" + str(i))) assert len(options) == 1, "Generated option is incorrect" assert options[0].text == "Time " + str( i), "The label of the option is not correct" assert options[0].parent["name"] == "input_data_dimensions_0" for i in range(data_shape[1]): options = self.soup.find_all('option', attrs=dict(value=gid + "_1_" + str(i))) assert len(options) == 1, "Generated option is incorrect" assert options[0].text == "Channel " + str( i), "Option's label incorrect" assert options[0].parent[ "name"] == "input_data_dimensions_1", "incorrect parent" for i in range(data_shape[2]): options = self.soup.find_all('option', attrs=dict(value=gid + "_2_" + str(i))) assert len(options) == 1, "Generated option is incorrect" assert options[0].text == "Line " + str( i), "The label of the option is not correct" assert options[0].parent["name"] == "input_data_dimensions_2" # check the expected hidden fields expected_shape = self.soup.find_all( 'input', attrs=dict(id="input_data_expected_shape")) assert len(expected_shape) == 1, "The generated option is not correct" assert expected_shape[0][ "value"] == "expected_shape_", "The generated option is not correct" input_hidden_op = self.soup.find_all( 'input', attrs=dict(id="input_data_operations")) assert len(input_hidden_op) == 1, "The generated option is not correct" assert input_hidden_op[0][ "value"] == "operations_", "The generated option is not correct" input_hidden_dim = self.soup.find_all( 'input', attrs=dict(id="input_data_expected_dim")) assert len( 
input_hidden_dim) == 1, "The generated option is not correct" assert input_hidden_dim[0][ "value"] == "requiredDim_1", "The generated option is not correct" input_hidden_shape = self.soup.find_all( 'input', attrs=dict(id="input_data_array_shape")) assert len( input_hidden_shape) == 1, "The generated option is not correct" assert input_hidden_shape[0][ "value"] == "[5, 1, 3]", "The generated option is not correct" # check only the first option from the aggregations functions selects options = self.soup.find_all('option', attrs=dict(value="func_none")) assert len(options) == 3, "The generated option is not correct"
    def get_datatype_details(self, entity_gid, back_page='null', exclude_tabs=None):
        """
        Returns the HTML which contains the details for the given dataType.

        :param entity_gid: GID of the DataType whose details overlay is built
        :param back_page: if different from 'null' (the default) it will redirect to it after saving metadata changes
        :param exclude_tabs: optional list of tab names to leave out of the overlay
        """
        if exclude_tabs is None:
            exclude_tabs = []
        selected_project = common.get_current_project()
        datatype_details, states, entity = self.project_service.get_datatype_details(entity_gid)

        ### Load DataType categories
        current_type = datatype_details.data_type
        datatype_gid = datatype_details.gid
        categories = {}
        if not entity.invalid:
            # Only valid entities expose launchable algorithms
            categories = self.flow_service.get_launchable_algorithms(datatype_gid)

        is_group = False
        if datatype_details.operation_group_id is not None:
            ## Is a DataTypeGroup
            is_group = True

        ### Retrieve links
        linkable_projects_dict = self._get_linkable_projects_dict(entity.id)
        ### Load all exporters
        exporters = {}
        if not entity.invalid:
            exporters = ExportManager().get_exporters_for_data(entity)
        is_relevant = entity.visible

        template_specification = {"entity_gid": entity_gid,
                                  "nodeFields": datatype_details.get_ui_fields(),
                                  "allStates": states,
                                  "project": selected_project,
                                  "categories": categories,
                                  "exporters": exporters,
                                  "datatype_id": entity.id,
                                  "isGroup": is_group,
                                  "isRelevant": is_relevant,
                                  "nodeType": 'datatype',
                                  "backPageIdentifier": back_page}
        template_specification.update(linkable_projects_dict)

        # CSS class drives both styling and JS behavior of the overlay node
        overlay_class = "can-browse editor-node node-type-" + str(current_type).lower()
        if is_relevant:
            overlay_class += " node-relevant"
        else:
            overlay_class += " node_irrelevant"
        overlay_title = current_type
        if datatype_details.datatype_tag_1:
            overlay_title += " " + datatype_details.datatype_tag_1

        # Build the overlay tabs; overlay_indexes preserves each tab's fixed slot
        # even when some tabs are excluded.
        tabs = []
        overlay_indexes = []
        if "Metadata" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Metadata", "metadata"))
            overlay_indexes.append(0)
        if "Analyzers" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Analyzers", "analyzers",
                                             enabled=categories and 'Analyze' in categories))
            overlay_indexes.append(1)
        if "Visualizers" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Visualizers", "visualizers",
                                             enabled=categories and 'View' in categories))
            overlay_indexes.append(2)

        # Links tab is enabled when the entity can be linked to, or already is
        # linked into, at least one other project.
        enable_link_tab = False
        if (not entity.invalid) and (linkable_projects_dict is not None):
            projects_for_link = linkable_projects_dict.get(self.PRROJECTS_FOR_LINK_KEY)
            if projects_for_link is not None and len(projects_for_link) > 0:
                enable_link_tab = True
            projects_linked = linkable_projects_dict.get(self.PRROJECTS_LINKED_KEY)
            if projects_linked is not None and len(projects_linked) > 0:
                enable_link_tab = True
        if "Links" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Links", "link_to", enabled=enable_link_tab))
            overlay_indexes.append(3)
        if "Export" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Export", "export",
                                             enabled=(exporters and len(exporters) > 0)))
            overlay_indexes.append(4)
        if "Derived DataTypes" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Derived DataTypes", "result_dts",
                                             enabled=self.project_service.count_datatypes_generated_from(entity_gid)))
            overlay_indexes.append(5)

        template_specification = self.fill_overlay_attributes(template_specification, "DataType Details",
                                                              overlay_title, "project/details_datatype_overlay",
                                                              overlay_class, tabs, overlay_indexes)
        template_specification['baseUrl'] = TvbProfile.current.web.BASE_URL
        return FlowController().fill_default_attributes(template_specification)
class FlowContollerTest(BaseControllersTest):
    """ Unit tests for FlowController """

    def setUp(self):
        """ Sets up the environment for testing; creates a `FlowController` """
        self.init()
        self.flow_c = FlowController()
        self.burst_c = BurstController()
        self.operation_service = OperationService()

    def tearDown(self):
        """ Cleans up the testing environment """
        self.cleanup()
        self.clean_database()

    def test_context_selected(self):
        """
        Remove the project from CherryPy session and check that you are redirected to projects page.
        """
        del cherrypy.session[common.KEY_PROJECT]
        self._expect_redirect('/project/viewall', self.flow_c.step)

    def test_invalid_step(self):
        """
        Pass an invalid step and make sure we are redirected to tvb start page.
        """
        self._expect_redirect('/tvb', self.flow_c.step)

    def test_valid_step(self):
        """
        For all algorithm categories check that a submenu is generated and the result
        page has its title given by category name.
        """
        categories = dao.get_algorithm_categories()
        for categ in categories:
            result_dict = self.flow_c.step(categ.id)
            self.assertTrue(common.KEY_SUBMENU_LIST in result_dict,
                            "Expect to have a submenu with available algorithms for category.")
            self.assertEqual(result_dict["section_name"], categ.displayname.lower())

    def test_step_connectivity(self):
        """
        Check that the correct section name and connectivity sub-menu are returned for the connectivity step.
        """
        result_dict = self.flow_c.step_connectivity()
        self.assertEqual(result_dict['section_name'], 'connectivity')
        self.assertEqual(result_dict['submenu_list'], self.flow_c.connectivity_submenu)

    def test_default(self):
        """
        Test default method from step controllers. Check that the submit link is ok, that a mainContent
        is present in result dict and that the isAdapter flag is set to true.
        """
        cherrypy.request.method = "GET"
        categories = dao.get_algorithm_categories()
        for categ in categories:
            algo_groups = dao.get_groups_by_categories([categ.id])
            for algo in algo_groups:
                result_dict = self.flow_c.default(categ.id, algo.id)
                self.assertEqual(result_dict[common.KEY_SUBMIT_LINK], '/flow/%i/%i' % (categ.id, algo.id))
                self.assertTrue('mainContent' in result_dict)
                self.assertTrue(result_dict['isAdapter'])

    def test_default_cancel(self):
        """
        On cancel we should get a redirect to the back page link.
        """
        cherrypy.request.method = "POST"
        categories = dao.get_algorithm_categories()
        algo_groups = dao.get_groups_by_categories([categories[0].id])
        self._expect_redirect('/project/viewoperations/%i' % self.test_project.id, self.flow_c.default,
                              categories[0].id, algo_groups[0].id, cancel=True, back_page='operations')

    def test_default_invalid_key(self):
        """
        Pass invalid keys for adapter and step and check you get redirect to tvb entry page with error set.
        """
        self._expect_redirect('/tvb?error=True', self.flow_c.default, 'invalid', 'invalid')

    def test_read_datatype_attribute(self):
        """
        Read an attribute from a datatype.
        """
        dt = DatatypesFactory().create_datatype_with_storage("test_subject", "RAW_STATE",
                                                             'this is the stored data'.split())
        returned_data = self.flow_c.read_datatype_attribute(dt.gid, "string_data")
        self.assertEqual(returned_data, '["this", "is", "the", "stored", "data"]')

    def test_read_datatype_attribute_method_call(self):
        """
        Call method on given datatype.
        """
        dt = DatatypesFactory().create_datatype_with_storage("test_subject", "RAW_STATE",
                                                             'this is the stored data'.split())
        args = {'length': 101}
        returned_data = self.flow_c.read_datatype_attribute(dt.gid, 'return_test_data', **args)
        # BUGFIX: str(range(101)) only equals the list repr on Python 2;
        # str(list(range(101))) is identical on Py2 and correct on Py3.
        self.assertEqual(returned_data, str(list(range(101))))

    def test_get_simple_adapter_interface(self):
        """ The simple interface of a known adapter matches its declared input tree. """
        adapter = dao.find_group('tvb.tests.framework.adapters.testadapter1', 'TestAdapter1')
        result = self.flow_c.get_simple_adapter_interface(adapter.id)
        expected_interface = TestAdapter1().get_input_tree()
        self.assertEqual(result['inputList'], expected_interface)

    def _long_burst_launch(self, is_range=False):
        """ Launch a long-running burst (optionally a range launch) and return its config. """
        self.burst_c.index()
        connectivity = DatatypesFactory().create_connectivity()[1]
        launch_params = copy.deepcopy(SIMULATOR_PARAMETERS)
        launch_params['connectivity'] = dao.get_datatype_by_id(connectivity.id).gid
        if not is_range:
            launch_params['simulation_length'] = '10000'
        else:
            # A range over simulation_length produces an operation group
            launch_params['simulation_length'] = '[10000,10001,10002]'
            launch_params[model.RANGE_PARAMETER_1] = 'simulation_length'
        launch_params = {"simulator_parameters": json.dumps(launch_params)}
        burst_id = json.loads(self.burst_c.launch_burst("new", "test_burst", **launch_params))['id']
        return dao.get_burst_by_id(burst_id)

    def _wait_for_burst_ops(self, burst_config):
        """ Sleeps until some operation of the burst is created (max ~50s). """
        waited = 1
        timeout = 50
        operations = dao.get_operations_in_burst(burst_config.id)
        while not len(operations) and waited <= timeout:
            sleep(1)
            waited += 1
            operations = dao.get_operations_in_burst(burst_config.id)
        return operations

    def test_stop_burst_operation(self):
        burst_config = self._long_burst_launch()
        operation = self._wait_for_burst_ops(burst_config)[0]
        self.assertFalse(operation.has_finished)
        self.flow_c.stop_burst_operation(operation.id, 0, False)
        operation = dao.get_operation_by_id(operation.id)
        self.assertEqual(operation.status, model.STATUS_CANCELED)

    def test_stop_burst_operation_group(self):
        burst_config = self._long_burst_launch(True)
        operations = self._wait_for_burst_ops(burst_config)
        operations_group_id = 0
        for operation in operations:
            self.assertFalse(operation.has_finished)
            operations_group_id = operation.fk_operation_group
        self.flow_c.stop_burst_operation(operations_group_id, 1, False)
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            self.assertEqual(operation.status, model.STATUS_CANCELED)

    def test_remove_burst_operation(self):
        burst_config = self._long_burst_launch()
        operation = self._wait_for_burst_ops(burst_config)[0]
        self.assertFalse(operation.has_finished)
        self.flow_c.stop_burst_operation(operation.id, 0, True)
        operation = dao.try_get_operation_by_id(operation.id)
        self.assertIsNone(operation)

    def test_remove_burst_operation_group(self):
        burst_config = self._long_burst_launch(True)
        operations = self._wait_for_burst_ops(burst_config)
        operations_group_id = 0
        for operation in operations:
            self.assertFalse(operation.has_finished)
            operations_group_id = operation.fk_operation_group
        self.flow_c.stop_burst_operation(operations_group_id, 1, True)
        for operation in operations:
            operation = dao.try_get_operation_by_id(operation.id)
            self.assertIsNone(operation)

    def _launch_test_algo_on_cluster(self, **data):
        """ Launch TestAdapter1 asynchronously and return the created operations. """
        module = "tvb.tests.framework.adapters.testadapter1"
        class_name = "TestAdapter1"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        algo_group = adapter.algorithm_group
        algo_category = dao.get_category_by_id(algo_group.fk_category)
        algo = dao.get_algorithm_by_group(algo_group.id)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id,
                                                                  algo, algo_category, {},
                                                                  ABCAdapter.LAUNCH_METHOD, **data)
        self.operation_service._send_to_cluster(operations, adapter)
        return operations

    def test_stop_operations(self):
        data = {"test1_val1": 5, 'test1_val2': 5}
        operations = self._launch_test_algo_on_cluster(**data)
        operation = dao.get_operation_by_id(operations[0].id)
        self.assertFalse(operation.has_finished)
        self.flow_c.stop_operation(operation.id, 0, False)
        operation = dao.get_operation_by_id(operation.id)
        self.assertEqual(operation.status, model.STATUS_CANCELED)

    def test_stop_operations_group(self):
        data = {model.RANGE_PARAMETER_1: "test1_val1", "test1_val1": '5,6,7', 'test1_val2': 5}
        operations = self._launch_test_algo_on_cluster(**data)
        operation_group_id = 0
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            self.assertFalse(operation.has_finished)
            operation_group_id = operation.fk_operation_group
        self.flow_c.stop_operation(operation_group_id, 1, False)
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            self.assertEqual(operation.status, model.STATUS_CANCELED)
def __init__(self): super(ProjectController, self).__init__() self.project_service = ProjectService() self.flow_controller = FlowController()