def get_creator_and_interface(self, creator_module, creator_class, datatype_instance, lock_midpoint_for_eq=None):
    """
    Returns a Tuple: a creator instance and a dictionary for the creator interface.
    The interface is prepared for rendering, it is populated with existent data, in case of
    a parameter of type DataType. The name of the attributes are also prefixed to identify groups.

    :param creator_module: module name of the creator algorithm to look up
    :param creator_class: class name of the creator algorithm
    :param datatype_instance: DataType instance whose traited interface is rendered
    :param lock_midpoint_for_eq: optional list of indices into the input list; those
        entries are passed through self._lock_midpoints (presumably locking their
        midpoint fields in the UI -- TODO confirm against _lock_midpoints)
    """
    algo_group = self.flow_service.get_algorithm_by_module_and_class(creator_module, creator_class)[1]
    group, _ = self.flow_service.prepare_adapter(base.get_current_project().id, algo_group)
    # I didn't use the interface (from the above line) returned by the method 'prepare_adapter'
    # from flow service because the selects that display dataTypes will also have the 'All' entry.
    # NOTE: this mutates the trait bound on the passed instance before reading its interface.
    datatype_instance.trait.bound = traited_interface.INTERFACE_ATTRIBUTES_ONLY
    input_list = datatype_instance.interface[traited_interface.INTERFACE_ATTRIBUTES]
    if lock_midpoint_for_eq is not None:
        for idx in lock_midpoint_for_eq:
            input_list[idx] = self._lock_midpoints(input_list[idx])
    category = self.flow_service.get_visualisers_category()
    # Populate DataType selects with existing entities, then prefix attribute names per group.
    input_list = self.flow_service.prepare_parameters(input_list, base.get_current_project().id, category.id)
    input_list = ABCAdapter.prepare_param_names(input_list)
    return self.flow_service.build_adapter_instance(group), input_list
def index(self):
    """
    Get on burst main page.

    Renders the Simulation Cockpit: burst history, portlets, and (for a new or
    not-yet-saved burst) the simulator input tree with current values filled in.
    """
    template_specification = dict(mainContent="burst/main_burst", title="Simulation Cockpit",
                                  baseUrl=cfg.BASE_URL, includedResources='project/included_resources')
    portlets_list = self.burst_service.get_available_portlets()
    session_stored_burst = base.get_from_session(base.KEY_BURST_CONFIG)
    # Only a new (no session entry) or unsaved (id is None) burst gets an editable input tree.
    if session_stored_burst is None or session_stored_burst.id is None:
        if session_stored_burst is None:
            session_stored_burst = self.burst_service.new_burst_configuration(base.get_current_project().id)
            base.add2session(base.KEY_BURST_CONFIG, session_stored_burst)
        adapter_interface = self.cached_simulator_input_tree
        if session_stored_burst is not None:
            # Pre-populate the tree with the values already configured on this burst.
            current_data = session_stored_burst.get_all_simulator_values()[0]
            adapter_interface = ABCAdapter.fill_defaults(adapter_interface, current_data, True)
            ### Add simulator tree to session to be available in filters
            self.context.add_adapter_to_session(self.cached_simulator_algo_group,
                                                adapter_interface, current_data)
        template_specification['inputList'] = adapter_interface
    selected_portlets = session_stored_burst.update_selected_portlets()
    template_specification['burst_list'] = self.burst_service.get_available_bursts(base.get_current_project().id)
    template_specification['portletList'] = portlets_list
    template_specification['selectedPortlets'] = json.dumps(selected_portlets)
    template_specification['draw_hidden_ranges'] = True
    template_specification['burstConfig'] = session_stored_burst
    ### Prepare PSE available metrics
    ### We put here all available algorithms, because the metrics select area is a generic one,
    ### and not loaded with every Burst Group change in history.
    algo_group = self.flow_service.get_algorithm_by_module_and_class(MEASURE_METRICS_MODULE,
                                                                     MEASURE_METRICS_CLASS)[1]
    adapter_instance = ABCAdapter.build_adapter(algo_group)
    if adapter_instance is not None and hasattr(adapter_instance, 'available_algorithms'):
        template_specification['available_metrics'] = [metric_name for metric_name
                                                       in adapter_instance.available_algorithms.keys()]
    else:
        template_specification['available_metrics'] = []
    template_specification[base.KEY_PARAMETERS_CONFIG] = False
    template_specification[base.KEY_SECTION] = 'burst'
    return self.fill_default_attributes(template_specification)
def _compute_operation_details(self, entity_gid, is_group=False):
    """
    Build the dictionary with display details for one operation (or operation group).

    :param entity_gid: GID of the operation entity
    :param is_group: True when the GID identifies an operation group
    :return: template dictionary with node fields and flags for the overlay
    """
    current_project = bc.get_current_project()
    op_details = self.project_service.get_operation_details(entity_gid, is_group)
    operation_id = op_details.operation_id
    operation = self.flow_service.load_operation(operation_id)
    # Reload is forbidden for ranged/burst operations and for raw (upload) categories.
    if operation.fk_operation_group is not None or operation.burst is not None:
        can_reload = False
    else:
        raw_category_ids = [category.id for category in self.flow_service.get_raw_categories()]
        can_reload = operation.algorithm.algo_group.fk_category not in raw_category_ids
    return {"entity_gid": entity_gid,
            "nodeFields": op_details.get_ui_fields(),
            "operationId": operation_id,
            "displayReloadBtn": can_reload,
            "project": current_project,
            "isRelevant": operation.visible}
def launch_burst(self, launch_mode, burst_name, **data):
    """
    Do the actual burst launch, using the configuration saved in current session.

    :param launch_mode: new/branch/continue
    :param burst_name: user-given burst name. It can be empty (case in which we will fill with simulation_x)
    :param data: kwargs for simulation input parameters.
    :return: [burst_id, burst_name] of the launched burst
    """
    burst_config = base.get_from_session(base.KEY_BURST_CONFIG)
    # A real name was provided by the user: validate it and store it on the config.
    if burst_name != 'none_undefined':
        self._validate_burst_name(burst_name)
        burst_config.name = burst_name
    # Push all simulator parameters into the burst configuration.
    data[base.KEY_ADAPTER] = self.cached_simulator_algorithm_id
    burst_config.update_simulator_configuration(data)
    burst_config.fk_project = base.get_current_project().id
    # Launch asynchronously and report the resulting identifiers back to the UI.
    launched_id, launched_name = self.burst_service.launch_burst(burst_config, 0,
                                                                 self.cached_simulator_algorithm_id,
                                                                 base.get_logged_user().id, launch_mode)
    return [launched_id, launched_name]
def get_available_selections(self, **data):
    """
    Get all the saved selections for the current project and return the ones
    that are compatible with the received connectivity labels.

    :param data: expects keys 'connectivity_gid', 'con_selection' and 'con_labels'
    :return: rendered template attributes with parallel lists of selection
        node-label strings, names and ids, plus the default label set.
    """
    current_project = base.get_current_project()
    connectivity_gid = data['connectivity_gid']
    selections = self.flow_service.get_selections_for_project(current_project.id, connectivity_gid)
    default_selection = data['con_selection']
    # Fall back to the full label list when no explicit selection was passed.
    if not default_selection:
        default_selection = data['con_labels']
    nodes, ids, names = [], [], []
    for selection in selections:
        ids.append(selection.id)
        labels = json.loads(selection.labels)
        # Comma-joined labels of the selected nodes (join replaces the old
        # quadratic '+=' loop with a trailing-comma trim).
        nodes.append(','.join(labels[idx] for idx in json.loads(selection.selected_nodes)))
        names.append(selection.ui_name)
    result = dict(selection_nodes=nodes, selection_names=names, selection_ids=ids,
                  all_labels=default_selection, new_selection_name=self.NEW_SELECTION_NAME)
    return self.fill_default_attributes(result)
def get_portlet_configurable_interface(self, index_in_tab):
    """
    From the position given by the tab index and the index from that tab, get the portlet
    configuration and build the configurable interface for that portlet.
    """
    burst_config = base.get_from_session(base.KEY_BURST_CONFIG)
    current_tab = burst_config.tabs[burst_config.selected_tab]
    portlet_config = current_tab.portlets[int(index_in_tab)]
    portlet_interface = self.burst_service.build_portlet_interface(portlet_config,
                                                                   base.get_current_project().id)
    # Flatten the per-adapter interfaces into one tree and store it on the
    # session, so later filtering requests can resolve against it.
    flat_input_tree = []
    for adapter_conf in portlet_interface:
        flat_input_tree.extend(adapter_conf.interface)
    self.context.add_portlet_to_session(flat_input_tree)
    result = {"adapters_list": portlet_interface,
              base.KEY_PARAMETERS_CONFIG: False,
              base.KEY_SESSION_TREE: self.context.KEY_PORTLET_CONFIGURATION}
    return self.fill_default_attributes(result)
def reset_burst(self):
    """
    Called when click on "New Burst" entry happens from UI.
    Replaces the session-stored burst configuration with a freshly created, empty one.
    """
    project_id = base.get_current_project().id
    fresh_burst = self.burst_service.new_burst_configuration(project_id)
    base.add2session(base.KEY_BURST_CONFIG, fresh_burst)
def invokeadaptermethod(self, adapter_id, method_name, **data):
    """
    Public web method, to be used when invoking specific methods from external Adapters/Algorithms.

    :param adapter_id: identifier of the algorithm group to load
    :param method_name: name of the method to fire on the built adapter instance
    :param data: kwargs forwarded to the invoked method
    :return: rendered template attributes when a displayer produced a dict result;
        None otherwise (including the failure path)
    """
    algo_group = self.flow_service.get_algo_group_by_identifier(adapter_id)
    try:
        adapter_instance = self.flow_service.build_adapter_instance(algo_group)
        result = self.flow_service.fire_operation(adapter_instance, base.get_logged_user(),
                                                  base.get_current_project().id, method_name, **data)
        base.set_info_message("Submit OK!")
        if isinstance(adapter_instance, ABCDisplayer) and isinstance(result, dict):
            # Displayer result: decorate it so the template renders it as an adapter page.
            base.remove_from_session(base.KEY_MESSAGE)
            result[ABCDisplayer.KEY_IS_ADAPTER] = True
            result[base.KEY_DISPLAY_MENU] = True
            result[base.KEY_OPERATION_ID] = adapter_instance.operation_id
            result[base.KEY_ADAPTER] = adapter_id
            if KEY_CONTROLLS not in result:
                result[KEY_CONTROLLS] = None
            return self.fill_default_attributes(result, algo_group)
    except OperationException as excep:
        # 'except X as e' replaces the legacy 'except X, e' form: valid on
        # Python 2.6+ and required on Python 3.
        base.set_warning_message('Problem when submitting data!')
        self.logger.error("Invalid method, or wrong parameters when invoking external method on post!")
        self.logger.exception(excep)
def load_burst_history(self):
    """
    Load the available burst that are stored in the database at this time.
    This is one alternative to 'chrome-back problem'.
    """
    stored_burst = base.get_from_session(base.KEY_BURST_CONFIG)
    available_bursts = self.burst_service.get_available_bursts(base.get_current_project().id)
    return {'burst_list': available_bursts,
            'selectedBurst': stored_burst.id}
def storeresultfigure(self, img_type, operation_id, **data):
    """
    Create preview for current displayed canvas and store image in current
    session, for future comparison.
    """
    self.figure_service.store_result_figure(base.get_current_project(),
                                            base.get_logged_user(),
                                            img_type, operation_id,
                                            data['export_data'])
def index(self):
    """
    Display project main-menu. Choose one project to work with.
    """
    if bc.get_current_project() is None:
        # No project selected yet: send the user to the project list first.
        raise cherrypy.HTTPRedirect("/project/viewall")
    page_attributes = dict(mainContent="project_submenu", title="TVB Project Menu")
    return self.fill_default_attributes(page_attributes)
def cached_simulator_input_tree(self):
    """
    Cache Simulator's input tree, for performance issues.
    Anyway, without restart, the introspected tree will not be different on multiple executions.

    :return: Simulator's Input Tree (copy from cache or just loaded)
    """
    if self._cached_simulator_input_tree is None:
        # First access: introspect once and keep the result for later calls.
        prepared = self.flow_service.prepare_adapter(base.get_current_project().id,
                                                     self.cached_simulator_algo_group)
        self._cached_simulator_input_tree = prepared[1]
    # Hand out a deep copy so callers cannot corrupt the cached structure.
    return copy.deepcopy(self._cached_simulator_input_tree)
def prepare_group_launch(self, group_gid, step_key, adapter_key, **data):
    """
    Recieves as input a group gid and an algorithm given by category and id, along
    with data that gives the name of the required input parameter for the algorithm.
    Having these generate a range of gid's for all the datatypes in the group and
    launch a new operation group.
    """
    project_service = ProjectService()
    datatype_group = project_service.get_datatypegroup_by_gid(group_gid)
    group_members = project_service.get_datatypes_from_datatype_group(datatype_group.id)
    range_param_name = data.pop('range_param_name')
    data[PARAM_RANGE_1] = range_param_name
    # The ranged parameter takes, in turn, each datatype GID from the group.
    data[range_param_name] = ','.join(dt.gid for dt in group_members)
    OperationService().group_operation_launch(base.get_logged_user().id,
                                              base.get_current_project().id,
                                              int(adapter_key), int(step_key), **data)
    redirect_url = self._compute_back_link('operations', base.get_current_project())
    raise cherrypy.HTTPRedirect(redirect_url)
def _persist_project(self, data, project_id, is_create, current_user):
    """
    Validate the submitted form data and store the project, keeping the
    user's current project selection consistent afterwards.
    """
    form_data = EditForm().to_python(data)
    saved_project = self.project_service.store_project(current_user, is_create, project_id, **form_data)
    selected_project = bc.get_current_project()
    if len(self.project_service.retrieve_projects_for_user(current_user.id, 1)) == 1:
        # This is the user's only project, so it becomes the selected one.
        selected_project = saved_project
    if selected_project is None or saved_project.id == selected_project.id:
        self._mark_selected(saved_project)
def get_simple_adapter_interface(self, algo_group_id, parent_div='', is_uploader=False):
    """
    AJAX exposed method. Will return only the interface for a adapter, to be
    used when tabs are needed.
    """
    project = base.get_current_project()
    # is_uploader arrives from the web layer as a string; normalize it.
    template_specification = self.get_adapter_template(project.id, algo_group_id,
                                                       string2bool(is_uploader))
    template_specification[base.KEY_PARENT_DIV] = parent_div
    return self.fill_default_attributes(template_specification)
def start_dti_pipeline(self, cancel=False, start=False, **data):
    """
    Prepare DTI Pipeline run.

    :param cancel: on a POST, abandon the import and return to the project structure page
    :param start: on a POST, validate the form data and fire the pipeline
    :param data: form fields (server_ip, username, dti_scans, threads_number)
    """
    project_id = basecontroller.get_current_project().id
    if cherrypy.request.method == 'POST' and cancel:
        raise cherrypy.HTTPRedirect("/project/editstructure/" + str(project_id))
    template_specification = dict(title="Import Connectivity", data=data, section_name='project',
                                  subsection_name='pipeline', mainContent="pipeline/get_connectivity",
                                  includedResources='project/included_resources')
    if cherrypy.request.method == 'POST' and start:
        form = ImportForm()
        try:
            data = form.to_python(data)
            service = DTIPipelineService(data['server_ip'], data['username'])
            current_project = basecontroller.get_current_project()
            current_user = basecontroller.get_logged_user()
            service.fire_pipeline(data['dti_scans'], current_project, current_user,
                                  data['threads_number'])
            okmessage = "Import Started! You will see results after few hours on Data Structure Page!"
            basecontroller.set_info_message(okmessage)
            raise cherrypy.HTTPRedirect("/project/editstructure/" + str(project_id))
        except formencode.Invalid as excep:
            # 'as' form replaces the legacy 'except X, e' syntax (removed in Python 3).
            # The HTTPRedirect above is not an Invalid, so it still propagates.
            basecontroller.set_error_message("Some parameters are invalid!")
            template_specification[basecontroller.KEY_ERRORS] = excep.unpack_errors()
    # NOTE(review): control falls off without returning template_specification on the
    # GET/validation-error paths -- presumably the rendering decorator expects a dict
    # here; confirm whether a trailing return was lost.
def cached_simulator_input_tree(self):
    """
    Cache Simulator's input tree, for performance issues.
    Anyway, without restart, the introspected tree will not be different on multiple executions.

    :returns: Simulator's Input Tree (copy from cache or just loaded)
    """
    tree = base.get_from_session(base.KEY_CACHED_SIMULATOR_TREE)
    if tree is None:
        # Not yet cached for this session: introspect once and remember it.
        tree = self.flow_service.prepare_adapter(base.get_current_project().id,
                                                 self.cached_simulator_algo_group)[1]
        base.add2session(base.KEY_CACHED_SIMULATOR_TREE, tree)
    # Callers receive a deep copy, keeping the session-cached tree pristine.
    return copy.deepcopy(tree)
def get_select_existent_entities(self, label, entity_type, entity_gid=None):
    """
    Returns the dictionary needed for drawing the select which display all
    the created entities of the specified type.
    """
    select_entry = {'name': 'existentEntitiesSelect', 'label': label, 'type': entity_type}
    if entity_gid is not None:
        # Pre-select the given entity in the rendered component.
        select_entry['default'] = entity_gid
    interface = [select_entry]
    category = self.flow_service.get_visualisers_category()
    interface = self.flow_service.prepare_parameters(interface,
                                                     base.get_current_project().id,
                                                     category.id)
    return ABCAdapter.prepare_param_names(interface)
def downloaddata(self, data_gid, export_module):
    """ Export the data to a default path of TVB_STORAGE/PROJECTS/project_name """
    current_prj = bc.get_current_project()
    entity = ABCAdapter.load_entity_by_gid(data_gid)
    file_name, file_path, delete_file = ExportManager().export_data(entity, export_module, current_prj)
    if delete_file:
        # The export generated this folder on the fly; force parent-folder
        # deletion once the file has been served.
        self.mark_file_for_delete(file_path, True)
    self.logger.debug("Data exported in file: " + str(file_path))
    return serve_file(file_path, "application/x-download", "attachment", file_name)
def create_stimulus(self):
    """
    Creates a stimulus from the given data.

    :return: False when the launch failed because of invalid parameters; None otherwise
    """
    try:
        context = base.get_from_session(KEY_SURFACE_CONTEXT)
        surface_stimulus_creator = self.get_creator_and_interface(SURFACE_STIMULUS_CREATOR_MODULE,
                                                                  SURFACE_STIMULUS_CREATOR_CLASS,
                                                                  StimuliSurface())[0]
        self.flow_service.fire_operation(surface_stimulus_creator, base.get_logged_user(),
                                         base.get_current_project().id, **context.equation_kwargs)
        base.set_info_message("The operation for creating the stimulus was successfully launched.")
        context.selected_stimulus = None
    except (NameError, ValueError, SyntaxError):
        # Legacy 'except (...), _' syntax replaced; the bound exception was
        # unused, so it is dropped entirely (valid on Python 2.6+ and 3).
        base.set_error_message("The operation failed due to invalid parameter input.")
        return False
def create_local_connectivity(self, **kwargs):
    """
    Used for creating and storing a local connectivity.
    """
    context = base.get_from_session(KEY_LCONN_CONTEXT)
    creator = self.get_creator_and_interface(LOCAL_CONN_CREATOR_MODULE,
                                             LOCAL_CONN_CREATOR_CLASS,
                                             LocalConnectivity())[0]
    self.flow_service.fire_operation(creator, base.get_logged_user(),
                                     base.get_current_project().id, **kwargs)
    base.set_info_message("The operation for creating the local connectivity was successfully launched.")
    # Launch done: clear the wizard context and go back to step one.
    context.reset()
    return self.step_1()
def create_stimulus(self):
    """
    Creates a stimulus from the given data.
    """
    context = base.get_from_session(KEY_REGION_CONTEXT)
    creator = self.get_creator_and_interface(REGION_STIMULUS_CREATOR_MODULE,
                                             REGION_STIMULUS_CREATOR_CLASS,
                                             StimuliRegion())[0]
    # Serialize the per-node weights next to the rest of the equation parameters.
    context.equation_kwargs.update({'weight': json.dumps(context.get_weights())})
    self.flow_service.fire_operation(creator, base.get_logged_user(),
                                     base.get_current_project().id,
                                     **context.equation_kwargs)
    base.set_info_message("The operation for creating the stimulus was successfully launched.")
def displayresultfigures(self, selected_session='all_sessions'):
    """ Collect and display saved previews, grouped by session."""
    project = base.get_current_project()
    user = base.get_logged_user()
    data, all_sessions_info = self.figure_service.retrieve_result_figures(project, user, selected_session)
    if selected_session == 'all_sessions':
        manage_figure_title = "Figures for all categories"
    else:
        manage_figure_title = "Figures for " + str(selected_session) + " category"
    template_specification = dict(mainContent="project/figures_display",
                                  title="Stored Visualizer Previews",
                                  controlPage=None,
                                  displayControl=False,
                                  selected_sessions_data=data,
                                  all_sessions_info=all_sessions_info,
                                  selected_session=selected_session,
                                  manageFigureTitle=manage_figure_title)
    return self.fill_default_attributes(template_specification, subsection='figures')
def readprojectsforlink(self, data_id, return_both=False):
    """ For a given user return a dictionary in form {project_ID: project_Name}. """
    for_link, linked = self.project_service.get_linkable_projects_for_user(bc.get_logged_user().id, data_id)
    current_project = bc.get_current_project()
    to_link_result, linked_result = None, None
    if for_link:
        # Exclude the currently selected project from the link candidates,
        # and serialize the id->name map for the AJAX caller.
        candidates = dict((project.id, project.name)
                          for project in for_link if project.id != current_project.id)
        to_link_result = json.dumps(candidates)
    if return_both:
        if linked:
            linked_result = dict((project.id, project.name) for project in linked)
        return to_link_result, linked_result
    return to_link_result
def store_connectivity_selection(self, ui_name, **data):
    """
    Save the passed connectivity selection.
    Since cherryPy/Ajax seems to have problems when passing arrays, the data
    is passed as a string that needs to be split.
    """
    if not ui_name or ui_name == self.NEW_SELECTION_NAME:
        error_msg = (self.NEW_SELECTION_NAME + " or empty name are not valid as selection names.")
        return [False, error_msg]
    sel_project_id = base.get_current_project().id
    # cherryPy/AJAX does not support lists, so every field arrives comma-separated.
    used_names = data['select_names'].split(',')
    selection = json.dumps([int(idx) for idx in data['selection'].split(',')])
    labels = json.dumps(data['labels'].split(','))
    self.flow_service.save_connectivity_selection(ui_name, sel_project_id, selection, labels, used_names)
    return [True, 'Selection saved successfully.']
def default(self, step_key, adapter_key, cancel=False, back_page=None, not_reset=False, **data):
    """
    Render a specific adapter. 'data' are arguments for POST.

    :param step_key: category identifier for the adapter
    :param adapter_key: identifier of the algorithm group to render/launch
    :param cancel: on POST, abort and redirect to the computed back page
    :param back_page: indicator ('burst'/'operations'/'data'/None) used to build the BACK link
    :param not_reset: when True, keep previously entered values instead of resetting the form
    """
    project = base.get_current_project()
    algo_group = self.flow_service.get_algo_group_by_identifier(adapter_key)
    back_page_link = self._compute_back_link(back_page, project)
    if algo_group is None:
        raise cherrypy.HTTPRedirect("/tvb?error=True")
    if cherrypy.request.method == 'POST' and cancel:
        raise cherrypy.HTTPRedirect(back_page_link)
    submit_link = self.get_url_adapter(step_key, adapter_key, back_page)
    if cherrypy.request.method == 'POST':
        # Launch path: fire the adapter, then land on burst or operations page.
        back_indicator = back_page if back_page == 'burst' else 'operations'
        success_url = self._compute_back_link(back_indicator, project)
        data[base.KEY_ADAPTER] = adapter_key
        template_specification = self.execute_post(project.id, submit_link, success_url,
                                                   step_key, algo_group, **data)
    else:
        # Display path: decide whether previously entered values should survive.
        if (('Referer' not in cherrypy.request.headers
             or ('Referer' in cherrypy.request.headers
                 and 'step' not in cherrypy.request.headers['Referer']))
                and 'View' in algo_group.group_category.displayname):
            # Avoid reset in case of Visualizers, as a supplementary GET
            # might be enforced by MPLH5 on FF.
            not_reset = True
        template_specification = self.get_template_for_adapter(project.id, step_key, algo_group,
                                                               submit_link, not not_reset)
    if template_specification is None:
        raise cherrypy.HTTPRedirect('/tvb')
    # Fill defaults expected by the base template when the adapter did not set them.
    if KEY_CONTROLLS not in template_specification:
        template_specification[KEY_CONTROLLS] = None
    if base.KEY_SUBMIT_LINK not in template_specification:
        template_specification[base.KEY_SUBMIT_LINK] = submit_link
    if KEY_CONTENT not in template_specification:
        # No custom content: render the generic full-interface page, without menu.
        template_specification[KEY_CONTENT] = "flow/full_adapter_interface"
        template_specification[base.KEY_DISPLAY_MENU] = False
    else:
        template_specification[base.KEY_DISPLAY_MENU] = True
    template_specification[base.KEY_BACK_PAGE] = back_page_link
    template_specification[base.KEY_ADAPTER] = adapter_key
    template_specification[ABCDisplayer.KEY_IS_ADAPTER] = True
    self.fill_default_attributes(template_specification, algo_group)
    # Coming from project pages (and not in the connectivity section):
    # highlight the 'project' section in the navigation.
    if (back_page is not None and back_page in ['operations', 'data']
            and not (base.KEY_SECTION in template_specification
                     and template_specification[base.KEY_SECTION] == 'connectivity')):
        template_specification[base.KEY_SECTION] = 'project'
    return template_specification
def create_json(self, item_gid, item_type, visibility_filter):
    """
    Method used for creating a JSON representation of a graph.

    :param item_gid: GID of the node the graph is centered on
    :param item_type: one of the graph_structures.NODE_* type constants
    :param visibility_filter: name of the datatype visibility filter to apply
    :return: JSON string produced by graph_structures.GraphStructure.to_json()
    """
    selected_filter = StaticFiltersFactory.build_datatype_filters(single_filter=visibility_filter)
    graph_branches = []
    project = bc.get_current_project()
    is_upload_operation = (item_type == graph_structures.NODE_OPERATION_TYPE) and \
                          (self.project_service.is_upload_operation(item_gid)
                           or item_gid == "firstOperation")
    if is_upload_operation:
        # Special case: show every uploader operation of the project, one branch each.
        uploader_operations = self.project_service.get_all_operations_for_uploaders(project.id)
        for operation in uploader_operations:
            dt_outputs = self.project_service.get_results_for_operation(operation.id, selected_filter)
            dt_outputs = self._create_datatype_nodes(dt_outputs)
            parent_op = self._create_operation_nodes([operation], item_gid)
            branch = graph_structures.GraphBranch([], parent_op, dt_outputs, [])
            graph_branches.append(branch)
        graph = graph_structures.GraphStructure(graph_branches)
        return graph.to_json()
    dt_inputs, parent_op, dt_outputs, op_inputs = [], [], [], []
    if item_type == graph_structures.NODE_OPERATION_TYPE:
        # Centered on a single operation: its datatype inputs and results.
        dt_inputs = ProjectService.get_datatype_and_datatypegroup_inputs_for_operation(item_gid,
                                                                                       selected_filter)
        parent_op = self.project_service.load_operation_by_gid(item_gid)
        dt_outputs = self.project_service.get_results_for_operation(parent_op.id, selected_filter)
        #create graph nodes
        dt_inputs, parent_op, dt_outputs, op_inputs = self._create_nodes(dt_inputs, [parent_op],
                                                                         dt_outputs, [], item_gid)
    elif item_type == graph_structures.NODE_OPERATION_GROUP_TYPE:
        # Centered on an operation group: inputs of the group, output is its datatype group.
        parent_op_group = self.project_service.get_operation_group_by_gid(item_gid)
        dt_inputs = self.project_service.get_datatypes_inputs_for_operation_group(parent_op_group.id,
                                                                                  selected_filter)
        datatype_group = self.project_service.get_datatypegroup_by_op_group_id(parent_op_group.id)
        datatype = self.project_service.get_datatype_by_id(datatype_group.id)
        dt_inputs = self._create_datatype_nodes(dt_inputs)
        parent_op = graph_structures.OperationGroupNodeStructure(parent_op_group.gid)
        parent_op.selected = True
        parent_op = [parent_op]
        # In the 'relevant only' view, hide an invisible datatype-group output.
        if selected_filter.display_name == StaticFiltersFactory.RELEVANT_VIEW and datatype.visible is False:
            dt_outputs = []
        else:
            dt_outputs = self._create_datatype_nodes([datatype])
    elif item_type == graph_structures.NODE_DATATYPE_TYPE:
        selected_dt = ABCAdapter.load_entity_by_gid(item_gid)
        if self.project_service.is_datatype_group(item_gid):
            # Datatype group: walk up to its operation group for inputs/parent.
            datatype_group = self.project_service.get_datatypegroup_by_gid(selected_dt.gid)
            parent_op_group = self.project_service.get_operation_group_by_id(
                datatype_group.fk_operation_group)
            dt_inputs = self.project_service.get_datatypes_inputs_for_operation_group(
                parent_op_group.id, selected_filter)
            op_inputs = self.project_service.get_operations_for_datatype_group(selected_dt.id,
                                                                               selected_filter)
            op_inputs_in_groups = self.project_service.get_operations_for_datatype_group(
                selected_dt.id, selected_filter, only_in_groups=True)
            #create graph nodes
            dt_inputs, parent_op, dt_outputs, op_inputs = self._create_nodes(dt_inputs, [],
                                                                             [selected_dt],
                                                                             op_inputs, item_gid)
            # Replace the (empty) parent list with the operation-group node itself.
            parent_op = [graph_structures.OperationGroupNodeStructure(parent_op_group.gid)]
            op_inputs_in_groups = self._create_operation_group_nodes(op_inputs_in_groups)
            op_inputs.extend(op_inputs_in_groups)
        else:
            # Plain datatype: parent is the operation that produced it.
            parent_op = self.flow_service.load_operation(selected_dt.fk_from_operation)
            dt_inputs = ProjectService.get_datatype_and_datatypegroup_inputs_for_operation(
                parent_op.gid, selected_filter)
            op_inputs = self.project_service.get_operations_for_datatype(selected_dt.gid,
                                                                         selected_filter)
            op_inputs_in_groups = self.project_service.get_operations_for_datatype(
                selected_dt.gid, selected_filter, only_in_groups=True)
            dt_outputs = self.project_service.get_results_for_operation(parent_op.id, selected_filter)
            #create graph nodes
            dt_inputs, parent_op, dt_outputs, op_inputs = self._create_nodes(dt_inputs, [parent_op],
                                                                             dt_outputs, op_inputs,
                                                                             item_gid)
            op_inputs_in_groups = self._create_operation_group_nodes(op_inputs_in_groups)
            op_inputs.extend(op_inputs_in_groups)
    else:
        self.logger.error("Invalid item type: " + str(item_type))
        raise Exception("Invalid item type.")
    branch = graph_structures.GraphBranch(dt_inputs, parent_op, dt_outputs, op_inputs)
    graph_branches.append(branch)
    graph = graph_structures.GraphStructure(graph_branches)
    return graph.to_json()
class FlowController(base.BaseController):
    """
    This class takes care of executing steps in projects.

    NOTE(review): the class is truncated at the end of this view -- the last
    method, getfiltereddatatypes, continues past the visible source and is
    reproduced here only up to the cut point.
    """

    def __init__(self):
        base.BaseController.__init__(self)
        # Session helper tracking the currently selected adapter/tree.
        self.context = SelectedAdapterContext()
        self.files_helper = FilesHelper()

    @cherrypy.expose
    @using_template('base_template')
    @logged()
    @base.settings()
    @context_selected()
    def step(self, step_key=None):
        """ Choose exact action/adapter for current step. """
        category = self.flow_service.get_category_by_id(step_key)
        if category is None:
            message = 'Inconsistent Step Name! Please excuse the wrong link!'
            base.set_warning_message(message)
            self.logger.warning(message + '- Wrong step:' + str(step_key))
            raise cherrypy.HTTPRedirect('/tvb')
        step_name = category.displayname.lower()
        template_specification = dict(mainContent="header_menu", section_name=step_name,
                                      controlPage=None, title="Select an algorithm",
                                      displayControl=False)
        adapters_list = []
        for algo_group in self.flow_service.get_groups_for_categories([category]):
            # Negative ui_display marks groups hidden from the menu.
            if algo_group.ui_display < 0:
                continue
            adapter_link = self.get_url_adapter(step_key, algo_group.id)
            adapters_list.append({base.KEY_TITLE: algo_group.displayname,
                                  'link': adapter_link,
                                  'description': algo_group.description,
                                  'subsection': algo_group.subsection_name})
        self.analyze_adapters = adapters_list
        template_specification[base.KEY_SUBMENU_LIST] = adapters_list
        return self.fill_default_attributes(template_specification)

    @cherrypy.expose
    @using_template('base_template')
    @base.settings()
    @logged()
    @context_selected()
    def step_connectivity(self):
        """ Display menu for Connectivity Footer tab. """
        template_specification = dict(mainContent="header_menu", section_name='connectivity',
                                      controlPage=None, title="Select an algorithm",
                                      displayControl=False, subsection_name='step',
                                      submenu_list=self.connectivity_submenu)
        return self.fill_default_attributes(template_specification)

    @staticmethod
    def _compute_back_link(back_indicator, project):
        """ Based on a simple indicator, compute URL for anchor BACK. """
        if back_indicator is None:
            ## This applies to Connectivity and other visualizers when RELAUNCH button is used from Operation page.
            back_page_link = None
        elif back_indicator == 'burst':
            back_page_link = "/burst"
        elif back_indicator == 'operations':
            back_page_link = '/project/viewoperations/' + str(project.id)
        else:
            back_page_link = '/project/editstructure/' + str(project.id)
        return back_page_link

    @cherrypy.expose
    @base.settings()
    @logged()
    @context_selected()
    @using_template('base_template')
    def prepare_group_launch(self, group_gid, step_key, adapter_key, **data):
        """
        Recieves as input a group gid and an algorithm given by category and id, along
        with data that gives the name of the required input parameter for the algorithm.
        Having these generate a range of gid's for all the datatypes in the group and
        launch a new operation group.
        """
        prj_service = ProjectService()
        dt_group = prj_service.get_datatypegroup_by_gid(group_gid)
        datatypes = prj_service.get_datatypes_from_datatype_group(dt_group.id)
        range_param_name = data['range_param_name']
        del data['range_param_name']
        data[PARAM_RANGE_1] = range_param_name
        # The ranged parameter iterates over every datatype GID in the group.
        data[range_param_name] = ','.join([dt.gid for dt in datatypes])
        OperationService().group_operation_launch(base.get_logged_user().id,
                                                  base.get_current_project().id,
                                                  int(adapter_key), int(step_key), **data)
        redirect_url = self._compute_back_link('operations', base.get_current_project())
        raise cherrypy.HTTPRedirect(redirect_url)

    @cherrypy.expose
    @using_template('base_template')
    @base.settings()
    @logged()
    @context_selected()
    def default(self, step_key, adapter_key, cancel=False, back_page=None, not_reset=False, **data):
        """
        Render a specific adapter. 'data' are arguments for POST.
        """
        project = base.get_current_project()
        algo_group = self.flow_service.get_algo_group_by_identifier(adapter_key)
        back_page_link = self._compute_back_link(back_page, project)
        if algo_group is None:
            raise cherrypy.HTTPRedirect("/tvb?error=True")
        if cherrypy.request.method == 'POST' and cancel:
            raise cherrypy.HTTPRedirect(back_page_link)
        submit_link = self.get_url_adapter(step_key, adapter_key, back_page)
        if cherrypy.request.method == 'POST':
            # Launch path: fire the adapter, then land on burst or operations page.
            back_indicator = back_page if back_page == 'burst' else 'operations'
            success_url = self._compute_back_link(back_indicator, project)
            data[base.KEY_ADAPTER] = adapter_key
            template_specification = self.execute_post(project.id, submit_link, success_url,
                                                       step_key, algo_group, **data)
        else:
            # Display path: decide whether previously entered values should survive.
            if (('Referer' not in cherrypy.request.headers
                 or ('Referer' in cherrypy.request.headers
                     and 'step' not in cherrypy.request.headers['Referer']))
                    and 'View' in algo_group.group_category.displayname):
                # Avoid reset in case of Visualizers, as a supplementary GET
                # might be enforced by MPLH5 on FF.
                not_reset = True
            template_specification = self.get_template_for_adapter(project.id, step_key, algo_group,
                                                                   submit_link, not not_reset)
        if template_specification is None:
            raise cherrypy.HTTPRedirect('/tvb')
        # Fill defaults expected by the base template when the adapter did not set them.
        if KEY_CONTROLLS not in template_specification:
            template_specification[KEY_CONTROLLS] = None
        if base.KEY_SUBMIT_LINK not in template_specification:
            template_specification[base.KEY_SUBMIT_LINK] = submit_link
        if KEY_CONTENT not in template_specification:
            template_specification[KEY_CONTENT] = "flow/full_adapter_interface"
            template_specification[base.KEY_DISPLAY_MENU] = False
        else:
            template_specification[base.KEY_DISPLAY_MENU] = True
        template_specification[base.KEY_BACK_PAGE] = back_page_link
        template_specification[base.KEY_ADAPTER] = adapter_key
        template_specification[ABCDisplayer.KEY_IS_ADAPTER] = True
        self.fill_default_attributes(template_specification, algo_group)
        # Coming from project pages and not in the connectivity section:
        # highlight 'project' in the navigation.
        if (back_page is not None and back_page in ['operations', 'data']
                and not (base.KEY_SECTION in template_specification
                         and template_specification[base.KEY_SECTION] == 'connectivity')):
            template_specification[base.KEY_SECTION] = 'project'
        return template_specification

    @cherrypy.expose
    @using_template("flow/reduce_dimension_select")
    @logged()
    def gettemplatefordimensionselect(self, entity_gid=None, select_name="", reset_session='False',
                                      parameters_prefix="dimensions", required_dimension=1,
                                      expected_shape="", operations=""):
        """
        Returns the HTML which contains the selects components which allows the user
        to reduce the dimension of a multi-dimensional array.

        We try to obtain the aggregation_functions from the entity, which is a list of lists.
        For each dimension should be a list with the supported aggregation functions. We
        create a DICT for each of those lists. The key will be the name of the function
        and the value will be its label.

        entity_gid - the GID of the entity for which is displayed the component
        select_name - the name of the parent select. The select in which
            is displayed the entity with the given GID
        parameters_prefix - a string which will be used for computing the names of the component
        required_dimension - the expected dimension for the resulted array
        expected_shape and operations - used for applying conditions on the resulted array
            e.g.: If the resulted array is a 3D array and we want that the length of the second
            dimension to be smaller then 512 then the expected_shape and operations should be:
            expected_shape='x,512,x' and operations='x,<,x'
        """
        template_params = dict()
        template_params["select_name"] = ""
        template_params["data"] = []
        template_params["parameters_prefix"] = parameters_prefix
        template_params["array_shape"] = ""
        template_params["required_dimension"] = required_dimension
        template_params["currentDim"] = ""
        template_params["required_dim_msg"] = ""
        template_params["expected_shape"] = expected_shape
        template_params["operations"] = operations
        #if reload => populate the selected values
        session_dict = self.context.get_current_default()
        dimensions = {1: [0], 3: [0]}
        selected_agg_functions = {}
        if not eval(str(reset_session)) and session_dict is not None:
            # Restore previous selections stored on the session under this select's prefix.
            starts_with_str = select_name + "_" + parameters_prefix + "_"
            ui_sel_items = dict((k, v) for k, v in session_dict.items()
                                if k.startswith(starts_with_str))
            dimensions, selected_agg_functions, required_dimension, _ = \
                MappedArray().parse_selected_items(ui_sel_items)
        template_params["selected_items"] = dimensions
        template_params["selected_functions"] = selected_agg_functions
        aggregation_functions = []
        default_agg_functions = self.accepted__aggregation_functions()
        labels_set = ["Time", "Channel", "Line"]
        if entity_gid is not None:
            actual_entity = ABCAdapter.load_entity_by_gid(entity_gid)
            if hasattr(actual_entity, 'shape'):
                array_shape = actual_entity.shape
                new_shape, current_dim = self._compute_current_dimension(list(array_shape),
                                                                         dimensions,
                                                                         selected_agg_functions)
                if required_dimension is not None and current_dim != int(required_dimension):
                    template_params["required_dim_msg"] = "Please select a " + \
                        str(required_dimension) + "D array"
                if not current_dim:
                    template_params["currentDim"] = "1 element"
                else:
                    template_params["currentDim"] = str(current_dim) + "D array"
                template_params["array_shape"] = json.dumps(new_shape)
                if (hasattr(actual_entity, 'dimensions_labels')
                        and actual_entity.dimensions_labels is not None):
                    labels_set = actual_entity.dimensions_labels
                    #make sure there exists labels for each dimension
                    while len(labels_set) < len(array_shape):
                        labels_set.append("Undefined")
                if (hasattr(actual_entity, 'aggregation_functions')
                        and actual_entity.aggregation_functions is not None
                        and len(actual_entity.aggregation_functions) == len(array_shape)):
                    #will be a list of lists of aggregation functions
                    defined_functions = actual_entity.aggregation_functions
                    for function in defined_functions:
                        if not len(function):
                            aggregation_functions.append({})
                        else:
                            func_dict = dict()
                            for function_key in function:
                                func_dict[function_key] = default_agg_functions[function_key]
                            aggregation_functions.append(func_dict)
                else:
                    # Entity declares no per-dimension functions: offer the defaults everywhere.
                    for _ in array_shape:
                        aggregation_functions.append(default_agg_functions)
                result = []
                for i, shape in enumerate(array_shape):
                    labels = []
                    values = []
                    for j in xrange(shape):
                        labels.append(labels_set[i] + " " + str(j))
                        values.append(entity_gid + "_" + str(i) + "_" + str(j))
                    result.append([labels, values, aggregation_functions[i]])
                template_params["select_name"] = select_name
                template_params["data"] = result
                return template_params
        return template_params

    @staticmethod
    def _compute_current_dimension(array_shape, selected_items, selected_functions):
        """
        If the user reloads an operation we have to compute the current dimension of the
        array and also the shape of the array based on his selections.
        """
        current_dim = len(array_shape)
        for i in xrange(len(array_shape)):
            if i in selected_items and len(selected_items[i]) > 0:
                array_shape[i] = len(selected_items[i])
                # A single selected entry collapses this dimension.
                if len(selected_items[i]) == 1:
                    current_dim -= 1
            if i in selected_functions and selected_functions[i] != 'none':
                # An aggregation function reduces this dimension to one element.
                array_shape[i] = 1
                if i not in selected_items or len(selected_items[i]) > 1:
                    current_dim -= 1
        return array_shape, current_dim

    @staticmethod
    def accepted__aggregation_functions():
        """ Returns the list of aggregation functions that may be applied on arrays. """
        return {"sum": "Sum", "average": "Average"}

    @cherrypy.expose
    @using_template("flow/type2component/datatype2select_simple")
    @logged()
    def getfiltereddatatypes(self, name, parent_div, tree_session_key, filters):
        """
        Given the name from the input tree, the dataType required and a number of
        filters, return the available dataType that satisfy the conditions imposed.

        NOTE(review): this method is truncated at the end of the visible source.
        """
        previous_tree = self.context.get_session_tree_for_key(tree_session_key)
        if previous_tree is None:
            base.set_error_message("Adapter Interface not in session for filtering!")
            raise cherrypy.HTTPRedirect("/tvb?error=True")
        current_node = self._get_node(previous_tree, name)
        if current_node is None:
            raise Exception("Could not find node :" + name)
        datatype = current_node[ABCAdapter.KEY_DATATYPE]
        filters = json.loads(filters)
        availablefilter = json.loads(FilterChain.get_filters_for_type(datatype))
        for i, filter_ in enumerate(filters[FILTER_FIELDS]):
            #Check for filter input of type 'date' as these need to be converted
            if filter_ in availablefilter and availablefilter[filter_][FILTER_TYPE] == 'date':
                try:
                    filter_ = string2date(filter_, False)
                    filters[FILTER_VALUES][i] = filter_
                except ValueError, excep:
                    raise excep
        #In order for the filter object not to "stack up" on multiple calls to
        #this method, create a deepCopy to work with
        if ABCAdapter.KEY_CONDITION in current_node:
            new_filter = copy.deepcopy(current_node[ABCAdapter.KEY_CONDITION])
        else:
            new_filter = FilterChain()
        new_filter.fields.extend(filters[FILTER_FIELDS])
        new_filter.operations.extend(filters[FILTER_OPERATIONS])
        new_filter.values.extend(filters[FILTER_VALUES])
        #Get dataTypes that match the filters from DB then populate with values
datatypes = self.flow_service.get_available_datatypes( base.get_current_project().id, datatype, new_filter) values = self.flow_service.populate_values( datatypes, datatype, self.context.get_current_step()) #Create a dictionary that matches what the template expects parameters = dict() parameters[ABCAdapter.KEY_NAME] = name if ABCAdapter.KEY_REQUIRED in current_node: parameters[ABCAdapter.KEY_REQUIRED] = current_node[ ABCAdapter.KEY_REQUIRED] if len(values) > 0 and eval( str(parameters[ABCAdapter.KEY_REQUIRED])): parameters[ABCAdapter.KEY_DEFAULT] = str( values[-1][ABCAdapter.KEY_VALUE]) previous_selected = self.context.get_current_default(name) if previous_selected is not None and previous_selected in [ str(vv['value']) for vv in values ]: parameters[ABCAdapter.KEY_DEFAULT] = previous_selected parameters[ABCAdapter.KEY_FILTERABLE] = availablefilter parameters[ABCAdapter.KEY_TYPE] = ABCAdapter.TYPE_SELECT parameters[ABCAdapter.KEY_OPTIONS] = values parameters[ABCAdapter.KEY_DATATYPE] = datatype template_specification = { "inputRow": parameters, "disabled": False, "parentDivId": parent_div, base.KEY_SESSION_TREE: tree_session_key } return self.fill_default_attributes(template_specification)
def get_datatype_details(self, entity_gid, back_page='burst', exclude_tabs=None):
    """
    Build and render the overlay holding the details of a single DataType.

    :param entity_gid: GID of the DataType (or DataTypeGroup) to display.
    :param back_page: identifier of the page to return to when the overlay is closed.
    :param exclude_tabs: optional list of tab names to leave out of the overlay;
        recognized names are "Metadata", "Analyzers", "Visualizers", "Links",
        "Export" and "Resulted Datatypes".
    :returns: the filled template for the details overlay.
    """
    if exclude_tabs is None:
        exclude_tabs = []
    selected_project = bc.get_current_project()
    datatype_details, states, entity = self.project_service.get_datatype_details(entity_gid)

    ### Load DataType categories. Invalid entities get no launch options.
    current_type = datatype_details.data_type
    datatype_gid = datatype_details.gid
    categories = {}
    if not entity.invalid:
        categories = self.getalgorithmsfordatatype(str(current_type), str(datatype_gid))
        categories = json.loads(categories)

    datatype_id = datatype_details.data_type_id
    is_group = False
    if datatype_details.operation_group_id is not None:
        ## This is a DataTypeGroup: the overlay operates on the whole group.
        datatype_id = datatype_details.operation_group_id
        is_group = True

    ### Retrieve projects this entity could be linked to / is linked in.
    linkable_projects_dict = self._get_linkable_projects_dict(datatype_id)
    ### Load the exporters available for a valid entity.
    exporters = {}
    if not entity.invalid:
        exporters = ExportManager().get_exporters_for_data(entity)
    is_relevant = entity.visible

    template_specification = dict()
    template_specification["entity_gid"] = entity_gid
    template_specification["nodeFields"] = datatype_details.get_ui_fields()
    template_specification["allStates"] = states
    template_specification["project"] = selected_project
    template_specification["categories"] = categories
    template_specification["exporters"] = exporters
    template_specification["datatype_id"] = datatype_id
    template_specification["isGroup"] = is_group
    template_specification["isRelevant"] = is_relevant
    template_specification["nodeType"] = 'datatype'
    template_specification["backPageIdentifier"] = back_page
    template_specification.update(linkable_projects_dict)

    overlay_class = "can-browse editor-node node-type-" + str(current_type).lower()
    if is_relevant:
        overlay_class += " node-relevant"
    else:
        ## NOTE(review): underscore here vs dash above looks inconsistent, but these
        ## are CSS class names the templates depend on — left byte-identical.
        overlay_class += " node_irrelevant"
    overlay_title = current_type
    if datatype_details.datatype_tag_1:
        overlay_title += " " + datatype_details.datatype_tag_1

    tabs = []
    overlay_indexes = []
    if "Metadata" not in exclude_tabs:
        tabs.append(OverlayTabDefinition("Metadata", "metadata"))
        overlay_indexes.append(0)
    if "Analyzers" not in exclude_tabs:
        tabs.append(OverlayTabDefinition("Analyzers", "analyzers",
                                         enabled=categories and 'Analyze' in categories))
        overlay_indexes.append(1)
    if "Visualizers" not in exclude_tabs:
        tabs.append(OverlayTabDefinition("Visualizers", "visualizers",
                                         enabled=categories and 'View' in categories))
        overlay_indexes.append(2)

    ### "Links" tab is enabled for a valid entity with at least one project it
    ### could be linked to, or at least one project it is already linked in.
    enable_link_tab = False
    if (not entity.invalid) and (linkable_projects_dict is not None):
        for projects_key in (self.PRROJECTS_FOR_LINK_KEY, self.PRROJECTS_LINKED_KEY):
            if linkable_projects_dict.get(projects_key):
                enable_link_tab = True
    if "Links" not in exclude_tabs:
        tabs.append(OverlayTabDefinition("Links", "link_to", enabled=enable_link_tab))
        overlay_indexes.append(3)
    if "Export" not in exclude_tabs:
        tabs.append(OverlayTabDefinition("Export", "export", enabled=bool(exporters)))
        overlay_indexes.append(4)
    if "Resulted Datatypes" not in exclude_tabs:
        tabs.append(OverlayTabDefinition("Resulted Datatypes", "result_dts",
                                         enabled=self.project_service.count_datatypes_generated_from(entity_gid)))
        overlay_indexes.append(5)

    template_specification = self.fill_overlay_attributes(template_specification, "DataType Details",
                                                          overlay_title, "project/details_datatype_overlay",
                                                          overlay_class, tabs, overlay_indexes)
    template_specification['baseUrl'] = cfg.BASE_URL
    return FlowController().fill_default_attributes(template_specification)
data[upload_param], bc.get_logged_user().id) except ServicesBaseException, excep: self.logger.warning(excep.message) bc.set_error_message(excep.message) raise cherrypy.HTTPRedirect('/project/viewall') def _remove_project(self, project_id): """Private method for removing project.""" try: self.project_service.remove_project(project_id) except ServicesBaseException, exc: self.logger.error("Could not delete project!") self.logger.exception(exc) bc.set_error_message(exc.message) prj = bc.get_current_project() if prj is not None and prj.id == int(project_id): bc.remove_from_session(bc.KEY_PROJECT) def _persist_project(self, data, project_id, is_create, current_user): """Private method to persist""" data = EditForm().to_python(data) saved_project = self.project_service.store_project( current_user, is_create, project_id, **data) selected_project = bc.get_current_project() if len( self.project_service.retrieve_projects_for_user( current_user.id, 1)) == 1: selected_project = saved_project if selected_project is None or (saved_project.id == selected_project.id):