def build_portlet_interface(self, portlet_configuration, project_id):
    """
    From a portlet_id and a project_id, first build the portlet
    entity then get it's configurable interface.

    :param portlet_configuration: a portlet configuration entity. It holds at the
        least the portlet_id, and in case any default parameters were saved
        they can be rebuilt from the analyzers // visualizer parameters
    :param project_id: the id of the current project

    :returns: the portlet interface will be of the following form::
        [{'interface': adapter_interface,
        'prefix': prefix_for_parameter_names,
        'subalg': {algorithm_field_name: default_algorithm_value},
        'algo_group': algorithm_group,
        'alg_ui_name': displayname},
        ......]
        A list of dictionaries for each adapter that makes up the portlet.
    """
    configurer = self._get_portlet_configurer(portlet_configuration.portlet_id)
    adapters_interface = configurer.get_configurable_interface()
    for adapter_config in adapters_interface:
        tree_manager = InputTreeManager()
        filled_tree = tree_manager.fill_input_tree_with_options(
            adapter_config.interface, project_id, adapter_config.stored_adapter.fk_category)
        adapter_config.interface = tree_manager.prepare_param_names(filled_tree)
    # Push any stored defaults into the trees, then prefix parameter names per adapter.
    configurer.update_default_values(adapters_interface, portlet_configuration)
    configurer.prefix_adapters_parameters(adapters_interface)
    return adapters_interface
def update_default_values(portlet_interface, portlet_configuration):
    """
    Update the defaults from each AdapterConfiguration entity with the
    values stored in the corresponding workflow step held in the
    PortletConfiguration entity.

    :param portlet_interface: a list of AdapterConfiguration entities.
        The last entry corresponds to the visualizer step, all preceding
        entries correspond to analyzer steps.
    :param portlet_configuration: a PortletConfiguration entity.
    """
    # Check for any defaults first in analyzer steps.
    if portlet_configuration.analyzers:
        # range() instead of Python-2-only xrange(); avoid slicing a throwaway
        # copy just to count the analyzer entries (all but the last one).
        for adapter_idx in range(len(portlet_interface) - 1):
            saved_configuration = portlet_configuration.analyzers[adapter_idx]
            portlet_interface[adapter_idx].interface = InputTreeManager.fill_defaults(
                portlet_interface[adapter_idx].interface, saved_configuration.static_param)
    # Check for visualization defaults (always the last interface entry).
    if portlet_configuration.visualizer:
        saved_configuration = portlet_configuration.visualizer
        portlet_interface[-1].interface = InputTreeManager.fill_defaults(
            portlet_interface[-1].interface, saved_configuration.static_param)
def get_reduced_simulator_interface(self):
    """
    Build a simulator interface that contains only the inputs marked as
    KEY_PARAMETER_CHECKED in the current session.
    """
    burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
    checked_config = burst_config.simulator_configuration
    # Stored defaults plus a flag telling whether the user checked any parameter.
    stored_defaults, any_checked = burst_config.get_all_simulator_values()
    input_tree = InputTreeManager.fill_defaults(self.cached_simulator_input_tree, stored_defaults)
    # When nothing was checked, skip the tree-cut and show the entire simulator tree.
    if any_checked:
        input_tree = InputTreeManager.select_simulator_inputs(input_tree, checked_config)
    # Keep the simulator tree in session so it is available to filters.
    self.context.add_adapter_to_session(self.cached_simulator_algorithm, input_tree, stored_defaults)
    template_specification = {"inputList": input_tree,
                              common.KEY_PARAMETERS_CONFIG: False,
                              'draw_hidden_ranges': True}
    return self.fill_default_attributes(template_specification)
def __init__(self):
    self.storage_path = '.'
    self.file_handler = FilesHelper()
    # Keys come from DataTypeMetaData; seed with the default subject.
    self.meta_data = {DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT}
    # Identifier of the currently running operation; assigned later.
    self.operation_id = None
    self.user_id = None
    self.tree_manager = InputTreeManager()
    self.log = get_logger(self.__class__.__module__)
def prepare_param_names(attributes_list, prefix=None, add_option_prefix=False):
    """
    Rename attributes in the given list where needed (e.g. prepend a prefix
    so grouped parameters can be identified). Used on the parameters page GET.
    Thin delegation to InputTreeManager.
    """
    return InputTreeManager.prepare_param_names(
        attributes_list, prefix=prefix, add_option_prefix=add_option_prefix)
def configure_simulator_parameters(self):
    """
    Return the input tree needed to render the simulator interface on the
    burst page in 'configuration mode': each input gets a checkbox, checked
    or not depending on whether the user previously selected it, with the
    user-filled defaults applied.
    """
    burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
    stored_defaults, any_checked = burst_config.get_all_simulator_values()
    input_tree = InputTreeManager.fill_defaults(self.cached_simulator_input_tree, stored_defaults)
    # Expose the simulator tree in session so filters can reach it.
    self.context.add_adapter_to_session(self.cached_simulator_algorithm, input_tree, stored_defaults)
    template_vars = {}
    self.fill_default_attributes(template_vars)
    template_vars.update({"inputList": input_tree,
                          common.KEY_PARAMETERS_CONFIG: True,
                          'none_checked': not any_checked,
                          'selectedParametersDictionary': burst_config.simulator_configuration})
    return template_vars
def get_surface_model_parameters_data(self, default_selected_model_param=None):
    """
    Returns a dictionary which contains all the data needed for drawing the
    model parameters.

    :param default_selected_model_param: name of the model parameter to pre-select;
        when None, the first prepared model parameter is used.
    """
    context_model_parameters = common.get_from_session(KEY_CONTEXT_MPS)
    if default_selected_model_param is None:
        # list(...) keeps this working on Python 3, where dict views are not
        # indexable (matches the sibling implementation of this method).
        default_selected_model_param = list(
            context_model_parameters.prepared_model_parameter_names.values())[0]
    equation_displayer = EquationDisplayer()
    equation_displayer.trait.bound = interface.INTERFACE_ATTRIBUTES_ONLY
    input_list = equation_displayer.interface[interface.INTERFACE_ATTRIBUTES]
    input_list[0] = self._lock_midpoints(input_list[0])
    options = []
    for original_param, modified_param in context_model_parameters.prepared_model_parameter_names.items():
        # Each model parameter gets its own copy of the equation attributes.
        attributes = deepcopy(input_list)
        self._fill_default_values(attributes, modified_param)
        option = {'name': original_param, 'value': modified_param, 'attributes': attributes}
        options.append(option)
    input_list = [{'name': 'model_param', 'type': 'select', 'default': default_selected_model_param,
                   'label': 'Model param', 'required': True, 'options': options}]
    input_list = InputTreeManager.prepare_param_names(input_list)
    return {common.KEY_PARAMETERS_CONFIG: False, 'inputList': input_list,
            'applied_equations': context_model_parameters.get_configure_info()}
def get_surface_model_parameters_data(self, default_selected_model_param=None):
    """
    Returns a dictionary which contains all the data needed for drawing the
    model parameters.

    :param default_selected_model_param: name of the model parameter to pre-select;
        when None, the first prepared model parameter is used.
    """
    # Moved below the docstring: a string literal placed after another statement
    # is not a docstring. NOTE(review): function-scope import — presumably to
    # avoid an import cycle at module load time; confirm before hoisting.
    import tvb.basic.traits.traited_interface as interface
    context_model_parameters = common.get_from_session(KEY_CONTEXT_MPS)
    if default_selected_model_param is None:
        default_selected_model_param = list(
            context_model_parameters.prepared_model_parameter_names.values())[0]
    equation_displayer = EquationDisplayer()
    equation_displayer.trait.bound = interface.INTERFACE_ATTRIBUTES_ONLY
    input_list = equation_displayer.interface[interface.INTERFACE_ATTRIBUTES]
    input_list[0] = self._lock_midpoints(input_list[0])
    options = []
    for original_param, modified_param in context_model_parameters.prepared_model_parameter_names.items():
        # Each model parameter gets its own copy of the equation attributes.
        attributes = deepcopy(input_list)
        self._fill_default_values(attributes, modified_param)
        option = {'name': original_param, 'value': modified_param, 'attributes': attributes}
        options.append(option)
    input_list = [{'name': 'model_param', 'type': 'select', 'default': default_selected_model_param,
                   'label': 'Model param', 'required': True, 'options': options}]
    input_list = InputTreeManager.prepare_param_names(input_list)
    return {common.KEY_PARAMETERS_CONFIG: False, 'inputList': input_list,
            'applied_equations': context_model_parameters.get_configure_info()}
def get_template_for_adapter(self, project_id, step_key, stored_adapter, submit_url,
                             session_reset=True, is_burst=True):
    """
    Get the input HTML interface template for a given adapter.

    :param project_id: id of the current project
    :param step_key: id used to look up the algorithm category for the title
    :param stored_adapter: algorithm entity whose interface is rendered
    :param submit_url: URL placed in the template as the form submit link
    :param session_reset: when True, clear cached adapter data from session
    :param is_burst: forwarded to section population
    :returns: template specification dict, or None on OperationException
    """
    try:
        if session_reset:
            self.context.clean_from_session()
        # Cache some values in session, for performance.
        previous_tree = self.context.get_current_input_tree()
        previous_sub_step = self.context.get_current_substep()
        if not session_reset and previous_tree is not None and previous_sub_step == stored_adapter.id:
            adapter_interface = previous_tree
        else:
            adapter_interface = self.flow_service.prepare_adapter(project_id, stored_adapter)
            self.context.add_adapter_to_session(stored_adapter, adapter_interface)
        category = self.flow_service.get_category_by_id(step_key)
        title = "Fill parameters for step " + category.displayname.lower()
        # (Removed dead code: a local `group` was always None, so the branch
        # appending its displayname to the title could never execute.)
        current_defaults = self.context.get_current_default()
        if current_defaults is not None:
            # Change default values in tree, according to selected input.
            adapter_interface = InputTreeManager.fill_defaults(adapter_interface, current_defaults)
        template_specification = dict(submitLink=submit_url, inputList=adapter_interface, title=title)
        self._populate_section(stored_adapter, template_specification, is_burst)
        return template_specification
    except OperationException as oexc:
        self.logger.error("Inconsistent Adapter")
        self.logger.exception(oexc)
        common.set_warning_message('Inconsistent Adapter! Please review the link (development problem)!')
        return None
def load_region_stimulus(self, region_stimulus_gid, from_step=None):
    """
    Loads the interface for the selected region stimulus.
    """
    stimulus = ABCAdapter.load_entity_by_gid(region_stimulus_gid)
    temporal_type = stimulus.temporal.__class__.__name__
    spatial_type = stimulus.spatial.__class__.__name__
    default_dict = {'temporal': temporal_type,
                    'spatial': spatial_type,
                    'connectivity': stimulus.connectivity.gid,
                    'weight': json.dumps(stimulus.weight)}
    # Flatten the equation parameters under the prefixed names the UI expects.
    for param, value in stimulus.temporal.parameters.items():
        key = 'temporal_parameters_option_' + str(temporal_type) + '_parameters_parameters_' + str(param)
        default_dict[key] = str(value)
    for param, value in stimulus.spatial.parameters.items():
        key = 'spatial_parameters_option_' + str(spatial_type) + '_parameters_parameters_' + str(param)
        default_dict[key] = str(value)
    input_list = self.get_creator_and_interface(REGION_STIMULUS_CREATOR_MODULE,
                                                REGION_STIMULUS_CREATOR_CLASS, StimuliRegion())[1]
    input_list = InputTreeManager.fill_defaults(input_list, default_dict)
    context = common.get_from_session(KEY_REGION_CONTEXT)
    context.reset()
    context.update_from_interface(input_list)
    context.equation_kwargs[DataTypeMetaData.KEY_TAG_1] = stimulus.user_tag_1
    context.set_active_stimulus(region_stimulus_gid)
    return self.do_step(from_step)
def test_multidimensional_array(self):
    """
    Check the HTML generated for a multi-dimensional array input tree.
    """
    tree = InputTreeManager.prepare_param_names(TraitAdapter().get_input_tree())
    self.template_specification['inputList'] = tree
    rendered = _template2string(self.template_specification)
    soup = BeautifulSoup(rendered)
    # The dictionary div is expected to carry id dict_+${dict_var_name}.
    dict_divs = soup.find_all('div', attrs=dict(id="dict_test_dict"))
    self.assertEqual(len(dict_divs), 1, 'Dictionary div not found')
    dict_entries = soup.find_all('input', attrs=dict(name=re.compile('^test_dict_parameters*')))
    self.assertEqual(len(dict_entries), 2, 'Not all entries found')
    for entry in dict_entries[:2]:
        if entry['name'] == "test_dict_parameters_W":
            self.assertEqual(dict_entries[0]['value'], "-6.0", "Incorrect values")
        if entry['name'] == "test_dict_parameters_V":
            self.assertEqual(dict_entries[1]['value'], "-3.0", "Incorrect values")
    array_entries = soup.find_all('input', attrs=dict(name='test_array'))
    self.assertEqual(len(array_entries), 1, 'Array entry not found')
    self.assertEqual(array_entries[0]['value'], "[[-3.0, -6.0], [3.0, 6.0]]", "Wrong value stored")
def get_template_for_adapter(self, project_id, step_key, stored_adapter, submit_url,
                             session_reset=True, is_burst=True):
    """
    Get the input HTML interface template for a given adapter.

    :param project_id: id of the current project
    :param step_key: id used to look up the algorithm category for the title
    :param stored_adapter: algorithm entity whose interface is rendered
    :param submit_url: URL placed in the template as the form submit link
    :param session_reset: when True, clear cached adapter data from session
    :param is_burst: forwarded to section population
    :returns: template specification dict, or None on OperationException
    """
    try:
        if session_reset:
            self.context.clean_from_session()
        # Cache some values in session, for performance.
        previous_tree = self.context.get_current_input_tree()
        previous_sub_step = self.context.get_current_substep()
        if not session_reset and previous_tree is not None and previous_sub_step == stored_adapter.id:
            adapter_interface = previous_tree
        else:
            adapter_interface = self.flow_service.prepare_adapter(project_id, stored_adapter)
            self.context.add_adapter_to_session(stored_adapter, adapter_interface)
        category = self.flow_service.get_category_by_id(step_key)
        title = "Fill parameters for step " + category.displayname.lower()
        # (Removed dead code: a local `group` was always None, so the branch
        # appending its displayname to the title could never execute.)
        current_defaults = self.context.get_current_default()
        if current_defaults is not None:
            # Change default values in tree, according to selected input.
            adapter_interface = InputTreeManager.fill_defaults(adapter_interface, current_defaults)
        template_specification = dict(submitLink=submit_url, inputList=adapter_interface, title=title)
        self._populate_section(stored_adapter, template_specification, is_burst)
        return template_specification
    # Python-3-compatible `as` syntax (was the Python-2-only `except X, e` form,
    # inconsistent with the sibling implementation); explicit return on failure.
    except OperationException as oexc:
        self.logger.error("Inconsistent Adapter")
        self.logger.exception(oexc)
        common.set_warning_message('Inconsistent Adapter! Please review the link (development problem)!')
        return None
def get_configurable_interface(self):
    """
    Given an algorithm identifier, go trough the adapter chain, and merge
    their input tree with the declared overwrites.

    :returns: a list of AdapterConfiguration entities, one per adapter in
        the declared chain.
    """
    chain_adapters = self.reader.get_adapters_chain(self.algo_identifier)
    result = []
    for adapter_declaration in chain_adapters:
        # Build the adapter instance and its algorithm group from the declaration.
        adapter_instance, algorithm_group = self.build_adapter_from_declaration(adapter_declaration)
        algorithm_field = adapter_declaration[KEY_FIELD]
        # When a sub-algorithm field is declared, a default sub-algorithm must accompany it.
        if algorithm_field:
            default_algorithm = adapter_declaration[ABCAdapter.KEY_DEFAULT]
        else:
            default_algorithm = ''
        # Keep only the portlet-level parameter overwrites that target this adapter.
        all_portlet_defined_params = self.reader.get_inputs(self.algo_identifier)
        specific_adapter_overwrites = [entry for entry in all_portlet_defined_params
                                       if ATT_OVERWRITE in entry
                                       and entry[ATT_OVERWRITE] == adapter_declaration[ABCAdapter.KEY_NAME]]
        # With a default sub-algorithm the inputs come from its XML declaration and
        # parameter names get a sub-algorithm prefix; otherwise use the plain tree.
        if default_algorithm:
            alg_inputs = adapter_instance.xml_reader.get_inputs(default_algorithm)
            prefix = InputTreeManager.form_prefix(algorithm_field, None, default_algorithm)
        else:
            alg_inputs = adapter_instance.get_input_tree()
            prefix = ''
        # Merge declared overwrites into the input tree before wrapping it up.
        replace_values = self._prepare_input_tree(alg_inputs, specific_adapter_overwrites, prefix)
        adapter_configuration = AdapterConfiguration(replace_values, algorithm_group, prefix=prefix,
                                                     subalgorithm_field=algorithm_field,
                                                     subalgorithm_value=default_algorithm)
        result.append(adapter_configuration)
    return result
def getfiltereddatatypes(self, name, parent_div, tree_session_key, filters):
    """
    Given the name from the input tree, the dataType required and a number of
    filters, return the available dataType that satisfy the conditions imposed.

    :param name: name of the input-tree node to filter options for
    :param parent_div: id of the template div the result is rendered into
    :param tree_session_key: session key under which the adapter tree is cached
    :param filters: JSON-encoded filter fields/operations/values from the UI
    """
    previous_tree = self.context.get_session_tree_for_key(tree_session_key)
    if previous_tree is None:
        common.set_error_message("Adapter Interface not in session for filtering!")
        raise cherrypy.HTTPRedirect("/tvb?error=True")
    current_node = self._get_node(previous_tree, name)
    if current_node is None:
        raise Exception("Could not find node :" + name)
    datatype = current_node[ABCAdapter.KEY_DATATYPE]
    filters = json.loads(filters)
    availablefilter = json.loads(FilterChain.get_filters_for_type(datatype))
    for i, filter_ in enumerate(filters[FILTER_FIELDS]):
        # Check for filter input of type 'date' as these need to be converted.
        if filter_ in availablefilter and availablefilter[filter_][FILTER_TYPE] == 'date':
            try:
                temp_date = string2date(filters[FILTER_VALUES][i], False)
                filters[FILTER_VALUES][i] = temp_date
            except ValueError:
                # Re-raised on purpose: an unparsable date is a caller error.
                raise
    # In order for the filter object not to "stack up" on multiple calls to
    # this method, create a deepCopy to work with.
    if ABCAdapter.KEY_CONDITION in current_node:
        new_filter = copy.deepcopy(current_node[ABCAdapter.KEY_CONDITION])
    else:
        new_filter = FilterChain()
    new_filter.fields.extend(filters[FILTER_FIELDS])
    new_filter.operations.extend(filters[FILTER_OPERATIONS])
    new_filter.values.extend(filters[FILTER_VALUES])
    # Get dataTypes that match the filters from DB then populate with values.
    values, total_count = InputTreeManager().populate_option_values_for_dtype(
        common.get_current_project().id, datatype, new_filter, self.context.get_current_step())
    # Create a dictionary that matches what the template expects.
    parameters = {ABCAdapter.KEY_NAME: name,
                  ABCAdapter.KEY_FILTERABLE: availablefilter,
                  ABCAdapter.KEY_TYPE: ABCAdapter.TYPE_SELECT,
                  ABCAdapter.KEY_OPTIONS: values,
                  ABCAdapter.KEY_DATATYPE: datatype}
    # Too many matches: warn instead of rendering an unusably long list.
    if total_count > MAXIMUM_DATA_TYPES_DISPLAYED:
        parameters[KEY_WARNING] = WARNING_OVERFLOW
    if ABCAdapter.KEY_REQUIRED in current_node:
        parameters[ABCAdapter.KEY_REQUIRED] = current_node[ABCAdapter.KEY_REQUIRED]
        # For a required field pre-select the last available value by default.
        if len(values) > 0 and string2bool(str(parameters[ABCAdapter.KEY_REQUIRED])):
            parameters[ABCAdapter.KEY_DEFAULT] = str(values[-1][ABCAdapter.KEY_VALUE])
    # A previously selected value wins over the fallback default, when still valid.
    previous_selected = self.context.get_current_default(name)
    if previous_selected in [str(vv['value']) for vv in values]:
        parameters[ABCAdapter.KEY_DEFAULT] = previous_selected
    template_specification = {"inputRow": parameters, "disabled": False,
                              "parentDivId": parent_div, common.KEY_SESSION_TREE: tree_session_key}
    return self.fill_default_attributes(template_specification)
def index(self):
    """Get on burst main page"""
    # todo : reuse load_burst here for consistency.
    template_specification = dict(mainContent="burst/main_burst", title="Simulation Cockpit",
                                  baseUrl=TvbProfile.current.web.BASE_URL,
                                  includedResources='project/included_resources')
    portlets_list = self.burst_service.get_available_portlets()
    session_stored_burst = common.get_from_session(common.KEY_BURST_CONFIG)
    # No usable burst in session: create/populate one and prepare the simulator tree.
    if session_stored_burst is None or session_stored_burst.id is None:
        if session_stored_burst is None:
            session_stored_burst = self.burst_service.new_burst_configuration(common.get_current_project().id)
            common.add2session(common.KEY_BURST_CONFIG, session_stored_burst)
        adapter_interface = self.cached_simulator_input_tree
        if session_stored_burst is not None:
            # Fill the tree with the values already stored on the burst configuration.
            current_data = session_stored_burst.get_all_simulator_values()[0]
            adapter_interface = InputTreeManager.fill_defaults(adapter_interface, current_data, True)
            ### Add simulator tree to session to be available in filters
            self.context.add_adapter_to_session(self.cached_simulator_algorithm, adapter_interface,
                                                current_data)
        template_specification['inputList'] = adapter_interface
    selected_portlets = session_stored_burst.update_selected_portlets()
    template_specification['burst_list'] = self.burst_service.get_available_bursts(
        common.get_current_project().id)
    template_specification['portletList'] = portlets_list
    template_specification['selectedPortlets'] = json.dumps(selected_portlets)
    template_specification['draw_hidden_ranges'] = True
    template_specification['burstConfig'] = session_stored_burst
    ### Prepare PSE available metrics
    ### We put here all available algorithms, because the metrics select area is a generic one,
    ### and not loaded with every Burst Group change in history.
    algorithm = self.flow_service.get_algorithm_by_module_and_class(
        IntrospectionRegistry.MEASURE_METRICS_MODULE, IntrospectionRegistry.MEASURE_METRICS_CLASS)
    adapter_instance = ABCAdapter.build_adapter(algorithm)
    if adapter_instance is not None and hasattr(adapter_instance, 'available_algorithms'):
        template_specification['available_metrics'] = [metric_name for metric_name
                                                       in adapter_instance.available_algorithms]
    else:
        template_specification['available_metrics'] = []
    template_specification[common.KEY_PARAMETERS_CONFIG] = False
    template_specification[common.KEY_SECTION] = 'burst'
    return self.fill_default_attributes(template_specification)
def prepare_ui_inputs(self, kwargs, validation_required=True):
    """
    Prepare the inputs received from a HTTP Post in a form that will be
    used by the Python adapter.
    """
    named_tree = InputTreeManager.prepare_param_names(self.get_input_tree())
    self.tree_manager.append_required_defaults(kwargs, named_tree)
    return self.convert_ui_inputs(kwargs, validation_required=validation_required)
def __init__(self):
    # Meta-data keys come from DataTypeMetaData; start with the default subject.
    self.meta_data = dict()
    self.meta_data[DataTypeMetaData.KEY_SUBJECT] = DataTypeMetaData.DEFAULT_SUBJECT
    self.file_handler = FilesHelper()
    self.storage_path = '.'
    # Identifier of the operation currently being run; filled in later.
    self.operation_id = None
    self.user_id = None
    self.log = get_logger(self.__class__.__module__)
    self.tree_manager = InputTreeManager()
def prepare_ui_inputs(self, kwargs, validation_required=True):
    """
    Prepare the inputs received from a HTTP Post in a form that will be
    used by the Python adapter.
    """
    raw_tree = self.get_input_tree()
    named_tree = InputTreeManager.prepare_param_names(raw_tree)
    self.tree_manager.append_required_defaults(kwargs, named_tree)
    return self.convert_ui_inputs(kwargs, validation_required=validation_required)
def get_reduced_simulator_interface(self):
    """
    Return a simulator interface restricted to the inputs marked as
    KEY_PARAMETER_CHECKED in the current session.
    """
    burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
    defaults, any_checked = burst_config.get_all_simulator_values()
    tree = InputTreeManager.fill_defaults(self.cached_simulator_input_tree, defaults)
    if any_checked:
        # Cut the tree down to only the user-checked inputs; otherwise keep it whole.
        tree = InputTreeManager.select_simulator_inputs(tree, burst_config.simulator_configuration)
    # Keep the tree in session so it is available to filters.
    self.context.add_adapter_to_session(self.cached_simulator_algorithm, tree, defaults)
    return self.fill_default_attributes({"inputList": tree,
                                         common.KEY_PARAMETERS_CONFIG: False,
                                         'draw_hidden_ranges': True})
def _get_stimulus_interface(self):
    """
    Returns a dictionary which contains the interface for a surface stimulus.
    """
    context = common.get_from_session(KEY_SURFACE_CONTEXT)
    creator_interface = self.get_creator_and_interface(SURFACE_STIMULUS_CREATOR_MODULE,
                                                       SURFACE_STIMULUS_CREATOR_CLASS,
                                                       StimuliSurface(), lock_midpoint_for_eq=[1])[1]
    filled_interface = InputTreeManager.fill_defaults(creator_interface, context.equation_kwargs)
    # Focal points are rendered separately from the rest of the tree.
    filled_interface, focal_points = self._remove_focal_points(filled_interface)
    prepared = self.prepare_entity_interface(filled_interface)
    prepared['selectedFocalPoints'] = focal_points
    return self._add_extra_fields_to_interface(prepared)
def _get_stimulus_interface(self):
    """
    Returns a dictionary which contains the data needed for creating the
    interface for a stimulus.
    """
    context = common.get_from_session(KEY_REGION_CONTEXT)
    creator_interface = self.get_creator_and_interface(REGION_STIMULUS_CREATOR_MODULE,
                                                       REGION_STIMULUS_CREATOR_CLASS, StimuliRegion())[1]
    # The scaling values come from the context weights.
    context.equation_kwargs.update({SCALING_PARAMETER: context.get_weights()})
    filled_interface = InputTreeManager.fill_defaults(creator_interface, context.equation_kwargs)
    filled_interface, any_scaling = self._remove_scaling(filled_interface)
    template_specification = {'inputList': filled_interface, common.KEY_PARAMETERS_CONFIG: False}
    return self._add_extra_fields_to_interface(template_specification), any_scaling
def update_default_values(portlet_interface, portlet_configuration):
    """
    Update the defaults from each AdapterConfiguration entity with the
    values stored in the corresponding workflow step held in the
    PortletConfiguration entity.

    :param portlet_interface: a list of AdapterConfiguration entities.
        The last entry corresponds to the visualizer step, all preceding
        entries correspond to analyzer steps.
    :param portlet_configuration: a PortletConfiguration entity.
    """
    # Check for any defaults first in analyzer steps.
    if portlet_configuration.analyzers:
        # range() instead of Python-2-only xrange(); avoid slicing a throwaway
        # copy just to count the analyzer entries (all but the last one).
        for adapter_idx in range(len(portlet_interface) - 1):
            saved_configuration = portlet_configuration.analyzers[adapter_idx]
            portlet_interface[adapter_idx].interface = InputTreeManager.fill_defaults(
                portlet_interface[adapter_idx].interface, saved_configuration.static_param)
    # Check for visualization defaults (always the last interface entry).
    if portlet_configuration.visualizer:
        saved_configuration = portlet_configuration.visualizer
        portlet_interface[-1].interface = InputTreeManager.fill_defaults(
            portlet_interface[-1].interface, saved_configuration.static_param)
def index(self):
    """
    Render the dynamic-model page: the left (model) fragment plus the
    integrator fragment, each converted to a prefixed input tree.
    """
    dynamic_gid = utils.generate_guid()
    left_adapter = _LeftFragmentAdapter(self.available_models)
    # WARN: If this input tree will contain data type references then to
    # render it correctly we have to use fill_input_tree_with_options.
    model_tree = InputTreeManager.prepare_param_names(left_adapter.get_input_tree())
    integrator_tree = InputTreeManager.prepare_param_names(
        _IntegratorFragmentAdapter().get_input_tree())
    params = {'title': "Dynamic model",
              'mainContent': 'burst/dynamic',
              'input_tree': model_tree,
              'integrator_input_tree': integrator_tree,
              'dynamic_gid': dynamic_gid}
    self.fill_default_attributes(params)
    dynamic = self.get_cached_dynamic(dynamic_gid)
    self._configure_integrator_noise(dynamic.integrator, dynamic.model)
    return params
def index(self):
    """
    Build the template parameters for the dynamic-model page.
    """
    dynamic_gid = utils.generate_guid()
    model_adapter = _LeftFragmentAdapter(self.available_models)
    model_tree = model_adapter.get_input_tree()
    # WARN: If this input tree will contain data type references then to
    # render it correctly we have to use fill_input_tree_with_options.
    model_tree = InputTreeManager.prepare_param_names(model_tree)
    integrator_tree = _IntegratorFragmentAdapter().get_input_tree()
    integrator_tree = InputTreeManager.prepare_param_names(integrator_tree)
    params = dict(title="Dynamic model",
                  mainContent='burst/dynamic',
                  input_tree=model_tree,
                  integrator_input_tree=integrator_tree,
                  dynamic_gid=dynamic_gid)
    self.fill_default_attributes(params)
    cached = self.get_cached_dynamic(dynamic_gid)
    self._configure_integrator_noise(cached.integrator, cached.model)
    return params
def load_surface_stimulus(self, surface_stimulus_gid, from_step):
    """
    Loads the interface for the selected surface stimulus.
    """
    context = common.get_from_session(KEY_SURFACE_CONTEXT)
    stimulus = ABCAdapter.load_entity_by_gid(surface_stimulus_gid)
    temporal_type = stimulus.temporal.__class__.__name__
    spatial_type = stimulus.spatial.__class__.__name__
    default_dict = {'temporal': temporal_type,
                    'spatial': spatial_type,
                    'surface': stimulus.surface.gid,
                    'focal_points_surface': json.dumps(stimulus.focal_points_surface),
                    'focal_points_triangles': json.dumps(stimulus.focal_points_triangles)}
    # Flatten the equation parameters under the prefixed names the UI expects.
    for param, value in stimulus.temporal.parameters.items():
        key = 'temporal_parameters_option_' + str(temporal_type) + '_parameters_parameters_' + str(param)
        default_dict[key] = str(value)
    for param, value in stimulus.spatial.parameters.items():
        key = 'spatial_parameters_option_' + str(spatial_type) + '_parameters_parameters_' + str(param)
        default_dict[key] = str(value)
    default_dict[DataTypeMetaData.KEY_TAG_1] = stimulus.user_tag_1
    input_list = self.get_creator_and_interface(SURFACE_STIMULUS_CREATOR_MODULE,
                                                SURFACE_STIMULUS_CREATOR_CLASS,
                                                StimuliSurface(), lock_midpoint_for_eq=[1])[1]
    input_list = InputTreeManager.fill_defaults(input_list, default_dict)
    context.reset()
    context.update_from_interface(input_list)
    context.equation_kwargs[DataTypeMetaData.KEY_TAG_1] = stimulus.user_tag_1
    context.set_active_stimulus(surface_stimulus_gid)
    return self.do_step(from_step)
def get_template_from_context(self):
    """
    Return the parameters for the local connectivity in case one is stored
    in context. Load the entity and use it to populate the defaults of the
    interface accordingly.
    """
    context = common.get_from_session(KEY_LCONN_CONTEXT)
    local_conn = ABCAdapter.load_entity_by_gid(context.selected_entity)
    equation = local_conn.equation
    default_dict = {'surface': local_conn.surface.gid, 'cutoff': local_conn.cutoff}
    if equation is not None:
        equation_type = equation.__class__.__name__
        default_dict['equation'] = equation_type
        # Flatten the equation parameters under the prefixed names the UI expects.
        for param, value in equation.parameters.items():
            key = 'equation_parameters_option_' + str(equation_type) + '_parameters_parameters_' + str(param)
            default_dict[key] = value
    else:
        msg = ("There is no equation specified for this local connectivity. "
               "The default equation is displayed into the spatial field.")
        self.logger.warning(msg)
        common.set_info_message(msg)
    default_dict[DataTypeMetaData.KEY_TAG_1] = local_conn.user_tag_1
    input_list = self.get_creator_and_interface(LOCAL_CONN_CREATOR_MODULE, LOCAL_CONN_CREATOR_CLASS,
                                                LocalConnectivity(), lock_midpoint_for_eq=[1])[1]
    input_list = self._add_extra_fields_to_interface(input_list)
    input_list = InputTreeManager.fill_defaults(input_list, default_dict)
    return {'inputList': input_list,
            common.KEY_PARAMETERS_CONFIG: False,
            'equationViewerUrl': '/spatial/localconnectivity/get_equation_chart',
            'equationsPrefixes': json.dumps(self.plotted_equations_prefixes)}
def test_select_simulator_inputs(self):
    """
    Test that given a dictionary of selected inputs as it would arrive from UI, only
    the selected simulator inputs are kept.
    """
    simulator_input_tree = self.flow_service.prepare_adapter(self.test_project.id, self.sim_algorithm)
    child_parameter = ''
    # Mark the first two top-level entries as checked with an overridden value.
    checked_parameters = {simulator_input_tree[0][ABCAdapter.KEY_NAME]: {model.KEY_PARAMETER_CHECKED: True,
                                                                         model.KEY_SAVED_VALUE: 'new_value'},
                          simulator_input_tree[1][ABCAdapter.KEY_NAME]: {model.KEY_PARAMETER_CHECKED: True,
                                                                         model.KEY_SAVED_VALUE: 'new_value'}}
    # Look for a entry from a subtree to add to the selected simulator inputs:
    # scan entries past the first two for one with options, pick the option matching
    # the default, and select its first child attribute.
    for idx, entry in enumerate(simulator_input_tree):
        found_it = False
        if idx not in (0, 1) and entry.get(ABCAdapter.KEY_OPTIONS, False):
            for option in entry[ABCAdapter.KEY_OPTIONS]:
                if option[ABCAdapter.KEY_VALUE] == entry[ABCAdapter.KEY_DEFAULT]:
                    if option[ABCAdapter.KEY_ATTRIBUTES]:
                        child_parameter = option[ABCAdapter.KEY_ATTRIBUTES][0][ABCAdapter.KEY_NAME]
                        # The parent entry itself stays unchecked; only the child is selected.
                        checked_parameters[entry[ABCAdapter.KEY_NAME]] = {model.KEY_PARAMETER_CHECKED: False,
                                                                          model.KEY_SAVED_VALUE: entry[
                                                                              ABCAdapter.KEY_DEFAULT]}
                        checked_parameters[child_parameter] = {model.KEY_PARAMETER_CHECKED: True,
                                                               model.KEY_SAVED_VALUE: 'new_value'}
                        found_it = True
                        break
        if found_it:
            break
    self.assertTrue(child_parameter != '', "Could not find any sub-tree entry in simulator interface.")
    subtree = InputTreeManager.select_simulator_inputs(simulator_input_tree, checked_parameters)
    # After the select method we expect only the checked parameters entries to remain with
    # the new values updated accordingly.
    expected_outputs = [{ABCAdapter.KEY_NAME: simulator_input_tree[0][ABCAdapter.KEY_NAME],
                         ABCAdapter.KEY_DEFAULT: 'new_value'},
                        {ABCAdapter.KEY_NAME: simulator_input_tree[1][ABCAdapter.KEY_NAME],
                         ABCAdapter.KEY_DEFAULT: 'new_value'},
                        {ABCAdapter.KEY_NAME: child_parameter,
                         ABCAdapter.KEY_DEFAULT: 'new_value'}]
    self.assertEqual(len(expected_outputs), len(subtree),
                     "Some entries that should not have been displayed still are.")
    for idx, entry in enumerate(expected_outputs):
        self.assertEqual(expected_outputs[idx][ABCAdapter.KEY_NAME], subtree[idx][ABCAdapter.KEY_NAME])
        self.assertEqual(expected_outputs[idx][ABCAdapter.KEY_DEFAULT], subtree[idx][ABCAdapter.KEY_DEFAULT],
                         'Default value not update properly.')
def load_region_stimulus(self, region_stimulus_gid, from_step=None):
    """
    Load a stored region stimulus and populate the creator interface with its values.

    :param region_stimulus_gid: GID of the previously stored region stimulus
    :param from_step: wizard step to continue from (handed to do_step)
    :returns: whatever self.do_step renders for the given step
    """
    stimulus = ABCAdapter.load_entity_by_gid(region_stimulus_gid)
    t_eq = stimulus.temporal
    s_eq = stimulus.spatial
    t_type = t_eq.__class__.__name__
    s_type = s_eq.__class__.__name__
    defaults = {'temporal': t_type,
                'spatial': s_type,
                'connectivity': stimulus.connectivity.gid,
                'weight': json.dumps(stimulus.weight)}
    # Equation parameters use the flattened <field>_parameters_option_<type>_parameters_parameters_<name>
    # key convention -- presumably matching prepare_param_names output (TODO confirm).
    for p_name in t_eq.parameters:
        key = 'temporal_parameters_option_' + str(t_type) + '_parameters_parameters_' + str(p_name)
        defaults[key] = str(t_eq.parameters[p_name])
    for p_name in s_eq.parameters:
        key = 'spatial_parameters_option_' + str(s_type) + '_parameters_parameters_' + str(p_name)
        defaults[key] = str(s_eq.parameters[p_name])

    interface = self.get_creator_and_interface(REGION_STIMULUS_CREATOR_MODULE,
                                               REGION_STIMULUS_CREATOR_CLASS, StimuliRegion())[1]
    interface = InputTreeManager.fill_defaults(interface, defaults)
    # Rebuild the session context from scratch around the loaded entity.
    context = common.get_from_session(KEY_REGION_CONTEXT)
    context.reset()
    context.update_from_interface(interface)
    context.equation_kwargs[DataTypeMetaData.KEY_TAG_1] = stimulus.user_tag_1
    context.set_active_stimulus(region_stimulus_gid)
    return self.do_step(from_step)
def index(self):
    """Get on burst main page"""
    # todo : reuse load_burst here for consistency.
    template_specification = dict(mainContent="burst/main_burst", title="Simulation Cockpit",
                                  baseUrl=TvbProfile.current.web.BASE_URL,
                                  includedResources='project/included_resources')
    portlets_list = self.burst_service.get_available_portlets()
    session_stored_burst = common.get_from_session(common.KEY_BURST_CONFIG)
    if session_stored_burst is None or session_stored_burst.id is None:
        # No (persisted) burst in session yet: create a fresh configuration and cache it.
        if session_stored_burst is None:
            session_stored_burst = self.burst_service.new_burst_configuration(common.get_current_project().id)
            common.add2session(common.KEY_BURST_CONFIG, session_stored_burst)

    adapter_interface = self.cached_simulator_input_tree
    # NOTE(review): session_stored_burst is always non-None at this point (created above
    # when missing), so this guard looks redundant -- kept as-is.
    if session_stored_burst is not None:
        current_data = session_stored_burst.get_all_simulator_values()[0]
        adapter_interface = InputTreeManager.fill_defaults(adapter_interface, current_data, True)
        ### Add simulator tree to session to be available in filters
        self.context.add_adapter_to_session(self.cached_simulator_algorithm, adapter_interface, current_data)
    template_specification['inputList'] = adapter_interface
    selected_portlets = session_stored_burst.update_selected_portlets()
    template_specification['burst_list'] = self.burst_service.get_available_bursts(common.get_current_project().id)
    template_specification['portletList'] = portlets_list
    template_specification['selectedPortlets'] = json.dumps(selected_portlets)
    template_specification['draw_hidden_ranges'] = True
    template_specification['burstConfig'] = session_stored_burst

    ### Prepare PSE available metrics
    ### We put here all available algorithms, because the metrics select area is a generic one,
    ### and not loaded with every Burst Group change in history.
    algorithm = self.flow_service.get_algorithm_by_module_and_class(MEASURE_METRICS_MODULE, MEASURE_METRICS_CLASS)
    adapter_instance = ABCAdapter.build_adapter(algorithm)
    if adapter_instance is not None and hasattr(adapter_instance, 'available_algorithms'):
        template_specification['available_metrics'] = [metric_name for metric_name
                                                       in adapter_instance.available_algorithms.keys()]
    else:
        template_specification['available_metrics'] = []

    template_specification[common.KEY_PARAMETERS_CONFIG] = False
    template_specification[common.KEY_SECTION] = 'burst'
    return self.fill_default_attributes(template_specification)
def _get_stimulus_interface(self):
    """
    Build the data needed for the region-stimulus creation interface.

    :returns: (template dict with the filled input tree, flag telling whether a
        scaling entry was removed from the tree)
    """
    context = common.get_from_session(KEY_REGION_CONTEXT)
    tree = self.get_creator_and_interface(REGION_STIMULUS_CREATOR_MODULE,
                                          REGION_STIMULUS_CREATOR_CLASS, StimuliRegion())[1]
    # Inject the current weights so fill_defaults picks them up with the rest of the kwargs.
    context.equation_kwargs.update({SCALING_PARAMETER: context.get_weights()})
    tree = InputTreeManager.fill_defaults(tree, context.equation_kwargs)
    tree, any_scaling = self._remove_scaling(tree)
    spec = {'inputList': tree, common.KEY_PARAMETERS_CONFIG: False}
    return self._add_extra_fields_to_interface(spec), any_scaling
def test_multidimensional_array(self):
    """
    Test the generation of a multi-dimensional array in the rendered HTML:
    the dictionary div must exist, both dict entries must carry their expected
    default values, and the array input must hold the serialized default.
    """
    input_tree = TraitAdapter().get_input_tree()
    input_tree = InputTreeManager.prepare_param_names(input_tree)
    self.template_specification['inputList'] = input_tree
    resulted_html = _template2string(self.template_specification)
    soup = BeautifulSoup(resulted_html)
    # Find dictionary div which should be dict_+${dict_var_name}
    dict_div = soup.find_all('div', attrs=dict(id="dict_test_dict"))
    assert len(dict_div) == 1, 'Dictionary div not found'
    dict_entries = soup.find_all('input', attrs=dict(name=re.compile('^test_dict_parameters*')))
    assert len(dict_entries) == 2, 'Not all entries found'
    for i in range(2):
        # BUGFIX: check the value of the entry being inspected (index i) instead of
        # the hard-coded indices 0/1, which silently assumed a fixed ordering of the
        # rendered inputs and could pass/fail for the wrong element.
        if dict_entries[i]['name'] == "test_dict_parameters_W":
            assert dict_entries[i]['value'] == "-6.0", "Incorrect values"
        if dict_entries[i]['name'] == "test_dict_parameters_V":
            assert dict_entries[i]['value'] == "-3.0", "Incorrect values"
    array_entry = soup.find_all('input', attrs=dict(name='test_array'))
    assert len(array_entry) == 1, 'Array entry not found'
    assert array_entry[0]['value'] == "[[-3.0, -6.0], [3.0, 6.0]]", "Wrong value stored"
def configure_simulator_parameters(self):
    """
    Return the required input tree to generate the simulator interface for
    the burst page in 'configuration mode', meaning with checkboxes next to
    each input that are checked or not depending on if the user selected
    them so, and with the user filled defaults.
    """
    burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
    default_values, any_checked = burst_config.get_all_simulator_values()
    tree = InputTreeManager.fill_defaults(self.cached_simulator_input_tree, default_values)
    # Make the simulator tree available in the session so filters can reach it.
    self.context.add_adapter_to_session(self.cached_simulator_algorithm, tree, default_values)

    template_vars = {}
    self.fill_default_attributes(template_vars)
    template_vars["inputList"] = tree
    template_vars[common.KEY_PARAMETERS_CONFIG] = True
    template_vars['none_checked'] = not any_checked
    template_vars['selectedParametersDictionary'] = burst_config.simulator_configuration
    return template_vars
def get_template_from_context(self):
    """
    Return the parameters for the local connectivity in case one is stored in context.
    Load the entity and use it to populate the defaults from the interface accordingly.
    """
    context = common.get_from_session(KEY_LCONN_CONTEXT)
    local_conn = ABCAdapter.load_entity_by_gid(context.selected_entity)
    equation = local_conn.equation
    defaults = {'surface': local_conn.surface.gid, 'cutoff': local_conn.cutoff}

    if equation is None:
        # No stored equation: tell the user the default one is shown instead.
        msg = ("There is no equation specified for this local connectivity. "
               "The default equation is displayed into the spatial field.")
        self.logger.warning(msg)
        common.set_info_message(msg)
    else:
        eq_type = equation.__class__.__name__
        defaults['equation'] = eq_type
        # Flattened parameter keys: equation_parameters_option_<type>_parameters_parameters_<name>.
        for p_name in equation.parameters:
            key = 'equation_parameters_option_' + str(eq_type) + '_parameters_parameters_' + str(p_name)
            defaults[key] = equation.parameters[p_name]

    defaults[DataTypeMetaData.KEY_TAG_1] = local_conn.user_tag_1

    tree = self.get_creator_and_interface(LOCAL_CONN_CREATOR_MODULE, LOCAL_CONN_CREATOR_CLASS,
                                          LocalConnectivity(), lock_midpoint_for_eq=[1])[1]
    tree = self._add_extra_fields_to_interface(tree)
    tree = InputTreeManager.fill_defaults(tree, defaults)
    return {'inputList': tree,
            common.KEY_PARAMETERS_CONFIG: False,
            'equationViewerUrl': '/spatial/localconnectivity/get_equation_chart',
            'equationsPrefixes': json.dumps(self.plotted_equations_prefixes)}
def load_surface_stimulus(self, surface_stimulus_gid, from_step):
    """
    Loads the interface for the selected surface stimulus.

    :param surface_stimulus_gid: GID of the stored surface stimulus to load
    :param from_step: wizard step to continue from (handed to do_step)
    """
    context = common.get_from_session(KEY_SURFACE_CONTEXT)
    selected_surface_stimulus = ABCAdapter.load_entity_by_gid(surface_stimulus_gid)
    temporal_eq = selected_surface_stimulus.temporal
    spatial_eq = selected_surface_stimulus.spatial
    surface = selected_surface_stimulus.surface
    focal_points_surface = selected_surface_stimulus.focal_points_surface
    focal_points_triangles = selected_surface_stimulus.focal_points_triangles
    temporal_eq_type = temporal_eq.__class__.__name__
    spatial_eq_type = spatial_eq.__class__.__name__
    default_dict = {'temporal': temporal_eq_type, 'spatial': spatial_eq_type,
                    'surface': surface.gid, 'focal_points_surface': json.dumps(focal_points_surface),
                    'focal_points_triangles': json.dumps(focal_points_triangles)}
    # Keys follow the flattened <field>_parameters_option_<type>_parameters_parameters_<name>
    # convention used by the UI input tree.
    for param in temporal_eq.parameters:
        prepared_name = 'temporal_parameters_option_' + str(temporal_eq_type)
        prepared_name = prepared_name + '_parameters_parameters_' + str(param)
        default_dict[prepared_name] = str(temporal_eq.parameters[param])
    for param in spatial_eq.parameters:
        prepared_name = 'spatial_parameters_option_' + str(spatial_eq_type) + '_parameters_parameters_' + str(param)
        default_dict[prepared_name] = str(spatial_eq.parameters[param])
    default_dict[DataTypeMetaData.KEY_TAG_1] = selected_surface_stimulus.user_tag_1

    input_list = self.get_creator_and_interface(SURFACE_STIMULUS_CREATOR_MODULE,
                                                SURFACE_STIMULUS_CREATOR_CLASS, StimuliSurface(),
                                                lock_midpoint_for_eq=[1])[1]
    input_list = InputTreeManager.fill_defaults(input_list, default_dict)
    # Rebuild the session context from scratch around the loaded entity.
    context.reset()
    context.update_from_interface(input_list)
    context.equation_kwargs[DataTypeMetaData.KEY_TAG_1] = selected_surface_stimulus.user_tag_1
    context.set_active_stimulus(surface_stimulus_gid)
    return self.do_step(from_step)
def create_new_portlet_configuration(self, name=''):
    """
    Create a PortletConfiguration entity with the default values from the portlet
    XML declaration and the adapter input trees.

    :param name: ui-name stored on the final visualization (view) step
    :returns: a PortletConfiguration with one WorkflowStep per analyzer in the
        adapter chain and a WorkflowStepView for the last (visualizer) adapter
    """
    chain_adapters = self.reader.get_adapters_chain(self.algo_identifier)
    analyze_steps = []
    view_step = None

    idx = 0
    for adapter_declaration in chain_adapters:
        adapter_instance, algorithm_group = self.build_adapter_from_declaration(adapter_declaration)

        ### Get the flatten interface for the adapter, and in case of #####
        ### sub-algorithms also get the pair {algorithm : value}      #####
        algorithm_field = adapter_declaration[KEY_FIELD]
        if algorithm_field:
            default_algorithm = adapter_declaration[ABCAdapter.KEY_DEFAULT]
        else:
            default_algorithm = ''

        if default_algorithm:
            # Sub-algorithm case: flatten only the inputs of the declared default algorithm,
            # prefixed so the names match the rendered form fields.
            prefix = InputTreeManager.form_prefix(algorithm_field, None, default_algorithm)
            alg_inputs = adapter_instance.tree_manager.flatten(adapter_instance.xml_reader.get_inputs(default_algorithm), prefix)
        else:
            alg_inputs = adapter_instance.flaten_input_interface()

        ###################################################################
        ### Get the overwrites defined in the portlet configuration     ###
        ### for this specific adapter in the adapter chain,             ###
        ### split in static and dynamic ones                            ###
        prepared_params = {KEY_STATIC: {}, KEY_DYNAMIC: {}}
        all_portlet_defined_params = self.reader.get_inputs(self.algo_identifier)
        specific_adapter_overwrites = [entry for entry in all_portlet_defined_params
                                       if ATT_OVERWRITE in entry and
                                       entry[ATT_OVERWRITE] == adapter_declaration[ABCAdapter.KEY_NAME]]

        for entry in specific_adapter_overwrites:
            # Prefer an explicit default, then a value, then empty string.
            if ABCAdapter.KEY_DEFAULT in entry:
                declared_value = entry[ABCAdapter.KEY_DEFAULT]
            elif ABCAdapter.KEY_VALUE in entry:
                declared_value = entry[ABCAdapter.KEY_VALUE]
            else:
                declared_value = ''
            if entry[ABCAdapter.KEY_TYPE] == KEY_DYNAMIC:
                prepared_params[KEY_DYNAMIC][entry[ABCAdapter.KEY_NAME]] = declared_value
            else:
                prepared_params[KEY_STATIC][entry[ABCAdapter.KEY_NAME]] = declared_value

        ###################################################################
        ### Now just fill the rest of the adapter inputs if they are not ##
        ### present in neither dynamic or static overwrites. In case of  ##
        ### sub-algorithms also add as static the algorithm : value pair ##
        for input_dict in alg_inputs:
            input_name = input_dict[ABCAdapter.KEY_NAME]
            if input_name not in prepared_params[KEY_STATIC] and input_name not in prepared_params[KEY_DYNAMIC]:
                if ABCAdapter.KEY_DEFAULT in input_dict:
                    input_value = input_dict[ABCAdapter.KEY_DEFAULT]
                else:
                    input_value = ''
                prepared_params[KEY_STATIC][input_name] = input_value
        if default_algorithm:
            prepared_params[KEY_STATIC][algorithm_field] = default_algorithm

        ###################################################################
        ### Now parse the dynamic inputs declared in the portlets XML   ###
        ### into workflow_step specific format.                         ###
        for param_name in prepared_params[KEY_DYNAMIC]:
            new_value = self._portlet_dynamic2workflow_step(prepared_params[KEY_DYNAMIC][param_name])
            prepared_params[KEY_DYNAMIC][param_name] = new_value

        ###################################################################
        ### Finally get the actual algorithm id from the DB as we need the #
        ### algorithm id, then build the workflow step given the computed  #
        ### parameter set, then build and return the portlet configuration #
        algorithm = dao.get_algorithm_by_group(algorithm_group.id, default_algorithm)
        if idx == len(chain_adapters) - 1:
            # The last adapter in the chain is by convention the visualizer step.
            view_step = WorkflowStepView(algorithm_id=algorithm.id, portlet_id=self.portlet_id,
                                         ui_name=name, static_param=prepared_params[KEY_STATIC],
                                         dynamic_param=prepared_params[KEY_DYNAMIC])
        else:
            workflow_step = WorkflowStep(algorithm_id=algorithm.id, static_param=prepared_params[KEY_STATIC],
                                         dynamic_param=prepared_params[KEY_DYNAMIC])
            analyze_steps.append(workflow_step)
        idx += 1

    portlet_configuration = PortletConfiguration(self.portlet_id)
    portlet_configuration.set_analyzers(analyze_steps)
    portlet_configuration.set_visualizer(view_step)
    return portlet_configuration
def fill_defaults(adapter_interface, data, fill_unselected_branches=False):
    """
    Change the default values in the Input Interface Tree.

    :param adapter_interface: the input tree whose defaults are replaced
    :param data: flat dict of values to install as defaults
    :param fill_unselected_branches: when True, also fill branches that are not selected
    """
    # Delegate directly to the shared tree-manager implementation.
    return InputTreeManager.fill_defaults(adapter_interface, data, fill_unselected_branches)
def get_input_for_algorithm(self, algorithm_identifier=None):
    """For a group, we will return input tree on algorithm base."""
    algo_inputs = self.xml_reader.get_inputs(algorithm_identifier)
    # Prefix every parameter name so it matches the selected sub-algorithm option.
    name_prefix = InputTreeManager.form_prefix(self.get_algorithm_param(),
                                               option_prefix=algorithm_identifier)
    return ABCAdapter.prepare_param_names(algo_inputs, name_prefix)
class FlowService:
    """
    Service Layer for all TVB generic Work-Flow operations.
    """

    def __init__(self):
        # Logger bound to this module's name; file helper for project folders;
        # tree manager for filling/naming adapter input trees.
        self.logger = get_logger(self.__class__.__module__)
        self.file_helper = FilesHelper()
        self.input_tree_manager = InputTreeManager()

    def get_category_by_id(self, identifier):
        """ Pass to DAO the retrieve of category by ID operation."""
        return dao.get_category_by_id(identifier)

    @staticmethod
    def get_raw_categories():
        """:returns: AlgorithmCategory list of entities that have results in RAW state (Creators/Uploaders)"""
        return dao.get_raw_categories()

    @staticmethod
    def get_visualisers_category():
        """Retrieve all Algorithm categories, with display capability"""
        result = dao.get_visualisers_categories()
        if not result:
            raise ValueError("View Category not found!!!")
        return result[0]

    @staticmethod
    def get_algorithm_by_identifier(ident):
        """
        Retrieve Algorithm entity by ID.
        Return None, if ID is not found in DB.
        """
        return dao.get_algorithm_by_id(ident)

    @staticmethod
    def load_operation(operation_id):
        """ Retrieve previously stored Operation from DB, and load operation.burst attribute"""
        operation = dao.get_operation_by_id(operation_id)
        operation.burst = dao.get_burst_for_operation_id(operation_id)
        return operation

    @staticmethod
    def get_operation_numbers(proj_id):
        """ Count total number of operations started for current project. """
        return dao.get_operation_numbers(proj_id)

    def prepare_adapter(self, project_id, stored_adapter):
        """
        Having a StoredAdapter, return the Tree Adapter Interface object,
        populated with datatypes from 'project_id'.

        :raises OperationException: when the adapter cannot be built or its tree prepared
        """
        adapter_module = stored_adapter.module
        adapter_name = stored_adapter.classname
        try:
            # Prepare Adapter Interface, by populating with existent data,
            # in case of a parameter of type DataType.
            adapter_instance = ABCAdapter.build_adapter(stored_adapter)
            interface = adapter_instance.get_input_tree()
            interface = self.input_tree_manager.fill_input_tree_with_options(interface, project_id,
                                                                             stored_adapter.fk_category)
            interface = self.input_tree_manager.prepare_param_names(interface)
            return interface
        except Exception:
            self.logger.exception('Not found:' + adapter_name + ' in:' + adapter_module)
            raise OperationException("Could not prepare " + adapter_name)

    @staticmethod
    def get_algorithm_by_module_and_class(module, classname):
        """
        Get the db entry from the algorithm table for the given module and class.
        """
        return dao.get_algorithm_by_module(module, classname)

    @staticmethod
    def get_available_datatypes(project_id, data_type_cls, filters=None):
        """
        Return all dataTypes that match a given name and some filters.
        :param data_type_cls: either a fully qualified class name or a class object
        """
        return get_filtered_datatypes(project_id, data_type_cls, filters)

    @staticmethod
    def create_link(data_ids, project_id):
        """
        For a list of dataType IDs and a project id create all the required links.
        """
        for data in data_ids:
            link = model.Links(data, project_id)
            dao.store_entity(link)

    @staticmethod
    def remove_link(dt_id, project_id):
        """
        Remove the link from the datatype given by dt_id to project given by project_id.
        """
        link = dao.get_link(dt_id, project_id)
        if link is not None:
            dao.remove_entity(model.Links, link.id)

    def fire_operation(self, adapter_instance, current_user, project_id, visible=True, **data):
        """
        Launch an operation, specified by AdapterInstance, for CurrentUser,
        Current Project and a given set of UI Input Data.

        :raises OperationException: wrapping any failure of the launch
        """
        operation_name = str(adapter_instance.__class__.__name__)
        try:
            self.logger.info("Starting operation " + operation_name)
            project = dao.get_project_by_id(project_id)
            tmp_folder = self.file_helper.get_project_folder(project, self.file_helper.TEMP_FOLDER)
            result = OperationService().initiate_operation(current_user, project.id, adapter_instance,
                                                           tmp_folder, visible, **data)
            self.logger.info("Finished operation launch:" + operation_name)
            return result
        except TVBException as excep:
            self.logger.exception("Could not launch operation " + operation_name +
                                  " with the given set of input data, because: " + excep.message)
            raise OperationException(excep.message, excep)
        except Exception as excep:
            self.logger.exception("Could not launch operation " + operation_name +
                                  " with the given set of input data!")
            raise OperationException(str(excep))

    @staticmethod
    def get_upload_algorithms():
        """
        :return: List of StoredAdapter entities
        """
        categories = dao.get_uploader_categories()
        categories_ids = [categ.id for categ in categories]
        return dao.get_adapters_from_categories(categories_ids)

    def get_analyze_groups(self):
        """
        :return: list of AlgorithmTransientGroup entities
        """
        categories = dao.get_launchable_categories(elimin_viewers=True)
        categories_ids = [categ.id for categ in categories]
        stored_adapters = dao.get_adapters_from_categories(categories_ids)

        groups_list = []
        for adapter in stored_adapters:
            # For empty groups, this time, we fill the actual adapter
            group = AlgorithmTransientGroup(adapter.group_name or adapter.displayname,
                                            adapter.group_description or adapter.description)
            group = self._find_group(groups_list, group)
            group.children.append(adapter)
        # NOTE(review): categories[0] assumes at least one launchable category exists -- verify.
        return categories[0], groups_list

    @staticmethod
    def _find_group(groups_list, new_group):
        # Return the matching group already in the list, or append and return the new one.
        for i in range(len(groups_list) - 1, -1, -1):
            current_group = groups_list[i]
            if current_group.name == new_group.name and current_group.description == new_group.description:
                return current_group
        # Not found in list
        groups_list.append(new_group)
        return new_group

    def get_visualizers_for_group(self, dt_group_gid):
        categories = dao.get_visualisers_categories()
        return self._get_launchable_algorithms(dt_group_gid, categories)[1]

    def get_launchable_algorithms(self, datatype_gid):
        """
        :param datatype_gid: Filter only algorithms compatible with this GUID
        :return: dict(category_name: List AlgorithmTransientGroup)
        """
        categories = dao.get_launchable_categories()
        datatype_instance, filtered_adapters = self._get_launchable_algorithms(datatype_gid, categories)

        if isinstance(datatype_instance, model.DataTypeGroup):
            # If part of a group, update also with specific analyzers of the child datatype
            dt_group = dao.get_datatype_group_by_gid(datatype_gid)
            datatypes = dao.get_datatypes_from_datatype_group(dt_group.id)
            if len(datatypes):
                datatype = datatypes[-1]
                analyze_category = dao.get_launchable_categories(True)
                _, inner_analyzers = self._get_launchable_algorithms(datatype.gid, analyze_category)
                filtered_adapters.extend(inner_analyzers)

        categories_dict = dict()
        for c in categories:
            categories_dict[c.id] = c.displayname

        return self._group_adapters_by_category(filtered_adapters, categories_dict)

    def _get_launchable_algorithms(self, datatype_gid, categories):
        # Collect the datatype's class and all its MappedType ancestors, then keep only
        # the adapters whose declared filters accept this concrete instance.
        datatype_instance = dao.get_datatype_by_gid(datatype_gid)
        data_class = datatype_instance.__class__
        all_compatible_classes = [data_class.__name__]
        for one_class in getmro(data_class):
            if issubclass(one_class, MappedType) and one_class.__name__ not in all_compatible_classes:
                all_compatible_classes.append(one_class.__name__)

        self.logger.debug("Searching in categories: " + str(categories) + " for classes " +
                          str(all_compatible_classes))
        categories_ids = [categ.id for categ in categories]
        launchable_adapters = dao.get_applicable_adapters(all_compatible_classes, categories_ids)

        filtered_adapters = []
        for stored_adapter in launchable_adapters:
            filter_chain = FilterChain.from_json(stored_adapter.datatype_filter)
            if not filter_chain or filter_chain.get_python_filter_equivalent(datatype_instance):
                filtered_adapters.append(stored_adapter)

        return datatype_instance, filtered_adapters

    def _group_adapters_by_category(self, stored_adapters, categories):
        """
        :param stored_adapters: list StoredAdapter
        :return: dict(category_name: List AlgorithmTransientGroup), empty groups all in the same AlgorithmTransientGroup
        """
        categories_dict = dict()
        for adapter in stored_adapters:
            category_name = categories.get(adapter.fk_category)
            if category_name in categories_dict:
                groups_list = categories_dict.get(category_name)
            else:
                groups_list = []
                categories_dict[category_name] = groups_list
            group = AlgorithmTransientGroup(adapter.group_name, adapter.group_description)
            group = self._find_group(groups_list, group)
            group.children.append(adapter)
        return categories_dict

    @staticmethod
    def get_generic_entity(entity_type, filter_value, select_field):
        return dao.get_generic_entity(entity_type, filter_value, select_field)

    ##########################################################################
    ######## Methods below are for MeasurePoint selections ###################
    ##########################################################################

    @staticmethod
    def get_selections_for_project(project_id, datatype_gid):
        """
        Retrieved from DB saved selections for current project. If a certain selection
        doesn't have all the labels between the labels of the given connectivity than
        this selection will not be returned.
        :returns: List of ConnectivitySelection entities.
        """
        return dao.get_selections_for_project(project_id, datatype_gid)

    @staticmethod
    def save_measure_points_selection(ui_name, selected_nodes, datatype_gid, project_id):
        """
        Store in DB a ConnectivitySelection.
        """
        select_entities = dao.get_selections_for_project(project_id, datatype_gid, ui_name)

        if select_entities:
            # when the name of the new selection is within the available selections then update that selection:
            select_entity = select_entities[0]
            select_entity.selected_nodes = selected_nodes
        else:
            select_entity = model.MeasurePointsSelection(ui_name, selected_nodes, datatype_gid, project_id)

        dao.store_entity(select_entity)

    ##########################################################################
    ##########    Bellow are PSE Filters specific methods   ##################
    ##########################################################################

    @staticmethod
    def get_stored_pse_filters(datatype_group_gid):
        return dao.get_stored_pse_filters(datatype_group_gid)

    @staticmethod
    def save_pse_filter(ui_name, datatype_group_gid, threshold_value, applied_on):
        """
        Store in DB a PSE filter.
        """
        select_entities = dao.get_stored_pse_filters(datatype_group_gid, ui_name)

        if select_entities:
            # when the UI name is already in DB, update the existing entity
            select_entity = select_entities[0]
            select_entity.threshold_value = threshold_value
            select_entity.applied_on = applied_on  # this is the type, as in applied on size or color
        else:
            select_entity = model.StoredPSEFilter(ui_name, datatype_group_gid, threshold_value, applied_on)

        dao.store_entity(select_entity)
def __init__(self):
    # Logger bound to this module's name, for consistent log routing.
    self.logger = get_logger(self.__class__.__module__)
    # Helper for project file-system operations (e.g. temp folders).
    self.file_helper = FilesHelper()
    # Used to fill adapter input trees with options and prepared parameter names.
    self.input_tree_manager = InputTreeManager()
class FlowService:
    """
    Service Layer for all TVB generic Work-Flow operations.
    """

    def __init__(self):
        # Logger, file helper and input-tree manager shared by the instance methods.
        self.logger = get_logger(self.__class__.__module__)
        self.file_helper = FilesHelper()
        self.input_tree_manager = InputTreeManager()

    def get_category_by_id(self, identifier):
        """ Pass to DAO the retrieve of category by ID operation."""
        return dao.get_category_by_id(identifier)

    @staticmethod
    def get_raw_categories():
        """:returns: AlgorithmCategory list of entities that have results in RAW state (Creators/Uploaders)"""
        return dao.get_raw_categories()

    @staticmethod
    def get_visualisers_category():
        """Retrieve all Algorithm categories, with display capability"""
        result = dao.get_visualisers_categories()
        if not result:
            raise ValueError("View Category not found!!!")
        return result[0]

    @staticmethod
    def get_algorithm_by_identifier(ident):
        """
        Retrieve Algorithm entity by ID.
        Return None, if ID is not found in DB.
        """
        return dao.get_algorithm_by_id(ident)

    @staticmethod
    def load_operation(operation_id):
        """ Retrieve previously stored Operation from DB, and load operation.burst attribute"""
        operation = dao.get_operation_by_id(operation_id)
        operation.burst = dao.get_burst_for_operation_id(operation_id)
        return operation

    @staticmethod
    def get_operation_numbers(proj_id):
        """ Count total number of operations started for current project. """
        return dao.get_operation_numbers(proj_id)

    def prepare_adapter(self, project_id, stored_adapter):
        """
        Having a StoredAdapter, return the Tree Adapter Interface object,
        populated with datatypes from 'project_id'.

        :raises OperationException: when the adapter cannot be built or its tree prepared
        """
        adapter_module = stored_adapter.module
        adapter_name = stored_adapter.classname
        try:
            # Prepare Adapter Interface, by populating with existent data,
            # in case of a parameter of type DataType.
            adapter_instance = ABCAdapter.build_adapter(stored_adapter)
            interface = adapter_instance.get_input_tree()
            interface = self.input_tree_manager.fill_input_tree_with_options(interface, project_id,
                                                                             stored_adapter.fk_category)
            interface = self.input_tree_manager.prepare_param_names(interface)
            return interface
        except Exception:
            self.logger.exception('Not found:' + adapter_name + ' in:' + adapter_module)
            raise OperationException("Could not prepare " + adapter_name)

    @staticmethod
    def get_algorithm_by_module_and_class(module, classname):
        """
        Get the db entry from the algorithm table for the given module and class.
        """
        return dao.get_algorithm_by_module(module, classname)

    @staticmethod
    def get_available_datatypes(project_id, data_type_cls, filters=None):
        """
        Return all dataTypes that match a given name and some filters.
        :param data_type_cls: either a fully qualified class name or a class object
        """
        return get_filtered_datatypes(project_id, data_type_cls, filters)

    @staticmethod
    def create_link(data_ids, project_id):
        """
        For a list of dataType IDs and a project id create all the required links.
        """
        for data in data_ids:
            link = model.Links(data, project_id)
            dao.store_entity(link)

    @staticmethod
    def remove_link(dt_id, project_id):
        """
        Remove the link from the datatype given by dt_id to project given by project_id.
        """
        link = dao.get_link(dt_id, project_id)
        if link is not None:
            dao.remove_entity(model.Links, link.id)

    def fire_operation(self, adapter_instance, current_user, project_id, visible=True, **data):
        """
        Launch an operation, specified by AdapterInstance, for CurrentUser,
        Current Project and a given set of UI Input Data.

        :raises OperationException: wrapping any failure of the launch
        """
        operation_name = str(adapter_instance.__class__.__name__)
        try:
            self.logger.info("Starting operation " + operation_name)
            project = dao.get_project_by_id(project_id)
            tmp_folder = self.file_helper.get_project_folder(project, self.file_helper.TEMP_FOLDER)

            result = OperationService().initiate_operation(current_user, project.id, adapter_instance,
                                                           tmp_folder, visible, **data)
            self.logger.info("Finished operation:" + operation_name)
            return result
        # BUGFIX: use the Python-3-compatible "except ... as ..." form; the previous
        # "except TVBException, excep" comma syntax is a SyntaxError on Python 3 and
        # inconsistent with the "as" form used by the other copy of this class.
        except TVBException as excep:
            self.logger.exception("Could not launch operation " + operation_name +
                                  " with the given set of input data, because: " + excep.message)
            raise OperationException(excep.message, excep)
        except Exception as excep:
            self.logger.exception("Could not launch operation " + operation_name +
                                  " with the given set of input data!")
            raise OperationException(str(excep))
class FlowService:
    """
    Service Layer for all TVB generic Work-Flow operations.

    Thin facade over the DAO layer: most methods delegate straight to `dao`
    and return the persisted entities unchanged.
    """

    def __init__(self):
        # Per-instance collaborators; the service itself is otherwise stateless.
        self.logger = get_logger(self.__class__.__module__)
        self.file_helper = FilesHelper()
        self.input_tree_manager = InputTreeManager()

    def get_category_by_id(self, identifier):
        """ Pass to DAO the retrieve of category by ID operation."""
        return dao.get_category_by_id(identifier)

    @staticmethod
    def get_raw_categories():
        """:returns: AlgorithmCategory list of entities that have results in RAW state (Creators/Uploaders)"""
        return dao.get_raw_categories()

    @staticmethod
    def get_visualisers_category():
        """Retrieve all Algorithm categories, with display capability"""
        result = dao.get_visualisers_categories()
        if not result:
            raise ValueError("View Category not found!!!")
        # Only the first visualiser category is exposed to callers.
        return result[0]

    @staticmethod
    def get_algorithm_by_identifier(ident):
        """
        Retrieve Algorithm entity by ID.
        Return None, if ID is not found in DB.
        """
        return dao.get_algorithm_by_id(ident)

    @staticmethod
    def load_operation(operation_id):
        """ Retrieve previously stored Operation from DB, and load operation.burst attribute"""
        operation = dao.get_operation_by_id(operation_id)
        operation.burst = dao.get_burst_for_operation_id(operation_id)
        return operation

    @staticmethod
    def get_operation_numbers(proj_id):
        """ Count total number of operations started for current project. """
        return dao.get_operation_numbers(proj_id)

    def prepare_adapter(self, project_id, stored_adapter):
        """
        Having a StoredAdapter, return the Tree Adapter Interface object,
        populated with datatypes from 'project_id'.

        :param project_id: id of the project whose datatypes fill the tree options
        :param stored_adapter: model.Algorithm row describing the adapter to instantiate
        :raises OperationException: when the adapter cannot be built or its tree prepared
        """
        adapter_module = stored_adapter.module
        adapter_name = stored_adapter.classname
        try:
            # Prepare Adapter Interface, by populating with existent data,
            # in case of a parameter of type DataType.
            adapter_instance = ABCAdapter.build_adapter(stored_adapter)
            interface = adapter_instance.get_input_tree()
            interface = self.input_tree_manager.fill_input_tree_with_options(interface, project_id,
                                                                             stored_adapter.fk_category)
            interface = self.input_tree_manager.prepare_param_names(interface)
            return interface
        except Exception:
            self.logger.exception('Not found:' + adapter_name + ' in:' + adapter_module)
            raise OperationException("Could not prepare " + adapter_name)

    @staticmethod
    def get_algorithm_by_module_and_class(module, classname):
        """
        Get the db entry from the algorithm table for the given module and class.
        """
        return dao.get_algorithm_by_module(module, classname)

    @staticmethod
    def get_available_datatypes(project_id, data_type_cls, filters=None):
        """
        Return all dataTypes that match a given name and some filters.
        :param data_type_cls: either a fully qualified class name or a class object
        """
        return get_filtered_datatypes(project_id, data_type_cls, filters)

    @staticmethod
    def create_link(data_ids, project_id):
        """
        For a list of dataType IDs and a project id create all the required links.
        """
        for data in data_ids:
            link = model.Links(data, project_id)
            dao.store_entity(link)

    @staticmethod
    def remove_link(dt_id, project_id):
        """
        Remove the link from the datatype given by dt_id to project given by project_id.
        """
        link = dao.get_link(dt_id, project_id)
        if link is not None:
            dao.remove_entity(model.Links, link.id)

    def fire_operation(self, adapter_instance, current_user, project_id, visible=True, **data):
        """
        Launch an operation, specified by AdapterInstance, for CurrentUser,
        Current Project and a given set of UI Input Data.

        :raises OperationException: wraps both TVB-specific and unexpected launch failures
        """
        operation_name = str(adapter_instance.__class__.__name__)
        try:
            self.logger.info("Starting operation " + operation_name)
            project = dao.get_project_by_id(project_id)
            tmp_folder = self.file_helper.get_project_folder(project, self.file_helper.TEMP_FOLDER)

            result = OperationService().initiate_operation(current_user, project.id, adapter_instance,
                                                           tmp_folder, visible, **data)
            self.logger.info("Finished operation:" + operation_name)
            return result

        except TVBException as excep:
            # NOTE(review): relies on TVBException exposing a `.message` attribute — not
            # a plain Python 3 exception attribute; presumably defined by the framework base.
            self.logger.exception("Could not launch operation " + operation_name +
                                  " with the given set of input data, because: " + excep.message)
            raise OperationException(excep.message, excep)
        except Exception as excep:
            self.logger.exception("Could not launch operation " + operation_name +
                                  " with the given set of input data!")
            raise OperationException(str(excep))

    @staticmethod
    def get_upload_algorithms():
        """
        :return: List of StoredAdapter entities
        """
        categories = dao.get_uploader_categories()
        categories_ids = [categ.id for categ in categories]
        return dao.get_adapters_from_categories(categories_ids)

    def get_analyze_groups(self):
        """
        :return: list of AlgorithmTransientGroup entities
        """
        categories = dao.get_launchable_categories(elimin_viewers=True)
        categories_ids = [categ.id for categ in categories]
        stored_adapters = dao.get_adapters_from_categories(categories_ids)

        groups_list = []
        for adapter in stored_adapters:
            # For empty groups, this time, we fill the actual adapter
            group = AlgorithmTransientGroup(adapter.group_name or adapter.displayname,
                                            adapter.group_description or adapter.description)
            group = self._find_group(groups_list, group)
            group.children.append(adapter)
        return categories[0], groups_list

    @staticmethod
    def _find_group(groups_list, new_group):
        """
        Return the existing group from `groups_list` matching `new_group` by
        name and description; otherwise append `new_group` and return it.
        """
        for i in range(len(groups_list) - 1, -1, -1):
            current_group = groups_list[i]
            if current_group.name == new_group.name and current_group.description == new_group.description:
                return current_group
        # Not found in list
        groups_list.append(new_group)
        return new_group

    def get_visualizers_for_group(self, dt_group_gid):
        """Return only the adapters (second tuple member) launchable on the given DataTypeGroup GID."""
        categories = dao.get_visualisers_categories()
        return self._get_launchable_algorithms(dt_group_gid, categories)[1]

    def get_launchable_algorithms(self, datatype_gid):
        """
        :param datatype_gid: Filter only algorithms compatible with this GUID
        :return: dict(category_name: List AlgorithmTransientGroup)
        """
        categories = dao.get_launchable_categories()
        datatype_instance, filtered_adapters = self._get_launchable_algorithms(datatype_gid, categories)

        if isinstance(datatype_instance, model.DataTypeGroup):
            # If part of a group, update also with specific analyzers of the child datatype
            dt_group = dao.get_datatype_group_by_gid(datatype_gid)
            datatypes = dao.get_datatypes_from_datatype_group(dt_group.id)
            if len(datatypes):
                datatype = datatypes[-1]
                analyze_category = dao.get_launchable_categories(True)
                _, inner_analyzers = self._get_launchable_algorithms(datatype.gid, analyze_category)
                filtered_adapters.extend(inner_analyzers)

        # Map category id -> display name for the final grouping step.
        categories_dict = dict()
        for c in categories:
            categories_dict[c.id] = c.displayname

        return self._group_adapters_by_category(filtered_adapters, categories_dict)

    def _get_launchable_algorithms(self, datatype_gid, categories):
        """
        Collect adapters from `categories` applicable to the datatype identified
        by `datatype_gid`, honoring each adapter's stored datatype filter.

        :returns: (datatype_instance, list of StoredAdapter)
        """
        datatype_instance = dao.get_datatype_by_gid(datatype_gid)
        data_class = datatype_instance.__class__
        # Compatibility is checked against the full MappedType ancestry of the datatype.
        all_compatible_classes = [data_class.__name__]
        for one_class in getmro(data_class):
            if issubclass(one_class, MappedType) and one_class.__name__ not in all_compatible_classes:
                all_compatible_classes.append(one_class.__name__)

        self.logger.debug("Searching in categories: " + str(categories) + " for classes " +
                          str(all_compatible_classes))
        categories_ids = [categ.id for categ in categories]
        launchable_adapters = dao.get_applicable_adapters(all_compatible_classes, categories_ids)

        filtered_adapters = []
        for stored_adapter in launchable_adapters:
            # An empty/absent filter means "applicable to everything".
            filter_chain = FilterChain.from_json(stored_adapter.datatype_filter)
            if not filter_chain or filter_chain.get_python_filter_equivalent(datatype_instance):
                filtered_adapters.append(stored_adapter)

        return datatype_instance, filtered_adapters

    def _group_adapters_by_category(self, stored_adapters, categories):
        """
        :param stored_adapters: list StoredAdapter
        :param categories: dict mapping category id -> category display name
        :return: dict(category_name: List AlgorithmTransientGroup), empty groups all in the same AlgorithmTransientGroup
        """
        categories_dict = dict()
        for adapter in stored_adapters:
            category_name = categories.get(adapter.fk_category)
            if category_name in categories_dict:
                groups_list = categories_dict.get(category_name)
            else:
                groups_list = []
                categories_dict[category_name] = groups_list
            group = AlgorithmTransientGroup(adapter.group_name, adapter.group_description)
            group = self._find_group(groups_list, group)
            group.children.append(adapter)
        return categories_dict

    @staticmethod
    def get_generic_entity(entity_type, filter_value, select_field):
        """Generic DAO lookup: entities of `entity_type` where `select_field` equals `filter_value`."""
        return dao.get_generic_entity(entity_type, filter_value, select_field)

    ##########################################################################
    ######## Methods below are for MeasurePoint selections ###################
    ##########################################################################

    @staticmethod
    def get_selections_for_project(project_id, datatype_gid):
        """
        Retrieved from DB saved selections for current project. If a certain selection
        doesn't have all the labels between the labels of the given connectivity than
        this selection will not be returned.
        :returns: List of ConnectivitySelection entities.
        """
        return dao.get_selections_for_project(project_id, datatype_gid)

    @staticmethod
    def save_measure_points_selection(ui_name, selected_nodes, datatype_gid, project_id):
        """
        Store in DB a ConnectivitySelection.

        Upserts by UI name: an existing selection with the same name is updated,
        otherwise a new MeasurePointsSelection is created.
        """
        select_entities = dao.get_selections_for_project(project_id, datatype_gid, ui_name)

        if select_entities:
            # when the name of the new selection is within the available selections then update that selection:
            select_entity = select_entities[0]
            select_entity.selected_nodes = selected_nodes
        else:
            select_entity = model.MeasurePointsSelection(ui_name, selected_nodes, datatype_gid, project_id)

        dao.store_entity(select_entity)

    ##########################################################################
    ##########    Bellow are PSE Filters specific methods   ##################
    ##########################################################################

    @staticmethod
    def get_stored_pse_filters(datatype_group_gid):
        """Retrieve all stored PSE filters for the given DataTypeGroup GID."""
        return dao.get_stored_pse_filters(datatype_group_gid)

    @staticmethod
    def save_pse_filter(ui_name, datatype_group_gid, threshold_value, applied_on):
        """
        Store in DB a PSE filter.

        Upserts by UI name, mirroring save_measure_points_selection.
        """
        select_entities = dao.get_stored_pse_filters(datatype_group_gid, ui_name)

        if select_entities:
            # when the UI name is already in DB, update the existing entity
            select_entity = select_entities[0]
            select_entity.threshold_value = threshold_value
            select_entity.applied_on = applied_on  # this is the type, as in applied on size or color
        else:
            select_entity = model.StoredPSEFilter(ui_name, datatype_group_gid, threshold_value, applied_on)

        dao.store_entity(select_entity)
class ABCAdapter(object):
    """
    Root Abstract class for all TVB Adapters.

    Concrete adapters implement get_input_tree/get_output/launch and the
    resource-estimation hooks; the framework drives them through _prelaunch.
    """
    # todo this constants copy is not nice
    TYPE_SELECT = input_tree.TYPE_SELECT
    TYPE_MULTIPLE = input_tree.TYPE_MULTIPLE
    STATIC_ACCEPTED_TYPES = input_tree.STATIC_ACCEPTED_TYPES
    KEY_TYPE = input_tree.KEY_TYPE
    KEY_OPTIONS = input_tree.KEY_OPTIONS
    KEY_ATTRIBUTES = input_tree.KEY_ATTRIBUTES
    KEY_NAME = input_tree.KEY_NAME
    KEY_DESCRIPTION = input_tree.KEY_DESCRIPTION
    KEY_VALUE = input_tree.KEY_VALUE
    KEY_LABEL = input_tree.KEY_LABEL
    KEY_DEFAULT = input_tree.KEY_DEFAULT
    KEY_DATATYPE = input_tree.KEY_DATATYPE
    KEY_DTYPE = input_tree.KEY_DTYPE
    KEY_DISABLED = input_tree.KEY_DISABLED
    KEY_ALL = input_tree.KEY_ALL
    KEY_CONDITION = input_tree.KEY_CONDITION
    KEY_FILTERABLE = input_tree.KEY_FILTERABLE
    KEY_REQUIRED = input_tree.KEY_REQUIRED
    KEY_ID = input_tree.KEY_ID
    KEY_UI_HIDE = input_tree.KEY_UI_HIDE

    # TODO: move everything related to parameters PRE + POST into parameters_factory
    KEYWORD_PARAMS = input_tree.KEYWORD_PARAMS
    KEYWORD_SEPARATOR = input_tree.KEYWORD_SEPARATOR
    KEYWORD_OPTION = input_tree.KEYWORD_OPTION

    INTERFACE_ATTRIBUTES_ONLY = interface.INTERFACE_ATTRIBUTES_ONLY
    INTERFACE_ATTRIBUTES = interface.INTERFACE_ATTRIBUTES

    # model.Algorithm instance that will be set for each adapter created by in build_adapter method
    stored_adapter = None

    __metaclass__ = ABCMeta

    def __init__(self):
        # It will be populate with key from DataTypeMetaData
        self.meta_data = {DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT}
        self.file_handler = FilesHelper()
        self.storage_path = '.'
        # Will be populate with current running operation's identifier
        self.operation_id = None
        self.user_id = None
        self.log = get_logger(self.__class__.__module__)
        self.tree_manager = InputTreeManager()

    @classmethod
    def get_group_name(cls):
        # Group metadata is optional; adapters may declare a _ui_group attribute.
        if hasattr(cls, "_ui_group") and hasattr(cls._ui_group, "name"):
            return cls._ui_group.name
        return None

    @classmethod
    def get_group_description(cls):
        if hasattr(cls, "_ui_group") and hasattr(cls._ui_group, "description"):
            return cls._ui_group.description
        return None

    @classmethod
    def get_ui_name(cls):
        # Fall back to the class name when no explicit UI name is declared.
        if hasattr(cls, "_ui_name"):
            return cls._ui_name
        else:
            return cls.__name__

    @classmethod
    def get_ui_description(cls):
        # Returns None implicitly when the adapter declares no description.
        if hasattr(cls, "_ui_description"):
            return cls._ui_description

    @classmethod
    def get_ui_subsection(cls):
        # Explicit _ui_subsection wins over the group-level subsection.
        if hasattr(cls, "_ui_subsection"):
            return cls._ui_subsection

        if hasattr(cls, "_ui_group") and hasattr(cls._ui_group, "subsection"):
            return cls._ui_group.subsection

    @staticmethod
    def can_be_active():
        """
        To be overridden where needed (e.g. Matlab dependent adapters).
        :return: By default True, and False when the current Adapter can not be executed in the current env
        for various reasons (e.g. no Matlab or Octave installed)
        """
        return True

    @abstractmethod
    def get_input_tree(self):
        """
        Describes inputs and outputs of the launch method.
        """

    @abstractmethod
    def get_output(self):
        """
        Describes inputs and outputs of the launch method.
        """

    def configure(self, **kwargs):
        """
        To be implemented in each Adapter that requires any specific configurations
        before the actual launch.
        """

    @abstractmethod
    def get_required_memory_size(self, **kwargs):
        """
        Abstract method to be implemented in each adapter. Should return the required memory
        for launching the adapter.
        """

    @abstractmethod
    def get_required_disk_size(self, **kwargs):
        """
        Abstract method to be implemented in each adapter. Should return the required memory
        for launching the adapter in kilo-Bytes.
        """

    def get_execution_time_approximation(self, **kwargs):
        """
        Method should approximate based on input arguments, the time it will take for the operation 
        to finish (in seconds).
        """
        # -1 signals "unknown" to callers.
        return -1

    @abstractmethod
    def launch(self):
        """
         To be implemented in each Adapter.
         Will contain the logic of the Adapter.
         Any returned DataType will be stored in DB, by the Framework.
        """

    def add_operation_additional_info(self, message):
        """
        Adds additional info on the operation to be displayed in the UI. Usually a warning message.
        """
        current_op = dao.get_operation_by_id(self.operation_id)
        current_op.additional_info = message
        dao.store_entity(current_op)

    @nan_not_allowed()
    def _prelaunch(self, operation, uid=None, available_disk_space=0, **kwargs):
        """
        Method to wrap LAUNCH.
        Will prepare data, and store results on return.

        :param operation: the model.Operation entity being executed
        :param uid: optional user tag propagated onto the results
        :param available_disk_space: remaining user quota; negative means already exceeded
        :raises NoMemoryAvailableException: on insufficient RAM or disk quota
        """
        self.meta_data.update(json.loads(operation.meta_data))
        self.storage_path = self.file_handler.get_project_folder(operation.project, str(operation.id))
        self.operation_id = operation.id
        self.current_project_id = operation.project.id
        self.user_id = operation.fk_launched_by

        self.configure(**kwargs)

        # Compare the amount of memory the current algorithms states it needs,
        # with the average between the RAM available on the OS and the free memory at the current moment.
        # We do not consider only the free memory, because some OSs are freeing late and on-demand only.
        total_free_memory = psutil.virtual_memory().free + psutil.swap_memory().free
        total_existent_memory = psutil.virtual_memory().total + psutil.swap_memory().total
        memory_reference = (total_free_memory + total_existent_memory) / 2
        adapter_required_memory = self.get_required_memory_size(**kwargs)

        if adapter_required_memory > memory_reference:
            msg = "Machine does not have enough RAM memory for the operation (expected %.2g GB, but found %.2g GB)."
            raise NoMemoryAvailableException(msg % (adapter_required_memory / 2 ** 30, memory_reference / 2 ** 30))

        # Compare the expected size of the operation results with the HDD space currently available for the user
        # TVB defines a quota per user.
        required_disk_space = self.get_required_disk_size(**kwargs)
        if available_disk_space < 0:
            msg = "You have exceeded you HDD space quota by %.2f MB Stopping execution."
            raise NoMemoryAvailableException(msg % (- available_disk_space / 2 ** 10))
        if available_disk_space < required_disk_space:
            msg = ("You only have %.2f GB of disk space available but the operation you "
                   "launched might require %.2f Stopping execution...")
            raise NoMemoryAvailableException(msg % (available_disk_space / 2 ** 20, required_disk_space / 2 ** 20))

        operation.start_now()
        operation.estimated_disk_size = required_disk_space
        dao.store_entity(operation)

        result = self.launch(**kwargs)

        # Normalize single results to a list before integrity checks / storage.
        if not isinstance(result, (list, tuple)):
            result = [result, ]
        self.__check_integrity(result)

        return self._capture_operation_results(result, uid)

    def _capture_operation_results(self, result, user_tag=None):
        """
        After an operation was finished, make sure the results are stored
        in DB storage and the correct meta-data,IDs are set.

        Mutates `result` in place so it ends up holding the stored entities.
        :returns: (status message string, number of stored results)
        """
        results_to_store = []
        data_type_group_id = None
        operation = dao.get_operation_by_id(self.operation_id)
        if operation.user_group is None or len(operation.user_group) == 0:
            operation.user_group = date2string(datetime.now(), date_format=LESS_COMPLEX_TIME_FORMAT)
            operation = dao.store_entity(operation)
        if self._is_group_launch():
            data_type_group_id = dao.get_datatypegroup_by_op_group_id(operation.fk_operation_group).id
        # All entities will have the same subject and state
        subject = self.meta_data[DataTypeMetaData.KEY_SUBJECT]
        state = self.meta_data[DataTypeMetaData.KEY_STATE]
        burst_reference = None
        if DataTypeMetaData.KEY_BURST in self.meta_data:
            burst_reference = self.meta_data[DataTypeMetaData.KEY_BURST]
        perpetuated_identifier = None
        if DataTypeMetaData.KEY_TAG_1 in self.meta_data:
            perpetuated_identifier = self.meta_data[DataTypeMetaData.KEY_TAG_1]

        for res in result:
            if res is None:
                continue
            res.subject = str(subject)
            res.state = state
            res.fk_parent_burst = burst_reference
            res.fk_from_operation = self.operation_id
            res.framework_metadata = self.meta_data
            # Prefer tag_1; once occupied, spill the tag into tag_2.
            if not res.user_tag_1:
                res.user_tag_1 = user_tag if user_tag is not None else perpetuated_identifier
            else:
                res.user_tag_2 = user_tag if user_tag is not None else perpetuated_identifier
            res.fk_datatype_group = data_type_group_id
            ## Compute size-on disk, in case file-storage is used
            if hasattr(res, 'storage_path') and hasattr(res, 'get_storage_file_name'):
                associated_file = os.path.join(res.storage_path, res.get_storage_file_name())
                res.close_file()
                res.disk_size = self.file_handler.compute_size_on_disk(associated_file)
            res = dao.store_entity(res)
            # Write metaData
            res.persist_full_metadata()
            results_to_store.append(res)
        # Replace caller's list contents with the stored (re-fetched) entities.
        del result[0:len(result)]
        result.extend(results_to_store)

        if len(result) and self._is_group_launch():
            ## Update the operation group name
            operation_group = dao.get_operationgroup_by_id(operation.fk_operation_group)
            operation_group.fill_operationgroup_name(result[0].type)
            dao.store_entity(operation_group)

        return 'Operation ' + str(self.operation_id) + ' has finished.', len(results_to_store)

    def __check_integrity(self, result):
        """
        Check that the returned parameters for LAUNCH operation
        are of the type specified in the adapter's interface.
        """
        entity_id = self.__module__ + '.' + self.__class__.__name__
        for result_entity in result:
            if type(result_entity) == list and len(result_entity) > 0:
                #### Determine the first element not None
                first_item = None
                for res in result_entity:
                    if res is not None:
                        first_item = res
                        break
                if first_item is None:
                    #### All list items are None
                    return
                #### Now check if the first item has a supported type
                if not self.__is_data_in_supported_types(first_item):
                    msg = "Unexpected DataType %s"
                    raise InvalidParameterException(msg % type(first_item))

                first_item_type = type(first_item)
                # Lists must be homogeneous in exact type.
                for res in result_entity:
                    if not isinstance(res, first_item_type):
                        msg = '%s-Heterogeneous types (%s).Expected %s list.'
                        raise InvalidParameterException(msg % (entity_id, type(res), first_item_type))
            else:
                if not self.__is_data_in_supported_types(result_entity):
                    msg = "Unexpected DataType %s"
                    raise InvalidParameterException(msg % type(result_entity))

    def __is_data_in_supported_types(self, data):
        """
        This method checks if the provided data is one of the adapter supported return types
        """
        if data is None:
            # None is always accepted.
            return True
        for supported_type in self.get_output():
            if isinstance(data, supported_type):
                return True
        ##### Data can't be mapped on any supported type !!
        return False

    def _is_group_launch(self):
        """
        Return true if this adapter is launched from a group of operations
        """
        operation = dao.get_operation_by_id(self.operation_id)
        return operation.fk_operation_group is not None

    @staticmethod
    def load_entity_by_gid(data_gid):
        """
        Load a generic DataType, specified by GID.
        """
        return load_entity_by_gid(data_gid)

    @staticmethod
    def build_adapter_from_class(adapter_class):
        """
        Having a subclass of ABCAdapter, prepare an instance for launching an operation with it.

        :raises IntrospectionException: when the class is not an ABCAdapter or lookup fails
        """
        if not issubclass(adapter_class, ABCAdapter):
            raise IntrospectionException("Invalid data type: It should extend adapters.ABCAdapter!")
        try:
            stored_adapter = dao.get_algorithm_by_module(adapter_class.__module__, adapter_class.__name__)

            adapter_instance = adapter_class()
            adapter_instance.stored_adapter = stored_adapter
            return adapter_instance
        except Exception as excep:
            LOGGER.exception(excep)
            raise IntrospectionException(str(excep))

    @staticmethod
    def build_adapter(stored_adapter):
        """
        Having a module and a class name, create an instance of ABCAdapter.

        :param stored_adapter: model.Algorithm row with `module` and `classname` attributes
        :raises IntrospectionException: when the module/class cannot be imported or instantiated
        """
        try:
            ad_module = importlib.import_module(stored_adapter.module)
            adapter_class = getattr(ad_module, stored_adapter.classname)
            adapter_instance = adapter_class()
            adapter_instance.stored_adapter = stored_adapter
            return adapter_instance

        except Exception:
            msg = "Could not load Adapter Instance for Stored row %s" % stored_adapter
            LOGGER.exception(msg)
            raise IntrospectionException(msg)

    ####### METHODS for PROCESSING PARAMETERS start here #############################

    def review_operation_inputs(self, parameters):
        """
        :returns: a list with the inputs from the parameters list that are instances of DataType,\
            and a dictionary with all parameters which are different than the declared defauts
        """
        flat_interface = self.flaten_input_interface()
        return self.tree_manager.review_operation_inputs(parameters, flat_interface)

    def prepare_ui_inputs(self, kwargs, validation_required=True):
        """
        Prepare the inputs received from a HTTP Post in a form that will be
        used by the Python adapter.
        """
        algorithm_inputs = self.get_input_tree()
        algorithm_inputs = InputTreeManager.prepare_param_names(algorithm_inputs)
        self.tree_manager.append_required_defaults(kwargs, algorithm_inputs)
        return self.convert_ui_inputs(kwargs, validation_required=validation_required)

    def convert_ui_inputs(self, kwargs, validation_required=True):
        """
        Convert HTTP POST parameters into Python parameters.
        """
        return self.tree_manager.convert_ui_inputs(self.flaten_input_interface(), kwargs, self.meta_data,
                                                   validation_required)

    def noise_configurable_parameters(self):
        """Return names of flat-interface entries marked with a 'configurableNoise' key."""
        return [entry[self.KEY_NAME] for entry in self.flaten_input_interface() if 'configurableNoise' in entry]

    def flaten_input_interface(self):
        """ Return a simple dictionary, instead of a Tree."""
        return self.tree_manager.flatten(self.get_input_tree())
class TestGenshiSimulator(GenshiTest):
    """
    For the simulator interface, test that various fields are generated correctly.
    """
    # NOTE: class-level attributes run DAO/adapter calls at import time,
    # so collecting this module requires a prepared TVB database.
    algorithm = dao.get_algorithm_by_module('tvb.adapters.simulator.simulator_adapter', 'SimulatorAdapter')
    adapter_instance = ABCAdapter.build_adapter(algorithm)
    input_tree = adapter_instance.get_input_tree()
    input_tree = InputTreeManager.prepare_param_names(input_tree)

    def setup_method(self):
        """
        Set up any additionally needed parameters.
        """
        super(TestGenshiSimulator, self).setup_method()
        self.template_specification['inputList'] = self.input_tree
        self.template_specification['draw_hidden_ranges'] = True
        self.template_specification[common.KEY_PARAMETERS_CONFIG] = False
        # Render the template once; all assertions parse this HTML soup.
        resulted_html = _template2string(self.template_specification)
        self.soup = BeautifulSoup(resulted_html)
        #file = open("output.html", 'w')
        #file.write(self.soup.prettify())
        #file.close()

    def test_sub_algo_inputs(self):
        """
        Check the name of inputs generated for each sub-algorithm is done properly with only one
        option that is not disabled
        """
        exp = re.compile('model_parameters_option_[a-zA-Z]*')
        all_inputs = self.soup.find_all('input', attrs=dict(name=exp))
        count_disabled = 0
        for one_entry in all_inputs:
            ## Replacing with IN won't work
            if one_entry.has_attr('disabled'):
                count_disabled += 1
        assert len(all_inputs) > 100, "Not enough input fields generated"
        assert count_disabled > 100, "Not enough input fields disabled"

    def test_hidden_ranger_fields(self):
        """
        Check that the default ranger hidden fields are generated correctly
        """
        ranger1 = self.soup.find_all('input', attrs=dict(type="hidden", id=RANGE_PARAMETER_1))
        ranger2 = self.soup.find_all('input', attrs=dict(type="hidden", id=RANGE_PARAMETER_2))
        assert 1 == len(ranger1), "First ranger generated wrong"
        assert 1 == len(ranger2), "Second ranger generated wrong"

    def test_sub_algorithms(self):
        """
        Check that the correct number of sub-algorithms is created
        and that only one of them is not disable
        """
        fail_message = "Something went wrong with generating the sub-algorithms."
        exp = re.compile('data_model[A-Z][a-zA-Z]*')
        enabled_algo = self.soup.find_all('div', attrs=dict(id=exp, style="display:block"))
        all_algo_disabled = self.soup.find_all('div', attrs=dict(id=exp, style="display:none"))
        # Exactly one model div is visible; the other 14 are hidden.
        assert 1 == len(enabled_algo)
        assert 14 == len(all_algo_disabled)
        assert not enabled_algo[0] in all_algo_disabled, fail_message

    def test_normal_ranger(self):
        """
        Check the normal ranger generation. Only one ranger should be created
        because the minValue/ maxValue is specified only for one field. It should
        also be disabled because it is not as default.
        """
        fail_message = "Something went wrong with generating the ranger."

        exp = re.compile('data_model*')
        ranger_parent = self.soup.find_all('table', attrs={'id': exp, 'class': "ranger-div-class"})
        assert len(ranger_parent) > 100, fail_message

        range_expand = self.soup.find_all('input', attrs=dict(
            id="data_modelGeneric2dOscillatormodel_parameters_option_Generic2dOscillator_tau_RANGER_buttonExpand"))
        assert 1 == len(range_expand)

    def test_multiple_select(self):
        """
        Checks the correct creation of a multiple select component.
        """
        fail_message = "Something went wrong with creating multiple select."
        exp = re.compile('data_monitors[A-Z][a-zA-Z]*')
        all_multiple_options = self.soup.find_all('div', attrs=dict(id=exp))
        disabled_options = self.soup.find_all('div', attrs=dict(id=exp, disabled='disabled'))
        assert 9 == len(all_multiple_options), fail_message
        assert 8 == len(disabled_options), fail_message

        exp = re.compile('monitors_parameters*')
        all_multiple_params = self.soup.find_all('input', attrs=dict(name=exp))
        disabled_params = self.soup.find_all('input', attrs=dict(name=exp, disabled='disabled'))
        assert len(all_multiple_params) > 50, fail_message
        assert len(disabled_params) > 50, fail_message
class ABCAdapter(object):
    """
    Root Abstract class for all TVB Adapters.

    Concrete adapters describe their inputs (``get_input_tree`` / forms),
    declare resource requirements, and implement ``launch``. The framework
    drives them through ``_prelaunch``, which validates resources, runs the
    launch and persists any resulting DataTypes.
    """
    # todo this constants copy is not nice
    TYPE_SELECT = input_tree.TYPE_SELECT
    TYPE_MULTIPLE = input_tree.TYPE_MULTIPLE
    STATIC_ACCEPTED_TYPES = input_tree.STATIC_ACCEPTED_TYPES
    KEY_TYPE = input_tree.KEY_TYPE
    KEY_OPTIONS = input_tree.KEY_OPTIONS
    KEY_ATTRIBUTES = input_tree.KEY_ATTRIBUTES
    KEY_NAME = input_tree.KEY_NAME
    KEY_DESCRIPTION = input_tree.KEY_DESCRIPTION
    KEY_VALUE = input_tree.KEY_VALUE
    KEY_LABEL = input_tree.KEY_LABEL
    KEY_DEFAULT = input_tree.KEY_DEFAULT
    KEY_DATATYPE = input_tree.KEY_DATATYPE
    KEY_DTYPE = input_tree.KEY_DTYPE
    KEY_DISABLED = input_tree.KEY_DISABLED
    KEY_ALL = input_tree.KEY_ALL
    KEY_CONDITION = input_tree.KEY_CONDITION
    KEY_FILTERABLE = input_tree.KEY_FILTERABLE
    KEY_REQUIRED = input_tree.KEY_REQUIRED
    KEY_ID = input_tree.KEY_ID
    KEY_UI_HIDE = input_tree.KEY_UI_HIDE

    # TODO: move everything related to parameters PRE + POST into parameters_factory
    KEYWORD_PARAMS = input_tree.KEYWORD_PARAMS
    KEYWORD_SEPARATOR = input_tree.KEYWORD_SEPARATOR
    KEYWORD_OPTION = input_tree.KEYWORD_OPTION

    INTERFACE_ATTRIBUTES_ONLY = "attributes-only"
    INTERFACE_ATTRIBUTES = "attributes"

    # model.Algorithm instance that will be set for each adapter created by in build_adapter method
    stored_adapter = None

    def __init__(self):
        # It will be populate with key from DataTypeMetaData
        self.meta_data = {DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT}
        self.generic_attributes = GenericAttributes()
        self.generic_attributes.subject = DataTypeMetaData.DEFAULT_SUBJECT
        self.file_handler = FilesHelper()
        self.storage_path = '.'
        # Will be populate with current running operation's identifier
        self.operation_id = None
        self.user_id = None
        self.log = get_logger(self.__class__.__module__)
        self.tree_manager = InputTreeManager()
        self.submitted_form = None

    @classmethod
    def get_group_name(cls):
        """UI group name, when the adapter declares a ``_ui_group``."""
        if hasattr(cls, "_ui_group") and hasattr(cls._ui_group, "name"):
            return cls._ui_group.name
        return None

    @classmethod
    def get_group_description(cls):
        """UI group description, when the adapter declares a ``_ui_group``."""
        if hasattr(cls, "_ui_group") and hasattr(cls._ui_group, "description"):
            return cls._ui_group.description
        return None

    @classmethod
    def get_ui_name(cls):
        """Display name: the declared ``_ui_name`` or the class name as fallback."""
        if hasattr(cls, "_ui_name"):
            return cls._ui_name
        else:
            return cls.__name__

    @classmethod
    def get_ui_description(cls):
        """Declared ``_ui_description``, or None when the adapter defines none."""
        if hasattr(cls, "_ui_description"):
            return cls._ui_description

    @classmethod
    def get_ui_subsection(cls):
        """UI subsection: own ``_ui_subsection`` first, then the group's subsection."""
        if hasattr(cls, "_ui_subsection"):
            return cls._ui_subsection
        if hasattr(cls, "_ui_group") and hasattr(cls._ui_group, "subsection"):
            return cls._ui_group.subsection

    @staticmethod
    def can_be_active():
        """
        To be overridden where needed (e.g. Matlab dependent adapters).
        :return: By default True, and False when the current Adapter can not be executed in the current env
        for various reasons (e.g. no Matlab or Octave installed)
        """
        return True

    def get_input_tree(self):
        """
        Describes inputs and outputs of the launch method.
        """
        return None

    def submit_form(self, form):
        # Remember the form instance submitted from the UI (see get_form).
        self.submitted_form = form

    # TODO separate usage of get_form_class (returning a class) and return of a submitted instance
    def get_form(self):
        if self.submitted_form is not None:
            return self.submitted_form
        return self.get_form_class()

    @abstractmethod
    def get_form_class(self):
        return None

    @abstractmethod
    def get_output(self):
        """ Describes inputs and outputs of the launch method. """

    def configure(self, **kwargs):
        """
        To be implemented in each Adapter that requires any specific configurations
        before the actual launch.
        """

    @abstractmethod
    def get_required_memory_size(self, **kwargs):
        """
        Abstract method to be implemented in each adapter. Should return the required memory
        for launching the adapter.
        """

    @abstractmethod
    def get_required_disk_size(self, **kwargs):
        """
        Abstract method to be implemented in each adapter. Should return the required disk space
        for launching the adapter, in kilo-Bytes.
        """

    def get_execution_time_approximation(self, **kwargs):
        """
        Method should approximate based on input arguments, the time it will take for the operation
        to finish (in seconds).
        """
        return -1

    @abstractmethod
    def launch(self):
        """
        To be implemented in each Adapter.
        Will contain the logic of the Adapter.
        Any returned DataType will be stored in DB, by the Framework.
        """

    def add_operation_additional_info(self, message):
        """
        Adds additional info on the operation to be displayed in the UI. Usually a warning message.
        """
        current_op = dao.get_operation_by_id(self.operation_id)
        current_op.additional_info = message
        dao.store_entity(current_op)

    def _prepare_generic_attributes(self, user_tag=None):
        # Copy subject/state from the operation meta-data onto the generic attributes
        # that will be stamped on every resulting DataType.
        self.generic_attributes.subject = str(self.meta_data.get(DataTypeMetaData.KEY_SUBJECT))
        self.generic_attributes.state = self.meta_data.get(DataTypeMetaData.KEY_STATE)
        perpetuated_identifier = self.generic_attributes.user_tag_1
        if DataTypeMetaData.KEY_TAG_1 in self.meta_data:
            perpetuated_identifier = self.meta_data.get(DataTypeMetaData.KEY_TAG_1)
        # Fill tag_1 first; once taken, subsequent tags go into tag_2.
        if not self.generic_attributes.user_tag_1:
            self.generic_attributes.user_tag_1 = user_tag if user_tag is not None else perpetuated_identifier
        else:
            self.generic_attributes.user_tag_2 = user_tag if user_tag is not None else perpetuated_identifier

    @nan_not_allowed()
    def _prelaunch(self, operation, uid=None, available_disk_space=0, **kwargs):
        """
        Method to wrap LAUNCH.
        Will prepare data, and store results on return.
        """
        self.meta_data.update(json.loads(operation.meta_data))
        self.storage_path = self.file_handler.get_project_folder(operation.project, str(operation.id))
        self.operation_id = operation.id
        self.current_project_id = operation.project.id
        self.user_id = operation.fk_launched_by

        self.configure(**kwargs)

        # Compare the amount of memory the current algorithms states it needs,
        # with the average between the RAM available on the OS and the free memory at the current moment.
        # We do not consider only the free memory, because some OSs are freeing late and on-demand only.
        total_free_memory = psutil.virtual_memory().free + psutil.swap_memory().free
        total_existent_memory = psutil.virtual_memory().total + psutil.swap_memory().total
        memory_reference = (total_free_memory + total_existent_memory) / 2
        adapter_required_memory = self.get_required_memory_size(**kwargs)

        if adapter_required_memory > memory_reference:
            msg = "Machine does not have enough RAM memory for the operation (expected %.2g GB, but found %.2g GB)."
            raise NoMemoryAvailableException(msg % (adapter_required_memory / 2 ** 30, memory_reference / 2 ** 30))

        # Compare the expected size of the operation results with the HDD space currently available for the user
        # TVB defines a quota per user.
        required_disk_space = self.get_required_disk_size(**kwargs)
        if available_disk_space < 0:
            # FIX: message read "exceeded you HDD space quota by %.2f MB Stopping" (typo + missing period).
            msg = "You have exceeded your HDD space quota by %.2f MB. Stopping execution."
            raise NoMemoryAvailableException(msg % (-available_disk_space / 2 ** 10))
        if available_disk_space < required_disk_space:
            # FIX: the second %.2f lacked its "GB" unit in the user-facing message.
            msg = ("You only have %.2f GB of disk space available but the operation you "
                   "launched might require %.2f GB. Stopping execution...")
            raise NoMemoryAvailableException(msg % (available_disk_space / 2 ** 20, required_disk_space / 2 ** 20))

        operation.start_now()
        operation.estimated_disk_size = required_disk_space
        dao.store_entity(operation)

        self._prepare_generic_attributes(uid)
        result = self.launch(**kwargs)

        # launch() may return a single DataType or a sequence; normalize to a list.
        if not isinstance(result, (list, tuple)):
            result = [result, ]
        self.__check_integrity(result)
        return self._capture_operation_results(result)

    def _capture_operation_results(self, result):
        """
        After an operation was finished, make sure the results are stored
        in DB storage and the correct meta-data,IDs are set.
        """
        data_type_group_id = None
        operation = dao.get_operation_by_id(self.operation_id)
        if operation.user_group is None or len(operation.user_group) == 0:
            # Default the user group name to a timestamp when none was provided.
            operation.user_group = date2string(datetime.now(), date_format=LESS_COMPLEX_TIME_FORMAT)
            operation = dao.store_entity(operation)
        if self._is_group_launch():
            data_type_group_id = dao.get_datatypegroup_by_op_group_id(operation.fk_operation_group).id

        burst_reference = None
        if DataTypeMetaData.KEY_BURST in self.meta_data:
            burst_reference = self.meta_data[DataTypeMetaData.KEY_BURST]

        count_stored = 0
        group_type = None  # In case of a group, the first not-none type is sufficient to memorize here
        for res in result:
            if res is None:
                continue
            res.subject = self.generic_attributes.subject
            res.state = self.generic_attributes.state
            res.fk_parent_burst = burst_reference
            res.fk_from_operation = self.operation_id
            res.framework_metadata = self.meta_data
            res.user_tag_1 = self.generic_attributes.user_tag_1
            res.user_tag_2 = self.generic_attributes.user_tag_2
            res.fk_datatype_group = data_type_group_id
            # Compute size-on disk, in case file-storage is used
            associated_file = h5.path_for_stored_index(res)
            if os.path.exists(associated_file):
                res.disk_size = self.file_handler.compute_size_on_disk(associated_file)
                with H5File.from_file(associated_file) as f:
                    f.store_generic_attributes(self.generic_attributes)
            dao.store_entity(res)
            group_type = res.type
            count_stored += 1

        if count_stored > 0 and self._is_group_launch():
            # Update the operation group name
            operation_group = dao.get_operationgroup_by_id(operation.fk_operation_group)
            operation_group.fill_operationgroup_name(group_type)
            dao.store_entity(operation_group)

        return 'Operation ' + str(self.operation_id) + ' has finished.', count_stored

    def __check_integrity(self, result):
        """
        Check that the returned parameters for LAUNCH operation
        are of the type specified in the adapter's interface.
        """
        for result_entity in result:
            if result_entity is None:
                continue
            if not self.__is_data_in_supported_types(result_entity):
                msg = "Unexpected output DataType %s"
                raise InvalidParameterException(msg % type(result_entity))

    def __is_data_in_supported_types(self, data):
        if data is None:
            return True
        for supported_type in self.get_output():
            if isinstance(data, supported_type):
                return True
        # Data can't be mapped on any supported type !!
        return False

    def _is_group_launch(self):
        """
        Return true if this adapter is launched from a group of operations
        """
        operation = dao.get_operation_by_id(self.operation_id)
        return operation.fk_operation_group is not None

    @staticmethod
    def load_entity_by_gid(data_gid):
        """
        Load a generic DataType, specified by GID.
        """
        return load_entity_by_gid(data_gid)

    @staticmethod
    def build_adapter_from_class(adapter_class):
        """
        Having a subclass of ABCAdapter, prepare an instance for launching an operation with it.
        """
        if not issubclass(adapter_class, ABCAdapter):
            raise IntrospectionException("Invalid data type: It should extend adapters.ABCAdapter!")
        try:
            stored_adapter = dao.get_algorithm_by_module(adapter_class.__module__, adapter_class.__name__)

            adapter_instance = adapter_class()
            adapter_instance.stored_adapter = stored_adapter
            return adapter_instance
        except Exception as excep:
            LOGGER.exception(excep)
            raise IntrospectionException(str(excep))

    @staticmethod
    def build_adapter(stored_adapter):
        """
        Having a module and a class name, create an instance of ABCAdapter.
        """
        try:
            ad_module = importlib.import_module(stored_adapter.module)
            adapter_class = getattr(ad_module, stored_adapter.classname)
            adapter_instance = adapter_class()
            adapter_instance.stored_adapter = stored_adapter
            return adapter_instance
        except Exception:
            msg = "Could not load Adapter Instance for Stored row %s" % stored_adapter
            LOGGER.exception(msg)
            raise IntrospectionException(msg)

    # METHODS for PROCESSING PARAMETERS start here #############################

    def review_operation_inputs(self, parameters):
        """
        :returns: a list with the inputs from the parameters list that are instances of DataType,\
            and a dictionary with all parameters which are different than the declared defauts
        """
        flat_interface = self.flaten_input_interface()
        return self.tree_manager.review_operation_inputs(parameters, flat_interface)

    def prepare_ui_inputs(self, kwargs, validation_required=True):
        """
        Prepare the inputs received from a HTTP Post in a form that will be
        used by the Python adapter.
        """
        algorithm_inputs = self.get_input_tree()
        algorithm_inputs = InputTreeManager.prepare_param_names(algorithm_inputs)
        self.tree_manager.append_required_defaults(kwargs, algorithm_inputs)
        return self.convert_ui_inputs(kwargs, validation_required=validation_required)

    def convert_ui_inputs(self, kwargs, validation_required=True):
        """
        Convert HTTP POST parameters into Python parameters.
        """
        return self.tree_manager.convert_ui_inputs(self.flaten_input_interface(), kwargs, self.meta_data,
                                                   validation_required)

    def noise_configurable_parameters(self):
        # Names of flat-interface entries that declare themselves noise-configurable.
        return [entry[self.KEY_NAME] for entry in self.flaten_input_interface() if 'configurableNoise' in entry]

    def flaten_input_interface(self):
        # TODO: temporary condition to pass introspection on neoforms
        form = self.get_form_class()()
        if form:
            return [form._get_original_field_name(form_field) for form_field in form.fields]
        return self.tree_manager.flatten(self.get_input_tree())
def create_new_portlet_configuration(self, name=""):
    """
    Create a PortletConfiguration entity with the default values from the portlet
    XML declaration and the adapter input trees.

    :param name: display name stored on the view step of the new configuration
    :returns: a PortletConfiguration with one WorkflowStep per analyzer in the
        declared adapter chain and a WorkflowStepView for the last (visualizer) entry
    """
    chain_adapters = self.reader.get_adapters_chain(self.algo_identifier)
    analyze_steps = []
    view_step = None

    idx = 0
    for adapter_declaration in chain_adapters:
        adapter_instance, algorithm_group = self.build_adapter_from_declaration(adapter_declaration)

        ### Get the flatten interface for the adapter, and in case of     #####
        ### sub-algorithms also get the pair {algorithm : value}          #####
        algorithm_field = adapter_declaration[KEY_FIELD]
        if algorithm_field:
            default_algorithm = adapter_declaration[ABCAdapter.KEY_DEFAULT]
        else:
            default_algorithm = ""

        if default_algorithm:
            # Sub-algorithm case: flatten only the default algorithm's inputs,
            # prefixed so parameter names stay unique within the chain.
            prefix = InputTreeManager.form_prefix(algorithm_field, None, default_algorithm)
            alg_inputs = adapter_instance.tree_manager.flatten(
                adapter_instance.xml_reader.get_inputs(default_algorithm), prefix)
        else:
            alg_inputs = adapter_instance.flaten_input_interface()
        ###################################################################

        ### Get the overwrites defined in the portlet configuration ######
        ### for this specific adapter in the adapter chain          ######
        ### split in static and dynamic ones                        ######
        prepared_params = {KEY_STATIC: {}, KEY_DYNAMIC: {}}
        all_portlet_defined_params = self.reader.get_inputs(self.algo_identifier)
        specific_adapter_overwrites = [entry for entry in all_portlet_defined_params
                                       if ATT_OVERWRITE in entry and
                                       entry[ATT_OVERWRITE] == adapter_declaration[ABCAdapter.KEY_NAME]]

        for entry in specific_adapter_overwrites:
            # An overwrite may carry its value under 'default' or 'value'; empty otherwise.
            if ABCAdapter.KEY_DEFAULT in entry:
                declared_value = entry[ABCAdapter.KEY_DEFAULT]
            elif ABCAdapter.KEY_VALUE in entry:
                declared_value = entry[ABCAdapter.KEY_VALUE]
            else:
                declared_value = ""

            if entry[ABCAdapter.KEY_TYPE] == KEY_DYNAMIC:
                prepared_params[KEY_DYNAMIC][entry[ABCAdapter.KEY_NAME]] = declared_value
            else:
                prepared_params[KEY_STATIC][entry[ABCAdapter.KEY_NAME]] = declared_value
        ###################################################################

        ### Now just fill the rest of the adapter inputs if they are not ##
        ### present in neither dynamic or static overwrites. In case of  ##
        ### sub-algorithms also add as static the algorithm : value pair ##
        for input_dict in alg_inputs:
            input_name = input_dict[ABCAdapter.KEY_NAME]
            if input_name not in prepared_params[KEY_STATIC] and input_name not in prepared_params[KEY_DYNAMIC]:
                if ABCAdapter.KEY_DEFAULT in input_dict:
                    input_value = input_dict[ABCAdapter.KEY_DEFAULT]
                else:
                    input_value = ""
                prepared_params[KEY_STATIC][input_name] = input_value

        if default_algorithm:
            prepared_params[KEY_STATIC][algorithm_field] = default_algorithm
        ###################################################################

        ### Now parse the dynamic inputs declared in the portlets XML ####
        ### into workflow_step specific format.                        ####
        for param_name in prepared_params[KEY_DYNAMIC]:
            new_value = self._portlet_dynamic2workflow_step(prepared_params[KEY_DYNAMIC][param_name])
            prepared_params[KEY_DYNAMIC][param_name] = new_value
        ###################################################################

        ###Finally get the actual algorithm id from the DB as we need the #
        ###algorithm id, then build the workflow step given the computed  #
        ###parameter set, then build and return the portlet configuration##
        algorithm = dao.get_algorithm_by_group(algorithm_group.id, default_algorithm)
        if idx == len(chain_adapters) - 1:
            # The last adapter in the chain is always the visualizer step.
            view_step = WorkflowStepView(algorithm_id=algorithm.id, portlet_id=self.portlet_id,
                                         ui_name=name, static_param=prepared_params[KEY_STATIC],
                                         dynamic_param=prepared_params[KEY_DYNAMIC])
        else:
            workflow_step = WorkflowStep(algorithm_id=algorithm.id, static_param=prepared_params[KEY_STATIC],
                                         dynamic_param=prepared_params[KEY_DYNAMIC])
            analyze_steps.append(workflow_step)
        idx += 1

    portlet_configuration = PortletConfiguration(self.portlet_id)
    portlet_configuration.set_analyzers(analyze_steps)
    portlet_configuration.set_visualizer(view_step)
    return portlet_configuration
class FlowService:
    """
    Service Layer for all TVB generic Work-Flow operations.
    """

    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.file_helper = FilesHelper()
        self.input_tree_manager = InputTreeManager()

    def get_category_by_id(self, identifier):
        """ Pass to DAO the retrieve of category by ID operation."""
        return dao.get_category_by_id(identifier)

    @staticmethod
    def get_raw_categories():
        """:returns: AlgorithmCategory list of entities that have results in RAW state (Creators/Uploaders)"""
        return dao.get_raw_categories()

    @staticmethod
    def get_visualisers_category():
        """Retrieve all Algorithm categories, with display capability"""
        result = dao.get_visualisers_categories()
        if not result:
            raise ValueError("View Category not found!!!")
        return result[0]

    @staticmethod
    def get_algorithm_by_identifier(ident):
        """
        Retrieve Algorithm entity by ID.
        Return None, if ID is not found in DB.
        """
        return dao.get_algorithm_by_id(ident)

    @staticmethod
    def load_operation(operation_id):
        """ Retrieve previously stored Operation from DB, and load operation.burst attribute"""
        operation = dao.get_operation_by_id(operation_id)
        operation.burst = dao.get_burst_for_operation_id(operation_id)
        return operation

    @staticmethod
    def get_operation_numbers(proj_id):
        """ Count total number of operations started for current project. """
        return dao.get_operation_numbers(proj_id)

    def prepare_adapter(self, project_id, stored_adapter):
        """
        Having a StoredAdapter, return the Tree Adapter Interface object,
        populated with datatypes from 'project_id'.

        :raises OperationException: when the adapter class cannot be loaded or prepared
        """
        adapter_module = stored_adapter.module
        adapter_name = stored_adapter.classname
        try:
            # Prepare Adapter Interface, by populating with existent data,
            # in case of a parameter of type DataType.
            adapter_instance = ABCAdapter.build_adapter(stored_adapter)
            interface = adapter_instance.get_input_tree()
            interface = self.input_tree_manager.fill_input_tree_with_options(interface, project_id,
                                                                             stored_adapter.fk_category)
            interface = self.input_tree_manager.prepare_param_names(interface)
            return interface
        except Exception:
            self.logger.exception('Not found:' + adapter_name + ' in:' + adapter_module)
            raise OperationException("Could not prepare " + adapter_name)

    @staticmethod
    def get_algorithm_by_module_and_class(module, classname):
        """
        Get the db entry from the algorithm table for the given module and class.
        """
        return dao.get_algorithm_by_module(module, classname)

    @staticmethod
    def get_available_datatypes(project_id, data_type_cls, filters=None):
        """
        Return all dataTypes that match a given name and some filters.
        :param data_type_cls: either a fully qualified class name or a class object
        """
        return get_filtered_datatypes(project_id, data_type_cls, filters)

    @staticmethod
    def create_link(data_ids, project_id):
        """
        For a list of dataType IDs and a project id create all the required links.
        """
        for data in data_ids:
            link = model.Links(data, project_id)
            dao.store_entity(link)

    @staticmethod
    def remove_link(dt_id, project_id):
        """
        Remove the link from the datatype given by dt_id to project given by project_id.
        """
        link = dao.get_link(dt_id, project_id)
        if link is not None:
            dao.remove_entity(model.Links, link.id)

    def fire_operation(self, adapter_instance, current_user, project_id, visible=True, **data):
        """
        Launch an operation, specified by AdapterInstance, for CurrentUser,
        Current Project and a given set of UI Input Data.

        :raises OperationException: wrapping any failure during launch
        """
        operation_name = str(adapter_instance.__class__.__name__)
        try:
            self.logger.info("Starting operation " + operation_name)
            project = dao.get_project_by_id(project_id)
            tmp_folder = self.file_helper.get_project_folder(project, self.file_helper.TEMP_FOLDER)

            result = OperationService().initiate_operation(current_user, project.id, adapter_instance,
                                                           tmp_folder, visible, **data)
            self.logger.info("Finished operation:" + operation_name)
            return result
        # FIX: was Python-2-only "except TVBException, excep" syntax; the rest of the
        # file already uses the modern "as" form (see build_adapter_from_class).
        except TVBException as excep:
            self.logger.exception("Could not launch operation " + operation_name +
                                  " with the given set of input data, because: " + excep.message)
            raise OperationException(excep.message, excep)
        except Exception as excep:
            self.logger.exception("Could not launch operation " + operation_name +
                                  " with the given set of input data!")
            raise OperationException(str(excep))