def get_filtered_datatypes(self, dt_module, dt_class, filters, has_all_option, has_none_option):
    """
    Given the name from the input tree, the dataType required and a number of
    filters, return the available dataType that satisfy the conditions imposed.

    :param dt_module: module name that holds the datatype index class
    :param dt_class: index class name inside dt_module
    :param filters: JSON string with parallel 'fields'/'operations'/'values' lists
    :param has_all_option: string boolean - whether to add an "All" entry
    :param has_none_option: string boolean - when true, the field is optional
    """
    index_class = getattr(sys.modules[dt_module], dt_class)()
    filters_dict = json.loads(filters)
    # The three lists are parallel - hand them to FilterChain directly instead of
    # copying element by element; also avoid shadowing the builtin `filter`.
    filter_chain = FilterChain(fields=filters_dict['fields'],
                               operations=filters_dict['operations'],
                               values=filters_dict['values'])
    project = common.get_current_project()
    form = Form(project_id=project.id, draw_ranges=True)
    data_type_gid_attr = DataTypeGidAttr(linked_datatype=REGISTRY.get_datatype_for_index(index_class))
    # a "None" option is allowed exactly when the field is not required
    data_type_gid_attr.required = not string2bool(has_none_option)
    select_field = TraitDataTypeSelectField(data_type_gid_attr, form, conditions=filter_chain,
                                            has_all_option=string2bool(has_all_option))
    return {'options': select_field.options()}
def test_switch_online_help(self):
    """ Check that switchOnlineHelp stores the toggled flag in UserPreferences. """
    self._expect_redirect('/user/profile', self.user_c.switchOnlineHelp)
    stored_flag = self.test_user.preferences[UserPreferences.ONLINE_HELP_ACTIVE]
    self.assertFalse(utils.string2bool(stored_flag), "Online help should be switched to False.")
def save_simulator_configuration(self, exclude_ranges, **data):
    """
    Persist simulator settings coming from the UI into the session's burst configuration.

    :param exclude_ranges: boolean-like; when True, range parameters and the
        "_checked" checkbox entries are skipped.
    `data` holds 'simulator_parameters' (a JSON dict of $name$ -> value) and
    optionally 'burstName'. The checkbox companion of parameter $name$ is
    always named $name$_checked.
    """
    skip_ranges = string2bool(str(exclude_ranges))
    burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
    if BURST_NAME in data:
        burst_config.name = data[BURST_NAME]
    simulator_params = json.loads(data['simulator_parameters'])
    for param_name in simulator_params:
        is_range_related = (param_name.endswith("_checked")
                            or param_name in (RANGE_PARAMETER_1, RANGE_PARAMETER_2))
        if skip_ranges and is_range_related:
            continue
        burst_config.update_simulation_parameter(param_name, simulator_params[param_name])
        checkbox_name = param_name + "_checked"
        if checkbox_name in simulator_params:
            burst_config.update_simulation_parameter(param_name, simulator_params[checkbox_name],
                                                     KEY_PARAMETER_CHECKED)
def get_operation_details(self, entity_gid, is_group=False, back_page='burst'):
    """
    Return the template data (HTML overlay) describing the given Operation or OperationGroup.
    """
    if string2bool(str(is_group)):
        # OperationGroup entity: all operations of a group share the same visibility
        template_specification = self._compute_operation_details(entity_gid, True)
        template_specification["nodeType"] = graph_structures.NODE_OPERATION_GROUP_TYPE
    else:
        # plain Operation entity
        template_specification = self._compute_operation_details(entity_gid)
        template_specification["displayRelevantButton"] = True
        template_specification["nodeType"] = graph_structures.NODE_OPERATION_TYPE
    template_specification["backPageIdentifier"] = back_page
    # NOTE(review): the two relevancy CSS classes use different separators
    # ("node-relevant" vs "node_irrelevant") - presumably intentional, matching
    # the stylesheet; confirm before normalizing.
    relevance_class = " node-relevant" if template_specification["isRelevant"] else " node_irrelevant"
    overlay_class = "can-browse editor-node node-type-" + template_specification["nodeType"] + relevance_class
    template_specification = self.fill_overlay_attributes(template_specification, "Details", "Operation",
                                                          "project/details_operation_overlay", overlay_class)
    return FlowController().fill_default_attributes(template_specification)
def save_simulator_configuration(self, exclude_ranges, **data):
    """
    Store the simulator parameters received from the UI on the current BurstConfiguration.

    :param exclude_ranges: boolean-like flag; True means range parameters and
        checkbox ("_checked") entries are left out of the saved configuration.
    `data` contains 'simulator_parameters' (JSON mapping of parameter names to
    values) and optionally 'burstName'; the checkbox next to parameter $name$
    is named $name$_checked.
    """
    exclude_ranges = string2bool(str(exclude_ranges))
    burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
    if BURST_NAME in data:
        burst_config.name = data[BURST_NAME]
    parameters = json.loads(data['simulator_parameters'])
    range_keys = (RANGE_PARAMETER_1, RANGE_PARAMETER_2)
    for name in parameters:
        if exclude_ranges and (name.endswith("_checked") or name in range_keys):
            continue
        burst_config.update_simulation_parameter(name, parameters[name])
        checked_key = name + "_checked"
        if checked_key in parameters:
            burst_config.update_simulation_parameter(name, parameters[checked_key], KEY_PARAMETER_CHECKED)
def _deserialize_value(value):
    """
    Convert a raw value read from an H5 file into its TVB representation.

    Empty numpy byte-strings become None; strings carrying a bool/datetime
    prefix are decoded back into the corresponding Python value; anything
    else is returned unchanged.

    NOTE: this method was a part of TVB 1.0 hdf5storage manager, duplicated
    here so this script stays independent of the current storage manager.
    """
    if value is None:
        return None
    if isinstance(value, numpy.string_):
        value = str(value) if len(value) else None
    if isinstance(value, str):
        if value.startswith(BOOL_VALUE_PREFIX):
            # strip the bool marker and parse the remainder
            return string2bool(value[len(BOOL_VALUE_PREFIX):])
        if value.startswith(DATETIME_VALUE_PREFIX):
            # strip the datetime marker and parse with the storage date format
            return string2date(value[len(DATETIME_VALUE_PREFIX):], date_format=DATE_TIME_FORMAT)
    return value
def getfiltereddatatypes(self, name, parent_div, tree_session_key, filters):
    """
    Given the name from the input tree, the dataType required and a number of filters,
    return the available dataType that satisfy the conditions imposed.
    """
    previous_tree = self.context.get_session_tree_for_key(tree_session_key)
    if previous_tree is None:
        common.set_error_message("Adapter Interface not in session for filtering!")
        raise cherrypy.HTTPRedirect("/tvb?error=True")
    current_node = self._get_node(previous_tree, name)
    if current_node is None:
        raise Exception("Could not find node :" + name)
    datatype = current_node[ABCAdapter.KEY_DATATYPE]
    filters = json.loads(filters)
    availablefilter = json.loads(FilterChain.get_filters_for_type(datatype))
    # 'date' filter inputs arrive as strings and must be converted first
    for position, field in enumerate(filters[FILTER_FIELDS]):
        if field in availablefilter and availablefilter[field][FILTER_TYPE] == 'date':
            try:
                filters[FILTER_VALUES][position] = string2date(filters[FILTER_VALUES][position], False)
            except ValueError:
                raise
    # Work on a deep copy so filters do not "stack up" across repeated calls
    if ABCAdapter.KEY_CONDITION in current_node:
        new_filter = copy.deepcopy(current_node[ABCAdapter.KEY_CONDITION])
    else:
        new_filter = FilterChain()
    new_filter.fields.extend(filters[FILTER_FIELDS])
    new_filter.operations.extend(filters[FILTER_OPERATIONS])
    new_filter.values.extend(filters[FILTER_VALUES])
    # Fetch matching dataTypes from the DB and turn them into select options
    values, total_count = InputTreeManager().populate_option_values_for_dtype(
        common.get_current_project().id, datatype, new_filter, self.context.get_current_step())
    parameters = {ABCAdapter.KEY_NAME: name,
                  ABCAdapter.KEY_FILTERABLE: availablefilter,
                  ABCAdapter.KEY_TYPE: ABCAdapter.TYPE_SELECT,
                  ABCAdapter.KEY_OPTIONS: values,
                  ABCAdapter.KEY_DATATYPE: datatype}
    if total_count > MAXIMUM_DATA_TYPES_DISPLAYED:
        parameters[KEY_WARNING] = WARNING_OVERFLOW
    if ABCAdapter.KEY_REQUIRED in current_node:
        parameters[ABCAdapter.KEY_REQUIRED] = current_node[ABCAdapter.KEY_REQUIRED]
        if len(values) > 0 and string2bool(str(parameters[ABCAdapter.KEY_REQUIRED])):
            parameters[ABCAdapter.KEY_DEFAULT] = str(values[-1][ABCAdapter.KEY_VALUE])
    previous_selected = self.context.get_current_default(name)
    if previous_selected in [str(vv['value']) for vv in values]:
        parameters[ABCAdapter.KEY_DEFAULT] = previous_selected
    template_specification = {"inputRow": parameters, "disabled": False,
                              "parentDivId": parent_div, common.KEY_SESSION_TREE: tree_session_key}
    return self.fill_default_attributes(template_specification)
def getfiltereddatatypes(self, name, parent_div, tree_session_key, filters):
    """
    Given the name from the input tree, the dataType required and a number of filters,
    return the available dataType that satisfy the conditions imposed.
    """
    previous_tree = self.context.get_session_tree_for_key(tree_session_key)
    if previous_tree is None:
        common.set_error_message("Adapter Interface not in session for filtering!")
        raise cherrypy.HTTPRedirect("/tvb?error=True")
    current_node = self._get_node(previous_tree, name)
    if current_node is None:
        raise Exception("Could not find node :" + name)
    datatype = current_node[ABCAdapter.KEY_DATATYPE]
    filters = json.loads(filters)
    availablefilter = json.loads(FilterChain.get_filters_for_type(datatype))
    for idx, field_name in enumerate(filters[FILTER_FIELDS]):
        # date-typed filter inputs need conversion from their string form
        is_date_field = (field_name in availablefilter
                         and availablefilter[field_name][FILTER_TYPE] == 'date')
        if is_date_field:
            try:
                converted = string2date(filters[FILTER_VALUES][idx], False)
                filters[FILTER_VALUES][idx] = converted
            except ValueError:
                raise
    # Deep-copy any existing condition so repeated calls never accumulate filters
    new_filter = (copy.deepcopy(current_node[ABCAdapter.KEY_CONDITION])
                  if ABCAdapter.KEY_CONDITION in current_node else FilterChain())
    new_filter.fields.extend(filters[FILTER_FIELDS])
    new_filter.operations.extend(filters[FILTER_OPERATIONS])
    new_filter.values.extend(filters[FILTER_VALUES])
    # Query the DB for dataTypes satisfying the combined filter
    values, total_count = InputTreeManager().populate_option_values_for_dtype(
        common.get_current_project().id, datatype, new_filter, self.context.get_current_step())
    # Assemble the structure expected by the template
    parameters = {ABCAdapter.KEY_NAME: name, ABCAdapter.KEY_FILTERABLE: availablefilter,
                  ABCAdapter.KEY_TYPE: ABCAdapter.TYPE_SELECT, ABCAdapter.KEY_OPTIONS: values,
                  ABCAdapter.KEY_DATATYPE: datatype}
    if total_count > MAXIMUM_DATA_TYPES_DISPLAYED:
        parameters[KEY_WARNING] = WARNING_OVERFLOW
    if ABCAdapter.KEY_REQUIRED in current_node:
        parameters[ABCAdapter.KEY_REQUIRED] = current_node[ABCAdapter.KEY_REQUIRED]
        if values and string2bool(str(parameters[ABCAdapter.KEY_REQUIRED])):
            parameters[ABCAdapter.KEY_DEFAULT] = str(values[-1][ABCAdapter.KEY_VALUE])
    previous_selected = self.context.get_current_default(name)
    if previous_selected in [str(vv['value']) for vv in values]:
        parameters[ABCAdapter.KEY_DEFAULT] = previous_selected
    template_specification = {"inputRow": parameters,
                              "disabled": False,
                              "parentDivId": parent_div,
                              common.KEY_SESSION_TREE: tree_session_key}
    return self.fill_default_attributes(template_specification)
def get_simple_adapter_interface(self, algorithm_id, parent_div='', is_uploader=False):
    """
    AJAX exposed method: build only the interface of a single adapter,
    to be used when tabs are needed.
    """
    project = common.get_current_project()
    uploader_flag = string2bool(is_uploader)
    template_specification = self.get_adapter_template(project.id, algorithm_id, uploader_flag)
    template_specification[common.KEY_PARENT_DIV] = parent_div
    return self.fill_default_attributes(template_specification)
def test_string2bool(self):
    """ Check the string2bool method for various inputs. """
    # docstring fixed: it previously referenced date2string; the False-case
    # messages previously claimed "Expect True boolean" - corrected below.
    self.assertTrue(string2bool("True"), "Expect True boolean for input 'True'")
    self.assertTrue(string2bool(u"True"), "Expect True boolean for input u'True'")
    self.assertTrue(string2bool("true"), "Expect True boolean for input 'true'")
    self.assertTrue(string2bool(u"true"), "Expect True boolean for input u'true'")
    self.assertFalse(string2bool("False"), "Expect False boolean for input 'False'")
    self.assertFalse(string2bool(u"False"), "Expect False boolean for input u'False'")
    self.assertFalse(string2bool("somethingelse"), "Expect False boolean for input 'somethingelse'")
    self.assertFalse(string2bool(u"somethingelse"), "Expect False boolean for input u'somethingelse'")
def test_string2bool(self):
    """ Check the string2bool method for various inputs. """
    # docstring fixed (was "date2string"); u-prefixes restored so the messages
    # match the actual inputs; False-case messages corrected from "Expect True".
    assert string2bool("True"), "Expect True boolean for input 'True'"
    assert string2bool(u"True"), "Expect True boolean for input u'True'"
    assert string2bool("true"), "Expect True boolean for input 'true'"
    assert string2bool(u"true"), "Expect True boolean for input u'true'"
    assert not string2bool("False"), "Expect False boolean for input 'False'"
    assert not string2bool(u"False"), "Expect False boolean for input u'False'"
    assert not string2bool("somethingelse"), "Expect False boolean for input 'somethingelse'"
    assert not string2bool(u"somethingelse"), "Expect False boolean for input u'somethingelse'"
def is_online_help_active(self):
    """ Return True if this user should see the online help (the default when no preference is stored). """
    if UserPreferences.ONLINE_HELP_ACTIVE not in self.preferences:
        return True
    return utils.string2bool(self.preferences[UserPreferences.ONLINE_HELP_ACTIVE])
def removelink(self, link_data, project_id, is_group):
    """ Delegate the removal of the actual link(s) to the flow service. """
    if string2bool(str(is_group)):
        # drop the link of every DataType in the group, then of the DT_Group entity itself
        for datatype in self.project_service.get_datatype_in_group(link_data):
            self.flow_service.remove_link(datatype.id, project_id)
        self.flow_service.remove_link(int(link_data), project_id)
    else:
        self.flow_service.remove_link(link_data, project_id)
def createlink(self, link_data, project_id, is_group):
    """ Delegate the creation of the actual link to the flow service. """
    if string2bool(str(is_group)):
        # link every DataType of the group, plus the DT_Group entity itself
        group_members = self.project_service.get_datatype_in_group(link_data)
        ids_to_link = [member.id for member in group_members] + [int(link_data)]
        self.flow_service.create_link(ids_to_link, project_id)
    else:
        self.flow_service.create_link([link_data], project_id)
def get_filtered_datatypes(self, dt_module, dt_class, filters, has_all_option, has_none_option):
    """
    Given the name from the input tree, the dataType required and a number of filters,
    return the available dataType that satisfy the conditions imposed.
    """
    index_class = getattr(sys.modules[dt_module], dt_class)()
    filters_dict = json.loads(filters)
    # filter values arrive as strings; coerce boolean literals into real bools
    for position in range(len(filters_dict['fields'])):
        if filters_dict['values'][position] in ['True', 'False']:
            filters_dict['values'][position] = string2bool(filters_dict['values'][position])
    filter_chain = FilterChain(fields=filters_dict['fields'],
                               operations=filters_dict['operations'],
                               values=filters_dict['values'])
    data_type_gid_attr = DataTypeGidAttr(linked_datatype=REGISTRY.get_datatype_for_index(index_class))
    # a "None" option is allowed exactly when the field is not required
    data_type_gid_attr.required = not string2bool(has_none_option)
    select_field = TraitDataTypeSelectField(data_type_gid_attr, conditions=filter_chain,
                                            has_all_option=string2bool(has_all_option))
    project = common.get_current_project()
    self.algorithm_service.fill_selectfield_with_datatypes(select_field, project.id)
    return {'options': select_field.options()}
def set_visibility(self, entity_type, entity_gid, to_de_relevant):
    """
    Set the relevancy/visibility flag on a DataType(Group) / Operation(Group).
    """
    relevant_flag = string2bool(to_de_relevant)
    if entity_type == graph_structures.NODE_OPERATION_TYPE:
        # single operation: not a group
        self.project_service.set_operation_and_group_visibility(entity_gid, relevant_flag, False)
    elif entity_type == graph_structures.NODE_OPERATION_GROUP_TYPE:
        self.project_service.set_operation_and_group_visibility(entity_gid, relevant_flag, True)
    else:
        # any other node type is treated as a DataType
        self.project_service.set_datatype_visibility(entity_gid, relevant_flag)
def test_string2bool(self):
    """ Check the string2bool method for various inputs. """
    # docstring fixed: "Chech the date2string" -> proper description; the
    # False-case messages previously claimed "Expect True boolean".
    self.assertTrue(string2bool("True"), "Expect True boolean for input 'True'")
    self.assertTrue(string2bool(u"True"), "Expect True boolean for input u'True'")
    self.assertTrue(string2bool("true"), "Expect True boolean for input 'true'")
    self.assertTrue(string2bool(u"true"), "Expect True boolean for input u'true'")
    self.assertFalse(string2bool("False"), "Expect False boolean for input 'False'")
    self.assertFalse(string2bool(u"False"), "Expect False boolean for input u'False'")
    self.assertFalse(string2bool("somethingelse"), "Expect False boolean for input 'somethingelse'")
    self.assertFalse(string2bool(u"somethingelse"), "Expect False boolean for input u'somethingelse'")
def _deserialize_value(self, value):
    """
    Transform a value loaded from an H5 file into TVB data: empty numpy
    byte-strings become None, prefixed strings are decoded back into
    bool/datetime values, and anything else is returned unchanged.
    """
    if value is not None:
        if isinstance(value, numpy.string_):
            value = None if len(value) == 0 else str(value)
        if isinstance(value, str):
            bool_prefix = self.BOOL_VALUE_PREFIX
            if value.startswith(bool_prefix):
                # strip the bool marker and parse
                return utils.string2bool(value[len(bool_prefix):])
            date_prefix = self.DATETIME_VALUE_PREFIX
            if value.startswith(date_prefix):
                # strip the datetime marker and parse with the storage date format
                return utils.string2date(value[len(date_prefix):], date_format=self.DATE_TIME_FORMAT)
    return value
def gettemplatefordimensionselect(self, entity_gid=None, select_name="", reset_session='False',
                                  parameters_prefix="dimensions", required_dimension=1,
                                  expected_shape="", operations=""):
    """
    Returns the HTML which contains the selects components which allows the user
    to reduce the dimension of a multi-dimensional array.

    We try to obtain the aggregation_functions from the entity, which is a list
    of lists. For each dimension should be a list with the supported aggregation
    functions. We create a DICT for each of those lists. The key will be the name
    of the function and the value will be its label.

    entity_gid          the GID of the entity for which is displayed the component
    select_name         the name of the parent select. The select in which is
                        displayed the entity with the given GID
    parameters_prefix   a string which will be used for computing the names of the component
    required_dimension  the expected dimension for the resulted array
    expected_shape / operations
                        used for applying conditions on the resulted array
                        e.g.: If the resulted array is a 3D array and we want that the
                        length of the second dimension to be smaller then 512 then the
                        expected_shape and operations should be:
                        ``expected_shape=x,512,x`` and ``operations='x,<,x``
    """
    template_params = {"select_name": "", "data": [], "parameters_prefix": parameters_prefix,
                       "array_shape": "", "required_dimension": required_dimension,
                       "currentDim": "", "required_dim_msg": "", "expected_shape": expected_shape,
                       "operations": operations}
    # if reload => populate the selected values from what is stored in the session
    session_dict = self.context.get_current_default()
    dimensions = {1: [0], 3: [0]}
    selected_agg_functions = {}
    if not string2bool(str(reset_session)) and session_dict is not None:
        starts_with_str = select_name + "_" + parameters_prefix + "_"
        ui_sel_items = dict((k, v) for k, v in session_dict.items() if k.startswith(starts_with_str))
        dimensions, selected_agg_functions, required_dimension, _ = MappedArray().parse_selected_items(ui_sel_items)
    template_params["selected_items"] = dimensions
    template_params["selected_functions"] = selected_agg_functions
    aggregation_functions = []
    default_agg_functions = self.accepted__aggregation_functions()
    # default per-dimension labels, used when the entity defines none
    labels_set = ["Time", "Channel", "Line"]
    if entity_gid is not None:
        actual_entity = ABCAdapter.load_entity_by_gid(entity_gid)
        if hasattr(actual_entity, 'shape'):
            array_shape = actual_entity.shape
            new_shape, current_dim = self._compute_current_dimension(list(array_shape), dimensions,
                                                                     selected_agg_functions)
            if required_dimension is not None and current_dim != int(required_dimension):
                template_params["required_dim_msg"] = "Please select a " + str(required_dimension) + "D array"
            if not current_dim:
                template_params["currentDim"] = "1 element"
            else:
                template_params["currentDim"] = str(current_dim) + "D array"
            template_params["array_shape"] = json.dumps(new_shape)
            if hasattr(actual_entity, 'dimensions_labels') and actual_entity.dimensions_labels is not None:
                labels_set = actual_entity.dimensions_labels
                # make sure there exists labels for each dimension
                while len(labels_set) < len(array_shape):
                    labels_set.append("Undefined")
            if (hasattr(actual_entity, 'aggregation_functions') and actual_entity.aggregation_functions is not None
                    and len(actual_entity.aggregation_functions) == len(array_shape)):
                # will be a list of lists of aggregation functions
                defined_functions = actual_entity.aggregation_functions
                for function in defined_functions:
                    if not len(function):
                        aggregation_functions.append({})
                    else:
                        func_dict = {}
                        for function_key in function:
                            func_dict[function_key] = default_agg_functions[function_key]
                        aggregation_functions.append(func_dict)
            else:
                # no per-dimension restriction: offer every accepted function for each dimension
                for _ in array_shape:
                    aggregation_functions.append(default_agg_functions)
            result = []
            for i, shape in enumerate(array_shape):
                labels = []
                values = []
                for j in xrange(shape):
                    labels.append(labels_set[i] + " " + str(j))
                    values.append(entity_gid + "_" + str(i) + "_" + str(j))
                result.append([labels, values, aggregation_functions[i]])
            template_params["select_name"] = select_name
            template_params["data"] = result
            return template_params
    return template_params
def gettemplatefordimensionselect(self, entity_gid=None, select_name="", reset_session='False',
                                  parameters_prefix="dimensions", required_dimension=1,
                                  expected_shape="", operations=""):
    """
    Returns the HTML which contains the selects components which allows the user
    to reduce the dimension of a multi-dimensional array.

    We try to obtain the aggregation_functions from the entity, which is a list
    of lists. For each dimension should be a list with the supported aggregation
    functions. We create a DICT for each of those lists. The key will be the name
    of the function and the value will be its label.

    entity_gid          the GID of the entity for which is displayed the component
    select_name         the name of the parent select. The select in which is
                        displayed the entity with the given GID
    parameters_prefix   a string which will be used for computing the names of the component
    required_dimension  the expected dimension for the resulted array
    expected_shape / operations
                        used for applying conditions on the resulted array
                        e.g.: If the resulted array is a 3D array and we want that the
                        length of the second dimension to be smaller then 512 then the
                        expected_shape and operations should be:
                        ``expected_shape=x,512,x`` and ``operations='x,<,x``
    """
    template_params = {
        "select_name": "",
        "data": [],
        "parameters_prefix": parameters_prefix,
        "array_shape": "",
        "required_dimension": required_dimension,
        "currentDim": "",
        "required_dim_msg": "",
        "expected_shape": expected_shape,
        "operations": operations
    }
    # if reload => populate the selected values previously stored in the session
    session_dict = self.context.get_current_default()
    dimensions = {1: [0], 3: [0]}
    selected_agg_functions = {}
    if not string2bool(str(reset_session)) and session_dict is not None:
        starts_with_str = select_name + "_" + parameters_prefix + "_"
        ui_sel_items = dict((k, v) for k, v in session_dict.items()
                            if k.startswith(starts_with_str))
        dimensions, selected_agg_functions, required_dimension, _ = MappedArray(
        ).parse_selected_items(ui_sel_items)
    template_params["selected_items"] = dimensions
    template_params["selected_functions"] = selected_agg_functions
    aggregation_functions = []
    default_agg_functions = self.accepted__aggregation_functions()
    # fallback labels for dimensions when the entity does not provide its own
    labels_set = ["Time", "Channel", "Line"]
    if entity_gid is not None:
        actual_entity = ABCAdapter.load_entity_by_gid(entity_gid)
        if hasattr(actual_entity, 'shape'):
            array_shape = actual_entity.shape
            new_shape, current_dim = self._compute_current_dimension(
                list(array_shape), dimensions, selected_agg_functions)
            if required_dimension is not None and current_dim != int(required_dimension):
                template_params["required_dim_msg"] = "Please select a " + str(
                    required_dimension) + "D array"
            if not current_dim:
                template_params["currentDim"] = "1 element"
            else:
                template_params["currentDim"] = str(current_dim) + "D array"
            template_params["array_shape"] = json.dumps(new_shape)
            if hasattr(actual_entity, 'dimensions_labels') and actual_entity.dimensions_labels is not None:
                labels_set = actual_entity.dimensions_labels
                # make sure there exists labels for each dimension
                while len(labels_set) < len(array_shape):
                    labels_set.append("Undefined")
            if (hasattr(actual_entity, 'aggregation_functions')
                    and actual_entity.aggregation_functions is not None
                    and len(actual_entity.aggregation_functions) == len(array_shape)):
                # will be a list of lists of aggregation functions
                defined_functions = actual_entity.aggregation_functions
                for function in defined_functions:
                    if not len(function):
                        aggregation_functions.append({})
                    else:
                        func_dict = {}
                        for function_key in function:
                            func_dict[function_key] = default_agg_functions[function_key]
                        aggregation_functions.append(func_dict)
            else:
                # entity defines no usable restriction: every accepted function applies
                for _ in array_shape:
                    aggregation_functions.append(default_agg_functions)
            result = []
            for i, shape in enumerate(array_shape):
                labels = []
                values = []
                for j in xrange(shape):
                    labels.append(labels_set[i] + " " + str(j))
                    values.append(entity_gid + "_" + str(i) + "_" + str(j))
                result.append([labels, values, aggregation_functions[i]])
            template_params["select_name"] = select_name
            template_params["data"] = result
            return template_params
    return template_params
def from_dict(self, dictionary, dao, user_id=None, project_gid=None):
    """
    Add specific attributes from a input dictionary.

    :param dictionary: serialized operation attributes
    :param dao: data-access object used to resolve users/projects/algorithms
    :param user_id: optional user override; defaults to the System account
    :param project_gid: optional project GID override; defaults to the value
        stored under 'fk_launched_in' in the dictionary
    :returns: self, with the attributes populated
    """
    # If user id was specified try to load it, otherwise use System account
    user = dao.get_system_user() if user_id is None else dao.get_user_by_id(user_id)
    self.fk_launched_by = user.id
    # Find parent Project
    prj_to_load = project_gid if project_gid is not None else dictionary['fk_launched_in']
    parent_project = dao.get_project_by_gid(prj_to_load)
    self.fk_launched_in = parent_project.id
    self.project = parent_project
    # Find parent Algorithm
    source_algorithm = json.loads(dictionary['fk_from_algo'])
    algorithm = dao.get_algorithm_by_module(source_algorithm['module'], source_algorithm['classname'])
    if algorithm:
        self.algorithm = algorithm
        self.fk_from_algo = algorithm.id
    else:
        # The algorithm that produced this operation no longer exists most likely due to
        # exported operation from different version. Fallback to tvb importer.
        LOG.warning("Algorithm group %s was not found in DB. Most likely cause is that archive was exported "
                    "from a different TVB version. Using fallback TVB_Importer as source of "
                    "this operation." % (source_algorithm['module'],))
        algorithm = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE, TVB_IMPORTER_CLASS)
        self.fk_from_algo = algorithm.id
        dictionary['additional_info'] = ("The original parameters for this operation were: \nAdapter: %s "
                                         "\nParameters %s" % (source_algorithm['module'] + '.'
                                                              + source_algorithm['classname'],
                                                              dictionary['parameters']))
    # Find OperationGroup, if any
    if 'fk_operation_group' in dictionary:
        group_dict = json.loads(dictionary['fk_operation_group'])
        op_group = None
        if group_dict:
            op_group = dao.get_operationgroup_by_gid(group_dict['gid'])
            if not op_group:
                # group not yet imported: recreate it from the serialized attributes
                name = group_dict['name']
                ranges = [group_dict['range1'], group_dict['range2'], group_dict['range3']]
                gid = group_dict['gid']
                op_group = OperationGroup(self.fk_launched_in, name, ranges)
                op_group.gid = gid
                op_group = dao.store_entity(op_group)
        # NOTE(review): if group_dict is empty/falsy, op_group stays None and
        # `op_group.id` below would raise - confirm callers never send a
        # present-but-empty 'fk_operation_group' entry
        self.operation_group = op_group
        self.fk_operation_group = op_group.id
    else:
        self.operation_group = None
        self.fk_operation_group = None
    self.parameters = dictionary['parameters']
    self.meta_data = dictionary['meta_data']
    self.create_date = string2date(dictionary['create_date'])
    # start/completion dates are serialized as the literal string "None" when absent
    if dictionary['start_date'] != "None":
        self.start_date = string2date(dictionary['start_date'])
    if dictionary['completion_date'] != "None":
        self.completion_date = string2date(dictionary['completion_date'])
    self.status = self._parse_status(dictionary['status'])
    self.visible = string2bool(dictionary['visible'])
    self.range_values = dictionary['range_values']
    self.user_group = dictionary['user_group']
    self.additional_info = dictionary['additional_info']
    self.gid = dictionary['gid']
    return self
def from_dict(self, dictionary, dao, user_id=None, project_gid=None):
    """
    Add specific attributes from a input dictionary.

    :param dictionary: serialized operation attributes
    :param dao: data-access object for resolving users, projects and algorithms
    :param user_id: optional user override; the System account is used when None
    :param project_gid: optional project GID override; falls back to the
        dictionary's 'fk_launched_in' entry
    :returns: self, with the attributes populated
    """
    # If user id was specified try to load it, otherwise use System account
    user = dao.get_system_user() if user_id is None else dao.get_user_by_id(user_id)
    self.fk_launched_by = user.id
    # Find parent Project
    prj_to_load = project_gid if project_gid is not None else dictionary['fk_launched_in']
    parent_project = dao.get_project_by_gid(prj_to_load)
    self.fk_launched_in = parent_project.id
    self.project = parent_project
    # Find parent Algorithm
    source_algorithm = json.loads(dictionary['fk_from_algo'])
    algorithm = dao.get_algorithm_by_module(source_algorithm['module'],
                                            source_algorithm['classname'])
    if algorithm:
        self.algorithm = algorithm
        self.fk_from_algo = algorithm.id
    else:
        # The algorithm that produced this operation no longer exists most likely due to
        # exported operation from different version. Fallback to tvb importer.
        LOG.warning(
            "Algorithm group %s was not found in DB. Most likely cause is that archive was exported "
            "from a different TVB version. Using fallback TVB_Importer as source of "
            "this operation." % (source_algorithm['module'],))
        algorithm = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE, TVB_IMPORTER_CLASS)
        self.fk_from_algo = algorithm.id
        dictionary['additional_info'] = (
            "The original parameters for this operation were: \nAdapter: %s "
            "\nParameters %s" % (source_algorithm['module'] + '.'
                                 + source_algorithm['classname'], dictionary['parameters']))
    # Find OperationGroup, if any
    if 'fk_operation_group' in dictionary:
        group_dict = json.loads(dictionary['fk_operation_group'])
        op_group = None
        if group_dict:
            op_group = dao.get_operationgroup_by_gid(group_dict['gid'])
            if not op_group:
                # the group was not imported yet: rebuild and persist it
                name = group_dict['name']
                ranges = [
                    group_dict['range1'], group_dict['range2'],
                    group_dict['range3']
                ]
                gid = group_dict['gid']
                op_group = OperationGroup(self.fk_launched_in, name, ranges)
                op_group.gid = gid
                op_group = dao.store_entity(op_group)
        # NOTE(review): a present-but-empty group_dict leaves op_group as None,
        # making the `.id` access below raise - verify against serializers
        self.operation_group = op_group
        self.fk_operation_group = op_group.id
    else:
        self.operation_group = None
        self.fk_operation_group = None
    self.parameters = dictionary['parameters']
    self.meta_data = dictionary['meta_data']
    self.create_date = string2date(dictionary['create_date'])
    # absent dates are serialized as the literal string "None"
    if dictionary['start_date'] != "None":
        self.start_date = string2date(dictionary['start_date'])
    if dictionary['completion_date'] != "None":
        self.completion_date = string2date(dictionary['completion_date'])
    self.status = self._parse_status(dictionary['status'])
    self.visible = string2bool(dictionary['visible'])
    self.range_values = dictionary['range_values']
    self.user_group = dictionary['user_group']
    self.additional_info = dictionary['additional_info']
    self.gid = dictionary['gid']
    return self