def get_filters():
    """Build a filter chain accepting only surfaces whose surface_type is CORTICAL."""
    surface_type_field = FilterChain.datatype + '.surface_type'
    return FilterChain(fields=[surface_type_field], operations=["=="], values=[CORTICAL])
def get_filters():
    """Build a filter chain accepting only datatypes with exactly two ranges."""
    ranges_field = FilterChain.datatype + ".no_of_ranges"
    return FilterChain(fields=[ranges_field], operations=["=="], values=[2])
def pse_filter():
    """Build a filter chain that excludes SimulatorIndex entries from PSE results."""
    type_field = FilterChain.datatype + '.type'
    return FilterChain(fields=[type_field], operations=['!='], values=['SimulatorIndex'])
def get_filters():
    """Build a filter chain accepting only 4-dimensional data arrays."""
    ndim_field = FilterChain.datatype + '.data_ndim'
    return FilterChain(fields=[ndim_field], operations=["=="], values=[4])
def getfiltereddatatypes(self, name, parent_div, tree_session_key, filters):
    # TODO: fix this use-case
    """
    Given the name from the input tree, the dataType required and a number of
    filters, return the available dataType that satisfy the conditions imposed.

    :param name: input-tree node name identifying the field to filter
    :param parent_div: id of the HTML container the result row belongs to
    :param tree_session_key: session key under which the adapter tree was stored
    :param filters: JSON-encoded dict with FILTER_FIELDS / FILTER_OPERATIONS /
        FILTER_VALUES lists
    :raises cherrypy.HTTPRedirect: when no adapter tree is found in session
    :raises Exception: when *name* does not match a node in the session tree
    """
    previous_tree = self.context.get_session_tree_for_key(tree_session_key)
    if previous_tree is None:
        common.set_error_message("Adapter Interface not in session for filtering!")
        raise cherrypy.HTTPRedirect("/tvb?error=True")
    current_node = self._get_node(previous_tree, name)
    if current_node is None:
        raise Exception("Could not find node :" + name)
    datatype = current_node[ABCAdapter.KEY_DATATYPE]

    filters = json.loads(filters)
    availablefilter = json.loads(FilterChain.get_filters_for_type(datatype))
    for i, filter_ in enumerate(filters[FILTER_FIELDS]):
        # Filter inputs of type 'date' arrive as strings and must be converted.
        # A ValueError from string2date propagates to the caller.
        if filter_ in availablefilter and availablefilter[filter_][FILTER_TYPE] == 'date':
            filters[FILTER_VALUES][i] = string2date(filters[FILTER_VALUES][i], False)

    # In order for the filter object not to "stack up" on multiple calls to
    # this method, create a deepCopy to work with.
    if constants.ELEM_CONDITIONS in current_node:
        new_filter = copy.deepcopy(current_node[constants.ELEM_CONDITIONS])
    else:
        new_filter = FilterChain()
    new_filter.fields.extend(filters[FILTER_FIELDS])
    new_filter.operations.extend(filters[FILTER_OPERATIONS])
    new_filter.values.extend(filters[FILTER_VALUES])
    # NOTE(review): new_filter is built but never applied below — the DB query
    # that should populate `values`/`total_count` is missing (see TODO above).

    # Get dataTypes that match the filters from DB then populate with values
    values, total_count = [], 0
    # Create a dictionary that matches what the template expects
    parameters = {ABCAdapter.KEY_NAME: name,
                  ABCAdapter.KEY_FILTERABLE: availablefilter,
                  ABCAdapter.KEY_TYPE: constants.TYPE_SELECT,
                  ABCAdapter.KEY_OPTIONS: values,
                  ABCAdapter.KEY_DATATYPE: datatype}

    if total_count > MAXIMUM_DATA_TYPES_DISPLAYED:
        parameters[KEY_WARNING] = WARNING_OVERFLOW

    if constants.ATT_REQUIRED in current_node:
        parameters[constants.ATT_REQUIRED] = current_node[constants.ATT_REQUIRED]
        if len(values) > 0 and string2bool(str(parameters[constants.ATT_REQUIRED])):
            # Defaulting to the last fetched value for required fields.
            parameters[ABCAdapter.KEY_DEFAULT] = str(values[-1][ABCAdapter.KEY_VALUE])
    previous_selected = self.context.get_current_default(name)
    if previous_selected in [str(vv['value']) for vv in values]:
        parameters[ABCAdapter.KEY_DEFAULT] = previous_selected

    template_specification = {"inputRow": parameters,
                              "disabled": False,
                              "parentDivId": parent_div,
                              common.KEY_SESSION_TREE: tree_session_key}
    return self.fill_default_attributes(template_specification)
def get_filters():
    """Build a filter chain accepting only undirected connectivities."""
    undirected_field = FilterChain.datatype + '.undirected'
    return FilterChain(fields=[undirected_field], operations=["=="], values=['1'])
def get_filters():
    """Build a filter chain for datatype groups with 1 or 2 ranges and a
    result count within the supported plotting limit."""
    ranges_field = FilterChain.datatype + ".no_of_ranges"
    count_field = FilterChain.datatype + ".count_results"
    return FilterChain(fields=[ranges_field, ranges_field, count_field],
                       operations=["<=", ">=", "<="],
                       values=[2, 1, MAX_NUMBER_OF_POINT_TO_SUPPORT])
def get_dynamic_filters(self):
    """Return the available filters for this adapter's datatype index."""
    chain = FilterChain()
    return chain.get_filters_for_type(self.datatype_index)
def prepare_parameters(datatype_group_gid, back_page, color_metric=None, size_metric=None):
    """
    We suppose that there are max 2 ranges and from each operation results
    exactly one dataType.

    :param datatype_group_gid: the group id for the `DataType` to be visualised
    :param back_page: Page where back button will direct
    :param color_metric: String referring to metric to apply on colors
    :param size_metric: String referring to metric to apply on sizes

    :returns: `ContextDiscretePSE`
    :raises Exception: when `datatype_group_id` is invalid (not in database)
    """
    datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)
    if datatype_group is None:
        raise Exception("Selected DataTypeGroup is no longer present in the database. "
                        "It might have been removed or the specified id is not the correct one.")

    operation_group = dao.get_operationgroup_by_id(datatype_group.fk_operation_group)
    name1, values1, labels1, only_numbers1 = DiscretePSEAdapter.prepare_range_labels(
        operation_group, operation_group.range1)
    name2, values2, labels2, only_numbers2 = DiscretePSEAdapter.prepare_range_labels(
        operation_group, operation_group.range2)

    pse_context = ContextDiscretePSE(datatype_group_gid, color_metric, size_metric, back_page)
    pse_context.setRanges(name1, values1, labels1, name2, values2, labels2,
                          only_numbers1 and only_numbers2)
    final_dict = {}
    operations = dao.get_operations_in_group(operation_group.id)

    # Positional fallbacks for non-numeric range values, used as axis coordinates.
    fake_numbers1 = dict(list(zip(values1, list(range(len(list(values1)))))))
    fake_numbers2 = dict(list(zip(values2, list(range(len(list(values2)))))))

    for operation_ in operations:
        if not operation_.has_finished:
            pse_context.has_started_ops = True
        # SECURITY NOTE(review): eval() on a DB-stored string — assumed to be
        # trusted, framework-written content; ast.literal_eval would be safer.
        range_values = eval(operation_.range_values)
        key_1 = DiscretePSEAdapter.get_value_on_axe(range_values, only_numbers1, name1, fake_numbers1)
        key_2 = DiscretePSEAdapter.get_value_on_axe(range_values, only_numbers2, name2, fake_numbers2)

        datatype = None
        if operation_.status == STATUS_FINISHED:
            pse_filter = FilterChain(fields=[FilterChain.datatype + '.type'],
                                     operations=['!='], values=['SimulatorIndex'])
            datatypes = dao.get_results_for_operation(operation_.id, pse_filter)
            if len(datatypes) > 0:
                datatype = datatypes[0]
                if datatype.type == "DatatypeMeasureIndex":
                    # Load proper entity class from DB.
                    measures = dao.get_generic_entity(DatatypeMeasureIndex, datatype.gid)
                else:
                    measures = dao.get_generic_entity(DatatypeMeasureIndex, datatype.gid, 'source_gid')
                pse_context.prepare_metrics_datatype(measures, datatype)

        if key_1 not in final_dict:
            final_dict[key_1] = {}
        final_dict[key_1][key_2] = pse_context.build_node_info(operation_, datatype)

    pse_context.fill_object(final_dict)
    # datatypes_dict is not actually used in the drawing of the PSE and actually
    # causes problems in case of NaN values, so just remove it before creating the json
    pse_context.datatypes_dict = {}
    if not only_numbers1:
        pse_context.values_x = list(range(len(list(values1))))
    if not only_numbers2:
        pse_context.values_y = list(range(len(list(values2))))
    return pse_context
def get_filters():
    """Build a filter chain for 1-dimensional data with a volume mapping."""
    ndim_field = FilterChain.datatype + '.ndim'
    mapping_field = FilterChain.datatype + '.has_volume_mapping'
    return FilterChain(fields=[ndim_field, mapping_field],
                       operations=["==", "=="],
                       values=[1, True])
def get_filters():
    """Build a filter chain for volume-mapped data of at least 2 dimensions,
    excluding RegionVolumeMapping and StructuralMRI subtypes."""
    ndim_field = FilterChain.datatype + '.ndim'
    mapping_field = FilterChain.datatype + '.has_volume_mapping'
    subtype_field = FilterChain.datatype + '.subtype'
    return FilterChain(fields=[ndim_field, mapping_field, subtype_field],
                       operations=[">=", "==", "not in"],
                       values=[2, True, ["RegionVolumeMapping", "StructuralMRI"]])
def get_filters():
    """Build a filter chain accepting only cortical surfaces."""
    surface_type_field = FilterChain.datatype + '.surface_type'
    return FilterChain(fields=[surface_type_field],
                       operations=["=="],
                       values=[SurfaceTypesEnum.CORTICAL_SURFACE.value])