def test_happy_flow_import(self):
    """
    Test that importing a CFF generates at least one DataType in DB.
    """
    # Import a reference connectivity for subject A.
    zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip')
    TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, subject=TEST_SUBJECT_A)

    subject_field = FilterChain.datatype + '.subject'
    subject_a_filter = FilterChain('', [subject_field], [TEST_SUBJECT_A], ['=='])
    reference_connectivity_index = TestFactory.get_entity(self.test_project, ConnectivityIndex, subject_a_filter)

    # The CSV import for subject B must add exactly one new ConnectivityIndex.
    count_before = TestFactory.get_entity_count(self.test_project, ConnectivityIndex())
    self._import_csv_test_connectivity(reference_connectivity_index.gid, TEST_SUBJECT_B)
    count_after = TestFactory.get_entity_count(self.test_project, ConnectivityIndex())
    assert count_before + 1 == count_after

    subject_b_filter = FilterChain('', [subject_field], [TEST_SUBJECT_B], ['like'])
    imported_connectivity_index = TestFactory.get_entity(self.test_project, ConnectivityIndex, subject_b_filter)

    # check relationship between the imported connectivity and the reference
    assert reference_connectivity_index.number_of_regions == imported_connectivity_index.number_of_regions
    assert reference_connectivity_index.number_of_connections != imported_connectivity_index.number_of_connections

    reference_connectivity = h5.load_from_index(reference_connectivity_index)
    imported_connectivity = h5.load_from_index(imported_connectivity_index)

    # Weights and tract lengths differ, while geometry and labels are shared.
    assert not (reference_connectivity.weights == imported_connectivity.weights).all()
    assert not (reference_connectivity.tract_lengths == imported_connectivity.tract_lengths).all()
    assert (reference_connectivity.centres == imported_connectivity.centres).all()
    assert (reference_connectivity.orientations == imported_connectivity.orientations).all()
    assert (reference_connectivity.region_labels == imported_connectivity.region_labels).all()
def __init__(self):
    """Build the fields of the connectivity viewer form."""
    super(ConnectivityViewerForm, self).__init__()
    self.connectivity = TraitDataTypeSelectField(ConnectivityViewerModel.connectivity, name='input_data',
                                                 conditions=self.get_filters())
    # Only cortical surfaces make sense as a background for the connectivity display.
    surface_conditions = FilterChain(fields=[FilterChain.datatype + '.surface_type'],
                                     operations=["=="], values=['Cortical Surface'])
    self.surface_data = TraitDataTypeSelectField(ConnectivityViewerModel.surface_data, name='surface_data',
                                                 conditions=surface_conditions)
    self.step = FloatField(ConnectivityViewerModel.step, name='step')
    # Color and ray measures must both be one-dimensional arrays.
    colors_conditions = FilterChain(fields=[FilterChain.datatype + '.ndim'], operations=["=="], values=[1])
    self.colors = TraitDataTypeSelectField(ConnectivityViewerModel.colors, name='colors',
                                           conditions=colors_conditions)
    rays_conditions = FilterChain(fields=[FilterChain.datatype + '.ndim'], operations=["=="], values=[1])
    self.rays = TraitDataTypeSelectField(ConnectivityViewerModel.rays, name='rays',
                                         conditions=rays_conditions)
def __init__(self, session_stored_simulator=None, prefix='', project_id=None):
    """Build the iEEG monitor form: projection and sensors selectors plus sigma."""
    super(iEEGMonitorForm, self).__init__(session_stored_simulator, prefix, project_id)
    # Restrict the selectable datatypes to internal sensors and SEEG projections.
    sensor_filter = FilterChain(fields=[FilterChain.datatype + '.sensors_type'],
                                operations=["=="], values=[SensorTypes.TYPE_INTERNAL.value])
    projection_filter = FilterChain(fields=[FilterChain.datatype + '.projection_type'],
                                    operations=["=="], values=[ProjectionsType.SEEG.value])
    self.projection = TraitDataTypeSelectField(iEEGViewModel.projection, self, name='projection',
                                               conditions=projection_filter)
    self.sigma = ScalarField(iEEG.sigma, self)
    self.sensors = TraitDataTypeSelectField(iEEGViewModel.sensors, self, name='sensors',
                                            conditions=sensor_filter)
def __init__(self, variables_of_interest_indexes, prefix='', project_id=None):
    """Build the iEEG monitor form: projection and sensors selectors plus sigma."""
    super(iEEGMonitorForm, self).__init__(variables_of_interest_indexes, prefix, project_id)
    # Restrict the selectable datatypes to SEEG sensors and SEEG projections.
    sensor_filter = FilterChain(fields=[FilterChain.datatype + '.sensors_type'],
                                operations=["=="], values=[SEEG_S])
    projection_filter = FilterChain(fields=[FilterChain.datatype + '.projection_type'],
                                    operations=["=="], values=[SEEG_P])
    self.projection = DataTypeSelectField(ProjectionMatrixIndex, self, name='projection', required=True,
                                          label=iEEG.projection.label, doc=iEEG.projection.doc,
                                          conditions=projection_filter)
    self.sigma = ScalarField(iEEG.sigma, self)
    self.sensors = DataTypeSelectField(SensorsIndex, self, name='sensors', required=True,
                                       label=iEEG.sensors.label, doc=iEEG.sensors.doc,
                                       conditions=sensor_filter)
def __init__(self, session_stored_simulator=None, is_period_disabled=False):
    """Build the EEG monitor form: projection/sensors selectors, reference and sigma."""
    super(EEGMonitorForm, self).__init__(session_stored_simulator, is_period_disabled)
    # Restrict the selectable datatypes to EEG sensors and EEG projections.
    sensor_filter = FilterChain(fields=[FilterChain.datatype + '.sensors_type'],
                                operations=["=="], values=[SensorTypesEnum.TYPE_EEG.value])
    projection_filter = FilterChain(fields=[FilterChain.datatype + '.projection_type'],
                                    operations=["=="], values=[ProjectionsTypeEnum.EEG.value])
    self.projection = TraitDataTypeSelectField(EEGViewModel.projection, name='projection',
                                               conditions=projection_filter)
    self.reference = StrField(EEG.reference)
    self.sensors = TraitDataTypeSelectField(EEGViewModel.sensors, name='sensors',
                                            conditions=sensor_filter)
    self.sigma = FloatField(EEG.sigma)
def test_filter_sql_equivalent(self):
    """
    Test applying a filter on DB.
    """
    # Seed three value wrappers; two of them share the name 'name_1'.
    for wrapper_name, wrapper_value in (("name_1", "value_1"),
                                        ("name_2", "value_2"),
                                        ("name_1", "value_3")):
        wrapper = ValueWrapperIndex()
        wrapper.data_name = wrapper_name
        wrapper.data_value = wrapper_value
        dao.store_entity(wrapper)

    name_field = FilterChain.datatype + '.data_name'
    value_field = FilterChain.datatype + '.data_value'
    test_filter_1 = FilterChain(fields=[name_field], operations=['=='], values=['name_1'])
    test_filter_2 = FilterChain(fields=[name_field], operations=['=='], values=['name_22'])
    test_filter_3 = FilterChain(fields=[name_field, value_field], operations=['==', 'in'],
                                values=["name_1", ['value_1', 'value_3']])
    test_filter_4 = FilterChain(fields=[name_field, value_field], operations=['==', 'in'],
                                values=["name_1", ['value_1', 'value_2']])

    all_stored_dts = self.count_all_entities(ValueWrapperIndex)
    assert 3 == all_stored_dts

    # Expected match counts for each filter against the seeded rows.
    self._evaluate_db_filter(test_filter_1, 2)
    self._evaluate_db_filter(test_filter_2, 0)
    self._evaluate_db_filter(test_filter_3, 2)
    self._evaluate_db_filter(test_filter_4, 1)
def __init__(self, variables_of_interest_indexes=None, prefix='', project_id=None):
    """Build the iEEG monitor form: projection and sensors selectors plus sigma.

    BUG FIX: the default for `variables_of_interest_indexes` was a mutable `{}`,
    which is shared across all calls and can leak state between form instances.
    Use None as the sentinel and create a fresh dict per call instead.
    """
    if variables_of_interest_indexes is None:
        variables_of_interest_indexes = {}
    super(iEEGMonitorForm, self).__init__(variables_of_interest_indexes, prefix, project_id)
    # Restrict the selectable datatypes to SEEG sensors and SEEG projections.
    sensor_filter = FilterChain(fields=[FilterChain.datatype + '.sensors_type'],
                                operations=["=="], values=[SEEG_S])
    projection_filter = FilterChain(fields=[FilterChain.datatype + '.projection_type'],
                                    operations=["=="], values=[SEEG_P])
    self.projection = TraitDataTypeSelectField(iEEGViewModel.projection, self, name='projection',
                                               conditions=projection_filter)
    self.sigma = ScalarField(iEEG.sigma, self)
    self.sensors = TraitDataTypeSelectField(iEEGViewModel.sensors, self, name='sensors',
                                            conditions=sensor_filter)
def test_filter_addition(self):
    """
    test addition in filter chain
    """
    filter1 = FilterChain(fields=[FilterChain.datatype + '.attribute_1'],
                          operations=["=="], values=['test_val'])
    filter2 = FilterChain(fields=[FilterChain.datatype + '.attribute_2'],
                          operations=['in'], values=[['test_val2', 1]])
    combined_filter = filter1 + filter2

    # The combined filter requires attribute_1 == 'test_val' AND attribute_2 in ['test_val2', 1].
    should_pass(combined_filter,
                TestFiltering.DummyFilterClass(attribute_1='test_val', attribute_2=1))
    should_pass(combined_filter,
                TestFiltering.DummyFilterClass(attribute_1='test_val', attribute_2='test_val2'))
    should_fail(combined_filter,
                TestFiltering.DummyFilterClass(attribute_1='test_val', attribute_2=2))
    should_fail(combined_filter,
                TestFiltering.DummyFilterClass(attribute_1='test_val1', attribute_2=1))
def __init__(self, project_id=None, surface_index=None, connectivity_gid=None):
    """Build region-mapping / local-connectivity selectors, optionally filtered by surface."""
    super(SimulatorRMFragment, self).__init__(project_id)
    rm_conditions = None
    lc_conditions = None
    if surface_index:
        surface_field = FilterChain.datatype + '.fk_surface_gid'
        # Region mapping must match both the chosen surface and connectivity.
        rm_conditions = FilterChain(
            fields=[surface_field, FilterChain.datatype + '.fk_connectivity_gid'],
            operations=["==", "=="],
            values=[str(surface_index.gid), str(connectivity_gid.hex)])
        # Local connectivity is constrained by the surface only.
        lc_conditions = FilterChain(fields=[surface_field], operations=["=="],
                                    values=[rm_conditions.values[0]])
    self.rm = TraitDataTypeSelectField(CortexViewModel.region_mapping_data, self.project_id,
                                       name='region_mapping', conditions=rm_conditions)
    self.lc = TraitDataTypeSelectField(CortexViewModel.local_connectivity, self.project_id,
                                       name='local_connectivity', conditions=lc_conditions)
    self.coupling_strength = ArrayField(CortexViewModel.coupling_strength, self.project_id)
def test_invalid_filter(self):
    """
    Error test-case when evaluating filter in Python.
    """
    # An 'in' operation with a None right-hand side is an invalid filter entity.
    invalid_filter = FilterChain(fields=[FilterChain.datatype + '.attribute_1'],
                                 operations=["in"], values=[None])
    entity = TestFiltering.DummyFilterClass(attribute_1=['test_val', 'test2'])
    with pytest.raises(InvalidFilterEntity):
        invalid_filter.get_python_filter_equivalent(entity)
def test_invalid_input(self):
    """
    Error test-case.
    """
    # The filter references 'other_attribute_1', which the input object lacks.
    mismatched_filter = FilterChain(fields=[FilterChain.datatype + '.other_attribute_1'],
                                    operations=["in"], values=['test'])
    entity = TestFiltering.DummyFilterClass(attribute_1=['test_val', 'test2'])
    with pytest.raises(InvalidFilterChainInput):
        mismatched_filter.get_python_filter_equivalent(entity)
def getfiltereddatatypes(self, name, parent_div, tree_session_key, filters):
    # TODO: fix this use-case
    """
    Given the name from the input tree, the dataType required and a number of
    filters, return the available dataType that satisfy the conditions imposed.
    """
    previous_tree = self.context.get_session_tree_for_key(tree_session_key)
    if previous_tree is None:
        common.set_error_message("Adapter Interface not in session for filtering!")
        raise cherrypy.HTTPRedirect("/tvb?error=True")
    current_node = self._get_node(previous_tree, name)
    if current_node is None:
        raise Exception("Could not find node :" + name)
    datatype = current_node[ABCAdapter.KEY_DATATYPE]

    filters = json.loads(filters)
    availablefilter = json.loads(FilterChain.get_filters_for_type(datatype))
    for i, filter_ in enumerate(filters[FILTER_FIELDS]):
        # Check for filter input of type 'date' as these need to be converted
        if filter_ in availablefilter and availablefilter[filter_][FILTER_TYPE] == 'date':
            try:
                temp_date = string2date(filters[FILTER_VALUES][i], False)
                filters[FILTER_VALUES][i] = temp_date
            except ValueError:
                raise

    # In order for the filter object not to "stack up" on multiple calls to
    # this method, create a deepCopy to work with
    if constants.ELEM_CONDITIONS in current_node:
        new_filter = copy.deepcopy(current_node[constants.ELEM_CONDITIONS])
    else:
        new_filter = FilterChain()
    new_filter.fields.extend(filters[FILTER_FIELDS])
    new_filter.operations.extend(filters[FILTER_OPERATIONS])
    new_filter.values.extend(filters[FILTER_VALUES])

    # Get dataTypes that match the filters from DB then populate with values
    values, total_count = [], 0
    # Create a dictionary that matches what the template expects
    parameters = {ABCAdapter.KEY_NAME: name,
                  ABCAdapter.KEY_FILTERABLE: availablefilter,
                  ABCAdapter.KEY_TYPE: constants.TYPE_SELECT,
                  ABCAdapter.KEY_OPTIONS: values,
                  ABCAdapter.KEY_DATATYPE: datatype}
    if total_count > MAXIMUM_DATA_TYPES_DISPLAYED:
        parameters[KEY_WARNING] = WARNING_OVERFLOW
    if constants.ATT_REQUIRED in current_node:
        parameters[constants.ATT_REQUIRED] = current_node[constants.ATT_REQUIRED]
        # A required field defaults to the last available value.
        if len(values) > 0 and string2bool(str(parameters[constants.ATT_REQUIRED])):
            parameters[ABCAdapter.KEY_DEFAULT] = str(values[-1][ABCAdapter.KEY_VALUE])
    # A previously selected value (still present in the options) wins over the default.
    previous_selected = self.context.get_current_default(name)
    if previous_selected in [str(vv['value']) for vv in values]:
        parameters[ABCAdapter.KEY_DEFAULT] = previous_selected

    template_specification = {"inputRow": parameters, "disabled": False,
                              "parentDivId": parent_div,
                              common.KEY_SESSION_TREE: tree_session_key}
    return self.fill_default_attributes(template_specification)
def test_get_filtered_by_column(self):
    """
    Test the filter function when retrieving dataTypes with a filter
    after a column from a class specific table (e.g. DATA_arraywrapper).

    BUG FIX: the count assertions were written as `assert count, 3 == "msg"`,
    which only checks the truthiness of `count` and silently uses the
    always-False comparison `3 == "msg"` as the assert message. Rewritten
    as `assert count == 3, "msg"` (same for the other occurrences).
    """
    operation_1 = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
    operation_2 = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)

    one_dim_array = numpy.arange(5)
    two_dim_array = numpy.array([[1, 2], [2, 3], [1, 4]])
    self._store_float_array(one_dim_array, "John Doe 1", operation_1.id)
    self._store_float_array(one_dim_array, "John Doe 2", operation_1.id)
    self._store_float_array(two_dim_array, "John Doe 3", operation_2.id)

    count = self.flow_service.get_available_datatypes(self.test_project.id,
                                                      "tvb.datatypes.arrays.MappedArray")[1]
    assert count == 3, "Problems with inserting data"

    first_filter = FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                               operations=["=="], values=[1])
    count = self.flow_service.get_available_datatypes(self.test_project.id,
                                                      "tvb.datatypes.arrays.MappedArray", first_filter)[1]
    assert count == 2, "Data was not filtered"

    second_filter = FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                operations=["=="], values=[2])
    filtered_data = self.flow_service.get_available_datatypes(self.test_project.id,
                                                              "tvb.datatypes.arrays.MappedArray",
                                                              second_filter)[0]
    assert len(filtered_data) == 1, "Data was not filtered"
    assert filtered_data[0][3] == "John Doe 3"

    third_filter = FilterChain(fields=[FilterChain.datatype + '._length_1d'],
                               operations=["=="], values=[3])
    filtered_data = self.flow_service.get_available_datatypes(self.test_project.id,
                                                              "tvb.datatypes.arrays.MappedArray",
                                                              third_filter)[0]
    assert len(filtered_data) == 1, "Data was not filtered correct"
    assert filtered_data[0][3] == "John Doe 3"

    # Best-effort cleanup of files the export produced; failures are ignored.
    for file_name in ('One_dim.txt', 'Two_dim.txt', 'One_dim-1.txt'):
        try:
            if os.path.exists(file_name):
                os.remove(file_name)
        except OSError:
            pass
def test_filter_add_condition(self):
    """
    Test that adding a condition to a filter is working.
    """
    test_filter = FilterChain(fields=[FilterChain.datatype + '.attribute_1'],
                              operations=["=="], values=['test_val'])
    entity = TestFiltering.DummyFilterClass(attribute_1='test_val', attribute_2=1)
    should_pass(test_filter, entity)
    # The appended condition (attribute_2 == 2) no longer matches the entity.
    test_filter.add_condition(FilterChain.datatype + '.attribute_2', '==', 2)
    should_fail(test_filter, entity)
def __init__(self, session_stored_simulator=None):
    """Build the MEG monitor form: projection and sensors selectors."""
    super(MEGMonitorForm, self).__init__(session_stored_simulator)
    # Restrict the selectable datatypes to MEG sensors and MEG projections.
    sensors_conditions = FilterChain(fields=[FilterChain.datatype + '.sensors_type'],
                                     operations=["=="], values=[SensorTypes.TYPE_MEG.value])
    projection_conditions = FilterChain(fields=[FilterChain.datatype + '.projection_type'],
                                        operations=["=="], values=[ProjectionsType.MEG.value])
    self.projection = TraitDataTypeSelectField(MEGViewModel.projection, name='projection',
                                               conditions=projection_conditions)
    self.sensors = TraitDataTypeSelectField(MEGViewModel.sensors, name='sensors',
                                            conditions=sensors_conditions)
def _build_custom_filter(filter_data):
    """
    Param filter_data should be at this point a dictionary of the form:
    {'type' : 'filter_type', 'value' : 'filter_value'}
    If 'filter_type' is not handled just return None.
    """
    filter_data = json.loads(filter_data)
    filter_type = filter_data['type']
    if filter_type == 'from_burst':
        return FilterChain('Burst', ['BurstConfiguration.id'],
                           [filter_data['value']], operations=["=="])
    if filter_type == 'from_datatype':
        return FilterChain('Datatypes', [FilterChain.operation + '.parameters'],
                           [filter_data['value']], operations=["like"])
    # Unhandled filter type.
    return None
def get_launchable_algorithms_for_datatype(self, datatype, categories):
    """
    Collect the adapters from `categories` that can operate on `datatype`,
    keeping only those whose declared datatype filter accepts it.
    Returns a (datatype, filtered_adapters, has_operations_warning) tuple.
    """
    # Gather the class names of the datatype and all its DataType ancestors.
    data_class = datatype.__class__
    all_compatible_classes = [data_class.__name__]
    for ancestor in getmro(data_class):
        if issubclass(ancestor, DataType) and ancestor.__name__ not in all_compatible_classes:
            all_compatible_classes.append(ancestor.__name__)

    self.logger.debug("Searching in categories: " + str(categories) +
                      " for classes " + str(all_compatible_classes))
    categories_ids = [category.id for category in categories]
    launchable_adapters = dao.get_applicable_adapters(all_compatible_classes, categories_ids)

    filtered_adapters = []
    has_operations_warning = False
    for stored_adapter in launchable_adapters:
        filter_chain = FilterChain.from_json(stored_adapter.datatype_filter)
        try:
            # Keep adapters with no filter, or whose filter accepts the datatype.
            if not filter_chain or filter_chain.get_python_filter_equivalent(datatype):
                filtered_adapters.append(stored_adapter)
        except (TypeError, InvalidFilterChainInput):
            self.logger.exception("Could not evaluate filter on " + str(stored_adapter))
            has_operations_warning = True

    return datatype, filtered_adapters, has_operations_warning
def test_remove_used_surface(self):
    """
    Tries to remove an used surface.

    Fixes: typo in an assertion message ("Costical" -> "Cortical") and the
    local variable `filter` shadowing the builtin of the same name.
    """
    surface_filter = FilterChain(fields=[FilterChain.datatype + '.surface_type'],
                                 operations=["=="], values=[CORTICAL])
    mapping = try_get_last_datatype(self.test_project.id, RegionMappingIndex)
    surface = try_get_last_datatype(self.test_project.id, SurfaceIndex, surface_filter)
    assert mapping is not None, "There should be one Mapping."
    assert surface is not None, "There should be one Cortical Surface."
    assert surface.gid == mapping.fk_surface_gid, "The surfaces should have the same GID"

    try:
        self.project_service.remove_datatype(self.test_project.id, surface.gid)
        raise AssertionError("The surface should still be used by a RegionMapping " + str(surface.gid))
    except RemoveDataTypeException:
        # Expected: the surface is still referenced by the region mapping.
        pass

    # The surface must survive the failed delete.
    res = dao.get_datatype_by_gid(surface.gid)
    assert surface.id == res.id, "A used surface was deleted"
def get_filtered_datatypes(self, dt_module, dt_class, filters, has_all_option, has_none_option):
    """
    Given the name from the input tree, the dataType required and a number of
    filters, return the available dataType that satisfy the conditions imposed.
    """
    index_class = getattr(sys.modules[dt_module], dt_class)()
    filters_dict = json.loads(filters)
    # JSON carries booleans as strings; convert them back before filtering.
    for idx in range(len(filters_dict['fields'])):
        raw_value = filters_dict['values'][idx]
        if raw_value in ['True', 'False']:
            filters_dict['values'][idx] = string2bool(raw_value)
    filter_chain = FilterChain(fields=filters_dict['fields'],
                               operations=filters_dict['operations'],
                               values=filters_dict['values'])

    project = common.get_current_project()
    data_type_gid_attr = DataTypeGidAttr(linked_datatype=REGISTRY.get_datatype_for_index(index_class))
    data_type_gid_attr.required = not string2bool(has_none_option)
    select_field = TraitDataTypeSelectField(data_type_gid_attr, conditions=filter_chain,
                                            has_all_option=string2bool(has_all_option))
    self.algorithm_service.fill_selectfield_with_datatypes(select_field, project.id)
    return {'options': select_field.options()}
def get_filtered_datatypes(self, dt_module, dt_class, filters, has_all_option, has_none_option):
    """
    Given the name from the input tree, the dataType required and a number of
    filters, return the available dataType that satisfy the conditions imposed.
    """
    index_class = getattr(sys.modules[dt_module], dt_class)()
    filters_dict = json.loads(filters)
    # Copy the parallel filter lists out of the parsed dict.
    nr_of_conditions = len(filters_dict['fields'])
    fields = [filters_dict['fields'][i] for i in range(nr_of_conditions)]
    operations = [filters_dict['operations'][i] for i in range(nr_of_conditions)]
    values = [filters_dict['values'][i] for i in range(nr_of_conditions)]
    filter_chain = FilterChain(fields=fields, operations=operations, values=values)

    project = common.get_current_project()
    form = Form(project_id=project.id, draw_ranges=True)
    data_type_gid_attr = DataTypeGidAttr(linked_datatype=REGISTRY.get_datatype_for_index(index_class))
    data_type_gid_attr.required = not string2bool(has_none_option)
    select_field = TraitDataTypeSelectField(data_type_gid_attr, form, conditions=filter_chain,
                                            has_all_option=string2bool(has_all_option))
    return {'options': select_field.options()}
def test_complex_filter(self):
    """
    Test a filter with at least 2 conditions
    """
    test_filter = FilterChain(
        fields=[FilterChain.datatype + '.attribute_1', FilterChain.datatype + '.attribute_2'],
        operations=["==", 'in'],
        values=['test_val', ['test_val2', 1]])

    # (attribute_1, attribute_2, expected to match) — both conditions must hold.
    cases = [('test_val', 1, True),
             ('test_val', 1, True),
             ('test_val', 2, False),
             ('test_val1', 1, False)]
    for attr_1, attr_2, matches in cases:
        entity = TestFiltering.DummyFilterClass(attribute_1=attr_1, attribute_2=attr_2)
        if matches:
            should_pass(test_filter, entity)
        else:
            should_fail(test_filter, entity)
def transactional_setup_method(self):
    """
    Reset the database before each test.
    """
    self.test_user = TestFactory.create_user("UserPM")
    self.test_project = TestFactory.create_project(self.test_user)

    sensors_file = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'eeg_brainstorm_65.txt')
    TestFactory.import_sensors(self.test_user, self.test_project, sensors_file,
                               SensorsImporterModel.OPTIONS['EEG Sensors'])

    surface_file = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip')
    TestFactory.import_surface_zip(self.test_user, self.test_project, surface_file, CORTICAL, True)

    # Fetch back the imported cortical surface and sensors for use in tests.
    surface_filter = FilterChain('', [FilterChain.datatype + '.surface_type'], [CORTICAL], ['=='])
    self.surface = TestFactory.get_entity(self.test_project, SurfaceIndex, surface_filter)
    assert self.surface is not None
    self.sensors = TestFactory.get_entity(self.test_project, SensorsIndex)
    assert self.sensors is not None

    self.importer = TestFactory.create_adapter('tvb.adapters.uploaders.projection_matrix_importer',
                                               'ProjectionMatrixSurfaceEEGImporter')
def get_extra_algorithm_filters():
    """Extra per-algorithm filters: KuramotoIndex requires data_length_2d >= 2."""
    kuramoto_filter = FilterChain(fields=[FilterChain.datatype + '.data_length_2d'],
                                  operations=[">="], values=[2])
    return {"KuramotoIndex": kuramoto_filter}
def _get_values_from_db(self):
    """Query the DB for datatype values matching both static and dynamic conditions."""
    combined_conditions = FilterChain() + self.conditions + self.dynamic_conditions
    filtered_datatypes, _count = dao.get_values_of_datatype(self.owner.project_id,
                                                            self.datatype_index,
                                                            combined_conditions)
    return filtered_datatypes
def __init__(self, equation_choices, prefix='', project_id=None):
    """Build the Local Connectivity creator form fields."""
    super(LocalConnectivityCreatorForm, self).__init__(prefix, project_id)
    # Only cortical surfaces are valid targets for a local connectivity.
    cortical_only = FilterChain(fields=[FilterChain.datatype + '.surface_type'],
                                operations=["=="], values=[CORTICAL])
    self.surface = TraitDataTypeSelectField(LocalConnectivityCreatorModel.surface, self,
                                            name=self.get_input_name(), conditions=cortical_only)
    self.spatial = SelectField(LocalConnectivityCreatorModel.equation, self, name='spatial',
                               choices=equation_choices, display_none_choice=False)
    self.spatial_params = FormField(GaussianEquationForm, self, name=self.NAME_EQUATION_PARAMS_DIV,
                                    label='Equation parameters')
    self.cutoff = ScalarField(LocalConnectivityCreatorModel.cutoff, self)
    self.display_name = ScalarField(LocalConnectivityCreatorModel.display_name, self,
                                    name='display_name')
def setup_method(self):
    """
    Sets up the environment for running the tests;
    creates a test user, a test project, a connectivity and a surface;
    imports a CFF data-set
    """
    self.test_user = TestFactory.create_user("UserRM")
    self.test_project = TestFactory.create_project(self.test_user)

    connectivity_zip = os.path.join(os.path.dirname(tvb_data.__file__),
                                    'connectivity', 'connectivity_76.zip')
    self.connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project,
                                                            connectivity_zip, "John")

    cortex_zip = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip')
    TestFactory.import_surface_zip(self.test_user, self.test_project, cortex_zip,
                                   SurfaceTypesEnum.CORTICAL_SURFACE)
    # Fetch back the just-imported cortical surface.
    surface_filter = FilterChain('', [FilterChain.datatype + '.surface_type'],
                                 [SurfaceTypesEnum.CORTICAL_SURFACE.value], ['=='])
    self.surface = TestFactory.get_entity(self.test_project, SurfaceIndex, surface_filter)
def get_filters():
    """Accept only one-dimensional datatypes that have a volume mapping."""
    return FilterChain(
        fields=[FilterChain.datatype + '.ndim',
                FilterChain.datatype + '.has_volume_mapping'],
        operations=["==", "=="],
        values=[1, True])
def get_filters():
    """Accept datatypes with no_of_ranges between 1 and 2 and a bounded result count.

    Note: no_of_ranges appears twice on purpose — once for the upper bound (<= 2)
    and once for the lower bound (>= 1).
    """
    return FilterChain(
        fields=[FilterChain.datatype + ".no_of_ranges",
                FilterChain.datatype + ".no_of_ranges",
                FilterChain.datatype + ".count_results"],
        operations=["<=", ">=", "<="],
        values=[2, 1, MAX_NUMBER_OF_POINT_TO_SUPPORT])
def get_filters():
    """Accept only the time-series types this component supports."""
    supported_types = ['TimeSeriesEEG', 'TimeSeriesSEEG', 'TimeSeriesMEG',
                       'TimeSeriesRegion', 'TimeSeriesSurface']
    return FilterChain(fields=[FilterChain.datatype + '.time_series_type'],
                       operations=["in"], values=[supported_types])
def get_filters():
    """Accept region/surface time series that have a surface mapping."""
    return FilterChain(
        fields=[FilterChain.datatype + '.time_series_type',
                FilterChain.datatype + '.has_surface_mapping'],
        operations=["in", "=="],
        values=[['TimeSeriesRegion', 'TimeSeriesSurface'], True])