def test_filter_sql_equivalent(self):
        """
        Test applying a filter on DB.
        """
        data_type = Datatype1()
        data_type.row1 = "value1"
        data_type.row2 = "value2"
        datatypes_factory.DatatypesFactory()._store_datatype(data_type)
        data_type = Datatype1()
        data_type.row1 = "value3"
        data_type.row2 = "value2"
        datatypes_factory.DatatypesFactory()._store_datatype(data_type)
        data_type = Datatype1()
        data_type.row1 = "value1"
        data_type.row2 = "value3"
        datatypes_factory.DatatypesFactory()._store_datatype(data_type)

        test_filter_1 = FilterChain(fields=[FilterChain.datatype + '._row1'],
                                    operations=['=='], values=['value1'])
        test_filter_2 = FilterChain(fields=[FilterChain.datatype + '._row1'],
                                    operations=['=='], values=['value2'])
        test_filter_3 = FilterChain(fields=[FilterChain.datatype + '._row1', FilterChain.datatype + '._row2'],
                                    operations=['==', 'in'], values=["value1", ['value1', 'value2']])
        test_filter_4 = FilterChain(fields=[FilterChain.datatype + '._row1', FilterChain.datatype + '._row2'],
                                    operations=['==', 'in'], values=["value1", ['value5', 'value6']])
        
        all_stored_dts = self.count_all_entities(Datatype1)
        self.assertEqual(3, all_stored_dts)
        
        self._evaluate_db_filter(test_filter_1, 2)
        self._evaluate_db_filter(test_filter_2, 0)
        self._evaluate_db_filter(test_filter_3, 1)
        self._evaluate_db_filter(test_filter_4, 0)
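Note on construction: fields, operations and values act as parallel lists, so entry i of fields is compared against entry i of values using operations[i], and multiple conditions combine as a logical AND (test_filter_3 above matches only the row satisfying both conditions). A minimal sketch, reusing the names from this test:

# Sketch only: reads as _row1 == 'value1' AND _row2 in ['value1', 'value2'].
sketch_filter = FilterChain(fields=[FilterChain.datatype + '._row1',
                                    FilterChain.datatype + '._row2'],
                            operations=['==', 'in'],
                            values=['value1', ['value1', 'value2']])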
Example #2
 def get_input_tree(self):
     """
     Take as Input a Connectivity Object.
     """
     return [{'name': 'input_data', 'label': 'Connectivity Matrix', 'type': Connectivity,
              'required': True},
             {'name': 'surface_data', 'label': 'Brain Surface', 'type': CorticalSurface,
              'description': 'The Brain Surface is used to give you an idea of the connectivity position relative '
                             'to the full brain cortical surface. This surface will be displayed as a shadow '
                             '(only used in 3D Edges viewer).'},
             {'name': 'colors', 'label': 'Node Colors', 'type': ConnectivityMeasure,
              'conditions': FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                        operations=["=="], values=[1]),
              'description': 'A ConnectivityMeasure DataType that establishes a colormap for the nodes '
                             'displayed in the 2D Connectivity viewers.'},
             {'name': 'step', 'label': 'Color Threshold', 'type': 'float',
              'description': 'All nodes with a value greater than this threshold will be displayed as red discs, '
                             'otherwise they will be yellow. (This applies to 2D Connectivity viewers and the '
                             'threshold will depend on the metric used to set the Node Color)'},
             {'name': 'rays', 'label': 'Shapes Dimensions', 'type': ConnectivityMeasure,
              'conditions': FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                        operations=["=="], values=[1]),
              'description': 'A ConnectivityMeasure datatype used to establish the size of the spheres representing '
                             'each node. (It only applies to 3D Nodes viewer).'}]
Example #3
 def test_filter_addition(self):
     """
     test addition in filter chain
     """
     filter1 = FilterChain(fields=[FilterChain.datatype + '.attribute_1'],
                           operations=["=="],
                           values=['test_val'])
     filter2 = FilterChain(fields=[FilterChain.datatype + '.attribute_2'],
                           operations=['in'],
                           values=[['test_val2', 1]])
     test_filter = filter1 + filter2
     self.__should_pass(
         test_filter,
         TestFiltering.DummyFilterClass(attribute_1='test_val',
                                        attribute_2=1))
     self.__should_pass(
         test_filter,
         TestFiltering.DummyFilterClass(attribute_1='test_val',
                                        attribute_2='test_val2'))
     self.__should_fail(
         test_filter,
         TestFiltering.DummyFilterClass(attribute_1='test_val',
                                        attribute_2=2))
     self.__should_fail(
         test_filter,
         TestFiltering.DummyFilterClass(attribute_1='test_val1',
                                        attribute_2=1))
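The expectations above suggest that adding two chains concatenates their conditions and the result requires all of them to hold. A sketch of the manual equivalent, assuming the fields/operations/values list attributes that Example #16 below manipulates directly:

# Sketch: what filter1 + filter2 appears to build.
combined = FilterChain(fields=filter1.fields + filter2.fields,
                       operations=filter1.operations + filter2.operations,
                       values=filter1.values + filter2.values)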
Example #4
    def test_happy_flow_import(self):
        """
        Test that importing a CFF generates at least one DataType in DB.
        """
        TestConnectivityZip.import_test_connectivity96(self.test_user,
                                                       self.test_project,
                                                       subject=TEST_SUBJECT_A)

        field = FilterChain.datatype + '.subject'
        filters = FilterChain('', [field], [TEST_SUBJECT_A], ['=='])
        reference_connectivity = TestFactory.get_entity(self.test_project, Connectivity(), filters)

        dt_count_before = TestFactory.get_entity_count(self.test_project, Connectivity())

        self._import_csv_test_connectivity(reference_connectivity.gid, TEST_SUBJECT_B)

        dt_count_after = TestFactory.get_entity_count(self.test_project, Connectivity())
        assert dt_count_before + 1 == dt_count_after

        filters = FilterChain('', [field], [TEST_SUBJECT_B], ['like'])
        imported_connectivity = TestFactory.get_entity(self.test_project, Connectivity(), filters)

        # check relationship between the imported connectivity and the reference
        assert (reference_connectivity.centres == imported_connectivity.centres).all()
        assert (reference_connectivity.orientations == imported_connectivity.orientations).all()

        assert reference_connectivity.number_of_regions == imported_connectivity.number_of_regions
        assert (reference_connectivity.region_labels == imported_connectivity.region_labels).all()

        assert not (reference_connectivity.weights == imported_connectivity.weights).all()
        assert not (reference_connectivity.tract_lengths == imported_connectivity.tract_lengths).all()
Example #5
    def get_input_tree(self):
        """
        Take as Input a Connectivity Object.
        """

        filters_ui = [UIFilter(linked_elem_name="colors",
                               linked_elem_field=FilterChain.datatype + "._connectivity"),
                      UIFilter(linked_elem_name="rays",
                               linked_elem_field=FilterChain.datatype + "._connectivity")]

        json_ui_filter = json.dumps([ui_filter.to_dict() for ui_filter in filters_ui])


        return [{'name': 'input_data', 'label': 'Connectivity Matrix', 'type': Connectivity,
                 'required': True, KWARG_FILTERS_UI: json_ui_filter},
                {'name': 'surface_data', 'label': 'Brain Surface', 'type': CorticalSurface,
                 'description': 'The Brain Surface is used to give you an idea of the connectivity position relative '
                                'to the full brain cortical surface.  This surface will be displayed as a shadow '
                                '(only used in 3D Edges tab).'},
                {'name': 'colors', 'label': 'Node Colors', 'type': ConnectivityMeasure,
                 'conditions': FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                           operations=["=="], values=[1]),
                 'description': 'A ConnectivityMeasure DataType that establishes a colormap for the nodes '
                                'displayed in the 2D Connectivity tabs.'},
                {'name': 'step', 'label': 'Color Threshold', 'type': 'float',
                 'description': 'All nodes with a value greater than or equal to (>=) this threshold will be '
                                'displayed as red discs, otherwise (<) they will be yellow. (This applies to 2D '
                                'Connectivity tabs and the threshold will depend on the metric used to set the '
                                'Node Color)'},
                {'name': 'rays', 'label': 'Shapes Dimensions', 'type': ConnectivityMeasure,
                 'conditions': FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                           operations=["=="], values=[1]),
                 'description': 'A ConnectivityMeasure datatype used to establish the size of the spheres representing '
                                'each node. (It only applies to 3D Nodes tab).'}]
Example #6
    def test_get_filtered_by_column(self):
        """
        Test the filter function when retrieving dataTypes with a filter
        after a column from a class specific table (e.g. DATA_arraywrapper).
        """
        operation_1 = TestFactory.create_operation(
            test_user=self.test_user, test_project=self.test_project)
        operation_2 = TestFactory.create_operation(
            test_user=self.test_user, test_project=self.test_project)

        one_dim_array = numpy.arange(5)
        two_dim_array = numpy.array([[1, 2], [2, 3], [1, 4]])
        self._store_float_array(one_dim_array, "John Doe 1", operation_1.id)
        self._store_float_array(one_dim_array, "John Doe 2", operation_1.id)
        self._store_float_array(two_dim_array, "John Doe 3", operation_2.id)

        count = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 3, "Problems with inserting data")
        first_filter = FilterChain(
            fields=[FilterChain.datatype + '._nr_dimensions'],
            operations=["=="],
            values=[1])
        count = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray",
            first_filter)[1]
        self.assertEqual(count, 2, "Data was not filtered")

        second_filter = FilterChain(
            fields=[FilterChain.datatype + '._nr_dimensions'],
            operations=["=="],
            values=[2])
        filtered_data = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray",
            second_filter)[0]
        self.assertEqual(len(filtered_data), 1, "Data was not filtered")
        self.assertEqual(filtered_data[0][3], "John Doe 3")

        third_filter = FilterChain(
            fields=[FilterChain.datatype + '._length_1d'],
            operations=["=="],
            values=[3])
        filtered_data = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray",
            third_filter)[0]
        self.assertEqual(len(filtered_data), 1,
                         "Data was not filtered correctly")
        self.assertEqual(filtered_data[0][3], "John Doe 3")
        try:
            if os.path.exists('One_dim.txt'):
                os.remove('One_dim.txt')
            if os.path.exists('Two_dim.txt'):
                os.remove('Two_dim.txt')
            if os.path.exists('One_dim-1.txt'):
                os.remove('One_dim-1.txt')
        except Exception:
            pass
Example #7
 def _build_custom_filter(filter_data):
     """
     Param filter_data should be at this point a dictionary of the form:
      {'type': 'filter_type', 'value': 'filter_value'}
     If 'filter_type' is not handled just return None.
     """
     filter_data = json.loads(filter_data)
     if filter_data['type'] == 'from_burst':
          return FilterChain('Burst', [FilterChain.datatype + '.fk_parent_burst'],
                             [filter_data['value']], operations=["=="])
      if filter_data['type'] == 'from_datatype':
          return FilterChain('Datatypes', [FilterChain.operation + '.parameters'],
                             [filter_data['value']], operations=["like"])
     return None
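A hypothetical invocation (the burst id '42' is made up for illustration); Example #24 below shows the factory delegating to this method for custom single_filter values:

# Hypothetical payload, matching the dictionary shape the docstring describes.
burst_filter = StaticFiltersFactory._build_custom_filter(
    '{"type": "from_burst", "value": "42"}')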
Example #8
 def get_input_tree(self):
     return [{'name': 'data_0', 'label': 'Connectivity Measures 1',
              'type': ConnectivityMeasure, 'required': True,
              'conditions': FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                        operations=["=="], values=[1]),
              'description': 'Punctual values for each node in the connectivity matrix. '
                             'This will give the colors of the resulting topographic image.'},
             {'name': 'data_1', 'label': 'Connectivity Measures 2', 'type': ConnectivityMeasure,
              'conditions': FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                        operations=["=="], values=[1]),
              'description': 'Comparative values'},
             {'name': 'data_2', 'label': 'Connectivity Measures 3', 'type': ConnectivityMeasure,
              'conditions': FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                        operations=["=="], values=[1]),
              'description': 'Comparative values'}]
Example #9
    def _import(self, import_file_path, surface_gid, connectivity_gid):
        """
        This method is used for importing region mappings
        :param import_file_path: absolute path of the file to be imported
        """

        # Retrieve Adapter instance
        test_subject = "test"
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.region_mapping_importer',
            'RegionMapping_Importer')
        args = {
            'mapping_file': import_file_path,
            'surface': surface_gid,
            'connectivity': connectivity_gid,
            DataTypeMetaData.KEY_SUBJECT: test_subject
        }

        # Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        # During setup we import a CFF which creates an additional RegionMapping
        # So, here we have to find our mapping (just imported)
        data_filter = FilterChain(fields=[FilterChain.datatype + ".subject"],
                                  operations=["=="],
                                  values=[test_subject])
        region_mapping = self._get_entity(RegionMapping, data_filter)

        return region_mapping
Example #10
 def get_input_tree(self):
     return [{'name': 'measure', 'label': 'Measure', 'type': MappedArray,
              'required': True, 'description': 'A measure to view on anatomy',
              'conditions': FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                        operations=[">="], values=[2])},
             {'name': 'region_mapping_volume', 'label': 'Region mapping',
              'type': RegionVolumeMapping, 'required': False},
             {'name': 'data_slice', 'label': 'slice indices in numpy syntax',
              'type': 'str', 'required': False},
             _MappedArrayVolumeBase.get_background_input_tree()]
Example #11
    def get_input_tree(self):
        """
        Compute interface based on introspected algorithms found.
        """
        algorithm = BaseTimeseriesMetricAlgorithm()
        algorithm.trait.bound = self.INTERFACE_ATTRIBUTES_ONLY
        tree = algorithm.interface[self.INTERFACE_ATTRIBUTES]
        tree[0]['conditions'] = FilterChain(
            fields=[FilterChain.datatype + '._nr_dimensions'],
            operations=["=="],
            values=[4])

        algo_names = self.available_algorithms.keys()
        options = []
        for name in algo_names:
            options.append({
                ABCAdapter.KEY_NAME: name,
                ABCAdapter.KEY_VALUE: name
            })
        tree.append({'name': 'algorithms',
                     'label': 'Selected metrics to be applied',
                     'type': ABCAdapter.TYPE_MULTIPLE,
                     'required': False,
                     'options': options,
                     'description': 'The selected metric algorithms will be applied '
                                    'on the input TimeSeries'})

        return tree
Example #12
 def test_complex_filter(self):
     """
     Test a filter with at least 2 conditions
     """
      test_filter = FilterChain(fields=[FilterChain.datatype + '.attribute_1',
                                        FilterChain.datatype + '.attribute_2'],
                                operations=["==", 'in'],
                                values=['test_val', ['test_val2', 1]])
     self.__should_pass(
         test_filter,
         TestFiltering.DummyFilterClass(attribute_1='test_val',
                                        attribute_2=1))
      self.__should_pass(
          test_filter,
          TestFiltering.DummyFilterClass(attribute_1='test_val',
                                         attribute_2='test_val2'))
     self.__should_fail(
         test_filter,
         TestFiltering.DummyFilterClass(attribute_1='test_val',
                                        attribute_2=2))
     self.__should_fail(
         test_filter,
         TestFiltering.DummyFilterClass(attribute_1='test_val1',
                                        attribute_2=1))
Example #13
 def get_input_tree(self):
     return [{'name': 'region_mapping_volume', 'label': 'Region mapping',
              'type': RegionVolumeMapping, 'required': True},
             {'name': 'connectivity_measure', 'label': 'Connectivity measure',
              'type': ConnectivityMeasure, 'required': False,
              'description': 'A connectivity measure',
              'conditions': FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                        operations=["=="], values=[1])}]
Example #14
    def _import(self, import_file_path, surface_gid, connectivity_gid):
        """
        This method is used for importing region mappings
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        group = dao.find_group(
            'tvb.adapters.uploaders.region_mapping_importer',
            'RegionMapping_Importer')
        importer = ABCAdapter.build_adapter(group)

        args = {
            'mapping_file': import_file_path,
            'surface': surface_gid,
            'connectivity': connectivity_gid,
            DataTypeMetaData.KEY_SUBJECT: "test"
        }

        now = datetime.datetime.now()

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        # During setup we import a CFF which creates an additional RegionMapping
        # So, here we have to find our mapping (just imported)
        data_filter = FilterChain(
            fields=[FilterChain.datatype + ".create_date"],
            operations=[">"],
            values=[now])
        region_mapping = self._get_entity(RegionMapping(), data_filter)

        return region_mapping
Example #15
 def get_input_tree(self):
     return [{'name': 'time_series', 'label': 'Time Series (Region or Surface)',
              'type': TimeSeries, 'required': True,
              'conditions': FilterChain(fields=[FilterChain.datatype + '.type',
                                                FilterChain.datatype + '._has_surface_mapping'],
                                        operations=["in", "=="],
                                        values=[['TimeSeriesRegion', 'TimeSeriesSurface'], True])},
             {'name': 'shell_surface', 'label': 'Shell Surface', 'type': Surface,
              'required': False,
              'description': "Surface to be displayed semi-transparently, for visual purposes only."}]
Example #16
    def getfiltereddatatypes(self, name, parent_div, tree_session_key, filters):
        """
        Given the name from the input tree, the dataType required and a number of
        filters, return the available dataTypes that satisfy the conditions imposed.
        """
        previous_tree = self.context.get_session_tree_for_key(tree_session_key)
        if previous_tree is None:
            common.set_error_message("Adapter Interface not in session for filtering!")
            raise cherrypy.HTTPRedirect("/tvb?error=True")
        current_node = self._get_node(previous_tree, name)
        if current_node is None:
            raise Exception("Could not find node :" + name)
        datatype = current_node[ABCAdapter.KEY_DATATYPE]

        filters = json.loads(filters)
        availablefilter = json.loads(FilterChain.get_filters_for_type(datatype))
        for i, filter_ in enumerate(filters[FILTER_FIELDS]):
            # Check for filter input of type 'date' as these need to be converted
            if filter_ in availablefilter and availablefilter[filter_][FILTER_TYPE] == 'date':
                try:
                    temp_date = string2date(filters[FILTER_VALUES][i], False)
                    filters[FILTER_VALUES][i] = temp_date
                except ValueError:
                    raise
        # In order for the filter object not to "stack up" on multiple calls to
        # this method, create a deepCopy to work with
        if ABCAdapter.KEY_CONDITION in current_node:
            new_filter = copy.deepcopy(current_node[ABCAdapter.KEY_CONDITION])
        else:
            new_filter = FilterChain()
        new_filter.fields.extend(filters[FILTER_FIELDS])
        new_filter.operations.extend(filters[FILTER_OPERATIONS])
        new_filter.values.extend(filters[FILTER_VALUES])
        # Get dataTypes that match the filters from DB then populate with values
        values, total_count = InputTreeManager().populate_option_values_for_dtype(
            common.get_current_project().id,
            datatype, new_filter,
            self.context.get_current_step())
        # Create a dictionary that matches what the template expects
        parameters = {ABCAdapter.KEY_NAME: name,
                      ABCAdapter.KEY_FILTERABLE: availablefilter,
                      ABCAdapter.KEY_TYPE: ABCAdapter.TYPE_SELECT,
                      ABCAdapter.KEY_OPTIONS: values,
                      ABCAdapter.KEY_DATATYPE: datatype}

        if total_count > MAXIMUM_DATA_TYPES_DISPLAYED:
            parameters[KEY_WARNING] = WARNING_OVERFLOW

        if ABCAdapter.KEY_REQUIRED in current_node:
            parameters[ABCAdapter.KEY_REQUIRED] = current_node[ABCAdapter.KEY_REQUIRED]
            if len(values) > 0 and string2bool(str(parameters[ABCAdapter.KEY_REQUIRED])):
                parameters[ABCAdapter.KEY_DEFAULT] = str(values[-1][ABCAdapter.KEY_VALUE])
        previous_selected = self.context.get_current_default(name)
        if previous_selected in [str(vv['value']) for vv in values]:
            parameters[ABCAdapter.KEY_DEFAULT] = previous_selected

        template_specification = {"inputRow": parameters, "disabled": False,
                                  "parentDivId": parent_div, common.KEY_SESSION_TREE: tree_session_key}
        return self.fill_default_attributes(template_specification)
Example #17
 def test_invalid_filter(self):
     """
     Error test-case when evaluating filter in Python.
     """
      test_filter = FilterChain(fields=[FilterChain.datatype + '.attribute_1'],
                                operations=["in"], values=[None])
      self.assertRaises(InvalidFilterEntity, test_filter.get_python_filter_equivalent,
                        FilteringTest.DummyFilterClass(attribute_1=['test_val', 'test2']))
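For contrast, a well-formed 'in' condition supplies a list as its value; a minimal sketch reusing this suite's dummy class:

# Sketch: 'in' expects a list on the right-hand side, so this evaluates cleanly.
valid_filter = FilterChain(fields=[FilterChain.datatype + '.attribute_1'],
                           operations=["in"], values=[['test_val', 'test2']])
valid_filter.get_python_filter_equivalent(
    FilteringTest.DummyFilterClass(attribute_1='test_val', attribute_2=1))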
Example #18
    def test_bad_reference(self):
        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        field = FilterChain.datatype + '.subject'
        filters = FilterChain('', [field], [TEST_SUBJECT_A], ['!='])
        bad_reference_connectivity = TestFactory.get_entity(self.test_project, Connectivity(), filters)

        with pytest.raises(OperationException):
            self._import_csv_test_connectivity(bad_reference_connectivity.gid, TEST_SUBJECT_A)
Example #19
 def test_invalid_input(self):
     """
     Error test-case.
     """
      test_filter = FilterChain(fields=[FilterChain.datatype + '.other_attribute_1'],
                                operations=["in"], values=['test'])
      self.assertRaises(InvalidFilterChainInput, test_filter.get_python_filter_equivalent,
                        FilteringTest.DummyFilterClass(attribute_1=['test_val', 'test2']))
Example #20
    def test_filter_sql_equivalent(self):
        """
        Test applying a filter on DB.
        """
        data_type = Datatype1()
        data_type.row1 = "value1"
        data_type.row2 = "value2"
        datatypes_factory.DatatypesFactory()._store_datatype(data_type)
        data_type = Datatype1()
        data_type.row1 = "value3"
        data_type.row2 = "value2"
        datatypes_factory.DatatypesFactory()._store_datatype(data_type)
        data_type = Datatype1()
        data_type.row1 = "value1"
        data_type.row2 = "value3"
        datatypes_factory.DatatypesFactory()._store_datatype(data_type)

        test_filter_1 = FilterChain(fields=[FilterChain.datatype + '._row1'],
                                    operations=['=='],
                                    values=['value1'])
        test_filter_2 = FilterChain(fields=[FilterChain.datatype + '._row1'],
                                    operations=['=='],
                                    values=['value2'])
        test_filter_3 = FilterChain(fields=[FilterChain.datatype + '._row1',
                                            FilterChain.datatype + '._row2'],
                                    operations=['==', 'in'],
                                    values=["value1", ['value1', 'value2']])
        test_filter_4 = FilterChain(fields=[FilterChain.datatype + '._row1',
                                            FilterChain.datatype + '._row2'],
                                    operations=['==', 'in'],
                                    values=["value1", ['value5', 'value6']])

        all_stored_dts = self.get_all_entities(Datatype1)
        self.assertTrue(len(all_stored_dts) == 3,
                        "Expected 3 DTs to be stored for "
                        "test_filter_sql_equivalent. Got %s instead." % len(all_stored_dts))

        self._evaluate_db_filter(test_filter_1, 2)
        self._evaluate_db_filter(test_filter_2, 0)
        self._evaluate_db_filter(test_filter_3, 1)
        self._evaluate_db_filter(test_filter_4, 0)
Example #21
def search_and_export_ts(project_id, export_folder=os.path.join("~", "TVB")):

    #### This is the simplest filter you could write: filter an entity by Subject
    filter_connectivity = FilterChain(
        fields=[FilterChain.datatype + '.subject'],
        operations=["=="],
        values=[DataTypeMetaData.DEFAULT_SUBJECT])

    connectivities = _retrieve_entities_by_filters(Connectivity, project_id,
                                                   filter_connectivity)

    #### A more complex filter: by linked entity (connectivity), BOLD monitor, sampling, operation param:
    filter_timeseries = FilterChain(
        fields=[
            FilterChain.datatype + '._connectivity',
            FilterChain.datatype + '._title',
            FilterChain.datatype + '._sample_period',
            FilterChain.datatype + '._sample_rate',
            FilterChain.operation + '.parameters'
        ],
        operations=["==", "like", ">=", "<=", "like"],
        values=[
            connectivities[0].gid, "Bold", "500", "0.002",
            '"conduction_speed": "3.0"'
        ])

    #### If you want to filter another type of TS, change the kind of class below:
    #### instead of TimeSeriesRegion use TimeSeriesEEG, or TimeSeriesSurface, etc.
    timeseries = _retrieve_entities_by_filters(TimeSeriesRegion, project_id,
                                               filter_timeseries)

    for ts in timeseries:
        print("=============================")
        print(ts.summary_info)
        print(" Original file: " + str(ts.get_storage_file_path()))
        destination_file = os.path.expanduser(
            os.path.join(export_folder, ts.get_storage_file_name()))
        FilesHelper.copy_file(ts.get_storage_file_path(), destination_file)
        if os.path.exists(destination_file):
            print(" TS file copied at: " + destination_file)
        else:
            print(" An error occurred while copying the file to the destination folder!")
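A hypothetical entry point (the project id is illustrative; the export folder defaults to ~/TVB as in the signature):

if __name__ == "__main__":
    # Export the default subject's BOLD region time-series from project 1.
    search_and_export_ts(1)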
Example #22
 def get_input_tree(self):
     """
     Take as Input a Connectivity Object.
     """
     return [{'name': 'datatype_group',
              'label': 'Datatype Group',
              'type': DataTypeGroup,
              'required': True,
              'conditions': FilterChain(fields=[FilterChain.datatype + ".no_of_ranges"],
                                        operations=["=="], values=[2])}]
Example #23
 def get_input_tree(self):
     return [{'name': 'time_series', 'label': 'Time Series (Region or Surface)',
              'type': TimeSeries, 'required': True,
              'conditions': FilterChain(fields=[FilterChain.datatype + '.type',
                                                FilterChain.datatype + '._nr_dimensions'],
                                        operations=["in", "=="],
                                        values=[['TimeSeriesRegion', 'TimeSeriesSurface'], 4]),
               'description': 'Depending on the simulation length and your browser capabilities, you might'
                              ' experience browser crashes after multiple runs. In such cases, it is'
                              ' recommended to empty the browser cache and try again. Sorry for the'
                              ' inconvenience.'}]
Example #24
 def build_datatype_filters(selected=RELEVANT_VIEW, single_filter=None):
     """
     Return all visibility filters for data structure page, or only one filter.
     """
     filters = {StaticFiltersFactory.FULL_VIEW: FilterChain(StaticFiltersFactory.FULL_VIEW),
                StaticFiltersFactory.RELEVANT_VIEW: FilterChain(StaticFiltersFactory.RELEVANT_VIEW,
                                                                [FilterChain.datatype + '.visible'],
                                                                [True], operations=["=="])}
     if selected is None or len(selected) == 0:
         selected = StaticFiltersFactory.RELEVANT_VIEW
     if selected in filters:
         filters[selected].selected = True
     if single_filter is not None:
         if single_filter in filters:
             return filters[single_filter]
         else:
             ### We have some custom filter to build
             return StaticFiltersFactory._build_custom_filter(single_filter)
     return filters.values()
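Usage sketch: passing single_filter returns a single FilterChain instead of the dict's values, and an unknown name falls through to _build_custom_filter (Example #7):

# Sketch: fetch only the "relevant view" visibility filter.
relevant_only = StaticFiltersFactory.build_datatype_filters(
    single_filter=StaticFiltersFactory.RELEVANT_VIEW)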
Example #25
 def test_filter_add_condition(self):
     """
     Test that adding a condition to a filter is working.
     """
      test_filter = FilterChain(fields=[FilterChain.datatype + '.attribute_1'],
                                operations=["=="], values=['test_val'])
      filter_input = FilteringTest.DummyFilterClass(attribute_1='test_val', attribute_2=1)
      self.__should_pass(test_filter, filter_input)
      test_filter.add_condition(FilterChain.datatype + '.attribute_2', '==', 2)
     self.__should_fail(test_filter, filter_input)
Example #26
 def get_input_tree(self):
     return [
         dict(name="connectivity",
              label=self._ui_connectivity_label,
              type=Connectivity,
              required=True,
              conditions=FilterChain(
                  fields=[FilterChain.datatype + '._undirected'],
                  operations=["=="],
                  values=['1']))
     ]
Example #27
 def get_input_tree(self):
     """
     Return a list of lists describing the interface to the analyzer. This
     is used by the GUI to generate the menus and fields necessary for
     defining a simulation.
     """
     algorithm = NodeCovariance()
     algorithm.trait.bound = self.INTERFACE_ATTRIBUTES_ONLY
     tree = algorithm.interface[self.INTERFACE_ATTRIBUTES]
      tree[0]['conditions'] = FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                          operations=["=="], values=[4])
     return tree
Example #28
    def get_input_tree(self):
        # todo: filter connectivity measures: same length as regions and 1-dimensional

        filters_ui = [
            UIFilter(linked_elem_name="region_map",
                     linked_elem_field=FilterChain.datatype + "._surface"),
            # UIFilter(linked_elem_name="connectivity_measure",
            #          linked_elem_field=FilterChain.datatype + "._surface")
        ]

        json_ui_filter = json.dumps(
            [ui_filter.to_dict() for ui_filter in filters_ui])

        return [{
            'name': 'surface',
            'label': 'Brain surface',
            'type': Surface,
            'required': True,
            'description': '',
            KWARG_FILTERS_UI: json_ui_filter
        }, {
            'name': 'region_map',
            'label': 'Region mapping',
            'type': RegionMapping,
            'required': False,
            'description': 'A region map'
        }, {
            'name': 'connectivity_measure',
            'label': 'Connectivity measure',
            'type': ConnectivityMeasure,
            'required': False,
            'description': 'A connectivity measure',
            'conditions': FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                      operations=["=="], values=[1])
        }, {
            'name': 'shell_surface',
            'label': 'Shell Surface',
            'type': Surface,
            'required': False,
            'description': "Face surface to be displayed semi-transparently, for orientation only."
        }]
Example #29
    def test_get_filtered_datatypes(self):
        """
        Test the filter function when retrieving dataTypes.
        """
        # Create some test operations
        start_dates = [datetime.now(),
                       datetime.strptime("08-06-2010", "%m-%d-%Y"),
                       datetime.strptime("07-21-2010", "%m-%d-%Y"),
                       datetime.strptime("05-06-2010", "%m-%d-%Y"),
                       datetime.strptime("07-21-2011", "%m-%d-%Y")]
        end_dates = [datetime.now(),
                     datetime.strptime("08-12-2010", "%m-%d-%Y"),
                     datetime.strptime("08-12-2010", "%m-%d-%Y"),
                     datetime.strptime("08-12-2011", "%m-%d-%Y"),
                     datetime.strptime("08-12-2011", "%m-%d-%Y")]
        for i in range(5):
            operation = model.Operation(self.test_user.id, self.test_project.id, self.algo_inst.id, 'test params',
                                        status="FINISHED", start_date=start_dates[i], completion_date=end_dates[i])
            operation = dao.store_entity(operation)
            storage_path = FilesHelper().get_project_folder(self.test_project, str(operation.id))
            if i < 4:
                datatype_inst = Datatype1()
                datatype_inst.type = "Datatype1"
                datatype_inst.subject = "John Doe" + str(i)
                datatype_inst.state = "RAW"
                datatype_inst.set_operation_id(operation.id)
                dao.store_entity(datatype_inst)
            else:
                for _ in range(2):
                    datatype_inst = Datatype2()
                    datatype_inst.storage_path = storage_path
                    datatype_inst.type = "Datatype2"
                    datatype_inst.subject = "John Doe" + str(i)
                    datatype_inst.state = "RAW"
                    datatype_inst.string_data = ["data"]
                    datatype_inst.set_operation_id(operation.id)
                    dao.store_entity(datatype_inst)

        returned_data = self.flow_service.get_available_datatypes(self.test_project.id,
                                                                  "tvb_test.datatypes.datatype1.Datatype1")
        for row in returned_data:
            if row[1] != 'Datatype1':
                self.fail("Some invalid data was returned!")
        self.assertEqual(4, len(returned_data), "Invalid length of result")

        filter_op = FilterChain(fields=[FilterChain.datatype + ".state", FilterChain.operation + ".start_date"],
                                values=["RAW", datetime.strptime("08-01-2010", "%m-%d-%Y")], operations=["==", ">"])
        returned_data = self.flow_service.get_available_datatypes(self.test_project.id,
                                                                  "tvb_test.datatypes.datatype1.Datatype1", filter_op)
        returned_subjects = [one_data[3] for one_data in returned_data]

        if "John Doe0" not in returned_subjects or "John Doe1" not in returned_subjects or len(returned_subjects) != 2:
            self.fail("DataTypes were not filtered properly!")
Example #30
 def _get_filter(cls, nodes_list):
     """Get default filter"""
     fields = None
     values = None
     operations = None
     for node in nodes_list:
         if node.nodeName == ELEM_COND_FIELDS:
             fields = eval(node.getAttribute(ATT_FILTER_VALUES))
         if node.nodeName == ELEM_COND_OPS:
             operations = eval(node.getAttribute(ATT_FILTER_VALUES))
         if node.nodeName == ELEM_COND_VALUES:
             values = eval(node.getAttribute(ATT_FILTER_VALUES))
     return FilterChain(fields=fields, values=values, operations=operations)